_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
3b3707ea4d236a1ddc4a9da0b33d54f3d4794d45ff4cffccee749792f5066b03 | project-fifo/vmwebadm | core.cljs | (ns client.core
(:use-macros [clojure.core.match.js :only [match]])
(:require
[cljs.reader :as reader]
[clojure.string :as c.s]
[cljs.nodejs :as node]))
(def dbfile "/var/db/vmwebadm/db.clj")
(def fs (node/require "fs"))
(def crypto (node/require "crypto"))
(def util (node/require "util"))
(def cp
(node/require "child_process"))
(defn slurp [f]
(str (.readFileSync fs f)))
(defn read [f]
(reader/read-string (slurp f)))
(defn get-option [path default]
(get-in (read dbfile) path default))
(defn update-config [update-fn]
(.writeFileSync fs dbfile (pr-str (update-fn (read dbfile)))))
(defn hash-str [str]
(-> ( .createHash crypto "sha512")
(.update str)
(.digest "base64")))
(defn help []
(print "Configuration tool\n"
" import package <package-file(s)> - imports one or more package files.\n"
" default-dataset <dataset> - sets the default dataset.\n"
" passwd <user> <pass> - adds a new user or resets a password for an existing one.\n"
" list users - lists all known users\n"
" admin network <net/mask> gw <gw> - defines the ip range for the admin network.\n"
" ext network <net/mask> gw <gw> - defines the ip range for the admin network.\n"
" promote <user> - grants a user admin rights.\n"
" demote <user> - demotes a user from admin rights.\n"
" delete <user> - deletes a user.\n"
" debug <level> - sets log level.\n"
" port <port> - sets the listen port for the server.\n"
" host <host> - sets the listen host for the server.\n"
" resolvers add <ip> - adds a resolver.\n"
" show [debug|host|port] - shows the configuration options.\n"
))
(defn import-pacakge [p]
(let [p (read p)
name (p :name)
p (dissoc p :name)]
(print "updating package:" (pr-str p) "\n")
(update-config #(assoc-in % [:packages name] p))))
(defn format-users [[name u]]
(print (.format util " [%s] | [%s] | %s | %s\n"
(if (:admin u) "*" " ")
(if (:keys u) "*" " ")
(:uuid u) name)))
(defn list-users []
(print "Admin | Key | UUID | Login\n")
(print "------+-----+--------------------------------------+------------------------\n")
(doall
(map
format-users
(:users (read dbfile)))))
(defn passwd-user [user passwd]
(.exec cp
"uuid"
(fn [error stdout stderr]
(let [uuid (c.s/replace stdout #"\n" "")]
(update-config
#(if (get-in % [:users user :uuid])
(assoc-in % [:users user :passwd] (hash-str (str user ":" passwd)))
(assoc-in
(assoc-in % [:users user :passwd] (hash-str (str user ":" passwd)))
[:users user :uuid] uuid)))))))
(defn bit [x n]
(bit-and (bit-shift-right x (* n 8)) 0xFF))
(defn to-bytes [x]
[(bit x 3) (bit x 2) (bit x 1) (bit x 0)])
(defn to-ip [x]
(let [[a b c d] (to-bytes x)]
(str a "." b "." c "." d)))
(defn from-bytes [a b c d]
(+ (bit-shift-left a 24)
(bit-shift-left b 16)
(bit-shift-left c 8)
d))
(defn mask [x]
(let [n (- 32 x)]
(bit-shift-left (bit-shift-right 0xFFFFFFFF n) n)))
(defn from-ip [ip]
(apply from-bytes (map #(js/parseInt %) (rest (re-matches #"(\d+)\.(\d+)\.(\d+)\.(\d+)" ip)))))
(defn range [net]
(let [[a b c d n]
(map #(js/parseInt %) (rest (re-matches #"(\d+)\.(\d+)\.(\d+)\.(\d+)/(\d+)" net)))
x (from-bytes a b c d)
m (mask n)
base (bit-and x m)
last (+ base (bit-not m))]
{:network base
:mask m
:first (inc base)
:last (dec last)
:free []
:broadcast last}))
(defn network [net gw]
(assoc (range net)
:gw (from-ip gw)))
(defn start [& args]
(if (empty? args)
(help)
(match [(vec args)]
[["import" "package" & pkgs]]
(do
(print "packages: " (pr-str pkgs) "\n")
(doseq [pkg pkgs] (import-pacakge pkg)))
[["admin" "network" net "gw" gw]]
(update-config #(assoc-in % [:network :admin] (network net gw)))
[["ext" "network" net "gw" gw]]
(update-config #(assoc-in % [:network :ext] (network net gw)))
[["default-dataset" dataset]]
(update-config #(assoc-in % [:default-dataset] dataset))
[["passwd" user passwd]]
(passwd-user user passwd)
[["promote" user]]
(update-config #(if (get-in % [:users user])
(assoc-in % [:users user :admin] true)
(do
(print "Unknown user" (str user ".\n"))
%)))
[["demote" user]]
(update-config #(if (get-in % [:users user])
(assoc-in % [:users user :admin] false)
(do
(print "Unknown user" (str user ".\n"))
%)))
[["delete" user]]
(update-config #(update-in % [:users] (fn [m] (dissoc m user))))
[["list" "users"]]
(list-users)
[["debug" lvl]]
(update-config #(assoc-in % [:debug] (js/parseInt lvl)))
[["port" port]]
(update-config #(assoc-in % [:server :port] (js/parseInt port)))
[["host" host]]
(update-config #(assoc-in % [:server :host] host))
[["resolvers" "add" ip]]
(update-config #(update-in % [:resolvers] conj ip))
[["show" "debug"]]
(print "debug:" (get-option [:debug] 0) "\n")
[["show" "port"]]
(print "port:" (get-option [:server :port] 80) "\n")
[["show" "host"]]
(print "host:" (get-option [:server :host] "0.0.0.0") "\n")
[["help"]]
(help)
:else
(do
(print "Unknown command: "
(pr m))
(print "\n")
(help)))))
(set! *main-cli-fn* start)
| null | https://raw.githubusercontent.com/project-fifo/vmwebadm/55d83bbc0ac6db8ea1d784c73d91bf4f228fa04a/src/client/core.cljs | clojure | (ns client.core
(:use-macros [clojure.core.match.js :only [match]])
(:require
[cljs.reader :as reader]
[clojure.string :as c.s]
[cljs.nodejs :as node]))
(def dbfile "/var/db/vmwebadm/db.clj")
(def fs (node/require "fs"))
(def crypto (node/require "crypto"))
(def util (node/require "util"))
(def cp
(node/require "child_process"))
(defn slurp [f]
(str (.readFileSync fs f)))
(defn read [f]
(reader/read-string (slurp f)))
(defn get-option [path default]
(get-in (read dbfile) path default))
(defn update-config [update-fn]
(.writeFileSync fs dbfile (pr-str (update-fn (read dbfile)))))
(defn hash-str [str]
(-> ( .createHash crypto "sha512")
(.update str)
(.digest "base64")))
(defn help []
(print "Configuration tool\n"
" import package <package-file(s)> - imports one or more package files.\n"
" default-dataset <dataset> - sets the default dataset.\n"
" passwd <user> <pass> - adds a new user or resets a password for an existing one.\n"
" list users - lists all known users\n"
" admin network <net/mask> gw <gw> - defines the ip range for the admin network.\n"
" ext network <net/mask> gw <gw> - defines the ip range for the admin network.\n"
" promote <user> - grants a user admin rights.\n"
" demote <user> - demotes a user from admin rights.\n"
" delete <user> - deletes a user.\n"
" debug <level> - sets log level.\n"
" port <port> - sets the listen port for the server.\n"
" host <host> - sets the listen host for the server.\n"
" resolvers add <ip> - adds a resolver.\n"
" show [debug|host|port] - shows the configuration options.\n"
))
(defn import-pacakge [p]
(let [p (read p)
name (p :name)
p (dissoc p :name)]
(print "updating package:" (pr-str p) "\n")
(update-config #(assoc-in % [:packages name] p))))
(defn format-users [[name u]]
(print (.format util " [%s] | [%s] | %s | %s\n"
(if (:admin u) "*" " ")
(if (:keys u) "*" " ")
(:uuid u) name)))
(defn list-users []
(print "Admin | Key | UUID | Login\n")
(print "------+-----+--------------------------------------+------------------------\n")
(doall
(map
format-users
(:users (read dbfile)))))
(defn passwd-user [user passwd]
(.exec cp
"uuid"
(fn [error stdout stderr]
(let [uuid (c.s/replace stdout #"\n" "")]
(update-config
#(if (get-in % [:users user :uuid])
(assoc-in % [:users user :passwd] (hash-str (str user ":" passwd)))
(assoc-in
(assoc-in % [:users user :passwd] (hash-str (str user ":" passwd)))
[:users user :uuid] uuid)))))))
(defn bit [x n]
(bit-and (bit-shift-right x (* n 8)) 0xFF))
(defn to-bytes [x]
[(bit x 3) (bit x 2) (bit x 1) (bit x 0)])
(defn to-ip [x]
(let [[a b c d] (to-bytes x)]
(str a "." b "." c "." d)))
(defn from-bytes [a b c d]
(+ (bit-shift-left a 24)
(bit-shift-left b 16)
(bit-shift-left c 8)
d))
(defn mask [x]
(let [n (- 32 x)]
(bit-shift-left (bit-shift-right 0xFFFFFFFF n) n)))
(defn from-ip [ip]
(apply from-bytes (map #(js/parseInt %) (rest (re-matches #"(\d+)\.(\d+)\.(\d+)\.(\d+)" ip)))))
(defn range [net]
(let [[a b c d n]
(map #(js/parseInt %) (rest (re-matches #"(\d+)\.(\d+)\.(\d+)\.(\d+)/(\d+)" net)))
x (from-bytes a b c d)
m (mask n)
base (bit-and x m)
last (+ base (bit-not m))]
{:network base
:mask m
:first (inc base)
:last (dec last)
:free []
:broadcast last}))
(defn network [net gw]
(assoc (range net)
:gw (from-ip gw)))
(defn start [& args]
(if (empty? args)
(help)
(match [(vec args)]
[["import" "package" & pkgs]]
(do
(print "packages: " (pr-str pkgs) "\n")
(doseq [pkg pkgs] (import-pacakge pkg)))
[["admin" "network" net "gw" gw]]
(update-config #(assoc-in % [:network :admin] (network net gw)))
[["ext" "network" net "gw" gw]]
(update-config #(assoc-in % [:network :ext] (network net gw)))
[["default-dataset" dataset]]
(update-config #(assoc-in % [:default-dataset] dataset))
[["passwd" user passwd]]
(passwd-user user passwd)
[["promote" user]]
(update-config #(if (get-in % [:users user])
(assoc-in % [:users user :admin] true)
(do
(print "Unknown user" (str user ".\n"))
%)))
[["demote" user]]
(update-config #(if (get-in % [:users user])
(assoc-in % [:users user :admin] false)
(do
(print "Unknown user" (str user ".\n"))
%)))
[["delete" user]]
(update-config #(update-in % [:users] (fn [m] (dissoc m user))))
[["list" "users"]]
(list-users)
[["debug" lvl]]
(update-config #(assoc-in % [:debug] (js/parseInt lvl)))
[["port" port]]
(update-config #(assoc-in % [:server :port] (js/parseInt port)))
[["host" host]]
(update-config #(assoc-in % [:server :host] host))
[["resolvers" "add" ip]]
(update-config #(update-in % [:resolvers] conj ip))
[["show" "debug"]]
(print "debug:" (get-option [:debug] 0) "\n")
[["show" "port"]]
(print "port:" (get-option [:server :port] 80) "\n")
[["show" "host"]]
(print "host:" (get-option [:server :host] "0.0.0.0") "\n")
[["help"]]
(help)
:else
(do
(print "Unknown command: "
(pr m))
(print "\n")
(help)))))
(set! *main-cli-fn* start)
| |
c5fcba6a002f67088f0599390fb3566fa474ab527b116b7850ed75c92992457a | JeffreyBenjaminBrown/hode | Graph.hs | module Graph where
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
type Graph = Map Int (Set Int)
children :: Graph -> Int -> Set Int
children g i = maybe S.empty id $ M.lookup i g
parents :: Graph -> Int -> Set Int
parents g i = M.keysSet $ M.filter (elem i) g
| null | https://raw.githubusercontent.com/JeffreyBenjaminBrown/hode/79a54a6796fa01570cde6903b398675c42954e62/earlier-work/unify-subst/src/Graph.hs | haskell | module Graph where
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
type Graph = Map Int (Set Int)
children :: Graph -> Int -> Set Int
children g i = maybe S.empty id $ M.lookup i g
parents :: Graph -> Int -> Set Int
parents g i = M.keysSet $ M.filter (elem i) g
| |
59091b08254f11edc9bbf2b1afb58523d6546df8401299420a591cfeeffc7673 | fetburner/Coq2SML | cemitcodes.mli | open Names
open Cbytecodes
type reloc_info =
| Reloc_annot of annot_switch
| Reloc_const of structured_constant
| Reloc_getglobal of constant
type patch = reloc_info * int
(* A virer *)
val subst_patch : Mod_subst.substitution -> patch -> patch
type emitcodes
val copy : emitcodes -> emitcodes
val length : emitcodes -> int
pos
type to_patch = emitcodes * (patch list) * fv
val subst_to_patch : Mod_subst.substitution -> to_patch -> to_patch
type body_code =
| BCdefined of to_patch
| BCallias of constant
| BCconstant
type to_patch_substituted
val from_val : body_code -> to_patch_substituted
val force : to_patch_substituted -> body_code
val subst_to_patch_subst : Mod_subst.substitution -> to_patch_substituted -> to_patch_substituted
val repr_body_code :
to_patch_substituted -> Mod_subst.substitution list option * body_code
val to_memory : bytecodes * bytecodes * fv -> to_patch
(** init code, fun code, fv *)
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/kernel/cemitcodes.mli | ocaml | A virer
* init code, fun code, fv | open Names
open Cbytecodes
type reloc_info =
| Reloc_annot of annot_switch
| Reloc_const of structured_constant
| Reloc_getglobal of constant
type patch = reloc_info * int
val subst_patch : Mod_subst.substitution -> patch -> patch
type emitcodes
val copy : emitcodes -> emitcodes
val length : emitcodes -> int
pos
type to_patch = emitcodes * (patch list) * fv
val subst_to_patch : Mod_subst.substitution -> to_patch -> to_patch
type body_code =
| BCdefined of to_patch
| BCallias of constant
| BCconstant
type to_patch_substituted
val from_val : body_code -> to_patch_substituted
val force : to_patch_substituted -> body_code
val subst_to_patch_subst : Mod_subst.substitution -> to_patch_substituted -> to_patch_substituted
val repr_body_code :
to_patch_substituted -> Mod_subst.substitution list option * body_code
val to_memory : bytecodes * bytecodes * fv -> to_patch
|
fa4621af2db86d88226e96ea32a742ad649e7cd8a3f1d24d8c601b2380e3cb53 | mfoemmel/erlang-otp | snmpm_user_old.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(snmpm_user_old).
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[{handle_error, 3},
{handle_agent, 4},
{handle_pdu, 5},
{handle_trap, 4},
{handle_inform, 4},
{handle_report, 4}];
behaviour_info(_) ->
undefined.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/snmp/src/manager/snmpm_user_old.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
| Copyright Ericsson AB 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(snmpm_user_old).
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[{handle_error, 3},
{handle_agent, 4},
{handle_pdu, 5},
{handle_trap, 4},
{handle_inform, 4},
{handle_report, 4}];
behaviour_info(_) ->
undefined.
|
e5cabbf40b7d8a993e661ae4a6b2c15bf1f01cd7d29c10fe4ca4880307721671 | haskell-webgear/webgear | OpenApi.hs | | Main module for WebGear OpenAPI support .
Import this module to get all required types and functions for
generating OpenAPI documentation . Alternatively , import individual
modules under @WebGear . OpenApi@.
Typical usage to generate OpenAPI :
@
import WebGear . OpenApi
import Data . OpenApi ( OpenApi )
myHandler : : Handler h m = > h ' [ ]
myHandler = ....
documentation : : OpenApi
documentation = toOpenApi myHandler
@
Import this module to get all required types and functions for
generating OpenAPI documentation. Alternatively, import individual
modules under @WebGear.OpenApi@.
Typical usage to generate OpenAPI:
@
import WebGear.OpenApi
import Data.OpenApi (OpenApi)
myHandler :: Handler h m => RequestHandler h '[]
myHandler = ....
documentation :: OpenApi
documentation = toOpenApi myHandler
@
-}
module WebGear.OpenApi (
module WebGear.Core,
module WebGear.OpenApi.Handler,
) where
import WebGear.Core
import WebGear.OpenApi.Handler
import WebGear.OpenApi.Traits ()
| null | https://raw.githubusercontent.com/haskell-webgear/webgear/60e5547f9450aac36727e8d9980e0a8cbdb69660/webgear-openapi/src/WebGear/OpenApi.hs | haskell | | Main module for WebGear OpenAPI support .
Import this module to get all required types and functions for
generating OpenAPI documentation . Alternatively , import individual
modules under @WebGear . OpenApi@.
Typical usage to generate OpenAPI :
@
import WebGear . OpenApi
import Data . OpenApi ( OpenApi )
myHandler : : Handler h m = > h ' [ ]
myHandler = ....
documentation : : OpenApi
documentation = toOpenApi myHandler
@
Import this module to get all required types and functions for
generating OpenAPI documentation. Alternatively, import individual
modules under @WebGear.OpenApi@.
Typical usage to generate OpenAPI:
@
import WebGear.OpenApi
import Data.OpenApi (OpenApi)
myHandler :: Handler h m => RequestHandler h '[]
myHandler = ....
documentation :: OpenApi
documentation = toOpenApi myHandler
@
-}
module WebGear.OpenApi (
module WebGear.Core,
module WebGear.OpenApi.Handler,
) where
import WebGear.Core
import WebGear.OpenApi.Handler
import WebGear.OpenApi.Traits ()
| |
1bfe52d87eed17eb4f14bdff227040e117a5081ca776f4ffbf1bc467a88e5528 | pixlsus/registry.gimp.org_static | octave-sharpening.scm | ;octave-sharpening.scm
;
by
;
; found at -127-octave-sharpening/
; more information at
;
; Version 1.0 (20091209)
;
; License:
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
The GNU Public License is available at
;
(define (script-fu-octave-sharpening img drw)
(let*
(
(layer1 0)
(layer2 0)
(layer3 0)
(layer4 0)
(layer_os 0)
)
; start
(gimp-context-push)
(gimp-image-undo-group-start img)
copy layer 4 times
(set! layer1 (car (gimp-layer-copy drw FALSE)))
(set! layer2 (car (gimp-layer-copy drw FALSE)))
(set! layer3 (car (gimp-layer-copy drw FALSE)))
(set! layer4 (car (gimp-layer-copy drw FALSE)))
; set opacity of layer
(gimp-layer-set-opacity layer1 12.5)
(gimp-layer-set-opacity layer2 25.0)
(gimp-layer-set-opacity layer3 50.0)
(gimp-layer-set-opacity layer4 100.0)
; add created layers
(gimp-image-add-layer img layer1 0)
(gimp-image-add-layer img layer2 1)
(gimp-image-add-layer img layer3 2)
(gimp-image-add-layer img layer4 3)
; unsharp masking layers
(plug-in-unsharp-mask TRUE img layer1 4.0 5.0 0)
(plug-in-unsharp-mask TRUE img layer2 2.0 5.0 0)
(plug-in-unsharp-mask TRUE img layer3 1.0 5.0 0)
(plug-in-unsharp-mask TRUE img layer4 0.5 5.0 0)
; copy temp layers
(set! layer_os (car (gimp-layer-new-from-visible img img "Octave Sharpening")))
; delete temp layers
(gimp-image-remove-layer img layer1)
(gimp-image-remove-layer img layer2)
(gimp-image-remove-layer img layer3)
(gimp-image-remove-layer img layer4)
; add octave layer
(gimp-image-add-layer img layer_os 0)
; set layer mode
(gimp-layer-set-mode layer_os SATURATION-MODE)
; done
(gimp-image-undo-group-end img)
(gimp-displays-flush)
(gimp-context-pop)
)
)
(script-fu-register "script-fu-octave-sharpening"
"<Image>/Filters/Enhance/Octave Sharpening"
"Octave Sharpening - a special technique for intense sharpening"
"Andreas Schönfelder <passtschu at freenet dot de>"
"Andreas Schönfelder <passtschu at freenet dot de>"
"2009-12-09"
"*"
SF-IMAGE "image" 0
SF-DRAWABLE "drawable" 0
)
| null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/octave-sharpening.scm | scheme | octave-sharpening.scm
found at -127-octave-sharpening/
more information at
Version 1.0 (20091209)
License:
This program is free software; you can redistribute it and/or modify
either version 2 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
start
set opacity of layer
add created layers
unsharp masking layers
copy temp layers
delete temp layers
add octave layer
set layer mode
done
| by
it under the terms of the GNU General Public License as published by
The GNU Public License is available at
(define (script-fu-octave-sharpening img drw)
(let*
(
(layer1 0)
(layer2 0)
(layer3 0)
(layer4 0)
(layer_os 0)
)
(gimp-context-push)
(gimp-image-undo-group-start img)
copy layer 4 times
(set! layer1 (car (gimp-layer-copy drw FALSE)))
(set! layer2 (car (gimp-layer-copy drw FALSE)))
(set! layer3 (car (gimp-layer-copy drw FALSE)))
(set! layer4 (car (gimp-layer-copy drw FALSE)))
(gimp-layer-set-opacity layer1 12.5)
(gimp-layer-set-opacity layer2 25.0)
(gimp-layer-set-opacity layer3 50.0)
(gimp-layer-set-opacity layer4 100.0)
(gimp-image-add-layer img layer1 0)
(gimp-image-add-layer img layer2 1)
(gimp-image-add-layer img layer3 2)
(gimp-image-add-layer img layer4 3)
(plug-in-unsharp-mask TRUE img layer1 4.0 5.0 0)
(plug-in-unsharp-mask TRUE img layer2 2.0 5.0 0)
(plug-in-unsharp-mask TRUE img layer3 1.0 5.0 0)
(plug-in-unsharp-mask TRUE img layer4 0.5 5.0 0)
(set! layer_os (car (gimp-layer-new-from-visible img img "Octave Sharpening")))
(gimp-image-remove-layer img layer1)
(gimp-image-remove-layer img layer2)
(gimp-image-remove-layer img layer3)
(gimp-image-remove-layer img layer4)
(gimp-image-add-layer img layer_os 0)
(gimp-layer-set-mode layer_os SATURATION-MODE)
(gimp-image-undo-group-end img)
(gimp-displays-flush)
(gimp-context-pop)
)
)
(script-fu-register "script-fu-octave-sharpening"
"<Image>/Filters/Enhance/Octave Sharpening"
"Octave Sharpening - a special technique for intense sharpening"
"Andreas Schönfelder <passtschu at freenet dot de>"
"Andreas Schönfelder <passtschu at freenet dot de>"
"2009-12-09"
"*"
SF-IMAGE "image" 0
SF-DRAWABLE "drawable" 0
)
|
9c48b047fe9c6f755cbc270db9caddea5227bcc946a947bb05af96ccdd2c63b7 | PolyB/kvm | Args.hs | # LANGUAGE ViewPatterns #
module Args (parseArgs, OptionMonad) where
import System.Linux.Kvm.Components.Init
import System.Linux.Kvm.Components.Ram
import Data.List
import Options.Applicative
type OptionMonad = ConfigRamT (InitT IO)
data Options = Options
{ file :: String
, initrd :: (Maybe String)
, ram :: (Maybe Int)
, cmdline :: [String]
}
opts::Parser Options
opts = Options
<$> argument str (metavar "FILE")
<*> optional (strOption (long "initrd"))
<*> optional (option auto (long "ram" <> short 'm' <> metavar "BYTECOUNT"))
<*> many (argument str (metavar "cmdline..."))
continueWithOpts:: Options -> OptionMonad () -> IO ()
continueWithOpts (Options kern initrd ram cmdline) m = runInit kern initrd (intercalate " " cmdline) $ runConfigRam ram m
parseArgs :: OptionMonad () -> IO ()
parseArgs continue = execParser options >>= \opt -> continueWithOpts opt continue
where options = info (opts <**> helper)
(fullDesc
<> progDesc "kvm"
<> header "kvm")
| null | https://raw.githubusercontent.com/PolyB/kvm/a0431d6558906ec90376864ac665a27978a6a671/app/Args.hs | haskell | # LANGUAGE ViewPatterns #
module Args (parseArgs, OptionMonad) where
import System.Linux.Kvm.Components.Init
import System.Linux.Kvm.Components.Ram
import Data.List
import Options.Applicative
type OptionMonad = ConfigRamT (InitT IO)
data Options = Options
{ file :: String
, initrd :: (Maybe String)
, ram :: (Maybe Int)
, cmdline :: [String]
}
opts::Parser Options
opts = Options
<$> argument str (metavar "FILE")
<*> optional (strOption (long "initrd"))
<*> optional (option auto (long "ram" <> short 'm' <> metavar "BYTECOUNT"))
<*> many (argument str (metavar "cmdline..."))
continueWithOpts:: Options -> OptionMonad () -> IO ()
continueWithOpts (Options kern initrd ram cmdline) m = runInit kern initrd (intercalate " " cmdline) $ runConfigRam ram m
parseArgs :: OptionMonad () -> IO ()
parseArgs continue = execParser options >>= \opt -> continueWithOpts opt continue
where options = info (opts <**> helper)
(fullDesc
<> progDesc "kvm"
<> header "kvm")
| |
84d95747222932ba34761438930656d4bd15a0d9dd9b045e1b4657d399a4e8fd | Stratus3D/programming_erlang_exercises | web_profiler.erl | -module(web_profiler).
-export([ping/2, receive_response/2]).
ping(URL, Timeout) ->
{_Protocol, Host, Port, Path} = parse_url(URL),
{ok, Socket} = gen_tcp:connect(Host, Port, [binary, {packet, 0}, {active, false}]),
% Send the request
ok = gen_tcp:send(Socket, io_lib:format("HEAD ~s HTTP/1.0\r\n\r\n", [Path])),
Time the response
{Time, Result} = timer:tc(fun receive_response/2, [Socket, Timeout]),
% Format the return value of the function
case Result of
timeout ->
timeout;
_ ->
{time, Time}
end.
receive_response(Socket, Timeout) ->
% And receive the response
case gen_tcp:recv(Socket, 0, Timeout) of
{ok, Packet} -> Packet;
{error, timeout} -> timeout
end.
parse_url(Url) ->
{ok, Parsed} = http_uri:parse(Url),
% We ignore the query string for simplicity here
{Protocol, _, Host, Port, Path, _Query} = Parsed,
{Protocol, Host, Port, Path}.
| null | https://raw.githubusercontent.com/Stratus3D/programming_erlang_exercises/e4fd01024812059d338facc20f551e7dff4dac7e/chapter_26/exercise_1/web_profiler.erl | erlang | Send the request
Format the return value of the function
And receive the response
We ignore the query string for simplicity here | -module(web_profiler).
-export([ping/2, receive_response/2]).
ping(URL, Timeout) ->
{_Protocol, Host, Port, Path} = parse_url(URL),
{ok, Socket} = gen_tcp:connect(Host, Port, [binary, {packet, 0}, {active, false}]),
ok = gen_tcp:send(Socket, io_lib:format("HEAD ~s HTTP/1.0\r\n\r\n", [Path])),
Time the response
{Time, Result} = timer:tc(fun receive_response/2, [Socket, Timeout]),
case Result of
timeout ->
timeout;
_ ->
{time, Time}
end.
receive_response(Socket, Timeout) ->
case gen_tcp:recv(Socket, 0, Timeout) of
{ok, Packet} -> Packet;
{error, timeout} -> timeout
end.
parse_url(Url) ->
{ok, Parsed} = http_uri:parse(Url),
{Protocol, _, Host, Port, Path, _Query} = Parsed,
{Protocol, Host, Port, Path}.
|
b3889dc35cd85ee4ebad312427b6edd7ae35a511ee2dbb02b9ba67329f77b886 | nasa/Common-Metadata-Repository | tag_association_test.clj | (ns cmr.system-int-test.search.tagging.tag-association-test
"This tests associating tags with collections."
(:require
[clojure.test :refer :all]
[cmr.common.util :refer [are2] :as util]
[cmr.mock-echo.client.echo-util :as echo-util]
[cmr.system-int-test.data2.collection :as collection]
[cmr.system-int-test.data2.core :as data-core]
[cmr.system-int-test.system :as system]
[cmr.system-int-test.utils.index-util :as index]
[cmr.system-int-test.utils.ingest-util :as ingest]
[cmr.system-int-test.utils.metadata-db-util :as mdb]
[cmr.system-int-test.utils.search-util :as search]
[cmr.system-int-test.utils.tag-util :as tags]
[cmr.transmit.tag :as transmit-tag]))
(use-fixtures :each (join-fixtures
[(ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2" "provguid3" "PROV3"}
{:grant-all-search? false})
tags/grant-all-tag-fixture]))
(deftest associate-tags-by-query-with-collections-test
Grant all collections in PROV1 and 2
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV2"))
Create 4 collections in each provider that are identical .
The first collection will have data :
;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
(let [[c1-p1 c2-p1 c3-p1 c4-p1
c1-p2 c2-p2 c3-p2 c4-p2
c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
n (range 1 5)]
(:concept-id (data-core/ingest
p
(collection/collection
{:short-name (str "S" n)
:version-id (str "V" n)
:entry-title (str "ET" n)}))))
all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
tag (tags/make-tag)
tag-key (:tag-key tag)
token (echo-util/login (system/context) "user1")
{:keys [concept-id]} (tags/create-tag token tag)]
(index/wait-until-indexed)
(testing "Successfully Associate tag with collections"
(let [response (tags/associate-by-query token tag-key {:provider "PROV1"})]
(tags/assert-tag-association-response-ok?
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR"
:revision-id 1}
["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR"
:revision-id 1}
["C1200000015-PROV1"] {:concept-id "TA1200000028-CMR"
:revision-id 1}
["C1200000016-PROV1"] {:concept-id "TA1200000029-CMR"
:revision-id 1}}
response)))
(testing "Associate using query that finds nothing"
(let [response (tags/associate-by-query token tag-key {:provider "foo"})]
(tags/assert-tag-association-response-ok? {} response)))
(testing "ACLs are applied to collections found"
None of PROV3 's collections are visible
(let [response (tags/associate-by-query token tag-key {:provider "PROV3"})]
(tags/assert-tag-association-response-ok? {} response)))
(testing "Associate more collections"
Associates all the version 2 collections which is c2 - p1 ( already in ) and c2 - p2 ( new )
(let [response (tags/associate-by-query token tag-key {:version "v2"})]
(tags/assert-tag-association-response-ok?
{["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR"
:revision-id 2}
["C1200000018-PROV2"] {:concept-id "TA1200000030-CMR"
:revision-id 1}}
response)))))
(deftest associate-tags-by-concept-ids-with-collections-test
Grant all collections in PROV1 and 2
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV2"))
Create 4 collections in each provider that are identical .
The first collection will have data :
;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
(let [[c1-p1 c2-p1 c3-p1 c4-p1
c1-p2 c2-p2 c3-p2 c4-p2
c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
n (range 1 5)]
(:concept-id (data-core/ingest
p
(collection/collection
{:short-name (str "S" n)
:version-id (str "V" n)
:entry-title (str "ET" n)}))))
all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
tag-key "tag1"
tag (tags/make-tag {:tag-key tag-key})
token (echo-util/login (system/context) "user1")
{:keys [concept-id]} (tags/create-tag token tag)]
(index/wait-until-indexed)
(testing "Associate tag with collections by concept-ids"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id c1-p1}
{:concept-id c3-p2}])]
(tags/assert-tag-association-response-ok?
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR"
:revision-id 1}
["C1200000019-PROV2"] {:concept-id "TA1200000027-CMR"
:revision-id 1}}
response)))
(testing "Associate to no collections"
(let [response (tags/associate-by-concept-ids token tag-key [])]
(tags/assert-invalid-data-error
["At least one collection must be provided for tag association."]
response)))
(testing "Associate to collection revision and whole collection at the same time"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id c1-p1}
{:concept-id c1-p1 :revision-id 1}])]
(tags/assert-invalid-data-error
[(format (str "Unable to create tag association on a collection revision and the whole "
"collection at the same time for the following collections: %s.")
c1-p1)]
response)))
(testing "Associate to non-existent collections"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id "C100-P5"}])]
(tags/assert-tag-association-response-error?
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}}
response)))
(testing "Associate to deleted collections"
(let [c1-p1-concept (mdb/get-concept c1-p1)
_ (ingest/delete-concept c1-p1-concept)
_ (index/wait-until-indexed)
response (tags/associate-by-concept-ids
token tag-key [{:concept-id c1-p1}])]
(tags/assert-tag-association-response-error?
{[c1-p1] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p1)]}}
response)))
(testing "ACLs are applied to collections found"
      ;; None of PROV3's collections are visible
(let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c4-p3}])]
(tags/assert-tag-association-response-error?
{[c4-p3] {:errors [(format "Collection [%s] does not exist or is not visible." c4-p3)]}}
response)))
(testing "Tag association mixed response"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id c2-p1}
{:concept-id "C100-P5"}])]
(tags/assert-tag-association-response-error?
{["C1200000014-PROV1"] {:concept-id "TA1200000028-CMR"
:revision-id 1}
["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}}
response)))))
(deftest associate-tag-failure-test
  ;; Association endpoints should reject invalid content types, invalid JSON
  ;; queries, unknown tag-keys and deleted tags.
  (echo-util/grant-registered-users (system/context)
                                    (echo-util/coll-catalog-item-id "PROV1"))
  (let [tag-key "tag1"
        tag (tags/make-tag {:tag-key tag-key})
        token (echo-util/login (system/context) "user1")
        {:keys [concept-id revision-id]} (tags/create-tag token tag)
        ;; The stored updated tag would have user1 in the originator id
        tag (assoc tag :originator-id "user1")
        coll-concept-id (:concept-id (data-core/ingest
                                      "PROV1"
                                      (collection/collection)))]
    (testing "Associate tag using query sent with invalid content type"
      (are [associate-tag-fn request-json]
        (= {:status 400
            :errors
            ["The mime types specified in the content-type header [application/xml] are not supported."]}
           (associate-tag-fn token tag-key request-json {:http-options {:content-type :xml}}))
        tags/associate-by-query {:provider "foo"}
        tags/associate-by-concept-ids [{:concept-id coll-concept-id}]))
    (testing "Associate applies JSON Query validations"
      (are [associate-tag-fn request-json message]
        (= {:status 400
            :errors [message]}
           ;; Fix: pass the per-case request-json instead of the hard-coded
           ;; {:foo "bar"} so the JSONArray case actually exercises its input
           ;; (mirrors dissociate-tag-failure-test).
           (associate-tag-fn token tag-key request-json))
        tags/associate-by-query {:foo "bar"}
        "#/condition: extraneous key [foo] is not permitted"
        tags/associate-by-concept-ids {:concept-id coll-concept-id}
        "#: expected type: JSONArray, found: JSONObject"))
    (testing "Associate tag that doesn't exist"
      (are [associate-tag-fn request-json]
        (= {:status 404
            :errors ["Tag could not be found with tag-key [tag100]"]}
           (associate-tag-fn token "tag100" request-json))
        tags/associate-by-query {:provider "foo"}
        tags/associate-by-concept-ids [{:concept-id coll-concept-id}]))
    (testing "Associate deleted tag"
      (tags/delete-tag token tag-key)
      (are [associate-tag-fn request-json]
        (= {:status 404
            :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]}
           (associate-tag-fn token tag-key request-json))
        tags/associate-by-query {:provider "foo"}
        tags/associate-by-concept-ids [{:concept-id coll-concept-id}]))))
(deftest dissociate-tags-with-collections-by-query-test
  ;; Create 4 collections in each provider that are identical.
  ;; The first collection will have data:
  ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
  (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1")
        ;; Grant all collections in PROV1 and 2
        _ (echo-util/grant-registered-users (system/context)
                                            (echo-util/coll-catalog-item-id "PROV1"))
        _ (echo-util/grant-registered-users (system/context)
                                            (echo-util/coll-catalog-item-id "PROV2"))
        ;; PROV3 collections are only granted to members of group1
        _ (echo-util/grant-group (system/context)
                                 group1-concept-id
                                 (echo-util/coll-catalog-item-id "PROV3"))
        [c1-p1 c2-p1 c3-p1 c4-p1
         c1-p2 c2-p2 c3-p2 c4-p2
         c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
                                        n (range 1 5)]
                                    (data-core/ingest
                                     p
                                     (collection/collection
                                      {:short-name (str "S" n)
                                       :version-id (str "V" n)
                                       :entry-title (str "ET" n)})))
        all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
        all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
        all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3]
        all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls)
        tag-key "tag1"
        tag (tags/make-tag {:tag-key tag-key})
        token (echo-util/login (system/context) "user1")
        ;; prov3-token belongs to group1 and can therefore see PROV3 collections
        prov3-token (echo-util/login (system/context)
                                     "prov3-user"
                                     [group1-concept-id])
        {:keys [concept-id]} (tags/create-tag token tag)
        assert-tag-associated (partial tags/assert-tag-associated-with-query
                                       prov3-token {:tag-key "tag1"})]
    (index/wait-until-indexed)
    ;; Associate the tag with every collection
    (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"}
                                                       {:provider "PROV2"}
                                                       {:provider "PROV3"}]})
    (testing "Dissociate using query that finds nothing"
      (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "foo"})]
        (is (= 200 status))
        (assert-tag-associated all-colls)))
    (testing "ACLs are applied to collections found"
      ;; None of PROV3's collections are visible to normal users
      (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV3"})]
        (is (= 200 status))
        (assert-tag-associated all-colls)))
    (testing "Successfully dissociate tag with collections"
      (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})]
        (is (= 200 status))
        (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))
      ;; dissociate tag again is OK. Since there is no existing tag association, it does nothing.
      (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})]
        (is (= 200 status))
        (assert-tag-associated (concat all-prov2-colls all-prov3-colls))))))
(deftest dissociate-tags-with-collections-by-concept-ids-test
  ;; Create 4 collections in each provider that are identical.
  ;; The first collection will have data:
  ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
  (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1")
        ;; Grant all collections in PROV1 and 2
        _ (echo-util/grant-registered-users (system/context)
                                            (echo-util/coll-catalog-item-id "PROV1"))
        _ (echo-util/grant-registered-users (system/context)
                                            (echo-util/coll-catalog-item-id "PROV2"))
        ;; PROV3 collections are only granted to members of group1
        _ (echo-util/grant-group (system/context)
                                 group1-concept-id
                                 (echo-util/coll-catalog-item-id "PROV3"))
        [c1-p1 c2-p1 c3-p1 c4-p1
         c1-p2 c2-p2 c3-p2 c4-p2
         c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
                                        n (range 1 5)]
                                    (data-core/ingest
                                     p
                                     (collection/collection
                                      {:short-name (str "S" n)
                                       :version-id (str "V" n)
                                       :entry-title (str "ET" n)})))
        all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
        all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
        all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3]
        all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls)
        tag-key "tag1"
        tag (tags/make-tag {:tag-key tag-key})
        token (echo-util/login (system/context) "user1")
        ;; prov3-token belongs to group1 and can therefore see PROV3 collections
        prov3-token (echo-util/login (system/context)
                                     "prov3-user"
                                     [group1-concept-id])
        {:keys [concept-id]} (tags/create-tag token tag)
        assert-tag-associated (partial tags/assert-tag-associated-with-query
                                       prov3-token {:tag-key "tag1"})]
    (index/wait-until-indexed)
    ;; Associate the tag with every collection
    (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"}
                                                       {:provider "PROV2"}
                                                       {:provider "PROV3"}]})
    (testing "Successfully dissociate tag with collections"
      (let [{:keys [status]} (tags/dissociate-by-concept-ids
                              token
                              tag-key
                              (map #(hash-map :concept-id (:concept-id %)) all-prov1-colls))]
        (is (= 200 status))
        (assert-tag-associated (concat all-prov2-colls all-prov3-colls))))
    (testing "Dissociate non-existent collections"
      (let [response (tags/dissociate-by-concept-ids
                      token tag-key [{:concept-id "C100-P5"}])]
        (tags/assert-tag-dissociation-response-error?
         {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}}
         response)))
    (testing "Dissociate to deleted collections"
      (let [c1-p2-concept-id (:concept-id c1-p2)
            c1-p2-concept (mdb/get-concept c1-p2-concept-id)
            _ (ingest/delete-concept c1-p2-concept)
            _ (index/wait-until-indexed)
            response (tags/dissociate-by-concept-ids
                      token tag-key [{:concept-id c1-p2-concept-id}])]
        ;; NOTE(review): the hard-coded concept-id below assumes deterministic
        ;; id generation from the reset fixture and ingest order — confirm.
        (tags/assert-tag-dissociation-response-error?
         {["C1200000019-PROV2"] {:errors [(format "Collection [%s] does not exist or is not visible."
                                                  c1-p2-concept-id)]}}
         response)))
    (testing "ACLs are applied to collections found"
      ;; None of PROV3's collections are visible
      (let [coll-concept-id (:concept-id c4-p3)
            response (tags/dissociate-by-concept-ids
                      token tag-key [{:concept-id coll-concept-id}])]
        (tags/assert-tag-dissociation-response-error?
         {["C1200000026-PROV3"] {:errors [(format "Collection [%s] does not exist or is not visible."
                                                  coll-concept-id)]}}
         response)))))
(deftest dissociate-tag-failure-test
  ;; Dissociation endpoints should reject invalid content types, invalid JSON
  ;; queries, unknown tag-keys and deleted tags.
  (echo-util/grant-registered-users (system/context)
                                    (echo-util/coll-catalog-item-id "PROV1"))
  (let [tag-key "tag1"
        tag (tags/make-tag {:tag-key tag-key})
        token (echo-util/login (system/context) "user1")
        {:keys [concept-id revision-id]} (tags/create-tag token tag)
        ;; The stored updated tag would have user1 in the originator id
        tag (assoc tag :originator-id "user1")
        coll-concept-id (:concept-id (data-core/ingest
                                      "PROV1"
                                      (collection/collection)))]
    (testing "Dissociate tag using query sent with invalid content type"
      (are [dissociate-tag-fn request-json]
        (= {:status 400
            :errors
            ["The mime types specified in the content-type header [application/xml] are not supported."]}
           (dissociate-tag-fn token tag-key request-json {:http-options {:content-type :xml}}))
        tags/dissociate-by-query {:provider "foo"}
        tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}]))
    (testing "Dissociate applies JSON Query validations"
      (are [dissociate-tag-fn request-json message]
        (= {:status 400
            :errors [message]}
           (dissociate-tag-fn token tag-key request-json))
        tags/dissociate-by-query {:foo "bar"}
        "#/condition: extraneous key [foo] is not permitted"
        tags/dissociate-by-concept-ids {:concept-id coll-concept-id}
        "#: expected type: JSONArray, found: JSONObject"))
    (testing "Dissociate tag that doesn't exist"
      (are [dissociate-tag-fn request-json]
        (= {:status 404
            :errors ["Tag could not be found with tag-key [tag100]"]}
           (dissociate-tag-fn token "tag100" request-json))
        tags/dissociate-by-query {:provider "foo"}
        tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}]))
    (testing "Dissociate deleted tag"
      (tags/delete-tag token tag-key)
      (are [dissociate-tag-fn request-json]
        (= {:status 404
            :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]}
           (dissociate-tag-fn token tag-key request-json))
        tags/dissociate-by-query {:provider "foo"}
        tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}]))))
(deftest dissociate-tags-with-partial-match-query-test
  (echo-util/grant-registered-users (system/context)
                                    (echo-util/coll-catalog-item-id "PROV1"))
  (testing "dissociate tag with only some of the collections matching the query are associated with the tag is OK"
    (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"}))
          ;; coll2 matches the dissociation query below but is never tagged
          coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"}))
          token (echo-util/login (system/context) "user1")
          _ (index/wait-until-indexed)
          tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll1])
          assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})]
      (assert-tag-associated [coll1])
      ;; The query finds both coll1 and coll2; dissociation succeeds even
      ;; though only coll1 has the tag association
      (let [{:keys [status errors]} (tags/dissociate-by-query token "tag1" {:provider "PROV1"})]
        (is (= 200 status))
        (assert-tag-associated [])))))
(deftest dissociate-tags-with-mixed-response-test
  (echo-util/grant-registered-users (system/context)
                                    (echo-util/coll-catalog-item-id "PROV1"))
  (testing "dissociate tag with mixed success and failure response"
    (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"}))
          coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"}))
          coll3 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET3"}))
          token (echo-util/login (system/context) "user1")
          tag-key "tag1"
          assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})]
      (tags/create-tag token (tags/make-tag {:tag-key tag-key}))
      (index/wait-until-indexed)
      ;; coll1 is associated with the whole collection, coll2 with a specific
      ;; collection revision
      (tags/associate-by-concept-ids token tag-key [{:concept-id (:concept-id coll1)}
                                                    {:concept-id (:concept-id coll2)
                                                     :revision-id (:revision-id coll2)}])
      (assert-tag-associated [coll1 coll2])
      (let [response (tags/dissociate-by-concept-ids
                      token tag-key
                      [{:concept-id "C100-P5"} ;; non-existent collection
                       {:concept-id (:concept-id coll1)} ;; success
                       {:concept-id (:concept-id coll2) :revision-id 1} ;; success
                       {:concept-id (:concept-id coll3)}])] ;; no tag association
        ;; NOTE(review): expected concept-ids below are hard-coded and rely on
        ;; deterministic id generation from the reset fixture — confirm.
        (tags/assert-tag-dissociation-response-error?
         {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}
          ["C1200000012-PROV1"] {:concept-id "TA1200000016-CMR" :revision-id 2}
          ["C1200000013-PROV1" 1] {:concept-id "TA1200000017-CMR" :revision-id 2}
          ["C1200000014-PROV1"] {:warnings ["Tag [tag1] is not associated with collection [C1200000014-PROV1]."]}}
         response)
        (assert-tag-associated [])))))
;; This tests association retention when collections and tags are updated or deleted.
(deftest association-retention-test
  (echo-util/grant-all (system/context)
                       (echo-util/coll-catalog-item-id "PROV1"))
  (let [coll (data-core/ingest "PROV1" (collection/collection))
        token (echo-util/login (system/context) "user1")
        _ (index/wait-until-indexed)
        tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll])
        ;; nil token: search as guest — PROV1 was granted to all above
        assert-tag-associated (partial tags/assert-tag-associated-with-query nil {:tag-key "tag1"})
        assert-tag-not-associated (fn []
                                    (let [refs (search/find-refs :collection {:tag-key "tag1"})]
                                      (is (nil? (:errors refs)))
                                      (is (data-core/refs-match? [] refs))))]
    (index/wait-until-indexed)
    (testing "Tag initially associated with collection"
      (assert-tag-associated [coll]))
    (testing "Tag still associated with collection after updating collection"
      ;; Re-ingesting without a revision-id creates a new collection revision
      (let [updated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))]
        (is (= 200 (:status updated-coll)))
        (index/wait-until-indexed)
        (assert-tag-associated [updated-coll])))
    (testing "Tag still associated with collection after deleting and recreating the collection"
      (is (= 200 (:status (ingest/delete-concept (data-core/item->concept coll)))))
      (let [recreated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))]
        (is (= 200 (:status recreated-coll)))
        (index/wait-until-indexed)
        (assert-tag-associated [recreated-coll])))
    ;; After update + delete + recreate the collection is at revision 4
    (let [latest-coll (assoc coll :revision-id 4)]
      (testing "Tag still associated with collection after updating tag"
        (let [updated-tag (tags/save-tag token tag)]
          (is (= {:status 200 :revision-id 2} (select-keys updated-tag [:status :revision-id])))
          (index/wait-until-indexed)
          (assert-tag-associated [latest-coll])))
      (testing "Tag not associated with collection after deleting and recreating the tag"
        (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 3}
               (tags/delete-tag token (:tag-key tag))))
        (index/wait-until-indexed)
        (testing "Not associated after tag deleted"
          (assert-tag-not-associated))
        (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 4}
               (tags/create-tag token (tags/make-tag {:tag-key "tag1"}))))
        (index/wait-until-indexed)
        (testing "Not associated after being recreated."
          (assert-tag-not-associated))))))
(defn- assert-tag-association
  "Assert the collections are associated with the tag for the given tag-key
  by searching for collections tagged with it and comparing the references."
  [token colls tag-key]
  (let [found-refs (search/find-refs :collection {:token token
                                                  :tag-key tag-key})]
    (is (data-core/refs-match? colls found-refs))))
(deftest associate-dissociate-tag-with-collections-test
  ;; Exercises repeated associate/dissociate cycles of two tags across three
  ;; collections, verifying the association state after every step.
  ;; Grant all collections in PROV1
  (echo-util/grant-registered-users (system/context)
                                    (echo-util/coll-catalog-item-id "PROV1"))
  (let [[coll1 coll2 coll3] (for [n (range 1 4)]
                              (data-core/ingest "PROV1" (collection/collection)))
        [coll1-id coll2-id coll3-id] (map :concept-id [coll1 coll2 coll3])
        token (echo-util/login (system/context) "user1")]
    (tags/create-tag token (tags/make-tag {:tag-key "tag1"}))
    (tags/create-tag token (tags/make-tag {:tag-key "tag2"}))
    (index/wait-until-indexed)
    ;; associate tag1 to coll1, tag2 to coll2
    ;; both :concept-id and :concept_id works as keys
    (tags/associate-by-concept-ids token "tag1" [{:concept_id coll1-id}])
    (tags/associate-by-concept-ids token "tag2" [{:concept-id coll2-id}])
    (index/wait-until-indexed)
    ;; verify association
    (assert-tag-association token [coll1] "tag1")
    (assert-tag-association token [coll2] "tag2")
    ;; associate tag1 to coll1 again — re-association is a no-op
    (tags/associate-by-concept-ids token "tag1" [{:concept-id coll1-id}])
    (index/wait-until-indexed)
    ;; verify association
    (assert-tag-association token [coll1] "tag1")
    (assert-tag-association token [coll2] "tag2")
    ;; associate tag1 to coll2
    (tags/associate-by-concept-ids token "tag1" [{:concept-id coll2-id}])
    (index/wait-until-indexed)
    ;; verify association
    (assert-tag-association token [coll1 coll2] "tag1")
    (assert-tag-association token [coll2] "tag2")
    ;; associate tag2 to coll1, coll2 and coll3
    (tags/associate-by-concept-ids token "tag2" [{:concept-id coll1-id}
                                                 {:concept-id coll2-id}
                                                 {:concept-id coll3-id}])
    (index/wait-until-indexed)
    ;; verify association
    (assert-tag-association token [coll1 coll2] "tag1")
    (assert-tag-association token [coll1 coll2 coll3] "tag2")
    ;; dissociate tag1 from coll1
    (tags/dissociate-by-concept-ids token "tag1" [{:concept-id coll1-id}])
    (index/wait-until-indexed)
    ;; verify association
    (assert-tag-association token [coll2] "tag1")
    (assert-tag-association token [coll1 coll2 coll3] "tag2")
    ;; dissociate tag2 from coll1 and coll2
    (tags/dissociate-by-concept-ids token "tag2" [{:concept-id coll1-id}
                                                  {:concept-id coll2-id}])
    (index/wait-until-indexed)
    ;; verify association
    (assert-tag-association token [coll2] "tag1")
    (assert-tag-association token [coll3] "tag2")))
(deftest associate-tags-with-data-test
  ;; Tests the optional :data payload attached to a tag association:
  ;; arbitrary JSON values are accepted, malformed JSON and oversized
  ;; payloads are rejected.
  (echo-util/grant-all (system/context)
                       (echo-util/coll-catalog-item-id "PROV1"))
  (let [coll (data-core/ingest "PROV1" (collection/collection))
        coll-concept-id (:concept-id coll)
        token (echo-util/login (system/context) "user1")
        tag-key "tag1"]
    (tags/create-tag token (tags/make-tag {:tag-key tag-key}))
    (index/wait-until-indexed)
    (testing "Associate tag with collections by concept-id and data"
      ;; Each JSON value type (string, boolean, number, array, object) is
      ;; accepted as association data
      (are [data]
        (let [{:keys [status]} (tags/associate-by-concept-ids
                                token tag-key [{:concept-id coll-concept-id
                                                :data data}])]
          (is (= 200 status)))
        "string data"
        true
        100
        123.45
        [true "some string" 100]
        {"status" "reviewed" "action" "fix typos"}))
    (testing "Associate tag with collections with invalid data"
      ;; Send raw malformed JSON directly through the transmit layer
      (let [{:keys [status body]} (transmit-tag/associate-tag :concept-ids
                                                              (system/context)
                                                              tag-key
                                                              nil
                                                              {:raw? true
                                                               :http-options {:body "{{{{"}})
            error (-> body :errors first)]
        (is (= 400 status))
        (is (re-find #"Invalid JSON: A JSON Object can not directly nest another JSON Object"
                     error))))
    (testing "Associate tag with collections with data exceed 32KB"
      (let [too-much-data {"a" (tags/string-of-length 32768)}
            expected-msg (format
                          "Tag association data exceed the maximum length of 32KB for collection with concept id [%s] revision id [%s]."
                          coll-concept-id nil)
            response (tags/associate-by-concept-ids
                      token tag-key [{:concept-id coll-concept-id
                                      :data too-much-data}])]
        (tags/assert-tag-association-response-error?
         {[coll-concept-id] {:errors [expected-msg]}}
         response)))))
(deftest retrieve-concept-by-tag-association-concept-id-test
  ;; Retrieving a concept directly by a tag-association concept-id ("TA...")
  ;; is not supported and must return a 400 with an explanatory error.
  (testing "Retrieve concept by tag association concept-id is invalid"
    (let [result (search/get-search-failure-xml-data
                  (search/retrieve-concept
                   "TA10000-CMR" nil {:throw-exceptions true}))]
      (is (= [400 ["Retrieving concept by concept id is not supported for concept type [tag-association]."]]
             [(:status result) (:errors result)])))))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/78e9ab2403d35fc1b8480c00463695b79f0ff7e0/system-int-test/test/cmr/system_int_test/search/tagging/tag_association_test.clj | clojure | {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
{:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
{:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
Associate the tag with every collection
dissociate tag again is OK. Since there is no existing tag association, it does nothing.
{:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"}
Associate the tag with every collection
non-existent collection
success
success
no tag association
This tests association retention when collections and tags are updated or deleted.
Grant all collections in PROV1
associate tag1 to coll1, tag2 to coll2
both :concept-id and :concept_id works as keys
verify association
associate tag1 to coll1 again
verify association
associate tag1 to coll2
verify association
associate tag2 to coll1, coll2 and coll3
verify association
dissociate tag1 from coll1
verify association
dissociate tag2 from coll1 and coll2
verify association | (ns cmr.system-int-test.search.tagging.tag-association-test
"This tests associating tags with collections."
(:require
[clojure.test :refer :all]
[cmr.common.util :refer [are2] :as util]
[cmr.mock-echo.client.echo-util :as echo-util]
[cmr.system-int-test.data2.collection :as collection]
[cmr.system-int-test.data2.core :as data-core]
[cmr.system-int-test.system :as system]
[cmr.system-int-test.utils.index-util :as index]
[cmr.system-int-test.utils.ingest-util :as ingest]
[cmr.system-int-test.utils.metadata-db-util :as mdb]
[cmr.system-int-test.utils.search-util :as search]
[cmr.system-int-test.utils.tag-util :as tags]
[cmr.transmit.tag :as transmit-tag]))
(use-fixtures :each (join-fixtures
[(ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2" "provguid3" "PROV3"}
{:grant-all-search? false})
tags/grant-all-tag-fixture]))
(deftest associate-tags-by-query-with-collections-test
Grant all collections in PROV1 and 2
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV2"))
Create 4 collections in each provider that are identical .
The first collection will have data :
(let [[c1-p1 c2-p1 c3-p1 c4-p1
c1-p2 c2-p2 c3-p2 c4-p2
c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
n (range 1 5)]
(:concept-id (data-core/ingest
p
(collection/collection
{:short-name (str "S" n)
:version-id (str "V" n)
:entry-title (str "ET" n)}))))
all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
tag (tags/make-tag)
tag-key (:tag-key tag)
token (echo-util/login (system/context) "user1")
{:keys [concept-id]} (tags/create-tag token tag)]
(index/wait-until-indexed)
(testing "Successfully Associate tag with collections"
(let [response (tags/associate-by-query token tag-key {:provider "PROV1"})]
(tags/assert-tag-association-response-ok?
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR"
:revision-id 1}
["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR"
:revision-id 1}
["C1200000015-PROV1"] {:concept-id "TA1200000028-CMR"
:revision-id 1}
["C1200000016-PROV1"] {:concept-id "TA1200000029-CMR"
:revision-id 1}}
response)))
(testing "Associate using query that finds nothing"
(let [response (tags/associate-by-query token tag-key {:provider "foo"})]
(tags/assert-tag-association-response-ok? {} response)))
(testing "ACLs are applied to collections found"
None of PROV3 's collections are visible
(let [response (tags/associate-by-query token tag-key {:provider "PROV3"})]
(tags/assert-tag-association-response-ok? {} response)))
(testing "Associate more collections"
Associates all the version 2 collections which is c2 - p1 ( already in ) and c2 - p2 ( new )
(let [response (tags/associate-by-query token tag-key {:version "v2"})]
(tags/assert-tag-association-response-ok?
{["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR"
:revision-id 2}
["C1200000018-PROV2"] {:concept-id "TA1200000030-CMR"
:revision-id 1}}
response)))))
(deftest associate-tags-by-concept-ids-with-collections-test
Grant all collections in PROV1 and 2
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV2"))
Create 4 collections in each provider that are identical .
The first collection will have data :
(let [[c1-p1 c2-p1 c3-p1 c4-p1
c1-p2 c2-p2 c3-p2 c4-p2
c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
n (range 1 5)]
(:concept-id (data-core/ingest
p
(collection/collection
{:short-name (str "S" n)
:version-id (str "V" n)
:entry-title (str "ET" n)}))))
all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
tag-key "tag1"
tag (tags/make-tag {:tag-key tag-key})
token (echo-util/login (system/context) "user1")
{:keys [concept-id]} (tags/create-tag token tag)]
(index/wait-until-indexed)
(testing "Associate tag with collections by concept-ids"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id c1-p1}
{:concept-id c3-p2}])]
(tags/assert-tag-association-response-ok?
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR"
:revision-id 1}
["C1200000019-PROV2"] {:concept-id "TA1200000027-CMR"
:revision-id 1}}
response)))
(testing "Associate to no collections"
(let [response (tags/associate-by-concept-ids token tag-key [])]
(tags/assert-invalid-data-error
["At least one collection must be provided for tag association."]
response)))
(testing "Associate to collection revision and whole collection at the same time"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id c1-p1}
{:concept-id c1-p1 :revision-id 1}])]
(tags/assert-invalid-data-error
[(format (str "Unable to create tag association on a collection revision and the whole "
"collection at the same time for the following collections: %s.")
c1-p1)]
response)))
(testing "Associate to non-existent collections"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id "C100-P5"}])]
(tags/assert-tag-association-response-error?
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}}
response)))
(testing "Associate to deleted collections"
(let [c1-p1-concept (mdb/get-concept c1-p1)
_ (ingest/delete-concept c1-p1-concept)
_ (index/wait-until-indexed)
response (tags/associate-by-concept-ids
token tag-key [{:concept-id c1-p1}])]
(tags/assert-tag-association-response-error?
{[c1-p1] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p1)]}}
response)))
(testing "ACLs are applied to collections found"
None of PROV3 's collections are visible
(let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c4-p3}])]
(tags/assert-tag-association-response-error?
{[c4-p3] {:errors [(format "Collection [%s] does not exist or is not visible." c4-p3)]}}
response)))
(testing "Tag association mixed response"
(let [response (tags/associate-by-concept-ids
token tag-key [{:concept-id c2-p1}
{:concept-id "C100-P5"}])]
(tags/assert-tag-association-response-error?
{["C1200000014-PROV1"] {:concept-id "TA1200000028-CMR"
:revision-id 1}
["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}}
response)))))
(deftest associate-tag-failure-test
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(let [tag-key "tag1"
tag (tags/make-tag {:tag-key tag-key})
token (echo-util/login (system/context) "user1")
{:keys [concept-id revision-id]} (tags/create-tag token tag)
The stored updated tag would have user1 in the originator i d
tag (assoc tag :originator-id "user1")
coll-concept-id (:concept-id (data-core/ingest
"PROV1"
(collection/collection)))]
(testing "Associate tag using query sent with invalid content type"
(are [associate-tag-fn request-json]
(= {:status 400
:errors
["The mime types specified in the content-type header [application/xml] are not supported."]}
(associate-tag-fn token tag-key request-json {:http-options {:content-type :xml}}))
tags/associate-by-query {:provider "foo"}
tags/associate-by-concept-ids [{:concept-id coll-concept-id}]))
(testing "Associate applies JSON Query validations"
(are [associate-tag-fn request-json message]
(= {:status 400
:errors [message]}
(associate-tag-fn token tag-key {:foo "bar"}))
tags/associate-by-query {:foo "bar"}
"#/condition: extraneous key [foo] is not permitted"
tags/associate-by-concept-ids {:concept-id coll-concept-id}
"#: expected type: JSONArray, found: JSONObject"))
(testing "Associate tag that doesn't exist"
(are [associate-tag-fn request-json]
(= {:status 404
:errors ["Tag could not be found with tag-key [tag100]"]}
(associate-tag-fn token "tag100" request-json))
tags/associate-by-query {:provider "foo"}
tags/associate-by-concept-ids [{:concept-id coll-concept-id}]))
(testing "Associate deleted tag"
(tags/delete-tag token tag-key)
(are [associate-tag-fn request-json]
(= {:status 404
:errors [(format "Tag with tag-key [%s] was deleted." tag-key)]}
(associate-tag-fn token tag-key request-json))
tags/associate-by-query {:provider "foo"}
tags/associate-by-concept-ids [{:concept-id coll-concept-id}]))))
(deftest dissociate-tags-with-collections-by-query-test
Create 4 collections in each provider that are identical .
The first collection will have data :
(let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1")
Grant all collections in PROV1 and 2
_ (echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
_ (echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV2"))
_ (echo-util/grant-group (system/context)
group1-concept-id
(echo-util/coll-catalog-item-id "PROV3"))
[c1-p1 c2-p1 c3-p1 c4-p1
c1-p2 c2-p2 c3-p2 c4-p2
c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
n (range 1 5)]
(data-core/ingest
p
(collection/collection
{:short-name (str "S" n)
:version-id (str "V" n)
:entry-title (str "ET" n)})))
all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3]
all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls)
tag-key "tag1"
tag (tags/make-tag {:tag-key tag-key})
token (echo-util/login (system/context) "user1")
prov3-token (echo-util/login (system/context)
"prov3-user"
[group1-concept-id])
{:keys [concept-id]} (tags/create-tag token tag)
assert-tag-associated (partial tags/assert-tag-associated-with-query
prov3-token {:tag-key "tag1"})]
(index/wait-until-indexed)
(tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"}
{:provider "PROV2"}
{:provider "PROV3"}]})
(testing "Dissociate using query that finds nothing"
(let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "foo"})]
(is (= 200 status))
(assert-tag-associated all-colls)))
(testing "ACLs are applied to collections found"
None of PROV3 's collections are visible to normal users
(let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV3"})]
(is (= 200 status))
(assert-tag-associated all-colls)))
(testing "Successfully dissociate tag with collections"
(let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})]
(is (= 200 status))
(assert-tag-associated (concat all-prov2-colls all-prov3-colls)))
(let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})]
(is (= 200 status))
(assert-tag-associated (concat all-prov2-colls all-prov3-colls))))))
(deftest dissociate-tags-with-collections-by-concept-ids-test
Create 4 collections in each provider that are identical .
The first collection will have data :
(let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1")
Grant all collections in PROV1 and 2
_ (echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
_ (echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV2"))
_ (echo-util/grant-group (system/context)
group1-concept-id
(echo-util/coll-catalog-item-id "PROV3"))
[c1-p1 c2-p1 c3-p1 c4-p1
c1-p2 c2-p2 c3-p2 c4-p2
c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"]
n (range 1 5)]
(data-core/ingest
p
(collection/collection
{:short-name (str "S" n)
:version-id (str "V" n)
:entry-title (str "ET" n)})))
all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1]
all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2]
all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3]
all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls)
tag-key "tag1"
tag (tags/make-tag {:tag-key tag-key})
token (echo-util/login (system/context) "user1")
prov3-token (echo-util/login (system/context)
"prov3-user"
[group1-concept-id])
{:keys [concept-id]} (tags/create-tag token tag)
assert-tag-associated (partial tags/assert-tag-associated-with-query
prov3-token {:tag-key "tag1"})]
(index/wait-until-indexed)
(tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"}
{:provider "PROV2"}
{:provider "PROV3"}]})
(testing "Successfully dissociate tag with collections"
(let [{:keys [status]} (tags/dissociate-by-concept-ids
token
tag-key
(map #(hash-map :concept-id (:concept-id %)) all-prov1-colls))]
(is (= 200 status))
(assert-tag-associated (concat all-prov2-colls all-prov3-colls))))
(testing "Dissociate non-existent collections"
(let [response (tags/dissociate-by-concept-ids
token tag-key [{:concept-id "C100-P5"}])]
(tags/assert-tag-dissociation-response-error?
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}}
response)))
(testing "Dissociate to deleted collections"
(let [c1-p2-concept-id (:concept-id c1-p2)
c1-p2-concept (mdb/get-concept c1-p2-concept-id)
_ (ingest/delete-concept c1-p2-concept)
_ (index/wait-until-indexed)
response (tags/dissociate-by-concept-ids
token tag-key [{:concept-id c1-p2-concept-id}])]
(tags/assert-tag-dissociation-response-error?
{["C1200000019-PROV2"] {:errors [(format "Collection [%s] does not exist or is not visible."
c1-p2-concept-id)]}}
response)))
(testing "ACLs are applied to collections found"
None of PROV3 's collections are visible
(let [coll-concept-id (:concept-id c4-p3)
response (tags/dissociate-by-concept-ids
token tag-key [{:concept-id coll-concept-id}])]
(tags/assert-tag-dissociation-response-error?
{["C1200000026-PROV3"] {:errors [(format "Collection [%s] does not exist or is not visible."
coll-concept-id)]}}
response)))))
(deftest dissociate-tag-failure-test
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(let [tag-key "tag1"
tag (tags/make-tag {:tag-key tag-key})
token (echo-util/login (system/context) "user1")
{:keys [concept-id revision-id]} (tags/create-tag token tag)
The stored updated tag would have user1 in the originator i d
tag (assoc tag :originator-id "user1")
coll-concept-id (:concept-id (data-core/ingest
"PROV1"
(collection/collection)))]
(testing "Dissociate tag using query sent with invalid content type"
(are [dissociate-tag-fn request-json]
(= {:status 400
:errors
["The mime types specified in the content-type header [application/xml] are not supported."]}
(dissociate-tag-fn token tag-key request-json {:http-options {:content-type :xml}}))
tags/dissociate-by-query {:provider "foo"}
tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}]))
(testing "Dissociate applies JSON Query validations"
(are [dissociate-tag-fn request-json message]
(= {:status 400
:errors [message]}
(dissociate-tag-fn token tag-key request-json))
tags/dissociate-by-query {:foo "bar"}
"#/condition: extraneous key [foo] is not permitted"
tags/dissociate-by-concept-ids {:concept-id coll-concept-id}
"#: expected type: JSONArray, found: JSONObject"))
(testing "Dissociate tag that doesn't exist"
(are [dissociate-tag-fn request-json]
(= {:status 404
:errors ["Tag could not be found with tag-key [tag100]"]}
(dissociate-tag-fn token "tag100" request-json))
tags/dissociate-by-query {:provider "foo"}
tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}]))
(testing "Dissociate deleted tag"
(tags/delete-tag token tag-key)
(are [dissociate-tag-fn request-json]
(= {:status 404
:errors [(format "Tag with tag-key [%s] was deleted." tag-key)]}
(dissociate-tag-fn token tag-key request-json))
tags/dissociate-by-query {:provider "foo"}
tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}]))))
(deftest dissociate-tags-with-partial-match-query-test
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(testing "dissociate tag with only some of the collections matching the query are associated with the tag is OK"
(let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"}))
coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"}))
token (echo-util/login (system/context) "user1")
_ (index/wait-until-indexed)
tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll1])
assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})]
(assert-tag-associated [coll1])
(let [{:keys [status errors]} (tags/dissociate-by-query token "tag1" {:provider "PROV1"})]
(is (= 200 status))
(assert-tag-associated [])))))
(deftest dissociate-tags-with-mixed-response-test
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(testing "dissociate tag with mixed success and failure response"
(let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"}))
coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"}))
coll3 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET3"}))
token (echo-util/login (system/context) "user1")
tag-key "tag1"
assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})]
(tags/create-tag token (tags/make-tag {:tag-key tag-key}))
(index/wait-until-indexed)
(tags/associate-by-concept-ids token tag-key [{:concept-id (:concept-id coll1)}
{:concept-id (:concept-id coll2)
:revision-id (:revision-id coll2)}])
(assert-tag-associated [coll1 coll2])
(let [response (tags/dissociate-by-concept-ids
token tag-key
(tags/assert-tag-dissociation-response-error?
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}
["C1200000012-PROV1"] {:concept-id "TA1200000016-CMR" :revision-id 2}
["C1200000013-PROV1" 1] {:concept-id "TA1200000017-CMR" :revision-id 2}
["C1200000014-PROV1"] {:warnings ["Tag [tag1] is not associated with collection [C1200000014-PROV1]."]}}
response)
(assert-tag-associated [])))))
(deftest association-retention-test
(echo-util/grant-all (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(let [coll (data-core/ingest "PROV1" (collection/collection))
token (echo-util/login (system/context) "user1")
_ (index/wait-until-indexed)
tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll])
assert-tag-associated (partial tags/assert-tag-associated-with-query nil {:tag-key "tag1"})
assert-tag-not-associated (fn []
(let [refs (search/find-refs :collection {:tag-key "tag1"})]
(is (nil? (:errors refs)))
(is (data-core/refs-match? [] refs))))]
(index/wait-until-indexed)
(testing "Tag initially associated with collection"
(assert-tag-associated [coll]))
(testing "Tag still associated with collection after updating collection"
(let [updated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))]
(is (= 200 (:status updated-coll)))
(index/wait-until-indexed)
(assert-tag-associated [updated-coll])))
(testing "Tag still associated with collection after deleting and recreating the collection"
(is (= 200 (:status (ingest/delete-concept (data-core/item->concept coll)))))
(let [recreated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))]
(is (= 200 (:status recreated-coll)))
(index/wait-until-indexed)
(assert-tag-associated [recreated-coll])))
(let [latest-coll (assoc coll :revision-id 4)]
(testing "Tag still associated with collection after updating tag"
(let [updated-tag (tags/save-tag token tag)]
(is (= {:status 200 :revision-id 2} (select-keys updated-tag [:status :revision-id])))
(index/wait-until-indexed)
(assert-tag-associated [latest-coll])))
(testing "Tag not associated with collection after deleting and recreating the tag"
(is (= {:status 200 :concept-id (:concept-id tag) :revision-id 3}
(tags/delete-tag token (:tag-key tag))))
(index/wait-until-indexed)
(testing "Not associated after tag deleted"
(assert-tag-not-associated))
(is (= {:status 200 :concept-id (:concept-id tag) :revision-id 4}
(tags/create-tag token (tags/make-tag {:tag-key "tag1"}))))
(index/wait-until-indexed)
(testing "Not associated after being recreated."
(assert-tag-not-associated))))))
(defn- assert-tag-association
"Assert the collections are associated with the tag for the given tag-key"
[token colls tag-key]
(is (data-core/refs-match? colls
(search/find-refs :collection {:token token
:tag-key tag-key}))))
(deftest associate-dissociate-tag-with-collections-test
(echo-util/grant-registered-users (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(let [[coll1 coll2 coll3] (for [n (range 1 4)]
(data-core/ingest "PROV1" (collection/collection)))
[coll1-id coll2-id coll3-id] (map :concept-id [coll1 coll2 coll3])
token (echo-util/login (system/context) "user1")]
(tags/create-tag token (tags/make-tag {:tag-key "tag1"}))
(tags/create-tag token (tags/make-tag {:tag-key "tag2"}))
(index/wait-until-indexed)
(tags/associate-by-concept-ids token "tag1" [{:concept_id coll1-id}])
(tags/associate-by-concept-ids token "tag2" [{:concept-id coll2-id}])
(index/wait-until-indexed)
(assert-tag-association token [coll1] "tag1")
(assert-tag-association token [coll2] "tag2")
(tags/associate-by-concept-ids token "tag1" [{:concept-id coll1-id}])
(index/wait-until-indexed)
(assert-tag-association token [coll1] "tag1")
(assert-tag-association token [coll2] "tag2")
(tags/associate-by-concept-ids token "tag1" [{:concept-id coll2-id}])
(index/wait-until-indexed)
(assert-tag-association token [coll1 coll2] "tag1")
(assert-tag-association token [coll2] "tag2")
(tags/associate-by-concept-ids token "tag2" [{:concept-id coll1-id}
{:concept-id coll2-id}
{:concept-id coll3-id}])
(index/wait-until-indexed)
(assert-tag-association token [coll1 coll2] "tag1")
(assert-tag-association token [coll1 coll2 coll3] "tag2")
(tags/dissociate-by-concept-ids token "tag1" [{:concept-id coll1-id}])
(index/wait-until-indexed)
(assert-tag-association token [coll2] "tag1")
(assert-tag-association token [coll1 coll2 coll3] "tag2")
(tags/dissociate-by-concept-ids token "tag2" [{:concept-id coll1-id}
{:concept-id coll2-id}])
(index/wait-until-indexed)
(assert-tag-association token [coll2] "tag1")
(assert-tag-association token [coll3] "tag2")))
(deftest associate-tags-with-data-test
(echo-util/grant-all (system/context)
(echo-util/coll-catalog-item-id "PROV1"))
(let [coll (data-core/ingest "PROV1" (collection/collection))
coll-concept-id (:concept-id coll)
token (echo-util/login (system/context) "user1")
tag-key "tag1"]
(tags/create-tag token (tags/make-tag {:tag-key tag-key}))
(index/wait-until-indexed)
(testing "Associate tag with collections by concept-id and data"
(are [data]
(let [{:keys [status]} (tags/associate-by-concept-ids
token tag-key [{:concept-id coll-concept-id
:data data}])]
(is (= 200 status)))
"string data"
true
100
123.45
[true "some string" 100]
{"status" "reviewed" "action" "fix typos"}))
(testing "Associate tag with collections with invalid data"
(let [{:keys [status body]} (transmit-tag/associate-tag :concept-ids
(system/context)
tag-key
nil
{:raw? true
:http-options {:body "{{{{"}})
error (-> body :errors first)]
(is (= 400 status))
(is (re-find #"Invalid JSON: A JSON Object can not directly nest another JSON Object"
error))))
(testing "Associate tag with collections with data exceed 32KB"
(let [too-much-data {"a" (tags/string-of-length 32768)}
expected-msg (format
"Tag association data exceed the maximum length of 32KB for collection with concept id [%s] revision id [%s]."
coll-concept-id nil)
response (tags/associate-by-concept-ids
token tag-key [{:concept-id coll-concept-id
:data too-much-data}])]
(tags/assert-tag-association-response-error?
{[coll-concept-id] {:errors [expected-msg]}}
response)))))
(deftest retrieve-concept-by-tag-association-concept-id-test
(let [{:keys [status errors]} (search/get-search-failure-xml-data
(search/retrieve-concept
"TA10000-CMR" nil {:throw-exceptions true}))]
(testing "Retrieve concept by tag association concept-id is invalid"
(is (= [400 ["Retrieving concept by concept id is not supported for concept type [tag-association]."]]
[status errors])))))
|
da03cd9d1ed3ab0532b45cc477fe3947e56835b912a88d92b85f61b8058277d9 | gator1/jepsen | core_test.clj | (ns block.core-test
(:require [clojure.test :refer :all]
[block.core :refer :all]
[jepsen.core :as jepsen]
[jepsen.generator :as gen]
[jepsen.checker :as checker]
[jepsen.tests :as tests]
[jepsen.nemesis :as nemesis]
[knossos.model :refer [cas-register]])
(:use clojure.tools.logging))
(def ^:private fscap-map
(assoc tests/noop-test
:nodes [:n1 :n2 :n3]
:name "fscp-test"
:concurrency 3
:client (client)
:nemesis (nemesis/partition-random-halves)
:generator (->> (gen/mix [r w cas])
(gen/stagger 1)
(gen/nemesis
(gen/seq (cycle [(gen/sleep 5)
{:type :info, :f :start}
(gen/sleep 5)
{:type :info, :f :stop}])))
(gen/time-limit 100))
:model (cas-register 0)
;:checker checker/linearizable)
:checker (checker/compose
{:perf (checker/perf)
:linear checker/linearizable}))
)
; block consistency testing
(deftest fscp-test
(info "consistency test\n")
(set-reg 0 0)
(is (:valid? (:results (jepsen/run! fscap-map)))))
| null | https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/block/test/block/core_test.clj | clojure | :checker checker/linearizable)
block consistency testing | (ns block.core-test
(:require [clojure.test :refer :all]
[block.core :refer :all]
[jepsen.core :as jepsen]
[jepsen.generator :as gen]
[jepsen.checker :as checker]
[jepsen.tests :as tests]
[jepsen.nemesis :as nemesis]
[knossos.model :refer [cas-register]])
(:use clojure.tools.logging))
(def ^:private fscap-map
(assoc tests/noop-test
:nodes [:n1 :n2 :n3]
:name "fscp-test"
:concurrency 3
:client (client)
:nemesis (nemesis/partition-random-halves)
:generator (->> (gen/mix [r w cas])
(gen/stagger 1)
(gen/nemesis
(gen/seq (cycle [(gen/sleep 5)
{:type :info, :f :start}
(gen/sleep 5)
{:type :info, :f :stop}])))
(gen/time-limit 100))
:model (cas-register 0)
:checker (checker/compose
{:perf (checker/perf)
:linear checker/linearizable}))
)
(deftest fscp-test
(info "consistency test\n")
(set-reg 0 0)
(is (:valid? (:results (jepsen/run! fscap-map)))))
|
9550c78f5a67dafe9098f98f4eb7190d64e60fa344e5a7870e983ada6b6c5c1e | mitchellwrosen/planet-mitchell | Lens.hs | module Optic.Lens
( -- * Lens
Lens
, Lens'
, lens
-- * At
, At(..)
, sans
, Index
, IxValue
-- * Contains
, Contains(..)
) where
import Control.Lens.At (At(at), Contains(contains), Index,
IxValue, sans)
import Control.Lens.Lens (Lens, Lens', lens)
| null | https://raw.githubusercontent.com/mitchellwrosen/planet-mitchell/18dd83204e70fffcd23fe12dd3a80f70b7fa409b/planet-mitchell/src/Optic/Lens.hs | haskell | * Lens
* At
* Contains | module Optic.Lens
Lens
, Lens'
, lens
, At(..)
, sans
, Index
, IxValue
, Contains(..)
) where
import Control.Lens.At (At(at), Contains(contains), Index,
IxValue, sans)
import Control.Lens.Lens (Lens, Lens', lens)
|
8916f89dd8a24815458091f7d56a80b56045a7a7ffd7b129f07f51d751ced68b | helvm/helma | Main.hs | module Main where
import qualified Spec
import Test.Hspec (hspec)
import Test.Hspec.Slow
main :: IO ()
main = (hspec . flip timeThese Spec.spec) =<< configure 4
| null | https://raw.githubusercontent.com/helvm/helma/a32648cf77dbcf3f5fd3c1bd365b9bc55fbab125/hs/test/Main.hs | haskell | module Main where
import qualified Spec
import Test.Hspec (hspec)
import Test.Hspec.Slow
main :: IO ()
main = (hspec . flip timeThese Spec.spec) =<< configure 4
| |
6e471ba46ef5ee1f992f3edc2c9c9f8f0aebccae03434cf2d742b931d599da98 | EMSL-NMR-EPR/Haskell-MFAPipe-Executable | Lens.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
-----------------------------------------------------------------------------
-- |
-- Module : Data.Graph.Inductive.Graph.Lens
Copyright : 2016 - 17 Pacific Northwest National Laboratory
-- License : ECL-2.0 (see the LICENSE file in the distribution)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Lenses for labeled, inductive graphs.
-----------------------------------------------------------------------------
module Data.Graph.Inductive.Graph.Lens
( -- * AsEdge class
AsEdge(..)
, _Graph
-- * Optimization
, mconcatEdgeLabels
) where
import Control.Applicative (liftA2, liftA3)
import Control.Lens (Iso')
import qualified Control.Lens
import qualified Control.Monad.State.Class
import Control.Monad.Trans.State.Strict (StateT)
import qualified Control.Monad.Trans.State.Strict as Control.Monad.Trans.State
import Data.Graph.Inductive.Graph (Graph(), LEdge, LNode, Node)
import qualified Data.Graph.Inductive.Graph
import Data.Map.Strict (Map)
import qualified Data.Map.Strict
import qualified Data.Maybe
import qualified Data.Tuple
import qualified GHC.Exts
| The ' ' class is used for types that can be converted to and from labeled edges .
class (Ord (NodeLabel a), Ord (EdgeLabel a)) => AsEdge a where
{-# MINIMAL _Edge #-}
-- | Type of node labels.
type NodeLabel a :: *
-- | Type of edge labels.
type EdgeLabel a :: *
-- | An isomorphism between a scalar and a labeled edge.
_Edge :: Iso' a (NodeLabel a, NodeLabel a, EdgeLabel a)
instance (Ord a, Ord b) => AsEdge (a, a, b) where
type NodeLabel (a, a, b) = a
type EdgeLabel (a, a, b) = b
_Edge = id
# INLINE _ Edge #
-- | An isomorphism between a list and a labeled, inductive graph.
_Graph :: (AsEdge a, Graph gr) => Iso' [a] (gr (NodeLabel a) (EdgeLabel a))
_Graph = Control.Lens.mapping _Edge . Control.Lens.iso toGraph fromGraph
where
-- | @toGraph xs@ converts @xs@ to an inductive graph.
toGraph xs =
let
Initialize an empty node map .
nodeMap0 = Data.Map.Strict.empty
-- Map each element of the input to a labeled edge from left to right, and collect the results, along with the modified node map.
(edges, nodeMap) = Control.Monad.Trans.State.runState (mapM toLEdgeM xs) nodeMap0
Construct a list of labeled nodes .
nodes = map Data.Tuple.swap (Data.Map.Strict.toAscList nodeMap)
in
Construct an inductive graph .
Data.Graph.Inductive.Graph.mkGraph nodes edges
| @fromGraph converts @gr@ to a list .
fromGraph = liftA2 Data.Maybe.mapMaybe fromLEdgeMaybe Data.Graph.Inductive.Graph.labEdges
| Convert an edge - like thing into a ' LEdge ' .
toLEdgeM :: (Monad m, Ord a) => (a, a, b) -> StateT (Map a Node) m (LEdge b)
toLEdgeM (aL, aR, b) = liftA2 (\(nodeL, _) (nodeR, _) -> (nodeL, nodeR, b)) (toLNodeM aL) (toLNodeM aR)
-- | Convert a node-like thing into a 'LNode'.
toLNodeM :: (Monad m, Ord a) => a -> StateT (Map a Node) m (LNode a)
toLNodeM = (fmap . flip (,)) <*> (Control.Monad.State.Class.state . liftA2 (flip (liftA3 maybe) (flip (,))) (\k m -> let x = Data.Map.Strict.size m in (x, Data.Map.Strict.insert k x m)) Data.Map.Strict.lookup)
| Convert a ' LEdge ' into an edge - like thing .
fromLEdgeMaybe :: (Graph gr) => gr a b -> LEdge b -> Maybe (a, a, b)
fromLEdgeMaybe gr (nodeL, nodeR, b) = liftA2 (\aL aR -> (aL, aR, b)) (Data.Graph.Inductive.Graph.lab gr nodeL) (Data.Graph.Inductive.Graph.lab gr nodeR)
-- | Assume that each edge label is a tuple of a 'Monoid' and a /true/ edge label, and 'mappend' any duplicates. Subject to list fusion.
mconcatEdgeLabels :: (AsEdge a, EdgeLabel a ~ (m, b), Monoid m, Ord b) => [a] -> [a]
mconcatEdgeLabels = Control.Lens.over (Control.Lens.mapping _Edge) f
where
f :: (Monoid m, Ord a, Ord b) => [(a, a, (m, b))] -> [(a, a, (m, b))]
f xs = GHC.Exts.build (\cons nil -> Data.Map.Strict.foldrWithKey (\(aL, aR, b) m -> cons (aL, aR, (m, b))) nil (foldr (\(aL, aR, (m, b)) -> Data.Map.Strict.alter (Just . mappend m . maybe mempty id) (aL, aR, b)) Data.Map.Strict.empty xs))
| null | https://raw.githubusercontent.com/EMSL-NMR-EPR/Haskell-MFAPipe-Executable/8a7fd13202d3b6b7380af52d86e851e995a9b53e/fgl-lens/src/Data/Graph/Inductive/Graph/Lens.hs | haskell | ---------------------------------------------------------------------------
|
Module : Data.Graph.Inductive.Graph.Lens
License : ECL-2.0 (see the LICENSE file in the distribution)
Maintainer :
Stability : experimental
Portability : portable
Lenses for labeled, inductive graphs.
---------------------------------------------------------------------------
* AsEdge class
* Optimization
# MINIMAL _Edge #
| Type of node labels.
| Type of edge labels.
| An isomorphism between a scalar and a labeled edge.
| An isomorphism between a list and a labeled, inductive graph.
| @toGraph xs@ converts @xs@ to an inductive graph.
Map each element of the input to a labeled edge from left to right, and collect the results, along with the modified node map.
| Convert a node-like thing into a 'LNode'.
| Assume that each edge label is a tuple of a 'Monoid' and a /true/ edge label, and 'mappend' any duplicates. Subject to list fusion. | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
Copyright : 2016 - 17 Pacific Northwest National Laboratory
module Data.Graph.Inductive.Graph.Lens
AsEdge(..)
, _Graph
, mconcatEdgeLabels
) where
import Control.Applicative (liftA2, liftA3)
import Control.Lens (Iso')
import qualified Control.Lens
import qualified Control.Monad.State.Class
import Control.Monad.Trans.State.Strict (StateT)
import qualified Control.Monad.Trans.State.Strict as Control.Monad.Trans.State
import Data.Graph.Inductive.Graph (Graph(), LEdge, LNode, Node)
import qualified Data.Graph.Inductive.Graph
import Data.Map.Strict (Map)
import qualified Data.Map.Strict
import qualified Data.Maybe
import qualified Data.Tuple
import qualified GHC.Exts
| The ' ' class is used for types that can be converted to and from labeled edges .
class (Ord (NodeLabel a), Ord (EdgeLabel a)) => AsEdge a where
type NodeLabel a :: *
type EdgeLabel a :: *
_Edge :: Iso' a (NodeLabel a, NodeLabel a, EdgeLabel a)
instance (Ord a, Ord b) => AsEdge (a, a, b) where
type NodeLabel (a, a, b) = a
type EdgeLabel (a, a, b) = b
_Edge = id
# INLINE _ Edge #
_Graph :: (AsEdge a, Graph gr) => Iso' [a] (gr (NodeLabel a) (EdgeLabel a))
_Graph = Control.Lens.mapping _Edge . Control.Lens.iso toGraph fromGraph
where
toGraph xs =
let
Initialize an empty node map .
nodeMap0 = Data.Map.Strict.empty
(edges, nodeMap) = Control.Monad.Trans.State.runState (mapM toLEdgeM xs) nodeMap0
Construct a list of labeled nodes .
nodes = map Data.Tuple.swap (Data.Map.Strict.toAscList nodeMap)
in
Construct an inductive graph .
Data.Graph.Inductive.Graph.mkGraph nodes edges
| @fromGraph converts @gr@ to a list .
fromGraph = liftA2 Data.Maybe.mapMaybe fromLEdgeMaybe Data.Graph.Inductive.Graph.labEdges
| Convert an edge - like thing into a ' LEdge ' .
toLEdgeM :: (Monad m, Ord a) => (a, a, b) -> StateT (Map a Node) m (LEdge b)
toLEdgeM (aL, aR, b) = liftA2 (\(nodeL, _) (nodeR, _) -> (nodeL, nodeR, b)) (toLNodeM aL) (toLNodeM aR)
toLNodeM :: (Monad m, Ord a) => a -> StateT (Map a Node) m (LNode a)
toLNodeM = (fmap . flip (,)) <*> (Control.Monad.State.Class.state . liftA2 (flip (liftA3 maybe) (flip (,))) (\k m -> let x = Data.Map.Strict.size m in (x, Data.Map.Strict.insert k x m)) Data.Map.Strict.lookup)
| Convert a ' LEdge ' into an edge - like thing .
fromLEdgeMaybe :: (Graph gr) => gr a b -> LEdge b -> Maybe (a, a, b)
fromLEdgeMaybe gr (nodeL, nodeR, b) = liftA2 (\aL aR -> (aL, aR, b)) (Data.Graph.Inductive.Graph.lab gr nodeL) (Data.Graph.Inductive.Graph.lab gr nodeR)
mconcatEdgeLabels :: (AsEdge a, EdgeLabel a ~ (m, b), Monoid m, Ord b) => [a] -> [a]
mconcatEdgeLabels = Control.Lens.over (Control.Lens.mapping _Edge) f
where
f :: (Monoid m, Ord a, Ord b) => [(a, a, (m, b))] -> [(a, a, (m, b))]
f xs = GHC.Exts.build (\cons nil -> Data.Map.Strict.foldrWithKey (\(aL, aR, b) m -> cons (aL, aR, (m, b))) nil (foldr (\(aL, aR, (m, b)) -> Data.Map.Strict.alter (Just . mappend m . maybe mempty id) (aL, aR, b)) Data.Map.Strict.empty xs))
|
2d1d48d3a576a30de59e4bc5304a5e47156487188c463dca0c7a1a1b77bec627 | NetComposer/nksip | nksip_parse_via.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2019 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% ===================================================================
@doc SIP Via Parser
%%
%% @see nksip_parse
%% @see nksip_parse_header
%% @see nksip_parse_sipmsg
%% @see nksip_parse_via
%% @see vias/1
%% @see nksip_parse:ruris/1
%% @see binary()
@private
%% @end
%% ===================================================================
-module(nksip_parse_via).
-author('Carlos Gonzalez <>').
-include("nksip.hrl").
-export([vias/1]).
%% ===================================================================
%% Public
%% ===================================================================
%% @doc Parse a series of VIAs in a string
%%
%% @end
-spec vias(binary() | string() | #via{}) ->
[#via{}] | error.
vias(#via{}=Via) ->
[Via];
vias(Bin) when is_binary(Bin) ->
vias(binary_to_list(Bin));
vias(String) when is_list(String) ->
vias(strip(String), []).
%% ===================================================================
%% Private
%% ===================================================================
@private
vias(String, Acc) ->
case header(strip(String), #via{}) of
{#via{}=Via, []} when Acc==[] ->
[Via];
{#via{}=Via, []} ->
lists:reverse([Via|Acc]);
{#via{}=Via, Rest} ->
vias(Rest, [Via|Acc]);
{error, _Type, _Line} ->
% lager:debug("Error parsing via ~s: ~p (~p)", [String, _Type, _Line]),
error
end.
@private VIA header
header("SIP"++Rest1, Via) ->
case strip(Rest1) of
[$/|Rest2] ->
case strip(Rest2) of
"2.0"++Rest3 ->
case strip(Rest3) of
[$/|Rest4] ->
proto(strip(Rest4), [], Via);
_ ->
{error, header, ?LINE}
end;
_ ->
{error, header, ?LINE}
end;
_ ->
{error, header, ?LINE}
end;
header(_, _Via) ->
{error, header, ?LINE}.
@private VIA Proto
proto([], _Acc, _Via) ->
{error, proto, ?LINE};
proto([Ch|Rest], Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case Acc of
[] ->
{error, proto, ?LINE};
_ ->
Raw = lists:reverse(Acc),
Transp = case string:to_lower(Raw) of
"udp" -> udp;
"tcp" -> tcp;
"tls" -> tls;
"sctp" -> sctp;
"ws" -> ws;
"wss" -> wss;
_ -> list_to_binary(Raw)
end,
domain(strip(Rest), [], false, Via#via{transp=Transp})
end;
proto([Ch|Rest], Acc, Via) ->
proto(Rest, [Ch|Acc], Via).
%% @private VIA Domain
domain([], Acc, Ip6, Via) ->
case Acc==[] orelse Ip6 of
true ->
{error, domain, ?LINE};
false ->
Via1 = Via#via{domain=list_to_binary(lists:reverse(Acc))},
{Via1, []}
end;
domain([Ch|_]=Rest, Acc, Ip6, Via) when Ch==$;; Ch==$?; Ch==$, ->
case Acc==[] orelse Ip6 of
true ->
{error, domain, ?LINE};
false ->
Via1 = Via#via{domain=list_to_binary(lists:reverse(Acc))},
opts(Rest, Via1)
end;
domain([$[|Rest], Acc, Ip6, Via) ->
case Acc==[] andalso not Ip6 of
true ->
domain(Rest, [$[|Acc], true, Via);
false ->
{error, domain, ?LINE}
end;
domain([$]|Rest], Acc, Ip6, Via) ->
case Acc/=[] andalso Ip6 of
true ->
domain(Rest, [$]|Acc], false, Via);
false ->
{error, domain, ?LINE}
end;
domain([$:|Rest], Acc, false, Via) ->
case Acc==[] of
true ->
{error, domain, ?LINE};
false ->
Via1 = Via#via{domain=list_to_binary(lists:reverse(Acc))},
port(strip(Rest), [], Via1)
end;
domain([Ch|_]=Rest, Acc, Ip6, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
domain([], Acc, Ip6, Via);
[Ch1|_]=Rest1 when Ch1==$:; Ch1==$;; Ch1==$,; Ch1==$[; Ch1==$] ->
domain(Rest1, Acc, Ip6, Via);
_ ->
{error, domain, ?LINE}
end;
domain([Ch|Rest], Acc, Ip6, Via) ->
domain(Rest, [Ch|Acc], Ip6, Via).
@private VIA Port
port([], Acc, Via) ->
case Acc==[] of
true ->
{error, port, ?LINE};
false ->
case catch list_to_integer(lists:reverse(Acc)) of
Port when is_integer(Port), Port>=0, Port=<65535 ->
{Via#via{port = Port}, []};
_ ->
{error, port, ?LINE}
end
end;
port([Ch|_]=Rest, Acc, Via) when Ch==$;; Ch==$, ->
case Acc of
[] ->
{error, port, ?LINE};
_ ->
case catch list_to_integer(lists:reverse(Acc)) of
Port when is_integer(Port), Port >= 0, Port =< 65535 ->
Via1 = Via#via{port = Port},
opts(Rest, Via1);
_ ->
{error, port, ?LINE}
end
end;
port([Ch|_]=Rest, Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
port([], Acc, Via);
[Ch1|_]=Rest1 when Ch1==$;; Ch1==$, ->
port(Rest1, Acc, Via);
_ ->
{error, port, ?LINE}
end;
port([Ch|Rest], Acc, Via) ->
port(Rest, [Ch|Acc], Via).
@private VIA Opts
opts([], Via) ->
{Via, []};
opts([Ch|Rest], Via) ->
case Ch of
$; ->
opts_key(strip(Rest), [], Via);
$, ->
case strip(Rest) of
[] ->
{error, opts, ?LINE};
Rest1 ->
{Via, Rest1}
end;
_ when Ch==32; Ch==9; Ch==13 ->
opts(strip(Rest), Via);
_ ->
{error, opts, ?LINE}
end.
@private URI Opts Keys
opts_key([], Acc, Via) ->
case Acc of
[] ->
{error, opts_key, ?LINE};
_ ->
Opt = list_to_binary(lists:reverse(Acc)),
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts([], Via1)
end;
opts_key([Ch|_]=Rest, Acc, Via) when Ch==$;; Ch==$, ->
case Acc of
[] ->
{error, opts_key, ?LINE};
_ ->
Opt = list_to_binary(lists:reverse(Acc)),
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts(Rest, Via1)
end;
opts_key([$=|Rest], Acc, Via) ->
case Acc of
[] ->
{error, opts_key, ?LINE};
_ ->
opts_value(strip(Rest), lists:reverse(Acc), [], Via)
end;
opts_key([Ch|_]=Rest, Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
opts_key([], Acc, Via);
[Ch1|_]=Rest1 when Ch1==$;; Ch1==$,; Ch1==$= ->
opts_key(Rest1, Acc, Via);
_ ->
{error, opts_key, ?LINE}
end;
opts_key([Ch|Rest], Acc, Via) ->
opts_key(Rest, [Ch|Acc], Via).
@private URI Opts Values
opts_value([], Key, Acc, Via) ->
case Acc of
[] ->
{error, opts_value, ?LINE};
_ ->
Opt = {list_to_binary(Key), list_to_binary(lists:reverse(Acc))},
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts([], Via1)
end;
opts_value([Ch|_]=Rest, Key, Acc, Via) when Ch==$;; Ch==$, ->
case Acc of
[] ->
{error, opts_value, ?LINE};
_ ->
Opt = {list_to_binary(Key), list_to_binary(lists:reverse(Acc))},
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts(Rest, Via1)
end;
opts_value([Ch|_]=Rest, Key, Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
opts_value([], Key, Acc, Via);
[Ch1|_]=Rest1 when Ch1==$;; Ch1==$, ->
opts_value(Rest1, Key, Acc, Via);
_ ->
{error, opts_value, ?LINE}
end;
opts_value([Ch|Rest], Key, Acc, Via) ->
opts_value(Rest, Key, [Ch|Acc], Via).
@private VIA Strip white space
strip([32|Rest]) -> strip(Rest);
strip([13|Rest]) -> strip(Rest);
strip([10|Rest]) -> strip(Rest);
strip([9|Rest]) -> strip(Rest);
strip(Rest) -> Rest.
%% ===================================================================
EUnit tests
%% ===================================================================
% -ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Exercises vias/1 against malformed input (wrong SIP version, missing
%% host, dangling comma) and against well-formed headers with flag options,
%% key=value options, IPv6 hosts, mixed-case transports and folded white
%% space.
via_test() ->
    %% Malformed: only SIP version 2.0 is accepted.
    error = vias("SIP/3.0/udp host"),
    %% Malformed: transport present but host missing.
    error = vias("SIP/2.0/udp "),
    %% Malformed: a `,' must be followed by another Via entry.
    error = vias("SIP/2.0/udp a, "),
    %% Flag options and key=value options are kept in source order.
    [#via{transp=udp, port=0, opts=[<<"rport">>, {<<"received">>, <<"1.2.3.4">>}, <<"c">>]}] =
        vias("SIP/2.0/udp host;rport;received=1.2.3.4 ; c"),
    %% Unknown transports stay binaries; IPv6 references keep brackets;
    %% transport names are case-insensitive ("UdP" -> udp).
    [
        #via{transp = <<"kkk">>, domain = <<"host">>, port=1500, opts=[]},
        #via{transp = udp, domain = <<"[1:2::3]">>, port=25, opts = [<<"d">>]}
    ] =
        vias(" SIP / 2.0 / kkk host : 1500 , SIP/2.0/UdP [1:2::3]:25;d"),
    [#via{domain= <<"host">>, port=12}] = vias(" SIP / 2.0/TCP host:12"),
    %% Folded/extra white space around every separator is tolerated.
    [
        #via{transp=tls, domain= <<"host">>, port=0},
        #via{domain= <<"host2">>, port=5061,
            opts=[<<"maddr">>, {<<"received">>, <<"1.2.3.4">>}, <<"a">>]}
    ] =
        vias("SIP/2.0/TLS host , SIP / 2.0 / UDP host2 : 5061 "
            "; maddr; received = 1.2.3.4 ; a").
% -endif.
| null | https://raw.githubusercontent.com/NetComposer/nksip/7fbcc66806635dc8ecc5d11c30322e4d1df36f0a/src/nksip_parse_via.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
===================================================================
@see nksip_parse
@see nksip_parse_header
@see nksip_parse_sipmsg
@see nksip_parse_via
@see vias/1
@see nksip_parse:ruris/1
@see binary()
@end
===================================================================
===================================================================
Public
===================================================================
@doc Parse a series of VIAs in a string
@end
===================================================================
Private
===================================================================
lager:debug("Error parsing via ~s: ~p (~p)", [String, _Type, _Line]),
@private VIA Domain
===================================================================
===================================================================
-ifdef(TEST).
-endif. | Copyright ( c ) 2019 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc SIP Via Parser
@private
-module(nksip_parse_via).
-author('Carlos Gonzalez <>').
-include("nksip.hrl").
-export([vias/1]).
-spec vias(binary() | string() | #via{}) ->
[#via{}] | error.
vias(#via{}=Via) ->
[Via];
vias(Bin) when is_binary(Bin) ->
vias(binary_to_list(Bin));
vias(String) when is_list(String) ->
vias(strip(String), []).
@private
vias(String, Acc) ->
case header(strip(String), #via{}) of
{#via{}=Via, []} when Acc==[] ->
[Via];
{#via{}=Via, []} ->
lists:reverse([Via|Acc]);
{#via{}=Via, Rest} ->
vias(Rest, [Via|Acc]);
{error, _Type, _Line} ->
error
end.
@private VIA header
header("SIP"++Rest1, Via) ->
case strip(Rest1) of
[$/|Rest2] ->
case strip(Rest2) of
"2.0"++Rest3 ->
case strip(Rest3) of
[$/|Rest4] ->
proto(strip(Rest4), [], Via);
_ ->
{error, header, ?LINE}
end;
_ ->
{error, header, ?LINE}
end;
_ ->
{error, header, ?LINE}
end;
header(_, _Via) ->
{error, header, ?LINE}.
@private VIA Proto
proto([], _Acc, _Via) ->
{error, proto, ?LINE};
proto([Ch|Rest], Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case Acc of
[] ->
{error, proto, ?LINE};
_ ->
Raw = lists:reverse(Acc),
Transp = case string:to_lower(Raw) of
"udp" -> udp;
"tcp" -> tcp;
"tls" -> tls;
"sctp" -> sctp;
"ws" -> ws;
"wss" -> wss;
_ -> list_to_binary(Raw)
end,
domain(strip(Rest), [], false, Via#via{transp=Transp})
end;
proto([Ch|Rest], Acc, Via) ->
proto(Rest, [Ch|Acc], Via).
domain([], Acc, Ip6, Via) ->
case Acc==[] orelse Ip6 of
true ->
{error, domain, ?LINE};
false ->
Via1 = Via#via{domain=list_to_binary(lists:reverse(Acc))},
{Via1, []}
end;
domain([Ch|_]=Rest, Acc, Ip6, Via) when Ch==$;; Ch==$?; Ch==$, ->
case Acc==[] orelse Ip6 of
true ->
{error, domain, ?LINE};
false ->
Via1 = Via#via{domain=list_to_binary(lists:reverse(Acc))},
opts(Rest, Via1)
end;
domain([$[|Rest], Acc, Ip6, Via) ->
case Acc==[] andalso not Ip6 of
true ->
domain(Rest, [$[|Acc], true, Via);
false ->
{error, domain, ?LINE}
end;
domain([$]|Rest], Acc, Ip6, Via) ->
case Acc/=[] andalso Ip6 of
true ->
domain(Rest, [$]|Acc], false, Via);
false ->
{error, domain, ?LINE}
end;
domain([$:|Rest], Acc, false, Via) ->
case Acc==[] of
true ->
{error, domain, ?LINE};
false ->
Via1 = Via#via{domain=list_to_binary(lists:reverse(Acc))},
port(strip(Rest), [], Via1)
end;
domain([Ch|_]=Rest, Acc, Ip6, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
domain([], Acc, Ip6, Via);
[Ch1|_]=Rest1 when Ch1==$:; Ch1==$;; Ch1==$,; Ch1==$[; Ch1==$] ->
domain(Rest1, Acc, Ip6, Via);
_ ->
{error, domain, ?LINE}
end;
domain([Ch|Rest], Acc, Ip6, Via) ->
domain(Rest, [Ch|Acc], Ip6, Via).
@private VIA Port
port([], Acc, Via) ->
case Acc==[] of
true ->
{error, port, ?LINE};
false ->
case catch list_to_integer(lists:reverse(Acc)) of
Port when is_integer(Port), Port>=0, Port=<65535 ->
{Via#via{port = Port}, []};
_ ->
{error, port, ?LINE}
end
end;
port([Ch|_]=Rest, Acc, Via) when Ch==$;; Ch==$, ->
case Acc of
[] ->
{error, port, ?LINE};
_ ->
case catch list_to_integer(lists:reverse(Acc)) of
Port when is_integer(Port), Port >= 0, Port =< 65535 ->
Via1 = Via#via{port = Port},
opts(Rest, Via1);
_ ->
{error, port, ?LINE}
end
end;
port([Ch|_]=Rest, Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
port([], Acc, Via);
[Ch1|_]=Rest1 when Ch1==$;; Ch1==$, ->
port(Rest1, Acc, Via);
_ ->
{error, port, ?LINE}
end;
port([Ch|Rest], Acc, Via) ->
port(Rest, [Ch|Acc], Via).
@private VIA Opts
opts([], Via) ->
{Via, []};
opts([Ch|Rest], Via) ->
case Ch of
$; ->
opts_key(strip(Rest), [], Via);
$, ->
case strip(Rest) of
[] ->
{error, opts, ?LINE};
Rest1 ->
{Via, Rest1}
end;
_ when Ch==32; Ch==9; Ch==13 ->
opts(strip(Rest), Via);
_ ->
{error, opts, ?LINE}
end.
@private URI Opts Keys
opts_key([], Acc, Via) ->
case Acc of
[] ->
{error, opts_key, ?LINE};
_ ->
Opt = list_to_binary(lists:reverse(Acc)),
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts([], Via1)
end;
opts_key([Ch|_]=Rest, Acc, Via) when Ch==$;; Ch==$, ->
case Acc of
[] ->
{error, opts_key, ?LINE};
_ ->
Opt = list_to_binary(lists:reverse(Acc)),
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts(Rest, Via1)
end;
opts_key([$=|Rest], Acc, Via) ->
case Acc of
[] ->
{error, opts_key, ?LINE};
_ ->
opts_value(strip(Rest), lists:reverse(Acc), [], Via)
end;
opts_key([Ch|_]=Rest, Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
opts_key([], Acc, Via);
[Ch1|_]=Rest1 when Ch1==$;; Ch1==$,; Ch1==$= ->
opts_key(Rest1, Acc, Via);
_ ->
{error, opts_key, ?LINE}
end;
opts_key([Ch|Rest], Acc, Via) ->
opts_key(Rest, [Ch|Acc], Via).
@private URI Opts Values
opts_value([], Key, Acc, Via) ->
case Acc of
[] ->
{error, opts_value, ?LINE};
_ ->
Opt = {list_to_binary(Key), list_to_binary(lists:reverse(Acc))},
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts([], Via1)
end;
opts_value([Ch|_]=Rest, Key, Acc, Via) when Ch==$;; Ch==$, ->
case Acc of
[] ->
{error, opts_value, ?LINE};
_ ->
Opt = {list_to_binary(Key), list_to_binary(lists:reverse(Acc))},
Via1 = Via#via{opts = Via#via.opts++[Opt]},
opts(Rest, Via1)
end;
opts_value([Ch|_]=Rest, Key, Acc, Via) when Ch==32; Ch==9; Ch==13 ->
case strip(Rest) of
[] ->
opts_value([], Key, Acc, Via);
[Ch1|_]=Rest1 when Ch1==$;; Ch1==$, ->
opts_value(Rest1, Key, Acc, Via);
_ ->
{error, opts_value, ?LINE}
end;
opts_value([Ch|Rest], Key, Acc, Via) ->
opts_value(Rest, Key, [Ch|Acc], Via).
@private VIA Strip white space
strip([32|Rest]) -> strip(Rest);
strip([13|Rest]) -> strip(Rest);
strip([10|Rest]) -> strip(Rest);
strip([9|Rest]) -> strip(Rest);
strip(Rest) -> Rest.
EUnit tests
-include_lib("eunit/include/eunit.hrl").
via_test() ->
error = vias("SIP/3.0/udp host"),
error = vias("SIP/2.0/udp "),
error = vias("SIP/2.0/udp a, "),
[#via{transp=udp, port=0, opts=[<<"rport">>, {<<"received">>, <<"1.2.3.4">>}, <<"c">>]}] =
vias("SIP/2.0/udp host;rport;received=1.2.3.4 ; c"),
[
#via{transp = <<"kkk">>, domain = <<"host">>, port=1500, opts=[]},
#via{transp = udp, domain = <<"[1:2::3]">>, port=25, opts = [<<"d">>]}
] =
vias(" SIP / 2.0 / kkk host : 1500 , SIP/2.0/UdP [1:2::3]:25;d"),
[#via{domain= <<"host">>, port=12}] = vias(" SIP / 2.0/TCP host:12"),
[
#via{transp=tls, domain= <<"host">>, port=0},
#via{domain= <<"host2">>, port=5061,
opts=[<<"maddr">>, {<<"received">>, <<"1.2.3.4">>}, <<"a">>]}
] =
vias("SIP/2.0/TLS host , SIP / 2.0 / UDP host2 : 5061 "
"; maddr; received = 1.2.3.4 ; a").
|
dae3103b60ff2ab43e81f4ae07b13d382f0b0f908332af5ba6391d908fcc459c | snoyberg/conduit | TextSpec.hs | # LANGUAGE FlexibleContexts , OverloadedStrings #
module Data.Conduit.TextSpec (spec) where
import Data.Conduit ((.|), runConduit, runConduitPure)
import Control.Exception (SomeException)
import qualified Data.Conduit.Text as CT
import qualified Data.Conduit as C
import Data.Conduit.Lift (runCatchC, catchCatchC)
import Data.Functor.Identity (runIdentity)
import qualified Data.Conduit.List as CL
import Test.Hspec
import Test.Hspec.QuickCheck
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TEE
import qualified Data.Text.Lazy.Encoding as TLE
import Control.Arrow
import qualified Data.ByteString as S
import qualified Data.Text.Lazy as TL
import qualified Data.ByteString.Lazy as L
import Control.Monad.Catch.Pure (runCatchT)
-- | Test suite for "Data.Conduit.Text": round-trip decode/encode for every
-- supported encoding, error propagation on malformed input, and the
-- line-splitting conduits.
spec :: Spec
spec = describe "Data.Conduit.Text" $ do
    describe "text" $ do
        -- `go` builds one sub-suite per encoding: `tenc`/`tdec` are the
        -- reference lazy-text encode/decode functions, `cenc` the conduit
        -- codec under test.
        let go enc tenc tdec cenc = describe enc $ do
                prop "single chunk" $ \chars -> do
                    let tl = TL.pack chars
                        lbs = tenc tl
                        src = CL.sourceList $ L.toChunks lbs
                    ts <- runConduit $ src .| CT.decode cenc .| CL.consume
                    TL.fromChunks ts `shouldBe` tl
                prop "many chunks" $ \chars -> do
                    -- Feed the input one byte per chunk to stress
                    -- incremental decoding across chunk boundaries.
                    let tl = TL.pack chars
                        lbs = tenc tl
                        src = mconcat $ map (CL.sourceList . return . S.singleton) $ L.unpack lbs
                    ts <- runConduit $ src .| CT.decode cenc .| CL.consume
                    TL.fromChunks ts `shouldBe` tl
                -- Check whether raw bytes are decoded correctly, in
                -- particular that Text decoding produces an error if
                -- and only if Conduit does.
                prop "raw bytes" $ \bytes -> do
                    let lbs = L.pack bytes
                        src = CL.sourceList $ L.toChunks lbs
                        tl' = tdec lbs
                        etl = runConduit $ src .| CT.decode cenc .| CL.consume
                    case etl of
                        (Left _) -> (return $! TL.toStrict tl') `shouldThrow` anyException
                        (Right tl) -> TL.fromChunks tl `shouldBe` tl'
                prop "encoding" $ \chars -> do
                    let tss = map T.pack chars
                        lbs = tenc $ TL.fromChunks tss
                        src = mconcat $ map (CL.sourceList . return) tss
                    bss <- runConduit $ src .| CT.encode cenc .| CL.consume
                    L.fromChunks bss `shouldBe` lbs
                prop "valid then invalid" $ \x y chars -> do
                    -- Valid prefix must still be yielded before the NUL
                    -- padding triggers a decode failure.
                    let tss = map T.pack ([x, y]:chars)
                        ts = T.concat tss
                        lbs = tenc (TL.fromChunks tss) `L.append` "\0\0\0\0\0\0\0"
                        src = mapM_ C.yield $ L.toChunks lbs
                    Just x' <- runConduit $ src .| CT.decode cenc .| C.await
                    (x' `T.isPrefixOf` ts) `shouldBe` True
        go "utf8" TLE.encodeUtf8 TLE.decodeUtf8 CT.utf8
        go "utf16_le" TLE.encodeUtf16LE TLE.decodeUtf16LE CT.utf16_le
        go "utf16_be" TLE.encodeUtf16BE TLE.decodeUtf16BE CT.utf16_be
        go "utf32_le" TLE.encodeUtf32LE TLE.decodeUtf32LE CT.utf32_le
        go "utf32_be" TLE.encodeUtf32BE TLE.decodeUtf32BE CT.utf32_be
        it "mixed utf16 and utf8" $ do
            -- Valid UTF-16LE prefix is yielded; the trailing garbage only
            -- fails once the stream is fully drained.
            let bs = "8\NUL:\NULu\NUL\215\216\217\218"
                src = C.yield bs .| CT.decode CT.utf16_le
            text <- runConduit $ src .| C.await
            text `shouldBe` Just "8:u"
            (runConduit $ src .| CL.sinkNull) `shouldThrow` anyException
        it "invalid utf8" $ do
            let bs = S.pack [0..255]
                src = C.yield bs .| CT.decode CT.utf8
            text <- runConduit $ src .| C.await
            text `shouldBe` Just (T.pack $ map toEnum [0..127])
            (runConduit $ src .| CL.sinkNull) `shouldThrow` anyException
        it "catch UTF8 exceptions" $ do
            -- The exception is catchable in-stream and the undecodable
            -- remainder is passed through to the downstream consumer.
            let badBS = "this is good\128\128\0that was bad"
                grabExceptions inner = C.catchC
                    (inner .| CL.map Right)
                    (\e -> C.yield (Left (e :: CT.TextException)))
            res <- runConduit $ C.yield badBS .| (,)
                <$> (grabExceptions (CT.decode CT.utf8) .| CL.consume)
                <*> CL.consume
            first (map (either (Left . show) Right)) res `shouldBe`
                ( [ Right "this is good"
                  , Left $ show $ CT.NewDecodeException "UTF-8" 12 "\128\128\0t"
                  ]
                , ["\128\128\0that was bad"]
                )
        it "catch UTF8 exceptions, pure" $ do
            -- Same as above, but in a pure pipeline via runCatchC.
            let badBS = "this is good\128\128\0that was bad"
                grabExceptions inner = do
                    res <- runCatchC $ inner .| CL.map Right
                    case res of
                        Left e -> C.yield $ Left e
                        Right () -> return ()
            let res = runConduitPure $ C.yield badBS .| (,)
                <$> (grabExceptions (CT.decode CT.utf8) .| CL.consume)
                <*> CL.consume
            first (map (either (Left . show) Right)) res `shouldBe`
                ( [ Right "this is good"
                  , Left $ show $ CT.NewDecodeException "UTF-8" 12 "\128\128\0t"
                  ]
                , ["\128\128\0that was bad"]
                )
        it "catch UTF8 exceptions, catchExceptionC" $ do
            -- Same scenario through catchCatchC/CatchT.
            let badBS = "this is good\128\128\0that was bad"
                grabExceptions inner = catchCatchC
                    (inner .| CL.map Right)
                    (\e -> C.yield $ Left e)
            let Right res = runIdentity $ runCatchT $ runConduit $ C.yield badBS .| (,)
                <$> (grabExceptions (CT.decode CT.utf8) .| CL.consume)
                <*> CL.consume
            first (map (either (Left . show) Right)) res `shouldBe`
                ( [ Right "this is good"
                  , Left $ show $ CT.NewDecodeException "UTF-8" 12 "\128\128\0t"
                  ]
                , ["\128\128\0that was bad"]
                )
        it "catch UTF8 exceptions, catchExceptionC, decodeUtf8" $ do
            let badBS = ["this is good", "\128\128\0that was bad"]
                grabExceptions inner = catchCatchC
                    (inner .| CL.map Right)
                    (\e -> C.yield $ Left e)
            let Right res = runIdentity $ runCatchT $ runConduit $
                    mapM_ C.yield badBS .| (,)
                        <$> (grabExceptions CT.decodeUtf8 .| CL.consume)
                        <*> CL.consume
            first (map (either (Left . const ()) Right)) res `shouldBe`
                ( [ Right "this is good"
                  , Left ()
                  ]
                , ["\128\128\0that was bad"]
                )
        prop "lenient UTF8 decoding" $ \good1 good2 -> do
            -- Lenient decoding must agree with text's own lenientDecode.
            let bss = [TE.encodeUtf8 $ T.pack good1, "\128\129\130", TE.encodeUtf8 $ T.pack good2]
                bs = S.concat bss
                expected = TE.decodeUtf8With TEE.lenientDecode bs
                actual = runConduitPure $ mapM_ C.yield bss .| CT.decodeUtf8Lenient .| CL.consume
            T.concat actual `shouldBe` expected
    -- CT.lines: newline splitting across arbitrarily chunked input.
    describe "text lines" $ do
        it "yields nothing given nothing" $
            (runConduit $ CL.sourceList [] .| CT.lines .| CL.consume) ==
                [[]]
        it "yields nothing given only empty text" $
            (runConduit $ CL.sourceList [""] .| CT.lines .| CL.consume) ==
                [[]]
        it "works across split lines" $
            (runConduit $ CL.sourceList ["abc", "d\nef"] .| CT.lines .| CL.consume) ==
                [["abcd", "ef"]]
        it "works with multiple lines in an item" $
            (runConduit $ CL.sourceList ["ab\ncd\ne"] .| CT.lines .| CL.consume) ==
                [["ab", "cd", "e"]]
        it "works with ending on a newline" $
            (runConduit $ CL.sourceList ["ab\n"] .| CT.lines .| CL.consume) ==
                [["ab"]]
        it "works with ending a middle item on a newline" $
            (runConduit $ CL.sourceList ["ab\n", "cd\ne"] .| CT.lines .| CL.consume) ==
                [["ab", "cd", "e"]]
        it "works with empty text" $
            (runConduit $ CL.sourceList ["ab", "", "cd"] .| CT.lines .| CL.consume) ==
                [["abcd"]]
        it "works with empty lines" $
            (runConduit $ CL.sourceList ["\n\n"] .| CT.lines .| CL.consume) ==
                [["", ""]]
    -- CT.linesBounded: as above, but enforcing a maximum line length.
    describe "text lines bounded" $ do
        it "yields nothing given nothing" $
            (runConduit $ CL.sourceList [] .| CT.linesBounded 80 .| CL.consume) ==
                [[]]
        it "yields nothing given only empty text" $
            (runConduit $ CL.sourceList [""] .| CT.linesBounded 80 .| CL.consume) ==
                [[]]
        it "works across split lines" $
            (runConduit $ CL.sourceList ["abc", "d\nef"] .| CT.linesBounded 80 .| CL.consume) ==
                [["abcd", "ef"]]
        it "works with multiple lines in an item" $
            (runConduit $ CL.sourceList ["ab\ncd\ne"] .| CT.linesBounded 80 .| CL.consume) ==
                [["ab", "cd", "e"]]
        it "works with ending on a newline" $
            (runConduit $ CL.sourceList ["ab\n"] .| CT.linesBounded 80 .| CL.consume) `shouldBe`
                [["ab"]]
        it "works with ending a middle item on a newline" $
            (runConduit $ CL.sourceList ["ab\n", "cd\ne"] .| CT.linesBounded 80 .| CL.consume) `shouldBe`
                [["ab", "cd", "e"]]
        it "works with empty text" $
            (runConduit $ CL.sourceList ["ab", "", "cd"] .| CT.linesBounded 80 .| CL.consume) `shouldBe`
                [["abcd"]]
        it "works with empty lines" $
            (runConduit (CL.sourceList ["\n\n"] .| CT.linesBounded 80 .| CL.consume)) `shouldBe`
                [["", ""]]
        it "throws an exception when lines are too long" $ do
            let x :: Either SomeException [T.Text]
                x = runConduit $ CL.sourceList ["hello\nworld"] .| CT.linesBounded 4 .| CL.consume
            show x `shouldBe` show (Left $ CT.LengthExceeded 4 :: Either CT.TextException ())
        it "works with infinite input" $ do
            -- Must fail promptly instead of buffering an unbounded line.
            let x :: Either SomeException [T.Text]
                x = runConduit $ CL.sourceList (cycle ["hello"]) .| CT.linesBounded 256 .| CL.consume
            show x `shouldBe` show (Left $ CT.LengthExceeded 256 :: Either CT.TextException ())
    describe "text decode" $ do
        it' "doesn't throw runtime exceptions" $ do
            let x = runConduit $ C.yield "\x89\x243" .| CT.decode CT.utf8 .| CL.consume
            case x of
                Left _ -> return ()
                Right t -> error $ "This should have failed: " ++ show t
        it "is not too eager" $ do
            -- The second (bottom) chunk must never be forced when the
            -- consumer only demands the first element.
            x <- runConduit $ CL.sourceList ["foobarbaz", error "ignore me"] .| CT.decode CT.utf8 .| CL.head
            x `shouldBe` Just "foobarbaz"
-- | Alias for 'it' restricted to @IO ()@ examples, so call sites need no
-- type annotation on the example body.
it' :: String -> IO () -> Spec
it' = it
| null | https://raw.githubusercontent.com/snoyberg/conduit/1771780ff4b606296924a28bf5d4433ae6a916f3/conduit-extra/test/Data/Conduit/TextSpec.hs | haskell | Check whether raw bytes are decoded correctly, in
particular that Text decoding produces an error if | # LANGUAGE FlexibleContexts , OverloadedStrings #
module Data.Conduit.TextSpec (spec) where
import Data.Conduit ((.|), runConduit, runConduitPure)
import Control.Exception (SomeException)
import qualified Data.Conduit.Text as CT
import qualified Data.Conduit as C
import Data.Conduit.Lift (runCatchC, catchCatchC)
import Data.Functor.Identity (runIdentity)
import qualified Data.Conduit.List as CL
import Test.Hspec
import Test.Hspec.QuickCheck
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TEE
import qualified Data.Text.Lazy.Encoding as TLE
import Control.Arrow
import qualified Data.ByteString as S
import qualified Data.Text.Lazy as TL
import qualified Data.ByteString.Lazy as L
import Control.Monad.Catch.Pure (runCatchT)
spec :: Spec
spec = describe "Data.Conduit.Text" $ do
describe "text" $ do
let go enc tenc tdec cenc = describe enc $ do
prop "single chunk" $ \chars -> do
let tl = TL.pack chars
lbs = tenc tl
src = CL.sourceList $ L.toChunks lbs
ts <- runConduit $ src .| CT.decode cenc .| CL.consume
TL.fromChunks ts `shouldBe` tl
prop "many chunks" $ \chars -> do
let tl = TL.pack chars
lbs = tenc tl
src = mconcat $ map (CL.sourceList . return . S.singleton) $ L.unpack lbs
ts <- runConduit $ src .| CT.decode cenc .| CL.consume
TL.fromChunks ts `shouldBe` tl
and only if Conduit does .
prop "raw bytes" $ \bytes -> do
let lbs = L.pack bytes
src = CL.sourceList $ L.toChunks lbs
tl' = tdec lbs
etl = runConduit $ src .| CT.decode cenc .| CL.consume
case etl of
(Left _) -> (return $! TL.toStrict tl') `shouldThrow` anyException
(Right tl) -> TL.fromChunks tl `shouldBe` tl'
prop "encoding" $ \chars -> do
let tss = map T.pack chars
lbs = tenc $ TL.fromChunks tss
src = mconcat $ map (CL.sourceList . return) tss
bss <- runConduit $ src .| CT.encode cenc .| CL.consume
L.fromChunks bss `shouldBe` lbs
prop "valid then invalid" $ \x y chars -> do
let tss = map T.pack ([x, y]:chars)
ts = T.concat tss
lbs = tenc (TL.fromChunks tss) `L.append` "\0\0\0\0\0\0\0"
src = mapM_ C.yield $ L.toChunks lbs
Just x' <- runConduit $ src .| CT.decode cenc .| C.await
(x' `T.isPrefixOf` ts) `shouldBe` True
go "utf8" TLE.encodeUtf8 TLE.decodeUtf8 CT.utf8
go "utf16_le" TLE.encodeUtf16LE TLE.decodeUtf16LE CT.utf16_le
go "utf16_be" TLE.encodeUtf16BE TLE.decodeUtf16BE CT.utf16_be
go "utf32_le" TLE.encodeUtf32LE TLE.decodeUtf32LE CT.utf32_le
go "utf32_be" TLE.encodeUtf32BE TLE.decodeUtf32BE CT.utf32_be
it "mixed utf16 and utf8" $ do
let bs = "8\NUL:\NULu\NUL\215\216\217\218"
src = C.yield bs .| CT.decode CT.utf16_le
text <- runConduit $ src .| C.await
text `shouldBe` Just "8:u"
(runConduit $ src .| CL.sinkNull) `shouldThrow` anyException
it "invalid utf8" $ do
let bs = S.pack [0..255]
src = C.yield bs .| CT.decode CT.utf8
text <- runConduit $ src .| C.await
text `shouldBe` Just (T.pack $ map toEnum [0..127])
(runConduit $ src .| CL.sinkNull) `shouldThrow` anyException
it "catch UTF8 exceptions" $ do
let badBS = "this is good\128\128\0that was bad"
grabExceptions inner = C.catchC
(inner .| CL.map Right)
(\e -> C.yield (Left (e :: CT.TextException)))
res <- runConduit $ C.yield badBS .| (,)
<$> (grabExceptions (CT.decode CT.utf8) .| CL.consume)
<*> CL.consume
first (map (either (Left . show) Right)) res `shouldBe`
( [ Right "this is good"
, Left $ show $ CT.NewDecodeException "UTF-8" 12 "\128\128\0t"
]
, ["\128\128\0that was bad"]
)
it "catch UTF8 exceptions, pure" $ do
let badBS = "this is good\128\128\0that was bad"
grabExceptions inner = do
res <- runCatchC $ inner .| CL.map Right
case res of
Left e -> C.yield $ Left e
Right () -> return ()
let res = runConduitPure $ C.yield badBS .| (,)
<$> (grabExceptions (CT.decode CT.utf8) .| CL.consume)
<*> CL.consume
first (map (either (Left . show) Right)) res `shouldBe`
( [ Right "this is good"
, Left $ show $ CT.NewDecodeException "UTF-8" 12 "\128\128\0t"
]
, ["\128\128\0that was bad"]
)
it "catch UTF8 exceptions, catchExceptionC" $ do
let badBS = "this is good\128\128\0that was bad"
grabExceptions inner = catchCatchC
(inner .| CL.map Right)
(\e -> C.yield $ Left e)
let Right res = runIdentity $ runCatchT $ runConduit $ C.yield badBS .| (,)
<$> (grabExceptions (CT.decode CT.utf8) .| CL.consume)
<*> CL.consume
first (map (either (Left . show) Right)) res `shouldBe`
( [ Right "this is good"
, Left $ show $ CT.NewDecodeException "UTF-8" 12 "\128\128\0t"
]
, ["\128\128\0that was bad"]
)
it "catch UTF8 exceptions, catchExceptionC, decodeUtf8" $ do
let badBS = ["this is good", "\128\128\0that was bad"]
grabExceptions inner = catchCatchC
(inner .| CL.map Right)
(\e -> C.yield $ Left e)
let Right res = runIdentity $ runCatchT $ runConduit $
mapM_ C.yield badBS .| (,)
<$> (grabExceptions CT.decodeUtf8 .| CL.consume)
<*> CL.consume
first (map (either (Left . const ()) Right)) res `shouldBe`
( [ Right "this is good"
, Left ()
]
, ["\128\128\0that was bad"]
)
prop "lenient UTF8 decoding" $ \good1 good2 -> do
let bss = [TE.encodeUtf8 $ T.pack good1, "\128\129\130", TE.encodeUtf8 $ T.pack good2]
bs = S.concat bss
expected = TE.decodeUtf8With TEE.lenientDecode bs
actual = runConduitPure $ mapM_ C.yield bss .| CT.decodeUtf8Lenient .| CL.consume
T.concat actual `shouldBe` expected
describe "text lines" $ do
it "yields nothing given nothing" $
(runConduit $ CL.sourceList [] .| CT.lines .| CL.consume) ==
[[]]
it "yields nothing given only empty text" $
(runConduit $ CL.sourceList [""] .| CT.lines .| CL.consume) ==
[[]]
it "works across split lines" $
(runConduit $ CL.sourceList ["abc", "d\nef"] .| CT.lines .| CL.consume) ==
[["abcd", "ef"]]
it "works with multiple lines in an item" $
(runConduit $ CL.sourceList ["ab\ncd\ne"] .| CT.lines .| CL.consume) ==
[["ab", "cd", "e"]]
it "works with ending on a newline" $
(runConduit $ CL.sourceList ["ab\n"] .| CT.lines .| CL.consume) ==
[["ab"]]
it "works with ending a middle item on a newline" $
(runConduit $ CL.sourceList ["ab\n", "cd\ne"] .| CT.lines .| CL.consume) ==
[["ab", "cd", "e"]]
it "works with empty text" $
(runConduit $ CL.sourceList ["ab", "", "cd"] .| CT.lines .| CL.consume) ==
[["abcd"]]
it "works with empty lines" $
(runConduit $ CL.sourceList ["\n\n"] .| CT.lines .| CL.consume) ==
[["", ""]]
describe "text lines bounded" $ do
it "yields nothing given nothing" $
(runConduit $ CL.sourceList [] .| CT.linesBounded 80 .| CL.consume) ==
[[]]
it "yields nothing given only empty text" $
(runConduit $ CL.sourceList [""] .| CT.linesBounded 80 .| CL.consume) ==
[[]]
it "works across split lines" $
(runConduit $ CL.sourceList ["abc", "d\nef"] .| CT.linesBounded 80 .| CL.consume) ==
[["abcd", "ef"]]
it "works with multiple lines in an item" $
(runConduit $ CL.sourceList ["ab\ncd\ne"] .| CT.linesBounded 80 .| CL.consume) ==
[["ab", "cd", "e"]]
it "works with ending on a newline" $
(runConduit $ CL.sourceList ["ab\n"] .| CT.linesBounded 80 .| CL.consume) `shouldBe`
[["ab"]]
it "works with ending a middle item on a newline" $
(runConduit $ CL.sourceList ["ab\n", "cd\ne"] .| CT.linesBounded 80 .| CL.consume) `shouldBe`
[["ab", "cd", "e"]]
it "works with empty text" $
(runConduit $ CL.sourceList ["ab", "", "cd"] .| CT.linesBounded 80 .| CL.consume) `shouldBe`
[["abcd"]]
it "works with empty lines" $
(runConduit (CL.sourceList ["\n\n"] .| CT.linesBounded 80 .| CL.consume)) `shouldBe`
[["", ""]]
it "throws an exception when lines are too long" $ do
let x :: Either SomeException [T.Text]
x = runConduit $ CL.sourceList ["hello\nworld"] .| CT.linesBounded 4 .| CL.consume
show x `shouldBe` show (Left $ CT.LengthExceeded 4 :: Either CT.TextException ())
it "works with infinite input" $ do
let x :: Either SomeException [T.Text]
x = runConduit $ CL.sourceList (cycle ["hello"]) .| CT.linesBounded 256 .| CL.consume
show x `shouldBe` show (Left $ CT.LengthExceeded 256 :: Either CT.TextException ())
describe "text decode" $ do
it' "doesn't throw runtime exceptions" $ do
let x = runConduit $ C.yield "\x89\x243" .| CT.decode CT.utf8 .| CL.consume
case x of
Left _ -> return ()
Right t -> error $ "This should have failed: " ++ show t
it "is not too eager" $ do
x <- runConduit $ CL.sourceList ["foobarbaz", error "ignore me"] .| CT.decode CT.utf8 .| CL.head
x `shouldBe` Just "foobarbaz"
it' :: String -> IO () -> Spec
it' = it
|
80a29b1306a438f3e3dd9ca64366fc0ce01174e1818b8d1f742d8bb62bd8e903 | marick/Midje | t_zip.clj | (ns midje.parsing.util.t-zip
(:require [midje.parsing.util.zip :refer :all]
[midje.sweet :refer :all]
[midje.test-util :refer :all]
[clojure.zip :as zip]))
(defn node
  "Builds a predicate that is true when a zipper location's node equals `expected`."
  [expected]
  #(= expected (zip/node %)))
;; `skip-to-rightmost-leaf` should walk from a starting loc to the deepest,
;; rightmost leaf of the structure, regardless of nesting depth.
(fact "can position loc at rightmost leaf"
  (let [z (zip/seq-zip '(a b "leaf"))]
    (skip-to-rightmost-leaf (zip/down z)) => (node "leaf"))
  (let [z (zip/seq-zip '(111 (a => [1 2 '(3)]) (("leaf"))))]
    (skip-to-rightmost-leaf (zip/down z)) => (node "leaf")))
;; `remove-moving-right` deletes the node at the current loc and leaves the
;; loc positioned on the element to its right; the rebuilt root no longer
;; contains the removed node.
(fact "it's useful to delete a node and move right"
  (let [z (zip/seq-zip '( (f n) => (+ 3 4)))
        loc (-> z zip/down zip/right)]
    (remove-moving-right loc) => (node '(+ 3 4))
    (zip/root (remove-moving-right loc)) => '( (f n) (+ 3 4))))
| null | https://raw.githubusercontent.com/marick/Midje/2b9bcb117442d3bd2d16446b47540888d683c717/test/midje/parsing/util/t_zip.clj | clojure | (ns midje.parsing.util.t-zip
(:require [midje.parsing.util.zip :refer :all]
[midje.sweet :refer :all]
[midje.test-util :refer :all]
[clojure.zip :as zip]))
(defn node [expected] (fn [actual] (= expected (zip/node actual))))
(fact "can position loc at rightmost leaf"
(let [z (zip/seq-zip '(a b "leaf"))]
(skip-to-rightmost-leaf (zip/down z)) => (node "leaf"))
(let [z (zip/seq-zip '(111 (a => [1 2 '(3)]) (("leaf"))))]
(skip-to-rightmost-leaf (zip/down z)) => (node "leaf")))
(fact "it's useful to delete a node and move right"
(let [z (zip/seq-zip '( (f n) => (+ 3 4)))
loc (-> z zip/down zip/right)]
(remove-moving-right loc) => (node '(+ 3 4))
(zip/root (remove-moving-right loc)) => '( (f n) (+ 3 4))))
| |
a84cc3f8ec91ebb1e170a6c68815e401006d99fe1d23833c8f9791768e1a4076 | jwiegley/notes | MonoTraversable.hs | class (MonoFunctor c, MonoFunctor d) => MonoTransform c d where
    -- Convert this container into container type @d@.
    mtransform :: c -> d

-- Generic fallback: rebuild element-by-element via a right fold and
-- 'fromList'.  NOTE(review): this instance overlaps every specific
-- instance below; presumably an overlapping-instances extension is in
-- effect for this gist -- confirm before reuse.
instance (MonoFunctor c, MonoFoldable c,
          MonoFunctor d, FromList d,
          Convertible (Element c) (Element d))
         => MonoTransform c d where
    mtransform xs = fromList (foldr (\x -> (convert x :)) [] xs)

-- Specialised conversions between String, strict/lazy Text and strict/lazy
-- ByteString.  Every ByteString direction goes through UTF-8
-- (encodeUtf8/decodeUtf8), so decoding instances follow decodeUtf8's
-- documented behaviour on invalid bytes (an error when forced).
instance MonoTransform [Char] S.ByteString where
    mtransform = T.encodeUtf8 . T.pack
instance MonoTransform [Char] T.Text where
    mtransform = T.pack
instance MonoTransform [Char] L.ByteString where
    mtransform = TL.encodeUtf8 . TL.pack
instance MonoTransform [Char] TL.Text where
    mtransform = TL.pack
instance MonoTransform T.Text S.ByteString where
    mtransform = T.encodeUtf8
instance MonoTransform T.Text L.ByteString where
    mtransform = TL.encodeUtf8 . TL.fromStrict
instance MonoTransform TL.Text S.ByteString where
    mtransform = T.encodeUtf8 . TL.toStrict
instance MonoTransform TL.Text L.ByteString where
    mtransform = TL.encodeUtf8
instance MonoTransform S.ByteString [Char] where
    mtransform = T.unpack . T.decodeUtf8
instance MonoTransform T.Text [Char] where
    mtransform = T.unpack
instance MonoTransform L.ByteString [Char] where
    mtransform = TL.unpack . TL.decodeUtf8
instance MonoTransform TL.Text [Char] where
    mtransform = TL.unpack
instance MonoTransform S.ByteString T.Text where
    mtransform = T.decodeUtf8
instance MonoTransform L.ByteString T.Text where
    mtransform = TL.toStrict . TL.decodeUtf8
instance MonoTransform S.ByteString TL.Text where
    mtransform = TL.fromStrict . T.decodeUtf8
instance MonoTransform L.ByteString TL.Text where
    mtransform = TL.decodeUtf8
mtransform :: c -> d
instance (MonoFunctor c, MonoFoldable c,
MonoFunctor d, FromList d,
Convertible (Element c) (Element d))
=> MonoTransform c d where
mtransform xs = fromList (foldr (\x -> (convert x :)) [] xs)
instance MonoTransform [Char] S.ByteString where
mtransform = T.encodeUtf8 . T.pack
instance MonoTransform [Char] T.Text where
mtransform = T.pack
instance MonoTransform [Char] L.ByteString where
mtransform = TL.encodeUtf8 . TL.pack
instance MonoTransform [Char] TL.Text where
mtransform = TL.pack
instance MonoTransform T.Text S.ByteString where
mtransform = T.encodeUtf8
instance MonoTransform T.Text L.ByteString where
mtransform = TL.encodeUtf8 . TL.fromStrict
instance MonoTransform TL.Text S.ByteString where
mtransform = T.encodeUtf8 . TL.toStrict
instance MonoTransform TL.Text L.ByteString where
mtransform = TL.encodeUtf8
instance MonoTransform S.ByteString [Char] where
mtransform = T.unpack . T.decodeUtf8
instance MonoTransform T.Text [Char] where
mtransform = T.unpack
instance MonoTransform L.ByteString [Char] where
mtransform = TL.unpack . TL.decodeUtf8
instance MonoTransform TL.Text [Char] where
mtransform = TL.unpack
instance MonoTransform S.ByteString T.Text where
mtransform = T.decodeUtf8
instance MonoTransform L.ByteString T.Text where
mtransform = TL.toStrict . TL.decodeUtf8
instance MonoTransform S.ByteString TL.Text where
mtransform = TL.fromStrict . T.decodeUtf8
instance MonoTransform L.ByteString TL.Text where
mtransform = TL.decodeUtf8
| |
706aea4da83e8e74288d28f980b117de15925c9de656f2d53495adbc5a275aad | bmeurer/ocaml-arm | typemod.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
(*          Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)
(*                                                                     *)
(*  Copyright 1996 Institut National de Recherche en Informatique et   *)
(*  en Automatique.  All rights reserved.  This file is distributed    *)
(*  under the terms of the Q Public License version 1.0.               *)
(*                                                                     *)
(***********************************************************************)
(* $Id$ *)
(* Type-checking of the module language *)
open Types
open Format
(** Type-check a module expression in the given environment. *)
val type_module:
        Env.t -> Parsetree.module_expr -> Typedtree.module_expr

(** Type-check a structure.  Returns the typed structure, its inferred
    signature, and the environment extended with the structure's bindings;
    the [Location.t] is used for error reporting. *)
val type_structure:
        Env.t -> Parsetree.structure -> Location.t ->
        Typedtree.structure * Types.signature * Env.t

(** Like {!type_structure}, for toplevel phrases. *)
val type_toplevel_phrase:
        Env.t -> Parsetree.structure ->
        Typedtree.structure * Types.signature * Env.t

(** Type-check an implementation and match it against its interface,
    returning the typed structure and the coercion.  The three [string]
    arguments are file/module names used for lookup and error messages;
    confirm their exact roles against typemod.ml before changing callers. *)
val type_implementation:
        string -> string -> string -> Env.t -> Parsetree.structure ->
        Typedtree.structure * Typedtree.module_coercion

(** Translate a parsed signature into a typed signature. *)
val transl_signature:
        Env.t -> Parsetree.signature -> Typedtree.signature

(** Fail if a structure item still contains a non-generalizable type
    variable (see the [Non_generalizable*] errors below). *)
val check_nongen_schemes:
        Env.t -> Typedtree.structure_item list -> unit

(** Remove redundant items from a signature. *)
val simplify_signature: signature -> signature

(** Write a compiled interface to disk.  The [string] arguments carry
    file/module names; see typemod.ml for their exact roles. *)
val save_signature : string -> Typedtree.signature -> string -> string ->
        Env.t -> Types.signature_item list -> unit

(** Pack several compiled units into one module, returning the coercion
    needed for the packed unit. *)
val package_units:
        string list -> string -> string -> Typedtree.module_coercion

(** Identifiers bound as values by a signature. *)
val bound_value_identifiers : Types.signature_item list -> Ident.t list

(** Errors raised during module type-checking; reported via {!report_error}. *)
type error =
    Cannot_apply of module_type
  | Not_included of Includemod.error list
  | Cannot_eliminate_dependency of module_type
  | Signature_expected
  | Structure_expected of module_type
  | With_no_component of Longident.t
  | With_mismatch of Longident.t * Includemod.error list
  | Repeated_name of string * string
  | Non_generalizable of type_expr
  | Non_generalizable_class of Ident.t * class_declaration
  | Non_generalizable_module of module_type
  | Implementation_is_required of string
  | Interface_not_compiled of string
  | Not_allowed_in_functor_body
  | With_need_typeconstr
  | Not_a_packed_module of type_expr
  | Incomplete_packed_module of type_expr
  | Scoping_pack of Longident.t * type_expr

exception Error of Location.t * error

(** Pretty-print an [error] on the given formatter. *)
val report_error: formatter -> error -> unit
| null | https://raw.githubusercontent.com/bmeurer/ocaml-arm/43f7689c76a349febe3d06ae7a4fc1d52984fd8b/typing/typemod.mli | ocaml | *********************************************************************
OCaml
*********************************************************************
Type-checking of the module language | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Types
open Format
val type_module:
Env.t -> Parsetree.module_expr -> Typedtree.module_expr
val type_structure:
Env.t -> Parsetree.structure -> Location.t ->
Typedtree.structure * Types.signature * Env.t
val type_toplevel_phrase:
Env.t -> Parsetree.structure ->
Typedtree.structure * Types.signature * Env.t
val type_implementation:
string -> string -> string -> Env.t -> Parsetree.structure ->
Typedtree.structure * Typedtree.module_coercion
val transl_signature:
Env.t -> Parsetree.signature -> Typedtree.signature
val check_nongen_schemes:
Env.t -> Typedtree.structure_item list -> unit
val simplify_signature: signature -> signature
val save_signature : string -> Typedtree.signature -> string -> string ->
Env.t -> Types.signature_item list -> unit
val package_units:
string list -> string -> string -> Typedtree.module_coercion
val bound_value_identifiers : Types.signature_item list -> Ident.t list
type error =
Cannot_apply of module_type
| Not_included of Includemod.error list
| Cannot_eliminate_dependency of module_type
| Signature_expected
| Structure_expected of module_type
| With_no_component of Longident.t
| With_mismatch of Longident.t * Includemod.error list
| Repeated_name of string * string
| Non_generalizable of type_expr
| Non_generalizable_class of Ident.t * class_declaration
| Non_generalizable_module of module_type
| Implementation_is_required of string
| Interface_not_compiled of string
| Not_allowed_in_functor_body
| With_need_typeconstr
| Not_a_packed_module of type_expr
| Incomplete_packed_module of type_expr
| Scoping_pack of Longident.t * type_expr
exception Error of Location.t * error
val report_error: formatter -> error -> unit
|
dce8779e5d25208f8fa420dbbfa75f77879f54f943833f0b4a4ce069363ac966 | juxt/edge | main.cljs | (ns ^:figwheel-hooks {{root-ns}}.frontend.main)
(js/console.log "Hello, world")
;; This is called once
(defonce init
(do (set! (.-innerHTML (js/document.getElementById "app"))
"<p>Loaded {{name}}!</p>
<p>Edit <strong><code>src/{{sanitized}}/frontend/main.cljs</code></strong> to change this message.</p>")
true))
;; This is called every time you make a code change
(defn ^:after-load reload []
(set! (.-innerText (js/document.getElementById "app")) "Hot Reloaded {{name}}!"))
| null | https://raw.githubusercontent.com/juxt/edge/dfda42e035714279642b359a8610f57154efd002/lib/edge-app-template/resources/clj/new/app.template/main.cljs | clojure | This is called once
This is called every time you make a code change | (ns ^:figwheel-hooks {{root-ns}}.frontend.main)
(js/console.log "Hello, world")
(defonce init
(do (set! (.-innerHTML (js/document.getElementById "app"))
"<p>Loaded {{name}}!</p>
<p>Edit <strong><code>src/{{sanitized}}/frontend/main.cljs</code></strong> to change this message.</p>")
true))
(defn ^:after-load reload []
(set! (.-innerText (js/document.getElementById "app")) "Hot Reloaded {{name}}!"))
|
943c8cf4aa21beb2e97b94d5bec89407c90b910a8f8b3f47dcaeee72972ba3f0 | ivan-m/Graphalyze | Directed.hs | |
Module : Data . Graph . Analysis . Algorithms . Directed
Description : Algorithms for directed graphs .
Copyright : ( c ) 2009
License : 2 - Clause BSD
Maintainer :
Defines algorithms that work on directed graphs .
Module : Data.Graph.Analysis.Algorithms.Directed
Description : Algorithms for directed graphs.
Copyright : (c) Ivan Lazar Miljenovic 2009
License : 2-Clause BSD
Maintainer :
Defines algorithms that work on directed graphs.
-}
module Data.Graph.Analysis.Algorithms.Directed
( -- * Ending nodes
-- $ends
endNode, endNode',
endBy, endBy',
-- ** Root nodes
rootsOf, rootsOf',
isRoot, isRoot',
-- ** Leaf nodes
leavesOf, leavesOf',
isLeaf, isLeaf',
* * nodes
singletonsOf, singletonsOf',
isSingleton, isSingleton',
-- * Subgraphs
coreOf,
-- * Clustering
levelGraph,
levelGraphFrom,
minLevel,
-- * Node accessibility
accessibleFrom,
accessibleFrom',
accessibleOnlyFrom,
accessibleOnlyFrom',
-- * Other
leafMinPaths,
leafMinPaths'
) where
import Data.Graph.Analysis.Types
import Data.Graph.Analysis.Utils
import Data.Graph.Inductive.Graph
import Data.Graph.Inductive.Query.BFS(esp)
import Data.List(minimumBy, unfoldr)
import Data.Maybe(fromMaybe)
import Data.Function(on)
import qualified Data.Map as M
import Data.Map(Map)
import qualified Data.Set as S
import Data.Set(Set)
import Control.Monad(ap)
-- -----------------------------------------------------------------------------
{- $ends
Find starting/ending nodes.
We define an ending node as one where, given a function:
@
f :: (Graph g) => g a b -> Node -> [Node]
@
the only allowed result is that node itself (to allow for loops).
-}
-- | Determine if this 'LNode' is an ending node.
endNode :: (Graph g) => (g a b -> Node -> NGroup)
-> g a b -> LNode a -> Bool
endNode f g = endNode' f g . node
-- | Determine if this 'Node' is an ending node.
endNode' :: (Graph g) => (g a b -> Node -> NGroup) -> g a b -> Node
-> Bool
endNode' f g n = case (f g n) of
[] -> True
-- Allow loops
[n'] -> n' == n
_ -> False
-- | Find all 'LNode's that meet the ending criteria.
endBy :: (Graph g) => (g a b -> Node -> NGroup) -> g a b -> LNGroup a
endBy = filterNodes . endNode
| Find all ' 's that match the ending criteria .
endBy' :: (Graph g) => (g a b -> Node -> NGroup) -> g a b -> NGroup
endBy' = filterNodes' . endNode'
-- -----------------------------------------------------------------------------
{-
Root detection.
-}
-- | Find all roots of the graph.
rootsOf :: (Graph g) => g a b -> LNGroup a
rootsOf = endBy pre
-- | Find all roots of the graph.
rootsOf' :: (Graph g) => g a b -> NGroup
rootsOf' = endBy' pre
-- | Returns @True@ if this 'LNode' is a root.
isRoot :: (Graph g) => g a b -> LNode a -> Bool
isRoot = endNode pre
| Returns if this ' Node ' is a root .
isRoot' :: (Graph g) => g a b -> Node -> Bool
isRoot' = endNode' pre
-- -----------------------------------------------------------------------------
{-
Leaf detection.
-}
-- | Find all leaves of the graph.
leavesOf :: (Graph g) => g a b -> LNGroup a
leavesOf = endBy suc
-- | Find all leaves of the graph.
leavesOf' :: (Graph g) => g a b -> NGroup
leavesOf' = endBy' suc
-- | Returns @True@ if this 'LNode' is a leaf.
isLeaf :: (Graph g) => g a b -> LNode a -> Bool
isLeaf = endNode suc
| Returns if this ' Node ' is a leaf .
isLeaf' :: (Graph g) => g a b -> Node -> Bool
isLeaf' = endNode' suc
-- -----------------------------------------------------------------------------
detection .
Singleton detection.
-}
-- | Find all singletons of the graph.
singletonsOf :: (Graph g) => g a b -> LNGroup a
singletonsOf = endBy neighbors
-- | Find all singletons of the graph.
singletonsOf' :: (Graph g) => g a b -> NGroup
singletonsOf' = endBy' neighbors
-- | Returns @True@ if this 'LNode' is a singleton.
isSingleton :: (Graph g) => g a b -> LNode a -> Bool
isSingleton = endNode neighbors
| Returns if this ' Node ' is a singleton .
isSingleton' :: (Graph g) => g a b -> Node -> Bool
isSingleton' = endNode' neighbors
-- -----------------------------------------------------------------------------
{- |
The /core/ of the graph is the part of the graph containing all the
cycles, etc. Depending on the context, it could be interpreted as
the part of the graph where all the "work" is done.
-}
coreOf :: (DynGraph g, Eq a, Eq b) => g a b -> g a b
coreOf = fixPointGraphs stripEnds
where
stripEnds gr' = delNodes roots . delNodes leaves $ gr'
where
roots = rootsOf' gr'
leaves = leavesOf' gr'
-- -----------------------------------------------------------------------------
{- |
Cluster the nodes in the graph based upon how far away they are
from a root node. Root nodes are in the cluster labelled 'minLevel',
nodes in level \"n\" (with @n > minLevel@) are at least /n/ edges away
from a root node.
-}
levelGraph :: (Ord a, DynGraph g) => g a b -> g (GenCluster a) b
levelGraph g = levelGraphFrom (rootsOf' g) g
| As with ' ' but provide a custom grouping of ' 's to
consider as the \"roots\ " .
levelGraphFrom :: (Ord a, DynGraph g) => NGroup -> g a b
-> g (GenCluster a) b
levelGraphFrom rs g = gmap addLbl g
where
lvls = zip [minLevel..] . map S.toList $ graphLevels rs g
lvMap = M.fromList
$ concatMap (\(l,ns) -> map (flip (,) l) ns) lvls
mkLbl n l = GC { clust = getLevel n
, nLbl = l
}
addLbl (p,n,l,s) = (p, n, mkLbl n l, s)
-- Have to consider unaccessible nodes.
getLevel n = fromMaybe (pred minLevel) $ n `M.lookup` lvMap
-- | The level of the nodes in the 'NGroup' provided to
' levelGraphFrom ' ( or the root nodes for ' ' ) . A level
-- less than this indicates that the node is not accessible.
minLevel :: Int
minLevel = 0
type NSet = Set Node
-- | Obtain the levels in the graph.
graphLevels :: (Graph g) => NGroup -> g a b -> [NSet]
graphLevels = flip graphLevels' . S.fromList
graphLevels' :: (Graph g) => g a b -> NSet -> [NSet]
graphLevels' g = unfoldr getNextLevel . flip (,) g
-- | The @(NSet, g a b)@ parameters are the current nodes to be
-- starting with in the current graph.
getNextLevel :: (Graph g) => (NSet, g a b)
-> Maybe (NSet, (NSet, g a b))
getNextLevel (ns,g)
| S.null ns = Nothing
| otherwise = Just (ns, (ns', g'))
where
g' = delNodes (S.toList ns) g
ns' = flip S.difference ns
. S.unions . map getSuc
$ S.toList ns
getSuc = S.fromList . suc g
-- -----------------------------------------------------------------------------
{- |
The shortest paths to each of the leaves in the graph (excluding
singletons). This can be used to obtain an indication of the
overall height/depth of the graph.
-}
leafMinPaths :: (Graph g) => g a b -> [LNGroup a]
leafMinPaths g = map (lfMinPth g rs) ls
where
rs = rootsOf' g
ls = leavesOf' g
{- |
The shortest paths to each of the leaves in the graph (excluding
singletons). This can be used to obtain an indication of the
overall height/depth of the graph.
-}
leafMinPaths' :: (Graph g) => g a b -> [NGroup]
leafMinPaths' = map (map node) . leafMinPaths
-- | Given the list of roots in this graph, find the shortest path to
-- this leaf node.
lfMinPth :: (Graph g) => g a b -> [Node] -> Node -> LNGroup a
lfMinPth g rs l = addLabels g
. snd
. minimumBy (compare `on` fst)
. addLengths
$ map (\ r -> esp r l g) rs
-- -----------------------------------------------------------------------------
| Find all ' 's that can be reached from the provided ' 's .
accessibleFrom :: (Graph g) => g a b -> [Node] -> [Node]
accessibleFrom g = S.toList . accessibleFrom' g . S.fromList
| Find all ' 's that can be reached from the provided nodes
-- using 'Set's rather than lists.
accessibleFrom' :: (Graph g) => g a b -> Set Node -> Set Node
accessibleFrom' g = S.unions . graphLevels' g
| Find those ' 's that are reachable only from the provided
' 's .
accessibleOnlyFrom :: (Graph g) => g a b -> [Node] -> [Node]
accessibleOnlyFrom g = S.toList . accessibleOnlyFrom' g . S.fromList
| Find those ' 's that are reachable only from the provided
' 's , using ' Set 's rather than lists .
accessibleOnlyFrom' :: (Graph g) => g a b -> Set Node -> Set Node
accessibleOnlyFrom' g = M.keysSet
. fixPoint keepOnlyInternal
. setKeys (pre g)
. accessibleFrom' g
-- | Pseudo-inverse of 'M.keysSet'.
setKeys :: (Ord a) => (a -> b) -> Set a -> Map a b
setKeys f = M.fromDistinctAscList . map (ap (,) f) . S.toAscList
-- | Removing nodes which have predecessors outside of this Map.
keepOnlyInternal :: Map Node NGroup -> Map Node NGroup
keepOnlyInternal = M.filter =<< onlyInternalPred
-- | Are these predecessor nodes all found within this Map?
onlyInternalPred :: Map Node NGroup -> NGroup -> Bool
onlyInternalPred = all . flip M.member
| null | https://raw.githubusercontent.com/ivan-m/Graphalyze/812f76bb45a77252ed74a8d028bffbae38b240f6/Data/Graph/Analysis/Algorithms/Directed.hs | haskell | * Ending nodes
$ends
** Root nodes
** Leaf nodes
* Subgraphs
* Clustering
* Node accessibility
* Other
-----------------------------------------------------------------------------
$ends
Find starting/ending nodes.
We define an ending node as one where, given a function:
@
f :: (Graph g) => g a b -> Node -> [Node]
@
the only allowed result is that node itself (to allow for loops).
| Determine if this 'LNode' is an ending node.
| Determine if this 'Node' is an ending node.
Allow loops
| Find all 'LNode's that meet the ending criteria.
-----------------------------------------------------------------------------
Root detection.
| Find all roots of the graph.
| Find all roots of the graph.
| Returns @True@ if this 'LNode' is a root.
-----------------------------------------------------------------------------
Leaf detection.
| Find all leaves of the graph.
| Find all leaves of the graph.
| Returns @True@ if this 'LNode' is a leaf.
-----------------------------------------------------------------------------
| Find all singletons of the graph.
| Find all singletons of the graph.
| Returns @True@ if this 'LNode' is a singleton.
-----------------------------------------------------------------------------
|
The /core/ of the graph is the part of the graph containing all the
cycles, etc. Depending on the context, it could be interpreted as
the part of the graph where all the "work" is done.
-----------------------------------------------------------------------------
|
Cluster the nodes in the graph based upon how far away they are
from a root node. Root nodes are in the cluster labelled 'minLevel',
nodes in level \"n\" (with @n > minLevel@) are at least /n/ edges away
from a root node.
Have to consider unaccessible nodes.
| The level of the nodes in the 'NGroup' provided to
less than this indicates that the node is not accessible.
| Obtain the levels in the graph.
| The @(NSet, g a b)@ parameters are the current nodes to be
starting with in the current graph.
-----------------------------------------------------------------------------
|
The shortest paths to each of the leaves in the graph (excluding
singletons). This can be used to obtain an indication of the
overall height/depth of the graph.
|
The shortest paths to each of the leaves in the graph (excluding
singletons). This can be used to obtain an indication of the
overall height/depth of the graph.
| Given the list of roots in this graph, find the shortest path to
this leaf node.
-----------------------------------------------------------------------------
using 'Set's rather than lists.
| Pseudo-inverse of 'M.keysSet'.
| Removing nodes which have predecessors outside of this Map.
| Are these predecessor nodes all found within this Map? | |
Module : Data . Graph . Analysis . Algorithms . Directed
Description : Algorithms for directed graphs .
Copyright : ( c ) 2009
License : 2 - Clause BSD
Maintainer :
Defines algorithms that work on directed graphs .
Module : Data.Graph.Analysis.Algorithms.Directed
Description : Algorithms for directed graphs.
Copyright : (c) Ivan Lazar Miljenovic 2009
License : 2-Clause BSD
Maintainer :
Defines algorithms that work on directed graphs.
-}
module Data.Graph.Analysis.Algorithms.Directed
endNode, endNode',
endBy, endBy',
rootsOf, rootsOf',
isRoot, isRoot',
leavesOf, leavesOf',
isLeaf, isLeaf',
* * nodes
singletonsOf, singletonsOf',
isSingleton, isSingleton',
coreOf,
levelGraph,
levelGraphFrom,
minLevel,
accessibleFrom,
accessibleFrom',
accessibleOnlyFrom,
accessibleOnlyFrom',
leafMinPaths,
leafMinPaths'
) where
import Data.Graph.Analysis.Types
import Data.Graph.Analysis.Utils
import Data.Graph.Inductive.Graph
import Data.Graph.Inductive.Query.BFS(esp)
import Data.List(minimumBy, unfoldr)
import Data.Maybe(fromMaybe)
import Data.Function(on)
import qualified Data.Map as M
import Data.Map(Map)
import qualified Data.Set as S
import Data.Set(Set)
import Control.Monad(ap)
endNode :: (Graph g) => (g a b -> Node -> NGroup)
-> g a b -> LNode a -> Bool
endNode f g = endNode' f g . node
endNode' :: (Graph g) => (g a b -> Node -> NGroup) -> g a b -> Node
-> Bool
endNode' f g n = case (f g n) of
[] -> True
[n'] -> n' == n
_ -> False
endBy :: (Graph g) => (g a b -> Node -> NGroup) -> g a b -> LNGroup a
endBy = filterNodes . endNode
| Find all ' 's that match the ending criteria .
endBy' :: (Graph g) => (g a b -> Node -> NGroup) -> g a b -> NGroup
endBy' = filterNodes' . endNode'
rootsOf :: (Graph g) => g a b -> LNGroup a
rootsOf = endBy pre
rootsOf' :: (Graph g) => g a b -> NGroup
rootsOf' = endBy' pre
isRoot :: (Graph g) => g a b -> LNode a -> Bool
isRoot = endNode pre
| Returns if this ' Node ' is a root .
isRoot' :: (Graph g) => g a b -> Node -> Bool
isRoot' = endNode' pre
leavesOf :: (Graph g) => g a b -> LNGroup a
leavesOf = endBy suc
leavesOf' :: (Graph g) => g a b -> NGroup
leavesOf' = endBy' suc
isLeaf :: (Graph g) => g a b -> LNode a -> Bool
isLeaf = endNode suc
| Returns if this ' Node ' is a leaf .
isLeaf' :: (Graph g) => g a b -> Node -> Bool
isLeaf' = endNode' suc
detection .
Singleton detection.
-}
singletonsOf :: (Graph g) => g a b -> LNGroup a
singletonsOf = endBy neighbors
singletonsOf' :: (Graph g) => g a b -> NGroup
singletonsOf' = endBy' neighbors
isSingleton :: (Graph g) => g a b -> LNode a -> Bool
isSingleton = endNode neighbors
| Returns if this ' Node ' is a singleton .
isSingleton' :: (Graph g) => g a b -> Node -> Bool
isSingleton' = endNode' neighbors
coreOf :: (DynGraph g, Eq a, Eq b) => g a b -> g a b
coreOf = fixPointGraphs stripEnds
where
stripEnds gr' = delNodes roots . delNodes leaves $ gr'
where
roots = rootsOf' gr'
leaves = leavesOf' gr'
levelGraph :: (Ord a, DynGraph g) => g a b -> g (GenCluster a) b
levelGraph g = levelGraphFrom (rootsOf' g) g
| As with ' ' but provide a custom grouping of ' 's to
consider as the \"roots\ " .
levelGraphFrom :: (Ord a, DynGraph g) => NGroup -> g a b
-> g (GenCluster a) b
levelGraphFrom rs g = gmap addLbl g
where
lvls = zip [minLevel..] . map S.toList $ graphLevels rs g
lvMap = M.fromList
$ concatMap (\(l,ns) -> map (flip (,) l) ns) lvls
mkLbl n l = GC { clust = getLevel n
, nLbl = l
}
addLbl (p,n,l,s) = (p, n, mkLbl n l, s)
getLevel n = fromMaybe (pred minLevel) $ n `M.lookup` lvMap
' levelGraphFrom ' ( or the root nodes for ' ' ) . A level
minLevel :: Int
minLevel = 0
type NSet = Set Node
graphLevels :: (Graph g) => NGroup -> g a b -> [NSet]
graphLevels = flip graphLevels' . S.fromList
graphLevels' :: (Graph g) => g a b -> NSet -> [NSet]
graphLevels' g = unfoldr getNextLevel . flip (,) g
getNextLevel :: (Graph g) => (NSet, g a b)
-> Maybe (NSet, (NSet, g a b))
getNextLevel (ns,g)
| S.null ns = Nothing
| otherwise = Just (ns, (ns', g'))
where
g' = delNodes (S.toList ns) g
ns' = flip S.difference ns
. S.unions . map getSuc
$ S.toList ns
getSuc = S.fromList . suc g
leafMinPaths :: (Graph g) => g a b -> [LNGroup a]
leafMinPaths g = map (lfMinPth g rs) ls
where
rs = rootsOf' g
ls = leavesOf' g
leafMinPaths' :: (Graph g) => g a b -> [NGroup]
leafMinPaths' = map (map node) . leafMinPaths
lfMinPth :: (Graph g) => g a b -> [Node] -> Node -> LNGroup a
lfMinPth g rs l = addLabels g
. snd
. minimumBy (compare `on` fst)
. addLengths
$ map (\ r -> esp r l g) rs
| Find all ' 's that can be reached from the provided ' 's .
accessibleFrom :: (Graph g) => g a b -> [Node] -> [Node]
accessibleFrom g = S.toList . accessibleFrom' g . S.fromList
| Find all ' 's that can be reached from the provided nodes
accessibleFrom' :: (Graph g) => g a b -> Set Node -> Set Node
accessibleFrom' g = S.unions . graphLevels' g
| Find those ' 's that are reachable only from the provided
' 's .
accessibleOnlyFrom :: (Graph g) => g a b -> [Node] -> [Node]
accessibleOnlyFrom g = S.toList . accessibleOnlyFrom' g . S.fromList
| Find those ' 's that are reachable only from the provided
' 's , using ' Set 's rather than lists .
accessibleOnlyFrom' :: (Graph g) => g a b -> Set Node -> Set Node
accessibleOnlyFrom' g = M.keysSet
. fixPoint keepOnlyInternal
. setKeys (pre g)
. accessibleFrom' g
setKeys :: (Ord a) => (a -> b) -> Set a -> Map a b
setKeys f = M.fromDistinctAscList . map (ap (,) f) . S.toAscList
keepOnlyInternal :: Map Node NGroup -> Map Node NGroup
keepOnlyInternal = M.filter =<< onlyInternalPred
onlyInternalPred :: Map Node NGroup -> NGroup -> Bool
onlyInternalPred = all . flip M.member
|
35bddb5f73c9bcc8582dc6e51f9226c8135545ec8bf06258503afc4eca7fdf42 | zack-bitcoin/verkle | ff.erl | -module(ff).
-export([sub/3, add/3, mul/3, divide/3,
pow/3, add_all/2,
mod/2,
inverse/2, batch_inverse/2, neg/2]).
mod(X,Y)->(X rem Y + Y) rem Y.
symetric_view([], _) -> [];
symetric_view([H|T], Y) ->
[symetric_view(H, Y)|
symetric_view(T, Y)];
symetric_view(X, Y) ->
Y2 = Y div 2,
if
(X > Y2) -> X - Y;
true -> X
end.
sub(A, B, Base) ->
mod(A - B, Base).
neg(A, Base) ->
sub(0, A, Base).
add(A, B, Base) ->
mod(A + B, Base).
mul(A, B, Base) ->
mod(A * B, Base).
divide(A, B, N) ->
B2 = inverse(B, N),
mul(A, B2, N).
pow(_, 0, _) -> 1;
pow(A, B, N) ->
basics:lrpow(A, B, N).
add_all([A], _) -> A;
add_all([A, B|T], Base) ->
add_all([add(A, B, Base)|T], Base).
inverse(A, Base) ->
basics:inverse(A, Base).
pis([], _, _) -> [];
pis([H|T], A, B) ->
X = mul(H, A, B),
[X|pis(T, X, B)].
batch_inverse([], _) -> [];
batch_inverse(Vs, Base) ->
[ v16 , v15 , v14 , v13 , v12 , v1 ]
i16
VI = lists:map(
fun(V) -> mul(AllI, V, Base) end,
[ i6 , i56 , i46 , i36 , i26 ]
[ v16 , v26 , v36 , v46 , v56 , v6 ]
[ v26 , v36 , v46 , v56 , v6 , 1 ]
[ i16 , i26 , i36 , i46 , i56 , i6 ]
lists:zipwith(fun(A, B) ->
mul(A, B, Base)
end, V4, VI2).
| null | https://raw.githubusercontent.com/zack-bitcoin/verkle/46bf69f17170a71829f9243faea06ee42f224687/src/crypto/ff.erl | erlang | -module(ff).
-export([sub/3, add/3, mul/3, divide/3,
pow/3, add_all/2,
mod/2,
inverse/2, batch_inverse/2, neg/2]).
mod(X,Y)->(X rem Y + Y) rem Y.
symetric_view([], _) -> [];
symetric_view([H|T], Y) ->
[symetric_view(H, Y)|
symetric_view(T, Y)];
symetric_view(X, Y) ->
Y2 = Y div 2,
if
(X > Y2) -> X - Y;
true -> X
end.
sub(A, B, Base) ->
mod(A - B, Base).
neg(A, Base) ->
sub(0, A, Base).
add(A, B, Base) ->
mod(A + B, Base).
mul(A, B, Base) ->
mod(A * B, Base).
divide(A, B, N) ->
B2 = inverse(B, N),
mul(A, B2, N).
pow(_, 0, _) -> 1;
pow(A, B, N) ->
basics:lrpow(A, B, N).
add_all([A], _) -> A;
add_all([A, B|T], Base) ->
add_all([add(A, B, Base)|T], Base).
inverse(A, Base) ->
basics:inverse(A, Base).
pis([], _, _) -> [];
pis([H|T], A, B) ->
X = mul(H, A, B),
[X|pis(T, X, B)].
batch_inverse([], _) -> [];
batch_inverse(Vs, Base) ->
[ v16 , v15 , v14 , v13 , v12 , v1 ]
i16
VI = lists:map(
fun(V) -> mul(AllI, V, Base) end,
[ i6 , i56 , i46 , i36 , i26 ]
[ v16 , v26 , v36 , v46 , v56 , v6 ]
[ v26 , v36 , v46 , v56 , v6 , 1 ]
[ i16 , i26 , i36 , i46 , i56 , i6 ]
lists:zipwith(fun(A, B) ->
mul(A, B, Base)
end, V4, VI2).
| |
4cc881a6d58111560c51551020d1f85584116f1350ecd4aec447498a50b0b8df | 7bridges-eu/clj-odbp | buffer.clj | Copyright 2017 7bridges s.r.l .
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns clj-odbp.binary.deserialize.buffer
(:require [clj-odbp.utils :refer [take-upto]]))
(defn to-buffer
"Transform a sequence into a rewindable buffer."
[data]
(atom {:position 0
:data data
:total-size (count data)}))
(defn buffer-take!
"Returns a vector of n elements from the current position on."
[buffer n]
(let [{position :position
data :data} @buffer]
(swap! buffer assoc :position (+ position n))
(into []
(take n (drop position data)))))
(defn buffer-take-while!
"Returns a vector of n elements while pred is true."
[buffer pred]
(let [{position :position data :data} @buffer
result (take-while pred (drop position data))]
(swap! buffer assoc :position (+ position (count result)))
(vec result)))
(defn buffer-take-upto!
"Returns a vector of n elements while pred is
true including the first false."
[buffer pred]
(let [{position :position data :data} @buffer
result (take-upto pred (drop position data))]
(swap! buffer assoc :position (+ position (count result)))
(vec result)))
(defn buffer-set-position!
"Sets the current position of the buffer."
[buffer new-position]
(swap! buffer assoc :position new-position))
(defn buffer-current-position
"Returns the current position of the buffer."
[buffer]
(:position @buffer))
(defn buffer-reset!
"Rewinds the buffer to the beginning."
[buffer]
(buffer-set-position! buffer 0))
(defn buffer-rest!
"Returns a vector whit the remaining elements."
[buffer]
(let [{size :total-size position :position} @buffer
remains (- size position)]
(swap! buffer assoc :position size)
(into [] (take remains (drop position (:data @buffer))))))
| null | https://raw.githubusercontent.com/7bridges-eu/clj-odbp/5a92515c2e4c6198bd1093ace83da96e30b90829/src/clj_odbp/binary/deserialize/buffer.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2017 7bridges s.r.l .
distributed under the License is distributed on an " AS IS " BASIS ,
(ns clj-odbp.binary.deserialize.buffer
(:require [clj-odbp.utils :refer [take-upto]]))
(defn to-buffer
"Transform a sequence into a rewindable buffer."
[data]
(atom {:position 0
:data data
:total-size (count data)}))
(defn buffer-take!
"Returns a vector of n elements from the current position on."
[buffer n]
(let [{position :position
data :data} @buffer]
(swap! buffer assoc :position (+ position n))
(into []
(take n (drop position data)))))
(defn buffer-take-while!
"Returns a vector of n elements while pred is true."
[buffer pred]
(let [{position :position data :data} @buffer
result (take-while pred (drop position data))]
(swap! buffer assoc :position (+ position (count result)))
(vec result)))
(defn buffer-take-upto!
"Returns a vector of n elements while pred is
true including the first false."
[buffer pred]
(let [{position :position data :data} @buffer
result (take-upto pred (drop position data))]
(swap! buffer assoc :position (+ position (count result)))
(vec result)))
(defn buffer-set-position!
"Sets the current position of the buffer."
[buffer new-position]
(swap! buffer assoc :position new-position))
(defn buffer-current-position
"Returns the current position of the buffer."
[buffer]
(:position @buffer))
(defn buffer-reset!
"Rewinds the buffer to the beginning."
[buffer]
(buffer-set-position! buffer 0))
(defn buffer-rest!
"Returns a vector whit the remaining elements."
[buffer]
(let [{size :total-size position :position} @buffer
remains (- size position)]
(swap! buffer assoc :position size)
(into [] (take remains (drop position (:data @buffer))))))
|
f62c813e02db955e63e6018635802f7b17e36d3b57522ff4f14d841306add7c6 | ghc/testsuite | TcNullaryTC.hs | # LANGUAGE NullaryTypeClasses #
module Main where
class R where
f :: Int -> Int
g :: a -> a
instance R where
f = (+1)
g = id
main = print (g (f 0))
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_run/TcNullaryTC.hs | haskell | # LANGUAGE NullaryTypeClasses #
module Main where
class R where
f :: Int -> Int
g :: a -> a
instance R where
f = (+1)
g = id
main = print (g (f 0))
| |
0b8987512ac65d106b9fd91489786529541d5eeee25cfcc24567bf4ffa15c773 | melisgl/mgl | test-gaussian-process.lisp | (in-package :mgl-test)
(defun test-gp-simple ()
(flet ((v (&rest args)
(array-to-mat (apply #'vector
(mapcar (lambda (x) (flt x)) args))
:ctype flt-ctype)))
(let* ((prior (make-instance
'prior-gp
:mean-fn (constantly 5)
:covariance-fn (lambda (x1 x2)
(+ (* 5 (exp (- (expt (/ (- x1 x2) 10) 2))))
1))))
(posterior (update-gp prior (v 1 2 3) (v 2 4 6))))
(assert (eq prior (update-gp prior (v) (v))))
(gp-means-and-covariances posterior (v 1.5))
(let ((posterior2 (update-gp posterior (v 10 30) (v 2 4))))
(multiple-value-bind (means covariances)
(gp-means-and-covariances posterior2 (v 1.5))
(assert (> 0.1 (- 3 (mat-as-scalar
(mv-gaussian-random
:means means :covariances covariances))))))))))
(defun test-gp ()
(do-cuda ()
(test-gp-simple)))
| null | https://raw.githubusercontent.com/melisgl/mgl/27a2552632a6a9330c1a133e519e676d9c6ca714/test/test-gaussian-process.lisp | lisp | (in-package :mgl-test)
(defun test-gp-simple ()
(flet ((v (&rest args)
(array-to-mat (apply #'vector
(mapcar (lambda (x) (flt x)) args))
:ctype flt-ctype)))
(let* ((prior (make-instance
'prior-gp
:mean-fn (constantly 5)
:covariance-fn (lambda (x1 x2)
(+ (* 5 (exp (- (expt (/ (- x1 x2) 10) 2))))
1))))
(posterior (update-gp prior (v 1 2 3) (v 2 4 6))))
(assert (eq prior (update-gp prior (v) (v))))
(gp-means-and-covariances posterior (v 1.5))
(let ((posterior2 (update-gp posterior (v 10 30) (v 2 4))))
(multiple-value-bind (means covariances)
(gp-means-and-covariances posterior2 (v 1.5))
(assert (> 0.1 (- 3 (mat-as-scalar
(mv-gaussian-random
:means means :covariances covariances))))))))))
(defun test-gp ()
(do-cuda ()
(test-gp-simple)))
| |
a9af7a2393412f52e006b868123685ca191c9d3b7fdc179b4a4963542f6ada3f | ocaml/ocaml-re | re_emacs.ml | [@@@deprecated "Use Re.Emacs"]
include Re.Emacs
| null | https://raw.githubusercontent.com/ocaml/ocaml-re/09c2745a2d8d1517b3d597396e82e122903b0017/deprecated/re_emacs.ml | ocaml | [@@@deprecated "Use Re.Emacs"]
include Re.Emacs
| |
03633716dca863e15da4a350f03f1673fe3cf4f0d24b756a4c56229c4a1c667a | haskell-cryptography/libsodium-bindings | XChaCha20.hs | # LANGUAGE CApiFFI #
{-# LANGUAGE Trustworthy #-}

-- |
-- Module: LibSodium.Bindings.XChaCha20
-- Description: Direct bindings to XChaCha20 primitives
-- Copyright: (C) 2022
-- License: BSD-3-Clause
-- Maintainer:
-- Stability: Stable
-- Portability: GHC only
--
-- Direct bindings to XChaCha20 primitives.
module LibSodium.Bindings.XChaCha20
  ( -- * Constants
    cryptoStreamXChaCha20KeyBytes
  , cryptoStreamXChaCha20NonceBytes

    -- * Functions
  , cryptoStreamXChaCha20
  , cryptoStreamXChaCha20Xor
  , cryptoStreamXChaCha20XorIC
  , cryptoStreamXChaCha20Keygen
  )
where

import Data.Word (Word64)
import Foreign.C.Types
  ( CInt (CInt)
  , CSize (CSize)
  , CUChar
  , CULLong (CULLong)
  )
import Foreign.Ptr (Ptr)

-- | Generate and store a given number of pseudorandom bytes, using a nonce
-- and a secret key. The amount of data read from the nonce location and secret
-- key location will be 'cryptoStreamXChaCha20NonceBytes' and
-- 'cryptoStreamXChaCha20KeyBytes' respectively.
--
-- This function theoretically returns 0 on success, and -1 on failure.
-- However, this cannot ever fail (see the upstream libsodium discussion),
-- although its documentation does not explain this.
--
-- = Corresponds to
--
-- [@crypto_stream_xchacha20@](https://doc.libsodium.org/advanced/stream_ciphers/xchacha20#usage)
--
-- @since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20"
  cryptoStreamXChaCha20
    :: Ptr CUChar
    -- ^ Out-parameter where pseudorandom bytes will be stored
    -> CULLong
    -- ^ How many bytes to write
    -> Ptr CUChar
    -- ^ Nonce location (see documentation, won't be modified)
    -> Ptr CUChar
    -- ^ Secret key location (see documentation, won't be modified)
    -> IO CInt
    -- ^ Always 0 (see documentation)

-- | Encrypt a message of the given length, using a nonce and a secret key. The
-- amount of data read from the nonce location and secret key location will be
-- 'cryptoStreamXChaCha20NonceBytes' and 'cryptoStreamXChaCha20KeyBytes'
-- respectively.
--
-- The resulting ciphertext does /not/ include an authentication tag. It will
-- be combined with the output of the stream cipher using the XOR operation.
--
-- This function theoretically returns 0 on success, and -1 on failure.
-- However, this cannot ever fail (see the upstream libsodium discussion),
-- although its documentation does not explain this.
--
-- = Important note
--
-- The message location and ciphertext location can be the same: this will
-- produce in-place encryption. However, if they are /not/ the same, they must
-- be non-overlapping.
--
-- = Corresponds to
--
-- [@crypto_stream_xchacha20_xor@](https://doc.libsodium.org/advanced/stream_ciphers/xchacha20#usage)
--
-- @since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20_xor"
  cryptoStreamXChaCha20Xor
    :: Ptr CUChar
    -- ^ Out-parameter where the ciphertext will be stored
    -> Ptr CUChar
    -- ^ Message location (won't be modified)
    -> CULLong
    -- ^ Message length
    -> Ptr CUChar
    -- ^ Nonce location (see documentation, won't be modified)
    -> Ptr CUChar
    -- ^ Secret key location (see documentation, won't be modified)
    -> IO CInt
    -- ^ Always 0 (see documentation)

-- | As 'cryptoStreamXChaCha20Xor', but allows setting the initial value of the
-- block counter to a non-zero value. This permits direct access to any block
-- without having to compute previous ones.
--
-- See the documentation of 'cryptoStreamXChaCha20Xor' for caveats on the use
-- of this function.
--
-- = Corresponds to
--
-- [@crypto_stream_xchacha20_xor_ic@](https://doc.libsodium.org/advanced/stream_ciphers/xchacha20#usage)
--
-- @since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20_xor_ic"
  cryptoStreamXChaCha20XorIC
    :: Ptr CUChar
    -- ^ Out-parameter where the ciphertext will be stored
    -> Ptr CUChar
    -- ^ Message location (won't be modified)
    -> CULLong
    -- ^ Message length
    -> Ptr CUChar
    -- ^ Nonce location (see documentation, won't be modified)
    -> Word64
    -- ^ Value of block counter (see documentation)
    -> Ptr CUChar
    -- ^ Secret key location (see documentation, won't be modified)
    -> IO CInt
    -- ^ Always 0 (see documentation)

-- | Generate a random XChaCha20 secret key. This will always write
-- 'cryptoStreamXChaCha20KeyBytes' to the out-parameter.
--
-- = Corresponds to
--
-- [@crypto_stream_xchacha20_keygen@](https://doc.libsodium.org/advanced/stream_ciphers/xchacha20#usage)
--
-- @since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20_keygen"
  cryptoStreamXChaCha20Keygen
    :: Ptr CUChar
    -- ^ Out-parameter where the key will be stored
    -> IO ()
    -- ^ Doesn't return anything meaningful

-- | The number of bytes in an XChaCha20 secret key.
--
-- @since 0.0.1.0
foreign import capi "sodium.h value crypto_stream_xchacha20_KEYBYTES"
  cryptoStreamXChaCha20KeyBytes :: CSize

-- | The number of bytes in an XChaCha20 nonce.
--
-- @since 0.0.1.0
foreign import capi "sodium.h value crypto_stream_xchacha20_NONCEBYTES"
  cryptoStreamXChaCha20NonceBytes :: CSize
Description: Direct bindings to XChaCha20 primitives
License: BSD-3-Clause
Maintainer:
Stability: Stable
Direct bindings to XChaCha20 primitives.
* Constants
* Functions
| Generate and store a given number of pseudorandom bytes, using a nonce
and a secret key. The amount of data read from the nonce location and secret
key location will be 'cryptoStreamXChaCha20NonceBytes' and
'cryptoStreamXChaCha20KeyBytes' respectively.
This function theoretically returns 0 on success, and -1 on failure. However,
[this cannot ever
fail](#discussioncomment-1979161),
although its documentation does not explain this.
= Corresponds to
[@crypto_stream_xchacha20@](#usage)
^ Out-parameter where pseudorandom bytes will be stored
^ How many bytes to write
^ Nonce location (see documentation, won't be modified)
^ Secret key location (see documentation, won't be modified)
^ Always 0 (see documentation)
| Encrypt a message of the given length, using a nonce and a secret key. The
amount of data read from the nonce location and secret key location will be
'cryptoStreamXChaCha20NonceBytes' and 'cryptoStreamXChaCha20KeyBytes'
respectively.
combined with the output of the stream cipher using the XOR operation.
This function theoretically returns 0 on success, and -1 on failure. However,
[this cannot ever
fail](#discussioncomment-1979161),
although its documentation does not explain this.
= Important note
The message location and ciphertext location can be the same: this will
be non-overlapping.
= Corresponds to
[@crypto_stream_xchacha20_xor@](#usage)
^ Out-parameter where the ciphertext will be stored
^ Message location (won't be modified)
^ Message length
^ Nonce location (see documentation, won't be modified)
^ Secret key location (see documentation, won't be modified)
^ Always 0 (see documentation)
| As 'cryptoStreamXChaCha20Xor', but allows setting the initial value of the
block counter to a non-zero value. This permits direct access to any block
without having to compute previous ones.
See the documentation of 'cryptoStreamXChaCha20Xor' for caveats on the use of
this function.
= Corresponds to
[@crypto_stream_xchacha20_xor_ic@](#usage)
^ Out-parameter where the ciphertext will be stored
^ Message location (won't be modified)
^ Message length
^ Nonce location (see documentation, won't be modified)
^ Value of block counter (see documentation)
^ Secret key location (see documentation, won't be modified)
^ Always 0 (see documentation)
| Generate a random XChaCha20 secret key. This will always write
'cryptoStreamXChaCha20KeyBytes' to the out-parameter.
= Corresponds to
^ Out-parameter where the key will be stored
^ Doesn't return anything meaningful
| The number of bytes in an XChaCha20 secret key.
| The number of bytes in an XChaCha20 nonce.
| # LANGUAGE CApiFFI #
# LANGUAGE Trustworthy #
Module : LibSodium . Bindings . XChaCha20
Copyright : ( C ) 2022
Portability : GHC only
module LibSodium.Bindings.XChaCha20
cryptoStreamXChaCha20KeyBytes
, cryptoStreamXChaCha20NonceBytes
, cryptoStreamXChaCha20
, cryptoStreamXChaCha20Xor
, cryptoStreamXChaCha20XorIC
, cryptoStreamXChaCha20Keygen
)
where
import Data.Word (Word64)
import Foreign.C.Types
( CInt (CInt)
, CSize (CSize)
, CUChar
, CULLong (CULLong)
)
import Foreign.Ptr (Ptr)
@since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20"
cryptoStreamXChaCha20
:: Ptr CUChar
-> CULLong
-> Ptr CUChar
-> Ptr CUChar
-> IO CInt
The resulting ciphertext does /not/ include an authentication tag . It will be
produce in - place encryption . However , if they are /not/ the same , they must
@since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20_xor"
cryptoStreamXChaCha20Xor
:: Ptr CUChar
-> Ptr CUChar
-> CULLong
-> Ptr CUChar
-> Ptr CUChar
-> IO CInt
@since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20_xor_ic"
cryptoStreamXChaCha20XorIC
:: Ptr CUChar
-> Ptr CUChar
-> CULLong
-> Ptr CUChar
-> Word64
-> Ptr CUChar
-> IO CInt
[ @crypto_stream_xchacha20_keygen@]( / doc / advanced / stream_ciphers / xchacha20#usage )
@since 0.0.1.0
foreign import capi "sodium.h crypto_stream_xchacha20_keygen"
cryptoStreamXChaCha20Keygen
:: Ptr CUChar
-> IO ()
@since 0.0.1.0
foreign import capi "sodium.h value crypto_stream_xchacha20_KEYBYTES"
cryptoStreamXChaCha20KeyBytes :: CSize
@since 0.0.1.0
foreign import capi "sodium.h value crypto_stream_xchacha20_NONCEBYTES"
cryptoStreamXChaCha20NonceBytes :: CSize
|
21529bf5ba6390f98c83bb0a907f09a724c923b1ecae90db2c1d14b3d8c0a6fd | kupl/LearnML | patch.ml | type formula =
| True
| False
| Not of formula
| AndAlso of (formula * formula)
| OrElse of (formula * formula)
| Imply of (formula * formula)
| Equal of (exp * exp)
and exp = Num of int | Plus of (exp * exp) | Minus of (exp * exp)
(* Evaluate a propositional [formula] to a boolean.  Arithmetic
   sub-expressions appearing under [Equal] are reduced to integers by a
   local helper before being compared. *)
let rec eval (f : formula) : bool =
  (* Reduce an arithmetic expression to its integer value. *)
  let rec value_of (e : exp) : int =
    match e with
    | Num n -> n
    | Plus (a, b) -> value_of a + value_of b
    | Minus (a, b) -> value_of a - value_of b
  in
  match f with
  | True -> true
  | False -> false
  | Not g -> not (eval g)
  | AndAlso (l, r) -> eval l && eval r
  | OrElse (l, r) -> eval l || eval r
  | Imply (l, r) -> not (eval l) || eval r
  | Equal (l, r) -> value_of l = value_of r
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/formula/sub32/patch.ml | ocaml | type formula =
| True
| False
| Not of formula
| AndAlso of (formula * formula)
| OrElse of (formula * formula)
| Imply of (formula * formula)
| Equal of (exp * exp)
and exp = Num of int | Plus of (exp * exp) | Minus of (exp * exp)
let rec eval (f : formula) : bool =
match f with
| True -> true
| False -> false
| Not a -> if eval a then false else true
| AndAlso (left, right) -> if eval left && eval right then true else false
| OrElse (left, right) -> if eval left || eval right then true else false
| Imply (left, right) ->
if eval left = false || eval right = true then true else false
| Equal (left, right) ->
let rec env (v : exp) : int =
match v with
| Num a -> a
| Plus (a, b) -> env a + env b
| Minus (a, b) -> env a - env b
in
if env left = env right then true else false
| |
93f7a86bca9ac1834e5d86a8f6640b29ffbd26a59e5732972b042d86af8022fa | mt-caret/io_uring | echo_tcp.ml | open Core
(* a port of -echo-server/blob/master/io_uring_echo_server.c *)
(* Per-operation state attached to each io_uring submission.  When a
   completion arrives, this tag tells the event loop which operation
   finished and which buffer/socket it belongs to. *)
module User_data = struct
type t =
| Accept
| Recv of Unix.File_descr.t * (Bigstring.t[@sexp.opaque])
| Send of Unix.File_descr.t * (Bigstring.t[@sexp.opaque]) * int * int
[@@deriving sexp_of]
(* Queue the io_uring submission described by [t].  Raises when the
   submission queue is full.  For [Accept], the returned queued-sockaddr
   handle is stashed in [queued_sockaddr_ref] so the completion handler
   can later read the peer address from it. *)
let prepare t ~io_uring ~sockfd ~queued_sockaddr_ref =
match t with
| Accept ->
let queued_sockaddr =
Io_uring.prepare_accept io_uring Io_uring.Sqe_flags.none sockfd t
in
queued_sockaddr_ref
:= (match queued_sockaddr with
| None -> raise_s [%message "accept: submission queue is full"]
| Some queued_sockaddr -> Some queued_sockaddr)
| Recv (fd, buf) ->
(* [prepare_recv] returns [true] when the submission queue is full. *)
let sq_full = Io_uring.prepare_recv io_uring Io_uring.Sqe_flags.none fd buf t in
if sq_full then raise_s [%message "recv: submission queue is full"]
| Send (fd, buf, pos, len) ->
let sq_full =
Io_uring.prepare_send io_uring Io_uring.Sqe_flags.none fd ~pos ~len buf t
in
if sq_full then raise_s [%message "send: submission queue is full"]
;;
end
(* Run the echo server: bind/listen on localhost:[port], then drive an
   io_uring submit/wait/handle-completions loop forever.  Each accepted
   connection gets one [max_message_len]-byte buffer that shuttles
   between recv and send states. *)
let run ~queue_depth ~port ~backlog ~max_message_len =
let sockfd = Unix.socket ~domain:PF_INET ~kind:SOCK_STREAM ~protocol:0 () in
Unix.setsockopt sockfd SO_REUSEADDR true;
let addr = Unix.ADDR_INET (Unix.Inet_addr.localhost, port) in
Unix.bind sockfd ~addr;
Unix.listen sockfd ~backlog;
(* Completion queue is sized at twice the submission queue. *)
let io_uring =
Io_uring.create
~max_submission_entries:queue_depth
~max_completion_entries:(queue_depth * 2)
in
let queued_sockaddr_ref = ref None in
let prepare = User_data.prepare ~io_uring ~sockfd ~queued_sockaddr_ref in
(* Seed the loop with one outstanding accept. *)
prepare User_data.Accept;
while true do
let (_ : int) = Io_uring.submit io_uring in
Io_uring.wait io_uring ~timeout:`Never;
Io_uring.iter_completions io_uring ~f:(fun ~user_data ~res ~flags ->
print_s [%message "" (user_data : User_data.t) (res : int) (flags : int)];
(* A negative [res] is a negated errno; [Unix.unix_error (-res)]
   re-raises it as a Unix error. *)
match (user_data : User_data.t) with
| Accept ->
if res < 0 then Unix.unix_error (-res) "Io_uring.accept" "";
(* [res] is the new connection's fd; start reading from it and
   immediately re-arm the accept so more clients can connect. *)
User_data.Recv (Unix.File_descr.of_int res, Bigstring.create max_message_len)
|> prepare;
let sockaddr =
Option.value_exn !queued_sockaddr_ref
|> Io_uring.Queued_sockaddr.thread_unsafe_get
|> Option.value_exn
in
print_s [%message "client connected" (sockaddr : Unix.sockaddr)];
prepare User_data.Accept
| Recv (fd, buf) ->
(* NOTE(review): a recv error currently raises and kills the whole
   server rather than just this connection -- consider per-client
   error handling. *)
if res < 0 then Unix.unix_error (-res) "Io_uring.recv" "";
(* Zero-byte read: peer is done, shut the socket down; otherwise
   echo back the [res] bytes just received. *)
if res = 0
then Unix.shutdown fd ~mode:SHUTDOWN_ALL
else User_data.Send (fd, buf, 0, res) |> prepare
| Send (fd, buf, off, len) ->
if res < 0 then Unix.unix_error (-res) "Io_uring.send" "";
(* Short write: keep sending the remainder; once everything is
   flushed, go back to reading on the same buffer. *)
if res + off < len
then User_data.Send (fd, buf, off + res, len - res) |> prepare
else User_data.Recv (fd, buf) |> prepare);
Io_uring.clear_completions io_uring
done
;;
(* Command-line entry point: parse the flags and hand them to [run].
   Defaults: queue-depth 2048, backlog 100, max-message-len 4096;
   [port] is required. *)
let () =
Command.run
@@ Command.basic ~summary:"echo server using io_uring"
@@ let%map_open.Command queue_depth =
flag
"queue-depth"
(optional_with_default 2048 int)
~doc:"INT submission completion queue depth"
and port = flag "port" (required int) ~doc:" port to listen on"
and backlog =
flag
"backlog"
(optional_with_default 100 int)
~doc:"INT size of backlog for listen()"
and max_message_len =
flag
"max-message-len"
(optional_with_default 4096 int)
~doc:"INT maximum size of messages"
in
fun () -> run ~queue_depth ~port ~backlog ~max_message_len
;;
| null | https://raw.githubusercontent.com/mt-caret/io_uring/78012a69a55faf81ab2fbb710c6d5ae71f6b44f0/example/echo_tcp.ml | ocaml | a port of -echo-server/blob/master/io_uring_echo_server.c
TODO: fix handling? | open Core
module User_data = struct
type t =
| Accept
| Recv of Unix.File_descr.t * (Bigstring.t[@sexp.opaque])
| Send of Unix.File_descr.t * (Bigstring.t[@sexp.opaque]) * int * int
[@@deriving sexp_of]
let prepare t ~io_uring ~sockfd ~queued_sockaddr_ref =
match t with
| Accept ->
let queued_sockaddr =
Io_uring.prepare_accept io_uring Io_uring.Sqe_flags.none sockfd t
in
queued_sockaddr_ref
:= (match queued_sockaddr with
| None -> raise_s [%message "accept: submission queue is full"]
| Some queued_sockaddr -> Some queued_sockaddr)
| Recv (fd, buf) ->
let sq_full = Io_uring.prepare_recv io_uring Io_uring.Sqe_flags.none fd buf t in
if sq_full then raise_s [%message "recv: submission queue is full"]
| Send (fd, buf, pos, len) ->
let sq_full =
Io_uring.prepare_send io_uring Io_uring.Sqe_flags.none fd ~pos ~len buf t
in
if sq_full then raise_s [%message "send: submission queue is full"]
;;
end
let run ~queue_depth ~port ~backlog ~max_message_len =
let sockfd = Unix.socket ~domain:PF_INET ~kind:SOCK_STREAM ~protocol:0 () in
Unix.setsockopt sockfd SO_REUSEADDR true;
let addr = Unix.ADDR_INET (Unix.Inet_addr.localhost, port) in
Unix.bind sockfd ~addr;
Unix.listen sockfd ~backlog;
let io_uring =
Io_uring.create
~max_submission_entries:queue_depth
~max_completion_entries:(queue_depth * 2)
in
let queued_sockaddr_ref = ref None in
let prepare = User_data.prepare ~io_uring ~sockfd ~queued_sockaddr_ref in
prepare User_data.Accept;
while true do
let (_ : int) = Io_uring.submit io_uring in
Io_uring.wait io_uring ~timeout:`Never;
Io_uring.iter_completions io_uring ~f:(fun ~user_data ~res ~flags ->
print_s [%message "" (user_data : User_data.t) (res : int) (flags : int)];
match (user_data : User_data.t) with
| Accept ->
if res < 0 then Unix.unix_error (-res) "Io_uring.accept" "";
User_data.Recv (Unix.File_descr.of_int res, Bigstring.create max_message_len)
|> prepare;
let sockaddr =
Option.value_exn !queued_sockaddr_ref
|> Io_uring.Queued_sockaddr.thread_unsafe_get
|> Option.value_exn
in
print_s [%message "client connected" (sockaddr : Unix.sockaddr)];
prepare User_data.Accept
| Recv (fd, buf) ->
if res < 0 then Unix.unix_error (-res) "Io_uring.recv" "";
if res = 0
then Unix.shutdown fd ~mode:SHUTDOWN_ALL
else User_data.Send (fd, buf, 0, res) |> prepare
| Send (fd, buf, off, len) ->
if res < 0 then Unix.unix_error (-res) "Io_uring.send" "";
if res + off < len
then User_data.Send (fd, buf, off + res, len - res) |> prepare
else User_data.Recv (fd, buf) |> prepare);
Io_uring.clear_completions io_uring
done
;;
let () =
Command.run
@@ Command.basic ~summary:"echo server using io_uring"
@@ let%map_open.Command queue_depth =
flag
"queue-depth"
(optional_with_default 2048 int)
~doc:"INT submission completion queue depth"
and port = flag "port" (required int) ~doc:" port to listen on"
and backlog =
flag
"backlog"
(optional_with_default 100 int)
~doc:"INT size of backlog for listen()"
and max_message_len =
flag
"max-message-len"
(optional_with_default 4096 int)
~doc:"INT maximum size of messages"
in
fun () -> run ~queue_depth ~port ~backlog ~max_message_len
;;
|
f2b22bb1d7a07cbdccb1f9b299b237ebe8769d3b8235317f0ad2cff1622e6a6a | emqx/emqx-lwm2m | emqx_tlv_SUITE.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2020 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_tlv_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-define(LOGT(Format, Args), logger:debug("TEST_SUITE: " ++ Format, Args)).
-include("emqx_lwm2m.hrl").
-include_lib("lwm2m_coap/include/coap.hrl").
-include_lib("eunit/include/eunit.hrl").
all() -> [case01, case02, case03, case03_0, case04, case05, case06, case07, case08, case09].
init_per_suite(Config) ->
Config.
end_per_suite(Config) ->
Config.
%% Round-trip a TLV payload holding a single resource-with-value entry
%% (id 0) whose payload is the string "Open Mobile Alliance":
%% parse/1 must produce the expected map list, and encode/1 applied to
%% that list must reproduce the original bytes exactly.
case01(_Config) ->
Data = <<16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>}
],
?assertEqual(Exp, R),
%% Encoding must be the exact inverse of parsing.
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
%% Round-trip a multiple-resource TLV (resource id 6) that carries two
%% resource instances: instance 0 with one-byte value 1 and instance 1
%% with one-byte value 5.  parse/1 and encode/1 must be exact inverses.
case02(_Config) ->
Data = <<16#86, 16#06, 16#41, 16#00, 16#01, 16#41, 16#01, 16#05>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_multiple_resource => 16#06, value => [
#{tlv_resource_instance => 16#00, value => <<1>>},
#{tlv_resource_instance => 16#01, value => <<5>>}
]}
],
?assertEqual(Exp, R),
%% Encoding must reproduce the original wire bytes.
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case03(_Config) ->
Data = <<16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>},
#{tlv_resource_with_value => 16#01, value => <<"Lightweight M2M Client">>},
#{tlv_resource_with_value => 16#02, value => <<"345000123">>}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case03_0(_Config) ->
Data = <<16#87, 16#02, 16#41, 16#7F, 16#07, 16#61, 16#01, 16#36, 16#01>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_multiple_resource => 16#02, value => [
#{tlv_resource_instance => 16#7F, value => <<16#07>>},
#{tlv_resource_instance => 16#0136, value => <<16#01>>}
]}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case04(_Config) ->
% 6.4.3.1 Single Object Instance Request Example
Data = <<16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33, 16#C3, 16#03, 16#31, 16#2E, 16#30, 16#86, 16#06, 16#41, 16#00, 16#01, 16#41, 16#01, 16#05, 16#88, 16#07, 16#08, 16#42, 16#00, 16#0E, 16#D8, 16#42, 16#01, 16#13, 16#88, 16#87, 16#08, 16#41, 16#00, 16#7D, 16#42, 16#01, 16#03, 16#84, 16#C1, 16#09, 16#64, 16#C1, 16#0A, 16#0F, 16#83, 16#0B, 16#41, 16#00, 16#00, 16#C4, 16#0D, 16#51, 16#82, 16#42, 16#8F, 16#C6, 16#0E, 16#2B, 16#30, 16#32, 16#3A, 16#30, 16#30, 16#C1, 16#10, 16#55>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>},
#{tlv_resource_with_value => 16#01, value => <<"Lightweight M2M Client">>},
#{tlv_resource_with_value => 16#02, value => <<"345000123">>},
#{tlv_resource_with_value => 16#03, value => <<"1.0">>},
#{tlv_multiple_resource => 16#06, value => [
#{tlv_resource_instance => 16#00, value => <<1>>},
#{tlv_resource_instance => 16#01, value => <<5>>}
]},
#{tlv_multiple_resource => 16#07, value => [
#{tlv_resource_instance => 16#00, value => <<16#0ED8:16>>},
#{tlv_resource_instance => 16#01, value => <<16#1388:16>>}
]},
#{tlv_multiple_resource => 16#08, value => [
#{tlv_resource_instance => 16#00, value => <<16#7d>>},
#{tlv_resource_instance => 16#01, value => <<16#0384:16>>}
]},
#{tlv_resource_with_value => 16#09, value => <<16#64>>},
#{tlv_resource_with_value => 16#0A, value => <<16#0F>>},
#{tlv_multiple_resource => 16#0B, value => [
#{tlv_resource_instance => 16#00, value => <<16#00>>}
]},
#{tlv_resource_with_value => 16#0D, value => <<16#5182428F:32>>},
#{tlv_resource_with_value => 16#0E, value => <<"+02:00">>},
#{tlv_resource_with_value => 16#10, value => <<"U">>}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case05(_Config) ->
% 6.4.3.2 Multiple Object Instance Request Examples
% A) Request on Single-Instance Object
Data = <<16#08, 16#00, 16#79, 16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33, 16#C3, 16#03, 16#31, 16#2E, 16#30, 16#86, 16#06, 16#41, 16#00, 16#01, 16#41, 16#01, 16#05, 16#88, 16#07, 16#08, 16#42, 16#00, 16#0E, 16#D8, 16#42, 16#01, 16#13, 16#88, 16#87, 16#08, 16#41, 16#00, 16#7D, 16#42, 16#01, 16#03, 16#84, 16#C1, 16#09, 16#64, 16#C1, 16#0A, 16#0F, 16#83, 16#0B, 16#41, 16#00, 16#00, 16#C4, 16#0D, 16#51, 16#82, 16#42, 16#8F, 16#C6, 16#0E, 16#2B, 16#30, 16#32, 16#3A, 16#30, 16#30, 16#C1, 16#10, 16#55>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_object_instance => 16#00, value => [
#{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>},
#{tlv_resource_with_value => 16#01, value => <<"Lightweight M2M Client">>},
#{tlv_resource_with_value => 16#02, value => <<"345000123">>},
#{tlv_resource_with_value => 16#03, value => <<"1.0">>},
#{tlv_multiple_resource => 16#06, value => [
#{tlv_resource_instance => 16#00, value => <<1>>},
#{tlv_resource_instance => 16#01, value => <<5>>}
]},
#{tlv_multiple_resource => 16#07, value => [
#{tlv_resource_instance => 16#00, value => <<16#0ED8:16>>},
#{tlv_resource_instance => 16#01, value => <<16#1388:16>>}
]},
#{tlv_multiple_resource => 16#08, value => [
#{tlv_resource_instance => 16#00, value => <<16#7d>>},
#{tlv_resource_instance => 16#01, value => <<16#0384:16>>}
]},
#{tlv_resource_with_value => 16#09, value => <<16#64>>},
#{tlv_resource_with_value => 16#0A, value => <<16#0F>>},
#{tlv_multiple_resource => 16#0B, value => [
#{tlv_resource_instance => 16#00, value => <<16#00>>}
]},
#{tlv_resource_with_value => 16#0D, value => <<16#5182428F:32>>},
#{tlv_resource_with_value => 16#0E, value => <<"+02:00">>},
#{tlv_resource_with_value => 16#10, value => <<"U">>}
]}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case06(_Config) ->
% 6.4.3.2 Multiple Object Instance Request Examples
B ) Request on Multiple - Instances Object having 2 instances
Data = <<16#08, 16#00, 16#0E, 16#C1, 16#00, 16#01, 16#C1, 16#01, 16#00, 16#83, 16#02, 16#41, 16#7F, 16#07, 16#C1, 16#03, 16#7F, 16#08, 16#02, 16#12, 16#C1, 16#00, 16#03, 16#C1, 16#01, 16#00, 16#87, 16#02, 16#41, 16#7F, 16#07, 16#61, 16#01, 16#36, 16#01, 16#C1, 16#03, 16#7F>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_object_instance => 16#00, value => [
#{tlv_resource_with_value => 16#00, value => <<16#01>>},
#{tlv_resource_with_value => 16#01, value => <<16#00>>},
#{tlv_multiple_resource => 16#02, value => [
#{tlv_resource_instance => 16#7F, value => <<16#07>>}
]},
#{tlv_resource_with_value => 16#03, value => <<16#7F>>}
]},
#{tlv_object_instance => 16#02, value => [
#{tlv_resource_with_value => 16#00, value => <<16#03>>},
#{tlv_resource_with_value => 16#01, value => <<16#00>>},
#{tlv_multiple_resource => 16#02, value => [
#{tlv_resource_instance => 16#7F, value => <<16#07>>},
#{tlv_resource_instance => 16#0136, value => <<16#01>>}
]},
#{tlv_resource_with_value => 16#03, value => <<16#7F>>}
]}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case07(_Config) ->
% 6.4.3.2 Multiple Object Instance Request Examples
C ) Request on Multiple - Instances Object having 1 instance only
Data = <<16#08, 16#00, 16#0F, 16#C1, 16#00, 16#01, 16#C4, 16#01, 16#00, 16#01, 16#51, 16#80, 16#C1, 16#06, 16#01, 16#C1, 16#07, 16#55>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_object_instance => 16#00, value => [
#{tlv_resource_with_value => 16#00, value => <<16#01>>},
#{tlv_resource_with_value => 16#01, value => <<86400:32>>},
#{tlv_resource_with_value => 16#06, value => <<16#01>>},
#{tlv_resource_with_value => 16#07, value => <<$U>>}]}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case08(_Config) ->
6.4.3.3 Example of Request on an Object Instance containing an Object Link Resource
Example 1 ) request to Object 65 Instance 0 : Read /65/0
Data = <<16#88, 16#00, 16#0C, 16#44, 16#00, 16#00, 16#42, 16#00, 16#00, 16#44, 16#01, 16#00, 16#42, 16#00, 16#01, 16#C8, 16#01, 16#0D, 16#38, 16#36, 16#31, 16#33, 16#38, 16#30, 16#30, 16#37, 16#35, 16#35, 16#35, 16#30, 16#30, 16#C4, 16#02, 16#12, 16#34, 16#56, 16#78>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_multiple_resource => 16#00, value => [
#{tlv_resource_instance => 16#00, value => <<16#00, 16#42, 16#00, 16#00>>},
#{tlv_resource_instance => 16#01, value => <<16#00, 16#42, 16#00, 16#01>>}
]},
#{tlv_resource_with_value => 16#01, value => <<"8613800755500">>},
#{tlv_resource_with_value => 16#02, value => <<16#12345678:32>>}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case09(_Config) ->
6.4.3.3 Example of Request on an Object Instance containing an Object Link Resource
Example 2 ) request to Object 66 : Read /66 : TLV payload will contain 2 Object Instances
Data = <<16#08, 16#00, 16#26, 16#C8, 16#00, 16#0B, 16#6D, 16#79, 16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20, 16#31, 16#C8, 16#01, 16#0F, 16#49, 16#6E, 16#74, 16#65, 16#72, 16#6E, 16#65, 16#74, 16#2E, 16#31, 16#35, 16#2E, 16#32, 16#33, 16#34, 16#C4, 16#02, 16#00, 16#43, 16#00, 16#00, 16#08, 16#01, 16#26, 16#C8, 16#00, 16#0B, 16#6D, 16#79, 16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20, 16#32, 16#C8, 16#01, 16#0F, 16#49, 16#6E, 16#74, 16#65, 16#72, 16#6E, 16#65, 16#74, 16#2E, 16#31, 16#35, 16#2E, 16#32, 16#33, 16#35, 16#C4, 16#02, 16#FF, 16#FF, 16#FF, 16#FF>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_object_instance => 16#00, value => [
#{tlv_resource_with_value => 16#00, value => <<"myService 1">>},
#{tlv_resource_with_value => 16#01, value => <<"Internet.15.234">>},
#{tlv_resource_with_value => 16#02, value => <<16#00, 16#43, 16#00, 16#00>>}
]},
#{tlv_object_instance => 16#01, value => [
#{tlv_resource_with_value => 16#00, value => <<"myService 2">>},
#{tlv_resource_with_value => 16#01, value => <<"Internet.15.235">>},
#{tlv_resource_with_value => 16#02, value => <<16#FF, 16#FF, 16#FF, 16#FF>>}
]}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
| null | https://raw.githubusercontent.com/emqx/emqx-lwm2m/6b02495beebe3b3596c75f730f4e64f3e92dd3a2/test/emqx_tlv_SUITE.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
6.4.3.1 Single Object Instance Request Example
6.4.3.2 Multiple Object Instance Request Examples
A) Request on Single-Instance Object
6.4.3.2 Multiple Object Instance Request Examples
6.4.3.2 Multiple Object Instance Request Examples | Copyright ( c ) 2020 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_tlv_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-define(LOGT(Format, Args), logger:debug("TEST_SUITE: " ++ Format, Args)).
-include("emqx_lwm2m.hrl").
-include_lib("lwm2m_coap/include/coap.hrl").
-include_lib("eunit/include/eunit.hrl").
all() -> [case01, case02, case03, case03_0, case04, case05, case06, case07, case08, case09].
init_per_suite(Config) ->
Config.
end_per_suite(Config) ->
Config.
case01(_Config) ->
Data = <<16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
case02(_Config) ->
Data = <<16#86, 16#06, 16#41, 16#00, 16#01, 16#41, 16#01, 16#05>>,
R = emqx_lwm2m_tlv:parse(Data),
Exp = [
#{tlv_multiple_resource => 16#06, value => [
#{tlv_resource_instance => 16#00, value => <<1>>},
#{tlv_resource_instance => 16#01, value => <<5>>}
]}
],
?assertEqual(Exp, R),
EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
?assertEqual(EncodedBinary, Data).
%% Round-trip three consecutive resource-with-value TLVs carrying the
%% manufacturer, model and serial-number strings of the Device object.
case03(_Config) ->
    Data = <<16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>},
        #{tlv_resource_with_value => 16#01, value => <<"Lightweight M2M Client">>},
        #{tlv_resource_with_value => 16#02, value => <<"345000123">>}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Round-trip a multiple-resource TLV whose two resource instances use
%% different identifier widths: an 8-bit id (16#7F) and a 16-bit id
%% (16#0136), exercising the length-of-identifier field of the TLV header.
case03_0(_Config) ->
    Data = <<16#87, 16#02, 16#41, 16#7F, 16#07, 16#61, 16#01, 16#36, 16#01>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_multiple_resource => 16#02, value => [
            #{tlv_resource_instance => 16#7F, value => <<16#07>>},
            #{tlv_resource_instance => 16#0136, value => <<16#01>>}
        ]}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Round-trip a full Device-object resource list: a mix of single-value
%% resources and multiple resources with 8/16/32-bit values, as produced
%% by a Read on the whole object (no enclosing object-instance TLV).
case04(_Config) ->
    Data = <<16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33, 16#C3, 16#03, 16#31, 16#2E, 16#30, 16#86, 16#06, 16#41, 16#00, 16#01, 16#41, 16#01, 16#05, 16#88, 16#07, 16#08, 16#42, 16#00, 16#0E, 16#D8, 16#42, 16#01, 16#13, 16#88, 16#87, 16#08, 16#41, 16#00, 16#7D, 16#42, 16#01, 16#03, 16#84, 16#C1, 16#09, 16#64, 16#C1, 16#0A, 16#0F, 16#83, 16#0B, 16#41, 16#00, 16#00, 16#C4, 16#0D, 16#51, 16#82, 16#42, 16#8F, 16#C6, 16#0E, 16#2B, 16#30, 16#32, 16#3A, 16#30, 16#30, 16#C1, 16#10, 16#55>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>},
        #{tlv_resource_with_value => 16#01, value => <<"Lightweight M2M Client">>},
        #{tlv_resource_with_value => 16#02, value => <<"345000123">>},
        #{tlv_resource_with_value => 16#03, value => <<"1.0">>},
        #{tlv_multiple_resource => 16#06, value => [
            #{tlv_resource_instance => 16#00, value => <<1>>},
            #{tlv_resource_instance => 16#01, value => <<5>>}
        ]},
        #{tlv_multiple_resource => 16#07, value => [
            #{tlv_resource_instance => 16#00, value => <<16#0ED8:16>>},
            #{tlv_resource_instance => 16#01, value => <<16#1388:16>>}
        ]},
        #{tlv_multiple_resource => 16#08, value => [
            #{tlv_resource_instance => 16#00, value => <<16#7d>>},
            #{tlv_resource_instance => 16#01, value => <<16#0384:16>>}
        ]},
        #{tlv_resource_with_value => 16#09, value => <<16#64>>},
        #{tlv_resource_with_value => 16#0A, value => <<16#0F>>},
        #{tlv_multiple_resource => 16#0B, value => [
            #{tlv_resource_instance => 16#00, value => <<16#00>>}
        ]},
        #{tlv_resource_with_value => 16#0D, value => <<16#5182428F:32>>},
        #{tlv_resource_with_value => 16#0E, value => <<"+02:00">>},
        #{tlv_resource_with_value => 16#10, value => <<"U">>}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Same Device-object payload as case04, but wrapped in an enclosing
%% object-instance TLV (type 16#08, instance 0, length 16#79), as
%% produced by a Read on the object level.
case05(_Config) ->
    Data = <<16#08, 16#00, 16#79, 16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33, 16#C3, 16#03, 16#31, 16#2E, 16#30, 16#86, 16#06, 16#41, 16#00, 16#01, 16#41, 16#01, 16#05, 16#88, 16#07, 16#08, 16#42, 16#00, 16#0E, 16#D8, 16#42, 16#01, 16#13, 16#88, 16#87, 16#08, 16#41, 16#00, 16#7D, 16#42, 16#01, 16#03, 16#84, 16#C1, 16#09, 16#64, 16#C1, 16#0A, 16#0F, 16#83, 16#0B, 16#41, 16#00, 16#00, 16#C4, 16#0D, 16#51, 16#82, 16#42, 16#8F, 16#C6, 16#0E, 16#2B, 16#30, 16#32, 16#3A, 16#30, 16#30, 16#C1, 16#10, 16#55>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_object_instance => 16#00, value => [
            #{tlv_resource_with_value => 16#00, value => <<"Open Mobile Alliance">>},
            #{tlv_resource_with_value => 16#01, value => <<"Lightweight M2M Client">>},
            #{tlv_resource_with_value => 16#02, value => <<"345000123">>},
            #{tlv_resource_with_value => 16#03, value => <<"1.0">>},
            #{tlv_multiple_resource => 16#06, value => [
                #{tlv_resource_instance => 16#00, value => <<1>>},
                #{tlv_resource_instance => 16#01, value => <<5>>}
            ]},
            #{tlv_multiple_resource => 16#07, value => [
                #{tlv_resource_instance => 16#00, value => <<16#0ED8:16>>},
                #{tlv_resource_instance => 16#01, value => <<16#1388:16>>}
            ]},
            #{tlv_multiple_resource => 16#08, value => [
                #{tlv_resource_instance => 16#00, value => <<16#7d>>},
                #{tlv_resource_instance => 16#01, value => <<16#0384:16>>}
            ]},
            #{tlv_resource_with_value => 16#09, value => <<16#64>>},
            #{tlv_resource_with_value => 16#0A, value => <<16#0F>>},
            #{tlv_multiple_resource => 16#0B, value => [
                #{tlv_resource_instance => 16#00, value => <<16#00>>}
            ]},
            #{tlv_resource_with_value => 16#0D, value => <<16#5182428F:32>>},
            #{tlv_resource_with_value => 16#0E, value => <<"+02:00">>},
            #{tlv_resource_with_value => 16#10, value => <<"U">>}
        ]}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Round-trip a payload containing two object instances (ids 0 and 2),
%% each with single-value and multiple resources.
case06(_Config) ->
    %% B) Request on Multiple-Instances Object having 2 instances
    Data = <<16#08, 16#00, 16#0E, 16#C1, 16#00, 16#01, 16#C1, 16#01, 16#00, 16#83, 16#02, 16#41, 16#7F, 16#07, 16#C1, 16#03, 16#7F, 16#08, 16#02, 16#12, 16#C1, 16#00, 16#03, 16#C1, 16#01, 16#00, 16#87, 16#02, 16#41, 16#7F, 16#07, 16#61, 16#01, 16#36, 16#01, 16#C1, 16#03, 16#7F>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_object_instance => 16#00, value => [
            #{tlv_resource_with_value => 16#00, value => <<16#01>>},
            #{tlv_resource_with_value => 16#01, value => <<16#00>>},
            #{tlv_multiple_resource => 16#02, value => [
                #{tlv_resource_instance => 16#7F, value => <<16#07>>}
            ]},
            #{tlv_resource_with_value => 16#03, value => <<16#7F>>}
        ]},
        #{tlv_object_instance => 16#02, value => [
            #{tlv_resource_with_value => 16#00, value => <<16#03>>},
            #{tlv_resource_with_value => 16#01, value => <<16#00>>},
            #{tlv_multiple_resource => 16#02, value => [
                #{tlv_resource_instance => 16#7F, value => <<16#07>>},
                #{tlv_resource_instance => 16#0136, value => <<16#01>>}
            ]},
            #{tlv_resource_with_value => 16#03, value => <<16#7F>>}
        ]}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Round-trip a payload with a single object instance holding four
%% resources, including a 32-bit integer (86400 seconds lifetime).
case07(_Config) ->
    %% C) Request on Multiple-Instances Object having 1 instance only
    Data = <<16#08, 16#00, 16#0F, 16#C1, 16#00, 16#01, 16#C4, 16#01, 16#00, 16#01, 16#51, 16#80, 16#C1, 16#06, 16#01, 16#C1, 16#07, 16#55>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_object_instance => 16#00, value => [
            #{tlv_resource_with_value => 16#00, value => <<16#01>>},
            #{tlv_resource_with_value => 16#01, value => <<86400:32>>},
            #{tlv_resource_with_value => 16#06, value => <<16#01>>},
            #{tlv_resource_with_value => 16#07, value => <<$U>>}]}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Round-trip a payload containing an Object Link resource encoded as a
%% multiple resource whose instances are 4-byte object links.
case08(_Config) ->
    %% 6.4.3.3 Example of Request on an Object Instance containing an
    %% Object Link Resource
    %% Example 1) request to Object 65 Instance 0: Read /65/0
    Data = <<16#88, 16#00, 16#0C, 16#44, 16#00, 16#00, 16#42, 16#00, 16#00, 16#44, 16#01, 16#00, 16#42, 16#00, 16#01, 16#C8, 16#01, 16#0D, 16#38, 16#36, 16#31, 16#33, 16#38, 16#30, 16#30, 16#37, 16#35, 16#35, 16#35, 16#30, 16#30, 16#C4, 16#02, 16#12, 16#34, 16#56, 16#78>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_multiple_resource => 16#00, value => [
            #{tlv_resource_instance => 16#00, value => <<16#00, 16#42, 16#00, 16#00>>},
            #{tlv_resource_instance => 16#01, value => <<16#00, 16#42, 16#00, 16#01>>}
        ]},
        #{tlv_resource_with_value => 16#01, value => <<"8613800755500">>},
        #{tlv_resource_with_value => 16#02, value => <<16#12345678:32>>}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
%% Round-trip a payload with two object instances each containing an
%% Object Link resource (the last one being the 16#FFFFFFFF "no link").
case09(_Config) ->
    %% 6.4.3.3 Example of Request on an Object Instance containing an
    %% Object Link Resource
    %% Example 2) request to Object 66: Read /66: TLV payload will
    %% contain 2 Object Instances
    Data = <<16#08, 16#00, 16#26, 16#C8, 16#00, 16#0B, 16#6D, 16#79, 16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20, 16#31, 16#C8, 16#01, 16#0F, 16#49, 16#6E, 16#74, 16#65, 16#72, 16#6E, 16#65, 16#74, 16#2E, 16#31, 16#35, 16#2E, 16#32, 16#33, 16#34, 16#C4, 16#02, 16#00, 16#43, 16#00, 16#00, 16#08, 16#01, 16#26, 16#C8, 16#00, 16#0B, 16#6D, 16#79, 16#53, 16#65, 16#72, 16#76, 16#69, 16#63, 16#65, 16#20, 16#32, 16#C8, 16#01, 16#0F, 16#49, 16#6E, 16#74, 16#65, 16#72, 16#6E, 16#65, 16#74, 16#2E, 16#31, 16#35, 16#2E, 16#32, 16#33, 16#35, 16#C4, 16#02, 16#FF, 16#FF, 16#FF, 16#FF>>,
    R = emqx_lwm2m_tlv:parse(Data),
    Exp = [
        #{tlv_object_instance => 16#00, value => [
            #{tlv_resource_with_value => 16#00, value => <<"myService 1">>},
            #{tlv_resource_with_value => 16#01, value => <<"Internet.15.234">>},
            #{tlv_resource_with_value => 16#02, value => <<16#00, 16#43, 16#00, 16#00>>}
        ]},
        #{tlv_object_instance => 16#01, value => [
            #{tlv_resource_with_value => 16#00, value => <<"myService 2">>},
            #{tlv_resource_with_value => 16#01, value => <<"Internet.15.235">>},
            #{tlv_resource_with_value => 16#02, value => <<16#FF, 16#FF, 16#FF, 16#FF>>}
        ]}
    ],
    ?assertEqual(Exp, R),
    EncodedBinary = emqx_lwm2m_tlv:encode(Exp),
    ?assertEqual(EncodedBinary, Data).
|
66c1f0c2a57d076930628f5012dfe3447f336dd5893cc95cef2968b5ea8b0763 | spurious/snd-mirror | nrev.scm | NREV ( the most popular Samson box reverb )
(provide 'snd-nrev.scm)
(if (provided? 'snd)
(require snd-ws.scm)
(require sndlib-ws.scm))
(definstrument (nrev (reverb-factor 1.09) (lp-coeff 0.7) (volume 1.0))
  ;; reverb-factor controls the length of the decay -- it should not exceed (/ 1.0 .823)
  ;; lp-coeff controls the strength of the low pass filter inserted in the feedback loop
  ;; output-scale can be used to boost the reverb output
  (let ((dly-len (if (= (floor *clm-srate*) 44100)
		     #i(2467 2753 3217 3533 3877 4127 599 197 67 101 97 73 67 53 37)
		     (and (= (floor *clm-srate*) 22050)
			  #i(1237 1381 1607 1777 1949 2063 307 97 31 53 47 37 31 29 17))))
	(chan2 (> (channels *output*) 1))
	(chan4 (= (channels *output*) 4)))
    ;; no precomputed delay table for this rate: scale the 25641 Hz table
    ;; and round each entry up to the next odd prime
    (if (not dly-len)
	(let ((srscale (/ *clm-srate* 25641))
	      (next-prime (lambda (val)
			    (do ((val val (+ val 2)))
				((or (= val 2)
				     (and (odd? val)
					  (do ((i 3 (+ i 2))
					       (lim (sqrt val)))
					      ((or (= 0 (modulo val i))
						   (> i lim))
					       (> i lim)))))
				 val)))))
	  (set! dly-len #i(1433 1601 1867 2053 2251 2399 347 113 37 59 53 43 37 29 19))
	  (do ((i 0 (+ i 1)))
	      ((= i 15))
	    (let ((val (floor (* srscale (dly-len i)))))
	      (if (even? val) (set! val (+ val 1)))
	      (set! (dly-len i) (next-prime val))))))
    (let* ((len (+ (floor *clm-srate*) (framples *reverb*)))
	   (comb1 (make-comb (* .822 reverb-factor) (dly-len 0)))
	   (comb2 (make-comb (* .802 reverb-factor) (dly-len 1)))
	   (comb3 (make-comb (* .773 reverb-factor) (dly-len 2)))
	   (comb4 (make-comb (* .753 reverb-factor) (dly-len 3)))
	   (comb5 (make-comb (* .753 reverb-factor) (dly-len 4)))
	   (comb6 (make-comb (* .733 reverb-factor) (dly-len 5)))
	   (low (make-one-pole lp-coeff (- lp-coeff 1.0)))
	   (allpass1 (make-all-pass -0.700 0.700 (dly-len 6)))
	   (allpass2 (make-all-pass -0.700 0.700 (dly-len 7)))
	   (allpass3 (make-all-pass -0.700 0.700 (dly-len 8)))
	   (allpass4 (make-all-pass -0.700 0.700 (dly-len 9))) ;; 10 for quad
	   (allpass5 (make-all-pass -0.700 0.700 (dly-len 11)))
	   (allpass6 (and chan2 (make-all-pass -0.700 0.700 (dly-len 12))))
	   (allpass7 (and chan4 (make-all-pass -0.700 0.700 (dly-len 13))))
	   (allpass8 (and chan4 (make-all-pass -0.700 0.700 (dly-len 14))))
	   (filts (if (not chan2)
		      (vector allpass5)
		      (if (not chan4)
			  (vector allpass5 allpass6)
			  (vector allpass5 allpass6 allpass7 allpass8))))
	   (combs (make-comb-bank (vector comb1 comb2 comb3 comb4 comb5 comb6)))
	   (allpasses (make-all-pass-bank (vector allpass1 allpass2 allpass3))))
      (if chan4
	  (do ((i 0 (+ i 1)))
	      ((= i len))
	    (out-bank filts i
		      (all-pass allpass4
				(one-pole low
					  (all-pass-bank allpasses
							 (comb-bank combs (* volume (ina i *reverb*))))))))
	  (if chan2
	      (let ((gen1 (filts 0))
		    (gen2 (filts 1)))
		(do ((i 0 (+ i 1)))
		    ((= i len))
		  (let ((val (all-pass allpass4
				       (one-pole low
						 (all-pass-bank allpasses
								(comb-bank combs (* volume (ina i *reverb*))))))))
		    (outa i (all-pass gen1 val))
		    (outb i (all-pass gen2 val)))))
	      (let ((gen (filts 0)))
		(do ((i 0 (+ i 1)))
		    ((= i len))
		  (outa i (all-pass gen
				    (all-pass allpass4
					      (one-pole low
							(all-pass-bank allpasses
								       (comb-bank combs (* volume (ina i *reverb*)))))))))))))))
;;; (with-sound (:reverb nrev) (outa 0 .1) (outa 0 .5 *reverb*))
| null | https://raw.githubusercontent.com/spurious/snd-mirror/8e7a643c840592797c29384ffe07c87ba5c0a3eb/nrev.scm | scheme | reverb-factor controls the length of the decay -- it should not exceed (/ 1.0 .823)
lp-coeff controls the strength of the low pass filter inserted in the feedback loop
output-scale can be used to boost the reverb output | NREV ( the most popular Samson box reverb )
(provide 'snd-nrev.scm)
(if (provided? 'snd)
(require snd-ws.scm)
(require sndlib-ws.scm))
(definstrument (nrev (reverb-factor 1.09) (lp-coeff 0.7) (volume 1.0))
(let ((dly-len (if (= (floor *clm-srate*) 44100)
#i(2467 2753 3217 3533 3877 4127 599 197 67 101 97 73 67 53 37)
(and (= (floor *clm-srate*) 22050)
#i(1237 1381 1607 1777 1949 2063 307 97 31 53 47 37 31 29 17))))
(chan2 (> (channels *output*) 1))
(chan4 (= (channels *output*) 4)))
(if (not dly-len)
(let ((srscale (/ *clm-srate* 25641))
(next-prime (lambda (val)
(do ((val val (+ val 2)))
((or (= val 2)
(and (odd? val)
(do ((i 3 (+ i 2))
(lim (sqrt val)))
((or (= 0 (modulo val i))
(> i lim))
(> i lim)))))
val)))))
(set! dly-len #i(1433 1601 1867 2053 2251 2399 347 113 37 59 53 43 37 29 19))
(do ((i 0 (+ i 1)))
((= i 15))
(let ((val (floor (* srscale (dly-len i)))))
(if (even? val) (set! val (+ val 1)))
(set! (dly-len i) (next-prime val))))))
(let* ((len (+ (floor *clm-srate*) (framples *reverb*)))
(comb1 (make-comb (* .822 reverb-factor) (dly-len 0)))
(comb2 (make-comb (* .802 reverb-factor) (dly-len 1)))
(comb3 (make-comb (* .773 reverb-factor) (dly-len 2)))
(comb4 (make-comb (* .753 reverb-factor) (dly-len 3)))
(comb5 (make-comb (* .753 reverb-factor) (dly-len 4)))
(comb6 (make-comb (* .733 reverb-factor) (dly-len 5)))
(low (make-one-pole lp-coeff (- lp-coeff 1.0)))
(allpass1 (make-all-pass -0.700 0.700 (dly-len 6)))
(allpass2 (make-all-pass -0.700 0.700 (dly-len 7)))
(allpass3 (make-all-pass -0.700 0.700 (dly-len 8)))
10 for quad
(allpass5 (make-all-pass -0.700 0.700 (dly-len 11)))
(allpass6 (and chan2 (make-all-pass -0.700 0.700 (dly-len 12))))
(allpass7 (and chan4 (make-all-pass -0.700 0.700 (dly-len 13))))
(allpass8 (and chan4 (make-all-pass -0.700 0.700 (dly-len 14))))
(filts (if (not chan2)
(vector allpass5)
(if (not chan4)
(vector allpass5 allpass6)
(vector allpass5 allpass6 allpass7 allpass8))))
(combs (make-comb-bank (vector comb1 comb2 comb3 comb4 comb5 comb6)))
(allpasses (make-all-pass-bank (vector allpass1 allpass2 allpass3))))
(if chan4
(do ((i 0 (+ i 1)))
((= i len))
(out-bank filts i
(all-pass allpass4
(one-pole low
(all-pass-bank allpasses
(comb-bank combs (* volume (ina i *reverb*))))))))
(if chan2
(let ((gen1 (filts 0))
(gen2 (filts 1)))
(do ((i 0 (+ i 1)))
((= i len))
(let ((val (all-pass allpass4
(one-pole low
(all-pass-bank allpasses
(comb-bank combs (* volume (ina i *reverb*))))))))
(outa i (all-pass gen1 val))
(outb i (all-pass gen2 val)))))
(let ((gen (filts 0)))
(do ((i 0 (+ i 1)))
((= i len))
(outa i (all-pass gen
(all-pass allpass4
(one-pole low
(all-pass-bank allpasses
(comb-bank combs (* volume (ina i *reverb*)))))))))))))))
( with - sound (: reverb ) ( outa 0 .1 ) ( outa 0 .5 * reverb * ) )
|
81738e6bca3dd6025b441e85a5b453074213d16715f76bef4c7c95fabeb5934a | ghc/nofib | Infer.hs | module Infer (inferTerm) where
import Data.List(nub)
import MyList (minus)
import Type (TVarId, TConId, MonoType (..), PolyType (All),
arrow, freeTVarMono)
import Term (VarId, Term (Var, Abs, App, Let))
import Substitution (Sub, applySub, lookupSub, makeSub)
import Environment (Env, lookupEnv, extendLocal, extendGlobal,
domEnv, freeTVarEnv)
import InferMonad (Infer, thenI, returnI, guardI, getSubI,
freshI, freshesI, unifyI, substituteI)
import MaybeM
-- | Instantiate a polytype: allocate one fresh type variable per
-- quantified variable and substitute them into the body, yielding a
-- monotype.
specialiseI :: PolyType -> Infer MonoType
specialiseI (All qs body) =
  freshesI (length qs) `thenI` \fresh ->
  returnI (applySubs qs fresh body)
-- | Replace the variables @vars@ by the corresponding @replacements@
-- throughout a monotype (variables without a replacement are untouched).
applySubs :: [TVarId] -> [MonoType] -> MonoType -> MonoType
applySubs vars replacements = applySub (makeSub (zip vars replacements))
-- | Generalise a monotype to a polytype: quantify exactly those type
-- variables that are free in @tt@ but not free in the environment @aa@
-- (after applying the current substitution to the environment).
generaliseI :: Env -> MonoType -> Infer PolyType
generaliseI aa tt = getSubI `thenI` (\s ->
  -- variables still constrained by the environment (must stay monomorphic)
  let aaVars = nub (freeTVarSubEnv s aa) in
  let ttVars = nub (freeTVarMono tt) in
  -- generalise over the remainder
  let xxs = ttVars `minus` aaVars in
  returnI (All xxs tt)
  )
-- | Type variables free in the environment, seen through the current
-- substitution: each variable free in @aa@ is looked up in @s@ and the
-- free variables of the result are collected (duplicates possible).
freeTVarSubEnv :: Sub -> Env -> [TVarId]
freeTVarSubEnv s aa = concatMap (freeTVarMono . lookupSub s) (freeTVarEnv aa)
-- | Infer the monotype of a term in environment @aa@ (Milner's
-- algorithm W, written with the explicit combinators of 'InferMonad').
inferTerm :: Env -> Term -> Infer MonoType
-- Variable: the guard fails the inference (rather than crashing) for
-- unbound variables; otherwise instantiate the variable's polytype and
-- apply the current substitution to the result.
inferTerm aa (Var x) =
  (x `elem` domEnv aa) `guardI` (
  let ss = lookupEnv aa x in
  specialiseI ss `thenI` (\tt ->
  substituteI tt `thenI` (\uu ->
  returnI uu)))
-- Abstraction: bind x to a fresh variable, infer the body, then
-- re-substitute the argument type (unification during the body may have
-- refined it) before building the arrow type.
inferTerm aa (Abs x v) =
  freshI `thenI` (\xx ->
  inferTerm (extendLocal aa x xx) v `thenI` (\vv ->
  substituteI xx `thenI` (\uu ->
  returnI (uu `arrow` vv))))
-- Application: unify the function's type with (argument -> fresh
-- result) and return the substituted result variable.
inferTerm aa (App t u) =
  inferTerm aa t `thenI` (\tt ->
  inferTerm aa u `thenI` (\uu ->
  freshI `thenI` (\xx ->
  unifyI tt (uu `arrow` xx) `thenI` (\() ->
  substituteI xx `thenI` (\vv ->
  returnI vv)))))
-- Let: infer the bound term, generalise it against the environment
-- (let-polymorphism), and infer the body under the extended environment.
inferTerm aa (Let x u v) =
  inferTerm aa u `thenI` (\uu ->
  generaliseI aa uu `thenI` (\ss ->
  inferTerm (extendGlobal aa x ss) v `thenI` (\vv ->
  returnI vv)))
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/real/infer/Infer.hs | haskell | module Infer (inferTerm) where
import Data.List(nub)
import MyList (minus)
import Type (TVarId, TConId, MonoType (..), PolyType (All),
arrow, freeTVarMono)
import Term (VarId, Term (Var, Abs, App, Let))
import Substitution (Sub, applySub, lookupSub, makeSub)
import Environment (Env, lookupEnv, extendLocal, extendGlobal,
domEnv, freeTVarEnv)
import InferMonad (Infer, thenI, returnI, guardI, getSubI,
freshI, freshesI, unifyI, substituteI)
import MaybeM
specialiseI :: PolyType -> Infer MonoType
specialiseI (All xxs tt) = freshesI (length xxs) `thenI` (\yys ->
returnI (applySubs xxs yys tt))
applySubs :: [TVarId] -> [MonoType] -> MonoType -> MonoType
applySubs xxs yys tt = applySub (makeSub (zip xxs yys)) tt
generaliseI :: Env -> MonoType -> Infer PolyType
generaliseI aa tt = getSubI `thenI` (\s ->
let aaVars = nub (freeTVarSubEnv s aa) in
let ttVars = nub (freeTVarMono tt) in
let xxs = ttVars `minus` aaVars in
returnI (All xxs tt)
)
freeTVarSubEnv :: Sub -> Env -> [TVarId]
freeTVarSubEnv s aa = concat (map (freeTVarMono . lookupSub s)
(freeTVarEnv aa))
inferTerm :: Env -> Term -> Infer MonoType
inferTerm aa (Var x) =
(x `elem` domEnv aa) `guardI` (
let ss = lookupEnv aa x in
specialiseI ss `thenI` (\tt ->
substituteI tt `thenI` (\uu ->
returnI uu)))
inferTerm aa (Abs x v) =
freshI `thenI` (\xx ->
inferTerm (extendLocal aa x xx) v `thenI` (\vv ->
substituteI xx `thenI` (\uu ->
returnI (uu `arrow` vv))))
inferTerm aa (App t u) =
inferTerm aa t `thenI` (\tt ->
inferTerm aa u `thenI` (\uu ->
freshI `thenI` (\xx ->
unifyI tt (uu `arrow` xx) `thenI` (\() ->
substituteI xx `thenI` (\vv ->
returnI vv)))))
inferTerm aa (Let x u v) =
inferTerm aa u `thenI` (\uu ->
generaliseI aa uu `thenI` (\ss ->
inferTerm (extendGlobal aa x ss) v `thenI` (\vv ->
returnI vv)))
| |
cb80d64ac3ba72b0d58efebb0be4d295f9ab30cfb37f7071d6b7897c7f2b281c | otherjoel/tabloid | test-program.rkt | #lang tabloid
YOU WON'T WANT TO MISS 'Hello, World!'
DISCOVER HOW TO factorial WITH n
RUMOR HAS IT
WHAT IF n IS ACTUALLY 0
SHOCKING DEVELOPMENT 1
LIES!
SHOCKING DEVELOPMENT
n TIMES factorial OF n MINUS 1
END OF STORY
EXPERTS CLAIM result TO BE factorial OF 10
YOU WON'T WANT TO MISS 'Result is'
YOU WON'T WANT TO MISS result
PLEASE LIKE AND SUBSCRIBE | null | https://raw.githubusercontent.com/otherjoel/tabloid/6ae438880b050686bc1e437f95f7635026016939/test-program.rkt | racket | #lang tabloid
YOU WON'T WANT TO MISS 'Hello, World!'
DISCOVER HOW TO factorial WITH n
RUMOR HAS IT
WHAT IF n IS ACTUALLY 0
SHOCKING DEVELOPMENT 1
LIES!
SHOCKING DEVELOPMENT
n TIMES factorial OF n MINUS 1
END OF STORY
EXPERTS CLAIM result TO BE factorial OF 10
YOU WON'T WANT TO MISS 'Result is'
YOU WON'T WANT TO MISS result
PLEASE LIKE AND SUBSCRIBE | |
f8ffa1faa83aa524d17a552e7c11cafb529a7d77288a7ee5bd7dcc6bede3c7b0 | mirage/ocaml-dns | dns_server_mirage.ml | ( c ) 2018 , all rights reserved
open Lwt.Infix
let src = Logs.Src.create "dns_server_mirage" ~doc:"effectful DNS server"
module Log = (val Logs.src_log src : Logs.LOG)
module Make (P : Mirage_clock.PCLOCK) (M : Mirage_clock.MCLOCK) (TIME : Mirage_time.S) (S : Tcpip.Stack.V4V6) = struct
  (* Metrics: one counter per observable server event.  [inc ev] bumps the
     counter whose label is the short description produced by [f]. *)
  let inc =
    let f = function
      | `Udp_query -> "udp queries"
      | `Udp_answer -> "udp answers"
      | `Tcp_query -> "tcp queries"
      | `Tcp_answer -> "tcp answers"
      | `Tcp -> "tcp-server"
      | `Tcp_client -> "tcp-client"
      | `Tcp_keep -> "keep tcp flow"
      | `Notify -> "request"
      | `On_update -> "on update"
      | `On_notify -> "on notify"
      | `Tcp_cache_add -> "tcp cache add"
      | `Tcp_cache_drop -> "tcp cache drop"
    in
    let src = Dns.counter_metrics ~f "dns-server-mirage" in
    (fun x -> Metrics.add src (fun x -> x) (fun d -> d x))
module Dns = Dns_mirage.Make(S)
module T = S.TCP
  (* Run an authoritative (primary) DNS server on [stack], serving UDP and
     TCP on [port].  [on_update] fires whenever the zone data changes,
     [on_notify] when a (signed) NOTIFY arrives, and [timer] is the period
     in seconds of the housekeeping loop that drives retransmissions. *)
  let primary ?(on_update = fun ~old:_ ~authenticated_key:_ ~update_source:_ _ -> Lwt.return_unit) ?(on_notify = fun _ _ -> Lwt.return None) ?(timer = 2) ?(port = 53) stack t =
    (* mutable server state, plus a cache of open TCP flows per peer IP *)
    let state = ref t in
    let tcp_out = ref Ipaddr.Map.empty in
    (* forget a peer's cached flow and tell the server logic it is gone *)
    let drop ip =
      if Ipaddr.Map.mem ip !tcp_out then begin
        inc `Tcp_cache_drop;
        tcp_out := Ipaddr.Map.remove ip !tcp_out ;
        state := Dns_server.Primary.closed !state ip
      end
    in
    (* open a TCP connection to [ip]:53, cache it and start its reader *)
    let connect recv_task ip =
      inc `Tcp_client;
      let dport = 53 in
      Log.debug (fun m -> m "creating connection to %a:%d" Ipaddr.pp ip dport) ;
      T.create_connection (S.tcp stack) (ip, dport) >>= function
      | Error e ->
        Log.err (fun m -> m "error %a while establishing tcp connection to %a:%d"
                    T.pp_error e Ipaddr.pp ip port) ;
        Lwt.return (Error ())
      | Ok flow ->
        inc `Tcp_cache_add;
        tcp_out := Ipaddr.Map.add ip flow !tcp_out ;
        Lwt.async (recv_task ip dport flow);
        Lwt.return (Ok flow)
    in
    (* send notification frames over a cached (or fresh) TCP flow,
       falling back to UDP if TCP cannot be established *)
    let send_notify recv_task (ip, data) =
      inc `Notify;
      let connect_and_send ip =
        connect recv_task ip >>= function
        | Ok flow -> Dns.send_tcp_multiple flow data
        | Error () -> Lwt.return (Error ())
      in
      (match Ipaddr.Map.find_opt ip !tcp_out with
       | None -> connect_and_send ip
       | Some f -> Dns.send_tcp_multiple f data >>= function
         | Ok () -> Lwt.return (Ok ())
         | Error () -> drop ip ; connect_and_send ip) >>= function
      | Ok () -> Lwt.return_unit
      | Error () ->
        drop ip;
        Lwt_list.iter_p (Dns.send_udp stack port ip 53) data
    in
    (* install [t] as the new state; invoke [on_update] only when the
       zone data actually differs from the previous state *)
    let maybe_update_state key ip t =
      let old = !state in
      let trie server = Dns_server.Primary.data server in
      state := t;
      if Dns_trie.equal (trie t) (trie old) then
        Lwt.return_unit
      else begin
        inc `On_update ; on_update ~old:(trie old) ~authenticated_key:key ~update_source:ip t
      end
    (* hand a received NOTIFY to the callback; if it returns fresh zone
       data/keys, install them and send out the resulting notifications *)
    and maybe_notify recv_task t now ts = function
      | None -> Lwt.return_unit
      | Some n -> inc `On_notify ; on_notify n t >>= function
        | None -> Lwt.return_unit
        | Some (trie, keys) ->
          let state', outs = Dns_server.Primary.with_keys t now ts keys in
          let state'', outs' = Dns_server.Primary.with_data state' now ts trie in
          state := state'';
          Lwt_list.iter_p (send_notify recv_task) (outs @ outs')
    in
    (* per-TCP-flow reader: decode frames, feed them to the server logic,
       write back answers and fan out notifications *)
    let rec recv_task ip port flow () =
      let f = Dns.of_flow flow in
      let rec loop () =
        Dns.read_tcp f >>= function
        | Error () -> drop ip ; Lwt.return_unit
        | Ok data ->
          inc `Tcp_query;
          let now = Ptime.v (P.now_d_ps ()) in
          let ts = M.elapsed_ns () in
          let t, answers, notify, n, key =
            Dns_server.Primary.handle_buf !state now ts `Tcp ip port data
          in
          (* `Keep means: cache this flow for future zone transfers *)
          let n' = match n with
            | Some `Keep -> inc `Tcp_cache_add ; inc `Tcp_keep ; tcp_out := Ipaddr.Map.add ip flow !tcp_out ; None
            | Some `Notify soa -> Some (`Notify soa)
            | Some `Signed_notify soa -> Some (`Signed_notify soa)
            | None -> None
          in
          maybe_update_state key ip t >>= fun () ->
          maybe_notify recv_task t now ts n' >>= fun () ->
          if answers <> [] then inc `Tcp_answer;
          (Dns.send_tcp_multiple flow answers >|= function
            | Ok () -> ()
            | Error () -> drop ip) >>= fun () ->
          Lwt_list.iter_p (send_notify recv_task) notify >>= fun () ->
          loop ()
      in
      loop ()
    in
    let tcp_cb flow =
      inc `Tcp;
      let dst_ip, dst_port = T.dst flow in
      recv_task dst_ip dst_port flow ()
    in
    S.TCP.listen (S.tcp stack) ~port tcp_cb ;
    Log.info (fun m -> m "DNS server listening on TCP port %d" port) ;
    (* UDP path: same handling, but flows are never cached (`Keep is a no-op) *)
    let udp_cb ~src ~dst:_ ~src_port buf =
      inc `Udp_query;
      let now = Ptime.v (P.now_d_ps ()) in
      let ts = M.elapsed_ns () in
      let t, answers, notify, n, key =
        Dns_server.Primary.handle_buf !state now ts `Udp src src_port buf
      in
      let n' = match n with
        | None | Some `Keep -> None
        | Some `Notify soa -> Some (`Notify soa)
        | Some `Signed_notify soa -> Some (`Signed_notify soa)
      in
      maybe_update_state key src t >>= fun () ->
      maybe_notify recv_task t now ts n' >>= fun () ->
      if answers <> [] then inc `Udp_answer;
      (Lwt_list.iter_s (Dns.send_udp stack port src src_port) answers) >>= fun () ->
      Lwt_list.iter_p (send_notify recv_task) notify
    in
    S.UDP.listen (S.udp stack) ~port udp_cb ;
    Log.info (fun m -> m "DNS server listening on UDP port %d" port) ;
    (* periodic housekeeping: retransmit outstanding notifications *)
    let rec time () =
      let now = Ptime.v (P.now_d_ps ()) in
      let ts = M.elapsed_ns () in
      let t, notifies = Dns_server.Primary.timer !state now ts in
      maybe_update_state None Ipaddr.(V4 V4.localhost) t >>= fun () ->
      Lwt_list.iter_p (send_notify recv_task) notifies >>= fun () ->
      TIME.sleep_ns (Duration.of_sec timer) >>= fun () ->
      time ()
    in
    Lwt.async time
  (* Run a secondary (slave) DNS server on [stack]: it requests zone
     transfers from its primaries over TCP, answers queries on UDP/TCP
     [port], and calls [on_update] when its copy of the zone data changes.
     [timer] is the period in seconds of the refresh/retry loop. *)
  let secondary ?(on_update = fun ~old:_ _trie -> Lwt.return_unit) ?(timer = 5) ?(port = 53) stack t =
    (* mutable server state, plus a cache of open TCP flows per peer IP *)
    let state = ref t in
    let tcp_out = ref Ipaddr.Map.empty in
    (* install [t]; invoke [on_update] only on an actual zone-data change *)
    let maybe_update_state t =
      let old = !state in
      let trie server = Dns_server.Secondary.data server in
      state := t ;
      if Dns_trie.equal (trie t) (trie old) then
        Lwt.return_unit
      else begin
        inc `On_update ; on_update ~old:(trie old) t
      end
    in
    (* close a peer's flow, inform the server logic and send whatever
       follow-up requests it produces (e.g. a fresh SOA query) *)
    let rec close ip =
      (match Ipaddr.Map.find_opt ip !tcp_out with
       | None -> Lwt.return_unit
       | Some f -> T.close f) >>= fun () ->
      tcp_out := Ipaddr.Map.remove ip !tcp_out ;
      let now = Ptime.v (P.now_d_ps ()) in
      let elapsed = M.elapsed_ns () in
      let state', out = Dns_server.Secondary.closed !state now elapsed ip in
      state := state' ;
      request (ip, out)
    (* reader loop for a flow we initiated towards a primary *)
    and read_and_handle ip f =
      Dns.read_tcp f >>= function
      | Error () ->
        Log.debug (fun m -> m "removing %a from tcp_out" Ipaddr.pp ip) ;
        close ip
      | Ok data ->
        inc `Tcp_query;
        let now = Ptime.v (P.now_d_ps ()) in
        let elapsed = M.elapsed_ns () in
        let t, answer, out =
          Dns_server.Secondary.handle_buf !state now elapsed `Tcp ip data
        in
        maybe_update_state t >>= fun () ->
        (match answer with
         | None -> Lwt.return (Ok ())
         | Some x ->
           inc `Tcp_answer;
           Dns.send_tcp (Dns.flow f) x >>= function
           | Error () ->
             Log.debug (fun m -> m "removing %a from tcp_out" Ipaddr.pp ip) ;
             close ip >|= fun () -> Error ()
           | Ok () -> Lwt.return (Ok ())) >>= fun r ->
        (match out with
         | None -> Lwt.return_unit
         | Some (ip, data) -> request_one (ip, data)) >>= fun () ->
        match r with
        | Ok () -> read_and_handle ip f
        | Error () -> Lwt.return_unit
    (* send frames to a primary, reusing the cached flow when possible;
       on failure the flow is torn down and (once) retried *)
    and request (ip, data) =
      inc `Notify;
      let dport = 53 in
      match Ipaddr.Map.find_opt ip !tcp_out with
      | None ->
        begin
          Log.debug (fun m -> m "creating connection to %a:%d" Ipaddr.pp ip dport) ;
          inc `Tcp_client;
          T.create_connection (S.tcp stack) (ip, dport) >>= function
          | Error e ->
            Log.err (fun m -> m "error %a while establishing tcp connection to %a:%d"
                        T.pp_error e Ipaddr.pp ip dport) ;
            close ip
          | Ok flow ->
            tcp_out := Ipaddr.Map.add ip flow !tcp_out ;
            Dns.send_tcp_multiple flow data >>= function
            | Error () -> close ip
            | Ok () ->
              Lwt.async (fun () -> read_and_handle ip (Dns.of_flow flow)) ;
              Lwt.return_unit
        end
      | Some flow ->
        Dns.send_tcp_multiple flow data >>= function
        | Ok () -> Lwt.return_unit
        | Error () ->
          Log.warn (fun m -> m "closing tcp flow to %a:%d, retrying request"
                       Ipaddr.pp ip dport) ;
          T.close flow >>= fun () ->
          tcp_out := Ipaddr.Map.remove ip !tcp_out ;
          request (ip, data)
    and request_one (ip, d) = request (ip, [ d ])
    in
    (* UDP query path *)
    let udp_cb ~src ~dst:_ ~src_port buf =
      Log.debug (fun m -> m "udp frame from %a:%d" Ipaddr.pp src src_port) ;
      inc `Udp_query;
      let now = Ptime.v (P.now_d_ps ()) in
      let elapsed = M.elapsed_ns () in
      let t, answer, out =
        Dns_server.Secondary.handle_buf !state now elapsed `Udp src buf
      in
      maybe_update_state t >>= fun () ->
      (match out with
       | None -> ()
       | Some (ip, cs) -> Lwt.async (fun () -> request_one (ip, cs))) ;
      match answer with
      | None -> Lwt.return_unit
      | Some out -> inc `Udp_answer; Dns.send_udp stack port src src_port out
    in
    S.UDP.listen (S.udp stack) ~port udp_cb ;
    Log.info (fun m -> m "secondary DNS listening on UDP port %d" port) ;
    (* TCP query path: incoming flows are cached so the primary can reuse
       them (e.g. for NOTIFY) *)
    let tcp_cb flow =
      inc `Tcp;
      let dst_ip, dst_port = T.dst flow in
      tcp_out := Ipaddr.Map.add dst_ip flow !tcp_out ;
      Log.debug (fun m -> m "tcp connection from %a:%d" Ipaddr.pp dst_ip dst_port) ;
      let f = Dns.of_flow flow in
      let rec loop () =
        Dns.read_tcp f >>= function
        | Error () -> tcp_out := Ipaddr.Map.remove dst_ip !tcp_out ; Lwt.return_unit
        | Ok data ->
          inc `Tcp_query;
          let now = Ptime.v (P.now_d_ps ()) in
          let elapsed = M.elapsed_ns () in
          let t, answer, out =
            Dns_server.Secondary.handle_buf !state now elapsed `Tcp dst_ip data
          in
          maybe_update_state t >>= fun () ->
          (match out with
           | None -> ()
           | Some (ip, cs) -> Lwt.async (fun () -> request_one (ip, cs)));
          match answer with
          | None ->
            Log.warn (fun m -> m "no TCP output") ;
            loop ()
          | Some data ->
            inc `Tcp_answer;
            Dns.send_tcp flow data >>= function
            | Ok () -> loop ()
            | Error () -> tcp_out := Ipaddr.Map.remove dst_ip !tcp_out ; Lwt.return_unit
      in
      loop ()
    in
    S.TCP.listen (S.tcp stack) ~port tcp_cb ;
    Log.info (fun m -> m "secondary DNS listening on TCP port %d" port) ;
    (* periodic refresh/retry loop: ask primaries for zone updates *)
    let rec time () =
      let now = Ptime.v (P.now_d_ps ()) in
      let elapsed = M.elapsed_ns () in
      let t, out = Dns_server.Secondary.timer !state now elapsed in
      maybe_update_state t >>= fun () ->
      List.iter (fun (ip, cs) ->
          Lwt.async (fun () -> request (ip, cs))) out ;
      TIME.sleep_ns (Duration.of_sec timer) >>= fun () ->
      time ()
    in
    Lwt.async time
end
| null | https://raw.githubusercontent.com/mirage/ocaml-dns/e8703c5e7d679b5242b064c009bd0f195422883d/mirage/server/dns_server_mirage.ml | ocaml | ( c ) 2018 , all rights reserved
open Lwt.Infix
let src = Logs.Src.create "dns_server_mirage" ~doc:"effectful DNS server"
module Log = (val Logs.src_log src : Logs.LOG)
module Make (P : Mirage_clock.PCLOCK) (M : Mirage_clock.MCLOCK) (TIME : Mirage_time.S) (S : Tcpip.Stack.V4V6) = struct
let inc =
let f = function
| `Udp_query -> "udp queries"
| `Udp_answer -> "udp answers"
| `Tcp_query -> "tcp queries"
| `Tcp_answer -> "tcp answers"
| `Tcp -> "tcp-server"
| `Tcp_client -> "tcp-client"
| `Tcp_keep -> "keep tcp flow"
| `Notify -> "request"
| `On_update -> "on update"
| `On_notify -> "on notify"
| `Tcp_cache_add -> "tcp cache add"
| `Tcp_cache_drop -> "tcp cache drop"
in
let src = Dns.counter_metrics ~f "dns-server-mirage" in
(fun x -> Metrics.add src (fun x -> x) (fun d -> d x))
module Dns = Dns_mirage.Make(S)
module T = S.TCP
let primary ?(on_update = fun ~old:_ ~authenticated_key:_ ~update_source:_ _ -> Lwt.return_unit) ?(on_notify = fun _ _ -> Lwt.return None) ?(timer = 2) ?(port = 53) stack t =
let state = ref t in
let tcp_out = ref Ipaddr.Map.empty in
let drop ip =
if Ipaddr.Map.mem ip !tcp_out then begin
inc `Tcp_cache_drop;
tcp_out := Ipaddr.Map.remove ip !tcp_out ;
state := Dns_server.Primary.closed !state ip
end
in
let connect recv_task ip =
inc `Tcp_client;
let dport = 53 in
Log.debug (fun m -> m "creating connection to %a:%d" Ipaddr.pp ip dport) ;
T.create_connection (S.tcp stack) (ip, dport) >>= function
| Error e ->
Log.err (fun m -> m "error %a while establishing tcp connection to %a:%d"
T.pp_error e Ipaddr.pp ip port) ;
Lwt.return (Error ())
| Ok flow ->
inc `Tcp_cache_add;
tcp_out := Ipaddr.Map.add ip flow !tcp_out ;
Lwt.async (recv_task ip dport flow);
Lwt.return (Ok flow)
in
let send_notify recv_task (ip, data) =
inc `Notify;
let connect_and_send ip =
connect recv_task ip >>= function
| Ok flow -> Dns.send_tcp_multiple flow data
| Error () -> Lwt.return (Error ())
in
(match Ipaddr.Map.find_opt ip !tcp_out with
| None -> connect_and_send ip
| Some f -> Dns.send_tcp_multiple f data >>= function
| Ok () -> Lwt.return (Ok ())
| Error () -> drop ip ; connect_and_send ip) >>= function
| Ok () -> Lwt.return_unit
| Error () ->
drop ip;
Lwt_list.iter_p (Dns.send_udp stack port ip 53) data
in
let maybe_update_state key ip t =
let old = !state in
let trie server = Dns_server.Primary.data server in
state := t;
if Dns_trie.equal (trie t) (trie old) then
Lwt.return_unit
else begin
inc `On_update ; on_update ~old:(trie old) ~authenticated_key:key ~update_source:ip t
end
and maybe_notify recv_task t now ts = function
| None -> Lwt.return_unit
| Some n -> inc `On_notify ; on_notify n t >>= function
| None -> Lwt.return_unit
| Some (trie, keys) ->
let state', outs = Dns_server.Primary.with_keys t now ts keys in
let state'', outs' = Dns_server.Primary.with_data state' now ts trie in
state := state'';
Lwt_list.iter_p (send_notify recv_task) (outs @ outs')
in
let rec recv_task ip port flow () =
let f = Dns.of_flow flow in
let rec loop () =
Dns.read_tcp f >>= function
| Error () -> drop ip ; Lwt.return_unit
| Ok data ->
inc `Tcp_query;
let now = Ptime.v (P.now_d_ps ()) in
let ts = M.elapsed_ns () in
let t, answers, notify, n, key =
Dns_server.Primary.handle_buf !state now ts `Tcp ip port data
in
let n' = match n with
| Some `Keep -> inc `Tcp_cache_add ; inc `Tcp_keep ; tcp_out := Ipaddr.Map.add ip flow !tcp_out ; None
| Some `Notify soa -> Some (`Notify soa)
| Some `Signed_notify soa -> Some (`Signed_notify soa)
| None -> None
in
maybe_update_state key ip t >>= fun () ->
maybe_notify recv_task t now ts n' >>= fun () ->
if answers <> [] then inc `Tcp_answer;
(Dns.send_tcp_multiple flow answers >|= function
| Ok () -> ()
| Error () -> drop ip) >>= fun () ->
Lwt_list.iter_p (send_notify recv_task) notify >>= fun () ->
loop ()
in
loop ()
in
let tcp_cb flow =
inc `Tcp;
let dst_ip, dst_port = T.dst flow in
recv_task dst_ip dst_port flow ()
in
S.TCP.listen (S.tcp stack) ~port tcp_cb ;
Log.info (fun m -> m "DNS server listening on TCP port %d" port) ;
let udp_cb ~src ~dst:_ ~src_port buf =
inc `Udp_query;
let now = Ptime.v (P.now_d_ps ()) in
let ts = M.elapsed_ns () in
let t, answers, notify, n, key =
Dns_server.Primary.handle_buf !state now ts `Udp src src_port buf
in
let n' = match n with
| None | Some `Keep -> None
| Some `Notify soa -> Some (`Notify soa)
| Some `Signed_notify soa -> Some (`Signed_notify soa)
in
maybe_update_state key src t >>= fun () ->
maybe_notify recv_task t now ts n' >>= fun () ->
if answers <> [] then inc `Udp_answer;
(Lwt_list.iter_s (Dns.send_udp stack port src src_port) answers) >>= fun () ->
Lwt_list.iter_p (send_notify recv_task) notify
in
S.UDP.listen (S.udp stack) ~port udp_cb ;
Log.info (fun m -> m "DNS server listening on UDP port %d" port) ;
let rec time () =
let now = Ptime.v (P.now_d_ps ()) in
let ts = M.elapsed_ns () in
let t, notifies = Dns_server.Primary.timer !state now ts in
maybe_update_state None Ipaddr.(V4 V4.localhost) t >>= fun () ->
Lwt_list.iter_p (send_notify recv_task) notifies >>= fun () ->
TIME.sleep_ns (Duration.of_sec timer) >>= fun () ->
time ()
in
Lwt.async time
let secondary ?(on_update = fun ~old:_ _trie -> Lwt.return_unit) ?(timer = 5) ?(port = 53) stack t =
let state = ref t in
let tcp_out = ref Ipaddr.Map.empty in
let maybe_update_state t =
let old = !state in
let trie server = Dns_server.Secondary.data server in
state := t ;
if Dns_trie.equal (trie t) (trie old) then
Lwt.return_unit
else begin
inc `On_update ; on_update ~old:(trie old) t
end
in
let rec close ip =
(match Ipaddr.Map.find_opt ip !tcp_out with
| None -> Lwt.return_unit
| Some f -> T.close f) >>= fun () ->
tcp_out := Ipaddr.Map.remove ip !tcp_out ;
let now = Ptime.v (P.now_d_ps ()) in
let elapsed = M.elapsed_ns () in
let state', out = Dns_server.Secondary.closed !state now elapsed ip in
state := state' ;
request (ip, out)
and read_and_handle ip f =
Dns.read_tcp f >>= function
| Error () ->
Log.debug (fun m -> m "removing %a from tcp_out" Ipaddr.pp ip) ;
close ip
| Ok data ->
inc `Tcp_query;
let now = Ptime.v (P.now_d_ps ()) in
let elapsed = M.elapsed_ns () in
let t, answer, out =
Dns_server.Secondary.handle_buf !state now elapsed `Tcp ip data
in
maybe_update_state t >>= fun () ->
(match answer with
| None -> Lwt.return (Ok ())
| Some x ->
inc `Tcp_answer;
Dns.send_tcp (Dns.flow f) x >>= function
| Error () ->
Log.debug (fun m -> m "removing %a from tcp_out" Ipaddr.pp ip) ;
close ip >|= fun () -> Error ()
| Ok () -> Lwt.return (Ok ())) >>= fun r ->
(match out with
| None -> Lwt.return_unit
| Some (ip, data) -> request_one (ip, data)) >>= fun () ->
match r with
| Ok () -> read_and_handle ip f
| Error () -> Lwt.return_unit
and request (ip, data) =
inc `Notify;
let dport = 53 in
match Ipaddr.Map.find_opt ip !tcp_out with
| None ->
begin
Log.debug (fun m -> m "creating connection to %a:%d" Ipaddr.pp ip dport) ;
inc `Tcp_client;
T.create_connection (S.tcp stack) (ip, dport) >>= function
| Error e ->
Log.err (fun m -> m "error %a while establishing tcp connection to %a:%d"
T.pp_error e Ipaddr.pp ip dport) ;
close ip
| Ok flow ->
tcp_out := Ipaddr.Map.add ip flow !tcp_out ;
Dns.send_tcp_multiple flow data >>= function
| Error () -> close ip
| Ok () ->
Lwt.async (fun () -> read_and_handle ip (Dns.of_flow flow)) ;
Lwt.return_unit
end
| Some flow ->
Dns.send_tcp_multiple flow data >>= function
| Ok () -> Lwt.return_unit
| Error () ->
Log.warn (fun m -> m "closing tcp flow to %a:%d, retrying request"
Ipaddr.pp ip dport) ;
T.close flow >>= fun () ->
tcp_out := Ipaddr.Map.remove ip !tcp_out ;
request (ip, data)
and request_one (ip, d) = request (ip, [ d ])
in
let udp_cb ~src ~dst:_ ~src_port buf =
Log.debug (fun m -> m "udp frame from %a:%d" Ipaddr.pp src src_port) ;
inc `Udp_query;
let now = Ptime.v (P.now_d_ps ()) in
let elapsed = M.elapsed_ns () in
let t, answer, out =
Dns_server.Secondary.handle_buf !state now elapsed `Udp src buf
in
maybe_update_state t >>= fun () ->
(match out with
| None -> ()
| Some (ip, cs) -> Lwt.async (fun () -> request_one (ip, cs))) ;
match answer with
| None -> Lwt.return_unit
| Some out -> inc `Udp_answer; Dns.send_udp stack port src src_port out
in
S.UDP.listen (S.udp stack) ~port udp_cb ;
Log.info (fun m -> m "secondary DNS listening on UDP port %d" port) ;
let tcp_cb flow =
inc `Tcp;
let dst_ip, dst_port = T.dst flow in
tcp_out := Ipaddr.Map.add dst_ip flow !tcp_out ;
Log.debug (fun m -> m "tcp connection from %a:%d" Ipaddr.pp dst_ip dst_port) ;
let f = Dns.of_flow flow in
let rec loop () =
Dns.read_tcp f >>= function
| Error () -> tcp_out := Ipaddr.Map.remove dst_ip !tcp_out ; Lwt.return_unit
| Ok data ->
inc `Tcp_query;
let now = Ptime.v (P.now_d_ps ()) in
let elapsed = M.elapsed_ns () in
let t, answer, out =
Dns_server.Secondary.handle_buf !state now elapsed `Tcp dst_ip data
in
maybe_update_state t >>= fun () ->
(match out with
| None -> ()
| Some (ip, cs) -> Lwt.async (fun () -> request_one (ip, cs)));
match answer with
| None ->
Log.warn (fun m -> m "no TCP output") ;
loop ()
| Some data ->
inc `Tcp_answer;
Dns.send_tcp flow data >>= function
| Ok () -> loop ()
| Error () -> tcp_out := Ipaddr.Map.remove dst_ip !tcp_out ; Lwt.return_unit
in
loop ()
in
S.TCP.listen (S.tcp stack) ~port tcp_cb ;
Log.info (fun m -> m "secondary DNS listening on TCP port %d" port) ;
let rec time () =
let now = Ptime.v (P.now_d_ps ()) in
let elapsed = M.elapsed_ns () in
let t, out = Dns_server.Secondary.timer !state now elapsed in
maybe_update_state t >>= fun () ->
List.iter (fun (ip, cs) ->
Lwt.async (fun () -> request (ip, cs))) out ;
TIME.sleep_ns (Duration.of_sec timer) >>= fun () ->
time ()
in
Lwt.async time
end
| |
96df0e02035382819ef7375a489ed443dad2d4f85c94453ed91ff1723ef97ac9 | monadicsystems/okapi | Main.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeApplications #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE UnicodeSyntax #
module Main where
import Chess
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.STM.TVar
import Control.Monad.Extra
import Control.Monad.IO.Class
import Control.Monad.Reader.Class
import Control.Monad.Trans.Reader hiding (ask, asks)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text
import Data.Time
import GHC.Generics
import Lucid
import Lucid.Base
import Lucid.Htmx
import Okapi
import qualified SlaveThread
import Text.InterpolatedString.Perl6
import Web.FormUrlEncoded
import Control.Monad.Combinators
type Okapi a = OkapiT App a
newtype App a = App {runApp :: ReaderT (TVar Env) IO a}
deriving newtype
( Functor,
Applicative,
Monad,
MonadReader (TVar Env),
MonadIO
)
data Env = Env
{ envWaitPool :: WaitPool,
envConfirmedPool :: ConfirmedPool,
envMatches :: Set Match,
envEventSource :: Event
}
newtype Match = Match (Text, Text) deriving (Show, Ord)
instance Eq Match where
(Match (p1, p2)) == (Match (p1', p2')) =
p1 == p1' && p2 == p2' || p1 == p2' && p2 == p1'
newtype WaitPool = WaitPool {unWaitPool :: Set Text} -- todo: Add timestamp
newtype ConfirmedPool = ConfirmedPool {unConfirmedPool :: Set Text}
data Player = Player {playerName :: Text} deriving (Eq, Show, Generic, FromForm)
type MonadApp m =
( Monad m,
MonadReader Env m,
MonadIO m
)
main :: IO ()
main = do
print "Running chess app"
let
newEnvTVar :: IO (TVar Env)
newEnvTVar = do
eventSource <- Okapi.newEventSource
newTVarIO $ Env (WaitPool mempty) (ConfirmedPool mempty) mempty eventSource
hoistApp :: TVar Env -> App a -> IO a
hoistApp envTVar app = runReaderT (runApp app) envTVar
envTVar <- newEnvTVar
SlaveThread.fork $ forever $ do
threadDelay 1000000
confirmer envTVar
matchmaker envTVar
Okapi.run (hoistApp envTVar) 3000 chess
confirmer :: TVar Env -> IO ()
confirmer = sendConfirmMessages
where
sendConfirmMessages :: TVar Env -> IO ()
sendConfirmMessages envRef = do
waitPool <- unWaitPool <$> atomically (readWaitPool envRef)
if Set.null waitPool
then pure ()
else do
eventSource <- atomically $ readEventSource envRef
forM_ (Set.toList waitPool) (sendConfirmMessage eventSource)
sendConfirmMessage :: Event -> Text -> IO ()
sendConfirmMessage eventSource playerName = Okapi.sendEvent eventSource $ Event (Just $ "confirm-" <> playerName) Nothing ""
matchmaker :: TVar Env -> IO ()
matchmaker = tryNewMatch
where
tryNewMatch :: TVar Env -> IO ()
tryNewMatch envRef = do
confirmedPool <- unConfirmedPool <$> atomically (readConfirmedPool envRef)
at least 2 players confirmed
then do
let p1 = Set.elemAt 0 confirmedPool -- get p1
p2 = Set.elemAt 1 confirmedPool -- get p2
atomically $ deletePlayerFromConfirmedPool envRef p1 -- delete p1 from pool
atomically $ deletePlayerFromConfirmedPool envRef p2 -- delete p2 from pool
atomically $ modifyMatches (Set.insert $ Match (p1, p2)) envRef -- add new match
eventSource <- atomically $ readEventSource envRef
sendStartEvents eventSource p1 p2
else pure ()
sendStartEvents :: Event -> Text -> Text -> IO ()
sendStartEvents eventSource p1Name p2Name = do
let event1 = Event (Just $ "init-" <> p1Name) Nothing $ renderBS $ toHtml startingBoard
event2 = Event (Just $ "init-" <> p2Name) Nothing $ renderBS $ toHtml startingBoard
Okapi.sendEvent eventSource event1
Okapi.sendEvent eventSource event2
addPlayerToWaitPool :: TVar Env -> Text -> STM ()
addPlayerToWaitPool envRef playerName = modifyWaitPool (WaitPool . Set.insert playerName . unWaitPool) envRef
addPlayerToConfirmedPool :: TVar Env -> Text -> STM ()
addPlayerToConfirmedPool envRef playerName = modifyConfirmedPool (ConfirmedPool . Set.insert playerName . unConfirmedPool) envRef
deletePlayerFromWaitPool :: TVar Env -> Text -> STM ()
deletePlayerFromWaitPool envRef playerName = modifyWaitPool (WaitPool . Set.delete playerName . unWaitPool) envRef
deletePlayerFromConfirmedPool :: TVar Env -> Text -> STM ()
deletePlayerFromConfirmedPool envRef playerName = modifyConfirmedPool (ConfirmedPool . Set.delete playerName . unConfirmedPool) envRef
readFromEnvTVar :: (Env -> a) -> TVar Env -> STM a
readFromEnvTVar f envTVar = do
env <- readTVar envTVar
pure $ f env
readWaitPool :: TVar Env -> STM WaitPool
readWaitPool = readFromEnvTVar envWaitPool
readConfirmedPool :: TVar Env -> STM ConfirmedPool
readConfirmedPool = readFromEnvTVar envConfirmedPool
readMatches :: TVar Env -> STM (Set Match)
readMatches = readFromEnvTVar envMatches
readEventSource :: TVar Env -> STM Event
readEventSource = readFromEnvTVar envEventSource
modfyEnvTVar :: (Env -> Env) -> TVar Env -> STM ()
modfyEnvTVar f envTVar = modifyTVar' envTVar f
modifyWaitPool :: (WaitPool -> WaitPool) -> TVar Env -> STM ()
modifyWaitPool f = modfyEnvTVar (\env -> env {envWaitPool = f $ envWaitPool env})
modifyConfirmedPool :: (ConfirmedPool -> ConfirmedPool) -> TVar Env -> STM ()
modifyConfirmedPool f = modfyEnvTVar (\env -> env {envConfirmedPool = f $ envConfirmedPool env})
modifyMatches :: (Set Match -> Set Match) -> TVar Env -> STM ()
modifyMatches f = modfyEnvTVar (\env -> env {envMatches = f $ envMatches env})
newtype Wrap a = Wrap a
instance ToHtml a => ToHtml (Wrap a) where
toHtml (Wrap inner) = do
doctype_
html_ $ do
head_ $ do
meta_ [charset_ "UTF-8"]
meta_ [name_ "viewport", content_ "width=device-width, initial-scale=1.0"]
title_ "Simple Chess"
script_ [src_ ""] ("" :: Text)
useHtmx
useHtmxExtension "sse"
body_ $ do
main_ [class_ "container mx-auto px-40 my-auto"] $ do
toHtml inner
toHtmlRaw = toHtml
data Home = Home
instance ToHtml Home where
toHtml Home = do
h1_ [] "Hyperchess"
div_ [] $ do
div_ [class_ "grid grid-cols-3 gap-10"] $ do
div_ [class_ "rounded-md flex flex-col"] $ do
h4_ "👁️ Watch Game"
form_ [class_ "flex flex-col"] $ do
select_ [id_ "stream"] $ do
option_ "John ⚔️ Bob"
option_ "Carol ⚔️ Bob"
option_ "John ⚔️ Bob"
button_ [type_ "submit", class_ "px-4 py-2 bg-blue-200 text-white"] "Watch Match"
div_ [class_ "rounded-md flex flex-col"] $ do
h4_ "♟️ Play Game"
form_ [hxPost_ "/register", hxTarget_ "#content", class_ "flex flex-col"] $ do
input_ [name_ "playerName", type_ "text", placeholder_ "Name"]
button_
[ type_ "submit",
class_ "px-4 py-2 bg-blue-200 text-white"
]
"Join Match"
div_ [class_ "rounded-md flex flex-col"] $ do
h4_ "🤔 How To Play"
ul_ [] $ do
li_ [] "Learn chess"
li_ [] "Register"
li_ [] "Start playing"
div_ [id_ "content"] "Hello"
toHtmlRaw = toHtml
data JoinedPool = JoinedPool Text
instance ToHtml JoinedPool where
toHtml (JoinedPool name) = do
div_ [hxExt_ "sse", sseConnect_ "/stream", class_ "grid grid-cols-4 grid-rows-4 gap-2 my-10"] $ do
div_ [hxGet_ $ "/confirm?player=" <> name, hxSwap_ "outerHTML", hxTrigger_ $ "sse:confirm-" <> name, class_ "hidden"] ""
div_ [sseSwap_ ["init-" <> name, "update-" <> name], class_ "col-span-3 row-span-4 aspect-square container"] $ do
h4_ $ toHtml $ "Hello, " <> name <> ". Finding an opponent..."
div_ [class_ "flex flex-col gap-2 col-span-1 row-span-4 justify-items-center", sseSwap_ ["game-" <> name]] $ do
p_ "Finding an opponent..."
toHtmlRaw = toHtml
sseConnect_ :: Text -> Attribute
sseConnect_ = makeAttribute "sse-connect"
sseSwap_ :: [Text] -> Attribute
sseSwap_ messageNames = makeAttribute "sse-swap" $ Data.Text.intercalate "," messageNames
-- API
chess :: Okapi Result
chess = choice
[ home
, register
, stream
, confirm
, select
, move
]
-- home <|> register <|> stream <|> confirm <|> select <|> move
home :: Okapi Result
home = do
get
okLucid [] $ Wrap Home
register :: Okapi Result
register = do
Okapi.post
Okapi.seg "register"
Player {..} <- bodyURLEncoded
envRef <- ask
liftIO $ atomically $ addPlayerToWaitPool envRef playerName
okLucid [] $ JoinedPool playerName
stream :: Okapi Result
stream = do
get
seg "stream"
envRef <- ask
eventSource <- liftIO $ atomically $ readEventSource envRef
connectEventSource eventSource
confirm :: Okapi Result
confirm = do
get
Okapi.seg "confirm"
playerName <- queryParam "player"
envRef <- ask
liftIO $ SlaveThread.fork $ do
atomically $ deletePlayerFromWaitPool envRef playerName
atomically $ addPlayerToConfirmedPool envRef playerName
noContent []
select :: Okapi Result
select = do
get
Okapi.seg "select"
position <- queryParam "position"
piece <- queryParam "piece"
possibleMovesResult (Board mempty) position piece
where
possibleMovesResult :: MonadOkapi m => Board -> Position -> Piece -> m Result
possibleMovesResult board startPosition piece = do
let possibleMoves = calculatePossibleMoves board startPosition piece
case possibleMoves of
[] -> noContent []
(position:otherPositions) -> okLucid [] $ do
let
possibleMoveClass =
\pos -> positionToTileClass pos <> class_ " border-2 border-green-300"
div_ [id_ $ tShow position, possibleMoveClass position] $
case Map.lookup position $ unBoard board of
Nothing -> ""
Just piece' -> toHtml piece'
forM_ otherPositions
(\pos -> do
div_ [id_ $ tShow pos, possibleMoveClass pos, hxSwapOob_ "true"] $
case Map.lookup pos $ unBoard board of
Nothing -> ""
Just piece -> toHtml piece
)
move :: Okapi Result
move = do
get
Okapi.seg "move"
position <- queryParam "position"
piece <- queryParam "piece"
moveResult
where
moveResult :: Board -> Position -> Piece -> m Result
moveResult
| null | https://raw.githubusercontent.com/monadicsystems/okapi/8a0c17485b3778d819d0cf38b63a2e20406f466a/experimental/chess/Main.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
# LANGUAGE TypeSynonymInstances #
todo: Add timestamp
get p1
get p2
delete p1 from pool
delete p2 from pool
add new match
API
home <|> register <|> stream <|> confirm <|> select <|> move | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE QuasiQuotes #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
# LANGUAGE UnicodeSyntax #
module Main where
import Chess
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.STM.TVar
import Control.Monad.Extra
import Control.Monad.IO.Class
import Control.Monad.Reader.Class
import Control.Monad.Trans.Reader hiding (ask, asks)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text
import Data.Time
import GHC.Generics
import Lucid
import Lucid.Base
import Lucid.Htmx
import Okapi
import qualified SlaveThread
import Text.InterpolatedString.Perl6
import Web.FormUrlEncoded
import Control.Monad.Combinators
type Okapi a = OkapiT App a
newtype App a = App {runApp :: ReaderT (TVar Env) IO a}
deriving newtype
( Functor,
Applicative,
Monad,
MonadReader (TVar Env),
MonadIO
)
data Env = Env
{ envWaitPool :: WaitPool,
envConfirmedPool :: ConfirmedPool,
envMatches :: Set Match,
envEventSource :: Event
}
newtype Match = Match (Text, Text) deriving (Show, Ord)
instance Eq Match where
(Match (p1, p2)) == (Match (p1', p2')) =
p1 == p1' && p2 == p2' || p1 == p2' && p2 == p1'
newtype ConfirmedPool = ConfirmedPool {unConfirmedPool :: Set Text}
data Player = Player {playerName :: Text} deriving (Eq, Show, Generic, FromForm)
type MonadApp m =
( Monad m,
MonadReader Env m,
MonadIO m
)
main :: IO ()
main = do
print "Running chess app"
let
newEnvTVar :: IO (TVar Env)
newEnvTVar = do
eventSource <- Okapi.newEventSource
newTVarIO $ Env (WaitPool mempty) (ConfirmedPool mempty) mempty eventSource
hoistApp :: TVar Env -> App a -> IO a
hoistApp envTVar app = runReaderT (runApp app) envTVar
envTVar <- newEnvTVar
SlaveThread.fork $ forever $ do
threadDelay 1000000
confirmer envTVar
matchmaker envTVar
Okapi.run (hoistApp envTVar) 3000 chess
confirmer :: TVar Env -> IO ()
confirmer = sendConfirmMessages
where
sendConfirmMessages :: TVar Env -> IO ()
sendConfirmMessages envRef = do
waitPool <- unWaitPool <$> atomically (readWaitPool envRef)
if Set.null waitPool
then pure ()
else do
eventSource <- atomically $ readEventSource envRef
forM_ (Set.toList waitPool) (sendConfirmMessage eventSource)
sendConfirmMessage :: Event -> Text -> IO ()
sendConfirmMessage eventSource playerName = Okapi.sendEvent eventSource $ Event (Just $ "confirm-" <> playerName) Nothing ""
matchmaker :: TVar Env -> IO ()
matchmaker = tryNewMatch
where
tryNewMatch :: TVar Env -> IO ()
tryNewMatch envRef = do
confirmedPool <- unConfirmedPool <$> atomically (readConfirmedPool envRef)
at least 2 players confirmed
then do
eventSource <- atomically $ readEventSource envRef
sendStartEvents eventSource p1 p2
else pure ()
sendStartEvents :: Event -> Text -> Text -> IO ()
sendStartEvents eventSource p1Name p2Name = do
let event1 = Event (Just $ "init-" <> p1Name) Nothing $ renderBS $ toHtml startingBoard
event2 = Event (Just $ "init-" <> p2Name) Nothing $ renderBS $ toHtml startingBoard
Okapi.sendEvent eventSource event1
Okapi.sendEvent eventSource event2
addPlayerToWaitPool :: TVar Env -> Text -> STM ()
addPlayerToWaitPool envRef playerName = modifyWaitPool (WaitPool . Set.insert playerName . unWaitPool) envRef
addPlayerToConfirmedPool :: TVar Env -> Text -> STM ()
addPlayerToConfirmedPool envRef playerName = modifyConfirmedPool (ConfirmedPool . Set.insert playerName . unConfirmedPool) envRef
deletePlayerFromWaitPool :: TVar Env -> Text -> STM ()
deletePlayerFromWaitPool envRef playerName = modifyWaitPool (WaitPool . Set.delete playerName . unWaitPool) envRef
deletePlayerFromConfirmedPool :: TVar Env -> Text -> STM ()
deletePlayerFromConfirmedPool envRef playerName = modifyConfirmedPool (ConfirmedPool . Set.delete playerName . unConfirmedPool) envRef
readFromEnvTVar :: (Env -> a) -> TVar Env -> STM a
readFromEnvTVar f envTVar = do
env <- readTVar envTVar
pure $ f env
readWaitPool :: TVar Env -> STM WaitPool
readWaitPool = readFromEnvTVar envWaitPool
readConfirmedPool :: TVar Env -> STM ConfirmedPool
readConfirmedPool = readFromEnvTVar envConfirmedPool
readMatches :: TVar Env -> STM (Set Match)
readMatches = readFromEnvTVar envMatches
readEventSource :: TVar Env -> STM Event
readEventSource = readFromEnvTVar envEventSource
modfyEnvTVar :: (Env -> Env) -> TVar Env -> STM ()
modfyEnvTVar f envTVar = modifyTVar' envTVar f
modifyWaitPool :: (WaitPool -> WaitPool) -> TVar Env -> STM ()
modifyWaitPool f = modfyEnvTVar (\env -> env {envWaitPool = f $ envWaitPool env})
modifyConfirmedPool :: (ConfirmedPool -> ConfirmedPool) -> TVar Env -> STM ()
modifyConfirmedPool f = modfyEnvTVar (\env -> env {envConfirmedPool = f $ envConfirmedPool env})
modifyMatches :: (Set Match -> Set Match) -> TVar Env -> STM ()
modifyMatches f = modfyEnvTVar (\env -> env {envMatches = f $ envMatches env})
newtype Wrap a = Wrap a
instance ToHtml a => ToHtml (Wrap a) where
toHtml (Wrap inner) = do
doctype_
html_ $ do
head_ $ do
meta_ [charset_ "UTF-8"]
meta_ [name_ "viewport", content_ "width=device-width, initial-scale=1.0"]
title_ "Simple Chess"
script_ [src_ ""] ("" :: Text)
useHtmx
useHtmxExtension "sse"
body_ $ do
main_ [class_ "container mx-auto px-40 my-auto"] $ do
toHtml inner
toHtmlRaw = toHtml
data Home = Home
instance ToHtml Home where
toHtml Home = do
h1_ [] "Hyperchess"
div_ [] $ do
div_ [class_ "grid grid-cols-3 gap-10"] $ do
div_ [class_ "rounded-md flex flex-col"] $ do
h4_ "👁️ Watch Game"
form_ [class_ "flex flex-col"] $ do
select_ [id_ "stream"] $ do
option_ "John ⚔️ Bob"
option_ "Carol ⚔️ Bob"
option_ "John ⚔️ Bob"
button_ [type_ "submit", class_ "px-4 py-2 bg-blue-200 text-white"] "Watch Match"
div_ [class_ "rounded-md flex flex-col"] $ do
h4_ "♟️ Play Game"
form_ [hxPost_ "/register", hxTarget_ "#content", class_ "flex flex-col"] $ do
input_ [name_ "playerName", type_ "text", placeholder_ "Name"]
button_
[ type_ "submit",
class_ "px-4 py-2 bg-blue-200 text-white"
]
"Join Match"
div_ [class_ "rounded-md flex flex-col"] $ do
h4_ "🤔 How To Play"
ul_ [] $ do
li_ [] "Learn chess"
li_ [] "Register"
li_ [] "Start playing"
div_ [id_ "content"] "Hello"
toHtmlRaw = toHtml
data JoinedPool = JoinedPool Text
instance ToHtml JoinedPool where
toHtml (JoinedPool name) = do
div_ [hxExt_ "sse", sseConnect_ "/stream", class_ "grid grid-cols-4 grid-rows-4 gap-2 my-10"] $ do
div_ [hxGet_ $ "/confirm?player=" <> name, hxSwap_ "outerHTML", hxTrigger_ $ "sse:confirm-" <> name, class_ "hidden"] ""
div_ [sseSwap_ ["init-" <> name, "update-" <> name], class_ "col-span-3 row-span-4 aspect-square container"] $ do
h4_ $ toHtml $ "Hello, " <> name <> ". Finding an opponent..."
div_ [class_ "flex flex-col gap-2 col-span-1 row-span-4 justify-items-center", sseSwap_ ["game-" <> name]] $ do
p_ "Finding an opponent..."
toHtmlRaw = toHtml
sseConnect_ :: Text -> Attribute
sseConnect_ = makeAttribute "sse-connect"
sseSwap_ :: [Text] -> Attribute
sseSwap_ messageNames = makeAttribute "sse-swap" $ Data.Text.intercalate "," messageNames
chess :: Okapi Result
chess = choice
[ home
, register
, stream
, confirm
, select
, move
]
home :: Okapi Result
home = do
get
okLucid [] $ Wrap Home
register :: Okapi Result
register = do
Okapi.post
Okapi.seg "register"
Player {..} <- bodyURLEncoded
envRef <- ask
liftIO $ atomically $ addPlayerToWaitPool envRef playerName
okLucid [] $ JoinedPool playerName
stream :: Okapi Result
stream = do
get
seg "stream"
envRef <- ask
eventSource <- liftIO $ atomically $ readEventSource envRef
connectEventSource eventSource
confirm :: Okapi Result
confirm = do
get
Okapi.seg "confirm"
playerName <- queryParam "player"
envRef <- ask
liftIO $ SlaveThread.fork $ do
atomically $ deletePlayerFromWaitPool envRef playerName
atomically $ addPlayerToConfirmedPool envRef playerName
noContent []
select :: Okapi Result
select = do
get
Okapi.seg "select"
position <- queryParam "position"
piece <- queryParam "piece"
possibleMovesResult (Board mempty) position piece
where
possibleMovesResult :: MonadOkapi m => Board -> Position -> Piece -> m Result
possibleMovesResult board startPosition piece = do
let possibleMoves = calculatePossibleMoves board startPosition piece
case possibleMoves of
[] -> noContent []
(position:otherPositions) -> okLucid [] $ do
let
possibleMoveClass =
\pos -> positionToTileClass pos <> class_ " border-2 border-green-300"
div_ [id_ $ tShow position, possibleMoveClass position] $
case Map.lookup position $ unBoard board of
Nothing -> ""
Just piece' -> toHtml piece'
forM_ otherPositions
(\pos -> do
div_ [id_ $ tShow pos, possibleMoveClass pos, hxSwapOob_ "true"] $
case Map.lookup pos $ unBoard board of
Nothing -> ""
Just piece -> toHtml piece
)
move :: Okapi Result
move = do
get
Okapi.seg "move"
position <- queryParam "position"
piece <- queryParam "piece"
moveResult
where
moveResult :: Board -> Position -> Piece -> m Result
moveResult
|
91014556741cc667434c72907d4ec9977b58c3d97d30434065f9cb0b1c12b4ae | AccelerateHS/accelerate-llvm | Cache.hs | {-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.LLVM.Link.Cache
Copyright : [ 2017 .. 2020 ] The Accelerate Team
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
module Data.Array.Accelerate.LLVM.Link.Cache (
LinkCache,
new, dlsym,
) where
import Data.Array.Accelerate.Debug.Internal
import Data.Array.Accelerate.Lifetime
import Data.Array.Accelerate.LLVM.Compile.Cache
import Control.Monad
import Control.Concurrent.MVar
import Data.Map.Strict ( Map )
import Formatting
import Prelude hiding ( lookup )
import qualified Data.Map.Strict as Map
-- Simple reference-counted linker cache for function tables 'f' implemented by
-- object code 'o'.
--
data LinkCache f o = LinkCache {-# UNPACK #-} !(MVar (Map UID (Entry f o)))
data Entry f o = Entry {-# UNPACK #-} !Int !f !o
-- Create a new linker cache
--
new :: IO (LinkCache f o)
new = LinkCache `liftM` newMVar Map.empty
-- Return the binding addresses for the given kernel functions (by key). If the
-- functions do not already exist in the cache, the supplied continuation will
-- be run in order to generate them. This happens as a single atomic step; thus
-- the cache is thread safe.
--
dlsym :: UID -> LinkCache f o -> IO (f,o) -> IO (Lifetime f)
dlsym key cache@(LinkCache var) k = do
modifyMVar var $ \m ->
case Map.lookup key m of
-- Run the supplied function to generate the object code and add to the cache
Nothing -> do
(f,o) <- k
ticket <- issue key f cache
return ( Map.insert key (Entry 1 f o) m, ticket )
-- Return the existing object code
Just (Entry c f o) -> do
ticket <- issue key f cache
return ( Map.insert key (Entry (c+1) f o) m, ticket )
-
-- Insert the given function table and object code into the cache . The returned
-- value must be kept alive for as long as you need the object code to live ;
-- linker table entries are removed once all tickets referring to them are
-- GC'ed .
--
-- NOTE : It is an error if the entry already exists in the table . Thus , there is
-- a potential race condition between ' lookup ' and ' insert ' . On collision , it
-- would be fine to return a reference to the existing implementation instead
-- and discard the input values , but ' dlsym ' solves this anyway .
--
insert : : Int - > f - > o - > LinkCache f o - > IO ( Lifetime f )
insert key functionTable objectCode cache@(LinkCache var ) = do
ticket < - issue key functionTable cache
modifyMVar _ var $ \m - >
let collision = $ internalError " insert " " duplicate entry "
in return $ ! Map.insertWith collision key ( Entry 1 functionTable objectCode ) m
--
return ticket
-- Check the linker cache for the given functions ; if found return the
-- corresponding function table .
--
lookup : : Int - > LinkCache f o - > IO ( Maybe ( Lifetime f ) )
lookup key cache@(LinkCache var ) = do
modifyMVar var $ \m - >
case Map.lookup key m of
Nothing - > return ( m , Nothing )
Just ( Entry c f o ) - > do
ticket < - issue key f cache
return ( Map.insert key ( Entry ( c+1 ) f o ) m , Just ticket )
-
-- Insert the given function table and object code into the cache. The returned
-- value must be kept alive for as long as you need the object code to live;
-- linker table entries are removed once all tickets referring to them are
-- GC'ed.
--
-- NOTE: It is an error if the entry already exists in the table. Thus, there is
-- a potential race condition between 'lookup' and 'insert'. On collision, it
-- would be fine to return a reference to the existing implementation instead
-- and discard the input values, but 'dlsym' solves this anyway.
--
insert :: Int -> f -> o -> LinkCache f o -> IO (Lifetime f)
insert key functionTable objectCode cache@(LinkCache var) = do
ticket <- issue key functionTable cache
modifyMVar_ var $ \m ->
let collision = $internalError "insert" "duplicate entry"
in return $! Map.insertWith collision key (Entry 1 functionTable objectCode) m
--
return ticket
-- Check the linker cache for the given functions; if found return the
-- corresponding function table.
--
lookup :: Int -> LinkCache f o -> IO (Maybe (Lifetime f))
lookup key cache@(LinkCache var) = do
modifyMVar var $ \m ->
case Map.lookup key m of
Nothing -> return (m, Nothing)
Just (Entry c f o) -> do
ticket <- issue key f cache
return ( Map.insert key (Entry (c+1) f o) m, Just ticket )
--}
-- Issue a new ticket for the given table key/function table. When the returned
-- lifetime is GC'ed it decreasing the reference count of the corresponding
entry , and removes it from the table entirely once the count drops to zero .
--
issue :: UID -> f -> LinkCache f o -> IO (Lifetime f)
issue key fun (LinkCache var) = do
ticket <- newLifetime fun
addFinalizer ticket $
let refcount (Entry c f o)
| c <= 1 = trace dump_ld (bformat ("ld: remove object code " % shown) key) Nothing
| otherwise = Just (Entry (c-1) f o)
in
modifyMVar_ var $ \m -> return $! Map.update refcount key m
--
return ticket
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate-llvm/1da65b5951b1410b5e4e1a646fbe1b17bee780a8/accelerate-llvm/src/Data/Array/Accelerate/LLVM/Link/Cache.hs | haskell | # LANGUAGE OverloadedStrings #
# OPTIONS_HADDOCK hide #
|
Module : Data.Array.Accelerate.LLVM.Link.Cache
License : BSD3
Stability : experimental
Simple reference-counted linker cache for function tables 'f' implemented by
object code 'o'.
# UNPACK #
# UNPACK #
Create a new linker cache
Return the binding addresses for the given kernel functions (by key). If the
functions do not already exist in the cache, the supplied continuation will
be run in order to generate them. This happens as a single atomic step; thus
the cache is thread safe.
Run the supplied function to generate the object code and add to the cache
Return the existing object code
Insert the given function table and object code into the cache . The returned
value must be kept alive for as long as you need the object code to live ;
linker table entries are removed once all tickets referring to them are
GC'ed .
NOTE : It is an error if the entry already exists in the table . Thus , there is
a potential race condition between ' lookup ' and ' insert ' . On collision , it
would be fine to return a reference to the existing implementation instead
and discard the input values , but ' dlsym ' solves this anyway .
Check the linker cache for the given functions ; if found return the
corresponding function table .
Insert the given function table and object code into the cache. The returned
value must be kept alive for as long as you need the object code to live;
linker table entries are removed once all tickets referring to them are
GC'ed.
NOTE: It is an error if the entry already exists in the table. Thus, there is
a potential race condition between 'lookup' and 'insert'. On collision, it
would be fine to return a reference to the existing implementation instead
and discard the input values, but 'dlsym' solves this anyway.
Check the linker cache for the given functions; if found return the
corresponding function table.
}
Issue a new ticket for the given table key/function table. When the returned
lifetime is GC'ed it decreasing the reference count of the corresponding
| Copyright : [ 2017 .. 2020 ] The Accelerate Team
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.LLVM.Link.Cache (
LinkCache,
new, dlsym,
) where
import Data.Array.Accelerate.Debug.Internal
import Data.Array.Accelerate.Lifetime
import Data.Array.Accelerate.LLVM.Compile.Cache
import Control.Monad
import Control.Concurrent.MVar
import Data.Map.Strict ( Map )
import Formatting
import Prelude hiding ( lookup )
import qualified Data.Map.Strict as Map
new :: IO (LinkCache f o)
new = LinkCache `liftM` newMVar Map.empty
dlsym :: UID -> LinkCache f o -> IO (f,o) -> IO (Lifetime f)
dlsym key cache@(LinkCache var) k = do
modifyMVar var $ \m ->
case Map.lookup key m of
Nothing -> do
(f,o) <- k
ticket <- issue key f cache
return ( Map.insert key (Entry 1 f o) m, ticket )
Just (Entry c f o) -> do
ticket <- issue key f cache
return ( Map.insert key (Entry (c+1) f o) m, ticket )
-
insert : : Int - > f - > o - > LinkCache f o - > IO ( Lifetime f )
insert key functionTable objectCode cache@(LinkCache var ) = do
ticket < - issue key functionTable cache
modifyMVar _ var $ \m - >
let collision = $ internalError " insert " " duplicate entry "
in return $ ! Map.insertWith collision key ( Entry 1 functionTable objectCode ) m
return ticket
lookup : : Int - > LinkCache f o - > IO ( Maybe ( Lifetime f ) )
lookup key cache@(LinkCache var ) = do
modifyMVar var $ \m - >
case Map.lookup key m of
Nothing - > return ( m , Nothing )
Just ( Entry c f o ) - > do
ticket < - issue key f cache
return ( Map.insert key ( Entry ( c+1 ) f o ) m , Just ticket )
-
insert :: Int -> f -> o -> LinkCache f o -> IO (Lifetime f)
insert key functionTable objectCode cache@(LinkCache var) = do
ticket <- issue key functionTable cache
modifyMVar_ var $ \m ->
let collision = $internalError "insert" "duplicate entry"
in return $! Map.insertWith collision key (Entry 1 functionTable objectCode) m
return ticket
lookup :: Int -> LinkCache f o -> IO (Maybe (Lifetime f))
lookup key cache@(LinkCache var) = do
modifyMVar var $ \m ->
case Map.lookup key m of
Nothing -> return (m, Nothing)
Just (Entry c f o) -> do
ticket <- issue key f cache
return ( Map.insert key (Entry (c+1) f o) m, Just ticket )
entry , and removes it from the table entirely once the count drops to zero .
issue :: UID -> f -> LinkCache f o -> IO (Lifetime f)
issue key fun (LinkCache var) = do
ticket <- newLifetime fun
addFinalizer ticket $
let refcount (Entry c f o)
| c <= 1 = trace dump_ld (bformat ("ld: remove object code " % shown) key) Nothing
| otherwise = Just (Entry (c-1) f o)
in
modifyMVar_ var $ \m -> return $! Map.update refcount key m
return ticket
|
f522c432776a7bba1275295f0724863da1a5c707ec01d894525d7429f145c7ef | ml4tp/tcoq | type_errors.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i*)
open Names
open Cic
open Environ
(*i*)
type unsafe_judgment = constr * constr
(* Type errors. \label{typeerrors} *)
i Rem : NotEnoughAbstractionInFixBody should only occur with " /i " Fix
notation i
notation i*)
type guard_error =
(* Fixpoints *)
| NotEnoughAbstractionInFixBody
| RecursionNotOnInductiveType of constr
| RecursionOnIllegalTerm of int * (env * constr) * int list * int list
| NotEnoughArgumentsForFixCall of int
(* CoFixpoints *)
| CodomainNotInductiveType of constr
| NestedRecursiveOccurrences
| UnguardedRecursiveCall of constr
| RecCallInTypeOfAbstraction of constr
| RecCallInNonRecArgOfConstructor of constr
| RecCallInTypeOfDef of constr
| RecCallInCaseFun of constr
| RecCallInCaseArg of constr
| RecCallInCasePred of constr
| NotGuardedForm of constr
| ReturnPredicateNotCoInductive of constr
type arity_error =
| NonInformativeToInformative
| StrongEliminationOnNonSmallType
| WrongArity
type type_error =
| UnboundRel of int
| UnboundVar of variable
| NotAType of unsafe_judgment
| BadAssumption of unsafe_judgment
| ReferenceVariables of constr
| ElimArity of pinductive * sorts_family list * constr * unsafe_judgment
* (sorts_family * sorts_family * arity_error) option
| CaseNotInductive of unsafe_judgment
| WrongCaseInfo of inductive * case_info
| NumberBranches of unsafe_judgment * int
| IllFormedBranch of constr * int * constr * constr
| Generalization of (name * constr) * unsafe_judgment
| ActualType of unsafe_judgment * constr
| CantApplyBadType of
(int * constr * constr) * unsafe_judgment * unsafe_judgment array
| CantApplyNonFunctional of unsafe_judgment * unsafe_judgment array
| IllFormedRecBody of guard_error * name array * int
| IllTypedRecBody of
int * name array * unsafe_judgment array * constr array
| UnsatisfiedConstraints of Univ.constraints
exception TypeError of env * type_error
val error_unbound_rel : env -> int -> 'a
val error_unbound_var : env -> variable -> 'a
val error_not_type : env -> unsafe_judgment -> 'a
val error_assumption : env -> unsafe_judgment -> 'a
val error_reference_variables : env -> constr -> 'a
val error_elim_arity :
env -> pinductive -> sorts_family list -> constr -> unsafe_judgment ->
(sorts_family * sorts_family * arity_error) option -> 'a
val error_case_not_inductive : env -> unsafe_judgment -> 'a
val error_number_branches : env -> unsafe_judgment -> int -> 'a
val error_ill_formed_branch : env -> constr -> int -> constr -> constr -> 'a
val error_actual_type : env -> unsafe_judgment -> constr -> 'a
val error_cant_apply_not_functional :
env -> unsafe_judgment -> unsafe_judgment array -> 'a
val error_cant_apply_bad_type :
env -> int * constr * constr ->
unsafe_judgment -> unsafe_judgment array -> 'a
val error_ill_formed_rec_body :
env -> guard_error -> name array -> int -> 'a
val error_ill_typed_rec_body :
env -> int -> name array -> unsafe_judgment array -> constr array -> 'a
val error_unsatisfied_constraints : env -> Univ.constraints -> 'a
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/checker/type_errors.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i
i
Type errors. \label{typeerrors}
Fixpoints
CoFixpoints | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Names
open Cic
open Environ
type unsafe_judgment = constr * constr
i Rem : NotEnoughAbstractionInFixBody should only occur with " /i " Fix
notation i
notation i*)
type guard_error =
| NotEnoughAbstractionInFixBody
| RecursionNotOnInductiveType of constr
| RecursionOnIllegalTerm of int * (env * constr) * int list * int list
| NotEnoughArgumentsForFixCall of int
| CodomainNotInductiveType of constr
| NestedRecursiveOccurrences
| UnguardedRecursiveCall of constr
| RecCallInTypeOfAbstraction of constr
| RecCallInNonRecArgOfConstructor of constr
| RecCallInTypeOfDef of constr
| RecCallInCaseFun of constr
| RecCallInCaseArg of constr
| RecCallInCasePred of constr
| NotGuardedForm of constr
| ReturnPredicateNotCoInductive of constr
type arity_error =
| NonInformativeToInformative
| StrongEliminationOnNonSmallType
| WrongArity
type type_error =
| UnboundRel of int
| UnboundVar of variable
| NotAType of unsafe_judgment
| BadAssumption of unsafe_judgment
| ReferenceVariables of constr
| ElimArity of pinductive * sorts_family list * constr * unsafe_judgment
* (sorts_family * sorts_family * arity_error) option
| CaseNotInductive of unsafe_judgment
| WrongCaseInfo of inductive * case_info
| NumberBranches of unsafe_judgment * int
| IllFormedBranch of constr * int * constr * constr
| Generalization of (name * constr) * unsafe_judgment
| ActualType of unsafe_judgment * constr
| CantApplyBadType of
(int * constr * constr) * unsafe_judgment * unsafe_judgment array
| CantApplyNonFunctional of unsafe_judgment * unsafe_judgment array
| IllFormedRecBody of guard_error * name array * int
| IllTypedRecBody of
int * name array * unsafe_judgment array * constr array
| UnsatisfiedConstraints of Univ.constraints
exception TypeError of env * type_error
val error_unbound_rel : env -> int -> 'a
val error_unbound_var : env -> variable -> 'a
val error_not_type : env -> unsafe_judgment -> 'a
val error_assumption : env -> unsafe_judgment -> 'a
val error_reference_variables : env -> constr -> 'a
val error_elim_arity :
env -> pinductive -> sorts_family list -> constr -> unsafe_judgment ->
(sorts_family * sorts_family * arity_error) option -> 'a
val error_case_not_inductive : env -> unsafe_judgment -> 'a
val error_number_branches : env -> unsafe_judgment -> int -> 'a
val error_ill_formed_branch : env -> constr -> int -> constr -> constr -> 'a
val error_actual_type : env -> unsafe_judgment -> constr -> 'a
val error_cant_apply_not_functional :
env -> unsafe_judgment -> unsafe_judgment array -> 'a
val error_cant_apply_bad_type :
env -> int * constr * constr ->
unsafe_judgment -> unsafe_judgment array -> 'a
val error_ill_formed_rec_body :
env -> guard_error -> name array -> int -> 'a
val error_ill_typed_rec_body :
env -> int -> name array -> unsafe_judgment array -> constr array -> 'a
val error_unsatisfied_constraints : env -> Univ.constraints -> 'a
|
2e388dad373e356e2d3ca2d3f7acd7f698684739df6af397fa2c2115fc7baf6b | google/mlir-hs | ControlFlow.hs | Copyright 2022 Google LLC
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module MLIR.AST.Dialect.ControlFlow
( module MLIR.AST.Dialect.ControlFlow,
module MLIR.AST.Dialect.Generated.ControlFlow
) where
import Prelude hiding (return)
import Data.Array.IArray
import MLIR.AST
import MLIR.AST.Builder
import MLIR.AST.Dialect.Generated.ControlFlow
pattern Branch :: Location -> BlockName -> [Name] -> Operation
pattern Branch loc block args = Operation
{ opName = "cf.br"
, opLocation = loc
, opResultTypes = Explicit []
, opOperands = args
, opRegions = []
, opSuccessors = [block]
, opAttributes = NoAttrs
}
br :: MonadBlockBuilder m => BlockName -> [Value] -> m EndOfBlock
br block args = emitOp (Branch UnknownLocation block $ operands args) >> terminateBlock
cond_br :: MonadBlockBuilder m => Value -> BlockName -> [Value] -> BlockName -> [Value] -> m EndOfBlock
cond_br cond trueBlock trueArgs falseBlock falseArgs = do
emitOp_ $ Operation
{ opName = "cf.cond_br"
, opLocation = UnknownLocation
, opResultTypes = Explicit []
, opOperands = operands $ [cond] <> trueArgs <> falseArgs
, opRegions = []
, opSuccessors = [trueBlock, falseBlock]
, opAttributes = namedAttribute "operand_segment_sizes" $
DenseArrayAttr $
DenseInt32 $ listArray (0 :: Int, 2) $ fromIntegral <$> [1, length trueArgs, length falseArgs]
}
terminateBlock
| null | https://raw.githubusercontent.com/google/mlir-hs/b2b23d156caf824cecf00b8616108659702bad52/src/MLIR/AST/Dialect/ControlFlow.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2022 Google LLC
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
module MLIR.AST.Dialect.ControlFlow
( module MLIR.AST.Dialect.ControlFlow,
module MLIR.AST.Dialect.Generated.ControlFlow
) where
import Prelude hiding (return)
import Data.Array.IArray
import MLIR.AST
import MLIR.AST.Builder
import MLIR.AST.Dialect.Generated.ControlFlow
pattern Branch :: Location -> BlockName -> [Name] -> Operation
pattern Branch loc block args = Operation
{ opName = "cf.br"
, opLocation = loc
, opResultTypes = Explicit []
, opOperands = args
, opRegions = []
, opSuccessors = [block]
, opAttributes = NoAttrs
}
br :: MonadBlockBuilder m => BlockName -> [Value] -> m EndOfBlock
br block args = emitOp (Branch UnknownLocation block $ operands args) >> terminateBlock
cond_br :: MonadBlockBuilder m => Value -> BlockName -> [Value] -> BlockName -> [Value] -> m EndOfBlock
cond_br cond trueBlock trueArgs falseBlock falseArgs = do
emitOp_ $ Operation
{ opName = "cf.cond_br"
, opLocation = UnknownLocation
, opResultTypes = Explicit []
, opOperands = operands $ [cond] <> trueArgs <> falseArgs
, opRegions = []
, opSuccessors = [trueBlock, falseBlock]
, opAttributes = namedAttribute "operand_segment_sizes" $
DenseArrayAttr $
DenseInt32 $ listArray (0 :: Int, 2) $ fromIntegral <$> [1, length trueArgs, length falseArgs]
}
terminateBlock
|
fec59628bfdfc3fd9780d1e80a1592159f09893aa6200cd68f061ce03dae3d82 | hadolint/hadolint | DL3046.hs | module Hadolint.Rule.DL3046 (rule) where
import qualified Data.Text as Text
import Hadolint.Rule
import Hadolint.Shell (ParsedShell)
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax (Instruction (..), RunArgs (..))
rule :: Rule ParsedShell
rule = dl3046 <> onbuild dl3046
# INLINEABLE rule #
dl3046 :: Rule ParsedShell
dl3046 = simpleRule code severity message check
where
code = "DL3046"
severity = DLWarningC
message = "`useradd` without flag `-l` and high UID will result in excessively large Image."
check (Run (RunArgs args _)) = foldArguments (Shell.noCommands forgotFlagL) args
check _ = True
forgotFlagL cmd = isUseradd cmd && (not (hasLFlag cmd) && hasUFlag cmd && hasLongUID cmd)
isUseradd (Shell.Command name _ _) = name == "useradd"
hasLFlag = Shell.hasAnyFlag ["l", "no-log-init"]
hasUFlag = Shell.hasAnyFlag ["u", "uid"]
hasLongUID cmd = any ((> 5) . Text.length) (Shell.getFlagArg "u" cmd)
# INLINEABLE dl3046 #
| null | https://raw.githubusercontent.com/hadolint/hadolint/6c4632387a3485a9b7e1bf46c58da697178e02e2/src/Hadolint/Rule/DL3046.hs | haskell | module Hadolint.Rule.DL3046 (rule) where
import qualified Data.Text as Text
import Hadolint.Rule
import Hadolint.Shell (ParsedShell)
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax (Instruction (..), RunArgs (..))
rule :: Rule ParsedShell
rule = dl3046 <> onbuild dl3046
# INLINEABLE rule #
dl3046 :: Rule ParsedShell
dl3046 = simpleRule code severity message check
where
code = "DL3046"
severity = DLWarningC
message = "`useradd` without flag `-l` and high UID will result in excessively large Image."
check (Run (RunArgs args _)) = foldArguments (Shell.noCommands forgotFlagL) args
check _ = True
forgotFlagL cmd = isUseradd cmd && (not (hasLFlag cmd) && hasUFlag cmd && hasLongUID cmd)
isUseradd (Shell.Command name _ _) = name == "useradd"
hasLFlag = Shell.hasAnyFlag ["l", "no-log-init"]
hasUFlag = Shell.hasAnyFlag ["u", "uid"]
hasLongUID cmd = any ((> 5) . Text.length) (Shell.getFlagArg "u" cmd)
# INLINEABLE dl3046 #
| |
63cd2f1f38d3d64b42478c1e90a153c1deef1bda3875f13b44e6df0eb6793d18 | simmsb/calamity | Permissions.hs | -- | Permission utilities
module Calamity.Utils.Permissions (
basePermissions,
applyOverwrites,
PermissionsIn (..),
PermissionsIn' (..),
) where
import Calamity.Client.Types
import Calamity.Internal.SnowflakeMap qualified as SM
import Calamity.Types.Model.Channel.Guild
import Calamity.Types.Model.Guild.Guild
import Calamity.Types.Model.Guild.Member
import Calamity.Types.Model.Guild.Overwrite
import Calamity.Types.Model.Guild.Permissions
import Calamity.Types.Model.User
import Calamity.Types.Snowflake
import Calamity.Types.Upgradeable
import Data.Flags
import Data.Foldable (Foldable (foldl'))
import Data.Maybe (mapMaybe)
import Data.Vector.Unboxing qualified as V
import Optics
import Polysemy qualified as P
-- | Calculate a 'Member'\'s 'Permissions' in a 'Guild'
basePermissions :: Guild -> Member -> Permissions
basePermissions g m
| g ^. #ownerID == getID m = allFlags
| otherwise =
let everyoneRole = g ^. #roles % at (coerceSnowflake $ getID @Guild g)
permsEveryone = maybe noFlags (^. #permissions) everyoneRole
roleIDs = V.toList $ m ^. #roles
rolePerms = mapMaybe (\rid -> g ^? #roles % ix rid % #permissions) roleIDs
perms = foldl' andFlags noFlags (permsEveryone : rolePerms)
in if perms .<=. administrator
then allFlags
else perms
overwrites :: GuildChannel -> SM.SnowflakeMap Overwrite
overwrites (GuildTextChannel c) = c ^. #permissionOverwrites
overwrites (GuildVoiceChannel c) = c ^. #permissionOverwrites
overwrites (GuildCategory c) = c ^. #permissionOverwrites
overwrites _ = SM.empty
-- | Apply any 'Overwrite's for a 'GuildChannel' onto some 'Permissions'
applyOverwrites :: GuildChannel -> Member -> Permissions -> Permissions
applyOverwrites c m p
| p .<=. administrator = allFlags
| otherwise =
let everyoneOverwrite = overwrites c ^. at (coerceSnowflake $ getID @Guild c)
everyoneAllow = maybe noFlags (^. #allow) everyoneOverwrite
everyoneDeny = maybe noFlags (^. #deny) everyoneOverwrite
p' = p .-. everyoneDeny .+. everyoneAllow
roleOverwriteIDs = map (coerceSnowflake @_ @Overwrite) . V.toList $ m ^. #roles
roleOverwrites = mapMaybe (\oid -> overwrites c ^? ix oid) roleOverwriteIDs
roleAllow = foldl' andFlags noFlags (roleOverwrites ^.. traversed % #allow)
roleDeny = foldl' andFlags noFlags (roleOverwrites ^.. traversed % #deny)
p'' = p' .-. roleDeny .+. roleAllow
memberOverwrite = overwrites c ^. at (coerceSnowflake @_ @Overwrite $ getID @Member m)
memberAllow = maybe noFlags (^. #allow) memberOverwrite
memberDeny = maybe noFlags (^. #deny) memberOverwrite
p''' = p'' .-. memberDeny .+. memberAllow
in p'''
-- | Things that 'Member's have 'Permissions' in
class PermissionsIn a where
-- | Calculate a 'Member'\'s 'Permissions' in something
--
-- If permissions could not be calculated because something couldn't be found
-- in the cache, this will return an empty set of permissions. Use
-- 'permissionsIn'' if you want to handle cases where something might not exist
-- in cache.
permissionsIn :: a -> Member -> Permissions
-- | A 'Member'\'s 'Permissions' in a channel are their roles and overwrites
instance PermissionsIn (Guild, GuildChannel) where
permissionsIn (g, c) m = applyOverwrites c m $ basePermissions g m
-- | A 'Member'\'s 'Permissions' in a guild are just their roles
instance PermissionsIn Guild where
permissionsIn = basePermissions
| A variant of ' ' that will use the cache / http .
class PermissionsIn' a where
-- | Calculate the permissions of something that has a 'User' id
permissionsIn' :: (BotC r, HasID User u) => a -> u -> P.Sem r Permissions
{- | A 'User''s 'Permissions' in a channel are their roles and overwrites
This will fetch the guild from the cache or http as needed
-}
instance PermissionsIn' GuildChannel where
permissionsIn' c (getID @User -> uid) = do
m <- upgrade (getID @Guild c, coerceSnowflake @_ @Member uid)
g <- upgrade (getID @Guild c)
case (m, g) of
(Just m, Just g') -> pure $ permissionsIn (g', c) m
_cantFind -> pure noFlags
-- | A 'Member'\'s 'Permissions' in a guild are just their roles
instance PermissionsIn' Guild where
permissionsIn' g (getID @User -> uid) = do
m <- upgrade (getID @Guild g, coerceSnowflake @_ @Member uid)
case m of
Just m' -> pure $ permissionsIn g m'
Nothing -> pure noFlags
{- | A 'Member'\'s 'Permissions' in a channel are their roles and overwrites
This will fetch the guild and channel from the cache or http as needed
-}
instance PermissionsIn' (Snowflake GuildChannel) where
permissionsIn' cid u = do
c <- upgrade cid
case c of
Just c' -> permissionsIn' c' u
Nothing -> pure noFlags
{- | A 'Member'\'s 'Permissions' in a guild are just their roles
This will fetch the guild from the cache or http as needed
-}
instance PermissionsIn' (Snowflake Guild) where
permissionsIn' gid u = do
g <- upgrade gid
case g of
Just g' -> permissionsIn' g' u
Nothing -> pure noFlags
| null | https://raw.githubusercontent.com/simmsb/calamity/be310255b446e87e7432673de1fbc67ef46de3ae/calamity/Calamity/Utils/Permissions.hs | haskell | | Permission utilities
| Calculate a 'Member'\'s 'Permissions' in a 'Guild'
| Apply any 'Overwrite's for a 'GuildChannel' onto some 'Permissions'
| Things that 'Member's have 'Permissions' in
| Calculate a 'Member'\'s 'Permissions' in something
If permissions could not be calculated because something couldn't be found
in the cache, this will return an empty set of permissions. Use
'permissionsIn'' if you want to handle cases where something might not exist
in cache.
| A 'Member'\'s 'Permissions' in a channel are their roles and overwrites
| A 'Member'\'s 'Permissions' in a guild are just their roles
| Calculate the permissions of something that has a 'User' id
| A 'User''s 'Permissions' in a channel are their roles and overwrites
This will fetch the guild from the cache or http as needed
| A 'Member'\'s 'Permissions' in a guild are just their roles
| A 'Member'\'s 'Permissions' in a channel are their roles and overwrites
This will fetch the guild and channel from the cache or http as needed
| A 'Member'\'s 'Permissions' in a guild are just their roles
This will fetch the guild from the cache or http as needed
| module Calamity.Utils.Permissions (
basePermissions,
applyOverwrites,
PermissionsIn (..),
PermissionsIn' (..),
) where
import Calamity.Client.Types
import Calamity.Internal.SnowflakeMap qualified as SM
import Calamity.Types.Model.Channel.Guild
import Calamity.Types.Model.Guild.Guild
import Calamity.Types.Model.Guild.Member
import Calamity.Types.Model.Guild.Overwrite
import Calamity.Types.Model.Guild.Permissions
import Calamity.Types.Model.User
import Calamity.Types.Snowflake
import Calamity.Types.Upgradeable
import Data.Flags
import Data.Foldable (Foldable (foldl'))
import Data.Maybe (mapMaybe)
import Data.Vector.Unboxing qualified as V
import Optics
import Polysemy qualified as P
basePermissions :: Guild -> Member -> Permissions
basePermissions g m
| g ^. #ownerID == getID m = allFlags
| otherwise =
let everyoneRole = g ^. #roles % at (coerceSnowflake $ getID @Guild g)
permsEveryone = maybe noFlags (^. #permissions) everyoneRole
roleIDs = V.toList $ m ^. #roles
rolePerms = mapMaybe (\rid -> g ^? #roles % ix rid % #permissions) roleIDs
perms = foldl' andFlags noFlags (permsEveryone : rolePerms)
in if perms .<=. administrator
then allFlags
else perms
overwrites :: GuildChannel -> SM.SnowflakeMap Overwrite
overwrites (GuildTextChannel c) = c ^. #permissionOverwrites
overwrites (GuildVoiceChannel c) = c ^. #permissionOverwrites
overwrites (GuildCategory c) = c ^. #permissionOverwrites
overwrites _ = SM.empty
applyOverwrites :: GuildChannel -> Member -> Permissions -> Permissions
applyOverwrites c m p
| p .<=. administrator = allFlags
| otherwise =
let everyoneOverwrite = overwrites c ^. at (coerceSnowflake $ getID @Guild c)
everyoneAllow = maybe noFlags (^. #allow) everyoneOverwrite
everyoneDeny = maybe noFlags (^. #deny) everyoneOverwrite
p' = p .-. everyoneDeny .+. everyoneAllow
roleOverwriteIDs = map (coerceSnowflake @_ @Overwrite) . V.toList $ m ^. #roles
roleOverwrites = mapMaybe (\oid -> overwrites c ^? ix oid) roleOverwriteIDs
roleAllow = foldl' andFlags noFlags (roleOverwrites ^.. traversed % #allow)
roleDeny = foldl' andFlags noFlags (roleOverwrites ^.. traversed % #deny)
p'' = p' .-. roleDeny .+. roleAllow
memberOverwrite = overwrites c ^. at (coerceSnowflake @_ @Overwrite $ getID @Member m)
memberAllow = maybe noFlags (^. #allow) memberOverwrite
memberDeny = maybe noFlags (^. #deny) memberOverwrite
p''' = p'' .-. memberDeny .+. memberAllow
in p'''
class PermissionsIn a where
permissionsIn :: a -> Member -> Permissions
instance PermissionsIn (Guild, GuildChannel) where
permissionsIn (g, c) m = applyOverwrites c m $ basePermissions g m
instance PermissionsIn Guild where
permissionsIn = basePermissions
| A variant of ' ' that will use the cache / http .
class PermissionsIn' a where
permissionsIn' :: (BotC r, HasID User u) => a -> u -> P.Sem r Permissions
instance PermissionsIn' GuildChannel where
permissionsIn' c (getID @User -> uid) = do
m <- upgrade (getID @Guild c, coerceSnowflake @_ @Member uid)
g <- upgrade (getID @Guild c)
case (m, g) of
(Just m, Just g') -> pure $ permissionsIn (g', c) m
_cantFind -> pure noFlags
instance PermissionsIn' Guild where
permissionsIn' g (getID @User -> uid) = do
m <- upgrade (getID @Guild g, coerceSnowflake @_ @Member uid)
case m of
Just m' -> pure $ permissionsIn g m'
Nothing -> pure noFlags
instance PermissionsIn' (Snowflake GuildChannel) where
permissionsIn' cid u = do
c <- upgrade cid
case c of
Just c' -> permissionsIn' c' u
Nothing -> pure noFlags
instance PermissionsIn' (Snowflake Guild) where
permissionsIn' gid u = do
g <- upgrade gid
case g of
Just g' -> permissionsIn' g' u
Nothing -> pure noFlags
|
3133fa0dbc4105f0a7f11d1d461a903e81477f97272eee2edd62a91aefff80cc | bnoordhuis/chicken-core | test-finalizers.scm | ;;;; test-finalizers.scm
(##sys#eval-debug-level 0) ; disable keeping trace-buffer with frameinfo
(define x (list 1 2 3))
(define y (list 4 5 6))
(define x-f #f)
(define y-f #f)
(begin
(set-finalizer!
x
(lambda (o)
(format #t "Delete: ~A (y: ~a)~%" o y-f)
(set! x-f #t)))
#t)
(begin
(set-finalizer!
y
(let ((p x))
(lambda (o)
(format #t "Delete: ~A: ~A~%" o p)
(set! y-f #t))))
#t)
(gc #t)
(assert (not x-f))
#|
This ought to work, see patches/finalizer.closures.diff for
a fix that unfortunately disables finalizers in the interpreter
(probably due to the different closure representation).
(assert (not y-f))
(set! x #f)
(gc #t)
(assert (not x-f))
(assert (not y-f))
(set! y #f)
(gc #t)
(assert y-f)
(assert x-f)
|#
(define foo-f #f)
(let ((foo (vector 1 2 3)))
(set-finalizer! foo (lambda _ (set! foo-f #t)))
#t)
(gc #t)
(assert foo-f)
;; double finalizer
(define n 0)
(define (bump . _) (set! n (add1 n)))
(define x (vector 1))
(set-finalizer! x bump)
(set-finalizer! x bump)
(set! x #f)
(gc #t)
(print n)
(assert (= 2 n))
| null | https://raw.githubusercontent.com/bnoordhuis/chicken-core/56d30e3be095b6abe1bddcfe10505fa726a43bb5/tests/test-finalizers.scm | scheme | test-finalizers.scm
disable keeping trace-buffer with frameinfo
This ought to work, see patches/finalizer.closures.diff for
a fix that unfortunately disables finalizers in the interpreter
(probably due to the different closure representation).
(assert (not y-f))
(set! x #f)
(gc #t)
(assert (not x-f))
(assert (not y-f))
(set! y #f)
(gc #t)
(assert y-f)
(assert x-f)
double finalizer |
(define x (list 1 2 3))
(define y (list 4 5 6))
(define x-f #f)
(define y-f #f)
(begin
(set-finalizer!
x
(lambda (o)
(format #t "Delete: ~A (y: ~a)~%" o y-f)
(set! x-f #t)))
#t)
(begin
(set-finalizer!
y
(let ((p x))
(lambda (o)
(format #t "Delete: ~A: ~A~%" o p)
(set! y-f #t))))
#t)
(gc #t)
(assert (not x-f))
(define foo-f #f)
(let ((foo (vector 1 2 3)))
(set-finalizer! foo (lambda _ (set! foo-f #t)))
#t)
(gc #t)
(assert foo-f)
(define n 0)
(define (bump . _) (set! n (add1 n)))
(define x (vector 1))
(set-finalizer! x bump)
(set-finalizer! x bump)
(set! x #f)
(gc #t)
(print n)
(assert (= 2 n))
|
e4d05b86613b9c00e978999ae9454e94078f1443fc77ff759e16c94821b427e6 | dgtized/shimmers | garden_hose.cljs | (ns shimmers.sketches.garden-hose
"Concept is a randomly generated hose and then slowly unwind as water flows through it."
(:require
[quil.core :as q :include-macros true]
[quil.middleware :as m]
[shimmers.algorithm.kinematic-chain :as chain]
[shimmers.common.framerate :as framerate]
[shimmers.common.quil :as cq]
[shimmers.math.probability :as p]
[shimmers.math.vector :as v]
[shimmers.sketch :as sketch :include-macros true]
[thi.ng.geom.core :as g]
[thi.ng.math.core :as tm]))
(defn next-point [bounds variance {:keys [angle length] :as segment}]
(loop [variance variance]
(let [theta (mod (p/gaussian angle variance) tm/TWO_PI)
endpoint (chain/segment-endpoint (assoc segment :angle theta))]
(if (g/contains-point? bounds endpoint)
(chain/->KinematicSegment endpoint theta length)
(recur (+ variance 0.01))))))
(defn make-hose [n segment next-point]
(->> segment
(iterate next-point)
(take n)
chain/->KinematicChain))
(defn hose-pressure [hose clamped pressure]
(let [segments (:segments hose)]
(assoc hose :segments
(conj
(mapv (fn [[{base :base a-theta :angle length :length :as a}
{b-theta :angle target :base}]]
(let [diff (- b-theta a-theta)
change (* (/ (Math/abs diff) tm/PI) diff)
new-angle (+ a-theta change)
new-base (clamped (tm/mix base (v/-polar target length new-angle) pressure))]
(assoc a
:base new-base
:angle (g/angle-between new-base target))))
(partition 2 1 segments))
(last segments)))))
(defn hose-pressure-midpoint [hose clamped pressure]
(let [segments (:segments hose)
move-segment
(fn [[{a-base :base}
{b-base :base len :length :as b}
{c-base :base}]]
;; this could be better, basically trying to keep a and c apart
;; instead of folding them together into inflection points.
(let [dist-ac (g/dist a-base c-base)
midpoint (tm/mix a-base c-base 0.5)
new-base (->> (if (> dist-ac len) (* 2 pressure) pressure)
(tm/mix b-base midpoint)
clamped)]
(assoc b
:base new-base
:angle (g/angle-between new-base c-base))))]
(assoc hose :segments
(concat (take 1 segments)
(mapv move-segment (partition 3 1 segments))
(take-last 1 segments)))))
(defn target-position []
(tm/+ (cq/rel-vec 0.5 0.5)
(v/polar (cq/rel-h 0.4)
(/ (q/frame-count) 250))))
(defn setup []
(q/color-mode :hsl 1.0)
(let [bounds (cq/screen-rect 0.8)]
{:start (cq/rel-vec 0.5 0.15)
:bounds bounds
:hose (make-hose 2048 (chain/->KinematicSegment (cq/rel-vec 0.5 0.5) tm/HALF_PI 8)
(partial next-point bounds 0.6))}))
(defn update-state [{:keys [start bounds hose] :as state}]
(let [segments (-> hose :segments)
first-pos (v/clamp-bounds bounds (tm/mix (:base (first segments)) start 0.0))
last-pos (->> (tm/mix (chain/segment-endpoint (last segments))
(target-position) 0.01)
(v/clamp-bounds bounds))]
(-> state
(update :hose hose-pressure-midpoint (partial v/clamp-bounds bounds) 0.02)
(update :hose chain/chain-update first-pos last-pos))))
(defn alt-draw [{:keys [hose]}]
(q/stroke 0.0 0.025)
(q/no-fill)
(doseq [[i [p q]] (map-indexed vector (g/edges hose))]
(q/stroke (mod (* i tm/PHI 0.005) 1.0) 0.5 0.3 0.025)
(q/line p q)))
(defn draw [{:keys [hose]}]
(q/background 1.0 0.2)
(q/no-fill)
(cq/draw-path (g/vertices hose)))
(sketch/defquil garden-hose
:created-at "2021-09-25"
:size [800 600]
:setup setup
:update update-state
:draw draw
:middleware [m/fun-mode framerate/mode])
| null | https://raw.githubusercontent.com/dgtized/shimmers/0113e089ce894b20831c26cbb79b3b87a22c7146/src/shimmers/sketches/garden_hose.cljs | clojure | this could be better, basically trying to keep a and c apart
instead of folding them together into inflection points. | (ns shimmers.sketches.garden-hose
"Concept is a randomly generated hose and then slowly unwind as water flows through it."
(:require
[quil.core :as q :include-macros true]
[quil.middleware :as m]
[shimmers.algorithm.kinematic-chain :as chain]
[shimmers.common.framerate :as framerate]
[shimmers.common.quil :as cq]
[shimmers.math.probability :as p]
[shimmers.math.vector :as v]
[shimmers.sketch :as sketch :include-macros true]
[thi.ng.geom.core :as g]
[thi.ng.math.core :as tm]))
(defn next-point [bounds variance {:keys [angle length] :as segment}]
(loop [variance variance]
(let [theta (mod (p/gaussian angle variance) tm/TWO_PI)
endpoint (chain/segment-endpoint (assoc segment :angle theta))]
(if (g/contains-point? bounds endpoint)
(chain/->KinematicSegment endpoint theta length)
(recur (+ variance 0.01))))))
(defn make-hose [n segment next-point]
(->> segment
(iterate next-point)
(take n)
chain/->KinematicChain))
(defn hose-pressure [hose clamped pressure]
(let [segments (:segments hose)]
(assoc hose :segments
(conj
(mapv (fn [[{base :base a-theta :angle length :length :as a}
{b-theta :angle target :base}]]
(let [diff (- b-theta a-theta)
change (* (/ (Math/abs diff) tm/PI) diff)
new-angle (+ a-theta change)
new-base (clamped (tm/mix base (v/-polar target length new-angle) pressure))]
(assoc a
:base new-base
:angle (g/angle-between new-base target))))
(partition 2 1 segments))
(last segments)))))
(defn hose-pressure-midpoint [hose clamped pressure]
(let [segments (:segments hose)
move-segment
(fn [[{a-base :base}
{b-base :base len :length :as b}
{c-base :base}]]
(let [dist-ac (g/dist a-base c-base)
midpoint (tm/mix a-base c-base 0.5)
new-base (->> (if (> dist-ac len) (* 2 pressure) pressure)
(tm/mix b-base midpoint)
clamped)]
(assoc b
:base new-base
:angle (g/angle-between new-base c-base))))]
(assoc hose :segments
(concat (take 1 segments)
(mapv move-segment (partition 3 1 segments))
(take-last 1 segments)))))
(defn target-position []
(tm/+ (cq/rel-vec 0.5 0.5)
(v/polar (cq/rel-h 0.4)
(/ (q/frame-count) 250))))
(defn setup []
(q/color-mode :hsl 1.0)
(let [bounds (cq/screen-rect 0.8)]
{:start (cq/rel-vec 0.5 0.15)
:bounds bounds
:hose (make-hose 2048 (chain/->KinematicSegment (cq/rel-vec 0.5 0.5) tm/HALF_PI 8)
(partial next-point bounds 0.6))}))
(defn update-state [{:keys [start bounds hose] :as state}]
(let [segments (-> hose :segments)
first-pos (v/clamp-bounds bounds (tm/mix (:base (first segments)) start 0.0))
last-pos (->> (tm/mix (chain/segment-endpoint (last segments))
(target-position) 0.01)
(v/clamp-bounds bounds))]
(-> state
(update :hose hose-pressure-midpoint (partial v/clamp-bounds bounds) 0.02)
(update :hose chain/chain-update first-pos last-pos))))
(defn alt-draw [{:keys [hose]}]
(q/stroke 0.0 0.025)
(q/no-fill)
(doseq [[i [p q]] (map-indexed vector (g/edges hose))]
(q/stroke (mod (* i tm/PHI 0.005) 1.0) 0.5 0.3 0.025)
(q/line p q)))
(defn draw [{:keys [hose]}]
(q/background 1.0 0.2)
(q/no-fill)
(cq/draw-path (g/vertices hose)))
(sketch/defquil garden-hose
:created-at "2021-09-25"
:size [800 600]
:setup setup
:update update-state
:draw draw
:middleware [m/fun-mode framerate/mode])
|
eef00abb33bb35c9ba9d0d73fc038aeb907eb636f066536fd7d33bca7626ba12 | mstksg/advent-of-code-2021 | Day10.hs | -- |
-- Module : AOC.Challenge.Day10
-- License : BSD3
--
-- Stability : experimental
-- Portability : non-portable
--
Day 10 . See " AOC.Solver " for the types used in this module !
module AOC.Challenge.Day10 (
day10a
, day10b
) where
import AOC.Solver ((:~>)(..))
import Control.DeepSeq (NFData)
import GHC.Generics (Generic)
import Data.Maybe (mapMaybe)
import AOC.Common (traverseLines)
import Data.List (sort)
data Bracket = Round | Square | Curly | Angle
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData)
data Direction = Open | Close
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData)
data Symbol = Symbol { direction :: Direction, bracket :: Bracket }
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData)
-- | Left: error (with offending bracket)
-- Right: no error, but a list of leftover incompletes
runSymbols :: [Symbol] -> Either Bracket [Bracket]
runSymbols = go []
where
go stk = \case
Symbol Close b:xs -> case stk of
s:ss | s == b -> go ss xs
_ -> Left b
Symbol Open b:xs -> go (b:stk) xs
[] -> Right stk
parseString :: String -> Maybe [Symbol]
parseString = traverse lookupSymbol
where
lookupSymbol = \case
'(' -> Just $ Symbol Open Round
'[' -> Just $ Symbol Open Square
'{' -> Just $ Symbol Open Curly
'<' -> Just $ Symbol Open Angle
')' -> Just $ Symbol Close Round
']' -> Just $ Symbol Close Square
'}' -> Just $ Symbol Close Curly
'>' -> Just $ Symbol Close Angle
_ -> Nothing
day10a :: [[Symbol]] :~> Int
day10a = MkSol
{ sParse = traverseLines parseString
, sShow = show
, sSolve = Just . sum . map (either bracketScore (const 0) . runSymbols)
}
where
bracketScore :: Bracket -> Int
bracketScore = \case
Round -> 3
Square -> 57
Curly -> 1197
Angle -> 25137
day10b :: [[Symbol]] :~> Int
day10b = MkSol
{ sParse = sParse day10a
, sShow = show
, sSolve = takeMid . sort . mapMaybe (either (const Nothing) (Just . getScore) . runSymbols)
}
where
getScore :: [Bracket] -> Int
getScore = go 0
where
go !n (b:xs) = go (n * 5 + bracketScore b) xs
go !n [] = n
bracketScore = \case
Round -> 1
Square -> 2
Curly -> 3
Angle -> 4
| Return the middle item in a list . Step through the list at two
different speeds and return when the double - speed one hits the end .
takeMid :: [a] -> Maybe a
takeMid qs = go qs qs
where
go (_:xs) (_:_:ys) = go xs ys
go (x:_) _ = Just x
go [] _ = Nothing
| null | https://raw.githubusercontent.com/mstksg/advent-of-code-2021/7c14ffc5bdbdeecd6b0592c1718419b01f4a7e9e/src/AOC/Challenge/Day10.hs | haskell | |
Module : AOC.Challenge.Day10
License : BSD3
Stability : experimental
Portability : non-portable
| Left: error (with offending bracket)
Right: no error, but a list of leftover incompletes | Day 10 . See " AOC.Solver " for the types used in this module !
module AOC.Challenge.Day10 (
day10a
, day10b
) where
import AOC.Solver ((:~>)(..))
import Control.DeepSeq (NFData)
import GHC.Generics (Generic)
import Data.Maybe (mapMaybe)
import AOC.Common (traverseLines)
import Data.List (sort)
data Bracket = Round | Square | Curly | Angle
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData)
data Direction = Open | Close
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData)
data Symbol = Symbol { direction :: Direction, bracket :: Bracket }
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData)
runSymbols :: [Symbol] -> Either Bracket [Bracket]
runSymbols = go []
where
go stk = \case
Symbol Close b:xs -> case stk of
s:ss | s == b -> go ss xs
_ -> Left b
Symbol Open b:xs -> go (b:stk) xs
[] -> Right stk
parseString :: String -> Maybe [Symbol]
parseString = traverse lookupSymbol
where
lookupSymbol = \case
'(' -> Just $ Symbol Open Round
'[' -> Just $ Symbol Open Square
'{' -> Just $ Symbol Open Curly
'<' -> Just $ Symbol Open Angle
')' -> Just $ Symbol Close Round
']' -> Just $ Symbol Close Square
'}' -> Just $ Symbol Close Curly
'>' -> Just $ Symbol Close Angle
_ -> Nothing
day10a :: [[Symbol]] :~> Int
day10a = MkSol
{ sParse = traverseLines parseString
, sShow = show
, sSolve = Just . sum . map (either bracketScore (const 0) . runSymbols)
}
where
bracketScore :: Bracket -> Int
bracketScore = \case
Round -> 3
Square -> 57
Curly -> 1197
Angle -> 25137
day10b :: [[Symbol]] :~> Int
day10b = MkSol
{ sParse = sParse day10a
, sShow = show
, sSolve = takeMid . sort . mapMaybe (either (const Nothing) (Just . getScore) . runSymbols)
}
where
getScore :: [Bracket] -> Int
getScore = go 0
where
go !n (b:xs) = go (n * 5 + bracketScore b) xs
go !n [] = n
bracketScore = \case
Round -> 1
Square -> 2
Curly -> 3
Angle -> 4
| Return the middle item in a list . Step through the list at two
different speeds and return when the double - speed one hits the end .
takeMid :: [a] -> Maybe a
takeMid qs = go qs qs
where
go (_:xs) (_:_:ys) = go xs ys
go (x:_) _ = Just x
go [] _ = Nothing
|
ab192132b26053ba08caa6b2e23b47ce61861c714309f12da543c1f9a84185b0 | jackdoe/bzzz | filtered.clj | (ns bzzz.queries.filtered
(:import (org.apache.lucene.search FilteredQuery QueryWrapperFilter)))
(defn parse
[generic input analyzer ]
(let [{:keys [query filter boost]
:or {boost 1}} input
q (FilteredQuery. (generic query analyzer)
(QueryWrapperFilter. (generic filter analyzer)))]
(.setBoost q boost)
q))
| null | https://raw.githubusercontent.com/jackdoe/bzzz/ae98708056e39ada28f22aad9e43ea91695b346b/src/bzzz/queries/filtered.clj | clojure | (ns bzzz.queries.filtered
(:import (org.apache.lucene.search FilteredQuery QueryWrapperFilter)))
(defn parse
[generic input analyzer ]
(let [{:keys [query filter boost]
:or {boost 1}} input
q (FilteredQuery. (generic query analyzer)
(QueryWrapperFilter. (generic filter analyzer)))]
(.setBoost q boost)
q))
| |
63f202db66134e34e71f5f245083c95a4a8054ad049c6b438ed76565e06a3656 | cstar/erldis | erldis_list_tests.erl | -module(erldis_list_tests).
-include_lib("eunit/include/eunit.hrl").
queue_test() ->
Client = setup(),
% queue api
?assertEqual(true, erldis_list:is_empty(<<"foo">>, Client)),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:out(<<"foo">>, Client)),
erldis_list:in(<<"a">>, <<"foo">>, Client),
?assertEqual(false, erldis_list:is_empty(<<"foo">>, Client)),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual(2, erldis_list:len(<<"foo">>, Client)),
?assertEqual({value, <<"a">>}, erldis_list:out(<<"foo">>, Client)),
?assertEqual(1, erldis_list:len(<<"foo">>, Client)),
erldis_list:in_r(<<"x">>, <<"foo">>, Client),
?assertEqual({value, <<"b">>}, erldis_list:out_r(<<"foo">>, Client)),
?assertEqual(false, erldis_list:is_empty(<<"foo">>, Client)),
?assertEqual({value, <<"x">>}, erldis_list:out(<<"foo">>, Client)),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:out(<<"foo">>, Client)),
erldis_client:stop(Client).
extended_queue_test() ->
Client = setup(),
?assertEqual(empty, erldis_list:get(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:get_r(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:peek(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:peek_r(<<"foo">>, Client)),
erldis_list:in(<<"a">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual(<<"a">>, erldis_list:get(<<"foo">>, Client)),
?assertEqual(<<"b">>, erldis_list:get_r(<<"foo">>, Client)),
?assertEqual(2, erldis_list:len(<<"foo">>, Client)),
?assertEqual({value, <<"a">>}, erldis_list:peek(<<"foo">>, Client)),
?assertEqual({value, <<"b">>}, erldis_list:peek_r(<<"foo">>, Client)),
erldis_list:drop(<<"foo">>, Client),
erldis_list:drop_r(<<"foo">>, Client),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
erldis_client:stop(Client).
array_test() ->
Client = setup(),
erldis_list:in(<<"a">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual(<<"b">>, erldis_list:get(1, <<"foo">>, Client)),
erldis_list:set(1, <<"x">>, <<"foo">>, Client),
?assertEqual(<<"x">>, erldis_list:get(1, <<"foo">>, Client)),
?assertEqual(2, erldis_list:size(<<"foo">>, Client)),
?assertEqual({value, <<"a">>}, erldis_list:out(<<"foo">>, Client)),
?assertEqual({value, <<"x">>}, erldis_list:out(<<"foo">>, Client)),
erldis_client:stop(Client).
lists_test() ->
Client = setup(),
?assertEqual(false, erldis_list:is_list(<<"foo">>, Client)),
?assertEqual([], erldis_list:sublist(<<"foo">>, Client, 1)),
erldis_list:in(<<"a">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
erldis_list:in(<<"c">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual([<<"b">>, <<"c">>], erldis_list:sublist(<<"foo">>, Client, 2, 2)),
?assertEqual(<<"b">>, erldis_list:nth(1, <<"foo">>, Client)),
erldis_list:delete(<<"b">>, <<"foo">>, Client),
?assertEqual(<<"c">>, erldis_list:nth(1, <<"foo">>, Client)),
?assertEqual(3, erldis_list:len(<<"foo">>, Client)),
?assertEqual([<<"c">>, <<"b">>], erldis_list:sublist(<<"foo">>, Client, 2, 2)),
erldis_list:drop(<<"foo">>, Client),
erldis_list:drop(<<"foo">>, Client),
erldis_list:drop(<<"foo">>, Client),
erldis_client:stop(Client).
% this last call always produces a timeout error
? ( [ ] , erldis_list : sublist(<<"foo " > > , Client , 3 ) ) .
% TODO: test negative sublist start index
blocking_queue_test() ->
Client = setup(),
erldis:rpush(Client, <<"a">>, <<"value">>),
?assertEqual([<<"a">>, <<"value">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
erldis:rpush(Client, <<"b">>, <<"value">>),
?assertEqual([<<"b">>, <<"value">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
erldis:rpush(Client, <<"a">>, <<"first">>),
erldis:rpush(Client, <<"a">>, <<"second">>),
?assertEqual([<<"a">>, <<"first">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
?assertEqual([<<"a">>, <<"second">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
spawn_link(fun blocking_queue_sender/0),
?assertEqual([<<"a">>, <<1>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([<<"b">>, <<1>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([<<"a">>, <<2>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([<<"a">>, <<3>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
erldis_client:stop(Client).
blocking_queue_sender() ->
Client = setup(),
erldis:rpush(Client, <<"a">>, <<1>>),
timer:sleep(100),
erldis:rpush(Client, <<"b">>, <<1>>),
timer:sleep(100),
erldis:rpush(Client, <<"a">>, <<2>>),
timer:sleep(3000),
erldis:rpush(Client, <<"a">>, <<3>>),
erldis_client:stop(Client).
foreach_test() ->
Client = setup(),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
L = [<<"a">>, <<"b">>, <<"c">>],
erldis_list:from_list(L, <<"foo">>, Client),
?assertEqual(length(L), erldis_list:len(<<"foo">>, Client)),
put(n, 1),
F = fun(Item) ->
N = get(n),
?assertEqual(lists:nth(N, L), Item),
put(n, N+1)
end,
erldis_list:foreach(F, <<"foo">>, Client),
erldis_client:stop(Client).
merge_test() ->
Client = setup(),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
L1 = [<<"a">>, <<"c">>, <<"e">>],
erldis_list:from_list(L1, <<"foo">>, Client),
?assertEqual(length(L1), erldis_list:len(<<"foo">>, Client)),
L2 = [<<"b">>, <<"d">>, <<"f">>],
F = fun(A, B) -> A =< B end,
erldis_list:merge(F, L2, <<"foo">>, Client),
Merged = lists:merge(F, L1, L2),
?assertEqual(Merged, lists:merge(L1, L2)),
?assertEqual(length(Merged), erldis_list:len(<<"foo">>, Client)),
?assertEqual(Merged, erldis_list:to_list(<<"foo">>, Client)),
L3 = [<<"a">>, <<"c">>, <<"f">>, <<"g">>],
erldis_list:umerge(F, L3, <<"foo">>, Client),
Merged2 = lists:umerge(F, Merged, L3),
?assertEqual(Merged2, lists:umerge(Merged, L3)),
?assertEqual(length(Merged2), erldis_list:len(<<"foo">>, Client)),
?assertEqual(Merged2, erldis_list:to_list(<<"foo">>, Client)),
erldis_client:stop(Client).
umerge_test() ->
Client = setup(),
Key = <<"foo">>,
F = fun(A, B) -> A =< B end,
?assertEqual(0, erldis_list:len(Key, Client)),
L1 = [<<"a">>, <<"c">>, <<"e">>],
erldis_list:umerge(F, L1, Key, Client),
L1 = erldis_list:to_list(Key, Client),
erldis_list:umerge(F, L1, Key, Client),
L1 = erldis_list:to_list(Key, Client),
erldis_list:umerge(F, [<<"a">>], Key, Client),
L1 = erldis_list:to_list(Key, Client),
erldis_client:stop(Client).
common_test() ->
Client = setup(),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
L = [<<"a">>, <<"b">>, <<"c">>],
erldis_list:from_list(L, <<"foo">>, Client),
?assertEqual(length(L), erldis_list:len(<<"foo">>, Client)),
% to_list uses foldr
?assertEqual(L, erldis_list:to_list(<<"foo">>, Client)),
% reverse uses foldl
?assertEqual(lists:reverse(L), erldis_list:reverse(<<"foo">>, Client)),
% from_list overwrites current list if it exists
L2 = [<<"d">> | L],
erldis_list:from_list(L2, <<"foo">>, Client),
?assertEqual(length(L2), erldis_list:len(<<"foo">>, Client)),
?assertEqual(L2, erldis_list:to_list(<<"foo">>, Client)),
erldis_client:stop(Client).
extra_queue_test() ->
Client = setup(),
L = [<<"a">>, <<"b">>, <<"c">>],
Length = length(L),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
erldis_list:from_list(L, <<"foo">>, Client),
?assertEqual(Length, erldis_list:len(<<"foo">>, Client)),
F = fun(Item) ->
N = Length - erldis_list:len(<<"foo">>, Client),
?assertEqual(lists:nth(N, L), Item)
end,
erldis_list:out_foreach(F, <<"foo">>, Client),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
erldis_client:stop(Client).
setup() ->
% setup
application:load(erldis),
{ok, Client} = erldis_client:connect(),
?assertEqual(erldis:flushdb(Client), ok),
Client.
| null | https://raw.githubusercontent.com/cstar/erldis/375260795e6a3de2600e297658f364deedbe6f1b/test/erldis_list_tests.erl | erlang | queue api
this last call always produces a timeout error
TODO: test negative sublist start index
to_list uses foldr
reverse uses foldl
from_list overwrites current list if it exists
setup | -module(erldis_list_tests).
-include_lib("eunit/include/eunit.hrl").
queue_test() ->
Client = setup(),
?assertEqual(true, erldis_list:is_empty(<<"foo">>, Client)),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:out(<<"foo">>, Client)),
erldis_list:in(<<"a">>, <<"foo">>, Client),
?assertEqual(false, erldis_list:is_empty(<<"foo">>, Client)),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual(2, erldis_list:len(<<"foo">>, Client)),
?assertEqual({value, <<"a">>}, erldis_list:out(<<"foo">>, Client)),
?assertEqual(1, erldis_list:len(<<"foo">>, Client)),
erldis_list:in_r(<<"x">>, <<"foo">>, Client),
?assertEqual({value, <<"b">>}, erldis_list:out_r(<<"foo">>, Client)),
?assertEqual(false, erldis_list:is_empty(<<"foo">>, Client)),
?assertEqual({value, <<"x">>}, erldis_list:out(<<"foo">>, Client)),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:out(<<"foo">>, Client)),
erldis_client:stop(Client).
extended_queue_test() ->
Client = setup(),
?assertEqual(empty, erldis_list:get(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:get_r(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:peek(<<"foo">>, Client)),
?assertEqual(empty, erldis_list:peek_r(<<"foo">>, Client)),
erldis_list:in(<<"a">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual(<<"a">>, erldis_list:get(<<"foo">>, Client)),
?assertEqual(<<"b">>, erldis_list:get_r(<<"foo">>, Client)),
?assertEqual(2, erldis_list:len(<<"foo">>, Client)),
?assertEqual({value, <<"a">>}, erldis_list:peek(<<"foo">>, Client)),
?assertEqual({value, <<"b">>}, erldis_list:peek_r(<<"foo">>, Client)),
erldis_list:drop(<<"foo">>, Client),
erldis_list:drop_r(<<"foo">>, Client),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
erldis_client:stop(Client).
array_test() ->
Client = setup(),
erldis_list:in(<<"a">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual(<<"b">>, erldis_list:get(1, <<"foo">>, Client)),
erldis_list:set(1, <<"x">>, <<"foo">>, Client),
?assertEqual(<<"x">>, erldis_list:get(1, <<"foo">>, Client)),
?assertEqual(2, erldis_list:size(<<"foo">>, Client)),
?assertEqual({value, <<"a">>}, erldis_list:out(<<"foo">>, Client)),
?assertEqual({value, <<"x">>}, erldis_list:out(<<"foo">>, Client)),
erldis_client:stop(Client).
lists_test() ->
Client = setup(),
?assertEqual(false, erldis_list:is_list(<<"foo">>, Client)),
?assertEqual([], erldis_list:sublist(<<"foo">>, Client, 1)),
erldis_list:in(<<"a">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
erldis_list:in(<<"c">>, <<"foo">>, Client),
erldis_list:in(<<"b">>, <<"foo">>, Client),
?assertEqual([<<"b">>, <<"c">>], erldis_list:sublist(<<"foo">>, Client, 2, 2)),
?assertEqual(<<"b">>, erldis_list:nth(1, <<"foo">>, Client)),
erldis_list:delete(<<"b">>, <<"foo">>, Client),
?assertEqual(<<"c">>, erldis_list:nth(1, <<"foo">>, Client)),
?assertEqual(3, erldis_list:len(<<"foo">>, Client)),
?assertEqual([<<"c">>, <<"b">>], erldis_list:sublist(<<"foo">>, Client, 2, 2)),
erldis_list:drop(<<"foo">>, Client),
erldis_list:drop(<<"foo">>, Client),
erldis_list:drop(<<"foo">>, Client),
erldis_client:stop(Client).
? ( [ ] , erldis_list : sublist(<<"foo " > > , Client , 3 ) ) .
blocking_queue_test() ->
Client = setup(),
erldis:rpush(Client, <<"a">>, <<"value">>),
?assertEqual([<<"a">>, <<"value">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
erldis:rpush(Client, <<"b">>, <<"value">>),
?assertEqual([<<"b">>, <<"value">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
erldis:rpush(Client, <<"a">>, <<"first">>),
erldis:rpush(Client, <<"a">>, <<"second">>),
?assertEqual([<<"a">>, <<"first">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
?assertEqual([<<"a">>, <<"second">>], erldis:blpop(Client, [<<"a">>, <<"b">>])),
spawn_link(fun blocking_queue_sender/0),
?assertEqual([<<"a">>, <<1>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([<<"b">>, <<1>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([<<"a">>, <<2>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
?assertEqual([<<"a">>, <<3>>], erldis:blpop(Client, [<<"a">>, <<"b">>], 1000)),
erldis_client:stop(Client).
blocking_queue_sender() ->
Client = setup(),
erldis:rpush(Client, <<"a">>, <<1>>),
timer:sleep(100),
erldis:rpush(Client, <<"b">>, <<1>>),
timer:sleep(100),
erldis:rpush(Client, <<"a">>, <<2>>),
timer:sleep(3000),
erldis:rpush(Client, <<"a">>, <<3>>),
erldis_client:stop(Client).
foreach_test() ->
Client = setup(),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
L = [<<"a">>, <<"b">>, <<"c">>],
erldis_list:from_list(L, <<"foo">>, Client),
?assertEqual(length(L), erldis_list:len(<<"foo">>, Client)),
put(n, 1),
F = fun(Item) ->
N = get(n),
?assertEqual(lists:nth(N, L), Item),
put(n, N+1)
end,
erldis_list:foreach(F, <<"foo">>, Client),
erldis_client:stop(Client).
merge_test() ->
Client = setup(),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
L1 = [<<"a">>, <<"c">>, <<"e">>],
erldis_list:from_list(L1, <<"foo">>, Client),
?assertEqual(length(L1), erldis_list:len(<<"foo">>, Client)),
L2 = [<<"b">>, <<"d">>, <<"f">>],
F = fun(A, B) -> A =< B end,
erldis_list:merge(F, L2, <<"foo">>, Client),
Merged = lists:merge(F, L1, L2),
?assertEqual(Merged, lists:merge(L1, L2)),
?assertEqual(length(Merged), erldis_list:len(<<"foo">>, Client)),
?assertEqual(Merged, erldis_list:to_list(<<"foo">>, Client)),
L3 = [<<"a">>, <<"c">>, <<"f">>, <<"g">>],
erldis_list:umerge(F, L3, <<"foo">>, Client),
Merged2 = lists:umerge(F, Merged, L3),
?assertEqual(Merged2, lists:umerge(Merged, L3)),
?assertEqual(length(Merged2), erldis_list:len(<<"foo">>, Client)),
?assertEqual(Merged2, erldis_list:to_list(<<"foo">>, Client)),
erldis_client:stop(Client).
umerge_test() ->
Client = setup(),
Key = <<"foo">>,
F = fun(A, B) -> A =< B end,
?assertEqual(0, erldis_list:len(Key, Client)),
L1 = [<<"a">>, <<"c">>, <<"e">>],
erldis_list:umerge(F, L1, Key, Client),
L1 = erldis_list:to_list(Key, Client),
erldis_list:umerge(F, L1, Key, Client),
L1 = erldis_list:to_list(Key, Client),
erldis_list:umerge(F, [<<"a">>], Key, Client),
L1 = erldis_list:to_list(Key, Client),
erldis_client:stop(Client).
common_test() ->
Client = setup(),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
L = [<<"a">>, <<"b">>, <<"c">>],
erldis_list:from_list(L, <<"foo">>, Client),
?assertEqual(length(L), erldis_list:len(<<"foo">>, Client)),
?assertEqual(L, erldis_list:to_list(<<"foo">>, Client)),
?assertEqual(lists:reverse(L), erldis_list:reverse(<<"foo">>, Client)),
L2 = [<<"d">> | L],
erldis_list:from_list(L2, <<"foo">>, Client),
?assertEqual(length(L2), erldis_list:len(<<"foo">>, Client)),
?assertEqual(L2, erldis_list:to_list(<<"foo">>, Client)),
erldis_client:stop(Client).
extra_queue_test() ->
Client = setup(),
L = [<<"a">>, <<"b">>, <<"c">>],
Length = length(L),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
erldis_list:from_list(L, <<"foo">>, Client),
?assertEqual(Length, erldis_list:len(<<"foo">>, Client)),
F = fun(Item) ->
N = Length - erldis_list:len(<<"foo">>, Client),
?assertEqual(lists:nth(N, L), Item)
end,
erldis_list:out_foreach(F, <<"foo">>, Client),
?assertEqual(0, erldis_list:len(<<"foo">>, Client)),
erldis_client:stop(Client).
setup() ->
application:load(erldis),
{ok, Client} = erldis_client:connect(),
?assertEqual(erldis:flushdb(Client), ok),
Client.
|
18b5c6c18dce0028490d04da3b0f4699f2b9f51b3ad90623eda9999f93947b70 | yav/hobbit | Type.hs | module AST.Type
( ModName, QName(..), Name(..)
, Type(..), Kind, TyVar(..), Pred(..), Poly(..), Schema, Rule(..), RuleEnv(..)
, Goal(..), DGoal(..), FunDep'
, sSort, sFun
, kType, kArea, kNat, kLab, kPred, kFun
, tcFun
, tBool, tString, tChar, tSub, subName
, tFun, tTup, tBit, tIx, tARef, tM
, tNat, tLab
, tLE, tBE, tArray
, cAdd, cTimes, cExp2, cGCD
, cDNat, cWidth, cIndex, cAlign
, cBitRep, cBitData, cJoin
, cLiteral, cEq, cOrd, cNum, cBitOps, cBounded
, cField, cUpdField, cSizeOf, cAreaDecl, cValIn, cBytes
, tDom, tCod
, defName, ifCheckName, mainName, mainType, entryName
, ruleHead
, qPrim, qPrel
, mono
, typeToPred, predToType, splitTApp
) where
import AST.Evidence
import Error
import PP
import Data.IORef
import Data.Word
type ModName = String
newtype QName = Q Name deriving (Eq,Ord)
data Name = VarName String -- ^ User variables
| ConName String -- ^ User constructors
| Qual ModName Name -- ^ Qualified name
| Select Name -- ^ Record selector
| Update Name -- ^ record updator
| IfCheck Name -- ^ An 'if' clause
^ A default ( ctr , field )
| Tup Integer -- ^ Tuple constructors
-- Sort names
| SFun -- ^ Function scpace
| SSort -- ^ Sort
| TNat Word32 -- ^ A natural number types
| TLab Name -- ^ A label type
| TSub Name Name -- ^ Bitdata constructor types
| TSome -- ^ No empty kinds (Spec.hs)
-- Introduced during specialization.
| Inst Name [Name] -- ^ Type application
deriving (Eq,Ord)
data Type = TApp Type Type
| TCon Name
| TFree TyVar
| TSyn Name [Type] Type -- ^ Last type is expansion
| TInfix Type Name Type -- Parsing
| TParens Type
deriving (Eq)
type Kind = Type
data TyVar = TyVar
{ tyVarName :: Name -- ^ Suggested name
, tyVarPtr :: IORef (Maybe Type) -- ^ To get bound
, tyVarKind :: Kind -- ^ The variable kind
}
| TyUser
{ tyVarName :: Name }
instance Eq TyVar where
TyVar { tyVarPtr = x } == TyVar { tyVarPtr = y } = x == y
TyUser { tyVarName = x } == TyUser { tyVarName = y } = x == y
_ == _ = False
data Pred = CIn Name [Type]
data Poly p = Forall { polyVars :: [TyVar]
, polyPreds :: [Pred]
, poly :: p }
type Schema = Poly Type
type FunDep' = ([Int],[Int])
-- | Note: the arguments in 'ruleProof' should match the 'rulePred'
data Rule = Rule { ruleProof :: [Ev] -> Ev
, rulePred :: Poly Pred }
data RuleEnv = RuleEnv
{ superRules :: [Rule]
, instRules :: [Rule]
}
data Goal = Ev { goalName :: EvName, goalPred :: Pred }
data DGoal = DGoal [Ev] (Poly [Goal])
-- Sugar -----------------------------------------------------------------------
ruleHead :: Rule -> Pred
ruleHead r = poly (rulePred r)
tDom, tCod :: Type -> Type
tDom (_ `TApp` x `TApp` _) = x
tDom _ = bug "tDom" "Not a function type."
tCod (_ `TApp` _ `TApp` x) = x
tCod _ = bug "tCod" "Not a function type."
infixr `tFun`
infixr `kFun`
infixr `sFun`
-- Sorts
sSort = TCon SSort
sFun s1 s2 = TCon SFun `TApp` s1 `TApp` s2
-- Kinds (built in)
kFun k1 k2 = TCon (ConName "->") `TApp` k1 `TApp` k2
kType = TCon (ConName "Type")
kArea = TCon (ConName "Area")
kNat = TCon (ConName "Nat")
kLab = TCon (ConName "Label")
kPred = TCon (ConName "Pred")
qPrim x = Qual "Prims" x -- where prims are defined
qPrel x = Qual "Prelude" x
-- Types
tcFun = TCon (qPrim (ConName "->"))
tFun t1 t2 = tcFun `TApp` t1 `TApp` t2
tM t = TCon (qPrim (ConName "M")) `TApp` t
tTup xs = foldl TApp (TCon (Tup (fromIntegral (length xs)))) xs
tSub t c = TCon (subName t c)
XXX : argh
subName :: Name -> Name -> Name
subName b c = qual $ TSub (unqual b) (unqual c)
where unqual (Qual _ x) = x
unqual x = x
qual x = case b of
Qual q _ -> Qual q x
_ -> x
tARef n t = TCon (qPrim (ConName "ARef")) `TApp` n `TApp` t
tBit t = TCon (qPrim (ConName "Bit")) `TApp` t
tIx t = TCon (qPrim (ConName "Ix")) `TApp` t
tNat n = TCon (TNat n) -- built in
tLab l = TCon (TLab l) -- built in
tArray n a = TCon (qPrim (ConName "Array")) `TApp` n `TApp` a
tLE a = TCon (qPrim (ConName "LE")) `TApp` a
tBE a = TCon (qPrim (ConName "BE")) `TApp` a
tChar = TCon (qPrim (ConName "Char"))
tString = TCon (qPrim (ConName "String"))
tBool = TCon (qPrel (ConName "Bool"))
-- predicates/classes
cLiteral = qPrim (ConName "Literal")
cEq = qPrim (ConName "Eq")
cOrd = qPrim (ConName "Ord")
cNum = qPrim (ConName "Num")
cBitOps = qPrim (ConName "BitOps")
cBounded = qPrim (ConName "Bounded")
cAdd = qPrim (ConName ":+")
cTimes = qPrim (ConName ":*")
cExp2 = qPrim (ConName "Exp2")
cGCD = qPrim (ConName "GCD")
cDNat = qPrim (ConName "DNat")
cWidth = qPrim (ConName "Width")
cIndex = qPrim (ConName "Index")
cAlign = qPrim (ConName "Align")
cJoin = qPrim (ConName ":#")
cBitRep = qPrim (ConName "BitRep")
cBitData = qPrim (ConName "BitData")
cField = qPrim (ConName "Field")
cUpdField = qPrim (ConName "UpdField")
cSizeOf = qPrim (ConName "SizeOf")
cValIn = qPrim (ConName "ValIn")
cBytes = qPrim (ConName "Bytes")
cAreaDecl = qPrim (ConName "AreaDecl")
Schemas
mono t = Forall [] [] t
typeToPred :: Type -> Maybe Pred
typeToPred t = let (t',ts) = splitTApp t []
in case t' of
TCon c -> Just (CIn c ts)
TSyn _ _ t -> do CIn c ts1 <- typeToPred t
return (CIn c (ts1 ++ ts))
_ -> Nothing
predToType :: Pred -> Type
predToType (CIn c ts) = foldl TApp (TCon c) ts
splitTApp (TApp t1 t2) ts = splitTApp t1 (t2:ts)
splitTApp t ts = (t,ts)
-- Names
defName c f = DefaultVal c f
ifCheckName c = IfCheck c
The original name of the entry point ( see also AST / SIL )
mainName = Qual "Main" (VarName "main")
mainType = tM (tBit (tNat 0))
entryName = qPrel (VarName "$entry")
-- Pretty printing -------------------------------------------------------------
instance Show QName where show x = prShow x
instance Pr QName where pr (Q x) = pr x
instance Show Name where show x = prShow x
instance Pr Name where
pr (VarName x) = text x
pr (ConName x) = text x
-- pr (Qual _ x) = {-text m <> char '.' <>-} pr x
pr (Qual m x) = text m <> char '.' <> pr x
pr (Select l) = parens (char '.' <> pr l)
pr (Update l) = parens (pr l <> char '=')
pr (IfCheck c) = text "$if_" <> pr c
pr (DefaultVal c f) = text "$dfl_"<> pr c <> text "." <> pr f
pr SFun = text "(->)"
pr SSort = text "sort"
pr (TSub x y) = pr x <> char '\'' <> pr y
pr TSome = text "$SomeT"
pr (TNat n) = text (show n)
pr (TLab l) = pr l
pr (Tup 0) = text "()"
pr (Tup 1) = bug "prName" "1-tuple"
pr (Tup n) = parens (hsep (replicate (fromIntegral (n-1)) comma))
pr (Inst x xs) = pr x <> tris (commaSep (map pr xs))
prFun arr n a b = wrap (prn 1 a <+> text arr <+> prn 0 b)
where wrap
| n < 1 = id
| otherwise = parens
instance Show Type where show x = prShow x
instance Pr Type where
prn n (TCon SFun `TApp` t1 `TApp` t2) = prFun "--->" n t1 t2
prn n (TCon (ConName "->") `TApp` t1 `TApp` t2) = prFun "->" n t1 t2
prn n (TApp t1 t2)
| n < 2 = prn 1 t1 <+> prn 2 t2
prn _ (TSyn c [] _) = pr c
prn n (TSyn c ts _)
| n < 2 = pr c <+> hsep (map (prn 2) ts)
prn _ (TCon c) = pr c
prn _ (TFree x) = pr x
prn _ (TParens t) = parens (pr t)
prn _ (TInfix s op t) = parens (pr s <+> pr op <+> pr t)
prn _ t = parens (pr t)
instance Show TyVar where show x = prShow x
instance Pr TyVar where
pr x = char '?' <> pr (tyVarName x)
instance Show Pred where show x = prShow x
instance Pr Pred where
pr (CIn c ts) = pr c <+> hsep (map (prn 2) ts)
instance Pr t => Show (Poly t) where show x = prShow x
instance Pr t => Pr (Poly t) where
pr (Forall xs ps t) = prSchema xs ps (pr t)
prSchema xs [] d = withQs xs d
prSchema xs ps d = withQs xs (parens (hsep (punctuate comma (map pr ps)))
<+> text "=>" <+> d)
withQs [] t = t
withQs qs t = text "forall" <+> hsep (map prQ qs) <> char '.' <+> t
where
prQ v = parens (pr v <+> text "::" <+> pr (tyVarKind v))
instance Show RuleEnv where show = prShow
instance Pr RuleEnv where
pr p = prSuper $$ prInsts
where
prSuper = case superRules p of
[] -> text "-- No super class rules"
ps -> text "-- Super class rules --" $$ vcat (map pr ps)
prInsts = case instRules p of
[] -> text "-- No instance rules"
ps -> text "-- Intsnace rules --" $$ vcat (map pr ps)
instance Show Rule where show = prShow
instance Pr Rule where pr p = pr (rulePred p)
instance Show Goal where show x = prShow x
instance Pr Goal where
pr (Ev x p) = pr x <+> text "::" <+> pr p
instance Show DGoal where show x = prShow x
instance Pr DGoal where
pr (DGoal _ p) = prSchema (polyVars p) (polyPreds p)
$ case poly p of
[d] -> pr d
ds -> parens (commaSep (map pr ds))
| null | https://raw.githubusercontent.com/yav/hobbit/31414ba1188f4b39620c2553b45b9e4d4aa40169/src/AST/Type.hs | haskell | ^ User variables
^ User constructors
^ Qualified name
^ Record selector
^ record updator
^ An 'if' clause
^ Tuple constructors
Sort names
^ Function scpace
^ Sort
^ A natural number types
^ A label type
^ Bitdata constructor types
^ No empty kinds (Spec.hs)
Introduced during specialization.
^ Type application
^ Last type is expansion
Parsing
^ Suggested name
^ To get bound
^ The variable kind
| Note: the arguments in 'ruleProof' should match the 'rulePred'
Sugar -----------------------------------------------------------------------
Sorts
Kinds (built in)
where prims are defined
Types
built in
built in
predicates/classes
Names
Pretty printing -------------------------------------------------------------
pr (Qual _ x) = {-text m <> char '.' <>-} pr x | module AST.Type
( ModName, QName(..), Name(..)
, Type(..), Kind, TyVar(..), Pred(..), Poly(..), Schema, Rule(..), RuleEnv(..)
, Goal(..), DGoal(..), FunDep'
, sSort, sFun
, kType, kArea, kNat, kLab, kPred, kFun
, tcFun
, tBool, tString, tChar, tSub, subName
, tFun, tTup, tBit, tIx, tARef, tM
, tNat, tLab
, tLE, tBE, tArray
, cAdd, cTimes, cExp2, cGCD
, cDNat, cWidth, cIndex, cAlign
, cBitRep, cBitData, cJoin
, cLiteral, cEq, cOrd, cNum, cBitOps, cBounded
, cField, cUpdField, cSizeOf, cAreaDecl, cValIn, cBytes
, tDom, tCod
, defName, ifCheckName, mainName, mainType, entryName
, ruleHead
, qPrim, qPrel
, mono
, typeToPred, predToType, splitTApp
) where
import AST.Evidence
import Error
import PP
import Data.IORef
import Data.Word
type ModName = String
newtype QName = Q Name deriving (Eq,Ord)
^ A default ( ctr , field )
deriving (Eq,Ord)
data Type = TApp Type Type
| TCon Name
| TFree TyVar
| TParens Type
deriving (Eq)
type Kind = Type
data TyVar = TyVar
}
| TyUser
{ tyVarName :: Name }
instance Eq TyVar where
TyVar { tyVarPtr = x } == TyVar { tyVarPtr = y } = x == y
TyUser { tyVarName = x } == TyUser { tyVarName = y } = x == y
_ == _ = False
data Pred = CIn Name [Type]
data Poly p = Forall { polyVars :: [TyVar]
, polyPreds :: [Pred]
, poly :: p }
type Schema = Poly Type
type FunDep' = ([Int],[Int])
data Rule = Rule { ruleProof :: [Ev] -> Ev
, rulePred :: Poly Pred }
data RuleEnv = RuleEnv
{ superRules :: [Rule]
, instRules :: [Rule]
}
data Goal = Ev { goalName :: EvName, goalPred :: Pred }
data DGoal = DGoal [Ev] (Poly [Goal])
ruleHead :: Rule -> Pred
ruleHead r = poly (rulePred r)
tDom, tCod :: Type -> Type
tDom (_ `TApp` x `TApp` _) = x
tDom _ = bug "tDom" "Not a function type."
tCod (_ `TApp` _ `TApp` x) = x
tCod _ = bug "tCod" "Not a function type."
infixr `tFun`
infixr `kFun`
infixr `sFun`
sSort = TCon SSort
sFun s1 s2 = TCon SFun `TApp` s1 `TApp` s2
kFun k1 k2 = TCon (ConName "->") `TApp` k1 `TApp` k2
kType = TCon (ConName "Type")
kArea = TCon (ConName "Area")
kNat = TCon (ConName "Nat")
kLab = TCon (ConName "Label")
kPred = TCon (ConName "Pred")
qPrel x = Qual "Prelude" x
tcFun = TCon (qPrim (ConName "->"))
tFun t1 t2 = tcFun `TApp` t1 `TApp` t2
tM t = TCon (qPrim (ConName "M")) `TApp` t
tTup xs = foldl TApp (TCon (Tup (fromIntegral (length xs)))) xs
tSub t c = TCon (subName t c)
XXX : argh
subName :: Name -> Name -> Name
subName b c = qual $ TSub (unqual b) (unqual c)
where unqual (Qual _ x) = x
unqual x = x
qual x = case b of
Qual q _ -> Qual q x
_ -> x
tARef n t = TCon (qPrim (ConName "ARef")) `TApp` n `TApp` t
tBit t = TCon (qPrim (ConName "Bit")) `TApp` t
tIx t = TCon (qPrim (ConName "Ix")) `TApp` t
tArray n a = TCon (qPrim (ConName "Array")) `TApp` n `TApp` a
tLE a = TCon (qPrim (ConName "LE")) `TApp` a
tBE a = TCon (qPrim (ConName "BE")) `TApp` a
tChar = TCon (qPrim (ConName "Char"))
tString = TCon (qPrim (ConName "String"))
tBool = TCon (qPrel (ConName "Bool"))
cLiteral = qPrim (ConName "Literal")
cEq = qPrim (ConName "Eq")
cOrd = qPrim (ConName "Ord")
cNum = qPrim (ConName "Num")
cBitOps = qPrim (ConName "BitOps")
cBounded = qPrim (ConName "Bounded")
cAdd = qPrim (ConName ":+")
cTimes = qPrim (ConName ":*")
cExp2 = qPrim (ConName "Exp2")
cGCD = qPrim (ConName "GCD")
cDNat = qPrim (ConName "DNat")
cWidth = qPrim (ConName "Width")
cIndex = qPrim (ConName "Index")
cAlign = qPrim (ConName "Align")
cJoin = qPrim (ConName ":#")
cBitRep = qPrim (ConName "BitRep")
cBitData = qPrim (ConName "BitData")
cField = qPrim (ConName "Field")
cUpdField = qPrim (ConName "UpdField")
cSizeOf = qPrim (ConName "SizeOf")
cValIn = qPrim (ConName "ValIn")
cBytes = qPrim (ConName "Bytes")
cAreaDecl = qPrim (ConName "AreaDecl")
Schemas
mono t = Forall [] [] t
typeToPred :: Type -> Maybe Pred
typeToPred t = let (t',ts) = splitTApp t []
in case t' of
TCon c -> Just (CIn c ts)
TSyn _ _ t -> do CIn c ts1 <- typeToPred t
return (CIn c (ts1 ++ ts))
_ -> Nothing
predToType :: Pred -> Type
predToType (CIn c ts) = foldl TApp (TCon c) ts
splitTApp (TApp t1 t2) ts = splitTApp t1 (t2:ts)
splitTApp t ts = (t,ts)
defName c f = DefaultVal c f
ifCheckName c = IfCheck c
The original name of the entry point ( see also AST / SIL )
mainName = Qual "Main" (VarName "main")
mainType = tM (tBit (tNat 0))
entryName = qPrel (VarName "$entry")
instance Show QName where show x = prShow x
instance Pr QName where pr (Q x) = pr x
instance Show Name where show x = prShow x
instance Pr Name where
pr (VarName x) = text x
pr (ConName x) = text x
pr (Qual m x) = text m <> char '.' <> pr x
pr (Select l) = parens (char '.' <> pr l)
pr (Update l) = parens (pr l <> char '=')
pr (IfCheck c) = text "$if_" <> pr c
pr (DefaultVal c f) = text "$dfl_"<> pr c <> text "." <> pr f
pr SFun = text "(->)"
pr SSort = text "sort"
pr (TSub x y) = pr x <> char '\'' <> pr y
pr TSome = text "$SomeT"
pr (TNat n) = text (show n)
pr (TLab l) = pr l
pr (Tup 0) = text "()"
pr (Tup 1) = bug "prName" "1-tuple"
pr (Tup n) = parens (hsep (replicate (fromIntegral (n-1)) comma))
pr (Inst x xs) = pr x <> tris (commaSep (map pr xs))
prFun arr n a b = wrap (prn 1 a <+> text arr <+> prn 0 b)
where wrap
| n < 1 = id
| otherwise = parens
instance Show Type where show x = prShow x
instance Pr Type where
prn n (TCon SFun `TApp` t1 `TApp` t2) = prFun "--->" n t1 t2
prn n (TCon (ConName "->") `TApp` t1 `TApp` t2) = prFun "->" n t1 t2
prn n (TApp t1 t2)
| n < 2 = prn 1 t1 <+> prn 2 t2
prn _ (TSyn c [] _) = pr c
prn n (TSyn c ts _)
| n < 2 = pr c <+> hsep (map (prn 2) ts)
prn _ (TCon c) = pr c
prn _ (TFree x) = pr x
prn _ (TParens t) = parens (pr t)
prn _ (TInfix s op t) = parens (pr s <+> pr op <+> pr t)
prn _ t = parens (pr t)
instance Show TyVar where show x = prShow x
instance Pr TyVar where
pr x = char '?' <> pr (tyVarName x)
instance Show Pred where show x = prShow x
instance Pr Pred where
pr (CIn c ts) = pr c <+> hsep (map (prn 2) ts)
instance Pr t => Show (Poly t) where show x = prShow x
instance Pr t => Pr (Poly t) where
pr (Forall xs ps t) = prSchema xs ps (pr t)
prSchema xs [] d = withQs xs d
prSchema xs ps d = withQs xs (parens (hsep (punctuate comma (map pr ps)))
<+> text "=>" <+> d)
withQs [] t = t
withQs qs t = text "forall" <+> hsep (map prQ qs) <> char '.' <+> t
where
prQ v = parens (pr v <+> text "::" <+> pr (tyVarKind v))
instance Show RuleEnv where show = prShow
instance Pr RuleEnv where
pr p = prSuper $$ prInsts
where
prSuper = case superRules p of
[] -> text "-- No super class rules"
ps -> text "-- Super class rules --" $$ vcat (map pr ps)
prInsts = case instRules p of
[] -> text "-- No instance rules"
ps -> text "-- Intsnace rules --" $$ vcat (map pr ps)
instance Show Rule where show = prShow
instance Pr Rule where pr p = pr (rulePred p)
instance Show Goal where show x = prShow x
instance Pr Goal where
pr (Ev x p) = pr x <+> text "::" <+> pr p
instance Show DGoal where show x = prShow x
instance Pr DGoal where
pr (DGoal _ p) = prSchema (polyVars p) (polyPreds p)
$ case poly p of
[d] -> pr d
ds -> parens (commaSep (map pr ds))
|
6c083e11867ff82a21e49d035cfaa79c64a0941b71edf22bb7ef8fa3673c4be0 | CatalaLang/catala | compile_without_exceptions.ml | This file is part of the Catala compiler , a specification language for tax
and social benefits computation rules . Copyright ( C ) 2020 - 2022 Inria ,
contributor : Tixeuil < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
and social benefits computation rules. Copyright (C) 2020-2022 Inria,
contributor: Alain Delaët-Tixeuil <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
open Catala_utils
module D = Dcalc.Ast
module A = Ast
* The main idea around this pass is to compile Dcalc to Lcalc without using
[ raise EmptyError ] nor [ try _ with EmptyError - > _ ] . To do so , we use the
same technique as in rust or erlang to handle this kind of exceptions . Each
[ raise EmptyError ] will be translated as [ None ] and each
[ try e1 with EmtpyError - > e2 ] as
[ match e1 with | None - > e2 | Some x - > x ] .
When doing this naively , this requires to add matches and Some constructor
everywhere . We apply here an other technique where we generate what we call
` hoists ` . Hoists are expression whom could minimally [ raise EmptyError ] . For
instance in [ let x = < e1 , e2 , ... , en| e_just : - e_cons > * 3 in x + 1 ] , the
sub - expression [ < e1 , e2 , ... , en| e_just : - e_cons > ] can produce an empty
error . So we make a hoist with a new variable [ y ] linked to the Dcalc
expression [ < e1 , e2 , ... , en| e_just : - e_cons > ] , and we return as the
translated expression [ let x = y * 3 in x + 1 ] .
The compilation of expressions is found in the functions
[ translate_and_hoist ctx e ] and [ ctx e ] . Every
option - generating expression when calling [ translate_and_hoist ] will be
hoisted and later handled by the [ ] function . Every other
cases is found in the translate_and_hoist function .
[raise EmptyError] nor [try _ with EmptyError -> _]. To do so, we use the
same technique as in rust or erlang to handle this kind of exceptions. Each
[raise EmptyError] will be translated as [None] and each
[try e1 with EmtpyError -> e2] as
[match e1 with | None -> e2 | Some x -> x].
When doing this naively, this requires to add matches and Some constructor
everywhere. We apply here an other technique where we generate what we call
`hoists`. Hoists are expression whom could minimally [raise EmptyError]. For
instance in [let x = <e1, e2, ..., en| e_just :- e_cons> * 3 in x + 1], the
sub-expression [<e1, e2, ..., en| e_just :- e_cons>] can produce an empty
error. So we make a hoist with a new variable [y] linked to the Dcalc
expression [<e1, e2, ..., en| e_just :- e_cons>], and we return as the
translated expression [let x = y * 3 in x + 1].
The compilation of expressions is found in the functions
[translate_and_hoist ctx e] and [translate_expr ctx e]. Every
option-generating expression when calling [translate_and_hoist] will be
hoisted and later handled by the [translate_expr] function. Every other
cases is found in the translate_and_hoist function. *)
open Shared_ast
type 'm hoists = ('m A.expr, 'm D.expr) Var.Map.t
(** Hoists definition. It represent bindings between [A.Var.t] and [D.expr]. *)
type 'm info = { expr : 'm A.expr boxed; var : 'm A.expr Var.t; is_pure : bool }
* Information about each encontered Dcalc variable is stored inside a context
: what is the corresponding LCalc variable ; an expression corresponding to
the variable build correctly using Bindlib , and a boolean ` is_pure `
indicating whenever the variable can be an EmptyError and hence should be
matched ( false ) or if it never can be EmptyError ( true ) .
: what is the corresponding LCalc variable; an expression corresponding to
the variable build correctly using Bindlib, and a boolean `is_pure`
indicating whenever the variable can be an EmptyError and hence should be
matched (false) or if it never can be EmptyError (true). *)
let pp_info (fmt : Format.formatter) (info : 'm info) =
Format.fprintf fmt "{var: %a; is_pure: %b}" Print.var info.var info.is_pure
type 'm ctx = {
decl_ctx : decl_ctx;
vars : ('m D.expr, 'm info) Var.Map.t;
(** information context about variables in the current scope *)
}
let _pp_ctx (fmt : Format.formatter) (ctx : 'm ctx) =
let pp_binding
(fmt : Format.formatter)
((v, info) : 'm D.expr Var.t * 'm info) =
Format.fprintf fmt "%a: %a" Print.var v pp_info info
in
let pp_bindings =
Format.pp_print_list
~pp_sep:(fun fmt () -> Format.pp_print_string fmt "; ")
pp_binding
in
Format.fprintf fmt "@[<2>[%a]@]" pp_bindings (Var.Map.bindings ctx.vars)
* [ find ~info n ctx ] is a warpper to ocaml 's Map.find that handle errors in a
slightly better way .
slightly better way. *)
let find ?(info : string = "none") (n : 'm D.expr Var.t) (ctx : 'm ctx) :
'm info =
(* let _ = Format.asprintf "Searching for variable %a inside context %a"
Print.var n pp_ctx ctx |> Cli.debug_print in *)
try Var.Map.find n ctx.vars
with Not_found ->
Errors.raise_spanned_error Pos.no_pos
"Internal Error: Variable %a was not found in the current environment. \
Additional informations : %s."
Print.var n info
* [ add_var pos var is_pure ctx ] add to the context [ ctx ] the Dcalc variable
var , creating a unique corresponding variable in Lcalc , with the
corresponding expression , and the boolean is_pure . It is usefull for
debuging purposes as it printing each of the Dcalc / Lcalc variable pairs .
var, creating a unique corresponding variable in Lcalc, with the
corresponding expression, and the boolean is_pure. It is usefull for
debuging purposes as it printing each of the Dcalc/Lcalc variable pairs. *)
let add_var
(mark : 'm mark)
(var : 'm D.expr Var.t)
(is_pure : bool)
(ctx : 'm ctx) : 'm ctx =
let new_var = Var.make (Bindlib.name_of var) in
let expr = Expr.make_var new_var mark in
Cli.debug_print @@ Format.asprintf " D.%a |- > " Print.var var Print.var
new_var ;
new_var; *)
{
ctx with
vars =
Var.Map.update var
(fun _ -> Some { expr; var = new_var; is_pure })
ctx.vars;
}
* [ tau ' = translate_typ tau ] translate the a dcalc type into a lcalc type .
Since positions where there is thunked expressions is exactly where we will
put option expressions . Hence , the transformation simply reduce [ unit - > ' a ]
into [ ' a option ] recursivly . There is no polymorphism inside catala .
Since positions where there is thunked expressions is exactly where we will
put option expressions. Hence, the transformation simply reduce [unit -> 'a]
into ['a option] recursivly. There is no polymorphism inside catala. *)
let rec translate_typ (tau : typ) : typ =
(Fun.flip Marked.same_mark_as)
tau
begin
match Marked.unmark tau with
| TLit l -> TLit l
| TTuple ts -> TTuple (List.map translate_typ ts)
| TStruct s -> TStruct s
| TEnum en -> TEnum en
| TOption t -> TOption t
| TAny -> TAny
| TArray ts -> TArray (translate_typ ts)
(* catala is not polymorphic *)
| TArrow ([(TLit TUnit, _)], t2) -> TOption (translate_typ t2)
| TArrow (t1, t2) -> TArrow (List.map translate_typ t1, translate_typ t2)
end
* [ c = disjoint_union_maps cs ] Compute the disjoint union of multiple maps .
Raises an internal error if there is two identicals keys in differnts parts .
Raises an internal error if there is two identicals keys in differnts parts. *)
let disjoint_union_maps (pos : Pos.t) (cs : ('e, 'a) Var.Map.t list) :
('e, 'a) Var.Map.t =
let disjoint_union =
Var.Map.union (fun _ _ _ ->
Errors.raise_spanned_error pos
"Internal Error: Two supposed to be disjoints maps have one shared \
key.")
in
List.fold_left disjoint_union Var.Map.empty cs
* [ e ' = translate_and_hoist ctx e ] Translate the Dcalc expression e into an
expression in , given we translate each hoists correctly . It ensures
the equivalence between the execution of e and the execution of e ' are
equivalent in an environement where each variable v , where ( v , e_v ) is in
hoists , has the non - empty value in e_v .
expression in Lcalc, given we translate each hoists correctly. It ensures
the equivalence between the execution of e and the execution of e' are
equivalent in an environement where each variable v, where (v, e_v) is in
hoists, has the non-empty value in e_v. *)
let rec translate_and_hoist (ctx : 'm ctx) (e : 'm D.expr) :
'm A.expr boxed * 'm hoists =
let mark = Marked.get_mark e in
let pos = Expr.mark_pos mark in
match Marked.unmark e with
empty - producing / using terms . We hoist those . ( D.EVar in some cases ,
EApp(D.EVar _ , [ ELit LUnit ] ) , EDefault _ , ELit LEmptyDefault ) I 'm unsure
about assert .
EApp(D.EVar _, [ELit LUnit]), EDefault _, ELit LEmptyDefault) I'm unsure
about assert. *)
| EVar v ->
(* todo: for now, every unpure (such that [is_pure] is [false] in the
current context) is thunked, hence matched in the next case. This
assumption can change in the future, and this case is here for this
reason. *)
if not (find ~info:"search for a variable" v ctx).is_pure then
let v' = Var.make (Bindlib.name_of v) in
Cli.debug_print @@ Format.asprintf " Found an unpure variable % a ,
created a variable % a to replace it " Print.var v Print.var v ' ;
created a variable %a to replace it" Print.var v Print.var v'; *)
Expr.make_var v' mark, Var.Map.singleton v' e
else (find ~info:"should never happen" v ctx).expr, Var.Map.empty
| EApp { f = EVar v, p; args = [(ELit LUnit, _)] } ->
if not (find ~info:"search for a variable" v ctx).is_pure then
let v' = Var.make (Bindlib.name_of v) in
Cli.debug_print @@ Format.asprintf " Found an unpure variable % a ,
created a variable % a to replace it " Print.var v Print.var v ' ;
created a variable %a to replace it" Print.var v Print.var v'; *)
Expr.make_var v' mark, Var.Map.singleton v' (EVar v, p)
else
Errors.raise_spanned_error (Expr.pos e)
"Internal error: an pure variable was found in an unpure environment."
| EDefault _ ->
let v' = Var.make "default_term" in
Expr.make_var v' mark, Var.Map.singleton v' e
| ELit LEmptyError ->
let v' = Var.make "empty_litteral" in
Expr.make_var v' mark, Var.Map.singleton v' e
(* This one is a very special case. It transform an unpure expression
environement to a pure expression. *)
| EErrorOnEmpty arg ->
(* [ match arg with | None -> raise NoValueProvided | Some v -> {{ v }} ] *)
let silent_var = Var.make "_" in
let x = Var.make "non_empty_argument" in
let arg' = translate_expr ctx arg in
let rty = Expr.maybe_ty mark in
( A.make_matchopt_with_abs_arms arg'
(Expr.make_abs [| silent_var |]
(Expr.eraise NoValueProvided (Expr.with_ty mark rty))
[rty] pos)
(Expr.make_abs [| x |] (Expr.make_var x mark) [rty] pos),
Var.Map.empty )
(* pure terms *)
| ELit
((LBool _ | LInt _ | LRat _ | LMoney _ | LUnit | LDate _ | LDuration _) as
l) ->
Expr.elit l mark, Var.Map.empty
| EIfThenElse { cond; etrue; efalse } ->
let cond', h1 = translate_and_hoist ctx cond in
let etrue', h2 = translate_and_hoist ctx etrue in
let efalse', h3 = translate_and_hoist ctx efalse in
let e' = Expr.eifthenelse cond' etrue' efalse' mark in
(*(* equivalent code : *) let e' = let+ cond' = cond' and+ etrue' = etrue'
and+ efalse' = efalse' in (A.EIfThenElse (cond', etrue', efalse'), pos)
in *)
e', disjoint_union_maps (Expr.pos e) [h1; h2; h3]
| EAssert e1 ->
(* same behavior as in the ICFP paper: if e1 is empty, then no error is
raised. *)
let e1', h1 = translate_and_hoist ctx e1 in
Expr.eassert e1' mark, h1
| EAbs { binder; tys } ->
let vars, body = Bindlib.unmbind binder in
let ctx, lc_vars =
ArrayLabels.fold_right vars ~init:(ctx, []) ~f:(fun var (ctx, lc_vars) ->
(* we suppose the invariant that when applying a function, its
arguments cannot be of the type "option".
The code should behave correctly in the without this assumption if
we put here an is_pure=false, but the types are more compilcated.
(unimplemented for now) *)
let ctx = add_var mark var true ctx in
let lc_var = (find var ctx).var in
ctx, lc_var :: lc_vars)
in
let lc_vars = Array.of_list lc_vars in
here we take the guess that if we can not build the closure because one of
the variable is empty , then we can not build the function .
the variable is empty, then we cannot build the function. *)
let new_body, hoists = translate_and_hoist ctx body in
let new_binder = Expr.bind lc_vars new_body in
Expr.eabs new_binder (List.map translate_typ tys) mark, hoists
| EApp { f = e1; args } ->
let e1', h1 = translate_and_hoist ctx e1 in
let args', h_args =
args |> List.map (translate_and_hoist ctx) |> List.split
in
let hoists = disjoint_union_maps (Expr.pos e) (h1 :: h_args) in
let e' = Expr.eapp e1' args' mark in
e', hoists
| EStruct { name; fields } ->
let fields', h_fields =
StructField.Map.fold
(fun field e (fields, hoists) ->
let e, h = translate_and_hoist ctx e in
StructField.Map.add field e fields, h :: hoists)
fields
(StructField.Map.empty, [])
in
let hoists = disjoint_union_maps (Expr.pos e) h_fields in
Expr.estruct name fields' mark, hoists
| EStructAccess { name; e = e1; field } ->
let e1', hoists = translate_and_hoist ctx e1 in
let e1' = Expr.estructaccess e1' field name mark in
e1', hoists
| ETuple es ->
let hoists, es' =
List.fold_left_map
(fun hoists e ->
let e, h = translate_and_hoist ctx e in
h :: hoists, e)
[] es
in
Expr.etuple es' mark, disjoint_union_maps (Expr.pos e) hoists
| ETupleAccess { e = e1; index; size } ->
let e1', hoists = translate_and_hoist ctx e1 in
let e1' = Expr.etupleaccess e1' index size mark in
e1', hoists
| EInj { name; e = e1; cons } ->
let e1', hoists = translate_and_hoist ctx e1 in
let e1' = Expr.einj e1' cons name mark in
e1', hoists
| EMatch { name; e = e1; cases } ->
let e1', h1 = translate_and_hoist ctx e1 in
let cases', h_cases =
EnumConstructor.Map.fold
(fun cons e (cases, hoists) ->
let e', h = translate_and_hoist ctx e in
EnumConstructor.Map.add cons e' cases, h :: hoists)
cases
(EnumConstructor.Map.empty, [])
in
let hoists = disjoint_union_maps (Expr.pos e) (h1 :: h_cases) in
let e' = Expr.ematch e1' name cases' mark in
e', hoists
| EArray es ->
let es', hoists = es |> List.map (translate_and_hoist ctx) |> List.split in
Expr.earray es' mark, disjoint_union_maps (Expr.pos e) hoists
| EOp { op; tys } -> Expr.eop (Operator.translate op) tys mark, Var.Map.empty
and translate_expr ?(append_esome = true) (ctx : 'm ctx) (e : 'm D.expr) :
'm A.expr boxed =
let e', hoists = translate_and_hoist ctx e in
let hoists = Var.Map.bindings hoists in
let _pos = Marked.get_mark e in
(* build the hoists *)
Cli.debug_print @@ Format.asprintf " hoist for the expression : [ % a ] "
( Format.pp_print_list Print.var ) ( List.map fst hoists ) ;
(Format.pp_print_list Print.var) (List.map fst hoists); *)
ListLabels.fold_left hoists
~init:(if append_esome then A.make_some e' else e')
~f:(fun acc (v, (hoist, mark_hoist)) ->
(* Cli.debug_print @@ Format.asprintf "hoist using A.%a" Print.var v; *)
let pos = Expr.mark_pos mark_hoist in
let c' : 'm A.expr boxed =
match hoist with
(* Here we have to handle only the cases appearing in hoists, as defined
the [translate_and_hoist] function. *)
| EVar v -> (find ~info:"should never happen" v ctx).expr
| EDefault { excepts; just; cons } ->
let excepts' = List.map (translate_expr ctx) excepts in
let just' = translate_expr ctx just in
let cons' = translate_expr ctx cons in
(* calls handle_option. *)
Expr.make_app
(Expr.make_var (Var.translate A.handle_default_opt) mark_hoist)
[Expr.earray excepts' mark_hoist; just'; cons']
pos
| ELit LEmptyError -> A.make_none mark_hoist
| EAssert arg ->
let arg' = translate_expr ctx arg in
(* [ match arg with | None -> raise NoValueProvided | Some v -> assert
{{ v }} ] *)
let silent_var = Var.make "_" in
let x = Var.make "assertion_argument" in
A.make_matchopt_with_abs_arms arg'
(Expr.make_abs [| silent_var |]
(Expr.eraise NoValueProvided mark_hoist)
[TAny, Expr.mark_pos mark_hoist]
pos)
(Expr.make_abs [| x |]
(Expr.eassert (Expr.make_var x mark_hoist) mark_hoist)
[TAny, Expr.mark_pos mark_hoist]
pos)
| _ ->
Errors.raise_spanned_error (Expr.mark_pos mark_hoist)
"Internal Error: An term was found in a position where it should \
not be"
in
(* [ match {{ c' }} with | None -> None | Some {{ v }} -> {{ acc }} end
] *)
(* Cli.debug_print @@ Format.asprintf "build matchopt using %a" Print.var
v; *)
A.make_matchopt pos v
(TAny, Expr.mark_pos mark_hoist)
c' (A.make_none mark_hoist) acc)
(** [translate_scope_let ctx lets] translates a chain of scope-level
    let-bindings from Dcalc to Lcalc. Each binding registers its variable in
    [ctx] (with its purity flag) before translating the rest of the chain,
    so that [translate_expr] can later decide whether the variable must be
    matched as an option. Sub-scope variable definitions get two special
    cases; every other binding kind is handled uniformly at the end. *)
let rec translate_scope_let (ctx : 'm ctx) (lets : 'm D.expr scope_body_expr) :
    'm A.expr scope_body_expr Bindlib.box =
  match lets with
  | Result e ->
    (* End of the chain: the scope result itself. Translated with
       [~append_esome:false] so the returned value is not wrapped in Some. *)
    Bindlib.box_apply
      (fun e -> Result e)
      (Expr.Box.lift (translate_expr ~append_esome:false ctx e))
  | ScopeLet
      {
        scope_let_kind = SubScopeVarDefinition;
        scope_let_typ = typ;
        scope_let_expr = EAbs { binder; _ }, emark;
        scope_let_next = next;
        scope_let_pos = pos;
      } ->
    (* special case : the subscope variable is thunked (context i/o). We remove
       this thunking. *)
    (* The abstraction's (unit) parameter is discarded: only the body of the
       thunk is kept and translated. *)
    let _, expr = Bindlib.unmbind binder in
    let var_is_pure = true in
    let var, next = Bindlib.unbind next in
    (* Cli.debug_print @@ Format.asprintf "unbinding %a" Print.var var; *)
    let vmark = Expr.with_ty emark ~pos typ in
    let ctx' = add_var vmark var var_is_pure ctx in
    let new_var = (find ~info:"variable that was just created" var ctx').var in
    let new_next = translate_scope_let ctx' next in
    Bindlib.box_apply2
      (fun new_expr new_next ->
        ScopeLet
          {
            scope_let_kind = SubScopeVarDefinition;
            scope_let_typ = translate_typ typ;
            scope_let_expr = new_expr;
            scope_let_next = new_next;
            scope_let_pos = pos;
          })
      (Expr.Box.lift (translate_expr ctx ~append_esome:false expr))
      (Bindlib.bind_var new_var new_next)
  | ScopeLet
      {
        scope_let_kind = SubScopeVarDefinition;
        scope_let_typ = typ;
        scope_let_expr = (EErrorOnEmpty _, emark) as expr;
        scope_let_next = next;
        scope_let_pos = pos;
      } ->
    (* special case: regular input to the subscope *)
    let var_is_pure = true in
    let var, next = Bindlib.unbind next in
    (* Cli.debug_print @@ Format.asprintf "unbinding %a" Print.var var; *)
    let vmark = Expr.with_ty emark ~pos typ in
    let ctx' = add_var vmark var var_is_pure ctx in
    let new_var = (find ~info:"variable that was just created" var ctx').var in
    Bindlib.box_apply2
      (fun new_expr new_next ->
        ScopeLet
          {
            scope_let_kind = SubScopeVarDefinition;
            scope_let_typ = translate_typ typ;
            scope_let_expr = new_expr;
            scope_let_next = new_next;
            scope_let_pos = pos;
          })
      (Expr.Box.lift (translate_expr ctx ~append_esome:false expr))
      (Bindlib.bind_var new_var (translate_scope_let ctx' next))
  | ScopeLet
      {
        scope_let_kind = SubScopeVarDefinition;
        scope_let_pos = pos;
        scope_let_expr = expr;
        _;
      } ->
    (* Any other shape of sub-scope variable definition breaks the invariant
       assumed by the two cases above. *)
    Errors.raise_spanned_error pos
      "Internal Error: found an SubScopeVarDefinition that does not satisfy \
       the invariants when translating Dcalc to Lcalc without exceptions: \
       @[<hov 2>%a@]"
      (Expr.format ctx.decl_ctx) expr
  | ScopeLet
      {
        scope_let_kind = kind;
        scope_let_typ = typ;
        scope_let_expr = expr;
        scope_let_next = next;
        scope_let_pos = pos;
      } ->
    (* Generic case: purity depends on the binding kind (see below); the
       bound expression is translated without Some-wrapping, and the variable
       is registered before translating the tail of the chain. *)
    let var_is_pure =
      match kind with
      | DestructuringInputStruct -> (
        (* Here, we have to distinguish between context and input variables. We
           can do so by looking at the typ of the destructuring: if it's
           thunked, then the variable is context. If it's not thunked, it's a
           regular input. *)
        match Marked.unmark typ with
        | TArrow ([(TLit TUnit, _)], _) -> false
        | _ -> true)
      | ScopeVarDefinition | SubScopeVarDefinition | CallingSubScope
      | DestructuringSubScopeResults | Assertion ->
        true
    in
    let var, next = Bindlib.unbind next in
    (* Cli.debug_print @@ Format.asprintf "unbinding %a" Print.var var; *)
    let vmark = Expr.with_ty (Marked.get_mark expr) ~pos typ in
    let ctx' = add_var vmark var var_is_pure ctx in
    let new_var = (find ~info:"variable that was just created" var ctx').var in
    Bindlib.box_apply2
      (fun new_expr new_next ->
        ScopeLet
          {
            scope_let_kind = kind;
            scope_let_typ = translate_typ typ;
            scope_let_expr = new_expr;
            scope_let_next = new_next;
            scope_let_pos = pos;
          })
      (Expr.Box.lift (translate_expr ctx ~append_esome:false expr))
      (Bindlib.bind_var new_var (translate_scope_let ctx' next))
(** [translate_scope_body scope_pos ctx body] translates a scope body from
    Dcalc to Lcalc: the scope's input variable is registered in [ctx] as pure,
    and the chain of bindings is handled by {!translate_scope_let}. The
    input/output struct names are carried over unchanged. *)
let translate_scope_body
    (scope_pos : Pos.t)
    (ctx : 'm ctx)
    (body : 'm D.expr scope_body) : 'm A.expr scope_body Bindlib.box =
  match body with
  | {
   scope_body_expr = result;
   scope_body_input_struct = input_struct;
   scope_body_output_struct = output_struct;
  } ->
    let v, lets = Bindlib.unbind result in
    let vmark =
      (* Mark for the scope input variable: reuse the mark (type) of the first
         binding in the chain, but override its position with the scope's own
         position. *)
      let m =
        match lets with
        | Result e | ScopeLet { scope_let_expr = e; _ } -> Marked.get_mark e
      in
      Expr.map_mark (fun _ -> scope_pos) (fun ty -> ty) m
    in
    (* The scope input is always pure (never an empty error). *)
    let ctx' = add_var vmark v true ctx in
    let v' = (find ~info:"variable that was just created" v ctx').var in
    Bindlib.box_apply
      (fun new_expr ->
        {
          scope_body_expr = new_expr;
          scope_body_input_struct = input_struct;
          scope_body_output_struct = output_struct;
        })
      (Bindlib.bind_var v' (translate_scope_let ctx' lets))
(** [translate_code_items ctx scopes] translates every top-level item of the
    program: top-level definitions are translated directly (and registered in
    the context as pure variables), while scope definitions are delegated to
    {!translate_scope_body}, using the scope name's position for error
    reporting. *)
let translate_code_items (ctx : 'm ctx) (scopes : 'm D.expr code_item_list) :
    'm A.expr code_item_list Bindlib.box =
  let _ctx, scopes =
    Scope.fold_map
      ~f:
        (fun ctx var -> function
          | Topdef (name, ty, e) ->
            (* Top-level definitions are pure, hence [true] in [add_var];
               translated without Some-wrapping. *)
            ( add_var (Marked.get_mark e) var true ctx,
              Bindlib.box_apply
                (fun e -> Topdef (name, ty, e))
                (Expr.Box.lift (translate_expr ~append_esome:false ctx e)) )
          | ScopeDef (scope_name, scope_body) ->
            ( ctx,
              let scope_pos = Marked.get_mark (ScopeName.get_info scope_name) in
              Bindlib.box_apply
                (fun body -> ScopeDef (scope_name, body))
                (translate_scope_body scope_pos ctx scope_body) ))
      ~varf:Var.translate ctx scopes
  in
  scopes
(** Entry point of the pass: translates a whole Dcalc program into an Lcalc
    program that encodes empty errors with the [option] enum instead of
    exceptions. The [option] enum is added to the enum context, and the field
    types of every scope input struct are rewritten with {!translate_typ}
    (so thunked context inputs become options).

    Fix: the commented-out debug block inside the struct rewriting had lost
    its [(*] opener (leaving raw tokens and a dangling [*)], a syntax error);
    the comment delimiters are restored so the code compiles again. *)
let translate_program (prgm : 'm D.program) : 'm A.program =
  (* Collect the input struct of every scope: these are the only structs whose
     field types need rewriting. *)
  let inputs_structs =
    Scope.fold_left prgm.code_items ~init:[] ~f:(fun acc def _ ->
        match def with
        | ScopeDef (_, body) -> body.scope_body_input_struct :: acc
        | Topdef _ -> acc)
  in
  (* Cli.debug_print @@ Format.asprintf "List of structs to modify: [%a]"
     (Format.pp_print_list D.StructName.format_t) inputs_structs; *)
  (* Register the [option] enum so generated code can refer to it. *)
  let decl_ctx =
    {
      prgm.decl_ctx with
      ctx_enums =
        prgm.decl_ctx.ctx_enums
        |> EnumName.Map.add A.option_enum A.option_enum_config;
    }
  in
  let decl_ctx =
    {
      decl_ctx with
      ctx_structs =
        prgm.decl_ctx.ctx_structs
        |> StructName.Map.mapi (fun n str ->
               if List.mem n inputs_structs then
                 StructField.Map.map translate_typ str
                 (* Cli.debug_print @@ Format.asprintf "Input type: %a"
                    (Print.typ decl_ctx) tau; Cli.debug_print @@ Format.asprintf
                    "Output type: %a" (Print.typ decl_ctx) (translate_typ
                    tau); *)
               else str);
    }
  in
  let code_items =
    Bindlib.unbox
      (translate_code_items { decl_ctx; vars = Var.Map.empty } prgm.code_items)
  in
  { code_items; decl_ctx }
| null | https://raw.githubusercontent.com/CatalaLang/catala/5bd140ae5fb2a997a578b9cd67a932c4a8733526/compiler/lcalc/compile_without_exceptions.ml | ocaml | * Hoists definition. It represent bindings between [A.Var.t] and [D.expr].
* information context about variables in the current scope
let _ = Format.asprintf "Searching for variable %a inside context %a"
Print.var n pp_ctx ctx |> Cli.debug_print in
catala is not polymorphic
todo: for now, every unpure (such that [is_pure] is [false] in the
current context) is thunked, hence matched in the next case. This
assumption can change in the future, and this case is here for this
reason.
This one is a very special case. It transform an unpure expression
environement to a pure expression.
[ match arg with | None -> raise NoValueProvided | Some v -> {{ v }} ]
pure terms
(* equivalent code :
same behavior as in the ICFP paper: if e1 is empty, then no error is
raised.
we suppose the invariant that when applying a function, its
arguments cannot be of the type "option".
The code should behave correctly in the without this assumption if
we put here an is_pure=false, but the types are more compilcated.
(unimplemented for now)
build the hoists
Cli.debug_print @@ Format.asprintf "hoist using A.%a" Print.var v;
Here we have to handle only the cases appearing in hoists, as defined
the [translate_and_hoist] function.
calls handle_option.
[ match arg with | None -> raise NoValueProvided | Some v -> assert
{{ v }} ]
[ match {{ c' }} with | None -> None | Some {{ v }} -> {{ acc }} end
]
Cli.debug_print @@ Format.asprintf "build matchopt using %a" Print.var
v;
special case : the subscope variable is thunked (context i/o). We remove
this thunking.
Cli.debug_print @@ Format.asprintf "unbinding %a" Print.var var;
special case: regular input to the subscope
Cli.debug_print @@ Format.asprintf "unbinding %a" Print.var var;
Here, we have to distinguish between context and input variables. We
can do so by looking at the typ of the destructuring: if it's
thunked, then the variable is context. If it's not thunked, it's a
regular input.
Cli.debug_print @@ Format.asprintf "unbinding %a" Print.var var;
Cli.debug_print @@ Format.asprintf "List of structs to modify: [%a]"
(Format.pp_print_list D.StructName.format_t) inputs_structs; | This file is part of the Catala compiler , a specification language for tax
and social benefits computation rules . Copyright ( C ) 2020 - 2022 Inria ,
contributor : Tixeuil < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
use this file except in compliance with the License . You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the
License for the specific language governing permissions and limitations under
the License .
and social benefits computation rules. Copyright (C) 2020-2022 Inria,
contributor: Alain Delaët-Tixeuil <>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. *)
open Catala_utils
module D = Dcalc.Ast
module A = Ast
* The main idea around this pass is to compile Dcalc to Lcalc without using
[ raise EmptyError ] nor [ try _ with EmptyError - > _ ] . To do so , we use the
same technique as in rust or erlang to handle this kind of exceptions . Each
[ raise EmptyError ] will be translated as [ None ] and each
[ try e1 with EmtpyError - > e2 ] as
[ match e1 with | None - > e2 | Some x - > x ] .
When doing this naively , this requires to add matches and Some constructor
everywhere . We apply here an other technique where we generate what we call
` hoists ` . Hoists are expression whom could minimally [ raise EmptyError ] . For
instance in [ let x = < e1 , e2 , ... , en| e_just : - e_cons > * 3 in x + 1 ] , the
sub - expression [ < e1 , e2 , ... , en| e_just : - e_cons > ] can produce an empty
error . So we make a hoist with a new variable [ y ] linked to the Dcalc
expression [ < e1 , e2 , ... , en| e_just : - e_cons > ] , and we return as the
translated expression [ let x = y * 3 in x + 1 ] .
The compilation of expressions is found in the functions
[ translate_and_hoist ctx e ] and [ ctx e ] . Every
option - generating expression when calling [ translate_and_hoist ] will be
hoisted and later handled by the [ ] function . Every other
cases is found in the translate_and_hoist function .
[raise EmptyError] nor [try _ with EmptyError -> _]. To do so, we use the
same technique as in rust or erlang to handle this kind of exceptions. Each
[raise EmptyError] will be translated as [None] and each
[try e1 with EmtpyError -> e2] as
[match e1 with | None -> e2 | Some x -> x].
When doing this naively, this requires to add matches and Some constructor
everywhere. We apply here an other technique where we generate what we call
`hoists`. Hoists are expression whom could minimally [raise EmptyError]. For
instance in [let x = <e1, e2, ..., en| e_just :- e_cons> * 3 in x + 1], the
sub-expression [<e1, e2, ..., en| e_just :- e_cons>] can produce an empty
error. So we make a hoist with a new variable [y] linked to the Dcalc
expression [<e1, e2, ..., en| e_just :- e_cons>], and we return as the
translated expression [let x = y * 3 in x + 1].
The compilation of expressions is found in the functions
[translate_and_hoist ctx e] and [translate_expr ctx e]. Every
option-generating expression when calling [translate_and_hoist] will be
hoisted and later handled by the [translate_expr] function. Every other
cases is found in the translate_and_hoist function. *)
open Shared_ast
type 'm hoists = ('m A.expr, 'm D.expr) Var.Map.t
type 'm info = { expr : 'm A.expr boxed; var : 'm A.expr Var.t; is_pure : bool }
* Information about each encontered Dcalc variable is stored inside a context
: what is the corresponding LCalc variable ; an expression corresponding to
the variable build correctly using Bindlib , and a boolean ` is_pure `
indicating whenever the variable can be an EmptyError and hence should be
matched ( false ) or if it never can be EmptyError ( true ) .
: what is the corresponding LCalc variable; an expression corresponding to
the variable build correctly using Bindlib, and a boolean `is_pure`
indicating whenever the variable can be an EmptyError and hence should be
matched (false) or if it never can be EmptyError (true). *)
let pp_info (fmt : Format.formatter) (info : 'm info) =
Format.fprintf fmt "{var: %a; is_pure: %b}" Print.var info.var info.is_pure
type 'm ctx = {
decl_ctx : decl_ctx;
vars : ('m D.expr, 'm info) Var.Map.t;
}
let _pp_ctx (fmt : Format.formatter) (ctx : 'm ctx) =
let pp_binding
(fmt : Format.formatter)
((v, info) : 'm D.expr Var.t * 'm info) =
Format.fprintf fmt "%a: %a" Print.var v pp_info info
in
let pp_bindings =
Format.pp_print_list
~pp_sep:(fun fmt () -> Format.pp_print_string fmt "; ")
pp_binding
in
Format.fprintf fmt "@[<2>[%a]@]" pp_bindings (Var.Map.bindings ctx.vars)
* [ find ~info n ctx ] is a warpper to ocaml 's Map.find that handle errors in a
slightly better way .
slightly better way. *)
let find ?(info : string = "none") (n : 'm D.expr Var.t) (ctx : 'm ctx) :
'm info =
try Var.Map.find n ctx.vars
with Not_found ->
Errors.raise_spanned_error Pos.no_pos
"Internal Error: Variable %a was not found in the current environment. \
Additional informations : %s."
Print.var n info
* [ add_var pos var is_pure ctx ] add to the context [ ctx ] the Dcalc variable
var , creating a unique corresponding variable in Lcalc , with the
corresponding expression , and the boolean is_pure . It is usefull for
debuging purposes as it printing each of the Dcalc / Lcalc variable pairs .
var, creating a unique corresponding variable in Lcalc, with the
corresponding expression, and the boolean is_pure. It is usefull for
debuging purposes as it printing each of the Dcalc/Lcalc variable pairs. *)
let add_var
(mark : 'm mark)
(var : 'm D.expr Var.t)
(is_pure : bool)
(ctx : 'm ctx) : 'm ctx =
let new_var = Var.make (Bindlib.name_of var) in
let expr = Expr.make_var new_var mark in
Cli.debug_print @@ Format.asprintf " D.%a |- > " Print.var var Print.var
new_var ;
new_var; *)
{
ctx with
vars =
Var.Map.update var
(fun _ -> Some { expr; var = new_var; is_pure })
ctx.vars;
}
* [ tau ' = translate_typ tau ] translate the a dcalc type into a lcalc type .
Since positions where there is thunked expressions is exactly where we will
put option expressions . Hence , the transformation simply reduce [ unit - > ' a ]
into [ ' a option ] recursivly . There is no polymorphism inside catala .
Since positions where there is thunked expressions is exactly where we will
put option expressions. Hence, the transformation simply reduce [unit -> 'a]
into ['a option] recursivly. There is no polymorphism inside catala. *)
let rec translate_typ (tau : typ) : typ =
(Fun.flip Marked.same_mark_as)
tau
begin
match Marked.unmark tau with
| TLit l -> TLit l
| TTuple ts -> TTuple (List.map translate_typ ts)
| TStruct s -> TStruct s
| TEnum en -> TEnum en
| TOption t -> TOption t
| TAny -> TAny
| TArray ts -> TArray (translate_typ ts)
| TArrow ([(TLit TUnit, _)], t2) -> TOption (translate_typ t2)
| TArrow (t1, t2) -> TArrow (List.map translate_typ t1, translate_typ t2)
end
* [ c = disjoint_union_maps cs ] Compute the disjoint union of multiple maps .
Raises an internal error if there is two identicals keys in differnts parts .
Raises an internal error if there is two identicals keys in differnts parts. *)
let disjoint_union_maps (pos : Pos.t) (cs : ('e, 'a) Var.Map.t list) :
('e, 'a) Var.Map.t =
let disjoint_union =
Var.Map.union (fun _ _ _ ->
Errors.raise_spanned_error pos
"Internal Error: Two supposed to be disjoints maps have one shared \
key.")
in
List.fold_left disjoint_union Var.Map.empty cs
* [ e ' = translate_and_hoist ctx e ] Translate the Dcalc expression e into an
expression in , given we translate each hoists correctly . It ensures
the equivalence between the execution of e and the execution of e ' are
equivalent in an environement where each variable v , where ( v , e_v ) is in
hoists , has the non - empty value in e_v .
expression in Lcalc, given we translate each hoists correctly. It ensures
the equivalence between the execution of e and the execution of e' are
equivalent in an environement where each variable v, where (v, e_v) is in
hoists, has the non-empty value in e_v. *)
let rec translate_and_hoist (ctx : 'm ctx) (e : 'm D.expr) :
'm A.expr boxed * 'm hoists =
let mark = Marked.get_mark e in
let pos = Expr.mark_pos mark in
match Marked.unmark e with
empty - producing / using terms . We hoist those . ( D.EVar in some cases ,
EApp(D.EVar _ , [ ELit LUnit ] ) , EDefault _ , ELit LEmptyDefault ) I 'm unsure
about assert .
EApp(D.EVar _, [ELit LUnit]), EDefault _, ELit LEmptyDefault) I'm unsure
about assert. *)
| EVar v ->
if not (find ~info:"search for a variable" v ctx).is_pure then
let v' = Var.make (Bindlib.name_of v) in
Cli.debug_print @@ Format.asprintf " Found an unpure variable % a ,
created a variable % a to replace it " Print.var v Print.var v ' ;
created a variable %a to replace it" Print.var v Print.var v'; *)
Expr.make_var v' mark, Var.Map.singleton v' e
else (find ~info:"should never happen" v ctx).expr, Var.Map.empty
| EApp { f = EVar v, p; args = [(ELit LUnit, _)] } ->
if not (find ~info:"search for a variable" v ctx).is_pure then
let v' = Var.make (Bindlib.name_of v) in
Cli.debug_print @@ Format.asprintf " Found an unpure variable % a ,
created a variable % a to replace it " Print.var v Print.var v ' ;
created a variable %a to replace it" Print.var v Print.var v'; *)
Expr.make_var v' mark, Var.Map.singleton v' (EVar v, p)
else
Errors.raise_spanned_error (Expr.pos e)
"Internal error: an pure variable was found in an unpure environment."
| EDefault _ ->
let v' = Var.make "default_term" in
Expr.make_var v' mark, Var.Map.singleton v' e
| ELit LEmptyError ->
let v' = Var.make "empty_litteral" in
Expr.make_var v' mark, Var.Map.singleton v' e
| EErrorOnEmpty arg ->
let silent_var = Var.make "_" in
let x = Var.make "non_empty_argument" in
let arg' = translate_expr ctx arg in
let rty = Expr.maybe_ty mark in
( A.make_matchopt_with_abs_arms arg'
(Expr.make_abs [| silent_var |]
(Expr.eraise NoValueProvided (Expr.with_ty mark rty))
[rty] pos)
(Expr.make_abs [| x |] (Expr.make_var x mark) [rty] pos),
Var.Map.empty )
| ELit
((LBool _ | LInt _ | LRat _ | LMoney _ | LUnit | LDate _ | LDuration _) as
l) ->
Expr.elit l mark, Var.Map.empty
| EIfThenElse { cond; etrue; efalse } ->
let cond', h1 = translate_and_hoist ctx cond in
let etrue', h2 = translate_and_hoist ctx etrue in
let efalse', h3 = translate_and_hoist ctx efalse in
let e' = Expr.eifthenelse cond' etrue' efalse' mark in
and+ efalse' = efalse' in (A.EIfThenElse (cond', etrue', efalse'), pos)
in *)
e', disjoint_union_maps (Expr.pos e) [h1; h2; h3]
| EAssert e1 ->
let e1', h1 = translate_and_hoist ctx e1 in
Expr.eassert e1' mark, h1
| EAbs { binder; tys } ->
let vars, body = Bindlib.unmbind binder in
let ctx, lc_vars =
ArrayLabels.fold_right vars ~init:(ctx, []) ~f:(fun var (ctx, lc_vars) ->
let ctx = add_var mark var true ctx in
let lc_var = (find var ctx).var in
ctx, lc_var :: lc_vars)
in
let lc_vars = Array.of_list lc_vars in
here we take the guess that if we can not build the closure because one of
the variable is empty , then we can not build the function .
the variable is empty, then we cannot build the function. *)
let new_body, hoists = translate_and_hoist ctx body in
let new_binder = Expr.bind lc_vars new_body in
Expr.eabs new_binder (List.map translate_typ tys) mark, hoists
| EApp { f = e1; args } ->
let e1', h1 = translate_and_hoist ctx e1 in
let args', h_args =
args |> List.map (translate_and_hoist ctx) |> List.split
in
let hoists = disjoint_union_maps (Expr.pos e) (h1 :: h_args) in
let e' = Expr.eapp e1' args' mark in
e', hoists
| EStruct { name; fields } ->
let fields', h_fields =
StructField.Map.fold
(fun field e (fields, hoists) ->
let e, h = translate_and_hoist ctx e in
StructField.Map.add field e fields, h :: hoists)
fields
(StructField.Map.empty, [])
in
let hoists = disjoint_union_maps (Expr.pos e) h_fields in
Expr.estruct name fields' mark, hoists
| EStructAccess { name; e = e1; field } ->
let e1', hoists = translate_and_hoist ctx e1 in
let e1' = Expr.estructaccess e1' field name mark in
e1', hoists
| ETuple es ->
let hoists, es' =
List.fold_left_map
(fun hoists e ->
let e, h = translate_and_hoist ctx e in
h :: hoists, e)
[] es
in
Expr.etuple es' mark, disjoint_union_maps (Expr.pos e) hoists
| ETupleAccess { e = e1; index; size } ->
let e1', hoists = translate_and_hoist ctx e1 in
let e1' = Expr.etupleaccess e1' index size mark in
e1', hoists
| EInj { name; e = e1; cons } ->
let e1', hoists = translate_and_hoist ctx e1 in
let e1' = Expr.einj e1' cons name mark in
e1', hoists
| EMatch { name; e = e1; cases } ->
let e1', h1 = translate_and_hoist ctx e1 in
let cases', h_cases =
EnumConstructor.Map.fold
(fun cons e (cases, hoists) ->
let e', h = translate_and_hoist ctx e in
EnumConstructor.Map.add cons e' cases, h :: hoists)
cases
(EnumConstructor.Map.empty, [])
in
let hoists = disjoint_union_maps (Expr.pos e) (h1 :: h_cases) in
let e' = Expr.ematch e1' name cases' mark in
e', hoists
| EArray es ->
let es', hoists = es |> List.map (translate_and_hoist ctx) |> List.split in
Expr.earray es' mark, disjoint_union_maps (Expr.pos e) hoists
| EOp { op; tys } -> Expr.eop (Operator.translate op) tys mark, Var.Map.empty
and translate_expr ?(append_esome = true) (ctx : 'm ctx) (e : 'm D.expr) :
'm A.expr boxed =
let e', hoists = translate_and_hoist ctx e in
let hoists = Var.Map.bindings hoists in
let _pos = Marked.get_mark e in
Cli.debug_print @@ Format.asprintf " hoist for the expression : [ % a ] "
( Format.pp_print_list Print.var ) ( List.map fst hoists ) ;
(Format.pp_print_list Print.var) (List.map fst hoists); *)
ListLabels.fold_left hoists
~init:(if append_esome then A.make_some e' else e')
~f:(fun acc (v, (hoist, mark_hoist)) ->
let pos = Expr.mark_pos mark_hoist in
let c' : 'm A.expr boxed =
match hoist with
| EVar v -> (find ~info:"should never happen" v ctx).expr
| EDefault { excepts; just; cons } ->
let excepts' = List.map (translate_expr ctx) excepts in
let just' = translate_expr ctx just in
let cons' = translate_expr ctx cons in
Expr.make_app
(Expr.make_var (Var.translate A.handle_default_opt) mark_hoist)
[Expr.earray excepts' mark_hoist; just'; cons']
pos
| ELit LEmptyError -> A.make_none mark_hoist
| EAssert arg ->
let arg' = translate_expr ctx arg in
let silent_var = Var.make "_" in
let x = Var.make "assertion_argument" in
A.make_matchopt_with_abs_arms arg'
(Expr.make_abs [| silent_var |]
(Expr.eraise NoValueProvided mark_hoist)
[TAny, Expr.mark_pos mark_hoist]
pos)
(Expr.make_abs [| x |]
(Expr.eassert (Expr.make_var x mark_hoist) mark_hoist)
[TAny, Expr.mark_pos mark_hoist]
pos)
| _ ->
Errors.raise_spanned_error (Expr.mark_pos mark_hoist)
"Internal Error: An term was found in a position where it should \
not be"
in
A.make_matchopt pos v
(TAny, Expr.mark_pos mark_hoist)
c' (A.make_none mark_hoist) acc)
let rec translate_scope_let (ctx : 'm ctx) (lets : 'm D.expr scope_body_expr) :
'm A.expr scope_body_expr Bindlib.box =
match lets with
| Result e ->
Bindlib.box_apply
(fun e -> Result e)
(Expr.Box.lift (translate_expr ~append_esome:false ctx e))
| ScopeLet
{
scope_let_kind = SubScopeVarDefinition;
scope_let_typ = typ;
scope_let_expr = EAbs { binder; _ }, emark;
scope_let_next = next;
scope_let_pos = pos;
} ->
let _, expr = Bindlib.unmbind binder in
let var_is_pure = true in
let var, next = Bindlib.unbind next in
let vmark = Expr.with_ty emark ~pos typ in
let ctx' = add_var vmark var var_is_pure ctx in
let new_var = (find ~info:"variable that was just created" var ctx').var in
let new_next = translate_scope_let ctx' next in
Bindlib.box_apply2
(fun new_expr new_next ->
ScopeLet
{
scope_let_kind = SubScopeVarDefinition;
scope_let_typ = translate_typ typ;
scope_let_expr = new_expr;
scope_let_next = new_next;
scope_let_pos = pos;
})
(Expr.Box.lift (translate_expr ctx ~append_esome:false expr))
(Bindlib.bind_var new_var new_next)
| ScopeLet
{
scope_let_kind = SubScopeVarDefinition;
scope_let_typ = typ;
scope_let_expr = (EErrorOnEmpty _, emark) as expr;
scope_let_next = next;
scope_let_pos = pos;
} ->
let var_is_pure = true in
let var, next = Bindlib.unbind next in
let vmark = Expr.with_ty emark ~pos typ in
let ctx' = add_var vmark var var_is_pure ctx in
let new_var = (find ~info:"variable that was just created" var ctx').var in
Bindlib.box_apply2
(fun new_expr new_next ->
ScopeLet
{
scope_let_kind = SubScopeVarDefinition;
scope_let_typ = translate_typ typ;
scope_let_expr = new_expr;
scope_let_next = new_next;
scope_let_pos = pos;
})
(Expr.Box.lift (translate_expr ctx ~append_esome:false expr))
(Bindlib.bind_var new_var (translate_scope_let ctx' next))
| ScopeLet
{
scope_let_kind = SubScopeVarDefinition;
scope_let_pos = pos;
scope_let_expr = expr;
_;
} ->
Errors.raise_spanned_error pos
"Internal Error: found an SubScopeVarDefinition that does not satisfy \
the invariants when translating Dcalc to Lcalc without exceptions: \
@[<hov 2>%a@]"
(Expr.format ctx.decl_ctx) expr
| ScopeLet
{
scope_let_kind = kind;
scope_let_typ = typ;
scope_let_expr = expr;
scope_let_next = next;
scope_let_pos = pos;
} ->
let var_is_pure =
match kind with
| DestructuringInputStruct -> (
match Marked.unmark typ with
| TArrow ([(TLit TUnit, _)], _) -> false
| _ -> true)
| ScopeVarDefinition | SubScopeVarDefinition | CallingSubScope
| DestructuringSubScopeResults | Assertion ->
true
in
let var, next = Bindlib.unbind next in
let vmark = Expr.with_ty (Marked.get_mark expr) ~pos typ in
let ctx' = add_var vmark var var_is_pure ctx in
let new_var = (find ~info:"variable that was just created" var ctx').var in
Bindlib.box_apply2
(fun new_expr new_next ->
ScopeLet
{
scope_let_kind = kind;
scope_let_typ = translate_typ typ;
scope_let_expr = new_expr;
scope_let_next = new_next;
scope_let_pos = pos;
})
(Expr.Box.lift (translate_expr ctx ~append_esome:false expr))
(Bindlib.bind_var new_var (translate_scope_let ctx' next))
let translate_scope_body
(scope_pos : Pos.t)
(ctx : 'm ctx)
(body : 'm D.expr scope_body) : 'm A.expr scope_body Bindlib.box =
match body with
| {
scope_body_expr = result;
scope_body_input_struct = input_struct;
scope_body_output_struct = output_struct;
} ->
let v, lets = Bindlib.unbind result in
let vmark =
let m =
match lets with
| Result e | ScopeLet { scope_let_expr = e; _ } -> Marked.get_mark e
in
Expr.map_mark (fun _ -> scope_pos) (fun ty -> ty) m
in
let ctx' = add_var vmark v true ctx in
let v' = (find ~info:"variable that was just created" v ctx').var in
Bindlib.box_apply
(fun new_expr ->
{
scope_body_expr = new_expr;
scope_body_input_struct = input_struct;
scope_body_output_struct = output_struct;
})
(Bindlib.bind_var v' (translate_scope_let ctx' lets))
let translate_code_items (ctx : 'm ctx) (scopes : 'm D.expr code_item_list) :
'm A.expr code_item_list Bindlib.box =
let _ctx, scopes =
Scope.fold_map
~f:
(fun ctx var -> function
| Topdef (name, ty, e) ->
( add_var (Marked.get_mark e) var true ctx,
Bindlib.box_apply
(fun e -> Topdef (name, ty, e))
(Expr.Box.lift (translate_expr ~append_esome:false ctx e)) )
| ScopeDef (scope_name, scope_body) ->
( ctx,
let scope_pos = Marked.get_mark (ScopeName.get_info scope_name) in
Bindlib.box_apply
(fun body -> ScopeDef (scope_name, body))
(translate_scope_body scope_pos ctx scope_body) ))
~varf:Var.translate ctx scopes
in
scopes
let translate_program (prgm : 'm D.program) : 'm A.program =
let inputs_structs =
Scope.fold_left prgm.code_items ~init:[] ~f:(fun acc def _ ->
match def with
| ScopeDef (_, body) -> body.scope_body_input_struct :: acc
| Topdef _ -> acc)
in
let decl_ctx =
{
prgm.decl_ctx with
ctx_enums =
prgm.decl_ctx.ctx_enums
|> EnumName.Map.add A.option_enum A.option_enum_config;
}
in
let decl_ctx =
{
decl_ctx with
ctx_structs =
prgm.decl_ctx.ctx_structs
|> StructName.Map.mapi (fun n str ->
if List.mem n inputs_structs then
StructField.Map.map translate_typ str
Cli.debug_print @@ Format.asprintf " Input type : % a "
( Print.typ decl_ctx ) tau ; Cli.debug_print @@ Format.asprintf
" Output type : % a " ( Print.typ decl_ctx ) ( translate_typ
tau ) ;
(Print.typ decl_ctx) tau; Cli.debug_print @@ Format.asprintf
"Output type: %a" (Print.typ decl_ctx) (translate_typ
tau); *)
else str);
}
in
let code_items =
Bindlib.unbox
(translate_code_items { decl_ctx; vars = Var.Map.empty } prgm.code_items)
in
{ code_items; decl_ctx }
|
57a48f9dc976308847fcf4a6353bcbdf4a95f69d67556fa2cbcf977b8c9bf48e | ocsigen/ojwidgets | ojw_hammer_swipe.ml | Hammer . JS - v1.0.6dev - 2013 - 07 - 31
Binding by
Copyright ( c ) 2013 < > ;
Licensed under the MIT license
Binding by Arnaud Parant
Copyright (c) 2013 Jorik Tangelder <>;
Licensed under the MIT license
*)
(* Fake type to hide js obj type: an opaque handle to the underlying JS
   carousel object. The [int Js.t] payload is never used as an integer; it
   only keeps the real JS object type out of the OCaml interface. *)
type t = int Js.t
(* Builds the carousel DOM structure from [panes_list] — one <li class="hsw_pane">
   per pane inside a <ul class="hsw_conteneur">, wrapped in a <div id="carousel">
   appended to <body> — and returns the JS Carousel object driving it. *)
let create panes_list =
  let panes_ul = Dom_html.createUl Dom_html.document in
  panes_ul##className <- Js.string "hsw_conteneur";
  let append_pane pane =
    let item = Dom_html.createLi Dom_html.document in
    item##className <- Js.string "hsw_pane";
    Dom.appendChild item pane;
    Dom.appendChild panes_ul item
  in
  List.iter append_pane panes_list;
  let container = Dom_html.createDiv Dom_html.document in
  container##id <- Js.string "carousel";
  Dom.appendChild container panes_ul;
  Dom.appendChild Dom_html.document##body container;
  (* A jQuery handle (Ojquery.jQelt) would be preferable, but the JS side only
     accepts a selector string, so the object is instantiated directly. *)
  Js.Unsafe.eval_string "new Carousel('#carousel')"
(* Invokes the JS [init] method of the carousel object. *)
let init carousel =
  ignore (Js.Unsafe.meth_call carousel "init" [||])

(* Returns the raw JS result of the carousel's [getCurrentPane] method
   (presumably the index of the displayed pane — confirm against the JS side). *)
let get_current_pane carousel =
  Js.Unsafe.meth_call carousel "getCurrentPane" [||]

(* Invokes the JS [showPane] method with [index]. *)
let show_pane carousel index =
  let idx = Js.Unsafe.inject index in
  ignore (Js.Unsafe.meth_call carousel "showPane" [| idx |])

(* Invokes the JS [next] method (move one pane forward). *)
let next carousel =
  ignore (Js.Unsafe.meth_call carousel "next" [||])

(* Invokes the JS [prev] method (move one pane back). *)
let prev carousel =
  ignore (Js.Unsafe.meth_call carousel "prev" [||])
| null | https://raw.githubusercontent.com/ocsigen/ojwidgets/4be2233980bdd1cae187c749bd27ddbfff389880/src/ojw_hammer_swipe.ml | ocaml | Fake type to hide js obj type
let jq_carousel = Ojquery.jQelt dom_carousel in
* it will be better to use jq_carousel, but js code do not allow it. | Hammer . JS - v1.0.6dev - 2013 - 07 - 31
Binding by
Copyright ( c ) 2013 < > ;
Licensed under the MIT license
Binding by Arnaud Parant
Copyright (c) 2013 Jorik Tangelder <>;
Licensed under the MIT license
*)
type t = int Js.t
let create panes_list =
let ul_elt = Dom_html.createUl Dom_html.document in
ul_elt##className <- Js.string "hsw_conteneur";
let action e =
let li_elt = Dom_html.createLi Dom_html.document in
li_elt##className <- Js.string "hsw_pane";
Dom.appendChild li_elt e;
Dom.appendChild ul_elt li_elt
in
List.iter action panes_list;
let carousel_elt = Dom_html.createDiv Dom_html.document in
carousel_elt##id <- Js.string "carousel";
Dom.appendChild carousel_elt ul_elt;
Dom.appendChild Dom_html.document##body carousel_elt;
let carousel = Js.Unsafe.eval_string "new Carousel('#carousel')" in
carousel
let init carousel =
ignore (Js.Unsafe.meth_call carousel "init" [||])
let get_current_pane carousel =
Js.Unsafe.meth_call carousel "getCurrentPane" [||]
let show_pane carousel index =
let idx = Js.Unsafe.inject index in
ignore (Js.Unsafe.meth_call carousel "showPane" [| idx |])
let next carousel =
ignore (Js.Unsafe.meth_call carousel "next" [||])
let prev carousel =
ignore (Js.Unsafe.meth_call carousel "prev" [||])
|
6963352ad6556848057f50538b76f86a68a49349fa970f1db91dc155b5845241 | onyx-platform/onyx | queryable_state_manager.clj | (ns onyx.peer.queryable-state-manager
(:require [clojure.core.async :refer [chan close! poll! >!!]]
[onyx.state.serializers.utils]
[com.stuartsierra.component :as component]
[onyx.peer.window-state :as ws]
[onyx.state.protocol.db :as db]
[onyx.peer.grouping :as g]
[taoensso.timbre :refer [fatal info]])
(:import [java.util.concurrent.locks LockSupport]))
(def required-event-keys
[:onyx.core/job-id :onyx.core/task
:onyx.core/slot-id :onyx.core/task-map
:onyx.core/windows :onyx.core/triggers])
(defn state-key [replica-version event]
[(:onyx.core/job-id event)
(:onyx.core/task event)
(:onyx.core/slot-id event)
replica-version])
(defmulti process-store
(fn [[cmd] _ _]
;(println "CMD" cmd)
cmd))
(defn remove-db [state k-rem]
(swap! state
(fn [m]
(->> m
(remove (fn [[k v]]
(= k k-rem)))
(into {})))))
(defn add-new-db [st event replica-version peer-config exported]
(let [serializers (onyx.state.serializers.utils/event->state-serializers event)]
(assoc st
(state-key replica-version event)
{:state-indices (ws/state-indices event)
:idx->trigger (into {}
(map (fn [[idx t]]
[idx (:trigger t)])
(:trigger-coders serializers)))
:idx->window (into {}
(map (fn [[idx w]]
[idx (:window w)])
(:window-coders serializers)))
:grouped? (g/grouped-task? (:onyx.core/task-map event))
:db (db/open-db-reader peer-config exported serializers)})))
(defmethod process-store :created-db
[[_ replica-version event exported] state peer-config]
(swap! state add-new-db event replica-version peer-config exported))
(defmethod process-store :drop-job-dbs
[[_ deallocated] state peer-config]
(run! (fn [[job-id replica-version]]
(->> @state
(filter (fn [[[j _ _ r] _]]
(and (= job-id j)
(= replica-version r))))
(run! (fn [[k store]]
(remove-db state k)
(db/close! (:db store))))))
deallocated))
(defn processing-loop [peer-config shutdown state ch]
(try (loop []
(when-not @shutdown
(if-let [cmd (poll! ch)]
(process-store cmd state peer-config)
(LockSupport/parkNanos (* 100 1000000)))
(recur)))
(catch Throwable t
(info t "Error in OnyxStateStoreGroup loop."))))
(defrecord OnyxStateStoreGroup [peer-config ch state shutdown]
component/Lifecycle
(start [this]
(let [shutdown (atom false)
state (atom {})
ch (chan 1000)
fut (future (processing-loop peer-config shutdown state ch))]
(assoc this
:fut fut
:shutdown shutdown
:ch ch
:state state)))
(stop [this]
(close! ch)
(reset! shutdown true)
(future-cancel (:fut this))
(assoc this :ch nil :state nil :state nil :fut nil)))
(defn new-state-store-group [peer-config]
(map->OnyxStateStoreGroup {:peer-config peer-config}))
| null | https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/src/onyx/peer/queryable_state_manager.clj | clojure | (println "CMD" cmd) | (ns onyx.peer.queryable-state-manager
(:require [clojure.core.async :refer [chan close! poll! >!!]]
[onyx.state.serializers.utils]
[com.stuartsierra.component :as component]
[onyx.peer.window-state :as ws]
[onyx.state.protocol.db :as db]
[onyx.peer.grouping :as g]
[taoensso.timbre :refer [fatal info]])
(:import [java.util.concurrent.locks LockSupport]))
(def required-event-keys
[:onyx.core/job-id :onyx.core/task
:onyx.core/slot-id :onyx.core/task-map
:onyx.core/windows :onyx.core/triggers])
(defn state-key [replica-version event]
[(:onyx.core/job-id event)
(:onyx.core/task event)
(:onyx.core/slot-id event)
replica-version])
(defmulti process-store
(fn [[cmd] _ _]
cmd))
(defn remove-db [state k-rem]
(swap! state
(fn [m]
(->> m
(remove (fn [[k v]]
(= k k-rem)))
(into {})))))
(defn add-new-db [st event replica-version peer-config exported]
(let [serializers (onyx.state.serializers.utils/event->state-serializers event)]
(assoc st
(state-key replica-version event)
{:state-indices (ws/state-indices event)
:idx->trigger (into {}
(map (fn [[idx t]]
[idx (:trigger t)])
(:trigger-coders serializers)))
:idx->window (into {}
(map (fn [[idx w]]
[idx (:window w)])
(:window-coders serializers)))
:grouped? (g/grouped-task? (:onyx.core/task-map event))
:db (db/open-db-reader peer-config exported serializers)})))
(defmethod process-store :created-db
[[_ replica-version event exported] state peer-config]
(swap! state add-new-db event replica-version peer-config exported))
(defmethod process-store :drop-job-dbs
[[_ deallocated] state peer-config]
(run! (fn [[job-id replica-version]]
(->> @state
(filter (fn [[[j _ _ r] _]]
(and (= job-id j)
(= replica-version r))))
(run! (fn [[k store]]
(remove-db state k)
(db/close! (:db store))))))
deallocated))
(defn processing-loop [peer-config shutdown state ch]
(try (loop []
(when-not @shutdown
(if-let [cmd (poll! ch)]
(process-store cmd state peer-config)
(LockSupport/parkNanos (* 100 1000000)))
(recur)))
(catch Throwable t
(info t "Error in OnyxStateStoreGroup loop."))))
(defrecord OnyxStateStoreGroup [peer-config ch state shutdown]
component/Lifecycle
(start [this]
(let [shutdown (atom false)
state (atom {})
ch (chan 1000)
fut (future (processing-loop peer-config shutdown state ch))]
(assoc this
:fut fut
:shutdown shutdown
:ch ch
:state state)))
(stop [this]
(close! ch)
(reset! shutdown true)
(future-cancel (:fut this))
(assoc this :ch nil :state nil :state nil :fut nil)))
(defn new-state-store-group [peer-config]
(map->OnyxStateStoreGroup {:peer-config peer-config}))
|
8d4bf0c0e9a59e59639a22e38d1ebd43d8fdf70cf10b21ab04c2ba44874b33f0 | yogthos/config | project.clj | (defproject yogthos/config "1.2.0"
:description "library for managing configuration using environment variables/EDN configuration files"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.3"]])
| null | https://raw.githubusercontent.com/yogthos/config/62f39a5a7adc934787d6a57f4829791f54fe01ba/project.clj | clojure | (defproject yogthos/config "1.2.0"
:description "library for managing configuration using environment variables/EDN configuration files"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.3"]])
| |
f4b8c42bafc5e2235c80a7e2bb8881c8d039879f29ba7f6fe791ba0f499dfd1b | slyrus/clem | macros.lisp |
(in-package :clem)
(defmacro def-unary-op (name op type-1 accumulator-type &key suffix)
(let ((element-type-1 (element-type (find-class `,type-1)))
(accumulator-element-type (element-type (find-class `,accumulator-type))))
`(progn
(defmethod ,(make-intern (concatenate 'string name "-range" suffix))
((m ,type-1) startr endr startc endc &key in-place)
(destructuring-bind (mr mc) (dim m)
(if in-place
(with-typed-mref (m ,element-type-1)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref m i j)
(,op (mref m i j)))))
m)
(let ((p (make-instance ',accumulator-type :rows mr :cols mc)))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (p ,accumulator-element-type)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref p i j)
(,op (mref m i j)))))))
p))))
(defmethod ,(make-intern (concatenate 'string name suffix))
((m ,type-1) &key in-place)
(if in-place
(with-typed-mref (m ,element-type-1)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref m i)
(,op (row-major-mref m i))))
m)
(let ((p (make-instance ',accumulator-type :dimensions (matrix-dimensions m))))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (p ,accumulator-element-type)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref p i)
(,op (row-major-mref m i))))))
p))))))
(defmacro def-binary-op (name op type-1 type-2 accumulator-type &key suffix (allow-in-place t))
(let ((element-type-1 (element-type (find-class `,type-1)))
(element-type-2 (element-type (find-class `,type-2)))
(accumulator-element-type (element-type (find-class `,accumulator-type))))
`(progn
(defmethod ,(make-intern (concatenate 'string name "-range" suffix))
((m ,type-1) (n ,type-2) startr endr startc endc &key in-place)
(destructuring-bind (mr mc) (dim m)
(if in-place
,(if allow-in-place
`(with-typed-mref (m ,element-type-1)
(with-typed-mref (n ,element-type-2)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref m i j)
(,op (mref m i j) (mref n i j))))))
m)
`(error 'matrix-argument-error
:format-control
"in-place operation not allowed (~S of ~S and ~S"
:format-arguments (list '+ ',type-1 ',type-2)))
(let ((p (make-instance ',accumulator-type :rows mr :cols mc)))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (p ,accumulator-element-type)
(with-typed-mref (n ,element-type-2)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref p i j)
(,op (mref m i j) (mref n i j))))))))
p))))
(defmethod ,(make-intern (concatenate 'string name suffix))
((m ,type-1) (n ,type-2) &key in-place)
(if in-place
,(if allow-in-place
`(with-typed-mref (m ,element-type-1)
(with-typed-mref (n ,element-type-2)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref m i)
(,op (row-major-mref m i) (row-major-mref n i)))))
m)
`(error 'matrix-argument-error
:format-control
"in-place operation not allowed (~S of ~S and ~S"
:format-arguments (list '+ ',type-1 ',type-2)))
(let ((p (make-instance ',accumulator-type :dimensions (matrix-dimensions m))))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (n ,element-type-2)
(with-typed-mref (p ,accumulator-element-type)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref p i)
(,op (row-major-mref m i) (row-major-mref n i)))))))
p))))))
| null | https://raw.githubusercontent.com/slyrus/clem/5eb055bb3f45840b24fd44825b975aa36bd6d97c/src/macros.lisp | lisp |
(in-package :clem)
(defmacro def-unary-op (name op type-1 accumulator-type &key suffix)
(let ((element-type-1 (element-type (find-class `,type-1)))
(accumulator-element-type (element-type (find-class `,accumulator-type))))
`(progn
(defmethod ,(make-intern (concatenate 'string name "-range" suffix))
((m ,type-1) startr endr startc endc &key in-place)
(destructuring-bind (mr mc) (dim m)
(if in-place
(with-typed-mref (m ,element-type-1)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref m i j)
(,op (mref m i j)))))
m)
(let ((p (make-instance ',accumulator-type :rows mr :cols mc)))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (p ,accumulator-element-type)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref p i j)
(,op (mref m i j)))))))
p))))
(defmethod ,(make-intern (concatenate 'string name suffix))
((m ,type-1) &key in-place)
(if in-place
(with-typed-mref (m ,element-type-1)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref m i)
(,op (row-major-mref m i))))
m)
(let ((p (make-instance ',accumulator-type :dimensions (matrix-dimensions m))))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (p ,accumulator-element-type)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref p i)
(,op (row-major-mref m i))))))
p))))))
(defmacro def-binary-op (name op type-1 type-2 accumulator-type &key suffix (allow-in-place t))
(let ((element-type-1 (element-type (find-class `,type-1)))
(element-type-2 (element-type (find-class `,type-2)))
(accumulator-element-type (element-type (find-class `,accumulator-type))))
`(progn
(defmethod ,(make-intern (concatenate 'string name "-range" suffix))
((m ,type-1) (n ,type-2) startr endr startc endc &key in-place)
(destructuring-bind (mr mc) (dim m)
(if in-place
,(if allow-in-place
`(with-typed-mref (m ,element-type-1)
(with-typed-mref (n ,element-type-2)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref m i j)
(,op (mref m i j) (mref n i j))))))
m)
`(error 'matrix-argument-error
:format-control
"in-place operation not allowed (~S of ~S and ~S"
:format-arguments (list '+ ',type-1 ',type-2)))
(let ((p (make-instance ',accumulator-type :rows mr :cols mc)))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (p ,accumulator-element-type)
(with-typed-mref (n ,element-type-2)
(do ((i startr (1+ i)))
((> i endr))
(declare (dynamic-extent i) (type fixnum i))
(do ((j startc (1+ j)))
((> j endc))
(declare (dynamic-extent j) (type fixnum j))
(setf (mref p i j)
(,op (mref m i j) (mref n i j))))))))
p))))
(defmethod ,(make-intern (concatenate 'string name suffix))
((m ,type-1) (n ,type-2) &key in-place)
(if in-place
,(if allow-in-place
`(with-typed-mref (m ,element-type-1)
(with-typed-mref (n ,element-type-2)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref m i)
(,op (row-major-mref m i) (row-major-mref n i)))))
m)
`(error 'matrix-argument-error
:format-control
"in-place operation not allowed (~S of ~S and ~S"
:format-arguments (list '+ ',type-1 ',type-2)))
(let ((p (make-instance ',accumulator-type :dimensions (matrix-dimensions m))))
(with-typed-mref (m ,element-type-1)
(with-typed-mref (n ,element-type-2)
(with-typed-mref (p ,accumulator-element-type)
(loop for i from 0 below (matrix-total-size m)
do (setf (row-major-mref p i)
(,op (row-major-mref m i) (row-major-mref n i)))))))
p))))))
| |
bb42a70dc0b0c1d725038ac278bc2fbd148adb0f4d2aee4c08052e000232b7dc | haskell/haskell-language-server | GhcIde.hs | # LANGUAGE DuplicateRecordFields #
# LANGUAGE OverloadedStrings #
| Exposes the ghcide features as an HLS plugin
module Development.IDE.Plugin.HLS.GhcIde
(
descriptors
, Log(..)
) where
import Control.Monad.IO.Class
import Development.IDE
import Development.IDE.LSP.HoverDefinition
import qualified Development.IDE.LSP.Notifications as Notifications
import Development.IDE.LSP.Outline
import qualified Development.IDE.Plugin.Completions as Completions
import qualified Development.IDE.Plugin.TypeLenses as TypeLenses
import Ide.Types
import Language.LSP.Server (LspM)
import Language.LSP.Types
import Text.Regex.TDFA.Text ()
data Log
= LogNotifications Notifications.Log
| LogCompletions Completions.Log
| LogTypeLenses TypeLenses.Log
deriving Show
instance Pretty Log where
pretty = \case
LogNotifications log -> pretty log
LogCompletions log -> pretty log
LogTypeLenses log -> pretty log
descriptors :: Recorder (WithPriority Log) -> [PluginDescriptor IdeState]
descriptors recorder =
[ descriptor "ghcide-hover-and-symbols",
Completions.descriptor (cmapWithPrio LogCompletions recorder) "ghcide-completions",
TypeLenses.descriptor (cmapWithPrio LogTypeLenses recorder) "ghcide-type-lenses",
Notifications.descriptor (cmapWithPrio LogNotifications recorder) "ghcide-core"
]
-- ---------------------------------------------------------------------
descriptor :: PluginId -> PluginDescriptor IdeState
descriptor plId = (defaultPluginDescriptor plId)
{ pluginHandlers = mkPluginHandler STextDocumentHover hover'
<> mkPluginHandler STextDocumentDocumentSymbol symbolsProvider
<> mkPluginHandler STextDocumentDefinition (\ide _ DefinitionParams{..} ->
gotoDefinition ide TextDocumentPositionParams{..})
<> mkPluginHandler STextDocumentTypeDefinition (\ide _ TypeDefinitionParams{..} ->
gotoTypeDefinition ide TextDocumentPositionParams{..})
<> mkPluginHandler STextDocumentDocumentHighlight (\ide _ DocumentHighlightParams{..} ->
documentHighlight ide TextDocumentPositionParams{..})
<> mkPluginHandler STextDocumentReferences (\ide _ params -> references ide params)
<> mkPluginHandler SWorkspaceSymbol (\ide _ params -> wsSymbols ide params),
pluginConfigDescriptor = defaultConfigDescriptor
}
-- ---------------------------------------------------------------------
hover' :: IdeState -> PluginId -> HoverParams -> LspM c (Either ResponseError (Maybe Hover))
hover' ideState _ HoverParams{..} = do
AZ
hover ideState TextDocumentPositionParams{..}
-- ---------------------------------------------------------------------
symbolsProvider :: IdeState -> PluginId -> DocumentSymbolParams -> LspM c (Either ResponseError (List DocumentSymbol |? List SymbolInformation))
symbolsProvider ide _ params = moduleOutline ide params
-- ---------------------------------------------------------------------
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/6f5a73507f8d9266a486feaf8695c052362b9b95/ghcide/src/Development/IDE/Plugin/HLS/GhcIde.hs | haskell | ---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
--------------------------------------------------------------------- | # LANGUAGE DuplicateRecordFields #
# LANGUAGE OverloadedStrings #
| Exposes the ghcide features as an HLS plugin
module Development.IDE.Plugin.HLS.GhcIde
(
descriptors
, Log(..)
) where
import Control.Monad.IO.Class
import Development.IDE
import Development.IDE.LSP.HoverDefinition
import qualified Development.IDE.LSP.Notifications as Notifications
import Development.IDE.LSP.Outline
import qualified Development.IDE.Plugin.Completions as Completions
import qualified Development.IDE.Plugin.TypeLenses as TypeLenses
import Ide.Types
import Language.LSP.Server (LspM)
import Language.LSP.Types
import Text.Regex.TDFA.Text ()
data Log
= LogNotifications Notifications.Log
| LogCompletions Completions.Log
| LogTypeLenses TypeLenses.Log
deriving Show
instance Pretty Log where
pretty = \case
LogNotifications log -> pretty log
LogCompletions log -> pretty log
LogTypeLenses log -> pretty log
descriptors :: Recorder (WithPriority Log) -> [PluginDescriptor IdeState]
descriptors recorder =
[ descriptor "ghcide-hover-and-symbols",
Completions.descriptor (cmapWithPrio LogCompletions recorder) "ghcide-completions",
TypeLenses.descriptor (cmapWithPrio LogTypeLenses recorder) "ghcide-type-lenses",
Notifications.descriptor (cmapWithPrio LogNotifications recorder) "ghcide-core"
]
descriptor :: PluginId -> PluginDescriptor IdeState
descriptor plId = (defaultPluginDescriptor plId)
{ pluginHandlers = mkPluginHandler STextDocumentHover hover'
<> mkPluginHandler STextDocumentDocumentSymbol symbolsProvider
<> mkPluginHandler STextDocumentDefinition (\ide _ DefinitionParams{..} ->
gotoDefinition ide TextDocumentPositionParams{..})
<> mkPluginHandler STextDocumentTypeDefinition (\ide _ TypeDefinitionParams{..} ->
gotoTypeDefinition ide TextDocumentPositionParams{..})
<> mkPluginHandler STextDocumentDocumentHighlight (\ide _ DocumentHighlightParams{..} ->
documentHighlight ide TextDocumentPositionParams{..})
<> mkPluginHandler STextDocumentReferences (\ide _ params -> references ide params)
<> mkPluginHandler SWorkspaceSymbol (\ide _ params -> wsSymbols ide params),
pluginConfigDescriptor = defaultConfigDescriptor
}
hover' :: IdeState -> PluginId -> HoverParams -> LspM c (Either ResponseError (Maybe Hover))
hover' ideState _ HoverParams{..} = do
AZ
hover ideState TextDocumentPositionParams{..}
symbolsProvider :: IdeState -> PluginId -> DocumentSymbolParams -> LspM c (Either ResponseError (List DocumentSymbol |? List SymbolInformation))
symbolsProvider ide _ params = moduleOutline ide params
|
1aabd6b7ea0c99e58b4a5143e01a3da0abee2b3f11fd0862563e9e49c925752b | gfngfn/otfed | encode.ml |
open Basic
open EncodeOperation.Open
include EncodeTable
include EncodeBasic
let get_table_tag (table : table) =
table.tag
let get_contents (table : table) : string =
table.contents
let compare_table (table1 : table) (table2 : table) =
Value.Tag.compare table1.tag table2.tag
type relative_offset = int
type table_directory_entry = {
table_tag : Value.Tag.t;
relative_offset : relative_offset;
table_length : int;
table_checksum : wint;
}
type table_accumulator = relative_offset option * table_directory_entry Alist.t
let add_checksum (x : wint) (y : wint) : wint =
let open WideInt in
let q = (of_int 1) lsl 32 in
(x +% y) mod q
let calculate_checksum (s : string) : wint =
let open WideInt in
let len = String.length s in
let access i =
if i < len then
of_byte (String.get s i)
else
!% 0
in
let rec aux acc i =
if i >= len then
acc
else
let b0 = access i in
let b1 = access (i + 1) in
let b2 = access (i + 2) in
let b3 = access (i + 3) in
let ui = (b0 lsl 24) lor (b1 lsl 16) lor (b2 lsl 8) lor b3 in
let acc = add_checksum acc ui in
aux acc (i + 4)
in
aux (of_int 0) 0
(* `e_single_table` is used as a folding function in `enc_tables`. *)
let e_single_table ((checksum_reloffset_opt, entries) : table_accumulator) (table : table) =
let open EncodeOperation in
pad_to_long_aligned >>= fun () ->
current >>= fun reloffset ->
e_bytes table.contents >>= fun () ->
let checksum_reloffset_opt =
if Value.Tag.equal Value.Tag.table_head table.tag then
Some(reloffset + 8)
else
checksum_reloffset_opt
in
let table_checksum = calculate_checksum table.contents in
let entry =
{
table_tag = table.tag;
relative_offset = reloffset;
table_length = String.length table.contents;
table_checksum = table_checksum;
}
in
return (checksum_reloffset_opt, Alist.extend entries entry)
(* `enc_tables tables` writes the tables `tables` and returns the pair of
- an offset to `CheckSumAdjustment` relative to the position immediately after the table directory, and
- all the entries for the construction of the table directory. *)
let enc_tables (tables : table list) : (relative_offset * table_directory_entry list) encoder =
let open EncodeOperation in
foldM e_single_table tables (None, Alist.empty) >>= fun (checksum_reloffset_opt, entries) ->
match checksum_reloffset_opt with
| None -> assert false
| Some(checksum_reloffset) -> return (checksum_reloffset, Alist.to_list entries)
let enc_table_directory_entry ~first_offset (all_table_checksum : wint) (entry : table_directory_entry) : wint encoder =
let open EncodeOperation in
e_tag entry.table_tag >>= fun () ->
e_uint32 entry.table_checksum >>= fun () ->
e_uint32 (!% (first_offset + entry.relative_offset)) >>= fun () ->
e_uint32 (!% (entry.table_length)) >>= fun () ->
return @@ add_checksum all_table_checksum entry.table_checksum
let enc_table_directory_entries ~first_offset (entries : table_directory_entry list) : wint encoder =
let open EncodeOperation in
foldM (enc_table_directory_entry ~first_offset) entries (!% 0)
let cut_uint32_to_bytes (u : wint) : char * char * char * char =
let open WideInt in
let b0 = u lsr 24 in
let r0 = u -% (b0 lsl 24) in
let b1 = r0 lsr 16 in
let r1 = r0 -% (b1 lsl 16) in
let b2 = r1 lsr 8 in
let b3 = r1 -% (b2 lsl 8) in
(to_byte b0, to_byte b1, to_byte b2, to_byte b3)
let update_checksum_adjustment ~checksum_offset ~checksum_value (contents : string) =
let checksum_adjustment =
let temp = (!%% 0xB1B0AFBAL) -% checksum_value in
if WideInt.is_neg temp then temp +% (!% (1 lsl 32)) else temp
in
try
let bytes = Bytes.of_string contents in
let (b0, b1, b2, b3) = cut_uint32_to_bytes checksum_adjustment in
Bytes.set bytes checksum_offset b0;
Bytes.set bytes (checksum_offset + 1) b1;
Bytes.set bytes (checksum_offset + 2) b2;
Bytes.set bytes (checksum_offset + 3) b3;
Bytes.to_string bytes
with
| _ -> assert false
Writes the 12 - byte header of the entire TrueType - based OpenType font .
let enc_header ~(ttf : bool) (numTables : int) =
let open EncodeOperation in
let enc_version =
if ttf then
e_uint32 (!% 0x00010000)
else
e_bytes "OTTO"
in
let entrySelector = Stdlib.(truncate (log (float_of_int numTables) /. log 2.0)) in
let searchRange = (1 lsl entrySelector) * 16 in
let rangeShift = numTables * 16 - searchRange in
enc_version >>= fun () ->
e_uint16 numTables >>= fun () ->
e_uint16 searchRange >>= fun () ->
e_uint16 entrySelector >>= fun () ->
e_uint16 rangeShift >>= fun () ->
return ()
let make_font_data_from_tables ~(ttf : bool) (tables : table list) : string ok =
let tables = tables |> List.sort compare_table in
let numTables = List.length tables in
let first_offset = 12 + numTables * 16 in
(* `first_offset` is the offset where the table directory ends. *)
let open ResultMonad in
enc_tables tables |> EncodeOperation.run >>= fun (table_contents, (checksum_reloffset, entries)) ->
let enc =
let open EncodeOperation in
enc_header ~ttf numTables >>= fun () ->
enc_table_directory_entries ~first_offset entries >>= fun all_table_checksum ->
e_bytes table_contents >>= fun () ->
return all_table_checksum
in
enc |> EncodeOperation.run >>= fun (contents, all_table_checksum) ->
let checksum_offset = first_offset + checksum_reloffset in
let prelude_checksum = calculate_checksum (String.sub contents 0 first_offset) in
let checksum_value = add_checksum prelude_checksum all_table_checksum in
return (update_checksum_adjustment ~checksum_offset ~checksum_value contents)
| null | https://raw.githubusercontent.com/gfngfn/otfed/2f7bd15456a8326f03ca7b20ff251d7ac24c475d/src/encode.ml | ocaml | `e_single_table` is used as a folding function in `enc_tables`.
`enc_tables tables` writes the tables `tables` and returns the pair of
- an offset to `CheckSumAdjustment` relative to the position immediately after the table directory, and
- all the entries for the construction of the table directory.
`first_offset` is the offset where the table directory ends. |
open Basic
open EncodeOperation.Open
include EncodeTable
include EncodeBasic
let get_table_tag (table : table) =
table.tag
let get_contents (table : table) : string =
table.contents
let compare_table (table1 : table) (table2 : table) =
Value.Tag.compare table1.tag table2.tag
type relative_offset = int
type table_directory_entry = {
table_tag : Value.Tag.t;
relative_offset : relative_offset;
table_length : int;
table_checksum : wint;
}
type table_accumulator = relative_offset option * table_directory_entry Alist.t
let add_checksum (x : wint) (y : wint) : wint =
let open WideInt in
let q = (of_int 1) lsl 32 in
(x +% y) mod q
let calculate_checksum (s : string) : wint =
let open WideInt in
let len = String.length s in
let access i =
if i < len then
of_byte (String.get s i)
else
!% 0
in
let rec aux acc i =
if i >= len then
acc
else
let b0 = access i in
let b1 = access (i + 1) in
let b2 = access (i + 2) in
let b3 = access (i + 3) in
let ui = (b0 lsl 24) lor (b1 lsl 16) lor (b2 lsl 8) lor b3 in
let acc = add_checksum acc ui in
aux acc (i + 4)
in
aux (of_int 0) 0
let e_single_table ((checksum_reloffset_opt, entries) : table_accumulator) (table : table) =
let open EncodeOperation in
pad_to_long_aligned >>= fun () ->
current >>= fun reloffset ->
e_bytes table.contents >>= fun () ->
let checksum_reloffset_opt =
if Value.Tag.equal Value.Tag.table_head table.tag then
Some(reloffset + 8)
else
checksum_reloffset_opt
in
let table_checksum = calculate_checksum table.contents in
let entry =
{
table_tag = table.tag;
relative_offset = reloffset;
table_length = String.length table.contents;
table_checksum = table_checksum;
}
in
return (checksum_reloffset_opt, Alist.extend entries entry)
let enc_tables (tables : table list) : (relative_offset * table_directory_entry list) encoder =
let open EncodeOperation in
foldM e_single_table tables (None, Alist.empty) >>= fun (checksum_reloffset_opt, entries) ->
match checksum_reloffset_opt with
| None -> assert false
| Some(checksum_reloffset) -> return (checksum_reloffset, Alist.to_list entries)
let enc_table_directory_entry ~first_offset (all_table_checksum : wint) (entry : table_directory_entry) : wint encoder =
let open EncodeOperation in
e_tag entry.table_tag >>= fun () ->
e_uint32 entry.table_checksum >>= fun () ->
e_uint32 (!% (first_offset + entry.relative_offset)) >>= fun () ->
e_uint32 (!% (entry.table_length)) >>= fun () ->
return @@ add_checksum all_table_checksum entry.table_checksum
let enc_table_directory_entries ~first_offset (entries : table_directory_entry list) : wint encoder =
let open EncodeOperation in
foldM (enc_table_directory_entry ~first_offset) entries (!% 0)
let cut_uint32_to_bytes (u : wint) : char * char * char * char =
let open WideInt in
let b0 = u lsr 24 in
let r0 = u -% (b0 lsl 24) in
let b1 = r0 lsr 16 in
let r1 = r0 -% (b1 lsl 16) in
let b2 = r1 lsr 8 in
let b3 = r1 -% (b2 lsl 8) in
(to_byte b0, to_byte b1, to_byte b2, to_byte b3)
let update_checksum_adjustment ~checksum_offset ~checksum_value (contents : string) =
let checksum_adjustment =
let temp = (!%% 0xB1B0AFBAL) -% checksum_value in
if WideInt.is_neg temp then temp +% (!% (1 lsl 32)) else temp
in
try
let bytes = Bytes.of_string contents in
let (b0, b1, b2, b3) = cut_uint32_to_bytes checksum_adjustment in
Bytes.set bytes checksum_offset b0;
Bytes.set bytes (checksum_offset + 1) b1;
Bytes.set bytes (checksum_offset + 2) b2;
Bytes.set bytes (checksum_offset + 3) b3;
Bytes.to_string bytes
with
| _ -> assert false
Writes the 12 - byte header of the entire TrueType - based OpenType font .
let enc_header ~(ttf : bool) (numTables : int) =
let open EncodeOperation in
let enc_version =
if ttf then
e_uint32 (!% 0x00010000)
else
e_bytes "OTTO"
in
let entrySelector = Stdlib.(truncate (log (float_of_int numTables) /. log 2.0)) in
let searchRange = (1 lsl entrySelector) * 16 in
let rangeShift = numTables * 16 - searchRange in
enc_version >>= fun () ->
e_uint16 numTables >>= fun () ->
e_uint16 searchRange >>= fun () ->
e_uint16 entrySelector >>= fun () ->
e_uint16 rangeShift >>= fun () ->
return ()
let make_font_data_from_tables ~(ttf : bool) (tables : table list) : string ok =
let tables = tables |> List.sort compare_table in
let numTables = List.length tables in
let first_offset = 12 + numTables * 16 in
let open ResultMonad in
enc_tables tables |> EncodeOperation.run >>= fun (table_contents, (checksum_reloffset, entries)) ->
let enc =
let open EncodeOperation in
enc_header ~ttf numTables >>= fun () ->
enc_table_directory_entries ~first_offset entries >>= fun all_table_checksum ->
e_bytes table_contents >>= fun () ->
return all_table_checksum
in
enc |> EncodeOperation.run >>= fun (contents, all_table_checksum) ->
let checksum_offset = first_offset + checksum_reloffset in
let prelude_checksum = calculate_checksum (String.sub contents 0 first_offset) in
let checksum_value = add_checksum prelude_checksum all_table_checksum in
return (update_checksum_adjustment ~checksum_offset ~checksum_value contents)
|
fe768b746223c370a8d0d882040219c6f433a76f8f6bc68b1949b1a84368e15e | ndmitchell/catch | CmdLine.hs |
module Front.CmdLine(cmdLine, CmdLineState(..), Action(..)) where
import Control.Exception
import System.IO
import System.Directory
import System.FilePath
import Data.List
import Data.Char
import Control.Monad
data CmdLineState = CmdLineState
{cmdLineHandle :: Handle -- the handle of the open file
,cmdLineOutput :: String -> FilePath -- generate a log file near this file
,cmdLineOptions :: [String] -- options passed
,cmdLineName :: String -- the name that you started from
}
data Action val = Action
{actionName :: String
,actionExec :: CmdLineState -> String -> val -> IO val}
-- | Drive the command-line interface: for each plain file argument, run
--   @initial@, then apply the requested actions in order, logging progress
--   and results to a per-file log file.  Words starting with @-@ are
--   actions, words starting with @\@@ are options, everything else is a
--   file name.  Returns the final value produced for each file.
cmdLine :: Show val => (CmdLineState -> FilePath -> IO val) -> [Action val] -> [String] -> IO [val]
cmdLine initial actions cmds =
    do
        -- action names must be unique, otherwise dispatch in 'g' would be ambiguous
        () <- assert (length actions == length (nub $ map actionName actions)) $ return ()
        aliases <- loadAliases
        when (null files) $ putStrLn "No files, nothing to do"
        mapM (f aliases) files
    where
        (acts,other) = partition ("-" `isPrefixOf`) cmds
        (opts,files) = partition ("@" `isPrefixOf`) other
        -- process one file: open its log, run the initial computation,
        -- then fold the requested actions over the produced value
        f aliases file = do
            let logger = logFile file ".log"
            createDirectoryIfMissing True (takeDirectory logger)
            hndl <- openFile logger WriteMode
            hPutStrLn hndl $ "-- Catch log file, " ++ file
            -- leading '-'/'@' markers are stripped before the state is built
            let state = CmdLineState hndl (\x -> logFile file ('.':x)) (map tail opts) file
            x <- initial state file
            x <- g aliases state x (map tail acts)
            hPutStrLn hndl "-- Result:"
            hPutStrLn hndl (show x)
            hClose hndl
            print x
            return x
        -- interpret the action list left to right, threading the value:
        -- an alias expands to its command list; otherwise the action is
        -- looked up by the part of the word before any '=' and is passed
        -- the part after it
        g aliases state val [] = return val
        g aliases state val (a:cts) =
                let (a1,a2) = break (== '=') a in
                case lookup a aliases of
                    Just q -> g aliases state val (q++cts)
                    Nothing -> case [x | x <- actions, actionName x == a1] of
                        [] -> error $ "Command not found, " ++ a
                        (x:_) -> do
                            let hndl = cmdLineHandle state
                            putStrLn $ "-- Executing " ++ a
                            hPutStrLn hndl $ "-- Executing " ++ a
                            val <- (actionExec x) state (drop 1 a2) val
                            hPutStrLn hndl $ show val
                            hPutStrLn hndl $ replicate 70 '-'
                            g aliases state val cts
-- | Place the log file for @source@ with suffix @tag@ under the shared
--   @../logs@ directory, in a per-source subdirectory:
--   @logFile "foo" ".log" == "../logs/foo/foo.log"@.
--   Uses 'System.FilePath.</>' (already imported by this module) instead
--   of manual @\"/\"@ concatenation, so separators are joined portably.
logFile :: String -> String -> FilePath
logFile source tag = "../logs" </> source </> (source ++ tag)
-- | Read command aliases from @catch.txt@ in the current directory.
--   Format: an unindented line starts a new alias (its first word is the
--   alias name); each following indented, non-blank line is one command
--   belonging to that alias.  Returns each alias name paired with its
--   command list.
loadAliases :: IO [(String,[String])]
loadAliases =
    do
        src <- readFile "catch.txt"
        -- the initial ("",[]) accumulator is a dummy, dropped by 'tail'
        return $ tail $ f ("",[]) $ filter (not.null) $ dropWhile null $ lines src
    where
        f acc [] = [acc]
        f acc@(name,cmds) ((x1:xs):ss)
            -- indented continuation: append one command to the current alias
            | isSpace x1 && not (all isSpace xs) =
                f (name, cmds ++ [dropWhile isSpace xs]) ss
            -- unindented header: emit the finished alias, start a new one
            | otherwise =
                acc : f (newComp (x1:xs)) ss
        -- the first word of a header line names the alias
        newComp xs | null b = (xs,[])
                   | otherwise = (a,[])
            where (a,b) = break isSpace xs
| null | https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/src/Front/CmdLine.hs | haskell | the handle of the open file
generate a log file near this file
options passed
the name that you started from |
module Front.CmdLine(cmdLine, CmdLineState(..), Action(..)) where
import Control.Exception
import System.IO
import System.Directory
import System.FilePath
import Data.List
import Data.Char
import Control.Monad
data CmdLineState = CmdLineState
}
data Action val = Action
{actionName :: String
,actionExec :: CmdLineState -> String -> val -> IO val}
cmdLine :: Show val => (CmdLineState -> FilePath -> IO val) -> [Action val] -> [String] -> IO [val]
cmdLine initial actions cmds =
do
() <- assert (length actions == length (nub $ map actionName actions)) $ return ()
aliases <- loadAliases
when (null files) $ putStrLn "No files, nothing to do"
mapM (f aliases) files
where
(acts,other) = partition ("-" `isPrefixOf`) cmds
(opts,files) = partition ("@" `isPrefixOf`) other
f aliases file = do
let logger = logFile file ".log"
createDirectoryIfMissing True (takeDirectory logger)
hndl <- openFile logger WriteMode
hPutStrLn hndl $ "-- Catch log file, " ++ file
let state = CmdLineState hndl (\x -> logFile file ('.':x)) (map tail opts) file
x <- initial state file
x <- g aliases state x (map tail acts)
hPutStrLn hndl "-- Result:"
hPutStrLn hndl (show x)
hClose hndl
print x
return x
g aliases state val [] = return val
g aliases state val (a:cts) =
let (a1,a2) = break (== '=') a in
case lookup a aliases of
Just q -> g aliases state val (q++cts)
Nothing -> case [x | x <- actions, actionName x == a1] of
[] -> error $ "Command not found, " ++ a
(x:_) -> do
let hndl = cmdLineHandle state
putStrLn $ "-- Executing " ++ a
hPutStrLn hndl $ "-- Executing " ++ a
val <- (actionExec x) state (drop 1 a2) val
hPutStrLn hndl $ show val
hPutStrLn hndl $ replicate 70 '-'
g aliases state val cts
logFile :: String -> String -> FilePath
logFile source tag = "../logs/" ++ source ++ "/" ++ source ++ tag
loadAliases :: IO [(String,[String])]
loadAliases =
do
src <- readFile "catch.txt"
return $ tail $ f ("",[]) $ filter (not.null) $ dropWhile null $ lines src
where
f acc [] = [acc]
f acc@(name,cmds) ((x1:xs):ss)
| isSpace x1 && not (all isSpace xs) =
f (name, cmds ++ [dropWhile isSpace xs]) ss
| otherwise =
acc : f (newComp (x1:xs)) ss
newComp xs | null b = (xs,[])
| otherwise = (a,[])
where (a,b) = break isSpace xs
|
148880a58258fe9f22e1c72ef14e59011738e6f283d5b5cced05c59fea816ef9 | cram2/cram | generic.lisp | ;; Define generic functions used across chapters
2016 - 06 - 12 15:37:54EDT generic.lisp
Time - stamp : < 2016 - 06 - 12 16:38:57EDT >
Copyright 2016
Distributed under the terms of the GNU General Public License
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
(in-package :gsl)
(export '(parameter))
;;; Generic reader/writer pair for named parameters stored inside GSL
;;; objects; methods are specialized on the object type elsewhere.
(defgeneric parameter (object parameter)
  (:documentation "Get the value of the GSL parameter from the GSL object."))
(defgeneric (setf parameter) (value object parameter)
  (:documentation "Set the value of the GSL parameter from the GSL object."))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_3rdparty/gsll/src/init/generic.lisp | lisp | Define generic functions used across chapters
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>. | 2016 - 06 - 12 15:37:54EDT generic.lisp
Time - stamp : < 2016 - 06 - 12 16:38:57EDT >
Copyright 2016
Distributed under the terms of the GNU General Public License
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package :gsl)
(export '(parameter))
(defgeneric parameter (object parameter)
(:documentation "Get the value of the GSL parameter from the GSL object."))
(defgeneric (setf parameter) (value object parameter)
(:documentation "Set the value of the GSL parameter from the GSL object."))
|
6f4a192158789377b07dbddba0fc457086d74bb4364ddaf7af439db89efcd831 | re-xyr/cleff | Mask.hs | # LANGUAGE Trustworthy #
-- |
Copyright : ( c ) 2021
-- License: BSD3
-- Maintainer:
-- Stability: experimental
Portability : non - portable ( GHC only )
module Cleff.Mask
( -- * Effect
Mask (..)
-- * High-level operations
, bracket
, bracketOnError
, bracket_
, bracketOnError_
, onError
, finally
-- * Primitive operations
, mask
, uninterruptibleMask
, onException
, mask_
, uninterruptibleMask_
-- * Interpretations
, runMask
) where
import Cleff
import Cleff.Internal.Base
import qualified Control.Exception as Exc
-- * Effect
| An effect capable of ' Exc.mask'ing and performing cleanup operations when an computation is interrupted . In
-- particular, this effects allows the use of 'bracket'.
--
-- === Technical details
--
-- Regarding the nuances of 'bracket' semantics, this effect uses the semantics of "UnliftIO.Exception" rather than
-- "Control.Exception". They are more sensible defaults and users can implement other semantics out of the primitive
-- operations if they want to.
data Mask :: Effect where
  -- 'Exc.mask' lifted to the effect monad; the callback receives a
  -- function that restores the previous (interruptible) masking state
  Mask :: ((m ~> m) -> m a) -> Mask m a
  -- like 'Mask', but even interruptible operations cannot be interrupted
  UninterruptibleMask :: ((m ~> m) -> m a) -> Mask m a
  -- run the first computation; if it throws, run the second, then rethrow
  OnException :: m a -> m b -> Mask m a
-- * Operations
makeEffect_ ''Mask
-- | Prevents a computation from receiving asynchronous exceptions, /i.e./ being interrupted by another thread. Also
-- provides a function to restore receiving async exceptions for a computation.
--
However , some potentially blocking actions like @takeMVar@ can still be interrupted , and for them also not to be
interrupted in any case you 'll need ' uninterruptibleMask ' . See ' Control.Exception.mask ' for details .
mask :: Mask :> es => ((Eff es ~> Eff es) -> Eff es a) -> Eff es a
-- | Prevents a computation from receiving asynchronous exceptions, even if there is an interruptible operation
-- (operations that potentially deadlocks or otherwise blocks indefinitely). Therefore this function is potentially
-- dangerous in the sense that it can make a thread both unresponsive and unkillable. See
-- 'Control.Exception.uninterruptibleMask' for details.
uninterruptibleMask :: Mask :> es => ((Eff es ~> Eff es) -> Eff es a) -> Eff es a
-- | Like 'onError', but without 'uninterruptibleMask'ing the cleanup action, making it possible that a cleanup action
-- is interrupted. Use 'onError' is usually the safer option.
onException :: Mask :> es
=> Eff es a -- ^ The main computation that may throw an exception
-> Eff es b -- ^ The computation that runs when an exception is thrown
-> Eff es a
-- | Run a computation that acquires a resource (@alloc@), then a main computation
-- using that resource, then a cleanup computation (@dealloc@). 'bracket'
-- guarantees that @alloc@ and @dealloc@ will always run, regardless of whether
-- an exception is thrown in the main computation. Note that if an exception is
-- thrown in the main computation, it will be rethrown after 'bracket' finishes.
--
-- === Technical details
--
-- Note that this function uses @unliftio@ semantics: the resource-acquiring
-- action is interruptibly 'mask'ed while resource cleanup is
-- 'uninterruptibleMask'ed. Most of the time, this will be what you want.
-- Other functions in this module use @unliftio@ semantics too.
bracket :: Mask :> es
  => Eff es a -- ^ The computation to run first, usually acquires a resource
  -> (a -> Eff es c) -- ^ The computation to run after the main computation, usually cleans up
  -> (a -> Eff es b) -- ^ The main computation that uses the resource
  -> Eff es b
bracket acquire release use = mask \unmask ->
  acquire >>= \resource -> unmask (use resource) `finally` release resource
-- | Like 'bracket', but only runs cleanup if an exception is thrown in the main computation.
bracketOnError :: Mask :> es
  => Eff es a -- ^ The computation to run first, usually acquires a resource
  -> (a -> Eff es c) -- ^ The computation to run when the main computation throws an exception, usually cleans up
  -> (a -> Eff es b) -- ^ The main computation that uses the resource
  -> Eff es b
bracketOnError acquire release use = mask \unmask ->
  acquire >>= \resource -> unmask (use resource) `onError` release resource
-- | Variant of 'mask' that does not provide a restoring function.
mask_ :: Mask :> es => Eff es a -> Eff es a
mask_ action = mask (const action)
-- | Variant of 'uninterruptibleMask' that does not provide a restoring function.
uninterruptibleMask_ :: Mask :> es => Eff es a -> Eff es a
uninterruptibleMask_ action = uninterruptibleMask (const action)
-- | Variant of 'bracket' that does not pass the allocated resource to the cleanup action.
bracket_ :: Mask :> es => Eff es a -> Eff es c -> (a -> Eff es b) -> Eff es b
bracket_ acquire release = bracket acquire (const release)
-- | Variant of 'bracketOnError' that does not pass the allocated resource to the cleanup action.
bracketOnError_ :: Mask :> es => Eff es a -> Eff es c -> (a -> Eff es b) -> Eff es b
bracketOnError_ acquire release = bracketOnError acquire (const release)
-- | Attach an action that runs if the main computation throws an exception. Note that this will rethrow the exception
-- instead of returning to normal control flow.
--
-- The cleanup action is guaranteed not to be interrupted halfways.
onError :: Mask :> es
  => Eff es a -- ^ The main computation that may throw an exception
  -> Eff es b -- ^ The computation that runs when an exception is thrown
  -> Eff es a
onError action cleanup = action `onException` uninterruptibleMask_ cleanup
-- | Attach a cleanup action that will always run after a potentially throwing computation.
--
-- The cleanup runs exactly once on either path: if @m@ throws, 'onError'
-- runs @mz@ and rethrows (so the '<*' part is never reached); if @m@
-- succeeds, @mz@ runs afterwards via '<*', uninterruptibly.
finally :: Mask :> es
  => Eff es a -- ^ The main computation that may throw an exception
  -> Eff es b -- ^ The computation that runs after the main computation, regardless of whether an exception is thrown
  -> Eff es a
finally m mz = (m `onError` mz) <* uninterruptibleMask_ mz
-- * Interpretations
-- | Interpret the 'Mask' effect in terms of primitive 'IO' actions.
runMask :: Eff (Mask : es) ~> Eff es
runMask = thisIsPureTrustMe . reinterpret \case
  -- unlift the effect computation to IO, delegate to the corresponding
  -- "Control.Exception" primitive, and lift the restore function back
  Mask f -> withToIO \toIO -> Exc.mask \restore -> toIO $ f (fromIO . restore . toIO)
  UninterruptibleMask f -> withToIO \toIO -> Exc.uninterruptibleMask \restore -> toIO $ f (fromIO . restore . toIO)
  -- on any exception run the handler, then rethrow the original;
  -- exceptions from the handler itself are swallowed by 'Exc.try'
  OnException m n -> withToIO \toIO -> toIO m `Exc.catch` \(e :: Exc.SomeException) ->
    Exc.try @Exc.SomeException (toIO n) *> Exc.throwIO e
| null | https://raw.githubusercontent.com/re-xyr/cleff/28c74f3c6dd473e6b773ba26b785980ee5607234/src/Cleff/Mask.hs | haskell | |
License: BSD3
Maintainer:
Stability: experimental
* Effect
* High-level operations
* Primitive operations
* Interpretations
* Effect
particular, this effects allows the use of 'bracket'.
=== Technical details
Regarding the nuances of 'bracket' semantics, this effect uses the semantics of "UnliftIO.Exception" rather than
"Control.Exception". They are more sensible defaults and users can implement other semantics out of the primitive
operations if they want to.
* Operations
| Prevents a computation from receiving asynchronous exceptions, /i.e./ being interrupted by another thread. Also
provides a function to restore receiving async exceptions for a computation.
| Prevents a computation from receiving asynchronous exceptions, even if there is an interruptible operation
(operations that potentially deadlocks or otherwise blocks indefinitely). Therefore this function is potentially
dangerous in the sense that it can make a thread both unresponsive and unkillable. See
'Control.Exception.uninterruptibleMask' for details.
| Like 'onError', but without 'uninterruptibleMask'ing the cleanup action, making it possible that a cleanup action
is interrupted. Use 'onError' is usually the safer option.
^ The main computation that may throw an exception
^ The computation that runs when an exception is thrown
exception is thrown in the main computation. Note that if an exception is thrown in the main computation, it will
be rethrown after 'bracket' finishes.
=== Technical details
module use @unliftio@ semantics too.
^ The computation to run after the main computation, usually cleans up
^ The main computation that uses the resource
| Like 'bracket', but only runs cleanup if an exception is thrown in the main computation.
^ The computation to run when the main computation throws an exception, usually cleans up
^ The main computation that uses the resource
| Variant of 'mask' that does not provide a restoring function.
| Variant of 'uninterruptibleMask' that does not provide a restoring function.
| Variant of 'bracket' that does not pass the allocated resource to the cleanup action.
| Variant of 'bracketOnError' that does not pass the allocated resource to the cleanup action.
| Attach an action that runs if the main computation throws an exception. Note that this will rethrow the exception
instead of returning to normal control flow.
The cleanup action is guaranteed not to be interrupted halfways.
^ The main computation that may throw an exception
^ The computation that runs when an exception is thrown
| Attach a cleanup action that will always run after a potentially throwing computation.
^ The main computation that may throw an exception
^ The computation that runs after the main computation, regardless of whether an exception is thrown
* Interpretations
| Interpret the 'Mask' effect in terms of primitive 'IO' actions. | # LANGUAGE Trustworthy #
Copyright : ( c ) 2021
Portability : non - portable ( GHC only )
module Cleff.Mask
Mask (..)
, bracket
, bracketOnError
, bracket_
, bracketOnError_
, onError
, finally
, mask
, uninterruptibleMask
, onException
, mask_
, uninterruptibleMask_
, runMask
) where
import Cleff
import Cleff.Internal.Base
import qualified Control.Exception as Exc
| An effect capable of ' Exc.mask'ing and performing cleanup operations when an computation is interrupted . In
data Mask :: Effect where
Mask :: ((m ~> m) -> m a) -> Mask m a
UninterruptibleMask :: ((m ~> m) -> m a) -> Mask m a
OnException :: m a -> m b -> Mask m a
makeEffect_ ''Mask
However , some potentially blocking actions like @takeMVar@ can still be interrupted , and for them also not to be
interrupted in any case you 'll need ' uninterruptibleMask ' . See ' Control.Exception.mask ' for details .
mask :: Mask :> es => ((Eff es ~> Eff es) -> Eff es a) -> Eff es a
uninterruptibleMask :: Mask :> es => ((Eff es ~> Eff es) -> Eff es a) -> Eff es a
onException :: Mask :> es
-> Eff es a
| Run a computation that acquires a resource ( @alloc@ ) , then a main computation using that resource , then a cleanup
computation ( @dealloc@ ) . ' bracket ' guarantees that @alloc@ and @dealloc@ will always run , regardless of whether an
Note that this function uses @unliftio@ semantics : resource acquiring action is interruptibly ' mask'ed while
resource cleanup is ' uninterruptibleMask'ed . Most of the times , this will be what you want . Other functions in this
bracket :: Mask :> es
^ The computation to run first , usually acquires a resource
-> Eff es b
bracket alloc dealloc action = mask \restore -> do
res <- alloc
restore (action res) `finally` dealloc res
bracketOnError :: Mask :> es
^ The computation to run first , usually acquires a resource
-> Eff es b
bracketOnError alloc dealloc action = mask \restore -> do
res <- alloc
restore (action res) `onError` dealloc res
mask_ :: Mask :> es => Eff es a -> Eff es a
mask_ m = mask \_ -> m
uninterruptibleMask_ :: Mask :> es => Eff es a -> Eff es a
uninterruptibleMask_ m = uninterruptibleMask \_ -> m
bracket_ :: Mask :> es => Eff es a -> Eff es c -> (a -> Eff es b) -> Eff es b
bracket_ ma = bracket ma . const
bracketOnError_ :: Mask :> es => Eff es a -> Eff es c -> (a -> Eff es b) -> Eff es b
bracketOnError_ ma = bracketOnError ma . const
onError :: Mask :> es
-> Eff es a
onError m n = m `onException` uninterruptibleMask_ n
finally :: Mask :> es
-> Eff es a
finally m mz = (m `onError` mz) <* uninterruptibleMask_ mz
runMask :: Eff (Mask : es) ~> Eff es
runMask = thisIsPureTrustMe . reinterpret \case
Mask f -> withToIO \toIO -> Exc.mask \restore -> toIO $ f (fromIO . restore . toIO)
UninterruptibleMask f -> withToIO \toIO -> Exc.uninterruptibleMask \restore -> toIO $ f (fromIO . restore . toIO)
OnException m n -> withToIO \toIO -> toIO m `Exc.catch` \(e :: Exc.SomeException) ->
Exc.try @Exc.SomeException (toIO n) *> Exc.throwIO e
|
6d76f1e1f30b8f00ed80aa0ec0cc6a199660472711107c484f97edf921a3befc | cram2/cram | generate-tests.lisp | ;; Make tests from examples.
2008 - 09 - 07 21:00:48EDT generate-tests.lisp
Time - stamp : < 2009 - 12 - 27 10:12:12EST generate-tests.lisp >
;;
Copyright 2008 , 2009
Distributed under the terms of the GNU General Public License
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
the GSLL interface definition files are # ' save - test
;;; forms. These serve to define both examples and tests. Getting an
;;; example involves calling (examples) to get a list of names, then
;;; the calling the function with a particular name to get the
;;; examples, e.g.
;;; (examples)
;;; (examples 'matrix-add)
;;; To do all the tests,
;;; (lisp-unit:run-tests)
;;; The files that define the tests are in tests/. These files are
;;; generated automatically and checked into the repository; they
;;; shouldn't be changed very often. Rarely, it may be necessary to
;;; generate such a file. In this case, #'write-test-to-file recreates
;;; the file, e.g.
;;; (write-test-to-file 'matrix-add "test/")
(in-package :gsl)
(defun numerical-serialize (form)
  "Recursively turn FORM into a serializable load form: lists become
explicit (list ...) constructors, GSL mobjects are replaced by their
make-load-form, and everything else passes through unchanged."
  (typecase form
    (list (cons 'list (mapcar #'numerical-serialize form)))
    (mobject (make-load-form form))
    (t form)))
;;; (make-test '(legendre-conicalP-half 3.5d0 10.0d0))
(defun make-test (form &optional answer)
  "Make a test for lisp-unit. If the answer is known separately from
   evaluation of the form, it may be supplied in 'answer; note that
   this only accommodates a single value at present."
  ;; Evaluate FORM now to capture its expected values (or the condition
  ;; it signals), unless a precomputed ANSWER was supplied.
  (let ((vals (multiple-value-list (or answer (ignore-errors (eval form))))))
    (if (typep (second vals) 'condition)
        ;; evaluation signalled: assert that re-evaluation raises the
        ;; same condition type
	`(lisp-unit::assert-error
	  ',(type-of (second vals))
	  ,form)
        ;; normal result: assert numerical equality of all returned values
	`(lisp-unit::assert-numerical-equal
	  ,(numerical-serialize vals)
	  (multiple-value-list ,form)))))
(defun create-test (test-name &optional answers)
  "Find the saved test by name and create it, with the generated results."
  (append
   `(lisp-unit:define-test ,test-name)
   (let ((test-set (getf *all-generated-tests* test-name)))
     ;; pair each saved form with its known answer, or NIL so that
     ;; make-test computes the answer by evaluating the form
     (mapcar #'make-test test-set
	     (or answers (make-list (length test-set) :initial-element nil))))))
(defun write-test-to-file (test path &optional answers)
  "Write the test to a file with the same name under path.
   Use this function with caution; it will replace an existing
   test file and thus the opportunity for regression test will be lost."
  (let ((pathname (merge-pathnames (format nil "~(~a~).lisp" test) path))
	;; print floats as single-float so generated literals are stable
	(*read-default-float-format* 'single-float))
    (with-open-file
	(stream pathname :direction :output :if-exists :rename)
      (format
       stream
       ";; Regression test ~a for GSLL, automatically generated~%~%" test)
      (format stream "(in-package :gsl)~%~%")
      (format t "Writing test ~a to file ~a~&" test pathname)
      ;; the test form itself is written readably so it can be loaded back
      (format stream "~s~%~%" (create-test test answers)))))
;;; This is commented out because it shouldn't normally be run. It
;;; will regenerate all tests, so there will be no regression tests to
;;; previous versions. DON'T FORGET THE TRAILING SLASH ON THE PATH
( write - tests " /home / / mathematics / gsl / tests/ " )
#+(or)
(defun write-tests (path)
"Write all the tests to the appropriate file."
(iter:iter
(iter:for (key val) on *all-generated-tests* by #'cddr)
(write-test-to-file key path)))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_3rdparty/gsll/src/test-unit/generate-tests.lisp | lisp | Make tests from examples.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
forms. These serve to define both examples and tests. Getting an
example involves calling (examples) to get a list of names, then
the calling the function with a particular name to get the
examples, e.g.
(examples)
(examples 'matrix-add)
To do all the tests,
(lisp-unit:run-tests)
The files that define the tests are in tests/. These files are
generated automatically and checked into the repository; they
shouldn't be changed very often. Rarely, it may be necessary to
generate such a file. In this case, #'write-test-to-file recreates
the file, e.g.
(write-test-to-file 'matrix-add "test/")
(make-test '(legendre-conicalP-half 3.5d0 10.0d0))
note that
it will replace an existing
This is commented out because it shouldn't normally be run. It
will regenerate all tests, so there will be no regression tests to
previous versions. DON'T FORGET THE TRAILING SLASH ON THE PATH | 2008 - 09 - 07 21:00:48EDT generate-tests.lisp
Time - stamp : < 2009 - 12 - 27 10:12:12EST generate-tests.lisp >
Copyright 2008 , 2009
Distributed under the terms of the GNU General Public License
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
the GSLL interface definition files are # ' save - test
(in-package :gsl)
(defun numerical-serialize (form)
(if (typep form 'list)
(cons 'list (mapcar #'numerical-serialize form))
(if (typep form 'mobject)
(make-load-form form)
form)))
(defun make-test (form &optional answer)
"Make a test for lisp-unit. If the answer is known separately from
this only accommodates a single value at present."
(let ((vals (multiple-value-list (or answer (ignore-errors (eval form))))))
(if (typep (second vals) 'condition)
`(lisp-unit::assert-error
',(type-of (second vals))
,form)
`(lisp-unit::assert-numerical-equal
,(numerical-serialize vals)
(multiple-value-list ,form)))))
(defun create-test (test-name &optional answers)
"Find the saved test by name and create it, with the generated results."
(append
`(lisp-unit:define-test ,test-name)
(let ((test-set (getf *all-generated-tests* test-name)))
(mapcar #'make-test test-set
(or answers (make-list (length test-set) :initial-element nil))))))
(defun write-test-to-file (test path &optional answers)
"Write the test to a file with the same name under path.
test file and thus the opportunity for regression test will be lost."
(let ((pathname (merge-pathnames (format nil "~(~a~).lisp" test) path))
(*read-default-float-format* 'single-float))
(with-open-file
(stream pathname :direction :output :if-exists :rename)
(format
stream
";; Regression test ~a for GSLL, automatically generated~%~%" test)
(format stream "(in-package :gsl)~%~%")
(format t "Writing test ~a to file ~a~&" test pathname)
(format stream "~s~%~%" (create-test test answers)))))
( write - tests " /home / / mathematics / gsl / tests/ " )
#+(or)
(defun write-tests (path)
"Write all the tests to the appropriate file."
(iter:iter
(iter:for (key val) on *all-generated-tests* by #'cddr)
(write-test-to-file key path)))
|
aee5986af4fe2b188bbd0cb7e5ffc219ec15699ab809dcf7c18c3c1f425a2c48 | expipiplus1/vulkan | VK_KHR_external_semaphore_fd.hs | {-# language CPP #-}
-- | = Name
--
VK_KHR_external_semaphore_fd - device extension
--
= = VK_KHR_external_semaphore_fd
--
-- [__Name String__]
-- @VK_KHR_external_semaphore_fd@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
80
--
-- [__Revision__]
1
--
-- [__Extension and Version Dependencies__]
--
- Requires support for Vulkan 1.0
--
-- - Requires @VK_KHR_external_semaphore@ to be enabled for any
-- device-level functionality
--
-- [__Contact__]
--
-
< -Docs/issues/new?body=[VK_KHR_external_semaphore_fd ] @cubanismo%0A*Here describe the issue or question you have about the VK_KHR_external_semaphore_fd extension * >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
2016 - 10 - 21
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
- , Google
--
- , NVIDIA
--
- , NVIDIA
--
- , NVIDIA
--
-- == Description
--
-- An application using external memory may wish to synchronize access to
-- that memory using semaphores. This extension enables an application to
-- export semaphore payload to and import semaphore payload from POSIX file
-- descriptors.
--
-- == New Commands
--
-- - 'getSemaphoreFdKHR'
--
- ' importSemaphoreFdKHR '
--
-- == New Structures
--
-- - 'ImportSemaphoreFdInfoKHR'
--
-- - 'SemaphoreGetFdInfoKHR'
--
-- == New Enum Constants
--
-- - 'KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME'
--
-- - 'KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION'
--
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
--
- ' Vulkan . Core10.Enums . StructureType . '
--
- ' Vulkan . Core10.Enums . StructureType . '
--
-- == Issues
--
-- 1) Does the application need to close the file descriptor returned by
-- 'getSemaphoreFdKHR'?
--
-- __RESOLVED__: Yes, unless it is passed back in to a driver instance to
-- import the semaphore. A successful get call transfers ownership of the
-- file descriptor to the application, and a successful import transfers it
-- back to the driver. Destroying the original semaphore object will not
-- close the file descriptor or remove its reference to the underlying
-- semaphore resource associated with it.
--
-- == Version History
--
- Revision 1 , 2016 - 10 - 21 ( )
--
-- - Initial revision
--
-- == See Also
--
-- 'ImportSemaphoreFdInfoKHR', 'SemaphoreGetFdInfoKHR',
-- 'getSemaphoreFdKHR', 'importSemaphoreFdKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_external_semaphore_fd ( getSemaphoreFdKHR
, importSemaphoreFdKHR
, ImportSemaphoreFdInfoKHR(..)
, SemaphoreGetFdInfoKHR(..)
, KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION
, pattern KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION
, KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
, pattern KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Data.Coerce (coerce)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Foreign.C.Types (CInt(..))
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.C.Types (CInt)
import Foreign.C.Types (CInt(..))
import Foreign.C.Types (CInt(CInt))
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.NamedType ((:::))
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkGetSemaphoreFdKHR))
import Vulkan.Dynamic (DeviceCmds(pVkImportSemaphoreFdKHR))
import Vulkan.Core10.Handles (Device_T)
import Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits (ExternalSemaphoreHandleTypeFlagBits)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Handles (Semaphore)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
-- Dynamic FFI wrapper: converts the raw function pointer fetched from the
-- Vulkan loader into a callable Haskell function of the same shape.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkGetSemaphoreFdKHR
  :: FunPtr (Ptr Device_T -> Ptr SemaphoreGetFdInfoKHR -> Ptr CInt -> IO Result) -> Ptr Device_T -> Ptr SemaphoreGetFdInfoKHR -> Ptr CInt -> IO Result
-- | vkGetSemaphoreFdKHR - Get a POSIX file descriptor handle for a semaphore
--
-- = Description
--
-- Each call to 'getSemaphoreFdKHR' /must/ create a new file descriptor and
-- transfer ownership of it to the application. To avoid leaking resources,
-- the application /must/ release ownership of the file descriptor when it
-- is no longer needed.
--
-- Note
--
-- Ownership can be released in many ways. For example, the application can
-- call @close@() on the file descriptor, or transfer ownership back to
-- Vulkan by using the file descriptor to import a semaphore payload.
--
-- Where supported by the operating system, the implementation /must/ set
-- the file descriptor to be closed automatically when an @execve@ system
-- call is made.
--
-- Exporting a file descriptor from a semaphore /may/ have side effects
-- depending on the transference of the specified handle type, as described
-- in
-- <-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore State>.
--
-- == Return Codes
--
-- [<-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
--     - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_TOO_MANY_OBJECTS'
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
-- 'Vulkan.Core10.Handles.Device', 'SemaphoreGetFdInfoKHR'
getSemaphoreFdKHR :: forall io
                   . (MonadIO io)
                  => -- | @device@ is the logical device that created the semaphore being
                     -- exported.
                     --
                     -- #VUID-vkGetSemaphoreFdKHR-device-parameter# @device@ /must/ be a valid
                     -- 'Vulkan.Core10.Handles.Device' handle
                     Device
                  -> -- | @pGetFdInfo@ is a pointer to a 'SemaphoreGetFdInfoKHR' structure
                     -- containing parameters of the export operation.
                     --
                     -- #VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter# @pGetFdInfo@ /must/ be a
                     -- valid pointer to a valid 'SemaphoreGetFdInfoKHR' structure
                     SemaphoreGetFdInfoKHR
                  -> io (("fd" ::: Int32))
getSemaphoreFdKHR device getFdInfo = liftIO . evalContT $ do
  -- Look up the dynamically-loaded command and fail with a descriptive
  -- IOError if the extension function was not present on this device.
  let vkGetSemaphoreFdKHRPtr = pVkGetSemaphoreFdKHR (case device of Device{deviceCmds} -> deviceCmds)
  lift $ unless (vkGetSemaphoreFdKHRPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetSemaphoreFdKHR is null" Nothing Nothing
  let vkGetSemaphoreFdKHR' = mkVkGetSemaphoreFdKHR vkGetSemaphoreFdKHRPtr
  -- Marshal the info struct and allocate 4 bytes for the returned C int fd;
  -- both allocations are released when the ContT block unwinds.
  pGetFdInfo <- ContT $ withCStruct (getFdInfo)
  pPFd <- ContT $ bracket (callocBytes @CInt 4) free
  r <- lift $ traceAroundEvent "vkGetSemaphoreFdKHR" (vkGetSemaphoreFdKHR'
                                                       (deviceHandle (device))
                                                       pGetFdInfo
                                                       (pPFd))
  -- Negative result codes are errors; surface them as VulkanException.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  pFd <- lift $ peek @CInt pPFd
  pure $ ((coerce @CInt @Int32 pFd))
-- Dynamic FFI wrapper for @vkImportSemaphoreFdKHR@: converts the raw
-- function pointer from the device's dispatch table into a callable
-- Haskell function.  @unsafe@ unless SAFE_FOREIGN_CALLS is defined.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkImportSemaphoreFdKHR
  :: FunPtr (Ptr Device_T -> Ptr ImportSemaphoreFdInfoKHR -> IO Result) -> Ptr Device_T -> Ptr ImportSemaphoreFdInfoKHR -> IO Result
-- | vkImportSemaphoreFdKHR - Import a semaphore from a POSIX file descriptor
--
-- = Description
--
-- Importing a semaphore payload from a file descriptor transfers ownership
-- of the file descriptor from the application to the Vulkan
-- implementation. The application /must/ not perform any operations on the
-- file descriptor after a successful import.
--
-- Applications /can/ import the same semaphore payload into multiple
-- instances of Vulkan, into the same instance from which it was exported,
-- and multiple times into a given Vulkan instance.
--
-- == Return Codes
--
-- [<-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
--     - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_INVALID_EXTERNAL_HANDLE'
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
-- 'Vulkan.Core10.Handles.Device', 'ImportSemaphoreFdInfoKHR'
importSemaphoreFdKHR :: forall io
                      . (MonadIO io)
                     => -- | @device@ is the logical device that created the semaphore.
                        --
                        -- #VUID-vkImportSemaphoreFdKHR-device-parameter# @device@ /must/ be a
                        -- valid 'Vulkan.Core10.Handles.Device' handle
                        Device
                     -> -- | @pImportSemaphoreFdInfo@ is a pointer to a 'ImportSemaphoreFdInfoKHR'
                        -- structure specifying the semaphore and import parameters.
                        --
                        -- #VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter#
                        -- @pImportSemaphoreFdInfo@ /must/ be a valid pointer to a valid
                        -- 'ImportSemaphoreFdInfoKHR' structure
                        ImportSemaphoreFdInfoKHR
                     -> io ()
importSemaphoreFdKHR device importSemaphoreFdInfo = liftIO . evalContT $ do
  -- Look up the dynamically-loaded command and fail with a descriptive
  -- IOError if the extension function was not present on this device.
  let vkImportSemaphoreFdKHRPtr = pVkImportSemaphoreFdKHR (case device of Device{deviceCmds} -> deviceCmds)
  lift $ unless (vkImportSemaphoreFdKHRPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkImportSemaphoreFdKHR is null" Nothing Nothing
  let vkImportSemaphoreFdKHR' = mkVkImportSemaphoreFdKHR vkImportSemaphoreFdKHRPtr
  -- Marshal the info struct (freed when the ContT block unwinds) and call.
  pImportSemaphoreFdInfo <- ContT $ withCStruct (importSemaphoreFdInfo)
  r <- lift $ traceAroundEvent "vkImportSemaphoreFdKHR" (vkImportSemaphoreFdKHR'
                                                          (deviceHandle (device))
                                                          pImportSemaphoreFdInfo)
  -- Negative result codes are errors; surface them as VulkanException.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
-- | VkImportSemaphoreFdInfoKHR - Structure specifying POSIX file descriptor
-- to import to a semaphore
--
-- = Description
--
-- The handle types supported by @handleType@ are:
--
-- +--------------------------------------------------------------------------------------------------------+------------------+---------------------+
-- | Handle Type | Transference | Permanence |
-- | | | Supported |
-- +========================================================================================================+==================+=====================+
-- | 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT' | Reference        | Temporary,Permanent |
-- +--------------------------------------------------------------------------------------------------------+------------------+---------------------+
-- | 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT'    | Copy             | Temporary           |
-- +--------------------------------------------------------------------------------------------------------+------------------+---------------------+
--
-- Handle Types Supported by 'ImportSemaphoreFdInfoKHR'
--
-- == Valid Usage
--
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-01143 # @handleType@
-- /must/ be a value included in the
-- <-extensions/html/vkspec.html#synchronization-semaphore-handletypes-fd Handle Types Supported by >
-- table
--
-- - #VUID-VkImportSemaphoreFdInfoKHR-fd-01544# @fd@ /must/ obey any
requirements listed for @handleType@ in
-- <-extensions/html/vkspec.html#external-semaphore-handle-types-compatibility external semaphore handle types compatibility>
--
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-03263 # If @handleType@
-- is
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ' ,
the ' Vulkan . Core10.QueueSemaphore .
field /must/ match that of the semaphore from which @fd@ was
-- exported
--
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-07307 # If @handleType@
-- refers to a handle type with copy payload transference semantics,
-- @flags@ /must/ contain
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SEMAPHORE_IMPORT_TEMPORARY_BIT '
--
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-03264 # If @handleType@
-- is
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ' ,
-- the
' Vulkan . Core12.Promoted_From_VK_KHR_timeline_semaphore . SemaphoreTypeCreateInfo'::@semaphoreType@
field /must/ match that of the semaphore from which @fd@ was
-- exported
--
-- - #VUID-VkImportSemaphoreFdInfoKHR-flags-03323# If @flags@ contains
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SEMAPHORE_IMPORT_TEMPORARY_BIT ' ,
-- the
' Vulkan . Core12.Promoted_From_VK_KHR_timeline_semaphore . SemaphoreTypeCreateInfo'::@semaphoreType@
-- field of the semaphore from which @fd@ was exported /must/ not be
' Vulkan . Core12.Enums . . SEMAPHORE_TYPE_TIMELINE '
--
-- If @handleType@ is
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT',
-- the special value @-1@ for @fd@ is treated like a valid sync file
-- descriptor referring to an object that has already signaled. The import
-- operation will succeed and the 'Vulkan.Core10.Handles.Semaphore' will
-- have a temporarily imported payload as if a valid file descriptor had
-- been provided.
--
-- Note
--
-- This special behavior for importing an invalid sync file descriptor
-- allows easier interoperability with other system APIs which use the
-- convention that an invalid sync file descriptor represents work that has
-- already completed and does not need to be waited for. It is consistent
-- with the option for implementations to return a @-1@ file descriptor
-- when exporting a
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT'
-- from a 'Vulkan.Core10.Handles.Semaphore' which is signaled.
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkImportSemaphoreFdInfoKHR-sType-sType# @sType@ /must/ be
' Vulkan . Core10.Enums . StructureType . '
--
- # VUID - VkImportSemaphoreFdInfoKHR - pNext - pNext # @pNext@ /must/ be
-- @NULL@
--
-- - #VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter# @semaphore@
/must/ be a valid ' Vulkan . Core10.Handles . Semaphore ' handle
--
-- - #VUID-VkImportSemaphoreFdInfoKHR-flags-parameter# @flags@ /must/ be
-- a valid combination of
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SemaphoreImportFlagBits '
-- values
--
- # VUID - VkImportSemaphoreFdInfoKHR - handleType - parameter # @handleType@
-- /must/ be a valid
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
-- value
--
-- == Host Synchronization
--
-- - Host access to @semaphore@ /must/ be externally synchronized
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits',
-- 'Vulkan.Core10.Handles.Semaphore',
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlags',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'importSemaphoreFdKHR'
data ImportSemaphoreFdInfoKHR = ImportSemaphoreFdInfoKHR
  { -- | @semaphore@ is the semaphore into which the payload will be imported.
    semaphore :: Semaphore
  , -- | @flags@ is a bitmask of
    -- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlagBits'
    -- specifying additional parameters for the semaphore payload import
    -- operation.
    flags :: SemaphoreImportFlags
  , -- | @handleType@ is a
    -- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
    -- value specifying the type of @fd@.
    handleType :: ExternalSemaphoreHandleTypeFlagBits
  , -- | @fd@ is the external handle to import.
    fd :: Int32
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ImportSemaphoreFdInfoKHR)
#endif
deriving instance Show ImportSemaphoreFdInfoKHR
instance ToCStruct ImportSemaphoreFdInfoKHR where
  -- C layout of VkImportSemaphoreFdInfoKHR as poked below:
  -- sType @0, pNext @8, semaphore @16, flags @24, handleType @28, fd @32;
  -- cStructSize = 40, cStructAlignment = 8.
  withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p ImportSemaphoreFdInfoKHR{..} f = do
    -- Fixed sType tag identifying this structure to the Vulkan API.
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR)
    -- pNext is always NULL: no extension chain is marshalled here.
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
    poke ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags)) (flags)
    poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
    -- The Haskell Int32 fd is stored as a C int.
    poke ((p `plusPtr` 32 :: Ptr CInt)) (CInt (fd))
    f
  cStructSize = 40
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    -- NOTE(review): flags (offset 24) is not written here, unlike in
    -- pokeCStruct — presumably it is an optional field in the zero struct;
    -- confirm against the code generator's "optional member" handling.
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
    poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
    poke ((p `plusPtr` 32 :: Ptr CInt)) (CInt (zero))
    f
instance FromCStruct ImportSemaphoreFdInfoKHR where
  -- Read the fields back from the same offsets used by pokeCStruct
  -- (semaphore @16, flags @24, handleType @28, fd @32); sType and pNext
  -- are fixed values and are not read.
  peekCStruct p = do
    semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
    flags <- peek @SemaphoreImportFlags ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags))
    handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
    fd <- peek @CInt ((p `plusPtr` 32 :: Ptr CInt))
    pure $ ImportSemaphoreFdInfoKHR
             -- The C int fd is converted back to Int32 without truncation.
             semaphore flags handleType (coerce @CInt @Int32 fd)
instance Storable ImportSemaphoreFdInfoKHR where
  -- Size/alignment mirror cStructSize/cStructAlignment of the ToCStruct
  -- instance; peek/poke delegate to the CStruct marshalling above.
  sizeOf ~_ = 40
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero ImportSemaphoreFdInfoKHR where
  -- All four fields (semaphore, flags, handleType, fd) take their
  -- respective 'zero' values.
  zero = ImportSemaphoreFdInfoKHR
           zero
           zero
           zero
           zero
-- | VkSemaphoreGetFdInfoKHR - Structure describing a POSIX FD semaphore
-- export operation
--
-- = Description
--
-- The properties of the file descriptor returned depend on the value of
@handleType@. See
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
-- for a description of the properties of the defined external semaphore
-- handle types.
--
-- == Valid Usage
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01132 # @handleType@ /must/
-- have been included in
' Vulkan . Core11.Promoted_From_VK_KHR_external_semaphore . ExportSemaphoreCreateInfo'::@handleTypes@
-- when @semaphore@’s current payload was created
--
-- - #VUID-VkSemaphoreGetFdInfoKHR-semaphore-01133# @semaphore@ /must/
-- not currently have its payload replaced by an imported payload as
-- described below in
-- <-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>
-- unless that imported payload’s handle type was included in
' Vulkan . Core11.Promoted_From_VK_KHR_external_semaphore_capabilities . ExternalSemaphoreProperties'::@exportFromImportedHandleTypes@
for
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01134 # If @handleType@
-- refers to a handle type with copy payload transference semantics, as
-- defined below in
-- <-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>,
-- there /must/ be no queue waiting on @semaphore@
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01135 # If @handleType@
-- refers to a handle type with copy payload transference semantics,
-- @semaphore@ /must/ be signaled, or have an associated
-- <-extensions/html/vkspec.html#synchronization-semaphores-signaling semaphore signal operation>
-- pending execution
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01136 # @handleType@ /must/
-- be defined as a POSIX file descriptor handle
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-03253 # If @handleType@
-- refers to a handle type with copy payload transference semantics,
-- @semaphore@ /must/ have been created with a
' Vulkan . Core12.Enums . SemaphoreType . ' of
' Vulkan . Core12.Enums . SemaphoreType . SEMAPHORE_TYPE_BINARY '
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-03254 # If @handleType@
-- refers to a handle type with copy payload transference semantics,
-- @semaphore@ /must/ have an associated semaphore signal operation
-- that has been submitted for execution and any semaphore signal
-- operations on which it depends (if any) /must/ have also been
-- submitted for execution
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkSemaphoreGetFdInfoKHR-sType-sType# @sType@ /must/ be
' Vulkan . Core10.Enums . StructureType . '
--
- # VUID - VkSemaphoreGetFdInfoKHR - pNext - pNext # @pNext@ /must/ be @NULL@
--
-- - #VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter# @semaphore@
/must/ be a valid ' Vulkan . Core10.Handles . Semaphore ' handle
--
- # VUID - VkSemaphoreGetFdInfoKHR - handleType - parameter # @handleType@
-- /must/ be a valid
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
-- value
--
-- = See Also
--
-- <-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
-- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits',
-- 'Vulkan.Core10.Handles.Semaphore',
-- 'Vulkan.Core10.Enums.StructureType.StructureType', 'getSemaphoreFdKHR'
data SemaphoreGetFdInfoKHR = SemaphoreGetFdInfoKHR
  { -- | @semaphore@ is the semaphore from which state will be exported.
    semaphore :: Semaphore
  , -- | @handleType@ is a
    -- 'Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits.ExternalSemaphoreHandleTypeFlagBits'
    -- value specifying the type of handle requested.
    handleType :: ExternalSemaphoreHandleTypeFlagBits
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (SemaphoreGetFdInfoKHR)
#endif
deriving instance Show SemaphoreGetFdInfoKHR
instance ToCStruct SemaphoreGetFdInfoKHR where
  -- C layout of VkSemaphoreGetFdInfoKHR as poked below:
  -- sType @0, pNext @8, semaphore @16, handleType @24;
  -- cStructSize = 32, cStructAlignment = 8.
  withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p SemaphoreGetFdInfoKHR{..} f = do
    -- Fixed sType tag identifying this structure to the Vulkan API.
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR)
    -- pNext is always NULL: no extension chain is marshalled here.
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
    poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
    f
  cStructSize = 32
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
    poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
    f
instance FromCStruct SemaphoreGetFdInfoKHR where
  -- Read the fields back from the same offsets used by pokeCStruct
  -- (semaphore @16, handleType @24); sType and pNext are fixed and not read.
  peekCStruct p = do
    semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
    handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
    pure $ SemaphoreGetFdInfoKHR
             semaphore handleType
instance Storable SemaphoreGetFdInfoKHR where
  -- Size/alignment mirror cStructSize/cStructAlignment of the ToCStruct
  -- instance; peek/poke delegate to the CStruct marshalling above.
  sizeOf ~_ = 32
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero SemaphoreGetFdInfoKHR where
  -- Both fields (semaphore, handleType) take their 'zero' values.
  zero = SemaphoreGetFdInfoKHR
           zero
           zero
-- | The extension's spec version, exposed both at the type level and as a
-- bidirectional pattern synonym usable at any 'Integral' type.
type KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION = 1

-- No documentation found for TopLevel "VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION"
pattern KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION = 1
-- | The extension's registered name, exposed both at the type level and as
-- a bidirectional pattern synonym usable at any string-like type.
type KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME = "VK_KHR_external_semaphore_fd"

-- No documentation found for TopLevel "VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME"
pattern KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME = "VK_KHR_external_semaphore_fd"
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/ebc0dde0bcd9cf251f18538de6524eb4f2ab3e9d/src/Vulkan/Extensions/VK_KHR_external_semaphore_fd.hs | haskell | # language CPP #
| = Name
[__Name String__]
@VK_KHR_external_semaphore_fd@
[__Extension Type__]
Device extension
[__Registered Extension Number__]
[__Revision__]
[__Extension and Version Dependencies__]
- Requires @VK_KHR_external_semaphore@ to be enabled for any
device-level functionality
[__Contact__]
== Other Extension Metadata
[__Last Modified Date__]
[__IP Status__]
No known IP claims.
[__Contributors__]
== Description
An application using external memory may wish to synchronize access to
that memory using semaphores. This extension enables an application to
export semaphore payload to and import semaphore payload from POSIX file
descriptors.
== New Commands
- 'getSemaphoreFdKHR'
== New Structures
- 'ImportSemaphoreFdInfoKHR'
- 'SemaphoreGetFdInfoKHR'
== New Enum Constants
- 'KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME'
- 'KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION'
== Issues
1) Does the application need to close the file descriptor returned by
'getSemaphoreFdKHR'?
__RESOLVED__: Yes, unless it is passed back in to a driver instance to
import the semaphore. A successful get call transfers ownership of the
file descriptor to the application, and a successful import transfers it
back to the driver. Destroying the original semaphore object will not
close the file descriptor or remove its reference to the underlying
semaphore resource associated with it.
== Version History
- Initial revision
== See Also
'ImportSemaphoreFdInfoKHR', 'SemaphoreGetFdInfoKHR',
'getSemaphoreFdKHR', 'importSemaphoreFdKHR'
== Document Notes
For more information, see the
<-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd Vulkan Specification>
This page is a generated document. Fixes and changes should be made to
the generator scripts, not directly.
| vkGetSemaphoreFdKHR - Get a POSIX file descriptor handle for a semaphore
= Description
Each call to 'getSemaphoreFdKHR' /must/ create a new file descriptor and
transfer ownership of it to the application. To avoid leaking resources,
the application /must/ release ownership of the file descriptor when it
is no longer needed.
Note
Ownership can be released in many ways. For example, the application can
call @close@() on the file descriptor, or transfer ownership back to
Where supported by the operating system, the implementation /must/ set
the file descriptor to be closed automatically when an @execve@ system
call is made.
Exporting a file descriptor from a semaphore /may/ have side effects
depending on the transference of the specified handle type, as described
in
<-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore State>.
== Return Codes
[<-extensions/html/vkspec.html#fundamentals-successcodes Success>]
[<-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
= See Also
<-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
| @device@ is the logical device that created the semaphore being
exported.
#VUID-vkGetSemaphoreFdKHR-device-parameter# @device@ /must/ be a valid
| @pGetFdInfo@ is a pointer to a 'SemaphoreGetFdInfoKHR' structure
containing parameters of the export operation.
#VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter# @pGetFdInfo@ /must/ be a
valid pointer to a valid 'SemaphoreGetFdInfoKHR' structure
| vkImportSemaphoreFdKHR - Import a semaphore from a POSIX file descriptor
= Description
Importing a semaphore payload from a file descriptor transfers ownership
implementation. The application /must/ not perform any operations on the
file descriptor after a successful import.
Applications /can/ import the same semaphore payload into multiple
== Return Codes
[<-extensions/html/vkspec.html#fundamentals-successcodes Success>]
[<-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
= See Also
<-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
| @device@ is the logical device that created the semaphore.
#VUID-vkImportSemaphoreFdKHR-device-parameter# @device@ /must/ be a
| @pImportSemaphoreFdInfo@ is a pointer to a 'ImportSemaphoreFdInfoKHR'
structure specifying the semaphore and import parameters.
#VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter#
@pImportSemaphoreFdInfo@ /must/ be a valid pointer to a valid
'ImportSemaphoreFdInfoKHR' structure
| VkImportSemaphoreFdInfoKHR - Structure specifying POSIX file descriptor
to import to a semaphore
= Description
+--------------------------------------------------------------------------------------------------------+------------------+---------------------+
| Handle Type | Transference | Permanence |
| | | Supported |
+========================================================================================================+==================+=====================+
+--------------------------------------------------------------------------------------------------------+------------------+---------------------+
+--------------------------------------------------------------------------------------------------------+------------------+---------------------+
Handle Types Supported by 'ImportSemaphoreFdInfoKHR'
== Valid Usage
/must/ be a value included in the
<-extensions/html/vkspec.html#synchronization-semaphore-handletypes-fd Handle Types Supported by >
table
- #VUID-VkImportSemaphoreFdInfoKHR-fd-01544# @fd@ /must/ obey any
<-extensions/html/vkspec.html#external-semaphore-handle-types-compatibility external semaphore handle types compatibility>
is
exported
refers to a handle type with copy payload transference semantics,
@flags@ /must/ contain
is
the
exported
- #VUID-VkImportSemaphoreFdInfoKHR-flags-03323# If @flags@ contains
the
field of the semaphore from which @fd@ was exported /must/ not be
the special value @-1@ for @fd@ is treated like a valid sync file
descriptor referring to an object that has already signaled. The import
have a temporarily imported payload as if a valid file descriptor had
been provided.
Note
This special behavior for importing an invalid sync file descriptor
allows easier interoperability with other system APIs which use the
convention that an invalid sync file descriptor represents work that has
already completed and does not need to be waited for. It is consistent
with the option for implementations to return a @-1@ file descriptor
when exporting a
== Valid Usage (Implicit)
- #VUID-VkImportSemaphoreFdInfoKHR-sType-sType# @sType@ /must/ be
@NULL@
- #VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter# @semaphore@
- #VUID-VkImportSemaphoreFdInfoKHR-flags-parameter# @flags@ /must/ be
a valid combination of
values
/must/ be a valid
value
== Host Synchronization
- Host access to @semaphore@ /must/ be externally synchronized
= See Also
<-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
'importSemaphoreFdKHR'
| @semaphore@ is the semaphore into which the payload will be imported.
| @flags@ is a bitmask of
specifying additional parameters for the semaphore payload import
operation.
| @handleType@ is a
value specifying the type of @fd@.
| @fd@ is the external handle to import.
| VkSemaphoreGetFdInfoKHR - Structure describing a POSIX FD semaphore
export operation
= Description
The properties of the file descriptor returned depend on the value of
for a description of the properties of the defined external semaphore
handle types.
== Valid Usage
have been included in
when @semaphore@’s current payload was created
- #VUID-VkSemaphoreGetFdInfoKHR-semaphore-01133# @semaphore@ /must/
not currently have its payload replaced by an imported payload as
described below in
<-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>
unless that imported payload’s handle type was included in
refers to a handle type with copy payload transference semantics, as
defined below in
<-extensions/html/vkspec.html#synchronization-semaphores-importing Importing Semaphore Payloads>,
there /must/ be no queue waiting on @semaphore@
refers to a handle type with copy payload transference semantics,
@semaphore@ /must/ be signaled, or have an associated
<-extensions/html/vkspec.html#synchronization-semaphores-signaling semaphore signal operation>
pending execution
be defined as a POSIX file descriptor handle
refers to a handle type with copy payload transference semantics,
@semaphore@ /must/ have been created with a
refers to a handle type with copy payload transference semantics,
@semaphore@ /must/ have an associated semaphore signal operation
that has been submitted for execution and any semaphore signal
operations on which it depends (if any) /must/ have also been
submitted for execution
== Valid Usage (Implicit)
- #VUID-VkSemaphoreGetFdInfoKHR-sType-sType# @sType@ /must/ be
- #VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter# @semaphore@
/must/ be a valid
value
= See Also
<-extensions/html/vkspec.html#VK_KHR_external_semaphore_fd VK_KHR_external_semaphore_fd>,
| @semaphore@ is the semaphore from which state will be exported.
| @handleType@ is a
value specifying the type of handle requested. | VK_KHR_external_semaphore_fd - device extension
= = VK_KHR_external_semaphore_fd
80
1
- Requires support for Vulkan 1.0
-
< -Docs/issues/new?body=[VK_KHR_external_semaphore_fd ] @cubanismo%0A*Here describe the issue or question you have about the VK_KHR_external_semaphore_fd extension * >
2016 - 10 - 21
- , Google
- , NVIDIA
- , NVIDIA
- , NVIDIA
- ' importSemaphoreFdKHR '
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
- ' Vulkan . Core10.Enums . StructureType . '
- ' Vulkan . Core10.Enums . StructureType . '
- Revision 1 , 2016 - 10 - 21 ( )
module Vulkan.Extensions.VK_KHR_external_semaphore_fd ( getSemaphoreFdKHR
, importSemaphoreFdKHR
, ImportSemaphoreFdInfoKHR(..)
, SemaphoreGetFdInfoKHR(..)
, KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION
, pattern KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION
, KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
, pattern KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Data.Coerce (coerce)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Foreign.C.Types (CInt(..))
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.C.Types (CInt)
import Foreign.C.Types (CInt(..))
import Foreign.C.Types (CInt(CInt))
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.NamedType ((:::))
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkGetSemaphoreFdKHR))
import Vulkan.Dynamic (DeviceCmds(pVkImportSemaphoreFdKHR))
import Vulkan.Core10.Handles (Device_T)
import Vulkan.Core11.Enums.ExternalSemaphoreHandleTypeFlagBits (ExternalSemaphoreHandleTypeFlagBits)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Handles (Semaphore)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetSemaphoreFdKHR
:: FunPtr (Ptr Device_T -> Ptr SemaphoreGetFdInfoKHR -> Ptr CInt -> IO Result) -> Ptr Device_T -> Ptr SemaphoreGetFdInfoKHR -> Ptr CInt -> IO Result
Vulkan by using the file descriptor to import a semaphore payload .
- ' Vulkan . Core10.Enums . Result . SUCCESS '
- ' Vulkan . Core10.Enums . Result . ERROR_TOO_MANY_OBJECTS '
- ' Vulkan . Core10.Enums . Result . ERROR_OUT_OF_HOST_MEMORY '
' Vulkan . Core10.Handles . Device ' , ' SemaphoreGetFdInfoKHR '
getSemaphoreFdKHR :: forall io
. (MonadIO io)
' Vulkan . Core10.Handles . Device ' handle
Device
SemaphoreGetFdInfoKHR
-> io (("fd" ::: Int32))
getSemaphoreFdKHR device getFdInfo = liftIO . evalContT $ do
let vkGetSemaphoreFdKHRPtr = pVkGetSemaphoreFdKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkGetSemaphoreFdKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetSemaphoreFdKHR is null" Nothing Nothing
let vkGetSemaphoreFdKHR' = mkVkGetSemaphoreFdKHR vkGetSemaphoreFdKHRPtr
pGetFdInfo <- ContT $ withCStruct (getFdInfo)
pPFd <- ContT $ bracket (callocBytes @CInt 4) free
r <- lift $ traceAroundEvent "vkGetSemaphoreFdKHR" (vkGetSemaphoreFdKHR'
(deviceHandle (device))
pGetFdInfo
(pPFd))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pFd <- lift $ peek @CInt pPFd
pure $ ((coerce @CInt @Int32 pFd))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkImportSemaphoreFdKHR
:: FunPtr (Ptr Device_T -> Ptr ImportSemaphoreFdInfoKHR -> IO Result) -> Ptr Device_T -> Ptr ImportSemaphoreFdInfoKHR -> IO Result
of the file descriptor from the application to the Vulkan
instances of Vulkan , into the same instance from which it was exported ,
and multiple times into a given Vulkan instance .
- ' Vulkan . Core10.Enums . Result . SUCCESS '
- ' Vulkan . Core10.Enums . Result . ERROR_OUT_OF_HOST_MEMORY '
- ' Vulkan . Core10.Enums . Result . ERROR_INVALID_EXTERNAL_HANDLE '
' Vulkan . Core10.Handles . Device ' , ' ImportSemaphoreFdInfoKHR '
importSemaphoreFdKHR :: forall io
. (MonadIO io)
valid ' Vulkan . Core10.Handles . Device ' handle
Device
ImportSemaphoreFdInfoKHR
-> io ()
importSemaphoreFdKHR device importSemaphoreFdInfo = liftIO . evalContT $ do
let vkImportSemaphoreFdKHRPtr = pVkImportSemaphoreFdKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkImportSemaphoreFdKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkImportSemaphoreFdKHR is null" Nothing Nothing
let vkImportSemaphoreFdKHR' = mkVkImportSemaphoreFdKHR vkImportSemaphoreFdKHRPtr
pImportSemaphoreFdInfo <- ContT $ withCStruct (importSemaphoreFdInfo)
r <- lift $ traceAroundEvent "vkImportSemaphoreFdKHR" (vkImportSemaphoreFdKHR'
(deviceHandle (device))
pImportSemaphoreFdInfo)
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
The handle types supported by @handleType@ are :
| ' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ' | Reference | Temporary , Permanent |
| ' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . | Copy | Temporary |
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-01143 # @handleType@
requirements listed for @handleType@ in
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-03263 # If @handleType@
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ' ,
the ' Vulkan . Core10.QueueSemaphore .
field /must/ match that of the semaphore from which @fd@ was
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-07307 # If @handleType@
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SEMAPHORE_IMPORT_TEMPORARY_BIT '
- # VUID - VkImportSemaphoreFdInfoKHR - handleType-03264 # If @handleType@
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ' ,
' Vulkan . Core12.Promoted_From_VK_KHR_timeline_semaphore . SemaphoreTypeCreateInfo'::@semaphoreType@
field /must/ match that of the semaphore from which @fd@ was
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SEMAPHORE_IMPORT_TEMPORARY_BIT ' ,
' Vulkan . Core12.Promoted_From_VK_KHR_timeline_semaphore . SemaphoreTypeCreateInfo'::@semaphoreType@
' Vulkan . Core12.Enums . . SEMAPHORE_TYPE_TIMELINE '
If @handleType@ is
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ,
operation will succeed and the ' Vulkan . Core10.Handles . Semaphore ' will
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . '
from a ' Vulkan . Core10.Handles . Semaphore ' which is signaled .
' Vulkan . Core10.Enums . StructureType . '
- # VUID - VkImportSemaphoreFdInfoKHR - pNext - pNext # @pNext@ /must/ be
/must/ be a valid ' Vulkan . Core10.Handles . Semaphore ' handle
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SemaphoreImportFlagBits '
- # VUID - VkImportSemaphoreFdInfoKHR - handleType - parameter # @handleType@
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits ' ,
' Vulkan . Core10.Handles . Semaphore ' ,
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SemaphoreImportFlags ' ,
' Vulkan . Core10.Enums . StructureType . StructureType ' ,
data ImportSemaphoreFdInfoKHR = ImportSemaphoreFdInfoKHR
semaphore :: Semaphore
' Vulkan . Core11.Enums . SemaphoreImportFlagBits . SemaphoreImportFlagBits '
flags :: SemaphoreImportFlags
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
handleType :: ExternalSemaphoreHandleTypeFlagBits
fd :: Int32
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ImportSemaphoreFdInfoKHR)
#endif
deriving instance Show ImportSemaphoreFdInfoKHR
instance ToCStruct ImportSemaphoreFdInfoKHR where
withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
pokeCStruct p ImportSemaphoreFdInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
poke ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags)) (flags)
poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
poke ((p `plusPtr` 32 :: Ptr CInt)) (CInt (fd))
f
cStructSize = 40
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
poke ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
poke ((p `plusPtr` 32 :: Ptr CInt)) (CInt (zero))
f
instance FromCStruct ImportSemaphoreFdInfoKHR where
peekCStruct p = do
semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
flags <- peek @SemaphoreImportFlags ((p `plusPtr` 24 :: Ptr SemaphoreImportFlags))
handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 28 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
fd <- peek @CInt ((p `plusPtr` 32 :: Ptr CInt))
pure $ ImportSemaphoreFdInfoKHR
semaphore flags handleType (coerce @CInt @Int32 fd)
instance Storable ImportSemaphoreFdInfoKHR where
sizeOf ~_ = 40
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero ImportSemaphoreFdInfoKHR where
zero = ImportSemaphoreFdInfoKHR
zero
zero
zero
zero
@handleType@. See
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01132 # @handleType@ /must/
' Vulkan . Core11.Promoted_From_VK_KHR_external_semaphore . ExportSemaphoreCreateInfo'::@handleTypes@
' Vulkan . Core11.Promoted_From_VK_KHR_external_semaphore_capabilities . ExternalSemaphoreProperties'::@exportFromImportedHandleTypes@
for
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01134 # If @handleType@
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01135 # If @handleType@
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-01136 # @handleType@ /must/
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-03253 # If @handleType@
' Vulkan . Core12.Enums . SemaphoreType . ' of
' Vulkan . Core12.Enums . SemaphoreType . SEMAPHORE_TYPE_BINARY '
- # VUID - VkSemaphoreGetFdInfoKHR - handleType-03254 # If @handleType@
' Vulkan . Core10.Enums . StructureType . '
- # VUID - VkSemaphoreGetFdInfoKHR - pNext - pNext # @pNext@ /must/ be @NULL@
/must/ be a valid ' Vulkan . Core10.Handles . Semaphore ' handle
- # VUID - VkSemaphoreGetFdInfoKHR - handleType - parameter # @handleType@
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits ' ,
' Vulkan . Core10.Handles . Semaphore ' ,
' Vulkan . Core10.Enums . StructureType . StructureType ' , ' getSemaphoreFdKHR '
data SemaphoreGetFdInfoKHR = SemaphoreGetFdInfoKHR
semaphore :: Semaphore
' Vulkan . Core11.Enums . ExternalSemaphoreHandleTypeFlagBits . ExternalSemaphoreHandleTypeFlagBits '
handleType :: ExternalSemaphoreHandleTypeFlagBits
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (SemaphoreGetFdInfoKHR)
#endif
deriving instance Show SemaphoreGetFdInfoKHR
instance ToCStruct SemaphoreGetFdInfoKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p SemaphoreGetFdInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (semaphore)
poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (handleType)
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Semaphore)) (zero)
poke ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits)) (zero)
f
instance FromCStruct SemaphoreGetFdInfoKHR where
peekCStruct p = do
semaphore <- peek @Semaphore ((p `plusPtr` 16 :: Ptr Semaphore))
handleType <- peek @ExternalSemaphoreHandleTypeFlagBits ((p `plusPtr` 24 :: Ptr ExternalSemaphoreHandleTypeFlagBits))
pure $ SemaphoreGetFdInfoKHR
semaphore handleType
instance Storable SemaphoreGetFdInfoKHR where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero SemaphoreGetFdInfoKHR where
zero = SemaphoreGetFdInfoKHR
zero
zero
type KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION = 1
No documentation found for TopLevel " VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION "
pattern KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION = 1
type KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME = "VK_KHR_external_semaphore_fd"
No documentation found for TopLevel " VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "
pattern KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME = "VK_KHR_external_semaphore_fd"
|
015546de1981ee71d760449c1872583c14be21593ea2ab83c368f7f03a0a10e3 | mzp/coq-ide-for-ios | proof_trees.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
$ I d : 2010 - 07 - 24 15:57:30Z herbelin $
open Closure
open Util
open Names
open Nameops
open Term
open Termops
open Sign
open Evd
open Environ
open Evarutil
open Decl_expr
open Proof_type
open Tacred
open Typing
open Libnames
open Nametab
let is_bind = function
| Tacexpr . true
| _ - > false
let is_bind = function
| Tacexpr.Cbindings _ -> true
| _ -> false
*)
(* Functions on goals *)
let mk_goal hyps cl extra =
{ evar_hyps = hyps; evar_concl = cl;
evar_filter = List.map (fun _ -> true) (named_context_of_val hyps);
evar_body = Evar_empty; evar_source = (dummy_loc,GoalEvar);
evar_extra = extra }
(* Functions on proof trees *)
let ref_of_proof pf =
match pf.ref with
| None -> failwith "rule_of_proof"
| Some r -> r
let rule_of_proof pf =
let (r,_) = ref_of_proof pf in r
let children_of_proof pf =
let (_,cl) = ref_of_proof pf in cl
let goal_of_proof pf = pf.goal
let subproof_of_proof pf = match pf.ref with
| Some (Nested (_,pf), _) -> pf
| _ -> failwith "subproof_of_proof"
let status_of_proof pf = pf.open_subgoals
let is_complete_proof pf = pf.open_subgoals = 0
let is_leaf_proof pf = (pf.ref = None)
let is_tactic_proof pf = match pf.ref with
| Some (Nested (Tactic _,_),_) -> true
| _ -> false
let pf_lookup_name_as_displayed env ccl s =
Detyping.lookup_name_as_displayed env ccl s
let pf_lookup_index_as_renamed env ccl n =
Detyping.lookup_index_as_renamed env ccl n
(* Functions on rules (Proof mode) *)
let is_dem_rule = function
Decl_proof _ -> true
| _ -> false
let is_proof_instr = function
Nested(Proof_instr (_,_),_) -> true
| _ -> false
let is_focussing_command = function
Decl_proof b -> b
| Nested (Proof_instr (b,_),_) -> b
| _ -> false
(*********************************************************************)
(* Pretty printing functions *)
(*********************************************************************)
open Pp
let db_pr_goal g =
let env = evar_env g in
let penv = print_named_context env in
let pc = print_constr_env env g.evar_concl in
str" " ++ hv 0 (penv ++ fnl () ++
str "============================" ++ fnl () ++
str" " ++ pc) ++ fnl ()
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/proofs/proof_trees.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Functions on goals
Functions on proof trees
Functions on rules (Proof mode)
*******************************************************************
Pretty printing functions
******************************************************************* | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
$ I d : 2010 - 07 - 24 15:57:30Z herbelin $
open Closure
open Util
open Names
open Nameops
open Term
open Termops
open Sign
open Evd
open Environ
open Evarutil
open Decl_expr
open Proof_type
open Tacred
open Typing
open Libnames
open Nametab
let is_bind = function
| Tacexpr . true
| _ - > false
let is_bind = function
| Tacexpr.Cbindings _ -> true
| _ -> false
*)
let mk_goal hyps cl extra =
{ evar_hyps = hyps; evar_concl = cl;
evar_filter = List.map (fun _ -> true) (named_context_of_val hyps);
evar_body = Evar_empty; evar_source = (dummy_loc,GoalEvar);
evar_extra = extra }
let ref_of_proof pf =
match pf.ref with
| None -> failwith "rule_of_proof"
| Some r -> r
let rule_of_proof pf =
let (r,_) = ref_of_proof pf in r
let children_of_proof pf =
let (_,cl) = ref_of_proof pf in cl
let goal_of_proof pf = pf.goal
let subproof_of_proof pf = match pf.ref with
| Some (Nested (_,pf), _) -> pf
| _ -> failwith "subproof_of_proof"
let status_of_proof pf = pf.open_subgoals
let is_complete_proof pf = pf.open_subgoals = 0
let is_leaf_proof pf = (pf.ref = None)
let is_tactic_proof pf = match pf.ref with
| Some (Nested (Tactic _,_),_) -> true
| _ -> false
let pf_lookup_name_as_displayed env ccl s =
Detyping.lookup_name_as_displayed env ccl s
let pf_lookup_index_as_renamed env ccl n =
Detyping.lookup_index_as_renamed env ccl n
let is_dem_rule = function
Decl_proof _ -> true
| _ -> false
let is_proof_instr = function
Nested(Proof_instr (_,_),_) -> true
| _ -> false
let is_focussing_command = function
Decl_proof b -> b
| Nested (Proof_instr (b,_),_) -> b
| _ -> false
open Pp
let db_pr_goal g =
let env = evar_env g in
let penv = print_named_context env in
let pc = print_constr_env env g.evar_concl in
str" " ++ hv 0 (penv ++ fnl () ++
str "============================" ++ fnl () ++
str" " ++ pc) ++ fnl ()
|
c449a2456a482dc51244182f86e5b87f99d7bad42234b064765e40aa86b9e11d | joeltg/mit-scheme-kernel | canvas.scm | (define *canvas-size* 300)
(define *frame-height* 400)
(define *frame-width* 400)
(define *foreground-color* "white")
(define *background-color* "black")
(define *can-use-colors* #t)
(define *frame-x-position* (if (eq? 'unix microcode-id/operating-system) -10 532))
(define *frame-y-position* 0)
(define *frame-width* (if (eq? 'unix microcode-id/operating-system) 400 100))
(define *frame-height* (if (eq? 'unix microcode-id/operating-system) 400 100))
(define (get-pointer-coordinates-default-continuation x y button) *silence*)
(define get-pointer-coordinates-continuation get-pointer-coordinates-default-continuation)
(define-structure
(canvas (constructor make-canvas (#!optional xmin xmax ymin ymax)))
(id 0)
(xmin 0)
(xmax *canvas-size*)
(ymin 0)
(ymax *canvas-size*)
(frame-width *canvas-size*)
(frame-height *canvas-size*)
(frame-x-position 0)
(frame-y-position 0))
(define (send-canvas canvas action #!optional value)
(*send* 2 (symbol->json action) (number->string (canvas-id canvas)) (json value)))
(define (canvas-available? . args) #t)
(define (canvas-coordinate-limits canvas)
(list (canvas-xmin canvas) (canvas-ymax canvas)
(canvas-xmax canvas) (canvas-ymin canvas)))
(define (canvas-device-coordinate-limits canvas)
(list 0 0 (canvas-frame-width canvas) (canvas-frame-height canvas)))
(define (canvas-set-coordinate-limits canvas x-left y-bottom x-right y-top)
(set-canvas-xmin! canvas x-left)
(set-canvas-ymin! canvas y-bottom)
(set-canvas-xmax! canvas x-right)
(set-canvas-ymax! canvas y-top)
(send-canvas canvas 'set_coordinate_limits (list x-left y-bottom x-right y-top)))
(define (canvas-drag-cursor canvas x y)
(send-canvas canvas 'drag_cursor (list x y)))
(define (canvas-move-cursor canvas x y)
(send-canvas canvas 'move_cursor (list x y)))
(define (canvas-reset-clip-rectangle canvas)
(send-canvas canvas 'reset_clip_rectangle (canvas-coordinate-limits canvas)))
(define (canvas-set-clip-rectangle canvas x-left y-bottom x-right y-top)
(send-canvas canvas 'set_clip_rectangle (list x-left y-bottom x-right y-top)))
(define (canvas-set-drawing-mode canvas mode)
(send-canvas canvas 'set_drawing_mode `((mode ,mode))))
(define (canvas-set-line-style canvas style)
(send-canvas canvas 'set_line_style `((style ,style))))
(define (canvas-clear canvas)
(send-canvas canvas 'clear))
(define (canvas-flush canvas)
; *silence*)
#!unspecific)
(define (canvas-close canvas)
(send-canvas canvas 'close))
(define (canvas-draw-rect canvas x y width height)
(send-canvas canvas 'draw_rect (list x y width height)))
(define (canvas-erase-rect canvas x y width height)
(send-canvas canvas 'erase_rect (list x y width height)))
(define (canvas-draw-rects canvas rects)
(send-canvas canvas 'draw_rects rects))
(define (canvas-erase-rects canvas rects)
(send-canvas canvas 'erase_rects rects))
(define (canvas-draw-point canvas x y)
(send-canvas canvas 'draw_point (list x y)))
(define (canvas-draw-points canvas points)
(send-canvas canvas 'draw_points points))
(define (canvas-erase-point canvas x y)
(send-canvas canvas 'erase_point (list x y)))
(define (canvas-erase-points canvas points)
(send-canvas canvas 'erase_points points))
(define (canvas-draw-line canvas x-start y-start x-end y-end)
(send-canvas canvas 'draw_line (list x-start y-start x-end y-end)))
(define (canvas-draw-text canvas x y string)
(send-canvas canvas 'draw_text (list x y string)))
(define (canvas-set-font canvas font-name)
(send-canvas canvas 'set_font font-name))
(define (canvas-set-background-color canvas color)
(send-canvas canvas 'set_background_color color))
(define (canvas-set-foreground-color canvas color)
(send-canvas canvas 'set_foreground_color color))
(define (canvas-get-pointer-coordinates canvas cont)
(set! get-pointer-coordinates-continuation
(lambda (x y button)
(set! get-pointer-coordinates-continuation get-pointer-coordinates-default-continuation)
(cont x y button)))
(send-canvas canvas 'get_pointer_coordinates)) | null | https://raw.githubusercontent.com/joeltg/mit-scheme-kernel/b8ed3443a075e46567570062d73d3eb775b8743f/src/runtime/canvas/canvas.scm | scheme | *silence*) | (define *canvas-size* 300)
(define *frame-height* 400)
(define *frame-width* 400)
(define *foreground-color* "white")
(define *background-color* "black")
(define *can-use-colors* #t)
(define *frame-x-position* (if (eq? 'unix microcode-id/operating-system) -10 532))
(define *frame-y-position* 0)
(define *frame-width* (if (eq? 'unix microcode-id/operating-system) 400 100))
(define *frame-height* (if (eq? 'unix microcode-id/operating-system) 400 100))
(define (get-pointer-coordinates-default-continuation x y button) *silence*)
(define get-pointer-coordinates-continuation get-pointer-coordinates-default-continuation)
(define-structure
(canvas (constructor make-canvas (#!optional xmin xmax ymin ymax)))
(id 0)
(xmin 0)
(xmax *canvas-size*)
(ymin 0)
(ymax *canvas-size*)
(frame-width *canvas-size*)
(frame-height *canvas-size*)
(frame-x-position 0)
(frame-y-position 0))
(define (send-canvas canvas action #!optional value)
(*send* 2 (symbol->json action) (number->string (canvas-id canvas)) (json value)))
(define (canvas-available? . args) #t)
(define (canvas-coordinate-limits canvas)
(list (canvas-xmin canvas) (canvas-ymax canvas)
(canvas-xmax canvas) (canvas-ymin canvas)))
(define (canvas-device-coordinate-limits canvas)
(list 0 0 (canvas-frame-width canvas) (canvas-frame-height canvas)))
(define (canvas-set-coordinate-limits canvas x-left y-bottom x-right y-top)
(set-canvas-xmin! canvas x-left)
(set-canvas-ymin! canvas y-bottom)
(set-canvas-xmax! canvas x-right)
(set-canvas-ymax! canvas y-top)
(send-canvas canvas 'set_coordinate_limits (list x-left y-bottom x-right y-top)))
(define (canvas-drag-cursor canvas x y)
(send-canvas canvas 'drag_cursor (list x y)))
(define (canvas-move-cursor canvas x y)
(send-canvas canvas 'move_cursor (list x y)))
(define (canvas-reset-clip-rectangle canvas)
(send-canvas canvas 'reset_clip_rectangle (canvas-coordinate-limits canvas)))
(define (canvas-set-clip-rectangle canvas x-left y-bottom x-right y-top)
(send-canvas canvas 'set_clip_rectangle (list x-left y-bottom x-right y-top)))
(define (canvas-set-drawing-mode canvas mode)
(send-canvas canvas 'set_drawing_mode `((mode ,mode))))
(define (canvas-set-line-style canvas style)
(send-canvas canvas 'set_line_style `((style ,style))))
(define (canvas-clear canvas)
(send-canvas canvas 'clear))
(define (canvas-flush canvas)
#!unspecific)
(define (canvas-close canvas)
(send-canvas canvas 'close))
(define (canvas-draw-rect canvas x y width height)
(send-canvas canvas 'draw_rect (list x y width height)))
(define (canvas-erase-rect canvas x y width height)
(send-canvas canvas 'erase_rect (list x y width height)))
(define (canvas-draw-rects canvas rects)
(send-canvas canvas 'draw_rects rects))
(define (canvas-erase-rects canvas rects)
(send-canvas canvas 'erase_rects rects))
(define (canvas-draw-point canvas x y)
(send-canvas canvas 'draw_point (list x y)))
(define (canvas-draw-points canvas points)
(send-canvas canvas 'draw_points points))
(define (canvas-erase-point canvas x y)
(send-canvas canvas 'erase_point (list x y)))
(define (canvas-erase-points canvas points)
(send-canvas canvas 'erase_points points))
(define (canvas-draw-line canvas x-start y-start x-end y-end)
(send-canvas canvas 'draw_line (list x-start y-start x-end y-end)))
(define (canvas-draw-text canvas x y string)
(send-canvas canvas 'draw_text (list x y string)))
(define (canvas-set-font canvas font-name)
(send-canvas canvas 'set_font font-name))
(define (canvas-set-background-color canvas color)
(send-canvas canvas 'set_background_color color))
(define (canvas-set-foreground-color canvas color)
(send-canvas canvas 'set_foreground_color color))
(define (canvas-get-pointer-coordinates canvas cont)
(set! get-pointer-coordinates-continuation
(lambda (x y button)
(set! get-pointer-coordinates-continuation get-pointer-coordinates-default-continuation)
(cont x y button)))
(send-canvas canvas 'get_pointer_coordinates)) |
63707c7b3bdb0d5f04bc1165b98989f711a7941955f359b8f28f8b5e59031d55 | MinaProtocol/mina | dlog_plonk_based_keypair.ml | module Poly_comm0 = Poly_comm
open Unsigned.Size_t
module type Stable_v1 = sig
module Stable : sig
module V1 : sig
type t [@@deriving version, bin_io, sexp, compare, yojson, hash, equal]
end
module Latest = V1
end
type t = Stable.V1.t [@@deriving sexp, compare, yojson]
end
module type Inputs_intf = sig
open Intf
val name : string
module Rounds : Pickles_types.Nat.Intf
module Gate_vector : sig
open Unsigned
type t
val wrap : t -> Kimchi_types.wire -> Kimchi_types.wire -> unit
end
module Urs : sig
type t
val read : int option -> string -> t option
val write : bool option -> t -> string -> unit
val create : int -> t
end
module Scalar_field : sig
include Stable_v1
val one : t
end
module Constraint_system : sig
type t
val get_primary_input_size : t -> int
val get_prev_challenges : t -> int option
val set_prev_challenges : t -> int -> unit
val finalize_and_get_gates : t -> Gate_vector.t
end
module Index : sig
type t
val create : Gate_vector.t -> int -> int -> Urs.t -> t
end
module Curve : sig
module Base_field : sig
type t
end
module Affine : sig
type t = Base_field.t * Base_field.t
end
end
module Poly_comm : sig
module Backend : sig
type t
end
type t = Curve.Base_field.t Poly_comm0.t
val of_backend_without_degree_bound : Backend.t -> t
end
module Verifier_index : sig
type t =
( Scalar_field.t
, Urs.t
, Poly_comm.Backend.t )
Kimchi_types.VerifierIndex.verifier_index
val create : Index.t -> t
end
end
module Make (Inputs : Inputs_intf) = struct
open Core_kernel
type t = { index : Inputs.Index.t; cs : Inputs.Constraint_system.t }
let name =
sprintf "%s_%d_v4" Inputs.name (Pickles_types.Nat.to_int Inputs.Rounds.n)
let set_urs_info, load_urs =
let urs_info = Set_once.create () in
let urs = ref None in
let degree = 1 lsl Pickles_types.Nat.to_int Inputs.Rounds.n in
let set_urs_info specs = Set_once.set_exn urs_info Lexing.dummy_pos specs in
let load () =
match !urs with
| Some urs ->
urs
| None ->
let specs =
match Set_once.get urs_info with
| None ->
failwith "Dlog_based.urs: Info not set"
| Some t ->
t
in
let store =
Key_cache.Sync.Disk_storable.simple
(fun () -> name)
(fun () ~path ->
Or_error.try_with_join (fun () ->
match Inputs.Urs.read None path with
| Some urs ->
Ok urs
| None ->
Or_error.errorf
"Could not read the URS from disk; its format did \
not match the expected format" ) )
(fun _ urs path ->
Or_error.try_with (fun () -> Inputs.Urs.write None urs path) )
in
let u =
match Key_cache.Sync.read specs store () with
| Ok (u, _) ->
u
| Error _e ->
let urs = Inputs.Urs.create degree in
let (_ : (unit, Error.t) Result.t) =
Key_cache.Sync.write
(List.filter specs ~f:(function
| On_disk _ ->
true
| S3 _ ->
false ) )
store () urs
in
urs
in
urs := Some u ;
u
in
(set_urs_info, load)
let create ~prev_challenges cs =
let gates = Inputs.Constraint_system.finalize_and_get_gates cs in
let public_input_size =
Inputs.Constraint_system.get_primary_input_size cs
in
let prev_challenges =
match Inputs.Constraint_system.get_prev_challenges cs with
| None ->
Inputs.Constraint_system.set_prev_challenges cs prev_challenges ;
prev_challenges
| Some prev_challenges' ->
assert (prev_challenges = prev_challenges') ;
prev_challenges'
in
let index =
Inputs.Index.create gates public_input_size prev_challenges (load_urs ())
in
{ index; cs }
let vk t = Inputs.Verifier_index.create t.index
let pk t = t
let array_to_vector a = Pickles_types.Vector.of_list (Array.to_list a)
(** does this convert a backend.verifier_index to a pickles_types.verifier_index? *)
let vk_commitments (t : Inputs.Verifier_index.t) :
Inputs.Curve.Affine.t Pickles_types.Plonk_verification_key_evals.t =
let g c : Inputs.Curve.Affine.t =
match Inputs.Poly_comm.of_backend_without_degree_bound c with
| `Without_degree_bound x ->
x.(0)
| `With_degree_bound _ ->
assert false
in
{ sigma_comm =
Pickles_types.Vector.init Pickles_types.Plonk_types.Permuts.n
~f:(fun i -> g t.evals.sigma_comm.(i))
; coefficients_comm =
Pickles_types.Vector.init Pickles_types.Plonk_types.Columns.n
~f:(fun i -> g t.evals.coefficients_comm.(i))
; generic_comm = g t.evals.generic_comm
; psm_comm = g t.evals.psm_comm
; complete_add_comm = g t.evals.complete_add_comm
; mul_comm = g t.evals.mul_comm
; emul_comm = g t.evals.emul_comm
; endomul_scalar_comm = g t.evals.endomul_scalar_comm
}
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/a40d965ae6b39ca93d9eed17efcbf77e0778de0a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml | ocaml | * does this convert a backend.verifier_index to a pickles_types.verifier_index? | module Poly_comm0 = Poly_comm
open Unsigned.Size_t
module type Stable_v1 = sig
module Stable : sig
module V1 : sig
type t [@@deriving version, bin_io, sexp, compare, yojson, hash, equal]
end
module Latest = V1
end
type t = Stable.V1.t [@@deriving sexp, compare, yojson]
end
module type Inputs_intf = sig
open Intf
val name : string
module Rounds : Pickles_types.Nat.Intf
module Gate_vector : sig
open Unsigned
type t
val wrap : t -> Kimchi_types.wire -> Kimchi_types.wire -> unit
end
module Urs : sig
type t
val read : int option -> string -> t option
val write : bool option -> t -> string -> unit
val create : int -> t
end
module Scalar_field : sig
include Stable_v1
val one : t
end
module Constraint_system : sig
type t
val get_primary_input_size : t -> int
val get_prev_challenges : t -> int option
val set_prev_challenges : t -> int -> unit
val finalize_and_get_gates : t -> Gate_vector.t
end
module Index : sig
type t
val create : Gate_vector.t -> int -> int -> Urs.t -> t
end
module Curve : sig
module Base_field : sig
type t
end
module Affine : sig
type t = Base_field.t * Base_field.t
end
end
module Poly_comm : sig
module Backend : sig
type t
end
type t = Curve.Base_field.t Poly_comm0.t
val of_backend_without_degree_bound : Backend.t -> t
end
module Verifier_index : sig
type t =
( Scalar_field.t
, Urs.t
, Poly_comm.Backend.t )
Kimchi_types.VerifierIndex.verifier_index
val create : Index.t -> t
end
end
module Make (Inputs : Inputs_intf) = struct
open Core_kernel
type t = { index : Inputs.Index.t; cs : Inputs.Constraint_system.t }
let name =
sprintf "%s_%d_v4" Inputs.name (Pickles_types.Nat.to_int Inputs.Rounds.n)
  (* [set_urs_info specs] records where the URS (universal reference string)
     may be cached; it must be called exactly once before [load_urs].
     [load_urs ()] lazily loads (or generates) the URS, memoizing the result
     in a ref for the lifetime of the process. *)
  let set_urs_info, load_urs =
    let urs_info = Set_once.create () in
    (* Process-wide memo of the loaded URS. *)
    let urs = ref None in
    (* SRS domain size: 2^rounds. *)
    let degree = 1 lsl Pickles_types.Nat.to_int Inputs.Rounds.n in
    let set_urs_info specs = Set_once.set_exn urs_info Lexing.dummy_pos specs in
    let load () =
      match !urs with
      | Some urs ->
          urs
      | None ->
          (* Fails loudly if [set_urs_info] was never called. *)
          let specs =
            match Set_once.get urs_info with
            | None ->
                failwith "Dlog_based.urs: Info not set"
            | Some t ->
                t
          in
          (* Describes how to read/write the URS from/to the key cache. *)
          let store =
            Key_cache.Sync.Disk_storable.simple
              (fun () -> name)
              (fun () ~path ->
                Or_error.try_with_join (fun () ->
                    match Inputs.Urs.read None path with
                    | Some urs ->
                        Ok urs
                    | None ->
                        Or_error.errorf
                          "Could not read the URS from disk; its format did \
                           not match the expected format" ) )
              (fun _ urs path ->
                Or_error.try_with (fun () -> Inputs.Urs.write None urs path) )
          in
          let u =
            match Key_cache.Sync.read specs store () with
            | Ok (u, _) ->
                u
            | Error _e ->
                (* Cache miss: generate a fresh URS, then best-effort persist
                   it to on-disk locations only (never to S3); the write
                   result is deliberately ignored. *)
                let urs = Inputs.Urs.create degree in
                let (_ : (unit, Error.t) Result.t) =
                  Key_cache.Sync.write
                    (List.filter specs ~f:(function
                      | On_disk _ ->
                          true
                      | S3 _ ->
                          false ) )
                    store () urs
                in
                urs
          in
          urs := Some u ;
          u
    in
    (set_urs_info, load)
  (* Build a prover index from a finalized constraint system. Reconciles the
     requested [prev_challenges] with any value already recorded on the
     constraint system: sets it when absent, asserts equality when present. *)
  let create ~prev_challenges cs =
    let gates = Inputs.Constraint_system.finalize_and_get_gates cs in
    let public_input_size =
      Inputs.Constraint_system.get_primary_input_size cs
    in
    let prev_challenges =
      match Inputs.Constraint_system.get_prev_challenges cs with
      | None ->
          Inputs.Constraint_system.set_prev_challenges cs prev_challenges ;
          prev_challenges
      | Some prev_challenges' ->
          (* Mismatch here is a programming error, not a runtime condition. *)
          assert (prev_challenges = prev_challenges') ;
          prev_challenges'
    in
    let index =
      Inputs.Index.create gates public_input_size prev_challenges (load_urs ())
    in
    { index; cs }
  (* Derive the verifier key from a prover state. *)
  let vk t = Inputs.Verifier_index.create t.index

  (* The proving key is the whole state itself. *)
  let pk t = t

  (* Convert an OCaml array to a Pickles length-indexed vector. *)
  let array_to_vector a = Pickles_types.Vector.of_list (Array.to_list a)
  (* Extract the polynomial commitments from a verifier index as affine curve
     points. Each commitment is expected to be without degree bound and to
     consist of a single chunk (hence [x.(0)]); anything else is a bug. *)
  let vk_commitments (t : Inputs.Verifier_index.t) :
      Inputs.Curve.Affine.t Pickles_types.Plonk_verification_key_evals.t =
    let g c : Inputs.Curve.Affine.t =
      match Inputs.Poly_comm.of_backend_without_degree_bound c with
      | `Without_degree_bound x ->
          x.(0)
      | `With_degree_bound _ ->
          assert false
    in
    { sigma_comm =
        Pickles_types.Vector.init Pickles_types.Plonk_types.Permuts.n
          ~f:(fun i -> g t.evals.sigma_comm.(i))
    ; coefficients_comm =
        Pickles_types.Vector.init Pickles_types.Plonk_types.Columns.n
          ~f:(fun i -> g t.evals.coefficients_comm.(i))
    ; generic_comm = g t.evals.generic_comm
    ; psm_comm = g t.evals.psm_comm
    ; complete_add_comm = g t.evals.complete_add_comm
    ; mul_comm = g t.evals.mul_comm
    ; emul_comm = g t.evals.emul_comm
    ; endomul_scalar_comm = g t.evals.endomul_scalar_comm
    }
end
|
99bd48bdcb83dbdc4a0f8bb9e51eb5c65c52b9f1846a4f1534a73784e74e7a8a | runtimeverification/haskell-backend | WarnIfLowProductivity.hs | # LANGUAGE NoStrict #
# LANGUAGE NoStrictData #
{- |
Copyright   : (c) Runtime Verification, 2020-2021
License     : BSD-3-Clause
-}
module Kore.Log.WarnIfLowProductivity (
WarnIfLowProductivity (..),
warnIfLowProductivity,
) where
import Kore.Attribute.Definition (KFileLocations (..))
import Log
import Numeric.Natural
import Prelude.Kore
import Pretty (
Pretty,
)
import Pretty qualified
import Stats
{- | @WarnIfLowProductivity@ is emitted when productivity drops below a certain
point.
The warning message also displays the locations of the original K files used if
they are provided as attributes in the kore file.
-}
-- | Payload of the low-productivity warning log entry.
data WarnIfLowProductivity = WarnIfLowProductivity
    { productivityPercent :: Natural
    -- ^ Mutator productivity as a whole-number percentage.
    , kFileLocations :: KFileLocations
    -- ^ Locations of the original K files, included to aid bug reports.
    }
    deriving stock (Show)
instance Pretty WarnIfLowProductivity where
    -- Renders the productivity figure, the relevant K file locations (when
    -- any are known), and a request to file a bug report.
    pretty
        WarnIfLowProductivity
            { productivityPercent
            , kFileLocations = KFileLocations locations
            } =
            (Pretty.vsep . concat)
                [
                    [ Pretty.hsep
                        [ "Productivity dropped to:"
                        , Pretty.pretty productivityPercent <> "%"
                        ]
                    ]
                , kFiles
                ,
                    [ "Poor productivity may indicate a performance bug."
                    -- NOTE(review): the URL below looks truncated by data
                    -- extraction — confirm against the upstream repository.
                    , "Please file a bug report: -backend/issues"
                    ]
                ]
      where
        -- Section listing K file locations; empty when none were supplied.
        kFiles
            | not . null $ locations =
                [ (Pretty.nest 4 . Pretty.vsep)
                    ("Relevant K files include:" : fmap Pretty.pretty locations)
                ]
            | otherwise = []
instance Entry WarnIfLowProductivity where
    -- A productivity drop is a warning, never an error.
    entrySeverity _ = Warning
    -- One-line form: just the productivity percentage.
    oneLineDoc (WarnIfLowProductivity productivityPercent _) =
        Pretty.pretty productivityPercent
    -- Fixed: "productivty" -> "productivity" in the user-facing help text.
    helpDoc _ = "warn when productivity (MUT time / Total time) drops below 90%"
-- | Query the RTS statistics and emit 'WarnIfLowProductivity' when GC took
-- more than 10% of total CPU time. To avoid noise on short runs, nothing is
-- logged unless total CPU time is at least 60 seconds — which also means
-- @cpu_ns@ is nonzero whenever the division below matters.
warnIfLowProductivity ::
    MonadLog log =>
    MonadIO log =>
    KFileLocations ->
    log ()
warnIfLowProductivity kFileLocations = do
    Stats{gc_cpu_ns, cpu_ns} <- liftIO getStats
    let gcTimeOver10Percent = gc_cpu_ns * 10 > cpu_ns
        gcPercentage = gc_cpu_ns * 100 `div` cpu_ns
        productivity = 100 - gcPercentage & fromIntegral
        -- 60 seconds expressed in nanoseconds.
        runTimeOver60Seconds = cpu_ns >= 60 * 10 ^ (9 :: Int)
    when (runTimeOver60Seconds && gcTimeOver10Percent) . logEntry $
        WarnIfLowProductivity productivity kFileLocations
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/c86e9d2c0a2d7800a3cb49443d962463f88175d1/kore/src/Kore/Log/WarnIfLowProductivity.hs | haskell | # LANGUAGE NoStrict #
# LANGUAGE NoStrictData #
|
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Log.WarnIfLowProductivity (
WarnIfLowProductivity (..),
warnIfLowProductivity,
) where
import Kore.Attribute.Definition (KFileLocations (..))
import Log
import Numeric.Natural
import Prelude.Kore
import Pretty (
Pretty,
)
import Pretty qualified
import Stats
| @WarnIfLowProductivity@ is emitted when productuvity drops below a certain
point .
The warning message also displays the locations of the original K files used if
they are provided as attributes in the kore file .
point.
The warning message also displays the locations of the original K files used if
they are provided as attributes in the kore file.
-}
data WarnIfLowProductivity = WarnIfLowProductivity
{ productivityPercent :: Natural
, kFileLocations :: KFileLocations
}
deriving stock (Show)
instance Pretty WarnIfLowProductivity where
pretty
WarnIfLowProductivity
{ productivityPercent
, kFileLocations = KFileLocations locations
} =
(Pretty.vsep . concat)
[
[ Pretty.hsep
[ "Productivity dropped to:"
, Pretty.pretty productivityPercent <> "%"
]
]
, kFiles
,
[ "Poor productivity may indicate a performance bug."
, "Please file a bug report: -backend/issues"
]
]
where
kFiles
| not . null $ locations =
[ (Pretty.nest 4 . Pretty.vsep)
("Relevant K files include:" : fmap Pretty.pretty locations)
]
| otherwise = []
instance Entry WarnIfLowProductivity where
entrySeverity _ = Warning
oneLineDoc (WarnIfLowProductivity productivityPercent _) =
Pretty.pretty productivityPercent
helpDoc _ = "warn when productivty (MUT time / Total time) drops below 90%"
warnIfLowProductivity ::
MonadLog log =>
MonadIO log =>
KFileLocations ->
log ()
warnIfLowProductivity kFileLocations = do
Stats{gc_cpu_ns, cpu_ns} <- liftIO getStats
let gcTimeOver10Percent = gc_cpu_ns * 10 > cpu_ns
gcPercentage = gc_cpu_ns * 100 `div` cpu_ns
productivity = 100 - gcPercentage & fromIntegral
runTimeOver60Seconds = cpu_ns >= 60 * 10 ^ (9 :: Int)
when (runTimeOver60Seconds && gcTimeOver10Percent) . logEntry $
WarnIfLowProductivity productivity kFileLocations
| |
7373f0a4438ae32d66e2789fa2bdb776024919e041204efadeb31c73c74105fc | nklein/cl-reactive | reduce-t.lisp | ;;;; reduce-t.lisp
(in-package #:cl-reactive/tests)
;; NST regression tests for SIGNAL-REDUCE: each test builds a few signal
;; cells, reduces across them, and samples the derived signal's value.
(nst:def-test-group reduce-tests ()
  ;; Basic two-signal reduction with #'+.
  (nst:def-test simple-reduce-test (:equal 7)
    (signal-let ((sig-x 3 :type integer)
                 (sig-y 4 :type integer))
      (with-signal-values ((s (signal-reduce #'+ (list sig-x sig-y))))
        s)))
  ;; :start/:end restrict the reduction to signals c and d;
  ;; :initial-value seeds the fold, giving ((#\! . #\c) . #\d).
  (nst:def-test reduce-subseq-test (:equal '((#\! . #\c) . #\d))
    (signal-let ((sig-a #\a)
                 (sig-b #\b)
                 (sig-c #\c)
                 (sig-d #\d)
                 (sig-e #\e))
      (with-signal-values ((s (signal-reduce #'cons
                                             (list sig-a sig-b sig-c
                                                   sig-d sig-e)
                                             :start 2 :end 4
                                             :initial-value #\!)))
        s)))
  ;; :from-end folds right-to-left, so consing onto NIL yields a list.
  (nst:def-test reduce-from-end-subseq-test (:equal '(#\c #\d))
    (signal-let ((sig-a #\a)
                 (sig-b #\b)
                 (sig-c #\c)
                 (sig-d #\d)
                 (sig-e #\e))
      (with-signal-values ((s (signal-reduce #'cons
                                             (list sig-a sig-b sig-c
                                                   sig-d sig-e)
                                             :from-end t
                                             :start 2 :end 4
                                             :initial-value nil)))
        s)))
  ;; :key applies 1+ to each value before reducing: 1 * 2 * 3 = 6.
  (nst:def-test reduce-with-key-test (:equal 6)
    (signal-let ((sig-a 0)
                 (sig-b 1)
                 (sig-c 2))
      (with-signal-values ((s (signal-reduce #'*
                                             (list sig-a sig-b sig-c)
                                             :key #'1+)))
        s)))
  ;; The :documentation argument is attached to the derived signal.
  (nst:def-test reduce-documentation-test (:equal "Yes")
    (signal-let (sig-x)
      (documentation (signal-reduce #'+ (list sig-x)
                                    :documentation "Yes") t))))
| null | https://raw.githubusercontent.com/nklein/cl-reactive/e322391f553989add18e6755e810351085c28197/src/reduce-t.lisp | lisp | reduce-t.lisp |
(in-package #:cl-reactive/tests)
(nst:def-test-group reduce-tests ()
(nst:def-test simple-reduce-test (:equal 7)
(signal-let ((sig-x 3 :type integer)
(sig-y 4 :type integer))
(with-signal-values ((s (signal-reduce #'+ (list sig-x sig-y))))
s)))
(nst:def-test reduce-subseq-test (:equal '((#\! . #\c) . #\d))
(signal-let ((sig-a #\a)
(sig-b #\b)
(sig-c #\c)
(sig-d #\d)
(sig-e #\e))
(with-signal-values ((s (signal-reduce #'cons
(list sig-a sig-b sig-c
sig-d sig-e)
:start 2 :end 4
:initial-value #\!)))
s)))
(nst:def-test reduce-from-end-subseq-test (:equal '(#\c #\d))
(signal-let ((sig-a #\a)
(sig-b #\b)
(sig-c #\c)
(sig-d #\d)
(sig-e #\e))
(with-signal-values ((s (signal-reduce #'cons
(list sig-a sig-b sig-c
sig-d sig-e)
:from-end t
:start 2 :end 4
:initial-value nil)))
s)))
(nst:def-test reduce-with-key-test (:equal 6)
(signal-let ((sig-a 0)
(sig-b 1)
(sig-c 2))
(with-signal-values ((s (signal-reduce #'*
(list sig-a sig-b sig-c)
:key #'1+)))
s)))
(nst:def-test reduce-documentation-test (:equal "Yes")
(signal-let (sig-x)
(documentation (signal-reduce #'+ (list sig-x)
:documentation "Yes") t))))
|
32a5ccca213b0371bc3cd9ae59dfbf28fde604fb56ae9ffbc05fa09f3bae49e6 | apache/couchdb-rebar | simpleevent.erl | -module({{eventid}}).
-behaviour(gen_event).
%% ------------------------------------------------------------------
%% API Function Exports
%% ------------------------------------------------------------------
-export([start_link/0,
add_handler/2]).
%% ------------------------------------------------------------------
%% gen_event Function Exports
%% ------------------------------------------------------------------
-export([init/1,
handle_event/2,
handle_call/2,
handle_info/2,
terminate/2,
code_change/3]).
-record(state, {}).
%% ------------------------------------------------------------------
%% API Function Definitions
%% ------------------------------------------------------------------
%% @doc Start the event manager, registered locally under this module's name.
start_link() ->
    gen_event:start_link({local, ?MODULE}).
%% @doc Install Handler (with Args) on this event manager.
add_handler(Handler, Args) ->
    gen_event:add_handler(?MODULE, Handler, Args).
%% ------------------------------------------------------------------
%% gen_event Function Definitions
%% ------------------------------------------------------------------
%% @doc Initialize handler state; this template keeps an empty #state{}.
init([]) ->
    {ok, #state{}}.
%% @doc Ignore all notifications, keeping state unchanged.
handle_event(_Event, State) ->
    {ok, State}.
%% @doc Reply ok to any synchronous call.
handle_call(_Request, State) ->
    Reply = ok,
    {ok, Reply, State}.
%% @doc Ignore out-of-band messages.
handle_info(_Info, State) ->
    {ok, State}.
%% @doc Nothing to clean up.
terminate(_Reason, _State) ->
    ok.
%% @doc No state migration needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
| null | https://raw.githubusercontent.com/apache/couchdb-rebar/8578221c20d0caa3deb724e5622a924045ffa8bf/priv/templates/simpleevent.erl | erlang | ------------------------------------------------------------------
API Function Exports
------------------------------------------------------------------
------------------------------------------------------------------
gen_event Function Exports
------------------------------------------------------------------
------------------------------------------------------------------
API Function Definitions
------------------------------------------------------------------
------------------------------------------------------------------
gen_event Function Definitions
------------------------------------------------------------------
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------ | -module({{eventid}}).
-behaviour(gen_event).
-export([start_link/0,
add_handler/2]).
-export([init/1,
handle_event/2,
handle_call/2,
handle_info/2,
terminate/2,
code_change/3]).
-record(state, {}).
start_link() ->
gen_event:start_link({local, ?MODULE}).
add_handler(Handler, Args) ->
gen_event:add_handler(?MODULE, Handler, Args).
init([]) ->
{ok, #state{}}.
handle_event(_Event, State) ->
{ok, State}.
handle_call(_Request, State) ->
Reply = ok,
{ok, Reply, State}.
handle_info(_Info, State) ->
{ok, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
be5cf06814c69dff8def24dd32911a21fdba886118e5f5a00ff239d30c78fcc7 | samply/blaze | cql_translator_spec.clj | (ns blaze.cql-translator-spec
(:require
[blaze.anomaly-spec]
[blaze.cql-translator :as cql-translator]
[blaze.elm.spec]
[clojure.spec.alpha :as s]
[cognitect.anomalies :as anom]))
(s/fdef cql-translator/translate
:args (s/cat :cql string?)
:ret (s/or :library :elm/library :anomaly ::anom/anomaly))
| null | https://raw.githubusercontent.com/samply/blaze/b3207ccad998607052c5155df1319872ccf77326/modules/cql/src/blaze/cql_translator_spec.clj | clojure | (ns blaze.cql-translator-spec
(:require
[blaze.anomaly-spec]
[blaze.cql-translator :as cql-translator]
[blaze.elm.spec]
[clojure.spec.alpha :as s]
[cognitect.anomalies :as anom]))
(s/fdef cql-translator/translate
:args (s/cat :cql string?)
:ret (s/or :library :elm/library :anomaly ::anom/anomaly))
| |
b7c8a75d4319a3d320730595fd0fb02d2de9a972cee5e0d4da73fa85dc340e11 | glebec/haskell-programming-allen-moronuki | Ex22_11.hs | module Ex22_11 where
import Control.Applicative
import Data.Maybe
-- Fixture lists used by the lookups below.
x = [1, 2, 3]
y = [4, 5, 6]
z = [7, 8, 9]
-- lookup 3 in [(1,4),(2,5),(3,6)] ==> Just 6
xs :: Maybe Integer
xs = lookup 3 $ zip x y
-- lookup 6 in [(4,7),(5,8),(6,9)] ==> Just 9
ys :: Maybe Integer
ys = lookup 6 $ zip y z
-- 4 is not a key of [(1,4),(2,5),(3,6)] ==> Nothing
zs :: Maybe Integer
zs = lookup 4 $ zip x y
-- Parameterized lookup against the x/z association.
z' :: Integer -> Maybe Integer
z' n = lookup n $ zip x z
-- Pair up two Maybe lookups applicatively; Nothing propagates.
x1 :: Maybe (Integer, Integer)
x1 = (,) <$> xs <*> ys
x2 :: Maybe (Integer, Integer)
x2 = (,) <$> ys <*> zs
-- Run the same lookup twice for one key.
x3 :: Integer -> (Maybe Integer, Maybe Integer)
x3 n = (z' n, z' n)
-- | Add the two components of a pair.
summed :: Num c => (c, c) -> c
summed (a, b) = a + b
-- | True exactly when the argument lies strictly between 3 and 8.
bolt :: Integer -> Bool
bolt n = n > 3 && n < 8
-- | Exercise the applicative/traversable examples above. Several operator
-- applications were whitespace-mangled into invalid tokens ("< $ >",
-- "( , )"); they are restored to the standard <$> / <*> / (,) forms here.
main :: IO ()
main = do
  print $ sequenceA [Just 3, Just 2, Just 1]
  -- print $ sequenceA [x, y]
  -- print $ sequenceA [xs, ys]
  print $ summed <$> ((,) <$> xs <*> ys)
  print $ fmap summed ((,) <$> xs <*> zs)
  print $ bolt 7
  -- print $ fmap bolt z
  print $ sequenceA [(>3), (<8), even] 7
  print $ foldr (&&) True (sequA 3)
  print $ sequA (fromMaybe 0 s')
  print $ bolt (fromMaybe 0 ys)
-- | Apply the three predicates (>3), (<8) and even to a single value.
sequA :: Integral a => a -> [Bool]
sequA n = [n > 3, n < 8, even n]
-- Sum of the xs/ys lookups inside Maybe: Just 6 + Just 9 ==> Just 15.
s' :: Maybe Integer
s' = summed <$> ((,) <$> xs <*> ys)
| null | https://raw.githubusercontent.com/glebec/haskell-programming-allen-moronuki/99bd232f523e426d18a5e096f1cf771228c55f52/22-reader/Ex22_11.hs | haskell | print $ sequenceA [x, y]
print $ sequenceA [xs, ys]
print $ fmap bolt z | module Ex22_11 where
import Control.Applicative
import Data.Maybe
x = [1, 2, 3]
y = [4, 5, 6]
z = [7, 8, 9]
xs :: Maybe Integer
xs = lookup 3 $ zip x y
ys :: Maybe Integer
ys = lookup 6 $ zip y z
zs :: Maybe Integer
zs = lookup 4 $ zip x y
z' :: Integer -> Maybe Integer
z' n = lookup n $ zip x z
x1 :: Maybe (Integer, Integer)
x1 = (,) <$> xs <*> ys
x2 :: Maybe (Integer, Integer)
x2 = (,) <$> ys <*> zs
x3 :: Integer -> (Maybe Integer, Maybe Integer)
x3 n = (z' n, z' n)
summed :: Num c => (c, c) -> c
summed = uncurry (+)
bolt :: Integer -> Bool
bolt = (&&) <$> (>3) <*> (<8)
main :: IO ()
main = do
print $ sequenceA [ Just 3 , Just 2 , Just 1 ]
print $ summed < $ > ( ( , ) < $ > xs < * > ys )
print $ fmap summed ( ( , ) < $ > xs < * > zs )
print $ bolt 7
print $ sequenceA [ ( > 3 ) , ( < 8) , even ] 7
print $ foldr (&&) True (sequA 3)
print $ sequA (fromMaybe 0 s')
print $ bolt (fromMaybe 0 ys)
sequA :: Integral a => a -> [Bool]
sequA = sequenceA [(>3), (<8), even]
s' :: Maybe Integer
s' = summed <$> ((,) <$> xs <*> ys)
|
2d857c7ca3c33b67a9fc0f9420500048f42be3130a8dded6ab056e3181efc691 | ijvcms/chuanqi_dev | equips_stren_config.erl | %%%-------------------------------------------------------------------
@author zhengsiying
%%% @doc
%%% 自动生成文件,不要手动修改 (auto-generated file; do not edit by hand)
%%% @end
Created : 2016/10/12
%%%-------------------------------------------------------------------
-module(equips_stren_config).
-include("common.hrl").
-include("config.hrl").
-compile([export_all]).
get_list_conf() ->
[ equips_stren_config:get(X) || X <- get_list() ].
get_list() ->
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40].
get(1) ->
#equips_stren_conf{
key = 1,
coin = 8000,
change_jade = 0,
max_bless = 0
};
get(2) ->
#equips_stren_conf{
key = 2,
coin = 16000,
change_jade = 0,
max_bless = 0
};
get(3) ->
#equips_stren_conf{
key = 3,
coin = 24000,
change_jade = 0,
max_bless = 0
};
get(4) ->
#equips_stren_conf{
key = 4,
coin = 32000,
change_jade = 0,
max_bless = 0
};
get(5) ->
#equips_stren_conf{
key = 5,
coin = 40000,
change_jade = 0,
max_bless = 0
};
get(6) ->
#equips_stren_conf{
key = 6,
coin = 48000,
change_jade = 38,
max_bless = 0
};
get(7) ->
#equips_stren_conf{
key = 7,
coin = 56000,
change_jade = 76,
max_bless = 0
};
get(8) ->
#equips_stren_conf{
key = 8,
coin = 64000,
change_jade = 153,
max_bless = 0
};
get(9) ->
#equips_stren_conf{
key = 9,
coin = 72000,
change_jade = 200,
max_bless = 0
};
get(10) ->
#equips_stren_conf{
key = 10,
coin = 80000,
change_jade = 250,
max_bless = 0
};
get(11) ->
#equips_stren_conf{
key = 11,
coin = 88000,
change_jade = 300,
max_bless = 0
};
get(12) ->
#equips_stren_conf{
key = 12,
coin = 96000,
change_jade = 400,
max_bless = 0
};
get(13) ->
#equips_stren_conf{
key = 13,
coin = 104000,
change_jade = 500,
max_bless = 0
};
get(14) ->
#equips_stren_conf{
key = 14,
coin = 112000,
change_jade = 600,
max_bless = 0
};
get(15) ->
#equips_stren_conf{
key = 15,
coin = 120000,
change_jade = 700,
max_bless = 0
};
get(16) ->
#equips_stren_conf{
key = 16,
coin = 128000,
change_jade = 800,
max_bless = 0
};
get(17) ->
#equips_stren_conf{
key = 17,
coin = 136000,
change_jade = 900,
max_bless = 0
};
get(18) ->
#equips_stren_conf{
key = 18,
coin = 144000,
change_jade = 1000,
max_bless = 1280
};
get(19) ->
#equips_stren_conf{
key = 19,
coin = 152000,
change_jade = 1100,
max_bless = 2560
};
get(20) ->
#equips_stren_conf{
key = 20,
coin = 160000,
change_jade = 1200,
max_bless = 5120
};
get(21) ->
#equips_stren_conf{
key = 21,
coin = 168000,
change_jade = 1300,
max_bless = 0
};
get(22) ->
#equips_stren_conf{
key = 22,
coin = 176000,
change_jade = 1400,
max_bless = 0
};
get(23) ->
#equips_stren_conf{
key = 23,
coin = 184000,
change_jade = 1500,
max_bless = 0
};
get(24) ->
#equips_stren_conf{
key = 24,
coin = 192000,
change_jade = 1600,
max_bless = 0
};
get(25) ->
#equips_stren_conf{
key = 25,
coin = 200000,
change_jade = 1700,
max_bless = 0
};
get(26) ->
#equips_stren_conf{
key = 26,
coin = 208000,
change_jade = 1800,
max_bless = 0
};
get(27) ->
#equips_stren_conf{
key = 27,
coin = 216000,
change_jade = 1900,
max_bless = 0
};
get(28) ->
#equips_stren_conf{
key = 28,
coin = 224000,
change_jade = 2000,
max_bless = 0
};
get(29) ->
#equips_stren_conf{
key = 29,
coin = 232000,
change_jade = 2100,
max_bless = 0
};
get(30) ->
#equips_stren_conf{
key = 30,
coin = 240000,
change_jade = 2200,
max_bless = 0
};
get(31) ->
#equips_stren_conf{
key = 31,
coin = 320000,
change_jade = 2300,
max_bless = 5120
};
get(32) ->
#equips_stren_conf{
key = 32,
coin = 400000,
change_jade = 2400,
max_bless = 10240
};
get(33) ->
#equips_stren_conf{
key = 33,
coin = 480000,
change_jade = 2500,
max_bless = 20480
};
get(34) ->
#equips_stren_conf{
key = 34,
coin = 560000,
change_jade = 2600,
max_bless = 40960
};
get(35) ->
#equips_stren_conf{
key = 35,
coin = 640000,
change_jade = 2700,
max_bless = 81920
};
get(36) ->
#equips_stren_conf{
key = 36,
coin = 720000,
change_jade = 2800,
max_bless = 122880
};
get(37) ->
#equips_stren_conf{
key = 37,
coin = 800000,
change_jade = 2900,
max_bless = 122880
};
get(38) ->
#equips_stren_conf{
key = 38,
coin = 880000,
change_jade = 3000,
max_bless = 122880
};
get(39) ->
#equips_stren_conf{
key = 39,
coin = 960000,
change_jade = 3100,
max_bless = 122880
};
get(40) ->
#equips_stren_conf{
key = 40,
coin = 1040000,
change_jade = 3200,
max_bless = 122880
};
get(_Key) ->
?ERR("undefined key from equips_stren_config ~p", [_Key]). | null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/config/equips_stren_config.erl | erlang | -------------------------------------------------------------------
@doc
自动生成文件,不要手动修改
@end
------------------------------------------------------------------- | @author zhengsiying
Created : 2016/10/12
-module(equips_stren_config).
-include("common.hrl").
-include("config.hrl").
-compile([export_all]).
get_list_conf() ->
[ equips_stren_config:get(X) || X <- get_list() ].
get_list() ->
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40].
get(1) ->
#equips_stren_conf{
key = 1,
coin = 8000,
change_jade = 0,
max_bless = 0
};
get(2) ->
#equips_stren_conf{
key = 2,
coin = 16000,
change_jade = 0,
max_bless = 0
};
get(3) ->
#equips_stren_conf{
key = 3,
coin = 24000,
change_jade = 0,
max_bless = 0
};
get(4) ->
#equips_stren_conf{
key = 4,
coin = 32000,
change_jade = 0,
max_bless = 0
};
get(5) ->
#equips_stren_conf{
key = 5,
coin = 40000,
change_jade = 0,
max_bless = 0
};
get(6) ->
#equips_stren_conf{
key = 6,
coin = 48000,
change_jade = 38,
max_bless = 0
};
get(7) ->
#equips_stren_conf{
key = 7,
coin = 56000,
change_jade = 76,
max_bless = 0
};
get(8) ->
#equips_stren_conf{
key = 8,
coin = 64000,
change_jade = 153,
max_bless = 0
};
get(9) ->
#equips_stren_conf{
key = 9,
coin = 72000,
change_jade = 200,
max_bless = 0
};
get(10) ->
#equips_stren_conf{
key = 10,
coin = 80000,
change_jade = 250,
max_bless = 0
};
get(11) ->
#equips_stren_conf{
key = 11,
coin = 88000,
change_jade = 300,
max_bless = 0
};
get(12) ->
#equips_stren_conf{
key = 12,
coin = 96000,
change_jade = 400,
max_bless = 0
};
get(13) ->
#equips_stren_conf{
key = 13,
coin = 104000,
change_jade = 500,
max_bless = 0
};
get(14) ->
#equips_stren_conf{
key = 14,
coin = 112000,
change_jade = 600,
max_bless = 0
};
get(15) ->
#equips_stren_conf{
key = 15,
coin = 120000,
change_jade = 700,
max_bless = 0
};
get(16) ->
#equips_stren_conf{
key = 16,
coin = 128000,
change_jade = 800,
max_bless = 0
};
get(17) ->
#equips_stren_conf{
key = 17,
coin = 136000,
change_jade = 900,
max_bless = 0
};
get(18) ->
#equips_stren_conf{
key = 18,
coin = 144000,
change_jade = 1000,
max_bless = 1280
};
get(19) ->
#equips_stren_conf{
key = 19,
coin = 152000,
change_jade = 1100,
max_bless = 2560
};
get(20) ->
#equips_stren_conf{
key = 20,
coin = 160000,
change_jade = 1200,
max_bless = 5120
};
get(21) ->
#equips_stren_conf{
key = 21,
coin = 168000,
change_jade = 1300,
max_bless = 0
};
get(22) ->
#equips_stren_conf{
key = 22,
coin = 176000,
change_jade = 1400,
max_bless = 0
};
get(23) ->
#equips_stren_conf{
key = 23,
coin = 184000,
change_jade = 1500,
max_bless = 0
};
get(24) ->
#equips_stren_conf{
key = 24,
coin = 192000,
change_jade = 1600,
max_bless = 0
};
get(25) ->
#equips_stren_conf{
key = 25,
coin = 200000,
change_jade = 1700,
max_bless = 0
};
get(26) ->
#equips_stren_conf{
key = 26,
coin = 208000,
change_jade = 1800,
max_bless = 0
};
get(27) ->
#equips_stren_conf{
key = 27,
coin = 216000,
change_jade = 1900,
max_bless = 0
};
get(28) ->
#equips_stren_conf{
key = 28,
coin = 224000,
change_jade = 2000,
max_bless = 0
};
get(29) ->
#equips_stren_conf{
key = 29,
coin = 232000,
change_jade = 2100,
max_bless = 0
};
get(30) ->
#equips_stren_conf{
key = 30,
coin = 240000,
change_jade = 2200,
max_bless = 0
};
get(31) ->
#equips_stren_conf{
key = 31,
coin = 320000,
change_jade = 2300,
max_bless = 5120
};
get(32) ->
#equips_stren_conf{
key = 32,
coin = 400000,
change_jade = 2400,
max_bless = 10240
};
get(33) ->
#equips_stren_conf{
key = 33,
coin = 480000,
change_jade = 2500,
max_bless = 20480
};
get(34) ->
#equips_stren_conf{
key = 34,
coin = 560000,
change_jade = 2600,
max_bless = 40960
};
get(35) ->
#equips_stren_conf{
key = 35,
coin = 640000,
change_jade = 2700,
max_bless = 81920
};
get(36) ->
#equips_stren_conf{
key = 36,
coin = 720000,
change_jade = 2800,
max_bless = 122880
};
get(37) ->
#equips_stren_conf{
key = 37,
coin = 800000,
change_jade = 2900,
max_bless = 122880
};
get(38) ->
#equips_stren_conf{
key = 38,
coin = 880000,
change_jade = 3000,
max_bless = 122880
};
get(39) ->
#equips_stren_conf{
key = 39,
coin = 960000,
change_jade = 3100,
max_bless = 122880
};
get(40) ->
#equips_stren_conf{
key = 40,
coin = 1040000,
change_jade = 3200,
max_bless = 122880
};
get(_Key) ->
?ERR("undefined key from equips_stren_config ~p", [_Key]). |
4555bb83a763ede4b7da05df060a6cf08d3ccdec1c26b26f0ced07e66761e572 | nominolo/lambdachine | Read.hs | # LANGUAGE NoImplicitPrelude #
module GHC.Read
( Read(..) -- class
-- ReadS type
, ReadS -- :: *; = String -> [(a,String)]
-- H98 compatibility
, lex -- :: ReadS String
, lexLitChar -- :: ReadS String
: : ReadS
, lexDigits -- :: ReadS String
-- defining readers
: : ReadPrec Lexeme
, paren -- :: ReadPrec a -> ReadPrec a
, parens -- :: ReadPrec a -> ReadPrec a
, list -- :: ReadPrec a -> ReadPrec [a]
: : [ ( String , ReadPrec a ) ] - > ReadPrec a
, readListDefault, readListPrecDefault
-- Temporary
, readParen
-- XXX Can this be removed?
, readp
)
where
import qualified Text.ParserCombinators.ReadP as P
import qualified Text.Read.Lex as L
import Text.ParserCombinators.ReadP ( ReadP, ReadS, readP_to_S )
import Text.ParserCombinators.ReadPrec
import Data.Maybe
import {-# SOURCE #-} GHC.Unicode ( isDigit )
import GHC.Num
import GHC.Real
-- import GHC.Float ()
import GHC.Show
import GHC.Base
import GHC.Arr
-- | @readParen b p@ parses what @p@ parses, surrounded by parentheses:
-- mandatorily when @b@ is 'True', optionally (any depth) when 'False'.
readParen :: Bool -> ReadS a -> ReadS a
-- A Haskell 98 function
readParen b g = if b then mandatory else optional
  where optional r = g r ++ mandatory r
        mandatory r = do
          ("(",s) <- lex r
          (x,t) <- optional s
          (")",u) <- lex t
          return (x,u)
-- | Parsing of values. Minimal complete definition: 'readsPrec' or
-- 'readPrec'; the defaults below derive each side from the other.
class Read a where
  readsPrec :: Int -> ReadS a
  readList :: ReadS [a]
  readPrec :: ReadPrec a
  readListPrec :: ReadPrec [a]
  -- default definitions
  readsPrec = readPrec_to_S readPrec
  readList = readPrec_to_S (list readPrec) 0
  readPrec = readS_to_Prec readsPrec
  readListPrec = readS_to_Prec (\_ -> readList)
-- | Default 'readList' in terms of 'readListPrec'.
readListDefault :: Read a => ReadS [a]
readListDefault = readPrec_to_S readListPrec 0
-- | Default 'readListPrec' using the standard bracketed-list syntax.
readListPrecDefault :: Read a => ReadPrec [a]
readListPrecDefault = list readPrec
-- | Lex a single Haskell lexeme from the front of the string.
lex :: ReadS String -- As defined by H98
lex s = readP_to_S L.hsLex s
-- | Lex the raw characters of a single character literal.
lexLitChar :: ReadS String -- As defined by H98
lexLitChar = readP_to_S (do { (s, _) <- P.gather L.lexChar ;
                              return s })
-- There was a skipSpaces before the P.gather L.lexChar,
-- but that seems inconsistent with readLitChar
-- | Read a (possibly escaped) character literal.
readLitChar :: ReadS Char -- As defined by H98
readLitChar = readP_to_S L.lexChar
-- | Lex a non-empty run of decimal digits.
lexDigits :: ReadS String
lexDigits = readP_to_S (P.munch1 isDigit)
-- | Parse one lexeme in the 'ReadPrec' monad.
lexP :: ReadPrec L.Lexeme
lexP = lift L.lex
-- | @paren p@ parses \"( p )\"; precedence is reset inside the parentheses.
paren :: ReadPrec a -> ReadPrec a
paren p = do L.Punc "(" <- lexP
             x <- reset p
             L.Punc ")" <- lexP
             return x
-- | @parens p@ parses @p@ wrapped in any number of parentheses (incl. none).
parens :: ReadPrec a -> ReadPrec a
parens p = optional
  where
    optional = p +++ mandatory
    mandatory = paren optional
-- | @list p@ parses a Haskell list \"[x1, x2, ...]\" of items parsed by @p@.
list :: ReadPrec a -> ReadPrec [a]
list readx =
  parens
  ( do L.Punc "[" <- lexP
       -- Either an immediately closed "[]" or at least one element.
       (listRest False +++ listNext)
  )
  where
    -- After an element: "]" ends the list; "," (if one element was already
    -- consumed) continues it.
    listRest started =
      do L.Punc c <- lexP
         case c of
           "]" -> return []
           "," | started -> listNext
           _ -> pfail
    -- Parse one element (precedence reset) and then the rest.
    listNext =
      do x <- reset readx
         xs <- listRest True
         return (x:xs)
-- | @choose table@ tries each (name, parser) pair: when the next lexeme is
-- the identifier or symbol @name@, run the associated parser.
choose :: [(String, ReadPrec a)] -> ReadPrec a
choose sps = foldr ((+++) . try_one) pfail sps
  where
    try_one (s,p) = do { token <- lexP ;
                         case token of
                           L.Ident s' | s==s' -> p
                           L.Symbol s' | s==s' -> p
                           _other -> pfail }
instance Read Char where
  -- A character literal such as 'f'.
  readPrec =
    parens
    ( do L.Char c <- lexP
         return c
    )
  -- Strings may be read either as a string literal or as a list of chars.
  readListPrec =
    parens
    ( do L.String s <- lexP -- Looks for "foo"
         return s
     +++
      readListPrecDefault -- Looks for ['f','o','o']
    ) -- (more generous than H98 spec)
  readList = readListDefault
instance Read Bool where
  -- Accepts exactly the identifiers "True" and "False".
  readPrec =
    parens
    ( do L.Ident s <- lexP
         case s of
           "True" -> return True
           "False" -> return False
           _ -> pfail
    )
  readListPrec = readListPrecDefault
  readList = readListDefault
instance Read Ordering where
  -- Accepts exactly the identifiers "LT", "EQ" and "GT".
  readPrec =
    parens
    ( do L.Ident s <- lexP
         case s of
           "LT" -> return LT
           "EQ" -> return EQ
           "GT" -> return GT
           _ -> pfail
    )
  readListPrec = readListPrecDefault
  readList = readListDefault
instance Read a => Read (Maybe a) where
  -- "Nothing", or "Just x" at application precedence.
  readPrec =
    parens
    (do L.Ident "Nothing" <- lexP
        return Nothing
     +++
     prec appPrec (
        do L.Ident "Just" <- lexP
           x <- step readPrec
           return (Just x))
    )
  readListPrec = readListPrecDefault
  readList = readListDefault
instance Read a => Read [a] where
  -- Lists use the bracketed-list syntax directly.
  readPrec = readListPrec
  readListPrec = readListPrecDefault
  readList = readListDefault
-- instance (Ix a, Read a, Read b) => Read (Array a b) where
--     readPrec = parens $ prec appPrec $
--                do "array" <- lexP
--                   theBounds <- step readPrec
--                   vals <- step readPrec
--                   return (array theBounds vals)
--     readListPrec = readListPrecDefault
--     readList = readListDefault
instance Read L.Lexeme where
  -- A lexeme reads itself.
  readPrec = lexP
  readListPrec = readListPrecDefault
  readList = readListDefault
-- | Read a number, handling an optional leading "-" sign; @convert@ turns
-- the numeric lexeme into a value.
readNumber :: Num a => (L.Lexeme -> ReadPrec a) -> ReadPrec a
-- Read a signed number
readNumber convert =
  parens
  ( do x <- lexP
       case x of
         L.Symbol "-" -> do y <- lexP
                            n <- convert y
                            return (negate n)
         _ -> convert x
  )
-- | Accept only integer lexemes.
convertInt :: Num a => L.Lexeme -> ReadPrec a
convertInt (L.Int i) = return (fromInteger i)
convertInt _ = pfail
-- | Accept integer or rational lexemes for fractional results.
convertFrac :: Fractional a => L.Lexeme -> ReadPrec a
convertFrac (L.Int i) = return (fromInteger i)
convertFrac (L.Rat r) = return (fromRational r)
convertFrac _ = pfail
instance Read Int where
  readPrec = readNumber convertInt
  readListPrec = readListPrecDefault
  readList = readListDefault
instance Read Integer where
  readPrec = readNumber convertInt
  readListPrec = readListPrecDefault
  readList = readListDefault
instance Read Float where
  -- readPrec/readListPrec are disabled in this port — presumably because
  -- fractional parsing is unsupported here; TODO confirm upstream.
  -- readPrec = readNumber convertFrac
  -- readListPrec = readListPrecDefault
  readList = readListDefault
-- instance Read Double where
-- readPrec = readNumber convertFrac
-- readListPrec = readListPrecDefault
readList = readListDefault
instance (Integral a, Read a) => Read (Ratio a) where
  -- Parses "x % y" at ratio precedence.
  readPrec =
    parens
    ( prec ratioPrec
      ( do x <- step readPrec
           L.Symbol "%" <- lexP
           y <- step readPrec
           return (x % y)
      )
    )
  readListPrec = readListPrecDefault
  readList = readListDefault
instance Read () where
  -- Unit is written "()": a mandatory pair of parentheses around nothing.
  readPrec =
    parens
    ( paren
      ( return ()
      )
    )
  readListPrec = readListPrecDefault
  readList = readListDefault
instance (Read a, Read b) => Read (a,b) where
  -- "(a, b)" via the shared tuple machinery below.
  readPrec = wrap_tup read_tup2
  readListPrec = readListPrecDefault
  readList = readListDefault
-- | Wrap a bare tuple-body parser in mandatory parentheses (optionally more).
wrap_tup :: ReadPrec a -> ReadPrec a
wrap_tup p = parens (paren p)
-- | Consume a single "," separator.
read_comma :: ReadPrec ()
read_comma = do { L.Punc "," <- lexP; return () }
-- | Parse two comma-separated components (no surrounding parens).
read_tup2 :: (Read a, Read b) => ReadPrec (a,b)
-- Reads "a , b" no parens!
read_tup2 = do x <- readPrec
               read_comma
               y <- readPrec
               return (x,y)
-- | Four components, built from two pairs.
read_tup4 :: (Read a, Read b, Read c, Read d) => ReadPrec (a,b,c,d)
read_tup4 = do (a,b) <- read_tup2
               read_comma
               (c,d) <- read_tup2
               return (a,b,c,d)
-- | Eight components, built from two quadruples.
read_tup8 :: (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h)
          => ReadPrec (a,b,c,d,e,f,g,h)
read_tup8 = do (a,b,c,d) <- read_tup4
               read_comma
               (e,f,g,h) <- read_tup4
               return (a,b,c,d,e,f,g,h)
instance (Read a, Read b, Read c) => Read (a, b, c) where
  -- A pair plus one further component.
  readPrec = wrap_tup (do { (a,b) <- read_tup2; read_comma
                          ; c <- readPrec
                          ; return (a,b,c) })
  readListPrec = readListPrecDefault
  readList = readListDefault
instance (Read a, Read b, Read c, Read d) => Read (a, b, c, d) where
  readPrec = wrap_tup read_tup4
  readListPrec = readListPrecDefault
  readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e ) = > Read ( a , b , c , d , e ) where
readPrec = wrap_tup ( do { ( a , b , c , d ) < - read_tup4 ; read_comma
; e < - readPrec
; return ( a , b , c , d , e ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f )
= > Read ( a , b , c , d , e , f ) where
readPrec = wrap_tup ( do { ( a , b , c , d ) < - read_tup4 ; read_comma
; ( e , f ) < - read_tup2
; return ( a , b , c , d , e , f ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , )
= > Read ( a , b , c , d , e , f , ) where
readPrec = wrap_tup ( do { ( a , b , c , d ) < - read_tup4 ; read_comma
; ( e , f ) < - read_tup2 ; read_comma
;
; return ( a , b , c , d , e , f , ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h )
= > Read ( a , b , c , d , e , f , , h ) where
readPrec = wrap_tup read_tup8
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i )
= > Read ( a , b , c , d , e , f , , h , i ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; i < - readPrec
; return ( a , b , c , d , e , f , , h , i ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j )
= > Read ( a , b , c , d , e , f , , h , i , j ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j ) < - read_tup2
; return ( a , b , c , d , e , f , , h , i , j ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k )
= > Read ( a , b , c , d , e , f , , h , i , j , k ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j ) < - read_tup2 ; read_comma
; k < - readPrec
; return ( a , b , c , d , e , f , , h , i , j , k ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4
; return ( a , b , c , d , e , f , , h , i , j , k , l ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l , Read m )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l , m ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4 ; read_comma
; m < - readPrec
; return ( a , b , c , d , e , f , , h , i , j , k , l , m ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l , Read m , Read n )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l , m , n ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4 ; read_comma
; ( m , n ) < - read_tup2
; return ( a , b , c , d , e , f , , h , i , j , k , l , m , n ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l , Read m , Read n , Read o )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l , m , n , o ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4 ; read_comma
; ( m , n ) < - read_tup2 ; read_comma
; o < - readPrec
; return ( a , b , c , d , e , f , , h , i , j , k , l , m , n , o ) } )
readListPrec = readListPrecDefault
readList = readListDefault
--
instance (Read a, Read b, Read c, Read d, Read e) => Read (a, b, c, d, e) where
readPrec = wrap_tup (do { (a,b,c,d) <- read_tup4; read_comma
; e <- readPrec
; return (a,b,c,d,e) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f)
=> Read (a, b, c, d, e, f) where
readPrec = wrap_tup (do { (a,b,c,d) <- read_tup4; read_comma
; (e,f) <- read_tup2
; return (a,b,c,d,e,f) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g)
=> Read (a, b, c, d, e, f, g) where
readPrec = wrap_tup (do { (a,b,c,d) <- read_tup4; read_comma
; (e,f) <- read_tup2; read_comma
; g <- readPrec
; return (a,b,c,d,e,f,g) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h)
=> Read (a, b, c, d, e, f, g, h) where
readPrec = wrap_tup read_tup8
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i)
=> Read (a, b, c, d, e, f, g, h, i) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; i <- readPrec
; return (a,b,c,d,e,f,g,h,i) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j)
=> Read (a, b, c, d, e, f, g, h, i, j) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j) <- read_tup2
; return (a,b,c,d,e,f,g,h,i,j) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k)
=> Read (a, b, c, d, e, f, g, h, i, j, k) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j) <- read_tup2; read_comma
; k <- readPrec
; return (a,b,c,d,e,f,g,h,i,j,k) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4
; return (a,b,c,d,e,f,g,h,i,j,k,l) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l, Read m)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l, m) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4; read_comma
; m <- readPrec
; return (a,b,c,d,e,f,g,h,i,j,k,l,m) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l, Read m, Read n)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l, m, n) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4; read_comma
; (m,n) <- read_tup2
; return (a,b,c,d,e,f,g,h,i,j,k,l,m,n) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l, Read m, Read n, Read o)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4; read_comma
; (m,n) <- read_tup2; read_comma
; o <- readPrec
; return (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o) })
readListPrec = readListPrecDefault
readList = readListDefault
-- -}
readp :: Read a => ReadP a
readp = readPrec_to_P readPrec minPrec
| null | https://raw.githubusercontent.com/nominolo/lambdachine/49d97cf7a367a650ab421f7aa19feb90bfe14731/libraries/base/GHC/Read.hs | haskell | class
ReadS type
:: *; = String -> [(a,String)]
H98 compatibility
:: ReadS String
:: ReadS String
:: ReadS String
defining readers
:: ReadPrec a -> ReadPrec a
:: ReadPrec a -> ReadPrec a
:: ReadPrec a -> ReadPrec [a]
Temporary
XXX Can this be removed?
# SOURCE #
import GHC.Float ()
A Haskell 98 function
default definitions
As defined by H98
As defined by H98
As defined by H98
Looks for "foo"
Looks for ['f','o','o']
(more generous than H98 spec)
instance (Ix a, Read a, Read b) => Read (Array a b) where
readPrec = parens $ prec appPrec $
vals <- step readPrec
readListPrec = readListPrecDefault
Read a signed number
readPrec = readNumber convertFrac
readListPrec = readListPrecDefault
instance Read Double where
readPrec = readNumber convertFrac
readListPrec = readListPrecDefault
Reads "a , b" no parens!
-} | # LANGUAGE NoImplicitPrelude #
module GHC.Read
: : ReadS
: : ReadPrec Lexeme
: : [ ( String , ReadPrec a ) ] - > ReadPrec a
, readListDefault, readListPrecDefault
, readParen
, readp
)
where
import qualified Text.ParserCombinators.ReadP as P
import qualified Text.Read.Lex as L
import Text.ParserCombinators.ReadP ( ReadP, ReadS, readP_to_S )
import Text.ParserCombinators.ReadPrec
import Data.Maybe
import GHC.Num
import GHC.Real
import GHC.Show
import GHC.Base
import GHC.Arr
readParen :: Bool -> ReadS a -> ReadS a
readParen b g = if b then mandatory else optional
where optional r = g r ++ mandatory r
mandatory r = do
("(",s) <- lex r
(x,t) <- optional s
(")",u) <- lex t
return (x,u)
class Read a where
readsPrec :: Int -> ReadS a
readList :: ReadS [a]
readPrec :: ReadPrec a
readListPrec :: ReadPrec [a]
readsPrec = readPrec_to_S readPrec
readList = readPrec_to_S (list readPrec) 0
readPrec = readS_to_Prec readsPrec
readListPrec = readS_to_Prec (\_ -> readList)
readListDefault :: Read a => ReadS [a]
readListDefault = readPrec_to_S readListPrec 0
readListPrecDefault :: Read a => ReadPrec [a]
readListPrecDefault = list readPrec
lex s = readP_to_S L.hsLex s
lexLitChar = readP_to_S (do { (s, _) <- P.gather L.lexChar ;
return s })
There was a skipSpaces before the P.gather L.lexChar ,
but that seems inconsistent with
readLitChar = readP_to_S L.lexChar
lexDigits :: ReadS String
lexDigits = readP_to_S (P.munch1 isDigit)
lexP :: ReadPrec L.Lexeme
lexP = lift L.lex
paren :: ReadPrec a -> ReadPrec a
paren p = do L.Punc "(" <- lexP
x <- reset p
L.Punc ")" <- lexP
return x
parens :: ReadPrec a -> ReadPrec a
parens p = optional
where
optional = p +++ mandatory
mandatory = paren optional
list :: ReadPrec a -> ReadPrec [a]
list readx =
parens
( do L.Punc "[" <- lexP
(listRest False +++ listNext)
)
where
listRest started =
do L.Punc c <- lexP
case c of
"]" -> return []
"," | started -> listNext
_ -> pfail
listNext =
do x <- reset readx
xs <- listRest True
return (x:xs)
choose :: [(String, ReadPrec a)] -> ReadPrec a
choose sps = foldr ((+++) . try_one) pfail sps
where
try_one (s,p) = do { token <- lexP ;
case token of
L.Ident s' | s==s' -> p
L.Symbol s' | s==s' -> p
_other -> pfail }
instance Read Char where
readPrec =
parens
( do L.Char c <- lexP
return c
)
readListPrec =
parens
return s
+++
readList = readListDefault
instance Read Bool where
readPrec =
parens
( do L.Ident s <- lexP
case s of
"True" -> return True
"False" -> return False
_ -> pfail
)
readListPrec = readListPrecDefault
readList = readListDefault
instance Read Ordering where
readPrec =
parens
( do L.Ident s <- lexP
case s of
"LT" -> return LT
"EQ" -> return EQ
"GT" -> return GT
_ -> pfail
)
readListPrec = readListPrecDefault
readList = readListDefault
instance Read a => Read (Maybe a) where
readPrec =
parens
(do L.Ident "Nothing" <- lexP
return Nothing
+++
prec appPrec (
do L.Ident "Just" <- lexP
x <- step readPrec
return (Just x))
)
readListPrec = readListPrecDefault
readList = readListDefault
instance Read a => Read [a] where
readPrec = readListPrec
readListPrec = readListPrecDefault
readList = readListDefault
do " array " < - lexP
theBounds < - step readPrec
return ( array theBounds vals )
readList = readListDefault
instance Read L.Lexeme where
readPrec = lexP
readListPrec = readListPrecDefault
readList = readListDefault
readNumber :: Num a => (L.Lexeme -> ReadPrec a) -> ReadPrec a
readNumber convert =
parens
( do x <- lexP
case x of
L.Symbol "-" -> do y <- lexP
n <- convert y
return (negate n)
_ -> convert x
)
convertInt :: Num a => L.Lexeme -> ReadPrec a
convertInt (L.Int i) = return (fromInteger i)
convertInt _ = pfail
convertFrac :: Fractional a => L.Lexeme -> ReadPrec a
convertFrac (L.Int i) = return (fromInteger i)
convertFrac (L.Rat r) = return (fromRational r)
convertFrac _ = pfail
instance Read Int where
readPrec = readNumber convertInt
readListPrec = readListPrecDefault
readList = readListDefault
instance Read Integer where
readPrec = readNumber convertInt
readListPrec = readListPrecDefault
readList = readListDefault
instance Read Float where
readList = readListDefault
readList = readListDefault
instance (Integral a, Read a) => Read (Ratio a) where
readPrec =
parens
( prec ratioPrec
( do x <- step readPrec
L.Symbol "%" <- lexP
y <- step readPrec
return (x % y)
)
)
readListPrec = readListPrecDefault
readList = readListDefault
instance Read () where
readPrec =
parens
( paren
( return ()
)
)
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b) => Read (a,b) where
readPrec = wrap_tup read_tup2
readListPrec = readListPrecDefault
readList = readListDefault
wrap_tup :: ReadPrec a -> ReadPrec a
wrap_tup p = parens (paren p)
read_comma :: ReadPrec ()
read_comma = do { L.Punc "," <- lexP; return () }
read_tup2 :: (Read a, Read b) => ReadPrec (a,b)
read_tup2 = do x <- readPrec
read_comma
y <- readPrec
return (x,y)
read_tup4 :: (Read a, Read b, Read c, Read d) => ReadPrec (a,b,c,d)
read_tup4 = do (a,b) <- read_tup2
read_comma
(c,d) <- read_tup2
return (a,b,c,d)
read_tup8 :: (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h)
=> ReadPrec (a,b,c,d,e,f,g,h)
read_tup8 = do (a,b,c,d) <- read_tup4
read_comma
(e,f,g,h) <- read_tup4
return (a,b,c,d,e,f,g,h)
instance (Read a, Read b, Read c) => Read (a, b, c) where
readPrec = wrap_tup (do { (a,b) <- read_tup2; read_comma
; c <- readPrec
; return (a,b,c) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d) => Read (a, b, c, d) where
readPrec = wrap_tup read_tup4
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e ) = > Read ( a , b , c , d , e ) where
readPrec = wrap_tup ( do { ( a , b , c , d ) < - read_tup4 ; read_comma
; e < - readPrec
; return ( a , b , c , d , e ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f )
= > Read ( a , b , c , d , e , f ) where
readPrec = wrap_tup ( do { ( a , b , c , d ) < - read_tup4 ; read_comma
; ( e , f ) < - read_tup2
; return ( a , b , c , d , e , f ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , )
= > Read ( a , b , c , d , e , f , ) where
readPrec = wrap_tup ( do { ( a , b , c , d ) < - read_tup4 ; read_comma
; ( e , f ) < - read_tup2 ; read_comma
;
; return ( a , b , c , d , e , f , ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h )
= > Read ( a , b , c , d , e , f , , h ) where
readPrec = wrap_tup read_tup8
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i )
= > Read ( a , b , c , d , e , f , , h , i ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; i < - readPrec
; return ( a , b , c , d , e , f , , h , i ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j )
= > Read ( a , b , c , d , e , f , , h , i , j ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j ) < - read_tup2
; return ( a , b , c , d , e , f , , h , i , j ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k )
= > Read ( a , b , c , d , e , f , , h , i , j , k ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j ) < - read_tup2 ; read_comma
; k < - readPrec
; return ( a , b , c , d , e , f , , h , i , j , k ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4
; return ( a , b , c , d , e , f , , h , i , j , k , l ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l , Read m )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l , m ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4 ; read_comma
; m < - readPrec
; return ( a , b , c , d , e , f , , h , i , j , k , l , m ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l , Read m , Read n )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l , m , n ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4 ; read_comma
; ( m , n ) < - read_tup2
; return ( a , b , c , d , e , f , , h , i , j , k , l , m , n ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance ( Read a , Read b , Read c , Read d , Read e , Read f , , Read h ,
Read i , Read j , Read k , Read l , Read m , Read n , Read o )
= > Read ( a , b , c , d , e , f , , h , i , j , k , l , m , n , o ) where
readPrec = wrap_tup ( do { ( a , b , c , d , e , f , , h ) < - read_tup8 ; read_comma
; ( i , j , k , l ) < - read_tup4 ; read_comma
; ( m , n ) < - read_tup2 ; read_comma
; o < - readPrec
; return ( a , b , c , d , e , f , , h , i , j , k , l , m , n , o ) } )
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e) => Read (a, b, c, d, e) where
readPrec = wrap_tup (do { (a,b,c,d) <- read_tup4; read_comma
; e <- readPrec
; return (a,b,c,d,e) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f)
=> Read (a, b, c, d, e, f) where
readPrec = wrap_tup (do { (a,b,c,d) <- read_tup4; read_comma
; (e,f) <- read_tup2
; return (a,b,c,d,e,f) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g)
=> Read (a, b, c, d, e, f, g) where
readPrec = wrap_tup (do { (a,b,c,d) <- read_tup4; read_comma
; (e,f) <- read_tup2; read_comma
; g <- readPrec
; return (a,b,c,d,e,f,g) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h)
=> Read (a, b, c, d, e, f, g, h) where
readPrec = wrap_tup read_tup8
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i)
=> Read (a, b, c, d, e, f, g, h, i) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; i <- readPrec
; return (a,b,c,d,e,f,g,h,i) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j)
=> Read (a, b, c, d, e, f, g, h, i, j) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j) <- read_tup2
; return (a,b,c,d,e,f,g,h,i,j) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k)
=> Read (a, b, c, d, e, f, g, h, i, j, k) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j) <- read_tup2; read_comma
; k <- readPrec
; return (a,b,c,d,e,f,g,h,i,j,k) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4
; return (a,b,c,d,e,f,g,h,i,j,k,l) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l, Read m)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l, m) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4; read_comma
; m <- readPrec
; return (a,b,c,d,e,f,g,h,i,j,k,l,m) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l, Read m, Read n)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l, m, n) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4; read_comma
; (m,n) <- read_tup2
; return (a,b,c,d,e,f,g,h,i,j,k,l,m,n) })
readListPrec = readListPrecDefault
readList = readListDefault
instance (Read a, Read b, Read c, Read d, Read e, Read f, Read g, Read h,
Read i, Read j, Read k, Read l, Read m, Read n, Read o)
=> Read (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) where
readPrec = wrap_tup (do { (a,b,c,d,e,f,g,h) <- read_tup8; read_comma
; (i,j,k,l) <- read_tup4; read_comma
; (m,n) <- read_tup2; read_comma
; o <- readPrec
; return (a,b,c,d,e,f,g,h,i,j,k,l,m,n,o) })
readListPrec = readListPrecDefault
readList = readListDefault
readp :: Read a => ReadP a
readp = readPrec_to_P readPrec minPrec
|
b0fa52beb6a023c37e783b765efc3c3f10035fc5dee132ca334e5904e924d58e | samth/tr-tutorial | 2.rkt | #lang typed/racket
Report each unique line from stdin
(: saw : (HashTable String Boolean))
(define saw (make-hash))
(for ([line (in-lines)])
(unless (hash-ref saw line (λ () #f))
(displayln line))
(hash-set! saw line #t))
| null | https://raw.githubusercontent.com/samth/tr-tutorial/3457df465921274f410bfa0ec0a623410fb22b47/2.rkt | racket | #lang typed/racket
Report each unique line from stdin
(: saw : (HashTable String Boolean))
(define saw (make-hash))
(for ([line (in-lines)])
(unless (hash-ref saw line (λ () #f))
(displayln line))
(hash-set! saw line #t))
| |
757117384ab1e0e2f682a91006310e45856b30b7d004de72c6408d33f7fc2ecf | GillianPlatform/Gillian | config.ml | let include_dirs = ref ([] : string list)
let source_paths = ref ([] : string list)
let burn_csm = ref false
let hide_genv = ref false
let warnings = ref true
let hide_undef = ref false
let hide_mult_def = ref false
let verbose_compcert = ref false
let pp_full_tree = ref false
let allocated_functions = ref false
| null | https://raw.githubusercontent.com/GillianPlatform/Gillian/1c8d65120c04ef87cda689a9d41268e25b5ffa7e/Gillian-C/lib/config.ml | ocaml | let include_dirs = ref ([] : string list)
let source_paths = ref ([] : string list)
let burn_csm = ref false
let hide_genv = ref false
let warnings = ref true
let hide_undef = ref false
let hide_mult_def = ref false
let verbose_compcert = ref false
let pp_full_tree = ref false
let allocated_functions = ref false
| |
09462a4f5cab8f9f867a8981908cbc187cacabe83f28078de14304205f3df081 | cognitect-labs/aws-api | validation.clj | Copyright ( c ) Cognitect , Inc.
;; All rights reserved.
(ns ^:skip-wiki cognitect.aws.client.validation
"For internal use. Don't call directly."
(:require [cognitect.aws.client.protocol :as client.protocol]
[cognitect.aws.dynaload :as dynaload]
[cognitect.aws.service :as service]))
(set! *warn-on-reflection* true)
(defn validate-requests?
"For internal use. Don't call directly."
[client]
(some-> client client.protocol/-get-info :validate-requests? deref))
(def ^:private registry-ref (delay (dynaload/load-var 'clojure.spec.alpha/registry)))
(defn registry
"For internal use. Don't call directly."
[& args] (apply @registry-ref args))
(def ^:private valid?-ref (delay (dynaload/load-var 'clojure.spec.alpha/valid?)))
(defn valid?
"For internal use. Don't call directly."
[& args] (apply @valid?-ref args))
(def ^:private explain-data-ref (delay (dynaload/load-var 'clojure.spec.alpha/explain-data)))
(defn explain-data
"For internal use. Don't call directly."
[& args] (apply @explain-data-ref args))
(defn request-spec
"For internal use. Don't call directly."
[service op]
(when-let [spec (service/request-spec-key service op)]
(when (contains? (-> (registry) keys set) spec)
spec)))
(defn invalid-request-anomaly
"For internal use. Don't call directly."
[spec request]
(assoc (explain-data spec request)
:cognitect.anomalies/category :cognitect.anomalies/incorrect))
(defn unsupported-op-anomaly
"For internal use. Don't call directly."
[service op]
{:cognitect.anomalies/category :cognitect.anomalies/unsupported
:cognitect.anomalies/message "Operation not supported"
:service (keyword (service/service-name service))
:op op}) | null | https://raw.githubusercontent.com/cognitect-labs/aws-api/e262aac7a5cd2c1214a632ba851f95b6686a9b0f/src/cognitect/aws/client/validation.clj | clojure | All rights reserved. | Copyright ( c ) Cognitect , Inc.
(ns ^:skip-wiki cognitect.aws.client.validation
"For internal use. Don't call directly."
(:require [cognitect.aws.client.protocol :as client.protocol]
[cognitect.aws.dynaload :as dynaload]
[cognitect.aws.service :as service]))
(set! *warn-on-reflection* true)
(defn validate-requests?
"For internal use. Don't call directly."
[client]
(some-> client client.protocol/-get-info :validate-requests? deref))
(def ^:private registry-ref (delay (dynaload/load-var 'clojure.spec.alpha/registry)))
(defn registry
"For internal use. Don't call directly."
[& args] (apply @registry-ref args))
(def ^:private valid?-ref (delay (dynaload/load-var 'clojure.spec.alpha/valid?)))
(defn valid?
"For internal use. Don't call directly."
[& args] (apply @valid?-ref args))
(def ^:private explain-data-ref (delay (dynaload/load-var 'clojure.spec.alpha/explain-data)))
(defn explain-data
"For internal use. Don't call directly."
[& args] (apply @explain-data-ref args))
(defn request-spec
"For internal use. Don't call directly."
[service op]
(when-let [spec (service/request-spec-key service op)]
(when (contains? (-> (registry) keys set) spec)
spec)))
(defn invalid-request-anomaly
"For internal use. Don't call directly."
[spec request]
(assoc (explain-data spec request)
:cognitect.anomalies/category :cognitect.anomalies/incorrect))
(defn unsupported-op-anomaly
"For internal use. Don't call directly."
[service op]
{:cognitect.anomalies/category :cognitect.anomalies/unsupported
:cognitect.anomalies/message "Operation not supported"
:service (keyword (service/service-name service))
:op op}) |
ba28806c92e0340c39f9cfb7739faf24223eebe8dbb620568ce3e0fb3f8ab454 | typedclojure/typedclojure | track.cljc | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns typed.clj.annotator.track
(:require #?@(:clj [[potemkin.collections :as pot]])
[typed.clj.annotator.rep :refer [-val key-path map-vals-path
infer-results -class -any
fn-dom-path fn-rng-path
-nothing seq-entry
transient-vector-entry index-path
vec-entry-path
set-entry make-HMap
map-keys-path
atom-contents
var-path]]
[clojure.core.typed.contract-utils :as con]
[typed.clj.annotator.env :refer [add-infer-results!
results-atom]]
[typed.clj.annotator.util :refer [classify]]
[clojure.math.combinatorics :as comb]
[clojure.core.typed.util-vars :as vs]
[clojure.core.typed.current-impl :as impl]
))
(defn local-fn-symbol? [s]
(= :local-fn (:typed.clj.annotator.track/track-kind (meta s))))
(defn loop-var-symbol? [s]
(= :loop-var (:typed.clj.annotator.track/track-kind (meta s))))
(defn extend-paths [paths extension]
(into #{}
(map (fn [path]
(conj path extension)))
paths))
(def ^:dynamic *should-track* true)
(def ^:const apply-realize-limit 20)
(def ^:dynamic *track-depth* nil #_5)
(def ^:dynamic *track-count* nil #_5)
(def ^:dynamic *root-results* nil #_5)
(def stored-call-ids (atom {}))
(defn gen-call-id [paths]
[paths (swap! stored-call-ids update paths (fnil inc 0))])
(declare track)
#?(:clj
(pot/def-map-type PersistentMapProxy [^clojure.lang.IPersistentMap m k-to-track-info config results-atom
if started as HMap tracking map , map from kw->Type
;; for all keyword keys with keyword values
current-kw-entries-types
current-ks current-all-kws?]
Object
(toString [this] (.toString m))
(equals [this obj] (.equals m obj))
clojure.lang.Counted
(count [this] (count m))
;; TODO (.seq this), .iterator, .vals
java.util.Map
(size [this] (.size ^java.util.Map m))
(containsKey [this obj] (.containsKey ^java.util.Map m obj))
(equiv [this obj]
(.equiv m obj))
(get [this key default-value] (if (contains? m key)
(let [v (get m key)
track-infos (get k-to-track-info key)]
(if (empty? track-infos)
;; this entry has no relation to paths
v
(let [{:keys [paths call-ids]}
(binding [*should-track* false]
(reduce (fn [acc [{:keys [ks kw-entries-types all-kws?] :as track-info}
{:keys [paths call-ids]}]]
{:pre [(boolean? all-kws?)
(set? ks)
(map? kw-entries-types)
(set? paths)
(set? call-ids)]}
(let [path-extension (if (and (keyword? key)
all-kws?)
HMap tracking
(key-path kw-entries-types ks key)
;; homogeneous map tracking
;; FIXME what about map-keys-path tracking?
(map-vals-path))]
(-> acc
(update :call-ids into call-ids)
(update :paths into (extend-paths paths path-extension)))))
{:paths #{}
:call-ids #{}}
track-infos))]
(track config results-atom v paths call-ids))))
default-value))
(assoc [this key value] (PersistentMapProxy. (assoc m key value)
;; new value has no relation to paths
(dissoc k-to-track-info key)
config
results-atom
(if (and (keyword? key)
(keyword? value))
(assoc current-kw-entries-types key (-val value))
current-kw-entries-types)
(conj current-ks key)
(and current-all-kws?
(keyword? key))))
(dissoc [this key] (PersistentMapProxy. (dissoc m key)
;; new value has no relation to paths
(dissoc k-to-track-info key)
config
results-atom
(dissoc current-kw-entries-types key)
(disj current-ks key)
(or current-all-kws?
;; might have deleted the last non-keyword key
(every? keyword? (disj current-ks key)))))
;; TODO wrap
(keys [this] (keys m))
;; TODO vals
(meta [this] (meta m))
(hashCode [this] (.hashCode ^Object m))
(hasheq [this] (.hasheq ^clojure.lang.IHashEq m))
(with-meta [this meta] (PersistentMapProxy. (with-meta m meta)
k-to-track-info
config
results-atom
current-kw-entries-types
current-ks
current-all-kws?))))
(defn unwrap-value [v]
(if-some [[_ u] (or (-> v meta (find ::unwrapped-fn))
(-> v meta (find ::unwrapped-seq))
#?(:clj
(when (instance? PersistentMapProxy v)
[nil (.m ^PersistentMapProxy v)])))]
values are only wrapped one level , no recursion calls needed
u
v))
(def track-metric-cache (atom {}))
; track : (Atom InferResultEnv) Value Path -> Value
(defn track
  "Instrument value v, recording inference results into results-atom under
  every path in `paths` (a set of path vectors rooted at a var or local).
  Atomic values are recorded and returned as-is; functions, lazy seqs and
  (on the JVM, with :track-strategy :lazy) maps are returned wrapped so
  their later uses keep being tracked. call-ids identifies the chain of
  function calls that produced v. Tracking is cut off (recorded as
  :unknown) when *should-track* is off, when a var already has
  `root-results` results, or when all paths exceed `track-depth`."
  ([{:keys [track-depth track-count track-strategy track-metric root-results force-depth] :as config} results-atom v paths call-ids]
   {:pre [((con/set-c? vector?) paths)
          (seq paths)
          ((con/set-c? vector?) call-ids)]}
   ;; Optional metric hook: :track-metric is a code string, eval'd once and
   ;; cached, then called with the merged config and current tracking state.
   #?(:clj
      (when (string? track-metric)
        (let [tm (or (get @track-metric-cache track-metric)
                     (-> track-metric read-string eval))
              _ (when-not (@track-metric-cache track-metric)
                  (reset! track-metric-cache {track-metric tm}))]
          (tm (merge config
                     {:results-atom results-atom
                      :v v
                      :paths paths
                      :call-ids call-ids})))))
   (let [;FIXME memory intensive
         #_#_
         _ (let [hs ((juxt
                       #(System/identityHashCode %)
                       class)
                     (unwrap-value v))]
             ;(prn "call-ids" (map (comp #(map (comp :name first) %) first) call-ids))
             (swap! results-atom update :call-flows
                    (fn [flows]
                      (reduce (fn [flows call-id]
                                (reduce (fn [flows path]
                                          (let [vname (-> path first :name)
                                                _ (assert (symbol? vname))]
                                            (update-in flows [vname call-id]
                                                       (fn [m]
                                                         (-> m
                                                             (update-in [:path-hashes path] (fnil conj #{}) hs)
                                                             (update-in [:hash-occurrences hs] (fnil conj #{}) path))))))
                                        flows
                                        paths))
                              flows
                              call-ids))))
         paths-that-exceed-root-results (let [rr (:root-results @results-atom)]
                                          (when root-results
                                            (filter #(< root-results (get rr (-> % first :name) 0))
                                                    paths)))]
     (cond
       ;; atomic values: record their singleton type directly
       ((some-fn keyword? nil? false?) v)
       (do
         (add-infer-results! results-atom (infer-results (remove (set paths-that-exceed-root-results) paths)
                                                         (-val v)))
         v)

       ;; cut off path
       (or
         (not *should-track*)
         ;; cap at root-results results per var
         (seq paths-that-exceed-root-results)
         (let [smallest-path-count (apply min (map count paths))]
           (if (and force-depth (>= force-depth smallest-path-count))
             false
             (when track-depth
               (> smallest-path-count track-depth)))))
       ;(debug
       ; (println "Cut off inference at path "
       ;          (unparse-path path)
       ;          "(due to " (if *should-track*
       ;                       (str "track depth of" *track-depth*
       ;                            "being exceeded")
       ;                       (str "disabled tracking of internal ops"))
       ;          ")")
       (let [;; record as unknown so this doesn't
             ;; cut off actually recursive types.
             _ (add-infer-results! results-atom (infer-results (remove (set paths-that-exceed-root-results) paths)
                                                               {:op :unknown}))]
         (unwrap-value v))
       ;)

       ;; only accurate up to 20 arguments.
       ;; all arities 21 and over will collapse into one.
       (fn? v) (let [[paths unwrapped-fn] (if (-> v meta ::wrapped-fn?)
                                            ((juxt ::paths ::unwrapped-fn)
                                             ;; combine paths
                                             (update (meta v) ::paths into paths))
                                            [paths v])
                     _ (assert (set? paths))
                     ;; Now, remember this value is at least a function, in case it is never invoked.
                     ;; This will get noted redundantly for older paths, if that's
                     ;; some kind of issue, we should remember which paths we've already noted.
                     _ (add-infer-results! results-atom (infer-results paths (-class :ifn [])))
                     call-ids (conj call-ids (gen-call-id paths))
                     ;; space-efficient function wrapping
                     wrap-fn (fn [paths unwrapped-fn]
                               (with-meta
                                 (fn [& args]
                                   ;; NOTE(review): the blen binding was missing in this copy of
                                   ;; the file; restored using clojure.core/bounded-count with the
                                   ;; file's apply-realize-limit (20) — confirm against upstream.
                                   (let [blen (bounded-count apply-realize-limit args) ;; apply only realises 20 places
                                         _ (when (= 0 blen)
                                             (track config results-atom
                                                    -any ;ignored, just noting this is called with 0-args
                                                    (extend-paths paths (fn-dom-path 0 -1))
                                                    call-ids))
                                         ;; here we throw away arities after 20 places.
                                         ;; no concrete reason for this other than it feeling like a sensible
                                         ;; compromise.
                                         args (map-indexed
                                                (fn [n arg]
                                                  (if (< n blen)
                                                    (track config results-atom arg
                                                           (extend-paths paths (fn-dom-path blen n))
                                                           call-ids)
                                                    arg))
                                                args)]
                                     (track config results-atom (apply unwrapped-fn args)
                                            (extend-paths paths (fn-rng-path blen))
                                            call-ids)))
                                 (merge (meta unwrapped-fn)
                                        {::wrapped-fn? true
                                         ::paths paths
                                         ::unwrapped-fn unwrapped-fn})))]
                 (wrap-fn paths v))

       (list? v)
       (let []
         (when (empty? v)
           (add-infer-results!
             results-atom
             (infer-results paths
                            (-class :list [-nothing]))))
         (let [res
               (with-meta
                 (apply list
                        (map (fn [e]
                               (track config results-atom e (extend-paths paths (seq-entry))
                                      call-ids))
                             v))
                 (meta v))]
           (assert (list? res))
           res))

       (and (seq? v)
            (not (list? v)))
       (let [[paths unwrapped-seq paths-where-original-coll-could-be-empty]
             (if (-> v meta ::wrapped-seq?)
               ((juxt ::paths ::unwrapped-seq ::paths-where-original-coll-could-be-empty)
                ;; combine paths
                (-> (meta v)
                    (update ::paths into paths)
                    (update ::paths-where-original-coll-could-be-empty into paths)))
               [paths v paths])
             _ (assert (set? paths))
             ;; space-efficient wrapping
             wrap-lseq
             (fn wrap-lseq [unwrapped-seq paths-where-original-coll-could-be-empty]
               (with-meta
                 (lazy-seq
                   (if (empty? unwrapped-seq)
                     (let []
                       (when (seq paths-where-original-coll-could-be-empty)
                         (add-infer-results!
                           results-atom
                           (infer-results
                             paths-where-original-coll-could-be-empty
                             (-class :seq [-nothing]))))
                       unwrapped-seq)
                     (cons (track config results-atom
                                  (first unwrapped-seq)
                                  (extend-paths paths (seq-entry))
                                  call-ids)
                           (wrap-lseq (rest unwrapped-seq)
                                      ;; collection can no longer be empty for these paths
                                      #{}))))
                 (merge (meta unwrapped-seq)
                        {::wrapped-seq? true
                         ::paths-where-original-coll-could-be-empty paths-where-original-coll-could-be-empty
                         ::paths paths
                         ::unwrapped-seq unwrapped-seq})))]
         (wrap-lseq unwrapped-seq paths-where-original-coll-could-be-empty))

       (instance? #?(:clj clojure.lang.ITransientVector :cljs TransientVector) v)
       (let [cnt (count v)]
         (reduce
           (fn [v i]
             (let [e (nth v i)
                   e' (track config results-atom e
                             (extend-paths paths (transient-vector-entry))
                             call-ids)]
               (if (identical? e e')
                 v
                 (binding [*should-track* false]
                   (assoc! v i e')))))
           v
           (range cnt)))

       ;; cover map entries
       (and (vector? v)
            (= 2 (count v)))
       (let [k (track config results-atom (nth v 0) (extend-paths paths (index-path 2 0)) call-ids)
             vl (track config results-atom (nth v 1) (extend-paths paths (index-path 2 1)) call-ids)]
         (assoc v 0 k 1 vl))

       (vector? v)
       (let [heterogeneous? (<= (count v) 4)
             len (count v)
             so-far (atom 0)]
         (when (= 0 len)
           (add-infer-results! results-atom (infer-results paths (-class :vector [-nothing]))))
         (reduce
           (fn [e [k v]]
             (swap! so-far inc)
             (let [v' (track config results-atom v (extend-paths
                                                     paths
                                                     (if heterogeneous?
                                                       (index-path len k)
                                                       (vec-entry-path)))
                             call-ids)]
               (cond
                 (when-let [tc track-count]
                   (< tc @so-far))
                 (reduced (binding [*should-track* false]
                            (assoc e k v')))
                 (identical? v v') e
                 :else
                 (binding [*should-track* false]
                   (assoc e k v')))))
           v
           (map-indexed vector v)))

       (set? v)
       (do
         (when (empty? v)
           (add-infer-results!
             results-atom
             (infer-results paths
                            (-class :set [-nothing]))))
         ;; preserve sorted sets
         (binding [*should-track* false]
           (into (empty v)
                 (map (fn [e]
                        (binding [*should-track* true]
                          (track config results-atom e (extend-paths paths (set-entry))
                                 call-ids))))
                 v)))

       #?(:clj (instance? PersistentMapProxy v))
       #?(:clj
          (let [^PersistentMapProxy v v
                ks (.current-ks v)
                _ (assert (set? ks))
                all-kws? (.current-all-kws? v)
                _ (assert (boolean? all-kws?))
                kw-entries-types (.current-kw-entries-types v)
                _ (assert (map? kw-entries-types))
                track-info {:all-kws? all-kws?
                            :ks ks
                            :kw-entries-types kw-entries-types}]
            ;; TODO do we update the config/results-atom? What if they're different than the proxy's?
            (PersistentMapProxy. (.m v)
                                 (reduce (fn [m k]
                                           (update-in m [k track-info]
                                                      #(merge-with (fnil into #{})
                                                                   %
                                                                   {:paths paths
                                                                    :call-ids call-ids})))
                                         (.k-to-track-info v)
                                         ;; FIXME we should remove known kw entries
                                         ks)
                                 (.config v)
                                 (.results-atom v)
                                 (.current-kw-entries-types v)
                                 (.current-ks v)
                                 (.current-all-kws? v))))

       #?(:clj
          (or (instance? clojure.lang.PersistentHashMap v)
              (instance? clojure.lang.PersistentArrayMap v)
              (instance? clojure.lang.PersistentTreeMap v))
          :cljs (map? v))
       (let [ks (set (keys v))]
         (when (empty? v)
           (add-infer-results!
             results-atom
             (infer-results paths (make-HMap {} {}))))
         (cond
           (every? keyword? ks)
           (let [{with-kw-val true
                  no-kw-val false}
                 (binding [*should-track* false]
                   (group-by (fn [e]
                               (keyword? (val e)))
                             v))
                 kw-entries-types
                 (into {}
                       (map (fn [[k v]]
                              {:pre [(keyword? v)]}
                              [k (-val v)]))
                       with-kw-val)
                 ;; we rely on the no-kw-val map to
                 ;; track the simple keyword entries -- if there
                 ;; are none, just pick one of the kw-entries-types
                 ;; and track it.
                 _ (when (and (empty? no-kw-val)
                              (seq kw-entries-types))
                     (let [k (key (first kw-entries-types))]
                       (track config results-atom (get v k)
                              (binding [*should-track* false]
                                (extend-paths paths (key-path kw-entries-types ks k)))
                              call-ids)))
                 v #?(:cljs v
                      :clj (if (= track-strategy :lazy)
                             (PersistentMapProxy. v
                                                  ;; NOTE(review): with-kw-val is a seq of map entries,
                                                  ;; not keys, so this disj likely removes nothing —
                                                  ;; verify whether removing (map key with-kw-val) was intended.
                                                  (zipmap (apply disj ks with-kw-val)
                                                          (repeat {{:all-kws? true
                                                                    :kw-entries-types kw-entries-types
                                                                    :ks ks}
                                                                   {:paths paths
                                                                    :call-ids call-ids}}))
                                                  config
                                                  results-atom
                                                  kw-entries-types
                                                  ks
                                                  true)
                             v))]
             (reduce
               (fn [m [k orig-v]]
                 (let [v (track config results-atom orig-v
                                (binding [*should-track* false]
                                  (extend-paths paths (key-path kw-entries-types ks k)))
                                call-ids)]
                   (cond
                     ;; only assoc if needed
                     (identical? v orig-v) m
                     :else
                     (binding [*should-track* false]
                       (assoc m k v)))))
               v
               no-kw-val))
           :else
           (let [so-far (atom 0)
                 v #?(:cljs v
                      :clj (if (= track-strategy :lazy)
                             (PersistentMapProxy. v
                                                  (zipmap ks (repeat {{:all-kws? false
                                                                       :kw-entries-types {}
                                                                       :ks ks}
                                                                      {:paths paths
                                                                       :call-ids call-ids}}))
                                                  config
                                                  results-atom
                                                  {}
                                                  ks
                                                  false)
                             v))]
             (reduce
               (fn [m k]
                 (swap! so-far inc)
                 (let [orig-v (get m k)
                       [new-k v]
                       (cond
                         ;; We don't want to pollute the HMap-req-ks with
                         ;; non keywords (yet), disable.
                         ;(keyword? k)
                         ;[k (track config results-atom orig-v
                         ;          (binding [*should-track* false]
                         ;            (extend-paths paths (key-path {} ks k))))]
                         :else
                         [(track config results-atom k
                                 (binding [*should-track* false]
                                   (extend-paths paths (map-keys-path)))
                                 call-ids)
                          (track config results-atom orig-v
                                 (binding [*should-track* false]
                                   (extend-paths paths (map-vals-path)))
                                 call-ids)])]
                   (cond
                     ;; cut off homogeneous map
                     ;; NOTE(review): reads the dynamic *track-count* here, while the
                     ;; vector branch uses the config's track-count — confirm which is intended.
                     (when-let [tc *track-count*]
                       (< tc @so-far))
                     (reduced
                       (binding [*should-track* false]
                         (-> m
                             ;; ensure we replace the key
                             (dissoc k)
                             (assoc new-k v))))
                     ;; only assoc if needed
                     (identical? v orig-v) m
                     ;; make sure we replace the key
                     (not (identical? new-k k))
                     (binding [*should-track* false]
                       (-> m
                           (dissoc k)
                           (assoc new-k v)))
                     :else
                     (binding [*should-track* false]
                       (assoc m new-k v)))))
               v
               (keys v)))))

       (instance? #?(:clj clojure.lang.IAtom :cljs Atom) v)
       (let [old-val (-> v meta :clojure.core.typed/old-val)
             new-paths (binding [*should-track* false]
                         (extend-paths paths (atom-contents)))
             should-track? (binding [*should-track* false]
                             (not= @v old-val))
             _ (when should-track?
                 (track config results-atom @v new-paths
                        call-ids))
             #_#_
             _ (binding [*should-track* false]
                 (add-watch
                   v
                   new-paths
                   (fn [_ _ _ new]
                     (binding [*should-track* true]
                       (track config results-atom new new-paths
                              call-ids)))))]
         v)

       :else (do
               (add-infer-results! results-atom (infer-results paths (-class (classify v) [])))
               v)))))
;; Forward declaration: gen-track-config is defined further down but is
;; referenced by track-cljs-val before that point.
(declare gen-track-config)
#?(:cljs
(defn track-cljs-val
  "Instrument value v, recording inference results against the symbol
  `root` under the 'root namespace, with an empty call-id set."
  [v root]
  (track (gen-track-config)
         results-atom
         v
         #{[(var-path
              'root
              root)]}
         #{})))
#?(:clj
(def prim-invoke-interfaces
  ;; Every tag-signature string of length 1-5 over the characters
  ;; \D, \O, \L for which a clojure.lang.IFn$<sig> primitive-invoke
  ;; interface exists. Signatures consisting solely of \O are excluded.
  (set
    (for [n (range 1 6)
          chars (apply comb/cartesian-product (repeat n [\D \O \L]))
          :let [sig (apply str chars)]
          :when (not (every? #{\O} sig))]
      sig))))
#?(:clj
(defn char->tag
  "Map a primitive-signature character to its type-hint symbol:
  \\L -> long, \\D -> double, \\O -> java.lang.Object."
  [c]
  {:pre [(char? c)]
   :post [(symbol? %)]}
  (condp = c
    \L 'long
    \D 'double
    \O 'java.lang.Object)))
#?(:clj
(defn tag->char
  "Inverse of char->tag: map a type-hint symbol to its signature
  character. Anything other than 'long or 'double (including nil)
  falls through to \\O (Object)."
  [t]
  {:pre [((some-fn nil? symbol?) t)]
   :post [(char? %)]}
  (condp = t
    'long \L
    'double \D
    \O)))
#?(:clj
(defn gen-prim-invokes
  "Generate interface/method forms implementing clojure.lang.IFn$<p>
  invokePrim for each primitive signature string p in prims. f-this is a
  fn of a 'this' symbol returning the expression that accesses the
  wrapped function; used by the deftypefn macro below."
  [f-this prims]
  ;(prn "gen-prim-invokes" prims)
  (mapcat
    (fn [p]
      {:pre [(string? p)]}
      ;; one argN symbol per signature character except the last,
      ;; which encodes the return tag
      (let [args (into []
                       (map-indexed
                         (fn [n c]
                           (-> (symbol (str "arg" n))
                               #_(vary-meta
                                   assoc :tag (char->tag c)))))
                       (butlast p))
            interface (symbol (str "clojure.lang.IFn$" p))
            rettag (char->tag (nth p (dec (count p))))
            ;_ (prn "rettag" rettag)
            this (gensym 'this)
            argvec (-> (vec (cons this args))
                       #_(vary-meta assoc :tag rettag))]
        #_
        (binding [*print-meta* true]
          (prn "argvec" argvec))
        [interface
         (list 'invokePrim argvec
               `(~(f-this this) ~@(map #(with-meta % nil) args)))]))
    prims)))
#?(:clj
(defn gen-nonvariadic-invokes
  "Generate the 20 fixed-arity 'invoke method forms (arities 0-19),
  each delegating to the expression produced by (f-this this-sym)."
  [f-this]
  (map (fn [arity]
         (let [arg-syms (repeatedly arity gensym)
               this-sym (gensym 'this)]
           `(~'invoke [~this-sym ~@arg-syms]
             (~(f-this this-sym) ~@arg-syms))))
       (range 0 20))))
#?(:clj
(defn gen-variadic-invoke
  "Generate the widest 'invoke method form (this + 21 args), which
  delegates via apply to the expression produced by (f-this this-sym)."
  [f-this]
  (let [this-sym (gensym 'this)
        arg-syms (repeatedly 21 gensym)]
    `(~'invoke [~this-sym ~@arg-syms] (apply ~(f-this this-sym) ~@arg-syms)))))
#?(:clj
(defn gen-apply-to
  "Generate the 'applyTo method form, applying the wrapped function
  (via f-this) to the supplied argument seq."
  [f-this]
  (let [this-sym (gensym 'this)]
    `(~'applyTo [~this-sym args#] (apply ~(f-this this-sym) args#)))))
#?(:clj
(defn extend-IFn
  "Return deftype specs implementing clojure.lang.IFn in full: all fixed
  invoke arities, the variadic invoke, applyTo, and primitive invokes for
  each signature string in prims."
  [f-this prims]
  `(clojure.lang.IFn
     ~@(gen-nonvariadic-invokes f-this)
     ~(gen-variadic-invoke f-this)
     ~(gen-apply-to f-this)
     ~@(gen-prim-invokes f-this prims))))
#?(:clj
(defmacro deftypefn
  "Like deftype, but accepts a function f before any specs that is
  used to implement clojure.lang.IFn. f should accept at least one
  argument, 'this'."
  [name prims & opts+specs]
  (let [field 'f
        ;; expression reading the wrapped fn off a 'this' symbol: (. this -f)
        f-this (fn [this]
                 (list '. this (symbol (str "-" field))))
        source `(deftype ~name [~field]
                  ~@(extend-IFn f-this prims)
                  ~@opts+specs)]
    #_
    (binding [*print-meta* true]
      (pprint source))
    source)))
#?(:clj
;; The namespace this file was loaded in; wrap-prim evals deftypefn forms
;; and resolves their generated ->Ctor vars relative to it.
(def this-ns *ns*))
#?(:clj
(defn arglist-prim-string
  "Encode an arglist vector's :tag metadata as a primitive signature
  string: one character per parameter tag, plus one for the return tag
  taken from the vector's own metadata. Returns the string only when a
  matching IFn$ primitive interface exists, else nil."
  [args]
  {:pre [(vector? args)]
   :post [((some-fn nil? string?) %)]}
  (let [param-chars (map (fn [arg] (tag->char (:tag (meta arg)))) args)
        ret-char (tag->char (:tag (meta args)))
        sig (apply str (concat param-chars [ret-char]))]
    (when (prim-invoke-interfaces sig)
      sig))))
#?(:clj
(defn wrap-prim
  "Wrap fn f so it remains callable through var vr's primitive-invoke
  interfaces. If vr's :arglists carry primitive type hints, eval a
  deftypefn whose name encodes those signatures and wrap f in it;
  otherwise return f unchanged."
  [vr f]
  {:pre [(var? vr)]}
  ;(prn "wrap-prim" vr)
  (let [prim-arglists
        (sort
          (->> (-> vr meta :arglists)
               (map arglist-prim-string)
               (filter string?)))]
    (cond
      (seq prim-arglists)
      (let [type-name (symbol
                        (str "PrimFn"
                             (apply str
                                    (interpose
                                      "_"
                                      prim-arglists))))
            ;_ (prn "type-name" type-name)
            ;; eval in this-ns so repeated wraps reuse the same namespace
            cls (or #_(ns-resolve this-ns type-name)
                    (binding [*ns* this-ns]
                      (eval
                        `(deftypefn ~type-name ~prim-arglists))))
            _ (assert (class? cls))
            ctor (ns-resolve this-ns
                             (symbol
                               (str "->" type-name)))
            _ (assert (var? ctor))]
        (ctor f))
      :else f))))
(defn gen-track-config
  "Build the default tracking configuration from the dynamic knobs
  (*track-depth*, *track-count*, *root-results*), overridden by any
  instrumentation settings in vs/*instrument-infer-config*."
  []
  (merge
    {:track-strategy :lazy
     :track-depth *track-depth*
     :track-count *track-count*
     :root-results *root-results*}
    vs/*instrument-infer-config*))
;; track-var : (IFn [Var -> Value] [(Atom Result)])
#?(:clj
(defn track-var'
  "Instrument var vr's root value, recording inference results under the
  var's path in ns, and wrap it for primitive invokes via wrap-prim.
  Defaults: config from gen-track-config, the global results-atom, *ns*."
  ([vr] (track-var' (gen-track-config) results-atom vr *ns*))
  ([config vr] (track-var' config results-atom vr *ns*))
  ([config results-atom vr ns]
   {:pre [(var? vr)
          (instance? #?(:clj clojure.lang.IAtom :cljs Atom) results-atom)]}
   ;(prn "tracking" vr "in ns" ns)
   (wrap-prim
     vr
     (track config
            results-atom @vr #{[(var-path
                                  (ns-name (the-ns ns))
                                  (impl/var->symbol vr))]}
            #{})))))
#?(:clj
(defmacro track-var
  "Convenience macro: instrument var v (a symbol) via track-var'."
  [v]
  `(track-var' (var ~v))))
;; track-def-init : Value
#?(:clj
(defn track-def-init
  "Instrument val, the initial value of the def named by fully-qualified
  symbol vsym in ns, recording results under that var's path. Wraps for
  primitive invokes via wrap-prim."
  [config vsym ns val]
  {:pre [(symbol? vsym)
         (namespace vsym)]}
  ;(prn "track-def-init")
  (let [v (ns-resolve ns vsym)]
    ;(prn v)
    (wrap-prim
      v
      (track config
             results-atom val
             #{[{:op :var
                 :ns (ns-name ns)
                 :name vsym}]}
             #{})))))
#?(:clj
(defn track-local-fn
  "Instrument val, a local fn or loop variable (per track-kind), recording
  results under a synthetic var path whose name encodes the source
  position as ns|line|column|end-line|end-column; the same position data
  is also attached as metadata on that name."
  [config track-kind line column end-line end-column ns val]
  {:pre [(#{:local-fn :loop-var} track-kind)]}
  #_
  (prn "track-local-fn"
       (symbol
         (str (ns-name ns)
              "|"
              line
              "|"
              column
              "|"
              end-line
              "|"
              end-column)))
  (track config
         results-atom val
         #{[{:op :var
             :typed.clj.annotator.track/track-kind track-kind
             :line line
             :column column
             :end-line end-line
             :end-column end-column
             :ns (ns-name ns)
             :name (with-meta
                     (symbol
                       (str (ns-name ns)
                            "|"
                            line
                            "|"
                            column
                            "|"
                            end-line
                            "|"
                            end-column))
                     {:typed.clj.annotator.track/track-kind track-kind
                      :line line
                      :column column
                      :end-line end-line
                      :end-column end-column
                      :ns (ns-name ns)})}]}
         #{})))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/82be8b54924ba5ccd0c1cebd509eabc9905ebf32/typed/clj.annotator/src/typed/clj/annotator/track.cljc | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
for all keyword keys with keyword values
TODO (.seq this), .iterator, .vals
this entry has no relation to paths
homogeneous map tracking
FIXME what about map-keys-path tracking?
new value has no relation to paths
new value has no relation to paths
might have deleted the last non-keyword key
TODO wrap
TODO vals
track : (Atom InferResultEnv) Value Path -> Value
FIXME memory intensive
cut off path
(debug
(println "Cut off inference at path "
(unparse-path path)
"(due to " (if *should-track*
(str "track depth of" *track-depth*
"being exceeded")
(str "disabled tracking of internal ops"))
")")
record as unknown so this doesn't
cut off actually recursive types.
)
combine paths
Now, remember this value is at least a function, in case it is never invoked.
This will get noted redundantly for older paths, if that's
some kind of issue, we should remember which paths we've already noted.
space-efficient function wrapping
ignored, just noting this is called with 0-args
no concrete reason for this other than it feeling like a sensible
compromise.
combine paths
space-efficient wrapping
collection can no longer be empty for these paths
cover map entries
preserve sorted sets
TODO do we update the config/results-atom? What if they're different than the proxy's?
FIXME we should remove known kw entries
we rely on the no-kw-val map to
track the simple keyword entries -- if there
and track it.
only assoc if needed
non keywords (yet), disable.
(keyword? k)
[k (track config results-atom orig-v
(binding [*should-track* false]
(extend-paths paths (key-path {} ks k))))]
cut off homogeneous map
ensure we replace the key
only assoc if needed
make sure we replace the key
(prn "gen-prim-invokes" prims)
_ (prn "rettag" rettag)
(prn "wrap-prim" vr)
_ (prn "type-name" type-name)
(prn "tracking" vr "in ns" ns)
(prn "track-def-init")
(prn v) | Copyright ( c ) , contributors .
(ns typed.clj.annotator.track
(:require #?@(:clj [[potemkin.collections :as pot]])
[typed.clj.annotator.rep :refer [-val key-path map-vals-path
infer-results -class -any
fn-dom-path fn-rng-path
-nothing seq-entry
transient-vector-entry index-path
vec-entry-path
set-entry make-HMap
map-keys-path
atom-contents
var-path]]
[clojure.core.typed.contract-utils :as con]
[typed.clj.annotator.env :refer [add-infer-results!
results-atom]]
[typed.clj.annotator.util :refer [classify]]
[clojure.math.combinatorics :as comb]
[clojure.core.typed.util-vars :as vs]
[clojure.core.typed.current-impl :as impl]
))
(defn local-fn-symbol? [s]
(= :local-fn (:typed.clj.annotator.track/track-kind (meta s))))
(defn loop-var-symbol? [s]
(= :loop-var (:typed.clj.annotator.track/track-kind (meta s))))
(defn extend-paths [paths extension]
(into #{}
(map (fn [path]
(conj path extension)))
paths))
(def ^:dynamic *should-track* true)
(def ^:const apply-realize-limit 20)
(def ^:dynamic *track-depth* nil #_5)
(def ^:dynamic *track-count* nil #_5)
(def ^:dynamic *root-results* nil #_5)
(def stored-call-ids (atom {}))
(defn gen-call-id [paths]
[paths (swap! stored-call-ids update paths (fnil inc 0))])
(declare track)
#?(:clj
(pot/def-map-type PersistentMapProxy [^clojure.lang.IPersistentMap m k-to-track-info config results-atom
if started as HMap tracking map , map from kw->Type
current-kw-entries-types
current-ks current-all-kws?]
Object
(toString [this] (.toString m))
(equals [this obj] (.equals m obj))
clojure.lang.Counted
(count [this] (count m))
java.util.Map
(size [this] (.size ^java.util.Map m))
(containsKey [this obj] (.containsKey ^java.util.Map m obj))
(equiv [this obj]
(.equiv m obj))
(get [this key default-value] (if (contains? m key)
(let [v (get m key)
track-infos (get k-to-track-info key)]
(if (empty? track-infos)
v
(let [{:keys [paths call-ids]}
(binding [*should-track* false]
(reduce (fn [acc [{:keys [ks kw-entries-types all-kws?] :as track-info}
{:keys [paths call-ids]}]]
{:pre [(boolean? all-kws?)
(set? ks)
(map? kw-entries-types)
(set? paths)
(set? call-ids)]}
(let [path-extension (if (and (keyword? key)
all-kws?)
HMap tracking
(key-path kw-entries-types ks key)
(map-vals-path))]
(-> acc
(update :call-ids into call-ids)
(update :paths into (extend-paths paths path-extension)))))
{:paths #{}
:call-ids #{}}
track-infos))]
(track config results-atom v paths call-ids))))
default-value))
(assoc [this key value] (PersistentMapProxy. (assoc m key value)
(dissoc k-to-track-info key)
config
results-atom
(if (and (keyword? key)
(keyword? value))
(assoc current-kw-entries-types key (-val value))
current-kw-entries-types)
(conj current-ks key)
(and current-all-kws?
(keyword? key))))
(dissoc [this key] (PersistentMapProxy. (dissoc m key)
(dissoc k-to-track-info key)
config
results-atom
(dissoc current-kw-entries-types key)
(disj current-ks key)
(or current-all-kws?
(every? keyword? (disj current-ks key)))))
(keys [this] (keys m))
(meta [this] (meta m))
(hashCode [this] (.hashCode ^Object m))
(hasheq [this] (.hasheq ^clojure.lang.IHashEq m))
(with-meta [this meta] (PersistentMapProxy. (with-meta m meta)
k-to-track-info
config
results-atom
current-kw-entries-types
current-ks
current-all-kws?))))
(defn unwrap-value [v]
(if-some [[_ u] (or (-> v meta (find ::unwrapped-fn))
(-> v meta (find ::unwrapped-seq))
#?(:clj
(when (instance? PersistentMapProxy v)
[nil (.m ^PersistentMapProxy v)])))]
values are only wrapped one level , no recursion calls needed
u
v))
(def track-metric-cache (atom {}))
(defn track
([{:keys [track-depth track-count track-strategy track-metric root-results force-depth] :as config} results-atom v paths call-ids]
{:pre [((con/set-c? vector?) paths)
(seq paths)
((con/set-c? vector?) call-ids)]}
#?(:clj
(when (string? track-metric)
(let [tm (or (get @track-metric-cache track-metric)
(-> track-metric read-string eval))
_ (when-not (@track-metric-cache track-metric)
(reset! track-metric-cache {track-metric tm}))]
(tm (merge config
{:results-atom results-atom
:v v
:paths paths
:call-ids call-ids})))))
#_#_
_ (let [hs ((juxt
#(System/identityHashCode %)
class)
(unwrap-value v))]
( prn " call - ids " ( map ( comp # ( map ( comp : name first ) % ) first ) call - ids ) )
(swap! results-atom update :call-flows
(fn [flows]
(reduce (fn [flows call-id]
(reduce (fn [flows path]
(let [vname (-> path first :name)
_ (assert (symbol? vname))]
(update-in flows [vname call-id]
(fn [m]
(-> m
(update-in [:path-hashes path] (fnil conj #{}) hs)
(update-in [:hash-occurrences hs] (fnil conj #{}) path))))))
flows
paths))
flows
call-ids))))
paths-that-exceed-root-results (let [rr (:root-results @results-atom)]
(when root-results
(filter #(< root-results (get rr (-> % first :name) 0))
paths)))]
(cond
((some-fn keyword? nil? false?) v)
(do
(add-infer-results! results-atom (infer-results (remove (set paths-that-exceed-root-results) paths)
(-val v)))
v)
(or
(not *should-track*)
cap at 1000 results per var
(seq paths-that-exceed-root-results)
(let [smallest-path-count (apply min (map count paths))]
(if (and force-depth (>= force-depth smallest-path-count))
false
(when track-depth
(> smallest-path-count track-depth)))))
_ (add-infer-results! results-atom (infer-results (remove (set paths-that-exceed-root-results) paths)
{:op :unknown}))]
(unwrap-value v))
only accurate up to 20 arguments .
all arities 21 and over will collapse into one .
(fn? v) (let [[paths unwrapped-fn] (if (-> v meta ::wrapped-fn?)
((juxt ::paths ::unwrapped-fn)
(update (meta v) ::paths into paths))
[paths v])
_ (assert (set? paths))
_ (add-infer-results! results-atom (infer-results paths (-class :ifn [])))
call-ids (conj call-ids (gen-call-id paths))
wrap-fn (fn [paths unwrapped-fn]
(with-meta
(fn [& args]
apply only realises 20 places
_ (when (= 0 blen)
(track config results-atom
(extend-paths paths (fn-dom-path 0 -1))
call-ids))
here we throw away arities after 20 places .
args (map-indexed
(fn [n arg]
(if (< n blen)
(track config results-atom arg
(extend-paths paths (fn-dom-path blen n))
call-ids)
arg))
args)]
(track config results-atom (apply unwrapped-fn args)
(extend-paths paths (fn-rng-path blen))
call-ids)))
(merge (meta unwrapped-fn)
{::wrapped-fn? true
::paths paths
::unwrapped-fn unwrapped-fn})))]
(wrap-fn paths v))
(list? v)
(let []
(when (empty? v)
(add-infer-results!
results-atom
(infer-results paths
(-class :list [-nothing]))))
(let [res
(with-meta
(apply list
(map (fn [e]
(track config results-atom e (extend-paths paths (seq-entry))
call-ids))
v))
(meta v))]
(assert (list? res))
res))
(and (seq? v)
(not (list? v)))
(let [[paths unwrapped-seq paths-where-original-coll-could-be-empty]
(if (-> v meta ::wrapped-seq?)
((juxt ::paths ::unwrapped-seq ::paths-where-original-coll-could-be-empty)
(-> (meta v)
(update ::paths into paths)
(update ::paths-where-original-coll-could-be-empty into paths)))
[paths v paths])
_ (assert (set? paths))
wrap-lseq
(fn wrap-lseq [unwrapped-seq paths-where-original-coll-could-be-empty]
(with-meta
(lazy-seq
(if (empty? unwrapped-seq)
(let []
(when (seq paths-where-original-coll-could-be-empty)
(add-infer-results!
results-atom
(infer-results
paths-where-original-coll-could-be-empty
(-class :seq [-nothing]))))
unwrapped-seq)
(cons (track config results-atom
(first unwrapped-seq)
(extend-paths paths (seq-entry))
call-ids)
(wrap-lseq (rest unwrapped-seq)
#{}))))
(merge (meta unwrapped-seq)
{::wrapped-seq? true
::paths-where-original-coll-could-be-empty paths-where-original-coll-could-be-empty
::paths paths
::unwrapped-seq unwrapped-seq})))]
(wrap-lseq unwrapped-seq paths-where-original-coll-could-be-empty))
(instance? #?(:clj clojure.lang.ITransientVector :cljs TransientVector) v)
(let [cnt (count v)]
(reduce
(fn [v i]
(let [e (nth v i)
e' (track config results-atom e
(extend-paths paths (transient-vector-entry))
call-ids)]
(if (identical? e e')
v
(binding [*should-track* false]
(assoc! v i e')))))
v
(range cnt)))
(and (vector? v)
(= 2 (count v)))
(let [k (track config results-atom (nth v 0) (extend-paths paths (index-path 2 0)) call-ids)
vl (track config results-atom (nth v 1) (extend-paths paths (index-path 2 1)) call-ids)]
(assoc v 0 k 1 vl))
(vector? v)
(let [heterogeneous? (<= (count v) 4)
len (count v)
so-far (atom 0)]
(when (= 0 len)
(add-infer-results! results-atom (infer-results paths (-class :vector [-nothing]))))
(reduce
(fn [e [k v]]
(swap! so-far inc)
(let [v' (track config results-atom v (extend-paths
paths
(if heterogeneous?
(index-path len k)
(vec-entry-path)))
call-ids)]
(cond
(when-let [tc track-count]
(< tc @so-far))
(reduced (binding [*should-track* false]
(assoc e k v')))
(identical? v v') e
:else
(binding [*should-track* false]
(assoc e k v')))))
v
(map-indexed vector v)))
(set? v)
(do
(when (empty? v)
(add-infer-results!
results-atom
(infer-results paths
(-class :set [-nothing]))))
(binding [*should-track* false]
(into (empty v)
(map (fn [e]
(binding [*should-track* true]
(track config results-atom e (extend-paths paths (set-entry))
call-ids))))
v)))
#?(:clj (instance? PersistentMapProxy v))
#?(:clj
(let [^PersistentMapProxy v v
ks (.current-ks v)
_ (assert (set? ks))
all-kws? (.current-all-kws? v)
_ (assert (boolean? all-kws?))
kw-entries-types (.current-kw-entries-types v)
_ (assert (map? kw-entries-types))
track-info {:all-kws? all-kws?
:ks ks
:kw-entries-types kw-entries-types}]
(PersistentMapProxy. (.m v)
(reduce (fn [m k]
(update-in m [k track-info]
#(merge-with (fnil into #{})
%
{:paths paths
:call-ids call-ids})))
(.k-to-track-info v)
ks)
(.config v)
(.results-atom v)
(.current-kw-entries-types v)
(.current-ks v)
(.current-all-kws? v))))
#?(:clj
(or (instance? clojure.lang.PersistentHashMap v)
(instance? clojure.lang.PersistentArrayMap v)
(instance? clojure.lang.PersistentTreeMap v))
:cljs (map? v))
(let [ks (set (keys v))]
(when (empty? v)
(add-infer-results!
results-atom
(infer-results paths (make-HMap {} {}))))
(cond
(every? keyword? ks)
(let [{with-kw-val true
no-kw-val false}
(binding [*should-track* false]
(group-by (fn [e]
(keyword? (val e)))
v))
kw-entries-types
(into {}
(map (fn [[k v]]
{:pre [(keyword? v)]}
[k (-val v)]))
with-kw-val)
are none , just pick one of the kw - entries - types
_ (when (and (empty? no-kw-val)
(seq kw-entries-types))
(let [k (key (first kw-entries-types))]
(track config results-atom (get v k)
(binding [*should-track* false]
(extend-paths paths (key-path kw-entries-types ks k)))
call-ids)))
v #?(:cljs v
:clj (if (= track-strategy :lazy)
(PersistentMapProxy. v
(zipmap (apply disj ks with-kw-val)
(repeat {{:all-kws? true
:kw-entries-types kw-entries-types
:ks ks}
{:paths paths
:call-ids call-ids}}))
config
results-atom
kw-entries-types
ks
true)
v))]
(reduce
(fn [m [k orig-v]]
(let [v (track config results-atom orig-v
(binding [*should-track* false]
(extend-paths paths (key-path kw-entries-types ks k)))
call-ids)]
(cond
(identical? v orig-v) m
:else
(binding [*should-track* false]
(assoc m k v)))))
v
no-kw-val))
:else
(let [so-far (atom 0)
v #?(:cljs v
:clj (if (= track-strategy :lazy)
(PersistentMapProxy. v
(zipmap ks (repeat {{:all-kws? false
:kw-entries-types {}
:ks ks}
{:paths paths
:call-ids call-ids}}))
config
results-atom
{}
ks
false)
v))]
(reduce
(fn [m k]
(swap! so-far inc)
(let [orig-v (get m k)
[new-k v]
(cond
We do n't want to pollute the HMap - req - ks with
:else
[(track config results-atom k
(binding [*should-track* false]
(extend-paths paths (map-keys-path)))
call-ids)
(track config results-atom orig-v
(binding [*should-track* false]
(extend-paths paths (map-vals-path)))
call-ids)])]
(cond
(when-let [tc *track-count*]
(< tc @so-far))
(reduced
(binding [*should-track* false]
(-> m
(dissoc k)
(assoc new-k v))))
(identical? v orig-v) m
(not (identical? new-k k))
(binding [*should-track* false]
(-> m
(dissoc k)
(assoc new-k v)))
:else
(binding [*should-track* false]
(assoc m new-k v)))))
v
(keys v)))))
(instance? #?(:clj clojure.lang.IAtom :cljs Atom) v)
(let [old-val (-> v meta :clojure.core.typed/old-val)
new-paths (binding [*should-track* false]
(extend-paths paths (atom-contents)))
should-track? (binding [*should-track* false]
(not= @v old-val))
_ (when should-track?
(track config results-atom @v new-paths
call-ids))
#_#_
_ (binding [*should-track* false]
(add-watch
v
new-paths
(fn [_ _ _ new]
(binding [*should-track* true]
(track config results-atom new new-paths
call-ids)))))]
v)
:else (do
(add-infer-results! results-atom (infer-results paths (-class (classify v) [])))
v)))))
(declare gen-track-config)
#?(:cljs
(defn track-cljs-val [v root]
(track (gen-track-config)
results-atom
v
#{[(var-path
'root
root)]}
#{})))
#?(:clj
(def prim-invoke-interfaces
(into #{}
(comp
(mapcat (fn [n]
(apply comb/cartesian-product (repeat n [\D \O \L]))))
(map (fn [ss] (apply str ss)))
(remove (fn [ss]
(every? #{\O} ss))))
(range 1 6))))
#?(:clj
(defn char->tag [c]
{:pre [(char? c)]
:post [(symbol? %)]}
(case c
\L 'long
\D 'double
\O 'java.lang.Object)))
#?(:clj
(defn tag->char [t]
{:pre [((some-fn nil? symbol?) t)]
:post [(char? %)]}
(case t
long \L
double \D
\O)))
#?(:clj
(defn gen-prim-invokes [f-this prims]
(mapcat
(fn [p]
{:pre [(string? p)]}
(let [args (into []
(map-indexed
(fn [n c]
(-> (symbol (str "arg" n))
#_(vary-meta
assoc :tag (char->tag c)))))
(butlast p))
interface (symbol (str "clojure.lang.IFn$" p))
rettag (char->tag (nth p (dec (count p))))
this (gensym 'this)
argvec (-> (vec (cons this args))
#_(vary-meta assoc :tag rettag))]
#_
(binding [*print-meta* true]
(prn "argvec" argvec))
[interface
(list 'invokePrim argvec
`(~(f-this this) ~@(map #(with-meta % nil) args)))]))
prims)))
#?(:clj
(defn gen-nonvariadic-invokes [f-this]
(for [arity (range 0 20),
:let [args (repeatedly arity gensym)
this (gensym 'this)]]
`(~'invoke [~this ~@args]
(~(f-this this) ~@args)))))
#?(:clj
(defn gen-variadic-invoke [f-this]
(let [args (repeatedly 21 gensym)
this (gensym 'this)]
`(~'invoke [~this ~@args] (apply ~(f-this this) ~@args)))))
#?(:clj
(defn gen-apply-to [f-this]
(let [this (gensym 'this)]
`(~'applyTo [~this args#] (apply ~(f-this this) args#)))))
#?(:clj
(defn extend-IFn [f-this prims]
`(clojure.lang.IFn
~@(gen-nonvariadic-invokes f-this)
~(gen-variadic-invoke f-this)
~(gen-apply-to f-this)
~@(gen-prim-invokes f-this prims))))
#?(:clj
(defmacro deftypefn
"Like deftype, but accepts a function f before any specs that is
used to implement clojure.lang.IFn. f should accept at least one
argument, 'this'."
[name prims & opts+specs]
(let [field 'f
f-this (fn [this]
(list '. this (symbol (str "-" field))))
source `(deftype ~name [~field]
~@(extend-IFn f-this prims)
~@opts+specs)]
#_
(binding [*print-meta* true]
(pprint source))
source)))
#?(:clj
(def this-ns *ns*))
#?(:clj
(defn arglist-prim-string [args]
{:pre [(vector? args)]
:post [((some-fn nil? string?) %)]}
(let [s (apply str
(concat
(->> args
(map (comp :tag meta))
(map tag->char))
[(tag->char (-> args meta :tag))]))]
(when (prim-invoke-interfaces s)
s))))
#?(:clj
(defn wrap-prim [vr f]
{:pre [(var? vr)]}
(let [prim-arglists
(sort
(->> (-> vr meta :arglists)
(map arglist-prim-string)
(filter string?)))]
(cond
(seq prim-arglists)
(let [type-name (symbol
(str "PrimFn"
(apply str
(interpose
"_"
prim-arglists))))
cls (or #_(ns-resolve this-ns type-name)
(binding [*ns* this-ns]
(eval
`(deftypefn ~type-name ~prim-arglists))))
_ (assert (class? cls))
ctor (ns-resolve this-ns
(symbol
(str "->" type-name)))
_ (assert (var? ctor))]
(ctor f))
:else f))))
(defn gen-track-config []
(merge
{:track-strategy :lazy
:track-depth *track-depth*
:track-count *track-count*
:root-results *root-results*}
vs/*instrument-infer-config*))
track - var : ( IFn [ Var - > Value ] [ ( Atom Result ) ] )
#?(:clj
(defn track-var'
([vr] (track-var' (gen-track-config) results-atom vr *ns*))
([config vr] (track-var' config results-atom vr *ns*))
([config results-atom vr ns]
{:pre [(var? vr)
(instance? #?(:clj clojure.lang.IAtom :cljs Atom) results-atom)]}
(wrap-prim
vr
(track config
results-atom @vr #{[(var-path
(ns-name (the-ns ns))
(impl/var->symbol vr))]}
#{})))))
#?(:clj
(defmacro track-var [v]
`(track-var' (var ~v))))
track - def - init : Value
#?(:clj
(defn track-def-init [config vsym ns val]
{:pre [(symbol? vsym)
(namespace vsym)]}
(let [v (ns-resolve ns vsym)]
(wrap-prim
v
(track config
results-atom val
#{[{:op :var
:ns (ns-name ns)
:name vsym}]}
#{})))))
#?(:clj
(defn track-local-fn [config track-kind line column end-line end-column ns val]
{:pre [(#{:local-fn :loop-var} track-kind)]}
#_
(prn "track-local-fn"
(symbol
(str (ns-name ns)
"|"
line
"|"
column
"|"
end-line
"|"
end-column)))
(track config
results-atom val
#{[{:op :var
:typed.clj.annotator.track/track-kind track-kind
:line line
:column column
:end-line end-line
:end-column end-column
:ns (ns-name ns)
:name (with-meta
(symbol
(str (ns-name ns)
"|"
line
"|"
column
"|"
end-line
"|"
end-column))
{:typed.clj.annotator.track/track-kind track-kind
:line line
:column column
:end-line end-line
:end-column end-column
:ns (ns-name ns)})}]}
#{})))
|
80e151f665e9dffa2abbfe9b7cb130090dfd3431dac553acc6fdc3f6d3d01db4 | schemeorg-community/index.scheme.org | srfi.156.scm | (((name . "is")
(signature
syntax-rules
(_)
((is val1 predicate-or-comparator val2 ...) (or boolean? procedure?)))
(subsigs (val (pattern _ obj)) (predicate-or-comparator (value procedure?))))
((name . "isnt")
(signature
syntax-rules
(_)
((isnt val1 predicate-or-comparator val2 ...) (or boolean? procedure?)))
(subsigs
(val (pattern _ obj))
(predicate-or-comparator (value procedure?)))))
| null | https://raw.githubusercontent.com/schemeorg-community/index.scheme.org/32e1afcfe423a158ac8ce014f5c0b8399d12a1ea/types/srfi.156.scm | scheme | (((name . "is")
(signature
syntax-rules
(_)
((is val1 predicate-or-comparator val2 ...) (or boolean? procedure?)))
(subsigs (val (pattern _ obj)) (predicate-or-comparator (value procedure?))))
((name . "isnt")
(signature
syntax-rules
(_)
((isnt val1 predicate-or-comparator val2 ...) (or boolean? procedure?)))
(subsigs
(val (pattern _ obj))
(predicate-or-comparator (value procedure?)))))
| |
62ecc71ec077147d5174c6a4c66d6f965f073d095a6de44343db4b15eda3a955 | magnars/realize | core_test.clj | (ns realize.core-test
(:require [clojure.data.generators :as gen]
[clojure.test.generative :as test :refer [defspec]]
[clojure.test.generative.runner :as runner]
[clojure.test :refer [deftest testing is]]
[datomic.api :as d]
[realize.core :as sut]))
(defn is-lazy? [form]
(instance? clojure.lang.LazySeq form))
(defn collection-including-lazy-seqs []
(let [[coll args] (gen/rand-nth (conj gen/collections
[gen/list [gen/scalars]]))]
(apply coll (map gen/rand-nth args))))
(defspec all-seqs-are-realized-but-the-same
sut/realize
[^{:tag (realize.core-test/collection-including-lazy-seqs)} form]
(assert (= form %))
(clojure.walk/prewalk (fn [f]
(when (and (is-lazy? f)
(not (realized? f)))
(throw (AssertionError. "lazy but not realized!")))
f)
%))
(def tests (runner/get-tests #'all-seqs-are-realized-but-the-same))
(def e (Exception. "Boom!"))
(def e2 (Exception. "Bang!"))
(def datomic-entity (d/entity (d/db (do (d/create-database "datomic:mem-db")
(d/connect "datomic:mem-db"))) 1))
(deftest realize
(testing "no errors"
(is (= '(1 2 3) (sut/realize (map identity [1 2 3]))))
(is (= 0 (:failures (runner/run-suite {:nthreads 2 :msec 1000} tests)))))
(testing "at root"
(is (= {:realize.core/exception e}
(sut/realize (map (fn [_] (throw e)) [1 2 3])))))
(testing "nested in map"
(is (= {:foo {:realize.core/exception e}}
(sut/realize {:foo (map (fn [_] (throw e)) [1 2 3])}))))
(testing "nested in vec"
(is (= [:before {:realize.core/exception e} :after]
(sut/realize [:before (map (fn [_] (throw e)) [1 2 3]) :after]))))
(testing "deeply nested"
(is (= {:foo {:bar [:baz {:boo {:realize.core/exception e}}]}}
(sut/realize {:foo {:bar [:baz {:boo (map (fn [_] (throw e)) [1 2 3])}]}}))))
(testing "don't walk into collections that cannot be reconstructed via empty"
(is (= datomic-entity (sut/realize datomic-entity))))
(testing "infinite lazy seq"
(is (= "Sequence of > 500 items found, aborting to guard against infinite seqs!"
(.getMessage (:realize.core/exception (sut/realize (range) 500)))))))
(comment
(list? (map inc (range))))
(deftest find-exceptions
(testing "no errors"
(is (empty? (sut/find-exceptions '(1 2 3)))))
(testing "at root"
(is (= [{:exception e :path []}]
(sut/find-exceptions {:realize.core/exception e}))))
(testing "nested in map value"
(is (= [{:exception e :path [:foo]}]
(sut/find-exceptions {:foo {:realize.core/exception e}}))))
(testing "nested in map key"
(is (= [{:exception e :path []}]
(sut/find-exceptions {{:realize.core/exception e} :bar}))))
(testing "nested in vec"
(is (= [{:exception e :path [1]}]
(sut/find-exceptions [:before {:realize.core/exception e} :after]))))
(testing "deeply nested"
(is (= [{:exception e :path [:foo :bar 1 :boo]}]
(sut/find-exceptions {:foo {:bar [:baz {:boo {:realize.core/exception e}}]}}))))
(testing "multiple exceptions"
(is (= [{:exception e :path [:foo :bar 1 :boo]}
{:exception e2 :path [:foo :far 4]}]
(sut/find-exceptions {:foo {:bar [:baz {:boo {:realize.core/exception e}}]
:far (list 0 1 2 3 {:realize.core/exception e2})}})))))
| null | https://raw.githubusercontent.com/magnars/realize/801a9d5c24e116ee7a703937a9ad4672f27a5f33/test/realize/core_test.clj | clojure | (ns realize.core-test
(:require [clojure.data.generators :as gen]
[clojure.test.generative :as test :refer [defspec]]
[clojure.test.generative.runner :as runner]
[clojure.test :refer [deftest testing is]]
[datomic.api :as d]
[realize.core :as sut]))
(defn is-lazy? [form]
(instance? clojure.lang.LazySeq form))
(defn collection-including-lazy-seqs []
(let [[coll args] (gen/rand-nth (conj gen/collections
[gen/list [gen/scalars]]))]
(apply coll (map gen/rand-nth args))))
(defspec all-seqs-are-realized-but-the-same
sut/realize
[^{:tag (realize.core-test/collection-including-lazy-seqs)} form]
(assert (= form %))
(clojure.walk/prewalk (fn [f]
(when (and (is-lazy? f)
(not (realized? f)))
(throw (AssertionError. "lazy but not realized!")))
f)
%))
(def tests (runner/get-tests #'all-seqs-are-realized-but-the-same))
(def e (Exception. "Boom!"))
(def e2 (Exception. "Bang!"))
(def datomic-entity (d/entity (d/db (do (d/create-database "datomic:mem-db")
(d/connect "datomic:mem-db"))) 1))
(deftest realize
(testing "no errors"
(is (= '(1 2 3) (sut/realize (map identity [1 2 3]))))
(is (= 0 (:failures (runner/run-suite {:nthreads 2 :msec 1000} tests)))))
(testing "at root"
(is (= {:realize.core/exception e}
(sut/realize (map (fn [_] (throw e)) [1 2 3])))))
(testing "nested in map"
(is (= {:foo {:realize.core/exception e}}
(sut/realize {:foo (map (fn [_] (throw e)) [1 2 3])}))))
(testing "nested in vec"
(is (= [:before {:realize.core/exception e} :after]
(sut/realize [:before (map (fn [_] (throw e)) [1 2 3]) :after]))))
(testing "deeply nested"
(is (= {:foo {:bar [:baz {:boo {:realize.core/exception e}}]}}
(sut/realize {:foo {:bar [:baz {:boo (map (fn [_] (throw e)) [1 2 3])}]}}))))
(testing "don't walk into collections that cannot be reconstructed via empty"
(is (= datomic-entity (sut/realize datomic-entity))))
(testing "infinite lazy seq"
(is (= "Sequence of > 500 items found, aborting to guard against infinite seqs!"
(.getMessage (:realize.core/exception (sut/realize (range) 500)))))))
(comment
(list? (map inc (range))))
(deftest find-exceptions
(testing "no errors"
(is (empty? (sut/find-exceptions '(1 2 3)))))
(testing "at root"
(is (= [{:exception e :path []}]
(sut/find-exceptions {:realize.core/exception e}))))
(testing "nested in map value"
(is (= [{:exception e :path [:foo]}]
(sut/find-exceptions {:foo {:realize.core/exception e}}))))
(testing "nested in map key"
(is (= [{:exception e :path []}]
(sut/find-exceptions {{:realize.core/exception e} :bar}))))
(testing "nested in vec"
(is (= [{:exception e :path [1]}]
(sut/find-exceptions [:before {:realize.core/exception e} :after]))))
(testing "deeply nested"
(is (= [{:exception e :path [:foo :bar 1 :boo]}]
(sut/find-exceptions {:foo {:bar [:baz {:boo {:realize.core/exception e}}]}}))))
(testing "multiple exceptions"
(is (= [{:exception e :path [:foo :bar 1 :boo]}
{:exception e2 :path [:foo :far 4]}]
(sut/find-exceptions {:foo {:bar [:baz {:boo {:realize.core/exception e}}]
:far (list 0 1 2 3 {:realize.core/exception e2})}})))))
| |
4ffe9fbadb2b0ee31675266838fceebbe1d476f86347c732e3d9542406ddbc5a | Nutr1t07/wl-bot | Misc.hs | {-# LANGUAGE OverloadedStrings #-}
module Utils.Misc where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Search as BL
import qualified Data.List as List
import Data.String ( IsString )
import qualified Data.Text as Text
checkEmpty :: (Monoid a, Eq a) => a -> Maybe a
checkEmpty txt = if txt == mempty then Nothing else Just txt
searchBetweenBL
:: BS.ByteString -> BS.ByteString -> BL.ByteString -> Maybe BL.ByteString
searchBetweenBL left right content =
let fstround = snd $ BL.breakAfter left content
in checkEmpty $ fst (BL.breakOn right fstround)
searchBetweenText :: Text.Text -> Text.Text -> Text.Text -> Maybe Text.Text
searchBetweenText left right content =
let fstround = snd $ Text.breakOn left content
in checkEmpty $ fst (Text.breakOn right fstround)
searchAllBetweenBL
:: BS.ByteString -> BS.ByteString -> BL.ByteString -> [BL.ByteString]
searchAllBetweenBL _ _ "" = []
searchAllBetweenBL left right content =
let matchLeft = snd $ BL.breakAfter left content
matchRight = BL.breakOn right matchLeft
in if fst matchRight /= ""
then fst matchRight : searchAllBetweenBL left right (snd matchRight)
else searchAllBetweenBL left right (snd matchRight)
unlines :: (Monoid a, IsString a) => [a] -> a
unlines = mconcat . List.intersperse "\n"
maybe' :: Maybe a -> c -> (a -> c) -> c
maybe' = flip $ flip <$> maybe
either' :: Either a b -> (a -> c) -> (b -> c) -> c
either' = flip $ flip <$> either
| null | https://raw.githubusercontent.com/Nutr1t07/wl-bot/4d9db61613a3819d0addbc7e04d77bb2f57892f0/src/Utils/Misc.hs | haskell | # LANGUAGE OverloadedStrings # | module Utils.Misc where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Search as BL
import qualified Data.List as List
import Data.String ( IsString )
import qualified Data.Text as Text
checkEmpty :: (Monoid a, Eq a) => a -> Maybe a
checkEmpty txt = if txt == mempty then Nothing else Just txt
searchBetweenBL
:: BS.ByteString -> BS.ByteString -> BL.ByteString -> Maybe BL.ByteString
searchBetweenBL left right content =
let fstround = snd $ BL.breakAfter left content
in checkEmpty $ fst (BL.breakOn right fstround)
searchBetweenText :: Text.Text -> Text.Text -> Text.Text -> Maybe Text.Text
searchBetweenText left right content =
let fstround = snd $ Text.breakOn left content
in checkEmpty $ fst (Text.breakOn right fstround)
searchAllBetweenBL
:: BS.ByteString -> BS.ByteString -> BL.ByteString -> [BL.ByteString]
searchAllBetweenBL _ _ "" = []
searchAllBetweenBL left right content =
let matchLeft = snd $ BL.breakAfter left content
matchRight = BL.breakOn right matchLeft
in if fst matchRight /= ""
then fst matchRight : searchAllBetweenBL left right (snd matchRight)
else searchAllBetweenBL left right (snd matchRight)
unlines :: (Monoid a, IsString a) => [a] -> a
unlines = mconcat . List.intersperse "\n"
maybe' :: Maybe a -> c -> (a -> c) -> c
maybe' = flip $ flip <$> maybe
either' :: Either a b -> (a -> c) -> (b -> c) -> c
either' = flip $ flip <$> either
|
77122baed3998d1799fc9d642aa3b2166ba8c151855f1f3650aabd8cb8f12474 | ghilesZ/geoml | triangle.mli | (** Triangles manipulation *)
(** the type of triangles *)
type t = private Point.t * Point.t * Point.t
val tri_map : (Point.t -> Point.t) -> t -> t
(** tri_map f t applies function f in turn to all the points of t and stores
the results in a new triangle that is returned. *)
val tri_exists : (Point.t -> bool) -> t -> bool
(** Higher order utilities over the triangles *)
val tri_find : (Point.t -> bool) -> t -> Point.t
val tri_forall : (Point.t -> bool) -> t -> bool
val tri_iter : (Point.t -> unit) -> t -> unit
val transform : t -> Affine.t -> t
(** affine transformation of a triangle *)
val make : Point.t -> Point.t -> Point.t -> t
* builds a triangle from three different points . raises Invalid_arg if at
least two points are equal
least two points are equal*)
val extr1 : t -> Point.t
* returns a vertice of the triangle . The vertice returned is different than
the one returned by extr2 and extr3 .
the one returned by extr2 and extr3.*)
val extr2 : t -> Point.t
* returns a vertice of the triangle . The vertice returned is different than
the one returned by extr1 and extr3 .
the one returned by extr1 and extr3.*)
val extr3 : t -> Point.t
* returns a vertice of the triangle . The vertice returned is different than
the one returned by and extr2 .
the one returned by extr1 and extr2.*)
val scale_x : t -> float -> t
val scale_y : t -> float -> t
val translate : float -> float -> t -> t
val reflection : Point.t -> t -> t
val contains : t -> Point.t -> bool
(** tests if a point is in a triangle *)
val rotate : t -> Point.t -> float -> t
(** rotation, angle in radian *)
val rotate_angle : t -> Point.t -> float -> t
(** rotation, angle in degree *)
val area : t -> float
val perimeter : t -> float
val proj_x : t -> float * float
val proj_y : t -> float * float
val segments : t -> Segment.t * Segment.t * Segment.t
val intersects : t -> t -> bool
val intersect_line : t -> Line.t -> Point.t list
val is_isoscele : t -> bool
val is_equilateral : t -> bool
val is_right : t -> bool
val points : t -> Point.t * Point.t * Point.t
val of_points : Point.t * Point.t * Point.t -> t
val angles : t -> float * float * float
val centroid : t -> Point.t
(** returns the gravity center of a triangle *)
val random_point : Random.State.t -> t -> Point.t
(** returns a randomly and uniformly chosen point of the triangle *)
val print : Format.formatter -> t -> unit
(** printer *)
| null | https://raw.githubusercontent.com/ghilesZ/geoml/19af3bcc3e9e8c865ad5a3ea73e3736c0c7b7e7b/src/triangle.mli | ocaml | * Triangles manipulation
* the type of triangles
* tri_map f t applies function f in turn to all the points of t and stores
the results in a new triangle that is returned.
* Higher order utilities over the triangles
* affine transformation of a triangle
* tests if a point is in a triangle
* rotation, angle in radian
* rotation, angle in degree
* returns the gravity center of a triangle
* returns a randomly and uniformly chosen point of the triangle
* printer |
type t = private Point.t * Point.t * Point.t
val tri_map : (Point.t -> Point.t) -> t -> t
val tri_exists : (Point.t -> bool) -> t -> bool
val tri_find : (Point.t -> bool) -> t -> Point.t
val tri_forall : (Point.t -> bool) -> t -> bool
val tri_iter : (Point.t -> unit) -> t -> unit
val transform : t -> Affine.t -> t
val make : Point.t -> Point.t -> Point.t -> t
* builds a triangle from three different points . raises Invalid_arg if at
least two points are equal
least two points are equal*)
val extr1 : t -> Point.t
* returns a vertice of the triangle . The vertice returned is different than
the one returned by extr2 and extr3 .
the one returned by extr2 and extr3.*)
val extr2 : t -> Point.t
* returns a vertice of the triangle . The vertice returned is different than
the one returned by extr1 and extr3 .
the one returned by extr1 and extr3.*)
val extr3 : t -> Point.t
* returns a vertice of the triangle . The vertice returned is different than
the one returned by and extr2 .
the one returned by extr1 and extr2.*)
val scale_x : t -> float -> t
val scale_y : t -> float -> t
val translate : float -> float -> t -> t
val reflection : Point.t -> t -> t
val contains : t -> Point.t -> bool
val rotate : t -> Point.t -> float -> t
val rotate_angle : t -> Point.t -> float -> t
val area : t -> float
val perimeter : t -> float
val proj_x : t -> float * float
val proj_y : t -> float * float
val segments : t -> Segment.t * Segment.t * Segment.t
val intersects : t -> t -> bool
val intersect_line : t -> Line.t -> Point.t list
val is_isoscele : t -> bool
val is_equilateral : t -> bool
val is_right : t -> bool
val points : t -> Point.t * Point.t * Point.t
val of_points : Point.t * Point.t * Point.t -> t
val angles : t -> float * float * float
val centroid : t -> Point.t
val random_point : Random.State.t -> t -> Point.t
val print : Format.formatter -> t -> unit
|
c9cc6dd64651cc3428e29b3537913ee558d0c24f9c16b020088c15730fede862 | martintrojer/datalog | magic.clj | Copyright ( c ) . All rights reserved . The use and
;; distribution terms for this software are covered by the Eclipse Public
;; License 1.0 (-1.0.php) which can
;; be found in the file epl-v10.html at the root of this distribution. By
;; using this software in any fashion, you are agreeing to be bound by the
;; terms of this license. You must not remove this notice, or any other,
;; from this software.
;;
;; magic.clj
;;
A Clojure implementation of Datalog -- Magic Sets
;;
straszheimjeffrey ( gmail )
Created 18 Feburary 2009
Converted to Clojure1.4 by 2012 .
(ns datalog.magic
(:use [datalog.util]
[datalog.literals]
[datalog.rules])
(:use [clojure.set :only (union intersection difference)]))
;; =============================
;; Adornment
(defn adorn-query
"Adorn a query"
[q]
(adorned-literal q (get-self-bound-cs q)))
(defn adorn-rules-set
"Adorns the given rules-set for the given query. (rs) is a
rules-set, (q) is an adorned query."
[rs q]
(let [i-preds (all-predicates rs)
p-map (predicate-map rs)]
(loop [nrs empty-rules-set ; The rules set being built
needed #{(literal-predicate q)}]
(if (empty? needed)
nrs
(let [pred (first needed)
remaining (disj needed pred)
base-pred (get-base-predicate pred)
bindings (get-adorned-bindings pred)
new-rules (p-map base-pred)
new-adorned-rules (map (partial compute-sip bindings i-preds)
new-rules)
new-nrs (reduce conj nrs new-adorned-rules)
current-preds (all-predicates new-nrs)
not-needed? (fn [pred]
(or (current-preds pred)
(-> pred get-base-predicate i-preds not)))
add-pred (fn [np pred]
(if (not-needed? pred) np (conj np pred)))
add-preds (fn [np rule]
(reduce add-pred np (map literal-predicate (:body rule))))
new-needed (reduce add-preds remaining new-adorned-rules)]
(recur new-nrs new-needed))))))
;; =============================
Magic !
(defn seed-relation
"Given a magic form of a query, give back the literal form of its seed
relation"
[q]
(let [pred (-> q literal-predicate get-base-predicate)
bnds (-> q literal-predicate get-adorned-bindings)]
(with-meta (assoc q :predicate [pred :magic-seed bnds]) {})))
(defn seed-rule
"Given an adorned query, give back its seed rule"
[q]
(let [mq (build-seed-bindings (magic-literal q))
sr (seed-relation mq)]
(build-rule mq [sr])))
(defn build-partial-tuple
"Given a query and a set of bindings, build a partial tuple needed
to extract the relation from the database."
[q bindings]
(into {} (remove nil? (map (fn [[k v :as pair]]
(if (is-var? v)
nil
(if (is-query-var? v)
[k (bindings v)]
pair)))
(:term-bindings q)))))
(defn seed-predicate-for-insertion
"Given a query, return the predicate to use for database insertion."
[q]
(let [seed (-> q seed-rule :body first)
columns (-> seed :term-bindings keys)
new-term-bindings (-> q :term-bindings (select-keys columns))]
(assoc seed :term-bindings new-term-bindings)))
(defn magic-transform
"Return a magic transformation of an adorned rules-set (rs). The
(i-preds) are the predicates of the intension database. These
default to the predicates within the rules-set."
([rs]
(magic-transform rs (all-predicates rs)))
([rs i-preds]
(let [not-duplicate? (fn [l mh bd]
(or (not (empty? bd))
(not (= (magic-literal l)
mh))))
xr (fn [rs rule]
(let [head (:head rule)
body (:body rule)
mh (magic-literal head)
answer-rule (build-rule head
(concat [mh] body))
step (fn [[rs bd] l]
(if (and (i-preds (literal-predicate l))
(not-duplicate? l mh bd))
(let [nr (build-rule (magic-literal l)
(concat [mh] bd))]
[(conj rs nr) (conj bd l)])
[rs (conj bd l)]))
[nrs _] (reduce step [rs []] body)]
(conj nrs answer-rule)))]
(reduce xr empty-rules-set rs))))
| null | https://raw.githubusercontent.com/martintrojer/datalog/bd64e76ceb02c0494a654010c0221ada5fc2a002/src/datalog/magic.clj | clojure | distribution terms for this software are covered by the Eclipse Public
License 1.0 (-1.0.php) which can
be found in the file epl-v10.html at the root of this distribution. By
using this software in any fashion, you are agreeing to be bound by the
terms of this license. You must not remove this notice, or any other,
from this software.
magic.clj
=============================
Adornment
The rules set being built
============================= | Copyright ( c ) . All rights reserved . The use and
A Clojure implementation of Datalog -- Magic Sets
straszheimjeffrey ( gmail )
Created 18 Feburary 2009
Converted to Clojure1.4 by 2012 .
(ns datalog.magic
(:use [datalog.util]
[datalog.literals]
[datalog.rules])
(:use [clojure.set :only (union intersection difference)]))
(defn adorn-query
"Adorn a query"
[q]
(adorned-literal q (get-self-bound-cs q)))
(defn adorn-rules-set
"Adorns the given rules-set for the given query. (rs) is a
rules-set, (q) is an adorned query."
[rs q]
(let [i-preds (all-predicates rs)
p-map (predicate-map rs)]
needed #{(literal-predicate q)}]
(if (empty? needed)
nrs
(let [pred (first needed)
remaining (disj needed pred)
base-pred (get-base-predicate pred)
bindings (get-adorned-bindings pred)
new-rules (p-map base-pred)
new-adorned-rules (map (partial compute-sip bindings i-preds)
new-rules)
new-nrs (reduce conj nrs new-adorned-rules)
current-preds (all-predicates new-nrs)
not-needed? (fn [pred]
(or (current-preds pred)
(-> pred get-base-predicate i-preds not)))
add-pred (fn [np pred]
(if (not-needed? pred) np (conj np pred)))
add-preds (fn [np rule]
(reduce add-pred np (map literal-predicate (:body rule))))
new-needed (reduce add-preds remaining new-adorned-rules)]
(recur new-nrs new-needed))))))
Magic !
(defn seed-relation
"Given a magic form of a query, give back the literal form of its seed
relation"
[q]
(let [pred (-> q literal-predicate get-base-predicate)
bnds (-> q literal-predicate get-adorned-bindings)]
(with-meta (assoc q :predicate [pred :magic-seed bnds]) {})))
(defn seed-rule
"Given an adorned query, give back its seed rule"
[q]
(let [mq (build-seed-bindings (magic-literal q))
sr (seed-relation mq)]
(build-rule mq [sr])))
(defn build-partial-tuple
"Given a query and a set of bindings, build a partial tuple needed
to extract the relation from the database."
[q bindings]
(into {} (remove nil? (map (fn [[k v :as pair]]
(if (is-var? v)
nil
(if (is-query-var? v)
[k (bindings v)]
pair)))
(:term-bindings q)))))
(defn seed-predicate-for-insertion
"Given a query, return the predicate to use for database insertion."
[q]
(let [seed (-> q seed-rule :body first)
columns (-> seed :term-bindings keys)
new-term-bindings (-> q :term-bindings (select-keys columns))]
(assoc seed :term-bindings new-term-bindings)))
(defn magic-transform
"Return a magic transformation of an adorned rules-set (rs). The
(i-preds) are the predicates of the intension database. These
default to the predicates within the rules-set."
([rs]
(magic-transform rs (all-predicates rs)))
([rs i-preds]
(let [not-duplicate? (fn [l mh bd]
(or (not (empty? bd))
(not (= (magic-literal l)
mh))))
xr (fn [rs rule]
(let [head (:head rule)
body (:body rule)
mh (magic-literal head)
answer-rule (build-rule head
(concat [mh] body))
step (fn [[rs bd] l]
(if (and (i-preds (literal-predicate l))
(not-duplicate? l mh bd))
(let [nr (build-rule (magic-literal l)
(concat [mh] bd))]
[(conj rs nr) (conj bd l)])
[rs (conj bd l)]))
[nrs _] (reduce step [rs []] body)]
(conj nrs answer-rule)))]
(reduce xr empty-rules-set rs))))
|
e2eb3b74c3c24cd554c0cfabfff96b85fafd66ae26e1d990585d18bd927ac25b | patricoferris/ocaml-multicore-monorepo | httpaf_lwt_unix.mli | ----------------------------------------------------------------------------
Copyright ( c ) 2018 Inhabited Type LLC .
Copyright ( c ) 2018 ( c ) 2019
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
3 . Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ` ` AS IS '' AND ANY EXPRESS
OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT ,
STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
----------------------------------------------------------------------------
Copyright (c) 2018 Inhabited Type LLC.
Copyright (c) 2018 Anton Bachin
Copyright (c) 2019 António Nuno Monteiro
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------*)
open Httpaf
(* The function that results from [create_connection_handler] should be passed
to [Lwt_io.establish_server_with_client_socket]. For an example, see
[examples/lwt_echo_server.ml]. *)
module Server : sig
include Httpaf_lwt.Server
with type socket = Lwt_unix.file_descr
and type addr := Unix.sockaddr
module TLS : sig
include Httpaf_lwt.Server
with type socket = Gluten_lwt_unix.Server.TLS.socket
and type addr := Unix.sockaddr
val create_connection_handler_with_default
: certfile : string
-> keyfile : string
-> ?config : Config.t
-> request_handler : (Unix.sockaddr -> Httpaf.Reqd.t Gluten.reqd -> unit)
-> error_handler : (Unix.sockaddr -> Server_connection.error_handler)
-> Unix.sockaddr
-> Lwt_unix.file_descr
-> unit Lwt.t
end
module SSL : sig
include Httpaf_lwt.Server
with type socket = Gluten_lwt_unix.Server.SSL.socket
and type addr := Unix.sockaddr
val create_connection_handler_with_default
: certfile : string
-> keyfile : string
-> ?config : Config.t
-> request_handler : (Unix.sockaddr -> Httpaf.Reqd.t Gluten.reqd -> unit)
-> error_handler : (Unix.sockaddr -> Server_connection.error_handler)
-> Unix.sockaddr
-> Lwt_unix.file_descr
-> unit Lwt.t
end
end
(* For an example, see [examples/lwt_get.ml]. *)
module Client : sig
include Httpaf_lwt.Client
with type socket = Lwt_unix.file_descr
and type runtime = Gluten_lwt_unix.Client.t
module TLS : sig
include Httpaf_lwt.Client
with type socket = Gluten_lwt_unix.Client.TLS.socket
and type runtime = Gluten_lwt_unix.Client.TLS.t
val create_connection_with_default
: ?config : Config.t
-> Lwt_unix.file_descr
-> t Lwt.t
end
module SSL : sig
include Httpaf_lwt.Client
with type socket = Gluten_lwt_unix.Client.SSL.socket
and type runtime = Gluten_lwt_unix.Client.SSL.t
val create_connection_with_default
: ?config : Config.t
-> Lwt_unix.file_descr
-> t Lwt.t
end
end
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/dream/src/vendor/httpaf/lwt-unix/httpaf_lwt_unix.mli | ocaml | The function that results from [create_connection_handler] should be passed
to [Lwt_io.establish_server_with_client_socket]. For an example, see
[examples/lwt_echo_server.ml].
For an example, see [examples/lwt_get.ml]. | ----------------------------------------------------------------------------
Copyright ( c ) 2018 Inhabited Type LLC .
Copyright ( c ) 2018 ( c ) 2019
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
3 . Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ` ` AS IS '' AND ANY EXPRESS
OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT ,
STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE .
----------------------------------------------------------------------------
Copyright (c) 2018 Inhabited Type LLC.
Copyright (c) 2018 Anton Bachin
Copyright (c) 2019 António Nuno Monteiro
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------*)
open Httpaf
module Server : sig
include Httpaf_lwt.Server
with type socket = Lwt_unix.file_descr
and type addr := Unix.sockaddr
module TLS : sig
include Httpaf_lwt.Server
with type socket = Gluten_lwt_unix.Server.TLS.socket
and type addr := Unix.sockaddr
val create_connection_handler_with_default
: certfile : string
-> keyfile : string
-> ?config : Config.t
-> request_handler : (Unix.sockaddr -> Httpaf.Reqd.t Gluten.reqd -> unit)
-> error_handler : (Unix.sockaddr -> Server_connection.error_handler)
-> Unix.sockaddr
-> Lwt_unix.file_descr
-> unit Lwt.t
end
module SSL : sig
include Httpaf_lwt.Server
with type socket = Gluten_lwt_unix.Server.SSL.socket
and type addr := Unix.sockaddr
val create_connection_handler_with_default
: certfile : string
-> keyfile : string
-> ?config : Config.t
-> request_handler : (Unix.sockaddr -> Httpaf.Reqd.t Gluten.reqd -> unit)
-> error_handler : (Unix.sockaddr -> Server_connection.error_handler)
-> Unix.sockaddr
-> Lwt_unix.file_descr
-> unit Lwt.t
end
end
module Client : sig
include Httpaf_lwt.Client
with type socket = Lwt_unix.file_descr
and type runtime = Gluten_lwt_unix.Client.t
module TLS : sig
include Httpaf_lwt.Client
with type socket = Gluten_lwt_unix.Client.TLS.socket
and type runtime = Gluten_lwt_unix.Client.TLS.t
val create_connection_with_default
: ?config : Config.t
-> Lwt_unix.file_descr
-> t Lwt.t
end
module SSL : sig
include Httpaf_lwt.Client
with type socket = Gluten_lwt_unix.Client.SSL.socket
and type runtime = Gluten_lwt_unix.Client.SSL.t
val create_connection_with_default
: ?config : Config.t
-> Lwt_unix.file_descr
-> t Lwt.t
end
end
|
1cf09788761250f2a4a880d7e806bcaaf6fb254e7cb3a42cf56abd657775100b | johnwickerson/memalloy | archs.ml |
MIT License
Copyright ( c ) 2017 by .
Permission is hereby granted , free of charge , to any person obtaining
a copy of this software and associated documentation files ( the
" Software " ) , to deal in the Software without restriction , including
without limitation the rights to use , copy , modify , merge , publish ,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
the following conditions :
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE .
MIT License
Copyright (c) 2017 by John Wickerson.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*)
(** The architectures supported by the tool *)
open! Format
open! General_purpose
type t =
| Basic
| C
| Basic_HW
| X86
| Power
| Arm7
| Arm8
| PTX
| OpenCL
| OCaml
(** Defining a hierarchy of architectures *)
let parent_arch = function
| Basic -> None
| C -> Some Basic
| Basic_HW -> Some Basic
| X86 | Power | Arm7 | PTX -> Some Basic_HW
| Arm8 -> Some Arm7
| OpenCL -> Some C
| OCaml -> Some Basic
(** Convert architecture to Alloy module name *)
let pp_arch fences_as_relations oc arch =
let module_name = match arch with
| Basic -> "exec"
| C -> "exec_C"
| Basic_HW -> "exec_H"
| X86 -> "exec_x86"
| Power -> "exec_ppc"
| Arm7 -> "exec_arm7"
| Arm8 -> "exec_arm8"
| PTX -> "exec_ptx"
| OpenCL -> "exec_OpenCL"
| OCaml -> "exec_OCaml"
in
if fences_as_relations then
fprintf oc "../archs/fences_as_relations/%s" module_name
else
fprintf oc "../archs/%s" module_name
* Convert architecture to signature name
let pp_Arch oc = function
| Basic -> fprintf oc "Exec"
| C -> fprintf oc "Exec_C"
| Basic_HW -> fprintf oc "Exec_H"
| X86 -> fprintf oc "Exec_X86"
| Power -> fprintf oc "Exec_PPC"
| Arm7 -> fprintf oc "Exec_Arm7"
| Arm8 -> fprintf oc "Exec_Arm8"
| PTX -> fprintf oc "Exec_PTX"
| OpenCL -> fprintf oc "Exec_OpenCL"
| OCaml -> fprintf oc "Exec_OCaml"
(** Convert Alloy signature name to architecture *)
let parse_Arch = function
| "Exec" -> Basic
| "Exec_C" -> C
| "Exec_H" -> Basic_HW
| "Exec_X86" -> X86
| "Exec_PPC" -> Power
| "Exec_Arm7" -> Arm7
| "Exec_Arm8" -> Arm8
| "Exec_PTX" -> PTX
| "Exec_OpenCL" -> OpenCL
| "Exec_OCaml" -> OCaml
| x -> failwith "Unexpected architecture %s" x
* architecture name
let parse_arch = function
| "BASIC" -> Basic
| "C" -> C
| "HW" -> Basic_HW
| "X86" -> X86
| "PPC" -> Power
| "ARM7" -> Arm7
| "ARM8" -> Arm8
| "PTX" -> PTX
| "OpenCL" -> OpenCL
| "OCaml" -> OCaml
| x -> failwith "Unexpected architecture %s" x
(** All supported architectures *)
let all = ["BASIC"; "C"; "HW"; "X86"; "PPC"; "ARM7";
"ARM8"; "PTX"; "OpenCL"; "OCaml"]
(** Pre-defined fence sets for given architecture *)
let fence_sets = function
| X86 -> ["MFENCE"]
| Power -> ["SYNC"; "LWSYNC"; "ISYNC"]
| Arm7 | Arm8 -> ["DMB"; "DMBST"; "DMBLD"; "ISB"]
| PTX -> ["MEMBAR_CTA"; "MEMBAR_GL"; "MEMBAR_SYS"]
| _ -> []
(** Pre-defined fence relations for given architecture *)
let fence_rels = function
| X86 -> ["mfence"]
| Power -> ["sync"; "lwsync"; "isync"]
| Arm7 | Arm8 -> ["dmb"; "dmbst"; "dmbld"; "isb"]
| PTX -> ["membar_cta"; "membar_gl"; "membar_sys"]
| _ -> []
(** Pre-defined event sets for given architecture *)
let arch_sets fences_as_relations arch =
let rec arch_sets = function
| Basic -> ["EV"; "W"; "R"; "F"; "NAL"; "IW"]
| C -> arch_sets Basic @ ["A"; "ACQ"; "REL"; "SC"]
| Basic_HW -> arch_sets Basic
| X86 -> arch_sets Basic_HW
| Power -> arch_sets Basic_HW
| Arm7 -> arch_sets Basic_HW
| Arm8 -> arch_sets Arm7 @ ["SCREL"; "SCACQ"]
| PTX -> arch_sets Basic_HW
| OpenCL -> arch_sets C @ ["L"; "G"; "FGA"; "REM"; "WG"; "DV"; "SY"]
| OCaml -> arch_sets Basic @ ["A"]
in
let fences = if fences_as_relations then [] else fence_sets arch in
fences @ arch_sets arch
(** Pre-defined event relations for given architecture *)
let rec arch_rels = function
| Basic -> ["ad"; "atom"; "cd"; "co"; "dd"; "rf"; "sb"; "sloc"; "sthd"] @ ["fr"]
| C -> arch_rels Basic
| Basic_HW -> arch_rels Basic
| X86 -> arch_rels Basic_HW @ ["mfence"]
| Power -> arch_rels Basic_HW @ ["sync"; "lwsync"; "isync"]
| Arm7 -> arch_rels Basic_HW @ ["dmb"; "dmbst"; "dmbld"; "isb"]
| Arm8 -> arch_rels Arm7
| PTX -> arch_rels Basic_HW @
["scta"; "sgl"; "membar_cta"; "membar_gl"; "membar_sys"]
| OpenCL -> arch_rels C @ ["swg"; "sdv"; "sbar"]
| OCaml -> arch_rels Basic
(** Sets that should be reduced *)
let arch_min_sets fences_as_relations arch =
let rec arch_min_sets = function
| Basic -> []
| C -> arch_min_sets Basic @ ["A"; "ACQ"; "REL"; "SC"]
| Basic_HW -> arch_min_sets Basic
| X86 -> arch_min_sets Basic_HW
| Power -> arch_min_sets Basic_HW
| Arm7 -> arch_min_sets Basic_HW
| Arm8 -> arch_min_sets Arm7 @ ["SCREL"; "SCACQ"]
| PTX -> arch_min_sets Basic_HW
| OpenCL -> arch_min_sets C @ ["WG"; "DV"; "SY"]
| OCaml -> arch_min_sets Basic @ ["A"]
in
let fence_min_sets = function
| Power -> ["SYNC","SYNC"]
| Arm7 | Arm8 -> ["DMBLD & DMBST", "DMBST"; "DMBLD & DMBST", "DMBLD"]
| PTX -> ["MEMBAR_GL", "MEMBAR_GL"; "MEMBAR_SYS", "MEMBAR_SYS"]
| _ -> []
in
let fences = if fences_as_relations then [] else fence_min_sets arch in
fences @ List.map (fun x -> x,x) (arch_min_sets arch)
let is_hw = function
| Basic | C | OpenCL | OCaml -> false
| Basic_HW | X86 | Power | Arm7 | Arm8 | PTX -> true
(** Relations that should be reduced *)
let arch_min_rels fences_as_relations arch =
let fences = if fences_as_relations then fence_rels arch else [] in
fences @ ["ad"; "cd"; "dd"; "atom"]
(** List of all fence relations *)
let all_fences =
["dmb"; "dmbst"; "dmbld"; "isb";
"sync"; "lwsync"; "isync";
"membar_cta"; "membar_gl"; "membar_sys";
"mfence"]
(** List of all pairs of relations [(r1,r2)] where membership of [r1] implies membership of [r2] (and hence [r2] need not be drawn) *)
let all_implied_rels =
["dmb", "dmbst";
"dmb", "dmbld";
"sync", "lwsync";
"membar_gl", "membar_cta";
"membar_sys", "membar_gl";
"membar_sys", "membar_cta"]
* List of all pairs of sets [ ( s1,s2 ) ] where membership of [ s1 ] implies membership of [ s2 ] ( and hence [ s2 ] need not be drawn )
let all_implied_sets =
["SC", "ACQ"; "SC", "REL"; "SC", "A";
"ACQ", "A"; "REL", "A"]
(** List of all sets that should be reduced as much as possible *)
let min_sets = [
"SC"; "ACQ"; "REL"; "A"; "SCREL"; "SCACQ"; "MFENCE"; "SYNC";
"LWSYNC"; "ISYNC"; "DMB"; "DMBST"; "DMBLD"; "ISB";
"MEMBAR_CTA"; "MEMBAR_GL"; "MEMBAR_SYS"
]
(** List of all relations that should be reduced as much as possible *)
let min_rels = [
"ad"; "cd"; "dd"; "dmb"; "dmbst"; "dmbld"; "isb"; "sync";
"lwsync"; "isync"; "membar_cta"; "membar_gl";
"membar_sys"; "mfence";
]
| null | https://raw.githubusercontent.com/johnwickerson/memalloy/aa15776873fe3440cb7f5dae8e174466689d4fba/src/archs.ml | ocaml | * The architectures supported by the tool
* Defining a hierarchy of architectures
* Convert architecture to Alloy module name
* Convert Alloy signature name to architecture
* All supported architectures
* Pre-defined fence sets for given architecture
* Pre-defined fence relations for given architecture
* Pre-defined event sets for given architecture
* Pre-defined event relations for given architecture
* Sets that should be reduced
* Relations that should be reduced
* List of all fence relations
* List of all pairs of relations [(r1,r2)] where membership of [r1] implies membership of [r2] (and hence [r2] need not be drawn)
* List of all sets that should be reduced as much as possible
* List of all relations that should be reduced as much as possible |
MIT License
Copyright ( c ) 2017 by .
Permission is hereby granted , free of charge , to any person obtaining
a copy of this software and associated documentation files ( the
" Software " ) , to deal in the Software without restriction , including
without limitation the rights to use , copy , modify , merge , publish ,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
the following conditions :
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE .
MIT License
Copyright (c) 2017 by John Wickerson.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*)
open! Format
open! General_purpose
type t =
| Basic
| C
| Basic_HW
| X86
| Power
| Arm7
| Arm8
| PTX
| OpenCL
| OCaml
let parent_arch = function
| Basic -> None
| C -> Some Basic
| Basic_HW -> Some Basic
| X86 | Power | Arm7 | PTX -> Some Basic_HW
| Arm8 -> Some Arm7
| OpenCL -> Some C
| OCaml -> Some Basic
let pp_arch fences_as_relations oc arch =
let module_name = match arch with
| Basic -> "exec"
| C -> "exec_C"
| Basic_HW -> "exec_H"
| X86 -> "exec_x86"
| Power -> "exec_ppc"
| Arm7 -> "exec_arm7"
| Arm8 -> "exec_arm8"
| PTX -> "exec_ptx"
| OpenCL -> "exec_OpenCL"
| OCaml -> "exec_OCaml"
in
if fences_as_relations then
fprintf oc "../archs/fences_as_relations/%s" module_name
else
fprintf oc "../archs/%s" module_name
* Convert architecture to signature name
let pp_Arch oc = function
| Basic -> fprintf oc "Exec"
| C -> fprintf oc "Exec_C"
| Basic_HW -> fprintf oc "Exec_H"
| X86 -> fprintf oc "Exec_X86"
| Power -> fprintf oc "Exec_PPC"
| Arm7 -> fprintf oc "Exec_Arm7"
| Arm8 -> fprintf oc "Exec_Arm8"
| PTX -> fprintf oc "Exec_PTX"
| OpenCL -> fprintf oc "Exec_OpenCL"
| OCaml -> fprintf oc "Exec_OCaml"
let parse_Arch = function
| "Exec" -> Basic
| "Exec_C" -> C
| "Exec_H" -> Basic_HW
| "Exec_X86" -> X86
| "Exec_PPC" -> Power
| "Exec_Arm7" -> Arm7
| "Exec_Arm8" -> Arm8
| "Exec_PTX" -> PTX
| "Exec_OpenCL" -> OpenCL
| "Exec_OCaml" -> OCaml
| x -> failwith "Unexpected architecture %s" x
* architecture name
let parse_arch = function
| "BASIC" -> Basic
| "C" -> C
| "HW" -> Basic_HW
| "X86" -> X86
| "PPC" -> Power
| "ARM7" -> Arm7
| "ARM8" -> Arm8
| "PTX" -> PTX
| "OpenCL" -> OpenCL
| "OCaml" -> OCaml
| x -> failwith "Unexpected architecture %s" x
let all = ["BASIC"; "C"; "HW"; "X86"; "PPC"; "ARM7";
"ARM8"; "PTX"; "OpenCL"; "OCaml"]
let fence_sets = function
| X86 -> ["MFENCE"]
| Power -> ["SYNC"; "LWSYNC"; "ISYNC"]
| Arm7 | Arm8 -> ["DMB"; "DMBST"; "DMBLD"; "ISB"]
| PTX -> ["MEMBAR_CTA"; "MEMBAR_GL"; "MEMBAR_SYS"]
| _ -> []
let fence_rels = function
| X86 -> ["mfence"]
| Power -> ["sync"; "lwsync"; "isync"]
| Arm7 | Arm8 -> ["dmb"; "dmbst"; "dmbld"; "isb"]
| PTX -> ["membar_cta"; "membar_gl"; "membar_sys"]
| _ -> []
let arch_sets fences_as_relations arch =
let rec arch_sets = function
| Basic -> ["EV"; "W"; "R"; "F"; "NAL"; "IW"]
| C -> arch_sets Basic @ ["A"; "ACQ"; "REL"; "SC"]
| Basic_HW -> arch_sets Basic
| X86 -> arch_sets Basic_HW
| Power -> arch_sets Basic_HW
| Arm7 -> arch_sets Basic_HW
| Arm8 -> arch_sets Arm7 @ ["SCREL"; "SCACQ"]
| PTX -> arch_sets Basic_HW
| OpenCL -> arch_sets C @ ["L"; "G"; "FGA"; "REM"; "WG"; "DV"; "SY"]
| OCaml -> arch_sets Basic @ ["A"]
in
let fences = if fences_as_relations then [] else fence_sets arch in
fences @ arch_sets arch
let rec arch_rels = function
| Basic -> ["ad"; "atom"; "cd"; "co"; "dd"; "rf"; "sb"; "sloc"; "sthd"] @ ["fr"]
| C -> arch_rels Basic
| Basic_HW -> arch_rels Basic
| X86 -> arch_rels Basic_HW @ ["mfence"]
| Power -> arch_rels Basic_HW @ ["sync"; "lwsync"; "isync"]
| Arm7 -> arch_rels Basic_HW @ ["dmb"; "dmbst"; "dmbld"; "isb"]
| Arm8 -> arch_rels Arm7
| PTX -> arch_rels Basic_HW @
["scta"; "sgl"; "membar_cta"; "membar_gl"; "membar_sys"]
| OpenCL -> arch_rels C @ ["swg"; "sdv"; "sbar"]
| OCaml -> arch_rels Basic
let arch_min_sets fences_as_relations arch =
let rec arch_min_sets = function
| Basic -> []
| C -> arch_min_sets Basic @ ["A"; "ACQ"; "REL"; "SC"]
| Basic_HW -> arch_min_sets Basic
| X86 -> arch_min_sets Basic_HW
| Power -> arch_min_sets Basic_HW
| Arm7 -> arch_min_sets Basic_HW
| Arm8 -> arch_min_sets Arm7 @ ["SCREL"; "SCACQ"]
| PTX -> arch_min_sets Basic_HW
| OpenCL -> arch_min_sets C @ ["WG"; "DV"; "SY"]
| OCaml -> arch_min_sets Basic @ ["A"]
in
let fence_min_sets = function
| Power -> ["SYNC","SYNC"]
| Arm7 | Arm8 -> ["DMBLD & DMBST", "DMBST"; "DMBLD & DMBST", "DMBLD"]
| PTX -> ["MEMBAR_GL", "MEMBAR_GL"; "MEMBAR_SYS", "MEMBAR_SYS"]
| _ -> []
in
let fences = if fences_as_relations then [] else fence_min_sets arch in
fences @ List.map (fun x -> x,x) (arch_min_sets arch)
let is_hw = function
| Basic | C | OpenCL | OCaml -> false
| Basic_HW | X86 | Power | Arm7 | Arm8 | PTX -> true
let arch_min_rels fences_as_relations arch =
let fences = if fences_as_relations then fence_rels arch else [] in
fences @ ["ad"; "cd"; "dd"; "atom"]
let all_fences =
["dmb"; "dmbst"; "dmbld"; "isb";
"sync"; "lwsync"; "isync";
"membar_cta"; "membar_gl"; "membar_sys";
"mfence"]
let all_implied_rels =
["dmb", "dmbst";
"dmb", "dmbld";
"sync", "lwsync";
"membar_gl", "membar_cta";
"membar_sys", "membar_gl";
"membar_sys", "membar_cta"]
* List of all pairs of sets [ ( s1,s2 ) ] where membership of [ s1 ] implies membership of [ s2 ] ( and hence [ s2 ] need not be drawn )
let all_implied_sets =
["SC", "ACQ"; "SC", "REL"; "SC", "A";
"ACQ", "A"; "REL", "A"]
let min_sets = [
"SC"; "ACQ"; "REL"; "A"; "SCREL"; "SCACQ"; "MFENCE"; "SYNC";
"LWSYNC"; "ISYNC"; "DMB"; "DMBST"; "DMBLD"; "ISB";
"MEMBAR_CTA"; "MEMBAR_GL"; "MEMBAR_SYS"
]
let min_rels = [
"ad"; "cd"; "dd"; "dmb"; "dmbst"; "dmbld"; "isb"; "sync";
"lwsync"; "isync"; "membar_cta"; "membar_gl";
"membar_sys"; "mfence";
]
|
8876d04768abb327ddbd991900436a674876d3f70151740daa2463a3c338a5f2 | babashka/pod-registry | bbssh.clj | #!/usr/bin/env bb
(require '[babashka.pods :as pods]
'[clojure.java.io :as io])
(pods/load-pod 'epiccastle/bbssh "0.3.0")
(require '[pod.epiccastle.bbssh.core :as bbssh]
'[pod.epiccastle.bbssh.scp :as scp])
(let [session (bbssh/ssh "remote-host" {:username "remote-user"})]
;; execute
(-> (bbssh/exec session "echo 'I am running over ssh'" {:out :string})
deref
:out
println)
;; copy to remote
(scp/scp-to
;; multiple sources
[(io/file "single-file") ;; file
(io/file "directory") ;; directory
["content🚀" {:filename "string"}] ;; string data
[(byte-array [1 2 3 4])
{:filename "byte-array"}] ;; byte-array data
[(io/input-stream (byte-array [0xf0 0x9f 0x9a 0x80 0x00]))
{:filename "input-stream"
:size 5
}] ;; input stream
]
"remote-path" ;; remote path
{:session session ;; options
:recurse? true})
;; copy to local
(scp/scp-from "remote-path" "local-path" {:session session}))
| null | https://raw.githubusercontent.com/babashka/pod-registry/1e1aea4774987d1f6d78325fec51e96dd07cf032/examples/bbssh.clj | clojure | execute
copy to remote
multiple sources
file
directory
string data
byte-array data
input stream
remote path
options
copy to local | #!/usr/bin/env bb
(require '[babashka.pods :as pods]
'[clojure.java.io :as io])
(pods/load-pod 'epiccastle/bbssh "0.3.0")
(require '[pod.epiccastle.bbssh.core :as bbssh]
'[pod.epiccastle.bbssh.scp :as scp])
(let [session (bbssh/ssh "remote-host" {:username "remote-user"})]
(-> (bbssh/exec session "echo 'I am running over ssh'" {:out :string})
deref
:out
println)
(scp/scp-to
[(byte-array [1 2 3 4])
[(io/input-stream (byte-array [0xf0 0x9f 0x9a 0x80 0x00]))
{:filename "input-stream"
:size 5
]
:recurse? true})
(scp/scp-from "remote-path" "local-path" {:session session}))
|
07b2e54768b80ad621d5ed2a19cc7899239fc2ed8f13cf10b321a9a67366160c | fetburner/Coq2SML | sml.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
s Production of Sml syntax .
open Pp
open Util
open Names
open Nameops
open Libnames
open Table
open Miniml
open Mlutil
open Modutil
open Common
open Declarations
(*s Some utility functions. *)
let pp_tvar id =
let s = string_of_id id in
if String.length s < 2 || s.[1]<>'\''
then str ("'"^s)
else str ("' "^s)
let pp_abst = function
| [] -> mt ()
| l ->
str "fn " ++ prlist_with_sep (fun () -> str " => fn ") pr_id l ++
str " =>" ++ spc ()
let pp_parameters l =
(pp_boxed_tuple pp_tvar l ++ space_if (l<>[]))
let pp_string_parameters l =
(pp_boxed_tuple str l ++ space_if (l<>[]))
let pp_letin pat def body =
let fstline = str "let val " ++ pat ++ str " =" ++ spc () ++ def in
hv 0 (hv 0 (hov 2 fstline ++ spc () ++ str "in") ++ spc () ++ hov 0 body ++ spc () ++ str "end")
s Sml renaming issues .
let keywords =
List.fold_right (fun s -> Idset.add (id_of_string s))
[ "abstype"; "and"; "andalso"; "as"; "case"; "datatype"; "do";
"else"; "end"; "exception"; "div"; "fn"; "fun"; "bandle"; "if";
"in"; "infix"; "infixr"; "let"; "local"; "nonfix"; "of";
"op"; "open"; "orelse"; "raise"; "rec"; "sig"; "then";
"type"; "val"; "with"; "withtype"; "while"; "o"; "_"; "coq___" ]
Idset.empty
let pp_open mp = str ("open "^ string_of_modfile mp ^"\n")
let preamble _ used_modules usf =
prlist pp_open used_modules ++
(if used_modules = [] then mt () else fnl ()) ++
(if usf.tdummy || usf.tunknown then str "type coq___ = unit\n" else mt()) ++
(if usf.mldummy then
str "val coq___ = ()\n"
else mt ()) ++
(if usf.tdummy || usf.tunknown || usf.mldummy then fnl () else mt ())
let sig_preamble _ used_modules usf =
str "_require \"basis.smi\"\n\n" ++
prlist pp_open used_modules ++
(if used_modules = [] then mt () else fnl ()) ++
(if usf.tdummy || usf.tunknown then str "type coq___ = unit\n" else mt())
s The pretty - printer for Sml syntax
(* Beware of the side-effects of [pp_global] and [pp_modname].
They are used to update table of content for modules. Many [let]
below should not be altered since they force evaluation order.
*)
let str_global k r =
if is_inline_custom r then find_custom r else Common.pp_global k r
let pp_global k r = str (str_global k r)
let pp_modname mp = str (Common.pp_module mp)
let is_infix r =
is_inline_custom r &&
(let s = find_custom r in
let l = String.length s in
l >= 2 && s.[0] = '(' && s.[l-1] = ')')
let get_infix r =
let s = find_custom r in
String.sub s 1 (String.length s - 2)
let get_ind = function
| IndRef _ as r -> r
| ConstructRef (ind,_) -> IndRef ind
| _ -> assert false
let pp_one_field r i = function
| Some r -> pp_global Term r
| None -> pp_global Type (get_ind r) ++ str "coq___" ++ int i
let pp_field r fields i = pp_one_field r i (List.nth fields i)
let pp_fields r fields = list_map_i (pp_one_field r) 0 fields
(*s Pretty-printing of types. [par] is a boolean indicating whether parentheses
are needed or not. *)
let rec pp_type par vl t =
let rec pp_rec par = function
| Tmeta _ | Tvar' _ | Taxiom -> assert false
| Tvar i -> (try pp_tvar (List.nth vl (pred i))
with e when Errors.noncritical e ->
(str "'a" ++ int i))
| Tglob (r,[a1;a2]) when is_infix r ->
pp_par par (pp_rec true a1 ++ str (get_infix r) ++ pp_rec true a2)
| Tglob (r,[]) -> pp_global Type r
| Tglob (IndRef(kn,0),l)
when not (keep_singleton ()) && kn = mk_ind "Coq.Init.Specif" "sig" ->
pp_tuple_light pp_rec l
| Tglob (r,l) ->
pp_tuple_light pp_rec l ++ spc () ++ pp_global Type r
| Tarr (t1,t2) ->
pp_par par
(pp_rec true t1 ++ spc () ++ str "->" ++ spc () ++ pp_rec false t2)
| Tdummy _ -> str "coq___"
| Tunknown -> str "coq___"
in
hov 0 (pp_rec par t)
s Pretty - printing of expressions . [ par ] indicates whether
parentheses are needed or not . [ env ] is the list of names for the
de Bruijn variables . [ args ] is the list of collected arguments
( already pretty - printed ) .
parentheses are needed or not. [env] is the list of names for the
de Bruijn variables. [args] is the list of collected arguments
(already pretty-printed). *)
let is_bool_patt p s =
try
let r = match p with
| Pusual r -> r
| Pcons (r,[]) -> r
| _ -> raise Not_found
in
find_custom r = s
with Not_found -> false
let is_ifthenelse = function
| [|([],p1,_);([],p2,_)|] -> is_bool_patt p1 "true" && is_bool_patt p2 "false"
| _ -> false
let expr_needs_par = function
| MLlam _ -> true
| MLcase (_,_,[|_|]) -> false
| MLcase (_,_,pv) -> not (is_ifthenelse pv)
| _ -> false
let rec pp_expr par env args =
let apply st = pp_apply st par args
and apply2 st = pp_apply2 st par args in
function
| MLrel n ->
let id = get_db_name n env in apply (pr_id id)
| MLapp (f,args') ->
let stl = List.map (pp_expr true env []) args' in
pp_expr par env (stl @ args) f
| MLlam _ as a ->
let fl,a' = collect_lams a in
let fl = List.map id_of_mlid fl in
let fl,env' = push_vars fl env in
let st = pp_abst (List.rev fl) ++ pp_expr false env' [] a' in
apply2 st
| MLletin (id,a1,a2) ->
let i,env' = push_vars [id_of_mlid id] env in
let pp_id = pr_id (List.hd i)
and pp_a1 = pp_expr false env [] a1
and pp_a2 = pp_expr (not par && expr_needs_par a2) env' [] a2 in
hv 0 (apply2 (pp_letin pp_id pp_a1 pp_a2))
| MLglob r ->
(try
let args = list_skipn (projection_arity r) args in
let record = List.hd args in
pp_apply (pp_apply (str "#" ++ pp_global Term r) par [record]) par (List.tl args)
with e when Errors.noncritical e -> apply (pp_global Term r))
| MLfix (i,ids,defs) ->
let ids',env' = push_vars (List.rev (Array.to_list ids)) env in
pp_fix par env' i (Array.of_list (List.rev ids'),defs) args
| MLexn s ->
An [ MLexn ] may be applied , but I do n't really care .
pp_par par (str ("raise (Fail \""^s^"\")") ++ spc () ++ str ("(* "^s^" *)"))
| MLdummy ->
str "coq___" (* An [MLdummy] may be applied, but I don't really care. *)
| MLmagic a ->
pp_apply (str "Unsafe.cast") par (pp_expr true env [] a :: args)
| MLaxiom ->
pp_par par (str "failwith \"AXIOM TO BE REALIZED\"")
| MLcons (_,r,a) as c ->
assert (args=[]);
begin match a with
| _ when is_native_char c -> pp_native_char c
| [a1;a2] when is_infix r ->
let pp = pp_expr true env [] in
pp_par par (pp a1 ++ str (get_infix r) ++ pp a2)
| _ when is_coinductive r ->
let ne = (a<>[]) in
let tuple = space_if ne ++ pp_tuple (pp_expr true env []) a in
pp_par par (str "lazy " ++ pp_par ne (pp_global Cons r ++ tuple))
| [] -> pp_global Cons r
| _ ->
let fds = get_record_fields r in
if fds <> [] then
pp_record_pat (pp_fields r fds, List.map (pp_expr true env []) a)
else
let tuple = pp_tuple (pp_expr true env []) a in
hack Extract Inductive prod
then tuple
else pp_par par (pp_global Cons r ++ spc () ++ tuple)
end
| MLtuple l ->
assert (args = []);
pp_boxed_tuple (pp_expr true env []) l
| MLcase (_, t, pv) when is_custom_match pv ->
if not (is_regular_match pv) then
error "Cannot mix yet user-given match and general patterns.";
let mkfun (ids,_,e) =
if ids <> [] then named_lams (List.rev ids) e
else dummy_lams (ast_lift 1 e) 1
in
let pp_branch tr = pp_expr true env [] (mkfun tr) ++ fnl () in
let inner =
str (find_custom_match pv) ++ fnl () ++
prvect pp_branch pv ++
pp_expr true env [] t
in
apply2 (hov 2 inner)
| MLcase (typ, t, pv) ->
let head =
if not (is_coinductive_type typ) then pp_expr false env [] t
else (str "Lazy.force" ++ spc () ++ pp_expr true env [] t)
in
First , can this match be printed as a mere record projection ?
(try pp_record_proj par env typ t pv args
with Impossible ->
Second , can this match be printed as a let - in ?
if Array.length pv = 1 then
let s1,s2 = pp_one_pat env pv.(0) in
hv 0 (apply2 (pp_letin s1 head s2))
else
(* Third, can this match be printed as [if ... then ... else] ? *)
(try apply2 (pp_ifthenelse env head pv)
with Not_found ->
(* Otherwise, standard match *)
apply2
(v 0 (str "case " ++ head ++ str " of" ++ fnl () ++
pp_pat env pv))))
(* Try to print a [match] as a mere SML record projection [#field record].
   Raises [Impossible] whenever the shape does not allow it: the matched
   type has no registered record fields, the match has several branches,
   patterns are nested, or the branch body is not (an application of) one
   of the pattern variables. *)
and pp_record_proj par env typ t pv args =
  (* Can a match be printed as a mere record projection ? *)
  let fields = record_fields_of_type typ in
  if fields = [] then raise Impossible;
  if Array.length pv <> 1 then raise Impossible;
  if has_deep_pattern pv then raise Impossible;
  let (ids,pat,body) = pv.(0) in
  let n = List.length ids in
  (* [no_patvar a] : none of the arguments [a] mentions a pattern variable *)
  let no_patvar a = not (List.exists (ast_occurs_itvl 1 n) a) in
  (* [rel_i] : de Bruijn index (among the pattern variables) returned by
     the branch body; [a] : extra arguments it is applied to *)
  let rel_i,a = match body with
    | MLrel i when i <= n -> i,[]
    | MLapp(MLrel i, a) when i<=n && no_patvar a -> i,a
    | _ -> raise Impossible
  in
  (* position of variable [i] in the constructor's explicit pattern list *)
  let rec lookup_rel i idx = function
    | Prel j :: l -> if i = j then idx else lookup_rel i (idx+1) l
    | Pwild :: l -> lookup_rel i (idx+1) l
    | _ -> raise Impossible
  in
  let r,idx = match pat with
    | Pusual r -> r, n-rel_i
    | Pcons (r,l) -> r, lookup_rel rel_i 0 l
    | _ -> raise Impossible
  in
  if is_infix r then raise Impossible;
  let env' = snd (push_vars (List.rev_map id_of_mlid ids) env) in
  let pp_args = (List.map (pp_expr true env' []) a) @ args in
  (* SML record selection syntax : [#label record] *)
  let pp_head = str "#" ++ pp_field r fields idx ++ str " " ++ pp_expr true env [] t
  in
  pp_apply pp_head par pp_args
(* Print a record pattern/value [{ f1 = a1, ..., fn = an }].
   [fields] and [args] are already pretty-printed documents. *)
and pp_record_pat (fields, args) =
  str "{ " ++
  prlist_with_sep (fun () -> str "," ++ spc ())
    (fun (f,a) -> f ++ str " =" ++ spc () ++ a)
    (List.combine fields args) ++
  str " }"

(* Print a constructor applied to the already-printed sub-patterns [ppl].
   Special cases: user-declared infix constructors, record constructors,
   and the "Extract Inductive prod" hack (empty constructor name). *)
and pp_cons_pat r ppl =
  if is_infix r && List.length ppl = 2 then
    List.hd ppl ++ str (get_infix r) ++ List.hd (List.tl ppl)
  else
    let fields = get_record_fields r in
    if fields <> [] then pp_record_pat (pp_fields r fields, ppl)
    else if str_global Cons r = "" then
      pp_boxed_tuple identity ppl (* Hack Extract Inductive prod *)
    else
      pp_global Cons r ++ space_if (ppl<>[]) ++ pp_boxed_tuple identity ppl
(* Print a general pattern.  [ids] are the already-renamed pattern
   variables, used directly for the [Pusual] shortcut. *)
and pp_gen_pat ids env = function
  | Pcons (r, l) -> pp_cons_pat r (List.map (pp_gen_pat ids env) l)
  | Pusual r -> pp_cons_pat r (List.map pr_id ids)
  | Ptuple l -> pp_boxed_tuple (pp_gen_pat ids env) l
  | Pwild -> str "_"
  | Prel n -> pr_id (get_db_name n env)

(* Print a two-branch match over [true]/[false] as [if ... then ... else].
   Raises [Not_found] when the branch vector has not that exact shape. *)
and pp_ifthenelse env expr pv = match pv with
  | [|([],tru,the);([],fal,els)|] when
      (is_bool_patt tru "true") && (is_bool_patt fal "false")
      ->
      hv 0 (hov 2 (str "if " ++ expr) ++ spc () ++
            hov 2 (str "then " ++
                   hov 2 (pp_expr (expr_needs_par the) env [] the)) ++ spc () ++
            hov 2 (str "else " ++
                   hov 2 (pp_expr (expr_needs_par els) env [] els)))
  | _ -> raise Not_found

(* Print one match branch: returns (printed pattern, printed right-hand
   side), with the pattern variables pushed in the environment. *)
and pp_one_pat env (ids,p,t) =
  let ids',env' = push_vars (List.rev_map id_of_mlid ids) env in
  pp_gen_pat (List.rev ids') env' p,
  pp_expr (expr_needs_par t) env' [] t

(* Print all branches of a match, SML style, separated by "|". *)
and pp_pat env pv =
  prvecti
    (fun i x ->
       let s1,s2 = pp_one_pat env x in
       hv 2 (hov 4 ((if i = 0 then str " " else str "| ") ++ s1 ++ str " =>") ++ spc () ++ hov 2 s2) ++
       if i = Array.length pv - 1 then mt () else fnl ())
    pv
(* Print a function body: collect the leading lambdas of [t] into a
   binding list and print [args = body].  Also returns a flag telling
   whether at least one argument was found, i.e. whether the definition
   must be introduced by "fun" rather than "val". *)
and pp_function env t =
  let bl,t' = collect_lams t in
  let bl,env' = push_vars (List.map id_of_mlid bl) env in
  (List.length bl <> 0, pr_binding (List.rev bl) ++
                        str " =" ++ fnl () ++ str " " ++
                        hov 2 (pp_expr false env' [] t'))

(*s names of the functions ([ids]) are already pushed in [env],
    and passed here just for convenience. *)

(* Print a local fixpoint as [let fun f1 ... and f2 ... in fi args end]. *)
and pp_fix par env i (ids,bl) args =
  pp_par par
    (v 0 (str "let fun " ++
          prvect_with_sep
            (fun () -> fnl () ++ str "and ")
            (fun (fi,ti) -> pr_id fi ++ snd (pp_function env ti))
            (array_map2 (fun id b -> (id,b)) ids bl) ++
          fnl () ++
          hov 2 (str "in " ++ pp_apply (pr_id ids.(i)) false args ++ str " end ")))
(* Print the commented type signature of a value: [(** val e : typ **)]. *)
let pp_val e typ =
  hov 4 (str "(** val " ++ e ++ str " :" ++ spc () ++ pp_type false [] typ ++
         str " **)") ++ fnl2 ()

(*s Pretty-printing of [Dfix] *)

(* Print a block of mutually recursive definitions as
   [fun/val f1 ... and f2 ...].  Inline-custom or unused entries are
   skipped; if every entry is skipped the whole phrase is empty. *)
let pp_Dfix (rv,c,t) =
  let names = Array.map
      (fun r -> if is_inline_custom r then mt () else pp_global Term r) rv
  in
  let rec pp init i =
    if i >= Array.length rv then
      (if init then failwith "empty phrase" else mt ())
    else
      (* skip entries producing no code at all *)
      let void = is_inline_custom rv.(i) ||
                 (not (is_custom rv.(i)) && c.(i) = MLexn "UNUSED")
      in
      if void then pp init (i+1)
      else
        let (isfun, def) =
          if is_custom rv.(i) then (false, str " = " ++ str (find_custom rv.(i)))
          else pp_function (empty_env ()) c.(i)
        in
        (if init then mt () else fnl2 ()) ++
        pp_val names.(i) t.(i) ++
        str
          (if init then
             if isfun then "fun "
             else "val "
           else "and ") ++ names.(i) ++ def ++
        pp false (i+1)
  in pp true 0
(*s Pretty-printing of inductive types declaration. *)

(* Print the equivalence annotation of an inductive type (datatype
   replication).  Returns the printed suffix plus a flag telling whether
   the type is a replication ([= datatype ...]); in that case no
   definition body must be printed afterwards. *)
let pp_equiv param_list name = function
  | NoEquiv, _ -> mt (), false
  | Equiv kn, i ->
      str " = datatype " ++ pp_parameters param_list ++ pp_global Type (IndRef (mind_of_kn kn,i)), true
  | RenEquiv ren, _ ->
      str " = datatype " ++ pp_parameters param_list ++ str (ren^".") ++ name, true

(* Wrap a document in SML comment delimiters. *)
let pp_comment s = str "(* " ++ s ++ str " *)"
(* Print one inductive type: parameters, name, equivalence suffix and
   constructor list.  An empty constructor array is rendered as [unit]
   with an explanatory comment. *)
let pp_one_ind prefix ip_equiv pl name cnames ctyps =
  let pl = rename_tvars keywords pl in
  let pp_constructor i typs =
    (if i=0 then mt () else fnl ()) ++
    hov 3 ((if i = 0 then str " " else str "| ") ++ cnames.(i) ++
           (if typs = [] then mt () else str " of ") ++
           prlist_with_sep
             (fun () -> spc () ++ str "* ") (pp_type true pl) typs)
  in
  let pp_eq, is_rep = pp_equiv pl name ip_equiv in
  pp_parameters pl ++ str prefix ++ name ++
  (* NB: by OCaml parsing, when [is_rep] holds the [else] branch below
     swallows the whole " = <constructors>" tail, so a replicated
     datatype gets no body — this is intentional. *)
  pp_eq ++ if is_rep then mt () else str " =" ++
           if Array.length ctyps = 0 then str " unit (* empty inductive *)"
           else fnl () ++ v 0 (prvecti pp_constructor ctyps)

(* A logical (Prop) inductive leaves no code: emit only comments
   recalling its name and constructors. *)
let pp_logical_ind packet =
  pp_comment (pr_id packet.ip_typename ++ str " : logical inductive") ++
  fnl () ++
  pp_comment (str "with constructors : " ++
              prvect_with_sep spc pr_id packet.ip_consnames) ++
  fnl ()
(* Print a singleton inductive (one constructor with one argument) as a
   plain type abbreviation, with a comment recalling the constructor. *)
let pp_singleton kn packet =
  let name = pp_global Type (IndRef (kn,0)) in
  let l = rename_tvars keywords packet.ip_vars in
  hov 2 (str "type " ++ pp_parameters l ++ name ++ str " =" ++ spc () ++
         pp_type false l (List.hd packet.ip_types.(0)) ++ fnl () ++
         pp_comment (str "singleton inductive, whose constructor was " ++
                     pr_id packet.ip_consnames.(0)))

(* Print a record inductive as an SML record type [{ f : t, ... }]. *)
let pp_record kn fields ip_equiv packet =
  let ind = IndRef (kn,0) in
  let name = pp_global Type ind in
  let fieldnames = pp_fields ind fields in
  let l = List.combine fieldnames packet.ip_types.(0) in
  let pl = rename_tvars keywords packet.ip_vars in
  let pp_eq, is_rep = pp_equiv pl name ip_equiv in
  str "type " ++ pp_parameters pl ++ name ++
  (* as in [pp_one_ind], a replication suppresses the record body *)
  pp_eq ++ if is_rep then mt () else str " = { "++
           hov 0 (prlist_with_sep (fun () -> str "," ++ spc ())
                    (fun (p,t) -> p ++ str " : " ++ pp_type true pl t) l)
           ++ str " }"

(* For a coinductive type, emit the lazy indirection
   [t = coq___t Lazy.t and ...] in front of the real definition. *)
let pp_coind pl name =
  let pl = rename_tvars keywords pl in
  pp_parameters pl ++ name ++ str " = " ++
  pp_parameters pl ++ str "coq___" ++ name ++ str " Lazy.t" ++
  fnl() ++ str "and "
(* Print a whole (co)inductive block.  [co] tells whether it is
   coinductive; mutually defined packets are joined with "and".  The
   mutable [init]/[some] cells ensure the "datatype " keyword is emitted
   only once and let us detect a block where nothing was printed. *)
let pp_ind co kn ind =
  let prefix = if co then "coq___" else "" in
  let some = ref false in            (* did we print anything at all ? *)
  let init= ref (str "datatype ") in (* introducer for the next item *)
  let names =
    Array.mapi (fun i p -> if p.ip_logical then mt () else
                   pp_global Type (IndRef (kn,i)))
      ind.ind_packets
  in
  let cnames =
    Array.mapi
      (fun i p -> if p.ip_logical then [||] else
          Array.mapi (fun j _ -> pp_global Cons (ConstructRef ((kn,i),j+1)))
            p.ip_types)
      ind.ind_packets
  in
  let rec pp i =
    if i >= Array.length ind.ind_packets then mt ()
    else
      let ip = (kn,i) in
      let ip_equiv = ind.ind_equiv, i in
      let p = ind.ind_packets.(i) in
      if is_custom (IndRef ip) then pp (i+1)
      else begin
        some := true;
        if p.ip_logical then pp_logical_ind p ++ pp (i+1)
        else
          let s = !init in
          begin
            (* subsequent packets are introduced by "and" *)
            init := (fnl () ++ str "and ");
            s ++
            (if co then pp_coind p.ip_vars names.(i) else mt ()) ++
            pp_one_ind
              prefix ip_equiv p.ip_vars names.(i) cnames.(i) p.ip_types ++
            pp (i+1)
          end
      end
  in
  let st = pp 0 in if !some then st else failwith "empty phrase"

(*s Pretty-printing of a declaration. *)

(* Dispatch on the kind of inductive declaration. *)
let pp_mind kn i =
  match i.ind_kind with
  | Singleton -> pp_singleton kn i.ind_packets.(0)
  | Coinductive -> pp_ind true kn i
  | Record fields -> pp_record kn fields (i.ind_equiv,0) i.ind_packets.(0)
  | Standard -> pp_ind false kn i
(* Print a toplevel declaration.  Inline-custom terms and types produce
   no code at all ("empty phrase", caught by the wrapper below). *)
let pp_decl = function
  | Dtype (r,_,_) when is_inline_custom r -> failwith "empty phrase"
  | Dterm (r,_,_) when is_inline_custom r -> failwith "empty phrase"
  | Dind (kn,i) -> pp_mind kn i
  | Dtype (r, l, t) ->
      let name = pp_global Type r in
      let l = rename_tvars keywords l in
      let ids, def =
        try
          (* user-provided realization (Extract Constant) *)
          let ids,s = find_type_custom r in
          pp_string_parameters ids, str "=" ++ spc () ++ str s
        with Not_found ->
          pp_parameters l,
          if t = Taxiom then str "(* AXIOM TO BE REALIZED *)"
          else str "=" ++ spc () ++ pp_type false l t
      in
      hov 2 (str "type " ++ ids ++ name ++ spc () ++ def)
  | Dterm (r, a, t) ->
      let (isfun, def) =
        if is_custom r then (false, str (" = " ^ find_custom r))
        else if is_projection r then
          (* a projection becomes [val p _ ... _ : typ = fn x => #p x],
             with one "_" binder per extra argument of the projection *)
          (false, (prvect str (Array.make (projection_arity r) " _")) ++
                  str " : " ++ pp_type false [] t ++ str " = fn x => #")
        else pp_function (empty_env ()) a
      in
      let name = pp_global Term r in
      let postdef = if is_projection r then name ++ str " x" else mt () in
      pp_val name t ++ hov 0 (str (if isfun then "fun " else "val ") ++ name ++ def ++ postdef)
  | Dfix (rv,defs,typs) ->
      pp_Dfix (rv,defs,typs)
(* Print alias declarations [X = Ren.X], used when a name clash forced
   the original declaration into an auxiliary structure [Ren] (see
   [Common.check_duplicate]); the aliases re-export the names at their
   expected place.  The term body and type of a [Dterm] are irrelevant
   here, hence the wildcard binders (they also silence OCaml's
   unused-variable warnings, which the previous [a]/[t]/[i] bindings
   triggered). *)
let pp_alias_decl ren = function
  | Dind (kn,i) -> pp_mind kn { i with ind_equiv = RenEquiv ren }
  | Dtype (r, l, _) ->
      let name = pp_global Type r in
      let l = rename_tvars keywords l in
      let ids = pp_parameters l in
      hov 2 (str "type " ++ ids ++ name ++ str " =" ++ spc () ++ ids ++
             str (ren^".") ++ name)
  | Dterm (r, _, _) ->
      (* just re-export the name: [val x = Ren.x] *)
      let name = pp_global Term r in
      hov 2 (str "val " ++ name ++ str (" = "^ren^".") ++ name)
  | Dfix (rv, _, _) ->
      (* one alias per fixpoint member, skipping inline-custom ones *)
      prvecti (fun _ r -> if is_inline_custom r then mt () else
                  let name = pp_global Term r in
                  hov 2 (str "val " ++ name ++ str (" = "^ren^".") ++ name) ++
                  fnl ())
        rv
(* Print a specification (signature item).  Inline-custom values and
   types yield no code ("empty phrase"). *)
let pp_spec = function
  | Sval (r,_) when is_inline_custom r -> failwith "empty phrase"
  | Stype (r,_,_) when is_inline_custom r -> failwith "empty phrase"
  | Sind (kn,i) -> pp_mind kn i
  | Sval (r,t) ->
      let def = pp_type false [] t in
      let name = pp_global Term r in
      hov 2 (str "val " ++ name ++ str " :" ++ spc () ++ def)
  | Stype (r,vl,ot) ->
      let name = pp_global Type r in
      let l = rename_tvars keywords vl in
      let ids, def =
        try
          (* user-provided realization (Extract Constant) *)
          let ids, s = find_type_custom r in
          pp_string_parameters ids, str "= " ++ str s
        with Not_found ->
          let ids = pp_parameters l in
          match ot with
          | None -> ids, mt ()
          | Some Taxiom -> ids, str "(* AXIOM TO BE REALIZED *)"
          | Some t -> ids, str "=" ++ spc () ++ pp_type false l t
      in
      hov 2 (str "type " ++ ids ++ name ++ spc () ++ def)

(* Print alias specifications [X = Ren.X]; see [pp_alias_decl]. *)
let pp_alias_spec ren = function
  | Sind (kn,i) -> pp_mind kn { i with ind_equiv = RenEquiv ren }
  | Stype (r,l,_) ->
      let name = pp_global Type r in
      let l = rename_tvars keywords l in
      let ids = pp_parameters l in
      hov 2 (str "datatype " ++ ids ++ name ++ str " =" ++ spc () ++ ids ++
             str (ren^".") ++ name)
  | Sval _ -> assert false (* filtered out by the callers (see pp_specif) *)
(* Print one signature element.  When the label [l] was renamed to avoid
   a clash, the spec is wrapped in an auxiliary structure and re-exported
   through [pp_alias_spec]. *)
let rec pp_specif = function
  | (_,Spec (Sval _ as s)) -> pp_spec s
  | (l,Spec s) ->
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         hov 1 (str ("structure "^ren^" = struct ") ++ fnl () ++ pp_spec s) ++
         fnl () ++ str "end" ++ fnl () ++
         pp_alias_spec ren s
       with Not_found -> pp_spec s)
  | (l,Smodule mt) ->
      (* NB: here [mt] shadows [Pp.mt], hence the qualified [Pp.mt] below.
         The module type is rendered twice on purpose: printing it
         side-effects the visibility tables. *)
      let def = pp_module_type [] mt in
      let def' = pp_module_type [] mt in
      let name = pp_modname (MPdot (top_visible_mp (), l)) in
      hov 1 (str "signature " ++ name ++ str " = " ++ fnl () ++ def) ++
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         fnl () ++ hov 1 (str ("sig "^ren^" = ") ++ fnl () ++ def')
       with Not_found -> Pp.mt ())
  | (l,Smodtype mt) ->
      let def = pp_module_type [] mt in
      let name = pp_modname (MPdot (top_visible_mp (), l)) in
      hov 1 (str "signature " ++ name ++ str " = " ++ fnl () ++ def) ++
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         fnl () ++ str ("signature "^ren^" = ") ++ name
       with Not_found -> Pp.mt ())

(* Print a module type.  [params] are the functor parameters currently
   in scope, passed to [push_visible]. *)
and pp_module_type params = function
  | MTident kn ->
      pp_modname kn
  | MTfunsig (mbid, mt, mt') ->
      let typ = pp_module_type [] mt in
      let name = pp_modname (MPbound mbid) in
      let def = pp_module_type (MPbound mbid :: params) mt' in
      str "functor (" ++ name ++ str ":" ++ typ ++ str ") ->" ++ fnl () ++ def
  | MTsig (mp, sign) ->
      (* the sequencing below forces evaluation between push/pop of the
         visibility stack *)
      push_visible mp params;
      let l = map_succeed pp_specif sign in
      pop_visible ();
      str "sig " ++ fnl () ++
      v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
      fnl () ++ str "end"
  | MTwith(mt,ML_With_type(idl,vl,typ)) ->
      let ids = pp_parameters (rename_tvars keywords vl) in
      let mp_mt = msid_of_mt mt in
      let l,idl' = list_sep_last idl in
      let mp_w =
        List.fold_left (fun mp l -> MPdot(mp,label_of_id l)) mp_mt idl'
      in
      let r = ConstRef (make_con mp_w empty_dirpath (label_of_id l)) in
      push_visible mp_mt [];
      let pp_w = str " with type " ++ ids ++ pp_global Type r in
      pop_visible();
      pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
  | MTwith(mt,ML_With_module(idl,mp)) ->
      let mp_mt = msid_of_mt mt in
      let mp_w =
        List.fold_left (fun mp id -> MPdot(mp,label_of_id id)) mp_mt idl
      in
      push_visible mp_mt [];
      let pp_w = str " and " ++ pp_modname mp_w in
      pop_visible ();
      pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_modname mp
(* A module expression is "short" when it fits on the same line as its
   introduction: a plain name or a functor application. *)
let is_short me =
  match me with
  | MEident _ | MEapply _ -> true
  | MEfunctor _ | MEstruct _ -> false
(* Peel the leading [MEfunctor] layers off a module expression,
   returning the functor parameters in order together with the innermost
   (non-functor) body. *)
let rec collect_functors me =
  match me with
  | MEfunctor (mbid, mt, body) ->
      let params, inner = collect_functors body in
      (mbid, mt) :: params, inner
  | other -> [], other
(* Print one structure element.  As in [pp_specif], a renamed label is
   wrapped in an auxiliary structure and re-exported via aliases. *)
let rec pp_structure_elem = function
  | (l,SEdecl d) ->
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         hov 1 (str ("structure "^ren^" = struct ") ++ fnl () ++ pp_decl d) ++
         fnl () ++ str "end" ++ fnl () ++
         pp_alias_decl ren d
       with Not_found -> pp_decl d)
  | (l,SEmodule m) ->
      let typ =
        (* virtual printing of the type, in order to have a correct mli later*)
        if Common.get_phase () = Pre then
          str ": " ++ pp_module_type [] m.ml_mod_type
        else mt ()
      in
      (* functors are split into explicit parameters + body *)
      let args, me = collect_functors m.ml_mod_expr in
      let def = pp_module_expr [] me in
      let name = pp_modname (MPdot (top_visible_mp (), l)) in
      let prefix = if args = [] then "structure " else "functor " in
      hov 1
        (str prefix ++ name ++ fnl () ++ pp_meargs args ++ typ ++ str " = " ++
         (if is_short me then mt () else fnl ()) ++ def) ++
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         fnl () ++ str ("structure "^ren^" = ") ++ name
       with Not_found -> mt ())
  | (l,SEmodtype m) ->
      let def = pp_module_type [] m in
      let name = pp_modname (MPdot (top_visible_mp (), l)) in
      hov 1 (str "signature " ++ name ++ str " = " ++ fnl () ++ def) ++
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         fnl () ++ str ("signature "^ren^" = ") ++ name
       with Not_found -> mt ())

(* Print the functor parameters [(X : SIG)] of a structure, one per
   line. *)
and pp_meargs args =
  let pp_functor (mbid, mt) =
    let name = pp_modname (MPbound mbid) in
    let typ = pp_module_type [] mt in
    str "(" ++ name ++ str ":" ++ typ ++ str ")" ++ fnl () in
  List.fold_left ( ++ ) (mt ())
    (List.map pp_functor args)

(* Print a module expression.  [MEfunctor] never reaches this point:
   functors are split off beforehand by [collect_functors]. *)
and pp_module_expr params = function
  | MEident mp -> pp_modname mp
  | MEapply (me, me') ->
      pp_module_expr [] me ++ str "(" ++ pp_module_expr [] me' ++ str ")"
  | MEfunctor (mbid, mt, me) -> failwith "pp_module_expr"
  | MEstruct (mp, sel) ->
      push_visible mp params;
      let l = map_succeed pp_structure_elem sel in
      pop_visible ();
      str "struct " ++ fnl () ++
      v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
      fnl () ++ str "end"
(* Apply the printer [f] to a whole extracted structure [s], skipping
   elements that raise [Failure "empty phrase"] and maintaining the
   visibility stack around each [MPfile]. *)
let do_struct f s =
  let pp s = try f s ++ fnl2 () with Failure "empty phrase" -> mt ()
  in
  let ppl (mp,sel) =
    push_visible mp [];
    let p = prlist_strict pp sel in
    (* for monolithic extraction, we try to simulate the unavailability
       of [MPfile] in names by artificially nesting these [MPfile] *)
    (if modular () then pop_visible ()); p
  in
  let p = prlist_strict ppl s in
  (if not (modular ()) then repeat (List.length s) pop_visible ());
  p

(* Print a whole implementation structure (.sml file body). *)
let pp_struct s = do_struct pp_structure_elem s

(* Small association table remembering the named module types seen so
   far, so that later [MTident] occurrences can be expanded inline. *)
let push_module_type, get_module_type =
  let env = ref [] in
  ((fun l mt -> env := (MPdot (top_visible_mp (), l), mt) :: !env),
   (fun kn -> List.assoc kn !env))
(* Peel the leading [MTfunsig] layers off a module type, returning the
   functor-argument signatures in order plus the innermost module type. *)
let rec collect_funsigs mt =
  match mt with
  | MTfunsig (mbid, arg, rest) ->
      let more, inner = collect_funsigs rest in
      ((mbid, arg) :: more, inner)
  | other -> ([], other)
(* Variant of [pp_specif] used for the .smi interface files: module
   types are expanded through [get_module_type] rather than referenced,
   and functor signatures are printed with explicit arguments. *)
let rec pp_specif' = function
  | (_,Spec (Sval _ as s)) -> pp_spec s
  | (l,Spec s) ->
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         hov 1 (str ("structure "^ren^" = struct ") ++ fnl () ++ pp_spec s) ++
         fnl () ++ str "end" ++ fnl () ++
         pp_alias_spec ren s
       with Not_found -> pp_spec s)
  | (l,Smodule mt) ->
      (* NB: [mt] shadows [Pp.mt]; the module type is rendered twice on
         purpose since printing side-effects the visibility tables. *)
      let args, mt' = collect_funsigs mt in
      let def = pp_module_type' [] mt' in
      let def' = pp_module_type' [] mt' in
      let name = pp_modname (MPdot (top_visible_mp (), l)) in
      let prefix = if args = [] then "structure " else "functor " in
      hov 1 (str prefix ++ name ++ fnl () ++ pp_mtargs args ++ str " = " ++ def) ++
      (try
         let ren = Common.check_duplicate (top_visible_mp ()) l in
         fnl () ++ hov 1 (str (prefix ^ ren) ++ pp_mtargs args ++ str " = " ++ def')
       with Not_found -> Pp.mt ())
  | (l,Smodtype mt) -> push_module_type l mt; Pp.mt ()

(* Print functor-argument signatures [(X : SIG)], one per line. *)
and pp_mtargs args =
  let pp_funsig (mbid, mt) =
    let typ = pp_module_type'' [] mt in
    let name = pp_modname (MPbound mbid) in
    str "(" ++ name ++ str ":" ++ typ ++ str ")" ++ fnl () in
  List.fold_left ( ++ ) (mt ()) (List.map pp_funsig args)
(* Print an (expanded) module type as a [struct ... end] skeleton for
   .smi files.  [MTident] is resolved through [get_module_type]; functor
   signatures must have been split off by [collect_funsigs] beforehand,
   hence the [failwith]. *)
and pp_module_type' params = function
  | MTident kn ->
      pp_module_type' [] (get_module_type kn)
  | MTfunsig (mbid, mt, mt') -> failwith "pp_module_type'"
  | MTsig (mp, sign) ->
      push_visible mp params;
      let l = map_succeed pp_specif' sign in
      pop_visible ();
      str "struct " ++ fnl () ++
      v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
      fnl () ++ str "end"
  | MTwith(mt,ML_With_type(idl,vl,typ)) ->
      let ids = pp_parameters (rename_tvars keywords vl) in
      let mp_mt = msid_of_mt mt in
      let l,idl' = list_sep_last idl in
      let mp_w =
        List.fold_left (fun mp l -> MPdot(mp,label_of_id l)) mp_mt idl'
      in
      let r = ConstRef (make_con mp_w empty_dirpath (label_of_id l)) in
      push_visible mp_mt [];
      let pp_w = str " with type " ++ ids ++ pp_global Type r in
      pop_visible();
      pp_module_type' [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
  | MTwith(mt,ML_With_module(idl,mp)) ->
      let mp_mt = msid_of_mt mt in
      let mp_w =
        List.fold_left (fun mp id -> MPdot(mp,label_of_id id)) mp_mt idl
      in
      push_visible mp_mt [];
      let pp_w = str " and " ++ pp_modname mp_w in
      pop_visible ();
      pp_module_type' [] mt ++ pp_w ++ str " = " ++ pp_modname mp

(* Same as [pp_module_type'] but rendered as [sig ... end], using the
   plain [pp_specif] for the elements (functor-argument position). *)
and pp_module_type'' params = function
  | MTident kn ->
      pp_module_type'' [] (get_module_type kn)
  | MTfunsig (mbid, mt, mt') -> failwith "pp_module_type''"
  | MTsig (mp, sign) ->
      push_visible mp params;
      let l = map_succeed pp_specif sign in
      pop_visible ();
      str "sig " ++ fnl () ++
      v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
      fnl () ++ str "end"
  | MTwith(mt,ML_With_type(idl,vl,typ)) ->
      let ids = pp_parameters (rename_tvars keywords vl) in
      let mp_mt = msid_of_mt mt in
      let l,idl' = list_sep_last idl in
      let mp_w =
        List.fold_left (fun mp l -> MPdot(mp,label_of_id l)) mp_mt idl'
      in
      let r = ConstRef (make_con mp_w empty_dirpath (label_of_id l)) in
      push_visible mp_mt [];
      let pp_w = str " with type " ++ ids ++ pp_global Type r in
      pop_visible();
      pp_module_type'' [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
  | MTwith(mt,ML_With_module(idl,mp)) ->
      let mp_mt = msid_of_mt mt in
      let mp_w =
        List.fold_left (fun mp id -> MPdot(mp,label_of_id id)) mp_mt idl
      in
      push_visible mp_mt [];
      let pp_w = str " and " ++ pp_modname mp_w in
      pop_visible ();
      pp_module_type'' [] mt ++ pp_w ++ str " = " ++ pp_modname mp
(* Print a whole signature (.smi file body). *)
let pp_signature s = do_struct pp_specif' s

(* Toplevel declaration printer: an "empty phrase" becomes an empty
   document instead of escaping as an exception. *)
let pp_decl d = try pp_decl d with Failure "empty phrase" -> mt ()

(* Language-description record registered for the SML target. *)
let sml_descr = {
  keywords = keywords;
  file_suffix = ".sml";
  preamble = preamble;
  pp_struct = pp_struct;
  sig_suffix = Some ".smi";
  sig_preamble = sig_preamble;
  pp_sig = pp_signature;
  pp_decl = pp_decl;
}
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/plugins/extraction/sml.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
s Some utility functions.
Beware of the side-effects of [pp_global] and [pp_modname].
They are used to update table of content for modules. Many [let]
below should not be altered since they force evaluation order.
s Pretty-printing of types. [par] is a boolean indicating whether parentheses
are needed or not.
An [MLdummy] may be applied, but I don't really care.
Third, can this match be printed as [if ... then ... else] ?
Otherwise, standard match
Can a match be printed as a mere record projection ?
Hack Extract Inductive prod
s names of the functions ([ids]) are already pushed in [env],
and passed here just for convenience.
s Pretty-printing of [Dfix]
s Pretty-printing of inductive types declaration.
s Pretty-printing of a declaration.
virtual printing of the type, in order to have a correct mli later
for monolithic extraction, we try to simulate the unavailability
of [MPfile] in names by artificially nesting these [MPfile] | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
s Production of Sml syntax .
open Pp
open Util
open Names
open Nameops
open Libnames
open Table
open Miniml
open Mlutil
open Modutil
open Common
open Declarations
let pp_tvar id =
let s = string_of_id id in
if String.length s < 2 || s.[1]<>'\''
then str ("'"^s)
else str ("' "^s)
let pp_abst = function
| [] -> mt ()
| l ->
str "fn " ++ prlist_with_sep (fun () -> str " => fn ") pr_id l ++
str " =>" ++ spc ()
let pp_parameters l =
(pp_boxed_tuple pp_tvar l ++ space_if (l<>[]))
let pp_string_parameters l =
(pp_boxed_tuple str l ++ space_if (l<>[]))
let pp_letin pat def body =
let fstline = str "let val " ++ pat ++ str " =" ++ spc () ++ def in
hv 0 (hv 0 (hov 2 fstline ++ spc () ++ str "in") ++ spc () ++ hov 0 body ++ spc () ++ str "end")
s Sml renaming issues .
let keywords =
List.fold_right (fun s -> Idset.add (id_of_string s))
[ "abstype"; "and"; "andalso"; "as"; "case"; "datatype"; "do";
"else"; "end"; "exception"; "div"; "fn"; "fun"; "bandle"; "if";
"in"; "infix"; "infixr"; "let"; "local"; "nonfix"; "of";
"op"; "open"; "orelse"; "raise"; "rec"; "sig"; "then";
"type"; "val"; "with"; "withtype"; "while"; "o"; "_"; "coq___" ]
Idset.empty
let pp_open mp = str ("open "^ string_of_modfile mp ^"\n")
let preamble _ used_modules usf =
prlist pp_open used_modules ++
(if used_modules = [] then mt () else fnl ()) ++
(if usf.tdummy || usf.tunknown then str "type coq___ = unit\n" else mt()) ++
(if usf.mldummy then
str "val coq___ = ()\n"
else mt ()) ++
(if usf.tdummy || usf.tunknown || usf.mldummy then fnl () else mt ())
let sig_preamble _ used_modules usf =
str "_require \"basis.smi\"\n\n" ++
prlist pp_open used_modules ++
(if used_modules = [] then mt () else fnl ()) ++
(if usf.tdummy || usf.tunknown then str "type coq___ = unit\n" else mt())
s The pretty - printer for Sml syntax
let str_global k r =
if is_inline_custom r then find_custom r else Common.pp_global k r
let pp_global k r = str (str_global k r)
let pp_modname mp = str (Common.pp_module mp)
let is_infix r =
is_inline_custom r &&
(let s = find_custom r in
let l = String.length s in
l >= 2 && s.[0] = '(' && s.[l-1] = ')')
let get_infix r =
let s = find_custom r in
String.sub s 1 (String.length s - 2)
let get_ind = function
| IndRef _ as r -> r
| ConstructRef (ind,_) -> IndRef ind
| _ -> assert false
let pp_one_field r i = function
| Some r -> pp_global Term r
| None -> pp_global Type (get_ind r) ++ str "coq___" ++ int i
let pp_field r fields i = pp_one_field r i (List.nth fields i)
let pp_fields r fields = list_map_i (pp_one_field r) 0 fields
let rec pp_type par vl t =
let rec pp_rec par = function
| Tmeta _ | Tvar' _ | Taxiom -> assert false
| Tvar i -> (try pp_tvar (List.nth vl (pred i))
with e when Errors.noncritical e ->
(str "'a" ++ int i))
| Tglob (r,[a1;a2]) when is_infix r ->
pp_par par (pp_rec true a1 ++ str (get_infix r) ++ pp_rec true a2)
| Tglob (r,[]) -> pp_global Type r
| Tglob (IndRef(kn,0),l)
when not (keep_singleton ()) && kn = mk_ind "Coq.Init.Specif" "sig" ->
pp_tuple_light pp_rec l
| Tglob (r,l) ->
pp_tuple_light pp_rec l ++ spc () ++ pp_global Type r
| Tarr (t1,t2) ->
pp_par par
(pp_rec true t1 ++ spc () ++ str "->" ++ spc () ++ pp_rec false t2)
| Tdummy _ -> str "coq___"
| Tunknown -> str "coq___"
in
hov 0 (pp_rec par t)
s Pretty - printing of expressions . [ par ] indicates whether
parentheses are needed or not . [ env ] is the list of names for the
de Bruijn variables . [ args ] is the list of collected arguments
( already pretty - printed ) .
parentheses are needed or not. [env] is the list of names for the
de Bruijn variables. [args] is the list of collected arguments
(already pretty-printed). *)
let is_bool_patt p s =
try
let r = match p with
| Pusual r -> r
| Pcons (r,[]) -> r
| _ -> raise Not_found
in
find_custom r = s
with Not_found -> false
let is_ifthenelse = function
| [|([],p1,_);([],p2,_)|] -> is_bool_patt p1 "true" && is_bool_patt p2 "false"
| _ -> false
let expr_needs_par = function
| MLlam _ -> true
| MLcase (_,_,[|_|]) -> false
| MLcase (_,_,pv) -> not (is_ifthenelse pv)
| _ -> false
let rec pp_expr par env args =
let apply st = pp_apply st par args
and apply2 st = pp_apply2 st par args in
function
| MLrel n ->
let id = get_db_name n env in apply (pr_id id)
| MLapp (f,args') ->
let stl = List.map (pp_expr true env []) args' in
pp_expr par env (stl @ args) f
| MLlam _ as a ->
let fl,a' = collect_lams a in
let fl = List.map id_of_mlid fl in
let fl,env' = push_vars fl env in
let st = pp_abst (List.rev fl) ++ pp_expr false env' [] a' in
apply2 st
| MLletin (id,a1,a2) ->
let i,env' = push_vars [id_of_mlid id] env in
let pp_id = pr_id (List.hd i)
and pp_a1 = pp_expr false env [] a1
and pp_a2 = pp_expr (not par && expr_needs_par a2) env' [] a2 in
hv 0 (apply2 (pp_letin pp_id pp_a1 pp_a2))
| MLglob r ->
(try
let args = list_skipn (projection_arity r) args in
let record = List.hd args in
pp_apply (pp_apply (str "#" ++ pp_global Term r) par [record]) par (List.tl args)
with e when Errors.noncritical e -> apply (pp_global Term r))
| MLfix (i,ids,defs) ->
let ids',env' = push_vars (List.rev (Array.to_list ids)) env in
pp_fix par env' i (Array.of_list (List.rev ids'),defs) args
| MLexn s ->
An [ MLexn ] may be applied , but I do n't really care .
pp_par par (str ("raise (Fail \""^s^"\")") ++ spc () ++ str ("(* "^s^" *)"))
| MLdummy ->
| MLmagic a ->
pp_apply (str "Unsafe.cast") par (pp_expr true env [] a :: args)
| MLaxiom ->
pp_par par (str "failwith \"AXIOM TO BE REALIZED\"")
| MLcons (_,r,a) as c ->
assert (args=[]);
begin match a with
| _ when is_native_char c -> pp_native_char c
| [a1;a2] when is_infix r ->
let pp = pp_expr true env [] in
pp_par par (pp a1 ++ str (get_infix r) ++ pp a2)
| _ when is_coinductive r ->
let ne = (a<>[]) in
let tuple = space_if ne ++ pp_tuple (pp_expr true env []) a in
pp_par par (str "lazy " ++ pp_par ne (pp_global Cons r ++ tuple))
| [] -> pp_global Cons r
| _ ->
let fds = get_record_fields r in
if fds <> [] then
pp_record_pat (pp_fields r fds, List.map (pp_expr true env []) a)
else
let tuple = pp_tuple (pp_expr true env []) a in
hack Extract Inductive prod
then tuple
else pp_par par (pp_global Cons r ++ spc () ++ tuple)
end
| MLtuple l ->
assert (args = []);
pp_boxed_tuple (pp_expr true env []) l
| MLcase (_, t, pv) when is_custom_match pv ->
if not (is_regular_match pv) then
error "Cannot mix yet user-given match and general patterns.";
let mkfun (ids,_,e) =
if ids <> [] then named_lams (List.rev ids) e
else dummy_lams (ast_lift 1 e) 1
in
let pp_branch tr = pp_expr true env [] (mkfun tr) ++ fnl () in
let inner =
str (find_custom_match pv) ++ fnl () ++
prvect pp_branch pv ++
pp_expr true env [] t
in
apply2 (hov 2 inner)
| MLcase (typ, t, pv) ->
let head =
if not (is_coinductive_type typ) then pp_expr false env [] t
else (str "Lazy.force" ++ spc () ++ pp_expr true env [] t)
in
First , can this match be printed as a mere record projection ?
(try pp_record_proj par env typ t pv args
with Impossible ->
Second , can this match be printed as a let - in ?
if Array.length pv = 1 then
let s1,s2 = pp_one_pat env pv.(0) in
hv 0 (apply2 (pp_letin s1 head s2))
else
(try apply2 (pp_ifthenelse env head pv)
with Not_found ->
apply2
(v 0 (str "case " ++ head ++ str " of" ++ fnl () ++
pp_pat env pv))))
and pp_record_proj par env typ t pv args =
let fields = record_fields_of_type typ in
if fields = [] then raise Impossible;
if Array.length pv <> 1 then raise Impossible;
if has_deep_pattern pv then raise Impossible;
let (ids,pat,body) = pv.(0) in
let n = List.length ids in
let no_patvar a = not (List.exists (ast_occurs_itvl 1 n) a) in
let rel_i,a = match body with
| MLrel i when i <= n -> i,[]
| MLapp(MLrel i, a) when i<=n && no_patvar a -> i,a
| _ -> raise Impossible
in
let rec lookup_rel i idx = function
| Prel j :: l -> if i = j then idx else lookup_rel i (idx+1) l
| Pwild :: l -> lookup_rel i (idx+1) l
| _ -> raise Impossible
in
let r,idx = match pat with
| Pusual r -> r, n-rel_i
| Pcons (r,l) -> r, lookup_rel rel_i 0 l
| _ -> raise Impossible
in
if is_infix r then raise Impossible;
let env' = snd (push_vars (List.rev_map id_of_mlid ids) env) in
let pp_args = (List.map (pp_expr true env' []) a) @ args in
let pp_head = str "#" ++ pp_field r fields idx ++ str " " ++ pp_expr true env [] t
in
pp_apply pp_head par pp_args
and pp_record_pat (fields, args) =
str "{ " ++
prlist_with_sep (fun () -> str "," ++ spc ())
(fun (f,a) -> f ++ str " =" ++ spc () ++ a)
(List.combine fields args) ++
str " }"
and pp_cons_pat r ppl =
if is_infix r && List.length ppl = 2 then
List.hd ppl ++ str (get_infix r) ++ List.hd (List.tl ppl)
else
let fields = get_record_fields r in
if fields <> [] then pp_record_pat (pp_fields r fields, ppl)
else if str_global Cons r = "" then
else
pp_global Cons r ++ space_if (ppl<>[]) ++ pp_boxed_tuple identity ppl
and pp_gen_pat ids env = function
| Pcons (r, l) -> pp_cons_pat r (List.map (pp_gen_pat ids env) l)
| Pusual r -> pp_cons_pat r (List.map pr_id ids)
| Ptuple l -> pp_boxed_tuple (pp_gen_pat ids env) l
| Pwild -> str "_"
| Prel n -> pr_id (get_db_name n env)
and pp_ifthenelse env expr pv = match pv with
| [|([],tru,the);([],fal,els)|] when
(is_bool_patt tru "true") && (is_bool_patt fal "false")
->
hv 0 (hov 2 (str "if " ++ expr) ++ spc () ++
hov 2 (str "then " ++
hov 2 (pp_expr (expr_needs_par the) env [] the)) ++ spc () ++
hov 2 (str "else " ++
hov 2 (pp_expr (expr_needs_par els) env [] els)))
| _ -> raise Not_found
and pp_one_pat env (ids,p,t) =
let ids',env' = push_vars (List.rev_map id_of_mlid ids) env in
pp_gen_pat (List.rev ids') env' p,
pp_expr (expr_needs_par t) env' [] t
and pp_pat env pv =
prvecti
(fun i x ->
let s1,s2 = pp_one_pat env x in
hv 2 (hov 4 ((if i = 0 then str " " else str "| ") ++ s1 ++ str " =>") ++ spc () ++ hov 2 s2) ++
if i = Array.length pv - 1 then mt () else fnl ())
pv
and pp_function env t =
let bl,t' = collect_lams t in
let bl,env' = push_vars (List.map id_of_mlid bl) env in
(List.length bl <> 0, pr_binding (List.rev bl) ++
str " =" ++ fnl () ++ str " " ++
hov 2 (pp_expr false env' [] t'))
and pp_fix par env i (ids,bl) args =
pp_par par
(v 0 (str "let fun " ++
prvect_with_sep
(fun () -> fnl () ++ str "and ")
(fun (fi,ti) -> pr_id fi ++ snd (pp_function env ti))
(array_map2 (fun id b -> (id,b)) ids bl) ++
fnl () ++
hov 2 (str "in " ++ pp_apply (pr_id ids.(i)) false args ++ str " end ")))
let pp_val e typ =
hov 4 (str "(** val " ++ e ++ str " :" ++ spc () ++ pp_type false [] typ ++
str " **)") ++ fnl2 ()
let pp_Dfix (rv,c,t) =
let names = Array.map
(fun r -> if is_inline_custom r then mt () else pp_global Term r) rv
in
let rec pp init i =
if i >= Array.length rv then
(if init then failwith "empty phrase" else mt ())
else
let void = is_inline_custom rv.(i) ||
(not (is_custom rv.(i)) && c.(i) = MLexn "UNUSED")
in
if void then pp init (i+1)
else
let (isfun, def) =
if is_custom rv.(i) then (false, str " = " ++ str (find_custom rv.(i)))
else pp_function (empty_env ()) c.(i)
in
(if init then mt () else fnl2 ()) ++
pp_val names.(i) t.(i) ++
str
(if init then
if isfun then "fun "
else "val "
else "and ") ++ names.(i) ++ def ++
pp false (i+1)
in pp true 0
let pp_equiv param_list name = function
| NoEquiv, _ -> mt (), false
| Equiv kn, i ->
str " = datatype " ++ pp_parameters param_list ++ pp_global Type (IndRef (mind_of_kn kn,i)), true
| RenEquiv ren, _ ->
str " = datatype " ++ pp_parameters param_list ++ str (ren^".") ++ name, true
let pp_comment s = str "(* " ++ s ++ str " *)"
let pp_one_ind prefix ip_equiv pl name cnames ctyps =
let pl = rename_tvars keywords pl in
let pp_constructor i typs =
(if i=0 then mt () else fnl ()) ++
hov 3 ((if i = 0 then str " " else str "| ") ++ cnames.(i) ++
(if typs = [] then mt () else str " of ") ++
prlist_with_sep
(fun () -> spc () ++ str "* ") (pp_type true pl) typs)
in
let pp_eq, is_rep = pp_equiv pl name ip_equiv in
pp_parameters pl ++ str prefix ++ name ++
pp_eq ++ if is_rep then mt () else str " =" ++
if Array.length ctyps = 0 then str " unit (* empty inductive *)"
else fnl () ++ v 0 (prvecti pp_constructor ctyps)
let pp_logical_ind packet =
pp_comment (pr_id packet.ip_typename ++ str " : logical inductive") ++
fnl () ++
pp_comment (str "with constructors : " ++
prvect_with_sep spc pr_id packet.ip_consnames) ++
fnl ()
let pp_singleton kn packet =
let name = pp_global Type (IndRef (kn,0)) in
let l = rename_tvars keywords packet.ip_vars in
hov 2 (str "type " ++ pp_parameters l ++ name ++ str " =" ++ spc () ++
pp_type false l (List.hd packet.ip_types.(0)) ++ fnl () ++
pp_comment (str "singleton inductive, whose constructor was " ++
pr_id packet.ip_consnames.(0)))
let pp_record kn fields ip_equiv packet =
let ind = IndRef (kn,0) in
let name = pp_global Type ind in
let fieldnames = pp_fields ind fields in
let l = List.combine fieldnames packet.ip_types.(0) in
let pl = rename_tvars keywords packet.ip_vars in
let pp_eq, is_rep = pp_equiv pl name ip_equiv in
str "type " ++ pp_parameters pl ++ name ++
pp_eq ++ if is_rep then mt () else str " = { "++
hov 0 (prlist_with_sep (fun () -> str "," ++ spc ())
(fun (p,t) -> p ++ str " : " ++ pp_type true pl t) l)
++ str " }"
let pp_coind pl name =
let pl = rename_tvars keywords pl in
pp_parameters pl ++ name ++ str " = " ++
pp_parameters pl ++ str "coq___" ++ name ++ str " Lazy.t" ++
fnl() ++ str "and "
let pp_ind co kn ind =
let prefix = if co then "coq___" else "" in
let some = ref false in
let init= ref (str "datatype ") in
let names =
Array.mapi (fun i p -> if p.ip_logical then mt () else
pp_global Type (IndRef (kn,i)))
ind.ind_packets
in
let cnames =
Array.mapi
(fun i p -> if p.ip_logical then [||] else
Array.mapi (fun j _ -> pp_global Cons (ConstructRef ((kn,i),j+1)))
p.ip_types)
ind.ind_packets
in
let rec pp i =
if i >= Array.length ind.ind_packets then mt ()
else
let ip = (kn,i) in
let ip_equiv = ind.ind_equiv, i in
let p = ind.ind_packets.(i) in
if is_custom (IndRef ip) then pp (i+1)
else begin
some := true;
if p.ip_logical then pp_logical_ind p ++ pp (i+1)
else
let s = !init in
begin
init := (fnl () ++ str "and ");
s ++
(if co then pp_coind p.ip_vars names.(i) else mt ()) ++
pp_one_ind
prefix ip_equiv p.ip_vars names.(i) cnames.(i) p.ip_types ++
pp (i+1)
end
end
in
let st = pp 0 in if !some then st else failwith "empty phrase"
let pp_mind kn i =
match i.ind_kind with
| Singleton -> pp_singleton kn i.ind_packets.(0)
| Coinductive -> pp_ind true kn i
| Record fields -> pp_record kn fields (i.ind_equiv,0) i.ind_packets.(0)
| Standard -> pp_ind false kn i
let pp_decl = function
| Dtype (r,_,_) when is_inline_custom r -> failwith "empty phrase"
| Dterm (r,_,_) when is_inline_custom r -> failwith "empty phrase"
| Dind (kn,i) -> pp_mind kn i
| Dtype (r, l, t) ->
let name = pp_global Type r in
let l = rename_tvars keywords l in
let ids, def =
try
let ids,s = find_type_custom r in
pp_string_parameters ids, str "=" ++ spc () ++ str s
with Not_found ->
pp_parameters l,
if t = Taxiom then str "(* AXIOM TO BE REALIZED *)"
else str "=" ++ spc () ++ pp_type false l t
in
hov 2 (str "type " ++ ids ++ name ++ spc () ++ def)
| Dterm (r, a, t) ->
let (isfun, def) =
if is_custom r then (false, str (" = " ^ find_custom r))
else if is_projection r then
(false, (prvect str (Array.make (projection_arity r) " _")) ++
str " : " ++ pp_type false [] t ++ str " = fn x => #")
else pp_function (empty_env ()) a
in
let name = pp_global Term r in
let postdef = if is_projection r then name ++ str " x" else mt () in
pp_val name t ++ hov 0 (str (if isfun then "fun " else "val ") ++ name ++ def ++ postdef)
| Dfix (rv,defs,typs) ->
pp_Dfix (rv,defs,typs)
let pp_alias_decl ren = function
| Dind (kn,i) -> pp_mind kn { i with ind_equiv = RenEquiv ren }
| Dtype (r, l, _) ->
let name = pp_global Type r in
let l = rename_tvars keywords l in
let ids = pp_parameters l in
hov 2 (str "type " ++ ids ++ name ++ str " =" ++ spc () ++ ids ++
str (ren^".") ++ name)
| Dterm (r, a, t) ->
let name = pp_global Term r in
hov 2 (str "val " ++ name ++ str (" = "^ren^".") ++ name)
| Dfix (rv, _, _) ->
prvecti (fun i r -> if is_inline_custom r then mt () else
let name = pp_global Term r in
hov 2 (str "val " ++ name ++ str (" = "^ren^".") ++ name) ++
fnl ())
rv
let pp_spec = function
| Sval (r,_) when is_inline_custom r -> failwith "empty phrase"
| Stype (r,_,_) when is_inline_custom r -> failwith "empty phrase"
| Sind (kn,i) -> pp_mind kn i
| Sval (r,t) ->
let def = pp_type false [] t in
let name = pp_global Term r in
hov 2 (str "val " ++ name ++ str " :" ++ spc () ++ def)
| Stype (r,vl,ot) ->
let name = pp_global Type r in
let l = rename_tvars keywords vl in
let ids, def =
try
let ids, s = find_type_custom r in
pp_string_parameters ids, str "= " ++ str s
with Not_found ->
let ids = pp_parameters l in
match ot with
| None -> ids, mt ()
| Some Taxiom -> ids, str "(* AXIOM TO BE REALIZED *)"
| Some t -> ids, str "=" ++ spc () ++ pp_type false l t
in
hov 2 (str "type " ++ ids ++ name ++ spc () ++ def)
let pp_alias_spec ren = function
| Sind (kn,i) -> pp_mind kn { i with ind_equiv = RenEquiv ren }
| Stype (r,l,_) ->
let name = pp_global Type r in
let l = rename_tvars keywords l in
let ids = pp_parameters l in
hov 2 (str "datatype " ++ ids ++ name ++ str " =" ++ spc () ++ ids ++
str (ren^".") ++ name)
| Sval _ -> assert false
let rec pp_specif = function
| (_,Spec (Sval _ as s)) -> pp_spec s
| (l,Spec s) ->
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
hov 1 (str ("structure "^ren^" = struct ") ++ fnl () ++ pp_spec s) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_spec ren s
with Not_found -> pp_spec s)
| (l,Smodule mt) ->
let def = pp_module_type [] mt in
let def' = pp_module_type [] mt in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "signature " ++ name ++ str " = " ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ hov 1 (str ("sig "^ren^" = ") ++ fnl () ++ def')
with Not_found -> Pp.mt ())
| (l,Smodtype mt) ->
let def = pp_module_type [] mt in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "signature " ++ name ++ str " = " ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("signature "^ren^" = ") ++ name
with Not_found -> Pp.mt ())
and pp_module_type params = function
| MTident kn ->
pp_modname kn
| MTfunsig (mbid, mt, mt') ->
let typ = pp_module_type [] mt in
let name = pp_modname (MPbound mbid) in
let def = pp_module_type (MPbound mbid :: params) mt' in
str "functor (" ++ name ++ str ":" ++ typ ++ str ") ->" ++ fnl () ++ def
| MTsig (mp, sign) ->
push_visible mp params;
let l = map_succeed pp_specif sign in
pop_visible ();
str "sig " ++ fnl () ++
v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
fnl () ++ str "end"
| MTwith(mt,ML_With_type(idl,vl,typ)) ->
let ids = pp_parameters (rename_tvars keywords vl) in
let mp_mt = msid_of_mt mt in
let l,idl' = list_sep_last idl in
let mp_w =
List.fold_left (fun mp l -> MPdot(mp,label_of_id l)) mp_mt idl'
in
let r = ConstRef (make_con mp_w empty_dirpath (label_of_id l)) in
push_visible mp_mt [];
let pp_w = str " with type " ++ ids ++ pp_global Type r in
pop_visible();
pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
| MTwith(mt,ML_With_module(idl,mp)) ->
let mp_mt = msid_of_mt mt in
let mp_w =
List.fold_left (fun mp id -> MPdot(mp,label_of_id id)) mp_mt idl
in
push_visible mp_mt [];
let pp_w = str " and " ++ pp_modname mp_w in
pop_visible ();
pp_module_type [] mt ++ pp_w ++ str " = " ++ pp_modname mp
let is_short = function MEident _ | MEapply _ -> true | _ -> false
let rec collect_functors = function
| MEfunctor (mbid, mt, me) ->
let args, me' = collect_functors me in
((mbid, mt) :: args, me')
| me -> [], me
let rec pp_structure_elem = function
| (l,SEdecl d) ->
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
hov 1 (str ("structure "^ren^" = struct ") ++ fnl () ++ pp_decl d) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_decl ren d
with Not_found -> pp_decl d)
| (l,SEmodule m) ->
let typ =
if Common.get_phase () = Pre then
str ": " ++ pp_module_type [] m.ml_mod_type
else mt ()
in
let args, me = collect_functors m.ml_mod_expr in
let def = pp_module_expr [] me in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
let prefix = if args = [] then "structure " else "functor " in
hov 1
(str prefix ++ name ++ fnl () ++ pp_meargs args ++ typ ++ str " = " ++
(if is_short me then mt () else fnl ()) ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("structure "^ren^" = ") ++ name
with Not_found -> mt ())
| (l,SEmodtype m) ->
let def = pp_module_type [] m in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
hov 1 (str "signature " ++ name ++ str " = " ++ fnl () ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ str ("signature "^ren^" = ") ++ name
with Not_found -> mt ())
and pp_meargs args =
let pp_functor (mbid, mt) =
let name = pp_modname (MPbound mbid) in
let typ = pp_module_type [] mt in
str "(" ++ name ++ str ":" ++ typ ++ str ")" ++ fnl () in
List.fold_left ( ++ ) (mt ())
(List.map pp_functor args)
and pp_module_expr params = function
| MEident mp -> pp_modname mp
| MEapply (me, me') ->
pp_module_expr [] me ++ str "(" ++ pp_module_expr [] me' ++ str ")"
| MEfunctor (mbid, mt, me) -> failwith "pp_module_expr"
| MEstruct (mp, sel) ->
push_visible mp params;
let l = map_succeed pp_structure_elem sel in
pop_visible ();
str "struct " ++ fnl () ++
v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
fnl () ++ str "end"
let do_struct f s =
let pp s = try f s ++ fnl2 () with Failure "empty phrase" -> mt ()
in
let ppl (mp,sel) =
push_visible mp [];
let p = prlist_strict pp sel in
(if modular () then pop_visible ()); p
in
let p = prlist_strict ppl s in
(if not (modular ()) then repeat (List.length s) pop_visible ());
p
let pp_struct s = do_struct pp_structure_elem s
let push_module_type, get_module_type =
let env = ref [] in
((fun l mt -> env := (MPdot (top_visible_mp (), l), mt) :: !env),
(fun kn -> List.assoc kn !env))
let rec collect_funsigs = function
| MTfunsig (mbid, mt, mt') ->
let (args, modtype) = collect_funsigs mt' in
((mbid, mt) :: args, modtype)
| mt -> ([], mt)
let rec pp_specif' = function
| (_,Spec (Sval _ as s)) -> pp_spec s
| (l,Spec s) ->
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
hov 1 (str ("structure "^ren^" = struct ") ++ fnl () ++ pp_spec s) ++
fnl () ++ str "end" ++ fnl () ++
pp_alias_spec ren s
with Not_found -> pp_spec s)
| (l,Smodule mt) ->
let args, mt' = collect_funsigs mt in
let def = pp_module_type' [] mt' in
let def' = pp_module_type' [] mt' in
let name = pp_modname (MPdot (top_visible_mp (), l)) in
let prefix = if args = [] then "structure " else "functor " in
hov 1 (str prefix ++ name ++ fnl () ++ pp_mtargs args ++ str " = " ++ def) ++
(try
let ren = Common.check_duplicate (top_visible_mp ()) l in
fnl () ++ hov 1 (str (prefix ^ ren) ++ pp_mtargs args ++ str " = " ++ def')
with Not_found -> Pp.mt ())
| (l,Smodtype mt) -> push_module_type l mt; Pp.mt ()
and pp_mtargs args =
let pp_funsig (mbid, mt) =
let typ = pp_module_type'' [] mt in
let name = pp_modname (MPbound mbid) in
str "(" ++ name ++ str ":" ++ typ ++ str ")" ++ fnl () in
List.fold_left ( ++ ) (mt ()) (List.map pp_funsig args)
and pp_module_type' params = function
| MTident kn ->
pp_module_type' [] (get_module_type kn)
| MTfunsig (mbid, mt, mt') -> failwith "pp_module_type'"
| MTsig (mp, sign) ->
push_visible mp params;
let l = map_succeed pp_specif' sign in
pop_visible ();
str "struct " ++ fnl () ++
v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
fnl () ++ str "end"
| MTwith(mt,ML_With_type(idl,vl,typ)) ->
let ids = pp_parameters (rename_tvars keywords vl) in
let mp_mt = msid_of_mt mt in
let l,idl' = list_sep_last idl in
let mp_w =
List.fold_left (fun mp l -> MPdot(mp,label_of_id l)) mp_mt idl'
in
let r = ConstRef (make_con mp_w empty_dirpath (label_of_id l)) in
push_visible mp_mt [];
let pp_w = str " with type " ++ ids ++ pp_global Type r in
pop_visible();
pp_module_type' [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
| MTwith(mt,ML_With_module(idl,mp)) ->
let mp_mt = msid_of_mt mt in
let mp_w =
List.fold_left (fun mp id -> MPdot(mp,label_of_id id)) mp_mt idl
in
push_visible mp_mt [];
let pp_w = str " and " ++ pp_modname mp_w in
pop_visible ();
pp_module_type' [] mt ++ pp_w ++ str " = " ++ pp_modname mp
and pp_module_type'' params = function
| MTident kn ->
pp_module_type'' [] (get_module_type kn)
| MTfunsig (mbid, mt, mt') -> failwith "pp_module_type''"
| MTsig (mp, sign) ->
push_visible mp params;
let l = map_succeed pp_specif sign in
pop_visible ();
str "sig " ++ fnl () ++
v 1 (str " " ++ prlist_with_sep fnl2 identity l) ++
fnl () ++ str "end"
| MTwith(mt,ML_With_type(idl,vl,typ)) ->
let ids = pp_parameters (rename_tvars keywords vl) in
let mp_mt = msid_of_mt mt in
let l,idl' = list_sep_last idl in
let mp_w =
List.fold_left (fun mp l -> MPdot(mp,label_of_id l)) mp_mt idl'
in
let r = ConstRef (make_con mp_w empty_dirpath (label_of_id l)) in
push_visible mp_mt [];
let pp_w = str " with type " ++ ids ++ pp_global Type r in
pop_visible();
pp_module_type'' [] mt ++ pp_w ++ str " = " ++ pp_type false vl typ
| MTwith(mt,ML_With_module(idl,mp)) ->
let mp_mt = msid_of_mt mt in
let mp_w =
List.fold_left (fun mp id -> MPdot(mp,label_of_id id)) mp_mt idl
in
push_visible mp_mt [];
let pp_w = str " and " ++ pp_modname mp_w in
pop_visible ();
pp_module_type'' [] mt ++ pp_w ++ str " = " ++ pp_modname mp
let pp_signature s = do_struct pp_specif' s
let pp_decl d = try pp_decl d with Failure "empty phrase" -> mt ()
let sml_descr = {
keywords = keywords;
file_suffix = ".sml";
preamble = preamble;
pp_struct = pp_struct;
sig_suffix = Some ".smi";
sig_preamble = sig_preamble;
pp_sig = pp_signature;
pp_decl = pp_decl;
}
|
be18968f134d8dc7af51925f66be0b60a880e3e41194d5853ff8bd97b2ce3161 | kthielen/stlcc | Check.hs |
module STLC.Check where
import STLC.Term
import STLC.Type
import Util.Annotated
checkDefinitions :: (Annotation a, TypeEnv tenv a) => tenv -> Definitions a -> b -> IO b
checkDefinitions tenv defs result = chooseErr (concatMap (checkDefinition tenv) defs) where
chooseErr [] = return result
chooseErr msgs = do
printMsgs msgs
error "Compile failure, cannot continue."
printMsgs [] = return ()
printMsgs (m:msgs) = do
putStrLn m;
printMsgs msgs
checkDefinition :: (Annotation a, TypeEnv tenv a) => tenv -> Definition a -> [String]
checkDefinition tenv (_, [], ty, e) = check tenv e ++ checkEqTy tenv e ty
checkDefinition tenv (_, ans, fty, e) = check tenv' e ++ checkEqTy tenv' e (fnRTy fty) where
tenv' = pushtys tenv (zip ans (fnArgTys fty))
check :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> [String]
check tenv (Prim _ _) = []
check tenv (Var _ _) = []
check tenv (Roll p t e) = check tenv e ++ checkRoll p tenv t e
check tenv (Unroll p t e) = check tenv e ++ checkUnroll p tenv t e
check tenv (Let _ x e b) = check tenv e ++ check (pushty tenv x (typeof tenv e)) b
check tenv (Variant _ _ e _) = check tenv e
check tenv (VCase _ e cs) = check tenv e ++ concat [check (pushty tenv v (vtype vty ctor)) b | (ctor, v, b) <- cs] where vty = typeof tenv e
check tenv (Record _ cs) = concat [check tenv e | (_, e) <- cs]
check tenv (RProj _ e lbl) = check tenv e
check tenv (Pack _ e _) = check tenv e
check tenv (Array _ es) = concatMap (check tenv) es ++ checkEqTys tenv es
check tenv (ArrAlloc _ _ e) = check tenv e ++ checkEqTy tenv e (TPrim nullAnnotation "int")
check tenv (ArrElem _ ae ie) = check tenv ae ++ check tenv ie ++ checkArrayTy tenv ae ++ checkEqTy tenv ie (TPrim nullAnnotation "int")
check tenv (App p fe aes) = check tenv fe ++ concatMap (check tenv) aes ++ checkFnApp p tenv fe aes
check tenv (Unpack p pe v vtn b) = check tenv pe ++ check (pushty tenv v (exname (typeof tenv pe) vtn)) b ++ checkUnpack p tenv pe v vtn b
checkRoll :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Ty a -> Term a -> [String]
checkRoll p tenv t e = []
checkUnroll :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Ty a -> Term a -> [String]
checkUnroll p tenv t e = []
checkEqTys :: (Annotation a, TypeEnv tenv a) => tenv -> [Term a] -> [String]
checkEqTys _ [] = []
checkEqTys _ [_] = []
checkEqTys tenv (e:e':es) = checkEqTy tenv e' (typeof tenv e) ++ checkEqTys tenv (e':es)
checkEqTy :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> Ty a -> [String]
checkEqTy tenv e ty = checkExprTyP tenv e ((==) ty) ("Expected type " ++ show ty)
checkArrayTy :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> [String]
checkArrayTy tenv e = checkExprTyP tenv e isArrayTy "Expected array type"
checkFnApp :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Term a -> [Term a] -> [String]
checkFnApp p tenv fe aes = chooseErrPath (isFNTy fty) (length aes == length argTys) where
fty = typeof tenv fe
argTys = fnArgTys fty
chooseErrPath False _ = ["Expected function type\n actual type: " ++ show fty ++ "\n at: " ++ describe (annotation fe)]
chooseErrPath True False = ["Argument length mismatch\n expected " ++ show (length argTys) ++ " arguments but received " ++ show (length aes)]
chooseErrPath True True = concat [checkEqTy tenv e ety | (e, ety) <- zip aes argTys]
checkUnpack :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Term a -> String -> String -> Term a -> [String]
checkUnpack p tenv pe v vtn b = chooseErrPath (isExType pety) (not (elem (TVar p vtn) (map (TVar p) (tyFV bty)))) where
pety = typeof tenv pe
bty = typeof (pushty tenv v (exname (typeof tenv pe) vtn)) b
chooseErrPath False _ = ["Expected existential type to unpack\n actual type: " ++ show pety ++ "\n at: " ++ describe (annotation pe)]
chooseErrPath True False = ["Existential type variable cannot escape the scope of an unpack\n with type: " ++ show bty ++ "\n at: " ++ describe (annotation b)]
chooseErrPath True True = []
checkExprTyP :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> (Ty a -> Bool) -> String -> [String]
checkExprTyP tenv e p msg | not (p ety) = [errMsg] where
ety = typeof tenv e
errMsg = msg ++ "\n actual type: " ++ show ety ++ "\n at: " ++ describe (annotation e)
checkExprTyP _ _ _ _ = []
| null | https://raw.githubusercontent.com/kthielen/stlcc/369492daad6498a93c00f5924a99ceb65b5f1062/STLC/Check.hs | haskell |
module STLC.Check where
import STLC.Term
import STLC.Type
import Util.Annotated
checkDefinitions :: (Annotation a, TypeEnv tenv a) => tenv -> Definitions a -> b -> IO b
checkDefinitions tenv defs result = chooseErr (concatMap (checkDefinition tenv) defs) where
chooseErr [] = return result
chooseErr msgs = do
printMsgs msgs
error "Compile failure, cannot continue."
printMsgs [] = return ()
printMsgs (m:msgs) = do
putStrLn m;
printMsgs msgs
checkDefinition :: (Annotation a, TypeEnv tenv a) => tenv -> Definition a -> [String]
checkDefinition tenv (_, [], ty, e) = check tenv e ++ checkEqTy tenv e ty
checkDefinition tenv (_, ans, fty, e) = check tenv' e ++ checkEqTy tenv' e (fnRTy fty) where
tenv' = pushtys tenv (zip ans (fnArgTys fty))
check :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> [String]
check tenv (Prim _ _) = []
check tenv (Var _ _) = []
check tenv (Roll p t e) = check tenv e ++ checkRoll p tenv t e
check tenv (Unroll p t e) = check tenv e ++ checkUnroll p tenv t e
check tenv (Let _ x e b) = check tenv e ++ check (pushty tenv x (typeof tenv e)) b
check tenv (Variant _ _ e _) = check tenv e
check tenv (VCase _ e cs) = check tenv e ++ concat [check (pushty tenv v (vtype vty ctor)) b | (ctor, v, b) <- cs] where vty = typeof tenv e
check tenv (Record _ cs) = concat [check tenv e | (_, e) <- cs]
check tenv (RProj _ e lbl) = check tenv e
check tenv (Pack _ e _) = check tenv e
check tenv (Array _ es) = concatMap (check tenv) es ++ checkEqTys tenv es
check tenv (ArrAlloc _ _ e) = check tenv e ++ checkEqTy tenv e (TPrim nullAnnotation "int")
check tenv (ArrElem _ ae ie) = check tenv ae ++ check tenv ie ++ checkArrayTy tenv ae ++ checkEqTy tenv ie (TPrim nullAnnotation "int")
check tenv (App p fe aes) = check tenv fe ++ concatMap (check tenv) aes ++ checkFnApp p tenv fe aes
check tenv (Unpack p pe v vtn b) = check tenv pe ++ check (pushty tenv v (exname (typeof tenv pe) vtn)) b ++ checkUnpack p tenv pe v vtn b
checkRoll :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Ty a -> Term a -> [String]
checkRoll p tenv t e = []
checkUnroll :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Ty a -> Term a -> [String]
checkUnroll p tenv t e = []
checkEqTys :: (Annotation a, TypeEnv tenv a) => tenv -> [Term a] -> [String]
checkEqTys _ [] = []
checkEqTys _ [_] = []
checkEqTys tenv (e:e':es) = checkEqTy tenv e' (typeof tenv e) ++ checkEqTys tenv (e':es)
checkEqTy :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> Ty a -> [String]
checkEqTy tenv e ty = checkExprTyP tenv e ((==) ty) ("Expected type " ++ show ty)
checkArrayTy :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> [String]
checkArrayTy tenv e = checkExprTyP tenv e isArrayTy "Expected array type"
checkFnApp :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Term a -> [Term a] -> [String]
checkFnApp p tenv fe aes = chooseErrPath (isFNTy fty) (length aes == length argTys) where
fty = typeof tenv fe
argTys = fnArgTys fty
chooseErrPath False _ = ["Expected function type\n actual type: " ++ show fty ++ "\n at: " ++ describe (annotation fe)]
chooseErrPath True False = ["Argument length mismatch\n expected " ++ show (length argTys) ++ " arguments but received " ++ show (length aes)]
chooseErrPath True True = concat [checkEqTy tenv e ety | (e, ety) <- zip aes argTys]
checkUnpack :: (Annotation a, TypeEnv tenv a) => a -> tenv -> Term a -> String -> String -> Term a -> [String]
checkUnpack p tenv pe v vtn b = chooseErrPath (isExType pety) (not (elem (TVar p vtn) (map (TVar p) (tyFV bty)))) where
pety = typeof tenv pe
bty = typeof (pushty tenv v (exname (typeof tenv pe) vtn)) b
chooseErrPath False _ = ["Expected existential type to unpack\n actual type: " ++ show pety ++ "\n at: " ++ describe (annotation pe)]
chooseErrPath True False = ["Existential type variable cannot escape the scope of an unpack\n with type: " ++ show bty ++ "\n at: " ++ describe (annotation b)]
chooseErrPath True True = []
checkExprTyP :: (Annotation a, TypeEnv tenv a) => tenv -> Term a -> (Ty a -> Bool) -> String -> [String]
checkExprTyP tenv e p msg | not (p ety) = [errMsg] where
ety = typeof tenv e
errMsg = msg ++ "\n actual type: " ++ show ety ++ "\n at: " ++ describe (annotation e)
checkExprTyP _ _ _ _ = []
| |
4eda03ab980523258c114ff32a1b99759641fb7b51872439c1b6ab435144232b | TrustInSoft/tis-kernel | upper.ml | (**************************************************************************)
(* *)
This file is part of .
(* *)
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
(* *)
is released under GPLv2
(* *)
(**************************************************************************)
let () = Printf.printf "%s" (String.capitalize Sys.argv.(1))
| null | https://raw.githubusercontent.com/TrustInSoft/tis-kernel/748d28baba90c03c0f5f4654d2e7bb47dfbe4e7d/src/plugins/wp/share/src/upper.ml | ocaml | ************************************************************************
************************************************************************ | This file is part of .
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
is released under GPLv2
let () = Printf.printf "%s" (String.capitalize Sys.argv.(1))
|
91138b91310d284d58f19d6d412b877c46ada0ebe0cbeddcf98c6e086e2f9dfe | AndrasKovacs/ELTE-func-lang | Notes06.hs | {-# LANGUAGE DeriveFunctor, MonadComprehensions #-}
module Notes06 where
import Control.Monad (ap, forM, forM_, liftM2)
--------------------------------------------------------------------------------
Also in Control . Monad . State
newtype State s a = State { runState :: s -> (s, a) }
deriving(Functor)
execState :: State s a -> s -> s
execState (State f) s = fst (f s)
evalState :: State s a -> s -> a
evalState (State f) s = snd (f s)
put :: s -> State s ()
put s = State (\_ -> (s, ()))
get :: State s s
get = State (\s -> (s, s))
modify :: (s -> s) -> State s ()
modify f = State (\s -> (f s, ()))
instance Applicative (State s) where pure = return; (<*>) = ap
instance Monad (State s) where
return x = State (\s -> (s, x))
State f >>= g = State (\s -> let (s', a) = f s in runState (g a) s')
--------------------------------------------------------------------------------
Labelling trees using the State monad
data BinTree a = Leaf a
| Node (BinTree a) (BinTree a)
deriving( Eq, Ord, Show, Functor )
-- The function labelTree should label the leaves of a tree with increasing integers:
-- labelTree (Leaf ()) == Leaf 0
-- labelTree (Node (Leaf ()) (Leaf ())) == Node (Leaf 0) (Leaf 1)
-- labelTree (Node (Leaf ()) (Node (Leaf ()) (Leaf ()))) == Node (Leaf 0) (Node (Leaf 1) (Leaf 2))
-- ..
Hint : define a function labelTree_State : : State Int ( ) ,
-- where the state represents the next leaf value.
-- labelTree_State should be defined by recursion on its argument.
labelTree_State :: BinTree a -> State Int (BinTree Int)
labelTree_State (Leaf _) = do
x <- labelLeaf
pure (Leaf x)
-- labelTree_State (Node l r) = liftM2 Node (labelTree_State l) (labelTree_State r)
-- labelTree_State (Node l r) = Node <$> labelTree_State l <*> labelTree_State r
labelTree_State (Node l r) = do
l' <- labelTree_State l
r' <- labelTree_State r
pure $ Node l' r'
-- When reaching a leaf, we should use the current state as the leaf value and increment the state.
labelLeaf should increment the state by 1 and return the previous state .
labelLeaf :: State Int Int
labelLeaf = do
x <- get
modify (+1) -- or: put (x+1)
return x
-- labelTree should be defined using evalState and labelTree_State
labelTree :: BinTree a -> BinTree Int
labelTree t = evalState (labelTree_State t) 0
-- The function labelTreeMax should label the leaves of a tree with the maximum leaf value
-- to the left of it (you can assume that all values are positive).
labelTreeMax ( Leaf 10 ) = = Leaf 10
labelTreeMax ( Node ( Leaf 10 ) ( Leaf 100 ) ) = = Node ( Leaf 10 ) ( Leaf 100 )
labelTreeMax ( Node ( Leaf 100 ) ( Leaf 10 ) ) = = Node ( Leaf 100 ) ( Leaf 100 )
-- labelTreeMax (Node (Leaf 2) (Node (Leaf 1) (Leaf 3))) == Node (Leaf 2) (Node (Leaf 2) (Node Leaf 3))
-- ..
labelTreeMax_State :: BinTree Int -> State Int (BinTree Int)
labelTreeMax_State (Leaf x) = Leaf <$> labelMaxLeaf x
labelTreeMax_State (Node l r) = Node <$> labelTreeMax_State l <*> labelTreeMax_State r
-- When reaching a leaf, we should use the current state as the leaf value and increment the state.
labelLeaf should increment the state by 1 and return the previous state .
labelMaxLeaf :: Int -> State Int Int
labelMaxLeaf x = do
modify (\y -> max x y)
get
labelTreeMax :: BinTree Int -> BinTree Int
labelTreeMax t = evalState (labelTreeMax_State t) 0
--------------------------------------------------------------------------------
Foldable and
-- foldMap :: (Foldable t, Monoid m) => (a -> m) -> t a -> m
mapM : : ( t , ) = > ( a - > m b ) - > t a - > m ( t b )
More general : traverse : : ( t , Applicative m ) = > ( a - > m b ) - > t a - > m ( t b )
Example : [ ] is Foldable and
foldMap_List :: Monoid m => (a -> m) -> [a] -> m
foldMap_List f [] = mempty
foldMap_List f (x:xs) = f x <> foldMap_List f xs
mapM_List :: Monad m => (a -> m b) -> [a] -> m [b]
mapM_List f [] = pure []
mapM_List f (x:xs) = (:) <$> f x <*> mapM_List f xs
forM :: Monad m => [a] -> (a -> m b) -> m [b]
forM xs f = mapM_List f xs
-- Define foldMap and mapM for BinTree
fmap_BinTree :: (a -> b) -> BinTree a -> BinTree b
fmap_BinTree f (Leaf x) = Leaf (f x)
fmap_BinTree f (Node l r) = Node (fmap_BinTree f l) (fmap_BinTree f r)
foldMap_BinTree :: Monoid m => (a -> m) -> BinTree a -> m
foldMap_BinTree f (Leaf x) = f x
foldMap_BinTree f (Node l r) = foldMap_BinTree f l <> foldMap_BinTree f r
mapM_BinTree :: Monad m => (a -> m b) -> BinTree a -> m (BinTree b)
mapM_BinTree f (Leaf x) = Leaf <$> f x
mapM_BinTree f (Node l r) = Node <$> mapM_BinTree f l <*> mapM_BinTree f r
traverse_BinTree :: Applicative m => (a -> m b) -> BinTree a -> m (BinTree b)
traverse_BinTree f (Leaf x) = Leaf <$> f x
traverse_BinTree f (Node l r) = Node <$> traverse_BinTree f l <*> traverse_BinTree f r
instance Foldable BinTree where foldMap = foldMap_BinTree
instance Traversable BinTree where
mapM = mapM_BinTree
traverse = traverse_BinTree
--------------------------------------------------------------------------------
-- We can use mapM_BinTree to redefine labelTree and labelTreeMax
labelTree' :: BinTree a -> BinTree Int
labelTree' t = evalState (mapM_BinTree (\_ -> labelLeaf) t) 0
labelTreeMax' :: BinTree Int -> BinTree Int
labelTreeMax' t = evalState (mapM_BinTree labelMaxLeaf t) 0
--
data Tree2 a = Leaf2 a | Node2 [Tree2 a]
deriving (Show, Functor)
instance Foldable Tree2 where
foldMap f (Leaf2 x) = f x
foldMap f (Node2 xs) = foldMap (foldMap f) xs
instance Traversable Tree2 where
traverse f (Leaf2 x) = Leaf2 <$> f x
traverse f (Node2 xs) = Node2 <$> traverse (traverse f) xs | null | https://raw.githubusercontent.com/AndrasKovacs/ELTE-func-lang/88d41930999d6056bdd7bfaa85761a527cce4113/2020-21-1/gyak_3/Notes06.hs | haskell | # LANGUAGE DeriveFunctor, MonadComprehensions #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
The function labelTree should label the leaves of a tree with increasing integers:
labelTree (Leaf ()) == Leaf 0
labelTree (Node (Leaf ()) (Leaf ())) == Node (Leaf 0) (Leaf 1)
labelTree (Node (Leaf ()) (Node (Leaf ()) (Leaf ()))) == Node (Leaf 0) (Node (Leaf 1) (Leaf 2))
..
where the state represents the next leaf value.
labelTree_State should be defined by recursion on its argument.
labelTree_State (Node l r) = liftM2 Node (labelTree_State l) (labelTree_State r)
labelTree_State (Node l r) = Node <$> labelTree_State l <*> labelTree_State r
When reaching a leaf, we should use the current state as the leaf value and increment the state.
or: put (x+1)
labelTree should be defined using evalState and labelTree_State
The function labelTreeMax should label the leaves of a tree with the maximum leaf value
to the left of it (you can assume that all values are positive).
labelTreeMax (Node (Leaf 2) (Node (Leaf 1) (Leaf 3))) == Node (Leaf 2) (Node (Leaf 2) (Node Leaf 3))
..
When reaching a leaf, we should use the current state as the leaf value and increment the state.
------------------------------------------------------------------------------
foldMap :: (Foldable t, Monoid m) => (a -> m) -> t a -> m
Define foldMap and mapM for BinTree
------------------------------------------------------------------------------
We can use mapM_BinTree to redefine labelTree and labelTreeMax
| module Notes06 where
import Control.Monad (ap, forM, forM_, liftM2)
Also in Control . Monad . State
newtype State s a = State { runState :: s -> (s, a) }
deriving(Functor)
execState :: State s a -> s -> s
execState (State f) s = fst (f s)
evalState :: State s a -> s -> a
evalState (State f) s = snd (f s)
put :: s -> State s ()
put s = State (\_ -> (s, ()))
get :: State s s
get = State (\s -> (s, s))
modify :: (s -> s) -> State s ()
modify f = State (\s -> (f s, ()))
instance Applicative (State s) where pure = return; (<*>) = ap
instance Monad (State s) where
return x = State (\s -> (s, x))
State f >>= g = State (\s -> let (s', a) = f s in runState (g a) s')
Labelling trees using the State monad
data BinTree a = Leaf a
| Node (BinTree a) (BinTree a)
deriving( Eq, Ord, Show, Functor )
Hint : define a function labelTree_State : : State Int ( ) ,
labelTree_State :: BinTree a -> State Int (BinTree Int)
labelTree_State (Leaf _) = do
x <- labelLeaf
pure (Leaf x)
labelTree_State (Node l r) = do
l' <- labelTree_State l
r' <- labelTree_State r
pure $ Node l' r'
labelLeaf should increment the state by 1 and return the previous state .
labelLeaf :: State Int Int
labelLeaf = do
x <- get
return x
labelTree :: BinTree a -> BinTree Int
labelTree t = evalState (labelTree_State t) 0
labelTreeMax ( Leaf 10 ) = = Leaf 10
labelTreeMax ( Node ( Leaf 10 ) ( Leaf 100 ) ) = = Node ( Leaf 10 ) ( Leaf 100 )
labelTreeMax ( Node ( Leaf 100 ) ( Leaf 10 ) ) = = Node ( Leaf 100 ) ( Leaf 100 )
labelTreeMax_State :: BinTree Int -> State Int (BinTree Int)
labelTreeMax_State (Leaf x) = Leaf <$> labelMaxLeaf x
labelTreeMax_State (Node l r) = Node <$> labelTreeMax_State l <*> labelTreeMax_State r
labelLeaf should increment the state by 1 and return the previous state .
labelMaxLeaf :: Int -> State Int Int
labelMaxLeaf x = do
modify (\y -> max x y)
get
labelTreeMax :: BinTree Int -> BinTree Int
labelTreeMax t = evalState (labelTreeMax_State t) 0
Foldable and
mapM : : ( t , ) = > ( a - > m b ) - > t a - > m ( t b )
More general : traverse : : ( t , Applicative m ) = > ( a - > m b ) - > t a - > m ( t b )
Example : [ ] is Foldable and
foldMap_List :: Monoid m => (a -> m) -> [a] -> m
foldMap_List f [] = mempty
foldMap_List f (x:xs) = f x <> foldMap_List f xs
mapM_List :: Monad m => (a -> m b) -> [a] -> m [b]
mapM_List f [] = pure []
mapM_List f (x:xs) = (:) <$> f x <*> mapM_List f xs
forM :: Monad m => [a] -> (a -> m b) -> m [b]
forM xs f = mapM_List f xs
fmap_BinTree :: (a -> b) -> BinTree a -> BinTree b
fmap_BinTree f (Leaf x) = Leaf (f x)
fmap_BinTree f (Node l r) = Node (fmap_BinTree f l) (fmap_BinTree f r)
foldMap_BinTree :: Monoid m => (a -> m) -> BinTree a -> m
foldMap_BinTree f (Leaf x) = f x
foldMap_BinTree f (Node l r) = foldMap_BinTree f l <> foldMap_BinTree f r
mapM_BinTree :: Monad m => (a -> m b) -> BinTree a -> m (BinTree b)
mapM_BinTree f (Leaf x) = Leaf <$> f x
mapM_BinTree f (Node l r) = Node <$> mapM_BinTree f l <*> mapM_BinTree f r
traverse_BinTree :: Applicative m => (a -> m b) -> BinTree a -> m (BinTree b)
traverse_BinTree f (Leaf x) = Leaf <$> f x
traverse_BinTree f (Node l r) = Node <$> traverse_BinTree f l <*> traverse_BinTree f r
instance Foldable BinTree where foldMap = foldMap_BinTree
instance Traversable BinTree where
mapM = mapM_BinTree
traverse = traverse_BinTree
labelTree' :: BinTree a -> BinTree Int
labelTree' t = evalState (mapM_BinTree (\_ -> labelLeaf) t) 0
labelTreeMax' :: BinTree Int -> BinTree Int
labelTreeMax' t = evalState (mapM_BinTree labelMaxLeaf t) 0
data Tree2 a = Leaf2 a | Node2 [Tree2 a]
deriving (Show, Functor)
instance Foldable Tree2 where
foldMap f (Leaf2 x) = f x
foldMap f (Node2 xs) = foldMap (foldMap f) xs
instance Traversable Tree2 where
traverse f (Leaf2 x) = Leaf2 <$> f x
traverse f (Node2 xs) = Node2 <$> traverse (traverse f) xs |
1bf3771ed37e7032ec8646429f6047b3fde8c4f41673e24146eae3f181604740 | tomhanika/conexp-clj | project.clj | ;; Copyright ⓒ the conexp-clj developers; all rights reserved.
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file LICENSE at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(defproject conexp-clj "2.3.1-SNAPSHOT"
:min-lein-version "2.0.0"
:description "A ConExp rewrite in clojure -- and so much more ..."
:url "-clj/"
:scm {:url ":tomhanika/conexp-clj.git"}
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[org.clojure/core.async "1.3.610"]
[org.clojure/data.int-map "1.0.0"]
[org.clojure/data.json "2.4.0"]
[org.clojure/data.xml "0.0.8"]
[org.clojure/math.combinatorics "0.1.6"]
[org.clojure/math.numeric-tower "0.0.4"]
[org.clojure/tools.cli "1.0.194"]
[org.apache.commons/commons-math "2.2"]
[org.clojure/algo.generic "0.1.3"]
[seesaw "1.5.0"]
[reply "0.4.4"
:exclusions [org.clojure/clojure
clojure-complete
com.cemerick/drawbridge]]
[aysylu/loom "1.0.2"]
[rolling-stones "1.0.1"
:exclusions [org.clojure/clojure]]
[clj-http "3.11.0"]
[clojure-complete "0.2.5"]
[ring/ring-devel "1.8.2"]
[ring/ring-core "1.8.2"]
[ring/ring-json "0.5.0"]
[ring-cors "0.1.13"]
[http-kit "2.5.0"]
[org.apache.commons/commons-math3 "3.6.1"]
[luposlip/json-schema "0.3.4"]
[org.clojure/data.csv "1.0.1"]]
:profiles {:uberjar {:main conexp.main
:dependencies [[javax.servlet/servlet-api "2.5"]
[ring/ring-mock "0.4.0"]
[nrepl/nrepl "0.6.0"]]
:aot :all}
:dev {:main conexp.main
:dependencies [[javax.servlet/servlet-api "2.5"]
[ring/ring-mock "0.4.0"]
[nrepl/nrepl "0.6.0"]]
:javac-options ["-Xlint:deprecation" "-Xlint:unchecked"]}
:gorilla {:main conexp.main
:plugins [[org.clojars.benfb/lein-gorilla "0.7.0"]]}}
:keep-non-project-classes true
:source-paths ["src/main/clojure" "src/test/clojure"]
:java-source-paths ["src/main/java"]
:test-paths ["src/test/clojure"]
:resource-paths ["src/main/resources"]
:target-path "builds/%s"
:compile-path "%s/classes/"
:java-opts ["-Dawt.useSystemAAFontSettings=on" "-Xmx4G"])
| null | https://raw.githubusercontent.com/tomhanika/conexp-clj/24dafecd1add05ca311d7e0ef66493e93cc96dc6/project.clj | clojure | Copyright ⓒ the conexp-clj developers; all rights reserved.
The use and distribution terms for this software are covered by the
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Eclipse Public License 1.0 ( -1.0.php )
(defproject conexp-clj "2.3.1-SNAPSHOT"
:min-lein-version "2.0.0"
:description "A ConExp rewrite in clojure -- and so much more ..."
:url "-clj/"
:scm {:url ":tomhanika/conexp-clj.git"}
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[org.clojure/core.async "1.3.610"]
[org.clojure/data.int-map "1.0.0"]
[org.clojure/data.json "2.4.0"]
[org.clojure/data.xml "0.0.8"]
[org.clojure/math.combinatorics "0.1.6"]
[org.clojure/math.numeric-tower "0.0.4"]
[org.clojure/tools.cli "1.0.194"]
[org.apache.commons/commons-math "2.2"]
[org.clojure/algo.generic "0.1.3"]
[seesaw "1.5.0"]
[reply "0.4.4"
:exclusions [org.clojure/clojure
clojure-complete
com.cemerick/drawbridge]]
[aysylu/loom "1.0.2"]
[rolling-stones "1.0.1"
:exclusions [org.clojure/clojure]]
[clj-http "3.11.0"]
[clojure-complete "0.2.5"]
[ring/ring-devel "1.8.2"]
[ring/ring-core "1.8.2"]
[ring/ring-json "0.5.0"]
[ring-cors "0.1.13"]
[http-kit "2.5.0"]
[org.apache.commons/commons-math3 "3.6.1"]
[luposlip/json-schema "0.3.4"]
[org.clojure/data.csv "1.0.1"]]
:profiles {:uberjar {:main conexp.main
:dependencies [[javax.servlet/servlet-api "2.5"]
[ring/ring-mock "0.4.0"]
[nrepl/nrepl "0.6.0"]]
:aot :all}
:dev {:main conexp.main
:dependencies [[javax.servlet/servlet-api "2.5"]
[ring/ring-mock "0.4.0"]
[nrepl/nrepl "0.6.0"]]
:javac-options ["-Xlint:deprecation" "-Xlint:unchecked"]}
:gorilla {:main conexp.main
:plugins [[org.clojars.benfb/lein-gorilla "0.7.0"]]}}
:keep-non-project-classes true
:source-paths ["src/main/clojure" "src/test/clojure"]
:java-source-paths ["src/main/java"]
:test-paths ["src/test/clojure"]
:resource-paths ["src/main/resources"]
:target-path "builds/%s"
:compile-path "%s/classes/"
:java-opts ["-Dawt.useSystemAAFontSettings=on" "-Xmx4G"])
|
0c31e4ee46c89d2127f26cd57c0d5d684c6c1bf4e7017d9645588c8ec9809c83 | ucsd-progsys/liquidhaskell | StateConstraints.hs | module StateConstraints where
data ST s a = ST {runState :: s -> (a,s)}
@ data ST s a < p : : s - > Bool , q : : s - > s - > Bool , r : : s - > a - > Bool >
= ST ( runState : : x : s < p > - > ( a < r x > , s < q x > ) ) @
= ST (runState :: x:s<p> -> (a<r x>, s<q x>)) @-}
@ runState : : forall < p : : s - > Bool , q : : s - > s - > Bool , r : : s - > a - > Bool > . ST < p , q , r > s a - > x : s < p > - > ( a < r x > , s < q x > ) @
cmp : : forall < pref : : s - > Bool , postf : : s - > s - > Bool
, pre : : s - > Bool , : : s - > s - > Bool
, post : : s - > s - > Bool
, rg : : s - > a - > Bool
, rf : : s - > b - > Bool
, r : : s - > b - > Bool
> .
{ xx::s < pre > , w::s < xx > |- s < postf w > < : s < post xx > }
{ ww::s < pre > |- s < postg ww > < : s < pref > }
( ST < pre , postg , rg > s a )
- > ( ST < pref , postf , rf > s b )
- > ( ST < pre , post , r > s b )
@
cmp :: forall < pref :: s -> Bool, postf :: s -> s -> Bool
, pre :: s -> Bool, postg :: s -> s -> Bool
, post :: s -> s -> Bool
, rg :: s -> a -> Bool
, rf :: s -> b -> Bool
, r :: s -> b -> Bool
>.
{xx::s<pre>, w::s<postg xx> |- s<postf w> <: s<post xx>}
{ww::s<pre> |- s<postg ww> <: s<pref>}
(ST <pre, postg, rg> s a)
-> (ST <pref, postf, rf> s b)
-> (ST <pre, post, r> s b)
@-}
cmp :: (ST s a)
-> (ST s b)
-> (ST s b)
cmp (ST g) (ST f) = ST (\x -> case g x of {(_, s) -> f s})
@
bind : : forall < pref : : s - > Bool , postf : : s - > s - > Bool
, pre : : s - > Bool , : : s - > s - > Bool
, post : : s - > s - > Bool
, rg : : s - > a - > Bool
, rf : : s - > b - > Bool
, r : : s - > b - > Bool
, : : a - > Bool
> .
{ x::s < pre > |- a < rg x > < : a < > }
{ x::s < pre > , y::s < postg x > |- b < rf y > < : b < r x > }
{ xx::s < pre > , w::s < xx > |- s < postf w > < : s < post xx > }
{ ww::s < pre > |- s < postg ww > < : s < pref > }
( ST < pre , postg , rg > s a )
- > ( a < > - > ST < pref , postf , rf > s b )
- > ( ST < pre , post , r > s b )
@
bind :: forall < pref :: s -> Bool, postf :: s -> s -> Bool
, pre :: s -> Bool, postg :: s -> s -> Bool
, post :: s -> s -> Bool
, rg :: s -> a -> Bool
, rf :: s -> b -> Bool
, r :: s -> b -> Bool
, pref0 :: a -> Bool
>.
{x::s<pre> |- a<rg x> <: a<pref0>}
{x::s<pre>, y::s<postg x> |- b<rf y> <: b<r x>}
{xx::s<pre>, w::s<postg xx> |- s<postf w> <: s<post xx>}
{ww::s<pre> |- s<postg ww> <: s<pref>}
(ST <pre, postg, rg> s a)
-> (a<pref0> -> ST <pref, postf, rf> s b)
-> (ST <pre, post, r> s b)
@-}
bind :: (ST s a)
-> (a -> ST s b)
-> (ST s b)
bind (ST g) f = ST (\x -> case g x of {(y, s) -> (runState (f y)) s})
@ incr : : ST < { \x - > x > = 0 } , { \x v - > v = x + 1 } , { \x v - > v = x } > @
incr :: ST Int Int
incr = ST $ \x -> (x, x + 1)
@ incr2 : : ST < { \x - > x > = 0 } , { \x v - > v = x + 2 } , { \x v - > v = x + 1 } > @
incr2 :: ST Int Int
incr2 = bind incr (\_ -> incr)
@ incr3 : : ST < { \x - > x > = 0 } , { \x v - > v = x + 3 } , { \x v - > v = x + 2 } > @
incr3 :: ST Int Int
incr3 = bind (bind incr (\_ -> incr)) (\_ -> incr)
foo :: (Int, Int)
@ foo : : ( { v : v = 2 } , { v : v = 3 } ) @
foo = (runState incr3) 0
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/20cd67af038930cb592d68d272c8eb1cbe3cb6bf/tests/pos/StateConstraints.hs | haskell | module StateConstraints where
data ST s a = ST {runState :: s -> (a,s)}
@ data ST s a < p : : s - > Bool , q : : s - > s - > Bool , r : : s - > a - > Bool >
= ST ( runState : : x : s < p > - > ( a < r x > , s < q x > ) ) @
= ST (runState :: x:s<p> -> (a<r x>, s<q x>)) @-}
@ runState : : forall < p : : s - > Bool , q : : s - > s - > Bool , r : : s - > a - > Bool > . ST < p , q , r > s a - > x : s < p > - > ( a < r x > , s < q x > ) @
cmp : : forall < pref : : s - > Bool , postf : : s - > s - > Bool
, pre : : s - > Bool , : : s - > s - > Bool
, post : : s - > s - > Bool
, rg : : s - > a - > Bool
, rf : : s - > b - > Bool
, r : : s - > b - > Bool
> .
{ xx::s < pre > , w::s < xx > |- s < postf w > < : s < post xx > }
{ ww::s < pre > |- s < postg ww > < : s < pref > }
( ST < pre , postg , rg > s a )
- > ( ST < pref , postf , rf > s b )
- > ( ST < pre , post , r > s b )
@
cmp :: forall < pref :: s -> Bool, postf :: s -> s -> Bool
, pre :: s -> Bool, postg :: s -> s -> Bool
, post :: s -> s -> Bool
, rg :: s -> a -> Bool
, rf :: s -> b -> Bool
, r :: s -> b -> Bool
>.
{xx::s<pre>, w::s<postg xx> |- s<postf w> <: s<post xx>}
{ww::s<pre> |- s<postg ww> <: s<pref>}
(ST <pre, postg, rg> s a)
-> (ST <pref, postf, rf> s b)
-> (ST <pre, post, r> s b)
@-}
cmp :: (ST s a)
-> (ST s b)
-> (ST s b)
cmp (ST g) (ST f) = ST (\x -> case g x of {(_, s) -> f s})
@
bind : : forall < pref : : s - > Bool , postf : : s - > s - > Bool
, pre : : s - > Bool , : : s - > s - > Bool
, post : : s - > s - > Bool
, rg : : s - > a - > Bool
, rf : : s - > b - > Bool
, r : : s - > b - > Bool
, : : a - > Bool
> .
{ x::s < pre > |- a < rg x > < : a < > }
{ x::s < pre > , y::s < postg x > |- b < rf y > < : b < r x > }
{ xx::s < pre > , w::s < xx > |- s < postf w > < : s < post xx > }
{ ww::s < pre > |- s < postg ww > < : s < pref > }
( ST < pre , postg , rg > s a )
- > ( a < > - > ST < pref , postf , rf > s b )
- > ( ST < pre , post , r > s b )
@
bind :: forall < pref :: s -> Bool, postf :: s -> s -> Bool
, pre :: s -> Bool, postg :: s -> s -> Bool
, post :: s -> s -> Bool
, rg :: s -> a -> Bool
, rf :: s -> b -> Bool
, r :: s -> b -> Bool
, pref0 :: a -> Bool
>.
{x::s<pre> |- a<rg x> <: a<pref0>}
{x::s<pre>, y::s<postg x> |- b<rf y> <: b<r x>}
{xx::s<pre>, w::s<postg xx> |- s<postf w> <: s<post xx>}
{ww::s<pre> |- s<postg ww> <: s<pref>}
(ST <pre, postg, rg> s a)
-> (a<pref0> -> ST <pref, postf, rf> s b)
-> (ST <pre, post, r> s b)
@-}
bind :: (ST s a)
-> (a -> ST s b)
-> (ST s b)
bind (ST g) f = ST (\x -> case g x of {(y, s) -> (runState (f y)) s})
@ incr : : ST < { \x - > x > = 0 } , { \x v - > v = x + 1 } , { \x v - > v = x } > @
incr :: ST Int Int
incr = ST $ \x -> (x, x + 1)
@ incr2 : : ST < { \x - > x > = 0 } , { \x v - > v = x + 2 } , { \x v - > v = x + 1 } > @
incr2 :: ST Int Int
incr2 = bind incr (\_ -> incr)
@ incr3 : : ST < { \x - > x > = 0 } , { \x v - > v = x + 3 } , { \x v - > v = x + 2 } > @
incr3 :: ST Int Int
incr3 = bind (bind incr (\_ -> incr)) (\_ -> incr)
foo :: (Int, Int)
@ foo : : ( { v : v = 2 } , { v : v = 3 } ) @
foo = (runState incr3) 0
| |
9eb0db72045297d2ef9700192bcedd51f24ec4faaeb48ea5f95d9b6d72dac05d | garrigue/lablgtk | xml_lexer.mli | (**************************************************************************)
Lablgtk - Camlirc
(* *)
(* * You are free to do anything you want with this code as long *)
(* as it is for personal use. *)
(* *)
(* * Redistribution can only be "as is". Binary distribution *)
(* and bug fixes are allowed, but you cannot extensively *)
(* modify the code without asking the authors. *)
(* *)
(* The authors may choose to remove any of the above *)
(* restrictions on a per request basis. *)
(* *)
(* Authors: *)
< >
< >
(* *)
(**************************************************************************)
$ Id$
type error =
| Illegal_character of char
| Bad_entity of string
| Unterminated of string
| Tag_expected
| Other of string
exception Error of error * int
val error_string : error -> string
type token =
| Tag of string * (string * string) list * bool
(* [Tag (name, attributes, closed)] denotes an opening tag with
the specified [name] and [attributes]. If [closed], then the tag
ended in "/>", meaning that it has no sub-elements. *)
| Chars of string
(* Some text between the tags, cut by line *)
| Endtag of string
(* A closing tag *)
| EOF
(* End of input *)
val token : Lexing.lexbuf -> token
val token_start : unit -> int
val base64 : Lexing.lexbuf -> int
Decode base 64 data to 6 - bit ints , skipping blanks
| null | https://raw.githubusercontent.com/garrigue/lablgtk/504fac1257e900e6044c638025a4d6c5a321284c/applications/camlirc/xml_lexer.mli | ocaml | ************************************************************************
* You are free to do anything you want with this code as long
as it is for personal use.
* Redistribution can only be "as is". Binary distribution
and bug fixes are allowed, but you cannot extensively
modify the code without asking the authors.
The authors may choose to remove any of the above
restrictions on a per request basis.
Authors:
************************************************************************
[Tag (name, attributes, closed)] denotes an opening tag with
the specified [name] and [attributes]. If [closed], then the tag
ended in "/>", meaning that it has no sub-elements.
Some text between the tags, cut by line
A closing tag
End of input | Lablgtk - Camlirc
< >
< >
$ Id$
type error =
| Illegal_character of char
| Bad_entity of string
| Unterminated of string
| Tag_expected
| Other of string
exception Error of error * int
val error_string : error -> string
type token =
| Tag of string * (string * string) list * bool
| Chars of string
| Endtag of string
| EOF
val token : Lexing.lexbuf -> token
val token_start : unit -> int
val base64 : Lexing.lexbuf -> int
Decode base 64 data to 6 - bit ints , skipping blanks
|
9a06801266964f87a9e21aa9353aff79b2f1e0c6ad9778fec49f9b26ad027cd2 | FranklinChen/hugs98-plus-Sep2006 | Pen.hs | -----------------------------------------------------------------------------
-- |
-- Module : Graphics.HGL.Draw.Pen
Copyright : ( c ) , 1999 - 2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : provisional
-- Portability : non-portable (requires concurrency)
--
-- Pens, used for drawing lines.
--
-- Portability notes:
--
* On , the pen is also used to draw a line round all the filled
-- shapes --- so the pen color also affects how polygons, ellipses
-- and regions are drawn.
--
* On , the ' Style ' is ignored ( i.e. treated as ' Solid ' ) for pens
of width greater than 1 . This problem does not apply to X11 .
--
-----------------------------------------------------------------------------
#include "HsHGLConfig.h"
module Graphics.HGL.Draw.Pen
( Pen
, Style(Solid, Dash, Dot, DashDot, DashDotDot, Null, InsideFrame)
, createPen -- :: Style -> Int -> RGB -> IO Pen
, deletePen
, selectPen -- :: Pen -> Draw Pen
, mkPen -- :: Style -> Int -> RGB -> (Pen -> Draw a) -> Draw a
) where
import Graphics.HGL.Draw.Text (RGB)
import Graphics.HGL.Draw.Monad (Draw, ioToDraw)
import Graphics.HGL.Internals.Types (Style(..))
import Graphics.HGL.Internals.Draw (mkDraw)
#if !X_DISPLAY_MISSING
import Graphics.HGL.X11.Types
import Graphics.HGL.X11.Display
import qualified Graphics.X11.Xlib as X
import Control.Concurrent (takeMVar, putMVar)
#else
import Graphics.HGL.Win32.Types
import qualified Graphics.Win32 as Win32
import Graphics.HGL.Draw.Monad (bracket)
#endif
----------------------------------------------------------------
#if X_DISPLAY_MISSING
newtype Pen = Pen Win32.HPEN
#endif
-- | Create a 'Pen'.
createPen :: Style -> Int -> RGB -> IO Pen
-- | Destroy a 'Pen' created with 'createPen'.
deletePen :: Pen -> IO ()
-- | Set the 'Pen' for subsequent drawing, returning the previous setting.
selectPen :: Pen -> Draw Pen
-- | Create a 'Pen' locally to a drawing.
mkPen :: Style -> Int -> RGB -> (Pen -> Draw a) -> Draw a
----------------------------------------------------------------
#if !X_DISPLAY_MISSING
----------------------------------------------------------------
-- Pens
--
-- Used to draw lines and boundaries of filled shapes
----------------------------------------------------------------
createPen style width col = do
display <- getDisplay
pixel <- lookupColor display col
return (Pen style width pixel)
deletePen _ = return ()
ToDo : how do I set background colour for brush and pen ?
selectPen p@(Pen _ lwidth c) = mkDraw $ \ dc -> do
bs <- takeMVar (ref_bits dc)
putMVar (ref_bits dc) bs{pen=p}
X.setForeground (disp dc) (paintGC dc) c
X.setLineAttributes (disp dc) (paintGC dc) lwidth X.lineSolid X.capButt X.joinMiter
return (pen bs)
mkPen style width color g = do
p <- ioToDraw $ createPen style width color
g p
#else /* X_DISPLAY_MISSING */
style :: Style -> Win32.PenStyle
style Solid = Win32.pS_SOLID
style Dash = Win32.pS_DASH
style Dot = Win32.pS_DOT
style DashDot = Win32.pS_DASHDOT
style DashDotDot = Win32.pS_DASHDOTDOT
style Null = Win32.pS_NULL
style InsideFrame = Win32.pS_INSIDEFRAME
createPen sty width c =
Win32.createPen (style sty) (fromIntegral width) (fromRGB c) >>= return . Pen
deletePen (Pen pen) =
Win32.deletePen pen
selectPen (Pen p) = mkDraw (\hdc -> do
p' <- Win32.selectPen hdc p
return (Pen p'))
mkPen sty width c =
bracket (ioToDraw $ createPen sty width c)
(ioToDraw . deletePen)
#endif /* X_DISPLAY_MISSING */
----------------------------------------------------------------
-- The end
----------------------------------------------------------------
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/HGL/Graphics/HGL/Draw/Pen.hs | haskell | ---------------------------------------------------------------------------
|
Module : Graphics.HGL.Draw.Pen
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : provisional
Portability : non-portable (requires concurrency)
Pens, used for drawing lines.
Portability notes:
shapes --- so the pen color also affects how polygons, ellipses
and regions are drawn.
---------------------------------------------------------------------------
:: Style -> Int -> RGB -> IO Pen
:: Pen -> Draw Pen
:: Style -> Int -> RGB -> (Pen -> Draw a) -> Draw a
--------------------------------------------------------------
| Create a 'Pen'.
| Destroy a 'Pen' created with 'createPen'.
| Set the 'Pen' for subsequent drawing, returning the previous setting.
| Create a 'Pen' locally to a drawing.
--------------------------------------------------------------
--------------------------------------------------------------
Pens
Used to draw lines and boundaries of filled shapes
--------------------------------------------------------------
--------------------------------------------------------------
The end
-------------------------------------------------------------- | Copyright : ( c ) , 1999 - 2003
* On , the pen is also used to draw a line round all the filled
* On , the ' Style ' is ignored ( i.e. treated as ' Solid ' ) for pens
of width greater than 1 . This problem does not apply to X11 .
#include "HsHGLConfig.h"
module Graphics.HGL.Draw.Pen
( Pen
, Style(Solid, Dash, Dot, DashDot, DashDotDot, Null, InsideFrame)
, deletePen
) where
import Graphics.HGL.Draw.Text (RGB)
import Graphics.HGL.Draw.Monad (Draw, ioToDraw)
import Graphics.HGL.Internals.Types (Style(..))
import Graphics.HGL.Internals.Draw (mkDraw)
#if !X_DISPLAY_MISSING
import Graphics.HGL.X11.Types
import Graphics.HGL.X11.Display
import qualified Graphics.X11.Xlib as X
import Control.Concurrent (takeMVar, putMVar)
#else
import Graphics.HGL.Win32.Types
import qualified Graphics.Win32 as Win32
import Graphics.HGL.Draw.Monad (bracket)
#endif
#if X_DISPLAY_MISSING
newtype Pen = Pen Win32.HPEN
#endif
createPen :: Style -> Int -> RGB -> IO Pen
deletePen :: Pen -> IO ()
selectPen :: Pen -> Draw Pen
mkPen :: Style -> Int -> RGB -> (Pen -> Draw a) -> Draw a
#if !X_DISPLAY_MISSING
createPen style width col = do
display <- getDisplay
pixel <- lookupColor display col
return (Pen style width pixel)
deletePen _ = return ()
ToDo : how do I set background colour for brush and pen ?
selectPen p@(Pen _ lwidth c) = mkDraw $ \ dc -> do
bs <- takeMVar (ref_bits dc)
putMVar (ref_bits dc) bs{pen=p}
X.setForeground (disp dc) (paintGC dc) c
X.setLineAttributes (disp dc) (paintGC dc) lwidth X.lineSolid X.capButt X.joinMiter
return (pen bs)
mkPen style width color g = do
p <- ioToDraw $ createPen style width color
g p
#else /* X_DISPLAY_MISSING */
style :: Style -> Win32.PenStyle
style Solid = Win32.pS_SOLID
style Dash = Win32.pS_DASH
style Dot = Win32.pS_DOT
style DashDot = Win32.pS_DASHDOT
style DashDotDot = Win32.pS_DASHDOTDOT
style Null = Win32.pS_NULL
style InsideFrame = Win32.pS_INSIDEFRAME
createPen sty width c =
Win32.createPen (style sty) (fromIntegral width) (fromRGB c) >>= return . Pen
deletePen (Pen pen) =
Win32.deletePen pen
selectPen (Pen p) = mkDraw (\hdc -> do
p' <- Win32.selectPen hdc p
return (Pen p'))
mkPen sty width c =
bracket (ioToDraw $ createPen sty width c)
(ioToDraw . deletePen)
#endif /* X_DISPLAY_MISSING */
|
d6542f690292092b548fc5a85df89a533b4b849de015ff483d79c7356d0961c5 | pflanze/chj-schemelib | define-strict-and-lazy.scm | Copyright ( < ) 2011 - 2018 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require (define-macro-star)
(simple-match))
(define-macro* (define-strict-and-lazy
strict-name
stream-name
#!key
(aliases '()) ;; list of `(name strict-name stream-name)
#!rest
exprs)
replaces |DELAY| with delay or ' nothing ' , same for |FV| and
|FORCE| , and binds each name in aliases to strict - name or
;; stream-name
`(begin
(define ,strict-name
(let ,(map (lambda (alias-lis3)
(let ((l (source-code alias-lis3)))
`(,(car l) ,(cadr l))))
(source-code aliases))
(##define-syntax DELAY
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((DELAY expr)
expr))
stx)))
(##define-syntax FV
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FV vars . body)
`(begin
,@body)))
stx)))
(##define-syntax FORCE
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FORCE expr)
expr))
stx)))
,@exprs))
(define ,stream-name
(let ,(map (lambda (alias-lis3)
(let ((l (source-code alias-lis3)))
`(,(car l) ,(caddr l))))
(source-code aliases))
(##define-syntax DELAY
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((DELAY expr)
`(delay ,expr)))
stx)))
(##define-syntax FV
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FV vars . body)
`(let ,(map (lambda (v)
`(,v (force ,v)))
(source-code vars))
,@body)))
stx)))
(##define-syntax FORCE
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FORCE expr)
`(force ,expr)))
stx)))
,@exprs))))
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/define-strict-and-lazy.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version.
list of `(name strict-name stream-name)
stream-name | Copyright ( < ) 2011 - 2018 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require (define-macro-star)
(simple-match))
(define-macro* (define-strict-and-lazy
strict-name
stream-name
#!key
#!rest
exprs)
replaces |DELAY| with delay or ' nothing ' , same for |FV| and
|FORCE| , and binds each name in aliases to strict - name or
`(begin
(define ,strict-name
(let ,(map (lambda (alias-lis3)
(let ((l (source-code alias-lis3)))
`(,(car l) ,(cadr l))))
(source-code aliases))
(##define-syntax DELAY
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((DELAY expr)
expr))
stx)))
(##define-syntax FV
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FV vars . body)
`(begin
,@body)))
stx)))
(##define-syntax FORCE
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FORCE expr)
expr))
stx)))
,@exprs))
(define ,stream-name
(let ,(map (lambda (alias-lis3)
(let ((l (source-code alias-lis3)))
`(,(car l) ,(caddr l))))
(source-code aliases))
(##define-syntax DELAY
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((DELAY expr)
`(delay ,expr)))
stx)))
(##define-syntax FV
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FV vars . body)
`(let ,(map (lambda (v)
`(,v (force ,v)))
(source-code vars))
,@body)))
stx)))
(##define-syntax FORCE
(lambda (stx)
(cj-sourcify-deep
(match*
stx
((FORCE expr)
`(force ,expr)))
stx)))
,@exprs))))
|
ca20c937447fa3b0480a59a11840022fbcae5502e017721d93e51dcabaa178c0 | anishathalye/knox | lib.rkt | #lang rosette/safe
(require
(only-in rosette/base/core/bool [@true? true?])
(only-in rosette/base/core/polymorphic ite)
(for-syntax syntax/parse racket/syntax)
(prefix-in ! (combine-in racket/base racket/match))
syntax/parse/define
"parameters.rkt"
"libopt.rkt"
rosutil)
(provide
= distinct _ select store bvxnor
(rename-out [$xor xor]
[$if ite])
exports from Rosette
true false ; constants
(rename-out [! not] [&& and] [|| or]) ; logical
bv ; types
bvult bvslt bvule bvsle bvuge bvsge bvugt bvsgt ; comparison
bvnot bvand bvor bvxor bvshl bvlshr bvashr ; bitwise
bvneg bvadd bvsub bvmul bvudiv bvsdiv bvurem bvsrem bvsmod ; arithmetic
concat) ; conversion
(define-simple-macro ($if test-expr then-expr else-expr)
(if* test-expr (thunk then-expr) (thunk else-expr)))
this is a workaround for Rosette 's ` if ` statement producing assertions
; when it's not necessary. `if` eventually calls `eval-assuming` to evaluate
; the then and else expressions. before doing so, `eval-assuming` augments
; the verification condition with the guard; sometimes, this immediately
; results in an exception due to the path being infeasible, and so `if`
; adds an assertion that the path condition (vc-assumes (vc)) implies that
; the test must be true or false (depending on which branch failed). this assertion,
; even though it's useless, sometimes gets added to the vc,
; because `(&& a b)`, which is used when augmenting the path condition,
; sometimes results in a concrete Racket value of `#f`, but `(=> a (! b))`,
; which is used when adding an assertion, does not simplify in Racket to `#t`
; even though it is provably so.
;
; this is an example of such a program:
; (define-symbolic* a1 a2 boolean?)
( if a1 ( if a2 0 ( if ( & & a1 a2 ) 1 2 ) ) 3 )
;
; after running this program, the (vc) is:
; (vc (|| (! a1$0) (|| a2$1 (&& (&& a1$0 (! a2$1)) (! (&& a1$0 a2$1))))) #t)
;
this thin wrapper around Rosette 's ` if ` does this test eagerly , looking
; at the combination of the verification condition's assumption along
; with the test, and if it can be determined that the other path is
; infeasible, it skips evaluating it altogether.
;
this should be safe to use with arbitrary Rosette code ( even code
; e.g. involving mutation).
(define (if* test-expr then-expr else-expr)
(define test (true? test-expr))
(define assumes (vc-assumes (vc)))
(!cond
[(!or (!eq? test #t) (!not (&& assumes (! test))))
(then-expr)]
[(!or (!eq? test #f) (!not (&& assumes test)))
(else-expr)]
[else
(rewrite-if (if test (then-expr) (else-expr)))]))
we could implement this with ` equal ? ` , but that is slow . uses ` = ` mostly for
; bitvectors, and only in a few cases for booleans. The boolean cases are:
;
; - in the invariant function, when comparing a boolean with the literal 'true' or 'false'
; - in the transition function (this is a macro anyways, that treats the '=' specially)
(define-syntax (= stx)
(syntax-parse stx
[(_ x:expr (~datum true))
#'(<=> x true)]
[(_ x:expr (~datum false))
#'(<=> x false)]
[(_ x:expr y:expr)
#'(bveq x y)]))
(define (distinct x y)
(not (bveq x y)))
(define ((extractor i j) x)
(extract i j x))
(define-simple-macro (_ (~datum extract) i:expr j:expr)
(extractor i j))
;; Array/memory read. With the vector representation, a read with a symbolic
;; index from a vector of length >= (overapproximate-symbolic-load-threshold)
;; is overapproximated by a fresh symbolic value; otherwise vector-ref-bv does
;; the indexing. With the UF representation an array is a function, so a read
;; is plain application.
;; (Fix: the bare `UF representation` line had lost its comment marker during
;; extraction, leaving unbound identifiers in the else branch.)
(define (select a i)
  (if (array-representation-vector)
      ;; vector representation
      (let ([symbolic-index (not (concrete-head? i))]
            [thresh (overapproximate-symbolic-load-threshold)])
        (if (and symbolic-index thresh (>= (vector-length a) thresh))
            ;; overapproximate, return fresh symbolic value
            (fresh-symbolic 'select-overapproximated-value (type-of (vector-ref a 0)))
            ;; do the indexing into the vector
            (vector-ref-bv a i)))
      ;; UF representation
      (a i)))
;; Functional update of an immutable vector at (bitvector) position pos.
;; When the index is symbolic and the vector has at least
;; (overapproximate-symbolic-store-threshold) slots, the entire result is
;; overapproximated with fresh symbolic elements instead of building a
;; precise ite tree per slot.
(define (vector-update vec pos v)
  (define symbolic-index (not (concrete-head? pos)))
  (define thresh (overapproximate-symbolic-store-threshold))
  (if (and symbolic-index thresh (>= (vector-length vec) thresh))
      (let ([type (type-of (vector-ref vec 0))])
        (!build-vector (vector-length vec)
                       (lambda (_) (fresh-symbolic 'overapproximation type))))
      ;; XXX this seems inefficient
      (let ([vec-copy (list->vector (vector->list vec))])
        (vector-set!-bv vec-copy pos v)
        (vector->immutable-vector vec-copy))))
;; Array/memory write: returns the updated array. Vector representation
;; delegates to vector-update; UF representation wraps the old function
;; with a point update at index i.
;; (Fix: the bare `UF representation` line had lost its comment marker during
;; extraction, leaving unbound identifiers in the else branch.)
(define (store a i v)
  (if (array-representation-vector)
      ;; vector representation
      (vector-update a i v)
      ;; UF representation
      (lambda (i*) (if (bveq i i*) v (a i*)))))
;; Variadic iff: folds <=> across the arguments (identity element #t).
(define (<=>* . args)
  (foldl <=> #t args))
; to match SMTLIB's xor, which can take multiple arguments
;; Variadic boolean xor (parity), expressed via chained iff:
;; an even number of arguments is the negation of their <=>* chain,
;; an odd number is the <=>* chain itself.
(define-syntax ($xor stx)
  (syntax-parse stx
    [(_ (~seq a0 a1) ...) #'(! (<=>* (~@ a0 a1) ...))]
    [(_ a (~seq b0 b1) ...) #'(<=>* a (~@ b0 b1) ...)]))

;; Variadic bitvector xnor: complement of bvxor over all arguments.
(define (bvxnor . args)
  (bvnot (apply bvxor args)))
| null | https://raw.githubusercontent.com/anishathalye/knox/161cda3e5274cc69012830f477749954ddcf736d/yosys/lib.rkt | racket | constants
logical
types
comparison
bitwise
arithmetic
conversion
when it's not necessary. `if` eventually calls `eval-assuming` to evaluate
the then and else expressions. before doing so, `eval-assuming` augments
the verification condition with the guard; sometimes, this immediately
results in an exception due to the path being infeasible, and so `if`
adds an assertion that the path condition (vc-assumes (vc)) implies that
the test must be true or false (depending on which branch failed). this assertion,
even though it's useless, sometimes gets added to the vc,
because `(&& a b)`, which is used when augmenting the path condition,
sometimes results in a concrete Racket value of `#f`, but `(=> a (! b))`,
which is used when adding an assertion, does not simplify in Racket to `#t`
even though it is provably so.
this is an example of such a program:
(define-symbolic* a1 a2 boolean?)
after running this program, the (vc) is:
(vc (|| (! a1$0) (|| a2$1 (&& (&& a1$0 (! a2$1)) (! (&& a1$0 a2$1))))) #t)
at the combination of the verification condition's assumption along
with the test, and if it can be determined that the other path is
infeasible, it skips evaluating it altogether.
e.g. involving mutation).
bitvectors, and only in a few cases for booleans. The boolean cases are:
- in the invariant function, when comparing a boolean with the literal 'true' or 'false'
- in the transition function (this is a macro anyways, that treats the '=' specially)
vector representation
overapproximate, return fresh symbolic value
do the indexing into the vector
XXX this seems inefficient
vector representation | #lang rosette/safe
(require
(only-in rosette/base/core/bool [@true? true?])
(only-in rosette/base/core/polymorphic ite)
(for-syntax syntax/parse racket/syntax)
(prefix-in ! (combine-in racket/base racket/match))
syntax/parse/define
"parameters.rkt"
"libopt.rkt"
rosutil)
(provide
= distinct _ select store bvxnor
(rename-out [$xor xor]
[$if ite])
exports from Rosette
(define-simple-macro ($if test-expr then-expr else-expr)
(if* test-expr (thunk then-expr) (thunk else-expr)))
this is a workaround for Rosette 's ` if ` statement producing assertions
( if a1 ( if a2 0 ( if ( & & a1 a2 ) 1 2 ) ) 3 )
this thin wrapper around Rosette 's ` if ` does this test eagerly , looking
this should be safe to use with arbitrary Rosette code ( even code
(define (if* test-expr then-expr else-expr)
(define test (true? test-expr))
(define assumes (vc-assumes (vc)))
(!cond
[(!or (!eq? test #t) (!not (&& assumes (! test))))
(then-expr)]
[(!or (!eq? test #f) (!not (&& assumes test)))
(else-expr)]
[else
(rewrite-if (if test (then-expr) (else-expr)))]))
we could implement this with ` equal ? ` , but that is slow . uses ` = ` mostly for
(define-syntax (= stx)
(syntax-parse stx
[(_ x:expr (~datum true))
#'(<=> x true)]
[(_ x:expr (~datum false))
#'(<=> x false)]
[(_ x:expr y:expr)
#'(bveq x y)]))
(define (distinct x y)
(not (bveq x y)))
(define ((extractor i j) x)
(extract i j x))
(define-simple-macro (_ (~datum extract) i:expr j:expr)
(extractor i j))
(define (select a i)
(if (array-representation-vector)
(let ([symbolic-index (not (concrete-head? i))]
[thresh (overapproximate-symbolic-load-threshold)])
(if (and symbolic-index thresh (>= (vector-length a) thresh))
(fresh-symbolic 'select-overapproximated-value (type-of (vector-ref a 0)))
(vector-ref-bv a i)))
UF representation
(a i)))
(define (vector-update vec pos v)
(define symbolic-index (not (concrete-head? pos)))
(define thresh (overapproximate-symbolic-store-threshold))
(if (and symbolic-index thresh (>= (vector-length vec) thresh))
(let ([type (type-of (vector-ref vec 0))])
(!build-vector (vector-length vec)
(lambda (_) (fresh-symbolic 'overapproximation type))))
(let ([vec-copy (list->vector (vector->list vec))])
(vector-set!-bv vec-copy pos v)
(vector->immutable-vector vec-copy))))
(define (store a i v)
(if (array-representation-vector)
(vector-update a i v)
UF representation
(lambda (i*) (if (bveq i i*) v (a i*)))))
(define (<=>* . args)
(foldl <=> #t args))
to match SMTLIB 's xor , which can take multiple arguments
(define-syntax ($xor stx)
(syntax-parse stx
[(_ (~seq a0 a1) ...) #'(! (<=>* (~@ a0 a1) ...))]
[(_ a (~seq b0 b1) ...) #'(<=>* a (~@ b0 b1) ...)]))
(define (bvxnor . args)
(bvnot (apply bvxor args)))
|
1557c549501eaa88d352a362764eede55167305ed6f1bb15f5bfcec8d76062e3 | geophf/1HaskellADay | Solution.hs | # LANGUAGE QuasiQuotes #
module Y2018.M01.D09.Solution where
{--
Okay, we've parsed articles from JSON and we've stored those articles.
Let's start expanding the scope here both in breath and in depth.
Depth first.
What happens when we don't parse an article? Or we can't store one?
We've logged parsing process information to a Logger-type (of the Writer monad),
today, let's record what we've logged into the database with a new log-table.
--}
import Control.Monad (void)
import Data.Aeson
import Data.Functor.Identity (Identity)
import qualified Data.Map as Map
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
below imports available via 1HaskellADay git repository
import Data.Logger
import Store.SQL.Connection (withConnection)
import Store.SQL.Util.Indexed
import Store.SQL.Util.LookupTable
for DatedArticle
for filtering out AP articles
import Y2018.M01.D02.Solution (parseArticles, storeArticles)
import Y2018.M01.D04.Solution (Authors,pa)
import Y2018.M01.D08.Solution
-- | Log severity levels, ordered least to most severe.
data Severity = TRACE | DEBUG | INFO | WARN | ERROR | FATAL
   deriving (Eq, Ord, Show)

-- | Stored in the database as its show-ed name (e.g. "INFO").
instance ToField Severity where
   toField = toField . show

-- | One log line: severity, application name, module name, message.
-- NOTE(review): the record field `mod` shadows Prelude.mod in this module.
data LogEntry = Entry { sev :: Severity,app,mod,msg :: String }
   deriving (Eq, Show)

-- | A LogEntry paired with the severity lookup table (severity name -> db id).
data LogEntry' = LE' { ent :: LogEntry, lk :: LookupTable }
   deriving Eq

instance Show LogEntry' where
   -- Renders the severity with its looked-up id; Map.! is partial, so the
   -- severity name must be present in the lookup table.
   show (LE' (Entry sev app mod msg) lk) =
      "Entry' { sev :: " ++ show sev ++ ('/':show (lk Map.! show sev))
           ++ concat (zipWith (\ a b -> ", " ++ a ++ " :: \"" ++ b ++ "\"")
                              (words "app mod msg") [app, mod, msg]) ++ " }"

-- Severity is marshalled separately (see ToRow LogEntry'), so only the
-- three string fields appear here.
instance ToRow LogEntry where
   toRow (Entry _ a m e) = map toField [a,m,e]

-- Prepends the severity's database id to the plain LogEntry row.
instance ToRow LogEntry' where
   toRow (LE' ent lk) =
      toField (lk Map.! show (sev ent)):toRow ent
-- | INSERT for the log table; column order must match ToRow LogEntry'
-- (severity id first, then the three string fields).
insertLogEntryStmt :: Query
insertLogEntryStmt =
   [sql|INSERT INTO log (severity,app,module,message) VALUES (?,?,?,?)|]

-- | Batch-inserts log entries, tagging each with its severity id taken
-- from the lookup table.
insertLogEntries :: Connection -> LookupTable -> [LogEntry] -> IO ()
insertLogEntries conn lk =
   void . executeMany conn insertLogEntryStmt . map (`LE'` lk)
modify the ETL process from Y2018.M01.D02.Solution to spill the log entries
-- to the database (also, the Logger m should be cleared, so you don't keep
-- reentering them.
-- down the road, we will enhance the logger to be application-specific and
-- tailor behavior around severity. You know, like log4h ('log for haskell')
-- | Runs the full ETL pipeline for one JSON file: parse the articles,
-- persist the parse log, store the articles, run the supplied ancillary
-- storage action over the stored rows, and log a summary entry.
-- Fix: the `ancilliaryFn` argument was bound but never used — the body
-- hard-coded a call to the imported `storeAncilliary` instead; it now
-- honors the supplied action (pass `storeAncilliary` to keep old behavior).
etl :: BlockParser Identity Authors
    -> (Connection -> [IxValue (DatedArticle Authors)] -> IO ())
    -> Connection -> FilePath -> IO ()
etl generator ancilliaryFn conn json =
   parseArticles generator json >>= \(arts,logentries) ->
   lookupTable conn "severity_lk" >>= \lk ->
   insertLogEntries conn lk (map mkentry logentries) >>
   storeArticles conn arts >>= \ixarts ->
   ancilliaryFn conn ixarts >>
   insertLogEntries conn lk [mkentry ("stored " ++ (show $ length ixarts)
                                       ++ " articles")]
      where mkentry = Entry INFO "etl_pilot" "Y2018.M01.D09.Solution"
-- moving LogEntry etc to Data.Logger
-- moving SQL marshalling to Store.SQL.Util.Logging
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2018/M01/D09/Solution.hs | haskell | }
to the database (also, the Logger m should be cleared, so you don't keep
reentering them.
down the road, we will enhance the logger to be application-specific and
tailor behavior around severity. You know, like log4h ('log for haskell')
moving LogEntry etc to Data.Logger
moving SQL marshalling to Store.SQL.Util.Logging | # LANGUAGE QuasiQuotes #
module Y2018.M01.D09.Solution where
-
Okay , we 've parsed articles from JSON and we 've stored those articles .
Let 's start expanding the scope here both in breath and in depth .
Depth first .
What happens when we do n't parse an article ? Or we ca n't store one ?
We 've logged parsing process information to a Logger - type ( of the monad ) ,
today , let 's record what we 've logged into the database with a new log - table .
-
Okay, we've parsed articles from JSON and we've stored those articles.
Let's start expanding the scope here both in breath and in depth.
Depth first.
What happens when we don't parse an article? Or we can't store one?
We've logged parsing process information to a Logger-type (of the Writer monad),
today, let's record what we've logged into the database with a new log-table.
import Control.Monad (void)
import Data.Aeson
import Data.Functor.Identity (Identity)
import qualified Data.Map as Map
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
below imports available via 1HaskellADay git repository
import Data.Logger
import Store.SQL.Connection (withConnection)
import Store.SQL.Util.Indexed
import Store.SQL.Util.LookupTable
for DatedArticle
for filtering out AP articles
import Y2018.M01.D02.Solution (parseArticles, storeArticles)
import Y2018.M01.D04.Solution (Authors,pa)
import Y2018.M01.D08.Solution
data Severity = TRACE | DEBUG | INFO | WARN | ERROR | FATAL
deriving (Eq, Ord, Show)
instance ToField Severity where
toField = toField . show
data LogEntry = Entry { sev :: Severity,app,mod,msg :: String }
deriving (Eq, Show)
data LogEntry' = LE' { ent :: LogEntry, lk :: LookupTable }
deriving Eq
instance Show LogEntry' where
show (LE' (Entry sev app mod msg) lk) =
"Entry' { sev :: " ++ show sev ++ ('/':show (lk Map.! show sev))
++ concat (zipWith (\ a b -> ", " ++ a ++ " :: \"" ++ b ++ "\"")
(words "app mod msg") [app, mod, msg]) ++ " }"
instance ToRow LogEntry where
toRow (Entry _ a m e) = map toField [a,m,e]
instance ToRow LogEntry' where
toRow (LE' ent lk) =
toField (lk Map.! show (sev ent)):toRow ent
insertLogEntryStmt :: Query
insertLogEntryStmt =
[sql|INSERT INTO log (severity,app,module,message) VALUES (?,?,?,?)|]
insertLogEntries :: Connection -> LookupTable -> [LogEntry] -> IO ()
insertLogEntries conn lk =
void . executeMany conn insertLogEntryStmt . map (`LE'` lk)
modify the ETL process from Y2018.M01.D02.Solution to spill the log entries
etl :: BlockParser Identity Authors
-> (Connection -> [IxValue (DatedArticle Authors)] -> IO ())
-> Connection -> FilePath -> IO ()
etl generator ancilliaryFn conn json =
parseArticles generator json >>= \(arts,logentries) ->
lookupTable conn "severity_lk" >>= \lk ->
insertLogEntries conn lk (map mkentry logentries) >>
storeArticles conn arts >>= \ixarts ->
storeAncilliary conn ixarts >>
insertLogEntries conn lk [mkentry ("stored " ++ (show $ length ixarts)
++ " articles")]
where mkentry = Entry INFO "etl_pilot" "Y2018.M01.D09.Solution"
|
cbc5c3338900e34f47bbf68ab2d672539f80460b4b9f19a5e383e7f879a62c8d | fccm/OCamlSDL2 | test_surf_ba.ml | open Sdl
open Sdlba
(* Maps an (r,g,b) triple to the surface's packed pixel colour value,
   allocating a temporary pixel format and freeing it before returning. *)
let color_of_rgb ~surf ~rgb =
  let fmt_kind = Surface.get_pixelformat_t surf in
  let fmt = Pixel.alloc_format fmt_kind in
  let color = Pixel.map_RGB fmt ~rgb in
  Pixel.free_format fmt;
  (color)

(* Fills the w*h rectangle with top-left corner (x,y) on [surf] with the
   colour given as an (r,g,b) triple. *)
let fill_rect surf x y w h ~rgb =
  let color = color_of_rgb ~surf ~rgb in
  let rect = Rect.make4 x y w h in
  Surface.fill_rect ~dst:surf ~rect ~color;
;;
(* Writes [img] as a binary PPM (P6): a text header with the dimensions,
   one byte per bigarray element, then a trailing newline; flushes [oc]. *)
let output_ppm ~oc ~img ~width ~height =
  Printf.fprintf oc "P6\n%d %d\n255\n" width height;
  let len = Bigarray.Array1.dim img in
  output_string oc (String.init len (fun i -> char_of_int img.{i}));
  output_char oc '\n';
  flush oc
(* Demo entry point: draw overlapping coloured rectangles on an off-screen
   24-bit surface, grab its pixels as a bigarray, and dump them to a PPM. *)
let () =
  Sdl.init [`VIDEO];
  let width, height = (320, 240) in
  let surf = Surface.create_rgb ~width ~height ~depth:24 in
  (* background, then three overlapping rectangles *)
  fill_rect surf 0 0 320 240 ~rgb:(0,0,0);
  fill_rect surf 20 20 200 120 ~rgb:(0,255,0);
  fill_rect surf 60 60 200 140 ~rgb:(255,0,0);
  fill_rect surf 0 0 100 100 ~rgb:(0,0,255);
  let ba = Surface_ba.get_pixels surf in
  let oc = open_out "test_surf_ba.ppm" in
  output_ppm ~oc ~img:ba ~width ~height;
  close_out oc;
  Sdl.quit ()
| null | https://raw.githubusercontent.com/fccm/OCamlSDL2/01fa29187cab90052d2581eb509d1bca1a85418f/tests/test_surf_ba.ml | ocaml | open Sdl
open Sdlba
let color_of_rgb ~surf ~rgb =
let fmt_kind = Surface.get_pixelformat_t surf in
let fmt = Pixel.alloc_format fmt_kind in
let color = Pixel.map_RGB fmt ~rgb in
Pixel.free_format fmt;
(color)
let fill_rect surf x y w h ~rgb =
let color = color_of_rgb ~surf ~rgb in
let rect = Rect.make4 x y w h in
Surface.fill_rect ~dst:surf ~rect ~color;
;;
let output_ppm ~oc ~img ~width ~height =
Printf.fprintf oc "P6\n%d %d\n255\n" width height;
let n = Bigarray.Array1.dim img in
for i = 0 to pred n do
output_char oc (char_of_int img.{i});
done;
output_char oc '\n';
flush oc;
;;
let () =
Sdl.init [`VIDEO];
let width, height = (320, 240) in
let surf = Surface.create_rgb ~width ~height ~depth:24 in
fill_rect surf 0 0 320 240 ~rgb:(0,0,0);
fill_rect surf 20 20 200 120 ~rgb:(0,255,0);
fill_rect surf 60 60 200 140 ~rgb:(255,0,0);
fill_rect surf 0 0 100 100 ~rgb:(0,0,255);
let ba = Surface_ba.get_pixels surf in
let oc = open_out "test_surf_ba.ppm" in
output_ppm ~oc ~img:ba ~width ~height;
close_out oc;
Sdl.quit ()
| |
d36b4c202820d0bc89032d644d3c33bbf528fc004fccddbe4662788c582797d7 | fluree/db | core.cljc | (ns fluree.db.util.core
(:require [clojure.string :as str]
#?@(:clj [[fluree.db.util.clj-exceptions :as clj-exceptions]
[fluree.db.util.cljs-exceptions :as cljs-exceptions]]))
#?(:cljs (:require-macros [fluree.db.util.core :refer [case+]]))
#?(:clj (:import (java.util UUID Date)
(java.time Instant OffsetDateTime ZoneId)
(java.time.format DateTimeFormatter)
(java.net URLEncoder URLDecoder)))
(:refer-clojure :exclude [vswap!]))
#?(:clj (set! *warn-on-reflection* true))
;; Platform-specific integer bounds. On the JVM these are Long bounds; in
;; JavaScript they are +/-(2^53 - 1), the largest integers a double
;; represents exactly. Fix: the `:cljs` branch of max-long was lost during
;; extraction (its comment marker was stripped along with the value line);
;; restored to mirror min-long below.
(def ^:const max-long #?(:clj (Long/MAX_VALUE)
                         ;; 2^53 - 1 for javascript
                         :cljs 9007199254740991))
(def ^:const min-long #?(:clj (Long/MIN_VALUE)
                         :cljs -9007199254740991))
;; NOTE(review): min-integer is -2147483647, one above Integer/MIN_VALUE
;; (-2147483648) — presumably intentional symmetry with max-integer; confirm.
(def ^:const max-integer 2147483647)
(def ^:const min-integer -2147483647)
(defn cljs-env?
  "True when the macro &env map indicates ClojureScript expansion (the CLJS
  macro environment carries an :ns entry; the Clojure one does not)."
  [env]
  (if (:ns env) true false))
#?(:clj
   (defmacro if-cljs
     "Return then if we are generating cljs code and else for Clojure code."
     [then else]
     (if (cljs-env? &env) then else)))

#?(:clj
   (defmacro try-catchall
     "A cross-platform variant of try-catch that catches all exceptions.
     Does not (yet) support finally, and does not need or want an exception class."
     [& body]
     ;; the last form must be (catch sym body...); everything before it is
     ;; the try body proper
     (let [try-body (butlast body)
           [catch sym & catch-body :as catch-form] (last body)]
       (assert (= catch 'catch))
       (assert (symbol? sym))
       ;; expand to the platform-appropriate catch-all class
       `(if-cljs
          (try ~@try-body (~'catch js/Object ~sym ~@catch-body))
          (try ~@try-body (~'catch Throwable ~sym ~@catch-body))))))

;; catch* only ever appears inside try* forms; declared so the symbol resolves
(declare catch*)

#?(:clj
   (defmacro try*
     "Like try but supports catch*. catch* is like catch but supports CLJ/CLJS
     with less boilerplate. In CLJ it catches `Exception`. In CLJS it catches
     `:default`.
     Use it like this: `(try* ... (catch* err (handle-err err)))`.
     Also supports an optional finally clause."
     [& body]
     `(if-cljs
        (cljs-exceptions/try* ~@body)
        (clj-exceptions/try* ~@body))))
(defn index-of
  "Returns index integer (n) of item within a Vector.
  If item cannot be found, returns nil."
  [^clojure.lang.PersistentVector coll value]
  ;; CLJ: java.util.List/.indexOf returns -1 on a miss; normalize to nil
  #?(:clj (let [n (.indexOf coll value)]
            (if (< n 0)
              nil
              n))
     ;; CLJS: linear scan pairing each item with its position
     :cljs (some (fn [[item idx]] (when (= value item) idx))
                 (partition 2 (interleave coll (range))))))
(defn date->millis
  "Given a date (ISO string, number of epoch millis, java.time.Instant,
  java.util.Date, or js/Date), returns epoch millis if possible; throws
  ex-info {:status 400} otherwise."
  [date]
  (cond
    (string? date)
    #?(:clj (-> (Instant/parse date)
                (.toEpochMilli))
       ;; NOTE(review): js/Date.parse already returns epoch millis (a
       ;; number); calling .getTime on that number looks wrong — confirm
       :cljs (-> (js/Date.parse date)
                 (.getTime)))
    (number? date)
    date
    #?@(:clj [(instance? Instant date)
              (.toEpochMilli ^Instant date)
              (instance? Date date)
              (.getTime ^Date date)]
        :cljs [(instance? js/Date date)
               (.getTime date)])
    :else
    (throw (ex-info (str "Invalid date: " (pr-str date))
                    {:status 400 :error :db/invalid-date}))))

(defn current-time-millis
  "Returns current time in epoch milliseonds for closure/script"
  []
  #?(:clj (System/currentTimeMillis)
     :cljs (js/Date.now)))

(defn current-time-iso
  "Returns current time as string for ISO-8601 format"
  []
  #?(:clj (str (Instant/now))
     :cljs (.toISOString (js/Date.))))

(defn response-time-formatted
  "Returns response time, formatted as string. Must provide start time of request
  for clj as (System/nanoTime), or for cljs epoch milliseconds"
  [start-time]
  ;; note the asymmetry: CLJ expects a nanosecond origin, CLJS milliseconds
  #?(:clj (-> (- (System/nanoTime) start-time)
              (/ 1000000)
              (#(format "%.2fms" (float %))))
     :cljs (-> (- (current-time-millis) start-time)
               (str "ms"))))
(defn deep-merge
  "Recursively merges maps left to right; a non-map value on the right
  replaces whatever is on the left (including nil). With no truthy override
  values, returns v unchanged."
  [v & vs]
  (if (some identity vs)
    (reduce (fn [acc nxt]
              (if (and (map? acc) (map? nxt))
                (merge-with deep-merge acc nxt)
                nxt))
            v
            vs)
    v))
(defn email?
  "Returns the match (truthy) when the whole string looks like an email
  address, else nil. re-matches is implicitly whole-string anchored, so the
  explicit ^/$ anchors are redundant but harmless."
  [email]
  (re-matches #"^[\w-\+]+(\.[\w]+)*@[\w-]+(\.[\w]+)*(\.[a-z]{2,})$" email))
(defn pred-ident?
  "Tests if an predicate identity two-tuple
  in form of [pred-name-or-id pred-value]"
  [x]
  (if (and (sequential? x) (= 2 (count x)))
    (string? (first x))
    false))
;; temporary idents (tempids) are plain strings
(defn temp-ident?
  [x]
  (string? x))

(defn subj-ident?
  "Tests if an _id is a numeric or predicate-ident"
  [x]
  (or (int? x)
      (pred-ident? x)))

(defn str->int
  "Converts string to integer. Assumes you've already verified the string is
  parsable to an integer."
  [s]
  ;; NOTE(review): js/parseInt without an explicit radix treats some inputs
  ;; differently than Integer/parseInt — confirm inputs are pre-validated
  #?(:clj (Integer/parseInt s)
     :cljs (js/parseInt s)))
(defn keyword->str
  "Returns the string form of a keyword (namespace retained, leading colon
  dropped). Strings pass through unchanged; any other type throws."
  [k]
  (cond
    (string? k) k
    (keyword? k) (subs (str k) 1)
    :else (throw (ex-info (str "Cannot convert type " (type k) " to string: " (pr-str k))
                          {:status 500 :error :db/unexpected-error}))))
(defn str->keyword
  "Converts a string to a keyword, checking to see if
  the string starts with a ':', which it strips before converting."
  [s]
  (cond
    ;; strip a single leading colon so \":foo\" and \"foo\" both yield :foo
    (string? s) (if (str/starts-with? s ":")
                  (keyword (subs s 1))
                  (keyword s))
    ;; keywords pass through untouched
    (keyword? s) s
    :else (throw (ex-info (str "Cannot convert type " (type s) " to keyword: " (pr-str s))
                          {:status 500 :error :db/unexpected-error}))))
(defn keywordize-keys
  "Does simple (top-level keys only) keyworize-keys if the key is a string;
  non-string keys are left as-is."
  [m]
  (into {}
        (map (fn [[k v]]
               [(if (string? k) (keyword k) k) v]))
        m))
(defn stringify-keys
  "Does simple (top-level keys only) conversion of keyword keys to strings,
  taking only the keyword's name (namespace dropped); other key types pass
  through. Used mostly for serializing properly to JSON."
  [m]
  (into {}
        (map (fn [[k v]]
               [(if (keyword? k) (name k) k) v]))
        m))
(defn normalize-context
  "Keywordizes string contexts so they merge correctly with other keyword
  contexts. Contexts already keyed by keywords pass through unchanged."
  [context-type context]
  (if (= :keyword context-type)
    context
    (keywordize-keys context)))

(defn str->epoch-ms
  "Takes time as a string and returns epoch millis. Throws ex-info
  {:status 400} when the string is not ISO-8601 parseable."
  [time-str]
  (try
    #?(:clj (.toEpochMilli (Instant/parse time-str))
       :cljs (js/Date.parse time-str))
    (catch #?(:clj Exception :cljs :default) _
      (throw (ex-info (str "Invalid time string. Ensure format is ISO-8601 compatible. Provided: " (pr-str time-str))
                      {:status 400
                       :error :db/invalid-time})))))

(defn epoch-ms->iso-8601-str
  "Takes milliseconds since the epoch and returns an ISO-8601 formatted string
  for that datetime. Optionally takes a ZoneId string (e.g. 'America/Denver')."
  ;; NOTE(review): the CLJS branch always renders UTC and ignores zone-id —
  ;; confirm that divergence is intended
  ([millis] (epoch-ms->iso-8601-str millis "Z"))
  ([millis zone-id]
   #?(:clj (-> millis Instant/ofEpochMilli
               (OffsetDateTime/ofInstant (ZoneId/of zone-id))
               (.format DateTimeFormatter/ISO_OFFSET_DATE_TIME))
      :cljs (-> millis js/Date. .toISOString))))
(defn trunc
  "Truncate string s to at most n characters, appending \" ...\" when
  anything was actually dropped. Fix: previously a string of exactly n
  characters was returned with a spurious ellipsis appended even though
  nothing was truncated (`<` instead of `<=`)."
  [s n]
  (if (<= (count s) n)
    s
    (str (subs s 0 n) " ...")))
#?(:clj
   (defmacro some-of
     "Short-circuiting nil-coalescing: expands to nested ifs returning the
     first argument that evaluates non-nil. Unlike `or`, false is kept."
     ([] nil)
     ([x] x)
     ([x & more]
      `(let [x# ~x] (if (nil? x#) (some-of ~@more) x#)))))
(defn filter-vals
  "Filters map k/v pairs dropping any where predicate applied to value is false."
  [pred m]
  (into {}
        (filter (fn [[_ v]] (pred v)))
        m))
(defn without-nils
  "Remove all keys from a map that have nil or empty collection values."
  [m]
  ;; collections must be non-empty; scalars must be non-nil (false is kept)
  (filter-vals #(if (coll? %) (not-empty %) (some? %)) m))
(defn inclusive-range
  "Like range, but includes the end value (when it is reachable from start
  in whole steps).
  Fix: the 3-arity used (+ end step) as the stop, which overshot past end
  for |step| > 1 — e.g. (inclusive-range 0 10 3) yielded (0 3 6 9 12).
  The stop is now clamped to at most one unit past end, which preserves
  the old behavior for |step| <= 1 (including fractional steps)."
  ([] (range))
  ([end] (range (inc end)))
  ([start end] (range start (inc end)))
  ([start end step]
   (let [stop (if (pos? step)
                (+ end (min step 1))
                (+ end (max step -1)))]
     (range start stop step))))
(defn exception?
  "x-platform, returns true if is an exception"
  [x]
  (instance? #?(:clj Throwable :cljs js/Error) x))

(defn url-encode
  "Percent-encodes the stringified input; nil passes through (some->).
  The '+' (space) encoding is rewritten to %20."
  ;; NOTE(review): js/encodeURIComponent never emits '+', so the .replace is
  ;; a no-op on CLJS — confirm the two platforms agree on output
  [string]
  #?(:clj (some-> string str (URLEncoder/encode "UTF-8") (.replace "+" "%20"))
     :cljs (some-> string str (js/encodeURIComponent) (.replace "+" "%20"))))

(defn url-decode
  "Percent-decodes the stringified input; nil passes through (some->)."
  ;; NOTE(review): the CLJS branch ignores `encoding` (decodeURIComponent is
  ;; always UTF-8), and URLDecoder turns '+' into a space where
  ;; decodeURIComponent does not — confirm cross-platform intent
  ([string] (url-decode string "UTF-8"))
  ([string ^String encoding]
   #?(:clj (some-> string str (URLDecoder/decode encoding))
      :cljs (some-> string str (js/decodeURIComponent)))))
(defn map-invert
  "Returns a map with every key/value pair of m swapped; when values
  collide, the entry encountered last wins."
  [m]
  (into {} (map (fn [[k v]] [v k])) m))
(defn zero-pad
  "Zero pads (the stringified) x on the left to at least pad characters.
  Uses count instead of platform-specific .length/.-length interop, which
  drops the reader conditional and avoids JVM reflection."
  [x pad]
  (loop [s (str x)]
    (if (< (count s) pad)
      (recur (str "0" s))
      s)))
(defn conjv
  "Like conj, but a nil collection becomes a vector instead of a list.
  Single-value arity only."
  [coll x]
  (conj (or coll []) x))
(defn conjs
  "Like conj, but a nil collection becomes a set instead of a list.
  Single-value arity only."
  [coll x]
  (conj (or coll #{}) x))
(defn sequential
  "Returns input wrapped in a vector if not already sequential."
  [x]
  (cond-> x
    (not (sequential? x)) vector))
#?(:clj
   (defmacro condps
     "Takes an expression and a set of clauses.
     Each clause can take the form of either:
     unary-predicate-fn? result-expr
     (unary-predicate-fn?-1 ... unary-predicate-fn?-N) result-expr
     For each clause, (unary-predicate-fn? expr) is evalated (for each
     unary-predicate-fn? in the clause when >1 is given). If it returns logical
     true, the clause is a match.
     Similar to condp but takes unary predicates instead of binary and allows
     multiple predicates to be supplied in a list similar to case."
     [expr & clauses]
     (let [gexpr (gensym "expr__")
           ;; recursively emits a nested if chain; a trailing odd clause is
           ;; the default expression, no clause at all throws
           emit (fn emit [expr args]
                  (let [[[a b :as clause] more] (split-at 2 args)
                        n (count clause)]
                    (case n
                      0 `(throw (IllegalArgumentException.
                                  (str "No matching clause: " ~expr)))
                      1 a
                      ;; a literal list of predicates (but not an anonymous
                      ;; fn form) is treated as "any of these match"
                      (let [preds (if (and (coll? a)
                                           (not (= 'fn* (first a)))
                                           (not (= 'fn (first a))))
                                    (vec a)
                                    [a])]
                        `(if ((apply some-fn ~preds) ~expr)
                           ~b
                           ~(emit expr more))))))]
       ;; evaluate the subject expression once
       `(let [~gexpr ~expr]
          ~(emit gexpr clauses)))))

#?(:clj
   ;; evaluates a case+ dispatch form at macroexpansion time; a list of
   ;; dispatch values is evaluated element-wise
   (defn- eval-dispatch
     [d]
     (if (list? d)
       (map eval d)
       (eval d))))

#?(:clj
   (defmacro case+
     "Same as case, but evaluates dispatch values, needed for referring to
     class and def'ed constants as well as java.util.Enum instances.
     NB: Don't use this in CLJS if your dispatch values are :const.
     CLJS (but not CLJ sadly) inlines these and they work fine
     with regular old cljs.core/case. Or check out const-case if you want a
     macro that does the best thing with :const values in both CLJ & CLJS."
     [value & clauses]
     ;; NOTE(review): the CLJS branch splices the partitioned pairs into
     ;; condp un-flattened — looks like it should be (apply concat clauses);
     ;; confirm against actual CLJS call sites
     (let [clauses (partition 2 2 nil clauses)
           default (when (-> clauses last count (= 1))
                     (last clauses))
           clauses (if default (drop-last clauses) clauses)]
       (if-cljs
         `(condp = ~value
            ~@(concat clauses default))
         `(case ~value
            ~@(concat (->> clauses
                           (map #(-> % first eval-dispatch (list (second %))))
                           (mapcat identity))
                      default))))))
;; Fixed-arity function wrappers over clojure.core/vswap!; see docstring
;; for why the macro cannot be used directly inside go blocks.
(defn vswap!
  "This silly fn exists to work around a bug in go macros where they sometimes clobber
  type hints and issue reflection warnings. The vswap! macro uses interop so those forms
  get macroexpanded into the go block. You'll then see reflection warnings for reset
  deref. By letting the macro expand into this fn instead, it avoids the go bug.
  I've filed a JIRA issue here: -240
  NB: I couldn't figure out how to get a var-arg version working so this only supports
  0-3 args. I didn't see any usages in here that need more than 2, but note well and
  feel free to add additional arities if needed (but maybe see if that linked bug has
  been fixed first in which case delete this thing with a vengeance and remove the
  refer-clojure exclude in the ns form).
  - WSM 2021-08-26"
  ([vol f]
   (clojure.core/vswap! vol f))
  ([vol f arg1]
   (clojure.core/vswap! vol f arg1))
  ([vol f arg1 arg2]
   (clojure.core/vswap! vol f arg1 arg2))
  ([vol f arg1 arg2 arg3]
   (clojure.core/vswap! vol f arg1 arg2 arg3)))
| null | https://raw.githubusercontent.com/fluree/db/9e9718b11e954c47621ea2c4651105f6d0765535/src/fluree/db/util/core.cljc | clojure | (ns fluree.db.util.core
(:require [clojure.string :as str]
#?@(:clj [[fluree.db.util.clj-exceptions :as clj-exceptions]
[fluree.db.util.cljs-exceptions :as cljs-exceptions]]))
#?(:cljs (:require-macros [fluree.db.util.core :refer [case+]]))
#?(:clj (:import (java.util UUID Date)
(java.time Instant OffsetDateTime ZoneId)
(java.time.format DateTimeFormatter)
(java.net URLEncoder URLDecoder)))
(:refer-clojure :exclude [vswap!]))
#?(:clj (set! *warn-on-reflection* true))
(def ^:const max-long #?(:clj (Long/MAX_VALUE)
2 ^ 53 - 1 for javascript
(def ^:const min-long #?(:clj (Long/MIN_VALUE)
:cljs -9007199254740991))
(def ^:const max-integer 2147483647)
(def ^:const min-integer -2147483647)
(defn cljs-env?
"Take the &env from a macro, and tell whether we are expanding into cljs."
[env]
(boolean (:ns env)))
#?(:clj
(defmacro if-cljs
"Return then if we are generating cljs code and else for Clojure code.
"
[then else]
(if (cljs-env? &env) then else)))
#?(:clj
(defmacro try-catchall
"A cross-platform variant of try-catch that catches all exceptions.
Does not (yet) support finally, and does not need or want an exception class."
[& body]
(let [try-body (butlast body)
[catch sym & catch-body :as catch-form] (last body)]
(assert (= catch 'catch))
(assert (symbol? sym))
`(if-cljs
(try ~@try-body (~'catch js/Object ~sym ~@catch-body))
(try ~@try-body (~'catch Throwable ~sym ~@catch-body))))))
(declare catch*)
#?(:clj
(defmacro try*
"Like try but supports catch*. catch* is like catch but supports CLJ/CLJS
with less boilerplate. In CLJ it catches `Exception`. In CLJS it catches
`:default`.
Use it like this: `(try* ... (catch* err (handle-err err)))`.
Also supports an optional finally clause."
[& body]
`(if-cljs
(cljs-exceptions/try* ~@body)
(clj-exceptions/try* ~@body))))
(defn index-of
"Returns index integer (n) of item within a Vector.
If item cannot be found, returns nil."
[^clojure.lang.PersistentVector coll value]
#?(:clj (let [n (.indexOf coll value)]
(if (< n 0)
nil
n))
:cljs (some (fn [[item idx]] (when (= value item) idx))
(partition 2 (interleave coll (range))))))
(defn date->millis
"Given a date, returns epoch millis if possible."
[date]
(cond
(string? date)
#?(:clj (-> (Instant/parse date)
(.toEpochMilli))
:cljs (-> (js/Date.parse date)
(.getTime)))
(number? date)
date
#?@(:clj [(instance? Instant date)
(.toEpochMilli ^Instant date)
(instance? Date date)
(.getTime ^Date date)]
:cljs [(instance? js/Date date)
(.getTime date)])
:else
(throw (ex-info (str "Invalid date: " (pr-str date))
{:status 400 :error :db/invalid-date}))))
(defn current-time-millis
"Returns current time in epoch milliseonds for closure/script"
[]
#?(:clj (System/currentTimeMillis)
:cljs (js/Date.now)))
(defn current-time-iso
"Returns current time as string for ISO-8601 format"
[]
#?(:clj (str (Instant/now))
:cljs (.toISOString (js/Date.))))
(defn response-time-formatted
"Returns response time, formatted as string. Must provide start time of request
for clj as (System/nanoTime), or for cljs epoch milliseconds"
[start-time]
#?(:clj (-> (- (System/nanoTime) start-time)
(/ 1000000)
(#(format "%.2fms" (float %))))
:cljs (-> (- (current-time-millis) start-time)
(str "ms"))))
(defn deep-merge [v & vs]
(letfn [(rec-merge [v1 v2]
(if (and (map? v1) (map? v2))
(merge-with deep-merge v1 v2)
v2))]
(if (some identity vs)
(reduce #(rec-merge %1 %2) v vs)
v)))
(defn email?
[email]
(re-matches #"^[\w-\+]+(\.[\w]+)*@[\w-]+(\.[\w]+)*(\.[a-z]{2,})$" email))
(defn pred-ident?
"Tests if an predicate identity two-tuple
in form of [pred-name-or-id pred-value]"
[x]
(and (sequential? x)
(= 2 (count x))
(string? (first x))))
(defn temp-ident?
[x]
(string? x))
(defn subj-ident?
"Tests if an _id is a numeric or predicate-ident"
[x]
(or (int? x)
(pred-ident? x)))
(defn str->int
"Converts string to integer. Assumes you've already verified the string is
parsable to an integer."
[s]
#?(:clj (Integer/parseInt s)
:cljs (js/parseInt s)))
(defn keyword->str
"Converts a keyword to string. Can safely be called on a
string which will return itself."
[k]
(cond
(keyword? k) (subs (str k) 1)
(string? k) k
:else (throw (ex-info (str "Cannot convert type " (type k) " to string: " (pr-str k))
{:status 500 :error :db/unexpected-error}))))
(defn str->keyword
"Converts a string to a keyword, checking to see if
the string starts with a ':', which it strips before converting."
[s]
(cond
(string? s) (if (str/starts-with? s ":")
(keyword (subs s 1))
(keyword s))
(keyword? s) s
:else (throw (ex-info (str "Cannot convert type " (type s) " to keyword: " (pr-str s))
{:status 500 :error :db/unexpected-error}))))
(defn keywordize-keys
"Does simple (top-level keys only) keyworize-keys if the key is a string."
[m]
(reduce-kv
(fn [acc k v]
(if (string? k)
(assoc acc (keyword k) v)
(assoc acc k v)))
{} m))
(defn stringify-keys
"Does simple (top-level keys only) conversion of keyword keys to strings.
This only takes the 'name' value of keywords, not the namespace. Could do
namespace too, but nothing currently needs that. Used mostly for serializing
properly to JSON."
[m]
(reduce-kv
(fn [acc k v]
(if (keyword? k)
(assoc acc (name k) v)
(assoc acc k v)))
{} m))
(defn normalize-context
"Keywordizes string contexts so they merge correctly with other keyword
contexts."
[context-type context]
(if (= :keyword context-type)
context
(keywordize-keys context)))
(defn str->epoch-ms
"Takes time as a string and returns epoch millis."
[time-str]
(try
#?(:clj (.toEpochMilli (Instant/parse time-str))
:cljs (js/Date.parse time-str))
(catch #?(:clj Exception :cljs :default) _
(throw (ex-info (str "Invalid time string. Ensure format is ISO-8601 compatible. Provided: " (pr-str time-str))
{:status 400
:error :db/invalid-time})))))
(defn epoch-ms->iso-8601-str
"Takes milliseconds since the epoch and returns an ISO-8601 formatted string
for that datetime. Optionally takes a ZoneId string (e.g. 'America/Denver')."
([millis] (epoch-ms->iso-8601-str millis "Z"))
([millis zone-id]
#?(:clj (-> millis Instant/ofEpochMilli
(OffsetDateTime/ofInstant (ZoneId/of zone-id))
(.format DateTimeFormatter/ISO_OFFSET_DATE_TIME))
:cljs (-> millis js/Date. .toISOString))))
(defn trunc
"Truncate string s to n characters."
[s n]
(if (< (count s) n)
s
(str (subs s 0 n) " ...")))
#?(:clj
(defmacro some-of
([] nil)
([x] x)
([x & more]
`(let [x# ~x] (if (nil? x#) (some-of ~@more) x#)))))
(defn filter-vals
"Filters map k/v pairs dropping any where predicate applied to value is false."
[pred m]
(reduce-kv (fn [m k v] (if (pred v) (assoc m k v) m)) {} m))
(defn without-nils
"Remove all keys from a map that have nil or empty collection values."
[m]
(filter-vals #(if (coll? %) (not-empty %) (some? %)) m))
(defn inclusive-range
"Like range, but includes start/end values."
([] (range))
([end] (range (inc end)))
([start end] (range start (inc end)))
([start end step] (range start (+ end step) step)))
(defn exception?
"x-platform, returns true if is an exception"
[x]
(instance? #?(:clj Throwable :cljs js/Error) x))
(defn url-encode
[string]
#?(:clj (some-> string str (URLEncoder/encode "UTF-8") (.replace "+" "%20"))
:cljs (some-> string str (js/encodeURIComponent) (.replace "+" "%20"))))
(defn url-decode
([string] (url-decode string "UTF-8"))
([string ^String encoding]
#?(:clj (some-> string str (URLDecoder/decode encoding))
:cljs (some-> string str (js/decodeURIComponent)))))
(defn map-invert
[m]
(reduce (fn [m [k v]] (assoc m v k)) {} m))
(defn zero-pad
"Zero pads x"
[x pad]
(loop [s (str x)]
(if (< #?(:clj (.length s) :cljs (.-length s)) pad)
(recur (str "0" s))
s)))
(defn conjv
"Like conj, but if collection is nil creates a new vector instead of list.
Not built to handle variable arity values"
[coll x]
(if (nil? coll)
(vector x)
(conj coll x)))
(defn conjs
"Like conj, but if collection is nil creates a new set instead of list.
Not built to handle variable arity values"
[coll x]
(if (nil? coll)
#{x}
(conj coll x)))
(defn sequential
"Returns input wrapped in a vector if not already sequential."
[x]
(if (sequential? x)
x
[x]))
#?(:clj
(defmacro condps
"Takes an expression and a set of clauses.
Each clause can take the form of either:
unary-predicate-fn? result-expr
(unary-predicate-fn?-1 ... unary-predicate-fn?-N) result-expr
For each clause, (unary-predicate-fn? expr) is evalated (for each
unary-predicate-fn? in the clause when >1 is given). If it returns logical
true, the clause is a match.
Similar to condp but takes unary predicates instead of binary and allows
multiple predicates to be supplied in a list similar to case."
[expr & clauses]
(let [gexpr (gensym "expr__")
emit (fn emit [expr args]
(let [[[a b :as clause] more] (split-at 2 args)
n (count clause)]
(case n
0 `(throw (IllegalArgumentException.
(str "No matching clause: " ~expr)))
1 a
(let [preds (if (and (coll? a)
(not (= 'fn* (first a)))
(not (= 'fn (first a))))
(vec a)
[a])]
`(if ((apply some-fn ~preds) ~expr)
~b
~(emit expr more))))))]
`(let [~gexpr ~expr]
~(emit gexpr clauses)))))
#?(:clj
(defn- eval-dispatch
[d]
(if (list? d)
(map eval d)
(eval d))))
#?(:clj
(defmacro case+
"Same as case, but evaluates dispatch values, needed for referring to
class and def'ed constants as well as java.util.Enum instances.
NB: Don't use this in CLJS if your dispatch values are :const.
CLJS (but not CLJ sadly) inlines these and they work fine
with regular old cljs.core/case. Or check out const-case if you want a
macro that does the best thing with :const values in both CLJ & CLJS."
[value & clauses]
(let [clauses (partition 2 2 nil clauses)
default (when (-> clauses last count (= 1))
(last clauses))
clauses (if default (drop-last clauses) clauses)]
(if-cljs
`(condp = ~value
~@(concat clauses default))
`(case ~value
~@(concat (->> clauses
(map #(-> % first eval-dispatch (list (second %))))
(mapcat identity))
default))))))
(defn vswap!
"This silly fn exists to work around a bug in go macros where they sometimes clobber
type hints and issue reflection warnings. The vswap! macro uses interop so those forms
get macroexpanded into the go block. You'll then see reflection warnings for reset
deref. By letting the macro expand into this fn instead, it avoids the go bug.
I've filed a JIRA issue here: -240
NB: I couldn't figure out how to get a var-arg version working so this only supports
0-3 args. I didn't see any usages in here that need more than 2, but note well and
feel free to add additional arities if needed (but maybe see if that linked bug has
been fixed first in which case delete this thing with a vengeance and remove the
refer-clojure exclude in the ns form).
- WSM 2021-08-26"
([vol f]
(clojure.core/vswap! vol f))
([vol f arg1]
(clojure.core/vswap! vol f arg1))
([vol f arg1 arg2]
(clojure.core/vswap! vol f arg1 arg2))
([vol f arg1 arg2 arg3]
(clojure.core/vswap! vol f arg1 arg2 arg3)))
| |
411055e066ea5f82501cd85c5f72bb457add3d6a1e1da8e4a4e16553da920be3 | Decentralized-Pictures/T4L3NT | cache_repr.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
*
Frequently used data should be kept in memory and persisted along a
chain of blocks . The caching mechanism allows the economic protocol
to declare such data and to rely on a Least Recently Used strategy
to keep the cache size under a fixed limit .
Take a look at { ! Environment_cache } and { ! Environment_context }
for additional implementation details about the protocol cache .
The protocol has two main kinds of interaction with the cache :
1 . It is responsible for setting up the cache with appropriate
parameter values and callbacks . It must also compute cache nonces
to give the shell enough information to properly synchronize the
in - memory cache with the block contexts and protocol upgrades .
A typical place where this happens is { ! Apply } .
This aspect must be implemented using { ! Cache . Admin } .
2 . It can exploit the cache to retrieve , to insert , and to update
cached values from the in - memory cache . The basic idea is to
avoid recomputing values from scratch at each block when they are
frequently used . { ! Script_cache } is an example of such usage .
This aspect must be implemented using { ! Cache . Interface } .
Frequently used data should be kept in memory and persisted along a
chain of blocks. The caching mechanism allows the economic protocol
to declare such data and to rely on a Least Recently Used strategy
to keep the cache size under a fixed limit.
Take a look at {!Environment_cache} and {!Environment_context}
for additional implementation details about the protocol cache.
The protocol has two main kinds of interaction with the cache:
1. It is responsible for setting up the cache with appropriate
parameter values and callbacks. It must also compute cache nonces
to give the shell enough information to properly synchronize the
in-memory cache with the block contexts and protocol upgrades.
A typical place where this happens is {!Apply}.
This aspect must be implemented using {!Cache.Admin}.
2. It can exploit the cache to retrieve, to insert, and to update
cached values from the in-memory cache. The basic idea is to
avoid recomputing values from scratch at each block when they are
frequently used. {!Script_cache} is an example of such usage.
This aspect must be implemented using {!Cache.Interface}.
*)
(** Size for subcaches and values of the cache. *)
type size = int
(** Index type to index caches. *)
type index = int
(**
The following module acts on the whole cache, not on a specific
sub-cache, unlike {!Interface}. It is used to administrate the
protocol cache, e.g., to maintain the cache in a consistent state
with respect to the chain. This module is typically used by
low-level layers of the protocol and by the shell.
*)
module Admin : sig
* A key uniquely identifies a cached [ value ] in some subcache .
type key
(** Cached values. *)
type value
* [ pp ] is a pretty printter for the [ cache ] of [ ctxt ] .
val pp : Format.formatter -> Raw_context.t -> unit
* [ set_cache_layout layout ] sets the caches of [ ctxt ] to
comply with given [ layout ] . If there was already a cache in
[ ctxt ] , it is erased by the new layout .
In that case , a fresh collection of empty caches is reconstructed
from the new [ layout ] . Notice that cache [ key]s are invalidated
in that case , i.e. [ find t k ] will return [ None ] .
comply with given [layout]. If there was already a cache in
[ctxt], it is erased by the new layout.
In that case, a fresh collection of empty caches is reconstructed
from the new [layout]. Notice that cache [key]s are invalidated
in that case, i.e. [find t k] will return [None]. *)
val set_cache_layout : Raw_context.t -> size list -> Raw_context.t Lwt.t
* [ sync ~cache_nonce ] updates the context with the domain of
the cache computed so far . Such function is expected to be called
at the end of the validation of a block , when there is no more
accesses to the cache .
[ cache_nonce ] identifies the block that introduced new cache
entries . The nonce should identify uniquely the block which
modifies this value . It can not be the block hash for circularity
reasons : The value of the nonce is stored onto the context and
consequently influences the context hash of the very same
block . Such nonce can not be determined by the shell and its
computation is delegated to the economic protocol .
the cache computed so far. Such function is expected to be called
at the end of the validation of a block, when there is no more
accesses to the cache.
[cache_nonce] identifies the block that introduced new cache
entries. The nonce should identify uniquely the block which
modifies this value. It cannot be the block hash for circularity
reasons: The value of the nonce is stored onto the context and
consequently influences the context hash of the very same
block. Such nonce cannot be determined by the shell and its
computation is delegated to the economic protocol. *)
val sync : Raw_context.t -> cache_nonce:Bytes.t -> Raw_context.t Lwt.t
* [ clear ] removes all cache entries .
val clear : Raw_context.t -> Raw_context.t
* { 3 Cache helpers for RPCs }
(** [future_cache_expectation ctxt ~time_in_blocks] returns [ctxt] except
that the entries of the caches that are presumably too old to
still be in the caches in [n_blocks] are removed.
This function is based on a heuristic. The context maintains
the median of the number of removed entries: this number is
multipled by `n_blocks` to determine the entries that are
likely to be removed in `n_blocks`. *)
val future_cache_expectation :
Raw_context.t -> time_in_blocks:int -> Raw_context.t
* [ cache_size ~cache_index ] returns an overapproximation of
the size of the cache . Returns [ None ] if [ cache_index ] is
greater than the number of subcaches declared by the cache
layout .
the size of the cache. Returns [None] if [cache_index] is
greater than the number of subcaches declared by the cache
layout. *)
val cache_size : Raw_context.t -> cache_index:int -> size option
(** [cache_size_limit ctxt ~cache_index] returns the maximal size of
the cache indexed by [cache_index]. Returns [None] if
[cache_index] is greater than the number of subcaches declared
by the cache layout. *)
val cache_size_limit : Raw_context.t -> cache_index:int -> size option
(** [value_of_key ctxt k] interprets the functions introduced by
[register] to construct a cacheable value for a key [k]. *)
val value_of_key :
Raw_context.t -> Context.Cache.key -> Context.Cache.value tzresult Lwt.t
end
(** A client uses a unique namespace (represented as a string
without '@') to avoid collision with the keys of other
clients. *)
type namespace = private string
(** [create_namespace str] creates a valid namespace from [str]
@raise Invalid_argument if [str] contains '@'
*)
val create_namespace : string -> namespace
(** A key is fully determined by a namespace and an identifier. *)
type identifier = string
(**
To use the cache, a client must implement the [CLIENT]
interface.
*)
module type CLIENT = sig
(** The type of value to be stored in the cache. *)
type cached_value
* The client must declare the index of the subcache where its
values shall live . [ cache_index ] must be between [ 0 ] and
[ Constants_repr.cache_layout - 1 ] .
values shall live. [cache_index] must be between [0] and
[List.length Constants_repr.cache_layout - 1]. *)
val cache_index : index
(** The client must declare a namespace. This namespace must
be unique. Otherwise, the program stops.
A namespace cannot contain '@'. *)
val namespace : namespace
* [ value_of_identifier i d ] builds the cached value identified by
[ i d ] . This function is called when the subcache is loaded into
memory from the on - disk representation of its domain .
An error during the execution of this function is fatal as
witnessed by its type : an error embedded in a [ tzresult ] is not
supposed to be caught by the protocol .
[id]. This function is called when the subcache is loaded into
memory from the on-disk representation of its domain.
An error during the execution of this function is fatal as
witnessed by its type: an error embedded in a [tzresult] is not
supposed to be caught by the protocol. *)
val value_of_identifier :
Raw_context.t -> identifier -> cached_value tzresult Lwt.t
end
*
An [ INTERFACE ] to the subcache where keys live in a given [ namespace ] .
An [INTERFACE] to the subcache where keys live in a given [namespace].
*)
module type INTERFACE = sig
(** The type of value to be stored in the cache. *)
type cached_value
* [ update i ( Some ( e , size ) ) ] returns a context where the
value [ e ] of given [ size ] is associated to identifier [ i ] in
the subcache . If [ i ] is already in the subcache , the cache
entry is updated .
[ update i None ] removes [ i ] from the subcache .
value [e] of given [size] is associated to identifier [i] in
the subcache. If [i] is already in the subcache, the cache
entry is updated.
[update ctxt i None] removes [i] from the subcache. *)
val update :
Raw_context.t ->
identifier ->
(cached_value * size) option ->
Raw_context.t tzresult
* [ find i = Some v ] if [ v ] is the value associated to [ i ]
in the subcache . Returns [ None ] if there is no such value in
the subcache . This function is in the Lwt monad because if the
value may have not been constructed ( see the lazy loading
mode in { ! Environment_context } ) , it is constructed on the fly .
in the subcache. Returns [None] if there is no such value in
the subcache. This function is in the Lwt monad because if the
value may have not been constructed (see the lazy loading
mode in {!Environment_context}), it is constructed on the fly. *)
val find : Raw_context.t -> identifier -> cached_value option tzresult Lwt.t
* [ list_identifiers ] returns the list of the
identifiers of the cached values along with their respective
size . The returned list is sorted in terms of their age in the
cache , the oldest coming first .
identifiers of the cached values along with their respective
size. The returned list is sorted in terms of their age in the
cache, the oldest coming first. *)
val list_identifiers : Raw_context.t -> (string * int) list
* [ identifier_rank identifier ] returns the number of cached values
older than the one of [ identifier ] ; or , [ None ] if the [ identifier ] has
no associated value in the subcache .
older than the one of [identifier]; or, [None] if the [identifier] has
no associated value in the subcache. *)
val identifier_rank : Raw_context.t -> string -> int option
* [ size ctxt ] returns an overapproximation of the subcache size
( in bytes ) .
(in bytes). *)
val size : Raw_context.t -> int
* [ size_limit ctxt ] returns the maximal size of the subcache
( in bytes ) .
(in bytes). *)
val size_limit : Raw_context.t -> int
end
(** [register_exn client] produces an [Interface] specific to a
given [client]. This function can fail if [client] does not
respect the invariant declared in the documentation of
{!CLIENT}. *)
val register_exn :
(module CLIENT with type cached_value = 'a) ->
(module INTERFACE with type cached_value = 'a)
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_012_Psithaca/lib_protocol/cache_repr.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* Size for subcaches and values of the cache.
* Index type to index caches.
*
The following module acts on the whole cache, not on a specific
sub-cache, unlike {!Interface}. It is used to administrate the
protocol cache, e.g., to maintain the cache in a consistent state
with respect to the chain. This module is typically used by
low-level layers of the protocol and by the shell.
* Cached values.
* [future_cache_expectation ctxt ~time_in_blocks] returns [ctxt] except
that the entries of the caches that are presumably too old to
still be in the caches in [n_blocks] are removed.
This function is based on a heuristic. The context maintains
the median of the number of removed entries: this number is
multipled by `n_blocks` to determine the entries that are
likely to be removed in `n_blocks`.
* [cache_size_limit ctxt ~cache_index] returns the maximal size of
the cache indexed by [cache_index]. Returns [None] if
[cache_index] is greater than the number of subcaches declared
by the cache layout.
* [value_of_key ctxt k] interprets the functions introduced by
[register] to construct a cacheable value for a key [k].
* A client uses a unique namespace (represented as a string
without '@') to avoid collision with the keys of other
clients.
* [create_namespace str] creates a valid namespace from [str]
@raise Invalid_argument if [str] contains '@'
* A key is fully determined by a namespace and an identifier.
*
To use the cache, a client must implement the [CLIENT]
interface.
* The type of value to be stored in the cache.
* The client must declare a namespace. This namespace must
be unique. Otherwise, the program stops.
A namespace cannot contain '@'.
* The type of value to be stored in the cache.
* [register_exn client] produces an [Interface] specific to a
given [client]. This function can fail if [client] does not
respect the invariant declared in the documentation of
{!CLIENT}. | Copyright ( c ) 2021 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
*
Frequently used data should be kept in memory and persisted along a
chain of blocks . The caching mechanism allows the economic protocol
to declare such data and to rely on a Least Recently Used strategy
to keep the cache size under a fixed limit .
Take a look at { ! Environment_cache } and { ! Environment_context }
for additional implementation details about the protocol cache .
The protocol has two main kinds of interaction with the cache :
1 . It is responsible for setting up the cache with appropriate
parameter values and callbacks . It must also compute cache nonces
to give the shell enough information to properly synchronize the
in - memory cache with the block contexts and protocol upgrades .
A typical place where this happens is { ! Apply } .
This aspect must be implemented using { ! Cache . Admin } .
2 . It can exploit the cache to retrieve , to insert , and to update
cached values from the in - memory cache . The basic idea is to
avoid recomputing values from scratch at each block when they are
frequently used . { ! Script_cache } is an example of such usage .
This aspect must be implemented using { ! Cache . Interface } .
Frequently used data should be kept in memory and persisted along a
chain of blocks. The caching mechanism allows the economic protocol
to declare such data and to rely on a Least Recently Used strategy
to keep the cache size under a fixed limit.
Take a look at {!Environment_cache} and {!Environment_context}
for additional implementation details about the protocol cache.
The protocol has two main kinds of interaction with the cache:
1. It is responsible for setting up the cache with appropriate
parameter values and callbacks. It must also compute cache nonces
to give the shell enough information to properly synchronize the
in-memory cache with the block contexts and protocol upgrades.
A typical place where this happens is {!Apply}.
This aspect must be implemented using {!Cache.Admin}.
2. It can exploit the cache to retrieve, to insert, and to update
cached values from the in-memory cache. The basic idea is to
avoid recomputing values from scratch at each block when they are
frequently used. {!Script_cache} is an example of such usage.
This aspect must be implemented using {!Cache.Interface}.
*)
type size = int
type index = int
module Admin : sig
* A key uniquely identifies a cached [ value ] in some subcache .
type key
type value
* [ pp ] is a pretty printter for the [ cache ] of [ ctxt ] .
val pp : Format.formatter -> Raw_context.t -> unit
* [ set_cache_layout layout ] sets the caches of [ ctxt ] to
comply with given [ layout ] . If there was already a cache in
[ ctxt ] , it is erased by the new layout .
In that case , a fresh collection of empty caches is reconstructed
from the new [ layout ] . Notice that cache [ key]s are invalidated
in that case , i.e. [ find t k ] will return [ None ] .
comply with given [layout]. If there was already a cache in
[ctxt], it is erased by the new layout.
In that case, a fresh collection of empty caches is reconstructed
from the new [layout]. Notice that cache [key]s are invalidated
in that case, i.e. [find t k] will return [None]. *)
val set_cache_layout : Raw_context.t -> size list -> Raw_context.t Lwt.t
* [ sync ~cache_nonce ] updates the context with the domain of
the cache computed so far . Such function is expected to be called
at the end of the validation of a block , when there is no more
accesses to the cache .
[ cache_nonce ] identifies the block that introduced new cache
entries . The nonce should identify uniquely the block which
modifies this value . It can not be the block hash for circularity
reasons : The value of the nonce is stored onto the context and
consequently influences the context hash of the very same
block . Such nonce can not be determined by the shell and its
computation is delegated to the economic protocol .
the cache computed so far. Such function is expected to be called
at the end of the validation of a block, when there is no more
accesses to the cache.
[cache_nonce] identifies the block that introduced new cache
entries. The nonce should identify uniquely the block which
modifies this value. It cannot be the block hash for circularity
reasons: The value of the nonce is stored onto the context and
consequently influences the context hash of the very same
block. Such nonce cannot be determined by the shell and its
computation is delegated to the economic protocol. *)
val sync : Raw_context.t -> cache_nonce:Bytes.t -> Raw_context.t Lwt.t
* [ clear ] removes all cache entries .
val clear : Raw_context.t -> Raw_context.t
* { 3 Cache helpers for RPCs }
val future_cache_expectation :
Raw_context.t -> time_in_blocks:int -> Raw_context.t
* [ cache_size ~cache_index ] returns an overapproximation of
the size of the cache . Returns [ None ] if [ cache_index ] is
greater than the number of subcaches declared by the cache
layout .
the size of the cache. Returns [None] if [cache_index] is
greater than the number of subcaches declared by the cache
layout. *)
val cache_size : Raw_context.t -> cache_index:int -> size option
val cache_size_limit : Raw_context.t -> cache_index:int -> size option
val value_of_key :
Raw_context.t -> Context.Cache.key -> Context.Cache.value tzresult Lwt.t
end
type namespace = private string
val create_namespace : string -> namespace
type identifier = string
module type CLIENT = sig
type cached_value
* The client must declare the index of the subcache where its
values shall live . [ cache_index ] must be between [ 0 ] and
[ Constants_repr.cache_layout - 1 ] .
values shall live. [cache_index] must be between [0] and
[List.length Constants_repr.cache_layout - 1]. *)
val cache_index : index
val namespace : namespace
* [ value_of_identifier i d ] builds the cached value identified by
[ i d ] . This function is called when the subcache is loaded into
memory from the on - disk representation of its domain .
An error during the execution of this function is fatal as
witnessed by its type : an error embedded in a [ tzresult ] is not
supposed to be caught by the protocol .
[id]. This function is called when the subcache is loaded into
memory from the on-disk representation of its domain.
An error during the execution of this function is fatal as
witnessed by its type: an error embedded in a [tzresult] is not
supposed to be caught by the protocol. *)
val value_of_identifier :
Raw_context.t -> identifier -> cached_value tzresult Lwt.t
end
*
An [ INTERFACE ] to the subcache where keys live in a given [ namespace ] .
An [INTERFACE] to the subcache where keys live in a given [namespace].
*)
module type INTERFACE = sig
type cached_value
* [ update i ( Some ( e , size ) ) ] returns a context where the
value [ e ] of given [ size ] is associated to identifier [ i ] in
the subcache . If [ i ] is already in the subcache , the cache
entry is updated .
[ update i None ] removes [ i ] from the subcache .
value [e] of given [size] is associated to identifier [i] in
the subcache. If [i] is already in the subcache, the cache
entry is updated.
[update ctxt i None] removes [i] from the subcache. *)
val update :
Raw_context.t ->
identifier ->
(cached_value * size) option ->
Raw_context.t tzresult
* [ find i = Some v ] if [ v ] is the value associated to [ i ]
in the subcache . Returns [ None ] if there is no such value in
the subcache . This function is in the Lwt monad because if the
value may have not been constructed ( see the lazy loading
mode in { ! Environment_context } ) , it is constructed on the fly .
in the subcache. Returns [None] if there is no such value in
the subcache. This function is in the Lwt monad because if the
value may have not been constructed (see the lazy loading
mode in {!Environment_context}), it is constructed on the fly. *)
val find : Raw_context.t -> identifier -> cached_value option tzresult Lwt.t
* [ list_identifiers ] returns the list of the
identifiers of the cached values along with their respective
size . The returned list is sorted in terms of their age in the
cache , the oldest coming first .
identifiers of the cached values along with their respective
size. The returned list is sorted in terms of their age in the
cache, the oldest coming first. *)
val list_identifiers : Raw_context.t -> (string * int) list
* [ identifier_rank identifier ] returns the number of cached values
older than the one of [ identifier ] ; or , [ None ] if the [ identifier ] has
no associated value in the subcache .
older than the one of [identifier]; or, [None] if the [identifier] has
no associated value in the subcache. *)
val identifier_rank : Raw_context.t -> string -> int option
* [ size ctxt ] returns an overapproximation of the subcache size
( in bytes ) .
(in bytes). *)
val size : Raw_context.t -> int
* [ size_limit ctxt ] returns the maximal size of the subcache
( in bytes ) .
(in bytes). *)
val size_limit : Raw_context.t -> int
end
val register_exn :
(module CLIENT with type cached_value = 'a) ->
(module INTERFACE with type cached_value = 'a)
|
5063ebc632858238e53be462418eff13d806abaf0f520aee17aa57eaf9d50ef8 | ujamjar/hardcaml | eventsim2.mli |
* hardcaml - hardware design in OCaml
*
* ( c ) 2014 MicroJamJar Ltd
*
* Author(s ):
* Description :
*
* hardcaml - hardware design in OCaml
*
* (c) 2014 MicroJamJar Ltd
*
* Author(s):
* Description:
*
*)
open HardCaml
module B : Comb.S
type value = B.t
type time = int
module Signal : sig
type t =
{
(* current value *)
mutable value : value;
(* time of last transition *)
mutable time : time;
(* debug name *)
name : string;
}
val mk : string -> int -> t
end
module Event : sig
type t =
{
(* time of even *)
time : time;
(* value of event *)
value : value;
(* signal to change *)
signal : Signal.t;
}
val mk : time -> value -> Signal.t -> t
end
module Process : sig
type t =
{
sensitivity : Signal.t list;
run : unit -> unit;
}
val mk : Signal.t list -> (unit -> unit) -> t
end
| null | https://raw.githubusercontent.com/ujamjar/hardcaml/65f32f543348e81cbb7f6d0e77f1f1203bf4e335/staging/eventsim2.mli | ocaml | current value
time of last transition
debug name
time of even
value of event
signal to change |
* hardcaml - hardware design in OCaml
*
* ( c ) 2014 MicroJamJar Ltd
*
* Author(s ):
* Description :
*
* hardcaml - hardware design in OCaml
*
* (c) 2014 MicroJamJar Ltd
*
* Author(s):
* Description:
*
*)
open HardCaml
module B : Comb.S
type value = B.t
type time = int
module Signal : sig
type t =
{
mutable value : value;
mutable time : time;
name : string;
}
val mk : string -> int -> t
end
module Event : sig
type t =
{
time : time;
value : value;
signal : Signal.t;
}
val mk : time -> value -> Signal.t -> t
end
module Process : sig
type t =
{
sensitivity : Signal.t list;
run : unit -> unit;
}
val mk : Signal.t list -> (unit -> unit) -> t
end
|
4a53ed9c9c441cdc5f8b867a927a134663b814aa6215ecffd331f38163eab7b3 | mit-plv/riscv-semantics | RunFast.hs | {-# OPTIONS -Wall #-}
module Platform.RunFast where
import System.IO
import System.Environment
import System.Exit
import Data.Int
import Data.IORef
import Data.Array.IO
import Data.List
import Data.Word
import Data.Bits
import Utility.Utility
import Spec.Machine
import Platform.CleanTest
import Utility.Elf
import qualified Spec.CSRField as Field
import Spec.CSRFileIO
import Spec.CSRSpec
import Spec.CSR
import Spec.Decode
import Spec.Spec
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.State
import qualified Data.ByteString as B
import Numeric (readHex, showHex)
import Platform.Pty
import Platform.Plic
import Platform.Clint
import Control.Concurrent.MVar
import System.CPUTime
-- | Fold one line of a hex dump into an (address, bytes) accumulator.
--
-- A line of the form \"@hhhh\" resets the current byte address to the
-- parsed word address times 4; any other line is parsed as a single 32-bit
-- hex word whose bytes are appended at consecutive addresses, after which
-- the address advances by 4.
processLine :: String -> (Int, [(Int, Word8)]) -> (Int, [(Int, Word8)])
processLine line (addr, bytes) =
  case line of
    '@' : hexAddr -> (4 * parseHex hexAddr, bytes)
    _             -> (addr + 4, bytes ++ zip [addr ..] (splitWord (parseHex line :: Word32)))
  where
    -- Take the first successful parse from 'readHex' (partial on bad input,
    -- matching the original behaviour).
    parseHex :: (Eq a, Num a) => String -> a
    parseHex = fst . head . readHex
-- | Read a hex dump file into a list of (address, byte) pairs.
--
-- Lines are folded in order with 'processLine'; processing stops at the
-- first empty line (preserving the original short-circuit behaviour) or at
-- end of file.
--
-- Fixes over the previous version: the file handle is no longer leaked
-- (the explicit 'openFile' handle was never closed), and an empty file no
-- longer raises an end-of-file exception from 'hGetLine'.
readHexFile :: FilePath -> IO [(Int, Word8)]
readHexFile f = do
  contents <- readFile f
  -- Stop at the first empty line, excluding it, as the old loop did.
  let usable = takeWhile (not . null) (lines contents)
  return (snd (foldl' (flip processLine) (0, []) usable))
runProgram :: Maybe Int64 -> VerifMinimal64 -> IO (Int64, VerifMinimal64)
runProgram maybeToHostAddress c = do
handle <- openFile "/tmp/hs2tandem" ReadWriteMode
runStateT (stepHelper RV64IMAF maybeToHostAddress
-- Check external interrupt
(lift $(do
refs <- get
(do -- Restore state to nothing
exception <- lift $ writeIORef (exception refs) False
interrupt <- lift $ writeIORef (interrupt refs) False
valid_dst <- lift $ writeIORef (valid_dst refs) False
valid_addr <- lift $ writeIORef (valid_addr refs) False
valid_timer <- lift $ writeIORef (valid_timer refs) False
instruction <- lift $ writeIORef (instruction refs) 0
cause <- lift $ writeIORef (cause refs) 0
d <- lift $ writeIORef (d refs) 0
dst <- lift $ writeIORef (dst refs) 0
addrPacket <- lift $ writeIORef (addrPacket refs) 0
pcPacket <- lift $ writeIORef (pcPacket refs) 0
timer <- lift $ writeIORef (timer refs) 0
lift $ writeIORef (mipPacket refs) 0
return ())
let (_, rtc) = clint refs
time <- getCSRField Field.MCycle
mie < - getCSRField Field . MIE
--pc <- getPC
lift . putStrLn $ showHex ( fromIntegral pc : : Word64 ) " "
_ <- lift $ takeMVar rtc
Time goes 10000 times slower for the RV
plicAlive <- lift $ takeMVar (toDo (plic refs))
lift $ putMVar (toDo (plic refs)) DoNothing
return plicAlive
))
Get timer and from the CLINT need more work refactorization
(lift $(do
refs <- get
pc <- getPC
lift $ writeIORef (pcPacket refs) pc
let (mtimecmp, rtc) = clint c
vmtimecmp <- lift $ readIORef mtimecmp
vrtc <- lift $ readMVar rtc
return $! (vmtimecmp, vrtc)))
(\inst -> do
mepc <- getCSR MEPC
when ( inst = = 0x30200073 ) . lift . lift . " mepc on is " + + show mepc
refs <- lift $ get
lift . lift $ writeIORef (instruction refs) inst
pc <- getPC
lift . lift $ writeIORef (pcPacket refs) pc
return (inst /= 0x6f)) -- Predecode
(do
refs <- get
mtval <- getCSR MTVec
stval <- getCSR STVec
mipP <- getCSR MIP
lift . writeIORef (mipPacket refs) . fromIntegral $ (fromIntegral mipP :: Word64)
(do
npc <- lift $ readIORef (nextPC refs)
lift . writeIORef ( pcPacket refs ) $ npc
& & ! 3
& & ! 3
if trappedM
then do
isInterruptM <- getCSRField Field.MCauseInterrupt
let isInterrupt = isInterruptM == 1
codeM <- getCSRField Field.MCauseCode
lift $ writeIORef (exception refs) (not isInterrupt)
lift $ writeIORef (interrupt refs) (isInterrupt)
lift $ writeIORef (cause refs) (fromIntegral codeM)
else
if trappedS
then do
isInterruptS <- getCSRField Field.SCauseInterrupt
let isInterrupt = isInterruptS == 1
codeS <- getCSRField Field.SCauseCode
lift $ writeIORef (exception refs) (not isInterrupt)
lift $ writeIORef (interrupt refs) (isInterrupt)
lift $ writeIORef (cause refs) (fromIntegral codeS)
else do
lift $ writeIORef (exception refs) False
lift $ writeIORef (interrupt refs) False
lift $ writeIORef (cause refs) 0)
-- pcPacket <- lift $ readIORef (pcPacket refs)
-- instruction <- lift $ readIORef (instruction refs)
-- exception <- lift $ readIORef (exception refs)
-- interrupt <- lift $ readIORef (interrupt refs)
-- cause <- lift $ readIORef (cause refs)
addr < - lift $ readIORef ( )
valid_addr < - lift $ readIORef ( valid_addr refs )
-- d <- lift $ readIORef (d refs)
-- valid_dst <- lift $ readIORef (valid_dst refs)
-- dst <- lift $ readIORef (dst refs)
-- valid_timer <- lift $ readIORef (valid_timer refs)
-- timer <- lift $ readIORef (timer refs)
< - lift $ readIORef ( refs )
-- lift $ hPutStrLn handle "s"
-- lift . hPutStrLn handle . show $ pcPacket
-- lift . hPutStrLn handle . show $ instruction
-- lift . hPutStrLn handle . show . fromEnum $ exception
-- lift . hPutStrLn handle . show . fromEnum $ interrupt
-- lift . hPutStrLn handle . show $ cause
-- lift . hPutStrLn handle . show $ addr
-- lift . hPutStrLn handle . show . fromEnum $ valid_addr
-- -- data is what we load or what we want to store
-- lift . hPutStrLn handle . show $ d
-- lift . hPutStrLn handle . show . fromEnum $ valid_dst
-- -- dst is the idx of destination register
lift . hPutStrLn handle . show $ dst
-- lift . hPutStrLn handle . show . fromEnum $ valid_timer
-- lift . hPutStrLn handle . show $ timer
-- lift . hPutStrLn handle . show $ mipPacket
-- lift $ hPutStrLn handle "e"
Precommit
)
c
where
readProgram :: String -> IO (Maybe Int64, [(Int, Word8)])
readProgram f = do
if ".hex" `isSuffixOf` f
then do
mem <- readHexFile f
return (Nothing, mem)
else do
mem <- readElf f
maybeToHostAddress <- readElfSymbol "tohost" f
return (fmap (fromIntegral:: Word64 -> Int64) maybeToHostAddress, mem)
runFile :: String -> IO Int64
runFile f = do
deviceTree <- B.readFile "device_tree.bin"
(maybeToHostAddress, program) <- readProgram f
-- Create the references and the arrays that are going to be passed around
registers <- newArray (0,31) 0
pc <- newIORef 0x80000000
fpregisters <- newArray (0,31) 0
npc <- newIORef 0
privMode <- newIORef Machine
Create a big 2 GB chunk of memory
reservation <- newIORef Nothing
csrs <- newArray (Field.MXL,Field.FRM) 0 --GUESS TO GET ALL THE CSRFIELDS
plic <- initPlic
putStrLn "init PTY"
console <- initPty plic
clint <- initClint
writeArray csrs Field.MXL 2
writeArray csrs Field.Extensions $! encodeExtensions "IAMSU"
putStrLn "All the state is created"
Create Refs for verification packet
exception <- newIORef False
interrupt <- newIORef False
valid_dst <- newIORef False
valid_addr <- newIORef False
valid_timer <- newIORef False
instruction <- newIORef 0
cause <- newIORef 0
d <- newIORef 0
dst <- newIORef 0
addrPacket <- newIORef 0
pcPacket <- newIORef 0
timer <- newIORef 0
mipPacket <- newIORef 0
-- Create device tree and program
let addressCommaByteS = (zip [0..] (B.unpack deviceTree)) ++ program
forM_ addressCommaByteS $ (\(addr,byte)-> writeArray mem (fromIntegral addr) (fromIntegral byte))
putStrLn "The program is copied"
let c = VerifMinimal64 { registers = registers,
fpregisters = fpregisters,
csrs = csrs,
pc = pc,
nextPC = npc,
privMode = privMode,
mem = mem,
plic = plic,
clint = clint,
console = console,
reservation = reservation,
-- Verification packet:
exception = exception,
interrupt = interrupt,
valid_dst = valid_dst,
valid_addr = valid_addr,
valid_timer = valid_timer,
timer = timer,
instruction = instruction,
cause = cause,
d = d,
dst = dst,
addrPacket = addrPacket,
pcPacket = pcPacket,
mipPacket = mipPacket
} in
fmap fst $ runProgram maybeToHostAddress c
-- | Run every file in order, printing each file's exit value.
--
-- Every file is executed even after a failure; the overall result is the
-- exit value of the first file that returned non-zero (or 0 if all passed).
runFiles :: [String] -> IO Int64
runFiles [] = return 0
runFiles (file : rest) = do
  code <- runFile file
  putStrLn (file ++ ": " ++ show code)
  laterCode <- runFiles rest
  return (if code /= 0 then code else laterCode)
-- | Entry point: run each ELF/hex file named on the command line and exit
-- with the first non-zero result; complain (and fail) when no arguments
-- are given.
main :: IO ()
main = do
  args <- getArgs
  retval <- case args of
    []    -> do
      putStr "ERROR: this program expects one or more elf files as command-line arguments\n"
      return 1
    [one] -> runFile one
    many  -> runFiles many
  let exitCode
        | retval == 0 = ExitSuccess
        | otherwise   = ExitFailure ((fromIntegral :: Int64 -> Int) retval)
  exitWith exitCode
| null | https://raw.githubusercontent.com/mit-plv/riscv-semantics/1c0da3cac9d3f8dd813d26c0d2fbaccbb2210313/src/Platform/RunFast.hs | haskell | # OPTIONS -Wall #
Check external interrupt
Restore state to nothing
pc <- getPC
Predecode
pcPacket <- lift $ readIORef (pcPacket refs)
instruction <- lift $ readIORef (instruction refs)
exception <- lift $ readIORef (exception refs)
interrupt <- lift $ readIORef (interrupt refs)
cause <- lift $ readIORef (cause refs)
d <- lift $ readIORef (d refs)
valid_dst <- lift $ readIORef (valid_dst refs)
dst <- lift $ readIORef (dst refs)
valid_timer <- lift $ readIORef (valid_timer refs)
timer <- lift $ readIORef (timer refs)
lift $ hPutStrLn handle "s"
lift . hPutStrLn handle . show $ pcPacket
lift . hPutStrLn handle . show $ instruction
lift . hPutStrLn handle . show . fromEnum $ exception
lift . hPutStrLn handle . show . fromEnum $ interrupt
lift . hPutStrLn handle . show $ cause
lift . hPutStrLn handle . show $ addr
lift . hPutStrLn handle . show . fromEnum $ valid_addr
-- data is what we load or what we want to store
lift . hPutStrLn handle . show $ d
lift . hPutStrLn handle . show . fromEnum $ valid_dst
-- dst is the idx of destination register
lift . hPutStrLn handle . show . fromEnum $ valid_timer
lift . hPutStrLn handle . show $ timer
lift . hPutStrLn handle . show $ mipPacket
lift $ hPutStrLn handle "e"
Create the references and the arrays that are going to be passed around
GUESS TO GET ALL THE CSRFIELDS
Create device tree and program
Verification packet: | module Platform.RunFast where
import System.IO
import System.Environment
import System.Exit
import Data.Int
import Data.IORef
import Data.Array.IO
import Data.List
import Data.Word
import Data.Bits
import Utility.Utility
import Spec.Machine
import Platform.CleanTest
import Utility.Elf
import qualified Spec.CSRField as Field
import Spec.CSRFileIO
import Spec.CSRSpec
import Spec.CSR
import Spec.Decode
import Spec.Spec
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.State
import qualified Data.ByteString as B
import Numeric (readHex, showHex)
import Platform.Pty
import Platform.Plic
import Platform.Clint
import Control.Concurrent.MVar
import System.CPUTime
processLine :: String -> (Int, [(Int, Word8)]) -> (Int, [(Int, Word8)])
processLine ('@':xs) (_, l) = ((fst $ head $ readHex xs) * 4, l)
processLine s (p, l) = (p + 4, l ++ (zip [p..] $ splitWord (fst $ head $ readHex s :: Word32)))
readHexFile :: FilePath -> IO [(Int, Word8)]
readHexFile f = do
h <- openFile f ReadMode
helper h (0, [])
where helper h l = do
s <- hGetLine h
done <- hIsEOF h
if (null s)
then return $ snd l
else if done
then return $ snd $ processLine s l
else helper h (processLine s l)
runProgram :: Maybe Int64 -> VerifMinimal64 -> IO (Int64, VerifMinimal64)
runProgram maybeToHostAddress c = do
handle <- openFile "/tmp/hs2tandem" ReadWriteMode
runStateT (stepHelper RV64IMAF maybeToHostAddress
(lift $(do
refs <- get
exception <- lift $ writeIORef (exception refs) False
interrupt <- lift $ writeIORef (interrupt refs) False
valid_dst <- lift $ writeIORef (valid_dst refs) False
valid_addr <- lift $ writeIORef (valid_addr refs) False
valid_timer <- lift $ writeIORef (valid_timer refs) False
instruction <- lift $ writeIORef (instruction refs) 0
cause <- lift $ writeIORef (cause refs) 0
d <- lift $ writeIORef (d refs) 0
dst <- lift $ writeIORef (dst refs) 0
addrPacket <- lift $ writeIORef (addrPacket refs) 0
pcPacket <- lift $ writeIORef (pcPacket refs) 0
timer <- lift $ writeIORef (timer refs) 0
lift $ writeIORef (mipPacket refs) 0
return ())
let (_, rtc) = clint refs
time <- getCSRField Field.MCycle
mie < - getCSRField Field . MIE
lift . putStrLn $ showHex ( fromIntegral pc : : Word64 ) " "
_ <- lift $ takeMVar rtc
Time goes 10000 times slower for the RV
plicAlive <- lift $ takeMVar (toDo (plic refs))
lift $ putMVar (toDo (plic refs)) DoNothing
return plicAlive
))
Get timer and from the CLINT need more work refactorization
(lift $(do
refs <- get
pc <- getPC
lift $ writeIORef (pcPacket refs) pc
let (mtimecmp, rtc) = clint c
vmtimecmp <- lift $ readIORef mtimecmp
vrtc <- lift $ readMVar rtc
return $! (vmtimecmp, vrtc)))
(\inst -> do
mepc <- getCSR MEPC
when ( inst = = 0x30200073 ) . lift . lift . " mepc on is " + + show mepc
refs <- lift $ get
lift . lift $ writeIORef (instruction refs) inst
pc <- getPC
lift . lift $ writeIORef (pcPacket refs) pc
(do
refs <- get
mtval <- getCSR MTVec
stval <- getCSR STVec
mipP <- getCSR MIP
lift . writeIORef (mipPacket refs) . fromIntegral $ (fromIntegral mipP :: Word64)
(do
npc <- lift $ readIORef (nextPC refs)
lift . writeIORef ( pcPacket refs ) $ npc
& & ! 3
& & ! 3
if trappedM
then do
isInterruptM <- getCSRField Field.MCauseInterrupt
let isInterrupt = isInterruptM == 1
codeM <- getCSRField Field.MCauseCode
lift $ writeIORef (exception refs) (not isInterrupt)
lift $ writeIORef (interrupt refs) (isInterrupt)
lift $ writeIORef (cause refs) (fromIntegral codeM)
else
if trappedS
then do
isInterruptS <- getCSRField Field.SCauseInterrupt
let isInterrupt = isInterruptS == 1
codeS <- getCSRField Field.SCauseCode
lift $ writeIORef (exception refs) (not isInterrupt)
lift $ writeIORef (interrupt refs) (isInterrupt)
lift $ writeIORef (cause refs) (fromIntegral codeS)
else do
lift $ writeIORef (exception refs) False
lift $ writeIORef (interrupt refs) False
lift $ writeIORef (cause refs) 0)
addr < - lift $ readIORef ( )
valid_addr < - lift $ readIORef ( valid_addr refs )
< - lift $ readIORef ( refs )
lift . hPutStrLn handle . show $ dst
Precommit
)
c
where
readProgram :: String -> IO (Maybe Int64, [(Int, Word8)])
readProgram f = do
if ".hex" `isSuffixOf` f
then do
mem <- readHexFile f
return (Nothing, mem)
else do
mem <- readElf f
maybeToHostAddress <- readElfSymbol "tohost" f
return (fmap (fromIntegral:: Word64 -> Int64) maybeToHostAddress, mem)
runFile :: String -> IO Int64
runFile f = do
deviceTree <- B.readFile "device_tree.bin"
(maybeToHostAddress, program) <- readProgram f
registers <- newArray (0,31) 0
pc <- newIORef 0x80000000
fpregisters <- newArray (0,31) 0
npc <- newIORef 0
privMode <- newIORef Machine
Create a big 2 GB chunk of memory
reservation <- newIORef Nothing
plic <- initPlic
putStrLn "init PTY"
console <- initPty plic
clint <- initClint
writeArray csrs Field.MXL 2
writeArray csrs Field.Extensions $! encodeExtensions "IAMSU"
putStrLn "All the state is created"
Create Refs for verification packet
exception <- newIORef False
interrupt <- newIORef False
valid_dst <- newIORef False
valid_addr <- newIORef False
valid_timer <- newIORef False
instruction <- newIORef 0
cause <- newIORef 0
d <- newIORef 0
dst <- newIORef 0
addrPacket <- newIORef 0
pcPacket <- newIORef 0
timer <- newIORef 0
mipPacket <- newIORef 0
let addressCommaByteS = (zip [0..] (B.unpack deviceTree)) ++ program
forM_ addressCommaByteS $ (\(addr,byte)-> writeArray mem (fromIntegral addr) (fromIntegral byte))
putStrLn "The program is copied"
let c = VerifMinimal64 { registers = registers,
fpregisters = fpregisters,
csrs = csrs,
pc = pc,
nextPC = npc,
privMode = privMode,
mem = mem,
plic = plic,
clint = clint,
console = console,
reservation = reservation,
exception = exception,
interrupt = interrupt,
valid_dst = valid_dst,
valid_addr = valid_addr,
valid_timer = valid_timer,
timer = timer,
instruction = instruction,
cause = cause,
d = d,
dst = dst,
addrPacket = addrPacket,
pcPacket = pcPacket,
mipPacket = mipPacket
} in
fmap fst $ runProgram maybeToHostAddress c
runFiles :: [String] -> IO Int64
runFiles (file:files) = do
myreturn <- runFile file
putStr (file ++ ": " ++ (show myreturn) ++ "\n")
othersreturn <- runFiles files
if myreturn /= 0
then return myreturn
else return othersreturn
runFiles [] = return 0
main :: IO ()
main = do
args <- getArgs
retval <- case args of
[] -> do
putStr "ERROR: this program expects one or more elf files as command-line arguments\n"
return 1
[file] -> runFile file
files -> runFiles files
exitWith (if retval == 0 then ExitSuccess else ExitFailure $ (fromIntegral:: Int64 -> Int) retval)
|
8ea6d02bbb4cee47b3a0004363a04b0c436cb955e715e7384196cf627040fd67 | kushidesign/kushi | core.cljs | (ns kushi.ui.input.checkbox.core
(:require-macros
[kushi.core :refer (sx)])
(:require
[kushi.core :refer (merge-attrs)]
[kushi.ui.core :refer (opts+children)]))
;; TODO outlines for ally
(defn checkbox
  {:desc ["Checkboxes are used to provide multiple options for selection. One or more checkboxes can be checked at a time."]
   :opts '[{:name input-attrs
            :type :map
            :default nil
            :desc "html attributes map applied to the underlying `input` div."}]}
  [& args]
  ;; NOTE(review): the :opts metadata above advertises an `input-attrs`
  ;; option, but the implementation reads `:label-attrs` from the opts map
  ;; (merged into the outer [:label]) while the positional attrs map is
  ;; merged into the [:input] — confirm which option name the public API
  ;; intends.
  (let [[opts attrs & children] (opts+children args)
        {:keys [label-attrs]} opts]
    ;; Outer label lays the box and the label text out on a `1em auto` grid;
    ;; nesting the input inside the label makes the text clickable.
    [:label
     (merge-attrs
      (sx
       'kushi-checkbox
       :.transition
       :.fast!
       :.pointer
       :.grid
       :gtc--1em:auto
       :gap--0.4em
       :line-height--1.1
       :+.form-control:mbs--1em)
      label-attrs)
     ;; The input strips the native checkbox appearance and draws its own
     ;; box; the ::before pseudo-element is the check mark (a clip-path
     ;; polygon) scaled from 0 to 1 when :checked.
     [:input
      (merge-attrs
       (sx
        'kushi-checkbox-input
        :.transition
        :.fast!
        :.pointer
        :-webkit-appearance--none
        :appearance--none
        :m--0
        :font--inherit
        :color--currentColor
        :width--1em
        :height--1em
        :border--0.15em:solid:currentColor
        :border-radius--0em
        :display--inline-grid
        :place-content--center
        :bgc--transparent
        :checked:bgc--currentColor
        :checked:o--1
        :o--0.6
        {:style {:before:content "\"\""
                 :before:width :0.65em
                 :before:height :0.65em
                 :before:transform "scale(0) rotate(15deg)"
                 :checked:before:transform "scale(1) rotate(15deg)"
                 :before:transition :120ms:transform:ease-in-out
                 :before:box-shadow "inset 1em 1em white"
                 :before:transform-origin :center:center
                 :before:clip-path "polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%)"}
         :type :checkbox})
       attrs)]
     ;; Remaining children become the visible label text.
     (into [:span] children)]))
| null | https://raw.githubusercontent.com/kushidesign/kushi/3759d5762e07588721276cd026c7c98d3ae5bdac/src/kushi/ui/input/checkbox/core.cljs | clojure | (ns kushi.ui.input.checkbox.core
(:require-macros
[kushi.core :refer (sx)])
(:require
[kushi.core :refer (merge-attrs)]
[kushi.ui.core :refer (opts+children)]))
TODO outlines for ally
(defn checkbox
{:desc ["Checkboxes are used to provide multiple options for selection. One or more checkboxes can be checked at a time."]
:opts '[{:name input-attrs
:type :map
:default nil
:desc "html attributes map applied to the underlying `input` div."}]}
[& args]
(let [[opts attrs & children] (opts+children args)
{:keys [label-attrs]} opts]
[:label
(merge-attrs
(sx
'kushi-checkbox
:.transition
:.fast!
:.pointer
:.grid
:gtc--1em:auto
:gap--0.4em
:line-height--1.1
:+.form-control:mbs--1em)
label-attrs)
[:input
(merge-attrs
(sx
'kushi-checkbox-input
:.transition
:.fast!
:.pointer
:-webkit-appearance--none
:appearance--none
:m--0
:font--inherit
:color--currentColor
:width--1em
:height--1em
:border--0.15em:solid:currentColor
:border-radius--0em
:display--inline-grid
:place-content--center
:bgc--transparent
:checked:bgc--currentColor
:checked:o--1
:o--0.6
{:style {:before:content "\"\""
:before:width :0.65em
:before:height :0.65em
:before:transform "scale(0) rotate(15deg)"
:checked:before:transform "scale(1) rotate(15deg)"
:before:transition :120ms:transform:ease-in-out
:before:box-shadow "inset 1em 1em white"
:before:transform-origin :center:center
:before:clip-path "polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%)"}
:type :checkbox})
attrs)]
(into [:span] children)]))
| |
0f61062b52b78e61af9696536c593447024eb9f1fb7e3ce104cafb79fb566d14 | 6502/JSLisp | bigfib.lisp | (defconstant +BASE+ 10000000)
(defun L+ (a b)
"Sum of two large integers [a] and [b]"
(do ((result (list))
(carry 0)
(na (length a))
(nb (length b))
(i 0 (1+ i)))
((and (>= i na) (>= i nb) (= carry 0))
result)
(let ((x (+ (or (aref a i) 0)
(or (aref b i) 0)
carry)))
(if (>= x +BASE+)
(progn
(setf carry 1)
(setf (aref result i) (- x +BASE+)))
(progn
(setf carry 0)
(setf (aref result i) x))))))
(defun Lk* (a k)
  "Product of a large integer [a] for a small integer [k]"
  ;; Schoolbook multiply of each limb by k, propagating the carry.
  ;; Note: JSLisp lists are JS arrays, so push appends at the tail (unlike
  ;; Common Lisp), keeping the least-significant-first limb order of [a].
  (let ((result (list))
        (carry 0))
    (dolist (v a)
      (let ((x (+ (* v k) carry)))
        (push (% x +BASE+) result)
        (setf carry (floor (/ x +BASE+)))))
    ;; A leftover carry becomes the new most-significant limb; this relies
    ;; on a 0 carry being falsy (JS truthiness) so no zero limb is added.
    (when carry
      (push carry result))
    result))
(defun L* (a b)
  "Product of two large integers [a] and [b]"
  ;; result += a * limb for each limb of [b]; prepending a 0 limb to the
  ;; little-endian [a] multiplies it by +BASE+ before the next round.
  (let ((result (list)))
    (dolist (k b)
      (setf result (L+ result (Lk* a k)))
      (setf a (append '(0) a)))
    result))
(defun Lmat2* (m1 m2)
  "Product of two 2x2 matrices"
  ;; Matrices are flat row-major lists (a b c d); every entry is itself a
  ;; large integer (limb list).
  ;; a b e f
  ;; c d g h
  (let ((a (first m1))
        (b (second m1))
        (c (third m1))
        (d (fourth m1))
        (e (first m2))
        (f (second m2))
        (g (third m2))
        (h (fourth m2)))
    (list (L+ (L* a e) (L* b g))
          (L+ (L* a f) (L* b h))
          (L+ (L* c e) (L* d g))
          (L+ (L* c f) (L* d h)))))
(defun Lmat2exp (m exp)
  "Raises 2x2 matrix [m] to the [exp] power"
  ;; Exponentiation by squaring: O(log exp) matrix products.
  ;; Bug fix: the exp=0 identity previously used plain numbers
  ;; (list 1 0 0 1), while every other matrix entry is a limb-list bignum,
  ;; so Lmat2* / L* broke on the exp=0 case. Entries are now one-limb
  ;; bignums.
  (cond
    ((= exp 0) (list (list 1) (list 0) (list 0) (list 1)))
    ((= exp 1) m)
    ;; (% exp 2) is 1 (truthy) for odd exponents.
    ((% exp 2) (Lmat2* m (Lmat2exp m (1- exp))))
    (true (let ((h (Lmat2exp m (ash exp -1))))
            (Lmat2* h h)))))
(defun Lstr (n)
  "Converts a large integer to a string"
  ;; Reverse to most-significant-first, left-pad every limb to 7 digits,
  ;; concatenate (JSLisp's + concatenates strings), then strip leading
  ;; zeros while keeping at least one digit.
  (let ((x (apply #'+ (map (lambda (x)
                             (let ((s ~"00000000{x}"))
                               (slice s (- (length s) 7))))
                           (reverse n)))))
    (replace x "^0*(.)" "$1")))
(defun Lfibo (n)
  "Computes the [n]-th fibonacci number (result as a string)"
  ;; The matrix entries are one-limb bignums. The top-left entry of
  ;; [[1 1][1 0]]^n is F(n+1) in the F(0)=0, F(1)=1 convention, which
  ;; `first` extracts from the flat row-major matrix.
  (Lstr (first (Lmat2exp '((1)(1)
                           (1)(0))
                         n))))
| null | https://raw.githubusercontent.com/6502/JSLisp/9a4aa1a9116f0cfc598ec9f3f30b59d99810a728/examples/bigfib.lisp | lisp | a b e f
c d g h | (defconstant +BASE+ 10000000)
(defun L+ (a b)
"Sum of two large integers [a] and [b]"
(do ((result (list))
(carry 0)
(na (length a))
(nb (length b))
(i 0 (1+ i)))
((and (>= i na) (>= i nb) (= carry 0))
result)
(let ((x (+ (or (aref a i) 0)
(or (aref b i) 0)
carry)))
(if (>= x +BASE+)
(progn
(setf carry 1)
(setf (aref result i) (- x +BASE+)))
(progn
(setf carry 0)
(setf (aref result i) x))))))
(defun Lk* (a k)
"Product of a large integer [a] for a small integer [k]"
(let ((result (list))
(carry 0))
(dolist (v a)
(let ((x (+ (* v k) carry)))
(push (% x +BASE+) result)
(setf carry (floor (/ x +BASE+)))))
(when carry
(push carry result))
result))
(defun L* (a b)
"Product of two large integers [a] and [b]"
(let ((result (list)))
(dolist (k b)
(setf result (L+ result (Lk* a k)))
(setf a (append '(0) a)))
result))
(defun Lmat2* (m1 m2)
"Product of two 2x2 matrices"
(let ((a (first m1))
(b (second m1))
(c (third m1))
(d (fourth m1))
(e (first m2))
(f (second m2))
(g (third m2))
(h (fourth m2)))
(list (L+ (L* a e) (L* b g))
(L+ (L* a f) (L* b h))
(L+ (L* c e) (L* d g))
(L+ (L* c f) (L* d h)))))
(defun Lmat2exp (m exp)
"Raises 2x2 matrix [m] to the [exp] power"
(cond
((= exp 0) (list 1 0 0 1))
((= exp 1) m)
((% exp 2) (Lmat2* m (Lmat2exp m (1- exp))))
(true (let ((h (Lmat2exp m (ash exp -1))))
(Lmat2* h h)))))
(defun Lstr (n)
"Converts a large integer to a string"
(let ((x (apply #'+ (map (lambda (x)
(let ((s ~"00000000{x}"))
(slice s (- (length s) 7))))
(reverse n)))))
(replace x "^0*(.)" "$1")))
(defun Lfibo (n)
"Computes the [n]-th fibonacci number (result as a string)"
(Lstr (first (Lmat2exp '((1)(1)
(1)(0))
n))))
|
b43107cbea08951a1cea0715e13016ae95c4db77861396e11ced4958d1f71920 | lambdaisland/webstuff | http.clj | (ns lambdaisland.webstuff.http
"Provide all the building blocks for setting up Jetty/Sieppari/Reitit
In particular this provides a setup where routes can easily use content
negotiation to return either data (as EDN/JSON/Transit/etc), or to run that
same data through a view function (data->hiccup) to render HTML.
In this case the `:body` in the response is always a data structure, and if
the body is able to return HTML then the response should also include a
`:view` function for rendering."
(:require [clojure.pprint :as pprint]
[lambdaisland.glogc :as log]
[lambdaisland.hiccup :as hiccup]
[muuntaja.core :as m]
[muuntaja.format.core :as muuntaja-format]
[muuntaja.interceptor :as muuntaja-interceptor]
[reitit.http :as http]
[reitit.http.coercion :as coercion]
[reitit.http.interceptors.exception :as exception]
[reitit.http.interceptors.multipart :as multipart]
[reitit.http.interceptors.muuntaja :as muuntaja]
[reitit.http.interceptors.parameters :as parameters]
[reitit.interceptor.sieppari :as sieppari]
[reitit.middleware :as middleware]
[reitit.ring :as ring]
[ring.adapter.jetty :as jetty])
(:import (java.io OutputStream)
(org.eclipse.jetty.server Server)))
(defn- terminate
  "Short-circuit a Sieppari interceptor chain.

  Drops the remaining interceptor `:queue` from `ctx` and installs
  `response` as the final `:response`. (Other interceptor libraries ship an
  equivalent out of the box; Sieppari oddly does not.)"
  [ctx response]
  (assoc (dissoc ctx :queue) :response response))
(defn html-encoder
  "Muuntaja encoder that renders HTML

  Expects a Clojure collection with a `:view-fn` in the metadata, which takes
  the `:body` collection as argument, and returns the body as a string. When
  no `:view-fn` is present, strings pass through unchanged and any other
  value is rendered with `pr-str`."
  [opts]
  (letfn [(render [data]
            ;; Shared rendering rule for both protocol arms. Previously the
            ;; streaming arm assumed :view-fn was always present and would
            ;; NPE on plain string/data bodies ((view data) returned nil,
            ;; then .getBytes on nil); now both arms agree with the
            ;; byte-array arm's cond.
            (let [view (get (meta data) :view-fn)]
              (cond
                (ifn? view)    (view data)
                (string? data) data
                :else          (pr-str data))))]
    (reify
      muuntaja-format/EncodeToBytes
      (encode-to-bytes [_ data charset]
        (.getBytes ^String (render data) ^String charset))
      muuntaja-format/EncodeToOutputStream
      (encode-to-output-stream [_ data charset]
        (fn [^OutputStream output-stream]
          (.write output-stream
                  (.getBytes ^String (render data) ^String charset)))))))
(defn muuntaja-instance
  "Create a Muuntaja instance that includes HTML handling

  Can take options just like [[muuntaja.core/create]],
  see [[muuntaja.core/default-options]]. HTML becomes the default format,
  encoded via [[html-encoder]]."
  ([]
   (muuntaja-instance m/default-options))
  ([opts]
   (let [html-format (muuntaja-format/map->Format
                      {:name :html
                       :encoder [html-encoder]})
         opts' (-> opts
                   (assoc :default-format "text/html")
                   (assoc-in [:formats "text/html"] html-format))]
     (m/create opts'))))
(defn view-fn-interceptor
  "Interceptor for handling HTML responses with a data `:body` and `:view` fn

  If the user agent requested a HTML response, and the response body returned
  from the route is a Clojure collection, and the response contains a `:view`
  function, then attach that view to the `:body` metadata, so the Muuntaja
  encoder has everything it needs to do the rendering, see [[html-encoder]].

  By default will compose the view function with `lambdaisland.hiccup/render`,
  but you can pass an alternative `:render-fn` if you want to handle the output
  of the view functions in a different way. It should return a HTML response
  body as a string."
  ([]
   (view-fn-interceptor nil))
  ([{:keys [render-fn]
     :or {render-fn hiccup/render}}]
   {:name ::view-fn
    :leave
    (fn [ctx]
      (cond
        ;; Not a HTML response: leave everything alone.
        (not= "text/html" (some-> ctx :request :muuntaja/response .-format))
        ctx

        (and (nil? (get-in ctx [:response :body]))
             (nil? (get-in ctx [:response :view])))
        ctx

        ;; A plain string body is passed through as-is by the encoder.
        (string? (get-in ctx [:response :body]))
        ctx

        ;; Data body + view: render via the (possibly user-supplied)
        ;; render-fn. Bug fix: render-fn used to be destructured but
        ;; ignored — `hiccup/render` was hard-coded below, contradicting
        ;; the docstring.
        (and (coll? (get-in ctx [:response :body]))
             (get-in ctx [:response :view]))
        (update-in ctx [:response :body]
                   vary-meta
                   assoc
                   :view-fn (comp render-fn (get-in ctx [:response :view])))

        ;; View without a body: render the view with an empty map.
        (and (nil? (get-in ctx [:response :body]))
             (get-in ctx [:response :view]))
        (assoc-in ctx [:response :body]
                  ^{:view-fn (comp render-fn (get-in ctx [:response :view]))}
                  {})

        ;; Raw hiccup under :html has no view fn, so render-fn does not
        ;; apply; it is rendered directly with hiccup/render-html.
        (get-in ctx [:response :html])
        (assoc-in ctx [:response :body]
                  ^{:view-fn (fn [_] (hiccup/render-html (get-in ctx [:response :html])))}
                  {})

        :else
        (assoc ctx
               :response
               {:status 415
                :headers {"Content-Type" "text/html"}
                :body "<h1>Unsupported Media Type. No HTML view defined for route.</h1>"})))}))
(defn inject-components-interceptor
  "Generic interceptor for injecting system components into the request map.

  Takes a map which is merged into every request, making its contents
  available to route implementations."
  [components]
  {:name ::inject-components
   :enter
   (fn [ctx]
     (update ctx :request #(merge % components)))})
(defn log-request-interceptor
  "Interceptor that logs every request.

  Emits a `trace` entry when a request starts and an `info` entry when it
  finishes, including method, uri, response status, content type, and the
  total handling time."
  []
  {:name ::log-request
   :enter
   (fn [ctx]
     (log/trace :request/starting (select-keys (:request ctx) [:request-method :uri]))
     ;; Stamp the start time so :leave can compute the elapsed time.
     (assoc ctx ::start-time (System/nanoTime)))
   :leave
   (fn [{:keys [request response] :as ctx}]
     (let [elapsed (format "%.2fms" (/ (- (System/nanoTime) (::start-time ctx)) 1e6))]
       (log/info :request/done {:method       (:request-method request)
                                :uri          (:uri request)
                                :status       (:status response)
                                :content-type (get-in response [:headers "Content-Type"])
                                :time         elapsed}))
     ctx)})
(defn exception-handler
  "Render 500 errors, but log them as well."
  [^Throwable error request]
  (log/error :handler/error {:method (:request-method request)
                             :uri (:uri request)}
             :exception error)
  ;; The body is plain data; the attached :view-fn renders it when the
  ;; client negotiated HTML (see view-fn-interceptor / html-encoder).
  (let [render-error (fn [{:keys [class message]}]
                       (hiccup/render
                        [:div
                         [:h1 "500 Server Error"]
                         [:h2 class]
                         [:p message]]))]
    {:status 500
     :body (with-meta
             {:type "exception"
              :class (.getName (.getClass error))
              :message (.getMessage error)}
             {:view-fn render-error})}))
(defn ring-default-handler
  "Fallback handler used when no route matches.

  Strips trailing slashes (issuing a redirect) before falling through to
  reitit's stock 404/405/406 handling; `opts` is passed straight to
  [[reitit.ring/create-default-handler]]."
  ([]
   (ring-default-handler nil))
  ([opts]
   (ring/routes (ring/redirect-trailing-slash-handler {:method :strip})
                (ring/create-default-handler opts))))
(defn default-interceptors
  "Default interceptor chain

  Includes content negotiation, HTML view handling, exception handling, and
  request logging.
  "
  []
  ;; Order matters: :enter runs top-to-bottom, :leave bottom-to-top.
  [(log-request-interceptor)
   ;; Make the Muuntaja instance available to the interceptors below.
   (muuntaja-interceptor/format-interceptor)
   ;; Parse query-string/form parameters.
   (parameters/parameters-interceptor)
   ;; Content negotiation (Accept / Content-Type).
   (muuntaja/format-negotiate-interceptor)
   ;; Encode response bodies on :leave.
   (muuntaja/format-response-interceptor)
   ;; Catch exceptions raised further down; ::exception/default is overridden
   ;; so unhandled errors are logged and rendered by exception-handler.
   (exception/exception-interceptor (assoc exception/default-handlers ::exception/default exception-handler))
   ;; Decode request bodies.
   (muuntaja/format-request-interceptor)
   (coercion/coerce-response-interceptor)
   (coercion/coerce-request-interceptor)
   (multipart/multipart-interceptor)
   ;; Attach :view-fn metadata so the HTML encoder can render data bodies.
   (view-fn-interceptor)])
(defn ring-handler
  "Build up a ring handler based on Reitit and Sieppari

  Takes a collection of reitit `:routes`, and optionally a `:muuntaja-instance`,
  a sequence of `:interceptors`, and a `:default-handler` which is used when no
  route matches. See [[muuntaja-instance]], [[default-interceptors]]
  and [[ring-default-handler]] for the default values.

  Can also optionally take a sequence of `:middleware`, which is handled through
  `reitit.middleware`, so it accepts anything that implements `IntoMiddleware`."
  [{:keys [muuntaja-instance interceptors default-handler routes middleware]
    :or {muuntaja-instance (muuntaja-instance)
         interceptors (default-interceptors)
         default-handler (ring-default-handler)}}]
  ;; Middleware (if any) wraps the fully-built reitit handler, so it runs
  ;; around routing rather than per-route.
  (let [wrap (if (seq middleware)
               (partial middleware/chain middleware)
               identity)
        handler (http/ring-handler
                 (http/router
                  routes
                  {:data {:muuntaja muuntaja-instance
                          :interceptors interceptors}})
                 default-handler
                 {:executor sieppari/executor})]
    ;; Re-attach the reitit handler's metadata (e.g. the router) to the
    ;; wrapped fn so callers can still introspect it.
    (with-meta (wrap handler) (meta handler))))
(defn start-jetty!
  "Start a Jetty instance and start listening for requests

  The `:port` must be specified. Takes a `:build-handler` function which should
  return a valid Ring handler function. When `:rebuild-on-request?` is `true`
  this build-handler is called on every request. This is useful in development
  for ensuring that changes made on the REPL are picked up, but is not
  recommended for production use, since rebuilding the handler (and with it, the
  router) is expensive.

  `:wrap-handler` can be used to wrap the handler in static middleware, this
  does not get updated on each request.

  When `:join?` is true the calling thread blocks until the server stops
  (defaults to false).

  Returns the Jetty instance"
  [{:keys [port rebuild-on-request? build-handler join? wrap-handler]
    :or {join? false
         wrap-handler identity}}]
  (log/info :server/starting {:port port :rebuild-on-request? rebuild-on-request?})
  (jetty/run-jetty (wrap-handler
                    (if rebuild-on-request?
                      ;; Rebuild the handler (and router) on every request so
                      ;; REPL changes are picked up.
                      #((build-handler) %)
                      (build-handler)))
                   {:port port
                    ;; Bug fix: :join? was hardcoded to false here, silently
                    ;; ignoring the destructured option.
                    :join? join?}))
(defn stop-jetty!
  "Stop a Jetty instance"
  [^Server jetty]
  ;; Log the server's URI before shutting it down.
  (log/info :server/stopping {:uri (str (.getURI jetty))})
  (.stop jetty))
| null | https://raw.githubusercontent.com/lambdaisland/webstuff/5d5669e7f7829c65fdd00b19ef826565ce41b7ea/src/lambdaisland/webstuff/http.clj | clojure | (ns lambdaisland.webstuff.http
"Provide all the building blocks for setting up Jetty/Sieppari/Reitit
In particular this provides a setup where routes can easily use content
negotiation to return either data (as EDN/JSON/Transit/etc), or to run that
same data through a view function (data->hiccup) to render HTML.
In this case the `:body` in the response is always a data structure, and if
the body is able to return HTML then the response should also include a
`:view` function for rendering."
(:require [clojure.pprint :as pprint]
[lambdaisland.glogc :as log]
[lambdaisland.hiccup :as hiccup]
[muuntaja.core :as m]
[muuntaja.format.core :as muuntaja-format]
[muuntaja.interceptor :as muuntaja-interceptor]
[reitit.http :as http]
[reitit.http.coercion :as coercion]
[reitit.http.interceptors.exception :as exception]
[reitit.http.interceptors.multipart :as multipart]
[reitit.http.interceptors.muuntaja :as muuntaja]
[reitit.http.interceptors.parameters :as parameters]
[reitit.interceptor.sieppari :as sieppari]
[reitit.middleware :as middleware]
[reitit.ring :as ring]
[ring.adapter.jetty :as jetty])
(:import (java.io OutputStream)
(org.eclipse.jetty.server Server)))
(defn- terminate
"Terminate the interceptor chain
An odly missing piece in Sieppari's API, other interceptor implementations
have this out of the box."
[ctx response]
(-> ctx
(dissoc :queue)
(assoc :response response)))
(defn html-encoder
"Muuntaja encoder that renders HTML
Expects a Clojure collection with a `:view-fn` in the metadata, which takes
the `:body` collection as argument, and returns the body as a string."
[opts]
(reify muuntaja-format/EncodeToBytes
(encode-to-bytes [_ data charset]
(let [view (get (meta data) :view-fn)
rendered (cond
(ifn? view)
(view data)
(string? data)
data
:else
(pr-str data))]
(.getBytes ^String rendered ^String charset)))
muuntaja-format/EncodeToOutputStream
(encode-to-output-stream [_ data charset]
(fn [^OutputStream output-stream]
(let [view (get (meta data) :view-fn)
rendered (view data)]
(.write output-stream (.getBytes ^String rendered ^String charset)))))))
(defn muuntaja-instance
"Create a Muuntaja instance that includes HTML handling
Can take options just like [[muuntaja.core/create]],
see [[muuntaja.core/default-options]]."
([]
(muuntaja-instance m/default-options))
([opts]
(m/create
(-> opts
(assoc :default-format "text/html")
(assoc-in [:formats "text/html"]
(muuntaja-format/map->Format
{:name :html
:encoder [html-encoder]}))))))
(defn view-fn-interceptor
  "Interceptor for handling HTML responses with a data `:body` and `:view` fn.

  If the user agent requested a HTML response, and the response body returned
  from the route is a Clojure collection, and the response contains a `:view`
  function, then attach that view to the `:body` metadata, so the Muuntaja
  encoder has everything it needs to do the rendering, see [[html-encoder]].

  By default will compose the view function with `lambdaisland.hiccup/render`,
  but you can pass an alternative `:render-fn` if you want to handle the output
  of the view functions in a different way. It should return a HTML response
  body as a string."
  ([]
   (view-fn-interceptor nil))
  ([{:keys [render-fn]
     :or {render-fn hiccup/render}}]
   {:name ::view-fn
    :leave
    (fn [ctx]
      (let [view (get-in ctx [:response :view])
            body (get-in ctx [:response :body])]
        (cond
          ;; Not negotiating HTML: nothing for this interceptor to do.
          (not= "text/html" (some-> ctx :request :muuntaja/response .-format))
          ctx

          ;; No body and no view: leave the response alone (e.g. 404 handling
          ;; happens downstream).
          (and (nil? body) (nil? view))
          ctx

          ;; A string body is passed through verbatim.
          (string? body)
          ctx

          ;; Data body + view: attach the composed renderer as metadata so
          ;; html-encoder can render it. Previously this hardcoded
          ;; hiccup/render, silently ignoring a user-supplied :render-fn.
          (and (coll? body) view)
          (update-in ctx [:response :body]
                     vary-meta
                     assoc
                     :view-fn (comp render-fn view))

          ;; View but no body: render the view over an empty map.
          (and (nil? body) view)
          (assoc-in ctx [:response :body]
                    ^{:view-fn (comp render-fn view)}
                    {})

          ;; Pre-rendered :html hiccup: bypass the view/render-fn pipeline and
          ;; render it directly.
          (get-in ctx [:response :html])
          (assoc-in ctx [:response :body]
                    ^{:view-fn (fn [_] (hiccup/render-html (get-in ctx [:response :html])))}
                    {})

          :else
          (assoc ctx
                 :response
                 {:status 415
                  :headers {"Content-Type" "text/html"}
                  :body "<h1>Unsupported Media Type. No HTML view defined for route.</h1>"}))))}))
(defn inject-components-interceptor
  "Generic interceptor for injecting system components into the request map.

  Takes a map which is merged into each incoming request, so its contents are
  available to route implementations."
  [components]
  {:name ::inject-components
   :enter
   (fn [{:keys [request] :as ctx}]
     (assoc ctx :request (merge request components)))})
(defn log-request-interceptor
  "Interceptor to log requests.

  Logs request start at the `trace` level, and request end at the `info`
  level, including the total time it took to handle the request."
  []
  {:name ::log-request
   :enter
   (fn [ctx]
     (log/trace :request/starting (select-keys (:request ctx) [:request-method :uri]))
     ;; Remember when handling started so :leave can compute the elapsed time.
     (assoc ctx ::start-time (System/nanoTime)))
   :leave
   (fn [{:keys [request response] :as ctx}]
     (let [elapsed-ms (/ (- (System/nanoTime) (::start-time ctx)) 1e6)]
       (log/info :request/done {:method (:request-method request)
                                :uri (:uri request)
                                :status (:status response)
                                :content-type (get (:headers response) "Content-Type")
                                :time (format "%.2fms" elapsed-ms)}))
     ctx)})
(defn exception-handler
  "Render 500 errors, but log them as well.

  Returns a 500 response whose body carries a `:view-fn` in its metadata, so
  HTML negotiation renders a simple error page while other formats encode the
  error data map directly."
  [^Throwable error request]
  (log/error :handler/error {:method (:request-method request)
                             :uri (:uri request)}
             :exception error)
  (let [error-data {:type "exception"
                    :class (.getName (class error))
                    :message (.getMessage error)}]
    {:status 500
     :body (with-meta error-data
             {:view-fn
              (fn [{:keys [class message]}]
                (hiccup/render
                 [:div
                  [:h1 "500 Server Error"]
                  [:h2 class]
                  [:p message]]))})}))
(defn ring-default-handler
  "The default fallback handler.

  - Strip trailing slashes (will cause a redirect)
  - Handle 404/405/406 responses, see [[reitit.ring/create-default-handler]]
    for options
  "
  ([]
   (ring-default-handler nil))
  ([opts]
   ;; ring/routes tries each handler in order; the slash-stripper only
   ;; responds when stripping the trailing slash would match a route.
   (ring/routes
    (ring/redirect-trailing-slash-handler {:method :strip})
    (ring/create-default-handler opts))))
(defn default-interceptors
  "Default interceptor chain.

  Includes content negotiation, HTML view handling, exception handling, and
  request logging. NOTE(review): the order of this vector is significant —
  earlier entries see requests first and responses last.
  "
  []
  [(log-request-interceptor)                      ;; timing + request logging
   (muuntaja-interceptor/format-interceptor)
   (parameters/parameters-interceptor)            ;; query/form params
   (muuntaja/format-negotiate-interceptor)        ;; pick response format
   (muuntaja/format-response-interceptor)         ;; encode response body
   (exception/exception-interceptor (assoc exception/default-handlers ::exception/default exception-handler))
   (muuntaja/format-request-interceptor)          ;; decode request body
   (coercion/coerce-response-interceptor)
   (coercion/coerce-request-interceptor)
   (multipart/multipart-interceptor)
   (view-fn-interceptor)])                        ;; attach HTML view-fn metadata
(defn ring-handler
  "Build up a ring handler based on Reitit and Sieppari.

  Takes a collection of reitit `:routes`, and optionally a `:muuntaja-instance`,
  a sequence of `:interceptors`, and a `:default-handler` which is used when no
  route matches. See [[muuntaja-instance]], [[default-interceptors]]
  and [[ring-default-handler]] for the default values.

  Can also optionally take a sequence of `:middleware`, which is handled through
  `reitit.middleware`, so it accepts anything that implements `IntoMiddleware`."
  [{:keys [muuntaja-instance interceptors default-handler routes middleware]
    ;; The :or defaults call the top-level vars of the same names; the default
    ;; expression is only evaluated when the key is absent.
    :or {muuntaja-instance (muuntaja-instance)
         interceptors (default-interceptors)
         default-handler (ring-default-handler)}}]
  (let [wrap (if (seq middleware)
               (partial middleware/chain middleware)
               identity)
        handler (http/ring-handler
                 (http/router
                  routes
                  {:data {:muuntaja muuntaja-instance
                          :interceptors interceptors}})
                 default-handler
                 {:executor sieppari/executor})]
    ;; Re-attach reitit's handler metadata after wrapping, so helpers that
    ;; read it (e.g. router introspection) keep working on the wrapped fn.
    (with-meta (wrap handler) (meta handler))))
(defn start-jetty!
  "Start a Jetty instance and start listening for requests.

  The `:port` must be specified. Takes a `:build-handler` function which should
  return a valid Ring handler function. When `:rebuild-on-request?` is `true`
  this build-handler is called on every request. This is useful in development
  for ensuring that changes made on the REPL are picked up, but is not
  recommended for production use, since rebuilding the handler (and with it,
  the router) is expensive.

  `:wrap-handler` can be used to wrap the handler in static middleware, this
  does not get updated on each request.

  When `:join?` is true, block the calling thread until the server stops
  (defaults to `false`).

  Returns the Jetty instance."
  [{:keys [port rebuild-on-request? build-handler join? wrap-handler]
    :or {join? false
         wrap-handler identity}}]
  (log/info :server/starting {:port port :rebuild-on-request? rebuild-on-request?})
  (jetty/run-jetty (wrap-handler
                    (if rebuild-on-request?
                      ;; Rebuild the handler for every request (dev mode).
                      #((build-handler) %)
                      (build-handler)))
                   {:port port
                    ;; Pass the destructured option through; previously this
                    ;; was hard-coded to false, silently ignoring :join?.
                    :join? join?}))
(defn stop-jetty!
  "Stop a Jetty instance.

  Logs the URI of the server being stopped, then calls `.stop` on the
  instance."
  [^Server jetty]
  (log/info :server/stopping {:uri (str (.getURI jetty))})
  (.stop jetty))
| |
b7e40ed1714cd6e7e1e7ea8dea1af53af350712b66f1f9135a71bb17281ac495 | jlouis/graphql-erlang | dungeon_enum.erl | -module(dungeon_enum).
-export([input/2,
output/2]).
%% Convert the wire (binary) representation of a Mood enum value into its
%% internal atom representation. Uses binary_to_existing_atom so unknown
%% values cannot grow the atom table; NOTE(review): an unknown-but-binary
%% mood raises badarg here rather than hitting the error clause — presumably
%% acceptable for this test fixture, verify against callers.
input(<<"Mood">>, <<Bin/bitstring>>) ->
    {ok, binary_to_existing_atom(Bin, utf8)};
%% Any non-binary value is rejected with a tagged error tuple.
input(<<"Mood">>, X) ->
    {error, {invalid_mood, X}}.
%% Convert the internal atom representation of a Mood enum value back to its
%% wire (binary) form.
output(<<"Mood">>, 'DODGY') -> {ok, <<"DODGY">>};
output(<<"Mood">>, 'TRANQUIL') -> {ok, <<"TRANQUIL">>};
output(<<"Mood">>, 'AGGRESSIVE') -> {ok, <<"AGGRESSIVE">>};
output(<<"Mood">>, 'BEAST') -> {ok, <<"BEAST">>};
%% This is a deliberate error case
output(<<"Mood">>, <<"INVALIDMOOD">>) -> {ok, <<"INVALIDMOOD">>}.
| null | https://raw.githubusercontent.com/jlouis/graphql-erlang/4fd356294c2acea42a024366bc5a64661e4862d7/test/dungeon_enum.erl | erlang | This is a deliberate error case | -module(dungeon_enum).
-export([input/2,
output/2]).
%% Decode a binary Mood value to an existing atom; non-binary input yields
%% {error, {invalid_mood, X}}. NOTE(review): a binary naming a non-existent
%% atom raises badarg instead of returning an error tuple — confirm intended.
input(<<"Mood">>, <<Bin/bitstring>>) ->
    {ok, binary_to_existing_atom(Bin, utf8)};
input(<<"Mood">>, X) ->
    {error, {invalid_mood, X}}.
%% Encode an internal Mood atom to its binary wire form.
output(<<"Mood">>, 'DODGY') -> {ok, <<"DODGY">>};
output(<<"Mood">>, 'TRANQUIL') -> {ok, <<"TRANQUIL">>};
output(<<"Mood">>, 'AGGRESSIVE') -> {ok, <<"AGGRESSIVE">>};
output(<<"Mood">>, 'BEAST') -> {ok, <<"BEAST">>};
%% Deliberate error case: a binary (not an atom) slips through unchanged.
output(<<"Mood">>, <<"INVALIDMOOD">>) -> {ok, <<"INVALIDMOOD">>}.
|
e3cf4b2d743b88f3e094c29faa082f36577a8333902382b0b4631d1e7e004e0a | ku-fpg/remote-monad | Alternative.hs | # LANGUAGE GADTs #
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE TypeOperators #-}
|
Module : Control . Remote . Monad . Packet . Alternative
Copyright : ( C ) 2016 , The University of Kansas
License : BSD - style ( see the file LICENSE )
Maintainer :
Stability : Alpha
Portability : GHC
Module: Control.Remote.Monad.Packet.Alternative
Copyright: (C) 2016, The University of Kansas
License: BSD-style (see the file LICENSE)
Maintainer: Andy Gill
Stability: Alpha
Portability: GHC
-}
module Control.Remote.Packet.Alternative
( -- * The remote applicative
AlternativePacket(..)
) where
import Control.Applicative
-- | A Remote Applicative, that can encode both commands and procedures, bundled together.
data AlternativePacket (cp :: * -> *) (a :: *) where
  -- | A single remote primitive of type @cp a@.
  Primitive :: cp a -> AlternativePacket cp a
  -- | Combine two packets with a binary function (applicative composition).
  Zip :: (x -> y -> z)
      -> AlternativePacket cp x
      -> AlternativePacket cp y -> AlternativePacket cp z
  -- | Embed a pure value (no remote work).
  Pure :: a -> AlternativePacket cp a
  -- | Left-biased choice between two packets ('<|>').
  Alt :: AlternativePacket cp a
      -> AlternativePacket cp a -> AlternativePacket cp a
  -- | The failing packet ('empty').
  Empty :: AlternativePacket cp a
-- | Functor defined via the Applicative instance (fmap f = (pure f <*>)).
instance Functor (AlternativePacket cp) where
  fmap f g = pure f <*> g
-- | Applicative structure is reified syntactically as 'Pure' and 'Zip'.
instance Applicative (AlternativePacket cp) where
  pure a = Pure a
  g <*> h = Zip ($) g h
-- | Alternative structure is reified as 'Alt' and 'Empty'.
instance Alternative (AlternativePacket cp) where
  g <|> h = g `Alt` h
  empty = Empty
| null | https://raw.githubusercontent.com/ku-fpg/remote-monad/79d22ea2bde876e6640a25472d90fc3d7dd82a3a/src/Control/Remote/Packet/Alternative.hs | haskell | # LANGUAGE KindSignatures #
# LANGUAGE RankNTypes #
# LANGUAGE TypeOperators #
* The remote applicative
| A Remote Applicative, that can encode both commands and procedures, bundled together. | # LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
|
Module : Control . Remote . Monad . Packet . Alternative
Copyright : ( C ) 2016 , The University of Kansas
License : BSD - style ( see the file LICENSE )
Maintainer :
Stability : Alpha
Portability : GHC
Module: Control.Remote.Monad.Packet.Alternative
Copyright: (C) 2016, The University of Kansas
License: BSD-style (see the file LICENSE)
Maintainer: Andy Gill
Stability: Alpha
Portability: GHC
-}
module Control.Remote.Packet.Alternative
AlternativePacket(..)
) where
import Control.Applicative
data AlternativePacket (cp :: * -> *) (a :: *) where
  -- | One remote primitive (command or procedure).
  Primitive :: cp a -> AlternativePacket cp a
  -- | Applicative composition of two sub-packets via a combining function.
  Zip :: (x -> y -> z)
      -> AlternativePacket cp x
      -> AlternativePacket cp y -> AlternativePacket cp z
  -- | A pure value, requiring no remote call.
  Pure :: a -> AlternativePacket cp a
  -- | Choice between two packets, as built by '<|>'.
  Alt :: AlternativePacket cp a
      -> AlternativePacket cp a -> AlternativePacket cp a
  -- | The empty/failing packet.
  Empty :: AlternativePacket cp a
-- | Functor in terms of Applicative: map by zipping with a pure function.
instance Functor (AlternativePacket cp) where
  fmap f g = pure f <*> g
-- | Build the syntax tree directly: 'pure' is 'Pure', '<*>' is 'Zip' ('$').
instance Applicative (AlternativePacket cp) where
  pure a = Pure a
  g <*> h = Zip ($) g h
-- | '<|>' and 'empty' are captured as 'Alt' and 'Empty' constructors.
instance Alternative (AlternativePacket cp) where
  g <|> h = g `Alt` h
  empty = Empty
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.