_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
ce19a58767d1540cad8973c275b378988a63a26dd80f05d4a207dc4620096b4b | pallix/tikkba | analemma.clj | (ns tikkba.examples.analemma
(:use [analemma svg charts xml]
[tikkba swing dom])
(:import (javax.swing JFrame SwingUtilities)))
(def analemma-data)
(defn analemma-svg
"Creates a SVG representation with the Analemma functions"
[]
(svg
(apply group
(-> (text "Analemma")
(add-attrs :x 120 :y 60)
(style :fill "#000066"
:font-family "Garamond"
:font-size "75px"
:alignment-baseline :middle))
(for [[x y] analemma-data]
(circle (translate-value x -30 5 0 125)
(translate-value y -25 30 125 0)
2 :fill "#000066")))))
(defn create-frame
[canvas]
(let [frame (JFrame.)]
(.add (.getContentPane frame) canvas)
(.setSize frame 800 200)
(.setDefaultCloseOperation frame JFrame/EXIT_ON_CLOSE)
(SwingUtilities/invokeAndWait
(fn [] (.setVisible frame true)))))
(defn -main
[]
Converts the SVG representation to a XML Document
;; and displays the SVG in a JFrame
(let [doc (svg-doc (analemma-svg))
canvas (jsvgcanvas)]
(set-document canvas doc)
(create-frame canvas)))
(def analemma-data
[[-15.165 -23.07]
[-17.016 -22.70]
[-19.171 -22.08]
[-21.099 -21.27]
[-22.755 -20.30]
[-24.107 -19.16]
[-25.446 -17.33]
[-25.914 -16.17]
[-26.198 -14.62]
[-26.158 -12.96]
[-25.814 -11.21]
[-25.194 -9.39]
[-24.520 -7.89]
[-23.708 -6.37]
[-22.529 -4.42]
[-21.205 -2.45]
[-19.777 -0.48]
[-18.289 1.50]
[-16.185 4.24]
[-15.009 5.78]
[-13.605 7.66]
[-12.309 9.49]
[-11.153 11.26]
[-10.169 12.94]
[-9.250 14.85]
[-8.811 16.04]
[-8.469 17.43]
[-8.364 18.69]
[-8.493 19.83]
[-8.847 20.82]
[-9.685 21.96]
[-10.317 22.47]
[-11.231 22.96]
[-12.243 23.28]
[-13.308 23.43]
[-14.378 23.41]
[-15.599 23.16]
[-16.339 22.86]
[-17.139 22.33]
[-17.767 21.64]
[-18.191 20.80]
[-18.387 19.81]
[-18.253 18.20]
[-17.956 17.17]
[-17.361 15.78]
[-16.529 14.28]
[-15.474 12.68]
[-14.221 11.01]
[-12.183 8.54]
[-10.901 7.07]
[-9.212 5.20]
[-7.462 3.29]
[-5.693 1.36]
[-3.946 -0.59]
[-1.938 -2.93]
[-0.686 -4.48]
[0.742 -6.39]
[1.982 -8.28]
[2.993 -10.11]
[3.742 -11.88]
[4.290 -14.23]
[4.318 -15.49]
[4.044 -16.97]
[3.420 -18.33]
[2.446 -19.55]
[1.135 -20.63]
[-0.852 -21.71]
[-2.398 -22.29]
[-4.538 -22.86]
[-6.855 -23.24]
[-9.286 -23.42]
[-11.761 -23.41]
[-14.691 -23.14]]) | null | https://raw.githubusercontent.com/pallix/tikkba/86fda7f97c3b1ff835f02c2b1c0337f3e134fd2c/src/tikkba/examples/analemma.clj | clojure | and displays the SVG in a JFrame | (ns tikkba.examples.analemma
(:use [analemma svg charts xml]
[tikkba swing dom])
(:import (javax.swing JFrame SwingUtilities)))
(def analemma-data)
(defn analemma-svg
"Creates a SVG representation with the Analemma functions"
[]
(svg
(apply group
(-> (text "Analemma")
(add-attrs :x 120 :y 60)
(style :fill "#000066"
:font-family "Garamond"
:font-size "75px"
:alignment-baseline :middle))
(for [[x y] analemma-data]
(circle (translate-value x -30 5 0 125)
(translate-value y -25 30 125 0)
2 :fill "#000066")))))
(defn create-frame
[canvas]
(let [frame (JFrame.)]
(.add (.getContentPane frame) canvas)
(.setSize frame 800 200)
(.setDefaultCloseOperation frame JFrame/EXIT_ON_CLOSE)
(SwingUtilities/invokeAndWait
(fn [] (.setVisible frame true)))))
(defn -main
[]
Converts the SVG representation to a XML Document
(let [doc (svg-doc (analemma-svg))
canvas (jsvgcanvas)]
(set-document canvas doc)
(create-frame canvas)))
(def analemma-data
[[-15.165 -23.07]
[-17.016 -22.70]
[-19.171 -22.08]
[-21.099 -21.27]
[-22.755 -20.30]
[-24.107 -19.16]
[-25.446 -17.33]
[-25.914 -16.17]
[-26.198 -14.62]
[-26.158 -12.96]
[-25.814 -11.21]
[-25.194 -9.39]
[-24.520 -7.89]
[-23.708 -6.37]
[-22.529 -4.42]
[-21.205 -2.45]
[-19.777 -0.48]
[-18.289 1.50]
[-16.185 4.24]
[-15.009 5.78]
[-13.605 7.66]
[-12.309 9.49]
[-11.153 11.26]
[-10.169 12.94]
[-9.250 14.85]
[-8.811 16.04]
[-8.469 17.43]
[-8.364 18.69]
[-8.493 19.83]
[-8.847 20.82]
[-9.685 21.96]
[-10.317 22.47]
[-11.231 22.96]
[-12.243 23.28]
[-13.308 23.43]
[-14.378 23.41]
[-15.599 23.16]
[-16.339 22.86]
[-17.139 22.33]
[-17.767 21.64]
[-18.191 20.80]
[-18.387 19.81]
[-18.253 18.20]
[-17.956 17.17]
[-17.361 15.78]
[-16.529 14.28]
[-15.474 12.68]
[-14.221 11.01]
[-12.183 8.54]
[-10.901 7.07]
[-9.212 5.20]
[-7.462 3.29]
[-5.693 1.36]
[-3.946 -0.59]
[-1.938 -2.93]
[-0.686 -4.48]
[0.742 -6.39]
[1.982 -8.28]
[2.993 -10.11]
[3.742 -11.88]
[4.290 -14.23]
[4.318 -15.49]
[4.044 -16.97]
[3.420 -18.33]
[2.446 -19.55]
[1.135 -20.63]
[-0.852 -21.71]
[-2.398 -22.29]
[-4.538 -22.86]
[-6.855 -23.24]
[-9.286 -23.42]
[-11.761 -23.41]
[-14.691 -23.14]]) |
ecbb80645f24fd90b7083af26c061e2c25a5be37c2d6de884933bf06b7c47372 | amw-zero/sligh | process.ml | open Core
type schema = {
name: string;
attrs: typed_attr list;
}
type action = {
action_ast: Core.proc_action;
state_vars: Core.typed_attr list;
}
type variant = {
vname: string;
variants: variant_tag list
}
type process = {
schemas: schema list;
variants: variant list;
variables: Core.typed_attr list;
actions: action list;
}
let new_process () = {
schemas=[];
variants=[];
variables=[];
actions=[];
}
let filter_model stmts = List.filter_map (fun stmt -> match stmt with
| Core.Process(_) -> Some(stmt)
| Core.Entity(_) -> Some(stmt)
| _ -> None) stmts
let collect_actions actions def = match def with
| ProcAction(act) -> act :: actions
| _ -> actions
let filter_actions defs = List.fold_left collect_actions [] defs
let collect_attrs attrs def = match def with
| ProcAttr(attr) -> attr :: attrs
| _ -> attrs
let filter_attrs (defs: proc_def list): typed_attr list = List.fold_left collect_attrs [] defs
let rec collect_state_vars state_vars e (proc_attrs: typed_attr list): typed_attr list =
match e with
| Let(_, value) -> state_vars @ collect_state_vars [] value proc_attrs
| Assignment(var, e) ->
(* Failure to find attr here means assignment is on a non-state variable *)
let proc_attr = List.find (fun attr -> Core.(attr.name = var)) proc_attrs in
{name=var; typ=proc_attr.typ} :: state_vars @ collect_state_vars [] e proc_attrs
| Iden(i, _) ->
let proc_attr = List.find_opt (fun attr -> Core.(attr.name = i)) proc_attrs in
(match proc_attr with
| Some(pa) -> {name=i; typ=pa.typ} :: state_vars
| None -> state_vars)
| Array(es) ->
List.concat_map
(fun e -> collect_state_vars [] e proc_attrs)
es @ state_vars
| If(cond, then_e, else_e) ->
let else_state_vars: typed_attr list = match else_e with
| Some(ee) -> collect_state_vars [] ee proc_attrs
| None -> [] in
collect_state_vars [] cond proc_attrs @
collect_state_vars [] then_e proc_attrs @ else_state_vars
| StmtList(es) ->
List.concat_map
(fun e -> collect_state_vars [] e proc_attrs)
es @ state_vars
| Call(_, args) ->
List.concat_map
(fun e -> collect_state_vars [] e proc_attrs)
args @ state_vars
| Access(l, r) ->
let proc_attr = List.find_opt (fun attr -> Core.(attr.name = r)) proc_attrs in
(match proc_attr with
| Some(pa) ->
let l_state_vars = collect_state_vars [] l proc_attrs in
{name=r; typ=pa.typ} :: state_vars @ l_state_vars
| None -> state_vars)
| Case(e, cases) ->
collect_state_vars [] e proc_attrs @
(List.concat_map (fun c -> collect_state_vars [] c.value proc_attrs) cases) @
state_vars
| String(_) -> state_vars
| Num(_) -> state_vars
| Bool(_) -> state_vars
Should only be , possibly only be class decl
| Implementation(_) -> state_vars
| FuncDef(_) -> state_vars
| File(_) -> state_vars
| Effect(_) -> state_vars
| Process(_, _) -> state_vars
| Entity(_, _) -> state_vars
| TS(_) -> state_vars
| Variant(_, _) -> state_vars
let state_vars_of_action (action: Core.proc_action) (proc_attrs: typed_attr list) =
List.fold_left
(fun state_vars e -> collect_state_vars state_vars e proc_attrs)
[]
action.body
(* collect_state_vars is returning duplicates - should ultimately fix that instead
of this unique sort *)
|> List.sort_uniq (fun sv1 sv2 -> Core.(compare sv1.name sv2.name))
let analyze_action actions action proc_attrs =
{
state_vars=state_vars_of_action action proc_attrs;
action_ast=action;
} :: actions
let analyze_actions (actions: Core.proc_action list) (proc_attrs: typed_attr list) =
List.fold_left
(fun analyzed_actions action -> analyze_action analyzed_actions action proc_attrs)
[]
actions
let analyze_model m stmt =
match stmt with
| Core.Process(_, defs) ->
let actions = filter_actions defs in
let attrs = filter_attrs defs in
{ m with
schemas = m.schemas;
variables = attrs @ m.variables;
actions = m.actions @ (analyze_actions actions attrs);
}
| Core.Entity(e, attrs) ->
{ m with
schemas = {name=e; attrs;} :: m.schemas;
}
| Core.Variant(n, vs) ->
{ m with
variants = {vname=n; variants=vs} :: m.variants}
| _ -> m
let print_schema s =
Printf.printf "Schema: %s,\n\t%s\n" s.name (Util.print_list "\n" (List.map Util.string_of_typed_attr s.attrs))
let print_variable v =
Printf.printf "Var: %s\n" (Util.string_of_typed_attr v)
let print_action a =
Printf.printf "Action: \n ast: %s\n\n state_vars: %s\n"
(Util.string_of_proc_action a.action_ast)
(String.concat "\n" (List.map Util.string_of_typed_attr a.state_vars))
let print_process m =
print_endline "Process.schemas";
List.iter print_schema m.schemas;
print_endline "Process.variables";
List.iter print_variable m.variables;
print_endline "Process.actions";
List.iter print_action m.actions
| null | https://raw.githubusercontent.com/amw-zero/sligh/bb979a0f7c0c57dfc9c2fd00ee02e490816ce786/lib/process.ml | ocaml | Failure to find attr here means assignment is on a non-state variable
collect_state_vars is returning duplicates - should ultimately fix that instead
of this unique sort | open Core
type schema = {
name: string;
attrs: typed_attr list;
}
type action = {
action_ast: Core.proc_action;
state_vars: Core.typed_attr list;
}
type variant = {
vname: string;
variants: variant_tag list
}
type process = {
schemas: schema list;
variants: variant list;
variables: Core.typed_attr list;
actions: action list;
}
let new_process () = {
schemas=[];
variants=[];
variables=[];
actions=[];
}
let filter_model stmts = List.filter_map (fun stmt -> match stmt with
| Core.Process(_) -> Some(stmt)
| Core.Entity(_) -> Some(stmt)
| _ -> None) stmts
let collect_actions actions def = match def with
| ProcAction(act) -> act :: actions
| _ -> actions
let filter_actions defs = List.fold_left collect_actions [] defs
let collect_attrs attrs def = match def with
| ProcAttr(attr) -> attr :: attrs
| _ -> attrs
let filter_attrs (defs: proc_def list): typed_attr list = List.fold_left collect_attrs [] defs
let rec collect_state_vars state_vars e (proc_attrs: typed_attr list): typed_attr list =
match e with
| Let(_, value) -> state_vars @ collect_state_vars [] value proc_attrs
| Assignment(var, e) ->
let proc_attr = List.find (fun attr -> Core.(attr.name = var)) proc_attrs in
{name=var; typ=proc_attr.typ} :: state_vars @ collect_state_vars [] e proc_attrs
| Iden(i, _) ->
let proc_attr = List.find_opt (fun attr -> Core.(attr.name = i)) proc_attrs in
(match proc_attr with
| Some(pa) -> {name=i; typ=pa.typ} :: state_vars
| None -> state_vars)
| Array(es) ->
List.concat_map
(fun e -> collect_state_vars [] e proc_attrs)
es @ state_vars
| If(cond, then_e, else_e) ->
let else_state_vars: typed_attr list = match else_e with
| Some(ee) -> collect_state_vars [] ee proc_attrs
| None -> [] in
collect_state_vars [] cond proc_attrs @
collect_state_vars [] then_e proc_attrs @ else_state_vars
| StmtList(es) ->
List.concat_map
(fun e -> collect_state_vars [] e proc_attrs)
es @ state_vars
| Call(_, args) ->
List.concat_map
(fun e -> collect_state_vars [] e proc_attrs)
args @ state_vars
| Access(l, r) ->
let proc_attr = List.find_opt (fun attr -> Core.(attr.name = r)) proc_attrs in
(match proc_attr with
| Some(pa) ->
let l_state_vars = collect_state_vars [] l proc_attrs in
{name=r; typ=pa.typ} :: state_vars @ l_state_vars
| None -> state_vars)
| Case(e, cases) ->
collect_state_vars [] e proc_attrs @
(List.concat_map (fun c -> collect_state_vars [] c.value proc_attrs) cases) @
state_vars
| String(_) -> state_vars
| Num(_) -> state_vars
| Bool(_) -> state_vars
Should only be , possibly only be class decl
| Implementation(_) -> state_vars
| FuncDef(_) -> state_vars
| File(_) -> state_vars
| Effect(_) -> state_vars
| Process(_, _) -> state_vars
| Entity(_, _) -> state_vars
| TS(_) -> state_vars
| Variant(_, _) -> state_vars
let state_vars_of_action (action: Core.proc_action) (proc_attrs: typed_attr list) =
List.fold_left
(fun state_vars e -> collect_state_vars state_vars e proc_attrs)
[]
action.body
|> List.sort_uniq (fun sv1 sv2 -> Core.(compare sv1.name sv2.name))
let analyze_action actions action proc_attrs =
{
state_vars=state_vars_of_action action proc_attrs;
action_ast=action;
} :: actions
let analyze_actions (actions: Core.proc_action list) (proc_attrs: typed_attr list) =
List.fold_left
(fun analyzed_actions action -> analyze_action analyzed_actions action proc_attrs)
[]
actions
let analyze_model m stmt =
match stmt with
| Core.Process(_, defs) ->
let actions = filter_actions defs in
let attrs = filter_attrs defs in
{ m with
schemas = m.schemas;
variables = attrs @ m.variables;
actions = m.actions @ (analyze_actions actions attrs);
}
| Core.Entity(e, attrs) ->
{ m with
schemas = {name=e; attrs;} :: m.schemas;
}
| Core.Variant(n, vs) ->
{ m with
variants = {vname=n; variants=vs} :: m.variants}
| _ -> m
let print_schema s =
Printf.printf "Schema: %s,\n\t%s\n" s.name (Util.print_list "\n" (List.map Util.string_of_typed_attr s.attrs))
let print_variable v =
Printf.printf "Var: %s\n" (Util.string_of_typed_attr v)
let print_action a =
Printf.printf "Action: \n ast: %s\n\n state_vars: %s\n"
(Util.string_of_proc_action a.action_ast)
(String.concat "\n" (List.map Util.string_of_typed_attr a.state_vars))
let print_process m =
print_endline "Process.schemas";
List.iter print_schema m.schemas;
print_endline "Process.variables";
List.iter print_variable m.variables;
print_endline "Process.actions";
List.iter print_action m.actions
|
ac0e6749e2d68d2099c18d8bc265302561834de0d699d27eb1a0880a44d34df1 | sdiehl/print | Example.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ExtendedDefaultRules #
# LANGUAGE NoImplicitPrelude #
module Main where
import Print
import Protolude hiding (Show, show, print)
data Animal
= Dog
| Cat
deriving (Generic, Show)
data T1
= T1 Int Bool
deriving (Generic, Show)
data T2
= T2 Int Bool
| T3 { x :: Bool, y :: Int }
deriving (Generic, Show)
data B a = B
{ first :: Int
, second :: a
} deriving (Generic, Show)
data I a b = a :. b
deriving (Generic, Show)
main :: IO ()
main = do
print [Cat, Dog]
print [0 :: Int,5..100]
print (T1 42 False)
print (T2 1 True, B 1 (T3 False 3))
print (B 3 [Cat, Dog])
print (show (0.5 :: Double) == show ((1/2) :: Double))
print ("継続は力なり" :: Text)
print ("To be or not to be.\nThat is the question." :: Text)
putStrLn (show (3.1415926535 :: Double))
| null | https://raw.githubusercontent.com/sdiehl/print/0b434500b78bf5a8b1081957ffc0a2de693e6429/Example.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings # | # LANGUAGE DeriveGeneric #
# LANGUAGE ExtendedDefaultRules #
# LANGUAGE NoImplicitPrelude #
module Main where
import Print
import Protolude hiding (Show, show, print)
data Animal
= Dog
| Cat
deriving (Generic, Show)
data T1
= T1 Int Bool
deriving (Generic, Show)
data T2
= T2 Int Bool
| T3 { x :: Bool, y :: Int }
deriving (Generic, Show)
data B a = B
{ first :: Int
, second :: a
} deriving (Generic, Show)
data I a b = a :. b
deriving (Generic, Show)
main :: IO ()
main = do
print [Cat, Dog]
print [0 :: Int,5..100]
print (T1 42 False)
print (T2 1 True, B 1 (T3 False 3))
print (B 3 [Cat, Dog])
print (show (0.5 :: Double) == show ((1/2) :: Double))
print ("継続は力なり" :: Text)
print ("To be or not to be.\nThat is the question." :: Text)
putStrLn (show (3.1415926535 :: Double))
|
22d41d67c1d6b0c67c6d71c5fd302813909710d1c46d672751c39236de9f98f1 | babashka/sci | utils.cljc | (ns sci.impl.utils
{:no-doc true}
(:refer-clojure :exclude [eval demunge var?])
(:require [clojure.string :as str]
[sci.impl.macros :as macros]
[sci.impl.types :as t]
[sci.impl.vars :as vars]
[sci.lang :as lang])
#?(:cljs (:require-macros [sci.impl.utils :refer [kw-identical?]])))
#?(:clj (set! *warn-on-reflection* true))
(derive :sci.error/realized-beyond-max :sci/error)
(derive :sci.error/parse :sci/error)
(defn constant? [x]
(or (nil? x)
(number? x)
(string? x)
(keyword? x)
(boolean? x)
#?(:clj
(instance? java.util.regex.Pattern x)
:cljs
(instance? js/RegExp x))))
(defmacro kw-identical? [k v]
(macros/?
:clj `(identical? ~k ~v)
:cljs `(cljs.core/keyword-identical? ~k ~v)))
(declare current-file current-ns)
(def ^:dynamic *top-level-location* nil)
(defn throw-error-with-location
([msg iobj] (throw-error-with-location msg iobj {}))
([msg iobj data]
(let [{:keys [:line :column :file]
:or {file @current-file}} (meta iobj)]
(throw (ex-info msg (merge {:type :sci/error
:line (or line (:line *top-level-location*))
:column (or column (:column *top-level-location*))
:file file} data))))))
(def ^:dynamic *in-try* false)
(defn macro? [f]
(when-some [m (meta f)]
(or (:sci/macro m)
(:macro m))))
#?(:cljs
(def allowed-append "used for allowing interop in with-out-str"
(symbol "append")))
(defn demunge [s]
#?(:clj (clojure.lang.Compiler/demunge s)
:cljs (cljs.core/demunge s)))
#?(:clj
(defn rewrite-ex-msg [ex-msg env fm]
(when ex-msg
(if-let [[_ printed-fn] (re-matches #"Wrong number of args \(\d+\) passed to: (.*)" ex-msg)]
(let [fn-pat #"(sci\.impl\.)?fns/fun/arity-([0-9])+--\d+"
[fn-match prefix arity] (re-find fn-pat ex-msg)
friendly-name (when arity (str "function of arity " arity))]
(if (:name fm)
(let [ns (symbol (str (:ns fm)))
var-name (:name fm)
var (get-in @env [:namespaces ns var-name])
fstr (when var (let [varf (if (instance? clojure.lang.IDeref var)
(deref var)
var)
varf (or
;; resolve macro inner fn for comparison
(some-> varf meta :sci.impl/inner-fn)
varf)
fstr (clojure.lang.Compiler/demunge (str varf))
fstr (first (str/split fstr #"@"))
fstr (if prefix
fstr
(str/replace fstr #"^sci\.impl\." ""))]
fstr))]
(cond (and fstr printed-fn (= fstr printed-fn))
(str/replace ex-msg printed-fn
(str (:ns fm) "/" (:name fm)))
friendly-name (str/replace ex-msg fn-match friendly-name)
:else ex-msg))
ex-msg))
ex-msg))))
(defn rethrow-with-location-of-node
([ctx ^Throwable e raw-node] (rethrow-with-location-of-node ctx (:bindings ctx) e raw-node))
([ctx _bindings ^Throwable e raw-node]
(if #?(:clj (or *in-try*
(not= (:main-thread-id ctx)
(.getId (Thread/currentThread))))
:cljs *in-try*) (throw e)
(let [stack (t/stack raw-node)
#?@(:clj [fm (:sci.impl/f-meta stack)])
env (:env ctx)
id (:id ctx)
d (ex-data e)
st (or (when-let [st (:sci.impl/callstack d)]
st)
(volatile! '()))]
(when stack
(vswap! st conj stack))
(let [d (ex-data e)
;; st (:sci.impl/callstack d)
wrapping-sci-error? (and (isa? (:type d) :sci/error)
(:sci.impl/callstack d))]
(if wrapping-sci-error?
(throw e)
(let [ex-msg #?(:clj (.getMessage e)
:cljs (.-message e))
{:keys [:line :column :file]}
(or stack
(some-> env deref
:sci.impl/callstack (get id)
deref last meta)
#_(meta node))]
(if (and line column)
(let [ex-msg #?(:clj (rewrite-ex-msg ex-msg env fm)
:cljs ex-msg)
phase (:phase d)
new-exception
(let [new-d (cond-> {:type :sci/error
:line line
:column column
:message ex-msg
:sci.impl/callstack st
:file file}
phase (assoc :phase phase))]
(ex-info ex-msg new-d e))]
(throw new-exception))
(throw e)))))))))
(defn- iobj? [obj]
(and #?(:clj (instance? clojure.lang.IObj obj)
:cljs (implements? IWithMeta obj))
(meta obj)))
(defn vary-meta*
"Only adds metadata to obj if d is not nil and if obj already has meta"
[obj f & args]
(if (iobj? obj)
(apply vary-meta obj f args)
obj))
(defn strip-core-ns [sym]
(case (namespace sym)
("clojure.core" "cljs.core") (symbol (name sym))
sym))
(def allowed-loop (symbol "loop"))
(def allowed-recur (symbol "recur"))
(def var-unbound #?(:clj (Object.)
:cljs (js/Object.)))
(defn namespace-object
"Fetches namespaces from env if it exists. Else, if `create?`,
produces one regardless of the existince of the namespace in env and
adds it to env before returning it."
[env ns-sym create? attr-map]
(let [env* @env
ns-map (get-in env* [:namespaces ns-sym])]
(or (:obj ns-map)
(when (or ns-map create?)
(let [ns-obj (lang/->Namespace ns-sym attr-map)]
(swap! env assoc-in [:namespaces ns-sym :obj] ns-obj)
ns-obj)))))
(defn set-namespace! [ctx ns-sym attr-map]
(let [env (:env ctx)
attr-map (merge (meta ns-sym) attr-map)
ns-obj (namespace-object env ns-sym true attr-map)]
(t/setVal current-ns ns-obj)))
(def eval-form-state (volatile! nil))
(def eval-require-state (volatile! nil))
(def eval-use-state (volatile! nil))
(def eval-resolve-state (volatile! nil))
(def eval-refer-state (volatile! nil))
(def macroexpand* (volatile! nil))
(def macroexpand-1* (volatile! nil))
(def eval-string* (volatile! nil))
(def lookup (volatile! nil))
(def analyze (volatile! nil))
(defn eval [sci-ctx form]
(@eval-form-state sci-ctx form))
(defn split-when
"Like partition-by but splits collection only when `pred` returns
a truthy value. E.g. `(split-when odd? [1 2 3 4 5]) => ((1 2) (3 4) (5))`"
[pred coll]
(let [f (complement pred)]
(lazy-seq
(when-let [s (seq coll)]
(let [fst (first s)
run (cons fst (take-while f (next s)))]
(cons run (split-when pred (lazy-seq (drop (count run) s)))))))))
(def ana-macros
'#{do if and or fn fn* def defn
lazy-seq case try defmacro
expand-dot* expand-constructor new . import in-ns ns var
set! resolve})
(defn maybe-destructured
[params body]
(if (every? symbol? params)
{:params params
:body body}
(loop [params params
new-params (with-meta [] (meta params))
lets []]
(if params
(if (symbol? (first params))
(recur (next params) (conj new-params (first params)) lets)
(let [gparam (gensym "p__")]
(recur (next params) (conj new-params gparam)
(-> lets (conj (first params)) (conj gparam)))))
{:params new-params
:body [`(let ~lets
~@body)]}))))
(def unqualify-symbol vars/unqualify-symbol)
(defn make-stack
([expr-meta] (make-stack expr-meta false))
([expr-meta special?]
(cond-> (assoc expr-meta
:ns @current-ns
:file @current-file)
special? (assoc :special true))))
(defn log [& xs]
#?(:clj (.println System/err (str/join " " xs))
:cljs (.log js/console (str/join " " xs))))
(defn dynamic-var
([name]
(dynamic-var name nil (meta name)))
([name init-val]
(dynamic-var name init-val (meta name)))
([name init-val meta]
(let [meta (assoc meta :dynamic true :name (unqualify-symbol name))]
(sci.lang.Var. init-val name meta false false nil))))
;; foundational namespaces
(def user-ns (lang/->Namespace 'user nil))
(def clojure-core-ns (lang/->Namespace 'clojure.core nil))
(def current-file
(dynamic-var '*file* nil
{:doc "The path of the file being evaluated, as a String.\n\n When there is no file, e.g. in the REPL, the value is not defined."
:ns clojure-core-ns}))
(def current-ns
(dynamic-var '*ns* user-ns
{:ns clojure-core-ns
:doc "A sci.lang.Namespace object representing the current namespace."}))
(defn current-ns-name []
(t/getName @current-ns))
(defn new-var
"Returns a new sci var."
([name] (doto (new-var name nil nil false)
(vars/unbind)))
([name init-val] (new-var name init-val (meta name) false))
([name init-val meta] (new-var name init-val meta false))
([name init-val meta ctx?] (sci.lang.Var. init-val name (assoc meta :name (unqualify-symbol name)) false ctx? nil)))
(defn var? [x]
(instance? sci.lang.Var x))
(defn namespace? [x]
(instance? #?(:clj sci.lang.Namespace
:cljs sci.lang/Namespace) x))
| null | https://raw.githubusercontent.com/babashka/sci/0fee91953e0d0504e80a5997585c6d00833f98c0/src/sci/impl/utils.cljc | clojure | resolve macro inner fn for comparison
st (:sci.impl/callstack d)
foundational namespaces | (ns sci.impl.utils
{:no-doc true}
(:refer-clojure :exclude [eval demunge var?])
(:require [clojure.string :as str]
[sci.impl.macros :as macros]
[sci.impl.types :as t]
[sci.impl.vars :as vars]
[sci.lang :as lang])
#?(:cljs (:require-macros [sci.impl.utils :refer [kw-identical?]])))
#?(:clj (set! *warn-on-reflection* true))
(derive :sci.error/realized-beyond-max :sci/error)
(derive :sci.error/parse :sci/error)
(defn constant? [x]
(or (nil? x)
(number? x)
(string? x)
(keyword? x)
(boolean? x)
#?(:clj
(instance? java.util.regex.Pattern x)
:cljs
(instance? js/RegExp x))))
(defmacro kw-identical? [k v]
(macros/?
:clj `(identical? ~k ~v)
:cljs `(cljs.core/keyword-identical? ~k ~v)))
(declare current-file current-ns)
(def ^:dynamic *top-level-location* nil)
(defn throw-error-with-location
([msg iobj] (throw-error-with-location msg iobj {}))
([msg iobj data]
(let [{:keys [:line :column :file]
:or {file @current-file}} (meta iobj)]
(throw (ex-info msg (merge {:type :sci/error
:line (or line (:line *top-level-location*))
:column (or column (:column *top-level-location*))
:file file} data))))))
(def ^:dynamic *in-try* false)
(defn macro? [f]
(when-some [m (meta f)]
(or (:sci/macro m)
(:macro m))))
#?(:cljs
(def allowed-append "used for allowing interop in with-out-str"
(symbol "append")))
(defn demunge [s]
#?(:clj (clojure.lang.Compiler/demunge s)
:cljs (cljs.core/demunge s)))
#?(:clj
(defn rewrite-ex-msg [ex-msg env fm]
(when ex-msg
(if-let [[_ printed-fn] (re-matches #"Wrong number of args \(\d+\) passed to: (.*)" ex-msg)]
(let [fn-pat #"(sci\.impl\.)?fns/fun/arity-([0-9])+--\d+"
[fn-match prefix arity] (re-find fn-pat ex-msg)
friendly-name (when arity (str "function of arity " arity))]
(if (:name fm)
(let [ns (symbol (str (:ns fm)))
var-name (:name fm)
var (get-in @env [:namespaces ns var-name])
fstr (when var (let [varf (if (instance? clojure.lang.IDeref var)
(deref var)
var)
varf (or
(some-> varf meta :sci.impl/inner-fn)
varf)
fstr (clojure.lang.Compiler/demunge (str varf))
fstr (first (str/split fstr #"@"))
fstr (if prefix
fstr
(str/replace fstr #"^sci\.impl\." ""))]
fstr))]
(cond (and fstr printed-fn (= fstr printed-fn))
(str/replace ex-msg printed-fn
(str (:ns fm) "/" (:name fm)))
friendly-name (str/replace ex-msg fn-match friendly-name)
:else ex-msg))
ex-msg))
ex-msg))))
(defn rethrow-with-location-of-node
([ctx ^Throwable e raw-node] (rethrow-with-location-of-node ctx (:bindings ctx) e raw-node))
([ctx _bindings ^Throwable e raw-node]
(if #?(:clj (or *in-try*
(not= (:main-thread-id ctx)
(.getId (Thread/currentThread))))
:cljs *in-try*) (throw e)
(let [stack (t/stack raw-node)
#?@(:clj [fm (:sci.impl/f-meta stack)])
env (:env ctx)
id (:id ctx)
d (ex-data e)
st (or (when-let [st (:sci.impl/callstack d)]
st)
(volatile! '()))]
(when stack
(vswap! st conj stack))
(let [d (ex-data e)
wrapping-sci-error? (and (isa? (:type d) :sci/error)
(:sci.impl/callstack d))]
(if wrapping-sci-error?
(throw e)
(let [ex-msg #?(:clj (.getMessage e)
:cljs (.-message e))
{:keys [:line :column :file]}
(or stack
(some-> env deref
:sci.impl/callstack (get id)
deref last meta)
#_(meta node))]
(if (and line column)
(let [ex-msg #?(:clj (rewrite-ex-msg ex-msg env fm)
:cljs ex-msg)
phase (:phase d)
new-exception
(let [new-d (cond-> {:type :sci/error
:line line
:column column
:message ex-msg
:sci.impl/callstack st
:file file}
phase (assoc :phase phase))]
(ex-info ex-msg new-d e))]
(throw new-exception))
(throw e)))))))))
(defn- iobj? [obj]
(and #?(:clj (instance? clojure.lang.IObj obj)
:cljs (implements? IWithMeta obj))
(meta obj)))
(defn vary-meta*
"Only adds metadata to obj if d is not nil and if obj already has meta"
[obj f & args]
(if (iobj? obj)
(apply vary-meta obj f args)
obj))
(defn strip-core-ns [sym]
(case (namespace sym)
("clojure.core" "cljs.core") (symbol (name sym))
sym))
(def allowed-loop (symbol "loop"))
(def allowed-recur (symbol "recur"))
(def var-unbound #?(:clj (Object.)
:cljs (js/Object.)))
(defn namespace-object
"Fetches namespaces from env if it exists. Else, if `create?`,
produces one regardless of the existince of the namespace in env and
adds it to env before returning it."
[env ns-sym create? attr-map]
(let [env* @env
ns-map (get-in env* [:namespaces ns-sym])]
(or (:obj ns-map)
(when (or ns-map create?)
(let [ns-obj (lang/->Namespace ns-sym attr-map)]
(swap! env assoc-in [:namespaces ns-sym :obj] ns-obj)
ns-obj)))))
(defn set-namespace! [ctx ns-sym attr-map]
(let [env (:env ctx)
attr-map (merge (meta ns-sym) attr-map)
ns-obj (namespace-object env ns-sym true attr-map)]
(t/setVal current-ns ns-obj)))
(def eval-form-state (volatile! nil))
(def eval-require-state (volatile! nil))
(def eval-use-state (volatile! nil))
(def eval-resolve-state (volatile! nil))
(def eval-refer-state (volatile! nil))
(def macroexpand* (volatile! nil))
(def macroexpand-1* (volatile! nil))
(def eval-string* (volatile! nil))
(def lookup (volatile! nil))
(def analyze (volatile! nil))
(defn eval [sci-ctx form]
(@eval-form-state sci-ctx form))
(defn split-when
"Like partition-by but splits collection only when `pred` returns
a truthy value. E.g. `(split-when odd? [1 2 3 4 5]) => ((1 2) (3 4) (5))`"
[pred coll]
(let [f (complement pred)]
(lazy-seq
(when-let [s (seq coll)]
(let [fst (first s)
run (cons fst (take-while f (next s)))]
(cons run (split-when pred (lazy-seq (drop (count run) s)))))))))
(def ana-macros
'#{do if and or fn fn* def defn
lazy-seq case try defmacro
expand-dot* expand-constructor new . import in-ns ns var
set! resolve})
(defn maybe-destructured
[params body]
(if (every? symbol? params)
{:params params
:body body}
(loop [params params
new-params (with-meta [] (meta params))
lets []]
(if params
(if (symbol? (first params))
(recur (next params) (conj new-params (first params)) lets)
(let [gparam (gensym "p__")]
(recur (next params) (conj new-params gparam)
(-> lets (conj (first params)) (conj gparam)))))
{:params new-params
:body [`(let ~lets
~@body)]}))))
(def unqualify-symbol vars/unqualify-symbol)
(defn make-stack
([expr-meta] (make-stack expr-meta false))
([expr-meta special?]
(cond-> (assoc expr-meta
:ns @current-ns
:file @current-file)
special? (assoc :special true))))
(defn log [& xs]
#?(:clj (.println System/err (str/join " " xs))
:cljs (.log js/console (str/join " " xs))))
(defn dynamic-var
([name]
(dynamic-var name nil (meta name)))
([name init-val]
(dynamic-var name init-val (meta name)))
([name init-val meta]
(let [meta (assoc meta :dynamic true :name (unqualify-symbol name))]
(sci.lang.Var. init-val name meta false false nil))))
(def user-ns (lang/->Namespace 'user nil))
(def clojure-core-ns (lang/->Namespace 'clojure.core nil))
(def current-file
(dynamic-var '*file* nil
{:doc "The path of the file being evaluated, as a String.\n\n When there is no file, e.g. in the REPL, the value is not defined."
:ns clojure-core-ns}))
(def current-ns
(dynamic-var '*ns* user-ns
{:ns clojure-core-ns
:doc "A sci.lang.Namespace object representing the current namespace."}))
(defn current-ns-name []
(t/getName @current-ns))
(defn new-var
"Returns a new sci var."
([name] (doto (new-var name nil nil false)
(vars/unbind)))
([name init-val] (new-var name init-val (meta name) false))
([name init-val meta] (new-var name init-val meta false))
([name init-val meta ctx?] (sci.lang.Var. init-val name (assoc meta :name (unqualify-symbol name)) false ctx? nil)))
(defn var? [x]
(instance? sci.lang.Var x))
(defn namespace? [x]
(instance? #?(:clj sci.lang.Namespace
:cljs sci.lang/Namespace) x))
|
f7f36b923fa6e6daf9aa0ee83bd59ed6275ea5c2fc95eee78cede512d8a83775 | facebook/duckling | Corpus.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.Corpus
( datetime
, datetimeHoliday
, datetimeInterval
, datetimeIntervalHoliday
, datetimeOpenInterval
, examples
) where
import Data.Aeson
import qualified Data.HashMap.Strict as H
import Data.Text (Text)
import qualified Data.Time.LocalTime.TimeZone.Series as Series
import Prelude
import Data.String
import Duckling.Resolve
import Duckling.Testing.Types hiding (examples)
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Types hiding (Entity(..))
datetime :: Datetime -> Grain -> Context -> TimeValue
datetime d g ctx = datetimeIntervalHolidayHelper (d, Nothing) g Nothing ctx
datetimeHoliday :: Datetime -> Grain -> Text -> Context -> TimeValue
datetimeHoliday d g h ctx =
datetimeIntervalHolidayHelper (d, Nothing) g (Just h) ctx
datetimeInterval :: (Datetime, Datetime) -> Grain -> Context -> TimeValue
datetimeInterval (d1, d2) g ctx =
datetimeIntervalHolidayHelper (d1, Just d2) g Nothing ctx
datetimeIntervalHoliday ::
(Datetime, Datetime) -> Grain -> Text -> Context -> TimeValue
datetimeIntervalHoliday (d1, d2) g h ctx =
datetimeIntervalHolidayHelper (d1, Just d2) g (Just h) ctx
datetimeIntervalHolidayHelper ::
(Datetime, Maybe Datetime) -> Grain -> Maybe Text -> Context -> TimeValue
datetimeIntervalHolidayHelper (d1, md2) g hol ctx = TimeValue tv [tv] hol
where
DucklingTime (Series.ZoneSeriesTime _ tzSeries) = referenceTime ctx
tv = timeValue tzSeries TimeObject {start = dt d1, end = d, grain = g}
d = case md2 of
Nothing -> Nothing
Just d2 -> Just $ dt d2
datetimeOpenInterval
:: IntervalDirection -> Datetime -> Grain -> Context -> TimeValue
datetimeOpenInterval dir d g ctx = TimeValue tv [tv] Nothing
where
DucklingTime (Series.ZoneSeriesTime _ tzSeries) = referenceTime ctx
tv = openInterval tzSeries dir TimeObject
{start = dt d, end = Nothing, grain = g}
check :: ToJSON a => (Context -> a) -> TestPredicate
check f context Resolved{rval = RVal _ v} = case toJSON v of
Object o -> deleteValues (toJSON (f context)) == deleteValues (Object o)
_ -> False
where
deleteValues :: Value -> Value
deleteValues (Object o) = Object $ H.delete "values" o
deleteValues _ = Object H.empty
examples :: ToJSON a => (Context -> a) -> [Text] -> [Example]
examples f = examplesCustom (check f)
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Time/Corpus.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
# LANGUAGE OverloadedStrings # | Copyright ( c ) 2016 - present , Facebook , Inc.
# LANGUAGE NamedFieldPuns #
module Duckling.Time.Corpus
( datetime
, datetimeHoliday
, datetimeInterval
, datetimeIntervalHoliday
, datetimeOpenInterval
, examples
) where
import Data.Aeson
import qualified Data.HashMap.Strict as H
import Data.Text (Text)
import qualified Data.Time.LocalTime.TimeZone.Series as Series
import Prelude
import Data.String
import Duckling.Resolve
import Duckling.Testing.Types hiding (examples)
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Types hiding (Entity(..))
datetime :: Datetime -> Grain -> Context -> TimeValue
datetime d g ctx = datetimeIntervalHolidayHelper (d, Nothing) g Nothing ctx
datetimeHoliday :: Datetime -> Grain -> Text -> Context -> TimeValue
datetimeHoliday d g h ctx =
datetimeIntervalHolidayHelper (d, Nothing) g (Just h) ctx
datetimeInterval :: (Datetime, Datetime) -> Grain -> Context -> TimeValue
datetimeInterval (d1, d2) g ctx =
datetimeIntervalHolidayHelper (d1, Just d2) g Nothing ctx
datetimeIntervalHoliday ::
(Datetime, Datetime) -> Grain -> Text -> Context -> TimeValue
datetimeIntervalHoliday (d1, d2) g h ctx =
datetimeIntervalHolidayHelper (d1, Just d2) g (Just h) ctx
datetimeIntervalHolidayHelper ::
(Datetime, Maybe Datetime) -> Grain -> Maybe Text -> Context -> TimeValue
datetimeIntervalHolidayHelper (d1, md2) g hol ctx = TimeValue tv [tv] hol
where
DucklingTime (Series.ZoneSeriesTime _ tzSeries) = referenceTime ctx
tv = timeValue tzSeries TimeObject {start = dt d1, end = d, grain = g}
d = case md2 of
Nothing -> Nothing
Just d2 -> Just $ dt d2
datetimeOpenInterval
:: IntervalDirection -> Datetime -> Grain -> Context -> TimeValue
datetimeOpenInterval dir d g ctx = TimeValue tv [tv] Nothing
where
DucklingTime (Series.ZoneSeriesTime _ tzSeries) = referenceTime ctx
tv = openInterval tzSeries dir TimeObject
{start = dt d, end = Nothing, grain = g}
check :: ToJSON a => (Context -> a) -> TestPredicate
check f context Resolved{rval = RVal _ v} = case toJSON v of
Object o -> deleteValues (toJSON (f context)) == deleteValues (Object o)
_ -> False
where
deleteValues :: Value -> Value
deleteValues (Object o) = Object $ H.delete "values" o
deleteValues _ = Object H.empty
examples :: ToJSON a => (Context -> a) -> [Text] -> [Example]
examples f = examplesCustom (check f)
|
4ee315b78ab38eae9a9c7a67f9b03eeee88e0d6a37f24c11b307e4aac697e8fe | samrushing/irken-compiler | t42.scm |
(include "lib/core.scm")
(define (make-int-generator n)
(make-generator
(lambda (consumer)
(let loop ((n n))
(consumer n)
(loop (+ n 1))))))
(let ((g (make-int-generator 42)))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
)
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/tests/t42.scm | scheme |
(include "lib/core.scm")
(define (make-int-generator n)
(make-generator
(lambda (consumer)
(let loop ((n n))
(consumer n)
(loop (+ n 1))))))
(let ((g (make-int-generator 42)))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
(printn (g))
)
| |
ec77255f539c13fafb45748b62e25633f3ede3fe7d4b4da561d573b228790866 | bazqux/bazqux-urweb | Merge.hs | | Утилиты слияния ключей
module Lib.Merge
( unionByWith
)
where
import Data.List
import Data.Ord
| Объединяет два списка по ключам с объединением одинаковых значений .
unionByWith key merge a b = go (s a) (s b)
where s = mergeSame . sortBy (comparing key)
mergeSame [] = []
mergeSame [x] = [x]
mergeSame (a:b:xs)
| key a == key b = mergeSame (merge a b : xs)
| otherwise = a : mergeSame (b:xs)
go a [] = a
go [] b = b
go (a:as) (b:bs) = case compare (key a) (key b) of
LT -> a : go as (b:bs)
GT -> b : go (a:as) bs
EQ -> go (merge a b : as) bs
| null | https://raw.githubusercontent.com/bazqux/bazqux-urweb/bf2d5a65b5b286348c131e91b6e57df9e8045c3f/crawler/Lib/Merge.hs | haskell | | Утилиты слияния ключей
module Lib.Merge
( unionByWith
)
where
import Data.List
import Data.Ord
| Объединяет два списка по ключам с объединением одинаковых значений .
unionByWith key merge a b = go (s a) (s b)
where s = mergeSame . sortBy (comparing key)
mergeSame [] = []
mergeSame [x] = [x]
mergeSame (a:b:xs)
| key a == key b = mergeSame (merge a b : xs)
| otherwise = a : mergeSame (b:xs)
go a [] = a
go [] b = b
go (a:as) (b:bs) = case compare (key a) (key b) of
LT -> a : go as (b:bs)
GT -> b : go (a:as) bs
EQ -> go (merge a b : as) bs
| |
a5a9d1a2d3938a81fb4805d6fbf718eb60f75ac85849fa3e956f190d6334cd93 | clojure-interop/aws-api | AWSResourceGroupsClient.clj | (ns com.amazonaws.services.resourcegroups.AWSResourceGroupsClient
"Client for accessing Resource Groups. All service calls made using this client are blocking, and will not return
until the service call completes.
AWS Resource Groups
AWS Resource Groups lets you organize AWS resources such as Amazon EC2 instances, Amazon Relational Database Service
databases, and Amazon S3 buckets into groups using criteria that you define as tags. A resource group is a collection
of resources that match the resource types specified in a query, and share one or more tags or portions of tags. You
can create a group of resources based on their roles in your cloud infrastructure, lifecycle stages, regions,
application layers, or virtually any criteria. Resource groups enable you to automate management tasks, such as those
in AWS Systems Manager Automation documents, on tag-related resources in AWS Systems Manager. Groups of tagged
resources also let you quickly view a custom console in AWS Systems Manager that shows AWS Config compliance and
other monitoring data about member resources.
To create a resource group, build a resource query, and specify tags that identify the criteria that members of the
group have in common. Tags are key-value pairs.
For more information about Resource Groups, see the AWS Resource Groups User Guide.
AWS Resource Groups uses a REST-compliant API that you can use to perform the following types of operations.
Create, Read, Update, and Delete (CRUD) operations on resource groups and resource query entities
Applying, editing, and removing tags from resource groups
Resolving resource group member ARNs so they can be returned as search results
Getting data about resources that are members of a group
Searching AWS resources based on a resource query"
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.resourcegroups AWSResourceGroupsClient]))
(defn *builder
"returns: `com.amazonaws.services.resourcegroups.AWSResourceGroupsClientBuilder`"
(^com.amazonaws.services.resourcegroups.AWSResourceGroupsClientBuilder []
(AWSResourceGroupsClient/builder )))
(defn get-group-query
"Returns the resource query associated with the specified resource group.
request - `com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest`
returns: Result of the GetGroupQuery operation returned by the service. - `com.amazonaws.services.resourcegroups.model.GetGroupQueryResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.GetGroupQueryResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest request]
(-> this (.getGroupQuery request))))
(defn delete-group
"Deletes a specified resource group. Deleting a resource group does not delete resources that are members of the
group; it only deletes the group structure.
request - `com.amazonaws.services.resourcegroups.model.DeleteGroupRequest`
returns: Result of the DeleteGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.DeleteGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.DeleteGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.DeleteGroupRequest request]
(-> this (.deleteGroup request))))
(defn update-group
"Updates an existing group with a new or changed description. You cannot update the name of a resource group.
request - `com.amazonaws.services.resourcegroups.model.UpdateGroupRequest`
returns: Result of the UpdateGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.UpdateGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.UpdateGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.UpdateGroupRequest request]
(-> this (.updateGroup request))))
(defn create-group
"Creates a group with a specified name, description, and resource query.
request - `com.amazonaws.services.resourcegroups.model.CreateGroupRequest`
returns: Result of the CreateGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.CreateGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.CreateGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.CreateGroupRequest request]
(-> this (.createGroup request))))
(defn list-group-resources
"Returns a list of ARNs of resources that are members of a specified resource group.
request - `com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest`
returns: Result of the ListGroupResources operation returned by the service. - `com.amazonaws.services.resourcegroups.model.ListGroupResourcesResult`
throws: com.amazonaws.services.resourcegroups.model.UnauthorizedException - The request has not been applied because it lacks valid authentication credentials for the target resource."
(^com.amazonaws.services.resourcegroups.model.ListGroupResourcesResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest request]
(-> this (.listGroupResources request))))
(defn tag
"Adds tags to a resource group with the specified ARN. Existing tags on a resource group are not changed if they
are not specified in the request parameters.
request - `com.amazonaws.services.resourcegroups.model.TagRequest`
returns: Result of the Tag operation returned by the service. - `com.amazonaws.services.resourcegroups.model.TagResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.TagResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.TagRequest request]
(-> this (.tag request))))
(defn update-group-query
"Updates the resource query of a group.
request - `com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest`
returns: Result of the UpdateGroupQuery operation returned by the service. - `com.amazonaws.services.resourcegroups.model.UpdateGroupQueryResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest request]
(-> this (.updateGroupQuery request))))
(defn untag
"Deletes specified tags from a specified resource.
request - `com.amazonaws.services.resourcegroups.model.UntagRequest`
returns: Result of the Untag operation returned by the service. - `com.amazonaws.services.resourcegroups.model.UntagResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.UntagResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.UntagRequest request]
(-> this (.untag request))))
(defn search-resources
"Returns a list of AWS resource identifiers that matches a specified query. The query uses the same format as a
resource query in a CreateGroup or UpdateGroupQuery operation.
request - `com.amazonaws.services.resourcegroups.model.SearchResourcesRequest`
returns: Result of the SearchResources operation returned by the service. - `com.amazonaws.services.resourcegroups.model.SearchResourcesResult`
throws: com.amazonaws.services.resourcegroups.model.UnauthorizedException - The request has not been applied because it lacks valid authentication credentials for the target resource."
(^com.amazonaws.services.resourcegroups.model.SearchResourcesResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.SearchResourcesRequest request]
(-> this (.searchResources request))))
(defn get-tags
"Returns a list of tags that are associated with a resource group, specified by an ARN.
request - `com.amazonaws.services.resourcegroups.model.GetTagsRequest`
returns: Result of the GetTags operation returned by the service. - `com.amazonaws.services.resourcegroups.model.GetTagsResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.GetTagsResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.GetTagsRequest request]
(-> this (.getTags request))))
(defn list-groups
"Returns a list of existing resource groups in your account.
request - `com.amazonaws.services.resourcegroups.model.ListGroupsRequest`
returns: Result of the ListGroups operation returned by the service. - `com.amazonaws.services.resourcegroups.model.ListGroupsResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.ListGroupsResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.ListGroupsRequest request]
(-> this (.listGroups request))))
(defn get-cached-response-metadata
"Returns additional metadata for a previously executed successful, request, typically used for debugging issues
where a service isn't acting as expected. This data isn't considered part of the result data returned by an
operation, so it's available through this separate, diagnostic interface.
Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
information for an executed request, you should use this method to retrieve it as soon as possible after
executing the request.
request - The originally executed request - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AWSResourceGroupsClient this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn get-group
"Returns information about a specified resource group.
request - `com.amazonaws.services.resourcegroups.model.GetGroupRequest`
returns: Result of the GetGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.GetGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.GetGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.GetGroupRequest request]
(-> this (.getGroup request))))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.resourcegroups/src/com/amazonaws/services/resourcegroups/AWSResourceGroupsClient.clj | clojure | it only deletes the group structure. | (ns com.amazonaws.services.resourcegroups.AWSResourceGroupsClient
"Client for accessing Resource Groups. All service calls made using this client are blocking, and will not return
until the service call completes.
AWS Resource Groups
AWS Resource Groups lets you organize AWS resources such as Amazon EC2 instances, Amazon Relational Database Service
databases, and Amazon S3 buckets into groups using criteria that you define as tags. A resource group is a collection
of resources that match the resource types specified in a query, and share one or more tags or portions of tags. You
can create a group of resources based on their roles in your cloud infrastructure, lifecycle stages, regions,
application layers, or virtually any criteria. Resource groups enable you to automate management tasks, such as those
in AWS Systems Manager Automation documents, on tag-related resources in AWS Systems Manager. Groups of tagged
resources also let you quickly view a custom console in AWS Systems Manager that shows AWS Config compliance and
other monitoring data about member resources.
To create a resource group, build a resource query, and specify tags that identify the criteria that members of the
group have in common. Tags are key-value pairs.
For more information about Resource Groups, see the AWS Resource Groups User Guide.
AWS Resource Groups uses a REST-compliant API that you can use to perform the following types of operations.
Create, Read, Update, and Delete (CRUD) operations on resource groups and resource query entities
Applying, editing, and removing tags from resource groups
Resolving resource group member ARNs so they can be returned as search results
Getting data about resources that are members of a group
Searching AWS resources based on a resource query"
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.resourcegroups AWSResourceGroupsClient]))
(defn *builder
"returns: `com.amazonaws.services.resourcegroups.AWSResourceGroupsClientBuilder`"
(^com.amazonaws.services.resourcegroups.AWSResourceGroupsClientBuilder []
(AWSResourceGroupsClient/builder )))
(defn get-group-query
"Returns the resource query associated with the specified resource group.
request - `com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest`
returns: Result of the GetGroupQuery operation returned by the service. - `com.amazonaws.services.resourcegroups.model.GetGroupQueryResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.GetGroupQueryResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest request]
(-> this (.getGroupQuery request))))
(defn delete-group
"Deletes a specified resource group. Deleting a resource group does not delete resources that are members of the
request - `com.amazonaws.services.resourcegroups.model.DeleteGroupRequest`
returns: Result of the DeleteGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.DeleteGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.DeleteGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.DeleteGroupRequest request]
(-> this (.deleteGroup request))))
(defn update-group
"Updates an existing group with a new or changed description. You cannot update the name of a resource group.
request - `com.amazonaws.services.resourcegroups.model.UpdateGroupRequest`
returns: Result of the UpdateGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.UpdateGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.UpdateGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.UpdateGroupRequest request]
(-> this (.updateGroup request))))
(defn create-group
"Creates a group with a specified name, description, and resource query.
request - `com.amazonaws.services.resourcegroups.model.CreateGroupRequest`
returns: Result of the CreateGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.CreateGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.CreateGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.CreateGroupRequest request]
(-> this (.createGroup request))))
(defn list-group-resources
"Returns a list of ARNs of resources that are members of a specified resource group.
request - `com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest`
returns: Result of the ListGroupResources operation returned by the service. - `com.amazonaws.services.resourcegroups.model.ListGroupResourcesResult`
throws: com.amazonaws.services.resourcegroups.model.UnauthorizedException - The request has not been applied because it lacks valid authentication credentials for the target resource."
(^com.amazonaws.services.resourcegroups.model.ListGroupResourcesResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest request]
(-> this (.listGroupResources request))))
(defn tag
"Adds tags to a resource group with the specified ARN. Existing tags on a resource group are not changed if they
are not specified in the request parameters.
request - `com.amazonaws.services.resourcegroups.model.TagRequest`
returns: Result of the Tag operation returned by the service. - `com.amazonaws.services.resourcegroups.model.TagResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.TagResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.TagRequest request]
(-> this (.tag request))))
(defn update-group-query
"Updates the resource query of a group.
request - `com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest`
returns: Result of the UpdateGroupQuery operation returned by the service. - `com.amazonaws.services.resourcegroups.model.UpdateGroupQueryResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest request]
(-> this (.updateGroupQuery request))))
(defn untag
"Deletes specified tags from a specified resource.
request - `com.amazonaws.services.resourcegroups.model.UntagRequest`
returns: Result of the Untag operation returned by the service. - `com.amazonaws.services.resourcegroups.model.UntagResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.UntagResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.UntagRequest request]
(-> this (.untag request))))
(defn search-resources
"Returns a list of AWS resource identifiers that matches a specified query. The query uses the same format as a
resource query in a CreateGroup or UpdateGroupQuery operation.
request - `com.amazonaws.services.resourcegroups.model.SearchResourcesRequest`
returns: Result of the SearchResources operation returned by the service. - `com.amazonaws.services.resourcegroups.model.SearchResourcesResult`
throws: com.amazonaws.services.resourcegroups.model.UnauthorizedException - The request has not been applied because it lacks valid authentication credentials for the target resource."
(^com.amazonaws.services.resourcegroups.model.SearchResourcesResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.SearchResourcesRequest request]
(-> this (.searchResources request))))
(defn get-tags
"Returns a list of tags that are associated with a resource group, specified by an ARN.
request - `com.amazonaws.services.resourcegroups.model.GetTagsRequest`
returns: Result of the GetTags operation returned by the service. - `com.amazonaws.services.resourcegroups.model.GetTagsResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.GetTagsResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.GetTagsRequest request]
(-> this (.getTags request))))
(defn list-groups
"Returns a list of existing resource groups in your account.
request - `com.amazonaws.services.resourcegroups.model.ListGroupsRequest`
returns: Result of the ListGroups operation returned by the service. - `com.amazonaws.services.resourcegroups.model.ListGroupsResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.ListGroupsResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.ListGroupsRequest request]
(-> this (.listGroups request))))
(defn get-cached-response-metadata
"Returns additional metadata for a previously executed successful, request, typically used for debugging issues
where a service isn't acting as expected. This data isn't considered part of the result data returned by an
operation, so it's available through this separate, diagnostic interface.
Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
information for an executed request, you should use this method to retrieve it as soon as possible after
executing the request.
request - The originally executed request - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AWSResourceGroupsClient this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn get-group
"Returns information about a specified resource group.
request - `com.amazonaws.services.resourcegroups.model.GetGroupRequest`
returns: Result of the GetGroup operation returned by the service. - `com.amazonaws.services.resourcegroups.model.GetGroupResult`
throws: com.amazonaws.services.resourcegroups.model.BadRequestException - The request does not comply with validation rules that are defined for the request parameters."
(^com.amazonaws.services.resourcegroups.model.GetGroupResult [^AWSResourceGroupsClient this ^com.amazonaws.services.resourcegroups.model.GetGroupRequest request]
(-> this (.getGroup request))))
|
f4469d4964530faf0ee91fcd2606373e60dc8b150bd8f359dcc94859f20d0d8a | cljfx/cljfx | number_axis.clj | (ns cljfx.fx.number-axis
"Part of a public API"
(:require [cljfx.composite :as composite]
[cljfx.lifecycle :as lifecycle]
[cljfx.fx.value-axis :as fx.value-axis])
(:import [javafx.scene.chart NumberAxis]))
(set! *warn-on-reflection* true)
(def props
(merge
fx.value-axis/props
(composite/props NumberAxis
:force-zero-in-range [:setter lifecycle/scalar :default true]
:tick-unit [:setter lifecycle/scalar :coerce double :default 5.0])))
(def lifecycle
(lifecycle/annotate
(composite/describe NumberAxis
:ctor []
:props props)
:number-axis))
| null | https://raw.githubusercontent.com/cljfx/cljfx/543f7409290051e9444771d2cd86dadeb8cdce33/src/cljfx/fx/number_axis.clj | clojure | (ns cljfx.fx.number-axis
"Part of a public API"
(:require [cljfx.composite :as composite]
[cljfx.lifecycle :as lifecycle]
[cljfx.fx.value-axis :as fx.value-axis])
(:import [javafx.scene.chart NumberAxis]))
(set! *warn-on-reflection* true)
(def props
(merge
fx.value-axis/props
(composite/props NumberAxis
:force-zero-in-range [:setter lifecycle/scalar :default true]
:tick-unit [:setter lifecycle/scalar :coerce double :default 5.0])))
(def lifecycle
(lifecycle/annotate
(composite/describe NumberAxis
:ctor []
:props props)
:number-axis))
| |
7caa624823a7b9533c03edc0773c72b85c50d4356e852b36131f8f1840b9305e | gildor478/ocaml-fileutils | FilePath.ml | (******************************************************************************)
(* ocaml-fileutils: files and filenames common operations *)
(* *)
Copyright ( C ) 2003 - 2014 ,
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file *)
(* COPYING for more details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
open FilePath_type
exception BaseFilenameRelative of filename
exception UnrecognizedOS of string
exception EmptyFilename
exception NoExtension of filename
exception InvalidFilename of filename
module type OS_SPECIFICATION =
sig
val dir_writer: (filename_part list) -> filename
val dir_reader: filename -> (filename_part list)
val path_writer: (filename list) -> string
val path_reader: string -> (filename list)
val fast_concat: filename -> filename -> filename
val fast_basename: filename -> filename
val fast_dirname: filename -> filename
val fast_is_relative: filename -> bool
val fast_is_current: filename -> bool
val fast_is_parent: filename -> bool
end
module type PATH_SPECIFICATION =
sig
type filename
type extension
val string_of_filename: filename -> string
val filename_of_string: string -> filename
val extension_of_string: string -> extension
val string_of_extension: extension -> string
val make_filename: string list -> filename
val is_subdir: filename -> filename -> bool
val is_updir: filename -> filename -> bool
val compare: filename -> filename -> int
val basename: filename -> filename
val dirname: filename -> filename
val concat: filename -> filename -> filename
val reduce: ?no_symlink:bool -> filename -> filename
val make_absolute: filename -> filename -> filename
val make_relative: filename -> filename -> filename
val reparent: filename -> filename -> filename -> filename
val identity: filename -> filename
val is_valid: filename -> bool
val is_relative: filename -> bool
val is_current: filename -> bool
val is_parent: filename -> bool
val chop_extension: filename -> filename
val get_extension: filename -> extension
val check_extension: filename -> extension -> bool
val add_extension: filename -> extension -> filename
val replace_extension: filename -> extension -> filename
val string_of_path: filename list -> string
val path_of_string: string -> filename list
val current_dir: filename
val parent_dir: filename
end
module type PATH_STRING_SPECIFICATION =
sig
module Abstract: PATH_SPECIFICATION
include PATH_SPECIFICATION with
type filename = string and
type extension = string
end
Convert an to PATH_SPECIFICATION
module GenericPath =
functor (OsOperation: OS_SPECIFICATION) ->
struct
type filename = FilePath_type.filename_part list
type extension = FilePath_type.extension
(* Filename_from_string *)
let filename_of_string str =
try
OsOperation.dir_reader str
with Parsing.Parse_error ->
raise (InvalidFilename str)
(* String_from_filename *)
let string_of_filename path =
OsOperation.dir_writer path
(* Reduce *)
let reduce ?(no_symlink=false) path =
(* TODO: not tail recursive ! *)
let rec reduce_aux lst =
match lst with
| ParentDir :: tl when no_symlink ->
begin
match reduce_aux tl with
| Root s :: tl ->
Root s :: tl
| ParentDir :: tl ->
ParentDir :: ParentDir :: tl
| [] ->
ParentDir :: tl
| _ :: tl ->
tl
end
| ParentDir :: tl ->
ParentDir :: (reduce_aux tl)
| CurrentDir _ :: tl
| Component "" :: tl ->
(reduce_aux tl)
| Component s :: tl ->
Component s :: (reduce_aux tl)
| Root s :: tl ->
Root s :: (reduce_aux tl)
| [] ->
[]
in
let rev_path = List.rev path in
match reduce_aux rev_path with
| [] when no_symlink = false->
(* assert
* ( List.for_all ( function | Component ""
* | CurrentDir _ -> true | _ -> false ) rev_path ) *)
(try
(* use last CurrentDir _ *)
[ List.find ( function | CurrentDir _ -> true | _ -> false ) rev_path ]
with
| Not_found -> [] ) (* Only Component "" *)
|l -> List.rev l
Compare , subdir , updir
type filename_relation = SubDir | UpDir | Equal | NoRelation of int
let relation_of_filename path1 path2 =
let rec relation_of_filename_aux path1 path2 =
match (path1, path2) with
([], []) ->
Equal
| (hd1 :: tl1, hd2 :: tl2) ->
if hd1 = hd2 then
relation_of_filename_aux tl1 tl2
else
begin
NoRelation (String.compare
(string_of_filename [hd1])
(string_of_filename [hd2])
)
end
| (_, []) -> SubDir
| ([], _) -> UpDir
in
relation_of_filename_aux path1 path2
let is_subdir path1 path2 =
match relation_of_filename path1 path2 with
SubDir ->
true
| _ ->
false
let is_updir path1 path2 =
match relation_of_filename path1 path2 with
UpDir ->
true
| _ ->
false
let compare path1 path2 =
match relation_of_filename path1 path2 with
SubDir -> -1
| UpDir -> 1
| Equal -> 0
| NoRelation i -> i
Concat
let concat lst_path1 lst_path2 =
reduce
(match lst_path2 with
| CurrentDir Short :: tl_path2 ->
lst_path1 @ tl_path2
| _ ->
lst_path1 @ lst_path2)
(* Is_relative *)
let is_relative lst_path =
match lst_path with
(Root _) :: _ -> false
| _ -> true
let is_valid _ =
(* As we are manipulating abstract filename,
and that it has been parsed, we are
sure that all is correct *)
true
let is_current path =
match path with
[ (CurrentDir _) ] -> true
| _ -> false
let is_parent path =
match path with
[ ParentDir ] -> true
| _ -> false
Basename
let basename path =
match List.rev path with
| hd :: _ -> [hd]
| [] -> raise EmptyFilename
(* Dirname *)
let dirname path =
match List.rev path with
| _ :: tl -> List.rev tl
| [] -> raise EmptyFilename
(* Extension manipulation *)
let wrap_extension f path =
match basename path with
| [Component fn] ->
f fn
| _ ->
raise (NoExtension (string_of_filename path))
let check_extension path ext =
wrap_extension
(fun fn -> ExtensionPath.check fn ext)
path
let get_extension path =
wrap_extension
(fun fn -> ExtensionPath.get fn)
path
let chop_extension path =
wrap_extension
(fun fn ->
concat
(dirname path)
[Component (ExtensionPath.chop fn)])
path
let add_extension path ext =
wrap_extension
(fun fn ->
concat
(dirname path)
[Component (ExtensionPath.add fn ext)])
path
let replace_extension path ext =
wrap_extension
(fun fn ->
concat
(dirname path)
[Component (ExtensionPath.replace fn ext)])
path
let extension_of_string x = x
let string_of_extension x = x
Make_asbolute
let make_absolute path_base path_path =
reduce
(if is_relative path_base then
raise (BaseFilenameRelative (string_of_filename path_base))
else if is_relative path_path then
path_base @ path_path
else
path_path)
(* Make_relative *)
let make_relative path_base path_path =
let rec make_relative_aux lst_base lst_path =
match (lst_base, lst_path) with
x :: tl_base, a :: tl_path when x = a ->
make_relative_aux tl_base tl_path
| _, _ ->
let back_to_base = List.rev_map
(fun _ -> ParentDir)
lst_base
in
back_to_base @ lst_path
in
reduce
(if is_relative path_base then
raise (BaseFilenameRelative (string_of_filename path_base))
else if is_relative path_path then
path_path
else
make_relative_aux path_base path_path)
(* Make_filename *)
let make_filename lst_path =
reduce (List.flatten (List.map filename_of_string lst_path))
(* Reparent *)
let reparent path_src path_dst path =
let path_relative =
make_relative path_src path
in
make_absolute path_dst path_relative
(* Identity *)
let identity path = path
(* Manipulate path like variable *)
let string_of_path lst =
OsOperation.path_writer (List.map string_of_filename lst)
let path_of_string str =
List.map
filename_of_string
(OsOperation.path_reader str)
Generic filename component
let current_dir = [ CurrentDir Long ]
let parent_dir = [ ParentDir ]
end
Convert an OS_SPECIFICATION to PATH_STRING_SPECIFICATION
module GenericStringPath =
functor (OsOperation: OS_SPECIFICATION) ->
struct
module Abstract = GenericPath(OsOperation)
type filename = string
type extension = string
let string_of_filename path =
path
let filename_of_string path =
path
let string_of_extension ext =
ext
let extension_of_string str =
str
let f2s = Abstract.string_of_filename
let s2f = Abstract.filename_of_string
let e2s = Abstract.string_of_extension
let s2e = Abstract.extension_of_string
let is_subdir path1 path2 =
Abstract.is_subdir (s2f path1) (s2f path2)
let is_updir path1 path2 =
Abstract.is_updir (s2f path1) (s2f path2)
let compare path1 path2 =
Abstract.compare (s2f path1) (s2f path2)
let basename path =
try
OsOperation.fast_basename path
with CommonPath.CannotHandleFast ->
f2s (Abstract.basename (s2f path))
let dirname path =
try
OsOperation.fast_dirname path
with CommonPath.CannotHandleFast ->
f2s (Abstract.dirname (s2f path))
let concat path1 path2 =
try
OsOperation.fast_concat path1 path2
with CommonPath.CannotHandleFast ->
f2s (Abstract.concat (s2f path1) (s2f path2))
let make_filename path_lst =
f2s (Abstract.make_filename path_lst)
let reduce ?no_symlink path =
f2s (Abstract.reduce ?no_symlink (s2f path))
let make_absolute base_path path =
f2s (Abstract.make_absolute (s2f base_path) (s2f path))
let make_relative base_path path =
f2s (Abstract.make_relative (s2f base_path) (s2f path))
let reparent path_src path_dst path =
f2s (Abstract.reparent (s2f path_src) (s2f path_dst) (s2f path))
let identity path =
f2s (Abstract.identity (s2f path))
let is_valid path =
try
Abstract.is_valid (s2f path)
with InvalidFilename _ ->
false
let is_relative path =
try
OsOperation.fast_is_relative path
with CommonPath.CannotHandleFast ->
Abstract.is_relative (s2f path)
let is_current path =
try
OsOperation.fast_is_current path
with CommonPath.CannotHandleFast ->
Abstract.is_current (s2f path)
let is_parent path =
try
OsOperation.fast_is_parent path
with CommonPath.CannotHandleFast ->
Abstract.is_parent (s2f path)
let wrap_extension f path =
let bfn =
OsOperation.fast_basename path
in
if OsOperation.fast_is_parent bfn ||
OsOperation.fast_is_current bfn ||
not (OsOperation.fast_is_relative bfn) then
raise (NoExtension path)
else
f bfn
let chop_extension path =
try
wrap_extension
(fun fn ->
OsOperation.fast_concat
(OsOperation.fast_dirname path)
(ExtensionPath.chop fn))
path
with CommonPath.CannotHandleFast ->
f2s (Abstract.chop_extension (s2f path))
let get_extension path =
try
wrap_extension
(fun fn -> ExtensionPath.get fn)
path
with CommonPath.CannotHandleFast ->
e2s (Abstract.get_extension (s2f path))
let check_extension path ext =
try
wrap_extension
(fun fn -> ExtensionPath.check fn ext)
path
with CommonPath.CannotHandleFast ->
Abstract.check_extension (s2f path) (s2e ext)
let add_extension path ext =
try
wrap_extension
(fun fn ->
OsOperation.fast_concat
(OsOperation.fast_dirname path)
(ExtensionPath.add fn ext))
path
with CommonPath.CannotHandleFast ->
f2s (Abstract.add_extension (s2f path) (s2e ext))
let replace_extension path ext =
try
wrap_extension
(fun fn ->
OsOperation.fast_concat
(OsOperation.fast_dirname path)
(ExtensionPath.replace fn ext))
path
with CommonPath.CannotHandleFast ->
f2s (Abstract.replace_extension (s2f path) (s2e ext))
let string_of_path path_lst =
Abstract.string_of_path (List.map s2f path_lst)
let path_of_string str =
List.map f2s (Abstract.path_of_string str)
let current_dir =
f2s (Abstract.current_dir)
let parent_dir =
f2s (Abstract.parent_dir)
end
module DefaultPath = GenericStringPath(struct
let os_depend unix win32 =
match Sys.os_type with
"Unix"
| "Cygwin" -> unix
| "Win32" -> win32
| s -> raise (UnrecognizedOS s)
let dir_writer =
os_depend
UnixPath.dir_writer
Win32Path.dir_writer
let dir_reader =
os_depend
UnixPath.dir_reader
Win32Path.dir_reader
let path_writer =
os_depend
UnixPath.path_writer
Win32Path.path_writer
let path_reader =
os_depend
UnixPath.path_reader
Win32Path.path_reader
let fast_concat =
os_depend
UnixPath.fast_concat
Win32Path.fast_concat
let fast_basename =
os_depend
UnixPath.fast_basename
Win32Path.fast_basename
let fast_dirname =
os_depend
UnixPath.fast_dirname
Win32Path.fast_dirname
let fast_is_relative =
os_depend
UnixPath.fast_is_relative
Win32Path.fast_is_relative
let fast_is_current =
os_depend
UnixPath.fast_is_current
Win32Path.fast_is_current
let fast_is_parent =
os_depend
UnixPath.fast_is_parent
Win32Path.fast_is_parent
end)
module UnixPath = GenericStringPath(UnixPath)
module Win32Path = GenericStringPath(Win32Path)
module CygwinPath = UnixPath
include DefaultPath
| null | https://raw.githubusercontent.com/gildor478/ocaml-fileutils/9ad8d2ee342c551391f2a9873de01982d24b36d5/src/lib/fileutils/FilePath.ml | ocaml | ****************************************************************************
ocaml-fileutils: files and filenames common operations
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
COPYING for more details.
****************************************************************************
Filename_from_string
String_from_filename
Reduce
TODO: not tail recursive !
assert
* ( List.for_all ( function | Component ""
* | CurrentDir _ -> true | _ -> false ) rev_path )
use last CurrentDir _
Only Component ""
Is_relative
As we are manipulating abstract filename,
and that it has been parsed, we are
sure that all is correct
Dirname
Extension manipulation
Make_relative
Make_filename
Reparent
Identity
Manipulate path like variable | Copyright ( C ) 2003 - 2014 ,
the Free Software Foundation ; either version 2.1 of the License , or ( at
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
open FilePath_type
exception BaseFilenameRelative of filename
exception UnrecognizedOS of string
exception EmptyFilename
exception NoExtension of filename
exception InvalidFilename of filename
module type OS_SPECIFICATION =
sig
val dir_writer: (filename_part list) -> filename
val dir_reader: filename -> (filename_part list)
val path_writer: (filename list) -> string
val path_reader: string -> (filename list)
val fast_concat: filename -> filename -> filename
val fast_basename: filename -> filename
val fast_dirname: filename -> filename
val fast_is_relative: filename -> bool
val fast_is_current: filename -> bool
val fast_is_parent: filename -> bool
end
module type PATH_SPECIFICATION =
sig
type filename
type extension
val string_of_filename: filename -> string
val filename_of_string: string -> filename
val extension_of_string: string -> extension
val string_of_extension: extension -> string
val make_filename: string list -> filename
val is_subdir: filename -> filename -> bool
val is_updir: filename -> filename -> bool
val compare: filename -> filename -> int
val basename: filename -> filename
val dirname: filename -> filename
val concat: filename -> filename -> filename
val reduce: ?no_symlink:bool -> filename -> filename
val make_absolute: filename -> filename -> filename
val make_relative: filename -> filename -> filename
val reparent: filename -> filename -> filename -> filename
val identity: filename -> filename
val is_valid: filename -> bool
val is_relative: filename -> bool
val is_current: filename -> bool
val is_parent: filename -> bool
val chop_extension: filename -> filename
val get_extension: filename -> extension
val check_extension: filename -> extension -> bool
val add_extension: filename -> extension -> filename
val replace_extension: filename -> extension -> filename
val string_of_path: filename list -> string
val path_of_string: string -> filename list
val current_dir: filename
val parent_dir: filename
end
module type PATH_STRING_SPECIFICATION =
sig
module Abstract: PATH_SPECIFICATION
include PATH_SPECIFICATION with
type filename = string and
type extension = string
end
Convert an to PATH_SPECIFICATION
module GenericPath =
functor (OsOperation: OS_SPECIFICATION) ->
struct
type filename = FilePath_type.filename_part list
type extension = FilePath_type.extension
let filename_of_string str =
try
OsOperation.dir_reader str
with Parsing.Parse_error ->
raise (InvalidFilename str)
let string_of_filename path =
OsOperation.dir_writer path
let reduce ?(no_symlink=false) path =
let rec reduce_aux lst =
match lst with
| ParentDir :: tl when no_symlink ->
begin
match reduce_aux tl with
| Root s :: tl ->
Root s :: tl
| ParentDir :: tl ->
ParentDir :: ParentDir :: tl
| [] ->
ParentDir :: tl
| _ :: tl ->
tl
end
| ParentDir :: tl ->
ParentDir :: (reduce_aux tl)
| CurrentDir _ :: tl
| Component "" :: tl ->
(reduce_aux tl)
| Component s :: tl ->
Component s :: (reduce_aux tl)
| Root s :: tl ->
Root s :: (reduce_aux tl)
| [] ->
[]
in
let rev_path = List.rev path in
match reduce_aux rev_path with
| [] when no_symlink = false->
(try
[ List.find ( function | CurrentDir _ -> true | _ -> false ) rev_path ]
with
|l -> List.rev l
Compare , subdir , updir
type filename_relation = SubDir | UpDir | Equal | NoRelation of int
let relation_of_filename path1 path2 =
let rec relation_of_filename_aux path1 path2 =
match (path1, path2) with
([], []) ->
Equal
| (hd1 :: tl1, hd2 :: tl2) ->
if hd1 = hd2 then
relation_of_filename_aux tl1 tl2
else
begin
NoRelation (String.compare
(string_of_filename [hd1])
(string_of_filename [hd2])
)
end
| (_, []) -> SubDir
| ([], _) -> UpDir
in
relation_of_filename_aux path1 path2
let is_subdir path1 path2 =
match relation_of_filename path1 path2 with
SubDir ->
true
| _ ->
false
let is_updir path1 path2 =
match relation_of_filename path1 path2 with
UpDir ->
true
| _ ->
false
let compare path1 path2 =
match relation_of_filename path1 path2 with
SubDir -> -1
| UpDir -> 1
| Equal -> 0
| NoRelation i -> i
Concat
let concat lst_path1 lst_path2 =
reduce
(match lst_path2 with
| CurrentDir Short :: tl_path2 ->
lst_path1 @ tl_path2
| _ ->
lst_path1 @ lst_path2)
let is_relative lst_path =
match lst_path with
(Root _) :: _ -> false
| _ -> true
let is_valid _ =
true
let is_current path =
match path with
[ (CurrentDir _) ] -> true
| _ -> false
let is_parent path =
match path with
[ ParentDir ] -> true
| _ -> false
Basename
let basename path =
match List.rev path with
| hd :: _ -> [hd]
| [] -> raise EmptyFilename
let dirname path =
match List.rev path with
| _ :: tl -> List.rev tl
| [] -> raise EmptyFilename
let wrap_extension f path =
match basename path with
| [Component fn] ->
f fn
| _ ->
raise (NoExtension (string_of_filename path))
let check_extension path ext =
wrap_extension
(fun fn -> ExtensionPath.check fn ext)
path
let get_extension path =
wrap_extension
(fun fn -> ExtensionPath.get fn)
path
let chop_extension path =
wrap_extension
(fun fn ->
concat
(dirname path)
[Component (ExtensionPath.chop fn)])
path
let add_extension path ext =
wrap_extension
(fun fn ->
concat
(dirname path)
[Component (ExtensionPath.add fn ext)])
path
let replace_extension path ext =
wrap_extension
(fun fn ->
concat
(dirname path)
[Component (ExtensionPath.replace fn ext)])
path
let extension_of_string x = x
let string_of_extension x = x
Make_asbolute
let make_absolute path_base path_path =
reduce
(if is_relative path_base then
raise (BaseFilenameRelative (string_of_filename path_base))
else if is_relative path_path then
path_base @ path_path
else
path_path)
let make_relative path_base path_path =
let rec make_relative_aux lst_base lst_path =
match (lst_base, lst_path) with
x :: tl_base, a :: tl_path when x = a ->
make_relative_aux tl_base tl_path
| _, _ ->
let back_to_base = List.rev_map
(fun _ -> ParentDir)
lst_base
in
back_to_base @ lst_path
in
reduce
(if is_relative path_base then
raise (BaseFilenameRelative (string_of_filename path_base))
else if is_relative path_path then
path_path
else
make_relative_aux path_base path_path)
let make_filename lst_path =
reduce (List.flatten (List.map filename_of_string lst_path))
let reparent path_src path_dst path =
let path_relative =
make_relative path_src path
in
make_absolute path_dst path_relative
let identity path = path
let string_of_path lst =
OsOperation.path_writer (List.map string_of_filename lst)
let path_of_string str =
List.map
filename_of_string
(OsOperation.path_reader str)
Generic filename component
let current_dir = [ CurrentDir Long ]
let parent_dir = [ ParentDir ]
end
Convert an OS_SPECIFICATION to PATH_STRING_SPECIFICATION
module GenericStringPath =
functor (OsOperation: OS_SPECIFICATION) ->
struct
module Abstract = GenericPath(OsOperation)
type filename = string
type extension = string
let string_of_filename path =
path
let filename_of_string path =
path
let string_of_extension ext =
ext
let extension_of_string str =
str
let f2s = Abstract.string_of_filename
let s2f = Abstract.filename_of_string
let e2s = Abstract.string_of_extension
let s2e = Abstract.extension_of_string
let is_subdir path1 path2 =
Abstract.is_subdir (s2f path1) (s2f path2)
let is_updir path1 path2 =
Abstract.is_updir (s2f path1) (s2f path2)
let compare path1 path2 =
Abstract.compare (s2f path1) (s2f path2)
let basename path =
try
OsOperation.fast_basename path
with CommonPath.CannotHandleFast ->
f2s (Abstract.basename (s2f path))
let dirname path =
try
OsOperation.fast_dirname path
with CommonPath.CannotHandleFast ->
f2s (Abstract.dirname (s2f path))
let concat path1 path2 =
try
OsOperation.fast_concat path1 path2
with CommonPath.CannotHandleFast ->
f2s (Abstract.concat (s2f path1) (s2f path2))
let make_filename path_lst =
f2s (Abstract.make_filename path_lst)
let reduce ?no_symlink path =
f2s (Abstract.reduce ?no_symlink (s2f path))
let make_absolute base_path path =
f2s (Abstract.make_absolute (s2f base_path) (s2f path))
let make_relative base_path path =
f2s (Abstract.make_relative (s2f base_path) (s2f path))
let reparent path_src path_dst path =
f2s (Abstract.reparent (s2f path_src) (s2f path_dst) (s2f path))
let identity path =
f2s (Abstract.identity (s2f path))
let is_valid path =
try
Abstract.is_valid (s2f path)
with InvalidFilename _ ->
false
let is_relative path =
try
OsOperation.fast_is_relative path
with CommonPath.CannotHandleFast ->
Abstract.is_relative (s2f path)
let is_current path =
try
OsOperation.fast_is_current path
with CommonPath.CannotHandleFast ->
Abstract.is_current (s2f path)
let is_parent path =
try
OsOperation.fast_is_parent path
with CommonPath.CannotHandleFast ->
Abstract.is_parent (s2f path)
let wrap_extension f path =
let bfn =
OsOperation.fast_basename path
in
if OsOperation.fast_is_parent bfn ||
OsOperation.fast_is_current bfn ||
not (OsOperation.fast_is_relative bfn) then
raise (NoExtension path)
else
f bfn
let chop_extension path =
try
wrap_extension
(fun fn ->
OsOperation.fast_concat
(OsOperation.fast_dirname path)
(ExtensionPath.chop fn))
path
with CommonPath.CannotHandleFast ->
f2s (Abstract.chop_extension (s2f path))
let get_extension path =
try
wrap_extension
(fun fn -> ExtensionPath.get fn)
path
with CommonPath.CannotHandleFast ->
e2s (Abstract.get_extension (s2f path))
let check_extension path ext =
try
wrap_extension
(fun fn -> ExtensionPath.check fn ext)
path
with CommonPath.CannotHandleFast ->
Abstract.check_extension (s2f path) (s2e ext)
let add_extension path ext =
try
wrap_extension
(fun fn ->
OsOperation.fast_concat
(OsOperation.fast_dirname path)
(ExtensionPath.add fn ext))
path
with CommonPath.CannotHandleFast ->
f2s (Abstract.add_extension (s2f path) (s2e ext))
let replace_extension path ext =
try
wrap_extension
(fun fn ->
OsOperation.fast_concat
(OsOperation.fast_dirname path)
(ExtensionPath.replace fn ext))
path
with CommonPath.CannotHandleFast ->
f2s (Abstract.replace_extension (s2f path) (s2e ext))
let string_of_path path_lst =
Abstract.string_of_path (List.map s2f path_lst)
let path_of_string str =
List.map f2s (Abstract.path_of_string str)
let current_dir =
f2s (Abstract.current_dir)
let parent_dir =
f2s (Abstract.parent_dir)
end
module DefaultPath = GenericStringPath(struct
let os_depend unix win32 =
match Sys.os_type with
"Unix"
| "Cygwin" -> unix
| "Win32" -> win32
| s -> raise (UnrecognizedOS s)
let dir_writer =
os_depend
UnixPath.dir_writer
Win32Path.dir_writer
let dir_reader =
os_depend
UnixPath.dir_reader
Win32Path.dir_reader
let path_writer =
os_depend
UnixPath.path_writer
Win32Path.path_writer
let path_reader =
os_depend
UnixPath.path_reader
Win32Path.path_reader
let fast_concat =
os_depend
UnixPath.fast_concat
Win32Path.fast_concat
let fast_basename =
os_depend
UnixPath.fast_basename
Win32Path.fast_basename
let fast_dirname =
os_depend
UnixPath.fast_dirname
Win32Path.fast_dirname
let fast_is_relative =
os_depend
UnixPath.fast_is_relative
Win32Path.fast_is_relative
let fast_is_current =
os_depend
UnixPath.fast_is_current
Win32Path.fast_is_current
let fast_is_parent =
os_depend
UnixPath.fast_is_parent
Win32Path.fast_is_parent
end)
module UnixPath = GenericStringPath(UnixPath)
module Win32Path = GenericStringPath(Win32Path)
module CygwinPath = UnixPath
include DefaultPath
|
31d103554673c524fe78753154464ab4b1bcd9947bb03943c43fb379492f1aa7 | dongcarl/guix | jose.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2018 < >
Copyright © 2021 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages jose)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system meson)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages web)
#:use-module (gnu packages compression)
#:use-module (gnu packages tls))
(define-public jose
(package
(name "jose")
(version "11")
(source (origin
(method url-fetch)
(uri
(string-append "/"
"download/v" version "/jose-" version ".tar.xz"))
(sha256
(base32
"09c2kn9cjqkgx4g0rf6c2k3ng9970r468c9z7067j8kyf7ksywp2"))))
(build-system meson-build-system)
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("jansson" ,jansson)
("zlib" ,zlib)
("libcrypto" ,openssl)))
(home-page "")
(synopsis "Object Signing and Encryption")
(description "C-language implementation of Javascript Object Signing and
Encryption")
(license license:asl2.0)))
| null | https://raw.githubusercontent.com/dongcarl/guix/d2b30db788f1743f9f8738cb1de977b77748567f/gnu/packages/jose.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
| Copyright © 2018 < >
Copyright © 2021 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages jose)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system meson)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages web)
#:use-module (gnu packages compression)
#:use-module (gnu packages tls))
(define-public jose
(package
(name "jose")
(version "11")
(source (origin
(method url-fetch)
(uri
(string-append "/"
"download/v" version "/jose-" version ".tar.xz"))
(sha256
(base32
"09c2kn9cjqkgx4g0rf6c2k3ng9970r468c9z7067j8kyf7ksywp2"))))
(build-system meson-build-system)
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("jansson" ,jansson)
("zlib" ,zlib)
("libcrypto" ,openssl)))
(home-page "")
(synopsis "Object Signing and Encryption")
(description "C-language implementation of Javascript Object Signing and
Encryption")
(license license:asl2.0)))
|
e5d9784ed2d2352fb36e166bed78fbd208706a9b0cd88f162bbf0b914d582aba | Helkafen/wai-middleware-metrics | Metrics.hs | {-# LANGUAGE OverloadedStrings #-}
|
Module : Network . Wai . Metrics
License : : experimental
A < WAI > middleware to collect the following < -12-11-24-day-of-hackage-ekg.html EKG > metrics from compatible web servers :
* number of requests ( counter @wai.request_count@ )
* number of response by status code , broken down class ( count )
* latency distribution ( distribution @wai.latency_distribution@ )
Here 's an example of reading these metrics from a server , and displaying them with EKG .
> -- Compile with GHC option ` -with - rtsopts=-T ` for GC metrics
> import Web .
> import Control . Applicative
> import System . Remote . Monitoring ( serverMetricStore , forkServer )
> import Network . Wai . Metrics
>
> main : : IO ( )
> main = do
> store < - serverMetricStore < $ > forkServer " localhost " 8000
> store
> scotty 3000 $ do
> middleware ( metrics waiMetrics )
> get " / " $ html " "
Now have a look at < :8000 your local EKG instance > and display the request count by clicking on ' wai.request_count ' .
WAI metrics can also be stored in a bare EKG store , with no UI and no GC metrics . Use ekg - core 's function .
Compatible web servers include the following :
* Yesod
*
* Spock
* Servant
* Warp
Module : Network.Wai.Metrics
License : BSD3
Stability : experimental
A < WAI> middleware to collect the following <-12-11-24-day-of-hackage-ekg.html EKG> metrics from compatible web servers:
* number of requests (counter @wai.request_count@)
* number of response by status code, broken down class (count @wai.response_status_xxx@)
* latency distribution (distribution @wai.latency_distribution@)
Here's an example of reading these metrics from a Scotty server, and displaying them with EKG.
> -- Compile with GHC option `-with-rtsopts=-T` for GC metrics
> import Web.Scotty
> import Control.Applicative
> import System.Remote.Monitoring (serverMetricStore, forkServer)
> import Network.Wai.Metrics
>
> main :: IO()
> main = do
> store <- serverMetricStore <$> forkServer "localhost" 8000
> waiMetrics <- registerWaiMetrics store
> scotty 3000 $ do
> middleware (metrics waiMetrics)
> get "/" $ html "Ping"
Now have a look at <:8000 your local EKG instance> and display the request count by clicking on 'wai.request_count'.
WAI metrics can also be stored in a bare EKG store, with no UI and no GC metrics. Use ekg-core's newStore function.
Compatible web servers include the following:
*Yesod
*Scotty
*Spock
*Servant
*Warp
-}
module Network.Wai.Metrics (
registerWaiMetrics,
registerNamedWaiMetrics,
WaiMetrics(..),
metrics) where
import Control.Applicative
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Time.Clock
import Network.HTTP.Types.Status (statusCode)
import Network.Wai
import Prelude
import System.Metrics
import qualified System.Metrics.Counter as Counter
import qualified System.Metrics.Distribution as Distribution
|
The metrics to feed in WAI and register in EKG .
The metrics to feed in WAI and register in EKG.
-}
data WaiMetrics = WaiMetrics {
requestCounter :: Counter.Counter
,latencyDistribution :: Distribution.Distribution
,statusCode100Counter :: Counter.Counter
,statusCode200Counter :: Counter.Counter
,statusCode300Counter :: Counter.Counter
,statusCode400Counter :: Counter.Counter
,statusCode500Counter :: Counter.Counter
}
{-|
Register in EKG a number of metrics related to web server activity using empty namespace.
* @wai.request_count@
* @wai.response_status_1xx@
* @wai.response_status_2xx@
* @wai.response_status_3xx@
* @wai.response_status_4xx@
* @wai.response_status_5xx@
* @wai.latency_distribution@
-}
registerWaiMetrics :: Store -> IO WaiMetrics
registerWaiMetrics = registerNamedWaiMetrics ""
|
Register in EKG a number of metrics related to web server activity with a
namespace .
* @<namespace>.wai.request_count@
* @<namespace>.wai.response_status_1xx@
* @<namespace>.wai.response_status_2xx@
* @<namespace>.wai.response_status_3xx@
* @<namespace>.wai.response_status_4xx@
* @<namespace>.wai.response_status_5xx@
* @<namespace>.wai.latency_distribution@
Register in EKG a number of metrics related to web server activity with a
namespace.
* @<namespace>.wai.request_count@
* @<namespace>.wai.response_status_1xx@
* @<namespace>.wai.response_status_2xx@
* @<namespace>.wai.response_status_3xx@
* @<namespace>.wai.response_status_4xx@
* @<namespace>.wai.response_status_5xx@
* @<namespace>.wai.latency_distribution@
-}
registerNamedWaiMetrics :: Text -> Store -> IO WaiMetrics
registerNamedWaiMetrics namespace store =
WaiMetrics
<$> createCounter (namespace' <> "wai.request_count") store
<*> createDistribution (namespace' <> "wai.latency_distribution") store
<*> createCounter (namespace' <> "wai.response_status_1xx") store
<*> createCounter (namespace' <> "wai.response_status_2xx") store
<*> createCounter (namespace' <> "wai.response_status_3xx") store
<*> createCounter (namespace' <> "wai.response_status_4xx") store
<*> createCounter (namespace' <> "wai.response_status_5xx") store
where
-- append a '.' to a given namespace, if not empty
namespace'
|Text.null namespace = namespace
|otherwise = namespace <> "."
{-|
Create a middleware to be added to a WAI-based webserver.
-}
metrics :: WaiMetrics -> Middleware
metrics waiMetrics app req respond = do
Counter.inc (requestCounter waiMetrics)
start <- getCurrentTime
app req (respond' start)
where respond' :: UTCTime -> Response -> IO ResponseReceived
respond' start res = do
Counter.inc $ case statusCode $ responseStatus res of
s | s >= 500 -> statusCode500Counter waiMetrics
| s >= 400 -> statusCode400Counter waiMetrics
| s >= 300 -> statusCode300Counter waiMetrics
| s >= 200 -> statusCode200Counter waiMetrics
| otherwise -> statusCode100Counter waiMetrics
end <- getCurrentTime
Distribution.add (latencyDistribution waiMetrics) (realToFrac $ diffUTCTime end start)
respond res
| null | https://raw.githubusercontent.com/Helkafen/wai-middleware-metrics/2d2d9fd6ade1e07b2bae129da6af0a9fa5c5a9e5/Network/Wai/Metrics.hs | haskell | # LANGUAGE OverloadedStrings #
Compile with GHC option ` -with - rtsopts=-T ` for GC metrics
Compile with GHC option `-with-rtsopts=-T` for GC metrics
|
Register in EKG a number of metrics related to web server activity using empty namespace.
* @wai.request_count@
* @wai.response_status_1xx@
* @wai.response_status_2xx@
* @wai.response_status_3xx@
* @wai.response_status_4xx@
* @wai.response_status_5xx@
* @wai.latency_distribution@
append a '.' to a given namespace, if not empty
|
Create a middleware to be added to a WAI-based webserver.
|
|
Module : Network . Wai . Metrics
License : : experimental
A < WAI > middleware to collect the following < -12-11-24-day-of-hackage-ekg.html EKG > metrics from compatible web servers :
* number of requests ( counter @wai.request_count@ )
* number of response by status code , broken down class ( count )
* latency distribution ( distribution @wai.latency_distribution@ )
Here 's an example of reading these metrics from a server , and displaying them with EKG .
> import Web .
> import Control . Applicative
> import System . Remote . Monitoring ( serverMetricStore , forkServer )
> import Network . Wai . Metrics
>
> main : : IO ( )
> main = do
> store < - serverMetricStore < $ > forkServer " localhost " 8000
> store
> scotty 3000 $ do
> middleware ( metrics waiMetrics )
> get " / " $ html " "
Now have a look at < :8000 your local EKG instance > and display the request count by clicking on ' wai.request_count ' .
WAI metrics can also be stored in a bare EKG store , with no UI and no GC metrics . Use ekg - core 's function .
Compatible web servers include the following :
* Yesod
*
* Spock
* Servant
* Warp
Module : Network.Wai.Metrics
License : BSD3
Stability : experimental
A < WAI> middleware to collect the following <-12-11-24-day-of-hackage-ekg.html EKG> metrics from compatible web servers:
* number of requests (counter @wai.request_count@)
* number of response by status code, broken down class (count @wai.response_status_xxx@)
* latency distribution (distribution @wai.latency_distribution@)
Here's an example of reading these metrics from a Scotty server, and displaying them with EKG.
> import Web.Scotty
> import Control.Applicative
> import System.Remote.Monitoring (serverMetricStore, forkServer)
> import Network.Wai.Metrics
>
> main :: IO()
> main = do
> store <- serverMetricStore <$> forkServer "localhost" 8000
> waiMetrics <- registerWaiMetrics store
> scotty 3000 $ do
> middleware (metrics waiMetrics)
> get "/" $ html "Ping"
Now have a look at <:8000 your local EKG instance> and display the request count by clicking on 'wai.request_count'.
WAI metrics can also be stored in a bare EKG store, with no UI and no GC metrics. Use ekg-core's newStore function.
Compatible web servers include the following:
*Yesod
*Scotty
*Spock
*Servant
*Warp
-}
module Network.Wai.Metrics (
registerWaiMetrics,
registerNamedWaiMetrics,
WaiMetrics(..),
metrics) where
import Control.Applicative
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Time.Clock
import Network.HTTP.Types.Status (statusCode)
import Network.Wai
import Prelude
import System.Metrics
import qualified System.Metrics.Counter as Counter
import qualified System.Metrics.Distribution as Distribution
|
The metrics to feed in WAI and register in EKG .
The metrics to feed in WAI and register in EKG.
-}
data WaiMetrics = WaiMetrics {
requestCounter :: Counter.Counter
,latencyDistribution :: Distribution.Distribution
,statusCode100Counter :: Counter.Counter
,statusCode200Counter :: Counter.Counter
,statusCode300Counter :: Counter.Counter
,statusCode400Counter :: Counter.Counter
,statusCode500Counter :: Counter.Counter
}
registerWaiMetrics :: Store -> IO WaiMetrics
registerWaiMetrics = registerNamedWaiMetrics ""
|
Register in EKG a number of metrics related to web server activity with a
namespace .
* @<namespace>.wai.request_count@
* @<namespace>.wai.response_status_1xx@
* @<namespace>.wai.response_status_2xx@
* @<namespace>.wai.response_status_3xx@
* @<namespace>.wai.response_status_4xx@
* @<namespace>.wai.response_status_5xx@
* @<namespace>.wai.latency_distribution@
Register in EKG a number of metrics related to web server activity with a
namespace.
* @<namespace>.wai.request_count@
* @<namespace>.wai.response_status_1xx@
* @<namespace>.wai.response_status_2xx@
* @<namespace>.wai.response_status_3xx@
* @<namespace>.wai.response_status_4xx@
* @<namespace>.wai.response_status_5xx@
* @<namespace>.wai.latency_distribution@
-}
registerNamedWaiMetrics :: Text -> Store -> IO WaiMetrics
registerNamedWaiMetrics namespace store =
WaiMetrics
<$> createCounter (namespace' <> "wai.request_count") store
<*> createDistribution (namespace' <> "wai.latency_distribution") store
<*> createCounter (namespace' <> "wai.response_status_1xx") store
<*> createCounter (namespace' <> "wai.response_status_2xx") store
<*> createCounter (namespace' <> "wai.response_status_3xx") store
<*> createCounter (namespace' <> "wai.response_status_4xx") store
<*> createCounter (namespace' <> "wai.response_status_5xx") store
where
namespace'
|Text.null namespace = namespace
|otherwise = namespace <> "."
metrics :: WaiMetrics -> Middleware
metrics waiMetrics app req respond = do
Counter.inc (requestCounter waiMetrics)
start <- getCurrentTime
app req (respond' start)
where respond' :: UTCTime -> Response -> IO ResponseReceived
respond' start res = do
Counter.inc $ case statusCode $ responseStatus res of
s | s >= 500 -> statusCode500Counter waiMetrics
| s >= 400 -> statusCode400Counter waiMetrics
| s >= 300 -> statusCode300Counter waiMetrics
| s >= 200 -> statusCode200Counter waiMetrics
| otherwise -> statusCode100Counter waiMetrics
end <- getCurrentTime
Distribution.add (latencyDistribution waiMetrics) (realToFrac $ diffUTCTime end start)
respond res
|
75a865939e8ecc19e440fb1671d983f3b26abf4a25382fc108731f50a1f74656 | robertmeta/cowboy-examples | hello_world_rest.erl | -module(hello_world_rest).
-export([start/0, start/2, stop/0]).
start() ->
application:start(cowboy),
application:start(hello_world_rest).
start(_Type, _Args) ->
Dispatch = [
{'_', [{'_', hello_world_rest_handler, []}]}
],
cowboy:start_listener(my_http_listener, 1,
cowboy_tcp_transport, [{port, 8080}],
cowboy_http_protocol, [{dispatch, Dispatch}]
).
stop() ->
application:stop(cowboy).
| null | https://raw.githubusercontent.com/robertmeta/cowboy-examples/d03c289c9fb0d750eca11e3f1671e74d1841bd09/apps/hello_world_rest/src/hello_world_rest.erl | erlang | -module(hello_world_rest).
-export([start/0, start/2, stop/0]).
start() ->
application:start(cowboy),
application:start(hello_world_rest).
start(_Type, _Args) ->
Dispatch = [
{'_', [{'_', hello_world_rest_handler, []}]}
],
cowboy:start_listener(my_http_listener, 1,
cowboy_tcp_transport, [{port, 8080}],
cowboy_http_protocol, [{dispatch, Dispatch}]
).
stop() ->
application:stop(cowboy).
| |
35f282c6dd3ae947aa87e4976cc695ead482045a426c5b10c2f992627d9ddc8f | barbuz/Husk | InputParser.hs |
Parser for recognizing types of inputs
module InputParser where
import Expr
import Infer
import Debug
import Text.Parsec
import Data.List (intercalate,nub)
import Control.Monad (foldM)
type InputParser = Parsec String () (Maybe (String, Type))
unifyInputs :: Type -> Type -> Maybe Type
unifyInputs t1 t2 | trace' 2 ("unifying input types " ++ show t1 ++ " and " ++ show t2) False = undefined
unifyInputs (TPair t1 t2) (TPair s1 s2) = do
r1 <- unifyInputs t1 s1
r2 <- unifyInputs t2 s2
return $ TPair r1 r2
unifyInputs (TList t1) (TList t2) = unifyInputs t1 t2 >>= return . TList
unifyInputs t1@(TConc _) t2 | t1 == t2 = Just t1
unifyInputs (TVar _) t = Just t
unifyInputs t (TVar _) = Just t
unifyInputs _ _ = Nothing
number :: InputParser
number = do
minus <- optionMaybe $ char '-'
prefix <- many1 digit
suffix <- optionMaybe $ do
sep <- oneOf "./"
rest <- many1 digit
return (sep:rest)
let number = case (minus, suffix) of
(Just _, Just suffix) -> '-' : prefix ++ suffix
(Just _, Nothing) -> '-' : prefix
(Nothing, Just suffix) -> prefix ++ suffix
(Nothing, Nothing) -> prefix
return $ Just (number, TConc TNum)
character :: InputParser
character = do
char '\''
c <- noneOf "\\'" <|> (fmap (\c -> if c == 'n' then '\n' else c) $ char '\\' >> oneOf "\\'n")
char '\''
return $ Just (show c, TConc TChar)
plainStr :: InputParser
plainStr = do
chars <- many $ noneOf "\\\"" <|> (fmap (\c -> if c == 'n' then '\n' else c) $ char '\\' >> oneOf "\\\"n")
return $ Just (show chars, TList (TConc TChar))
str :: InputParser
str = do
char '"'
contents <- plainStr
char '"'
return contents
list :: InputParser
list = do
char '['
maybeElems <- sepBy inputVal (char ',')
char ']'
return $ do
elems <- sequence maybeElems
let outStr = "[" ++ intercalate "," (map fst elems) ++ "]"
outType <- foldM unifyInputs (TVar "x") $ map snd elems
return (outStr, TList outType)
pair :: InputParser
pair = do
char '('
elem1 <- inputVal
char ','
elem2 <- inputVal
char ')'
return $ do
(str1, typ1) <- elem1
(str2, typ2) <- elem2
return ("(" ++ str1 ++ "," ++ str2 ++ ")", TPair typ1 typ2)
inputVal :: InputParser
inputVal = try number <|> try character <|> try list <|> try pair <|> str
input :: InputParser
input = do
maybeInputVal <- inputVal
maybeTyp <- optionMaybe $ char ':' >> inputType
eof
return $ case (maybeInputVal, maybeTyp) of
(Nothing, _) -> Nothing
(val@(Just _), Nothing) -> val
(val, Just typ) -> do
(str, infTyp) <- val
newTyp <- unifyInputs infTyp typ
return $ (str, newTyp)
inputType :: Parsec String () Type
inputType = numT <|> charT <|> varT <|> listT <|> pairT
where numT = char 'N' >> return (TConc TNum)
charT = char 'C' >> return (TConc TChar)
varT = lower >>= \c-> return (TVar [c])
listT = char 'L' >> fmap TList inputType
pairT = char 'P' >> do
first <- inputType
second <- inputType
return $ TPair first second
parseInput :: Int -> String -> Either String (Maybe (String, Type))
parseInput inputIndex str =
case parse (try input <|> plainStr) ("input" ++ show inputIndex) str of
Left err -> Left $ show err
Right val -> Right $ trace' 1 ("input " ++ show inputIndex ++ ", " ++ str ++ ", is " ++ show val) val
| null | https://raw.githubusercontent.com/barbuz/Husk/40fbd352d5a6a9a709ea0901756fa3e0557e2063/InputParser.hs | haskell |
Parser for recognizing types of inputs
module InputParser where
import Expr
import Infer
import Debug
import Text.Parsec
import Data.List (intercalate,nub)
import Control.Monad (foldM)
type InputParser = Parsec String () (Maybe (String, Type))
unifyInputs :: Type -> Type -> Maybe Type
unifyInputs t1 t2 | trace' 2 ("unifying input types " ++ show t1 ++ " and " ++ show t2) False = undefined
unifyInputs (TPair t1 t2) (TPair s1 s2) = do
r1 <- unifyInputs t1 s1
r2 <- unifyInputs t2 s2
return $ TPair r1 r2
unifyInputs (TList t1) (TList t2) = unifyInputs t1 t2 >>= return . TList
unifyInputs t1@(TConc _) t2 | t1 == t2 = Just t1
unifyInputs (TVar _) t = Just t
unifyInputs t (TVar _) = Just t
unifyInputs _ _ = Nothing
number :: InputParser
number = do
minus <- optionMaybe $ char '-'
prefix <- many1 digit
suffix <- optionMaybe $ do
sep <- oneOf "./"
rest <- many1 digit
return (sep:rest)
let number = case (minus, suffix) of
(Just _, Just suffix) -> '-' : prefix ++ suffix
(Just _, Nothing) -> '-' : prefix
(Nothing, Just suffix) -> prefix ++ suffix
(Nothing, Nothing) -> prefix
return $ Just (number, TConc TNum)
character :: InputParser
character = do
char '\''
c <- noneOf "\\'" <|> (fmap (\c -> if c == 'n' then '\n' else c) $ char '\\' >> oneOf "\\'n")
char '\''
return $ Just (show c, TConc TChar)
plainStr :: InputParser
plainStr = do
chars <- many $ noneOf "\\\"" <|> (fmap (\c -> if c == 'n' then '\n' else c) $ char '\\' >> oneOf "\\\"n")
return $ Just (show chars, TList (TConc TChar))
str :: InputParser
str = do
char '"'
contents <- plainStr
char '"'
return contents
list :: InputParser
list = do
char '['
maybeElems <- sepBy inputVal (char ',')
char ']'
return $ do
elems <- sequence maybeElems
let outStr = "[" ++ intercalate "," (map fst elems) ++ "]"
outType <- foldM unifyInputs (TVar "x") $ map snd elems
return (outStr, TList outType)
pair :: InputParser
pair = do
char '('
elem1 <- inputVal
char ','
elem2 <- inputVal
char ')'
return $ do
(str1, typ1) <- elem1
(str2, typ2) <- elem2
return ("(" ++ str1 ++ "," ++ str2 ++ ")", TPair typ1 typ2)
inputVal :: InputParser
inputVal = try number <|> try character <|> try list <|> try pair <|> str
input :: InputParser
input = do
maybeInputVal <- inputVal
maybeTyp <- optionMaybe $ char ':' >> inputType
eof
return $ case (maybeInputVal, maybeTyp) of
(Nothing, _) -> Nothing
(val@(Just _), Nothing) -> val
(val, Just typ) -> do
(str, infTyp) <- val
newTyp <- unifyInputs infTyp typ
return $ (str, newTyp)
inputType :: Parsec String () Type
inputType = numT <|> charT <|> varT <|> listT <|> pairT
where numT = char 'N' >> return (TConc TNum)
charT = char 'C' >> return (TConc TChar)
varT = lower >>= \c-> return (TVar [c])
listT = char 'L' >> fmap TList inputType
pairT = char 'P' >> do
first <- inputType
second <- inputType
return $ TPair first second
parseInput :: Int -> String -> Either String (Maybe (String, Type))
parseInput inputIndex str =
case parse (try input <|> plainStr) ("input" ++ show inputIndex) str of
Left err -> Left $ show err
Right val -> Right $ trace' 1 ("input " ++ show inputIndex ++ ", " ++ str ++ ", is " ++ show val) val
| |
f72fa3c6d86e63337e75c744eadd65c7257a06f3e8dca4189047ae0266a03675 | mentat-collective/emmy | interpolate_test.cljc | #_"SPDX-License-Identifier: GPL-3.0"
(ns emmy.polynomial.interpolate-test
(:require [clojure.test :refer [is deftest testing use-fixtures]]
[emmy.generic :as g]
[emmy.numsymb]
[emmy.polynomial.gcd :as pg]
[emmy.polynomial.interpolate :as pi]
[emmy.simplify :as s :refer [hermetic-simplify-fixture]]
[emmy.value :as v]
[same :refer [ish?] :include-macros true]))
(use-fixtures :each hermetic-simplify-fixture)
(deftest symbolic-tests
(letfn [(lagrange-incremental [points x]
(let [n (count points)]
(map (fn [i]
(pi/lagrange (take i points) x))
(range 1 (inc n)))))
(diff [l r]
(g/simplify (g/- l r)))]
(testing "Neville and Lagrange interpolation are equivalent"
(let [points [['x_1 'y_1] ['x_2 'y_2]]]
(is (v/zero?
(diff (pi/lagrange points 'x)
(pi/neville-recursive points 'x))))))
(testing "points ordering doesn't matter for the final value. (Should test
all permutations...)"
(is (v/zero?
(diff
(pi/lagrange [['x_1 'y_1] ['x_2 'y_2] ['x_3 'y_3]] 'x)
(pi/lagrange [['x_2 'y_2] ['x_1 'y_1] ['x_3 'y_3]] 'x))))
(is (v/zero?
(diff
(pi/lagrange [['x_2 'y_2] ['x_1 'y_1] ['x_3 'y_3]] 'x)
(pi/lagrange [['x_3 'y_3] ['x_2 'y_2] ['x_1 'y_1]] 'x)))))
This was giving cljs some trouble on , so here we are .
(binding [pg/*poly-gcd-time-limit* #?(:clj [2 :seconds]
:cljs [6 :seconds])]
(testing "symbolic incremental methods should be identical to the full
lagrange method at each point prefix."
(let [points [['x_1 'y_1] ['x_2 'y_2] ['x_3 'y_3] ['x_4 'y_4]]
diffs (map diff
(lagrange-incremental points 'x)
(pi/neville-incremental points 'x))]
(is (v/= [0 0 0 0] diffs)))))))
(deftest performance-tests
(let [points [[0 1] [2 1] [5 2] [8 10]]
expected [1 1.0 0.9359999999999999 1.0829333333333333]]
(testing "each function returns a sequence of successive approximations. The
approximation around 1.2 gets better the more points we add in."
(is (ish? (last expected) (pi/lagrange points 1.2))
"Lagrange only returns the final value.")
(is (ish? (last expected) (pi/neville-recursive points 1.2))
"Non-incremental neville.")
(is (ish? expected (pi/neville-incremental points 1.2))
"This is the initial, unabstracted version.")
(is (ish? expected (pi/neville points 1.2))
"incremental calculation via full Neville's algorithm")
(is (ish? expected (pi/modified-neville points 1.2))
"incremental calculation via modified Neville"))
(testing "folding points should match the best estimate received through
column-wise processing."
(is (ish? (last expected) ((pi/neville-sum 1.2) points)))
(is (ish? (last expected) ((pi/modified-neville-sum 1.2) points))))
(testing "the diagonal of the tableau processed with a fold should match the
first row of column-wise processing. (Scan produces the diagonal by
returning a sequence of the final values in each row.)"
(is (ish? expected ((pi/neville-scan 1.2) points)))
(is (ish? expected ((pi/modified-neville-scan 1.2) points))))
(testing "the tableau processed with a fold should match the first row of
column-wise processing."
(is (ish? ((pi/neville-sum 1.2) points)
(last ((pi/neville-scan 1.2) points))))
(is (ish? ((pi/modified-neville-sum 1.2) points)
(last ((pi/modified-neville-scan 1.2) points)))))))
| null | https://raw.githubusercontent.com/mentat-collective/emmy/90a1de10e78187c70d546c3cfd63c8d32b783bed/test/emmy/polynomial/interpolate_test.cljc | clojure | #_"SPDX-License-Identifier: GPL-3.0"
(ns emmy.polynomial.interpolate-test
(:require [clojure.test :refer [is deftest testing use-fixtures]]
[emmy.generic :as g]
[emmy.numsymb]
[emmy.polynomial.gcd :as pg]
[emmy.polynomial.interpolate :as pi]
[emmy.simplify :as s :refer [hermetic-simplify-fixture]]
[emmy.value :as v]
[same :refer [ish?] :include-macros true]))
(use-fixtures :each hermetic-simplify-fixture)
(deftest symbolic-tests
(letfn [(lagrange-incremental [points x]
(let [n (count points)]
(map (fn [i]
(pi/lagrange (take i points) x))
(range 1 (inc n)))))
(diff [l r]
(g/simplify (g/- l r)))]
(testing "Neville and Lagrange interpolation are equivalent"
(let [points [['x_1 'y_1] ['x_2 'y_2]]]
(is (v/zero?
(diff (pi/lagrange points 'x)
(pi/neville-recursive points 'x))))))
(testing "points ordering doesn't matter for the final value. (Should test
all permutations...)"
(is (v/zero?
(diff
(pi/lagrange [['x_1 'y_1] ['x_2 'y_2] ['x_3 'y_3]] 'x)
(pi/lagrange [['x_2 'y_2] ['x_1 'y_1] ['x_3 'y_3]] 'x))))
(is (v/zero?
(diff
(pi/lagrange [['x_2 'y_2] ['x_1 'y_1] ['x_3 'y_3]] 'x)
(pi/lagrange [['x_3 'y_3] ['x_2 'y_2] ['x_1 'y_1]] 'x)))))
This was giving cljs some trouble on , so here we are .
(binding [pg/*poly-gcd-time-limit* #?(:clj [2 :seconds]
:cljs [6 :seconds])]
(testing "symbolic incremental methods should be identical to the full
lagrange method at each point prefix."
(let [points [['x_1 'y_1] ['x_2 'y_2] ['x_3 'y_3] ['x_4 'y_4]]
diffs (map diff
(lagrange-incremental points 'x)
(pi/neville-incremental points 'x))]
(is (v/= [0 0 0 0] diffs)))))))
(deftest performance-tests
(let [points [[0 1] [2 1] [5 2] [8 10]]
expected [1 1.0 0.9359999999999999 1.0829333333333333]]
(testing "each function returns a sequence of successive approximations. The
approximation around 1.2 gets better the more points we add in."
(is (ish? (last expected) (pi/lagrange points 1.2))
"Lagrange only returns the final value.")
(is (ish? (last expected) (pi/neville-recursive points 1.2))
"Non-incremental neville.")
(is (ish? expected (pi/neville-incremental points 1.2))
"This is the initial, unabstracted version.")
(is (ish? expected (pi/neville points 1.2))
"incremental calculation via full Neville's algorithm")
(is (ish? expected (pi/modified-neville points 1.2))
"incremental calculation via modified Neville"))
(testing "folding points should match the best estimate received through
column-wise processing."
(is (ish? (last expected) ((pi/neville-sum 1.2) points)))
(is (ish? (last expected) ((pi/modified-neville-sum 1.2) points))))
(testing "the diagonal of the tableau processed with a fold should match the
first row of column-wise processing. (Scan produces the diagonal by
returning a sequence of the final values in each row.)"
(is (ish? expected ((pi/neville-scan 1.2) points)))
(is (ish? expected ((pi/modified-neville-scan 1.2) points))))
(testing "the tableau processed with a fold should match the first row of
column-wise processing."
(is (ish? ((pi/neville-sum 1.2) points)
(last ((pi/neville-scan 1.2) points))))
(is (ish? ((pi/modified-neville-sum 1.2) points)
(last ((pi/modified-neville-scan 1.2) points)))))))
| |
74810568dbd518a068021c77804a79d8ba24a4759ab9302e80d39069830a1549 | mukul-rathi/bolt | good_consume_variable.ml | open Core
open Print_typed_ast
let%expect_test "Consume variable" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
const int g : Bar ;
const int h : Bar;
}
class Choco {
capability local Late;
const int f : Late;
}
class Bana {
capability read Na;
const int f : Na;
}
void main(){
if true {
let x = new Foo(f:4, g:5, h:6);
let y = consume x; // Consume linear variable
let z = 5;
let w = consume z; // Can consume an int
y.h
}
else {
if false {
let x = new Choco(f:5);
let y = consume x;
y.f
}
else{
let x = new Bana(f:5);
let y = consume x.f;
y
}
}
}
" ;
[%expect {|
Line:33 Position:18 Type error - Trying to consume a const field. |}]
let%expect_test "Access object after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
var int f : Bar;
}
void main(){
let x = new Foo();
consume x.f;
x
}
" ;
[%expect
{|
Program
└──Class: Foo
└──Capabilities:
└──Capability: Linear Bar
└──Field Defn: f
└──Modifier: Var
└──Type expr: Int
└──Capabilities: Bar
└──Main block
└──Type expr: Foo
└──Expr: Let var: x
└──Type expr: Foo
└──Expr: Constructor for: Foo
└──Type expr: Foo
└──Expr: Consume
└──Expr: Objfield: (Class: Foo) x.f
└──Type expr: Int
└──Expr: Variable: x
└──Type expr: Foo |}]
let%expect_test "Access other field after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
const int g : Bar;
}
void main(){
let x = new Foo();
consume x.f;
x.g
}
" ;
[%expect {|
Line:10 Position:7 Type error - Trying to consume a const field. |}]
let%expect_test "Access method after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { 42 }
}
void main(){
let x = new Foo();
consume x.f;
x.test()
}
" ;
[%expect {| Line:9 Position:7 Type error - Trying to consume a const field. |}]
let%expect_test "Access field in method after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { this.f } // this.f has been consumed, but we can't tell this
locally, so would be accepted
}
void main(){
let x = new Foo();
consume x.f;
x.test()
}
" ;
[%expect {|
Line:6 Position:15: syntax error |}]
let%expect_test "Access variable after consumed then reassigned" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { this.f } // this.f has been consumed, but we can't tell this
locally, so would be accepted
}
void main(){
let x = new Foo();
consume x;
x := new Foo()
}
" ;
[%expect {|
Line:6 Position:15: syntax error
|}]
let%expect_test "Access variable after consumed then shadowed in an inner scope" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { this.f } // this.f has been consumed, but we can't tell this
locally, so would be accepted
}
void main(){
let x = new Foo();
consume x;
if (true){
let x = 42;
x // this access is fine as shadowed
}
else{
42
}
}
" ;
[%expect {|
Line:6 Position:15: syntax error
|}]
let%expect_test "Consume shared variable if only accessed by one local" =
print_typed_ast
"
class Foo {
capability linear Bar;
var int f : Bar;
int test() : Bar { this.f }
}
void main(){
let x = new Foo();
let y = new Foo();
finish{
async{
while((x.test()) < 10){
x.f := x.f +1
};
consume x // note accessed in only one local
}
y.f := 5
}
}
" ;
[%expect
{|
Program
└──Class: Foo
└──Capabilities:
└──Capability: Linear Bar
└──Field Defn: f
└──Modifier: Var
└──Type expr: Int
└──Capabilities: Bar
└── Method: test
└── Return type: Int
└──Param: Void
└── Used capabilities
└── Capabilities: Bar
└──Body block
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) this.f
└──Type expr: Int
└──Main block
└──Type expr: Int
└──Expr: Let var: x
└──Type expr: Foo
└──Expr: Constructor for: Foo
└──Type expr: Foo
└──Expr: Let var: y
└──Type expr: Foo
└──Expr: Constructor for: Foo
└──Type expr: Foo
└──Expr: Finish_async
└──Type expr: Int
└──Async Expr block
└──Type expr: Foo
└──Expr: While
└──Type expr: Void
└──Expr: Bin Op: <
└──Type expr: Bool
└──Expr: ObjMethod: (Class: Foo) x.test
└──Type expr: Int
└──()
└──Expr: Int:10
└──Body block
└──Type expr: Int
└──Expr: Assign
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) x.f
└──Type expr: Int
└──Expr: Bin Op: +
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) x.f
└──Type expr: Int
└──Expr: Int:1
└──Expr: Consume
└──Expr: Variable: x
└──Type expr: Foo
└──Current thread block
└──Type expr: Int
└──Expr: Assign
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) y.f
└──Type expr: Int
└──Expr: Int:5 |}]
| null | https://raw.githubusercontent.com/mukul-rathi/bolt/1faf19d698852fdb6af2ee005a5f036ee1c76503/tests/frontend/expect/typing/good_consume_variable.ml | ocaml | open Core
open Print_typed_ast
let%expect_test "Consume variable" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
const int g : Bar ;
const int h : Bar;
}
class Choco {
capability local Late;
const int f : Late;
}
class Bana {
capability read Na;
const int f : Na;
}
void main(){
if true {
let x = new Foo(f:4, g:5, h:6);
let y = consume x; // Consume linear variable
let z = 5;
let w = consume z; // Can consume an int
y.h
}
else {
if false {
let x = new Choco(f:5);
let y = consume x;
y.f
}
else{
let x = new Bana(f:5);
let y = consume x.f;
y
}
}
}
" ;
[%expect {|
Line:33 Position:18 Type error - Trying to consume a const field. |}]
let%expect_test "Access object after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
var int f : Bar;
}
void main(){
let x = new Foo();
consume x.f;
x
}
" ;
[%expect
{|
Program
└──Class: Foo
└──Capabilities:
└──Capability: Linear Bar
└──Field Defn: f
└──Modifier: Var
└──Type expr: Int
└──Capabilities: Bar
└──Main block
└──Type expr: Foo
└──Expr: Let var: x
└──Type expr: Foo
└──Expr: Constructor for: Foo
└──Type expr: Foo
└──Expr: Consume
└──Expr: Objfield: (Class: Foo) x.f
└──Type expr: Int
└──Expr: Variable: x
└──Type expr: Foo |}]
let%expect_test "Access other field after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
const int g : Bar;
}
void main(){
let x = new Foo();
consume x.f;
x.g
}
" ;
[%expect {|
Line:10 Position:7 Type error - Trying to consume a const field. |}]
let%expect_test "Access method after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { 42 }
}
void main(){
let x = new Foo();
consume x.f;
x.test()
}
" ;
[%expect {| Line:9 Position:7 Type error - Trying to consume a const field. |}]
let%expect_test "Access field in method after consumption of field" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { this.f } // this.f has been consumed, but we can't tell this
locally, so would be accepted
}
void main(){
let x = new Foo();
consume x.f;
x.test()
}
" ;
[%expect {|
Line:6 Position:15: syntax error |}]
let%expect_test "Access variable after consumed then reassigned" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { this.f } // this.f has been consumed, but we can't tell this
locally, so would be accepted
}
void main(){
let x = new Foo();
consume x;
x := new Foo()
}
" ;
[%expect {|
Line:6 Position:15: syntax error
|}]
let%expect_test "Access variable after consumed then shadowed in an inner scope" =
print_typed_ast
"
class Foo {
capability linear Bar;
const int f : Bar;
int test() : Bar { this.f } // this.f has been consumed, but we can't tell this
locally, so would be accepted
}
void main(){
let x = new Foo();
consume x;
if (true){
let x = 42;
x // this access is fine as shadowed
}
else{
42
}
}
" ;
[%expect {|
Line:6 Position:15: syntax error
|}]
let%expect_test "Consume shared variable if only accessed by one local" =
print_typed_ast
"
class Foo {
capability linear Bar;
var int f : Bar;
int test() : Bar { this.f }
}
void main(){
let x = new Foo();
let y = new Foo();
finish{
async{
while((x.test()) < 10){
x.f := x.f +1
};
consume x // note accessed in only one local
}
y.f := 5
}
}
" ;
[%expect
{|
Program
└──Class: Foo
└──Capabilities:
└──Capability: Linear Bar
└──Field Defn: f
└──Modifier: Var
└──Type expr: Int
└──Capabilities: Bar
└── Method: test
└── Return type: Int
└──Param: Void
└── Used capabilities
└── Capabilities: Bar
└──Body block
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) this.f
└──Type expr: Int
└──Main block
└──Type expr: Int
└──Expr: Let var: x
└──Type expr: Foo
└──Expr: Constructor for: Foo
└──Type expr: Foo
└──Expr: Let var: y
└──Type expr: Foo
└──Expr: Constructor for: Foo
└──Type expr: Foo
└──Expr: Finish_async
└──Type expr: Int
└──Async Expr block
└──Type expr: Foo
└──Expr: While
└──Type expr: Void
└──Expr: Bin Op: <
└──Type expr: Bool
└──Expr: ObjMethod: (Class: Foo) x.test
└──Type expr: Int
└──()
└──Expr: Int:10
└──Body block
└──Type expr: Int
└──Expr: Assign
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) x.f
└──Type expr: Int
└──Expr: Bin Op: +
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) x.f
└──Type expr: Int
└──Expr: Int:1
└──Expr: Consume
└──Expr: Variable: x
└──Type expr: Foo
└──Current thread block
└──Type expr: Int
└──Expr: Assign
└──Type expr: Int
└──Expr: Objfield: (Class: Foo) y.f
└──Type expr: Int
└──Expr: Int:5 |}]
| |
e06c7dc7f228b599635b3defab740120c72d566eae7a18b48933701a93d1fbbf | bob-cd/wendy | impl.clj | (ns wendy.impl)
(defn bail!
[ex]
(println (str "Error: " ex))
(System/exit 1))
(defn try!
[f]
(try
(let [resp (f)]
(if (and (map? resp)
(instance? Exception (:error resp)))
(bail! (:error resp))
resp))
(catch Exception e
(bail! e))))
| null | https://raw.githubusercontent.com/bob-cd/wendy/9e7399a261d836d994a3154b1c2084cda37d72fb/src/wendy/impl.clj | clojure | (ns wendy.impl)
(defn bail!
[ex]
(println (str "Error: " ex))
(System/exit 1))
(defn try!
[f]
(try
(let [resp (f)]
(if (and (map? resp)
(instance? Exception (:error resp)))
(bail! (:error resp))
resp))
(catch Exception e
(bail! e))))
| |
76951417e58419723fc59cdfbff00032a60d2790b044e78dd6ee2614884c6b53 | ml4tp/tcoq | typing.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Term
open Environ
open Evd
(** This module provides the typing machine with existential variables
and universes. *)
* a term and return its type . May trigger an evarmap leak .
val unsafe_type_of : env -> evar_map -> constr -> types
* a term and return its type + updated evars , optionally refreshing
universes
universes *)
val type_of : ?refresh:bool -> env -> evar_map -> constr -> evar_map * types
(** Variant of [type_of] using references instead of state-passing. *)
val e_type_of : ?refresh:bool -> env -> evar_map ref -> constr -> types
* a type and return its sort
val e_sort_of : env -> evar_map ref -> types -> sorts
* a term has a given type ( assuming the type is OK )
val e_check : env -> evar_map ref -> constr -> types -> unit
(** Returns the instantiated type of a metavariable *)
val meta_type : evar_map -> metavariable -> types
(** Solve existential variables using typing *)
val e_solve_evars : env -> evar_map ref -> constr -> constr
(** Raise an error message if incorrect elimination for this inductive *)
* ( first constr is term to match , second is return predicate )
val check_allowed_sort : env -> evar_map -> pinductive -> constr -> constr ->
unit
(** Raise an error message if bodies have types not unifiable with the
expected ones *)
val check_type_fixpoint : Loc.t -> env -> evar_map ref ->
Names.Name.t array -> types array -> unsafe_judgment array -> unit
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/pretyping/typing.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* This module provides the typing machine with existential variables
and universes.
* Variant of [type_of] using references instead of state-passing.
* Returns the instantiated type of a metavariable
* Solve existential variables using typing
* Raise an error message if incorrect elimination for this inductive
* Raise an error message if bodies have types not unifiable with the
expected ones | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Term
open Environ
open Evd
* a term and return its type . May trigger an evarmap leak .
val unsafe_type_of : env -> evar_map -> constr -> types
* a term and return its type + updated evars , optionally refreshing
universes
universes *)
val type_of : ?refresh:bool -> env -> evar_map -> constr -> evar_map * types
val e_type_of : ?refresh:bool -> env -> evar_map ref -> constr -> types
* a type and return its sort
val e_sort_of : env -> evar_map ref -> types -> sorts
* a term has a given type ( assuming the type is OK )
val e_check : env -> evar_map ref -> constr -> types -> unit
val meta_type : evar_map -> metavariable -> types
val e_solve_evars : env -> evar_map ref -> constr -> constr
* ( first constr is term to match , second is return predicate )
val check_allowed_sort : env -> evar_map -> pinductive -> constr -> constr ->
unit
val check_type_fixpoint : Loc.t -> env -> evar_map ref ->
Names.Name.t array -> types array -> unsafe_judgment array -> unit
|
736d0ab35a369202e5e354b1b5a4730088ebfddb29f722e81ff160a5965d2abe | borodust/alien-works-delivery | blobs.lisp | (cl:in-package :cl-user)
;;
;; INPUT:
;; *bodge-blob-systems*
;; *foreign-library-dir*
;;
;; FROM build.lisp:
;; *delivery-bundle-directory*
;; *target-features*
;;
(when *bodge-blob-systems*
(asdf:load-system :bodge-blobs-support))
(when *bodge-blob-systems*
(let ((dst-dir (dir *delivery-bundle-directory* *foreign-library-dir*))
(libraries (loop for system in *bodge-blob-systems*
append (bodge-blobs-support:find-system-libraries-by-features
system
*target-features*))))
(ensure-directories-exist dst-dir)
(shout "Copying foreing libraries into ~A." dst-dir)
(loop for lib in libraries
for path = (file (bodge-blobs-support:library-descriptor-search-path lib)
(bodge-blobs-support:library-descriptor-name lib))
do (shout "Copying ~A." path)
(cp dst-dir path))))
| null | https://raw.githubusercontent.com/borodust/alien-works-delivery/604bae3c93be96413aec150101981142ae0bd884/delivery/scripts/blobs.lisp | lisp |
INPUT:
*bodge-blob-systems*
*foreign-library-dir*
FROM build.lisp:
*delivery-bundle-directory*
*target-features*
| (cl:in-package :cl-user)
(when *bodge-blob-systems*
(asdf:load-system :bodge-blobs-support))
(when *bodge-blob-systems*
(let ((dst-dir (dir *delivery-bundle-directory* *foreign-library-dir*))
(libraries (loop for system in *bodge-blob-systems*
append (bodge-blobs-support:find-system-libraries-by-features
system
*target-features*))))
(ensure-directories-exist dst-dir)
(shout "Copying foreing libraries into ~A." dst-dir)
(loop for lib in libraries
for path = (file (bodge-blobs-support:library-descriptor-search-path lib)
(bodge-blobs-support:library-descriptor-name lib))
do (shout "Copying ~A." path)
(cp dst-dir path))))
|
33a68b08c81fbd8911c52f8da34206be63aef6fe14e26f2e0fd30c35cb3d95ea | ocaml-ppx/ocamlformat | break_string_literals.ml | let () =
if true then (* Shrinking the margin a bit *)
Format.printf
"@[<v 2>@{<warning>@{<title>Warning@}@}@,@,\
\ These are @{<warning>NOT@} the Droids you are looking for!@,\
@,\
\ Some more text. Just more letters and words.@,\
\ All this text is left-aligned because it's part of the UI.@,\
\ It'll be easier for the user to read this message.@]@\n@."
let fooooooo =
"@\n\n\
\ [Perf Profiler Log] Function: '%s' @\n\
\ count trace id = %i @\n\
\ sum inclusive cpu time = %f@\n\
\ avg inclusive time = %f @\n\
\ sum exclusive cpu time = %f @\n\
\ avg exclusive_time = %f @\n\
\ inclusive p90 = %f @\n\
\ exclusive p90 = %f @\n\
\ inclusive p50 = %f @\n\
\ exclusive p50 = %f @\n\
\ inclusive p25 = %f @\n\
\ exclusive p25 = %f @\n"
let foooo =
Printf.sprintf
"%s\nUsage: infer %s [options]\nSee `infer%s --help` for more information."
let pp_sep fmt () = F.fprintf fmt ", @,\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,@\n"
let pp_sep fmt () = F.fprintf fmt ", @,@\n@\n"
let pp_sep fmt () = F.fprintf fmt ", @,@\n@\n@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n@\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n@;@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n@,@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n@\n\n"
let fooooooooo = Fooooo "[%a]\n"
let fooooooooo = Fooooo "[%a]@\n"
let fooooooooo = Fooooo "[%a]\n@\n"
let fooooooooo = Fooooo "[%a]@\n\n"
let fooo = Fooo "@\nFooooo: `%s`\n"
let fooooooooooo = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
let fooooooooooo = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.@;Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.@;Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.@;Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
let _ = "abc@,def\n\nghi"
let _ = "abc@,def\n\n ghi"
let _ = "abc@,def\n\n"
let _ = "abc@,def@\n\n"
| null | https://raw.githubusercontent.com/ocaml-ppx/ocamlformat/3d1c992240f7d30bcb8151285274f44619dae197/test/passing/tests/break_string_literals.ml | ocaml | Shrinking the margin a bit | let () =
Format.printf
"@[<v 2>@{<warning>@{<title>Warning@}@}@,@,\
\ These are @{<warning>NOT@} the Droids you are looking for!@,\
@,\
\ Some more text. Just more letters and words.@,\
\ All this text is left-aligned because it's part of the UI.@,\
\ It'll be easier for the user to read this message.@]@\n@."
let fooooooo =
"@\n\n\
\ [Perf Profiler Log] Function: '%s' @\n\
\ count trace id = %i @\n\
\ sum inclusive cpu time = %f@\n\
\ avg inclusive time = %f @\n\
\ sum exclusive cpu time = %f @\n\
\ avg exclusive_time = %f @\n\
\ inclusive p90 = %f @\n\
\ exclusive p90 = %f @\n\
\ inclusive p50 = %f @\n\
\ exclusive p50 = %f @\n\
\ inclusive p25 = %f @\n\
\ exclusive p25 = %f @\n"
let foooo =
Printf.sprintf
"%s\nUsage: infer %s [options]\nSee `infer%s --help` for more information."
let pp_sep fmt () = F.fprintf fmt ", @,\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,@\n"
let pp_sep fmt () = F.fprintf fmt ", @,@\n@\n"
let pp_sep fmt () = F.fprintf fmt ", @,@\n@\n@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n@\n\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n@;@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n@,@\n"
let pp_sep fmt () = F.fprintf fmt ", @,\n@\n\n@\n\n"
let fooooooooo = Fooooo "[%a]\n"
let fooooooooo = Fooooo "[%a]@\n"
let fooooooooo = Fooooo "[%a]\n@\n"
let fooooooooo = Fooooo "[%a]@\n\n"
let fooo = Fooo "@\nFooooo: `%s`\n"
let fooooooooooo = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
let fooooooooooo = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.@;Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.@;Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.@;Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
let _ = "abc@,def\n\nghi"
let _ = "abc@,def\n\n ghi"
let _ = "abc@,def\n\n"
let _ = "abc@,def@\n\n"
|
090d0869647f36bf1ffdaadde0fb3fbb17287156cafa9023c65f7329bb2e953b | aistrate/Okasaki | SimpleCatenableDeque.hs | -- Source code from
-- Purely Functional Data Structures
Cambridge University Press , 1998
--
Copyright ( c ) 1998 Cambridge University Press
module SimpleCatenableDeque (module CatenableDeque,SimpleCatDeque) where
import Prelude hiding (head,tail,last,init,(++))
import CatenableDeque
data SimpleCatDeque d a =
Shallow (d a)
| Deep (d a) (SimpleCatDeque d (d a)) (d a)
tooSmall d = isEmpty d || isEmpty (tail d)
dappendL d1 d2 = if isEmpty d1 then d2 else cons (head d1) d2
dappendR d1 d2 = if isEmpty d2 then d1 else snoc d1 (head d2)
instance Deque d => Deque (SimpleCatDeque d) where
empty = Shallow empty
isEmpty (Shallow d) = isEmpty d
isEmpty _ = False
cons x (Shallow d) = Shallow (cons x d)
cons x (Deep f m r) = Deep (cons x f) m r
head (Shallow d) = head d
head (Deep f m r) = head f
tail (Shallow d) = Shallow (tail d)
tail (Deep f m r)
| not (tooSmall f') = Deep f' m r
| isEmpty m = Shallow (dappendL f' r)
| otherwise = Deep (dappendL f' (head m)) (tail m) r
where f' = tail f
snoc (Shallow d) x = Shallow (snoc d x)
snoc (Deep f m r) x = Deep f m (snoc r x)
last (Shallow d) = last d
last (Deep f m r) = last r
init (Shallow d) = Shallow (init d)
init (Deep f m r)
| not (tooSmall r') = Deep f m r'
| isEmpty m = Shallow (dappendR f r')
| otherwise = Deep f (init m) (dappendR (last m) r')
where r' = init r
instance Deque d => CatenableDeque (SimpleCatDeque d) where
(Shallow d1) ++ (Shallow d2)
| tooSmall d1 = Shallow (dappendL d1 d2)
| tooSmall d2 = Shallow (dappendR d1 d2)
| otherwise = Deep d1 empty d2
(Shallow d) ++ (Deep f m r)
| tooSmall d = Deep (dappendL d f) m r
| otherwise = Deep d (cons f m) r
(Deep f m r) ++ (Shallow d)
| tooSmall d = Deep f m (dappendR r d)
| otherwise = Deep f (snoc m r) d
(Deep f1 m1 r1) ++ (Deep f2 m2 r2) = Deep f1 (snoc m1 r1 ++ cons f2 m2) r2
| null | https://raw.githubusercontent.com/aistrate/Okasaki/cc1473c81d053483bb5e327409346da7fda10fb4/Original/SimpleCatenableDeque.hs | haskell | Source code from
Purely Functional Data Structures
|
Cambridge University Press , 1998
Copyright ( c ) 1998 Cambridge University Press
module SimpleCatenableDeque (module CatenableDeque,SimpleCatDeque) where
import Prelude hiding (head,tail,last,init,(++))
import CatenableDeque
data SimpleCatDeque d a =
Shallow (d a)
| Deep (d a) (SimpleCatDeque d (d a)) (d a)
tooSmall d = isEmpty d || isEmpty (tail d)
dappendL d1 d2 = if isEmpty d1 then d2 else cons (head d1) d2
dappendR d1 d2 = if isEmpty d2 then d1 else snoc d1 (head d2)
instance Deque d => Deque (SimpleCatDeque d) where
empty = Shallow empty
isEmpty (Shallow d) = isEmpty d
isEmpty _ = False
cons x (Shallow d) = Shallow (cons x d)
cons x (Deep f m r) = Deep (cons x f) m r
head (Shallow d) = head d
head (Deep f m r) = head f
tail (Shallow d) = Shallow (tail d)
tail (Deep f m r)
| not (tooSmall f') = Deep f' m r
| isEmpty m = Shallow (dappendL f' r)
| otherwise = Deep (dappendL f' (head m)) (tail m) r
where f' = tail f
snoc (Shallow d) x = Shallow (snoc d x)
snoc (Deep f m r) x = Deep f m (snoc r x)
last (Shallow d) = last d
last (Deep f m r) = last r
init (Shallow d) = Shallow (init d)
init (Deep f m r)
| not (tooSmall r') = Deep f m r'
| isEmpty m = Shallow (dappendR f r')
| otherwise = Deep f (init m) (dappendR (last m) r')
where r' = init r
instance Deque d => CatenableDeque (SimpleCatDeque d) where
(Shallow d1) ++ (Shallow d2)
| tooSmall d1 = Shallow (dappendL d1 d2)
| tooSmall d2 = Shallow (dappendR d1 d2)
| otherwise = Deep d1 empty d2
(Shallow d) ++ (Deep f m r)
| tooSmall d = Deep (dappendL d f) m r
| otherwise = Deep d (cons f m) r
(Deep f m r) ++ (Shallow d)
| tooSmall d = Deep f m (dappendR r d)
| otherwise = Deep f (snoc m r) d
(Deep f1 m1 r1) ++ (Deep f2 m2 r2) = Deep f1 (snoc m1 r1 ++ cons f2 m2) r2
|
97171f5f4ab78968343c34d214e4696e7fff9d1e5736ecf96bcfaecab9f035b6 | pixlsus/registry.gimp.org_static | scale_pattern.scm | ; scale_pattern.scm
by
;
Version 1.2 ( 20090413 )
; Description
; Scales the selected pattern and puts it in the clipboard to allow it to be used as a pattern
;
; Changes
1.1 - Added interactive calls to sharpen the pattern .
1.2 - If larger than 512x512 will save it , refresh patterns , and set the active pattern to be the saved file .
; License:
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
The GNU Public License is available at
;
(define (script-fu-scale_pattern inScale inSharpen)
(let*
(
(inScale (/ inScale 100))
(width (car (gimp-pattern-get-info (car(gimp-context-get-pattern)))))
(height (cadr (gimp-pattern-get-info (car(gimp-context-get-pattern)))))
(tempimage (car (gimp-image-new (* 3 width) (* 3 height) RGB)))
(templayer (car (gimp-layer-new tempimage (* 3 width) (* 3 height) RGBA-IMAGE "temp" 100 NORMAL-MODE)))
(filename (string-append gimp-directory DIR-SEPARATOR "patterns" DIR-SEPARATOR "scaledpat.pat"))
)
(gimp-image-add-layer tempimage templayer -1)
(gimp-drawable-fill templayer PATTERN-FILL)
(gimp-image-scale-full tempimage (* width inScale 3) (* height inScale 3) INTERPOLATION-LANCZOS)
(if (= inSharpen 1)
(plug-in-sharpen 0 tempimage templayer 10)
)
(if (= inSharpen 2)
(plug-in-unsharp-mask 0 tempimage templayer 1 3 0)
)
(if (and (< (* width inScale) 512) (< (* height inScale) 512))
(begin
(gimp-rect-select tempimage (* width inScale) (* height inScale) (* width inScale) (* height inScale) CHANNEL-OP-REPLACE FALSE 0)
(gimp-edit-copy templayer)
(gimp-context-set-pattern (list-ref (cadr (gimp-patterns-get-list "")) 0))
)
(begin
(gimp-image-crop tempimage (* width inScale) (* height inScale) (* width inScale) (* height inScale))
(file-pat-save RUN-NONINTERACTIVE tempimage templayer filename "scaledpat.pat" "Scaled Pattern")
(gimp-patterns-refresh)
(gimp-context-set-pattern "Scaled Pattern")
)
)
(gimp-image-delete tempimage)
)
)
(script-fu-register "script-fu-scale_pattern"
"<Patterns>/Scale Pattern..."
"Scales the current pattern and make it the active pattern."
"Rob Antonishen"
"Rob Antonishen"
"Jan 2009"
""
SF-ADJUSTMENT "Pattern Scale %" (list 100 20 500 5 10 0 SF-SLIDER)
SF-OPTION "Sharpening" (list "None" "Sharpen" "Unsharp Mask")
) | null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/scale_pattern.scm | scheme | scale_pattern.scm
Description
Scales the selected pattern and puts it in the clipboard to allow it to be used as a pattern
Changes
License:
This program is free software; you can redistribute it and/or modify
either version 2 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
| by
Version 1.2 ( 20090413 )
1.1 - Added interactive calls to sharpen the pattern .
1.2 - If larger than 512x512 will save it , refresh patterns , and set the active pattern to be the saved file .
it under the terms of the GNU General Public License as published by
The GNU Public License is available at
(define (script-fu-scale_pattern inScale inSharpen)
(let*
(
(inScale (/ inScale 100))
(width (car (gimp-pattern-get-info (car(gimp-context-get-pattern)))))
(height (cadr (gimp-pattern-get-info (car(gimp-context-get-pattern)))))
(tempimage (car (gimp-image-new (* 3 width) (* 3 height) RGB)))
(templayer (car (gimp-layer-new tempimage (* 3 width) (* 3 height) RGBA-IMAGE "temp" 100 NORMAL-MODE)))
(filename (string-append gimp-directory DIR-SEPARATOR "patterns" DIR-SEPARATOR "scaledpat.pat"))
)
(gimp-image-add-layer tempimage templayer -1)
(gimp-drawable-fill templayer PATTERN-FILL)
(gimp-image-scale-full tempimage (* width inScale 3) (* height inScale 3) INTERPOLATION-LANCZOS)
(if (= inSharpen 1)
(plug-in-sharpen 0 tempimage templayer 10)
)
(if (= inSharpen 2)
(plug-in-unsharp-mask 0 tempimage templayer 1 3 0)
)
(if (and (< (* width inScale) 512) (< (* height inScale) 512))
(begin
(gimp-rect-select tempimage (* width inScale) (* height inScale) (* width inScale) (* height inScale) CHANNEL-OP-REPLACE FALSE 0)
(gimp-edit-copy templayer)
(gimp-context-set-pattern (list-ref (cadr (gimp-patterns-get-list "")) 0))
)
(begin
(gimp-image-crop tempimage (* width inScale) (* height inScale) (* width inScale) (* height inScale))
(file-pat-save RUN-NONINTERACTIVE tempimage templayer filename "scaledpat.pat" "Scaled Pattern")
(gimp-patterns-refresh)
(gimp-context-set-pattern "Scaled Pattern")
)
)
(gimp-image-delete tempimage)
)
)
(script-fu-register "script-fu-scale_pattern"
"<Patterns>/Scale Pattern..."
"Scales the current pattern and make it the active pattern."
"Rob Antonishen"
"Rob Antonishen"
"Jan 2009"
""
SF-ADJUSTMENT "Pattern Scale %" (list 100 20 500 5 10 0 SF-SLIDER)
SF-OPTION "Sharpening" (list "None" "Sharpen" "Unsharp Mask")
) |
bb80a1c53a59434b1c12c25732d5cae7f21b3f9cda1f62743fb55ba62dbbb4da | clj-kondo/clj-kondo | schema.clj | (ns clj-kondo.impl.schema
{:no-doc true}
(:require
[clj-kondo.impl.utils :as utils]))
(defn remove-schemas-from-children [expr]
(let [children (:children expr)
{:keys [:new-children :schemas]}
(loop [[fst-child & rest-children] children
res {:new-children []
:schemas []}]
(let [sexpr (when fst-child (utils/sexpr fst-child))]
(cond (not fst-child)
res
(= ':- (utils/sexpr fst-child))
(recur (next rest-children)
(update res :schemas conj (first rest-children)))
(vector? sexpr)
(recur rest-children
(let [{:keys [:expr :schemas]} (remove-schemas-from-children fst-child)]
(-> res
(update :schemas into schemas)
(update :new-children conj expr))))
:else (recur rest-children
(update res :new-children conj fst-child)))))]
{:expr (assoc expr :children new-children)
:schemas schemas}))
(defn- defmethod-dispatch-val? [fn-sym index]
(and (= fn-sym 'defmethod) (= index 2)))
(defn expand-schema
[_ctx fn-sym expr]
(let [children (:children expr)
{:keys [new-children
schemas]}
(loop [[fst-child & rest-children] children
index 0
res {:new-children []
:schemas []}
past-arg-schemas false]
(let [sexpr (when fst-child (utils/sexpr fst-child))]
(cond
past-arg-schemas
(if (and (= 'defrecord fn-sym)
(map? sexpr))
(-> res
(update :new-children (fn [children]
(into children rest-children)))
(update :schemas conj fst-child))
(update res :new-children (fn [children]
(into (conj children fst-child) rest-children))))
(not fst-child)
res
(= ':- sexpr)
(recur (next rest-children)
(inc index)
(update res :schemas conj (first rest-children))
past-arg-schemas)
(and (vector? sexpr) (not (defmethod-dispatch-val? fn-sym index)))
(let [{:keys [expr schemas]} (remove-schemas-from-children fst-child)]
(recur rest-children
(inc index)
(-> res
(update :schemas into schemas)
(update :new-children conj expr)
)
true))
(seq? sexpr)
(recur rest-children
(inc index)
(let [cchildren (:children fst-child)
{:keys [:expr :schemas]} (remove-schemas-from-children (first cchildren))
new-cchildren (cons expr (rest cchildren))
new-fst-child (assoc fst-child :children new-cchildren)]
(-> res
(update :schemas into schemas)
(update :new-children conj new-fst-child)))
past-arg-schemas)
:else (recur rest-children
(inc index)
(update res :new-children conj fst-child)
past-arg-schemas))))]
{:expr (assoc expr :children new-children)
:schemas schemas}))
;;;; Scratch
(comment)
| null | https://raw.githubusercontent.com/clj-kondo/clj-kondo/ed9f29052ecf54ee5fe90ebe53f4544382e46e9f/src/clj_kondo/impl/schema.clj | clojure | Scratch | (ns clj-kondo.impl.schema
{:no-doc true}
(:require
[clj-kondo.impl.utils :as utils]))
(defn remove-schemas-from-children [expr]
(let [children (:children expr)
{:keys [:new-children :schemas]}
(loop [[fst-child & rest-children] children
res {:new-children []
:schemas []}]
(let [sexpr (when fst-child (utils/sexpr fst-child))]
(cond (not fst-child)
res
(= ':- (utils/sexpr fst-child))
(recur (next rest-children)
(update res :schemas conj (first rest-children)))
(vector? sexpr)
(recur rest-children
(let [{:keys [:expr :schemas]} (remove-schemas-from-children fst-child)]
(-> res
(update :schemas into schemas)
(update :new-children conj expr))))
:else (recur rest-children
(update res :new-children conj fst-child)))))]
{:expr (assoc expr :children new-children)
:schemas schemas}))
(defn- defmethod-dispatch-val? [fn-sym index]
(and (= fn-sym 'defmethod) (= index 2)))
(defn expand-schema
[_ctx fn-sym expr]
(let [children (:children expr)
{:keys [new-children
schemas]}
(loop [[fst-child & rest-children] children
index 0
res {:new-children []
:schemas []}
past-arg-schemas false]
(let [sexpr (when fst-child (utils/sexpr fst-child))]
(cond
past-arg-schemas
(if (and (= 'defrecord fn-sym)
(map? sexpr))
(-> res
(update :new-children (fn [children]
(into children rest-children)))
(update :schemas conj fst-child))
(update res :new-children (fn [children]
(into (conj children fst-child) rest-children))))
(not fst-child)
res
(= ':- sexpr)
(recur (next rest-children)
(inc index)
(update res :schemas conj (first rest-children))
past-arg-schemas)
(and (vector? sexpr) (not (defmethod-dispatch-val? fn-sym index)))
(let [{:keys [expr schemas]} (remove-schemas-from-children fst-child)]
(recur rest-children
(inc index)
(-> res
(update :schemas into schemas)
(update :new-children conj expr)
)
true))
(seq? sexpr)
(recur rest-children
(inc index)
(let [cchildren (:children fst-child)
{:keys [:expr :schemas]} (remove-schemas-from-children (first cchildren))
new-cchildren (cons expr (rest cchildren))
new-fst-child (assoc fst-child :children new-cchildren)]
(-> res
(update :schemas into schemas)
(update :new-children conj new-fst-child)))
past-arg-schemas)
:else (recur rest-children
(inc index)
(update res :new-children conj fst-child)
past-arg-schemas))))]
{:expr (assoc expr :children new-children)
:schemas schemas}))
(comment)
|
4b15ae5fab591d1b333ec0dc01caea25b40b87b293058e08946bef8ff8dabae7 | remixlabs/wasicaml | t010_funcall4.ml | let rec f1 a b c =
a * f2 b c
and f2 b c =
b + c
let () =
let x = f1 3 4 5 in
Testprint.int "x" x
| null | https://raw.githubusercontent.com/remixlabs/wasicaml/74ff72535aa8e49ab94a05d9c32c059ce264c1bb/test/t010_funcall4.ml | ocaml | let rec f1 a b c =
a * f2 b c
and f2 b c =
b + c
let () =
let x = f1 3 4 5 in
Testprint.int "x" x
| |
88956b2099fbed472721bb4582dc0b4d44465aeb612107b8abecbb360fccfcb5 | bondy-io/bondy | bondy_registry_entry_SUITE.erl | %% =============================================================================
%% bondy_realm_SUITE.erl -
%%
Copyright ( c ) 2016 - 2022 Leapsight . All rights reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% =============================================================================
-module(bondy_registry_entry_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-include_lib("wamp/include/wamp.hrl").
-include("bondy.hrl").
-include("bondy_plum_db.hrl").
-include("bondy_security.hrl").
-compile([nowarn_export_all, export_all]).
all() ->
[
mg_comparator,
composite_comparator
].
init_per_suite(Config) ->
bondy_ct:start_bondy(),
Config.
end_per_suite(Config) ->
: ( ) ,
{save_config, Config}.
mg_comparator(_) ->
%% All using ?INVOKE_SINGLE by default
L = [
{<<"a1....">>, ?WILDCARD_MATCH},
{<<"a1....e5">>, ?WILDCARD_MATCH},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH},
{<<"a1.b2.c3">>, ?PREFIX_MATCH},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH},
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH}
],
Expected = [
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH},
{<<"a1.b2.c3">>, ?PREFIX_MATCH},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH},
{<<"a1....e5">>, ?WILDCARD_MATCH},
{<<"a1....">>, ?WILDCARD_MATCH}
],
Ref = bondy_ref:new(internal, self()),
Entries = [
bondy_registry_entry:new(
registration, <<"com.foo">>, Ref, Uri, #{match => P}
)
|| {Uri, P} <- L
],
Fun = bondy_registry_entry:mg_comparator(),
?assertEqual(
Expected,
[
{bondy_registry_entry:uri(E), bondy_registry_entry:match_policy(E)}
|| E <- lists:sort(Fun, Entries)
]
),
ok.
composite_comparator(_) ->
L = [
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3">>, ?PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE}
],
Expected = [
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3">>, ? PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN}
],
Ref = bondy_ref:new(internal, self()),
Entries = [
bondy_registry_entry:new(
registration, <<"com.foo">>, Ref, Uri, #{match => P, invoke => I}
)
|| {Uri, P, I} <- L
],
Fun = bondy_registry_entry:mg_comparator(),
?assertEqual(
Expected,
[
{
bondy_registry_entry:uri(E),
bondy_registry_entry:match_policy(E),
bondy_registry_entry:invocation_policy(E)
}
|| E <- lists:sort(Fun, Entries)
]
),
ok.
| null | https://raw.githubusercontent.com/bondy-io/bondy/a1267e7e5526db24f278e12315020753f3168b44/apps/bondy/test/bondy_registry_entry_SUITE.erl | erlang | =============================================================================
bondy_realm_SUITE.erl -
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=============================================================================
All using ?INVOKE_SINGLE by default | Copyright ( c ) 2016 - 2022 Leapsight . All rights reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(bondy_registry_entry_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-include_lib("wamp/include/wamp.hrl").
-include("bondy.hrl").
-include("bondy_plum_db.hrl").
-include("bondy_security.hrl").
-compile([nowarn_export_all, export_all]).
all() ->
[
mg_comparator,
composite_comparator
].
init_per_suite(Config) ->
bondy_ct:start_bondy(),
Config.
end_per_suite(Config) ->
: ( ) ,
{save_config, Config}.
mg_comparator(_) ->
L = [
{<<"a1....">>, ?WILDCARD_MATCH},
{<<"a1....e5">>, ?WILDCARD_MATCH},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH},
{<<"a1.b2.c3">>, ?PREFIX_MATCH},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH},
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH}
],
Expected = [
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH},
{<<"a1.b2.c3">>, ?PREFIX_MATCH},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH},
{<<"a1....e5">>, ?WILDCARD_MATCH},
{<<"a1....">>, ?WILDCARD_MATCH}
],
Ref = bondy_ref:new(internal, self()),
Entries = [
bondy_registry_entry:new(
registration, <<"com.foo">>, Ref, Uri, #{match => P}
)
|| {Uri, P} <- L
],
Fun = bondy_registry_entry:mg_comparator(),
?assertEqual(
Expected,
[
{bondy_registry_entry:uri(E), bondy_registry_entry:match_policy(E)}
|| E <- lists:sort(Fun, Entries)
]
),
ok.
composite_comparator(_) ->
L = [
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3">>, ?PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE}
],
Expected = [
{<<"a1.b2.c3.d4.e55">>, ?EXACT_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3.d4">>, ?PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c3">>, ? PREFIX_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2.c33..e5">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2..d4.">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_FIRST},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1.b2...e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1...d4.e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....e5">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN},
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_SINGLE},
{<<"a1....">>, ?WILDCARD_MATCH, ?INVOKE_ROUND_ROBIN}
],
Ref = bondy_ref:new(internal, self()),
Entries = [
bondy_registry_entry:new(
registration, <<"com.foo">>, Ref, Uri, #{match => P, invoke => I}
)
|| {Uri, P, I} <- L
],
Fun = bondy_registry_entry:mg_comparator(),
?assertEqual(
Expected,
[
{
bondy_registry_entry:uri(E),
bondy_registry_entry:match_policy(E),
bondy_registry_entry:invocation_policy(E)
}
|| E <- lists:sort(Fun, Entries)
]
),
ok.
|
21e9d152de584af19cbaa85b329870d57c631e6fa6a61b6f3e65f3ec6a2369ae | futurice/haskell-mega-repo | TH.hs | # LANGUAGE TemplateHaskell #
module Futurice.JavaScript.TH (
embedJS,
) where
import Futurice.JavaScript
import Futurice.Prelude
import Language.Haskell.TH (Exp, Q, runIO)
import Prelude ()
import qualified Data.Text.IO as T
-- | Create 'JS' from a file, compile-time verifying it can be parsed.
--
-- > $(embedJS "supersource.js")
embedJS :: FilePath -> Q Exp
embedJS fp = do
contents <- runIO $ T.readFile fp
case makeJS contents fp of
Left err -> fail $ "embedJS " <> fp <> " -- " <> err
Right _js -> [| unsafeMakeJS $(embedText fp) |]
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/95fe8e33ad6426eb6e52a9c23db4aeffd443b3e5/futurice-foundation/src/Futurice/JavaScript/TH.hs | haskell | | Create 'JS' from a file, compile-time verifying it can be parsed.
> $(embedJS "supersource.js") | # LANGUAGE TemplateHaskell #
module Futurice.JavaScript.TH (
embedJS,
) where
import Futurice.JavaScript
import Futurice.Prelude
import Language.Haskell.TH (Exp, Q, runIO)
import Prelude ()
import qualified Data.Text.IO as T
embedJS :: FilePath -> Q Exp
embedJS fp = do
contents <- runIO $ T.readFile fp
case makeJS contents fp of
Left err -> fail $ "embedJS " <> fp <> " -- " <> err
Right _js -> [| unsafeMakeJS $(embedText fp) |]
|
e07948c3b892673d50444ad59bb72cdec8aa8059cada8070c9f15222439db83e | gbour/wave | mqtt_msg.erl | %%
Wave - MQTT Broker
Copyright ( C ) 2014 - 2016 -
%%
%% This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation , version 3 of the License .
%%
%% This program is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
%%
You should have received a copy of the GNU Affero General Public License
%% along with this program. If not, see </>.
-module(mqtt_msg).
-author("Guillaume Bour <>").
-export([encode/1, decode/1]).
-include("mqtt_msg.hrl").
%
% messages length:
%
exactly 3 bytes ( type + flags + rle , rle = 0 ):
% - PINREQ
% - PINRESP
% - DISCONNECT
%
exactly 5 bytes ( type + flags + rle = 2 + varheader ):
% - CONNACK
% - PUBACK
% - PUBREC
% - PUBREL
% - PUBCOMP
% - UNSUBACK
%
more than 3 bytes ( type + flags + rle + varheader + payload ):
- CONNECT ( min 13 bytes )
- PUBLISH ( min 3 )
% - SUBSCRIBE (min 3)
- SUBACK ( min 3 )
- UNSUBSCRIBE ( min 3 )
%
-spec decode(binary()) -> {ok, mqtt_msg(), binary()}
| {error, size, integer()}
| {error, overflow|{type, integer()}}
| {error, disconnect|conformity|protocol_version, binary()}.
decode(<<Type:4, Flags:4, Rest/binary>>) ->
decode(type2atom(Type), <<Flags:4>>, decode_rlength(Rest, erlang:byte_size(Rest), minlen(Type))).
% invalid MQTT type
decode({invalid, T}, _, _) ->
{error, {type, T}};
% invalid Remaining Length header or not enough buffer to decode RLen
decode(_, _, {error, overflow}) ->
{error, overflow};
% buffer is too short to decode remaining-length header
decode(_Type, _, {error, size, Size}) ->
lager:warning("~p: not enough data to decode rlen. missing ~p bytes", [_Type, Size]),
{error, size, Size};
% Buffer is too short (do not contains the whole MQTT message)
decode(_, _, {ESize, RSize, _}) when ESize > RSize ->
{error, size, ESize-RSize};
decode(Type, Flags= <<Dup:1, Qos:2, Retain:1>>, {RLen, _, Rest}) ->
checkflags(Type, Flags), % throw exception on error
<<BPayload:RLen/binary, Rest2/binary>> = Rest,
Msg = case decode_payload(Type, {Dup, Qos, Retain}, {RLen, BPayload}) of
{ok, Payload} ->
{ok, #mqtt_msg{
type=Type,
retain=Retain,
qos=Qos,
dup=Dup,
payload = Payload
}, Rest2};
{error, Err} ->
{error, Err, Rest2}
end,
%lager:debug("~p", [Msg]),
Msg.
%%
%% @doc «CONNECT» message
support both MQTT 3.1 and 3.1.1 versions
%%
-spec decode_payload(mqtt_verb(), {integer(), integer(), integer()}, {integer(), binary()}) ->
{error, disconnect|conformity|protocol_version}
| {ok, list({atom(), any()})}.
decode_payload('CONNECT', _, {_Len, <<
PLen:16,
Protocol:PLen/binary,
Version:8/integer,
Flags:7,
Reserved:1,
Ka:16,
Rest/binary>>}) ->
decode_connect(Protocol, Version, Reserved, {<<Flags:7/integer>>, Ka, Rest});
decode_payload('PUBLISH', {_Dup=1, _Qos=0, _}, _) ->
erlang:throw({'PUBLISH', "3.3.1-2", "DUP flag MUST be 0 when QOS = 0"});
decode_payload('PUBLISH', {_ , _Qos=3, _}, _) ->
erlang:throw({'PUBLISH', "3.3.1-4", "invalid QOS value (3)"});
decode_payload('PUBLISH', {_, Qos, _}, {_Len, Rest}) ->
lager : debug("PUBLISH ( ) ~p ~p " , [ Qos , Len , Rest ] ) ,
{Topic, Rest2} = decode_string(Rest),
checktopic(Topic), % raise exception
Ret = if
Qos =:= 0 ->
[{topic,Topic}, {data, Rest2}];
true ->
<<MsgID:16, Rest3/binary>> = Rest2,
case MsgID of
0 -> erlang:throw({'PUBLISH', "2.3.1-1", "null msgid"});
_ -> pass
end,
[{topic,Topic}, {msgid,MsgID}, {data, Rest3}]
end,
{ Topic , < < , Rest2 / binary > > } = decode_string(Rest ) ,
%lager:debug("ret= ~p", [Ret]),
{ok, Ret};
decode_payload('SUBSCRIBE', _, {_Len, <<MsgID:16, Payload/binary>>}) ->
lager : ~p " , [ MsgID ] ) ,
case MsgID of
0 -> erlang:throw({'SUBSCRIBE', "2.3.1-1", "null msgid"});
_ -> pass
end,
Topics = get_topics(Payload, [], true),
%lager:debug("topics= ~p", [Topics]),
{ok, [{msgid, MsgID},{topics, Topics}]};
decode_payload('UNSUBSCRIBE', _, {_Len, <<MsgID:16, Payload/binary>>}) ->
%lager:debug("UNSUBSCRIBE: ~p", [Payload]),
case MsgID of
0 -> erlang:throw({'UNSUBSCRIBE', "2.3.1-1", "null msgid"});
_ -> pass
end,
Topics = get_topics(Payload, [], false),
{ok, [{msgid, MsgID}, {topics, Topics}]};
decode_payload('PINGREQ', _, {0, <<>>}) ->
{ok, []};
decode_payload('PINGRESP', _, {0, <<>>}) ->
{ok, []};
decode_payload('DISCONNECT', _, {0, <<>>}) ->
%lager:debug("DISCONNECT"),
% not a real error, we just want to close the connection
%TODO: return a disconnect object; and do cleanup upward
%{error, disconnect};
{ok, []};
decode_payload('CONNACK', _, {_Len, <<_:8, RetCode:8/integer>>}) ->
%lager:debug("CONNACK"),
{ok, [{retcode, RetCode}]};
decode_payload('PUBACK', _, {_Len=2, <<MsgID:16>>}) ->
lager : . MsgID= ~p " , [ MsgID ] ) ,
{ok, [{msgid, MsgID}]};
decode_payload('PUBREC', _, {_Len, <<MsgID:16>>}) ->
%lager:debug("PUBREC. MsgID= ~p", [MsgID]),
{ok, [{msgid, MsgID}]};
TODO : throw exception with custom message when ' PUBREL ' and qos ! = 1
decode_payload('PUBREL', {_, _Qos=1, _}, {_Len, <<MsgID:16>>}) ->
lager : . MsgID= ~p " , [ MsgID ] ) ,
{ok, [{msgid, MsgID}]};
decode_payload('PUBCOMP', _, {_Len, <<MsgID:16>>}) ->
lager : . MsgID= ~p " , [ MsgID ] ) ,
{ok, [{msgid, MsgID}]};
decode_payload('SUBACK', _, {_Len, <<MsgID:16, _Qos/binary>>}) ->
%lager:debug("SUBACK. MsgID= ~p", [MsgID]),
{ok, [{msgid, MsgID}]};
decode_payload(Cmd, Flags, Args) ->
lager:notice("invalid command ~p (flags=~p, payload=~p)", [Cmd, Flags, Args]),
{error, disconnect}.
%%%
% match wrong protocol versions
VALID
-spec decode_connect(binary(), byte(), 0|1, {bitstring(), char(), binary()}) ->
{error, conformity|protocol_version} | {ok, list({atom(), any()})}.
decode_connect(<<"MQIsdp">>, Vers=3, 0, Payload) ->
decode_connect2(Vers, Payload);
decode_connect(<<"MQTT">> , Vers=4, 0, Payload) ->
decode_connect2(Vers, Payload);
% ERRORS
decode_connect(_, _, _Reserved=1, _) ->
lager:notice("CONNECT: reserved flag MUST be 0"),
{error, conformity};
decode_connect(Protocol= <<"MQIsdp">>, Version, _, _) ->
lager:notice("CONNECT: invalid protocol version (~p/~p)", [Protocol, Version]),
{error, protocol_version};
decode_connect(Protocol= <<"MQTT">>, Version, _, _) ->
lager:notice("CONNECT: invalid protocol version (~p/~p)", [Protocol, Version]),
{error, protocol_version};
decode_connect(Protocol, _, _, _) ->
lager:notice("CONNECT: invalid protocol name (~p)", [Protocol]),
{error, conformity}.
-spec decode_connect2(byte(), {bitstring(), char(), binary()}) -> {error, conformity} | {ok, [{atom(), any}]}.
decode_connect2(_Version, {<<0:1, 1:1, _:5>>, _, _}) ->
lager:notice("CONNECT: password flag is set while username flag is not"),
{error, conformity};
decode_connect2(_, {<<_:3, WillQos:2, _:2>>, _, _}) when WillQos =:= 3 ->
erlang:throw({'CONNECT', "MQTT-3.1.2-14", "invalid will qos (3)"});
decode_connect2(_, {<<_:3, WillQos:2, WillFlag:1, _:1>>, _, _}) when WillFlag =:= 0, WillQos =/= 0 ->
erlang:throw({'CONNECT', "MQTT-3.1.2-13", "if will flag is 0, will qos MUST be 0 too"});
decode_connect2(_, {<<_:2, WillRetain:1, _:2, WillFlag:1, _:1>>, _, _}) when WillFlag =:= 0, WillRetain =:= 1 ->
erlang:throw({'CONNECT', "MQTT-3.1.2-15", "if will flag is 0, will retain MUST be 0 too"});
decode_connect2(Version,
{<<User:1, Pwd:1, WillRetain:1, WillQos:2, WillFlag:1, Clean:1>>, Ka, Rest}) ->
% decoding Client-ID
{ClientID, Rest2} = decode_string(Rest),
%NOTE: disabled for now as it would require to refactor all tests
TODO : spec says MAY be > 23 chars and other characters
% add config option to change behaviour
%check_clientid(ClientID, 0),
% decoding will topic & message
{Will, Rest3} = case WillFlag of
1 ->
{_WillTopic, _R} = decode_string(Rest2),
checktopic(_WillTopic), % ensure topic is valid, raise exception either (w/ WRONG msg)
Will message is any binary content . 2 first bytes are will message length
TODO : this throws an " anonymous " exception if MsgLen is missing
% Effect is to "kill" ranch listener, and session is ended when timeout reached
%
% we should catch it (subfun) to throw a named exception
%TODO: test is ranch listener destruction do not disconnect other clients
<<MsgLen:16/integer, _WillMsg:MsgLen/binary, _R2/binary>> = _R,
{#{topic => _WillTopic, message => _WillMsg, qos => WillQos, retain => WillRetain}, _R2};
_ -> {undefined, Rest2}
end,
% decoding username
{Username , Rest4} = case User of
1 -> decode_string(Rest3);
_ -> {undefined, Rest3}
end,
% decoding password
{Password, _Rest5} = case Pwd of
1 -> decode_string(Rest4);
_ -> {undefined, Rest4}
end,
{ok, [
{clientid , ClientID},
{will , Will},
{username , Username},
{password , Password},
{keepalive, Ka},
{clean , Clean},
{version , Version}
]}.
-spec get_topics(Data :: binary(), Acc :: list(any()), Subscription :: true|false) ->
Topics::list(Topic::binary()|{Topic::binary(), Qos::integer()}).
get_topics(<<>>, [], true) ->
erlang:throw({'SUBSCRIBE' , "MQTT-3.8.3-1" , "no topic filter/qos"});
get_topics(<<>>, [], false) ->
erlang:throw({'UNSUBSCRIBE', "MQTT-3.10.3-2", "no topic filter/qos"});
get_topics(<<>>, Topics, _) ->
lists:reverse(Topics);
with QOS field ( SUBSCRIBE )
get_topics(Payload, Topics, true) ->
{Name, Rest} = decode_string(Payload),
checktopicfilter(Name),
{Qos, Rest2} = decode_subscribe_qos(Rest),
get_topics(Rest2, [{Name,Qos}|Topics], true);
without QOS field ( UNSUBSCRIBE )
get_topics(Payload, Topics, _) ->
{Name, Rest} = decode_string(Payload),
checktopicfilter(Name),
get_topics(Rest, [Name|Topics], false).
% decode utf8 string
-spec decode_string(Data :: binary()) -> {String :: binary(), Rest :: binary()}.
decode_string(<<>>) ->
{<<>>, <<>>};
decode_string(Pkt) ->
%lager:debug("~p",[Pkt]),
<<Len:16/integer, Str:Len/binary, Rest2/binary>> = Pkt,
lager : debug("~p ~p ~p",[Len , ) ,
case wave_utf8:validate(Str) of
ok ->
{Str, Rest2};
Err ->
erlang:throw(Err)
end.
decode_subscribe_qos(<<_:6, Qos:2/integer, _/binary>>) when Qos =:= 3 ->
erlang:throw({'SUBSCRIBE', "MQTT-3.8.3-4", "invalid qos (3)"});
decode_subscribe_qos(<<_:6, Qos:2/integer, Rest/binary>>) ->
{Qos, Rest}.
-spec decode_rlength(binary(), integer(), integer()) -> {error, overflow}
| {error, size, integer()}
| {Size::integer(), RestSize::integer(), Rest::binary()}.
decode_rlength(_Pkt, PktSize, MinLen) when PktSize < MinLen ->
{error, size, MinLen-PktSize};
decode_rlength(Pkt, _, _) ->
p_decode_rlength(Pkt, 1, 0).
p_decode_rlength(_, 5, _) ->
% remaining length overflow
{error, overflow};
p_decode_rlength(<<0:1, Len:7/integer, Rest/binary>>, Mult, Acc) ->
{Acc + Mult*Len, erlang:byte_size(Rest), Rest};
p_decode_rlength(<<1:1, Len:7/integer, Rest/binary>>, Mult, Acc) ->
p_decode_rlength(Rest, Mult*128, Acc + Mult*Len).
-spec encode_rlength(binary()) -> binary().
encode_rlength(Payload) ->
encode_rlength(erlang:byte_size(Payload), <<"">>).
shortcut for 1 byte only rlength ( < 128 )
-spec encode_rlength(integer(), binary()) -> binary().
encode_rlength(Size, <<"">>) when Size < 128 ->
<<Size:8>>;
encode_rlength(0, RLen) ->
RLen;
encode_rlength(Size, RLen) ->
division by 128
Digit = (Size rem 128) + ( if
RLen2 > 0 -> 128;
true -> 0
end ),
encode_rlength(RLen2, <<RLen/binary, Digit:8>>).
-spec encode(mqtt_msg()) -> binary().
encode(#mqtt_msg{retain=Retain, qos=Qos, dup=Dup, type=Type, payload=Payload}) ->
P = encode_payload(Type, Qos, Payload),
%lager:info("~p ~p", [P, is_binary(P)]),
<<
% fixed headers
(atom2type(Type)):4, Dup:1, Qos:2, Retain:1,
% remaining length
(encode_rlength(P))/binary,
% variable headers + payload
P/binary
>>.
-spec encode_payload(mqtt_verb(), integer(), list({atom(), any()})) -> binary().
encode_payload('CONNECT', _Qos, Opts) ->
ClientID = proplists:get_value(clientid, Opts),
Username = proplists:get_value(username, Opts),
Password = proplists:get_value(password, Opts),
<<
6:16, % protocol name
<<"MQIsdp">>/binary,
3:8, % version
% connect flags
(setflag(Username)):1,
(setflag(Password)):1,
0:6,
10:16, % keep-alive
(encode_string(ClientID))/binary,
(encode_string(Username))/binary,
(encode_string(Password))/binary
>>;
encode_payload('PUBLISH', _Qos=0, Opts) ->
Topic = proplists:get_value(topic, Opts),
Content = proplists:get_value(content, Opts),
<<
(encode_string(Topic))/binary,
% payload
(bin(Content))/binary
>>;
encode_payload('PUBLISH', _Qos, Opts) ->
Topic = proplists:get_value(topic, Opts),
MsgID = proplists:get_value(msgid, Opts),
Content = proplists:get_value(content, Opts),
<<
(encode_string(Topic))/binary,
MsgID:16,
% payload
(bin(Content))/binary
>>;
encode_payload('SUBSCRIBE', _Qos, Opts) ->
Topic = proplists:get_value(topic, Opts),
%lager:info("topic= ~p", [Topic]),
<<
MsgID - mandatory
(encode_string(Topic))/binary,
QoS
>>;
encode_payload('CONNACK', _Qos, Opts) ->
SessionPresent = proplists:get_value(session, Opts, 0),
RetCode = proplists:get_value(retcode, Opts),
<<
% var headers
0:7,
SessionPresent:1,
% payload
RetCode:8
>>;
encode_payload('PUBACK', _Qos, Opts) ->
MsgID = proplists:get_value(msgid, Opts),
<<
MsgID:16
>>;
encode_payload('PUBREC', _Qos, Opts) ->
MsgID = proplists:get_value(msgid, Opts),
<<
MsgID:16
>>;
encode_payload('PUBREL', _Qos, Opts) ->
MsgID = proplists:get_value(msgid, Opts),
<<
MsgID:16
>>;
encode_payload('PUBCOMP', _Qos, Opts) ->
MsgID = proplists:get_value(msgid, Opts),
<<
MsgID:16
>>;
encode_payload('SUBACK', _Qos, Opts) ->
MsgId = proplists:get_value(msgid, Opts),
Qos = proplists:get_value(qos, Opts),
<<
MsgId:16,
(encode_qos(Qos))/binary
>>;
encode_payload('UNSUBACK', _Qos, [{msgid, MsgID}]) ->
<<MsgID:16>>;
encode_payload('PINGREQ', _Qos, _) ->
<<>>;
encode_payload('PINGRESP', _Qos, _) ->
<<>>.
-spec encode_string(undefined|string()) -> binary().
encode_string(undefined) ->
<<>>;
encode_string(Str) ->
<<
(size(Str)):16,
Str/binary
>>.
-spec encode_qos(undefined|list(integer())) -> binary().
encode_qos(undefined) ->
<<>>;
encode_qos([]) ->
<<>>;
encode_qos([H|T]) ->
<<H:8/integer, (encode_qos(T))/binary>>.
-spec atom2type(mqtt_verb()) -> integer().
atom2type('CONNECT') -> 1;
atom2type('CONNACK') -> 2;
atom2type('PUBLISH') -> 3;
atom2type('PUBACK') -> 4;
atom2type('PUBREC') -> 5;
atom2type('PUBREL') -> 6;
atom2type('PUBCOMP') -> 7;
atom2type('SUBSCRIBE') -> 8;
atom2type('SUBACK') -> 9;
atom2type('UNSUBSCRIBE') -> 10; % dialyzer generates a warning because this message is nowhere generated
atom2type('UNSUBACK') -> 11;
atom2type('PINGREQ') -> 12;
atom2type('PINGRESP') -> 13;
atom2type('DISCONNECT') -> 14. % dialyzer generates a warning because this message is nowhere generated
-spec type2atom(integer()) -> mqtt_verb() | {invalid, integer()}.
type2atom(1) -> 'CONNECT';
type2atom(2) -> 'CONNACK';
type2atom(3) -> 'PUBLISH';
type2atom(4) -> 'PUBACK';
type2atom(5) -> 'PUBREC';
type2atom(6) -> 'PUBREL';
type2atom(7) -> 'PUBCOMP';
type2atom(8) -> 'SUBSCRIBE';
type2atom(9) -> 'SUBACK';
type2atom(10) -> 'UNSUBSCRIBE';
type2atom(11) -> 'UNSUBACK';
type2atom(12) -> 'PINGREQ';
type2atom(13) -> 'PINGRESP';
type2atom(14) -> 'DISCONNECT';
type2atom(T) -> {invalid, T}.
Validate flags according to MQTT verb
[ MQTT-2.2.2 - 1 ] , [ MQTT-2.2.2 - 2 ] .
%
-spec checkflags(mqtt_verb(), <<_:4>>) -> ok.
checkflags('CONNECT' , <<0:4>>) -> ok;
checkflags('CONNACK' , <<0:4>>) -> ok;
checkflags('PUBLISH' , <<_:4>>) -> ok;
checkflags('PUBACK' , <<0:4>>) -> ok;
checkflags('PUBREC' , <<0:4>>) -> ok;
checkflags('PUBREL' , <<2:4>>) -> ok;
checkflags('PUBCOMP' , <<0:4>>) -> ok;
checkflags('SUBSCRIBE' , <<2:4>>) -> ok;
checkflags('SUBACK' , <<0:4>>) -> ok;
checkflags('UNSUBSCRIBE', <<2:4>>) -> ok;
checkflags('UNSUBACK' , <<0:4>>) -> ok;
checkflags('PINGREQ' , <<0:4>>) -> ok;
checkflags('PINRESP' , <<0:4>>) -> ok;
checkflags('DISCONNECT' , <<0:4>>) -> ok;
checkflags(Verb , Flags) -> erlang:throw({Verb, reserved_flags, Flags}).
% Check topic name does not contains wildcard characters (+ or #)
[ MQTT-3.3.2 - 2 ] , [ MQTT-4.7.3 - 1 ]
%
-spec checktopic(unicode:unicode_binary()) -> ok.
checktopic(<<>>) ->
erlang:throw({'PUBLISH', "MQTT-4.7.3-1", "0-length topic"});
checktopic(Topic) ->
checktopic2(Topic).
checktopic2(<<>>) ->
ok;
checktopic2(<<H/utf8, _Rest/binary>>) when H =:= $+; H =:= $# ->
erlang:throw({'PUBLISH', "MQTT-3.3.2-2", H});
checktopic2(<<_/utf8, Rest/binary>>) ->
checktopic2(Rest).
Validate a topic filter
[ MQTT-4.7.1 - 2 ] , [ MQTT-4.7.1 - 3 ] , [ MQTT-4.7.3 - 1 ]
%
-spec checktopicfilter(unicode:unicode_binary()) -> ok.
checktopicfilter(<<>>) ->
erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.3-1]", "0-length topic filter"});
checktopicfilter(TopicF) ->
checktopicfilter2(TopicF).
checktopicfilter2(<<>>) ->
ok;
checktopicfilter2(<<H/utf8, $#>>) when H =/= $/ ->
erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-2]", "misplaced # wildcard character"});
checktopicfilter2(<<$#, _/utf8, _/binary>>) ->
erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-2]", "misplaced # wildcard character"});
checktopicfilter2(<<H/utf8, $+, _/binary>>) when H =/= $/ ->
erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-3]", "misplaced + wildcard character"});
checktopicfilter2(<<$+, H/utf8, _/binary>>) when H =/= $/ ->
erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-3]", "misplaced + wildcard character"});
checktopicfilter2(<<_/utf8, Rest/binary>>) ->
checktopicfilter2(Rest).
Validate clientid
% [MQTT-3.1.3-5]
%
-spec check_clientid(unicode:unicode_binary(), integer()) -> ok.
check_clientid(_, Len) when Len > 23 ->
erlang:throw({'CONNECT', "[MQTT-3.1.3-5]", "clientid > 23 characters"});
check_clientid(<<>>, _) ->
ok;
check_clientid(<<H/utf8, Rest/binary>>, Len) when H >= $0, H =< $9 ->
check_clientid(Rest, Len+1);
check_clientid(<<H/utf8, Rest/binary>>, Len) when H >= $a, H =< $z ->
check_clientid(Rest, Len+1);
check_clientid(<<H/utf8, Rest/binary>>, Len) when H >= $A, H =< $Z ->
check_clientid(Rest, Len+1);
check_clientid(_, _) ->
erlang:throw({'CONNECT', "[MQTT-3.1.3-5]", "clientid contains non-valid characters"}).
minlen(1) -> 3;
minlen(2) -> 3;
minlen(3) -> 3;
minlen(4) -> 3;
minlen(5) -> 3;
minlen(6) -> 3;
minlen(7) -> 3;
minlen(8) -> 3;
minlen(9) -> 3;
minlen(10) -> 3;
minlen(11) -> 3;
minlen(12) -> 1;
minlen(13) -> 1;
minlen(14) -> 1;
minlen(_) -> -1.
setflag(undefined) -> 0;
setflag(_) -> 1.
%TODO: why ???
bin(X) when is_binary(X) ->
X.
| null | https://raw.githubusercontent.com/gbour/wave/fe5b78408a7c6e723b19cd454068958058e5e072/apps/wave/src/mqtt_msg.erl | erlang |
This program is free software: you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
messages length:
- PINREQ
- PINRESP
- DISCONNECT
- CONNACK
- PUBACK
- PUBREC
- PUBREL
- PUBCOMP
- UNSUBACK
- SUBSCRIBE (min 3)
invalid MQTT type
invalid Remaining Length header or not enough buffer to decode RLen
buffer is too short to decode remaining-length header
Buffer is too short (do not contains the whole MQTT message)
throw exception on error
lager:debug("~p", [Msg]),
@doc «CONNECT» message
raise exception
lager:debug("ret= ~p", [Ret]),
lager:debug("topics= ~p", [Topics]),
lager:debug("UNSUBSCRIBE: ~p", [Payload]),
lager:debug("DISCONNECT"),
not a real error, we just want to close the connection
TODO: return a disconnect object; and do cleanup upward
{error, disconnect};
lager:debug("CONNACK"),
lager:debug("PUBREC. MsgID= ~p", [MsgID]),
lager:debug("SUBACK. MsgID= ~p", [MsgID]),
match wrong protocol versions
ERRORS
decoding Client-ID
NOTE: disabled for now as it would require to refactor all tests
add config option to change behaviour
check_clientid(ClientID, 0),
decoding will topic & message
ensure topic is valid, raise exception either (w/ WRONG msg)
Effect is to "kill" ranch listener, and session is ended when timeout reached
we should catch it (subfun) to throw a named exception
TODO: test is ranch listener destruction do not disconnect other clients
decoding username
decoding password
decode utf8 string
lager:debug("~p",[Pkt]),
remaining length overflow
lager:info("~p ~p", [P, is_binary(P)]),
fixed headers
remaining length
variable headers + payload
protocol name
version
connect flags
keep-alive
payload
payload
lager:info("topic= ~p", [Topic]),
var headers
payload
dialyzer generates a warning because this message is nowhere generated
dialyzer generates a warning because this message is nowhere generated
Check topic name does not contains wildcard characters (+ or #)
[MQTT-3.1.3-5]
TODO: why ??? | Wave - MQTT Broker
Copyright ( C ) 2014 - 2016 -
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation , version 3 of the License .
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
-module(mqtt_msg).
-author("Guillaume Bour <>").
-export([encode/1, decode/1]).
-include("mqtt_msg.hrl").
exactly 3 bytes ( type + flags + rle , rle = 0 ):
exactly 5 bytes ( type + flags + rle = 2 + varheader ):
more than 3 bytes ( type + flags + rle + varheader + payload ):
- CONNECT ( min 13 bytes )
- PUBLISH ( min 3 )
- SUBACK ( min 3 )
- UNSUBSCRIBE ( min 3 )
-spec decode(binary()) -> {ok, mqtt_msg(), binary()}
| {error, size, integer()}
| {error, overflow|{type, integer()}}
| {error, disconnect|conformity|protocol_version, binary()}.
decode(<<Type:4, Flags:4, Rest/binary>>) ->
decode(type2atom(Type), <<Flags:4>>, decode_rlength(Rest, erlang:byte_size(Rest), minlen(Type))).
decode({invalid, T}, _, _) ->
{error, {type, T}};
decode(_, _, {error, overflow}) ->
{error, overflow};
decode(_Type, _, {error, size, Size}) ->
lager:warning("~p: not enough data to decode rlen. missing ~p bytes", [_Type, Size]),
{error, size, Size};
decode(_, _, {ESize, RSize, _}) when ESize > RSize ->
{error, size, ESize-RSize};
decode(Type, Flags= <<Dup:1, Qos:2, Retain:1>>, {RLen, _, Rest}) ->
<<BPayload:RLen/binary, Rest2/binary>> = Rest,
Msg = case decode_payload(Type, {Dup, Qos, Retain}, {RLen, BPayload}) of
{ok, Payload} ->
{ok, #mqtt_msg{
type=Type,
retain=Retain,
qos=Qos,
dup=Dup,
payload = Payload
}, Rest2};
{error, Err} ->
{error, Err, Rest2}
end,
Msg.
support both MQTT 3.1 and 3.1.1 versions
-spec decode_payload(mqtt_verb(), {integer(), integer(), integer()}, {integer(), binary()}) ->
{error, disconnect|conformity|protocol_version}
| {ok, list({atom(), any()})}.
decode_payload('CONNECT', _, {_Len, <<
PLen:16,
Protocol:PLen/binary,
Version:8/integer,
Flags:7,
Reserved:1,
Ka:16,
Rest/binary>>}) ->
decode_connect(Protocol, Version, Reserved, {<<Flags:7/integer>>, Ka, Rest});
decode_payload('PUBLISH', {_Dup=1, _Qos=0, _}, _) ->
erlang:throw({'PUBLISH', "3.3.1-2", "DUP flag MUST be 0 when QOS = 0"});
decode_payload('PUBLISH', {_ , _Qos=3, _}, _) ->
erlang:throw({'PUBLISH', "3.3.1-4", "invalid QOS value (3)"});
decode_payload('PUBLISH', {_, Qos, _}, {_Len, Rest}) ->
lager : debug("PUBLISH ( ) ~p ~p " , [ Qos , Len , Rest ] ) ,
{Topic, Rest2} = decode_string(Rest),
Ret = if
Qos =:= 0 ->
[{topic,Topic}, {data, Rest2}];
true ->
<<MsgID:16, Rest3/binary>> = Rest2,
case MsgID of
0 -> erlang:throw({'PUBLISH', "2.3.1-1", "null msgid"});
_ -> pass
end,
[{topic,Topic}, {msgid,MsgID}, {data, Rest3}]
end,
{ Topic , < < , Rest2 / binary > > } = decode_string(Rest ) ,
{ok, Ret};
decode_payload('SUBSCRIBE', _, {_Len, <<MsgID:16, Payload/binary>>}) ->
lager : ~p " , [ MsgID ] ) ,
case MsgID of
0 -> erlang:throw({'SUBSCRIBE', "2.3.1-1", "null msgid"});
_ -> pass
end,
Topics = get_topics(Payload, [], true),
{ok, [{msgid, MsgID},{topics, Topics}]};
decode_payload('UNSUBSCRIBE', _, {_Len, <<MsgID:16, Payload/binary>>}) ->
case MsgID of
0 -> erlang:throw({'UNSUBSCRIBE', "2.3.1-1", "null msgid"});
_ -> pass
end,
Topics = get_topics(Payload, [], false),
{ok, [{msgid, MsgID}, {topics, Topics}]};
decode_payload('PINGREQ', _, {0, <<>>}) ->
{ok, []};
decode_payload('PINGRESP', _, {0, <<>>}) ->
{ok, []};
decode_payload('DISCONNECT', _, {0, <<>>}) ->
{ok, []};
decode_payload('CONNACK', _, {_Len, <<_:8, RetCode:8/integer>>}) ->
{ok, [{retcode, RetCode}]};
decode_payload('PUBACK', _, {_Len=2, <<MsgID:16>>}) ->
lager : . MsgID= ~p " , [ MsgID ] ) ,
{ok, [{msgid, MsgID}]};
decode_payload('PUBREC', _, {_Len, <<MsgID:16>>}) ->
{ok, [{msgid, MsgID}]};
TODO : throw exception with custom message when ' PUBREL ' and qos ! = 1
decode_payload('PUBREL', {_, _Qos=1, _}, {_Len, <<MsgID:16>>}) ->
lager : . MsgID= ~p " , [ MsgID ] ) ,
{ok, [{msgid, MsgID}]};
decode_payload('PUBCOMP', _, {_Len, <<MsgID:16>>}) ->
lager : . MsgID= ~p " , [ MsgID ] ) ,
{ok, [{msgid, MsgID}]};
decode_payload('SUBACK', _, {_Len, <<MsgID:16, _Qos/binary>>}) ->
{ok, [{msgid, MsgID}]};
decode_payload(Cmd, Flags, Args) ->
lager:notice("invalid command ~p (flags=~p, payload=~p)", [Cmd, Flags, Args]),
{error, disconnect}.
VALID
-spec decode_connect(binary(), byte(), 0|1, {bitstring(), char(), binary()}) ->
{error, conformity|protocol_version} | {ok, list({atom(), any()})}.
decode_connect(<<"MQIsdp">>, Vers=3, 0, Payload) ->
decode_connect2(Vers, Payload);
decode_connect(<<"MQTT">> , Vers=4, 0, Payload) ->
decode_connect2(Vers, Payload);
decode_connect(_, _, _Reserved=1, _) ->
lager:notice("CONNECT: reserved flag MUST be 0"),
{error, conformity};
decode_connect(Protocol= <<"MQIsdp">>, Version, _, _) ->
lager:notice("CONNECT: invalid protocol version (~p/~p)", [Protocol, Version]),
{error, protocol_version};
decode_connect(Protocol= <<"MQTT">>, Version, _, _) ->
lager:notice("CONNECT: invalid protocol version (~p/~p)", [Protocol, Version]),
{error, protocol_version};
decode_connect(Protocol, _, _, _) ->
lager:notice("CONNECT: invalid protocol name (~p)", [Protocol]),
{error, conformity}.
-spec decode_connect2(byte(), {bitstring(), char(), binary()}) -> {error, conformity} | {ok, [{atom(), any}]}.
decode_connect2(_Version, {<<0:1, 1:1, _:5>>, _, _}) ->
lager:notice("CONNECT: password flag is set while username flag is not"),
{error, conformity};
decode_connect2(_, {<<_:3, WillQos:2, _:2>>, _, _}) when WillQos =:= 3 ->
erlang:throw({'CONNECT', "MQTT-3.1.2-14", "invalid will qos (3)"});
decode_connect2(_, {<<_:3, WillQos:2, WillFlag:1, _:1>>, _, _}) when WillFlag =:= 0, WillQos =/= 0 ->
erlang:throw({'CONNECT', "MQTT-3.1.2-13", "if will flag is 0, will qos MUST be 0 too"});
decode_connect2(_, {<<_:2, WillRetain:1, _:2, WillFlag:1, _:1>>, _, _}) when WillFlag =:= 0, WillRetain =:= 1 ->
erlang:throw({'CONNECT', "MQTT-3.1.2-15", "if will flag is 0, will retain MUST be 0 too"});
decode_connect2(Version,
{<<User:1, Pwd:1, WillRetain:1, WillQos:2, WillFlag:1, Clean:1>>, Ka, Rest}) ->
{ClientID, Rest2} = decode_string(Rest),
TODO : spec says MAY be > 23 chars and other characters
{Will, Rest3} = case WillFlag of
1 ->
{_WillTopic, _R} = decode_string(Rest2),
Will message is any binary content . 2 first bytes are will message length
TODO : this throws an " anonymous " exception if MsgLen is missing
<<MsgLen:16/integer, _WillMsg:MsgLen/binary, _R2/binary>> = _R,
{#{topic => _WillTopic, message => _WillMsg, qos => WillQos, retain => WillRetain}, _R2};
_ -> {undefined, Rest2}
end,
{Username , Rest4} = case User of
1 -> decode_string(Rest3);
_ -> {undefined, Rest3}
end,
{Password, _Rest5} = case Pwd of
1 -> decode_string(Rest4);
_ -> {undefined, Rest4}
end,
{ok, [
{clientid , ClientID},
{will , Will},
{username , Username},
{password , Password},
{keepalive, Ka},
{clean , Clean},
{version , Version}
]}.
-spec get_topics(Data :: binary(), Acc :: list(any()), Subscription :: true|false) ->
Topics::list(Topic::binary()|{Topic::binary(), Qos::integer()}).
get_topics(<<>>, [], true) ->
erlang:throw({'SUBSCRIBE' , "MQTT-3.8.3-1" , "no topic filter/qos"});
get_topics(<<>>, [], false) ->
erlang:throw({'UNSUBSCRIBE', "MQTT-3.10.3-2", "no topic filter/qos"});
get_topics(<<>>, Topics, _) ->
lists:reverse(Topics);
with QOS field ( SUBSCRIBE )
get_topics(Payload, Topics, true) ->
{Name, Rest} = decode_string(Payload),
checktopicfilter(Name),
{Qos, Rest2} = decode_subscribe_qos(Rest),
get_topics(Rest2, [{Name,Qos}|Topics], true);
without QOS field ( UNSUBSCRIBE )
get_topics(Payload, Topics, _) ->
{Name, Rest} = decode_string(Payload),
checktopicfilter(Name),
get_topics(Rest, [Name|Topics], false).
-spec decode_string(Data :: binary()) -> {String :: binary(), Rest :: binary()}.
decode_string(<<>>) ->
{<<>>, <<>>};
decode_string(Pkt) ->
<<Len:16/integer, Str:Len/binary, Rest2/binary>> = Pkt,
lager : debug("~p ~p ~p",[Len , ) ,
case wave_utf8:validate(Str) of
ok ->
{Str, Rest2};
Err ->
erlang:throw(Err)
end.
decode_subscribe_qos(<<_:6, Qos:2/integer, _/binary>>) when Qos =:= 3 ->
erlang:throw({'SUBSCRIBE', "MQTT-3.8.3-4", "invalid qos (3)"});
decode_subscribe_qos(<<_:6, Qos:2/integer, Rest/binary>>) ->
{Qos, Rest}.
-spec decode_rlength(binary(), integer(), integer()) -> {error, overflow}
| {error, size, integer()}
| {Size::integer(), RestSize::integer(), Rest::binary()}.
decode_rlength(_Pkt, PktSize, MinLen) when PktSize < MinLen ->
{error, size, MinLen-PktSize};
decode_rlength(Pkt, _, _) ->
p_decode_rlength(Pkt, 1, 0).
p_decode_rlength(_, 5, _) ->
{error, overflow};
p_decode_rlength(<<0:1, Len:7/integer, Rest/binary>>, Mult, Acc) ->
{Acc + Mult*Len, erlang:byte_size(Rest), Rest};
p_decode_rlength(<<1:1, Len:7/integer, Rest/binary>>, Mult, Acc) ->
p_decode_rlength(Rest, Mult*128, Acc + Mult*Len).
-spec encode_rlength(binary()) -> binary().
encode_rlength(Payload) ->
encode_rlength(erlang:byte_size(Payload), <<"">>).
shortcut for 1 byte only rlength ( < 128 )
-spec encode_rlength(integer(), binary()) -> binary().
encode_rlength(Size, <<"">>) when Size < 128 ->
<<Size:8>>;
encode_rlength(0, RLen) ->
RLen;
encode_rlength(Size, RLen) ->
division by 128
Digit = (Size rem 128) + ( if
RLen2 > 0 -> 128;
true -> 0
end ),
encode_rlength(RLen2, <<RLen/binary, Digit:8>>).
-spec encode(mqtt_msg()) -> binary().
encode(#mqtt_msg{retain=Retain, qos=Qos, dup=Dup, type=Type, payload=Payload}) ->
P = encode_payload(Type, Qos, Payload),
<<
(atom2type(Type)):4, Dup:1, Qos:2, Retain:1,
(encode_rlength(P))/binary,
P/binary
>>.
%% Encode the variable header + payload for one MQTT verb.  Opts is a
%% proplist whose expected keys depend on the verb.
-spec encode_payload(mqtt_verb(), integer(), list({atom(), any()})) -> binary().
encode_payload('CONNECT', _Qos, Opts) ->
    ClientID = proplists:get_value(clientid, Opts),
    Username = proplists:get_value(username, Opts),
    Password = proplists:get_value(password, Opts),
    %% NOTE(review): the protocol name is emitted without its 16-bit
    %% length prefix, protocol level or keep-alive here -- confirm this
    %% matches the peer/decoder before relying on the CONNECT encoder.
    <<
        <<"MQIsdp">>/binary,
        (setflag(Username)):1,
        (setflag(Password)):1,
        0:6,
        (encode_string(ClientID))/binary,
        (encode_string(Username))/binary,
        (encode_string(Password))/binary
    >>;
encode_payload('PUBLISH', _Qos=0, Opts) ->
    %% QoS 0 PUBLISH carries no message identifier.
    Topic   = proplists:get_value(topic, Opts),
    Content = proplists:get_value(content, Opts),
    <<
        (encode_string(Topic))/binary,
        (bin(Content))/binary
    >>;
encode_payload('PUBLISH', _Qos, Opts) ->
    %% QoS 1/2 PUBLISH: topic, 16-bit message id, then the payload.
    Topic   = proplists:get_value(topic, Opts),
    MsgID   = proplists:get_value(msgid, Opts),
    Content = proplists:get_value(content, Opts),
    <<
        (encode_string(Topic))/binary,
        MsgID:16,
        (bin(Content))/binary
    >>;
encode_payload('SUBSCRIBE', _Qos, Opts) ->
    %% Variable header: 16-bit message id; payload: topic filter
    %% followed by the requested QoS byte.
    %% NOTE(review): this clause was corrupted in the source (the
    %% msgid/qos fields were lost); rebuilt with defaults 1 and 0.
    Topic = proplists:get_value(topic, Opts),
    MsgID = proplists:get_value(msgid, Opts, 1), % MsgID - mandatory
    SQos  = proplists:get_value(qos, Opts, 0),   % requested QoS
    <<
        MsgID:16,
        (encode_string(Topic))/binary,
        SQos:8
    >>;
encode_payload('CONNACK', _Qos, Opts) ->
    SessionPresent = proplists:get_value(session, Opts, 0),
    RetCode = proplists:get_value(retcode, Opts),
    <<
        0:7,
        SessionPresent:1,
        RetCode:8
    >>;
%% The four acknowledgement verbs all carry only the 16-bit message id.
encode_payload('PUBACK', _Qos, Opts) ->
    MsgID = proplists:get_value(msgid, Opts),
    <<MsgID:16>>;
encode_payload('PUBREC', _Qos, Opts) ->
    MsgID = proplists:get_value(msgid, Opts),
    <<MsgID:16>>;
encode_payload('PUBREL', _Qos, Opts) ->
    MsgID = proplists:get_value(msgid, Opts),
    <<MsgID:16>>;
encode_payload('PUBCOMP', _Qos, Opts) ->
    MsgID = proplists:get_value(msgid, Opts),
    <<MsgID:16>>;
encode_payload('SUBACK', _Qos, Opts) ->
    MsgId = proplists:get_value(msgid, Opts),
    Qos = proplists:get_value(qos, Opts),
    <<
        MsgId:16,
        (encode_qos(Qos))/binary
    >>;
encode_payload('UNSUBACK', _Qos, [{msgid, MsgID}]) ->
    <<MsgID:16>>;
%% Ping packets have an empty payload.
encode_payload('PINGREQ', _Qos, _) ->
    <<>>;
encode_payload('PINGRESP', _Qos, _) ->
    <<>>.
%% Length-prefixed (16-bit big-endian) string encoding.  An absent
%% value ('undefined') encodes to nothing at all -- not even a length
%% prefix -- which is how optional CONNECT fields are omitted.
-spec encode_string(undefined|string()) -> binary().
encode_string(undefined) ->
    <<>>;
encode_string(Str) ->
    <<(byte_size(Str)):16, Str/binary>>.
%% Pack a list of QoS values into one byte each; 'undefined' (option
%% absent) and the empty list both encode to the empty binary.
-spec encode_qos(undefined|list(integer())) -> binary().
encode_qos(undefined) ->
    <<>>;
encode_qos(QosList) ->
    << <<Q:8/integer>> || Q <- QosList >>.
%% Verb atom -> 4-bit packet-type code (inverse of type2atom/1).
%%
%% BUG FIX: 'UNSUBSCRIBE' -> 10 was missing and the function ended with
%% ';' (a syntax error, the final 'DISCONNECT' clause had been lost);
%% both restored to mirror type2atom/1.
-spec atom2type(mqtt_verb()) -> integer().
atom2type('CONNECT')     -> 1;
atom2type('CONNACK')     -> 2;
atom2type('PUBLISH')     -> 3;
atom2type('PUBACK')      -> 4;
atom2type('PUBREC')      -> 5;
atom2type('PUBREL')      -> 6;
atom2type('PUBCOMP')     -> 7;
atom2type('SUBSCRIBE')   -> 8;
atom2type('SUBACK')      -> 9;
atom2type('UNSUBSCRIBE') -> 10;
atom2type('UNSUBACK')    -> 11;
atom2type('PINGREQ')     -> 12;
atom2type('PINGRESP')    -> 13;
atom2type('DISCONNECT')  -> 14.
%% 4-bit packet-type code -> verb atom; codes outside 1..14 are
%% reported as {invalid, Code}.
-spec type2atom(integer()) -> mqtt_verb() | {invalid, integer()}.
type2atom(T) when is_integer(T), T >= 1, T =< 14 ->
    element(T, {'CONNECT', 'CONNACK', 'PUBLISH', 'PUBACK',
                'PUBREC', 'PUBREL', 'PUBCOMP', 'SUBSCRIBE',
                'SUBACK', 'UNSUBSCRIBE', 'UNSUBACK', 'PINGREQ',
                'PINGRESP', 'DISCONNECT'});
type2atom(T) ->
    {invalid, T}.
%% Validate flags according to MQTT verb
%% [MQTT-2.2.2-1], [MQTT-2.2.2-2].
%% Check the 4 fixed-header flag bits for each verb; only PUBLISH may
%% use them freely, PUBREL/SUBSCRIBE/UNSUBSCRIBE must be 2, everything
%% else 0.  Any other combination throws {Verb, reserved_flags, Flags}.
-spec checkflags(mqtt_verb(), <<_:4>>) -> ok.
checkflags('CONNECT'    , <<0:4>>) -> ok;
checkflags('CONNACK'    , <<0:4>>) -> ok;
checkflags('PUBLISH'    , <<_:4>>) -> ok;
checkflags('PUBACK'     , <<0:4>>) -> ok;
checkflags('PUBREC'     , <<0:4>>) -> ok;
checkflags('PUBREL'     , <<2:4>>) -> ok;
checkflags('PUBCOMP'    , <<0:4>>) -> ok;
checkflags('SUBSCRIBE'  , <<2:4>>) -> ok;
checkflags('SUBACK'     , <<0:4>>) -> ok;
checkflags('UNSUBSCRIBE', <<2:4>>) -> ok;
checkflags('UNSUBACK'   , <<0:4>>) -> ok;
checkflags('PINGREQ'    , <<0:4>>) -> ok;
checkflags('PINGRESP'   , <<0:4>>) -> ok; % BUG FIX: was 'PINRESP' (typo), so valid PINGRESP packets threw
checkflags('DISCONNECT' , <<0:4>>) -> ok;
checkflags(Verb        , Flags) -> erlang:throw({Verb, reserved_flags, Flags}).
%% [MQTT-3.3.2-2], [MQTT-4.7.3-1]
%% Validate a PUBLISH topic: it must be non-empty and may not contain
%% the '+' or '#' wildcard characters.  Violations are thrown.
-spec checktopic(unicode:unicode_binary()) -> ok.
checktopic(<<>>) ->
    erlang:throw({'PUBLISH', "MQTT-4.7.3-1", "0-length topic"});
checktopic(Topic) ->
    checktopic2(Topic).

%% Scan codepoint by codepoint for a forbidden wildcard.
checktopic2(<<>>) ->
    ok;
checktopic2(<<C/utf8, Rest/binary>>) ->
    case C of
        $+ -> erlang:throw({'PUBLISH', "MQTT-3.3.2-2", C});
        $# -> erlang:throw({'PUBLISH', "MQTT-3.3.2-2", C});
        _  -> checktopic2(Rest)
    end.
%% Validate a topic filter
%% [MQTT-4.7.1-2], [MQTT-4.7.1-3], [MQTT-4.7.3-1]
%% Scan a topic filter codepoint by codepoint.  Clause order in
%% checktopicfilter2/1 is significant: the '#' clauses reject a '#'
%% that is either not preceded by '/' or not the final character; the
%% '+' clauses reject a '+' that is not delimited by '/' on either
%% side.  A bare "#" or "+" filter never matches the two-character
%% patterns and falls through to the accepting catch-all clause.
-spec checktopicfilter(unicode:unicode_binary()) -> ok.
checktopicfilter(<<>>) ->
    erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.3-1]", "0-length topic filter"});
checktopicfilter(TopicF) ->
    checktopicfilter2(TopicF).
checktopicfilter2(<<>>) ->
    ok;
%% "...X#" at end of input with X =/= '/': '#' not preceded by a separator
checktopicfilter2(<<H/utf8, $#>>) when H =/= $/ ->
    erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-2]", "misplaced # wildcard character"});
%% '#' followed by anything: '#' must be the last character
checktopicfilter2(<<$#, _/utf8, _/binary>>) ->
    erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-2]", "misplaced # wildcard character"});
%% '+' preceded by a non-'/' character
checktopicfilter2(<<H/utf8, $+, _/binary>>) when H =/= $/ ->
    erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-3]", "misplaced + wildcard character"});
%% '+' followed by a non-'/' character
checktopicfilter2(<<$+, H/utf8, _/binary>>) when H =/= $/ ->
    erlang:throw({'(UN)SUBSCRIBE', "[MQTT-4.7.1-3]", "misplaced + wildcard character"});
%% ordinary codepoint: keep scanning
checktopicfilter2(<<_/utf8, Rest/binary>>) ->
    checktopicfilter2(Rest).
%% Validate clientid
%% Walk the client identifier, counting accepted characters.  Only
%% [0-9a-zA-Z] are allowed and the total length may not exceed 23
%% characters [MQTT-3.1.3-5]; any violation is thrown.
-spec check_clientid(unicode:unicode_binary(), integer()) -> ok.
check_clientid(_, Len) when Len > 23 ->
    erlang:throw({'CONNECT', "[MQTT-3.1.3-5]", "clientid > 23 characters"});
check_clientid(<<>>, _) ->
    ok;
check_clientid(<<C/utf8, Rest/binary>>, Len)
        when C >= $0, C =< $9;
             C >= $a, C =< $z;
             C >= $A, C =< $Z ->
    check_clientid(Rest, Len+1);
check_clientid(_, _) ->
    erlang:throw({'CONNECT', "[MQTT-3.1.3-5]", "clientid contains non-valid characters"}).
%% Minimal byte count accepted for each packet-type code: types 1..11
%% require at least 3, types 12..14 at least 1, anything else -> -1.
%% (Presumably the minimum size of the packet after the fixed-header
%% byte -- confirm at the call site.)
minlen(T) when is_integer(T), T >= 1,  T =< 11 -> 3;
minlen(T) when is_integer(T), T >= 12, T =< 14 -> 1;
minlen(_)                                      -> -1.
%% 1 when the optional value is present, 0 otherwise (used for the
%% username/password bits of the CONNECT flags byte).
setflag(undefined) -> 0;
setflag(_Present)  -> 1.

%% Identity on binaries; any other term fails with function_clause,
%% which surfaces bad payload contents early.
bin(B) when is_binary(B) ->
    B.
|
5753137585be8912d70db3eef4ccb741f99ff77d13671015b6731fef008e0bb4 | commercialhaskell/path | Path.hs | -- | This library provides a well-typed representation of paths in a filesystem
-- directory tree.
--
Both " Path . " and " Path . Windows " provide the same interface . This
-- module will reexport the appropriate module for your platform.
-- | This library provides a well-typed representation of paths in a
-- filesystem directory tree.
--
-- Both "Path.Posix" and "Path.Windows" provide the same interface.
-- This module reexports the appropriate module for your platform.
{-# LANGUAGE CPP #-}
#if defined(mingw32_HOST_OS)
module Path(module Path.Windows) where
import Path.Windows
#else
module Path(module Path.Posix) where
import Path.Posix
#endif
| null | https://raw.githubusercontent.com/commercialhaskell/path/2bec48c17be4b82f7dabc8e2dd167444e558e9a5/src/Path.hs | haskell | | This library provides a well-typed representation of paths in a filesystem
directory tree.
module will reexport the appropriate module for your platform. | Both " Path . " and " Path . Windows " provide the same interface . This
# LANGUAGE CPP #
#if defined(mingw32_HOST_OS)
module Path(module Path.Windows) where
import Path.Windows
#else
module Path(module Path.Posix) where
import Path.Posix
#endif
|
0274dd0a73fa139a51b113593a901dc94fff820c79abaf390009feda563f307b | jyh/metaprl | meta_context_terms2.mli |
(*
 * Context rewrites.
 *
 * ----------------------------------------------------------------
 *
 * @begin[license]
 * Copyright (C) 2005-2006 Mojave Group, Caltech
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Jason Hickey @email{}
 * Modified by: Aleksey Nogin @email{}
 * @end[license]
 *)
extends Meta_context_ind1
(*
 * In the common case, the sequents are defined over terms.
 *)
declare sequent [TermSequent] { Term : Term >- Term } : Sequent{Term; Term; Term}
(*
 * Split the hyp.
 *
 * NOTE(review): sequent_ind appears to be an induction/fold combinator
 * over a sequent's hypothesis list (the second form also folds the
 * conclusion through 'concl); its semantics are defined in
 * Meta_context_ind1, which is not visible here -- confirm there.
 *)
declare sequent_ind{u : 'b, v : HFun{'a; 'b; 'c}. 'step['u; 'v] : 'c; 'e : Sequent{'a; 'b; 'c}} : 'c
declare sequent_ind{x : 'c. 'concl['x] : 'result;
                    u : 'b, v : HFun{'a; 'b; 'result}. 'step['u; 'v] : 'result;
                    'e : Sequent{'a; 'b; 'c}} : 'result
(*!
* @docoff
*
* -*-
* Local Variables:
* Caml-master: "compile"
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/meta/extensions/meta_context_terms2.mli | ocaml |
* In the common case, the sequents are defined over terms.
* Split the hyp.
!
* @docoff
*
* -*-
* Local Variables:
* Caml-master: "compile"
* End:
* -*-
|
* Context rewrites .
*
* ----------------------------------------------------------------
*
* @begin[license ]
* Copyright ( C ) 2005 - 2006 Mojave Group , Caltech
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
*
* Author : @email{ }
* Modified by : @email{ }
* @end[license ]
* Context rewrites.
*
* ----------------------------------------------------------------
*
* @begin[license]
* Copyright (C) 2005-2006 Mojave Group, Caltech
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* Author: Jason Hickey @email{}
* Modified by: Aleksey Nogin @email{}
* @end[license]
*)
extends Meta_context_ind1
declare sequent [TermSequent] { Term : Term >- Term } : Sequent{Term; Term; Term}
declare sequent_ind{u : 'b, v : HFun{'a; 'b; 'c}. 'step['u; 'v] : 'c; 'e : Sequent{'a; 'b; 'c}} : 'c
declare sequent_ind{x : 'c. 'concl['x] : 'result;
u : 'b, v : HFun{'a; 'b; 'result}. 'step['u; 'v] : 'result;
'e : Sequent{'a; 'b; 'c}} : 'result
|
b884e2b64964dbd3ee30d62814af1c9ae16cf1bced3788ef86551218b001ee10 | rabbitmq/rabbit-socks | rabbit_socks.erl | -module(rabbit_socks).
-behaviour(application).
-export([start/2, stop/1]).
%% For other apps to drive rabbit-socks
-export([start_listener/3]).
%% application:start/2 callback: start the top supervisor, then bind
%% the mochiweb listeners configured under the `listeners' env key.
%% Missing configuration aborts startup with an explicit throw.
start(normal, []) ->
    {ok, SupPid} = rabbit_socks_sup:start_link(),
    Listeners = case application:get_env(listeners) of
                    undefined -> throw({error, socks_no_listeners_given});
                    {ok, Ls}  -> Ls
                end,
    error_logger:info_msg("Starting ~s~nbinding to:~n~p",
                          ["Rabbit Socks", Listeners]),
    ok = rabbit_socks_mochiweb:start(Listeners),
    {ok, SupPid}.
%% application:stop/2 callback; nothing to clean up here.
stop(_State) ->
    ok.
%% Open an additional listener at runtime (exported so other
%% applications can drive rabbit-socks); delegates to the mochiweb glue.
start_listener(ListenerSpec, Subprotocol, Options) ->
    rabbit_socks_mochiweb:start_listener(ListenerSpec, Subprotocol, Options).
| null | https://raw.githubusercontent.com/rabbitmq/rabbit-socks/c2f2c894c2537f028c1eb01bccc59b74736c1eee/src/rabbit_socks.erl | erlang | For other apps to drive rabbit-socks | -module(rabbit_socks).
-behaviour(application).
-export([start/2, stop/1]).
-export([start_listener/3]).
start(normal, []) ->
{ok, SupPid} = rabbit_socks_sup:start_link(),
case application:get_env(listeners) of
undefined ->
throw({error, socks_no_listeners_given});
{ok, Listeners} ->
error_logger:info_msg("Starting ~s~nbinding to:~n~p",
["Rabbit Socks", Listeners]),
ok = rabbit_socks_mochiweb:start(Listeners)
end,
{ok, SupPid}.
stop(_State) ->
ok.
start_listener(ListenerSpec, Subprotocol, Options) ->
rabbit_socks_mochiweb:start_listener(ListenerSpec, Subprotocol, Options).
|
d7573b1a887907710c548e54caa28a0654bc73839e99ef9a40a2ae7afc6acfc1 | SevereOverfl0w/bukkure | util.clj | ;; TODO: Check this file manually
(ns bukkure.util
(:require [clojure.set :as set])
(:require [bukkure.logging :as log])
(:import (java.net ServerSocket InetSocketAddress))
(:import (java.io IOException))
)
(defmacro map-enums
  "Builds a map from lower-cased keyword to enum constant for the given
  enum class, e.g. (map-enums SomeEnum) expands to a map like
  {:foo SomeEnum/FOO ...}.  The <class>/values call is resolved from the
  symbol at macro-expansion time."
  [enumclass]
  `(apply merge (map #(hash-map (keyword (.toLowerCase (.name %))) %) (~(symbol (apply str (name enumclass) "/values"))))))
(defmacro auto-proxy
  "Like `proxy`, but automatically stubs out every interface method the
  caller did not implement (each stub takes [& args] and returns nil)."
  [interfaces variables & args]
  (let [defined (set (map #(str (first %)) args))          ; method names supplied by the caller
        names (fn [i] (map #(.getName %) (.getMethods i))) ; all method names of one interface
        all-names (into #{} (apply concat (map names (map resolve interfaces))))
        undefined (set/difference all-names defined)
        ;; generate a no-op stub form for every missing method
        auto-gen (map (fn [x] `(~(symbol x) [& ~'args])) undefined)]
    `(proxy ~interfaces ~variables ~@args ~@auto-gen)))
(defn capitalize
  "Upper-cases the first character of s and lower-cases the rest;
  returns s unchanged when it is empty (or nil)."
  [s]
  (if (empty? s)
    s
    (str (Character/toUpperCase (first s))
         (.toLowerCase (subs s 1)))))
(defn capitalize-all
  "Capitalizes every \\w+ word in s, copying the separators between
  words through untouched."
  [s]
  (let [matcher (re-matcher #"(\w+)" s)
        buffer (new StringBuffer)]
    ;; appendReplacement/appendTail copy the non-word gaps verbatim;
    ;; capitalized \w+ words contain no '$' or '\' so no quoting needed.
    (while (.find matcher)
      (.appendReplacement matcher buffer (capitalize (.group matcher 1))))
    (.appendTail matcher buffer)
    (.toString buffer)))
(defn camelcase
  "Capitalizes each word of str and strips the dashes, e.g.
  \"foo-bar\" -> \"FooBar\".  (Note: spaces are kept; the previous
  docstring incorrectly claimed spaces were removed.)"
  [str]
  (.replaceAll (capitalize-all str) "-" ""))
(defn uncamelcase
  "Inserts a dash before every uppercase letter and lower-cases the
  result, e.g. \"FooBar\" -> \"foo-bar\".
  BUG FIX: the original unconditionally dropped the first character,
  which corrupted lowercase-first input (\"fooBar\" -> \"oo-bar\");
  now the leading dash is only stripped when actually present.
  Behavior for uppercase-first input is unchanged."
  [s]
  (let [dashed (.toLowerCase (.replaceAll s "([A-Z])" "-$1"))]
    (if (.startsWith dashed "-")
      (subs dashed 1)
      dashed)))
(defn glue
  "Joins the non-blank arguments with sep.  Arguments that are nil or
  whose string form trims to the empty string are skipped."
  [sep & strs]
  (->> strs
       (map str)                                 ; nil -> ""
       (remove #(zero? (.length (.trim %))))
       (interpose sep)
       (apply str)))
(defmacro let-info
  "A let replacement that logs the bindings at each step. Useful for debugging."
  [bindings & body]
  (let [split (partition 2 bindings)
        ;; after every user binding, insert a throwaway `_` binding
        ;; whose init expression logs the just-bound name and its value
        info-bindings (map (fn [[n _]] `(~(symbol "_") (log/info ~(str (if (instance? clojure.lang.Named n) (name n) (str n)) ":" ) ~n))) split)]
    `(let [~@(apply concat (interleave split info-bindings))]
       ~@body)))
(defn package-classname
  "Builds a fully-qualified class name under base-package: all but the
  last dot-segment of nm become sub-packages, the last segment is
  camel-cased into the class name."
  [base-package nm]
  (let [parts (vec (.split nm "[.]"))
        classname (camelcase (peek parts))
        package (apply glue "." base-package (pop parts))]
    (glue "." package classname)))
(defn class-named
  "Dotted name for a Class, with the simple class name un-camelcased
  (roughly the inverse of package-classname)."
  [class]
  (let [parts (vec (.split (.getName class) "[.]"))
        simple (uncamelcase (peek parts))
        package (apply glue "." (pop parts))]
    (glue "." package simple)))
(defn port-in-use?
  "Returns true when port cannot be bound (something already listens
  there), false otherwise.  When bind is non-nil it names the local
  host/interface to test; otherwise the wildcard address is used.
  BUG FIX: the original used (InetSocketAddress. bind port) as the
  `if` *condition* -- a fresh object, hence always truthy -- so `bind`
  was silently ignored and the wildcard address was always tested."
  [port bind]
  (let [bind-addr (if bind
                    (InetSocketAddress. bind port)
                    (InetSocketAddress. port))]
    (try
      (with-open [ss (ServerSocket. port 0 (.getAddress bind-addr))]
        false)
      (catch IOException e true))))
(defn throw-runtime
  "Formats fmt with args (clojure.core/format) and throws the result
  as a RuntimeException."
  [fmt & args]
  (throw (RuntimeException. (apply format fmt args))))
(defn find-subclasses
  "Returns the (non-nil) subtypes of class found under package-name,
  using the external org.reflections classpath scanner."
  [package-name class]
  (let [reflections (org.reflections.Reflections.
                      package-name
                      (into-array org.reflections.scanners.Scanner []))]
    (remove nil? (seq (.getSubTypesOf reflections class)))))
| null | https://raw.githubusercontent.com/SevereOverfl0w/bukkure/2091d70191127e617c1a7ce12f1c7b96585f124e/src/bukkure/util.clj | clojure | TODO: Check this file manually | (ns bukkure.util
(:require [clojure.set :as set])
(:require [bukkure.logging :as log])
(:import (java.net ServerSocket InetSocketAddress))
(:import (java.io IOException))
)
(defmacro map-enums [enumclass]
`(apply merge (map #(hash-map (keyword (.toLowerCase (.name %))) %) (~(symbol (apply str (name enumclass) "/values"))))))
(defmacro auto-proxy
"Automatically build a proxy, stubbing out useless entries, ala: -automatic-proxy-creation-in-clojure"
[interfaces variables & args]
(let [defined (set (map #(str (first %)) args))
names (fn [i] (map #(.getName %) (.getMethods i)))
all-names (into #{} (apply concat (map names (map resolve interfaces))))
undefined (set/difference all-names defined)
auto-gen (map (fn [x] `(~(symbol x) [& ~'args])) undefined)]
`(proxy ~interfaces ~variables ~@args ~@auto-gen)))
(defn capitalize [s]
(if (> (count s) 0)
(str (Character/toUpperCase (.charAt s 0))
(.toLowerCase (subs s 1)))
s))
(defn capitalize-all [s]
(let [matcher (re-matcher #"(\w+)" s)
buffer (new StringBuffer)]
(while (.find matcher)
(.appendReplacement matcher buffer (capitalize (.group matcher 1))))
(.appendTail matcher buffer)
(.toString buffer)))
(defn camelcase
"Removes dashes, camelcases words and then removes spaces."
[str]
(.replaceAll (capitalize-all str) "-" ""))
(defn uncamelcase
"Add dashes and lowercase everything"
[str]
(.substring (.toLowerCase (.replaceAll str "([A-Z])" "-$1")) 1))
(defn glue [sep & strs]
(apply str (interpose sep (filter #(and (not (nil? %)) (> (.length (.trim (str %))) 0)) strs))))
(defmacro let-info
"A let replacement that logs the bindings at each step. Useful for debugging."
[bindings & body]
(let [split (partition 2 bindings)
info-bindings (map (fn [[n _]] `(~(symbol "_") (log/info ~(str (if (instance? clojure.lang.Named n) (name n) (str n)) ":" ) ~n))) split)]
`(let [~@(apply concat (interleave split info-bindings))]
~@body)))
(defn package-classname [base-package nm]
(let [split (seq (.split nm "[.]"))
classname (camelcase (last split))
package (apply glue "." base-package (pop (vec split)))]
(glue "." package classname)))
(defn class-named [class]
(let [split (seq (.split (.getName class) "[.]"))
classname (uncamelcase (last split))
package (apply glue "." (pop (vec split)))]
(glue "." package classname)))
(defn port-in-use? [port bind]
(let [bind-addr (if (InetSocketAddress. bind port) (InetSocketAddress. port))]
(try
(with-open [ss (ServerSocket. port 0 (.getAddress bind-addr))] false)
(catch IOException e true))))
(defn throw-runtime [fmt & args]
(throw (java.lang.RuntimeException. (apply format fmt args))))
(defn find-subclasses [package-name class]
(filter #(not (nil? %))
(seq (.getSubTypesOf (org.reflections.Reflections.
package-name
(into-array org.reflections.scanners.Scanner []))
class))))
|
e1b690290642cdcc060be13754f92b7330a1098550db34308f6997027848f324 | typelead/etlas | Parsec.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  Distribution.PackageDescription.Parsec
-- Copyright   :  Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer :
-- Portability : portable
--
-- This defined parsers and partial pretty printers for the @.cabal@ format.
module Distribution.PackageDescription.Parsec (
-- * Package descriptions
readGenericPackageDescription,
parseGenericPackageDescription,
parseGenericPackageDescriptionMaybe,
-- ** Parsing
ParseResult,
runParseResult,
-- ** Supplementary build information
-- readHookedBuildInfo,
-- parseHookedBuildInfo,
) where
import Prelude ()
import Distribution.Compat.Prelude
import qualified Data.ByteString as BS
import Data.List (partition)
import qualified Data.Map as Map
import qualified Distribution.Compat.SnocList as SnocList
import Distribution.PackageDescription
import Distribution.PackageDescription.Parsec.FieldDescr
import Distribution.Parsec.Class (parsec)
import Distribution.Parsec.ConfVar
(parseConditionConfVar)
import Distribution.Parsec.LexerMonad
(LexWarning, toPWarning)
import Distribution.Parsec.Parser
import Distribution.Parsec.Types.Common
import Distribution.Parsec.Types.Field (getName)
import Distribution.Parsec.Types.FieldDescr
import Distribution.Parsec.Types.ParseResult
import Distribution.Simple.Utils
(die', fromUTF8BS, warn)
import Distribution.Text (display)
import Distribution.Types.ForeignLib
import Distribution.Types.CondTree
import Distribution.Types.UnqualComponentName
(UnqualComponentName, mkUnqualComponentName)
import Distribution.Verbosity (Verbosity)
import Distribution.Version
(LowerBound (..), Version, asVersionIntervals, mkVersion,
orLaterVersion)
import System.Directory
(doesFileExist)
import qualified Text.Parsec as P
import qualified Text.Parsec.Error as P
-- ---------------------------------------------------------------
-- Parsing
-- | Helper combinator to do parsing plumbing for files.
--
-- Given a parser and a filename, return the parse of the file,
-- after checking if the file exists.
--
-- Argument order is chosen to encourage partial application.
readAndParseFile
    :: (BS.ByteString -> ParseResult a)  -- ^ File contents to final value parser
    -> Verbosity                         -- ^ Verbosity level
    -> FilePath                          -- ^ File to read
    -> IO a
readAndParseFile parser verbosity fpath = do
    exists <- doesFileExist fpath
    unless exists $ die' verbosity $
        "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue."
    bs <- BS.readFile fpath
    let (warnings, errors, result) = runParseResult (parser bs)
    traverse_ (warn verbosity . showPWarning fpath) warnings
    traverse_ (warn verbosity . showPError fpath) errors
    -- 'result' is Nothing exactly when fatal errors were reported above.
    maybe (die' verbosity $ "Failing parsing \"" ++ fpath ++ "\".") return result
-- | Parse the given package file into a 'GenericPackageDescription',
-- logging any warnings/errors and dying (via 'die'') when the file is
-- missing or fails to parse.
readGenericPackageDescription :: Verbosity -> FilePath -> IO GenericPackageDescription
readGenericPackageDescription = readAndParseFile parseGenericPackageDescription
------------------------------------------------------------------------------
-- | Parses the given file into a 'GenericPackageDescription'.
--
-- In Cabal 1.2 the syntax for package descriptions was changed to a format
-- with sections and possibly indented property descriptions.
--
-- TODO: add lex warnings
parseGenericPackageDescription :: BS.ByteString -> ParseResult GenericPackageDescription
parseGenericPackageDescription bs =
    case readFields' bs of
        -- TODO: better marshalling of errors
        Left perr                -> parseFatalFailure (Position 0 0) (show perr)
        Right (fs, lexWarnings)  -> parseGenericPackageDescription' lexWarnings fs
-- | 'Maybe' variant of 'parseGenericPackageDescription': warnings and
-- errors are discarded, only the (possibly absent) result is kept.
parseGenericPackageDescriptionMaybe :: BS.ByteString -> Maybe GenericPackageDescription
parseGenericPackageDescriptionMaybe bs =
    case runParseResult (parseGenericPackageDescription bs) of
        (_warnings, _errors, result) -> result
-- | Run a field parser over raw field lines: the lines are first
-- joined into a UTF-8 checked 'String', then handed to
-- 'runFieldParser''.  The reported position is that of the first line
-- of the field (or 0:0 for an empty field).
runFieldParser :: FieldParser a -> [FieldLine Position] -> ParseResult a
runFieldParser p ls = runFieldParser' pos p =<< fieldlinesToString pos ls
  where
    -- TODO: make per line lookup
    pos = case ls of
        [] -> Position 0 0
        (FieldLine pos' _ : _) -> pos'
-- | Concatenate the raw bytes of the field lines, newline-separated.
fieldlinesToBS :: [FieldLine ann] -> BS.ByteString
fieldlinesToBS fls = BS.intercalate "\n" [ bs | FieldLine _ bs <- fls ]
-- TODO: Take position from FieldLine
-- TODO: Take field name
-- | Decode the field lines as UTF-8 text (newline-joined).  A decoding
-- problem shows up as the replacement character U+FFFD in the lossy
-- string; that triggers a 'PWTUTF' warning but the string is still
-- returned.
fieldlinesToString :: Position -> [FieldLine ann] -> ParseResult String
fieldlinesToString pos fls =
    let str = intercalate "\n" . map (\(FieldLine _ bs') -> fromUTF8BS bs') $ fls
    in if '\xfffd' `elem` str
        then str <$ parseWarning pos PWTUTF "Invalid UTF8 encoding"
        else pure str
-- | Run a parsec 'FieldParser' over an already-extracted field string,
-- translating parsec warnings/errors into 'ParseResult' ones.  The
-- given 'Position' is where the field starts in the file; parsec's
-- 1-based error positions are rebased onto it.  The parser is wrapped
-- to skip surrounding whitespace and require end-of-input.
runFieldParser' :: Position -> FieldParser a -> String -> ParseResult a
runFieldParser' (Position row col) p str = case P.runParser p' [] "<field>" str of
    Right (pok, ws) -> do
        -- TODO: map pos
        traverse_ (\(PWarning t pos w) -> parseWarning pos t w) ws
        pure pok
    Left err -> do
        let ppos = P.errorPos err
        -- Positions start from 1:1, not 0:0
        let epos = Position (row - 1 + P.sourceLine ppos) (col - 1 + P.sourceColumn ppos)
        let msg = P.showErrorMessages
            "or" "unknown parse error" "expecting" "unexpected" "end of input"
            (P.errorMessages err)
        parseFatalFailure epos $ msg ++ ": " ++ show str
  where
    p' = (,) <$ P.spaces <*> p <* P.spaces <* P.eof <*> P.getState
-- Note [Accumulating parser]
--
-- This parser has two "states":
-- * first we parse fields of PackageDescription
-- * then we parse sections (libraries, executables, etc)
parseGenericPackageDescription'
:: [LexWarning]
-> [Field Position]
-> ParseResult GenericPackageDescription
parseGenericPackageDescription' lexWarnings fs = do
parseWarnings' (fmap toPWarning lexWarnings)
let (syntax, fs') = sectionizeFields fs
gpd <- goFields emptyGpd fs'
-- Various post checks
maybeWarnCabalVersion syntax (packageDescription gpd)
checkForUndefinedFlags gpd
-- TODO: do other validations
return gpd
where
    -- First fields
goFields
:: GenericPackageDescription
-> [Field Position]
-> ParseResult GenericPackageDescription
goFields gpd [] = pure gpd
goFields gpd (Field (Name pos name) fieldLines : fields) =
case Map.lookup name pdFieldParsers of
-- TODO: can be more elegant
Nothing -> fieldlinesToString pos fieldLines >>= \value -> case storeXFieldsPD name value (packageDescription gpd) of
Nothing -> do
parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name
goFields gpd fields
Just pd ->
goFields (gpd { packageDescription = pd }) fields
Just parser -> do
pd <- runFieldParser (parser $ packageDescription gpd) fieldLines
let gpd' = gpd { packageDescription = pd }
goFields gpd' fields
goFields gpd fields@(Section _ _ _ : _) = goSections gpd fields
-- Sections
goSections
:: GenericPackageDescription
-> [Field Position]
-> ParseResult GenericPackageDescription
goSections gpd [] = pure gpd
goSections gpd (Field (Name pos name) _ : fields) = do
parseWarning pos PWTTrailingFields $ "Ignoring trailing fields after sections: " ++ show name
goSections gpd fields
goSections gpd (Section name args secFields : fields) = do
gpd' <- parseSection gpd name args secFields
goSections gpd' fields
emptyGpd :: GenericPackageDescription
emptyGpd = GenericPackageDescription emptyPackageDescription [] Nothing [] [] [] [] []
pdFieldParsers :: Map FieldName (PackageDescription -> FieldParser PackageDescription)
pdFieldParsers = Map.fromList $
map (\x -> (fieldName x, fieldParser x)) pkgDescrFieldDescrs
parseSection
:: GenericPackageDescription
-> Name Position
-> [SectionArg Position]
-> [Field Position]
-> ParseResult GenericPackageDescription
parseSection gpd (Name pos name) args fields
| name == "library" && null args = do
-- TODO: check that library is defined once
l <- parseCondTree libFieldDescrs storeXFieldsLib (targetBuildDepends . libBuildInfo) emptyLibrary fields
let gpd' = gpd { condLibrary = Just l }
pure gpd'
      -- Sublibraries
| name == "library" = do
name' <- parseUnqualComponentName pos args
lib <- parseCondTree libFieldDescrs storeXFieldsLib (targetBuildDepends . libBuildInfo) emptyLibrary fields
      -- TODO check duplicate name here?
let gpd' = gpd { condSubLibraries = condSubLibraries gpd ++ [(name', lib)] }
pure gpd'
| name == "foreign-library" = do
name' <- parseUnqualComponentName pos args
flib <- parseCondTree foreignLibFieldDescrs storeXFieldsForeignLib (targetBuildDepends . foreignLibBuildInfo) emptyForeignLib fields
      -- TODO check duplicate name here?
let gpd' = gpd { condForeignLibs = condForeignLibs gpd ++ [(name', flib)] }
pure gpd'
| name == "executable" = do
name' <- parseUnqualComponentName pos args
      -- Note: we don't parse the "executable" field here, hence the tail hack. Duncan 2010
exe <- parseCondTree (tail executableFieldDescrs) storeXFieldsExe (targetBuildDepends . buildInfo) emptyExecutable fields
      -- TODO check duplicate name here?
let gpd' = gpd { condExecutables = condExecutables gpd ++ [(name', exe)] }
pure gpd'
| name == "test-suite" = do
name' <- parseUnqualComponentName pos args
testStanza <- parseCondTree testSuiteFieldDescrs storeXFieldsTest (targetBuildDepends . testStanzaBuildInfo) emptyTestStanza fields
testSuite <- traverse (validateTestSuite pos) testStanza
      -- TODO check duplicate name here?
let gpd' = gpd { condTestSuites = condTestSuites gpd ++ [(name', testSuite)] }
pure gpd'
| name == "benchmark" = do
name' <- parseUnqualComponentName pos args
benchStanza <- parseCondTree benchmarkFieldDescrs storeXFieldsBenchmark (targetBuildDepends . benchmarkStanzaBuildInfo) emptyBenchmarkStanza fields
bench <- traverse (validateBenchmark pos) benchStanza
      -- TODO check duplicate name here?
let gpd' = gpd { condBenchmarks = condBenchmarks gpd ++ [(name', bench)] }
pure gpd'
| name == "flag" = do
name' <- parseName pos args
name'' <- runFieldParser' pos parsec name' `recoverWith` mkFlagName ""
flag <- parseFields flagFieldDescrs warnUnrec (emptyFlag name'') fields
-- Check default flag
let gpd' = gpd { genPackageFlags = genPackageFlags gpd ++ [flag] }
pure gpd'
| name == "custom-setup" && null args = do
sbi <- parseFields setupBInfoFieldDescrs warnUnrec mempty fields
let pd = packageDescription gpd
-- TODO: what if already defined?
let gpd' = gpd { packageDescription = pd { setupBuildInfo = Just sbi } }
pure gpd'
| name == "source-repository" = do
kind <- case args of
[SecArgName spos secName] ->
runFieldParser' spos parsec (fromUTF8BS secName) `recoverWith` RepoHead
[] -> do
parseFailure pos $ "'source-repository' needs one argument"
pure RepoHead
_ -> do
parseFailure pos $ "Invalid source-repository kind " ++ show args
pure RepoHead
sr <- parseFields sourceRepoFieldDescrs warnUnrec (emptySourceRepo kind) fields
-- I want lens
let pd = packageDescription gpd
let srs = sourceRepos pd
let gpd' = gpd { packageDescription = pd { sourceRepos = srs ++ [sr] } }
pure gpd'
| otherwise = do
parseWarning pos PWTUnknownSection $ "Ignoring section: " ++ show name
pure gpd
newSyntaxVersion :: Version
newSyntaxVersion = mkVersion [1, 2]
maybeWarnCabalVersion :: Syntax -> PackageDescription -> ParseResult ()
maybeWarnCabalVersion syntax pkg
| syntax == NewSyntax && specVersion pkg < newSyntaxVersion
= parseWarning (Position 0 0) PWTNewSyntax $
"A package using section syntax must specify at least\n"
++ "'cabal-version: >= 1.2'."
maybeWarnCabalVersion syntax pkg
| syntax == OldSyntax && specVersion pkg >= newSyntaxVersion
= parseWarning (Position 0 0) PWTOldSyntax $
"A package using 'cabal-version: "
++ displaySpecVersion (specVersionRaw pkg)
++ "' must use section syntax. See the Cabal user guide for details."
where
displaySpecVersion (Left version) = display version
displaySpecVersion (Right versionRange) =
case asVersionIntervals versionRange of
[] {- impossible -} -> display versionRange
((LowerBound version _, _):_) -> display (orLaterVersion version)
maybeWarnCabalVersion _ _ = return ()
{-
handleFutureVersionParseFailure :: Version -> ParseResult a -> ParseResult GenericPackageDescription
handleFutureVersionParseFailure _cabalVersionNeeded _parseBody =
    error "handleFutureVersionParseFailure"
-}
{-
    undefined (unless versionOk (warning message) >> parseBody)
      `catchParseError` \parseError -> case parseError of
        TabsError _   -> parseFail parseError
        _ | versionOk -> parseFail parseError
          | otherwise -> fail message
  where versionOk = cabalVersionNeeded <= cabalVersion
        message   = "This package requires at least Cabal version "
                      ++ display cabalVersionNeeded
-}
-- | Check that every flag referenced in a conditional is actually
-- declared in the package.  Currently a stub: the body is 'pure ()';
-- a sketch of the intended implementation survives in the comments
-- that follow this definition in the file.
checkForUndefinedFlags
    :: GenericPackageDescription
    -> ParseResult ()
checkForUndefinedFlags _gpd = pure ()
let = map flagName flags
mapM _ ( ) ( maybeToList mlib )
mapM _ ( . snd ) sub_libs
mapM _ ( . snd ) exes
mapM _ ( . snd ) tests
checkCondTreeFlags : : [ FlagName ] - > CondTree ConfVar c a - > PM ( )
checkCondTreeFlags definedFlags ct = do
let fv = nub $ freeVars ct
unless ( all ( ` elem ` definedFlags ) fv ) $
fail $ " These flags are used without having been defined : "
+ + intercalate " , " [ n | FlagName n < - fv \\ ]
let definedFlags = map flagName flags
mapM_ (checkCondTreeFlags definedFlags) (maybeToList mlib)
mapM_ (checkCondTreeFlags definedFlags . snd) sub_libs
mapM_ (checkCondTreeFlags definedFlags . snd) exes
mapM_ (checkCondTreeFlags definedFlags . snd) tests
checkCondTreeFlags :: [FlagName] -> CondTree ConfVar c a -> PM ()
checkCondTreeFlags definedFlags ct = do
let fv = nub $ freeVars ct
unless (all (`elem` definedFlags) fv) $
fail $ "These flags are used without having been defined: "
++ intercalate ", " [ n | FlagName n <- fv \\ definedFlags ]
-}
parseName :: Position -> [SectionArg Position] -> ParseResult String
parseName pos args = case args of
[SecArgName _pos secName] ->
pure $ fromUTF8BS secName
[SecArgStr _pos secName] ->
pure secName
[] -> do
parseFailure pos $ "name required"
pure ""
_ -> do
-- TODO: pretty print args
parseFailure pos $ "Invalid name " ++ show args
pure ""
parseUnqualComponentName :: Position -> [SectionArg Position] -> ParseResult UnqualComponentName
parseUnqualComponentName pos args = mkUnqualComponentName <$> parseName pos args
-- | Parse a non-recursive list of fields, given a list of field descriptions,
-- a structure to accumulate the parsed fields, and a function
-- that can decide what to do with fields which don't match any
-- of the field descriptions.
parseFields
:: forall a.
[FieldDescr a] -- ^ descriptions of fields we know how to parse
-> UnknownFieldParser a -- ^ possibly do something with unrecognized fields
-> a -- ^ accumulator
-> [Field Position] -- ^ fields to be parsed
-> ParseResult a
parseFields descrs _unknown = foldM go
where
go :: a -> Field Position -> ParseResult a
go x (Section (Name pos name) _ _) = do
-- Even we occur a subsection, we can continue parsing
parseFailure pos $ "invalid subsection " ++ show name
return x
go x (Field (Name pos name) fieldLines) =
case Map.lookup name fieldParsers of
Nothing -> do
-- TODO: use 'unknown'
parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name
return x
Just parser ->
runFieldParser (parser x) fieldLines
fieldParsers :: Map FieldName (a -> FieldParser a)
fieldParsers = Map.fromList $
map (\x -> (fieldName x, fieldParser x)) descrs
type C c a = CondBranch ConfVar c a
parseCondTree
:: forall a c.
[FieldDescr a] -- ^ Field descriptions
-> UnknownFieldParser a -- ^ How to parse unknown fields
-> (a -> c) -- ^ Condition extractor
-> a -- ^ Initial value
-> [Field Position] -- ^ Fields to parse
-> ParseResult (CondTree ConfVar c a)
parseCondTree descs unknown cond ini = impl
where
impl :: [Field Position] -> ParseResult (CondTree ConfVar c a)
impl fields = do
(x, xs) <- goFields (ini, mempty) fields
return $ CondNode x (cond x) (SnocList.runSnocList xs)
goFields
:: (a, SnocList.SnocList (C c a))
-> [Field Position]
-> ParseResult (a, SnocList.SnocList (C c a))
goFields xss [] = return xss
goFields xxs (Section (Name _pos name) tes con : fields) | name == "if" = do
tes' <- parseConditionConfVar tes
con' <- impl con
-- Jump to 'else' state
goElse tes' con' xxs fields
goFields xxs (Section (Name pos name) _ _ : fields) = do
-- Even we occur a subsection, we can continue parsing
-- -0.1/constraints.cabal
parseWarning pos PWTInvalidSubsection $ "invalid subsection " ++ show name
goFields xxs fields
goFields (x, xs) (Field (Name pos name) fieldLines : fields) =
case Map.lookup name fieldParsers of
Nothing -> fieldlinesToString pos fieldLines >>= \value -> case unknown name value x of
Nothing -> do
parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name
goFields (x, xs) fields
Just x' -> do
goFields (x', xs) fields
Just parser -> do
x' <- runFieldParser (parser x) fieldLines
goFields (x', xs) fields
-- Try to parse else branch
goElse
:: Condition ConfVar
-> CondTree ConfVar c a
-> (a, SnocList.SnocList (C c a))
-> [Field Position]
-> ParseResult (a, SnocList.SnocList (C c a))
goElse tes con (x, xs) (Section (Name pos name) secArgs alt : fields) | name == "else" = do
when (not . null $ secArgs) $ do
parseFailure pos $ "`else` section has section arguments " ++ show secArgs
alt' <- case alt of
[] -> pure Nothing
_ -> Just <$> impl alt
let ieb = (CondBranch tes con alt')
goFields (x, SnocList.snoc xs ieb) fields
goElse tes con (x, xs) fields = do
let ieb = (CondBranch tes con Nothing)
goFields (x, SnocList.snoc xs ieb) fields
fieldParsers :: Map FieldName (a -> FieldParser a)
fieldParsers = Map.fromList $
map (\x -> (fieldName x, fieldParser x)) descs
Note [ Accumulating parser ]
In there parser , @'FieldDescr ' a@ is transformed into @Map FieldName ( a - >
FieldParser a)@. The weird value is used because we accumulate structure of
@a@ by folding over the fields . There are various reasons for that :
* Almost all fields are optional
* This is simple approach so declarative bi - directional format ( parsing and
printing ) of structure could be specified ( list of @'FieldDescr ' a@ )
* There are surface syntax fields corresponding to single field in the file :
@license - file@ and @license - files@
* This is quite safe approach .
When / if we re - implement the parser to support formatting preservging roundtrip
with new AST , this all need to be rewritten .
In there parser, @'FieldDescr' a@ is transformed into @Map FieldName (a ->
FieldParser a)@. The weird value is used because we accumulate structure of
@a@ by folding over the fields. There are various reasons for that:
* Almost all fields are optional
* This is simple approach so declarative bi-directional format (parsing and
printing) of structure could be specified (list of @'FieldDescr' a@)
* There are surface syntax fields corresponding to single field in the file:
@license-file@ and @license-files@
* This is quite safe approach.
When/if we re-implement the parser to support formatting preservging roundtrip
with new AST, this all need to be rewritten.
-}
-------------------------------------------------------------------------------
-- Old syntax
-------------------------------------------------------------------------------
| " " an old - style Cabal file . A sectionized file has :
--
-- * all global fields at the beginning, followed by
--
-- * all flag declarations, followed by
--
-- * an optional library section, and an arbitrary number of executable
-- sections (in any order).
--
-- The current implementation just gathers all library-specific fields
-- in a library section and wraps all executable stanzas in an executable
-- section.
sectionizeFields :: [Field ann] -> (Syntax, [Field ann])
sectionizeFields fs = case classifyFields fs of
Just fields -> (OldSyntax, convert fields)
Nothing -> (NewSyntax, fs)
where
-- return 'Just' if all fields are simple fields
classifyFields :: [Field ann] -> Maybe [(Name ann, [FieldLine ann])]
classifyFields = traverse f
where
f (Field name fieldlines) = Just (name, fieldlines)
f _ = Nothing
trim = BS.dropWhile isSpace' . BS.reverse . BS.dropWhile isSpace' . BS.reverse
isSpace' = (== 32)
convert :: [(Name ann, [FieldLine ann])] -> [Field ann]
convert fields =
let
toField (name, ls) = Field name ls
-- "build-depends" is a local field now. To be backwards
-- compatible, we still allow it as a global field in old-style
-- package description files and translate it to a local field by
-- adding it to every non-empty section
(hdr0, exes0) = break ((=="executable") . getName . fst) fields
(hdr, libfs0) = partition (not . (`elem` libFieldNames) . getName . fst) hdr0
(deps, libfs) = partition ((== "build-depends") . getName . fst)
libfs0
exes = unfoldr toExe exes0
toExe [] = Nothing
toExe ((Name pos n, ls) : r)
| n == "executable" =
let (efs, r') = break ((== "executable") . getName . fst) r
in Just (Section (Name pos "executable") [SecArgName pos $ trim $ fieldlinesToBS ls] (map toField $ deps ++ efs), r')
toExe _ = error "unexpected input to 'toExe'"
lib = case libfs of
[] -> []
((Name pos _, _) : _) ->
[Section (Name pos "library") [] (map toField $ deps ++ libfs)]
in map toField hdr ++ lib ++ exes
-- | See 'sectionizeFields'.
data Syntax = OldSyntax | NewSyntax
deriving (Eq, Show)
libFieldNames :: [FieldName]
libFieldNames = map fieldName libFieldDescrs
| null | https://raw.githubusercontent.com/typelead/etlas/bbd7c558169e1fda086e759e1a6f8c8ca2807583/etlas-cabal/Distribution/PackageDescription/Parsec.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE Rank2Types #
# LANGUAGE FlexibleContexts #
---------------------------------------------------------------------------
|
License : BSD3
Maintainer :
Portability : portable
This defined parsers and partial pretty printers for the @.cabal@ format.
* Package descriptions
** Parsing
** Supplementary build information
readHookedBuildInfo,
parseHookedBuildInfo,
---------------------------------------------------------------
Parsing
| Helper combinator to do parsing plumbing for files.
Given a parser and a filename, return the parse of the file,
after checking if the file exists.
Argument order is chosen to encourage partial application.
^ File contents to final value parser
^ Verbosity level
^ File to read
| Parse the given package file.
----------------------------------------------------------------------------
| Parses the given file into a 'GenericPackageDescription'.
with sections and possibly indented property descriptions.
TODO: add lex warnings
TODO: better marshalling of errors
| 'Maybe' variant of 'parseGenericPackageDescription'
TODO: make per line lookup
TODO: Take field name
TODO: map pos
Note [Accumulating parser]
* then we parse sections (libraries, executables, etc)
Various post checks
TODO: do other validations
TODO: can be more elegant
Sections
TODO: check that library is defined once
Check default flag
TODO: what if already defined?
I want lens
impossible
TODO: pretty print args
| Parse a non-recursive list of fields, given a list of field descriptions,
a structure to accumulate the parsed fields, and a function
that can decide what to do with fields which don't match any
of the field descriptions.
^ descriptions of fields we know how to parse
^ possibly do something with unrecognized fields
^ accumulator
^ fields to be parsed
Even we occur a subsection, we can continue parsing
TODO: use 'unknown'
^ Field descriptions
^ How to parse unknown fields
^ Condition extractor
^ Initial value
^ Fields to parse
Jump to 'else' state
Even we occur a subsection, we can continue parsing
-0.1/constraints.cabal
Try to parse else branch
-----------------------------------------------------------------------------
Old syntax
-----------------------------------------------------------------------------
* all global fields at the beginning, followed by
* all flag declarations, followed by
* an optional library section, and an arbitrary number of executable
sections (in any order).
The current implementation just gathers all library-specific fields
in a library section and wraps all executable stanzas in an executable
section.
return 'Just' if all fields are simple fields
"build-depends" is a local field now. To be backwards
compatible, we still allow it as a global field in old-style
package description files and translate it to a local field by
adding it to every non-empty section
| See 'sectionizeFields'. | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
Module : Distribution . PackageDescription . Parsec
Copyright : 2003 - 2005
module Distribution.PackageDescription.Parsec (
readGenericPackageDescription,
parseGenericPackageDescription,
parseGenericPackageDescriptionMaybe,
ParseResult,
runParseResult,
) where
import Prelude ()
import Distribution.Compat.Prelude
import qualified Data.ByteString as BS
import Data.List (partition)
import qualified Data.Map as Map
import qualified Distribution.Compat.SnocList as SnocList
import Distribution.PackageDescription
import Distribution.PackageDescription.Parsec.FieldDescr
import Distribution.Parsec.Class (parsec)
import Distribution.Parsec.ConfVar
(parseConditionConfVar)
import Distribution.Parsec.LexerMonad
(LexWarning, toPWarning)
import Distribution.Parsec.Parser
import Distribution.Parsec.Types.Common
import Distribution.Parsec.Types.Field (getName)
import Distribution.Parsec.Types.FieldDescr
import Distribution.Parsec.Types.ParseResult
import Distribution.Simple.Utils
(die', fromUTF8BS, warn)
import Distribution.Text (display)
import Distribution.Types.ForeignLib
import Distribution.Types.CondTree
import Distribution.Types.UnqualComponentName
(UnqualComponentName, mkUnqualComponentName)
import Distribution.Verbosity (Verbosity)
import Distribution.Version
(LowerBound (..), Version, asVersionIntervals, mkVersion,
orLaterVersion)
import System.Directory
(doesFileExist)
import qualified Text.Parsec as P
import qualified Text.Parsec.Error as P
readAndParseFile
-> IO a
readAndParseFile parser verbosity fpath = do
exists <- doesFileExist fpath
unless exists $
die' verbosity $
"Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue."
bs <- BS.readFile fpath
let (warnings, errors, result) = runParseResult (parser bs)
traverse_ (warn verbosity . showPWarning fpath) warnings
traverse_ (warn verbosity . showPError fpath) errors
case result of
Nothing -> die' verbosity $ "Failing parsing \"" ++ fpath ++ "\"."
Just x -> return x
readGenericPackageDescription :: Verbosity -> FilePath -> IO GenericPackageDescription
readGenericPackageDescription = readAndParseFile parseGenericPackageDescription
In Cabal 1.2 the syntax for package descriptions was changed to a format
parseGenericPackageDescription :: BS.ByteString -> ParseResult GenericPackageDescription
parseGenericPackageDescription bs = case readFields' bs of
Right (fs, lexWarnings) -> parseGenericPackageDescription' lexWarnings fs
Left perr -> parseFatalFailure (Position 0 0) (show perr)
parseGenericPackageDescriptionMaybe :: BS.ByteString -> Maybe GenericPackageDescription
parseGenericPackageDescriptionMaybe =
trdOf3 . runParseResult . parseGenericPackageDescription
where
trdOf3 (_, _, x) = x
runFieldParser :: FieldParser a -> [FieldLine Position] -> ParseResult a
runFieldParser p ls = runFieldParser' pos p =<< fieldlinesToString pos ls
where
pos = case ls of
[] -> Position 0 0
(FieldLine pos' _ : _) -> pos'
fieldlinesToBS :: [FieldLine ann] -> BS.ByteString
fieldlinesToBS = BS.intercalate "\n" . map (\(FieldLine _ bs) -> bs)
TODO : Take position from FieldLine
fieldlinesToString :: Position -> [FieldLine ann] -> ParseResult String
fieldlinesToString pos fls =
let str = intercalate "\n" . map (\(FieldLine _ bs') -> fromUTF8BS bs') $ fls
in if '\xfffd' `elem` str
then str <$ parseWarning pos PWTUTF "Invalid UTF8 encoding"
else pure str
runFieldParser' :: Position -> FieldParser a -> String -> ParseResult a
runFieldParser' (Position row col) p str = case P.runParser p' [] "<field>" str of
Right (pok, ws) -> do
traverse_ (\(PWarning t pos w) -> parseWarning pos t w) ws
pure pok
Left err -> do
let ppos = P.errorPos err
Positions start from 1:1 , not 0:0
let epos = Position (row - 1 + P.sourceLine ppos) (col - 1 + P.sourceColumn ppos)
let msg = P.showErrorMessages
"or" "unknown parse error" "expecting" "unexpected" "end of input"
(P.errorMessages err)
parseFatalFailure epos $ msg ++ ": " ++ show str
where
p' = (,) <$ P.spaces <*> p <* P.spaces <* P.eof <*> P.getState
This parser has two " states " :
* first we parse fields of PackageDescription
parseGenericPackageDescription'
:: [LexWarning]
-> [Field Position]
-> ParseResult GenericPackageDescription
parseGenericPackageDescription' lexWarnings fs = do
parseWarnings' (fmap toPWarning lexWarnings)
let (syntax, fs') = sectionizeFields fs
gpd <- goFields emptyGpd fs'
maybeWarnCabalVersion syntax (packageDescription gpd)
checkForUndefinedFlags gpd
return gpd
where
First fields
goFields
:: GenericPackageDescription
-> [Field Position]
-> ParseResult GenericPackageDescription
goFields gpd [] = pure gpd
goFields gpd (Field (Name pos name) fieldLines : fields) =
case Map.lookup name pdFieldParsers of
Nothing -> fieldlinesToString pos fieldLines >>= \value -> case storeXFieldsPD name value (packageDescription gpd) of
Nothing -> do
parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name
goFields gpd fields
Just pd ->
goFields (gpd { packageDescription = pd }) fields
Just parser -> do
pd <- runFieldParser (parser $ packageDescription gpd) fieldLines
let gpd' = gpd { packageDescription = pd }
goFields gpd' fields
goFields gpd fields@(Section _ _ _ : _) = goSections gpd fields
goSections
:: GenericPackageDescription
-> [Field Position]
-> ParseResult GenericPackageDescription
goSections gpd [] = pure gpd
goSections gpd (Field (Name pos name) _ : fields) = do
parseWarning pos PWTTrailingFields $ "Ignoring trailing fields after sections: " ++ show name
goSections gpd fields
goSections gpd (Section name args secFields : fields) = do
gpd' <- parseSection gpd name args secFields
goSections gpd' fields
emptyGpd :: GenericPackageDescription
emptyGpd = GenericPackageDescription emptyPackageDescription [] Nothing [] [] [] [] []
pdFieldParsers :: Map FieldName (PackageDescription -> FieldParser PackageDescription)
pdFieldParsers = Map.fromList $
map (\x -> (fieldName x, fieldParser x)) pkgDescrFieldDescrs
parseSection
:: GenericPackageDescription
-> Name Position
-> [SectionArg Position]
-> [Field Position]
-> ParseResult GenericPackageDescription
parseSection gpd (Name pos name) args fields
| name == "library" && null args = do
l <- parseCondTree libFieldDescrs storeXFieldsLib (targetBuildDepends . libBuildInfo) emptyLibrary fields
let gpd' = gpd { condLibrary = Just l }
pure gpd'
Sublibraries
| name == "library" = do
name' <- parseUnqualComponentName pos args
lib <- parseCondTree libFieldDescrs storeXFieldsLib (targetBuildDepends . libBuildInfo) emptyLibrary fields
TODO check duplicate name here ?
let gpd' = gpd { condSubLibraries = condSubLibraries gpd ++ [(name', lib)] }
pure gpd'
| name == "foreign-library" = do
name' <- parseUnqualComponentName pos args
flib <- parseCondTree foreignLibFieldDescrs storeXFieldsForeignLib (targetBuildDepends . foreignLibBuildInfo) emptyForeignLib fields
TODO check duplicate name here ?
let gpd' = gpd { condForeignLibs = condForeignLibs gpd ++ [(name', flib)] }
pure gpd'
| name == "executable" = do
name' <- parseUnqualComponentName pos args
Note : we do n't parse the " executable " field here , hence the tail hack . 2010
exe <- parseCondTree (tail executableFieldDescrs) storeXFieldsExe (targetBuildDepends . buildInfo) emptyExecutable fields
TODO check duplicate name here ?
let gpd' = gpd { condExecutables = condExecutables gpd ++ [(name', exe)] }
pure gpd'
| name == "test-suite" = do
name' <- parseUnqualComponentName pos args
testStanza <- parseCondTree testSuiteFieldDescrs storeXFieldsTest (targetBuildDepends . testStanzaBuildInfo) emptyTestStanza fields
testSuite <- traverse (validateTestSuite pos) testStanza
TODO check duplicate name here ?
let gpd' = gpd { condTestSuites = condTestSuites gpd ++ [(name', testSuite)] }
pure gpd'
| name == "benchmark" = do
name' <- parseUnqualComponentName pos args
benchStanza <- parseCondTree benchmarkFieldDescrs storeXFieldsBenchmark (targetBuildDepends . benchmarkStanzaBuildInfo) emptyBenchmarkStanza fields
bench <- traverse (validateBenchmark pos) benchStanza
TODO check duplicate name here ?
let gpd' = gpd { condBenchmarks = condBenchmarks gpd ++ [(name', bench)] }
pure gpd'
| name == "flag" = do
name' <- parseName pos args
name'' <- runFieldParser' pos parsec name' `recoverWith` mkFlagName ""
flag <- parseFields flagFieldDescrs warnUnrec (emptyFlag name'') fields
let gpd' = gpd { genPackageFlags = genPackageFlags gpd ++ [flag] }
pure gpd'
| name == "custom-setup" && null args = do
sbi <- parseFields setupBInfoFieldDescrs warnUnrec mempty fields
let pd = packageDescription gpd
let gpd' = gpd { packageDescription = pd { setupBuildInfo = Just sbi } }
pure gpd'
| name == "source-repository" = do
kind <- case args of
[SecArgName spos secName] ->
runFieldParser' spos parsec (fromUTF8BS secName) `recoverWith` RepoHead
[] -> do
parseFailure pos $ "'source-repository' needs one argument"
pure RepoHead
_ -> do
parseFailure pos $ "Invalid source-repository kind " ++ show args
pure RepoHead
sr <- parseFields sourceRepoFieldDescrs warnUnrec (emptySourceRepo kind) fields
let pd = packageDescription gpd
let srs = sourceRepos pd
let gpd' = gpd { packageDescription = pd { sourceRepos = srs ++ [sr] } }
pure gpd'
| otherwise = do
parseWarning pos PWTUnknownSection $ "Ignoring section: " ++ show name
pure gpd
newSyntaxVersion :: Version
newSyntaxVersion = mkVersion [1, 2]
maybeWarnCabalVersion :: Syntax -> PackageDescription -> ParseResult ()
maybeWarnCabalVersion syntax pkg
| syntax == NewSyntax && specVersion pkg < newSyntaxVersion
= parseWarning (Position 0 0) PWTNewSyntax $
"A package using section syntax must specify at least\n"
++ "'cabal-version: >= 1.2'."
maybeWarnCabalVersion syntax pkg
| syntax == OldSyntax && specVersion pkg >= newSyntaxVersion
= parseWarning (Position 0 0) PWTOldSyntax $
"A package using 'cabal-version: "
++ displaySpecVersion (specVersionRaw pkg)
++ "' must use section syntax. See the Cabal user guide for details."
where
displaySpecVersion (Left version) = display version
displaySpecVersion (Right versionRange) =
case asVersionIntervals versionRange of
((LowerBound version _, _):_) -> display (orLaterVersion version)
maybeWarnCabalVersion _ _ = return ()
handleFutureVersionParseFailure : : Version - > ParseResult a - > ParseResult GenericPackageDescription
handleFutureVersionParseFailure _ cabalVersionNeeded _ parseBody =
error " handleFutureVersionParseFailure "
handleFutureVersionParseFailure :: Version -> ParseResult a -> ParseResult GenericPackageDescription
handleFutureVersionParseFailure _cabalVersionNeeded _parseBody =
error "handleFutureVersionParseFailure"
-}
undefined ( unless versionOk ( warning message ) > > parseBody )
` catchParseError ` \parseError - > case of
TabsError _ - > parseFail parseError
_ | versionOk - > parseFail parseError
| otherwise - > fail message
where versionOk = cabalVersionNeeded < = cabalVersion
message = " This package requires at least Cabal version "
+ + display cabalVersionNeeded
undefined (unless versionOk (warning message) >> parseBody)
`catchParseError` \parseError -> case parseError of
TabsError _ -> parseFail parseError
_ | versionOk -> parseFail parseError
| otherwise -> fail message
where versionOk = cabalVersionNeeded <= cabalVersion
message = "This package requires at least Cabal version "
++ display cabalVersionNeeded
-}
checkForUndefinedFlags
:: GenericPackageDescription
-> ParseResult ()
checkForUndefinedFlags _gpd = pure ()
let = map flagName flags
mapM _ ( ) ( maybeToList mlib )
mapM _ ( . snd ) sub_libs
mapM _ ( . snd ) exes
mapM _ ( . snd ) tests
checkCondTreeFlags : : [ FlagName ] - > CondTree ConfVar c a - > PM ( )
checkCondTreeFlags definedFlags ct = do
let fv = nub $ freeVars ct
unless ( all ( ` elem ` definedFlags ) fv ) $
fail $ " These flags are used without having been defined : "
+ + intercalate " , " [ n | FlagName n < - fv \\ ]
let definedFlags = map flagName flags
mapM_ (checkCondTreeFlags definedFlags) (maybeToList mlib)
mapM_ (checkCondTreeFlags definedFlags . snd) sub_libs
mapM_ (checkCondTreeFlags definedFlags . snd) exes
mapM_ (checkCondTreeFlags definedFlags . snd) tests
checkCondTreeFlags :: [FlagName] -> CondTree ConfVar c a -> PM ()
checkCondTreeFlags definedFlags ct = do
let fv = nub $ freeVars ct
unless (all (`elem` definedFlags) fv) $
fail $ "These flags are used without having been defined: "
++ intercalate ", " [ n | FlagName n <- fv \\ definedFlags ]
-}
parseName :: Position -> [SectionArg Position] -> ParseResult String
parseName pos args = case args of
[SecArgName _pos secName] ->
pure $ fromUTF8BS secName
[SecArgStr _pos secName] ->
pure secName
[] -> do
parseFailure pos $ "name required"
pure ""
_ -> do
parseFailure pos $ "Invalid name " ++ show args
pure ""
parseUnqualComponentName :: Position -> [SectionArg Position] -> ParseResult UnqualComponentName
parseUnqualComponentName pos args = mkUnqualComponentName <$> parseName pos args
parseFields
:: forall a.
-> ParseResult a
parseFields descrs _unknown = foldM go
where
go :: a -> Field Position -> ParseResult a
go x (Section (Name pos name) _ _) = do
parseFailure pos $ "invalid subsection " ++ show name
return x
go x (Field (Name pos name) fieldLines) =
case Map.lookup name fieldParsers of
Nothing -> do
parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name
return x
Just parser ->
runFieldParser (parser x) fieldLines
fieldParsers :: Map FieldName (a -> FieldParser a)
fieldParsers = Map.fromList $
map (\x -> (fieldName x, fieldParser x)) descrs
type C c a = CondBranch ConfVar c a
parseCondTree
:: forall a c.
-> ParseResult (CondTree ConfVar c a)
parseCondTree descs unknown cond ini = impl
where
impl :: [Field Position] -> ParseResult (CondTree ConfVar c a)
impl fields = do
(x, xs) <- goFields (ini, mempty) fields
return $ CondNode x (cond x) (SnocList.runSnocList xs)
goFields
:: (a, SnocList.SnocList (C c a))
-> [Field Position]
-> ParseResult (a, SnocList.SnocList (C c a))
goFields xss [] = return xss
goFields xxs (Section (Name _pos name) tes con : fields) | name == "if" = do
tes' <- parseConditionConfVar tes
con' <- impl con
goElse tes' con' xxs fields
goFields xxs (Section (Name pos name) _ _ : fields) = do
parseWarning pos PWTInvalidSubsection $ "invalid subsection " ++ show name
goFields xxs fields
goFields (x, xs) (Field (Name pos name) fieldLines : fields) =
case Map.lookup name fieldParsers of
Nothing -> fieldlinesToString pos fieldLines >>= \value -> case unknown name value x of
Nothing -> do
parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name
goFields (x, xs) fields
Just x' -> do
goFields (x', xs) fields
Just parser -> do
x' <- runFieldParser (parser x) fieldLines
goFields (x', xs) fields
goElse
:: Condition ConfVar
-> CondTree ConfVar c a
-> (a, SnocList.SnocList (C c a))
-> [Field Position]
-> ParseResult (a, SnocList.SnocList (C c a))
goElse tes con (x, xs) (Section (Name pos name) secArgs alt : fields) | name == "else" = do
when (not . null $ secArgs) $ do
parseFailure pos $ "`else` section has section arguments " ++ show secArgs
alt' <- case alt of
[] -> pure Nothing
_ -> Just <$> impl alt
let ieb = (CondBranch tes con alt')
goFields (x, SnocList.snoc xs ieb) fields
goElse tes con (x, xs) fields = do
let ieb = (CondBranch tes con Nothing)
goFields (x, SnocList.snoc xs ieb) fields
fieldParsers :: Map FieldName (a -> FieldParser a)
fieldParsers = Map.fromList $
map (\x -> (fieldName x, fieldParser x)) descs
Note [ Accumulating parser ]
In there parser , @'FieldDescr ' a@ is transformed into @Map FieldName ( a - >
FieldParser a)@. The weird value is used because we accumulate structure of
@a@ by folding over the fields . There are various reasons for that :
* Almost all fields are optional
* This is simple approach so declarative bi - directional format ( parsing and
printing ) of structure could be specified ( list of @'FieldDescr ' a@ )
* There are surface syntax fields corresponding to single field in the file :
@license - file@ and @license - files@
* This is quite safe approach .
When / if we re - implement the parser to support formatting preservging roundtrip
with new AST , this all need to be rewritten .
In there parser, @'FieldDescr' a@ is transformed into @Map FieldName (a ->
FieldParser a)@. The weird value is used because we accumulate structure of
@a@ by folding over the fields. There are various reasons for that:
* Almost all fields are optional
* This is simple approach so declarative bi-directional format (parsing and
printing) of structure could be specified (list of @'FieldDescr' a@)
* There are surface syntax fields corresponding to single field in the file:
@license-file@ and @license-files@
* This is quite safe approach.
When/if we re-implement the parser to support formatting preservging roundtrip
with new AST, this all need to be rewritten.
-}
| " " an old - style Cabal file . A sectionized file has :
sectionizeFields :: [Field ann] -> (Syntax, [Field ann])
sectionizeFields fs = case classifyFields fs of
Just fields -> (OldSyntax, convert fields)
Nothing -> (NewSyntax, fs)
where
classifyFields :: [Field ann] -> Maybe [(Name ann, [FieldLine ann])]
classifyFields = traverse f
where
f (Field name fieldlines) = Just (name, fieldlines)
f _ = Nothing
trim = BS.dropWhile isSpace' . BS.reverse . BS.dropWhile isSpace' . BS.reverse
isSpace' = (== 32)
convert :: [(Name ann, [FieldLine ann])] -> [Field ann]
convert fields =
let
toField (name, ls) = Field name ls
(hdr0, exes0) = break ((=="executable") . getName . fst) fields
(hdr, libfs0) = partition (not . (`elem` libFieldNames) . getName . fst) hdr0
(deps, libfs) = partition ((== "build-depends") . getName . fst)
libfs0
exes = unfoldr toExe exes0
toExe [] = Nothing
toExe ((Name pos n, ls) : r)
| n == "executable" =
let (efs, r') = break ((== "executable") . getName . fst) r
in Just (Section (Name pos "executable") [SecArgName pos $ trim $ fieldlinesToBS ls] (map toField $ deps ++ efs), r')
toExe _ = error "unexpected input to 'toExe'"
lib = case libfs of
[] -> []
((Name pos _, _) : _) ->
[Section (Name pos "library") [] (map toField $ deps ++ libfs)]
in map toField hdr ++ lib ++ exes
data Syntax = OldSyntax | NewSyntax
deriving (Eq, Show)
libFieldNames :: [FieldName]
libFieldNames = map fieldName libFieldDescrs
|
70bbb9f75121fd5f2c0a90e1f415593d2b1e1da1871776bcce90360c605c3b6f | noinia/hgeometry | BallSpec.hs | module Geometry.BallSpec where
import Control.Lens
import Control.Monad (forM_)
import Data.Ext
import Geometry.Ball
import Geometry.LineSegment
import Geometry.Point
import Data.Intersection
import Data.RealNumber.Rational
import Test.Hspec
import Test.QuickCheck
import Test.Util
--------------------------------------------------------------------------------
type R = RealNumber 5
spec :: Spec
spec = do
describe "Ball" $ do
describe "intersection tests" $ do
it "grav2ity's tests with Double" $
forM_ [1.0,1.1..1.6] $ \r ->
(segment r (0.1 :: Double) `intersects` unitCircle @Double) `shouldBe` True
it "touching line segment" $ do
let mySeg = ClosedLineSegment (ext $ Point2 @R (-1) 1) (ext $ Point2 1 1)
(mySeg `intersects` unitCircle @R) `shouldBe` True
unitCircle :: Num r => Circle () r
unitCircle = Circle (ext origin) 1
segment :: Floating r => r -> r -> LineSegment 2 () r
segment r x = ClosedLineSegment (ext origin) (ext $ Point2 (r*cos x) (r*sin x))
| null | https://raw.githubusercontent.com/noinia/hgeometry/89cd3d3109ec68f877bf8e34dc34b6df337a4ec1/hgeometry/test/src/Geometry/BallSpec.hs | haskell | ------------------------------------------------------------------------------ | module Geometry.BallSpec where
import Control.Lens
import Control.Monad (forM_)
import Data.Ext
import Geometry.Ball
import Geometry.LineSegment
import Geometry.Point
import Data.Intersection
import Data.RealNumber.Rational
import Test.Hspec
import Test.QuickCheck
import Test.Util
type R = RealNumber 5
spec :: Spec
spec = do
describe "Ball" $ do
describe "intersection tests" $ do
it "grav2ity's tests with Double" $
forM_ [1.0,1.1..1.6] $ \r ->
(segment r (0.1 :: Double) `intersects` unitCircle @Double) `shouldBe` True
it "touching line segment" $ do
let mySeg = ClosedLineSegment (ext $ Point2 @R (-1) 1) (ext $ Point2 1 1)
(mySeg `intersects` unitCircle @R) `shouldBe` True
unitCircle :: Num r => Circle () r
unitCircle = Circle (ext origin) 1
segment :: Floating r => r -> r -> LineSegment 2 () r
segment r x = ClosedLineSegment (ext origin) (ext $ Point2 (r*cos x) (r*sin x))
|
a1af0e05047e59cfef239f8eb6b494d98c4938c50a57b4d12ea5276ab1e0c0b9 | tsloughter/rebar3_tests | graph_map_sup.erl | %%%-------------------------------------------------------------------
%% @doc graph_map top level supervisor.
%% @end
%%%-------------------------------------------------------------------
-module(graph_map_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
-define(SERVER, ?MODULE).
%%====================================================================
%% API functions
%%====================================================================
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
%%====================================================================
%% Supervisor callbacks
%%====================================================================
Child : : { Id , StartFunc , Restart , Shutdown , Type , Modules }
init([]) ->
{ok, { {one_for_all, 0, 1}, []} }.
%%====================================================================
Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/tsloughter/rebar3_tests/090bfef7d3a4790bb6b16e4c38df6e4c0460b4b2/sub_app_eleveldb/apps/graph_map/src/graph_map_sup.erl | erlang | -------------------------------------------------------------------
@doc graph_map top level supervisor.
@end
-------------------------------------------------------------------
API
Supervisor callbacks
====================================================================
API functions
====================================================================
====================================================================
Supervisor callbacks
====================================================================
====================================================================
==================================================================== |
-module(graph_map_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
Child : : { Id , StartFunc , Restart , Shutdown , Type , Modules }
init([]) ->
{ok, { {one_for_all, 0, 1}, []} }.
Internal functions
|
dae9a251997e60073de2116f8acf760910c7b68264933c95c3bddb8782ea33a9 | monadbobo/ocaml-core | int_intf.ml | open Interfaces
module type S = sig
type t
include Binable with type t := t
include Comparable.S_binable with type t := t
include Floatable with type t := t
include Hashable.S_binable with type t := t
include Sexpable with type t := t
include Stringable with type t := t
include Intable with type t := t
val to_string_hum : t -> string
val num_bits : int
val zero : t
val one : t
val minus_one : t
val (+) : t -> t -> t
val (-) : t -> t -> t
val ( * ) : t -> t -> t
val (/) : t -> t -> t
val neg : t -> t
val succ : t -> t
val pred : t -> t
val abs : t -> t
Integer remainder , with the semantics of mod in Pervasives or rem in Int32/64 , i.e.
if y is not zero , the result of rem x y satisfies the following properties :
x = ( x / y ) * y + rem x y and y ) < = abs(y)-1 .
If y = 0 , rem x y raises Division_by_zero . Notice that rem x y is nonpositive if and
only if x < 0 .
if y is not zero, the result of rem x y satisfies the following properties:
x = (x / y) * y + rem x y and abs(rem x y) <= abs(y)-1.
If y = 0, rem x y raises Division_by_zero. Notice that rem x y is nonpositive if and
only if x < 0. *)
val rem : t -> t -> t
val max_value : t
val min_value : t
val bit_and : t -> t -> t
val bit_or : t -> t -> t
val bit_xor : t -> t -> t
val bit_not : t -> t
val decr : t ref -> unit
val incr : t ref -> unit
val shift_left : t -> int -> t
val shift_right : t -> int -> t
val shift_right_logical : t -> int -> t
val of_int32_exn : int32 -> t
val to_int32_exn : t -> int32
val of_int64_exn : int64 -> t
val to_int64 : t -> int64
val of_nativeint_exn : nativeint -> t
val to_nativeint_exn : t -> nativeint
end
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/core/lib/int_intf.ml | ocaml | open Interfaces
module type S = sig
type t
include Binable with type t := t
include Comparable.S_binable with type t := t
include Floatable with type t := t
include Hashable.S_binable with type t := t
include Sexpable with type t := t
include Stringable with type t := t
include Intable with type t := t
val to_string_hum : t -> string
val num_bits : int
val zero : t
val one : t
val minus_one : t
val (+) : t -> t -> t
val (-) : t -> t -> t
val ( * ) : t -> t -> t
val (/) : t -> t -> t
val neg : t -> t
val succ : t -> t
val pred : t -> t
val abs : t -> t
Integer remainder , with the semantics of mod in Pervasives or rem in Int32/64 , i.e.
if y is not zero , the result of rem x y satisfies the following properties :
x = ( x / y ) * y + rem x y and y ) < = abs(y)-1 .
If y = 0 , rem x y raises Division_by_zero . Notice that rem x y is nonpositive if and
only if x < 0 .
if y is not zero, the result of rem x y satisfies the following properties:
x = (x / y) * y + rem x y and abs(rem x y) <= abs(y)-1.
If y = 0, rem x y raises Division_by_zero. Notice that rem x y is nonpositive if and
only if x < 0. *)
val rem : t -> t -> t
val max_value : t
val min_value : t
val bit_and : t -> t -> t
val bit_or : t -> t -> t
val bit_xor : t -> t -> t
val bit_not : t -> t
val decr : t ref -> unit
val incr : t ref -> unit
val shift_left : t -> int -> t
val shift_right : t -> int -> t
val shift_right_logical : t -> int -> t
val of_int32_exn : int32 -> t
val to_int32_exn : t -> int32
val of_int64_exn : int64 -> t
val to_int64 : t -> int64
val of_nativeint_exn : nativeint -> t
val to_nativeint_exn : t -> nativeint
end
| |
881d26dd7ff4a6639fd23ca6bde0902200bedc93658bff8d583ebf02c915bb4b | holdybot/holdybot | handler.clj | (ns parky.test.handler
(:require
[clojure.test :refer :all]
[ring.mock.request :refer :all]
[parky.handler :refer :all]
[parky.middleware.formats :as formats]
[muuntaja.core :as m]
[mount.core :as mount]))
(defn parse-json [body]
(m/decode formats/instance "application/json" body))
(use-fixtures
:once
(fn [f]
(mount/start #'parky.config/env
#'parky.handler/app)
(f)))
(deftest test-app
(testing "main route"
(let [response (app (request :get "/"))]
(is (= 200 (:status response)))))
(testing "not-found route"
(let [response (app (request :get "/invalid"))]
(is (= 404 (:status response))))))
| null | https://raw.githubusercontent.com/holdybot/holdybot/e65007a3113c89b3f457b9d966d6bf305983c975/test/clj/parky/test/handler.clj | clojure | (ns parky.test.handler
(:require
[clojure.test :refer :all]
[ring.mock.request :refer :all]
[parky.handler :refer :all]
[parky.middleware.formats :as formats]
[muuntaja.core :as m]
[mount.core :as mount]))
(defn parse-json [body]
(m/decode formats/instance "application/json" body))
(use-fixtures
:once
(fn [f]
(mount/start #'parky.config/env
#'parky.handler/app)
(f)))
(deftest test-app
(testing "main route"
(let [response (app (request :get "/"))]
(is (= 200 (:status response)))))
(testing "not-found route"
(let [response (app (request :get "/invalid"))]
(is (= 404 (:status response))))))
| |
a87acb3bdeb3bd77d20fd02966950e75cf40e7fe624993844c99c1e0f5b476e5 | lspitzner/brittany | Test477.hs | -- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
import TestJustAbitToLongModuleNameLikeThisOneI as T
import TestJustShortEnoughModuleNameLikeThisOn as T
| null | https://raw.githubusercontent.com/lspitzner/brittany/a15eed5f3608bf1fa7084fcf008c6ecb79542562/data/Test477.hs | haskell | brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft } | import TestJustAbitToLongModuleNameLikeThisOneI as T
import TestJustShortEnoughModuleNameLikeThisOn as T
|
580f1b533f864c556d5117c1a2ceec0ef34f8b546cfb838dd2ef1ff289abfcc3 | ChrisPenner/proton | Layers.hs | # LANGUAGE TypeFamilies #
# LANGUAGE TupleSections #
module Examples.Layers where
import Proton
import Data.Profunctor
import Data.Profunctor.Rep
import Data.Profunctor.MStrong
import Proton.Algebraic
import qualified Data.Map as M
import Data.Foldable
import Data.Maybe
imgLayers :: [[Int]]
imgLayers = [ [0, 1, 0, 1]
, [2, 3, 3, 2]
]
-- done :: [Int]
done = imgLayers & pointWise * % head . = 0 )
selector : : ( , Corep p ~ M.Map k , ) = > Optic p s [ Maybe s ] s [ k ]
selector = listLens i d ( \(m , k ) - > flip M.lookup m < $ > k )
done ' = M.fromList [ ( 1 : : Int , [ 1 , 10 ] ) , ( 2 , [ 2 , 20 ] ) , ( 3 , [ 3 , 30 ] ) ] & selector . convolving * % 99 1
forward :: Profunctor p => (s -> a) -> Optic p s t a t
forward f = lmap f
back :: Profunctor p => (x -> t) -> Optic p s t s x
back f = rmap f
lookup'er :: Eq a => AlgebraicLens (a, b) (a, Maybe b) a a
lookup'er = listLens fst (\xs i -> (i, lookup i xs))
-- test :: IO ()
-- test = do
print $ [ ( [ 1 , 2 ] , " one " : : String ) , ( [ 10 , 0 ] , " two " ) ] & lookup'er . pointWise * % maximum
| null | https://raw.githubusercontent.com/ChrisPenner/proton/4ce22d473ce5bece8322c841bd2cf7f18673d57d/src/Examples/Layers.hs | haskell | done :: [Int]
test :: IO ()
test = do | # LANGUAGE TypeFamilies #
# LANGUAGE TupleSections #
module Examples.Layers where
import Proton
import Data.Profunctor
import Data.Profunctor.Rep
import Data.Profunctor.MStrong
import Proton.Algebraic
import qualified Data.Map as M
import Data.Foldable
import Data.Maybe
imgLayers :: [[Int]]
imgLayers = [ [0, 1, 0, 1]
, [2, 3, 3, 2]
]
done = imgLayers & pointWise * % head . = 0 )
selector : : ( , Corep p ~ M.Map k , ) = > Optic p s [ Maybe s ] s [ k ]
selector = listLens i d ( \(m , k ) - > flip M.lookup m < $ > k )
done ' = M.fromList [ ( 1 : : Int , [ 1 , 10 ] ) , ( 2 , [ 2 , 20 ] ) , ( 3 , [ 3 , 30 ] ) ] & selector . convolving * % 99 1
forward :: Profunctor p => (s -> a) -> Optic p s t a t
forward f = lmap f
back :: Profunctor p => (x -> t) -> Optic p s t s x
back f = rmap f
lookup'er :: Eq a => AlgebraicLens (a, b) (a, Maybe b) a a
lookup'er = listLens fst (\xs i -> (i, lookup i xs))
print $ [ ( [ 1 , 2 ] , " one " : : String ) , ( [ 10 , 0 ] , " two " ) ] & lookup'er . pointWise * % maximum
|
861a9f16fe8d4bac7c72cfb2dc25357df9b13b62a792f1b0222c62863230cf0d | mbj/stratosphere | PipeTargetRedshiftDataParametersProperty.hs | module Stratosphere.Pipes.Pipe.PipeTargetRedshiftDataParametersProperty (
PipeTargetRedshiftDataParametersProperty(..),
mkPipeTargetRedshiftDataParametersProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data PipeTargetRedshiftDataParametersProperty
= PipeTargetRedshiftDataParametersProperty {database :: (Value Prelude.Text),
dbUser :: (Prelude.Maybe (Value Prelude.Text)),
secretManagerArn :: (Prelude.Maybe (Value Prelude.Text)),
sqls :: (ValueList Prelude.Text),
statementName :: (Prelude.Maybe (Value Prelude.Text)),
withEvent :: (Prelude.Maybe (Value Prelude.Bool))}
mkPipeTargetRedshiftDataParametersProperty ::
Value Prelude.Text
-> ValueList Prelude.Text
-> PipeTargetRedshiftDataParametersProperty
mkPipeTargetRedshiftDataParametersProperty database sqls
= PipeTargetRedshiftDataParametersProperty
{database = database, sqls = sqls, dbUser = Prelude.Nothing,
secretManagerArn = Prelude.Nothing,
statementName = Prelude.Nothing, withEvent = Prelude.Nothing}
instance ToResourceProperties PipeTargetRedshiftDataParametersProperty where
toResourceProperties PipeTargetRedshiftDataParametersProperty {..}
= ResourceProperties
{awsType = "AWS::Pipes::Pipe.PipeTargetRedshiftDataParameters",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["Database" JSON..= database, "Sqls" JSON..= sqls]
(Prelude.catMaybes
[(JSON..=) "DbUser" Prelude.<$> dbUser,
(JSON..=) "SecretManagerArn" Prelude.<$> secretManagerArn,
(JSON..=) "StatementName" Prelude.<$> statementName,
(JSON..=) "WithEvent" Prelude.<$> withEvent]))}
instance JSON.ToJSON PipeTargetRedshiftDataParametersProperty where
toJSON PipeTargetRedshiftDataParametersProperty {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["Database" JSON..= database, "Sqls" JSON..= sqls]
(Prelude.catMaybes
[(JSON..=) "DbUser" Prelude.<$> dbUser,
(JSON..=) "SecretManagerArn" Prelude.<$> secretManagerArn,
(JSON..=) "StatementName" Prelude.<$> statementName,
(JSON..=) "WithEvent" Prelude.<$> withEvent])))
instance Property "Database" PipeTargetRedshiftDataParametersProperty where
type PropertyType "Database" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{database = newValue, ..}
instance Property "DbUser" PipeTargetRedshiftDataParametersProperty where
type PropertyType "DbUser" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{dbUser = Prelude.pure newValue, ..}
instance Property "SecretManagerArn" PipeTargetRedshiftDataParametersProperty where
type PropertyType "SecretManagerArn" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{secretManagerArn = Prelude.pure newValue, ..}
instance Property "Sqls" PipeTargetRedshiftDataParametersProperty where
type PropertyType "Sqls" PipeTargetRedshiftDataParametersProperty = ValueList Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty {sqls = newValue, ..}
instance Property "StatementName" PipeTargetRedshiftDataParametersProperty where
type PropertyType "StatementName" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{statementName = Prelude.pure newValue, ..}
instance Property "WithEvent" PipeTargetRedshiftDataParametersProperty where
type PropertyType "WithEvent" PipeTargetRedshiftDataParametersProperty = Value Prelude.Bool
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{withEvent = Prelude.pure newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/pipes/gen/Stratosphere/Pipes/Pipe/PipeTargetRedshiftDataParametersProperty.hs | haskell | module Stratosphere.Pipes.Pipe.PipeTargetRedshiftDataParametersProperty (
PipeTargetRedshiftDataParametersProperty(..),
mkPipeTargetRedshiftDataParametersProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data PipeTargetRedshiftDataParametersProperty
= PipeTargetRedshiftDataParametersProperty {database :: (Value Prelude.Text),
dbUser :: (Prelude.Maybe (Value Prelude.Text)),
secretManagerArn :: (Prelude.Maybe (Value Prelude.Text)),
sqls :: (ValueList Prelude.Text),
statementName :: (Prelude.Maybe (Value Prelude.Text)),
withEvent :: (Prelude.Maybe (Value Prelude.Bool))}
mkPipeTargetRedshiftDataParametersProperty ::
Value Prelude.Text
-> ValueList Prelude.Text
-> PipeTargetRedshiftDataParametersProperty
mkPipeTargetRedshiftDataParametersProperty database sqls
= PipeTargetRedshiftDataParametersProperty
{database = database, sqls = sqls, dbUser = Prelude.Nothing,
secretManagerArn = Prelude.Nothing,
statementName = Prelude.Nothing, withEvent = Prelude.Nothing}
instance ToResourceProperties PipeTargetRedshiftDataParametersProperty where
toResourceProperties PipeTargetRedshiftDataParametersProperty {..}
= ResourceProperties
{awsType = "AWS::Pipes::Pipe.PipeTargetRedshiftDataParameters",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["Database" JSON..= database, "Sqls" JSON..= sqls]
(Prelude.catMaybes
[(JSON..=) "DbUser" Prelude.<$> dbUser,
(JSON..=) "SecretManagerArn" Prelude.<$> secretManagerArn,
(JSON..=) "StatementName" Prelude.<$> statementName,
(JSON..=) "WithEvent" Prelude.<$> withEvent]))}
instance JSON.ToJSON PipeTargetRedshiftDataParametersProperty where
toJSON PipeTargetRedshiftDataParametersProperty {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["Database" JSON..= database, "Sqls" JSON..= sqls]
(Prelude.catMaybes
[(JSON..=) "DbUser" Prelude.<$> dbUser,
(JSON..=) "SecretManagerArn" Prelude.<$> secretManagerArn,
(JSON..=) "StatementName" Prelude.<$> statementName,
(JSON..=) "WithEvent" Prelude.<$> withEvent])))
instance Property "Database" PipeTargetRedshiftDataParametersProperty where
type PropertyType "Database" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{database = newValue, ..}
instance Property "DbUser" PipeTargetRedshiftDataParametersProperty where
type PropertyType "DbUser" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{dbUser = Prelude.pure newValue, ..}
instance Property "SecretManagerArn" PipeTargetRedshiftDataParametersProperty where
type PropertyType "SecretManagerArn" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{secretManagerArn = Prelude.pure newValue, ..}
instance Property "Sqls" PipeTargetRedshiftDataParametersProperty where
type PropertyType "Sqls" PipeTargetRedshiftDataParametersProperty = ValueList Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty {sqls = newValue, ..}
instance Property "StatementName" PipeTargetRedshiftDataParametersProperty where
type PropertyType "StatementName" PipeTargetRedshiftDataParametersProperty = Value Prelude.Text
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{statementName = Prelude.pure newValue, ..}
instance Property "WithEvent" PipeTargetRedshiftDataParametersProperty where
type PropertyType "WithEvent" PipeTargetRedshiftDataParametersProperty = Value Prelude.Bool
set newValue PipeTargetRedshiftDataParametersProperty {..}
= PipeTargetRedshiftDataParametersProperty
{withEvent = Prelude.pure newValue, ..} | |
e468410bef47f6ae8e0b668a5565abc3e220e1c8b1b7ec46eef1ea82c9e20c28 | exercism/clojure | exemplar.clj | (ns bird-watcher)
(def last-week [0 2 5 3 7 8 4])
(defn today [birds]
(last birds))
(defn inc-bird [birds]
(update birds (dec (count birds)) inc))
(defn day-without-birds? [birds]
(pos? (count (filter zero? birds))))
(defn n-days-count [birds n]
(reduce + (take n birds)))
(defn busy-days [birds]
(count (filter #(>= % 5) birds)))
(defn odd-week? [birds]
(= birds [1 0 1 0 1 0 1]))
| null | https://raw.githubusercontent.com/exercism/clojure/42df085688c9bf0bbccf8dce33d995287f7a1bc8/exercises/concept/bird-watcher/.meta/exemplar.clj | clojure | (ns bird-watcher)
(def last-week [0 2 5 3 7 8 4])
(defn today [birds]
(last birds))
(defn inc-bird [birds]
(update birds (dec (count birds)) inc))
(defn day-without-birds? [birds]
(pos? (count (filter zero? birds))))
(defn n-days-count [birds n]
(reduce + (take n birds)))
(defn busy-days [birds]
(count (filter #(>= % 5) birds)))
(defn odd-week? [birds]
(= birds [1 0 1 0 1 0 1]))
| |
e4514a369a0f3e742f5403764a2eebeb0f8991ed29572e5bb612252fe9344c44 | haskell-beam/beam | Connection.hs | # OPTIONS_GHC -fno - warn - orphans #
{-# LANGUAGE BangPatterns #-}
# LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE InstanceSigs #
# LANGUAGE UndecidableInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
module Database.Beam.Sqlite.Connection
( Sqlite(..), SqliteM(..)
, sqliteUriSyntax
, runBeamSqlite, runBeamSqliteDebug
* Emulated @INSERT RETURNING@ support
, insertReturning, runInsertReturningList
) where
import Prelude hiding (fail)
import Database.Beam.Backend
import Database.Beam.Backend.Internal.Compat
import qualified Database.Beam.Backend.SQL.BeamExtensions as Beam
import Database.Beam.Backend.URI
import Database.Beam.Migrate.Generics
import Database.Beam.Migrate.SQL ( BeamMigrateOnlySqlBackend, FieldReturnType(..) )
import qualified Database.Beam.Migrate.SQL as Beam
import Database.Beam.Migrate.SQL.BeamExtensions
import Database.Beam.Query ( SqlInsert(..), SqlInsertValues(..)
, HasQBuilder(..), HasSqlEqualityCheck
, HasSqlQuantifiedEqualityCheck
, DataType(..)
, HasSqlInTable(..)
, insert, current_ )
import Database.Beam.Query.Internal
import Database.Beam.Query.SQL92
import Database.Beam.Schema.Tables ( Beamable
, Columnar'(..)
, DatabaseEntity(..)
, DatabaseEntityDescriptor(..)
, TableEntity
, TableField(..)
, allBeamValues
, changeBeamRep )
import Database.Beam.Sqlite.Syntax
import Database.SQLite.Simple ( Connection, ToRow(..), FromRow(..)
, Query(..), SQLData(..), field
, execute, execute_
, withStatement, bind, nextRow
, query_, open, close )
import Database.SQLite.Simple.FromField ( FromField(..), ResultError(..)
, returnError, fieldData)
import Database.SQLite.Simple.Internal (RowParser(RP), unRP)
import Database.SQLite.Simple.Ok (Ok(..))
import Database.SQLite.Simple.Types (Null)
import Control.Exception (SomeException(..), bracket_, onException, mask)
import Control.Monad (forM_)
import Control.Monad.Base (MonadBase)
import Control.Monad.Fail (MonadFail(..))
import Control.Monad.Free.Church
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Identity (Identity)
import Control.Monad.Reader (ReaderT(..), MonadReader(..), runReaderT)
import Control.Monad.State.Strict (MonadState(..), StateT(..), runStateT)
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Writer (tell, execWriter)
import Data.ByteString.Builder (toLazyByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.DList as D
import Data.Int
import Data.Maybe (mapMaybe)
import Data.Proxy (Proxy(..))
import Data.Scientific (Scientific)
import Data.String (fromString)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T (decodeUtf8)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL (decodeUtf8)
import Data.Time ( LocalTime, UTCTime, Day
, ZonedTime, utc, utcToLocalTime )
import Data.Typeable (cast)
import Data.Word
import GHC.TypeLits
import Network.URI
#ifdef UNIX
import System.Posix.Process (getProcessID)
#elif defined(WINDOWS)
import System.Win32.Process (getCurrentProcessId)
#else
#error Need either POSIX or Win32 API for MonadBeamInsertReturning
#endif
import Text.Read (readMaybe)
-- | The SQLite backend. Used to parameterize 'MonadBeam' and 'FromBackendRow'
-- to provide support for SQLite databases. See the documentation for
-- 'MonadBeam' and the <https://haskell-beam.github.io/beam/ user guide> for more
-- information on how to use this backend.
data Sqlite = Sqlite

-- Row fields are decoded via sqlite-simple's 'FromField' class.
instance BeamBackend Sqlite where
  type BackendFromField Sqlite = FromField
instance HasQBuilder Sqlite where
  -- SQLite does not support arbitrarily nesting UNION, INTERSECT, and EXCEPT,
  -- so build queries with that capability disabled.
  buildSqlQuery = buildSql92Query' False
-- | Row-valued @IN@ expressions for SQLite: the right-hand side is rendered
-- as an explicit parenthesized @VALUES@ row-constructor list.
instance HasSqlInTable Sqlite where
  inRowValuesE Proxy lhs rhs =
    SqliteExpressionSyntax $
      parens (fromSqliteExpression lhs)
        <> emit " IN "
        <> parens (emit "VALUES " <> commas (fromSqliteExpression <$> rhs))
-- Both strict and lazy 'Text' count as SQL string types for this backend.
instance BeamSqlBackendIsString Sqlite T.Text
instance BeamSqlBackendIsString Sqlite String

-- Column types decoded with the class's default parser, which delegates to
-- the backend's field decoder (see @BackendFromField Sqlite = FromField@).
instance FromBackendRow Sqlite Bool
instance FromBackendRow Sqlite Double
instance FromBackendRow Sqlite Float
instance FromBackendRow Sqlite Int8
instance FromBackendRow Sqlite Int16
instance FromBackendRow Sqlite Int32
instance FromBackendRow Sqlite Int64
instance FromBackendRow Sqlite Integer
instance FromBackendRow Sqlite Word8
instance FromBackendRow Sqlite Word16
instance FromBackendRow Sqlite Word32
instance FromBackendRow Sqlite Word64
instance FromBackendRow Sqlite BS.ByteString
instance FromBackendRow Sqlite BL.ByteString
instance FromBackendRow Sqlite T.Text
instance FromBackendRow Sqlite TL.Text
instance FromBackendRow Sqlite UTCTime
instance FromBackendRow Sqlite Day
instance FromBackendRow Sqlite Null
-- | A 'Char' is decoded from a text column. The text must be non-empty;
-- only its first character is used.
instance FromBackendRow Sqlite Char where
  fromBackendRow = do
    txt <- fromBackendRow
    maybe (fail "Need string of size one to parse Char") (pure . fst) (T.uncons txt)
instance FromBackendRow Sqlite SqlNull where
  -- Parse a NULL column (via sqlite-simple's 'Null') into beam's 'SqlNull'.
  fromBackendRow =
    SqlNull <$ (fromBackendRow :: FromBackendRowM Sqlite Null)
instance FromBackendRow Sqlite LocalTime where
  -- Decode as 'UTCTime', then reinterpret the same clock value as a
  -- zone-less 'LocalTime' (conversion at offset 'utc' is value-preserving).
  fromBackendRow = utcToLocalTime utc <$> fromBackendRow
instance FromBackendRow Sqlite Scientific where
  -- Decode through the 'SqliteScientific' wrapper, which accepts integer,
  -- float, text, and blob storage classes (see its 'FromField' instance).
  fromBackendRow = unSqliteScientific <$> fromBackendRow
instance FromBackendRow Sqlite SqliteScientific
-- Unsized 'Int'/'Word' are rejected at compile time with a message steering
-- users toward explicitly-sized integer types.
instance TypeError (PreferExplicitSize Int Int32) => FromBackendRow Sqlite Int
instance TypeError (PreferExplicitSize Word Word32) => FromBackendRow Sqlite Word
-- | Wrapper used to decode a 'Scientific' from any SQLite storage class:
-- integers and floats convert directly, while text and blobs are parsed
-- with 'readMaybe'.
newtype SqliteScientific = SqliteScientific { unSqliteScientific :: Scientific }

instance FromField SqliteScientific where
  fromField f = SqliteScientific <$> decode (fieldData f)
    where
      decode (SQLInteger i) = pure (fromIntegral i)
      decode (SQLFloat d)   = pure (fromRational (toRational d))
      decode (SQLText t)    = tryRead (T.unpack t)
      decode (SQLBlob b)    = tryRead (BS.unpack b)
      decode SQLNull        = returnError UnexpectedNull f "null"

      -- Parse a textual representation; a parse failure becomes a
      -- 'ConversionFailed' result error.
      tryRead s =
        case readMaybe s of
          Just s' -> pure s'
          Nothing -> returnError ConversionFailed f $
            "No conversion to Scientific for '" <> s <> "'"
-- 'Sqlite' is a full SQL backend (and a migrate-only backend) whose command
-- syntax is 'SqliteCommandSyntax'.
instance BeamSqlBackend Sqlite
instance BeamMigrateOnlySqlBackend Sqlite
type instance BeamSqlBackendSyntax Sqlite = SqliteCommandSyntax
-- | Marker argument accepted by 'Beam.field' for SQLite columns whose value
-- is supplied by the database. The instance below consumes the marker,
-- flips the has-default type flag to 'True, and passes 'Nothing' for the
-- default value expression.
data SqliteHasDefault = SqliteHasDefault
instance FieldReturnType 'True 'False Sqlite resTy a =>
         FieldReturnType 'False 'False Sqlite resTy (SqliteHasDefault -> a) where
  field' _ _ nm ty _ collation constraints SqliteHasDefault =
    field' (Proxy @'True) (Proxy @'False) nm ty Nothing collation constraints
-- Serial (auto-incrementing) columns use SQLite's special serial column type
-- and are marked as database-defaulted via 'SqliteHasDefault'.
instance BeamSqlBackendHasSerial Sqlite where
  genericSerial nm = Beam.field nm (DataType sqliteSerialType) SqliteHasDefault
-- | 'MonadBeam' instance inside which SQLite queries are run. See the
-- <https://haskell-beam.github.io/beam/ user guide> for more information
newtype SqliteM a
  = SqliteM
  { runSqliteM :: ReaderT (String -> IO (), Connection) IO a
    -- ^ Run an IO action with access to a SQLite connection and a debug logging
    -- function, called for each query submitted on the connection.
  } deriving (Monad, Functor, Applicative, MonadIO, MonadFail)
    deriving newtype (MonadBase IO, MonadBaseControl IO)
-- Internal wrapper giving a pre-built list of 'SQLData' a 'ToRow' instance,
-- so bound parameter lists can be handed to sqlite-simple verbatim.
newtype BeamSqliteParams = BeamSqliteParams [SQLData]
instance ToRow BeamSqliteParams where
  toRow (BeamSqliteParams x) = x
-- Internal wrapper that funnels beam's 'FromBackendRow' parser through
-- sqlite-simple's 'FromRow' machinery.
newtype BeamSqliteRow a = BeamSqliteRow a
instance FromBackendRow Sqlite a => FromRow (BeamSqliteRow a) where
  -- Interpret the Church-encoded free monad underlying 'FromBackendRowM'
  -- into sqlite-simple's 'RowParser'.
  fromRow = BeamSqliteRow <$> runF fromBackendRow' finish step
    where
      FromBackendRowM fromBackendRow' = fromBackendRow :: FromBackendRowM Sqlite a

      -- Re-tag sqlite-simple 'ResultError's as beam 'BeamRowReadError's,
      -- recording the column index at which the failure occurred.
      -- Exceptions of any other type are discarded ('cast' yields Nothing).
      translateErrors :: Maybe Int -> SomeException -> Maybe SomeException
      translateErrors col (SomeException e) =
        case cast e of
          Just (ConversionFailed { errSQLType = typeString
                                 , errHaskellType = hsString
                                 , errMessage = msg }) ->
            Just (SomeException (BeamRowReadError col (ColumnTypeMismatch hsString typeString ("conversion failed: " ++ msg))))
          Just (UnexpectedNull {}) ->
            Just (SomeException (BeamRowReadError col ColumnUnexpectedNull))
          Just (Incompatible { errSQLType = typeString
                             , errHaskellType = hsString
                             , errMessage = msg }) ->
            Just (SomeException (BeamRowReadError col (ColumnTypeMismatch hsString typeString ("incompatible: " ++ msg))))
          Nothing -> Nothing
      finish = pure

      -- Interpret one instruction of the free-monad parser.
      step :: forall a'. FromBackendRowF Sqlite (RowParser a') -> RowParser a'
      -- Parse a single column with sqlite-simple's 'field'; the parser state
      -- is @(column index, row)@, so on failure we can tag the errors with
      -- the column at which they occurred.
      step (ParseOneField next) =
        RP $ ReaderT $ \ro -> StateT $ \st@(col, _) ->
          case runStateT (runReaderT (unRP field) ro) st of
            Ok (x, st') -> runStateT (runReaderT (unRP (next x)) ro) st'
            Errors errs -> Errors (mapMaybe (translateErrors (Just col)) errs)
      -- Alternation: try the first parser; on failure, retry the second from
      -- the same saved state. If both fail, report both error lists.
      step (Alt (FromBackendRowM a) (FromBackendRowM b) next) = do
        RP $ do
          let RP a' = runF a finish step
              RP b' = runF b finish step
          st <- get
          ro <- ask
          case runStateT (runReaderT a' ro) st of
            Ok (ra, st') -> do
              put st'
              unRP (next ra)
            Errors aErrs ->
              case runStateT (runReaderT b' ro) st of
                Ok (rb, st') -> do
                  put st'
                  unRP (next rb)
                Errors bErrs ->
                  lift (lift (Errors (aErrs ++ bErrs)))
      -- Explicit parse failure: surface the error unchanged.
      step (FailParseWith err) = RP (lift (lift (Errors [SomeException err])))
-- * Equality checks

-- The CPP macro below stamps out plain and quantified SQL equality-check
-- instances for each concrete column type SQLite can compare.
#define HAS_SQLITE_EQUALITY_CHECK(ty) \
instance HasSqlEqualityCheck Sqlite (ty); \
instance HasSqlQuantifiedEqualityCheck Sqlite (ty);

HAS_SQLITE_EQUALITY_CHECK(Int8)
HAS_SQLITE_EQUALITY_CHECK(Int16)
HAS_SQLITE_EQUALITY_CHECK(Int32)
HAS_SQLITE_EQUALITY_CHECK(Int64)
HAS_SQLITE_EQUALITY_CHECK(Word8)
HAS_SQLITE_EQUALITY_CHECK(Word16)
HAS_SQLITE_EQUALITY_CHECK(Word32)
HAS_SQLITE_EQUALITY_CHECK(Word64)
HAS_SQLITE_EQUALITY_CHECK(Double)
HAS_SQLITE_EQUALITY_CHECK(Float)
HAS_SQLITE_EQUALITY_CHECK(Bool)
HAS_SQLITE_EQUALITY_CHECK(String)
HAS_SQLITE_EQUALITY_CHECK(T.Text)
HAS_SQLITE_EQUALITY_CHECK(TL.Text)
HAS_SQLITE_EQUALITY_CHECK(BS.ByteString)
HAS_SQLITE_EQUALITY_CHECK(BL.ByteString)
HAS_SQLITE_EQUALITY_CHECK(UTCTime)
HAS_SQLITE_EQUALITY_CHECK(LocalTime)
HAS_SQLITE_EQUALITY_CHECK(ZonedTime)
HAS_SQLITE_EQUALITY_CHECK(Char)
HAS_SQLITE_EQUALITY_CHECK(Integer)
HAS_SQLITE_EQUALITY_CHECK(Scientific)

-- Unsized 'Int'/'Word' are rejected at compile time with a message steering
-- users toward explicitly-sized integer types.
instance TypeError (PreferExplicitSize Int Int32) => HasSqlEqualityCheck Sqlite Int
instance TypeError (PreferExplicitSize Int Int32) => HasSqlQuantifiedEqualityCheck Sqlite Int
instance TypeError (PreferExplicitSize Word Word32) => HasSqlEqualityCheck Sqlite Word
instance TypeError (PreferExplicitSize Word Word32) => HasSqlQuantifiedEqualityCheck Sqlite Word
-- | Integer types that may back a SQLite serial (auto-incrementing) column.
class HasDefaultSqlDataType Sqlite a => IsSqliteSerialIntegerType a
instance IsSqliteSerialIntegerType Int32
instance IsSqliteSerialIntegerType Int64
-- Unsized 'Int' is rejected with a custom type error.
instance TypeError (PreferExplicitSize Int Int32) => IsSqliteSerialIntegerType Int
instance IsSqliteSerialIntegerType a => HasDefaultSqlDataType Sqlite (SqlSerial a) where
  -- NOTE(review): the final Bool appears to select between the column
  -- definition (False => SQLite's serial type) and a plain embedded value
  -- type (True => intType) — confirm against 'HasDefaultSqlDataType' docs.
  defaultSqlDataType _ _ False = sqliteSerialType
  defaultSqlDataType _ _ True = intType
instance HasDefaultSqlDataType Sqlite BS.ByteString where
  -- TODO we should somehow allow constraints based on backend
  defaultSqlDataType _ _ _ = sqliteBlobType
instance HasDefaultSqlDataType Sqlite LocalTime where
  -- Zone-less timestamp column (no explicit precision).
  defaultSqlDataType _ _ _ = timestampType Nothing False
-- | URI syntax for use with 'withDbConnection'. See documentation for
-- 'BeamURIOpeners' for more information. An empty URI path opens an
-- in-memory database.
sqliteUriSyntax :: c Sqlite Connection SqliteM
                -> BeamURIOpeners c
sqliteUriSyntax =
  mkUriOpener runBeamSqlite "sqlite:" $ \uri -> do
    let sqliteName = case uriPath uri of
                       "" -> ":memory:"
                       path -> path
    hdl <- open sqliteName
    pure (hdl, close hdl)
-- | Run a 'SqliteM' action against the given connection, passing each
-- submitted statement (with its bound values) to the supplied logging
-- function.
runBeamSqliteDebug :: (String -> IO ()) -> Connection -> SqliteM a -> IO a
runBeamSqliteDebug debugStmt conn x = runReaderT (runSqliteM x) (debugStmt, conn)

-- | Like 'runBeamSqliteDebug', but discarding the debug output.
runBeamSqlite :: Connection -> SqliteM a -> IO a
runBeamSqlite = runBeamSqliteDebug (\_ -> pure ())
instance MonadBeam Sqlite SqliteM where
  -- Commands with no result rows: render the SQL with '?' placeholders, log
  -- it alongside the bound values, then execute via sqlite-simple.
  runNoReturn (SqliteCommandSyntax (SqliteSyntax cmd vals)) =
    SqliteM $ do
      (logger, conn) <- ask
      let cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
      liftIO (logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals)))
      liftIO (execute conn (fromString cmdString) (D.toList vals))
  -- INSERTs are routed through 'runSqliteInsert', which emulates DEFAULT
  -- column values.
  runNoReturn (SqliteCommandInsert insertStmt_) =
    SqliteM $ do
      (logger, conn) <- ask
      liftIO (runSqliteInsert logger conn insertStmt_)
  -- Row-returning commands: prepare the statement, bind the values, and hand
  -- the consumer an action that steps one row at a time ('Nothing' signals
  -- exhaustion).
  runReturningMany (SqliteCommandSyntax (SqliteSyntax cmd vals)) action =
      SqliteM $ do
        (logger, conn) <- ask
        let cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
        liftIO $ do
          logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals))
          withStatement conn (fromString cmdString) $ \stmt ->
            do bind stmt (BeamSqliteParams (D.toList vals))
               let nextRow' = liftIO (nextRow stmt) >>= \x ->
                     case x of
                       Nothing -> pure Nothing
                       Just (BeamSqliteRow row) -> pure row
               runReaderT (runSqliteM (action nextRow')) (logger, conn)
  -- SQLite cannot return rows from an INSERT; direct users to the emulation.
  runReturningMany SqliteCommandInsert {} _ =
    fail . mconcat $
    [ "runReturningMany{Sqlite}: sqlite does not support returning "
    , "rows from an insert, use Database.Beam.Sqlite.insertReturning "
    , "for emulation" ]
instance Beam.MonadBeamInsertReturning Sqlite SqliteM where
runInsertReturningList = runInsertReturningList
runSqliteInsert :: (String -> IO ()) -> Connection -> SqliteInsertSyntax -> IO ()
runSqliteInsert logger conn (SqliteInsertSyntax tbl fields vs onConflict)
-- If all expressions are simple expressions (no default), then just
| SqliteInsertExpressions es <- vs, any (any (== SqliteExpressionDefault)) es =
forM_ es $ \row -> do
let (fields', row') = unzip $ filter ((/= SqliteExpressionDefault) . snd) $ zip fields row
SqliteSyntax cmd vals = formatSqliteInsertOnConflict tbl fields' (SqliteInsertExpressions [ row' ]) onConflict
cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals))
execute conn (fromString cmdString) (D.toList vals)
| otherwise = do
let SqliteSyntax cmd vals = formatSqliteInsertOnConflict tbl fields vs onConflict
cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals))
execute conn (fromString cmdString) (D.toList vals)
-- * emulated INSERT returning support
-- | Build a 'SqliteInsertReturning' representing inserting the given values
-- into the given table. Use 'runInsertReturningList'
insertReturning :: Beamable table
=> DatabaseEntity Sqlite db (TableEntity table)
-> SqlInsertValues Sqlite (table (QExpr Sqlite s))
-> SqlInsert Sqlite table
insertReturning = insert
-- | Runs a 'SqliteInsertReturning' statement and returns a result for each
-- inserted row.
runInsertReturningList :: (Beamable table, FromBackendRow Sqlite (table Identity))
=> SqlInsert Sqlite table
-> SqliteM [ table Identity ]
runInsertReturningList SqlInsertNoRows = pure []
runInsertReturningList (SqlInsert tblSettings insertStmt_@(SqliteInsertSyntax nm _ _ _)) =
do (logger, conn) <- SqliteM ask
SqliteM . liftIO $ do
#ifdef UNIX
processId <- fromString . show <$> getProcessID
#elif defined(WINDOWS)
processId <- fromString . show <$> getCurrentProcessId
#else
#error Need either POSIX or Win32 API for MonadBeamInsertReturning
#endif
let tableNameTxt = T.decodeUtf8 (BL.toStrict (sqliteRenderSyntaxScript (fromSqliteTableName nm)))
startSavepoint =
execute_ conn (Query ("SAVEPOINT insert_savepoint_" <> processId))
rollbackToSavepoint =
execute_ conn (Query ("ROLLBACK TRANSACTION TO SAVEPOINT insert_savepoint_" <> processId))
releaseSavepoint =
execute_ conn (Query ("RELEASE SAVEPOINT insert_savepoint_" <> processId))
createInsertedValuesTable =
execute_ conn (Query ("CREATE TEMPORARY TABLE inserted_values_" <> processId <> " AS SELECT * FROM " <> tableNameTxt <> " LIMIT 0"))
dropInsertedValuesTable =
execute_ conn (Query ("DROP TABLE inserted_values_" <> processId))
createInsertTrigger =
execute_ conn (Query ("CREATE TEMPORARY TRIGGER insert_trigger_" <> processId <> " AFTER INSERT ON " <> tableNameTxt <> " BEGIN " <>
"INSERT INTO inserted_values_" <> processId <> " SELECT * FROM " <> tableNameTxt <> " WHERE ROWID=last_insert_rowid(); END" ))
dropInsertTrigger =
execute_ conn (Query ("DROP TRIGGER insert_trigger_" <> processId))
mask $ \restore -> do
startSavepoint
flip onException rollbackToSavepoint . restore $ do
x <- bracket_ createInsertedValuesTable dropInsertedValuesTable $
bracket_ createInsertTrigger dropInsertTrigger $ do
runSqliteInsert logger conn insertStmt_
let columns = TL.toStrict $ TL.decodeUtf8 $
sqliteRenderSyntaxScript $ commas $
allBeamValues (\(Columnar' projField) -> quotedIdentifier (_fieldName projField)) $
tblSettings
fmap (\(BeamSqliteRow r) -> r) <$> query_ conn (Query ("SELECT " <> columns <> " FROM inserted_values_" <> processId))
releaseSavepoint
return x
instance Beam.BeamHasInsertOnConflict Sqlite where
newtype SqlConflictTarget Sqlite table = SqliteConflictTarget
{ unSqliteConflictTarget :: table (QExpr Sqlite QInternal) -> SqliteSyntax }
newtype SqlConflictAction Sqlite table = SqliteConflictAction
{ unSqliteConflictAction :: forall s. table (QField s) -> SqliteSyntax }
insertOnConflict
:: forall db table s. Beamable table
=> DatabaseEntity Sqlite db (TableEntity table)
-> SqlInsertValues Sqlite (table (QExpr Sqlite s))
-> Beam.SqlConflictTarget Sqlite table
-> Beam.SqlConflictAction Sqlite table
-> SqlInsert Sqlite table
insertOnConflict (DatabaseEntity dt) values target action = case values of
SqlInsertValuesEmpty -> SqlInsertNoRows
SqlInsertValues vs -> SqlInsert (dbTableSettings dt) $
let getFieldName
:: forall a
. Columnar' (TableField table) a
-> Columnar' (QField QInternal) a
getFieldName (Columnar' fd) =
Columnar' $ QField False (dbTableCurrentName dt) $ _fieldName fd
tableFields = changeBeamRep getFieldName $ dbTableSettings dt
tellFieldName _ _ f = tell [f] >> pure f
fieldNames = execWriter $
project' (Proxy @AnyType) (Proxy @((), T.Text)) tellFieldName tableFields
currentField
:: forall a
. Columnar' (QField QInternal) a
-> Columnar' (QExpr Sqlite QInternal) a
currentField (Columnar' f) = Columnar' $ current_ f
tableCurrent = changeBeamRep currentField tableFields
in SqliteInsertSyntax (tableNameFromEntity dt) fieldNames vs $ Just $
SqliteOnConflictSyntax $ mconcat
[ emit "ON CONFLICT "
, unSqliteConflictTarget target tableCurrent
, emit " DO "
, unSqliteConflictAction action tableFields
]
anyConflict = SqliteConflictTarget $ const mempty
conflictingFields makeProjection = SqliteConflictTarget $ \table ->
parens $ commas $ map fromSqliteExpression $
project (Proxy @Sqlite) (makeProjection table) "t"
conflictingFieldsWhere makeProjection makeWhere =
SqliteConflictTarget $ \table -> mconcat
[ unSqliteConflictTarget (Beam.conflictingFields makeProjection) table
, emit " WHERE "
, let QExpr mkE = makeWhere table
in fromSqliteExpression $ mkE "t"
]
onConflictDoNothing = SqliteConflictAction $ const $ emit "NOTHING"
onConflictUpdateSet makeAssignments = SqliteConflictAction $ \table -> mconcat
[ emit "UPDATE SET "
, let QAssignment assignments = makeAssignments table $ excluded table
emitAssignment (fieldName, expr) = mconcat
[ fromSqliteFieldNameSyntax fieldName
, emit " = "
, fromSqliteExpression expr
]
in commas $ map emitAssignment assignments
]
onConflictUpdateSetWhere makeAssignments makeWhere =
SqliteConflictAction $ \table -> mconcat
[ unSqliteConflictAction (Beam.onConflictUpdateSet makeAssignments) table
, emit " WHERE "
, let QExpr mkE = makeWhere table $ excluded table
in fromSqliteExpression $ mkE "t"
]
excluded
:: forall table s
. Beamable table
=> table (QField s)
-> table (QExpr Sqlite s)
excluded table = changeBeamRep excludedField table
where excludedField (Columnar' (QField _ _ name)) =
Columnar' $ QExpr $ const $ fieldE $ qualifiedField "excluded" name
| null | https://raw.githubusercontent.com/haskell-beam/beam/8bcefd8b50d1c34359e77709de419cd214fe3bb1/beam-sqlite/Database/Beam/Sqlite/Connection.hs | haskell | # LANGUAGE BangPatterns #
'MonadBeam' and the <-beam.github.io/beam/ user guide> for more
information on how to use this backend.
<-beam.github.io/beam/ user guide> for more information
function, called or each query submitted on the connection.
* Equality checks
TODO we should somehow allow contsraints based on backend
| URI syntax for use with 'withDbConnection'. See documentation for
'BeamURIOpeners' for more information.
If all expressions are simple expressions (no default), then just
* emulated INSERT returning support
| Build a 'SqliteInsertReturning' representing inserting the given values
into the given table. Use 'runInsertReturningList'
| Runs a 'SqliteInsertReturning' statement and returns a result for each
inserted row. | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE InstanceSigs #
# LANGUAGE UndecidableInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
module Database.Beam.Sqlite.Connection
( Sqlite(..), SqliteM(..)
, sqliteUriSyntax
, runBeamSqlite, runBeamSqliteDebug
* Emulated @INSERT RETURNING@ support
, insertReturning, runInsertReturningList
) where
import Prelude hiding (fail)
import Database.Beam.Backend
import Database.Beam.Backend.Internal.Compat
import qualified Database.Beam.Backend.SQL.BeamExtensions as Beam
import Database.Beam.Backend.URI
import Database.Beam.Migrate.Generics
import Database.Beam.Migrate.SQL ( BeamMigrateOnlySqlBackend, FieldReturnType(..) )
import qualified Database.Beam.Migrate.SQL as Beam
import Database.Beam.Migrate.SQL.BeamExtensions
import Database.Beam.Query ( SqlInsert(..), SqlInsertValues(..)
, HasQBuilder(..), HasSqlEqualityCheck
, HasSqlQuantifiedEqualityCheck
, DataType(..)
, HasSqlInTable(..)
, insert, current_ )
import Database.Beam.Query.Internal
import Database.Beam.Query.SQL92
import Database.Beam.Schema.Tables ( Beamable
, Columnar'(..)
, DatabaseEntity(..)
, DatabaseEntityDescriptor(..)
, TableEntity
, TableField(..)
, allBeamValues
, changeBeamRep )
import Database.Beam.Sqlite.Syntax
import Database.SQLite.Simple ( Connection, ToRow(..), FromRow(..)
, Query(..), SQLData(..), field
, execute, execute_
, withStatement, bind, nextRow
, query_, open, close )
import Database.SQLite.Simple.FromField ( FromField(..), ResultError(..)
, returnError, fieldData)
import Database.SQLite.Simple.Internal (RowParser(RP), unRP)
import Database.SQLite.Simple.Ok (Ok(..))
import Database.SQLite.Simple.Types (Null)
import Control.Exception (SomeException(..), bracket_, onException, mask)
import Control.Monad (forM_)
import Control.Monad.Base (MonadBase)
import Control.Monad.Fail (MonadFail(..))
import Control.Monad.Free.Church
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Identity (Identity)
import Control.Monad.Reader (ReaderT(..), MonadReader(..), runReaderT)
import Control.Monad.State.Strict (MonadState(..), StateT(..), runStateT)
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Writer (tell, execWriter)
import Data.ByteString.Builder (toLazyByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.DList as D
import Data.Int
import Data.Maybe (mapMaybe)
import Data.Proxy (Proxy(..))
import Data.Scientific (Scientific)
import Data.String (fromString)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T (decodeUtf8)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL (decodeUtf8)
import Data.Time ( LocalTime, UTCTime, Day
, ZonedTime, utc, utcToLocalTime )
import Data.Typeable (cast)
import Data.Word
import GHC.TypeLits
import Network.URI
#ifdef UNIX
import System.Posix.Process (getProcessID)
#elif defined(WINDOWS)
import System.Win32.Process (getCurrentProcessId)
#else
#error Need either POSIX or Win32 API for MonadBeamInsertReturning
#endif
import Text.Read (readMaybe)
| The SQLite backend . Used to parameterize ' MonadBeam ' and ' '
to provide support for SQLite databases . See the documentation for
data Sqlite = Sqlite
instance BeamBackend Sqlite where
type BackendFromField Sqlite = FromField
instance HasQBuilder Sqlite where
SQLite does not support arbitrarily nesting UNION , INTERSECT , and EXCEPT
instance HasSqlInTable Sqlite where
inRowValuesE Proxy e es = SqliteExpressionSyntax $ mconcat
[ parens $ fromSqliteExpression e
, emit " IN "
, parens $ emit "VALUES " <> commas (map fromSqliteExpression es)
]
instance BeamSqlBackendIsString Sqlite T.Text
instance BeamSqlBackendIsString Sqlite String
instance FromBackendRow Sqlite Bool
instance FromBackendRow Sqlite Double
instance FromBackendRow Sqlite Float
instance FromBackendRow Sqlite Int8
instance FromBackendRow Sqlite Int16
instance FromBackendRow Sqlite Int32
instance FromBackendRow Sqlite Int64
instance FromBackendRow Sqlite Integer
instance FromBackendRow Sqlite Word8
instance FromBackendRow Sqlite Word16
instance FromBackendRow Sqlite Word32
instance FromBackendRow Sqlite Word64
instance FromBackendRow Sqlite BS.ByteString
instance FromBackendRow Sqlite BL.ByteString
instance FromBackendRow Sqlite T.Text
instance FromBackendRow Sqlite TL.Text
instance FromBackendRow Sqlite UTCTime
instance FromBackendRow Sqlite Day
instance FromBackendRow Sqlite Null
instance FromBackendRow Sqlite Char where
fromBackendRow = do
t <- fromBackendRow
case T.uncons t of
Just (c, _) -> pure c
_ -> fail "Need string of size one to parse Char"
instance FromBackendRow Sqlite SqlNull where
fromBackendRow =
SqlNull <$ (fromBackendRow :: FromBackendRowM Sqlite Null)
instance FromBackendRow Sqlite LocalTime where
fromBackendRow = utcToLocalTime utc <$> fromBackendRow
instance FromBackendRow Sqlite Scientific where
fromBackendRow = unSqliteScientific <$> fromBackendRow
instance FromBackendRow Sqlite SqliteScientific
instance TypeError (PreferExplicitSize Int Int32) => FromBackendRow Sqlite Int
instance TypeError (PreferExplicitSize Word Word32) => FromBackendRow Sqlite Word
newtype SqliteScientific = SqliteScientific { unSqliteScientific :: Scientific }
instance FromField SqliteScientific where
fromField f =
SqliteScientific <$>
case fieldData f of
SQLInteger i -> pure (fromIntegral i)
SQLFloat d -> pure . fromRational . toRational $ d
SQLText t -> tryRead (T.unpack t)
SQLBlob b -> tryRead (BS.unpack b)
SQLNull -> returnError UnexpectedNull f "null"
where
tryRead s =
case readMaybe s of
Nothing -> returnError ConversionFailed f $
"No conversion to Scientific for '" <> s <> "'"
Just s' -> pure s'
instance BeamSqlBackend Sqlite
instance BeamMigrateOnlySqlBackend Sqlite
type instance BeamSqlBackendSyntax Sqlite = SqliteCommandSyntax
data SqliteHasDefault = SqliteHasDefault
instance FieldReturnType 'True 'False Sqlite resTy a =>
FieldReturnType 'False 'False Sqlite resTy (SqliteHasDefault -> a) where
field' _ _ nm ty _ collation constraints SqliteHasDefault =
field' (Proxy @'True) (Proxy @'False) nm ty Nothing collation constraints
instance BeamSqlBackendHasSerial Sqlite where
genericSerial nm = Beam.field nm (DataType sqliteSerialType) SqliteHasDefault
| ' MonadBeam ' instance inside which SQLite queries are run . See the
newtype SqliteM a
= SqliteM
{ runSqliteM :: ReaderT (String -> IO (), Connection) IO a
^ Run an IO action with access to a SQLite connection and a debug logging
} deriving (Monad, Functor, Applicative, MonadIO, MonadFail)
deriving newtype (MonadBase IO, MonadBaseControl IO)
newtype BeamSqliteParams = BeamSqliteParams [SQLData]
instance ToRow BeamSqliteParams where
toRow (BeamSqliteParams x) = x
newtype BeamSqliteRow a = BeamSqliteRow a
instance FromBackendRow Sqlite a => FromRow (BeamSqliteRow a) where
fromRow = BeamSqliteRow <$> runF fromBackendRow' finish step
where
FromBackendRowM fromBackendRow' = fromBackendRow :: FromBackendRowM Sqlite a
translateErrors :: Maybe Int -> SomeException -> Maybe SomeException
translateErrors col (SomeException e) =
case cast e of
Just (ConversionFailed { errSQLType = typeString
, errHaskellType = hsString
, errMessage = msg }) ->
Just (SomeException (BeamRowReadError col (ColumnTypeMismatch hsString typeString ("conversion failed: " ++ msg))))
Just (UnexpectedNull {}) ->
Just (SomeException (BeamRowReadError col ColumnUnexpectedNull))
Just (Incompatible { errSQLType = typeString
, errHaskellType = hsString
, errMessage = msg }) ->
Just (SomeException (BeamRowReadError col (ColumnTypeMismatch hsString typeString ("incompatible: " ++ msg))))
Nothing -> Nothing
finish = pure
step :: forall a'. FromBackendRowF Sqlite (RowParser a') -> RowParser a'
step (ParseOneField next) =
RP $ ReaderT $ \ro -> StateT $ \st@(col, _) ->
case runStateT (runReaderT (unRP field) ro) st of
Ok (x, st') -> runStateT (runReaderT (unRP (next x)) ro) st'
Errors errs -> Errors (mapMaybe (translateErrors (Just col)) errs)
step (Alt (FromBackendRowM a) (FromBackendRowM b) next) = do
RP $ do
let RP a' = runF a finish step
RP b' = runF b finish step
st <- get
ro <- ask
case runStateT (runReaderT a' ro) st of
Ok (ra, st') -> do
put st'
unRP (next ra)
Errors aErrs ->
case runStateT (runReaderT b' ro) st of
Ok (rb, st') -> do
put st'
unRP (next rb)
Errors bErrs ->
lift (lift (Errors (aErrs ++ bErrs)))
step (FailParseWith err) = RP (lift (lift (Errors [SomeException err])))
#define HAS_SQLITE_EQUALITY_CHECK(ty) \
instance HasSqlEqualityCheck Sqlite (ty); \
instance HasSqlQuantifiedEqualityCheck Sqlite (ty);
HAS_SQLITE_EQUALITY_CHECK(Int8)
HAS_SQLITE_EQUALITY_CHECK(Int16)
HAS_SQLITE_EQUALITY_CHECK(Int32)
HAS_SQLITE_EQUALITY_CHECK(Int64)
HAS_SQLITE_EQUALITY_CHECK(Word8)
HAS_SQLITE_EQUALITY_CHECK(Word16)
HAS_SQLITE_EQUALITY_CHECK(Word32)
HAS_SQLITE_EQUALITY_CHECK(Word64)
HAS_SQLITE_EQUALITY_CHECK(Double)
HAS_SQLITE_EQUALITY_CHECK(Float)
HAS_SQLITE_EQUALITY_CHECK(Bool)
HAS_SQLITE_EQUALITY_CHECK(String)
HAS_SQLITE_EQUALITY_CHECK(T.Text)
HAS_SQLITE_EQUALITY_CHECK(TL.Text)
HAS_SQLITE_EQUALITY_CHECK(BS.ByteString)
HAS_SQLITE_EQUALITY_CHECK(BL.ByteString)
HAS_SQLITE_EQUALITY_CHECK(UTCTime)
HAS_SQLITE_EQUALITY_CHECK(LocalTime)
HAS_SQLITE_EQUALITY_CHECK(ZonedTime)
HAS_SQLITE_EQUALITY_CHECK(Char)
HAS_SQLITE_EQUALITY_CHECK(Integer)
HAS_SQLITE_EQUALITY_CHECK(Scientific)
instance TypeError (PreferExplicitSize Int Int32) => HasSqlEqualityCheck Sqlite Int
instance TypeError (PreferExplicitSize Int Int32) => HasSqlQuantifiedEqualityCheck Sqlite Int
instance TypeError (PreferExplicitSize Word Word32) => HasSqlEqualityCheck Sqlite Word
instance TypeError (PreferExplicitSize Word Word32) => HasSqlQuantifiedEqualityCheck Sqlite Word
class HasDefaultSqlDataType Sqlite a => IsSqliteSerialIntegerType a
instance IsSqliteSerialIntegerType Int32
instance IsSqliteSerialIntegerType Int64
instance TypeError (PreferExplicitSize Int Int32) => IsSqliteSerialIntegerType Int
instance IsSqliteSerialIntegerType a => HasDefaultSqlDataType Sqlite (SqlSerial a) where
defaultSqlDataType _ _ False = sqliteSerialType
defaultSqlDataType _ _ True = intType
instance HasDefaultSqlDataType Sqlite BS.ByteString where
defaultSqlDataType _ _ _ = sqliteBlobType
instance HasDefaultSqlDataType Sqlite LocalTime where
defaultSqlDataType _ _ _ = timestampType Nothing False
sqliteUriSyntax :: c Sqlite Connection SqliteM
-> BeamURIOpeners c
sqliteUriSyntax =
mkUriOpener runBeamSqlite "sqlite:"
(\uri -> do
let sqliteName = if null (uriPath uri) then ":memory:" else uriPath uri
hdl <- open sqliteName
pure (hdl, close hdl))
runBeamSqliteDebug :: (String -> IO ()) -> Connection -> SqliteM a -> IO a
runBeamSqliteDebug debugStmt conn x = runReaderT (runSqliteM x) (debugStmt, conn)
runBeamSqlite :: Connection -> SqliteM a -> IO a
runBeamSqlite = runBeamSqliteDebug (\_ -> pure ())
instance MonadBeam Sqlite SqliteM where
runNoReturn (SqliteCommandSyntax (SqliteSyntax cmd vals)) =
SqliteM $ do
(logger, conn) <- ask
let cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
liftIO (logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals)))
liftIO (execute conn (fromString cmdString) (D.toList vals))
runNoReturn (SqliteCommandInsert insertStmt_) =
SqliteM $ do
(logger, conn) <- ask
liftIO (runSqliteInsert logger conn insertStmt_)
runReturningMany (SqliteCommandSyntax (SqliteSyntax cmd vals)) action =
SqliteM $ do
(logger, conn) <- ask
let cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
liftIO $ do
logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals))
withStatement conn (fromString cmdString) $ \stmt ->
do bind stmt (BeamSqliteParams (D.toList vals))
let nextRow' = liftIO (nextRow stmt) >>= \x ->
case x of
Nothing -> pure Nothing
Just (BeamSqliteRow row) -> pure row
runReaderT (runSqliteM (action nextRow')) (logger, conn)
runReturningMany SqliteCommandInsert {} _ =
fail . mconcat $
[ "runReturningMany{Sqlite}: sqlite does not support returning "
, "rows from an insert, use Database.Beam.Sqlite.insertReturning "
, "for emulation" ]
instance Beam.MonadBeamInsertReturning Sqlite SqliteM where
runInsertReturningList = runInsertReturningList
runSqliteInsert :: (String -> IO ()) -> Connection -> SqliteInsertSyntax -> IO ()
runSqliteInsert logger conn (SqliteInsertSyntax tbl fields vs onConflict)
| SqliteInsertExpressions es <- vs, any (any (== SqliteExpressionDefault)) es =
forM_ es $ \row -> do
let (fields', row') = unzip $ filter ((/= SqliteExpressionDefault) . snd) $ zip fields row
SqliteSyntax cmd vals = formatSqliteInsertOnConflict tbl fields' (SqliteInsertExpressions [ row' ]) onConflict
cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals))
execute conn (fromString cmdString) (D.toList vals)
| otherwise = do
let SqliteSyntax cmd vals = formatSqliteInsertOnConflict tbl fields vs onConflict
cmdString = BL.unpack (toLazyByteString (withPlaceholders cmd))
logger (cmdString ++ ";\n-- With values: " ++ show (D.toList vals))
execute conn (fromString cmdString) (D.toList vals)
insertReturning :: Beamable table
=> DatabaseEntity Sqlite db (TableEntity table)
-> SqlInsertValues Sqlite (table (QExpr Sqlite s))
-> SqlInsert Sqlite table
insertReturning = insert
runInsertReturningList :: (Beamable table, FromBackendRow Sqlite (table Identity))
=> SqlInsert Sqlite table
-> SqliteM [ table Identity ]
runInsertReturningList SqlInsertNoRows = pure []
runInsertReturningList (SqlInsert tblSettings insertStmt_@(SqliteInsertSyntax nm _ _ _)) =
do (logger, conn) <- SqliteM ask
SqliteM . liftIO $ do
#ifdef UNIX
processId <- fromString . show <$> getProcessID
#elif defined(WINDOWS)
processId <- fromString . show <$> getCurrentProcessId
#else
#error Need either POSIX or Win32 API for MonadBeamInsertReturning
#endif
let tableNameTxt = T.decodeUtf8 (BL.toStrict (sqliteRenderSyntaxScript (fromSqliteTableName nm)))
startSavepoint =
execute_ conn (Query ("SAVEPOINT insert_savepoint_" <> processId))
rollbackToSavepoint =
execute_ conn (Query ("ROLLBACK TRANSACTION TO SAVEPOINT insert_savepoint_" <> processId))
releaseSavepoint =
execute_ conn (Query ("RELEASE SAVEPOINT insert_savepoint_" <> processId))
createInsertedValuesTable =
execute_ conn (Query ("CREATE TEMPORARY TABLE inserted_values_" <> processId <> " AS SELECT * FROM " <> tableNameTxt <> " LIMIT 0"))
dropInsertedValuesTable =
execute_ conn (Query ("DROP TABLE inserted_values_" <> processId))
createInsertTrigger =
execute_ conn (Query ("CREATE TEMPORARY TRIGGER insert_trigger_" <> processId <> " AFTER INSERT ON " <> tableNameTxt <> " BEGIN " <>
"INSERT INTO inserted_values_" <> processId <> " SELECT * FROM " <> tableNameTxt <> " WHERE ROWID=last_insert_rowid(); END" ))
dropInsertTrigger =
execute_ conn (Query ("DROP TRIGGER insert_trigger_" <> processId))
mask $ \restore -> do
startSavepoint
flip onException rollbackToSavepoint . restore $ do
x <- bracket_ createInsertedValuesTable dropInsertedValuesTable $
bracket_ createInsertTrigger dropInsertTrigger $ do
runSqliteInsert logger conn insertStmt_
let columns = TL.toStrict $ TL.decodeUtf8 $
sqliteRenderSyntaxScript $ commas $
allBeamValues (\(Columnar' projField) -> quotedIdentifier (_fieldName projField)) $
tblSettings
fmap (\(BeamSqliteRow r) -> r) <$> query_ conn (Query ("SELECT " <> columns <> " FROM inserted_values_" <> processId))
releaseSavepoint
return x
instance Beam.BeamHasInsertOnConflict Sqlite where
newtype SqlConflictTarget Sqlite table = SqliteConflictTarget
{ unSqliteConflictTarget :: table (QExpr Sqlite QInternal) -> SqliteSyntax }
newtype SqlConflictAction Sqlite table = SqliteConflictAction
{ unSqliteConflictAction :: forall s. table (QField s) -> SqliteSyntax }
insertOnConflict
:: forall db table s. Beamable table
=> DatabaseEntity Sqlite db (TableEntity table)
-> SqlInsertValues Sqlite (table (QExpr Sqlite s))
-> Beam.SqlConflictTarget Sqlite table
-> Beam.SqlConflictAction Sqlite table
-> SqlInsert Sqlite table
insertOnConflict (DatabaseEntity dt) values target action = case values of
SqlInsertValuesEmpty -> SqlInsertNoRows
SqlInsertValues vs -> SqlInsert (dbTableSettings dt) $
let getFieldName
:: forall a
. Columnar' (TableField table) a
-> Columnar' (QField QInternal) a
getFieldName (Columnar' fd) =
Columnar' $ QField False (dbTableCurrentName dt) $ _fieldName fd
tableFields = changeBeamRep getFieldName $ dbTableSettings dt
tellFieldName _ _ f = tell [f] >> pure f
fieldNames = execWriter $
project' (Proxy @AnyType) (Proxy @((), T.Text)) tellFieldName tableFields
currentField
:: forall a
. Columnar' (QField QInternal) a
-> Columnar' (QExpr Sqlite QInternal) a
currentField (Columnar' f) = Columnar' $ current_ f
tableCurrent = changeBeamRep currentField tableFields
in SqliteInsertSyntax (tableNameFromEntity dt) fieldNames vs $ Just $
SqliteOnConflictSyntax $ mconcat
[ emit "ON CONFLICT "
, unSqliteConflictTarget target tableCurrent
, emit " DO "
, unSqliteConflictAction action tableFields
]
anyConflict = SqliteConflictTarget $ const mempty
conflictingFields makeProjection = SqliteConflictTarget $ \table ->
parens $ commas $ map fromSqliteExpression $
project (Proxy @Sqlite) (makeProjection table) "t"
conflictingFieldsWhere makeProjection makeWhere =
SqliteConflictTarget $ \table -> mconcat
[ unSqliteConflictTarget (Beam.conflictingFields makeProjection) table
, emit " WHERE "
, let QExpr mkE = makeWhere table
in fromSqliteExpression $ mkE "t"
]
onConflictDoNothing = SqliteConflictAction $ const $ emit "NOTHING"
onConflictUpdateSet makeAssignments = SqliteConflictAction $ \table -> mconcat
[ emit "UPDATE SET "
, let QAssignment assignments = makeAssignments table $ excluded table
emitAssignment (fieldName, expr) = mconcat
[ fromSqliteFieldNameSyntax fieldName
, emit " = "
, fromSqliteExpression expr
]
in commas $ map emitAssignment assignments
]
onConflictUpdateSetWhere makeAssignments makeWhere =
SqliteConflictAction $ \table -> mconcat
[ unSqliteConflictAction (Beam.onConflictUpdateSet makeAssignments) table
, emit " WHERE "
, let QExpr mkE = makeWhere table $ excluded table
in fromSqliteExpression $ mkE "t"
]
excluded
:: forall table s
. Beamable table
=> table (QField s)
-> table (QExpr Sqlite s)
excluded table = changeBeamRep excludedField table
where excludedField (Columnar' (QField _ _ name)) =
Columnar' $ QExpr $ const $ fieldE $ qualifiedField "excluded" name
|
c38e40173fcba6bf0d013a408ae37409f8e064aa87fcec52e67ecbf0751e3a1c | jumarko/web-development-with-clojure | handler.clj | (ns reporting-examples.test.handler
(:require [clojure.test :refer :all]
[ring.mock.request :refer :all]
[reporting-examples.handler :refer :all]))
(deftest test-app
(testing "main route"
(let [response ((app) (request :get "/"))]
(is (= 200 (:status response)))))
(testing "not-found route"
(let [response ((app) (request :get "/invalid"))]
(is (= 404 (:status response))))))
| null | https://raw.githubusercontent.com/jumarko/web-development-with-clojure/dfff6e40c76b64e9fcd440d80c7aa29809601b6b/examples/reporting-examples/test/clj/reporting_examples/test/handler.clj | clojure | (ns reporting-examples.test.handler
(:require [clojure.test :refer :all]
[ring.mock.request :refer :all]
[reporting-examples.handler :refer :all]))
(deftest test-app
(testing "main route"
(let [response ((app) (request :get "/"))]
(is (= 200 (:status response)))))
(testing "not-found route"
(let [response ((app) (request :get "/invalid"))]
(is (= 404 (:status response))))))
| |
6d13c10f3613b213806b59ff723a2d93bb5530a08845ee6c8a4c2f5b9acc69b9 | VMatthijs/CHAD | Simplify.hs | # LANGUAGE DataKinds #
# LANGUAGE EmptyCase #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
# LANGUAGE ImplicitParams #
# LANGUAGE LambdaCase #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE TypeOperators #
# LANGUAGE ViewPatterns #
-- | Simplify terms in the concrete language to aid legibility.
--
-- This should only do simplifications that any basic compiler
-- would also perform.
--
-- The simplifier in this module is /parametrised/: all the individual
-- simplifications can be turned on or off by setting the corresponding flag in
-- the 'Settings' object passed to 'simplifyCTerm'.
module Concrete.Simplify (
simplifyCTerm,
Settings(..), allSettings,
) where
import Data.GADT.Compare (geq)
import Data.Type.Equality ((:~:)(Refl))
import Concrete
import Count
import Env
import Operation
import Types
data Settings = Settings
{ simpLamAppLet :: Bool -- ^ @(\x -> e) a@ ~> @let x = a in e@
, simpLetRotate :: Bool -- ^ @let x = (let y = a in b) in e@ ~> @let y = a in let x = b in e@
^ @let x = ( a , b ) in @e ~ > @let x1 = a in let x2 = b in
, simpLetInline :: Bool -- ^ @let x = a in e@ ~> @e[a/x]@ (if @a@ is cheap or used at most once in e)
^ @fst ( a , b)@ ~ > @a@ ( and similarly for @snd@ )
^ @(fst a , snd a)@ ~ > @a@
^ @fst ( let x = a in e)@ ~ > @let x = a in fst e@ ( and similarly for @snd@ )
, simpPlusZero :: Bool -- ^ @plus zero a@ ~> @a@ (also symmetrically)
, simpPlusPair :: Bool -- ^ @plus (a, b) (c, d)@ ~> @(plus a c, plus b d)@
, simpPlusLet :: Bool -- ^ @plus (let x = e in a) b@ ~> @let x = e in plus a b@ (also symmetrically)
, simpAlgebra :: Bool -- ^ @0 * x = 0@, etc.
^ @let f = \x - > ( a , b ) in e@ ~ > @let f1 = \x - > a ; f2 = \x - > b in e[(\x->(f1 x , f2 x))/f]@
^ @map ( \x - > ( b , c ) ) a@ ~ > @let a ' = a in ( map ( \x - > b ) a ' , map ( \x - > c ) a')@
^ @map ( \x - > zero ) a@ ~ > @zero@
, simpSumZip :: Bool -- ^ @sum (zip a b)@ ~> @(sum a, sum b)@
^ @sum zero@ ~ > @zero@
^ @sum ( map ( \x - > [ x ] ) e)@ ~ > @e@
^ @case inl e of inl a - > e1 ; inr b - > e2@ ~ > @let a = e in e1@ ( and similarly for @inr@ )
}
deriving (Show, Eq)
instance Semigroup Settings where
Settings a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 <>
Settings b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15 b16 b17 b18 =
Settings (a1 || b1) (a2 || b2) (a3 || b3) (a4 || b4) (a5 || b5)
(a6 || b6) (a7 || b7) (a8 || b8) (a9 || b9) (a10 || b10)
(a11 || b11) (a12 || b12) (a13 || b13) (a14 || b14)
(a15 || b15) (a16 || b16) (a17 || b17) (a18 || b18)
instance Monoid Settings where
mempty = Settings False False False False False False False False
False False False False False False False False
False False
allSettings :: Settings
allSettings = Settings
{ simpLamAppLet = True
, simpLetRotate = True
, simpLetPairSplit = True
, simpLetInline = True
, simpPairProj = True
, simpPairEta = True
, simpLetProj = True
, simpPlusZero = True
, simpPlusPair = True
, simpPlusLet = True
, simpAlgebra = True
, simpLetLamPairSplit = True
, simpMapPairSplit = True
, simpMapZero = True
, simpSumZip = True
, simpSumZero = True
, simpSumSingleton = True
, simpCase = True
}
simplifyCTerm :: Settings -> CTerm env a -> CTerm env a
simplifyCTerm settings = let ?settings = settings in simplifyCTerm'
-- | Simplify a 'CTerm' using some basic rewriting optimisations.
--
-- Note: inlining of variable definitions is only performed if the variable in
-- question is only used once. Let-splitting is performed.
simplifyCTerm' :: (?settings :: Settings) => CTerm env a -> CTerm env a
simplifyCTerm' (CVar i) = CVar i
simplifyCTerm' (CLambda e) = CLambda (simplifyCTerm' e)
simplifyCTerm' (CLet rhs e) = simplifyLet (simplifyCTerm' rhs) (simplifyCTerm' e)
simplifyCTerm' (CApp f a) = simplifyApp (simplifyCTerm' f) (simplifyCTerm' a)
simplifyCTerm' CUnit = CUnit
simplifyCTerm' (CPair a b) = simplifyPair (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CFst p) = simplifyFst (simplifyCTerm' p)
simplifyCTerm' (CSnd p) = simplifySnd (simplifyCTerm' p)
simplifyCTerm' (CInl p) = CInl (simplifyCTerm' p)
simplifyCTerm' (CInr p) = CInr (simplifyCTerm' p)
simplifyCTerm' (CCase e a b) = simplifyCase (simplifyCTerm' e) (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (COp op a) = simplifyCOp op (simplifyCTerm' a)
simplifyCTerm' (CMap a b) = CMap (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CZipWith a b c) = CZipWith (simplifyCTerm' a) (simplifyCTerm' b) (simplifyCTerm' c)
simplifyCTerm' (CReplicate x) = CReplicate (simplifyCTerm' x)
simplifyCTerm' (CSum a) = CSum (simplifyCTerm' a)
simplifyCTerm' (CToList a) = CToList (simplifyCTerm' a)
simplifyCTerm' CLNil = CLNil
simplifyCTerm' (CLCons a b) = CLCons (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CLMap a b) = simplifyCLMap (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CLFoldr a b c) = CLFoldr (simplifyCTerm' a) (simplifyCTerm' b) (simplifyCTerm' c)
simplifyCTerm' (CLSum a) = simplifyCLSum (simplifyCTerm' a)
simplifyCTerm' (CLZip b c) = CLZip (simplifyCTerm' b) (simplifyCTerm' c)
simplifyCTerm' (CMkLEither a) = CMkLEither (simplifyCTerm' a)
simplifyCTerm' (CLCase e a b) = CLCase (simplifyCTerm' e) (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' CZero = CZero
simplifyCTerm' (CPlus a b) = simplifyPlus (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' CError = CError
-- | Simplify the App form. This converts immediate lambda application into
-- let-binding.
simplifyApp :: (?settings :: Settings) => CTerm env (a -> b) -> CTerm env a -> CTerm env b
simplifyApp (CLambda e) a | simpLamAppLet ?settings = simplifyLet a e
simplifyApp f a = CApp f a
simplifyPair :: (?settings :: Settings) => CTerm env a -> CTerm env b -> CTerm env (a, b)
simplifyPair (CFst (CVar i)) (CSnd (CVar j))
| simpPairEta ?settings
, Just Refl <- geq i j
= CVar i
simplifyPair a b = CPair a b
data SplitLambda env t where
SLam :: CTerm env a
-> CTerm env b
-> (forall env'. CTerm (b ': a ': env') t)
-> SplitLambda env t
splitLambda :: (?settings :: Settings) => CTerm env t -> Maybe (SplitLambda env t)
splitLambda (CLambda e) =
fmap (\(SLam f1 f2 re) -> SLam (CLambda f1) (CLambda f2)
(CLambda $
substCt wId (CApp (CVar (S (S Z))) (CVar Z)) $
substCt wId (CApp (CVar (S (S Z))) (CVar (S Z))) $
sinkCt (wSink (wSink (wSink (wSucc (wSucc wId)))))
re))
(splitLambda e)
splitLambda (CPair a b) = Just (SLam a b (CPair (CVar (S Z)) (CVar Z)))
splitLambda _ = Nothing
-- | Simplify the Let form.
--
-- We perform let-of-pair splitting, also when that pair is hidden behind a lambda.
simplifyLet :: (?settings :: Settings) => CTerm env a -> CTerm (a ': env) b -> CTerm env b
simplifyLet (CLet rhs e) body | simpLetRotate ?settings =
simplifyLet rhs (simplifyLet e (sinkCt (wSink (wSucc wId)) body))
simplifyLet (CPair a1 a2) e | simpLetPairSplit ?settings =
simplifyLet a1 $
simplifyLet (sinkCt (wSucc wId) a2) $
simplifyCTerm' $ substCt (wSucc (wSucc wId)) (CPair (CVar (S Z)) (CVar Z)) e
simplifyLet a e
| simpLetLamPairSplit ?settings
, Just (SLam a1 a2 re) <- splitLambda a
, let re' = substCt wId (CVar (S Z)) . substCt wId (CVar (S Z)) $ re
= simplifyCTerm' $
CLet a1 $
CLet (sinkCt (wSucc wId) a2) $
substCt wId re' (sinkCt (wSink (wSucc (wSucc wId))) e)
| simpLetInline ?settings
Occurrence counting for variable inlining is tricky . See the documentation of ' OccCount ' .
let OccCount synUses runUses = usesOfCt Z e
, duplicableSyntactic a || synUses <= 1
, duplicableRuntime a || runUses <= 1
= simplifyCTerm' $ substCt wId a e
| otherwise
= CLet a e
duplicableRuntime :: CTerm env a -> Bool
duplicableRuntime = \case
CLambda{} -> True
t -> duplicableSyntactic t
duplicableSyntactic :: CTerm env a -> Bool
duplicableSyntactic = \case
CVar{} -> True
CUnit{} -> True
CPair a b -> duplicableSyntactic a && duplicableSyntactic b
CFst e -> duplicableSyntactic e
CSnd e -> duplicableSyntactic e
CPlus a b -> duplicableSyntactic a && duplicableSyntactic b
CZero -> True
_ -> False
| Simplify the form
simplifyFst :: (?settings :: Settings) => CTerm env (a, b) -> CTerm env a
simplifyFst (CPair t _) | simpPairProj ?settings = t
simplifyFst (CLet rhs e) | simpLetProj ?settings = simplifyLet rhs (simplifyFst e)
simplifyFst p = CFst p
-- | Simplify the Snd form
simplifySnd :: (?settings :: Settings) => CTerm env (a, b) -> CTerm env b
simplifySnd (CPair _ s) | simpPairProj ?settings = s
simplifySnd (CLet rhs e) | simpLetProj ?settings = simplifyLet rhs (simplifySnd e)
simplifySnd p = CSnd p
simplifyCase :: (?settings :: Settings) => CTerm env (Either a b) -> CTerm (a ': env) c -> CTerm (b ': env) c -> CTerm env c
simplifyCase (CInl e) a _ | simpCase ?settings = simplifyLet e a
simplifyCase (CInr e) _ b | simpCase ?settings = simplifyLet e b
simplifyCase e a b = CCase e a b
simplifyCOp :: (?settings :: Settings) => Operation a b -> CTerm env a -> CTerm env b
simplifyCOp op arg | simpAlgebra ?settings = case (op, arg) of
(Constant x, _) -> COp (Constant x) CUnit
(EAdd, CPair (CReplicate t) e) | zeroish t -> e
(EAdd, CPair e (CReplicate t)) | zeroish t -> e
(EProd, CPair (CReplicate (COp (Constant 1.0) _)) e) -> e
(EProd, CPair e (CReplicate (COp (Constant 1.0) _))) -> e
(EScalAdd, CPair a b)
| zeroish a -> b
| zeroish b -> a
(EScalSubt, CPair e t) | zeroish t -> e
(EScalProd, CPair a b)
| zeroish a || zeroish b -> CZero
(EScalProd, CPair (COp (Constant 1.0) _) e) -> e
(EScalProd, CPair e (COp (Constant 1.0) _)) -> e
_ -> COp op arg
where
zeroish :: CTerm env Scal -> Bool
zeroish (COp (Constant 0.0) _) = True
zeroish CZero = True
zeroish _ = False
simplifyCOp op arg = COp op arg
simplifyCLMap :: (?settings :: Settings) => CTerm env (a -> b) -> CTerm env [a] -> CTerm env [b]
simplifyCLMap (CLambda (CPair a b)) l | simpMapPairSplit ?settings =
simplifyCTerm' $
CLet l $
CLZip (CLMap (CLambda (sinkCt (wSink (wSucc wId)) a)) (CVar Z))
(CLMap (CLambda (sinkCt (wSink (wSucc wId)) b)) (CVar Z))
simplifyCLMap (CLambda CZero) _ | simpMapZero ?settings = CZero
simplifyCLMap f l = CLMap f l
simplifyCLSum :: (?settings :: Settings, LT a) => CTerm env [a] -> CTerm env a
simplifyCLSum (CLZip a b) | simpSumZip ?settings =
simplifyCTerm' $ CPair (simplifyCLSum a) (simplifyCLSum b)
simplifyCLSum CZero | simpSumZero ?settings = CZero
simplifyCLSum (CLMap (CLambda (CLCons (CVar Z) CLNil)) e)
| simpSumSingleton ?settings = e
simplifyCLSum l = CLSum l
-- | Simplify the Plus form
simplifyPlus :: (LT a, ?settings :: Settings) => CTerm env a -> CTerm env a -> CTerm env a
simplifyPlus a CZero | simpPlusZero ?settings = a
simplifyPlus CZero b | simpPlusZero ?settings = b
simplifyPlus (CPair a b) (CPair a' b') | simpPlusPair ?settings =
simplifyCTerm' (CPair (CPlus a a') (CPlus b b'))
simplifyPlus (CLet rhs a) b | simpPlusLet ?settings = simplifyLet rhs (simplifyPlus a (sinkCt (wSucc wId) b))
simplifyPlus a (CLet rhs b) | simpPlusLet ?settings = simplifyLet rhs (simplifyPlus (sinkCt (wSucc wId) a) b)
simplifyPlus a b = CPlus a b
| null | https://raw.githubusercontent.com/VMatthijs/CHAD/755fc47e1f8d1c3d91455f123338f44a353fc265/src/Concrete/Simplify.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
| Simplify terms in the concrete language to aid legibility.
This should only do simplifications that any basic compiler
would also perform.
The simplifier in this module is /parametrised/: all the individual
simplifications can be turned on or off by setting the corresponding flag in
the 'Settings' object passed to 'simplifyCTerm'.
^ @(\x -> e) a@ ~> @let x = a in e@
^ @let x = (let y = a in b) in e@ ~> @let y = a in let x = b in e@
^ @let x = a in e@ ~> @e[a/x]@ (if @a@ is cheap or used at most once in e)
^ @plus zero a@ ~> @a@ (also symmetrically)
^ @plus (a, b) (c, d)@ ~> @(plus a c, plus b d)@
^ @plus (let x = e in a) b@ ~> @let x = e in plus a b@ (also symmetrically)
^ @0 * x = 0@, etc.
^ @sum (zip a b)@ ~> @(sum a, sum b)@
| Simplify a 'CTerm' using some basic rewriting optimisations.
Note: inlining of variable definitions is only performed if the variable in
question is only used once. Let-splitting is performed.
| Simplify the App form. This converts immediate lambda application into
let-binding.
| Simplify the Let form.
We perform let-of-pair splitting, also when that pair is hidden behind a lambda.
| Simplify the Snd form
| Simplify the Plus form | # LANGUAGE DataKinds #
# LANGUAGE EmptyCase #
# LANGUAGE FlexibleContexts #
# LANGUAGE ImplicitParams #
# LANGUAGE LambdaCase #
# LANGUAGE TypeOperators #
# LANGUAGE ViewPatterns #
module Concrete.Simplify (
simplifyCTerm,
Settings(..), allSettings,
) where
import Data.GADT.Compare (geq)
import Data.Type.Equality ((:~:)(Refl))
import Concrete
import Count
import Env
import Operation
import Types
data Settings = Settings
^ @let x = ( a , b ) in @e ~ > @let x1 = a in let x2 = b in
^ @fst ( a , b)@ ~ > @a@ ( and similarly for @snd@ )
^ @(fst a , snd a)@ ~ > @a@
^ @fst ( let x = a in e)@ ~ > @let x = a in fst e@ ( and similarly for @snd@ )
^ @let f = \x - > ( a , b ) in e@ ~ > @let f1 = \x - > a ; f2 = \x - > b in e[(\x->(f1 x , f2 x))/f]@
^ @map ( \x - > ( b , c ) ) a@ ~ > @let a ' = a in ( map ( \x - > b ) a ' , map ( \x - > c ) a')@
^ @map ( \x - > zero ) a@ ~ > @zero@
^ @sum zero@ ~ > @zero@
^ @sum ( map ( \x - > [ x ] ) e)@ ~ > @e@
^ @case inl e of inl a - > e1 ; inr b - > e2@ ~ > @let a = e in e1@ ( and similarly for @inr@ )
}
deriving (Show, Eq)
instance Semigroup Settings where
Settings a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 <>
Settings b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12 b13 b14 b15 b16 b17 b18 =
Settings (a1 || b1) (a2 || b2) (a3 || b3) (a4 || b4) (a5 || b5)
(a6 || b6) (a7 || b7) (a8 || b8) (a9 || b9) (a10 || b10)
(a11 || b11) (a12 || b12) (a13 || b13) (a14 || b14)
(a15 || b15) (a16 || b16) (a17 || b17) (a18 || b18)
instance Monoid Settings where
mempty = Settings False False False False False False False False
False False False False False False False False
False False
allSettings :: Settings
allSettings = Settings
{ simpLamAppLet = True
, simpLetRotate = True
, simpLetPairSplit = True
, simpLetInline = True
, simpPairProj = True
, simpPairEta = True
, simpLetProj = True
, simpPlusZero = True
, simpPlusPair = True
, simpPlusLet = True
, simpAlgebra = True
, simpLetLamPairSplit = True
, simpMapPairSplit = True
, simpMapZero = True
, simpSumZip = True
, simpSumZero = True
, simpSumSingleton = True
, simpCase = True
}
simplifyCTerm :: Settings -> CTerm env a -> CTerm env a
simplifyCTerm settings = let ?settings = settings in simplifyCTerm'
simplifyCTerm' :: (?settings :: Settings) => CTerm env a -> CTerm env a
simplifyCTerm' (CVar i) = CVar i
simplifyCTerm' (CLambda e) = CLambda (simplifyCTerm' e)
simplifyCTerm' (CLet rhs e) = simplifyLet (simplifyCTerm' rhs) (simplifyCTerm' e)
simplifyCTerm' (CApp f a) = simplifyApp (simplifyCTerm' f) (simplifyCTerm' a)
simplifyCTerm' CUnit = CUnit
simplifyCTerm' (CPair a b) = simplifyPair (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CFst p) = simplifyFst (simplifyCTerm' p)
simplifyCTerm' (CSnd p) = simplifySnd (simplifyCTerm' p)
simplifyCTerm' (CInl p) = CInl (simplifyCTerm' p)
simplifyCTerm' (CInr p) = CInr (simplifyCTerm' p)
simplifyCTerm' (CCase e a b) = simplifyCase (simplifyCTerm' e) (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (COp op a) = simplifyCOp op (simplifyCTerm' a)
simplifyCTerm' (CMap a b) = CMap (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CZipWith a b c) = CZipWith (simplifyCTerm' a) (simplifyCTerm' b) (simplifyCTerm' c)
simplifyCTerm' (CReplicate x) = CReplicate (simplifyCTerm' x)
simplifyCTerm' (CSum a) = CSum (simplifyCTerm' a)
simplifyCTerm' (CToList a) = CToList (simplifyCTerm' a)
simplifyCTerm' CLNil = CLNil
simplifyCTerm' (CLCons a b) = CLCons (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CLMap a b) = simplifyCLMap (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' (CLFoldr a b c) = CLFoldr (simplifyCTerm' a) (simplifyCTerm' b) (simplifyCTerm' c)
simplifyCTerm' (CLSum a) = simplifyCLSum (simplifyCTerm' a)
simplifyCTerm' (CLZip b c) = CLZip (simplifyCTerm' b) (simplifyCTerm' c)
simplifyCTerm' (CMkLEither a) = CMkLEither (simplifyCTerm' a)
simplifyCTerm' (CLCase e a b) = CLCase (simplifyCTerm' e) (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' CZero = CZero
simplifyCTerm' (CPlus a b) = simplifyPlus (simplifyCTerm' a) (simplifyCTerm' b)
simplifyCTerm' CError = CError
simplifyApp :: (?settings :: Settings) => CTerm env (a -> b) -> CTerm env a -> CTerm env b
simplifyApp (CLambda e) a | simpLamAppLet ?settings = simplifyLet a e
simplifyApp f a = CApp f a
simplifyPair :: (?settings :: Settings) => CTerm env a -> CTerm env b -> CTerm env (a, b)
simplifyPair (CFst (CVar i)) (CSnd (CVar j))
| simpPairEta ?settings
, Just Refl <- geq i j
= CVar i
simplifyPair a b = CPair a b
data SplitLambda env t where
SLam :: CTerm env a
-> CTerm env b
-> (forall env'. CTerm (b ': a ': env') t)
-> SplitLambda env t
splitLambda :: (?settings :: Settings) => CTerm env t -> Maybe (SplitLambda env t)
splitLambda (CLambda e) =
fmap (\(SLam f1 f2 re) -> SLam (CLambda f1) (CLambda f2)
(CLambda $
substCt wId (CApp (CVar (S (S Z))) (CVar Z)) $
substCt wId (CApp (CVar (S (S Z))) (CVar (S Z))) $
sinkCt (wSink (wSink (wSink (wSucc (wSucc wId)))))
re))
(splitLambda e)
splitLambda (CPair a b) = Just (SLam a b (CPair (CVar (S Z)) (CVar Z)))
splitLambda _ = Nothing
simplifyLet :: (?settings :: Settings) => CTerm env a -> CTerm (a ': env) b -> CTerm env b
simplifyLet (CLet rhs e) body | simpLetRotate ?settings =
simplifyLet rhs (simplifyLet e (sinkCt (wSink (wSucc wId)) body))
simplifyLet (CPair a1 a2) e | simpLetPairSplit ?settings =
simplifyLet a1 $
simplifyLet (sinkCt (wSucc wId) a2) $
simplifyCTerm' $ substCt (wSucc (wSucc wId)) (CPair (CVar (S Z)) (CVar Z)) e
simplifyLet a e
| simpLetLamPairSplit ?settings
, Just (SLam a1 a2 re) <- splitLambda a
, let re' = substCt wId (CVar (S Z)) . substCt wId (CVar (S Z)) $ re
= simplifyCTerm' $
CLet a1 $
CLet (sinkCt (wSucc wId) a2) $
substCt wId re' (sinkCt (wSink (wSucc (wSucc wId))) e)
| simpLetInline ?settings
Occurrence counting for variable inlining is tricky . See the documentation of ' OccCount ' .
let OccCount synUses runUses = usesOfCt Z e
, duplicableSyntactic a || synUses <= 1
, duplicableRuntime a || runUses <= 1
= simplifyCTerm' $ substCt wId a e
| otherwise
= CLet a e
duplicableRuntime :: CTerm env a -> Bool
duplicableRuntime = \case
CLambda{} -> True
t -> duplicableSyntactic t
duplicableSyntactic :: CTerm env a -> Bool
duplicableSyntactic = \case
CVar{} -> True
CUnit{} -> True
CPair a b -> duplicableSyntactic a && duplicableSyntactic b
CFst e -> duplicableSyntactic e
CSnd e -> duplicableSyntactic e
CPlus a b -> duplicableSyntactic a && duplicableSyntactic b
CZero -> True
_ -> False
| Simplify the form
simplifyFst :: (?settings :: Settings) => CTerm env (a, b) -> CTerm env a
simplifyFst (CPair t _) | simpPairProj ?settings = t
simplifyFst (CLet rhs e) | simpLetProj ?settings = simplifyLet rhs (simplifyFst e)
simplifyFst p = CFst p
simplifySnd :: (?settings :: Settings) => CTerm env (a, b) -> CTerm env b
simplifySnd (CPair _ s) | simpPairProj ?settings = s
simplifySnd (CLet rhs e) | simpLetProj ?settings = simplifyLet rhs (simplifySnd e)
simplifySnd p = CSnd p
simplifyCase :: (?settings :: Settings) => CTerm env (Either a b) -> CTerm (a ': env) c -> CTerm (b ': env) c -> CTerm env c
simplifyCase (CInl e) a _ | simpCase ?settings = simplifyLet e a
simplifyCase (CInr e) _ b | simpCase ?settings = simplifyLet e b
simplifyCase e a b = CCase e a b
simplifyCOp :: (?settings :: Settings) => Operation a b -> CTerm env a -> CTerm env b
simplifyCOp op arg | simpAlgebra ?settings = case (op, arg) of
(Constant x, _) -> COp (Constant x) CUnit
(EAdd, CPair (CReplicate t) e) | zeroish t -> e
(EAdd, CPair e (CReplicate t)) | zeroish t -> e
(EProd, CPair (CReplicate (COp (Constant 1.0) _)) e) -> e
(EProd, CPair e (CReplicate (COp (Constant 1.0) _))) -> e
(EScalAdd, CPair a b)
| zeroish a -> b
| zeroish b -> a
(EScalSubt, CPair e t) | zeroish t -> e
(EScalProd, CPair a b)
| zeroish a || zeroish b -> CZero
(EScalProd, CPair (COp (Constant 1.0) _) e) -> e
(EScalProd, CPair e (COp (Constant 1.0) _)) -> e
_ -> COp op arg
where
zeroish :: CTerm env Scal -> Bool
zeroish (COp (Constant 0.0) _) = True
zeroish CZero = True
zeroish _ = False
simplifyCOp op arg = COp op arg
simplifyCLMap :: (?settings :: Settings) => CTerm env (a -> b) -> CTerm env [a] -> CTerm env [b]
simplifyCLMap (CLambda (CPair a b)) l | simpMapPairSplit ?settings =
simplifyCTerm' $
CLet l $
CLZip (CLMap (CLambda (sinkCt (wSink (wSucc wId)) a)) (CVar Z))
(CLMap (CLambda (sinkCt (wSink (wSucc wId)) b)) (CVar Z))
simplifyCLMap (CLambda CZero) _ | simpMapZero ?settings = CZero
simplifyCLMap f l = CLMap f l
simplifyCLSum :: (?settings :: Settings, LT a) => CTerm env [a] -> CTerm env a
simplifyCLSum (CLZip a b) | simpSumZip ?settings =
simplifyCTerm' $ CPair (simplifyCLSum a) (simplifyCLSum b)
simplifyCLSum CZero | simpSumZero ?settings = CZero
simplifyCLSum (CLMap (CLambda (CLCons (CVar Z) CLNil)) e)
| simpSumSingleton ?settings = e
simplifyCLSum l = CLSum l
simplifyPlus :: (LT a, ?settings :: Settings) => CTerm env a -> CTerm env a -> CTerm env a
simplifyPlus a CZero | simpPlusZero ?settings = a
simplifyPlus CZero b | simpPlusZero ?settings = b
simplifyPlus (CPair a b) (CPair a' b') | simpPlusPair ?settings =
simplifyCTerm' (CPair (CPlus a a') (CPlus b b'))
simplifyPlus (CLet rhs a) b | simpPlusLet ?settings = simplifyLet rhs (simplifyPlus a (sinkCt (wSucc wId) b))
simplifyPlus a (CLet rhs b) | simpPlusLet ?settings = simplifyLet rhs (simplifyPlus (sinkCt (wSucc wId) a) b)
simplifyPlus a b = CPlus a b
|
6e690e126d92f74834845e557e53f529b1c1c23f5126dbc17f4a8b246bc30c24 | brendanhay/gogol | Delete.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
-- |
Module : . Logging . Organizations . Exclusions . Delete
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Deletes an exclusion in the _Default sink.
--
/See:/ < / Cloud Logging API Reference > for
module Gogol.Logging.Organizations.Exclusions.Delete
( -- * Resource
LoggingOrganizationsExclusionsDeleteResource,
-- ** Constructing a Request
LoggingOrganizationsExclusionsDelete (..),
newLoggingOrganizationsExclusionsDelete,
)
where
import Gogol.Logging.Types
import qualified Gogol.Prelude as Core
| A resource alias for @logging.organizations.exclusions.delete@ method which the
' ' request conforms to .
type LoggingOrganizationsExclusionsDeleteResource =
"v2"
Core.:> Core.Capture "name" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Delete '[Core.JSON] Empty
-- | Deletes an exclusion in the _Default sink.
--
-- /See:/ 'newLoggingOrganizationsExclusionsDelete' smart constructor.
data LoggingOrganizationsExclusionsDelete = LoggingOrganizationsExclusionsDelete
{ -- | V1 error format.
xgafv :: (Core.Maybe Xgafv),
-- | OAuth access token.
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
-- | Required. The resource name of an existing exclusion to delete: \"projects\/[PROJECT/ID]\/exclusions\/[EXCLUSION/ID]\" \"organizations\/[ORGANIZATION/ID]\/exclusions\/[EXCLUSION/ID]\" \"billingAccounts\/[BILLING/ACCOUNT/ID]\/exclusions\/[EXCLUSION/ID]\" \"folders\/[FOLDER/ID]\/exclusions\/[EXCLUSION_ID]\" For example:\"projects\/my-project\/exclusions\/my-exclusion\"
name :: Core.Text,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
| Creates a value of ' ' with the minimum fields required to make a request .
newLoggingOrganizationsExclusionsDelete ::
-- | Required. The resource name of an existing exclusion to delete: \"projects\/[PROJECT/ID]\/exclusions\/[EXCLUSION/ID]\" \"organizations\/[ORGANIZATION/ID]\/exclusions\/[EXCLUSION/ID]\" \"billingAccounts\/[BILLING/ACCOUNT/ID]\/exclusions\/[EXCLUSION/ID]\" \"folders\/[FOLDER/ID]\/exclusions\/[EXCLUSION_ID]\" For example:\"projects\/my-project\/exclusions\/my-exclusion\" See 'name'.
Core.Text ->
LoggingOrganizationsExclusionsDelete
newLoggingOrganizationsExclusionsDelete name =
LoggingOrganizationsExclusionsDelete
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
name = name,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance
Core.GoogleRequest
LoggingOrganizationsExclusionsDelete
where
type Rs LoggingOrganizationsExclusionsDelete = Empty
type
Scopes LoggingOrganizationsExclusionsDelete =
'[CloudPlatform'FullControl, Logging'Admin]
requestClient
LoggingOrganizationsExclusionsDelete {..} =
go
name
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
loggingService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy
LoggingOrganizationsExclusionsDeleteResource
)
Core.mempty
| null | https://raw.githubusercontent.com/brendanhay/gogol/fffd4d98a1996d0ffd4cf64545c5e8af9c976cda/lib/services/gogol-logging/gen/Gogol/Logging/Organizations/Exclusions/Delete.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Deletes an exclusion in the _Default sink.
* Resource
** Constructing a Request
| Deletes an exclusion in the _Default sink.
/See:/ 'newLoggingOrganizationsExclusionsDelete' smart constructor.
| V1 error format.
| OAuth access token.
| Required. The resource name of an existing exclusion to delete: \"projects\/[PROJECT/ID]\/exclusions\/[EXCLUSION/ID]\" \"organizations\/[ORGANIZATION/ID]\/exclusions\/[EXCLUSION/ID]\" \"billingAccounts\/[BILLING/ACCOUNT/ID]\/exclusions\/[EXCLUSION/ID]\" \"folders\/[FOLDER/ID]\/exclusions\/[EXCLUSION_ID]\" For example:\"projects\/my-project\/exclusions\/my-exclusion\"
| Upload protocol for media (e.g. \"raw\", \"multipart\").
| Required. The resource name of an existing exclusion to delete: \"projects\/[PROJECT/ID]\/exclusions\/[EXCLUSION/ID]\" \"organizations\/[ORGANIZATION/ID]\/exclusions\/[EXCLUSION/ID]\" \"billingAccounts\/[BILLING/ACCOUNT/ID]\/exclusions\/[EXCLUSION/ID]\" \"folders\/[FOLDER/ID]\/exclusions\/[EXCLUSION_ID]\" For example:\"projects\/my-project\/exclusions\/my-exclusion\" See 'name'. | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Module : . Logging . Organizations . Exclusions . Delete
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
/See:/ < / Cloud Logging API Reference > for
module Gogol.Logging.Organizations.Exclusions.Delete
LoggingOrganizationsExclusionsDeleteResource,
LoggingOrganizationsExclusionsDelete (..),
newLoggingOrganizationsExclusionsDelete,
)
where
import Gogol.Logging.Types
import qualified Gogol.Prelude as Core
| A resource alias for @logging.organizations.exclusions.delete@ method which the
' ' request conforms to .
type LoggingOrganizationsExclusionsDeleteResource =
"v2"
Core.:> Core.Capture "name" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Delete '[Core.JSON] Empty
data LoggingOrganizationsExclusionsDelete = LoggingOrganizationsExclusionsDelete
xgafv :: (Core.Maybe Xgafv),
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
name :: Core.Text,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
| Creates a value of ' ' with the minimum fields required to make a request .
newLoggingOrganizationsExclusionsDelete ::
Core.Text ->
LoggingOrganizationsExclusionsDelete
newLoggingOrganizationsExclusionsDelete name =
LoggingOrganizationsExclusionsDelete
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
name = name,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance
Core.GoogleRequest
LoggingOrganizationsExclusionsDelete
where
type Rs LoggingOrganizationsExclusionsDelete = Empty
type
Scopes LoggingOrganizationsExclusionsDelete =
'[CloudPlatform'FullControl, Logging'Admin]
requestClient
LoggingOrganizationsExclusionsDelete {..} =
go
name
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
loggingService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy
LoggingOrganizationsExclusionsDeleteResource
)
Core.mempty
|
82acc8ee095243318aec14de76cc5b377cb9fdd66bf12d7ba43a0af7c96f2ef8 | ktakashi/sagittarius-scheme | enums.scm | ;; -*- scheme -*-
#!core
(library (core enums)
(export make-enumeration
enum-set?
enum-set-universe
enum-set-indexer
enum-set-constructor
enum-set->list
enum-set-member?
enum-set-subset?
enum-set=?
enum-set-union
enum-set-intersection
enum-set-difference
enum-set-complement
enum-set-projection
<enum-type>
<enum-set>)
(import (core)
(core base)
(core record procedural)
(sagittarius))
;; use record api directory
(define <enum-type>
(let* ((rtd (make-record-type-descriptor '<enum-type> #f #f #f #f
'#((immutable universe)
(immutable indexer))))
(rcd (make-record-constructor-descriptor rtd #f #f)))
(make-record-type '<enum-type> rtd rcd)))
(define make-enum-type (record-constructor (record-type-rcd <enum-type>)))
(define enum-type? (record-predicate (record-type-rtd <enum-type>)))
(define enum-type-universe (record-accessor (record-type-rtd <enum-type>) 0))
(define enum-type-indexer (record-accessor (record-type-rtd <enum-type>) 1))
(define <enum-set>
(let* ((rtd (make-record-type-descriptor '<enum-set> #f #f #f #f
#((immutable type)
(immutable members))))
(rcd (make-record-constructor-descriptor rtd #f #f)))
(make-record-type '<enum-set> rtd rcd)))
(define make-enum-set (record-constructor (record-type-rcd <enum-set>)))
(define enum-set? (record-predicate (record-type-rtd <enum-set>)))
(define enum-set-type (record-accessor (record-type-rtd <enum-set>) 0))
(define enum-set-members (record-accessor (record-type-rtd <enum-set>) 1))
;; from mosh
(define (make-enumeration-type symbol-list)
(let ([ht (make-eq-hashtable)])
(let loop ([symbol-list symbol-list]
[i 0])
(if (null? symbol-list)
'()
(begin (hashtable-set! ht (car symbol-list) i)
(loop (cdr symbol-list) (+ i 1)))))
(make-enum-type symbol-list
(lambda (symbol)
(hashtable-ref ht symbol #f)))))
(define (make-enumeration symbol-list)
(cond
[(and (list? symbol-list) (for-all symbol? symbol-list))
(make-enum-set (make-enumeration-type symbol-list) symbol-list)]
[else
(assertion-violation 'make-enumeration
"argument 1 must be a list of symbols")]))
(define (enum-set-universe enum-set)
(make-enum-set (enum-set-type enum-set)
(enum-type-universe (enum-set-type enum-set))))
(define (enum-set-indexer enum-set)
(enum-type-indexer (enum-set-type enum-set)))
(define (enum-set-constructor enum-set)
(lambda (symbol-list)
(let ([universe (enum-type-universe (enum-set-type enum-set))])
(if (for-all (lambda (x) (memq x universe)) symbol-list)
(make-enum-set (enum-set-type enum-set) symbol-list)
(assertion-violation
'enum-set-constructor
"the symbol list must all belong to the universe."
universe symbol-list)))))
(define (enum-set->list enum-set)
(let ([universe (enum-type-universe (enum-set-type enum-set))]
[members (enum-set-members enum-set)])
(let loop ([universe universe])
(cond
[(null? universe) '()]
[(memq (car universe) members)
(cons (car universe) (loop (cdr universe)))]
[else
(loop (cdr universe))]))))
(define (enum-set-member? symbol enum-set)
(and (memq symbol (enum-set-members enum-set)) #t))
(define (enum-set-subset? enum-set1 enum-set2)
(and
(let ([enum-set2-univese (enum-set->list (enum-set-universe enum-set2))])
(for-all
(lambda (symbol) (memq symbol enum-set2-univese))
(enum-set->list (enum-set-universe enum-set1))))
(for-all
(lambda (symbol) (enum-set-member? symbol enum-set2))
(enum-set-members enum-set1))))
(define (enum-set=? enum-set1 enum-set2)
(and (enum-set-subset? enum-set1 enum-set2)
(enum-set-subset? enum-set2 enum-set1)))
(define (enum-set-union enum-set1 enum-set2)
(define (union lst1 lst2)
(let loop ([ret lst1]
[lst lst2])
(cond
[(null? lst) ret]
[(memq (car lst) ret)
(loop ret (cdr lst))]
[else
(loop (cons (car lst) ret) (cdr lst))])))
(if (eq? (enum-set-type enum-set1) (enum-set-type enum-set2))
(make-enum-set (enum-set-type enum-set1)
(union (enum-set-members enum-set1)
(enum-set-members enum-set2)))
(assertion-violation 'enum-set-union "enum-set1 and enum-set2 must be enumeration sets that have the same enumeration type.")))
(define (enum-set-intersection enum-set1 enum-set2)
(define (intersection lst1 lst2)
(let loop ([ret '()]
[lst lst1])
(if (null? lst)
ret
(cond
[(memq (car lst) lst2)
(loop (cons (car lst) ret) (cdr lst))]
[else
(loop ret (cdr lst))]))))
(if (eq? (enum-set-type enum-set1) (enum-set-type enum-set2))
(make-enum-set (enum-set-type enum-set1)
(intersection (enum-set-members enum-set1)
(enum-set-members enum-set2)))
(assertion-violation 'enum-set-intersection "enum-set1 and enum-set2 must be enumeration sets that have the same enumeration type.")))
(define (enum-set-difference enum-set1 enum-set2)
(define (difference lst1 lst2)
(let loop ([ret '()]
[lst lst1])
(if (null? lst)
ret
(cond
[(memq (car lst) lst2)
(loop ret (cdr lst))]
[else
(loop (cons (car lst) ret) (cdr lst))]))))
(if (eq? (enum-set-type enum-set1) (enum-set-type enum-set2))
(make-enum-set (enum-set-type enum-set1)
(difference (enum-set-members enum-set1)
(enum-set-members enum-set2)))
(assertion-violation 'enum-set-difference "enum-set1 and enum-set2 must be enumeration sets that have the same enumeration type.")))
(define (enum-set-complement enum-set)
(let ([members (enum-set-members enum-set)])
(make-enum-set (enum-set-type enum-set)
(filter (lambda (symbol) (not (memq symbol members)))
(enum-type-universe (enum-set-type enum-set))))))
(define (enum-set-projection enum-set1 enum-set2)
(if (enum-set-subset? enum-set1 enum-set2)
enum-set1
(let ([universe2 (enum-type-universe (enum-set-type enum-set2))]
[members1 (enum-set-members enum-set1)])
(make-enum-set (enum-set-type enum-set2)
(filter (lambda (symbol) (memq symbol universe2))
members1)))))
) ; [end]
;; end of file
;; Local Variables:
coding : utf-8 - unix
;; End:
| null | https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/45ee618323dcbb5fde7072d4872a35bdacb387e1/lib/core/enums.scm | scheme | -*- scheme -*-
use record api directory
from mosh
[end]
end of file
Local Variables:
End: | #!core
(library (core enums)
(export make-enumeration
enum-set?
enum-set-universe
enum-set-indexer
enum-set-constructor
enum-set->list
enum-set-member?
enum-set-subset?
enum-set=?
enum-set-union
enum-set-intersection
enum-set-difference
enum-set-complement
enum-set-projection
<enum-type>
<enum-set>)
(import (core)
(core base)
(core record procedural)
(sagittarius))
(define <enum-type>
(let* ((rtd (make-record-type-descriptor '<enum-type> #f #f #f #f
'#((immutable universe)
(immutable indexer))))
(rcd (make-record-constructor-descriptor rtd #f #f)))
(make-record-type '<enum-type> rtd rcd)))
(define make-enum-type (record-constructor (record-type-rcd <enum-type>)))
(define enum-type? (record-predicate (record-type-rtd <enum-type>)))
(define enum-type-universe (record-accessor (record-type-rtd <enum-type>) 0))
(define enum-type-indexer (record-accessor (record-type-rtd <enum-type>) 1))
(define <enum-set>
(let* ((rtd (make-record-type-descriptor '<enum-set> #f #f #f #f
#((immutable type)
(immutable members))))
(rcd (make-record-constructor-descriptor rtd #f #f)))
(make-record-type '<enum-set> rtd rcd)))
(define make-enum-set (record-constructor (record-type-rcd <enum-set>)))
(define enum-set? (record-predicate (record-type-rtd <enum-set>)))
(define enum-set-type (record-accessor (record-type-rtd <enum-set>) 0))
(define enum-set-members (record-accessor (record-type-rtd <enum-set>) 1))
(define (make-enumeration-type symbol-list)
(let ([ht (make-eq-hashtable)])
(let loop ([symbol-list symbol-list]
[i 0])
(if (null? symbol-list)
'()
(begin (hashtable-set! ht (car symbol-list) i)
(loop (cdr symbol-list) (+ i 1)))))
(make-enum-type symbol-list
(lambda (symbol)
(hashtable-ref ht symbol #f)))))
(define (make-enumeration symbol-list)
(cond
[(and (list? symbol-list) (for-all symbol? symbol-list))
(make-enum-set (make-enumeration-type symbol-list) symbol-list)]
[else
(assertion-violation 'make-enumeration
"argument 1 must be a list of symbols")]))
(define (enum-set-universe enum-set)
(make-enum-set (enum-set-type enum-set)
(enum-type-universe (enum-set-type enum-set))))
(define (enum-set-indexer enum-set)
(enum-type-indexer (enum-set-type enum-set)))
(define (enum-set-constructor enum-set)
(lambda (symbol-list)
(let ([universe (enum-type-universe (enum-set-type enum-set))])
(if (for-all (lambda (x) (memq x universe)) symbol-list)
(make-enum-set (enum-set-type enum-set) symbol-list)
(assertion-violation
'enum-set-constructor
"the symbol list must all belong to the universe."
universe symbol-list)))))
(define (enum-set->list enum-set)
(let ([universe (enum-type-universe (enum-set-type enum-set))]
[members (enum-set-members enum-set)])
(let loop ([universe universe])
(cond
[(null? universe) '()]
[(memq (car universe) members)
(cons (car universe) (loop (cdr universe)))]
[else
(loop (cdr universe))]))))
(define (enum-set-member? symbol enum-set)
(and (memq symbol (enum-set-members enum-set)) #t))
(define (enum-set-subset? enum-set1 enum-set2)
(and
(let ([enum-set2-univese (enum-set->list (enum-set-universe enum-set2))])
(for-all
(lambda (symbol) (memq symbol enum-set2-univese))
(enum-set->list (enum-set-universe enum-set1))))
(for-all
(lambda (symbol) (enum-set-member? symbol enum-set2))
(enum-set-members enum-set1))))
(define (enum-set=? enum-set1 enum-set2)
(and (enum-set-subset? enum-set1 enum-set2)
(enum-set-subset? enum-set2 enum-set1)))
(define (enum-set-union enum-set1 enum-set2)
(define (union lst1 lst2)
(let loop ([ret lst1]
[lst lst2])
(cond
[(null? lst) ret]
[(memq (car lst) ret)
(loop ret (cdr lst))]
[else
(loop (cons (car lst) ret) (cdr lst))])))
(if (eq? (enum-set-type enum-set1) (enum-set-type enum-set2))
(make-enum-set (enum-set-type enum-set1)
(union (enum-set-members enum-set1)
(enum-set-members enum-set2)))
(assertion-violation 'enum-set-union "enum-set1 and enum-set2 must be enumeration sets that have the same enumeration type.")))
(define (enum-set-intersection enum-set1 enum-set2)
(define (intersection lst1 lst2)
(let loop ([ret '()]
[lst lst1])
(if (null? lst)
ret
(cond
[(memq (car lst) lst2)
(loop (cons (car lst) ret) (cdr lst))]
[else
(loop ret (cdr lst))]))))
(if (eq? (enum-set-type enum-set1) (enum-set-type enum-set2))
(make-enum-set (enum-set-type enum-set1)
(intersection (enum-set-members enum-set1)
(enum-set-members enum-set2)))
(assertion-violation 'enum-set-intersection "enum-set1 and enum-set2 must be enumeration sets that have the same enumeration type.")))
(define (enum-set-difference enum-set1 enum-set2)
(define (difference lst1 lst2)
(let loop ([ret '()]
[lst lst1])
(if (null? lst)
ret
(cond
[(memq (car lst) lst2)
(loop ret (cdr lst))]
[else
(loop (cons (car lst) ret) (cdr lst))]))))
(if (eq? (enum-set-type enum-set1) (enum-set-type enum-set2))
(make-enum-set (enum-set-type enum-set1)
(difference (enum-set-members enum-set1)
(enum-set-members enum-set2)))
(assertion-violation 'enum-set-difference "enum-set1 and enum-set2 must be enumeration sets that have the same enumeration type.")))
(define (enum-set-complement enum-set)
(let ([members (enum-set-members enum-set)])
(make-enum-set (enum-set-type enum-set)
(filter (lambda (symbol) (not (memq symbol members)))
(enum-type-universe (enum-set-type enum-set))))))
(define (enum-set-projection enum-set1 enum-set2)
(if (enum-set-subset? enum-set1 enum-set2)
enum-set1
(let ([universe2 (enum-type-universe (enum-set-type enum-set2))]
[members1 (enum-set-members enum-set1)])
(make-enum-set (enum-set-type enum-set2)
(filter (lambda (symbol) (memq symbol universe2))
members1)))))
coding : utf-8 - unix
|
3282f8806bae5d2bc45b3976d6eb7d9d92eef6f4a70f75e654b85eedc87ae5f8 | slyrus/mcclim-old | glimpse-present-window.lisp |
(in-package :glimpse)
;;; Convert a basic :select gesture on a PORT presentation type to a
;;; 'describe object' command. This needs to be applicable when
;;; we're in the 'standard' input context for an interactor pane.
(define-presentation-to-command-translator port-select-to-describe-command
(port ; from-type
com-describe ; command-name - maybe com-describe-presentation?
glimpse ; command table containing command
:gesture :select ; activate on :select (left-button click)
;; :tester ...
:documentation
"Invoke the 'describe presentation' command on the selected port"
:pointer-documentation ((object stream) (format stream "Describe port ~A" (type-of object)))
:menu t ; command should appear in popped-up menus
: priority 1123
:echo nil) ; don't echo the command when it is invoked
;; arglist must be a subset (using string-equal) of:
;; (object presentation context-type frame event window x y)
(object)
;; body of translator; returns a list of the arguments to the command
;; named by command-name.
(list object))
(define-presentation-to-command-translator graft-select-to-describe-command
(graft ; from-type
com-describe ; command-name - maybe com-describe-presentation?
glimpse ; command table containing command
:gesture :select ; activate on :select (left-button click)
;; :tester ...
:documentation
"Invoke the 'describe presentation' command on the selected graft"
:pointer-documentation ((object stream) (format stream "Describe graft ~A" (type-of object)))
:menu t ; command should appear in popped-up menus
: priority 1123
:echo nil) ; don't echo the command when it is invoked
;; arglist must be a subset (using string-equal) of:
;; (object presentation context-type frame event window x y)
(object)
;; body of translator; returns a list of the arguments to the command
;; named by command-name.
(list object))
(define-presentation-to-command-translator pane-select-to-describe-command
(pane ; from-type
com-describe
;; com-describe-sheet ; command-name - maybe com-describe-presentation?
glimpse ; command table containing command
:gesture :select ; activate on :select (left-button click)
;; :tester ...
:documentation
"Invoke the 'describe presentation' command on the selected pane"
:pointer-documentation ((object stream) (format stream "Describe pane ~A" (type-of object)))
:menu t ; command should appear in popped-up menus
: priority 1123
:echo nil) ; don't echo the command when it is invoked
;; arglist must be a subset (using string-equal) of:
;; (object presentation context-type frame event window x y)
(object)
;; body of translator; returns a list of the arguments to the command
;; named by command-name.
(list object))
| null | https://raw.githubusercontent.com/slyrus/mcclim-old/354cdf73c1a4c70e619ccd7d390cb2f416b21c1a/Backends/beagle/glimpse/glimpse-present-window.lisp | lisp | Convert a basic :select gesture on a PORT presentation type to a
'describe object' command. This needs to be applicable when
we're in the 'standard' input context for an interactor pane.
from-type
command-name - maybe com-describe-presentation?
command table containing command
activate on :select (left-button click)
:tester ...
command should appear in popped-up menus
don't echo the command when it is invoked
arglist must be a subset (using string-equal) of:
(object presentation context-type frame event window x y)
body of translator; returns a list of the arguments to the command
named by command-name.
from-type
command-name - maybe com-describe-presentation?
command table containing command
activate on :select (left-button click)
:tester ...
command should appear in popped-up menus
don't echo the command when it is invoked
arglist must be a subset (using string-equal) of:
(object presentation context-type frame event window x y)
body of translator; returns a list of the arguments to the command
named by command-name.
from-type
com-describe-sheet ; command-name - maybe com-describe-presentation?
command table containing command
activate on :select (left-button click)
:tester ...
command should appear in popped-up menus
don't echo the command when it is invoked
arglist must be a subset (using string-equal) of:
(object presentation context-type frame event window x y)
body of translator; returns a list of the arguments to the command
named by command-name. |
(in-package :glimpse)
(define-presentation-to-command-translator port-select-to-describe-command
:documentation
"Invoke the 'describe presentation' command on the selected port"
:pointer-documentation ((object stream) (format stream "Describe port ~A" (type-of object)))
: priority 1123
(object)
(list object))
(define-presentation-to-command-translator graft-select-to-describe-command
:documentation
"Invoke the 'describe presentation' command on the selected graft"
:pointer-documentation ((object stream) (format stream "Describe graft ~A" (type-of object)))
: priority 1123
(object)
(list object))
(define-presentation-to-command-translator pane-select-to-describe-command
com-describe
:documentation
"Invoke the 'describe presentation' command on the selected pane"
:pointer-documentation ((object stream) (format stream "Describe pane ~A" (type-of object)))
: priority 1123
(object)
(list object))
|
ed19e1cab385bc8cfbd9be691026f6dc8c4f936772b9c66ff52a14a44f782e2f | merijn/broadcast-chan | IOTest.hs | {-# LANGUAGE BangPatterns #-}
import Control.Monad (void)
import Data.Foldable (forM_, foldlM)
import Data.Set (Set)
import qualified Data.Set as S
import BroadcastChan
import BroadcastChan.Test
sequentialSink :: Foldable f => f a -> (a -> IO b) -> IO ()
sequentialSink set f = forM_ set (void . f)
parallelSink
:: Foldable f => Handler IO a -> f a -> (a -> IO b) -> Int -> IO ()
parallelSink hnd input f n =
parMapM_ hnd n (void . f) input
sequentialFold :: (Foldable f, Ord b) => f a -> (a -> IO b) -> IO (Set b)
sequentialFold input f = foldlM foldFun S.empty input
where
foldFun bs a = (\b -> S.insert b bs) <$> f a
parallelFold
:: (Foldable f, Ord b)
=> Handler IO a -> f a -> (a -> IO b) -> Int -> IO (Set b)
parallelFold hnd input f n =
parFoldMap hnd n f foldFun S.empty input
where
foldFun :: Ord b => Set b -> b -> Set b
foldFun s b = S.insert b s
parallelFoldM
:: (Foldable f, Ord b)
=> Handler IO a -> f a -> (a -> IO b) -> Int -> IO (Set b)
parallelFoldM hnd input f n =
parFoldMapM hnd n f foldFun S.empty input
where
foldFun :: (Ord b, Monad m) => Set b -> b -> m (Set b)
foldFun !z b = return $ S.insert b z
main :: IO ()
main = runTests "parallel-io" $
[ genStreamTests "sink" sequentialSink parallelSink
, genStreamTests "fold" sequentialFold parallelFold
, genStreamTests "foldM" sequentialFold parallelFoldM
]
| null | https://raw.githubusercontent.com/merijn/broadcast-chan/1a884f9ffa6f9f5628f575aec1e06502c853ab9f/broadcast-chan-tests/tests/IOTest.hs | haskell | # LANGUAGE BangPatterns # |
import Control.Monad (void)
import Data.Foldable (forM_, foldlM)
import Data.Set (Set)
import qualified Data.Set as S
import BroadcastChan
import BroadcastChan.Test
sequentialSink :: Foldable f => f a -> (a -> IO b) -> IO ()
sequentialSink set f = forM_ set (void . f)
parallelSink
:: Foldable f => Handler IO a -> f a -> (a -> IO b) -> Int -> IO ()
parallelSink hnd input f n =
parMapM_ hnd n (void . f) input
sequentialFold :: (Foldable f, Ord b) => f a -> (a -> IO b) -> IO (Set b)
sequentialFold input f = foldlM foldFun S.empty input
where
foldFun bs a = (\b -> S.insert b bs) <$> f a
parallelFold
:: (Foldable f, Ord b)
=> Handler IO a -> f a -> (a -> IO b) -> Int -> IO (Set b)
parallelFold hnd input f n =
parFoldMap hnd n f foldFun S.empty input
where
foldFun :: Ord b => Set b -> b -> Set b
foldFun s b = S.insert b s
parallelFoldM
:: (Foldable f, Ord b)
=> Handler IO a -> f a -> (a -> IO b) -> Int -> IO (Set b)
parallelFoldM hnd input f n =
parFoldMapM hnd n f foldFun S.empty input
where
foldFun :: (Ord b, Monad m) => Set b -> b -> m (Set b)
foldFun !z b = return $ S.insert b z
main :: IO ()
main = runTests "parallel-io" $
[ genStreamTests "sink" sequentialSink parallelSink
, genStreamTests "fold" sequentialFold parallelFold
, genStreamTests "foldM" sequentialFold parallelFoldM
]
|
1436d6f8441c7bd6ee6684ebb970c4f4685ae07adca0630547da3ebaf33cb2db | bmeurer/ocaml-arm | ocaml_specific.mli | (***********************************************************************)
(* ocamlbuild *)
(* *)
, , projet Gallium , INRIA Rocquencourt
(* *)
Copyright 2007 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
Original author :
val init : unit -> unit
| null | https://raw.githubusercontent.com/bmeurer/ocaml-arm/43f7689c76a349febe3d06ae7a4fc1d52984fd8b/ocamlbuild/ocaml_specific.mli | ocaml | *********************************************************************
ocamlbuild
********************************************************************* | , , projet Gallium , INRIA Rocquencourt
Copyright 2007 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
Original author :
val init : unit -> unit
|
077d392c23ba88bf755ee64933cc2776520d542b4af5f16207266508a8685a9b | cmsc430/www | let-x-err.rkt | #lang racket
(let ((x #f))
(unbox #f))
| null | https://raw.githubusercontent.com/cmsc430/www/0809867532b8ef516029ac38093a145db5b424ea/langs/test-programs/hustle/let-x-err.rkt | racket | #lang racket
(let ((x #f))
(unbox #f))
| |
092951646e65fa5b7439ecf0f192eff94ae89c5ee105022d4aeb08ca9920dbdd | ormf/cm | fullmix-examples.lisp |
(cd )
(load "/Volumes/Classes/Music 404b/Samples/sdb.lisp")
(dac "sdb:pp;a4.aiff")
(defparameter sf (namestring (truename "sdb:pp;a4.aiff")))
(sound-chans sf)
(sound-srate sf)
(sound-duration sf)
(sound-comment sf)
(sound-data-format sf)
(sound-format-name (sound-data-format sf))
(sound-header-type sf)
(sound-type-name (sound-header-type sf))
(sound-frames sf)
get max amp of both chans
(let ((da (clm:make-double-array 2))
(ia (clm:make-integer-array 2)))
(sound-maxamp sf 2 da ia)
(list (elt da 0) (elt da 1)))
;;; Fullmix
(cload "fullmix.ins")
(object-parameters (new fullmix))
infile & optional beg outdur inbeg matrix srate reverb - amount srenv
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf .5)
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf .5 .1)
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf 6 nil 2)
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf .5 nil nil '((1 0) (0 1)))
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
both chans send to left
(fullmix sf nil nil nil '((1 0) (1 0)))
both chans send to middle
(fullmix sf 2 nil nil '((.5 .5) (.5 .5)))
both chans send to right
(fullmix sf 4 nil nil '((0 1) (0 1)))
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
left chan appears in right 1/4 through sound
(fullmix sf nil nil nil '((1 (0 0 .25 1 1 1))
(0 0)
))
(fullmix sf nil nil nil '((1 (0 0 .25 1 1 1))
(0 0)
))
)
| null | https://raw.githubusercontent.com/ormf/cm/26843eec009bd6c214992a8e67c49fffa16d9530/doc/404B-SoundSynth-AlgoComp/www-camil.music.uiuc.edu_16080/classes/404B/sfp/fullmix/fullmix-examples.lisp | lisp | Fullmix |
(cd )
(load "/Volumes/Classes/Music 404b/Samples/sdb.lisp")
(dac "sdb:pp;a4.aiff")
(defparameter sf (namestring (truename "sdb:pp;a4.aiff")))
(sound-chans sf)
(sound-srate sf)
(sound-duration sf)
(sound-comment sf)
(sound-data-format sf)
(sound-format-name (sound-data-format sf))
(sound-header-type sf)
(sound-type-name (sound-header-type sf))
(sound-frames sf)
get max amp of both chans
(let ((da (clm:make-double-array 2))
(ia (clm:make-integer-array 2)))
(sound-maxamp sf 2 da ia)
(list (elt da 0) (elt da 1)))
(cload "fullmix.ins")
(object-parameters (new fullmix))
infile & optional beg outdur inbeg matrix srate reverb - amount srenv
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf .5)
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf .5 .1)
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf 6 nil 2)
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
(fullmix sf)
(fullmix sf .5 nil nil '((1 0) (0 1)))
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
both chans send to left
(fullmix sf nil nil nil '((1 0) (1 0)))
both chans send to middle
(fullmix sf 2 nil nil '((.5 .5) (.5 .5)))
both chans send to right
(fullmix sf 4 nil nil '((0 1) (0 1)))
)
(with-sound (:output "test.aiff" :channels 2 :srate 44100)
left chan appears in right 1/4 through sound
(fullmix sf nil nil nil '((1 (0 0 .25 1 1 1))
(0 0)
))
(fullmix sf nil nil nil '((1 (0 0 .25 1 1 1))
(0 0)
))
)
|
d26d7cdbb50ccd682f25cacf98a39d7ee0bfae585e1d03dabb55b1346aeb670d | mzp/coq-ide-for-ios | ideal.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*
   Nullstellensatz with Groebner basis computation.

   We use a sparse representation for polynomials:
   a monomial is an array of exponents (one for each variable)
   with its degree in head;
   a polynomial is a sorted list of (coefficient, monomial).
*)
open Utile
open List
exception NotInIdeal
(* Interface of the Groebner-basis engine produced by the [Make] functor. *)
module type S = sig

(* Monomials *)
(* A monomial over d variables is an int array of length d+1;
   slot 0 caches the total degree, slots 1..d are the exponents. *)
type mon = int array

val mult_mon : mon -> mon -> mon
val deg : mon -> int
val compare_mon : mon -> mon -> int
val div_mon : mon -> mon -> mon
val div_mon_test : mon -> mon -> bool
val ppcm_mon : mon -> mon -> mon

(* Polynomials *)

type deg = int
type coef
type poly
type polynom

(* Conversion to the sparse list representation (decreasing monomials). *)
val repr : poly -> (coef * mon) list
val polconst : coef -> poly
val zeroP : poly
val gen : int -> poly

val equal : poly -> poly -> bool

(* Pretty-printing; [name_var] holds the display names of the variables. *)
val name_var : string list ref
val getvar : string list -> int -> string
val lstringP : poly list -> string
val printP : poly -> unit
val lprintP : poly list -> unit

val div_pol_coef : poly -> coef -> poly
val plusP : poly -> poly -> poly
val mult_t_pol : coef -> mon -> poly -> poly
val selectdiv : mon -> poly list -> poly
val oppP : poly -> poly
val emultP : coef -> poly -> poly
val multP : poly -> poly -> poly
val puisP : poly -> int -> poly
val contentP : poly -> coef
val contentPlist : poly list -> coef
val pgcdpos : coef -> coef -> coef
val div_pol : poly -> poly -> coef -> coef -> mon -> poly
(* Pseudo-division remainder; see the implementation for the contract. *)
val reduce2 : poly -> poly list -> coef * poly

(* Trace of the division of each polynomial over the initial ones. *)
val poldepcontent : coef list ref
val coefpoldep_find : poly -> poly -> poly
val coefpoldep_set : poly -> poly -> poly -> unit
val initcoefpoldep : poly list -> unit
val reduce2_trace : poly -> poly list -> poly list -> poly list * poly

val spol : poly -> poly -> poly
val etrangers : poly -> poly -> bool
val div_ppcm : poly -> poly -> poly -> bool

val genPcPf : poly -> poly list -> poly list -> poly list
val genOCPf : poly list -> poly list

val is_homogeneous : poly -> bool

(* Nullstellensatz certificate: coef * p^power = gb_comb/last_comb
   combination of the input polynomials. *)
type certificate =
    { coef : coef; power : int;
      gb_comb : poly list list; last_comb : poly list }
val test_dans_ideal : poly -> poly list -> poly list ->
  poly list * poly * certificate
val in_ideal : deg -> poly list -> poly -> poly list * poly * certificate

end
(***********************************************************************
Global options
*)
(* Use pure lexicographic monomial order instead of degree-reverse-lex. *)
let lexico = ref false
(* Memoize head-monomial divisor lookups (see [hmon] in [Make]). *)
let use_hmon = ref false

(* division of tail monomials *)
let reduire_les_queues = false

(* divide first with new polynomials: try the most recently added
   polynomials first when looking for a divisor.  NOTE(review): the
   original line "divide first with new polynomials" sat outside comment
   delimiters and could not compile; restored as a comment. *)
let nouveaux_pol_en_tete = false
(***********************************************************************
Functor
*)
(* Functor building the Groebner engine over an arbitrary coefficient
   ring/field [P].  NOTE: the [struct] opened here closes past the end of
   this chunk. *)
module Make (P:Polynom.S) = struct

type coef = P.t
(* Frequently used coefficient constants. *)
let coef0 = P.of_num (Num.Int 0)
let coef1 = P.of_num (Num.Int 1)
let coefm1 = P.of_num (Num.Int (-1))
(* Bracketed rendering of a coefficient, e.g. "[3]". *)
let string_of_coef c = "["^(P.to_string c)^"]"
(***********************************************************************
   Monomials
   array of integers, first is the degree
*)
(* Monomial: slot 0 is the cached total degree, slots 1..d the exponents. *)
type mon = int array
type deg = int
(* Sparse polynomial: (coefficient, monomial) pairs, monomials decreasing. *)
type poly = (coef * mon) list
(* Polynomial with identity and sugar (degree bound) bookkeeping. *)
type polynom =
   {pol : poly ref;   (* the underlying polynomial *)
    num : int;        (* unique id, index into [allpol] *)
    sugar : int}      (* sugar degree, used by the selection strategy *)
(* Number of variables of a monomial (slot 0 stores the total degree). *)
let nvar m = Array.length m - 1

(* Total degree of a monomial. *)
let deg m = m.(0)

(* Product of two monomials: exponents add componentwise; the degree in
   slot 0 adds as well, so it stays consistent. *)
let mult_mon m m' =
  Array.init (nvar m + 1) (fun i -> m.(i) + m'.(i))
(* Total order on monomials: pure lexicographic when [!lexico], else
   degree-reverse-lexicographic.  Returns <0 / 0 / >0 like [compare].
   NOTE(review): extraction had mangled the [let i = ref 1 in] binding of
   the loop counter into prose ("1 si lexico pur 0 si degre"); restored. *)
let compare_mon m m' =
  let d = nvar m in
  if !lexico
  then (
    (* comparison of monomials in lexicographic order:
       start from the first variable *)
    let res = ref 0 in
    let i = ref 1 in
    while (!res = 0) && (!i <= d) do
      res := compare m.(!i) m'.(!i);
      i := !i + 1;
    done;
    !res)
  else (
    (* degree reverse lexicographic order *)
    match compare m.(0) m'.(0) with
    | 0 -> (* same total degree *)
	let res = ref 0 in
	let i = ref d in
	while (!res = 0) && (!i >= 1) do
	  res := - (compare m.(!i) m'.(!i));
	  i := !i - 1;
	done;
	!res
    | x -> x)
(* Quotient of two monomials: exponents (and the cached degree in slot 0)
   subtract componentwise.  The caller guarantees divisibility. *)
let div_mon m m' =
  Array.init (Array.length m) (fun i -> m.(i) - m'.(i))
(* Divides every coefficient of polynomial [p] by the coefficient [c]. *)
let div_pol_coef p c =
  List.map (fun (a,m) -> (P.divP a c,m)) p
(* Tests whether m' divides m, i.e. every exponent of m' is at most the
   corresponding exponent of m.  Slot 0 (total degree) is skipped: it is
   implied by the componentwise test.  NOTE(review): extraction had
   mangled the [let i = ref 1 in] binding into prose; restored. *)
let div_mon_test m m' =
  let d = Array.length m - 1 in
  let res = ref true in
  let i = ref 1 in
  while (!res) && (!i <= d) do
    res := (m.(!i) >= m'.(!i));
    i := succ !i;
  done;
  !res
(* Recomputes the cached total degree in slot 0 as the sum of the
   exponents; mutates [m] in place and returns it. *)
let set_deg m =
  let total = ref 0 in
  for i = 1 to Array.length m - 1 do
    total := !total + m.(i)
  done;
  m.(0) <- !total;
  m
(* lcm of two monomials: componentwise max of the exponents, with the
   total degree recomputed by [set_deg].  NOTE(review): the bare word
   "lcm" preceding this definition was a comment stripped of its
   delimiters; restored as a comment. *)
let ppcm_mon m m' =
  let d = nvar m in
  let m'' = Array.create (d+1) 0 in
  for i=1 to d do
    m''.(i)<- (max m.(i) m'.(i));
  done;
  set_deg m''
(**********************************************************************
Polynomials
list of (coefficient, monomial) decreasing order
*)
(* The abstract [poly] is exactly its sparse-list representation. *)
let repr p = p

(* Structural equality: same monomials and [P.equal] coefficients,
   termwise (lists must have equal length). *)
let equal =
  Util.list_for_all2eq
    (fun (c1,m1) (c2,m2) -> P.equal c1 c2 && m1=m2)

(* Hash combining the monomial list's generic hash with each
   coefficient's [P.hash]. *)
let hash p =
  let c = map fst p in
  let m = map snd p in
  fold_left (fun h p -> h * 17 + P.hash p) (Hashtbl.hash m) c

(* Hash tables keyed by polynomials, using the two functions above. *)
module Hashpol = Hashtbl.Make(
  struct
    type t = poly
    let equal = equal
    let hash = hash
  end)
(* A pretty printer for polynomials, with Maple-like syntax. *)
open Format
(* Name of variable [i] in [lv]; on any failure (out of range), returns a
   debug string dumping the whole list and the index. *)
let getvar lv i =
  try nth lv i
  with _ ->
    let dump = fold_left (fun acc v -> acc ^ " " ^ v) "lv= " lv in
    dump ^ " i=" ^ string_of_int i
(* Generic polynomial-to-string renderer with Maple-like syntax.  The
   polynomial type is abstracted through accessors: [zeroP]/[hdP]/[tlP]
   deconstruct the term list, [coefterm]/[monterm] split a term,
   [dimmon]/[string_of_exp] read a monomial, and [lvar] names variables. *)
let string_of_pol zeroP hdP tlP coefterm monterm string_of_coef
    dimmon string_of_exp lvar p =
  (* Renders a monomial; [coefone] tells whether the coefficient printed
     before it was 1 (then "1" or no leading "*" is needed). *)
  let rec string_of_mon m coefone =
    let s=ref [] in
    for i=1 to (dimmon m) do
      (match (string_of_exp m i) with
	"0" -> ()
      | "1" -> s:= (!s) @ [(getvar !lvar (i-1))]
      | e -> s:= (!s) @ [((getvar !lvar (i-1)) ^ "^" ^ e)]);
    done;
    (match !s with
      [] -> if coefone
      then  "1"
      else ""
    | l -> if coefone
    then  (String.concat "*" l)
    else ( "*" ^
	   (String.concat "*" l)))
  (* Renders one term; [start] is true for the very first printed term,
     which suppresses the leading "+". *)
  and string_of_term t start = let a = coefterm t and m = monterm t in
  match (string_of_coef a) with
    "0" -> ""
  | "1" ->(match start with
      true -> string_of_mon m true
    |false -> ( "+ "^
		(string_of_mon m true)))
  | "-1" ->( "-" ^" "^(string_of_mon m true))
  | c -> if (String.get c 0)='-'
  then ( "- "^
	 (String.sub c 1
	    ((String.length c)-1))^
	 (string_of_mon m false))
  else (match start with
      true -> ( c^(string_of_mon m false))
    |false -> ( "+ "^
		c^(string_of_mon m false)))
  (* Walks the term list, threading the [start] flag. *)
  and stringP p start =
    if (zeroP p)
    then (if start
    then ("0")
    else "")
    else ((string_of_term (hdP p) start)^
	  " "^
	  (stringP (tlP p) false))
  in
  (stringP p true)
(* Same abstract renderer as [string_of_pol], but printing directly to
   the [Format] pretty-printer (with boxes and break hints) instead of
   building a string. *)
let print_pol zeroP hdP tlP coefterm monterm string_of_coef
    dimmon string_of_exp lvar p =
  (* Prints a monomial; [coefone] = the preceding coefficient was 1. *)
  let rec print_mon m coefone =
    let s=ref [] in
    for i=1 to (dimmon m) do
      (match (string_of_exp m i) with
	"0" -> ()
      | "1" -> s:= (!s) @ [(getvar !lvar (i-1))]
      | e -> s:= (!s) @ [((getvar !lvar (i-1)) ^ "^" ^ e)]);
    done;
    (match !s with
      [] -> if coefone
      then print_string "1"
      else ()
    | l -> if coefone
    then print_string (String.concat "*" l)
    else (print_string "*";
	  print_string (String.concat "*" l)))
  (* Prints one term; [start] suppresses the leading "+" of the first. *)
  and print_term t start = let a = coefterm t and m = monterm t in
  match (string_of_coef a) with
    "0" -> ()
  | "1" ->(match start with
      true -> print_mon m true
    |false -> (print_string "+ ";
	       print_mon m true))
  | "-1" ->(print_string "-";print_space();print_mon m true)
  | c -> if (String.get c 0)='-'
  then (print_string "- ";
	print_string (String.sub c 1
			((String.length c)-1));
	print_mon m false)
  else (match start with
      true -> (print_string c;print_mon m false)
    |false -> (print_string "+ ";
	       print_string c;print_mon m false))
  (* Walks the term list inside a formatting box. *)
  and printP p start =
    if (zeroP p)
    then (if start
    then print_string("0")
    else ())
    else (print_term (hdP p) start;
	  if start then open_hovbox 0;
	  print_space();
	  print_cut();
	  printP (tlP p) false)
  in open_hovbox 3;
  printP p true;
  print_flush()
(* Display names of the variables, shared by all printers. *)
let name_var= ref []

(* Renders a concrete [poly] by instantiating [string_of_pol] with the
   sparse-list accessors. *)
let stringP p =
  string_of_pol
    (fun p -> match p with [] -> true | _ -> false)
    (fun p -> match p with (t::p) -> t |_ -> failwith "print_pol dans dansideal")
    (fun p -> match p with (t::p) -> p |_ -> failwith "print_pol dans dansideal")
    (fun (a,m) -> a)
    (fun (a,m) -> m)
    string_of_coef
    (fun m -> (Array.length m)-1)
    (fun m i -> (string_of_int (m.(i))))
    name_var
    p
(* Maximum number of terms printed in full by [stringPcut]. *)
let nsP2 = ref max_int

(* Renders a polynomial, truncated to its head term plus a term count
   when it has more than [!nsP2] terms.  NOTE(review): the reference to
   [Polynomesrec.nsP1] resolved to no module in scope (the twin
   assignment just above is commented out); commented it out as well so
   the definition compiles. *)
let stringPcut p =
  (*Polynomesrec.nsP1:=20;*)
  nsP2:=10;
  let res =
    if (length p)> !nsP2
    then (stringP [hd p])^" + "^(string_of_int (length p))^" termes"
    else stringP p in
  (*Polynomesrec.nsP1:= max_int ;*)
  nsP2:= max_int;
  res
(* Renders a list of polynomials, one per line (trailing newline each). *)
let lstringP l =
  String.concat "" (map (fun p -> stringP p ^ "\n") l)
(* Prints a concrete [poly] via [print_pol] with the sparse-list
   accessors (partial application). *)
let printP = print_pol
    (fun p -> match p with [] -> true | _ -> false)
    (fun p -> match p with (t::p) -> t |_ -> failwith "print_pol dans dansideal")
    (fun p -> match p with (t::p) -> p |_ -> failwith "print_pol dans dansideal")
    (fun (a,m) -> a)
    (fun (a,m) -> m)
    string_of_coef
    (fun m -> (Array.length m)-1)
    (fun m i -> (string_of_int (m.(i))))
    name_var

(* Prints a list of polynomials, one per line. *)
let rec lprintP l =
  match l with
    [] -> ()
  |p::l -> printP p;print_string "\n"; lprintP l
(***********************************************************************
   Operations
*)
(* The zero polynomial: an empty term list. *)
let zeroP = []

(* returns a constant polynomial with d variables *)
let polconst d c =
  let m = Array.create (d+1) 0 in
  let m = set_deg m in
  [(c,m)]
(* Sum of two polynomials: merge of the two sorted term lists, adding
   coefficients of equal monomials and dropping terms that cancel.
   NOTE(review): the original matched the result of [compare_mon]
   against the literals 1 and -1; the [compare] contract only guarantees
   the sign, so test the sign instead. *)
let plusP p q =
  let rec plusP p q =
    match p with
      [] -> q
    |t::p' ->
	match q with
	  [] -> p
	|t'::q' ->
	    let cmp = compare_mon (snd t) (snd t') in
	    if cmp > 0 then t::(plusP p' q)
	    else if cmp < 0 then t'::(plusP p q')
	    else
	      (* same monomial: add coefficients, drop the term if zero *)
	      let c=P.plusP (fst t) (fst t') in
	      match P.equal c coef0 with
		true -> (plusP p' q')
	      |false -> (c,(snd t))::(plusP p' q')
  in plusP p q
(* Multiplies polynomial [p] by the single term with coefficient [a] and
   monomial [m].  Term order is preserved, so the result stays sorted. *)
let mult_t_pol a m p =
  map (fun (b, m') -> (P.multP a b, mult_mon m m')) p
(* Injects an OCaml int into the coefficient ring. *)
let coef_of_int x = P.of_num (Num.Int x)

(* variable i: the monomial x_i with coefficient 1, over d variables *)
let gen d i =
  let m = Array.create (d+1) 0 in
  m.(i) <- 1;
  let m = set_deg m in
  [((coef_of_int 1),m)]
(* Additive inverse: negates every coefficient, keeping term order. *)
let oppP p =
  map (fun (b, m') -> (P.oppP b, m')) p
(* Multiplication by a scalar coefficient [a], keeping term order.
   Note: a zero product is not filtered out here. *)
let emultP a p =
  map (fun (b, m') -> (P.multP a b, m')) p
(* Polynomial product: distributes each term of [p] over [q] and sums
   the partial products with [plusP]. *)
let multP p q =
  fold_right (fun (a, m) acc -> plusP (mult_t_pol a m q) acc) p []
(* [p] raised to the power [n].  p^0 is the constant 1 over the same
   number of variables.  NOTE(review): assumes n >= 0 — a negative n
   would recurse forever; callers appear to satisfy this, confirm. *)
let puisP p n=
  match p with
    [] -> []
  |_ ->
      let d = nvar (snd (hd p)) in
      let rec puisP n =
	match n with
	  0 -> [coef1, Array.create (d+1) 0]
	| 1 -> p
	|_ -> multP p (puisP (n-1))
      in puisP n
(* Content of a polynomial: gcd of its coefficients.  Returns 1 for the
   zero polynomial; short-circuits as soon as a unit (1 or -1) is met. *)
let rec contentP p =
  match p with
  |[] -> coef1
  |[a,m] -> a
  |(a,m)::p1 ->
      if P.equal a coef1 || P.equal a coefm1
      then a
      else P.pgcdP a (contentP p1)

(* Gcd of the contents of a list of polynomials, with the same
   unit short-circuit. *)
let contentPlist lp =
  match lp with
  |[] -> coef1
  |p::l1 ->
      fold_left
	(fun r q ->
	  if P.equal r coef1 || P.equal r coefm1
	  then r
	  else P.pgcdP r (contentP q))
	(contentP p) l1
(***********************************************************************
Division of polynomials
*)
(* Gcd of two coefficients (delegates to the coefficient ring). *)
let pgcdpos a b = P.pgcdP a b

(* Dummy polynom record, used to pre-fill the [allpol] table. *)
let polynom0 = {pol = ref []; num = 0; sugar = 0}

(* Underlying poly of a polynom record. *)
let ppol p = !(p.pol)

(* Leading monomial.  Assumes the polynomial is non-zero. *)
let lm p = snd (hd (ppol p))

(* Registry of every polynom ever created, indexed by [num]. *)
let nallpol = ref 0

let allpol = ref (Array.create 1000 polynom0)

(* Wraps [p] with sugar [s] in a fresh polynom record, growing the
   [allpol] table (doubling-style append) when it is full. *)
let new_allpol p s =
  nallpol := !nallpol + 1;
  if !nallpol >= Array.length !allpol
  then
    allpol := Array.append !allpol (Array.create !nallpol polynom0);
  let p = {pol = ref p; num = !nallpol; sugar = s} in
  !allpol.(!nallpol)<- p;
  p
(* returns a polynomial of l whose head monomial divides m,
   or [polynom0] (empty poly) when none does *)
let rec selectdiv m l =
  match l with
    [] -> polynom0
  |q::r -> let m'= snd (hd (ppol q)) in
    match (div_mon_test m m') with
      true -> q
    |false -> selectdiv m r
(* One division step: a*p + b*m*q (eliminates the head of [p] against
   the head of [q] for suitable a, b, m). *)
let div_pol p q a b m =
(*  info ".";*)
  plusP (emultP a p) (mult_t_pol b m q)
(* Cache: monomial -> dividing polynomial, to avoid rescanning the
   basis in [selectdiv]. *)
let hmon = Hashtbl.create 1000

(* Enables the cache.  NOTE: shadows the top-level [use_hmon] flag. *)
let use_hmon = ref false

let find_hmon m =
  if !use_hmon
  then Hashtbl.find hmon m
  else raise Not_found

let add_hmon m q =
  if !use_hmon
  then Hashtbl.add hmon m q
  else ()

(* Coefficient division (delegates to the coefficient ring). *)
let div_coef a b = P.divP a b
(* remainder r of the division of p by polynomials of l, returns (c,r)
   where c is the coefficient for pseudo-division:
   c p = sum_i q_i p_i + r *)
(* Pseudo-division of [p] by the list [l]: returns (c, r) with
   c * p = sum_i q_i * p_i + r and no head monomial of [l] dividing the
   head of [r].  Tail monomials are reduced too iff
   [reduire_les_queues]. *)
let reduce2 p l =
  let l = if nouveaux_pol_en_tete then rev l else l in
  let rec reduce p =
    match p with
      [] -> (coef1,[])
    |t::p' ->
	let (a,m)=t in
	(* find a basis element whose head divides m (cached in hmon) *)
	let q = (try find_hmon m
	with Not_found ->
	  let q = selectdiv m l in
	  match (ppol q) with
	    t'::q' -> (add_hmon m q;
		       q)
	  |[] -> q) in
	match (ppol q) with
	  [] -> if reduire_les_queues
	  then
	    (* head irreducible: keep it (scaled by the pseudo-division
	       coefficient of the tail) and go on reducing the tail *)
	    let (c,r)=(reduce p') in
	    (c,((P.multP a c,m)::r))
	  else (coef1,p)
	|(b,m')::q' ->
	    (* eliminate the head: c*p' is combined with q scaled so the
	       leading terms cancel, then reduce the result *)
	    let c=(pgcdpos a b) in
	    let a'= (div_coef b c) in
	    let b'=(P.oppP (div_coef a c)) in
	    let (e,r)=reduce (div_pol p' q' a' b'
				(div_mon m m')) in
	    (P.multP a' e,r)
  in let (c,r) = reduce p in
  (c,r)
(* trace of divisions *)
(* list of initial polynomials *)
(* The initial polynomials against which traces are expressed. *)
let poldep = ref []
(* Their contents (coefficient gcds), kept in the same order. *)
let poldepcontent = ref []

(* coefficients of polynomials when written with initial polynomials *)
(* keyed by the (num, num) pair of the two polynom records *)
let coefpoldep = Hashtbl.create 51
(* coefficient of q in p = sum_i c_i*q_i *)
(* Coefficient of [q] in the decomposition of [p]; zero polynomial when
   absent (any lookup failure yields []). *)
let coefpoldep_find p q =
  try (Hashtbl.find coefpoldep (p.num,q.num))
  with _ -> []

let coefpoldep_remove p q =
  Hashtbl.remove coefpoldep (p.num,q.num)

let coefpoldep_set p q c =
  Hashtbl.add coefpoldep (p.num,q.num) c

(* Resets the trace state: [lp] are the initial polynomials (over [d]
   variables), each decomposed as 1 times itself. *)
let initcoefpoldep d lp =
  poldep:=lp;
  poldepcontent:= map (fun p -> contentP (ppol p)) lp;
  iter
    (fun p -> coefpoldep_set p p (polconst d (coef_of_int 1)))
    lp
(* keeps trace in coefpoldep
   divides without pseudodivisions: each elimination step divides by the
   head coefficient instead of cross-multiplying.  [lcp] gives the
   current decomposition of [p] over [!poldep]; the returned list is
   that decomposition updated with the division steps, paired with the
   remainder. *)
let reduce2_trace p l lcp =
  let l = if nouveaux_pol_en_tete then rev l else l in
  (* returns (lq,r), where r = p + sum(lq) *)
  let rec reduce p =
    match p with
      [] -> ([],[])
    |t::p' ->
	let (a,m)=t in
	(* find a basis element whose head divides m (cached in hmon) *)
	let q =
	  (try find_hmon m
	  with Not_found ->
	    let q = selectdiv m l in
	    match (ppol q) with
	      t'::q' -> (add_hmon m q;
			 q)
	    |[] -> q) in
	match (ppol q) with
	  [] ->
	    if reduire_les_queues
	    then
	      let (lq,r)=(reduce p') in
	      (lq,((a,m)::r))
	    else ([],p)
	|(b,m')::q' ->
	    (* cancel the head of p against q: record the step (b',m'',q) *)
	    let b'=(P.oppP (div_coef a b)) in
	    let m''= div_mon m m' in
	    let p1=plusP p' (mult_t_pol b' m'' q') in
	    let (lq,r)=reduce p1 in
	    ((b',m'',q)::lq, r)
  in let (lq,r) = reduce p in
  (*info "reduce2_trace:\n";
     iter
     (fun (a,m,s) ->
     let x = mult_t_pol a m s in
     info ((stringP x)^"\n"))
     lq;
     info "ok\n";*)
  (* fold the recorded steps into the decomposition of p over !poldep *)
  (map2
     (fun c0 q ->
       let c =
	 fold_left
	   (fun x (a,m,s) ->
	     if equal (ppol s) (ppol q)
	     then
	       plusP x (mult_t_pol a m (polconst (nvar m) (coef_of_int 1)))
	     else x)
	   c0
	   lq in
       c)
     lcp
     !poldep,
   r)
let homogeneous = ref false
let pol_courant = ref polynom0
(***********************************************************************
Completion
*)
let sugar_flag = ref true
let compute_sugar p =
fold_left (fun s (a,m) -> max s m.(0)) 0 p
let mk_polynom p =
new_allpol p (compute_sugar p)
let spol ps qs=
let p = ppol ps in
let q = ppol qs in
let m = snd (hd p) in
let m'= snd (hd q) in
let a = fst (hd p) in
let b = fst (hd q) in
let p'= tl p in
let q'= tl q in
let c = (pgcdpos a b) in
let m''=(ppcm_mon m m') in
let m1 = div_mon m'' m in
let m2 = div_mon m'' m' in
let fsp p' q' =
plusP
(mult_t_pol
(div_coef b c)
m1 p')
(mult_t_pol
(P.oppP (div_coef a c))
m2 q') in
let sp = fsp p' q' in
let sps =
new_allpol
sp
(max (m1.(0) + ps.sugar) (m2.(0) + qs.sugar)) in
coefpoldep_set sps ps (fsp (polconst (nvar m) (coef_of_int 1)) []);
coefpoldep_set sps qs (fsp [] (polconst (nvar m) (coef_of_int 1)));
sps
let etrangers p p'=
let m = snd (hd p) in
let m'= snd (hd p') in
let d = nvar m in
let res=ref true in
let i=ref 1 in
while (!res) && (!i<=d) do
res:= (m.(!i) = 0) || (m'.(!i)=0);
i:=!i+1;
done;
!res
teste if head monomial of p '' divides lcm of lhead monomials of p and p '
let div_ppcm p p' p'' =
let m = snd (hd p) in
let m'= snd (hd p') in
let m''= snd (hd p'') in
let d = nvar m in
let res=ref true in
let i=ref 1 in
while (!res) && (!i<=d) do
res:= ((max m.(!i) m'.(!i)) >= m''.(!i));
i:=!i+1;
done;
!res
code from extraction of program
type 'poly cpRes =
Keep of ('poly list)
| DontKeep of ('poly list)
let list_rec f0 f1 =
let rec f2 = function
[] -> f0
| a0::l0 -> f1 a0 l0 (f2 l0)
in f2
let addRes i = function
Keep h'0 -> Keep (i::h'0)
| DontKeep h'0 -> DontKeep (i::h'0)
let slice i a q =
list_rec
(match etrangers (ppol i) (ppol a) with
true -> DontKeep []
| false -> Keep [])
(fun b q1 rec_ren ->
match div_ppcm (ppol i) (ppol a) (ppol b) with
true -> DontKeep (b::q1)
| false ->
(match div_ppcm (ppol i) (ppol b) (ppol a) with
true -> rec_ren
| false -> addRes b rec_ren)) q
(* sugar strategy *)
oblige en queue sinon le certificat deconne
let addSsugar x l =
if !sugar_flag
then
let sx = x.sugar in
let rec insere l =
match l with
| [] -> [x]
| y::l1 ->
if sx <= y.sugar
then x::l
else y::(insere l1)
in insere l
else addS x l
ajoute les spolynomes de i avec la liste de polynomes aP ,
a la liste q
a la liste q *)
let genPcPf i aP q =
(let rec genPc aP0 =
match aP0 with
[] -> (fun r -> r)
| a::l1 ->
(fun l ->
(match slice i a l1 with
Keep l2 -> addSsugar (spol i a) (genPc l2 l)
| DontKeep l2 -> genPc l2 l))
in genPc aP) q
let genOCPf h' =
list_rec [] (fun a l rec_ren ->
genPcPf a l rec_ren) h'
(***********************************************************************
critical pairs/s-polynomials
*)
let ordcpair ((i1,j1),m1) ((i2,j2),m2) =
let s1 = ( max
( ! allpol.(i1).sugar )
- ( snd ( hd ( ppol ! ) )
( ! + m1.(0 )
- ( snd ( hd ( ppol ! allpol.(j1)))).(0 ) ) ) in
let s2 = ( max
( ! allpol.(i2).sugar + m2.(0 )
- ( snd ( hd ( ppol ! allpol.(i2)))).(0 ) )
( ! allpol.(j2).sugar + m2.(0 )
- ( snd ( hd ( ppol ! ) ) ) in
match compare s1 s2 with
| 1 - > 1
|(-1 ) - > -1
|0 - > compare_mon
(!allpol.(i1).sugar + m1.(0)
- (snd (hd (ppol !allpol.(i1)))).(0))
(!allpol.(j1).sugar + m1.(0)
- (snd (hd (ppol !allpol.(j1)))).(0))) in
let s2 = (max
(!allpol.(i2).sugar + m2.(0)
- (snd (hd (ppol !allpol.(i2)))).(0))
(!allpol.(j2).sugar + m2.(0)
- (snd (hd (ppol !allpol.(j2)))).(0))) in
match compare s1 s2 with
| 1 -> 1
|(-1) -> -1
|0 -> compare_mon m1 m2*)
compare_mon m1 m2
let sortcpairs lcp =
sort ordcpair lcp
let mergecpairs l1 l2 =
merge ordcpair l1 l2
let ord i j =
if i<j then (i,j) else (j,i)
let cpair p q =
if etrangers (ppol p) (ppol q)
then []
else [(ord p.num q.num,
ppcm_mon (lm p) (lm q))]
let cpairs1 p lq =
sortcpairs (fold_left (fun r q -> r @ (cpair p q)) [] lq)
let cpairs lp =
let rec aux l =
match l with
[]|[_] -> []
|p::l1 -> mergecpairs (cpairs1 p l1) (aux l1)
in aux lp
let critere2 ((i,j),m) lp lcp =
exists
(fun h ->
h.num <> i && h.num <> j
&& (div_mon_test m (lm h))
&& (let c1 = ord i h.num in
not (exists (fun (c,_) -> c1 = c) lcp))
&& (let c1 = ord j h.num in
not (exists (fun (c,_) -> c1 = c) lcp)))
lp
let critere3 ((i,j),m) lp lcp =
exists
(fun h ->
h.num <> i && h.num <> j
&& (div_mon_test m (lm h))
&& (h.num < j
|| not (m = ppcm_mon
(lm (!allpol.(i)))
(lm h)))
&& (h.num < i
|| not (m = ppcm_mon
(lm (!allpol.(j)))
(lm h))))
lp
let add_cpairs p lp lcp =
mergecpairs (cpairs1 p lp) lcp
let step = ref 0
let infobuch p q =
if !step = 0
then (info ("[" ^ (string_of_int (length p))
^ "," ^ (string_of_int (length q))
^ "]"))
(* in lp new polynomials are at the end *)
let coef_courant = ref coef1
type certificate =
{ coef : coef; power : int;
gb_comb : poly list list; last_comb : poly list }
let test_dans_ideal p lp lp0 =
let (c,r) = reduce2 (ppol !pol_courant) lp in
info ("remainder: "^(stringPcut r)^"\n");
coef_courant:= P.multP !coef_courant c;
pol_courant:= mk_polynom r;
if r=[]
then (info "polynomial reduced to 0\n";
let lcp = map (fun q -> []) !poldep in
let c = !coef_courant in
let (lcq,r) = reduce2_trace (emultP c p) lp lcp in
info "r ok\n";
info ("r: "^(stringP r)^"\n");
let res=ref (emultP c p) in
iter2
(fun cq q -> res:=plusP (!res) (multP cq (ppol q));
)
lcq !poldep;
info ("verif sum: "^(stringP (!res))^"\n");
info ("coefficient: "^(stringP (polconst 1 c))^"\n");
let rec aux lp =
match lp with
|[] -> []
|p::lp ->
(map
(fun q -> coefpoldep_find p q)
lp)::(aux lp)
in
let coefficient_multiplicateur = c in
let liste_polynomes_de_depart = rev lp0 in
let polynome_a_tester = p in
let liste_des_coefficients_intermediaires =
(let lci = rev (aux (rev lp)) in
let lci = ref lci (* (map rev lci) *) in
iter (fun x -> lci := tl (!lci)) lp0;
!lci) in
let liste_des_coefficients =
map
(fun cq -> emultP (coef_of_int (-1)) cq)
(rev lcq) in
(liste_polynomes_de_depart,
polynome_a_tester,
{coef = coefficient_multiplicateur;
power = 1;
gb_comb = liste_des_coefficients_intermediaires;
last_comb = liste_des_coefficients})
)
info " polynomial not reduced to 0\n " ;
info ( " \nremainder : " ^(stringPcut r)^"\n " ) ;
info ("\nremainder: "^(stringPcut r)^"\n");*)
raise NotInIdeal)
let divide_rem_with_critical_pair = ref false
let list_diff l x =
filter (fun y -> y <> x) l
let deg_hom p =
match p with
| [] -> -1
| (a,m)::_ -> m.(0)
let pbuchf pq p lp0=
info "computation of the Groebner basis\n";
step:=0;
Hashtbl.clear hmon;
let rec pbuchf (lp, lpc) =
infobuch lp lpc;
step:=(!step+1)mod 10 ;
match lpc with
[] ->
info ( " List of polynomials:\n"^(fold_left ( fun r p - > r^(stringP p)^"\n " ) " " lp ) ) ;
info " --------------------\n " ;
info "--------------------\n";*)
test_dans_ideal (ppol p) lp lp0
| ((i,j),m) :: lpc2 ->
(* info "choosen pair\n";*)
if critere3 ((i,j),m) lp lpc2
then (info "c"; pbuchf (lp, lpc2))
else
let a = spol !allpol.(i) !allpol.(j) in
if !homogeneous && (ppol a)<>[] && deg_hom (ppol a)
> deg_hom (ppol !pol_courant)
then (info "h"; pbuchf (lp, lpc2))
else
(* let sa = a.sugar in*)
let (ca,a0)= reduce2 (ppol a) lp in
match a0 with
[] -> info "0";pbuchf (lp, lpc2)
| _ ->
(* info "pair reduced\n";*)
a.pol := emultP ca (ppol a);
let (lca,a0) = reduce2_trace (ppol a) lp
(map (fun q -> emultP ca (coefpoldep_find a q))
!poldep) in
(* info "paire re-reduced";*)
a.pol := a0;
let a0 = new_allpol a0 sa in
iter2 (fun c q ->
coefpoldep_remove a q;
coefpoldep_set a q c) lca !poldep;
let a0 = a in
info ("\nnew polynomials: "^(stringPcut (ppol a0))^"\n");
let ct = coef1 (* contentP a0 *) in
(*info ("content: "^(string_of_coef ct)^"\n");*)
poldep:=addS a0 lp;
poldepcontent:=addS ct (!poldepcontent);
try test_dans_ideal (ppol p) (addS a0 lp) lp0
with NotInIdeal ->
let newlpc = add_cpairs a0 lp lpc2 in
pbuchf (((addS a0 lp), newlpc))
in pbuchf pq
let is_homogeneous p =
match p with
| [] -> true
| (a,m)::p1 -> let d = m.(0) in
for_all (fun (b,m') -> m'.(0)=d) p1
returns
c
lp = [ pn; ... ;p1 ]
p
lci = [ [ a(n+1,n); ... ;a(n+1,1 ) ] ;
[ a(n+2,n+1); ... ) ] ;
...
[ a(n+m , n+m-1); ... ;a(n+m,1 ) ] ]
lc = [ qn+m ; ... q1 ]
such that
c*p = sum qi*pi
where pn+k = a(n+k , n+k-1)*pn+k-1 + ... + a(n+k,1 ) * p1
c
lp = [pn;...;p1]
p
lci = [[a(n+1,n);...;a(n+1,1)];
[a(n+2,n+1);...;a(n+2,1)];
...
[a(n+m,n+m-1);...;a(n+m,1)]]
lc = [qn+m; ... q1]
such that
c*p = sum qi*pi
where pn+k = a(n+k,n+k-1)*pn+k-1 + ... + a(n+k,1)* p1
*)
let in_ideal d lp p =
Hashtbl.clear hmon;
Hashtbl.clear coefpoldep;
nallpol := 0;
allpol := Array.create 1000 polynom0;
homogeneous := for_all is_homogeneous (p::lp);
if !homogeneous then info "homogeneous polynomials\n";
info ("p: "^(stringPcut p)^"\n");
info ("lp:\n"^(fold_left (fun r p -> r^(stringPcut p)^"\n") "" lp));
(*info ("p: "^(stringP p)^"\n");
info ("lp:\n"^(fold_left (fun r p -> r^(stringP p)^"\n") "" lp));*)
let lp = map mk_polynom lp in
let p = mk_polynom p in
initcoefpoldep d lp;
coef_courant:=coef1;
pol_courant:=p;
let (lp1,p1,cert) =
try test_dans_ideal (ppol p) lp lp
with NotInIdeal -> pbuchf (lp, (cpairs lp)) p lp in
info "computed\n";
(map ppol lp1, p1, cert)
(* *)
end
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/CoqIDE/coq-8.2pl2/plugins/nsatz/ideal.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Monomials
Polynomials
**********************************************************************
Global options
division of tail monomials
**********************************************************************
Functor
degre lexicographique inverse
meme degre total
m' divides m
*********************************************************************
Polynomials
list of (coefficient, monomial) decreasing order
A pretty printer for polynomials, with Maple-like syntax.
Polynomesrec.nsP1:=20;
returns a constant polynom ial with d variables
multiplication by (a,monomial)
variable i
multiplication by a coefficient
**********************************************************************
Division of polynomials
returns a polynomial of l whose head monomial divides m, else []
info ".";
trace of divisions
list of initial polynomials
coefficients of polynomials when written with initial polynomials
keeps trace in coefpoldep
divides without pseudodivisions
rend (lq,r), ou r = p + sum(lq)
info "reduce2_trace:\n";
iter
(fun (a,m,s) ->
let x = mult_t_pol a m s in
info ((stringP x)^"\n"))
lq;
info "ok\n";
**********************************************************************
Completion
sugar strategy
**********************************************************************
critical pairs/s-polynomials
in lp new polynomials are at the end
(map rev lci)
info "choosen pair\n";
let sa = a.sugar in
info "pair reduced\n";
info "paire re-reduced";
contentP a0
info ("content: "^(string_of_coef ct)^"\n");
info ("p: "^(stringP p)^"\n");
info ("lp:\n"^(fold_left (fun r p -> r^(stringP p)^"\n") "" lp));
| v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Nullstellensatz with Groebner basis computation
We use a sparse representation for polynomials :
a monomial is an array of exponents ( one for each variable )
with its degree in head
a polynomial is a sorted list of ( coefficient , monomial )
We use a sparse representation for polynomials:
a monomial is an array of exponents (one for each variable)
with its degree in head
a polynomial is a sorted list of (coefficient, monomial)
*)
open Utile
open List
exception NotInIdeal
module type S = sig
type mon = int array
val mult_mon : mon -> mon -> mon
val deg : mon -> int
val compare_mon : mon -> mon -> int
val div_mon : mon -> mon -> mon
val div_mon_test : mon -> mon -> bool
val ppcm_mon : mon -> mon -> mon
type deg = int
type coef
type poly
type polynom
val repr : poly -> (coef * mon) list
val polconst : coef -> poly
val zeroP : poly
val gen : int -> poly
val equal : poly -> poly -> bool
val name_var : string list ref
val getvar : string list -> int -> string
val lstringP : poly list -> string
val printP : poly -> unit
val lprintP : poly list -> unit
val div_pol_coef : poly -> coef -> poly
val plusP : poly -> poly -> poly
val mult_t_pol : coef -> mon -> poly -> poly
val selectdiv : mon -> poly list -> poly
val oppP : poly -> poly
val emultP : coef -> poly -> poly
val multP : poly -> poly -> poly
val puisP : poly -> int -> poly
val contentP : poly -> coef
val contentPlist : poly list -> coef
val pgcdpos : coef -> coef -> coef
val div_pol : poly -> poly -> coef -> coef -> mon -> poly
val reduce2 : poly -> poly list -> coef * poly
val poldepcontent : coef list ref
val coefpoldep_find : poly -> poly -> poly
val coefpoldep_set : poly -> poly -> poly -> unit
val initcoefpoldep : poly list -> unit
val reduce2_trace : poly -> poly list -> poly list -> poly list * poly
val spol : poly -> poly -> poly
val etrangers : poly -> poly -> bool
val div_ppcm : poly -> poly -> poly -> bool
val genPcPf : poly -> poly list -> poly list -> poly list
val genOCPf : poly list -> poly list
val is_homogeneous : poly -> bool
type certificate =
{ coef : coef; power : int;
gb_comb : poly list list; last_comb : poly list }
val test_dans_ideal : poly -> poly list -> poly list ->
poly list * poly * certificate
val in_ideal : deg -> poly list -> poly -> poly list * poly * certificate
end
let lexico = ref false
let use_hmon = ref false
let reduire_les_queues = false
divide first with new polynomials
let nouveaux_pol_en_tete = false
module Make (P:Polynom.S) = struct
type coef = P.t
let coef0 = P.of_num (Num.Int 0)
let coef1 = P.of_num (Num.Int 1)
let coefm1 = P.of_num (Num.Int (-1))
let string_of_coef c = "["^(P.to_string c)^"]"
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Monomials
array of integers , first is the degree
Monomials
array of integers, first is the degree
*)
type mon = int array
type deg = int
type poly = (coef * mon) list
type polynom =
{pol : poly ref;
num : int;
sugar : int}
let nvar m = Array.length m - 1
let deg m = m.(0)
let mult_mon m m' =
let d = nvar m in
let m'' = Array.create (d+1) 0 in
for i=0 to d do
m''.(i)<- (m.(i)+m'.(i));
done;
m''
let compare_mon m m' =
let d = nvar m in
if !lexico
then (
Comparaison de monomes avec ordre du degre lexicographique = on commence par regarder la 1ere variable
let res=ref 0 in
1 si lexico pur 0 si degre
while (!res=0) && (!i<=d) do
res:= (compare m.(!i) m'.(!i));
i:=!i+1;
done;
!res)
else (
match compare m.(0) m'.(0) with
let res=ref 0 in
let i=ref d in
while (!res=0) && (!i>=1) do
res:= - (compare m.(!i) m'.(!i));
i:=!i-1;
done;
!res
| x -> x)
let div_mon m m' =
let d = nvar m in
let m'' = Array.create (d+1) 0 in
for i=0 to d do
m''.(i)<- (m.(i)-m'.(i));
done;
m''
let div_pol_coef p c =
List.map (fun (a,m) -> (P.divP a c,m)) p
let div_mon_test m m' =
let d = nvar m in
let res=ref true in
degre total , i = ref 1
while (!res) && (!i<=d) do
res:= (m.(!i) >= m'.(!i));
i:=succ !i;
done;
!res
let set_deg m =
let d = nvar m in
m.(0)<-0;
for i=1 to d do
m.(0)<- m.(i)+m.(0);
done;
m
lcm
let ppcm_mon m m' =
let d = nvar m in
let m'' = Array.create (d+1) 0 in
for i=1 to d do
m''.(i)<- (max m.(i) m'.(i));
done;
set_deg m''
let repr p = p
let equal =
Util.list_for_all2eq
(fun (c1,m1) (c2,m2) -> P.equal c1 c2 && m1=m2)
let hash p =
let c = map fst p in
let m = map snd p in
fold_left (fun h p -> h * 17 + P.hash p) (Hashtbl.hash m) c
module Hashpol = Hashtbl.Make(
struct
type t = poly
let equal = equal
let hash = hash
end)
open Format
let getvar lv i =
try (nth lv i)
with _ -> (fold_left (fun r x -> r^" "^x) "lv= " lv)
^" i="^(string_of_int i)
let string_of_pol zeroP hdP tlP coefterm monterm string_of_coef
dimmon string_of_exp lvar p =
let rec string_of_mon m coefone =
let s=ref [] in
for i=1 to (dimmon m) do
(match (string_of_exp m i) with
"0" -> ()
| "1" -> s:= (!s) @ [(getvar !lvar (i-1))]
| e -> s:= (!s) @ [((getvar !lvar (i-1)) ^ "^" ^ e)]);
done;
(match !s with
[] -> if coefone
then "1"
else ""
| l -> if coefone
then (String.concat "*" l)
else ( "*" ^
(String.concat "*" l)))
and string_of_term t start = let a = coefterm t and m = monterm t in
match (string_of_coef a) with
"0" -> ""
| "1" ->(match start with
true -> string_of_mon m true
|false -> ( "+ "^
(string_of_mon m true)))
| "-1" ->( "-" ^" "^(string_of_mon m true))
| c -> if (String.get c 0)='-'
then ( "- "^
(String.sub c 1
((String.length c)-1))^
(string_of_mon m false))
else (match start with
true -> ( c^(string_of_mon m false))
|false -> ( "+ "^
c^(string_of_mon m false)))
and stringP p start =
if (zeroP p)
then (if start
then ("0")
else "")
else ((string_of_term (hdP p) start)^
" "^
(stringP (tlP p) false))
in
(stringP p true)
let print_pol zeroP hdP tlP coefterm monterm string_of_coef
dimmon string_of_exp lvar p =
let rec print_mon m coefone =
let s=ref [] in
for i=1 to (dimmon m) do
(match (string_of_exp m i) with
"0" -> ()
| "1" -> s:= (!s) @ [(getvar !lvar (i-1))]
| e -> s:= (!s) @ [((getvar !lvar (i-1)) ^ "^" ^ e)]);
done;
(match !s with
[] -> if coefone
then print_string "1"
else ()
| l -> if coefone
then print_string (String.concat "*" l)
else (print_string "*";
print_string (String.concat "*" l)))
and print_term t start = let a = coefterm t and m = monterm t in
match (string_of_coef a) with
"0" -> ()
| "1" ->(match start with
true -> print_mon m true
|false -> (print_string "+ ";
print_mon m true))
| "-1" ->(print_string "-";print_space();print_mon m true)
| c -> if (String.get c 0)='-'
then (print_string "- ";
print_string (String.sub c 1
((String.length c)-1));
print_mon m false)
else (match start with
true -> (print_string c;print_mon m false)
|false -> (print_string "+ ";
print_string c;print_mon m false))
and printP p start =
if (zeroP p)
then (if start
then print_string("0")
else ())
else (print_term (hdP p) start;
if start then open_hovbox 0;
print_space();
print_cut();
printP (tlP p) false)
in open_hovbox 3;
printP p true;
print_flush()
let name_var= ref []
let stringP p =
string_of_pol
(fun p -> match p with [] -> true | _ -> false)
(fun p -> match p with (t::p) -> t |_ -> failwith "print_pol dans dansideal")
(fun p -> match p with (t::p) -> p |_ -> failwith "print_pol dans dansideal")
(fun (a,m) -> a)
(fun (a,m) -> m)
string_of_coef
(fun m -> (Array.length m)-1)
(fun m i -> (string_of_int (m.(i))))
name_var
p
let nsP2 = ref max_int
let stringPcut p =
nsP2:=10;
let res =
if (length p)> !nsP2
then (stringP [hd p])^" + "^(string_of_int (length p))^" termes"
else stringP p in
Polynomesrec.nsP1:= max_int ;
nsP2:= max_int;
res
let rec lstringP l =
match l with
[] -> ""
|p::l -> (stringP p)^("\n")^(lstringP l)
let printP = print_pol
(fun p -> match p with [] -> true | _ -> false)
(fun p -> match p with (t::p) -> t |_ -> failwith "print_pol dans dansideal")
(fun p -> match p with (t::p) -> p |_ -> failwith "print_pol dans dansideal")
(fun (a,m) -> a)
(fun (a,m) -> m)
string_of_coef
(fun m -> (Array.length m)-1)
(fun m i -> (string_of_int (m.(i))))
name_var
let rec lprintP l =
match l with
[] -> ()
|p::l -> printP p;print_string "\n"; lprintP l
Operations
let zeroP = []
let polconst d c =
let m = Array.create (d+1) 0 in
let m = set_deg m in
[(c,m)]
let plusP p q =
let rec plusP p q =
match p with
[] -> q
|t::p' ->
match q with
[] -> p
|t'::q' ->
match compare_mon (snd t) (snd t') with
1 -> t::(plusP p' q)
|(-1) -> t'::(plusP p q')
|_ -> let c=P.plusP (fst t) (fst t') in
match P.equal c coef0 with
true -> (plusP p' q')
|false -> (c,(snd t))::(plusP p' q')
in plusP p q
let mult_t_pol a m p =
let rec mult_t_pol p =
match p with
[] -> []
|(b,m')::p -> ((P.multP a b),(mult_mon m m'))::(mult_t_pol p)
in mult_t_pol p
let coef_of_int x = P.of_num (Num.Int x)
let gen d i =
let m = Array.create (d+1) 0 in
m.(i) <- 1;
let m = set_deg m in
[((coef_of_int 1),m)]
let oppP p =
let rec oppP p =
match p with
[] -> []
|(b,m')::p -> ((P.oppP b),m')::(oppP p)
in oppP p
let emultP a p =
let rec emultP p =
match p with
[] -> []
|(b,m')::p -> ((P.multP a b),m')::(emultP p)
in emultP p
let multP p q =
let rec aux p =
match p with
[] -> []
|(a,m)::p' -> plusP (mult_t_pol a m q) (aux p')
in aux p
let puisP p n=
match p with
[] -> []
|_ ->
let d = nvar (snd (hd p)) in
let rec puisP n =
match n with
0 -> [coef1, Array.create (d+1) 0]
| 1 -> p
|_ -> multP p (puisP (n-1))
in puisP n
let rec contentP p =
match p with
|[] -> coef1
|[a,m] -> a
|(a,m)::p1 ->
if P.equal a coef1 || P.equal a coefm1
then a
else P.pgcdP a (contentP p1)
let contentPlist lp =
match lp with
|[] -> coef1
|p::l1 ->
fold_left
(fun r q ->
if P.equal r coef1 || P.equal r coefm1
then r
else P.pgcdP r (contentP q))
(contentP p) l1
let pgcdpos a b = P.pgcdP a b
let polynom0 = {pol = ref []; num = 0; sugar = 0}
let ppol p = !(p.pol)
let lm p = snd (hd (ppol p))
let nallpol = ref 0
let allpol = ref (Array.create 1000 polynom0)
let new_allpol p s =
nallpol := !nallpol + 1;
if !nallpol >= Array.length !allpol
then
allpol := Array.append !allpol (Array.create !nallpol polynom0);
let p = {pol = ref p; num = !nallpol; sugar = s} in
!allpol.(!nallpol)<- p;
p
let rec selectdiv m l =
match l with
[] -> polynom0
|q::r -> let m'= snd (hd (ppol q)) in
match (div_mon_test m m') with
true -> q
|false -> selectdiv m r
let div_pol p q a b m =
plusP (emultP a p) (mult_t_pol b m q)
let hmon = Hashtbl.create 1000
let use_hmon = ref false
let find_hmon m =
if !use_hmon
then Hashtbl.find hmon m
else raise Not_found
let add_hmon m q =
if !use_hmon
then Hashtbl.add hmon m q
else ()
let div_coef a b = P.divP a b
remainder r of the division of p by polynomials of l , returns ( c , r ) where c is the coefficient for pseudo - division : c p = sum_i q_i p_i + r
let reduce2 p l =
let l = if nouveaux_pol_en_tete then rev l else l in
let rec reduce p =
match p with
[] -> (coef1,[])
|t::p' ->
let (a,m)=t in
let q = (try find_hmon m
with Not_found ->
let q = selectdiv m l in
match (ppol q) with
t'::q' -> (add_hmon m q;
q)
|[] -> q) in
match (ppol q) with
[] -> if reduire_les_queues
then
let (c,r)=(reduce p') in
(c,((P.multP a c,m)::r))
else (coef1,p)
|(b,m')::q' ->
let c=(pgcdpos a b) in
let a'= (div_coef b c) in
let b'=(P.oppP (div_coef a c)) in
let (e,r)=reduce (div_pol p' q' a' b'
(div_mon m m')) in
(P.multP a' e,r)
in let (c,r) = reduce p in
(c,r)
let poldep = ref []
let poldepcontent = ref []
let coefpoldep = Hashtbl.create 51
of q in p = sum_i c_i*q_i
let coefpoldep_find p q =
try (Hashtbl.find coefpoldep (p.num,q.num))
with _ -> []
let coefpoldep_remove p q =
Hashtbl.remove coefpoldep (p.num,q.num)
let coefpoldep_set p q c =
Hashtbl.add coefpoldep (p.num,q.num) c
let initcoefpoldep d lp =
poldep:=lp;
poldepcontent:= map (fun p -> contentP (ppol p)) lp;
iter
(fun p -> coefpoldep_set p p (polconst d (coef_of_int 1)))
lp
let reduce2_trace p l lcp =
let l = if nouveaux_pol_en_tete then rev l else l in
let rec reduce p =
match p with
[] -> ([],[])
|t::p' ->
let (a,m)=t in
let q =
(try find_hmon m
with Not_found ->
let q = selectdiv m l in
match (ppol q) with
t'::q' -> (add_hmon m q;
q)
|[] -> q) in
match (ppol q) with
[] ->
if reduire_les_queues
then
let (lq,r)=(reduce p') in
(lq,((a,m)::r))
else ([],p)
|(b,m')::q' ->
let b'=(P.oppP (div_coef a b)) in
let m''= div_mon m m' in
let p1=plusP p' (mult_t_pol b' m'' q') in
let (lq,r)=reduce p1 in
((b',m'',q)::lq, r)
in let (lq,r) = reduce p in
(map2
(fun c0 q ->
let c =
fold_left
(fun x (a,m,s) ->
if equal (ppol s) (ppol q)
then
plusP x (mult_t_pol a m (polconst (nvar m) (coef_of_int 1)))
else x)
c0
lq in
c)
lcp
!poldep,
r)
let homogeneous = ref false
let pol_courant = ref polynom0
let sugar_flag = ref true
let compute_sugar p =
fold_left (fun s (a,m) -> max s m.(0)) 0 p
let mk_polynom p =
new_allpol p (compute_sugar p)
let spol ps qs=
let p = ppol ps in
let q = ppol qs in
let m = snd (hd p) in
let m'= snd (hd q) in
let a = fst (hd p) in
let b = fst (hd q) in
let p'= tl p in
let q'= tl q in
let c = (pgcdpos a b) in
let m''=(ppcm_mon m m') in
let m1 = div_mon m'' m in
let m2 = div_mon m'' m' in
let fsp p' q' =
plusP
(mult_t_pol
(div_coef b c)
m1 p')
(mult_t_pol
(P.oppP (div_coef a c))
m2 q') in
let sp = fsp p' q' in
let sps =
new_allpol
sp
(max (m1.(0) + ps.sugar) (m2.(0) + qs.sugar)) in
coefpoldep_set sps ps (fsp (polconst (nvar m) (coef_of_int 1)) []);
coefpoldep_set sps qs (fsp [] (polconst (nvar m) (coef_of_int 1)));
sps
let etrangers p p'=
let m = snd (hd p) in
let m'= snd (hd p') in
let d = nvar m in
let res=ref true in
let i=ref 1 in
while (!res) && (!i<=d) do
res:= (m.(!i) = 0) || (m'.(!i)=0);
i:=!i+1;
done;
!res
teste if head monomial of p '' divides lcm of lhead monomials of p and p '
let div_ppcm p p' p'' =
let m = snd (hd p) in
let m'= snd (hd p') in
let m''= snd (hd p'') in
let d = nvar m in
let res=ref true in
let i=ref 1 in
while (!res) && (!i<=d) do
res:= ((max m.(!i) m'.(!i)) >= m''.(!i));
i:=!i+1;
done;
!res
code from extraction of program
type 'poly cpRes =
Keep of ('poly list)
| DontKeep of ('poly list)
let list_rec f0 f1 =
let rec f2 = function
[] -> f0
| a0::l0 -> f1 a0 l0 (f2 l0)
in f2
let addRes i = function
Keep h'0 -> Keep (i::h'0)
| DontKeep h'0 -> DontKeep (i::h'0)
let slice i a q =
list_rec
(match etrangers (ppol i) (ppol a) with
true -> DontKeep []
| false -> Keep [])
(fun b q1 rec_ren ->
match div_ppcm (ppol i) (ppol a) (ppol b) with
true -> DontKeep (b::q1)
| false ->
(match div_ppcm (ppol i) (ppol b) (ppol a) with
true -> rec_ren
| false -> addRes b rec_ren)) q
oblige en queue sinon le certificat deconne
let addSsugar x l =
if !sugar_flag
then
let sx = x.sugar in
let rec insere l =
match l with
| [] -> [x]
| y::l1 ->
if sx <= y.sugar
then x::l
else y::(insere l1)
in insere l
else addS x l
ajoute les spolynomes de i avec la liste de polynomes aP ,
a la liste q
a la liste q *)
let genPcPf i aP q =
(let rec genPc aP0 =
match aP0 with
[] -> (fun r -> r)
| a::l1 ->
(fun l ->
(match slice i a l1 with
Keep l2 -> addSsugar (spol i a) (genPc l2 l)
| DontKeep l2 -> genPc l2 l))
in genPc aP) q
let genOCPf h' =
list_rec [] (fun a l rec_ren ->
genPcPf a l rec_ren) h'
let ordcpair ((i1,j1),m1) ((i2,j2),m2) =
let s1 = ( max
( ! allpol.(i1).sugar )
- ( snd ( hd ( ppol ! ) )
( ! + m1.(0 )
- ( snd ( hd ( ppol ! allpol.(j1)))).(0 ) ) ) in
let s2 = ( max
( ! allpol.(i2).sugar + m2.(0 )
- ( snd ( hd ( ppol ! allpol.(i2)))).(0 ) )
( ! allpol.(j2).sugar + m2.(0 )
- ( snd ( hd ( ppol ! ) ) ) in
match compare s1 s2 with
| 1 - > 1
|(-1 ) - > -1
|0 - > compare_mon
(!allpol.(i1).sugar + m1.(0)
- (snd (hd (ppol !allpol.(i1)))).(0))
(!allpol.(j1).sugar + m1.(0)
- (snd (hd (ppol !allpol.(j1)))).(0))) in
let s2 = (max
(!allpol.(i2).sugar + m2.(0)
- (snd (hd (ppol !allpol.(i2)))).(0))
(!allpol.(j2).sugar + m2.(0)
- (snd (hd (ppol !allpol.(j2)))).(0))) in
match compare s1 s2 with
| 1 -> 1
|(-1) -> -1
|0 -> compare_mon m1 m2*)
compare_mon m1 m2
let sortcpairs lcp =
sort ordcpair lcp
let mergecpairs l1 l2 =
merge ordcpair l1 l2
let ord i j =
if i<j then (i,j) else (j,i)
let cpair p q =
if etrangers (ppol p) (ppol q)
then []
else [(ord p.num q.num,
ppcm_mon (lm p) (lm q))]
let cpairs1 p lq =
sortcpairs (fold_left (fun r q -> r @ (cpair p q)) [] lq)
let cpairs lp =
let rec aux l =
match l with
[]|[_] -> []
|p::l1 -> mergecpairs (cpairs1 p l1) (aux l1)
in aux lp
let critere2 ((i,j),m) lp lcp =
exists
(fun h ->
h.num <> i && h.num <> j
&& (div_mon_test m (lm h))
&& (let c1 = ord i h.num in
not (exists (fun (c,_) -> c1 = c) lcp))
&& (let c1 = ord j h.num in
not (exists (fun (c,_) -> c1 = c) lcp)))
lp
let critere3 ((i,j),m) lp lcp =
exists
(fun h ->
h.num <> i && h.num <> j
&& (div_mon_test m (lm h))
&& (h.num < j
|| not (m = ppcm_mon
(lm (!allpol.(i)))
(lm h)))
&& (h.num < i
|| not (m = ppcm_mon
(lm (!allpol.(j)))
(lm h))))
lp
let add_cpairs p lp lcp =
mergecpairs (cpairs1 p lp) lcp
let step = ref 0
let infobuch p q =
if !step = 0
then (info ("[" ^ (string_of_int (length p))
^ "," ^ (string_of_int (length q))
^ "]"))
let coef_courant = ref coef1
type certificate =
{ coef : coef; power : int;
gb_comb : poly list list; last_comb : poly list }
let test_dans_ideal p lp lp0 =
let (c,r) = reduce2 (ppol !pol_courant) lp in
info ("remainder: "^(stringPcut r)^"\n");
coef_courant:= P.multP !coef_courant c;
pol_courant:= mk_polynom r;
if r=[]
then (info "polynomial reduced to 0\n";
let lcp = map (fun q -> []) !poldep in
let c = !coef_courant in
let (lcq,r) = reduce2_trace (emultP c p) lp lcp in
info "r ok\n";
info ("r: "^(stringP r)^"\n");
let res=ref (emultP c p) in
iter2
(fun cq q -> res:=plusP (!res) (multP cq (ppol q));
)
lcq !poldep;
info ("verif sum: "^(stringP (!res))^"\n");
info ("coefficient: "^(stringP (polconst 1 c))^"\n");
let rec aux lp =
match lp with
|[] -> []
|p::lp ->
(map
(fun q -> coefpoldep_find p q)
lp)::(aux lp)
in
let coefficient_multiplicateur = c in
let liste_polynomes_de_depart = rev lp0 in
let polynome_a_tester = p in
let liste_des_coefficients_intermediaires =
(let lci = rev (aux (rev lp)) in
iter (fun x -> lci := tl (!lci)) lp0;
!lci) in
let liste_des_coefficients =
map
(fun cq -> emultP (coef_of_int (-1)) cq)
(rev lcq) in
(liste_polynomes_de_depart,
polynome_a_tester,
{coef = coefficient_multiplicateur;
power = 1;
gb_comb = liste_des_coefficients_intermediaires;
last_comb = liste_des_coefficients})
)
info " polynomial not reduced to 0\n " ;
info ( " \nremainder : " ^(stringPcut r)^"\n " ) ;
info ("\nremainder: "^(stringPcut r)^"\n");*)
raise NotInIdeal)
let divide_rem_with_critical_pair = ref false
let list_diff l x =
filter (fun y -> y <> x) l
let deg_hom p =
match p with
| [] -> -1
| (a,m)::_ -> m.(0)
let pbuchf pq p lp0=
info "computation of the Groebner basis\n";
step:=0;
Hashtbl.clear hmon;
let rec pbuchf (lp, lpc) =
infobuch lp lpc;
step:=(!step+1)mod 10 ;
match lpc with
[] ->
info ( " List of polynomials:\n"^(fold_left ( fun r p - > r^(stringP p)^"\n " ) " " lp ) ) ;
info " --------------------\n " ;
info "--------------------\n";*)
test_dans_ideal (ppol p) lp lp0
| ((i,j),m) :: lpc2 ->
if critere3 ((i,j),m) lp lpc2
then (info "c"; pbuchf (lp, lpc2))
else
let a = spol !allpol.(i) !allpol.(j) in
if !homogeneous && (ppol a)<>[] && deg_hom (ppol a)
> deg_hom (ppol !pol_courant)
then (info "h"; pbuchf (lp, lpc2))
else
let (ca,a0)= reduce2 (ppol a) lp in
match a0 with
[] -> info "0";pbuchf (lp, lpc2)
| _ ->
a.pol := emultP ca (ppol a);
let (lca,a0) = reduce2_trace (ppol a) lp
(map (fun q -> emultP ca (coefpoldep_find a q))
!poldep) in
a.pol := a0;
let a0 = new_allpol a0 sa in
iter2 (fun c q ->
coefpoldep_remove a q;
coefpoldep_set a q c) lca !poldep;
let a0 = a in
info ("\nnew polynomials: "^(stringPcut (ppol a0))^"\n");
poldep:=addS a0 lp;
poldepcontent:=addS ct (!poldepcontent);
try test_dans_ideal (ppol p) (addS a0 lp) lp0
with NotInIdeal ->
let newlpc = add_cpairs a0 lp lpc2 in
pbuchf (((addS a0 lp), newlpc))
in pbuchf pq
let is_homogeneous p =
match p with
| [] -> true
| (a,m)::p1 -> let d = m.(0) in
for_all (fun (b,m') -> m'.(0)=d) p1
returns
c
lp = [ pn; ... ;p1 ]
p
lci = [ [ a(n+1,n); ... ;a(n+1,1 ) ] ;
[ a(n+2,n+1); ... ) ] ;
...
[ a(n+m , n+m-1); ... ;a(n+m,1 ) ] ]
lc = [ qn+m ; ... q1 ]
such that
c*p = sum qi*pi
where pn+k = a(n+k , n+k-1)*pn+k-1 + ... + a(n+k,1 ) * p1
c
lp = [pn;...;p1]
p
lci = [[a(n+1,n);...;a(n+1,1)];
[a(n+2,n+1);...;a(n+2,1)];
...
[a(n+m,n+m-1);...;a(n+m,1)]]
lc = [qn+m; ... q1]
such that
c*p = sum qi*pi
where pn+k = a(n+k,n+k-1)*pn+k-1 + ... + a(n+k,1)* p1
*)
let in_ideal d lp p =
Hashtbl.clear hmon;
Hashtbl.clear coefpoldep;
nallpol := 0;
allpol := Array.create 1000 polynom0;
homogeneous := for_all is_homogeneous (p::lp);
if !homogeneous then info "homogeneous polynomials\n";
info ("p: "^(stringPcut p)^"\n");
info ("lp:\n"^(fold_left (fun r p -> r^(stringPcut p)^"\n") "" lp));
let lp = map mk_polynom lp in
let p = mk_polynom p in
initcoefpoldep d lp;
coef_courant:=coef1;
pol_courant:=p;
let (lp1,p1,cert) =
try test_dans_ideal (ppol p) lp lp
with NotInIdeal -> pbuchf (lp, (cpairs lp)) p lp in
info "computed\n";
(map ppol lp1, p1, cert)
end
|
2b4a97ab3409e1a409a90e960b284b573df77b2770b4c8e92bab1652991818f8 | huangz1990/SICP-answers | test-17-double-and-halve.scm | (load "test-manager/load.scm")
(load "17-double-and-halve.scm")
(define-test (test-double)
(check (= 4 (double 2)))
(check (= 10 (double 5)))
)
(define-test (test-halve)
(check (= 2 (halve 4)))
(check (= 5 (halve 10)))
)
(run-registered-tests)
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp1/code/test-17-double-and-halve.scm | scheme | (load "test-manager/load.scm")
(load "17-double-and-halve.scm")
(define-test (test-double)
(check (= 4 (double 2)))
(check (= 10 (double 5)))
)
(define-test (test-halve)
(check (= 2 (halve 4)))
(check (= 5 (halve 10)))
)
(run-registered-tests)
| |
d52f9a319516406f98fc546bff3d875830f1ea5880eb960c84cf9c59b31c328f | mirage/ocaml-matrix | http.ml | open Lwt
open Cohttp
open Cohttp_lwt_unix
open Json_encoding
exception Json_error of string
module Server = struct
type scheme = [ `Http | `Https ]
let scheme_to_string = function `Http -> "http" | `Https -> "https"
type t = {scheme: scheme; host: string; port: int}
let pp f {scheme; host; port} =
Fmt.pf f "%s:%d" (scheme_to_string scheme) host port
let v scheme host port = {scheme; host; port}
let to_uri {scheme; host; port} path query =
let scheme = scheme_to_string scheme in
Uri.make ~scheme ~host ~port ~path ?query ()
end
let make_headers ?(header = []) auth_token =
let headers = Header.of_list header in
let headers =
match auth_token with
| None -> headers
| Some auth_token ->
Header.add headers "Authorization" (Fmt.str "Bearer %s" auth_token) in
headers
type 'a or_error = ('a, Matrix_ctos.Errors.t) Result.t
let parse_or_fail code json_encoding body =
let json_body = Ezjsonm.from_string body in
if code >= 400 then Error (destruct Matrix_ctos.Errors.encoding json_body)
else Ok (destruct json_encoding json_body)
let get server ?header path args response_encoding needs_auth =
let uri = Server.to_uri server path args in
let headers = make_headers ?header needs_auth in
Cohttp_lwt_unix.Client.get ~headers uri >>= fun (resp, body) ->
let code = resp |> Response.status |> Code.code_of_status in
body |> Cohttp_lwt.Body.to_string >|= fun body ->
if body <> "" then parse_or_fail code response_encoding body
else raise (Json_error (Fmt.str "Error %d in get: no body" code))
let post
server ?header path args value request_encoding response_encoding auth_token
=
let uri = Server.to_uri server path args in
let body = construct request_encoding value |> Ezjsonm.value_to_string in
let body = Cohttp_lwt.Body.of_string body in
let headers = make_headers ?header auth_token in
Cohttp_lwt_unix.Client.post ~headers ~body uri >>= fun (resp, body) ->
let code = resp |> Response.status |> Code.code_of_status in
body |> Cohttp_lwt.Body.to_string >|= fun body ->
if body <> "" then parse_or_fail code response_encoding body
else raise (Json_error (Fmt.str "Error %d in post" code))
let put
server ?header path args value request_encoding response_encoding auth_token
=
let uri = Server.to_uri server path args in
let body = construct request_encoding value |> Ezjsonm.value_to_string in
let body = Cohttp_lwt.Body.of_string body in
let headers = make_headers ?header auth_token in
Cohttp_lwt_unix.Client.put ~headers ~body uri >>= fun (resp, body) ->
let code = resp |> Response.status |> Code.code_of_status in
body |> Cohttp_lwt.Body.to_string >|= fun body ->
if body <> "" then parse_or_fail code response_encoding body
else raise (Json_error (Fmt.str "Error %d in put" code))
| null | https://raw.githubusercontent.com/mirage/ocaml-matrix/2a58d3d41c43404741f2dfdaf1d2d0f3757b2b69/ci-client/http.ml | ocaml | open Lwt
open Cohttp
open Cohttp_lwt_unix
open Json_encoding
exception Json_error of string
module Server = struct
type scheme = [ `Http | `Https ]
let scheme_to_string = function `Http -> "http" | `Https -> "https"
type t = {scheme: scheme; host: string; port: int}
let pp f {scheme; host; port} =
Fmt.pf f "%s:%d" (scheme_to_string scheme) host port
let v scheme host port = {scheme; host; port}
let to_uri {scheme; host; port} path query =
let scheme = scheme_to_string scheme in
Uri.make ~scheme ~host ~port ~path ?query ()
end
let make_headers ?(header = []) auth_token =
let headers = Header.of_list header in
let headers =
match auth_token with
| None -> headers
| Some auth_token ->
Header.add headers "Authorization" (Fmt.str "Bearer %s" auth_token) in
headers
type 'a or_error = ('a, Matrix_ctos.Errors.t) Result.t
let parse_or_fail code json_encoding body =
let json_body = Ezjsonm.from_string body in
if code >= 400 then Error (destruct Matrix_ctos.Errors.encoding json_body)
else Ok (destruct json_encoding json_body)
let get server ?header path args response_encoding needs_auth =
let uri = Server.to_uri server path args in
let headers = make_headers ?header needs_auth in
Cohttp_lwt_unix.Client.get ~headers uri >>= fun (resp, body) ->
let code = resp |> Response.status |> Code.code_of_status in
body |> Cohttp_lwt.Body.to_string >|= fun body ->
if body <> "" then parse_or_fail code response_encoding body
else raise (Json_error (Fmt.str "Error %d in get: no body" code))
let post
server ?header path args value request_encoding response_encoding auth_token
=
let uri = Server.to_uri server path args in
let body = construct request_encoding value |> Ezjsonm.value_to_string in
let body = Cohttp_lwt.Body.of_string body in
let headers = make_headers ?header auth_token in
Cohttp_lwt_unix.Client.post ~headers ~body uri >>= fun (resp, body) ->
let code = resp |> Response.status |> Code.code_of_status in
body |> Cohttp_lwt.Body.to_string >|= fun body ->
if body <> "" then parse_or_fail code response_encoding body
else raise (Json_error (Fmt.str "Error %d in post" code))
let put
server ?header path args value request_encoding response_encoding auth_token
=
let uri = Server.to_uri server path args in
let body = construct request_encoding value |> Ezjsonm.value_to_string in
let body = Cohttp_lwt.Body.of_string body in
let headers = make_headers ?header auth_token in
Cohttp_lwt_unix.Client.put ~headers ~body uri >>= fun (resp, body) ->
let code = resp |> Response.status |> Code.code_of_status in
body |> Cohttp_lwt.Body.to_string >|= fun body ->
if body <> "" then parse_or_fail code response_encoding body
else raise (Json_error (Fmt.str "Error %d in put" code))
| |
d9eff749d3a92fd96f5d2410c91637f3a0b388c85d3b881ec183a19fe1a28f5b | Bogdanp/racket-north | cli.rkt | #lang at-exp racket/base
(require db
gregor
net/url
openssl/md5
racket/cmdline
racket/format
racket/function
racket/match
racket/port
raco/command-name
"adapter/base.rkt"
"adapter/postgres.rkt"
"adapter/sqlite.rkt"
"base.rkt")
(define current-program-name
(make-parameter (short-program+command-name)))
(define database-url
(make-parameter (getenv "DATABASE_URL")))
(define dry-run?
(make-parameter #t))
(define migrations-path
(make-parameter
(build-path (current-directory) "migrations")
(lambda (p)
(unless (directory-exists? p)
(exit-with-errors! @~a{error: migrations path '@p' does not exist}))
p)))
(define adapter-factories
(hasheq 'postgres url->postgres-adapter
'sqlite url->sqlite-adapter))
(define (make-adapter dsn)
(define url (string->url dsn))
(define factory (hash-ref adapter-factories (string->symbol (url-scheme url)) #f))
(and factory (factory url)))
(define root-revision-template #<<EOT
#lang north
-- @revision: ~a
-- @description: Creates some table.
-- @up {
create table example();
-- }
-- @down {
drop table example;
-- }
EOT
)
(define child-revision-template #<<EOT
#lang north
-- @revision: ~a
-- @parent: ~a
-- @description: Alters some table.
-- @up {
alter table example add column created_at timestamp not null default now();
-- }
-- @down {
alter table example drop column created_at;
-- }
EOT
)
(define (current-date->string)
(~t (today) "yyyyMMdd"))
(define (generate-revision-id name)
(call-with-input-string (~a (datetime->iso8601 (now)) name) md5))
(define (generate-revision-filename name)
(build-path (migrations-path) (~a (current-date->string) "-" name ".sql")))
(define (exit-with-errors! . messages)
(parameterize ([current-output-port (current-error-port)])
(for ([message messages])
(displayln message)))
(exit 1))
(define (exit-with-adapter-error! e)
(define revision (exn:fail:adapter:migration-revision e))
(define info (exn:fail:sql-info (exn:fail:adapter-cause e)))
(apply exit-with-errors!
@~a{error: failed to apply revision @revision}
@~a{details:}
(for/list ([i info])
@~a{ @(car i): @(cdr i)})))
(define (read-migrations)
(with-handlers ([exn:fail:migration?
(lambda (e)
(exit-with-errors! @~a{error: @(exn-message e)}))]
[exn:fail:read?
(lambda (e)
(exit-with-errors! @~a{error: @(exn-message e)}))]
[exn:fail?
(lambda (e)
(exit-with-errors! @~a{error: '@(migrations-path)' folder not found}))])
(path->migration (migrations-path))))
(define (parse-migrator-args command)
(define revision
(command-line
#:program (current-program-name)
#:once-each
[("-f" "--force") "Unless specified, none of the operations will be applied."
(dry-run? #f)]
[("-u" "--database-url") url
"The URL with which to connect to the database."
(database-url url)]
[("-p" "--path") path
"The path to the migrations folder."
(migrations-path path)]
#:args ([revision #f]) revision))
(unless (database-url)
(exit-with-errors! "error: no database url"))
(define base (read-migrations))
(unless base
(exit-with-errors! "error: no migrations"))
(define adapter (make-adapter (database-url)))
(unless adapter
(exit-with-errors! "error: no adapter"))
(adapter-init adapter)
(values adapter base (adapter-current-revision adapter) revision))
(define (print-message message)
(if (and (dry-run?) (not (string=? message "")))
(displayln (~a "-- " message))
(displayln message)))
(define (print-dry-run migration script-proc)
(unless (string=? (migration-revision migration) "base")
(define scripts (script-proc migration))
(print-message @~a{Revision: @(migration-revision migration)})
(print-message @~a{Parent: @(migration-parent migration)})
(print-message @~a{Path: @(migration-path migration)})
(cond
[(null? scripts) (displayln "-- no content --")]
[else (for-each displayln scripts)])))
(define (print-migration migration)
(unless (string=? (migration-revision migration) "base")
(print-message @~a{Revision: @(migration-revision migration)})
(print-message @~a{Parent: @(migration-parent migration)})
(print-message @~a{Path: @(migration-path migration)})
(print-message @~a{Description: @(migration-description migration)})
(print-message "")))
(define (handle-help)
(exit-with-errors!
"usage: raco north <command> <option> ... <arg> ..."
""
"available commands:"
" create create a new revision"
" help print this message and exit"
" migrate migrate to a particular revision"
" rollback roll back to a previous revision"
" show print information about each revision"))
(define (handle-migrate)
(define-values (adapter base current-revision input-revision)
(parse-migrator-args "migrate"))
(define target-revision
(or input-revision (migration-revision (migration-most-recent base))))
(print-message @~a{Current revision: @(or current-revision "base")})
(print-message @~a{Target revision: @target-revision})
(when (equal? current-revision target-revision)
(exit 0))
(define plan
(migration-plan base current-revision target-revision))
(with-handlers ([exn:fail:adapter:migration? exit-with-adapter-error!])
(for ([migration plan])
(print-message "")
(print-message @~a{Applying revision: @(migration-revision migration)})
(if (dry-run?)
(print-dry-run migration migration-up)
(adapter-apply! adapter (migration-revision migration) (migration-up migration))))))
(define (handle-rollback)
(define-values (adapter base current-revision input-revision)
(parse-migrator-args "rollback"))
(define target-migration
(if input-revision
(migration-find-revision base input-revision)
(migration-find-parent base (or current-revision "base"))))
(unless target-migration
(exit-with-errors! @~a{error: invalid revision '@input-revision'}))
(define target-revision
(match (migration-revision target-migration)
["base" #f]
[rev rev]))
(print-message @~a{WARNING: Never roll back a production database!})
(print-message @~a{Current revision: @(or current-revision "base")})
(print-message @~a{Target revision: @(or target-revision "base")})
(when (equal? current-revision target-revision)
(exit-with-errors! "error: nothing to do"))
(define plan
(migration-plan base current-revision target-revision))
(with-handlers ([exn:fail:adapter:migration? exit-with-adapter-error!])
(for ([migration plan])
(print-message "")
(print-message @~a{Rolling back revision: @(migration-revision migration)})
(if (dry-run?)
(print-dry-run migration migration-down)
(adapter-apply! adapter (migration-parent migration) (migration-down migration))))))
(define (handle-create)
(define name
(command-line
#:program (current-program-name)
#:once-each
[("-p" "--path") path
"The path to the migrations folder."
(migrations-path path)]
#:args (name) name))
(define revision (generate-revision-id name))
(define filename (generate-revision-filename name))
(define content
(match (read-migrations)
[#f (format root-revision-template revision)]
[base (format child-revision-template revision (migration-revision (migration-most-recent base)))]))
(with-handlers ([exn:fail:filesystem:exists?
(lambda _
(exit-with-errors! @~a{error: output file '@filename' already exists}))])
(void (call-with-output-file filename (curry write-string content)))))
(define (handle-show)
(define revision
(command-line
#:program (current-program-name)
#:once-each
[("-p" "--path") path
"The path to the migrations folder."
(migrations-path path)]
#:args ([revision #f]) revision))
(parameterize ([dry-run? #f])
(define base (read-migrations))
(unless base
(exit-with-errors! "error: no migrations"))
(cond
[revision
(define migration (migration-find-revision base revision))
(unless migration
(exit-with-errors! @~a{error: revision '@revision' not found}))
(print-migration migration)]
[else
(for-each print-migration (reverse (migration->list base)))])))
(define ((handle-unknown command))
(exit-with-errors! @~a{error: unrecognized command '@command'}))
(define all-commands
(hasheq 'create handle-create
'help handle-help
'migrate handle-migrate
'rollback handle-rollback
'show handle-show))
(define-values (command handler args)
(match (current-command-line-arguments)
[(vector command args ...)
(values command (hash-ref all-commands (string->symbol command) (handle-unknown command)) args)]
[_
(values "help" handle-help null)]))
(parameterize ([current-command-line-arguments (list->vector args)]
[current-program-name (~a (current-program-name) " " command)])
(with-handlers ([(λ (e)
(or (exn:fail:adapter? e)
(exn:fail:sql? e)))
(λ (e)
(eprintf "error: ~a~n" (exn-message e))
(exit 1))])
(handler)))
| null | https://raw.githubusercontent.com/Bogdanp/racket-north/d256258e2d485bc425bca327809dbb0eaa7318ac/north/cli.rkt | racket | #lang at-exp racket/base
(require db
gregor
net/url
openssl/md5
racket/cmdline
racket/format
racket/function
racket/match
racket/port
raco/command-name
"adapter/base.rkt"
"adapter/postgres.rkt"
"adapter/sqlite.rkt"
"base.rkt")
(define current-program-name
(make-parameter (short-program+command-name)))
(define database-url
(make-parameter (getenv "DATABASE_URL")))
(define dry-run?
(make-parameter #t))
(define migrations-path
(make-parameter
(build-path (current-directory) "migrations")
(lambda (p)
(unless (directory-exists? p)
(exit-with-errors! @~a{error: migrations path '@p' does not exist}))
p)))
(define adapter-factories
(hasheq 'postgres url->postgres-adapter
'sqlite url->sqlite-adapter))
(define (make-adapter dsn)
(define url (string->url dsn))
(define factory (hash-ref adapter-factories (string->symbol (url-scheme url)) #f))
(and factory (factory url)))
(define root-revision-template #<<EOT
#lang north
-- @revision: ~a
-- @description: Creates some table.
-- @up {
-- }
-- @down {
-- }
EOT
)
(define child-revision-template #<<EOT
#lang north
-- @revision: ~a
-- @parent: ~a
-- @description: Alters some table.
-- @up {
-- }
-- @down {
-- }
EOT
)
(define (current-date->string)
(~t (today) "yyyyMMdd"))
(define (generate-revision-id name)
(call-with-input-string (~a (datetime->iso8601 (now)) name) md5))
(define (generate-revision-filename name)
(build-path (migrations-path) (~a (current-date->string) "-" name ".sql")))
(define (exit-with-errors! . messages)
(parameterize ([current-output-port (current-error-port)])
(for ([message messages])
(displayln message)))
(exit 1))
(define (exit-with-adapter-error! e)
(define revision (exn:fail:adapter:migration-revision e))
(define info (exn:fail:sql-info (exn:fail:adapter-cause e)))
(apply exit-with-errors!
@~a{error: failed to apply revision @revision}
@~a{details:}
(for/list ([i info])
@~a{ @(car i): @(cdr i)})))
(define (read-migrations)
(with-handlers ([exn:fail:migration?
(lambda (e)
(exit-with-errors! @~a{error: @(exn-message e)}))]
[exn:fail:read?
(lambda (e)
(exit-with-errors! @~a{error: @(exn-message e)}))]
[exn:fail?
(lambda (e)
(exit-with-errors! @~a{error: '@(migrations-path)' folder not found}))])
(path->migration (migrations-path))))
(define (parse-migrator-args command)
(define revision
(command-line
#:program (current-program-name)
#:once-each
[("-f" "--force") "Unless specified, none of the operations will be applied."
(dry-run? #f)]
[("-u" "--database-url") url
"The URL with which to connect to the database."
(database-url url)]
[("-p" "--path") path
"The path to the migrations folder."
(migrations-path path)]
#:args ([revision #f]) revision))
(unless (database-url)
(exit-with-errors! "error: no database url"))
(define base (read-migrations))
(unless base
(exit-with-errors! "error: no migrations"))
(define adapter (make-adapter (database-url)))
(unless adapter
(exit-with-errors! "error: no adapter"))
(adapter-init adapter)
(values adapter base (adapter-current-revision adapter) revision))
(define (print-message message)
(if (and (dry-run?) (not (string=? message "")))
(displayln (~a "-- " message))
(displayln message)))
(define (print-dry-run migration script-proc)
(unless (string=? (migration-revision migration) "base")
(define scripts (script-proc migration))
(print-message @~a{Revision: @(migration-revision migration)})
(print-message @~a{Parent: @(migration-parent migration)})
(print-message @~a{Path: @(migration-path migration)})
(cond
[(null? scripts) (displayln "-- no content --")]
[else (for-each displayln scripts)])))
(define (print-migration migration)
(unless (string=? (migration-revision migration) "base")
(print-message @~a{Revision: @(migration-revision migration)})
(print-message @~a{Parent: @(migration-parent migration)})
(print-message @~a{Path: @(migration-path migration)})
(print-message @~a{Description: @(migration-description migration)})
(print-message "")))
(define (handle-help)
(exit-with-errors!
"usage: raco north <command> <option> ... <arg> ..."
""
"available commands:"
" create create a new revision"
" help print this message and exit"
" migrate migrate to a particular revision"
" rollback roll back to a previous revision"
" show print information about each revision"))
(define (handle-migrate)
(define-values (adapter base current-revision input-revision)
(parse-migrator-args "migrate"))
(define target-revision
(or input-revision (migration-revision (migration-most-recent base))))
(print-message @~a{Current revision: @(or current-revision "base")})
(print-message @~a{Target revision: @target-revision})
(when (equal? current-revision target-revision)
(exit 0))
(define plan
(migration-plan base current-revision target-revision))
(with-handlers ([exn:fail:adapter:migration? exit-with-adapter-error!])
(for ([migration plan])
(print-message "")
(print-message @~a{Applying revision: @(migration-revision migration)})
(if (dry-run?)
(print-dry-run migration migration-up)
(adapter-apply! adapter (migration-revision migration) (migration-up migration))))))
(define (handle-rollback)
(define-values (adapter base current-revision input-revision)
(parse-migrator-args "rollback"))
(define target-migration
(if input-revision
(migration-find-revision base input-revision)
(migration-find-parent base (or current-revision "base"))))
(unless target-migration
(exit-with-errors! @~a{error: invalid revision '@input-revision'}))
(define target-revision
(match (migration-revision target-migration)
["base" #f]
[rev rev]))
(print-message @~a{WARNING: Never roll back a production database!})
(print-message @~a{Current revision: @(or current-revision "base")})
(print-message @~a{Target revision: @(or target-revision "base")})
(when (equal? current-revision target-revision)
(exit-with-errors! "error: nothing to do"))
(define plan
(migration-plan base current-revision target-revision))
(with-handlers ([exn:fail:adapter:migration? exit-with-adapter-error!])
(for ([migration plan])
(print-message "")
(print-message @~a{Rolling back revision: @(migration-revision migration)})
(if (dry-run?)
(print-dry-run migration migration-down)
(adapter-apply! adapter (migration-parent migration) (migration-down migration))))))
(define (handle-create)
(define name
(command-line
#:program (current-program-name)
#:once-each
[("-p" "--path") path
"The path to the migrations folder."
(migrations-path path)]
#:args (name) name))
(define revision (generate-revision-id name))
(define filename (generate-revision-filename name))
(define content
(match (read-migrations)
[#f (format root-revision-template revision)]
[base (format child-revision-template revision (migration-revision (migration-most-recent base)))]))
(with-handlers ([exn:fail:filesystem:exists?
(lambda _
(exit-with-errors! @~a{error: output file '@filename' already exists}))])
(void (call-with-output-file filename (curry write-string content)))))
(define (handle-show)
(define revision
(command-line
#:program (current-program-name)
#:once-each
[("-p" "--path") path
"The path to the migrations folder."
(migrations-path path)]
#:args ([revision #f]) revision))
(parameterize ([dry-run? #f])
(define base (read-migrations))
(unless base
(exit-with-errors! "error: no migrations"))
(cond
[revision
(define migration (migration-find-revision base revision))
(unless migration
(exit-with-errors! @~a{error: revision '@revision' not found}))
(print-migration migration)]
[else
(for-each print-migration (reverse (migration->list base)))])))
(define ((handle-unknown command))
(exit-with-errors! @~a{error: unrecognized command '@command'}))
(define all-commands
(hasheq 'create handle-create
'help handle-help
'migrate handle-migrate
'rollback handle-rollback
'show handle-show))
(define-values (command handler args)
(match (current-command-line-arguments)
[(vector command args ...)
(values command (hash-ref all-commands (string->symbol command) (handle-unknown command)) args)]
[_
(values "help" handle-help null)]))
(parameterize ([current-command-line-arguments (list->vector args)]
[current-program-name (~a (current-program-name) " " command)])
(with-handlers ([(λ (e)
(or (exn:fail:adapter? e)
(exn:fail:sql? e)))
(λ (e)
(eprintf "error: ~a~n" (exn-message e))
(exit 1))])
(handler)))
| |
6bf2de1e4dc897d00ad623f0c2070eaddf3d784cc0c78f1e11c1919dbe473647 | UnkindPartition/tasty | Utils.hs | {-# LANGUAGE BangPatterns #-}
-- | Note: this module is re-exported as a whole from "Test.Tasty.Runners"
module Test.Tasty.Runners.Utils where
import Control.Exception
import Control.Applicative
import Control.Concurrent (mkWeakThreadId, myThreadId)
import Control.Monad (forM_)
import Data.Typeable (Typeable)
Silence AMP import warnings
import Text.Printf
import Foreign.C (CInt)
#if MIN_VERSION_base(4,11,0)
import GHC.Clock (getMonotonicTime)
#else
import Data.Time.Clock.POSIX (getPOSIXTime)
#endif
Install handlers only on UNIX
#ifdef VERSION_unix
#define INSTALL_HANDLERS 1
#else
#define INSTALL_HANDLERS 0
#endif
#if INSTALL_HANDLERS
import System.Posix.Signals
import System.Mem.Weak (deRefWeak)
#endif
import Test.Tasty.Core (Time)
-- | Catch possible exceptions that may arise when evaluating a string.
-- For normal (total) strings, this is a no-op.
--
-- This function should be used to display messages generated by the test
-- suite (such as test result descriptions).
--
-- See e.g. <>.
--
@since 0.10.1
formatMessage :: String -> IO String
formatMessage = go 3
where
-- to avoid infinite recursion, we introduce the recursion limit
go :: Int -> String -> IO String
go 0 _ = return "exceptions keep throwing other exceptions!"
go recLimit msg = do
mbStr <- try $ evaluate $ forceElements msg
case mbStr of
Right () -> return msg
Left e' -> printf "message threw an exception: %s" <$> go (recLimit-1) (show (e' :: SomeException))
-- | Force elements of a list
-- (<-che.info/articles/2015-05-28-force-list>).
--
@since 1.0.1
forceElements :: [a] -> ()
forceElements = foldr seq ()
-- from -che.info/articles/2014-07-30-bracket
-- | Install signal handlers so that e.g. the cursor is restored if the test
suite is killed by SIGTERM . Upon a signal , a ' SignalException ' will be
-- thrown to the thread that has executed this action.
--
This function is called automatically from the family of
-- functions. You only need to call it explicitly if you call
' Test . Tasty . Runners.tryIngredients ' yourself .
--
This function does nothing on non - UNIX systems or when compiled with GHC
-- older than 7.6.
--
@since 1.2.1
installSignalHandlers :: IO ()
installSignalHandlers = do
#if INSTALL_HANDLERS
main_thread_id <- myThreadId
weak_tid <- mkWeakThreadId main_thread_id
forM_ [ sigHUP, sigTERM, sigUSR1, sigUSR2, sigXCPU, sigXFSZ ] $ \sig ->
installHandler sig (Catch $ send_exception weak_tid sig) Nothing
where
send_exception weak_tid sig = do
m <- deRefWeak weak_tid
case m of
Nothing -> return ()
Just tid -> throwTo tid (toException $ SignalException sig)
#else
return ()
#endif
-- | This exception is thrown when the program receives a signal, assuming
-- 'installSignalHandlers' was called.
--
-- The 'CInt' field contains the signal number, as in
' System . . Signals . Signal ' . We do n't use that type synonym , however ,
-- because it's not available on non-UNIXes.
--
@since 1.2.1
newtype SignalException = SignalException CInt
deriving (Show, Typeable)
instance Exception SignalException
-- | Measure the time taken by an 'IO' action to run.
--
-- @since 1.2.2
timed :: IO a -> IO (Time, a)
timed t = do
start <- getTime
!r <- t
end <- getTime
return (end-start, r)
#if MIN_VERSION_base(4,11,0)
-- | Get monotonic time.
--
-- Warning: This is not the system time, but a monotonically increasing time
-- that facilitates reliable measurement of time differences.
--
-- @since 1.2.2
getTime :: IO Time
getTime = getMonotonicTime
#else
-- | Get system time.
--
-- @since 1.2.2
getTime :: IO Time
getTime = realToFrac <$> getPOSIXTime
#endif
| null | https://raw.githubusercontent.com/UnkindPartition/tasty/0debac85701560e8c96cd3705988c50197cb214e/core/Test/Tasty/Runners/Utils.hs | haskell | # LANGUAGE BangPatterns #
| Note: this module is re-exported as a whole from "Test.Tasty.Runners"
| Catch possible exceptions that may arise when evaluating a string.
For normal (total) strings, this is a no-op.
This function should be used to display messages generated by the test
suite (such as test result descriptions).
See e.g. <>.
to avoid infinite recursion, we introduce the recursion limit
| Force elements of a list
(<-che.info/articles/2015-05-28-force-list>).
from -che.info/articles/2014-07-30-bracket
| Install signal handlers so that e.g. the cursor is restored if the test
thrown to the thread that has executed this action.
functions. You only need to call it explicitly if you call
older than 7.6.
| This exception is thrown when the program receives a signal, assuming
'installSignalHandlers' was called.
The 'CInt' field contains the signal number, as in
because it's not available on non-UNIXes.
| Measure the time taken by an 'IO' action to run.
@since 1.2.2
| Get monotonic time.
Warning: This is not the system time, but a monotonically increasing time
that facilitates reliable measurement of time differences.
@since 1.2.2
| Get system time.
@since 1.2.2 |
module Test.Tasty.Runners.Utils where
import Control.Exception
import Control.Applicative
import Control.Concurrent (mkWeakThreadId, myThreadId)
import Control.Monad (forM_)
import Data.Typeable (Typeable)
Silence AMP import warnings
import Text.Printf
import Foreign.C (CInt)
#if MIN_VERSION_base(4,11,0)
import GHC.Clock (getMonotonicTime)
#else
import Data.Time.Clock.POSIX (getPOSIXTime)
#endif
Install handlers only on UNIX
#ifdef VERSION_unix
#define INSTALL_HANDLERS 1
#else
#define INSTALL_HANDLERS 0
#endif
#if INSTALL_HANDLERS
import System.Posix.Signals
import System.Mem.Weak (deRefWeak)
#endif
import Test.Tasty.Core (Time)
@since 0.10.1
formatMessage :: String -> IO String
formatMessage = go 3
where
go :: Int -> String -> IO String
go 0 _ = return "exceptions keep throwing other exceptions!"
go recLimit msg = do
mbStr <- try $ evaluate $ forceElements msg
case mbStr of
Right () -> return msg
Left e' -> printf "message threw an exception: %s" <$> go (recLimit-1) (show (e' :: SomeException))
@since 1.0.1
forceElements :: [a] -> ()
forceElements = foldr seq ()
suite is killed by SIGTERM . Upon a signal , a ' SignalException ' will be
This function is called automatically from the family of
' Test . Tasty . Runners.tryIngredients ' yourself .
This function does nothing on non - UNIX systems or when compiled with GHC
@since 1.2.1
installSignalHandlers :: IO ()
installSignalHandlers = do
#if INSTALL_HANDLERS
main_thread_id <- myThreadId
weak_tid <- mkWeakThreadId main_thread_id
forM_ [ sigHUP, sigTERM, sigUSR1, sigUSR2, sigXCPU, sigXFSZ ] $ \sig ->
installHandler sig (Catch $ send_exception weak_tid sig) Nothing
where
send_exception weak_tid sig = do
m <- deRefWeak weak_tid
case m of
Nothing -> return ()
Just tid -> throwTo tid (toException $ SignalException sig)
#else
return ()
#endif
' System . . Signals . Signal ' . We do n't use that type synonym , however ,
@since 1.2.1
newtype SignalException = SignalException CInt
deriving (Show, Typeable)
instance Exception SignalException
timed :: IO a -> IO (Time, a)
timed t = do
start <- getTime
!r <- t
end <- getTime
return (end-start, r)
#if MIN_VERSION_base(4,11,0)
-- | Current time in seconds from a monotonic clock (immune to wall-clock
-- adjustments).
getTime :: IO Time
getTime = getMonotonicTime
#else
-- | Fallback for older base: POSIX wall-clock time (not monotonic)
-- converted to 'Time'.
getTime :: IO Time
getTime = realToFrac <$> getPOSIXTime
#endif
;;; ***********************************************************
;;; *                                                         *
;;; * Copyright, (C) Honeywell Bull Inc., 1988                *
;;; *                                                         *
;;; * Copyright, (C) Honeywell Information Systems Inc., 1982 *
;;; *                                                         *
;;; * Copyright (c) 1978 by Massachusetts Institute of        *
;;; * Technology and Honeywell Information Systems, Inc.      *
;;; *                                                         *
;;; ***********************************************************
;;;
;;;
;;; Multics Emacs Window Manager
;;; HISTORY COMMENTS:
;;;  1) change(84-01-19,Margolin), approve(), audit(), install():
;;;     pre-hcom history:
;;;     27 April 1979 by BSG
;;;     To DLW, RMS, and all the others who think/thought about this all
;;;     day long.
;;;     Modified: 19 January 1984 - Barmar - commented out register-option
;;;     form, as it was moved to e_option_defaults_.
;;;  2) change(84-12-25,Margolin), approve(86-02-24,MCR7186),
;;;     audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
;;;     Slashified #'s, changed lambda's to let's, use defmacro, use the
;;;     uwind macro in places where it is spelled out.
;;;  3) change(84-12-26,Margolin), approve(86-02-24,MCR7186),
;;;     audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
;;;     Fixed bug in rdis-update-window-struct that I put in last night
;;;     when rewriting lambda's.
;;;  4) change(84-12-27,Margolin), approve(86-02-24,MCR7186),
;;;     audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
;;;     Fix the rewritten lambda in rdis-window-totenpurge.
;;;  5) change(84-12-28,Margolin), approve(86-02-24,MCR7186),
;;;     audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
;;;     remove buggy optimization from rdis-update-window-struct.
;;;  6) change(85-01-06,Margolin), approve(86-02-24,MCR7186),
;;;     audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
;;;     changed to use set-mark-here instead of rplac'ing marks manually.
;;;     This may also fix some bugs, since it now updates curline-marklist.
;;;     Changed to use make-mark and make-eline in wman-init, rather than
;;;     cons.
;;;  7) change(88-01-15,Schroth), approve(88-02-29,MCR7852),
;;;     audit(88-06-08,RBarstad), install(88-08-01,MR12.2-1071):
;;;     Implement Window Mgr portions of Split Screen Display.
;;;     Used some defstruct's suggested by Margolin.
;;;                                                  END HISTORY COMMENTS
;;; (Multics Emacs screen mgmt, vers. 3)
(%include defmacro)
(declare (macros nil))
(%include backquote)
(%include e-macros) ;for defvar
(%include emacs-internal-macros)
(%include emacs-rdis-dcls)
(%include other_other)
(declare (*expr rdis-instate-split rdis-update-split-struct rdis-assert-not-split-mode))
(declare (*expr decimal-rep del-mark-from-buffer e_lap_$gsubstr empty-buffer-p
get-buffer-state go-to-mark go-to-or-create-buffer gstrgl
rdis-cause-full-screen-recomputation set-mark set-mark-here wwtcomp))
;;;
;;; Declarations. See main redisplay for meaning.
;;;
;;; (register-option 'pop-up-windows nil) ;moved to e_option_defaults_
(declare (*lexpr display-error display-error-noabort minibuffer-print
display-error-remark))
(declare (genprefix /!rdis_wman_))
(declare (special current-buffer minibufferp numarg damaged-flag number-of-lines-in-buffer))
(declare (special screenheight main-window-size default-new-window-size))
(declare (special pop-up-windows rdis-suppress-rupdate tty-no-upmotionp))
(declare (special known-buflist))
(declare (special two-window-mode selected-window modelwindow minibufwindow
nwindows nuwindows rdis-splln-mark phony-modeline-edline))
(declare (special screenlinelen rdis-lru-stack rdis-multiwindowed-buflist
rdis-selected-wlist rdis-selected-windowx rdis-locdisp-window
current-split nsplits rdis-selected-split split-mode-p))
(declare (array* (fixnum (wman-lrux-array ?))))
;;;
;;; Window management initialization. Called at end of rdis-init.
;;;
;; Window-management initialization; called at the end of rdis-init.
;; Builds the real-window and user-window arrays, the modeline and
;; minibuffer windows, the window-separator ("----") line mark, and the
;; LRU stack.  (Fix: the "changed to array pointer" line had lost its
;; comment marker and read as code.)
(defun wman-init ()
  (setq nuwindows 1 nwindows 3 selected-window 1 two-window-mode nil)
  (and tty-no-upmotionp (setq pop-up-windows nil))
  (setq rdis-suppress-rupdate nil)
  ;; changed to array pointer Aug/85 EDS
  (setq uwindows (*array nil t 50.))
  (*array 'wman-lrux-array 'fixnum screenheight)
  (setq main-window-size (- screenheight 3) default-new-window-size nil)
  (store (windows 0) (make-window numlines main-window-size))
  (store (windows 1) (setq modelwindow
                           (make-window startline (- screenheight 3)
                                        numlines 2)))
  (store (windows 2) (setq minibufwindow
                           (make-window startline (- screenheight 1)
                                        numlines 1)))
  (setq rdis-splln-mark (make-mark
                          eline (make-eline
                                  contents
                                  (do ((c "--------" (catenate c c)))
                                      ((> (stringlength c) screenlinelen)
                                       (substr c 1 (1+ screenlinelen)))))
                          position 0))
  (setq rdis-lru-stack (list 1) rdis-multiwindowed-buflist nil)
  (fillarray uwindows '((nil nil) (0 nil) (nil nil)))
  (setq rdis-selected-wlist (windows 0) rdis-selected-windowx 0)
  (setq rdis-locdisp-window
        (make-window numlines (numlines (windows 0))))
  (rplac-bufmark (windows 1) (make-mark eline phony-modeline-edline
                                        position 0))
  nil)
;;;
;;;
;;; Window from-editor and from-redisplay updates.
;;;
;; Called at buffer-kill time from buffer-kill
;; Called at buffer-kill time from buffer-kill.  Purges BUFNAM from every
;; user window; restarts the scan from window 1 whenever purging changed
;; the window count.  (Fix: trailing close parens rebalanced — the
;; extracted text carried one too many.)
(defun redisplay-purge-buffer (bufnam)
  (prog (orign u)
        (or (boundp 'nuwindows)(return nil)) ; window system not up yet
     a
        (setq orign nuwindows u 1)
     b
        (redisplay-purge-buffer-window bufnam u)
        (or (= orign nuwindows)(go a))       ; window count changed: rescan
        (aos u)
        (and (> u orign)(return nil))
        (go b)))
;;; Re-initting abuffer must flush all marks, which will
;;; not be relocated, and might point to a scrapped temp-seg
;;; from the redisplay. The next select on that window would
;;; redisplay around that garbage did we not do this.
;;; (see (cond ((bufmark... in select-window).
;; Re-initting a buffer invalidates all marks into it; clear the saved
;; mark of every user window displaying BUFNAM so a later select does not
;; chase a stale mark (see block comment above).
(defun redisplay-buffer-reinit-purge (bufnam)
  (do u 1 (1+ u)(> u nuwindows)
      (let ((w (uwind u)))
        (cond ((eq bufnam (bufsym w))
               (rplac-bufmark w nil))))))
;; Demote the selected window to the bottom of the LRU ordering.
(defun lruify-current-window ()(rdis-lruify-window selected-window))
;; Demote user window U to the bottom of the LRU ordering.
(defun lruify-window (u)(rdis-lruify-window u))
;; Display buffer BUFNAM in some window and select it: reuse a window
;; already showing it; else pop one up (pop-up mode); else take over the
;; least-recently-used window.
(defun find-buffer-in-window (bufnam)
  (let ((found-window (buffer-on-display-in-window bufnam)))
    (cond (found-window
           (select-window found-window))
          (pop-up-windows (wman-place-buffer bufnam))
          (t (select-window (car (last rdis-lru-stack)))
             (go-to-or-create-buffer bufnam)))
    (rdis-update-window-struct)))
;; Return the user-window index currently displaying BUFNAM, or nil.
;; (Parens rebalanced relative to the extracted text.)
(defun buffer-on-display-in-window (bufnam)
  (do u 1 (1+ u)(> u nuwindows)
      (and (eq bufnam (bufsym (uwind u)))
           (return u))))
;;;
;;;
;;; Dynamic redisplay-time window maintenance.
;;;
;; Called by redisplay et al.: record the current point into the selected
;; window's mark (creating it if needed), then move the selected window to
;; the head of the LRU stack.  (Fix: the "(delq" that carried the MACLISP
;; comment was lost in extraction, leaving a malformed 3-argument cons and
;; unbalanced parens; restored.)
(defun rdis-update-window-struct ()
  (cond (rdis-suppress-rupdate)
        ((eq current-buffer (bufsym rdis-selected-wlist))
         (or (bufmark rdis-selected-wlist)
             (rplac-bufmark rdis-selected-wlist (set-mark))) ;makes marks
         (let ((m (bufmark rdis-selected-wlist)))
           (set-mark-here m)))                               ;update the mark
        (t (rdis-upd-virtual-window-point rdis-selected-wlist)))
  ;; Update LRU stack
  (setq rdis-lru-stack
        (cons selected-window
              (delq ;MULTICS MACLISP DEPENDENCY EQ FIXNUMS
                    selected-window rdis-lru-stack))))
;; Bring WINDOW's record of its buffer and point up to date, maintaining
;; rdis-multiwindowed-buflist (buffers shown in more than one window) when
;; the buffer on display changed.  (Fixes: restored the lost ";" on the
;; 85-09-10 comment; dropped one surplus trailing close paren present in
;; the extracted text.  NOTE(review): verify the final three forms'
;; placement against a pristine source.)
(defun rdis-upd-virtual-window-point (window)
  ;; 85-09-10 EDS to look at windows through all splits
  (cond ((numberp window)(setq window (windows window))))
  (cond ((not (eq current-buffer (bufsym window)))
         (do ((u 1 (1+ u))              ;user window index
              (testbuf)
              (oldbuf (bufsym window))
              (oldoccurs 0)             ;Multiplicity of oldbuf
              (newoccurs 0))            ;Multiplicity of newbuf
             ((= u nuwindows)
              (setq oldoccurs (1- oldoccurs) newoccurs (1+ newoccurs))
              (cond ((< oldoccurs 2)
                     (setq rdis-multiwindowed-buflist
                           (delq oldbuf rdis-multiwindowed-buflist))))
              (cond ((> newoccurs 1)
                     (setq rdis-multiwindowed-buflist
                           (cons current-buffer (delq current-buffer rdis-multiwindowed-buflist))))))
             (setq testbuf (bufsym (uwind-real-window u)))
             (cond ((eq testbuf oldbuf)(aos oldoccurs))
                   ((eq testbuf current-buffer)(aos newoccurs))))))
  (del-mark-from-buffer (bufmark window)(bufsym window))
  (rplac-bufsym window current-buffer)
  (rplac-bufmark window (set-mark)))
;; Purge BUFNAM from user window U if that window displays it.  In pop-up
;; mode the window gets a fresh scratch ("nullity") buffer and, if other
;; windows exist, is deleted; otherwise the window keeps its buffer name
;; but loses its mark and drops to the bottom of the LRU stack.
(defun redisplay-purge-buffer-window (bufnam u)
  (let ((window (uwind-real-window u)))
    (cond ((eq (bufsym window) bufnam)
           (cond (pop-up-windows
                  (wman-fenestra-nata-est-virgo u)
                  (cond ((> nuwindows 1)
                         (select-other-window)
                         (delete-window u))))
                 (t (rdis-lruify-window u)
                    (rdis-fenestra-nata-est-virgo window)
                    (rplac-bufmark window nil)))))))
;; Leave bufsym around, no empty windows please.
;;; Some utility functions
;; Map a user-window index to its real window, factoring in splits.
(defun uwind-real-window (u)
  ;; returns true window given user window index factoring in splits
  (let ((uw (uwindows u)))
    (arraycall t
               (split-windows (uwindow-split uw)) ;containing split's window array
               (uwindow-windowx uw))))            ;index of uwindow into same
;; Compute the number of user windows given NRWS real windows in a split.
;; User windows are interleaved with separator windows, hence //2.  (Fix:
;; the non-split clause was lost in extraction along with its "don't count
;; model or minibuf" comment; reconstructed — outside split mode the real
;; windows are u user windows + (u-1) dividers + modeline + minibuffer,
;; i.e. u = (nrws-1)//2, which agrees with nwindows=3 <-> nuwindows=1.)
(defun nuwindows-in-split (nrws)
  ;; computes number of uwindows given number of real windows in split
  ;; user windows in a split are followed by separator windows, hence //2
  (cond (split-mode-p (// (1+ nrws) 2))
        (t (// (1- nrws) 2))))          ;don't count model or minibuf
;;;
;;;
;;; Called by e_ when a buffer is exited.
;;;
;; Called by e_ when the current buffer is exited: save point into the
;; mark of one window displaying it (the selected window preferred).
(defun redisplay-leave-buffer ()        ;current buffer implied
  (do ((u 1 (1+ u))
       (slcbuf (bufsym rdis-selected-wlist))
       (window))
      ((> u nuwindows))
    (setq window (uwind-real-window u))
    (cond ((eq current-buffer (bufsym window)) ;Got one with guy in it
           (cond ((or (eq window rdis-selected-wlist) ;Update real guy
                      (not (eq current-buffer slcbuf)))
                  (rdis-bufleave-upd window)
                  (return nil)))))))    ;Dont upd many windows of same.
;; Record the current point into WINDOW's mark, creating the mark first
;; if the window has none.
(defun rdis-bufleave-upd (window)
  (cond ((null (bufmark window))
         (rplac-bufmark window (set-mark)))
        (t (let ((m (bufmark window)))
             (set-mark-here m)))))
;;;
;;; Window genesis, no-pop up case.
;; "A window is born": create a new user window in the current split,
;; shifting the modeline/minibuffer entries up in the windows array,
;; inserting a divider, and re-dividing the screen evenly.
(defun rdis-nata-est-fenestra ()        ;Window is born
  (and (or (> nwindows (- (cadr (arraydims windows)) 4))
           (> nuwindows (- (cadr (arraydims uwindows)) 2)))
       (display-error "Too many windows, total."))
  (prog (ux wx window nnuw nnw quo)
        (setq nnuw (1+ nuwindows)
              nnw (+ 2 nwindows)
              quo (// main-window-size (nuwindows-in-split nnw)))
        ;; Refuse windows shallower than 3 lines.
        (or (> quo 2)(display-error "Too many windows for this screen size."))
        (setq wx (cond (split-mode-p (1- nnw)) ;no minibuf and model
                       (t (- nnw 3)))          ;only split has it all
              ux nnuw
              window (make-window
                       startline 0 numlines 0 bufmark nil
                       bufsym (make_atom (catenate "Window " (decimal-rep ux) " Default"))
                       window-split current-split))
        ;; Slide modeline and minibuffer down two slots; new window and a
        ;; divider go in the freed slots.
        (store (windows (- nnw 1))(windows (- nwindows 1)))
        (store (windows (- nnw 2))(windows (- nwindows 2)))
        (store (windows (- nnw 3)) window)
        (store (windows (- nnw 4)) (wman-create-divider 0))
        (store (uwindows ux) (make-uwindow windowx wx split current-split))
        (setq nuwindows nnuw nwindows nnw)
        (setq two-window-mode t)
        (rdis-reallocate-screen-evenly)
        (rdis-lruify-window ux)
        (rdis-update-split-struct)))
;; Move user window U to the tail (least-recently-used end) of the stack.
(defun rdis-lruify-window (u)
  (setq rdis-lru-stack
        (nconc (delq u rdis-lru-stack)(list u))))
;; Mark every screen line of window W as needing a full repaint.
(defun rdis-fenestra-nata-est-virgo (w) ;Gets done by cause-full-rc
  (do ((x (startline w)(1+ x))          ;in creation case.
       (ctr (numlines w)(1- ctr)))
      ((= ctr 0))
    (store (eline-conts x) 'hphcs)))    ;See redisplay, rdis-wdw
;; Divide main-window-size as evenly as possible among the user windows
;; (dividers get exactly 1 line), giving the remainder lines to the first
;; windows, then force a full screen recomputation.
(defun rdis-reallocate-screen-evenly ()
  (let ((nuws (nuwindows-in-split nwindows)))
    (do ((w 0 (1+ w))
         (startl 0)
         (real-ws 0)
         (thisw)
         (howdeep)
         (quo (// main-window-size nuws)) ;window + sep line size
         (rem (\ main-window-size nuws))) ;extra lines
        ((= real-ws nuws))
      (setq thisw (windows w))
      (cond ((eq (bufmark thisw) rdis-splln-mark)
             (setq howdeep 1))          ; a divider is always 1 line
            ((< real-ws rem)
             (setq howdeep quo)
             (aos real-ws))
            (t (setq howdeep (1- quo))
               (aos real-ws)))
      (and (= real-ws nuwindows)(setq howdeep (1+ howdeep)))
      (rplac-startline thisw startl)
      (rplac-numlines thisw howdeep)
      (setq startl (+ startl howdeep)))
    (rdis-cause-full-screen-recomputation)))
;;;
;;; Window destruction
;; Command: delete window numarg (default: the selected window).
(defun remove-window () ;command
  (rdis-assert-not-split-mode 'remove-window)
  (delete-window (or numarg selected-window)))
;;; Enter one window mode
;; Enter one-window mode: collect every user window except the selected
;; one, then delete them all.
(defun expand-window-to-whole-screen ()
  (rdis-assert-not-split-mode 'expand-window-to-whole-screen)
  (do ((u 1 (1+ u))
       (windows-to-go))
      ((> u nuwindows)
       (mapc 'rdis-delete-uwindow windows-to-go))
    (or (= u selected-window)
        (setq windows-to-go (cons u windows-to-go)))))
;; Delete user window U, first selecting another window if U is selected.
(defun delete-window (u)
  (rdis-assert-not-split-mode 'delete-window)
  (cond ((or (< u 1)(> u nuwindows))
         (display-error "Invalid window number: " (decimal-rep u)))
        ((not two-window-mode)
         (display-error "Not in multi-window mode"))
        ((= u selected-window)
         (select-other-window)))
  (rdis-delete-uwindow u)
  (rdis-update-split-struct))
;; Delete user window index U: compact the uwindows array, delete the
;; underlying real window in its split, fix the LRU stack and the
;; numbering of windows above U.
;; NOTE(review): the guard expression that precedes the "only window in
;; split" error lost its code during extraction (only the bare comment
;; text "only 1 window in split ?" remains) — restore it from a pristine
;; source before use.
(defun rdis-delete-uwindow (u)
  ;; delete user window given index
  (let ((uw (uwindows u))       ;link to real window
        (ocs current-split)     ;saved current-split
        (uws))                  ;split containing uwindow
    (setq uws (uwindow-split uw))
    (and (eq (uwind u) rdis-selected-wlist)
         (display-error "Attempt to destroy selected window"))
only 1 window in split ?
    (display-error "Attempt to destroy only window in split"))
    (do uu 0 (1+ uu)(> uu nuwindows)
        (cond ((> uu u)(store (uwindows (1- uu))(uwindows uu)))))
    (sos nuwindows)
    (rdis-instate-split uws)    ;switch splits if needed
    (rdis-delete-rwindow (uwindow-windowx uw))
    (rdis-instate-split ocs)    ;restore real current split
    (setq rdis-lru-stack (delq u rdis-lru-stack))
    (and (> selected-window u)(sos selected-window))
    (setq two-window-mode (> nuwindows 1))
    (map '(lambda (x)(and (> (car x) u)(rplaca x (1- (car x)))))
         rdis-lru-stack)))
;; Delete real window index R: purge its buffer traces, remove the
;; adjoining divider, and give its screen lines to the neighbor(s) —
;; all to the one below (top window), all to the one above (bottom
;; window), or split between both (middle window).
(defun rdis-delete-rwindow (r)
  (prog (upper lower nlines window scbottom upstairsadd downstairsadd)
        (setq window (windows r) scbottom (1- main-window-size))
        (setq upper (startline window) nlines (numlines window)
              lower (+ upper (1- nlines)) nlines (1+ nlines))
        (cond ((and (= upper 0)(= lower scbottom))
               (rbarf "rdis-delete-rwindow: deleting all that's left")))
        (rdis-window-totenpurge window)
        (cond ((= upper 0)              ;This is the top window
               (rdis-remove-divider (1+ r))
               (rdis-adjust-window (1+ r)(- nlines) nlines))
              ((= lower scbottom)
               (rdis-remove-divider (1- r))
               (sos r)
               (rdis-adjust-window (1- r) 0 nlines))
              (t (rdis-remove-divider (1+ r))
                 (setq upstairsadd (// nlines 2) downstairsadd (- nlines upstairsadd))
                 (rdis-adjust-window (- r 2) 0 upstairsadd)
                 (rdis-adjust-window (- r 1) upstairsadd 0) ;divider
                 (rdis-adjust-window (+ r 1) (- downstairsadd) downstairsadd)))
        (rdis-condense-out-window r)
        (rdis-cause-full-screen-recomputation)))
;; Remove real window W/# after asserting it really is a divider line.
(defun rdis-remove-divider (w/#)
  (or (eq (bufmark (windows w/#)) rdis-splln-mark)(rbarf "rdis-remove-divider: not a divider: " w/#))
  (rdis-condense-out-window w/#))
;; Compact real window W/# out of the windows array and renumber every
;; reference (selected window index, uwindow back-pointers).
(defun rdis-condense-out-window (w/#)
  (do w 0 (1+ w)(= w nwindows)
      (and (> w w/#)(store (windows (1- w))(windows w))))
  (and (= w/# rdis-selected-windowx)
       (rbarf "rdis-condense-out-window: called on current: " w/#))
  (and (> rdis-selected-windowx w/#)(sos rdis-selected-windowx))
  (sos nwindows)
  (do ((u 1 (1+ u))
       (uw))
      ((> u nuwindows))
    (setq uw (uwindows u))
    (and (> (uwindow-windowx uw) w/#)
         (decf (uwindow-windowx uw)))))
;; Shift real window W's top edge by ADDSTART lines and its depth by
;; ADDNL lines.
(defun rdis-adjust-window (w addstart addnl)
  (setq w (windows w))
  (rplac-startline w (+ addstart (startline w)))
  (rplac-numlines w (+ addnl (numlines w))))
;; Totally clean WINDOW's buffer traces: rebinding current-buffer to a
;; gensym makes rdis-upd-virtual-window-point treat the buffer as changed,
;; so the multiwindowed list gets fixed up; then delete the window's old
;; mark from the real buffer.
(defun rdis-window-totenpurge (window)
;;; This thoroughly ingenious hack totally cleans out all traces of the
;;; buffer that was in here and updates the multiwindowed list.
  (let ((ocb current-buffer)
        (current-buffer (gensym)))
    (rdis-upd-virtual-window-point window)
    (del-mark-from-buffer (bufmark window) ocb)))
;;;
;;;
;;; Demand Window Selection.
;;;
;; Select user window UTAG: save the old window's state, switch splits if
;; needed, then go to the new window's buffer and saved point.  (Fix: two
;; lines of the big block comment had lost their ";;;" markers and read
;; as code; restored.)
(defun select-window (utag)
  (prog (window)
        (and minibufferp (display-error "No window selection from minibuffer."))
        (and (or (< utag 1)
                 (> utag nuwindows))
             (display-error "Non-existant window number: " (decimal-rep utag)))
;;; This next line is a source of infinite grief and the root of all hair
;;; and bugs.  When not in pop-up mode, it ensures that ^XB/^XO done
;;; "real fast" (redisplayless) indeed updates the new buffer into the
;;; old window, if not the old buffer would not show up in the wlist.
;;; Now in pop-up mode, it is completely wrong, because people
;;; have to find-buffer-in-window current-buffer's, which would tend to update
;;; that buffer into old and new windows. "What is truth?" -Pilate.
        (and (or (not pop-up-windows)
                 (eq current-buffer (bufsym rdis-selected-wlist)))
             (rdis-update-window-struct))
        (setq selected-window utag)
        (rdis-instate-split (uwind-split utag)) ;switch splits
        (setq window (uwind utag))
        (setq rdis-selected-wlist window
              rdis-selected-windowx (uwindow-windowx (uwindows utag))
              rdis-selected-split (window-split window))
        (go-to-or-create-buffer (bufsym window))
        (cond ((bufmark window)(go-to-mark (bufmark window))))
        (rdis-update-window-struct)
        (setq damaged-flag t)))
;; Command: create a new window but keep the current one selected.
(defun create-new-window-and-stay-here ()
  (rdis-assert-not-split-mode 'create-new-window-and-stay-here)
  (rdis-nata-est-fenestra))
;; Command: create a new window and select it (it is the LRU window,
;; having just been lruified by rdis-nata-est-fenestra).
(defun create-new-window-and-go-there ()
  (rdis-assert-not-split-mode 'create-new-window-and-go-there)
  (rdis-nata-est-fenestra)
  (rdis-select-lru-window))
;; Command: select window numarg, or the LRU window when no argument.
(defun select-another-window ()
  (and (not two-window-mode)
       (display-error "Not in two window mode."))
  (cond ((not numarg)(rdis-select-lru-window))
        ((or (< numarg 1)(> numarg nuwindows))
         (display-error "Invalid window number: " (decimal-rep numarg)))
        (t (select-window numarg))))
;; Select the least-recently-used window (tail of the LRU stack).
(defun rdis-select-lru-window ()
  (or (cdr rdis-lru-stack)(display-error "No alternate window to select."))
  ;; The above error should not happen.
  (select-window (car (last rdis-lru-stack))))
;; Select the next-most-recently-used window (second on the LRU stack).
(defun select-other-window ()
  (cond ((> nuwindows 1)
         (and (cdr rdis-lru-stack)(select-window (cadr rdis-lru-stack))))
        (t (display-error "Not in 2-window mode"))))
;;;
;;;
;;; Externally available utilities needed by window editor.
;;;
;; Describe user window U for the window editor: returns a list of
;; (startline . numlines), the internal window index, the buffer symbol,
;; the text of the line point is on (or nil), and the split number.
(defun window-info (u)
  (and (or (< u 1)(> u nuwindows))
       (display-error "window-info: no such window: " (decimal-rep u)))
  (let ((w (uwind-real-window u)))
    (list (cons (startline w)(numlines w))    ;bounds
          (uwindow-windowx (uwindows u))      ;internal window index
          (bufsym w)                          ;buffer
          (cond ((null (bufmark w)) nil)      ;char string on line
                (t (let ((s (wwtcomp (car (bufmark w)))))
                     (e_lap_$gsubstr s 0 (gstrgl s)))))
          ;split number of the window
          (cond ((not split-mode-p) 0)
                (t (do ((s (window-split w))
                        (split-num 0 (1+ split-num)))
                       ((= split-num nsplits) ;should not get here!!!
                        (display-error "Could not find window split."))
                     (cond ((eq s (splits split-num))
                            (return split-num)))))))))
;; Move user window U's upper boundary by DELTAF lines, shrinking/growing
;; the window above it and sliding the divider between them.
(defun window-adjust-upper (u deltaf)
  (rdis-assert-not-split-mode 'window-adjust-upper)
  (and (or (< u 2)(> u nuwindows))
       (display-error "window-adjust-upper: bad window #: " (decimal-rep u)))
  (let ((w (uwindow-windowx (uwindows u))))
    (rdis-adjust-window (- w 2) 0 deltaf)
    (rdis-adjust-window (- w 1) deltaf 0)
    (rdis-adjust-window w deltaf (- deltaf))
    (rdis-cause-full-screen-recomputation)
    (assign-current-wsize w)))
;; Move user window U's lower boundary by DELTAF lines, adjusting the
;; divider below and the next window down to compensate.
(defun window-adjust-lower (u deltaf)
  (rdis-assert-not-split-mode 'window-adjust-lower)
  (and (or (< u 1)(> u (1- nuwindows)))
       (display-error "window-adjust-lower: bad window #: " (decimal-rep u)))
  (let ((w (uwindow-windowx (uwindows u))))
    (rdis-adjust-window w 0 deltaf)
    (rdis-adjust-window (+ w 1) deltaf 0)
    (rdis-adjust-window (+ w 2) deltaf (- deltaf))
    (rdis-cause-full-screen-recomputation)
    (assign-current-wsize w)))
;; Remember real window W's current depth as its buffer's preferred
;; window-size property.
(defun assign-current-wsize (w)
  (setq w (windows w))
  (and (bufmark w)(putprop (bufsym w)(numlines w) 'window-size)))
;;;
;;;
;;; Dynamic (pop-up) window policy and implementation department.
;;;
;;; Put buffer buf someplace appropriate on the screen.
;;; This is an esoteric form of select-window. It is critical to note
;;; that find-buffer-in-window does a rdis-update-window-strct after calling
;;; this.
;; Put BUF someplace appropriate on the screen (pop-up mode); the caller
;; (find-buffer-in-window) does rdis-update-window-struct afterwards.
;; (Fix: the line that carried the ";hair." comment lost its code in
;; extraction, leaving the "(and" unterminated; paren balance requires a
;; single call there, and the adjacent "see select-window, same thing"
;; comment identifies it as rdis-update-window-struct — the same
;; save-old-window-state step select-window performs.  Verify against a
;; pristine source.)
(defun wman-place-buffer (buf)
  (let ((u (wman-allocate-window (wman-buf-wsize buf))))
    (and (eq buf (bufsym (uwind u)))
         (rdis-update-window-struct))   ;hair.
    ;;see select-window, same thing.
    (setq selected-window u
          rdis-selected-windowx (uwindow-windowx (uwindows u))
          rdis-selected-wlist (windows rdis-selected-windowx)
          rdis-selected-split (window-split rdis-selected-wlist))
    (setq damaged-flag t)
    (go-to-or-create-buffer buf)))
;;; Find a good place of size size to put a window.
;; Find a good place of depth SIZE for a window: an unused ("nullity")
;; window if one fits, else carve the least-recently-used screen region
;; out of the existing windows.
(defun wman-allocate-window (size)
  (cond ((wman-find-unused-window size)) ;set.
        (t (wman-fill-lrux-array)
           (let ((start (wman-find-rottenest-space (1+ size))))
             ;; Unless flush against the screen top/bottom, leave a line
             ;; above for the divider.
             (or (= start 0)
                 (= (+ start size) main-window-size)
                 (setq start (1+ start)))
             (wman-metamorphose start size)))))
;;; Find out a buffer's wanted window size.
;; Determine BUF's wanted window depth from its window-size property:
;; a fixnum is taken literally; 'share means the larger existing windows
;; (capped at half the screen); otherwise fall back to the whole screen
;; (single window), the default size option, or half the screen.
(defun wman-buf-wsize (buf)
  (let ((prop (get buf 'window-size)))
    (cond ((fixp prop) prop)
          ((eq prop 'share)
           (min (// main-window-size 2)
                (do ((u 1 (1+ u))
                     (m 0))
                    ((> u nuwindows) m)
                  (setq m (max m (numlines (uwind u)))))))
          ((= nuwindows 1) main-window-size)
          (default-new-window-size)
          (t (// main-window-size 2)))))
;;; Find a totally useless window for first choice.
;; Return the index of the smallest "nullity"-buffer window of at least
;; SIZE lines, or nil if none qualifies.
(defun wman-find-unused-window (size) ;Find unused space that fits
  (do ((u 1 (1+ u))                   ;best.
       (m main-window-size)
       (mu nil))
      ((> u nuwindows) mu)
    (and (get (bufsym (uwind u)) 'nulls-windows-buffer)
         (not (< (numlines (uwind u)) size))
         (< (numlines (uwind u)) m)
         (setq m (numlines (uwind u)) mu u))))
;;; not used.
;; Return window U's 1-based depth in the LRU stack (or one past the end
;; if absent).  Currently unused (see comment above).
(defun wman-find-lruness (u)
  (do ((l rdis-lru-stack (cdr l))
       (d 1 (1+ d)))
      ((null l) d)
    (and (= (car l) u)(return d))))
;;; Set up the array with the LRU depth of each screen line.
;; Fill wman-lrux-array with the LRU depth of the window covering each
;; screen line (lines covered by no window get a beyond-the-end value).
(defun wman-fill-lrux-array ()
  (let ((ld (1+ (length rdis-lru-stack))))
    (fillarray 'wman-lrux-array (list (1+ ld)))
    (do ((l rdis-lru-stack (cdr l))
         (d 1 (1+ d)))
        ((null l) d)
      (do ((c (numlines (uwind (car l)))(1- c))
           (lx (startline (uwind (car l)))(1+ lx)))
          ((= c 0))
        (store (wman-lrux-array lx) d)))))
;; Slide a HEIGHT-line widow down the screen and return the start index
;; of the "rottenest" region: the one maximizing the summed LRU depths
;; (empty/blank screen lines score an extra point each).
(defun wman-find-rottenest-space (height)
  (setq height (min main-window-size height))
  (do ((rotsx 0 (1+ rotsx))             ;index of.
       (best-try-index)
       (just-how-rotten-was-it 0)
       (stopx (- main-window-size height)))
      ((> rotsx stopx) best-try-index)
    (do ((c height (1- c))
         (lx rotsx (1+ lx))
         (total 0))
        ((= c 0)(cond ((> total just-how-rotten-was-it)
                       (setq just-how-rotten-was-it total
                             best-try-index rotsx))))
      (and (or (null (screen lx))
               (= 0 (lineln (screen lx))))
           (aos total))                 ;Counts points!
      (setq total (+ total (wman-lrux-array lx))))))
;;;
;;;
;;; wman-metamorphose returns an index (uwindow) for a window
;;; at line start for size (not including dividers). He will
destroy all current windows contained therein , take one over ,
;;; and chop into others to make it so. He will not leave 0-line
;;; windows, nor rend an extant window in twain.
;; Carve out a user window at screen line START of depth SIZE (dividers
;; excluded): pass 1 absorbs/deletes windows overlapping the target region
;; (never leaving 0-line windows or splitting an extant one); pass 2 cuts
;; the new window out of the chosen victim (try-here), either out of its
;; bottom (case 1) or its top (case 2).  Returns the new uwindow index.
;; NOTE(review): this copy is extraction-damaged — the bare lines "WOW !",
;; "do nt move 1 up down", "Two cases wrt try - here :", "Case 1" and
;; "case 2 ." are comments that lost their ";" markers, and the code that
;; shared those lines (e.g. the exact-match "(return ux)" and the cond
;; dispatch between the two pass-2 cases) appears to be missing.  Restore
;; this function from a pristine source before relying on it.
(defun wman-metamorphose (start size)
  (rdis-assert-not-split-mode 'pop-up/ windows)
  (prog2
    (rdis-cause-full-screen-recomputation)
    (prog (mytop mybot histop hisbot ux w try-here dchop w/#)
          ;Terminology is geographic
          (setq mytop (1- start) mybot (+ size start)) ;not numeric
          (setq ux 1)                                  ;loop uwindows
     loop
          (and (> ux nuwindows)(go pass2))
          (setq w (uwind ux))
          (setq histop (1- (startline w)) hisbot (+ histop (numlines w) 1))
          (cond ((not (< histop mybot))(go pass2))     ;clear below us
                ((not (> hisbot mytop))                ;clear above us
                 (aos ux))
                ((and (= hisbot mybot)(= histop mytop)) ;'xact match!
WOW !
                ((and (< histop mytop)                 ;eat up oneliner on top
                      (not (< histop (- mytop 2))))
                 (setq mytop histop))
                ((and (not (< histop mytop))           ;completely contained within
                      (not (> hisbot mybot)))          ;flush it
                 (wman-delete-window ux)
                 (or (= ux 1)(sos ux)))
                ((and (> hisbot mybot)                 ;Bottom short.
                      (not (> hisbot (+ 2 mybot))))
                 (setq mybot hisbot))
                ((> histop mybot)(rbarf "wman-metamorphose: err 3 "
                                        (list ux mytop mybot histop hisbot)))
                ((and (< histop mytop)(> hisbot mybot)) ;dont split window
                 (setq mytop (+ mytop (- hisbot mybot)) mybot hisbot))
do nt move 1 up down
                      (> (- mybot mytop) 4))
                 (setq mybot histop))
                ((and (= (abs (- mytop hisbot)) 1)
                      (> (- mybot mytop) 4))
                 (setq mytop hisbot))
                (t (or try-here (setq try-here ux))
                   (aos ux)))
          (go loop)
;;;
     pass2
Two cases wrt try - here :
1 . We cut out of his bottom and maybe the next guy 's top .
2 . We cut out of his top alone .
There is no case of upper guy 's top , or we 'd be case 1 on him .
          (setq ux try-here)            ;for typing ease!
          (setq w/# (uwindow-windowx (uwindows ux)))
          (setq w (windows w/#))
          (setq histop (1- (startline w)) hisbot (+ histop (numlines w) 1))
          (setq size (- mybot mytop 1) start (1+ mytop))
Case 1
          (setq dchop (- mybot hisbot))
          (and (or (> hisbot mybot)
                   (not (> hisbot mytop)))
               (rbarf "wman-metamorphose.pass2: err case 1 "
                      (list ux mytop mybot histop hisbot)))
          (wman-push-down-uwnums (1+ ux))
          (rdis-adjust-window w/# 0 (- mytop hisbot))
          (wman-push-down-rwnums (1+ w/#) 2)
          (store (windows (+ 1 w/#))(wman-create-divider mytop))
          (store (windows (+ 2 w/#))
                 (wman-fenestrarum-genetrix start size (1+ ux)))
          (cond ((and (not (= ux (1- nuwindows)))
                      (> dchop 0))
                 (rdis-adjust-window (+ 3 w/#) dchop 0)
                 (rdis-adjust-window (+ 4 w/#) dchop (- dchop))))
          (store (uwindows (1+ ux))
                 (make-uwindow windowx (+ 2 w/#)
                               split (window-split w)))
          (return (1+ ux)))
case 2 .
          (and (or (not (> hisbot mybot))
                   (> histop mytop))
               (rbarf "wman-metamorphose.pass2: err case 2 "
                      (list ux mytop mybot histop hisbot)))
          (wman-push-down-uwnums ux)
          (wman-push-down-rwnums w/# 2)
          (rdis-adjust-window (+ 2 w/#) (1+ size)(- (1+ size)))
          (store (windows w/#)
                 (wman-fenestrarum-genetrix start size ux))
          (store (windows (1+ w/#))(wman-create-divider mybot))
          (store (uwindows ux)
                 (make-uwindow windowx w/# split (window-split w)))
          (return ux))))
  (setq two-window-mode t)))
;;;
;;;
;;; Friends and utilities of wman-metamorphose.
;;;
;; "Mother of windows": make a fresh real window at line SL, NL lines
;; deep, showing a synthesized "nullity.U/#" buffer flagged with the
;; nulls-windows-buffer property (so wman-find-unused-window can reuse it).
(defun wman-fenestrarum-genetrix (sl nl u/#)
  (let ((sym (maknam (append '(n u l l i t y /. )(explodec u/#)))))
    (putprop sym t 'nulls-windows-buffer)
    (make-window startline sl numlines nl bufmark nil bufsym sym window-split current-split)))
;; Make a 1-line divider window at screen line LX, displaying the
;; "----" separator line via rdis-splln-mark.
(defun wman-create-divider (lx)
  (make-window startline lx numlines 1 bufmark rdis-splln-mark bufsym nil window-split current-split))
;; Delete user window U, first moving the selection elsewhere if needed.
(defun wman-delete-window (u)
  (and (= selected-window u)
       (select-other-window))
  (rdis-delete-uwindow u))
;; Open a hole at user-window index U: bump every LRU entry and the
;; selected-window number at or above U, then slide the uwindows array
;; down and plant a placeholder uwindow in the hole.
(defun wman-push-down-uwnums (u)
  (map '(lambda (x)(or (< (car x) u)(rplaca x (1+ (car x)))))
       rdis-lru-stack)
  (or (< selected-window u)(aos selected-window))
  (aos nuwindows)
  (do x nuwindows (1- x)(= x u)
      (store (uwindows x)(uwindows (1- x))))
  (store (uwindows u) (make-uwindow windowx -1 split nil)))
;; Open D consecutive holes at real-window index W/#: renumber the
;; selected window index, slide the windows array down by D, and bump
;; every uwindow back-pointer at or above W/#.
(defun wman-push-down-rwnums (w/# d)
  (or (< rdis-selected-windowx w/#)
      (setq rdis-selected-windowx (+ rdis-selected-windowx d)))
  (setq nwindows (+ d nwindows))
  (do x (1- nwindows)(1- x)(= (- x d)(1- w/#))
      (store (windows x)(windows (- x d))))
  (do u 1 (1+ u)(> u nuwindows)
      (or (< (uwindow-windowx (uwindows u)) w/#)
          (incf (uwindow-windowx (uwindows u)) d))))
;; "The window is born a virgin": drop user window U from the LRU stack,
;; force its lines to repaint, and replace its real window with a fresh
;; nullity-buffer window of the same position and size.
(defun wman-fenestra-nata-est-virgo (u)
  (setq rdis-lru-stack (delq u rdis-lru-stack))
  (rdis-fenestra-nata-est-virgo (uwind u))
  (store (uwind u)
         (wman-fenestrarum-genetrix (startline (uwind u))
                                    (numlines (uwind u))
                                    u))
  (and (= u selected-window)
       (setq rdis-selected-wlist (uwind u)
             rdis-selected-split (window-split rdis-selected-wlist))))
;; Remember the selected window's current depth as the current buffer's
;; preferred window-size property.
(defun assign-buffer-window-size ()
  (putprop current-buffer (numlines rdis-selected-wlist) 'window-size))
;;;
;;;
;;; Buffer window size hacking primitives.
;;;
;;; Callable interface from editor.
;; Editor-callable: display BUF in a window sized per KEY (see
;; select-buffer-window-size-interpreter); without pop-up windows just go
;; to the buffer.
(defun select-buffer-window (buf key)
  (cond (pop-up-windows
         (putprop buf (select-buffer-window-size-interpreter buf key)
                  'window-size)
         (find-buffer-in-window buf))
        (t (go-to-or-create-buffer buf))))
;; Like select-buffer-window, but outside pop-up mode still locate the
;; buffer in a window (reusing the current one when BUF is current).
(defun select-buffer-find-window (buf key)
  (cond (pop-up-windows (select-buffer-window buf key))
        ((eq buf current-buffer)(find-current-buffer-in-window))
        (t (find-buffer-in-window buf))))
;; Turn a size keyword into a concrete window depth for BUF:
;; 'default-cursize uses a remembered window-size property if any;
;; 'cursize-not-empty means nil for empty buffers; otherwise fall back to
;; the buffer's line count for known buffers.  Fixnum results are clamped
;; to at least 1 and discarded (nil) above 3/5 of the screen; 'float maps
;; to nil.
(defun select-buffer-window-size-interpreter (buf size)
  (cond ((and (eq size 'default-cursize)(get buf 'window-size))
         (setq size (get buf 'window-size)))
        ((and (eq size 'cursize-not-empty)(empty-buffer-p buf))
         (setq size nil)))
  (or (fixp size)(memq size '(float nil))
      (setq size
            (cond ((memq buf known-buflist)
                   (get-buffer-state buf 'number-of-lines-in-buffer))
                  (t nil))))
  (cond ((fixp size)
         (and (< size 1)(setq size 1))
         (and (> size (// (* main-window-size 3) 5))
              (setq size nil))))
  (and (not (eq size 'float)) size))
;; Locate the current buffer in a window without updating window structs.
(defun find-current-buffer-in-window ()
  (find-buffer-in-window-noupdate current-buffer))
;; find-buffer-in-window with redisplay-time struct updates suppressed
;; during the switch; the struct is updated once, afterwards.
(defun find-buffer-in-window-noupdate (buf)
  (let ((rdis-suppress-rupdate t))
    (find-buffer-in-window buf))
  (rdis-update-window-struct))
| null | https://raw.githubusercontent.com/dancrossnyc/multics/dc291689edf955c660e57236da694630e2217151/library_dir_dir/system_library_unbundled/source/bound_multics_emacs_.s.archive/e_window_mgr_.lisp | lisp | ***********************************************************
* *
* *
* *
* *
***********************************************************
Multics Emacs Window Manager
HISTORY COMMENTS:
pre-hcom history:
as it was moved to e_option_defaults_.
audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
Slashified #'s, changed lambda's to
let's, use defmacro, use the uwind macro in places where it
it is spelled out.
audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
Fixed bug in rdis-update-window-struct
audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
Fix the rewritten lambda in rdis-window-totenpurge.
audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
remove buggy optimization from rdis-update-window-struct.
audit(86-08-12,Harvey), install(86-08-20,MR12.0-1136):
changed to use set-mark-here instead
of rplac'ing marks manually. This may also fix some bugs, since
audit(88-06-08,RBarstad), install(88-08-01,MR12.2-1071):
END HISTORY COMMENTS
for defvar
Declarations. See main redisplay for meaning.
(register-option 'pop-up-windows nil) ;moved to e_option_defaults_
Window management initialization. Called at end of rdis-init.
Window from-editor and from-redisplay updates.
Called at buffer-kill time from buffer-kill
Re-initting abuffer must flush all marks, which will
not be relocated, and might point to a scrapped temp-seg
from the redisplay. The next select on that window would
redisplay around that garbage did we not do this.
(see (cond ((bufmark... in select-window).
Dynamic redisplay-time window maintenance.
Called by redisplay et al.
makes marks
update the mark
Update LRU stack
user window index
Multiplicity of oldbuf
Multiplicity of newbuf
Leave bufsym around, no empty windows please.
Some utility functions
returns true window given user window index factoring in splits
containing split's window array
index of uwindow into same
computes number of uwindows given number of real windows in split
Called by e_ when a buffer is exited.
current buffer implied
Got one with guy in it
Update real guy
Dont upd many windows of same.
Window genesis, no-pop up case.
Window is born
no minibuf and model
only split has it all
Gets done by cause-full-rc
in creation case.
See redisplay, rdis-wdw
window + sep line size
extra lines
Window destruction
command
Enter one window mode
delete user window given index
link to real window
saved current-split
split containing uwindow
switch splits if needed
restore real current split
This is the top window
divider
This thoroughly ingenious hack totally cleans out all traces of the
buffer that was in here and updates the multiwindowed list.
Demand Window Selection.
This next line is a source of infinite grief and the root of all hair
"real fast" (redisplayless) indeed updates the new buffer into the
Now in pop-up mode, it is completely wrong, because people
have to find-buffer-in-window current-buffer's, which would tend to update
that buffer into old and new windows. "What is truth?" -Pilate.
switch splits
The above error should not happen.
Externally available utilities needed by window editor.
bounds
internal window index
buffer
char string on line
split number of the window
should not get here!!!
Dynamic (pop-up) window policy and implementation department.
This is an esoteric form of select-window. It is critical to note
that find-buffer-in-window does a rdis-update-window-strct after calling
this.
see select-window, same thing.
Find a good place of size size to put a window.
set.
Find out a buffer's wanted window size.
Find unused space that fits
best.
not used.
index of.
Counts points!
wman-metamorphose returns an index (uwindow) for a window
at line start for size (not including dividers). He will
and chop into others to make it so. He will not leave 0-line
windows, nor rend an extant window in twain.
Terminology is geographic
not numeric
loop uwindows
clear below us
clear above us
'xact match!
eat up oneliner on top
completely contained within
flush it
Bottom short.
dont split window
for typing ease!
Buffer window size hacking primitives.
Callable interface from editor. | * Copyright , ( C ) Honeywell Bull Inc. , 1988 *
* Copyright , ( C ) Honeywell Information Systems Inc. , 1982 *
* Copyright ( c ) 1978 by Massachusetts Institute of *
* Technology and Honeywell Information Systems , Inc. *
1 ) change(84 - 01 - 19,Margolin ) , approve ( ) , audit ( ) , install ( ):
27 April 1979 by BSG
To DLW , , RMS , and all the others who
think / thought about this all day long .
Modified : 19 January 1984 - Barmar - commented out register - option form ,
2 ) change(84 - 12 - 25,Margolin ) , approve(86 - 02 - 24,MCR7186 ) ,
3 ) change(84 - 12 - 26,Margolin ) , approve(86 - 02 - 24,MCR7186 ) ,
that I put in last night when rewriting lambda 's .
4 ) change(84 - 12 - 27,Margolin ) , approve(86 - 02 - 24,MCR7186 ) ,
5 ) change(84 - 12 - 28,Margolin ) , approve(86 - 02 - 24,MCR7186 ) ,
6 ) change(85 - 01 - 06,Margolin ) , approve(86 - 02 - 24,MCR7186 ) ,
it now updates curline - marklist . Changed to use make - mark
and make - eline in wman - init , rather than cons .
7 ) change(88 - 01 - 15,Schroth ) , approve(88 - 02 - 29,MCR7852 ) ,
Implement Window Mgr portions of Split Screen Display .
Used some defstruct 's suggested by .
( Multics Emacs screen mgmt , vers . 3 )
(%include defmacro)
(declare (macros nil))
(%include backquote)
(%include emacs-internal-macros)
(%include emacs-rdis-dcls)
(%include other_other)
(declare (*expr rdis-instate-split rdis-update-split-struct rdis-assert-not-split-mode))
(declare (*expr decimal-rep del-mark-from-buffer e_lap_$gsubstr empty-buffer-p
get-buffer-state go-to-mark go-to-or-create-buffer gstrgl
rdis-cause-full-screen-recomputation set-mark set-mark-here wwtcomp))
(declare (*lexpr display-error display-error-noabort minibuffer-print
display-error-remark))
(declare (genprefix /!rdis_wman_))
(declare (special current-buffer minibufferp numarg damaged-flag number-of-lines-in-buffer))
(declare (special screenheight main-window-size default-new-window-size))
(declare (special pop-up-windows rdis-suppress-rupdate tty-no-upmotionp))
(declare (special known-buflist))
(declare (special two-window-mode selected-window modelwindow minibufwindow
nwindows nuwindows rdis-splln-mark phony-modeline-edline))
(declare (special screenlinelen rdis-lru-stack rdis-multiwindowed-buflist
rdis-selected-wlist rdis-selected-windowx rdis-locdisp-window
current-split nsplits rdis-selected-split split-mode-p))
(declare (array* (fixnum (wman-lrux-array ?))))
(defun wman-init ()
(setq nuwindows 1 nwindows 3 selected-window 1 two-window-mode nil)
(and tty-no-upmotionp (setq pop-up-windows nil))
(setq rdis-suppress-rupdate nil)
changed to array pointer Aug/85 EDS
(setq uwindows (*array nil t 50.))
(*array 'wman-lrux-array 'fixnum screenheight)
(setq main-window-size (- screenheight 3) default-new-window-size nil)
(store (windows 0) (make-window numlines main-window-size))
(store (windows 1) (setq modelwindow
(make-window startline (- screenheight 3)
numlines 2)))
(store (windows 2) (setq minibufwindow
(make-window startline (- screenheight 1)
numlines 1)))
(setq rdis-splln-mark (make-mark
eline (make-eline
contents
(do ((c "--------" (catenate c c)))
((> (stringlength c) screenlinelen)
(substr c 1 (1+ screenlinelen)))))
position 0))
(setq rdis-lru-stack (list 1) rdis-multiwindowed-buflist nil)
(fillarray uwindows '((nil nil) (0 nil) (nil nil)))
(setq rdis-selected-wlist (windows 0) rdis-selected-windowx 0)
(setq rdis-locdisp-window
(make-window numlines (numlines (windows 0))))
(rplac-bufmark (windows 1) (make-mark eline phony-modeline-edline
position 0))
nil)
(defun redisplay-purge-buffer (bufnam)
(prog (orign u)
(or (boundp 'nuwindows)(return nil))
a
(setq orign nuwindows u 1)
b
(redisplay-purge-buffer-window bufnam u)
(or (= orign nuwindows)(go a))
(aos u)
(and (> u orign)(return nil))
(go b))))
(defun redisplay-buffer-reinit-purge (bufnam)
(do u 1 (1+ u)(> u nuwindows)
(let ((w (uwind u)))
(cond ((eq bufnam (bufsym w))
(rplac-bufmark w nil))))))
(defun lruify-current-window ()(rdis-lruify-window selected-window))
(defun lruify-window (u)(rdis-lruify-window u))
(defun find-buffer-in-window (bufnam)
(let ((found-window (buffer-on-display-in-window bufnam)))
(cond (found-window
(select-window found-window))
(pop-up-windows (wman-place-buffer bufnam))
(t (select-window (car (last rdis-lru-stack)))
(go-to-or-create-buffer bufnam)))
(rdis-update-window-struct)))
(defun buffer-on-display-in-window (bufnam)
(do u 1 (1+ u)(> u nuwindows)
(and (eq bufnam (bufsym (uwind u)))
(return u))))
(cond (rdis-suppress-rupdate)
((eq current-buffer (bufsym rdis-selected-wlist))
(or (bufmark rdis-selected-wlist)
(rplac-bufmark rdis-selected-wlist (set-mark)))
(t (rdis-upd-virtual-window-point rdis-selected-wlist)))
(setq rdis-lru-stack
(cons selected-window
MULTICS MACLISP DEPENDENCY EQ FIXNUMS
selected-window rdis-lru-stack))))
(defun rdis-upd-virtual-window-point (window)
85 - 09 - 10 EDS to look at windows through all splits
(cond ((numberp window)(setq window (windows window))))
(cond ((not (eq current-buffer (bufsym window)))
(testbuf)
(oldbuf (bufsym window))
((= u nuwindows)
(setq oldoccurs (1- oldoccurs) newoccurs (1+ newoccurs))
(cond ((< oldoccurs 2)
(setq rdis-multiwindowed-buflist
(delq oldbuf rdis-multiwindowed-buflist))))
(cond ((> newoccurs 1)
(setq rdis-multiwindowed-buflist
(cons current-buffer (delq current-buffer rdis-multiwindowed-buflist))))))
(setq testbuf (bufsym (uwind-real-window u)))
(cond ((eq testbuf oldbuf)(aos oldoccurs))
((eq testbuf current-buffer)(aos newoccurs))))))
(del-mark-from-buffer (bufmark window)(bufsym window))
(rplac-bufsym window current-buffer)
(rplac-bufmark window (set-mark))))
(defun redisplay-purge-buffer-window (bufnam u)
(let ((window (uwind-real-window u)))
(cond ((eq (bufsym window) bufnam)
(cond (pop-up-windows
(wman-fenestra-nata-est-virgo u)
(cond ((> nuwindows 1)
(select-other-window)
(delete-window u))))
(t (rdis-lruify-window u)
(rdis-fenestra-nata-est-virgo window)
(rplac-bufmark window nil)))))))
(defun uwind-real-window (u)
(let ((uw (uwindows u)))
(arraycall t
(defun nuwindows-in-split (nrws)
user windows in a split are followed by separator windows , hence // 2
(cond (split-mode-p (// (1+ nrws) 2))
do n't count model or minibuf
(do ((u 1 (1+ u))
(slcbuf (bufsym rdis-selected-wlist))
(window))
((> u nuwindows))
(setq window (uwind-real-window u))
(not (eq current-buffer slcbuf)))
(rdis-bufleave-upd window)
(defun rdis-bufleave-upd (window)
(cond ((null (bufmark window))
(rplac-bufmark window (set-mark)))
(t (let ((m (bufmark window)))
(set-mark-here m)))))
(and (or (> nwindows (- (cadr (arraydims windows)) 4))
(> nuwindows (- (cadr (arraydims uwindows)) 2)))
(display-error "Too many windows, total."))
(prog (ux wx window nnuw nnw quo)
(setq nnuw (1+ nuwindows)
nnw (+ 2 nwindows)
quo (// main-window-size (nuwindows-in-split nnw)))
(or (> quo 2)(display-error "Too many windows for this screen size."))
ux nnuw
window (make-window
startline 0 numlines 0 bufmark nil
bufsym (make_atom (catenate "Window " (decimal-rep ux) " Default"))
window-split current-split))
(store (windows (- nnw 1))(windows (- nwindows 1)))
(store (windows (- nnw 2))(windows (- nwindows 2)))
(store (windows (- nnw 3)) window)
(store (windows (- nnw 4)) (wman-create-divider 0))
(store (uwindows ux) (make-uwindow windowx wx split current-split))
(setq nuwindows nnuw nwindows nnw)
(setq two-window-mode t)
(rdis-reallocate-screen-evenly)
(rdis-lruify-window ux)
(rdis-update-split-struct)))
(defun rdis-lruify-window (u)
(setq rdis-lru-stack
(nconc (delq u rdis-lru-stack)(list u))))
(ctr (numlines w)(1- ctr)))
((= ctr 0))
(defun rdis-reallocate-screen-evenly ()
(let ((nuws (nuwindows-in-split nwindows)))
(do ((w 0 (1+ w))
(startl 0)
(real-ws 0)
(thisw)
(howdeep)
((= real-ws nuws))
(setq thisw (windows w))
(cond ((eq (bufmark thisw) rdis-splln-mark)
(setq howdeep 1))
((< real-ws rem)
(setq howdeep quo)
(aos real-ws))
(t (setq howdeep (1- quo))
(aos real-ws)))
(and (= real-ws nuwindows)(setq howdeep (1+ howdeep)))
(rplac-startline thisw startl)
(rplac-numlines thisw howdeep)
(setq startl (+ startl howdeep)))
(rdis-cause-full-screen-recomputation)))
(rdis-assert-not-split-mode 'remove-window)
(delete-window (or numarg selected-window)))
(defun expand-window-to-whole-screen ()
(rdis-assert-not-split-mode 'expand-window-to-whole-screen)
(do ((u 1 (1+ u))
(windows-to-go))
((> u nuwindows)
(mapc 'rdis-delete-uwindow windows-to-go))
(or (= u selected-window)
(setq windows-to-go (cons u windows-to-go)))))
(defun delete-window (u)
(rdis-assert-not-split-mode 'delete-window)
(cond ((or (< u 1)(> u nuwindows))
(display-error "Invalid window number: " (decimal-rep u)))
((not two-window-mode)
(display-error "Not in multi-window mode"))
((= u selected-window)
(select-other-window)))
(rdis-delete-uwindow u)
(rdis-update-split-struct))
(defun rdis-delete-uwindow (u)
(setq uws (uwindow-split uw))
(and (eq (uwind u) rdis-selected-wlist)
(display-error "Attempt to destroy selected window"))
only 1 window in split ?
(display-error "Attempt to destroy only window in split"))
(do uu 0 (1+ uu)(> uu nuwindows)
(cond ((> uu u)(store (uwindows (1- uu))(uwindows uu)))))
(sos nuwindows)
(rdis-delete-rwindow (uwindow-windowx uw))
(setq rdis-lru-stack (delq u rdis-lru-stack))
(and (> selected-window u)(sos selected-window))
(setq two-window-mode (> nuwindows 1))
(map '(lambda (x)(and (> (car x) u)(rplaca x (1- (car x)))))
rdis-lru-stack)))
(defun rdis-delete-rwindow (r)
(prog (upper lower nlines window scbottom upstairsadd downstairsadd)
(setq window (windows r) scbottom (1- main-window-size))
(setq upper (startline window) nlines (numlines window)
lower (+ upper (1- nlines)) nlines (1+ nlines))
(cond ((and (= upper 0)(= lower scbottom))
(rbarf "rdis-delete-rwindow: deleting all that's left")))
(rdis-window-totenpurge window)
(rdis-remove-divider (1+ r))
(rdis-adjust-window (1+ r)(- nlines) nlines))
((= lower scbottom)
(rdis-remove-divider (1- r))
(sos r)
(rdis-adjust-window (1- r) 0 nlines))
(t (rdis-remove-divider (1+ r))
(setq upstairsadd (// nlines 2) downstairsadd (- nlines upstairsadd))
(rdis-adjust-window (- r 2) 0 upstairsadd)
(rdis-adjust-window (+ r 1) (- downstairsadd) downstairsadd)))
(rdis-condense-out-window r)
(rdis-cause-full-screen-recomputation)))
(defun rdis-remove-divider (w/#)
(or (eq (bufmark (windows w/#)) rdis-splln-mark)(rbarf "rdis-remove-divider: not a divider: " w/#))
(rdis-condense-out-window w/#))
(defun rdis-condense-out-window (w/#)
(do w 0 (1+ w)(= w nwindows)
(and (> w w/#)(store (windows (1- w))(windows w))))
(and (= w/# rdis-selected-windowx)
(rbarf "rdis-condense-out-window: called on current: " w/#))
(and (> rdis-selected-windowx w/#)(sos rdis-selected-windowx))
(sos nwindows)
(do ((u 1 (1+ u))
(uw))
((> u nuwindows))
(setq uw (uwindows u))
(and (> (uwindow-windowx uw) w/#)
(decf (uwindow-windowx uw)))))
(defun rdis-adjust-window (w addstart addnl)
(setq w (windows w))
(rplac-startline w (+ addstart (startline w)))
(rplac-numlines w (+ addnl (numlines w))))
(defun rdis-window-totenpurge (window)
(let ((ocb current-buffer)
(current-buffer (gensym)))
(rdis-upd-virtual-window-point window)
(del-mark-from-buffer (bufmark window) ocb)))
(defun select-window (utag)
(prog (window)
(and minibufferp (display-error "No window selection from minibuffer."))
(and (or (< utag 1)
(> utag nuwindows))
(display-error "Non-existant window number: " (decimal-rep utag)))
and bugs . When not in pop - up mode , it ensures that ^XB/^XO done
old window , if not the old buffer would not show up in the wlist .
(and (or (not pop-up-windows)
(eq current-buffer (bufsym rdis-selected-wlist)))
(rdis-update-window-struct))
(setq selected-window utag)
(setq window (uwind utag))
(setq rdis-selected-wlist window
rdis-selected-windowx (uwindow-windowx (uwindows utag))
rdis-selected-split (window-split window))
(go-to-or-create-buffer (bufsym window))
(cond ((bufmark window)(go-to-mark (bufmark window))))
(rdis-update-window-struct)
(setq damaged-flag t)))
(defun create-new-window-and-stay-here ()
(rdis-assert-not-split-mode 'create-new-window-and-stay-here)
(rdis-nata-est-fenestra))
(defun create-new-window-and-go-there ()
(rdis-assert-not-split-mode 'create-new-window-and-go-there)
(rdis-nata-est-fenestra)
(rdis-select-lru-window))
(defun select-another-window ()
(and (not two-window-mode)
(display-error "Not in two window mode."))
(cond ((not numarg)(rdis-select-lru-window))
((or (< numarg 1)(> numarg nuwindows))
(display-error "Invalid window number: " (decimal-rep numarg)))
(t (select-window numarg))))
(defun rdis-select-lru-window ()
(or (cdr rdis-lru-stack)(display-error "No alternate window to select."))
(select-window (car (last rdis-lru-stack))))
(defun select-other-window ()
(cond ((> nuwindows 1)
(and (cdr rdis-lru-stack)(select-window (cadr rdis-lru-stack))))
(t (display-error "Not in 2-window mode"))))
(defun window-info (u)
(and (or (< u 1)(> u nuwindows))
(display-error "window-info: no such window: " (decimal-rep u)))
(let ((w (uwind-real-window u)))
(t (let ((s (wwtcomp (car (bufmark w)))))
(e_lap_$gsubstr s 0 (gstrgl s)))))
(cond ((not split-mode-p) 0)
(t (do ((s (window-split w))
(split-num 0 (1+ split-num)))
(display-error "Could not find window split."))
(cond ((eq s (splits split-num))
(return split-num)))))))))
(defun window-adjust-upper (u deltaf)
(rdis-assert-not-split-mode 'window-adjust-upper)
(and (or (< u 2)(> u nuwindows))
(display-error "window-adjust-upper: bad window #: " (decimal-rep u)))
(let ((w (uwindow-windowx (uwindows u))))
(rdis-adjust-window (- w 2) 0 deltaf)
(rdis-adjust-window (- w 1) deltaf 0)
(rdis-adjust-window w deltaf (- deltaf))
(rdis-cause-full-screen-recomputation)
(assign-current-wsize w)))
(defun window-adjust-lower (u deltaf)
(rdis-assert-not-split-mode 'window-adjust-lower)
(and (or (< u 1)(> u (1- nuwindows)))
(display-error "window-adjust-lower: bad window #: " (decimal-rep u)))
(let ((w (uwindow-windowx (uwindows u))))
(rdis-adjust-window w 0 deltaf)
(rdis-adjust-window (+ w 1) deltaf 0)
(rdis-adjust-window (+ w 2) deltaf (- deltaf))
(rdis-cause-full-screen-recomputation)
(assign-current-wsize w)))
(defun assign-current-wsize (w)
(setq w (windows w))
(and (bufmark w)(putprop (bufsym w)(numlines w) 'window-size)))
Put buffer buf someplace appropriate on the screen .
(defun wman-place-buffer (buf)
(let ((u (wman-allocate-window (wman-buf-wsize buf))))
(and (eq buf (bufsym (uwind u)))
hair .
(setq selected-window u
rdis-selected-windowx (uwindow-windowx (uwindows u))
rdis-selected-wlist (windows rdis-selected-windowx)
rdis-selected-split (window-split rdis-selected-wlist))
(setq damaged-flag t)
(go-to-or-create-buffer buf)))
(defun wman-allocate-window (size)
(t (wman-fill-lrux-array)
(let ((start (wman-find-rottenest-space (1+ size))))
(or (= start 0)
(= (+ start size) main-window-size)
(setq start (1+ start)))
(wman-metamorphose start size)))))
(defun wman-buf-wsize (buf)
(let ((prop (get buf 'window-size)))
(cond ((fixp prop) prop)
((eq prop 'share)
(min (// main-window-size 2)
(do ((u 1 (1+ u))
(m 0))
((> u nuwindows) m)
(setq m (max m (numlines (uwind u)))))))
((= nuwindows 1) main-window-size)
(default-new-window-size)
(t (// main-window-size 2)))))
Find a totally useless window for first choice .
(m main-window-size)
(mu nil))
((> u nuwindows) mu)
(and (get (bufsym (uwind u)) 'nulls-windows-buffer)
(not (< (numlines (uwind u)) size))
(< (numlines (uwind u)) m)
(setq m (numlines (uwind u)) mu u))))
(defun wman-find-lruness (u)
(do ((l rdis-lru-stack (cdr l))
(d 1 (1+ d)))
((null l) d)
(and (= (car l) u)(return d))))
Set up the array with the LRU depth of each screen line .
(defun wman-fill-lrux-array ()
(let ((ld (1+ (length rdis-lru-stack))))
(fillarray 'wman-lrux-array (list (1+ ld)))
(do ((l rdis-lru-stack (cdr l))
(d 1 (1+ d)))
((null l) d)
(do ((c (numlines (uwind (car l)))(1- c))
(lx (startline (uwind (car l)))(1+ lx)))
((= c 0))
(store (wman-lrux-array lx) d)))))
(defun wman-find-rottenest-space (height)
(setq height (min main-window-size height))
(best-try-index)
(just-how-rotten-was-it 0)
(stopx (- main-window-size height)))
((> rotsx stopx) best-try-index)
(do ((c height (1- c))
(lx rotsx (1+ lx))
(total 0))
((= c 0)(cond ((> total just-how-rotten-was-it)
(setq just-how-rotten-was-it total
best-try-index rotsx))))
(and (or (null (screen lx))
(= 0 (lineln (screen lx))))
(setq total (+ total (wman-lrux-array lx))))))
destroy all current windows contained therein , take one over ,
(defun wman-metamorphose (start size)
(rdis-assert-not-split-mode 'pop-up/ windows)
(prog2
(rdis-cause-full-screen-recomputation)
(prog (mytop mybot histop hisbot ux w try-here dchop w/#)
loop
(and (> ux nuwindows)(go pass2))
(setq w (uwind ux))
(setq histop (1- (startline w)) hisbot (+ histop (numlines w) 1))
(aos ux))
WOW !
(not (< histop (- mytop 2))))
(setq mytop histop))
(wman-delete-window ux)
(or (= ux 1)(sos ux)))
(not (> hisbot (+ 2 mybot))))
(setq mybot hisbot))
((> histop mybot)(rbarf "wman-metamorphose: err 3 "
(list ux mytop mybot histop hisbot)))
(setq mytop (+ mytop (- hisbot mybot)) mybot hisbot))
do nt move 1 up down
(> (- mybot mytop) 4))
(setq mybot histop))
((and (= (abs (- mytop hisbot)) 1)
(> (- mybot mytop) 4))
(setq mytop hisbot))
(t (or try-here (setq try-here ux))
(aos ux)))
(go loop)
pass2
Two cases wrt try - here :
1 . We cut out of his bottom and maybe the next guy 's top .
2 . We cut out of his top alone .
There is no case of upper guy 's top , or we 'd be case 1 on him .
(setq w/# (uwindow-windowx (uwindows ux)))
(setq w (windows w/#))
(setq histop (1- (startline w)) hisbot (+ histop (numlines w) 1))
(setq size (- mybot mytop 1) start (1+ mytop))
Case 1
(setq dchop (- mybot hisbot))
(and (or (> hisbot mybot)
(not (> hisbot mytop)))
(rbarf "wman-metamorphose.pass2: err case 1 "
(list ux mytop mybot histop hisbot)))
(wman-push-down-uwnums (1+ ux))
(rdis-adjust-window w/# 0 (- mytop hisbot))
(wman-push-down-rwnums (1+ w/#) 2)
(store (windows (+ 1 w/#))(wman-create-divider mytop))
(store (windows (+ 2 w/#))
(wman-fenestrarum-genetrix start size (1+ ux)))
(cond ((and (not (= ux (1- nuwindows)))
(> dchop 0))
(rdis-adjust-window (+ 3 w/#) dchop 0)
(rdis-adjust-window (+ 4 w/#) dchop (- dchop))))
(store (uwindows (1+ ux))
(make-uwindow windowx (+ 2 w/#)
split (window-split w)))
(return (1+ ux)))
case 2 .
(and (or (not (> hisbot mybot))
(> histop mytop))
(rbarf "wman-metamorphose.pass2: err case 2 "
(list ux mytop mybot histop hisbot)))
(wman-push-down-uwnums ux)
(wman-push-down-rwnums w/# 2)
(rdis-adjust-window (+ 2 w/#) (1+ size)(- (1+ size)))
(store (windows w/#)
(wman-fenestrarum-genetrix start size ux))
(store (windows (1+ w/#))(wman-create-divider mybot))
(store (uwindows ux)
(make-uwindow windowx w/# split (window-split w)))
(return ux))))
(setq two-window-mode t)))
Friends and utilities of wman - metamorphose .
(defun wman-fenestrarum-genetrix (sl nl u/#)
(let ((sym (maknam (append '(n u l l i t y /. )(explodec u/#)))))
(putprop sym t 'nulls-windows-buffer)
(make-window startline sl numlines nl bufmark nil bufsym sym window-split current-split)))
(defun wman-create-divider (lx)
(make-window startline lx numlines 1 bufmark rdis-splln-mark bufsym nil window-split current-split))
(defun wman-delete-window (u)
(and (= selected-window u)
(select-other-window))
(rdis-delete-uwindow u))
(defun wman-push-down-uwnums (u)
(map '(lambda (x)(or (< (car x) u)(rplaca x (1+ (car x)))))
rdis-lru-stack)
(or (< selected-window u)(aos selected-window))
(aos nuwindows)
(do x nuwindows (1- x)(= x u)
(store (uwindows x)(uwindows (1- x))))
(store (uwindows u) (make-uwindow windowx -1 split nil)))
(defun wman-push-down-rwnums (w/# d)
(or (< rdis-selected-windowx w/#)
(setq rdis-selected-windowx (+ rdis-selected-windowx d)))
(setq nwindows (+ d nwindows))
(do x (1- nwindows)(1- x)(= (- x d)(1- w/#))
(store (windows x)(windows (- x d))))
(do u 1 (1+ u)(> u nuwindows)
(or (< (uwindow-windowx (uwindows u)) w/#)
(incf (uwindow-windowx (uwindows u)) d))))
(defun wman-fenestra-nata-est-virgo (u)
(setq rdis-lru-stack (delq u rdis-lru-stack))
(rdis-fenestra-nata-est-virgo (uwind u))
(store (uwind u)
(wman-fenestrarum-genetrix (startline (uwind u))
(numlines (uwind u))
u))
(and (= u selected-window)
(setq rdis-selected-wlist (uwind u)
rdis-selected-split (window-split rdis-selected-wlist))))
(defun assign-buffer-window-size ()
(putprop current-buffer (numlines rdis-selected-wlist) 'window-size))
(defun select-buffer-window (buf key)
(cond (pop-up-windows
(putprop buf (select-buffer-window-size-interpreter buf key)
'window-size)
(find-buffer-in-window buf))
(t (go-to-or-create-buffer buf))))
(defun select-buffer-find-window (buf key)
(cond (pop-up-windows (select-buffer-window buf key))
((eq buf current-buffer)(find-current-buffer-in-window))
(t (find-buffer-in-window buf))))
(defun select-buffer-window-size-interpreter (buf size)
(cond ((and (eq size 'default-cursize)(get buf 'window-size))
(setq size (get buf 'window-size)))
((and (eq size 'cursize-not-empty)(empty-buffer-p buf))
(setq size nil)))
(or (fixp size)(memq size '(float nil))
(setq size
(cond ((memq buf known-buflist)
(get-buffer-state buf 'number-of-lines-in-buffer))
(t nil))))
(cond ((fixp size)
(and (< size 1)(setq size 1))
(and (> size (// (* main-window-size 3) 5))
(setq size nil))))
(and (not (eq size 'float)) size))
(defun find-current-buffer-in-window ()
(find-buffer-in-window-noupdate current-buffer))
(defun find-buffer-in-window-noupdate (buf)
(let ((rdis-suppress-rupdate t))
(find-buffer-in-window buf))
(rdis-update-window-struct))
|
43f74866e3eba91f5fb589b010f7bf960a1f2b1e54b3b8951c974fbdf77af4f1 | diogob/postgres-websockets | ServerSpec.hs | module ServerSpec (spec) where
import Control.Lens
import Data.Aeson.Lens
import Network.Socket (withSocketsDo)
import qualified Network.WebSockets as WS
import PostgresWebsockets
import PostgresWebsockets.Config
import Protolude
import Test.Hspec
testServerConfig :: AppConfig
testServerConfig =
AppConfig
{ configDatabase = "postgres:roottoor@localhost:5432/postgres_ws_test",
configPath = Nothing,
configHost = "*",
configPort = 8080,
configListenChannel = "postgres-websockets-test-channel",
configJwtSecret = "reallyreallyreallyreallyverysafe",
configMetaChannel = Nothing,
configJwtSecretIsBase64 = False,
configPool = 10,
configRetries = 5,
configReconnectInterval = Nothing,
configCertificateFile = Nothing,
configKeyFile = Nothing
}
startTestServer :: IO ThreadId
startTestServer = do
threadId <- forkIO $ serve testServerConfig
threadDelay 500000
pure threadId
withServer :: IO () -> IO ()
withServer action =
bracket
startTestServer
(\tid -> killThread tid >> threadDelay 500000)
(const action)
sendWsData :: Text -> Text -> IO ()
sendWsData uri msg =
withSocketsDo $
WS.runClient
"127.0.0.1"
(configPort testServerConfig)
(toS uri)
(`WS.sendTextData` msg)
testChannel :: Text
testChannel = "/test/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJtb2RlIjoicncifQ.auy9z4-pqoVEAay9oMi1FuG7ux_C_9RQCH8-wZgej18"
secondaryChannel :: Text
secondaryChannel = "/secondary/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJtb2RlIjoicncifQ.auy9z4-pqoVEAay9oMi1FuG7ux_C_9RQCH8-wZgej18"
testAndSecondaryChannel :: Text
testAndSecondaryChannel = "/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJtb2RlIjoicnciLCJjaGFubmVscyI6WyJ0ZXN0Iiwic2Vjb25kYXJ5Il19.7tB2A9MhpY4tyqhfnHNy5FUYw4gwpKtL4UAHBXbNEz4"
waitForWsData :: Text -> IO (MVar ByteString)
waitForWsData uri = do
msg <- newEmptyMVar
void $
forkIO $
withSocketsDo $
WS.runClient
"127.0.0.1"
(configPort testServerConfig)
(toS uri)
( \c -> do
m <- WS.receiveData c
putMVar msg m
)
threadDelay 10000
pure msg
waitForMultipleWsData :: Int -> Text -> IO (MVar [ByteString])
waitForMultipleWsData messageCount uri = do
msg <- newEmptyMVar
void $
forkIO $
withSocketsDo $
WS.runClient
"127.0.0.1"
(configPort testServerConfig)
(toS uri)
( \c -> do
m <- replicateM messageCount (WS.receiveData c)
putMVar msg m
)
threadDelay 1000
pure msg
spec :: Spec
spec = around_ withServer $
describe "serve" $ do
it "should be able to send messages to test server" $
sendWsData testChannel "test data"
it "should be able to receive messages from test server" $ do
msg <- waitForWsData testChannel
sendWsData testChannel "test data"
msgJson <- takeMVar msg
(msgJson ^? key "payload" . _String) `shouldBe` Just "test data"
it "should be able to send messages to multiple channels in one shot" $ do
msg <- waitForWsData testChannel
secondaryMsg <- waitForWsData secondaryChannel
sendWsData testAndSecondaryChannel "test data"
msgJson <- takeMVar msg
secondaryMsgJson <- takeMVar secondaryMsg
(msgJson ^? key "payload" . _String) `shouldBe` Just "test data"
(msgJson ^? key "channel" . _String) `shouldBe` Just "test"
(secondaryMsgJson ^? key "payload" . _String) `shouldBe` Just "test data"
(secondaryMsgJson ^? key "channel" . _String) `shouldBe` Just "secondary"
it "should be able to receive from multiple channels in one shot" $ do
msgs <- waitForMultipleWsData 2 testAndSecondaryChannel
sendWsData testAndSecondaryChannel "test data"
msgsJson <- takeMVar msgs
forM_
msgsJson
(\msgJson -> (msgJson ^? key "payload" . _String) `shouldBe` Just "test data")
| null | https://raw.githubusercontent.com/diogob/postgres-websockets/679d471d2682be6529a2447a145d9362648ed732/test/ServerSpec.hs | haskell | module ServerSpec (spec) where
import Control.Lens
import Data.Aeson.Lens
import Network.Socket (withSocketsDo)
import qualified Network.WebSockets as WS
import PostgresWebsockets
import PostgresWebsockets.Config
import Protolude
import Test.Hspec
testServerConfig :: AppConfig
testServerConfig =
AppConfig
{ configDatabase = "postgres:roottoor@localhost:5432/postgres_ws_test",
configPath = Nothing,
configHost = "*",
configPort = 8080,
configListenChannel = "postgres-websockets-test-channel",
configJwtSecret = "reallyreallyreallyreallyverysafe",
configMetaChannel = Nothing,
configJwtSecretIsBase64 = False,
configPool = 10,
configRetries = 5,
configReconnectInterval = Nothing,
configCertificateFile = Nothing,
configKeyFile = Nothing
}
startTestServer :: IO ThreadId
startTestServer = do
threadId <- forkIO $ serve testServerConfig
threadDelay 500000
pure threadId
withServer :: IO () -> IO ()
withServer action =
bracket
startTestServer
(\tid -> killThread tid >> threadDelay 500000)
(const action)
sendWsData :: Text -> Text -> IO ()
sendWsData uri msg =
withSocketsDo $
WS.runClient
"127.0.0.1"
(configPort testServerConfig)
(toS uri)
(`WS.sendTextData` msg)
testChannel :: Text
testChannel = "/test/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJtb2RlIjoicncifQ.auy9z4-pqoVEAay9oMi1FuG7ux_C_9RQCH8-wZgej18"
secondaryChannel :: Text
secondaryChannel = "/secondary/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJtb2RlIjoicncifQ.auy9z4-pqoVEAay9oMi1FuG7ux_C_9RQCH8-wZgej18"
testAndSecondaryChannel :: Text
testAndSecondaryChannel = "/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJtb2RlIjoicnciLCJjaGFubmVscyI6WyJ0ZXN0Iiwic2Vjb25kYXJ5Il19.7tB2A9MhpY4tyqhfnHNy5FUYw4gwpKtL4UAHBXbNEz4"
waitForWsData :: Text -> IO (MVar ByteString)
waitForWsData uri = do
msg <- newEmptyMVar
void $
forkIO $
withSocketsDo $
WS.runClient
"127.0.0.1"
(configPort testServerConfig)
(toS uri)
( \c -> do
m <- WS.receiveData c
putMVar msg m
)
threadDelay 10000
pure msg
waitForMultipleWsData :: Int -> Text -> IO (MVar [ByteString])
waitForMultipleWsData messageCount uri = do
msg <- newEmptyMVar
void $
forkIO $
withSocketsDo $
WS.runClient
"127.0.0.1"
(configPort testServerConfig)
(toS uri)
( \c -> do
m <- replicateM messageCount (WS.receiveData c)
putMVar msg m
)
threadDelay 1000
pure msg
spec :: Spec
spec = around_ withServer $
describe "serve" $ do
it "should be able to send messages to test server" $
sendWsData testChannel "test data"
it "should be able to receive messages from test server" $ do
msg <- waitForWsData testChannel
sendWsData testChannel "test data"
msgJson <- takeMVar msg
(msgJson ^? key "payload" . _String) `shouldBe` Just "test data"
it "should be able to send messages to multiple channels in one shot" $ do
msg <- waitForWsData testChannel
secondaryMsg <- waitForWsData secondaryChannel
sendWsData testAndSecondaryChannel "test data"
msgJson <- takeMVar msg
secondaryMsgJson <- takeMVar secondaryMsg
(msgJson ^? key "payload" . _String) `shouldBe` Just "test data"
(msgJson ^? key "channel" . _String) `shouldBe` Just "test"
(secondaryMsgJson ^? key "payload" . _String) `shouldBe` Just "test data"
(secondaryMsgJson ^? key "channel" . _String) `shouldBe` Just "secondary"
it "should be able to receive from multiple channels in one shot" $ do
msgs <- waitForMultipleWsData 2 testAndSecondaryChannel
sendWsData testAndSecondaryChannel "test data"
msgsJson <- takeMVar msgs
forM_
msgsJson
(\msgJson -> (msgJson ^? key "payload" . _String) `shouldBe` Just "test data")
| |
68fe68be87c8e0e285f0cc648adc1c2783acdad2b3e58b0221d6fa411f401c45 | geophf/1HaskellADay | Exercise.hs | module Y2018.M02.D27.Exercise where
-
From
-you-solve-these-mensa-puzzles5/
( 7 ) If works one shift every second day , works once every third day ,
and works every fifth day , how often do all three colleagues work
together ?
This problem looks very fizz - buzzy .
HOW DO YOU SOLVE THIS ?
Here 's one approach :
-
From
-you-solve-these-mensa-puzzles5/
(7) If JT works one shift every second day, Laura works once every third day,
and Aditya works every fifth day, how often do all three colleagues work
together?
This problem looks very fizz-buzzy.
HOW DO YOU SOLVE THIS?
Here's one approach:
--}
data Peeps = JT | Laura | Aditya
deriving (Eq, Show)
type Day = Int
scheduler :: [(Peeps, Day)] -> [[Peeps]]
scheduler schedulesFor = undefined
with the n'th value with all three being how often .
-- or you could solve it mathematically to:
oftenness :: [(Peeps, Day)] -> Day
oftenness schedulesFor = undefined
with the n'th day being returned MATHEMATICALLY !
-- your choice.
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2018/M02/D27/Exercise.hs | haskell | }
or you could solve it mathematically to:
your choice. | module Y2018.M02.D27.Exercise where
-
From
-you-solve-these-mensa-puzzles5/
( 7 ) If works one shift every second day , works once every third day ,
and works every fifth day , how often do all three colleagues work
together ?
This problem looks very fizz - buzzy .
HOW DO YOU SOLVE THIS ?
Here 's one approach :
-
From
-you-solve-these-mensa-puzzles5/
(7) If JT works one shift every second day, Laura works once every third day,
and Aditya works every fifth day, how often do all three colleagues work
together?
This problem looks very fizz-buzzy.
HOW DO YOU SOLVE THIS?
Here's one approach:
data Peeps = JT | Laura | Aditya
deriving (Eq, Show)
type Day = Int
scheduler :: [(Peeps, Day)] -> [[Peeps]]
scheduler schedulesFor = undefined
with the n'th value with all three being how often .
oftenness :: [(Peeps, Day)] -> Day
oftenness schedulesFor = undefined
with the n'th day being returned MATHEMATICALLY !
|
8eecd3a4d2a567db5c520d18fbaaa6c971faf0a1b7d0f2fe80b2d1df572b5a49 | mzp/ocaml-hoogle | controller.mli | type t =
String of string
| Bool of bool
| Table of (string * t) list list
val format : Chconfig.t list -> Search.t -> (string * t) list
* formatter for [ Search.t ]
val pagenation : offset:int -> window:int -> 'a list -> (string * t) list * 'a list
val available : Chconfig.t list -> t
| null | https://raw.githubusercontent.com/mzp/ocaml-hoogle/dbfb2e970d65e41936baa0ba51c7f7596cc6c369/controller.mli | ocaml | type t =
String of string
| Bool of bool
| Table of (string * t) list list
val format : Chconfig.t list -> Search.t -> (string * t) list
* formatter for [ Search.t ]
val pagenation : offset:int -> window:int -> 'a list -> (string * t) list * 'a list
val available : Chconfig.t list -> t
| |
bfc29284f078f4858677ce601b7ebe793a29218759987a376b8af0b2400e0773 | andorp/bead | TestSet.hs | # LANGUAGE GeneralizedNewtypeDeriving #
module Test.Tasty.TestSet (
TestSet
, TestName
, buildTestTree
, runTestSet
, group
, shrink
, add
, test
, assertEquals
, assertProperty
, assertSatisfy
, ioTest
, equals
, satisfies
, Partition(..)
, eqPartitions
) where
import Control.Applicative
import Control.Monad.Identity
import Control.Monad.IO.Class (MonadIO(..), liftIO)
import Control.Monad.State.Class
import qualified Control.Monad.Trans.State as CMS
import qualified Test.HUnit as HUnit (assertEqual)
import Test.Tasty
import qualified Test.Tasty.HUnit as HU
import qualified Test.Tasty.QuickCheck as QC
newtype TestSet a = TestSet { unTest :: (CMS.StateT [TestTree] Identity a) }
deriving (Functor, Applicative, Monad, MonadState [TestTree])
buildTestTree :: TestName -> TestSet a -> TestTree
buildTestTree name = testGroup name . runIdentity . flip CMS.execStateT [] . unTest
runTestSet = defaultMain . buildTestTree ""
group :: TestName -> TestSet a -> TestSet ()
group name test = add (buildTestTree name test)
shrink :: TestName -> TestSet a -> TestSet b -> TestSet ()
shrink name tests shrinks = do
group name tests
group (name ++ "-shrinks") shrinks
type Message = String
add :: TestTree -> TestSet ()
add t = modify (\ts -> ts ++ [t])
test = add
assertEquals :: (Eq a, Show a) => TestName -> a -> a -> Message -> TestSet ()
assertEquals name expected found msg = add (HU.testCase name (HU.assertEqual msg expected found))
assertSatisfy :: TestName -> (a -> Bool) -> a -> Message -> TestSet ()
assertSatisfy name predicate value msg = add (HU.testCase name (HU.assertBool msg (predicate value)))
assertProperty :: (Show a) => TestName -> (a -> Bool) -> QC.Gen a -> Message -> TestSet ()
assertProperty name predicate generator _msg = add (QC.testProperty name (QC.forAll generator predicate))
IO related test suite
ioTest :: TestName -> IO a -> TestSet ()
ioTest name computation = add (HU.testCase name (void computation))
equals :: (Eq a, Show a, MonadIO io) => a -> a -> Message -> io ()
equals expected found msg = liftIO $ HUnit.assertEqual msg expected found
satisfies :: (MonadIO io) => a -> (a -> Bool) -> Message -> io ()
satisfies x p msg = liftIO $ HUnit.assertEqual msg True (p x)
data Partition a b = Partition TestName a b Message
partitionToTestTree f (Partition name x y msg) = assertEquals name y (f x) msg
eqPartitions :: (Eq b, Show b) => (a -> b) -> [Partition a b] -> TestSet ()
eqPartitions function = mapM_ (partitionToTestTree function)
| null | https://raw.githubusercontent.com/andorp/bead/280dc9c3d5cfe1b9aac0f2f802c705ae65f02ac2/src/Test/Tasty/TestSet.hs | haskell | # LANGUAGE GeneralizedNewtypeDeriving #
module Test.Tasty.TestSet (
TestSet
, TestName
, buildTestTree
, runTestSet
, group
, shrink
, add
, test
, assertEquals
, assertProperty
, assertSatisfy
, ioTest
, equals
, satisfies
, Partition(..)
, eqPartitions
) where
import Control.Applicative
import Control.Monad.Identity
import Control.Monad.IO.Class (MonadIO(..), liftIO)
import Control.Monad.State.Class
import qualified Control.Monad.Trans.State as CMS
import qualified Test.HUnit as HUnit (assertEqual)
import Test.Tasty
import qualified Test.Tasty.HUnit as HU
import qualified Test.Tasty.QuickCheck as QC
newtype TestSet a = TestSet { unTest :: (CMS.StateT [TestTree] Identity a) }
deriving (Functor, Applicative, Monad, MonadState [TestTree])
buildTestTree :: TestName -> TestSet a -> TestTree
buildTestTree name = testGroup name . runIdentity . flip CMS.execStateT [] . unTest
runTestSet = defaultMain . buildTestTree ""
group :: TestName -> TestSet a -> TestSet ()
group name test = add (buildTestTree name test)
shrink :: TestName -> TestSet a -> TestSet b -> TestSet ()
shrink name tests shrinks = do
group name tests
group (name ++ "-shrinks") shrinks
type Message = String
add :: TestTree -> TestSet ()
add t = modify (\ts -> ts ++ [t])
test = add
assertEquals :: (Eq a, Show a) => TestName -> a -> a -> Message -> TestSet ()
assertEquals name expected found msg = add (HU.testCase name (HU.assertEqual msg expected found))
assertSatisfy :: TestName -> (a -> Bool) -> a -> Message -> TestSet ()
assertSatisfy name predicate value msg = add (HU.testCase name (HU.assertBool msg (predicate value)))
assertProperty :: (Show a) => TestName -> (a -> Bool) -> QC.Gen a -> Message -> TestSet ()
assertProperty name predicate generator _msg = add (QC.testProperty name (QC.forAll generator predicate))
IO related test suite
ioTest :: TestName -> IO a -> TestSet ()
ioTest name computation = add (HU.testCase name (void computation))
equals :: (Eq a, Show a, MonadIO io) => a -> a -> Message -> io ()
equals expected found msg = liftIO $ HUnit.assertEqual msg expected found
satisfies :: (MonadIO io) => a -> (a -> Bool) -> Message -> io ()
satisfies x p msg = liftIO $ HUnit.assertEqual msg True (p x)
data Partition a b = Partition TestName a b Message
partitionToTestTree f (Partition name x y msg) = assertEquals name y (f x) msg
eqPartitions :: (Eq b, Show b) => (a -> b) -> [Partition a b] -> TestSet ()
eqPartitions function = mapM_ (partitionToTestTree function)
| |
44aeda5adf4d8bffafd73c9a6b24ff8fabe6255c74b4b966e7368ca2de140111 | re-ops/re-cipes | tmux.clj | (ns re-cipes.tmux
"Setting up tmux and related utilities"
(:require
[re-cipes.access :refer (permissions)]
[re-cog.resources.git :refer (clone)]
[re-cog.common.recipe :refer (require-recipe)]
[re-cog.facts.config :refer (configuration)]
[re-cog.resources.download :refer (download)]
[re-cog.resources.file :refer (symlink directory chmod)]))
(require-recipe)
(def-inline tmux
"Setup tmux for user"
[]
(let [{:keys [home user]} (configuration)
dest (<< "~{home}/.tmux")]
(package "tmux" :present)
(clone "" dest {})
(directory (<< "~{dest}/plugins/") :present)
(clone "-plugins/tpm" (<< "~{dest}/plugins/tpm") {})
(chown dest user user {:recursive true})
(symlink (<< "~{home}/.tmux.conf") (<< "~{dest}/.tmux.conf"))
(chown (<< "~{home}/.tmux.conf") user user {})))
(def-inline {:depends #'re-cipes.access/permissions} tmx
"Setting up tmx "
[]
(let [version "0.2.2"
sum "5d624f40caef8b8c5f8b420474778499f55ffeab7a2fc19c892d00aa20f05c70"
url (<< "/~{version}/tmx")
dest "/usr/local/bin/tmx"]
(download url dest sum)
(chmod dest "+x" {})))
| null | https://raw.githubusercontent.com/re-ops/re-cipes/183bdb637e54df1c6f20e8d529132e0c004e8ead/src/re_cipes/tmux.clj | clojure | (ns re-cipes.tmux
"Setting up tmux and related utilities"
(:require
[re-cipes.access :refer (permissions)]
[re-cog.resources.git :refer (clone)]
[re-cog.common.recipe :refer (require-recipe)]
[re-cog.facts.config :refer (configuration)]
[re-cog.resources.download :refer (download)]
[re-cog.resources.file :refer (symlink directory chmod)]))
(require-recipe)
(def-inline tmux
"Setup tmux for user"
[]
(let [{:keys [home user]} (configuration)
dest (<< "~{home}/.tmux")]
(package "tmux" :present)
(clone "" dest {})
(directory (<< "~{dest}/plugins/") :present)
(clone "-plugins/tpm" (<< "~{dest}/plugins/tpm") {})
(chown dest user user {:recursive true})
(symlink (<< "~{home}/.tmux.conf") (<< "~{dest}/.tmux.conf"))
(chown (<< "~{home}/.tmux.conf") user user {})))
(def-inline {:depends #'re-cipes.access/permissions} tmx
"Setting up tmx "
[]
(let [version "0.2.2"
sum "5d624f40caef8b8c5f8b420474778499f55ffeab7a2fc19c892d00aa20f05c70"
url (<< "/~{version}/tmx")
dest "/usr/local/bin/tmx"]
(download url dest sum)
(chmod dest "+x" {})))
| |
2aed03e0b1f9c266b6df4cfd2526b3735e54a40506aa2168f9aa672039e05d5c | theia-ide/racket-language-server | conversion.rkt | #lang racket/base
(require racket/class
racket/list
racket/match
"lsp.rkt"
"../lang/check-syntax.rkt") ; For exception and warning structs
(define (pos->line/char t pos)
(define line (send t position-paragraph pos))
(define line-begin (send t paragraph-start-position line))
(define char (- pos line-begin))
(values line char))
(define (line/char->pos t line char)
(+ char (send t paragraph-start-position line)))
(define (pos->Position t pos)
(define-values (line char) (pos->line/char t pos))
(Position #:line line #:character char))
(define (pos/pos->Range t start end)
(Range #:start (pos->Position t start)
#:end (pos->Position t end)))
(define (srcloc->Range t sl)
(match-define (srcloc src line col pos span) sl)
(Range #:start (pos->Position t pos)
#:end (pos->Position t (+ pos span))))
(define ((exception->Diagnostics t) e)
(define-values (code msg srclocs severity)
(match e
[(exception code msg srclocs)
(values code msg srclocs DiagnosticSeverityError)]
[(warning code msg srclocs)
(values code msg srclocs DiagnosticSeverityWarning)]))
(map (lambda (sl)
(Diagnostic #:range (srcloc->Range t sl)
#:message msg
#:severity severity
#:code code
#:source "Racket"
#:relatedInformation empty))
srclocs))
(provide (all-defined-out))
| null | https://raw.githubusercontent.com/theia-ide/racket-language-server/e397a130676504fc8b053e6b1f48d49b77b9ad98/protocol/conversion.rkt | racket | For exception and warning structs | #lang racket/base
(require racket/class
racket/list
racket/match
"lsp.rkt"
(define (pos->line/char t pos)
(define line (send t position-paragraph pos))
(define line-begin (send t paragraph-start-position line))
(define char (- pos line-begin))
(values line char))
(define (line/char->pos t line char)
(+ char (send t paragraph-start-position line)))
(define (pos->Position t pos)
(define-values (line char) (pos->line/char t pos))
(Position #:line line #:character char))
(define (pos/pos->Range t start end)
(Range #:start (pos->Position t start)
#:end (pos->Position t end)))
(define (srcloc->Range t sl)
(match-define (srcloc src line col pos span) sl)
(Range #:start (pos->Position t pos)
#:end (pos->Position t (+ pos span))))
(define ((exception->Diagnostics t) e)
(define-values (code msg srclocs severity)
(match e
[(exception code msg srclocs)
(values code msg srclocs DiagnosticSeverityError)]
[(warning code msg srclocs)
(values code msg srclocs DiagnosticSeverityWarning)]))
(map (lambda (sl)
(Diagnostic #:range (srcloc->Range t sl)
#:message msg
#:severity severity
#:code code
#:source "Racket"
#:relatedInformation empty))
srclocs))
(provide (all-defined-out))
|
dc693a5522c4ba02285ac6f90167b4c7e0d43dca003863b1948041905844edf6 | alephcloud/hs-yet-another-logger | Logger.hs | Copyright ( c ) 2014 - 2015 PivotCloud , Inc.
--
-- System.Logger.Logger
--
-- Please feel free to contact us at with any
-- contributions, additions, or other feedback; we would love to hear from
-- you.
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may
-- not use this file except in compliance with the License. You may obtain a
-- copy of the License at -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-- License for the specific language governing permissions and limitations
-- under the License.
-- |
-- Module: System.Logger.Logger
-- Description: Yet Another Logger Implementation
Copyright : Copyright ( c ) 2014 - 2015 PivotCloud , Inc.
-- License: Apache License, Version 2.0
Maintainer : < >
-- Stability: experimental
--
-- This module provides a logger that implements the logger interface
-- that is defined in "System.Logger.Types".
--
-- All the code of this module is in "System.Logger.Logger.Internal".
--
-- The definitions in "System.Logger.Types" are re-exported by this module.
--
# LANGUAGE UnicodeSyntax #
module System.Logger.Logger
(
-- * Re-Export Logger Interface
module System.Logger.Types
-- * Logger
, Logger
, withLogger
, withLogger_
, withLogFunction
, withLogFunction_
* LoggerT
, LoggerT
, runLoggerT
, runLogT
-- * Configuration Types
-- ** Logger Configuration
, LoggerConfig(..)
, loggerConfigQueueSize
, loggerConfigThreshold
, loggerConfigScope
, loggerConfigPolicy
, loggerConfigExceptionLimit
, loggerConfigExceptionWait
, loggerConfigExitTimeout
, defaultLoggerConfig
, validateLoggerConfig
, pLoggerConfig
, pLoggerConfig_
) where
import System.Logger.Types
import System.Logger.Logger.Internal
| null | https://raw.githubusercontent.com/alephcloud/hs-yet-another-logger/bdeb95980b69839b1acf1a8cef53426bbd073531/src/System/Logger/Logger.hs | haskell |
System.Logger.Logger
Please feel free to contact us at with any
contributions, additions, or other feedback; we would love to hear from
you.
not use this file except in compliance with the License. You may obtain a
copy of the License at -2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
|
Module: System.Logger.Logger
Description: Yet Another Logger Implementation
License: Apache License, Version 2.0
Stability: experimental
This module provides a logger that implements the logger interface
that is defined in "System.Logger.Types".
All the code of this module is in "System.Logger.Logger.Internal".
The definitions in "System.Logger.Types" are re-exported by this module.
* Re-Export Logger Interface
* Logger
* Configuration Types
** Logger Configuration | Copyright ( c ) 2014 - 2015 PivotCloud , Inc.
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
Copyright : Copyright ( c ) 2014 - 2015 PivotCloud , Inc.
Maintainer : < >
# LANGUAGE UnicodeSyntax #
module System.Logger.Logger
(
module System.Logger.Types
, Logger
, withLogger
, withLogger_
, withLogFunction
, withLogFunction_
* LoggerT
, LoggerT
, runLoggerT
, runLogT
, LoggerConfig(..)
, loggerConfigQueueSize
, loggerConfigThreshold
, loggerConfigScope
, loggerConfigPolicy
, loggerConfigExceptionLimit
, loggerConfigExceptionWait
, loggerConfigExitTimeout
, defaultLoggerConfig
, validateLoggerConfig
, pLoggerConfig
, pLoggerConfig_
) where
import System.Logger.Types
import System.Logger.Logger.Internal
|
0f2b2632ebd964840cec32d094786bf2b814a29d8259dcf8c88a2e46391c26e7 | fredlund/McErlang | mce_erl_process.erl | Copyright ( c ) 2009 ,
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without
%% modification, are permitted provided that the following conditions are met:
%% %% Redistributions of source code must retain the above copyright
%% notice, this list of conditions and the following disclaimer.
%% %% Redistributions in binary form must reproduce the above copyright
%% notice, this list of conditions and the following disclaimer in the
%% documentation and/or other materials provided with the distribution.
%% %% Neither the name of the copyright holders nor the
%% names of its contributors may be used to endorse or promote products
%% derived from this software without specific prior written permission.
%%
%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS''
%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
%% ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS
BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
%% CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
%% SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
%% BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
%% WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
%% OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
%% ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@author
2006 - 2009
%% @doc
@private
-module(mce_erl_process).
-export([makeRunnable/2,makeRunnable/3,makeRunnableInSystem/3]).
-include("process.hrl").
%%-define(debug,true).
-include("../../../src/include/macros.hrl").
-include("emacros.hrl").
makeRunnableInSystem(Expr, Node, State) ->
Pid = mce_erl_references:getNewPidInSystem(Node, Expr, State, void),
?LOG("~nnewProc(~p,~p) --> ~p~n",[Expr,Node,Pid]),
#process{status=runnable,expr=Expr,pid=Pid}.
makeRunnable(Expr, Node) ->
Pid = mce_erl_references:getNewPid(Node, Expr),
?LOG("~nnewProc(~p,~p) --> ~p~n",[Expr,Node,Pid]),
#process{status=runnable,expr=Expr,pid=Pid}.
makeRunnable(Expr, Node, Conf) ->
Pid = mce_erl_references:getNewPid(Node, Expr, Conf),
?LOG("~nnewProc(~p,~p) --> ~p~n",[Expr,Node,Pid]),
#process{status=runnable,expr=Expr,pid=Pid}.
| null | https://raw.githubusercontent.com/fredlund/McErlang/25b38a38a729fdb3c3d2afb9be016bbb14237792/languages/erlang/src/mce_erl_process.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
%% Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
%% Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
%% Neither the name of the copyright holders nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@doc
-define(debug,true). | Copyright ( c ) 2009 ,
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
@author
2006 - 2009
@private
-module(mce_erl_process).
-export([makeRunnable/2,makeRunnable/3,makeRunnableInSystem/3]).
-include("process.hrl").
-include("../../../src/include/macros.hrl").
-include("emacros.hrl").
makeRunnableInSystem(Expr, Node, State) ->
Pid = mce_erl_references:getNewPidInSystem(Node, Expr, State, void),
?LOG("~nnewProc(~p,~p) --> ~p~n",[Expr,Node,Pid]),
#process{status=runnable,expr=Expr,pid=Pid}.
makeRunnable(Expr, Node) ->
Pid = mce_erl_references:getNewPid(Node, Expr),
?LOG("~nnewProc(~p,~p) --> ~p~n",[Expr,Node,Pid]),
#process{status=runnable,expr=Expr,pid=Pid}.
makeRunnable(Expr, Node, Conf) ->
Pid = mce_erl_references:getNewPid(Node, Expr, Conf),
?LOG("~nnewProc(~p,~p) --> ~p~n",[Expr,Node,Pid]),
#process{status=runnable,expr=Expr,pid=Pid}.
|
e113ee337e916045dec4864b1f3de2f6ee8cf314444363626bd61d456141684b | tonsky/datascript-transit | transit.cljs | (ns datascript.test.transit
(:require
[datascript.transit :as dt]
[datascript.core :as d]
[cljs.test :refer-macros [is deftest]]))
(def test-db
(-> (d/empty-db { :email { :db/unique :db.unique/identity } })
(d/db-with [{:name "Ivan"
:email ""
:id #uuid "de305d54-75b4-431b-adb2-eb6b9e546014"}
{:name "Oleg"
:email ""}])))
(def test-tx-data
[ (d/datom 1 :name "Ivan" d/tx0 false)
(d/datom 1 :name "Oleg" d/tx0 true) ])
(defn roundtrip [o]
(dt/read-transit-str (dt/write-transit-str o)))
(defn equiv-datom [d1 d2]
(is (= (:e d1) (:e d2)))
(is (= (:a d1) (:a d2)))
(is (= (:v d1) (:v d2)))
(is (= (:tx d1) (:tx d2)))
(is (= (:added d1) (:added d2))))
(defn equiv-datoms [ds1 ds2]
(is (= (count ds1) (count ds2)))
(doseq [[d1 d2] (map vector ds1 ds2)]
(equiv-datom d1 d2)))
(deftest test-roundtrip
(let [db (roundtrip test-db)]
(is (= (:schema db) (:schema test-db)))
(is (= (:rschema db) (:rschema test-db)))
(is (= (:max-eid db) (:max-eid test-db)))
(is (= (:max-tx db) (:max-tx test-db)))
(equiv-datoms (:eavt db) (:eavt test-db))
(equiv-datoms (:aevt db) (:aevt test-db))
(equiv-datoms (:avet db) (:avet test-db)))
(equiv-datoms (roundtrip test-tx-data) test-tx-data))
(defn ^:export test_all []
(enable-console-print!)
(cljs.test/run-all-tests #"datascript\.test\.transit"))
| null | https://raw.githubusercontent.com/tonsky/datascript-transit/365f2e5a7b4e268b78b3b9576bb685d18d35eb96/test/datascript/test/transit.cljs | clojure | (ns datascript.test.transit
(:require
[datascript.transit :as dt]
[datascript.core :as d]
[cljs.test :refer-macros [is deftest]]))
(def test-db
(-> (d/empty-db { :email { :db/unique :db.unique/identity } })
(d/db-with [{:name "Ivan"
:email ""
:id #uuid "de305d54-75b4-431b-adb2-eb6b9e546014"}
{:name "Oleg"
:email ""}])))
(def test-tx-data
[ (d/datom 1 :name "Ivan" d/tx0 false)
(d/datom 1 :name "Oleg" d/tx0 true) ])
(defn roundtrip [o]
(dt/read-transit-str (dt/write-transit-str o)))
(defn equiv-datom [d1 d2]
(is (= (:e d1) (:e d2)))
(is (= (:a d1) (:a d2)))
(is (= (:v d1) (:v d2)))
(is (= (:tx d1) (:tx d2)))
(is (= (:added d1) (:added d2))))
(defn equiv-datoms [ds1 ds2]
(is (= (count ds1) (count ds2)))
(doseq [[d1 d2] (map vector ds1 ds2)]
(equiv-datom d1 d2)))
(deftest test-roundtrip
(let [db (roundtrip test-db)]
(is (= (:schema db) (:schema test-db)))
(is (= (:rschema db) (:rschema test-db)))
(is (= (:max-eid db) (:max-eid test-db)))
(is (= (:max-tx db) (:max-tx test-db)))
(equiv-datoms (:eavt db) (:eavt test-db))
(equiv-datoms (:aevt db) (:aevt test-db))
(equiv-datoms (:avet db) (:avet test-db)))
(equiv-datoms (roundtrip test-tx-data) test-tx-data))
(defn ^:export test_all []
(enable-console-print!)
(cljs.test/run-all-tests #"datascript\.test\.transit"))
| |
9616429afb6b902877dd2070fe3b5b030dff2f7e41edb2f02f044f45d228a89a | poroh/ersip | ersip_sdp_attr.erl | %%
Copyright ( c ) 2018 Dmitry Poroh
%% All rights reserved.
Distributed under the terms of the MIT License . See the LICENSE file .
%%
SDP attributes
%%
-module(ersip_sdp_attr).
-export([parse/1,
parse_attr/1,
assemble/1
]).
-export_type([attr_list/0, attr/0]).
%%%===================================================================
%%% Types
%%%===================================================================
-type attr_list() :: [attr()].
-type parse_result() :: ersip_parser_aux:parse_result(attr_list()).
-type parse_result(T) :: ersip_parser_aux:parse_result(T).
-type attr() :: attr_name()
| {attr_name(), attr_value()}.
-type attr_name() :: binary().
-type attr_value() :: binary().
%%%===================================================================
%%% API
%%%===================================================================
-spec parse(binary()) -> parse_result().
parse(Bin) ->
do_parse_attrs(Bin, []).
-spec parse_attr(binary()) -> parse_result(attr()).
parse_attr(Bin) ->
do_parse_attr(Bin).
-spec assemble(attr_list()) -> iolist().
assemble(AttrList) ->
[[<<"a=">>,
case Attr of
{AttrName, AttrValue} ->
[AttrName, <<":">>, AttrValue];
AttrName ->
AttrName
end,
<<"\r\n">>]
|| Attr <- AttrList].
%%%===================================================================
%%% Internal Implementation
%%%===================================================================
-define(crlf, "\r\n").
-spec do_parse_attrs(binary(), attr_list()) -> parse_result().
do_parse_attrs(<<"a=", Rest/binary>>, Acc) ->
case binary:split(Rest, <<?crlf>>) of
[_] ->
{error, {invalid_attr, {no_crlf, Rest}}};
[AttrLine, Rest1] ->
case do_parse_attr(AttrLine) of
{error,_} = Err ->
Err;
{ok, V, _} ->
do_parse_attrs(Rest1, [V|Acc])
end
end;
do_parse_attrs(Rest, Acc) ->
{ok, lists:reverse(Acc), Rest}.
-spec do_parse_attr(binary()) -> parse_result(attr()).
do_parse_attr(AttrLine) ->
{AttrName, _} = Pair =
case binary:split(AttrLine, <<":">>) of
[N, V] -> {N, V};
[N] -> {N, novalue}
end,
case ersip_sdp_aux:check_token(AttrName) of
false ->
{error, {invalid_attr, AttrName}};
true ->
{ok, make_attr(Pair), <<>>}
end.
-spec make_attr({attr_name(), attr_value() | novalue}) -> attr().
make_attr({Name, novalue}) ->
Name;
make_attr({_, _} = Attr) ->
Attr.
| null | https://raw.githubusercontent.com/poroh/ersip/241c4be134e388325011cc6492678af7025fcef5/src/sdp/ersip_sdp_attr.erl | erlang |
All rights reserved.
===================================================================
Types
===================================================================
===================================================================
API
===================================================================
===================================================================
Internal Implementation
=================================================================== | Copyright ( c ) 2018 Dmitry Poroh
Distributed under the terms of the MIT License . See the LICENSE file .
SDP attributes
-module(ersip_sdp_attr).
-export([parse/1,
parse_attr/1,
assemble/1
]).
-export_type([attr_list/0, attr/0]).
-type attr_list() :: [attr()].
-type parse_result() :: ersip_parser_aux:parse_result(attr_list()).
-type parse_result(T) :: ersip_parser_aux:parse_result(T).
-type attr() :: attr_name()
| {attr_name(), attr_value()}.
-type attr_name() :: binary().
-type attr_value() :: binary().
-spec parse(binary()) -> parse_result().
parse(Bin) ->
do_parse_attrs(Bin, []).
-spec parse_attr(binary()) -> parse_result(attr()).
parse_attr(Bin) ->
do_parse_attr(Bin).
-spec assemble(attr_list()) -> iolist().
assemble(AttrList) ->
[[<<"a=">>,
case Attr of
{AttrName, AttrValue} ->
[AttrName, <<":">>, AttrValue];
AttrName ->
AttrName
end,
<<"\r\n">>]
|| Attr <- AttrList].
-define(crlf, "\r\n").
-spec do_parse_attrs(binary(), attr_list()) -> parse_result().
do_parse_attrs(<<"a=", Rest/binary>>, Acc) ->
case binary:split(Rest, <<?crlf>>) of
[_] ->
{error, {invalid_attr, {no_crlf, Rest}}};
[AttrLine, Rest1] ->
case do_parse_attr(AttrLine) of
{error,_} = Err ->
Err;
{ok, V, _} ->
do_parse_attrs(Rest1, [V|Acc])
end
end;
do_parse_attrs(Rest, Acc) ->
{ok, lists:reverse(Acc), Rest}.
-spec do_parse_attr(binary()) -> parse_result(attr()).
do_parse_attr(AttrLine) ->
{AttrName, _} = Pair =
case binary:split(AttrLine, <<":">>) of
[N, V] -> {N, V};
[N] -> {N, novalue}
end,
case ersip_sdp_aux:check_token(AttrName) of
false ->
{error, {invalid_attr, AttrName}};
true ->
{ok, make_attr(Pair), <<>>}
end.
-spec make_attr({attr_name(), attr_value() | novalue}) -> attr().
make_attr({Name, novalue}) ->
Name;
make_attr({_, _} = Attr) ->
Attr.
|
9b7d052ace028998d52963bea446635b1a402f23e492767961b8f0d1fa47dd2d | semilin/layoup | Narts-9.lisp |
(MAKE-LAYOUT :NAME "Narts-9" :MATRIX
(APPLY #'KEY-MATRIX '("qwlgjkfou;" "nrtscpheia" "zxmyvbd',."))
:SHIFT-MATRIX NIL :KEYBOARD NIL) | null | https://raw.githubusercontent.com/semilin/layoup/27ec9ba9a9388cd944ac46206d10424e3ab45499/data/layouts/Narts-9.lisp | lisp |
(MAKE-LAYOUT :NAME "Narts-9" :MATRIX
(APPLY #'KEY-MATRIX '("qwlgjkfou;" "nrtscpheia" "zxmyvbd',."))
:SHIFT-MATRIX NIL :KEYBOARD NIL) | |
f280423faf196658f2db52a8dfbd267b775819ec09aa99259e27aefc2f9759d8 | wireapp/saml2-web-sso | Samples.hs | {-# LANGUAGE OverloadedStrings #-}
| haskell representations of the sample data in /test / xml/.
module Samples where
import Crypto.PubKey.RSA as RSA
import SAML2.WebSSO
import URI.ByteString.QQ
-- source: -us/azure/active-directory/develop/active-directory-single-sign-on-protocol-reference
microsoft_authnrequest_1 :: AuthnRequest
microsoft_authnrequest_1 = AuthnRequest {_rqID = ID {fromID = mkXmlText "id6c1c178c166d486687be4aaf5e482730"}, _rqIssueInstant = unsafeReadTime "2013-03-18T03:28:54.1839884Z", _rqIssuer = Issuer [uri||], _rqNameIDPolicy = Nothing}
microsoft_idp_keyinfo :: RSA.PublicKey
microsoft_idp_keyinfo = RSA.PublicKey {RSA.public_size = 256, RSA.public_n = 25266449847616032605280887180445946447207827183577848474184616846136744459013940059297388901949168945284876940449831022307515873851064478442088422354857937225651881330471758496854838771727042040743247357142715828206862944363464254559789249948813601166263148331870847174783313061359856986236119735646527697584352746595070831856826144035963401689203402892945174546914103842786133972997748592644348449313622309013132898328267351769300154913558618070904948090075390540412224001257219178211139174816497946208851160331679253663348572295658494523941468674192526945936020222447823781738913444250167710107767554511823728989391, RSA.public_e = 65537}
| null | https://raw.githubusercontent.com/wireapp/saml2-web-sso/ac88b934bb4a91d4d4bb90c620277188e4087043/test/Samples.hs | haskell | # LANGUAGE OverloadedStrings #
source: -us/azure/active-directory/develop/active-directory-single-sign-on-protocol-reference |
| haskell representations of the sample data in /test / xml/.
module Samples where
import Crypto.PubKey.RSA as RSA
import SAML2.WebSSO
import URI.ByteString.QQ
microsoft_authnrequest_1 :: AuthnRequest
microsoft_authnrequest_1 = AuthnRequest {_rqID = ID {fromID = mkXmlText "id6c1c178c166d486687be4aaf5e482730"}, _rqIssueInstant = unsafeReadTime "2013-03-18T03:28:54.1839884Z", _rqIssuer = Issuer [uri||], _rqNameIDPolicy = Nothing}
microsoft_idp_keyinfo :: RSA.PublicKey
microsoft_idp_keyinfo = RSA.PublicKey {RSA.public_size = 256, RSA.public_n = 25266449847616032605280887180445946447207827183577848474184616846136744459013940059297388901949168945284876940449831022307515873851064478442088422354857937225651881330471758496854838771727042040743247357142715828206862944363464254559789249948813601166263148331870847174783313061359856986236119735646527697584352746595070831856826144035963401689203402892945174546914103842786133972997748592644348449313622309013132898328267351769300154913558618070904948090075390540412224001257219178211139174816497946208851160331679253663348572295658494523941468674192526945936020222447823781738913444250167710107767554511823728989391, RSA.public_e = 65537}
|
2c91932d90cc30d3081ce3f848aa7a1f250340b6d1009a25865ccb710b2f2fc0 | pascutto/cachecache | lru.mli | module Make (K : sig
type t
val equal : t -> t -> bool
(*@ pure *)
val hash : t -> int
end) : sig
type 'a t
type key = K.t
(*@ ephemeral
model cap : int
mutable model assoc : key -> 'a option
mutable model age : key -> int
invariant cap > 0
invariant forall k k'.
not (K.equal k k') ->
assoc k <> None -> assoc k' <> None ->
age k <> age k'
invariant forall k. age k >= 0
invariant forall k. age k >= cap <-> assoc k = None *)
val v : int -> 'a t
(*@ t = v c
checks c > 0
ensures t.cap = c
ensures forall k. t.assoc k = None *)
val stats : 'a t -> Stats.t
val is_empty : 'a t -> bool
@ b = is_empty t
ensures b = true < - > forall k = None
ensures b = true <-> forall k. t.assoc k = None *)
val capacity : 'a t -> int
(*@ c = capacity t
ensures c = t.cap *)
val size : 'a t -> int
val clear : 'a t -> unit
(*@ clear t
ensures forall k. t.assoc k = None *)
val find : 'a t -> key -> 'a
(*@ v = find t k
ensures t.assoc k = Some v
raises Not_found -> t.assoc k = None *)
val find_opt : 'a t -> key -> 'a option
@ o = find_opt t k
ensures o = t.assoc k
ensures o = t.assoc k *)
val mem : 'a t -> key -> bool
(*@ b = mem t k
ensures b = true <-> t.assoc k <> None *)
val replace : 'a t -> key -> 'a -> unit
(*@ replace t k v
modifies t
ensures t.assoc k = Some v
ensures forall k', v'.
not (K.equal k k') -> t.assoc k' = Some v' -> old t.assoc k' = Some v'
ensures forall k'.
t.age k' =
if K.equal k k' then 0
else if old t.age k' < old t.age k then old t.age k' + 1
else old t.age k' *)
val remove : 'a t -> key -> unit
(*@ remove t k
modifies t
ensures t.assoc k = None *)
end
| null | https://raw.githubusercontent.com/pascutto/cachecache/d239c20d3411383c0399674f1425acce73dbf95e/src/lru.mli | ocaml | @ pure
@ ephemeral
model cap : int
mutable model assoc : key -> 'a option
mutable model age : key -> int
invariant cap > 0
invariant forall k k'.
not (K.equal k k') ->
assoc k <> None -> assoc k' <> None ->
age k <> age k'
invariant forall k. age k >= 0
invariant forall k. age k >= cap <-> assoc k = None
@ t = v c
checks c > 0
ensures t.cap = c
ensures forall k. t.assoc k = None
@ c = capacity t
ensures c = t.cap
@ clear t
ensures forall k. t.assoc k = None
@ v = find t k
ensures t.assoc k = Some v
raises Not_found -> t.assoc k = None
@ b = mem t k
ensures b = true <-> t.assoc k <> None
@ replace t k v
modifies t
ensures t.assoc k = Some v
ensures forall k', v'.
not (K.equal k k') -> t.assoc k' = Some v' -> old t.assoc k' = Some v'
ensures forall k'.
t.age k' =
if K.equal k k' then 0
else if old t.age k' < old t.age k then old t.age k' + 1
else old t.age k'
@ remove t k
modifies t
ensures t.assoc k = None | module Make (K : sig
type t
val equal : t -> t -> bool
val hash : t -> int
end) : sig
type 'a t
type key = K.t
val v : int -> 'a t
val stats : 'a t -> Stats.t
val is_empty : 'a t -> bool
@ b = is_empty t
ensures b = true < - > forall k = None
ensures b = true <-> forall k. t.assoc k = None *)
val capacity : 'a t -> int
val size : 'a t -> int
val clear : 'a t -> unit
val find : 'a t -> key -> 'a
val find_opt : 'a t -> key -> 'a option
@ o = find_opt t k
ensures o = t.assoc k
ensures o = t.assoc k *)
val mem : 'a t -> key -> bool
val replace : 'a t -> key -> 'a -> unit
val remove : 'a t -> key -> unit
end
|
a70cbf831c9d3909d68b8b3fa2f43f88bdf682447e8b70a7ed7fbb9d71bed7e3 | erlcloud/erlcloud | erlcloud_cognito_user_pools.erl | -module(erlcloud_cognito_user_pools).
-include("erlcloud_aws.hrl").
-export([configure/2, configure/3, new/2, new/3]).
-export([
list_users/1,
list_users/2,
list_users/5,
list_users/6,
list_all_users/1,
list_all_users/2,
list_all_users/3,
admin_list_groups_for_user/2,
admin_list_groups_for_user/3,
admin_list_groups_for_user/4,
admin_list_groups_for_user/5,
admin_get_user/2,
admin_get_user/3,
admin_create_user/2,
admin_create_user/3,
admin_create_user/4,
admin_delete_user/2,
admin_delete_user/3,
admin_add_user_to_group/3,
admin_add_user_to_group/4,
admin_remove_user_from_group/3,
admin_remove_user_from_group/4,
create_group/2,
create_group/3,
create_group/5,
create_group/6,
delete_group/2,
delete_group/3,
admin_reset_user_password/2,
admin_reset_user_password/3,
admin_reset_user_password/4,
admin_update_user_attributes/3,
admin_update_user_attributes/4,
admin_update_user_attributes/5,
change_password/3,
change_password/4,
list_user_pools/0,
list_user_pools/1,
list_user_pools/2,
list_all_user_pools/0,
list_all_user_pools/1,
admin_set_user_password/3,
admin_set_user_password/4,
admin_set_user_password/5,
describe_user_pool/1,
describe_user_pool/2,
get_user_pool_mfa_config/1,
get_user_pool_mfa_config/2,
list_identity_providers/1,
list_identity_providers/3,
list_identity_providers/4,
list_all_identity_providers/1,
list_all_identity_providers/2,
describe_identity_provider/2,
describe_identity_provider/3,
describe_user_pool_client/2,
describe_user_pool_client/3,
list_user_pool_clients/1,
list_user_pool_clients/3,
list_user_pool_clients/4,
list_all_user_pool_clients/1,
list_all_user_pool_clients/2,
admin_list_devices/2,
admin_list_devices/3,
admin_list_devices/5,
admin_list_all_devices/2,
admin_list_all_devices/3,
admin_forget_device/3,
admin_forget_device/4,
admin_confirm_signup/2,
admin_confirm_signup/3,
admin_confirm_signup/4,
admin_initiate_auth/4,
admin_initiate_auth/5,
admin_initiate_auth/8,
respond_to_auth_challenge/4,
respond_to_auth_challenge/5,
respond_to_auth_challenge/8,
create_identity_provider/4,
create_identity_provider/5,
create_identity_provider/6,
create_identity_provider/7,
delete_identity_provider/2,
delete_identity_provider/3,
update_identity_provider/2,
update_identity_provider/3,
update_identity_provider/4,
update_identity_provider/5,
update_identity_provider/6,
request/2,
request/3
]).
-define(MAX_RESULTS, 60).
-define(API_VERSION, "2016-04-18").
-spec new(string(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey) ->
#aws_config{access_key_id = AccessKeyID,
secret_access_key = SecretAccessKey,
retry = fun erlcloud_retry:default_retry/1}.
-spec new(string(), string(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey, Host) ->
#aws_config{access_key_id = AccessKeyID,
secret_access_key = SecretAccessKey,
cognito_user_pools_host = Host,
retry = fun erlcloud_retry:default_retry/1}.
-spec configure(string(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey) ->
put(aws_config, new(AccessKeyID, SecretAccessKey)),
ok.
-spec configure(string(), string(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey, Host) ->
put(aws_config, new(AccessKeyID, SecretAccessKey, Host)),
ok.
-spec list_users(binary()) -> {ok, map()} | {error, any()}.
list_users(UserPoolId) ->
list_users(UserPoolId, undefined, undefined, undefined, undefined).
-spec list_users(binary(), aws_config()) -> {ok, map()} | {error, any()}.
list_users(UserPoolId, Config) ->
Body = #{
<<"UserPoolId">> => unicode:characters_to_binary(UserPoolId)
},
request(Config, "ListUsers", Body).
-spec list_users(binary(),
[binary()] | undefined,
binary() | undefined,
number() | undefined,
binary() | undefined) -> {ok, map()} | {error, any()}.
list_users(UserPoolId, AttributesToGet, Filter, Limit, PaginationToken) ->
Config = erlcloud_aws:default_config(),
list_users(UserPoolId, AttributesToGet, Filter, Limit, PaginationToken, Config).
list_users(UserPoolId, AttributesToGet, Filter, Limit, PaginationToken, Config) ->
BaseBody = #{
<<"UserPoolId">> => UserPoolId,
<<"AttributesToGet">> => AttributesToGet,
<<"Filter">> => Filter,
<<"Limit">> => Limit,
<<"PaginationToken">> => PaginationToken
},
Body = erlcloud_util:filter_undef(BaseBody),
request(Config, "ListUsers", Body).
-spec list_all_users(binary()) -> {ok, map()} | {error, any()}.
list_all_users(UserPoolId) ->
list_all_users(UserPoolId, undefined).
-spec list_all_users(binary(), binary() | undefined | aws_config()) ->
{ok, map()} | {error, any()}.
list_all_users(UserPoolId, Config) when is_record(Config, aws_config) ->
list_all_users(UserPoolId, undefined, Config);
list_all_users(UserPoolId, Filter) ->
Config = erlcloud_aws:default_config(),
list_all_users(UserPoolId, Filter, Config).
-spec list_all_users(binary(), binary() | undefined, aws_config()) ->
{ok, map()} | {error, any()}.
list_all_users(UserPoolId, Filter, Config) ->
Fun = fun list_users/6,
Args = [UserPoolId, undefined, Filter],
list_all(Fun, Args, Config, <<"Users">>, <<"PaginationToken">>).
-spec admin_list_groups_for_user(binary(), binary()) ->
{ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId) ->
Config = erlcloud_aws:default_config(),
admin_list_groups_for_user(UserName, UserPoolId, Config).
-spec admin_list_groups_for_user(binary(), binary(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId, Config) ->
Body = #{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId
},
request(Config, "AdminListGroupsForUser", Body).
-spec admin_list_groups_for_user(binary(), binary(), number(),
binary() | undefined) ->
{ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId, Limit, NextToken) ->
Config = erlcloud_aws:default_config(),
admin_list_groups_for_user(UserName, UserPoolId, Limit, NextToken, Config).
-spec admin_list_groups_for_user(binary(), binary(), number(),
binary() | undefined, aws_config()) ->
{ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId, Limit, NextToken, Config) ->
Body = #{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId,
<<"Limit">> => Limit,
<<"NextToken">> => NextToken
},
request(Config, "AdminListGroupsForUser", Body).
-spec admin_get_user(binary(), binary()) -> {ok, map()} | {error, any()}.
admin_get_user(UserName, UserPoolId) ->
Config = erlcloud_aws:default_config(),
admin_get_user(UserName, UserPoolId, Config).
-spec admin_get_user(binary(), binary(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_get_user(UserName, UserPoolId, Config) ->
Body = #{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId
},
request(Config, "AdminGetUser", Body).
-spec admin_create_user(binary(), binary()) ->
{ok, map()} | {error, any()}.
admin_create_user(UserName, UserPoolId) ->
admin_create_user(UserName, UserPoolId, #{}).
-spec admin_create_user(binary(), binary(), maps:maps()) ->
{ok, map()} | {error, any()}.
admin_create_user(UserName, UserPoolId, OptionalArgs) ->
Config = erlcloud_aws:default_config(),
admin_create_user(UserName, UserPoolId, OptionalArgs, Config).
-spec admin_create_user(binary(), binary(), maps:maps(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_create_user(UserName, UserPoolId, OptionalArgs, Config) ->
Body = OptionalArgs#{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId
},
request(Config, "AdminCreateUser", Body).
-spec admin_delete_user(binary(), binary()) -> ok | {error, any()}.
admin_delete_user(UserName, UserPoolId) ->
Config = erlcloud_aws:default_config(),
admin_delete_user(UserName, UserPoolId, Config).
-spec admin_delete_user(binary(), binary(), aws_config()) -> ok | {error, any()}.
admin_delete_user(UserName, UserPoolId, Config) ->
Body = #{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId
},
request_no_resp(Config, "AdminDeleteUser", Body).
-spec admin_add_user_to_group(binary(), binary(), binary()) ->
ok | {error, any()}.
admin_add_user_to_group(GroupName, UserName, UserPoolId) ->
Config = erlcloud_aws:default_config(),
admin_add_user_to_group(GroupName, UserName, UserPoolId, Config).
-spec admin_add_user_to_group(binary(), binary(), binary(), aws_config()) ->
ok | {error, any()}.
admin_add_user_to_group(GroupName, UserName, UserPoolId, Config) ->
Body = #{
<<"Username">> => UserName,
<<"GroupName">> => GroupName,
<<"UserPoolId">> => UserPoolId
},
request_no_resp(Config, "AdminAddUserToGroup", Body).
-spec admin_remove_user_from_group(binary(), binary(), binary()) ->
ok | {error, any()}.
admin_remove_user_from_group(GroupName, UserName, UserPoolId) ->
Config = erlcloud_aws:default_config(),
admin_remove_user_from_group(GroupName, UserName, UserPoolId, Config).
-spec admin_remove_user_from_group(binary(), binary(), binary(), aws_config()) ->
ok | {error, any()}.
admin_remove_user_from_group(GroupName, UserName, UserPoolId, Config) ->
Body = #{
<<"Username">> => UserName,
<<"GroupName">> => GroupName,
<<"UserPoolId">> => UserPoolId
},
request_no_resp(Config, "AdminRemoveUserFromGroup", Body).
-spec create_group(binary(), binary()) -> {ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId) ->
create_group(GroupName, UserPoolId, undefined, undefined, undefined).
-spec create_group(binary(), binary(), aws_config()) ->
{ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId, Config) ->
create_group(GroupName, UserPoolId, undefined, undefined, undefined, Config).
-spec create_group(binary(), binary(), binary() | undefined,
number() | undefined, binary() | undefined) ->
{ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId, Description, Precedence, RoleArn) ->
Config = erlcloud_aws:default_config(),
create_group(GroupName, UserPoolId, Description, Precedence, RoleArn, Config).
-spec create_group(binary(), binary(), binary() | undefined,
number() | undefined, binary() | undefined, aws_config()) ->
{ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId, Description, Precedence, RoleArn, Config) ->
Body0 = #{
<<"GroupName">> => GroupName,
<<"UserPoolId">> => UserPoolId,
<<"Description">> => Description,
<<"Precedence">> => Precedence,
<<"RoleArn">> => RoleArn
},
Body = erlcloud_util:filter_undef(Body0),
request(Config, "CreateGroup", Body).
-spec delete_group(binary(), binary()) -> ok | {error, any()}.
delete_group(GroupName, UserPoolId) ->
Config = erlcloud_aws:default_config(),
delete_group(GroupName, UserPoolId, Config).
-spec delete_group(binary(), binary(), aws_config()) -> ok | {error, any()}.
delete_group(GroupName, UserPoolId, Config) ->
Body = #{
<<"GroupName">> => unicode:characters_to_binary(GroupName),
<<"UserPoolId">> => unicode:characters_to_binary(UserPoolId)
},
request_no_resp(Config, "DeleteGroup", Body).
-spec admin_reset_user_password(binary(), binary()) ->
ok| {error, any()}.
admin_reset_user_password(UserName, UserPoolId) ->
admin_reset_user_password(UserName, UserPoolId, undefined).
-spec admin_reset_user_password(binary(), binary(), map() | undefined) ->
ok | {error, any()}.
admin_reset_user_password(UserName, UserPoolId, MetaData) ->
Config = erlcloud_aws:default_config(),
admin_reset_user_password(UserName, UserPoolId, MetaData, Config).
-spec admin_reset_user_password(binary(), binary(),
map() | undefined, aws_config()) ->
ok | {error, any()}.
admin_reset_user_password(UserName, UserPoolId, MetaData, Config) ->
BaseBody = #{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId,
<<"ClientMetaData">> => MetaData
},
Body = erlcloud_util:filter_undef(BaseBody),
request_no_resp(Config, "AdminResetUserPassword", Body).
-spec admin_update_user_attributes(binary(), binary(), [map()]) ->
ok | {error, any()}.
admin_update_user_attributes(UserName, UserPoolId, Attributes) ->
admin_update_user_attributes(UserName, UserPoolId, Attributes, undefined).
-spec admin_update_user_attributes(binary(), binary(), [map()],
map() | undefined) ->
ok | {error, any()}.
admin_update_user_attributes(UserName, UserPoolId, Attributes, MetaData) ->
Config = erlcloud_aws:default_config(),
admin_update_user_attributes(UserName, UserPoolId, Attributes, MetaData, Config).
-spec admin_update_user_attributes(binary(), binary(), [map()],
map() | undefined, aws_config()) ->
ok | {error, any()}.
admin_update_user_attributes(UserName, UserPoolId, Attributes, MetaData, Config) ->
BaseBody = #{
<<"Username">> => UserName,
<<"UserPoolId">> => UserPoolId,
<<"UserAttributes">> => Attributes,
<<"ClientMetaData">> => MetaData
},
Body = erlcloud_util:filter_undef(BaseBody),
request_no_resp(Config, "AdminUpdateUserAttributes", Body).
-spec change_password(binary(), binary(), binary()) ->
ok | {error, any()}.
change_password(OldPass, NewPass, AccessToken) ->
Config = erlcloud_aws:default_config(),
change_password(OldPass, NewPass, AccessToken, Config).
-spec change_password(binary(), binary(), binary(), aws_config()) ->
ok | {error, any()}.
change_password(OldPass, NewPass, AccessToken, Config) ->
Body = #{
<<"AccessToken">> => AccessToken,
<<"PreviousPassword">> => OldPass,
<<"ProposedPassword">> => NewPass
},
request_no_resp(Config, "ChangePassword", Body).
-spec list_user_pools() -> {ok, map()} | {error, any()}.
list_user_pools() ->
list_user_pools(?MAX_RESULTS, undefined).
-spec list_user_pools(integer()) -> {ok, map()} | {error, any()}.
list_user_pools(MaxResult) ->
list_user_pools(MaxResult, undefined).
-spec list_user_pools(integer(), binary() | undefined) ->
{ok, map()} | {error, any()}.
list_user_pools(MaxResult, NextToken) ->
Config = erlcloud_aws:default_config(),
list_user_pools(MaxResult, NextToken, Config).
-spec list_user_pools(integer(), binary() | undefined, aws_config()) ->
{ok, map()} | {error, any()}.
list_user_pools(MaxResult, NextToken, Config) ->
Body0 = #{
<<"MaxResults">> => MaxResult,
<<"NextToken">> => NextToken
},
Body = erlcloud_util:filter_undef(Body0),
request(Config, "ListUserPools", Body).
-spec list_all_user_pools() -> {ok, map()} | {error, any()}.
list_all_user_pools() ->
Config = erlcloud_aws:default_config(),
list_all_user_pools(Config).
-spec list_all_user_pools(aws_config()) -> {ok, map()} | {error, any()}.
list_all_user_pools(Config) ->
Fun = fun list_user_pools/3,
list_all(Fun, [], Config, <<"UserPools">>, <<"NextToken">>).
-spec admin_set_user_password(binary(), binary(), binary()) ->
{ok, map()} | {error, any()}.
admin_set_user_password(UserId, UserPoolId, Password) ->
admin_set_user_password(UserId, UserPoolId, Password, false).
-spec admin_set_user_password(binary(), binary(), binary(), boolean()) ->
ok | {error, any()}.
admin_set_user_password(UserId, UserPoolId, Password, Permanent) ->
Config = erlcloud_aws:default_config(),
admin_set_user_password(UserId, UserPoolId, Password, Permanent, Config).
-spec admin_set_user_password(binary(), binary(), binary(), boolean(),
aws_config()) ->
ok | {error, any()}.
admin_set_user_password(UserId, UserPoolId, Password, Permanent, Config) ->
Body = #{
<<"Password">> => Password,
<<"Username">> => UserId,
<<"UserPoolId">> => UserPoolId,
<<"Permanent">> => Permanent
},
request_no_resp(Config, "AdminSetUserPassword", Body).
-spec describe_user_pool(binary()) -> {ok, map()} | {error, any()}.
describe_user_pool(UserPoolId) ->
Config = erlcloud_aws:default_config(),
describe_user_pool(UserPoolId, Config).
-spec describe_user_pool(binary(), aws_config()) -> {ok, map()} | {error, any()}.
describe_user_pool(UserPoolId, Config) ->
Body = #{
<<"UserPoolId">> => UserPoolId
},
request(Config, "DescribeUserPool", Body).
-spec get_user_pool_mfa_config(binary()) -> {ok, map()} | {error, any()}.
get_user_pool_mfa_config(UserPoolId) ->
Config = erlcloud_aws:default_config(),
get_user_pool_mfa_config(UserPoolId, Config).
-spec get_user_pool_mfa_config(binary(), aws_config()) ->
{ok, map()} | {error, any()}.
get_user_pool_mfa_config(UserPoolId, Config) ->
Body = #{
<<"UserPoolId">> => UserPoolId
},
request(Config, "GetUserPoolMfaConfig", Body).
-spec list_identity_providers(binary()) -> {ok, map()} | {error, any()}.
list_identity_providers(UserPoolId) ->
list_identity_providers(UserPoolId, ?MAX_RESULTS, undefined).
-spec list_identity_providers(binary(), integer(), binary() | undefined) ->
{ok, map()} | {error, any()}.
list_identity_providers(UserPoolId, MaxResults, NextToken) ->
Config = erlcloud_aws:default_config(),
list_identity_providers(UserPoolId, MaxResults, NextToken, Config).
-spec list_identity_providers(binary(),
integer(),
binary() | undefined,
aws_config()) ->
{ok, map()} | {error, any()}.
list_identity_providers(UserPoolId, MaxResults, NextToken, Config) ->
Body0 = #{
<<"UserPoolId">> => UserPoolId,
<<"NextToken">> => NextToken,
<<"MaxResults">> => MaxResults
},
Body = erlcloud_util:filter_undef(Body0),
request(Config, "ListIdentityProviders", Body).
-spec list_all_identity_providers(binary()) ->
{ok, map()} | {error, any()}.
list_all_identity_providers(UserPoolId) ->
Config = erlcloud_aws:default_config(),
list_all_identity_providers(UserPoolId, Config).
-spec list_all_identity_providers(binary(), aws_config()) ->
{ok, map()} | {error, any()}.
list_all_identity_providers(UserPoolId, Config) ->
Fun = fun list_identity_providers/4,
Args = [UserPoolId],
list_all(Fun, Args, Config, <<"Providers">>, <<"NextToken">>).
-spec describe_identity_provider(binary(), binary()) ->
{ok, map()} | {error, any()}.
describe_identity_provider(UserPoolId, ProviderName) ->
Config = erlcloud_aws:default_config(),
describe_identity_provider(UserPoolId, ProviderName, Config).
-spec describe_identity_provider(binary(), binary(), aws_config()) ->
{ok, map()} | {error, any()}.
describe_identity_provider(UserPoolId, ProviderName, Config) ->
Body = #{
<<"ProviderName">> => ProviderName,
<<"UserPoolId">> => UserPoolId
},
request(Config, "DescribeIdentityProvider", Body).
-spec describe_user_pool_client(binary(), binary()) ->
{ok, map()} | {error, any()}.
describe_user_pool_client(UserPoolId, ClientId) ->
Config = erlcloud_aws:default_config(),
describe_user_pool_client(UserPoolId, ClientId, Config).
describe_user_pool_client(UserPoolId, ClientId, Config) ->
Body = #{
<<"ClientId">> => ClientId,
<<"UserPoolId">> => UserPoolId
},
request(Config, "DescribeUserPoolClient", Body).
-spec list_user_pool_clients(binary()) -> {ok, map()} | {error, any()}.
list_user_pool_clients(UserPoolId) ->
list_user_pool_clients(UserPoolId, ?MAX_RESULTS, undefined).
-spec list_user_pool_clients(binary(), non_neg_integer(), binary() | undefined) ->
{ok, map()} | {error, any()}.
list_user_pool_clients(UserPoolId, MaxResults, NextToken) ->
Config = erlcloud_aws:default_config(),
list_user_pool_clients(UserPoolId, MaxResults, NextToken, Config).
-spec list_user_pool_clients(binary(), non_neg_integer(), binary() | undefined,
aws_config()) ->
{ok, map()} | {error, any()}.
list_user_pool_clients(UserPoolId, MaxResults, NextToken, Config) ->
Body0 = #{
<<"UserPoolId">> => UserPoolId,
<<"NextToken">> => NextToken,
<<"MaxResults">> => MaxResults
},
Body = erlcloud_util:filter_undef(Body0),
request(Config, "ListUserPoolClients", Body).
-spec list_all_user_pool_clients(binary()) ->
{ok, map()} | {error, any()}.
list_all_user_pool_clients(UserPoolId) ->
Config = erlcloud_aws:default_config(),
list_all_user_pool_clients(UserPoolId, Config).
-spec list_all_user_pool_clients(binary(), aws_config()) ->
{ok, map()} | {error, any()}.
list_all_user_pool_clients(UserPoolId, Config) ->
Fun = fun list_user_pool_clients/4,
Args = [UserPoolId],
list_all(Fun, Args, Config, <<"UserPoolClients">>, <<"NextToken">>).
-spec admin_list_devices(binary(), binary()) -> {ok, map()} | {error, any()}.
admin_list_devices(UserPoolId, Username) ->
Config = erlcloud_aws:default_config(),
admin_list_devices(UserPoolId, Username, Config).
-spec admin_list_devices(binary(), binary(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_list_devices(UserPoolId, Username, Config) ->
admin_list_devices(UserPoolId, Username, ?MAX_RESULTS, undefined, Config).
-spec admin_list_devices(binary(), binary(), integer(), binary() | undefined,
aws_config()) ->
{ok, map()} | {error, any()}.
admin_list_devices(UserPoolId, Username, Limit, PaginationToken, Config) ->
Body0 = #{
<<"UserPoolId">> => UserPoolId,
<<"Username">> => Username,
<<"Limit">> => Limit,
<<"PaginationToken">> => PaginationToken
},
Body = erlcloud_util:filter_undef(Body0),
request(Config, "AdminListDevices", Body).
-spec admin_list_all_devices(binary(), binary()) ->
{ok, map()} | {error, any()}.
admin_list_all_devices(UserPoolId, Username) ->
Config = erlcloud_aws:default_config(),
admin_list_all_devices(UserPoolId, Username, Config).
-spec admin_list_all_devices(binary(), binary(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_list_all_devices(UserPoolId, Username, Config) ->
Fun = fun admin_list_devices/5,
Args = [UserPoolId, Username],
list_all(Fun, Args, Config, <<"Devices">>, <<"PaginationToken">>).
-spec admin_forget_device(binary(), binary(), binary()) ->
ok | {error, any()}.
admin_forget_device(UserPoolId, Username, DeviceKey) ->
Config = erlcloud_aws:default_config(),
admin_forget_device(UserPoolId, Username, DeviceKey, Config).
-spec admin_forget_device(binary(), binary(), binary(), aws_config()) ->
ok | {error, any()}.
admin_forget_device(UserPoolId, Username, DeviceKey, Config) ->
Body = #{
<<"UserPoolId">> => UserPoolId,
<<"Username">> => Username,
<<"DeviceKey">> => DeviceKey
},
request_no_resp(Config, "AdminForgetDevice", Body).
-spec admin_confirm_signup(binary(), binary()) ->
{ok, map()} | {error, any()}.
admin_confirm_signup(UserPoolId, Username) ->
admin_confirm_signup(UserPoolId, Username, #{}).
-spec admin_confirm_signup(binary(), binary(), maps:map()) ->
{ok, map()} | {error, any()}.
admin_confirm_signup(UserPoolId, Username, ClientMetadata) ->
Config = erlcloud_aws:default_config(),
admin_confirm_signup(UserPoolId, Username, ClientMetadata, Config).
-spec admin_confirm_signup(binary(), binary(), maps:map(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_confirm_signup(UserPoolId, Username, ClientMetadata, Config) ->
Body = #{
<<"UserPoolId">> => UserPoolId,
<<"Username">> => Username,
<<"ClientMetadata">> => ClientMetadata
},
request(Config, "AdminConfirmSignUp", Body).
-spec admin_initiate_auth(binary(), binary(), binary(), maps:map()) ->
{ok, map()} | {error, any()}.
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams) ->
Cfg = erlcloud_aws:default_config(),
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams, Cfg).
-spec admin_initiate_auth(binary(), binary(), binary(),
maps:map(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams, Cfg) ->
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams,
#{}, #{}, #{}, Cfg).
-spec admin_initiate_auth(binary(), binary(), binary(), maps:map(),
maps:map(), maps:map(), maps:map(), aws_config()) ->
{ok, map()} | {error, any()}.
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams,
AnalyticsMeta, ClientMeta, ContextData, Cfg) ->
Mandatory = #{
<<"AuthFlow">> => AuthFlow,
<<"ClientId">> => ClientId,
<<"UserPoolId">> => PoolId
},
Optional = #{
<<"AnalyticsMetadata">> => AnalyticsMeta,
<<"AuthParameters">> => AuthParams,
<<"ClientMetadata">> => ClientMeta,
<<"ContextData">> => ContextData
},
request(Cfg, "AdminInitiateAuth", make_request_body(Mandatory, Optional)).
-spec respond_to_auth_challenge(binary(), binary(), maps:map(), binary()) ->
{ok, map()} | {error, any()}.
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses, Session) ->
Cfg = erlcloud_aws:default_config(),
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
Session, Cfg).
-spec respond_to_auth_challenge(binary(), binary(), maps:map(), binary(),
aws_config()) ->
{ok, map()} | {error, any()}.
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
Session, Cfg) ->
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
Session, #{}, #{}, #{}, Cfg).
-spec respond_to_auth_challenge(binary(), binary(), maps:map(), binary(),
maps:map(), maps:map(), maps:map(),
aws_config()) ->
{ok, map()} | {error, any()}.
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
Session, AnalyticsMeta, ClientMeta, ContextData, Cfg) ->
Mandatory = #{
<<"ChallengeName">> => ChallengeName,
<<"ChallengeResponses">> => ChallengeResponses,
<<"ClientId">> => ClientId
},
Optional = #{
<<"AnalyticsMetadata">> => AnalyticsMeta,
<<"ClientMetadata">> => ClientMeta,
<<"Session">> => Session,
<<"UserContextData">> => ContextData
},
request(Cfg, "RespondToAuthChallenge", make_request_body(Mandatory, Optional)).
-spec create_identity_provider(binary(), binary(), binary(), map()) ->
{ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType,
ProviderDetails) ->
create_identity_provider(UserPoolId, ProviderName, ProviderType,
ProviderDetails, #{}).
-spec create_identity_provider(binary(), binary(), binary(), map(), map()) ->
{ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType,
ProviderDetails, AttributeMapping) ->
create_identity_provider(UserPoolId, ProviderName, ProviderType,
ProviderDetails, AttributeMapping, []).
-spec create_identity_provider(binary(), binary(), binary(),
map(), map(), list()) ->
{ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType,
ProviderDetails, AttributeMapping, IdpIdentifiers) ->
Config = erlcloud_aws:default_config(),
create_identity_provider(UserPoolId, ProviderName, ProviderType,
ProviderDetails, AttributeMapping, IdpIdentifiers, Config).
-spec create_identity_provider(binary(), binary(), binary(), map(), map(),
list(), aws_config()) ->
{ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType, ProviderDetails,
AttributeMapping, IdpIdentifiers, Config) ->
Mandatory = #{
<<"UserPoolId">> => UserPoolId,
<<"ProviderName">> => ProviderName,
<<"ProviderType">> => ProviderType,
<<"ProviderDetails">> => ProviderDetails
},
Optional = #{
<<"AttributeMapping">> => AttributeMapping,
<<"IdpIdentifiers">> => IdpIdentifiers
},
request(Config, "CreateIdentityProvider", make_request_body(Mandatory, Optional)).
-spec delete_identity_provider(binary(), binary()) ->
ok | {error, any()}.
delete_identity_provider(UserPoolId, ProviderName) ->
Config = erlcloud_aws:default_config(),
delete_identity_provider(UserPoolId, ProviderName, Config).
-spec delete_identity_provider(binary(), binary(), aws_config()) ->
ok | {error, any()}.
delete_identity_provider(UserPoolId, ProviderName, Config) ->
Body = #{
<<"UserPoolId">> => UserPoolId,
<<"ProviderName">> => ProviderName
},
request_no_resp(Config, "DeleteIdentityProvider", Body).
-spec update_identity_provider(binary(), binary()) ->
{ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName) ->
update_identity_provider(UserPoolId, ProviderName, #{}).
-spec update_identity_provider(binary(), binary(), map()) ->
{ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName, ProviderDetails) ->
update_identity_provider(UserPoolId, ProviderName, ProviderDetails, #{}).
-spec update_identity_provider(binary(), binary(), map(), map()) ->
{ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName,
ProviderDetails, AttributeMapping) ->
update_identity_provider(UserPoolId, ProviderName,
ProviderDetails, AttributeMapping, []).
-spec update_identity_provider(binary(), binary(), map(), map(), list()) ->
{ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName,
ProviderDetails, AttributeMapping, IdpIdentifiers) ->
Config = erlcloud_aws:default_config(),
update_identity_provider(UserPoolId, ProviderName, ProviderDetails,
AttributeMapping, IdpIdentifiers, Config).
-spec update_identity_provider(binary(), binary(), map(), map(),
list(), aws_config()) ->
{ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName, ProviderDetails,
AttributeMapping, IdpIdentifiers, Config) ->
Mandatory = #{
<<"UserPoolId">> => UserPoolId,
<<"ProviderName">> => ProviderName
},
Optional = #{
<<"ProviderDetails">> => ProviderDetails,
<<"AttributeMapping">> => AttributeMapping,
<<"IdpIdentifiers">> => IdpIdentifiers
},
request(Config, "UpdateIdentityProvider", make_request_body(Mandatory, Optional)).
%%------------------------------------------------------------------------------
%% Internal Functions
%%------------------------------------------------------------------------------
request(Config, Request) ->
Result = erlcloud_retry:request(Config, Request, fun handle_result/1),
case erlcloud_aws:request_to_return(Result) of
{ok, {_, <<>>}} -> {ok, #{}};
{ok, {_, RespBody}} -> {ok, jsx:decode(RespBody, [return_maps])};
{error, _} = Error -> Error
end.
request(Config0, OperationName, Request) ->
case erlcloud_aws:update_config(Config0) of
{ok, Config} ->
Body = jsx:encode(Request),
Operation = "AWSCognitoIdentityProviderService." ++ OperationName,
Headers = get_headers(Config, Operation, Body),
AwsRequest = #aws_request{service = 'cognito-idp',
uri = get_url(Config),
method = post,
request_headers = Headers,
request_body = Body},
request(Config, AwsRequest);
{error, Reason} ->
{error, Reason}
end.
request_no_resp(Config, OperationName, Request) ->
case request(Config, OperationName, Request) of
{ok, _} -> ok;
Error -> Error
end.
make_request_body(Mandatory, Optional) ->
maps:merge(Mandatory, erlcloud_util:filter_empty_map(Optional)).
get_headers(#aws_config{cognito_user_pools_host = Host} = Config, Operation, Body) ->
Headers = [{"host", Host},
{"x-amz-target", Operation},
{"version", ?API_VERSION},
{"content-type", "application/x-amz-json-1.1"}],
Region = erlcloud_aws:aws_region_from_host(Host),
erlcloud_aws:sign_v4_headers(Config, Headers, Body, Region, "cognito-idp").
handle_result(#aws_request{response_type = ok} = Request) ->
Request;
handle_result(#aws_request{response_type = error,
error_type = aws,
response_status = Status} = Request)
when Status >= 500 ->
Request#aws_request{should_retry = true};
handle_result(#aws_request{response_type = error,
error_type = aws} = Request) ->
Request#aws_request{should_retry = false}.
get_url(#aws_config{cognito_user_pools_scheme = Scheme,
cognito_user_pools_host = Host}) ->
Scheme ++ Host.
list_all(Fun, Args, Config, Key, TokenAlias) ->
list_all(Fun, Args, Config, Key, TokenAlias, undefined, []).
list_all(Fun, Args, Config, Key, TokenAlias, NextToken, Acc) ->
UpdArgs = Args ++ [?MAX_RESULTS, NextToken, Config],
case erlang:apply(Fun, UpdArgs) of
{ok, Map} ->
UpdAcc = Acc ++ maps:get(Key, Map),
NewToken = maps:get(TokenAlias, Map, undefined),
case NewToken of
undefined ->
{ok, #{Key => UpdAcc}};
_ ->
list_all(Fun, Args, Config, Key, TokenAlias, NewToken, UpdAcc)
end;
Error ->
Error
end.
| null | https://raw.githubusercontent.com/erlcloud/erlcloud/a080fe22ac0c8625c2506cdb36b12badf5469910/src/erlcloud_cognito_user_pools.erl | erlang | ------------------------------------------------------------------------------
Internal Functions
------------------------------------------------------------------------------ | -module(erlcloud_cognito_user_pools).
-include("erlcloud_aws.hrl").
-export([configure/2, configure/3, new/2, new/3]).
-export([
list_users/1,
list_users/2,
list_users/5,
list_users/6,
list_all_users/1,
list_all_users/2,
list_all_users/3,
admin_list_groups_for_user/2,
admin_list_groups_for_user/3,
admin_list_groups_for_user/4,
admin_list_groups_for_user/5,
admin_get_user/2,
admin_get_user/3,
admin_create_user/2,
admin_create_user/3,
admin_create_user/4,
admin_delete_user/2,
admin_delete_user/3,
admin_add_user_to_group/3,
admin_add_user_to_group/4,
admin_remove_user_from_group/3,
admin_remove_user_from_group/4,
create_group/2,
create_group/3,
create_group/5,
create_group/6,
delete_group/2,
delete_group/3,
admin_reset_user_password/2,
admin_reset_user_password/3,
admin_reset_user_password/4,
admin_update_user_attributes/3,
admin_update_user_attributes/4,
admin_update_user_attributes/5,
change_password/3,
change_password/4,
list_user_pools/0,
list_user_pools/1,
list_user_pools/2,
list_all_user_pools/0,
list_all_user_pools/1,
admin_set_user_password/3,
admin_set_user_password/4,
admin_set_user_password/5,
describe_user_pool/1,
describe_user_pool/2,
get_user_pool_mfa_config/1,
get_user_pool_mfa_config/2,
list_identity_providers/1,
list_identity_providers/3,
list_identity_providers/4,
list_all_identity_providers/1,
list_all_identity_providers/2,
describe_identity_provider/2,
describe_identity_provider/3,
describe_user_pool_client/2,
describe_user_pool_client/3,
list_user_pool_clients/1,
list_user_pool_clients/3,
list_user_pool_clients/4,
list_all_user_pool_clients/1,
list_all_user_pool_clients/2,
admin_list_devices/2,
admin_list_devices/3,
admin_list_devices/5,
admin_list_all_devices/2,
admin_list_all_devices/3,
admin_forget_device/3,
admin_forget_device/4,
admin_confirm_signup/2,
admin_confirm_signup/3,
admin_confirm_signup/4,
admin_initiate_auth/4,
admin_initiate_auth/5,
admin_initiate_auth/8,
respond_to_auth_challenge/4,
respond_to_auth_challenge/5,
respond_to_auth_challenge/8,
create_identity_provider/4,
create_identity_provider/5,
create_identity_provider/6,
create_identity_provider/7,
delete_identity_provider/2,
delete_identity_provider/3,
update_identity_provider/2,
update_identity_provider/3,
update_identity_provider/4,
update_identity_provider/5,
update_identity_provider/6,
request/2,
request/3
]).
-define(MAX_RESULTS, 60).
-define(API_VERSION, "2016-04-18").
%% @doc Build an AWS config from an access key pair, using the default
%% retry strategy.
-spec new(string(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey) ->
    #aws_config{access_key_id = AccessKeyID,
                secret_access_key = SecretAccessKey,
                retry = fun erlcloud_retry:default_retry/1}.

%% @doc As new/2, additionally pinning the Cognito User Pools endpoint host.
-spec new(string(), string(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey, Host) ->
    Base = new(AccessKeyID, SecretAccessKey),
    Base#aws_config{cognito_user_pools_host = Host}.
%% @doc Store a default AWS config in the process dictionary.
-spec configure(string(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey) ->
    Config = new(AccessKeyID, SecretAccessKey),
    put(aws_config, Config),
    ok.

%% @doc As configure/2, with an explicit Cognito User Pools host.
-spec configure(string(), string(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey, Host) ->
    Config = new(AccessKeyID, SecretAccessKey, Host),
    put(aws_config, Config),
    ok.
%% @doc List users in a pool with no optional parameters, default config.
-spec list_users(binary()) -> {ok, map()} | {error, any()}.
list_users(UserPoolId) ->
    list_users(UserPoolId, undefined, undefined, undefined, undefined).

%% @doc List users in a pool using an explicit config.
-spec list_users(binary(), aws_config()) -> {ok, map()} | {error, any()}.
list_users(UserPoolId, Config) ->
    Payload = #{<<"UserPoolId">> => unicode:characters_to_binary(UserPoolId)},
    request(Config, "ListUsers", Payload).

%% @doc List users with optional attribute projection, filter expression,
%% page size and pagination token.
-spec list_users(binary(),
                 [binary()] | undefined,
                 binary() | undefined,
                 number() | undefined,
                 binary() | undefined) -> {ok, map()} | {error, any()}.
list_users(UserPoolId, AttributesToGet, Filter, Limit, PaginationToken) ->
    list_users(UserPoolId, AttributesToGet, Filter, Limit, PaginationToken,
               erlcloud_aws:default_config()).

list_users(UserPoolId, AttributesToGet, Filter, Limit, PaginationToken, Config) ->
    %% Unset optional parameters are dropped before JSON encoding.
    Params = #{<<"AttributesToGet">> => AttributesToGet,
               <<"Filter">> => Filter,
               <<"Limit">> => Limit,
               <<"PaginationToken">> => PaginationToken,
               <<"UserPoolId">> => UserPoolId},
    request(Config, "ListUsers", erlcloud_util:filter_undef(Params)).
%% @doc Fetch every user in a pool, following pagination transparently.
-spec list_all_users(binary()) -> {ok, map()} | {error, any()}.
list_all_users(UserPoolId) ->
    list_all_users(UserPoolId, undefined).

%% @doc Second argument is either a filter expression (or undefined) or an
%% aws_config record; the record variant uses no filter.
-spec list_all_users(binary(), binary() | undefined | aws_config()) ->
    {ok, map()} | {error, any()}.
list_all_users(UserPoolId, Config) when is_record(Config, aws_config) ->
    list_all_users(UserPoolId, undefined, Config);
list_all_users(UserPoolId, Filter) ->
    list_all_users(UserPoolId, Filter, erlcloud_aws:default_config()).

-spec list_all_users(binary(), binary() | undefined, aws_config()) ->
    {ok, map()} | {error, any()}.
list_all_users(UserPoolId, Filter, Config) ->
    %% list_users/6 arguments: UserPoolId, AttributesToGet, Filter,
    %% then Limit/Token/Config appended by list_all.
    list_all(fun list_users/6, [UserPoolId, undefined, Filter],
             Config, <<"Users">>, <<"PaginationToken">>).
%% @doc List the groups a user belongs to, using the default config.
-spec admin_list_groups_for_user(binary(), binary()) ->
    {ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId) ->
    Config = erlcloud_aws:default_config(),
    admin_list_groups_for_user(UserName, UserPoolId, Config).

-spec admin_list_groups_for_user(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId, Config) ->
    Body = #{
        <<"Username">> => UserName,
        <<"UserPoolId">> => UserPoolId
    },
    request(Config, "AdminListGroupsForUser", Body).

%% @doc As admin_list_groups_for_user/2 with an explicit page size and
%% pagination token (pass undefined for the first page).
-spec admin_list_groups_for_user(binary(), binary(), number(),
                                 binary() | undefined) ->
    {ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId, Limit, NextToken) ->
    Config = erlcloud_aws:default_config(),
    admin_list_groups_for_user(UserName, UserPoolId, Limit, NextToken, Config).

-spec admin_list_groups_for_user(binary(), binary(), number(),
                                 binary() | undefined, aws_config()) ->
    {ok, map()} | {error, any()}.
admin_list_groups_for_user(UserName, UserPoolId, Limit, NextToken, Config) ->
    BaseBody = #{
        <<"Username">> => UserName,
        <<"UserPoolId">> => UserPoolId,
        <<"Limit">> => Limit,
        <<"NextToken">> => NextToken
    },
    %% NextToken (and Limit) may be undefined; drop unset keys instead of
    %% serializing the atom 'undefined' into the request body, matching the
    %% other paginated calls in this module.
    Body = erlcloud_util:filter_undef(BaseBody),
    request(Config, "AdminListGroupsForUser", Body).
%% @doc Fetch a single user's profile as an administrator.
-spec admin_get_user(binary(), binary()) -> {ok, map()} | {error, any()}.
admin_get_user(UserName, UserPoolId) ->
    admin_get_user(UserName, UserPoolId, erlcloud_aws:default_config()).

-spec admin_get_user(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_get_user(UserName, UserPoolId, Config) ->
    Payload = #{<<"UserPoolId">> => UserPoolId,
                <<"Username">> => UserName},
    request(Config, "AdminGetUser", Payload).
%% @doc Create a user as an administrator with no optional arguments.
-spec admin_create_user(binary(), binary()) ->
    {ok, map()} | {error, any()}.
admin_create_user(UserName, UserPoolId) ->
    admin_create_user(UserName, UserPoolId, #{}).

%% @doc OptionalArgs is a map of extra AdminCreateUser request fields.
%% Specs fixed: maps:maps() is not a defined type; use the builtin map().
-spec admin_create_user(binary(), binary(), map()) ->
    {ok, map()} | {error, any()}.
admin_create_user(UserName, UserPoolId, OptionalArgs) ->
    Config = erlcloud_aws:default_config(),
    admin_create_user(UserName, UserPoolId, OptionalArgs, Config).

-spec admin_create_user(binary(), binary(), map(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_create_user(UserName, UserPoolId, OptionalArgs, Config) ->
    %% The mandatory fields override anything present in OptionalArgs.
    Body = OptionalArgs#{
        <<"Username">> => UserName,
        <<"UserPoolId">> => UserPoolId
    },
    request(Config, "AdminCreateUser", Body).
%% @doc Delete a user as an administrator; returns ok on success.
-spec admin_delete_user(binary(), binary()) -> ok | {error, any()}.
admin_delete_user(UserName, UserPoolId) ->
    admin_delete_user(UserName, UserPoolId, erlcloud_aws:default_config()).

-spec admin_delete_user(binary(), binary(), aws_config()) -> ok | {error, any()}.
admin_delete_user(UserName, UserPoolId, Config) ->
    Payload = #{<<"UserPoolId">> => UserPoolId,
                <<"Username">> => UserName},
    request_no_resp(Config, "AdminDeleteUser", Payload).
%% @doc Add a user to a group as an administrator.
-spec admin_add_user_to_group(binary(), binary(), binary()) ->
    ok | {error, any()}.
admin_add_user_to_group(GroupName, UserName, UserPoolId) ->
    admin_add_user_to_group(GroupName, UserName, UserPoolId,
                            erlcloud_aws:default_config()).

-spec admin_add_user_to_group(binary(), binary(), binary(), aws_config()) ->
    ok | {error, any()}.
admin_add_user_to_group(GroupName, UserName, UserPoolId, Config) ->
    Payload = #{<<"GroupName">> => GroupName,
                <<"UserPoolId">> => UserPoolId,
                <<"Username">> => UserName},
    request_no_resp(Config, "AdminAddUserToGroup", Payload).
%% @doc Remove a user from a group as an administrator.
-spec admin_remove_user_from_group(binary(), binary(), binary()) ->
    ok | {error, any()}.
admin_remove_user_from_group(GroupName, UserName, UserPoolId) ->
    admin_remove_user_from_group(GroupName, UserName, UserPoolId,
                                 erlcloud_aws:default_config()).

-spec admin_remove_user_from_group(binary(), binary(), binary(), aws_config()) ->
    ok | {error, any()}.
admin_remove_user_from_group(GroupName, UserName, UserPoolId, Config) ->
    Payload = #{<<"GroupName">> => GroupName,
                <<"UserPoolId">> => UserPoolId,
                <<"Username">> => UserName},
    request_no_resp(Config, "AdminRemoveUserFromGroup", Payload).
%% @doc Create a group with no description, precedence or role ARN.
-spec create_group(binary(), binary()) -> {ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId) ->
    create_group(GroupName, UserPoolId, undefined, undefined, undefined).

-spec create_group(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId, Config) ->
    create_group(GroupName, UserPoolId, undefined, undefined, undefined, Config).

-spec create_group(binary(), binary(), binary() | undefined,
                   number() | undefined, binary() | undefined) ->
    {ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId, Description, Precedence, RoleArn) ->
    create_group(GroupName, UserPoolId, Description, Precedence, RoleArn,
                 erlcloud_aws:default_config()).

-spec create_group(binary(), binary(), binary() | undefined,
                   number() | undefined, binary() | undefined, aws_config()) ->
    {ok, map()} | {error, any()}.
create_group(GroupName, UserPoolId, Description, Precedence, RoleArn, Config) ->
    %% Optional fields left as undefined are stripped before encoding.
    Params = #{<<"Description">> => Description,
               <<"GroupName">> => GroupName,
               <<"Precedence">> => Precedence,
               <<"RoleArn">> => RoleArn,
               <<"UserPoolId">> => UserPoolId},
    request(Config, "CreateGroup", erlcloud_util:filter_undef(Params)).
%% @doc Delete a group; both identifiers are normalized to binaries.
-spec delete_group(binary(), binary()) -> ok | {error, any()}.
delete_group(GroupName, UserPoolId) ->
    delete_group(GroupName, UserPoolId, erlcloud_aws:default_config()).

-spec delete_group(binary(), binary(), aws_config()) -> ok | {error, any()}.
delete_group(GroupName, UserPoolId, Config) ->
    Payload = #{<<"GroupName">> => unicode:characters_to_binary(GroupName),
                <<"UserPoolId">> => unicode:characters_to_binary(UserPoolId)},
    request_no_resp(Config, "DeleteGroup", Payload).
%% @doc Reset a user's password as an administrator.
-spec admin_reset_user_password(binary(), binary()) ->
    ok | {error, any()}.
admin_reset_user_password(UserName, UserPoolId) ->
    admin_reset_user_password(UserName, UserPoolId, undefined).

%% @doc MetaData is forwarded to Lambda triggers as ClientMetadata.
-spec admin_reset_user_password(binary(), binary(), map() | undefined) ->
    ok | {error, any()}.
admin_reset_user_password(UserName, UserPoolId, MetaData) ->
    Config = erlcloud_aws:default_config(),
    admin_reset_user_password(UserName, UserPoolId, MetaData, Config).

-spec admin_reset_user_password(binary(), binary(),
                                map() | undefined, aws_config()) ->
    ok | {error, any()}.
admin_reset_user_password(UserName, UserPoolId, MetaData, Config) ->
    BaseBody = #{
        <<"Username">> => UserName,
        <<"UserPoolId">> => UserPoolId,
        %% Fixed request key: the AWS API parameter is "ClientMetadata";
        %% the previous "ClientMetaData" spelling was ignored by the service.
        <<"ClientMetadata">> => MetaData
    },
    Body = erlcloud_util:filter_undef(BaseBody),
    request_no_resp(Config, "AdminResetUserPassword", Body).
%% @doc Update a user's attributes as an administrator. Attributes is a
%% list of #{<<"Name">> => ..., <<"Value">> => ...} maps.
-spec admin_update_user_attributes(binary(), binary(), [map()]) ->
    ok | {error, any()}.
admin_update_user_attributes(UserName, UserPoolId, Attributes) ->
    admin_update_user_attributes(UserName, UserPoolId, Attributes, undefined).

-spec admin_update_user_attributes(binary(), binary(), [map()],
                                   map() | undefined) ->
    ok | {error, any()}.
admin_update_user_attributes(UserName, UserPoolId, Attributes, MetaData) ->
    Config = erlcloud_aws:default_config(),
    admin_update_user_attributes(UserName, UserPoolId, Attributes, MetaData, Config).

-spec admin_update_user_attributes(binary(), binary(), [map()],
                                   map() | undefined, aws_config()) ->
    ok | {error, any()}.
admin_update_user_attributes(UserName, UserPoolId, Attributes, MetaData, Config) ->
    BaseBody = #{
        <<"Username">> => UserName,
        <<"UserPoolId">> => UserPoolId,
        <<"UserAttributes">> => Attributes,
        %% Fixed request key: the AWS API parameter is "ClientMetadata";
        %% the previous "ClientMetaData" spelling was ignored by the service.
        <<"ClientMetadata">> => MetaData
    },
    Body = erlcloud_util:filter_undef(BaseBody),
    request_no_resp(Config, "AdminUpdateUserAttributes", Body).
%% @doc Change the calling user's own password using their access token.
-spec change_password(binary(), binary(), binary()) ->
    ok | {error, any()}.
change_password(OldPass, NewPass, AccessToken) ->
    change_password(OldPass, NewPass, AccessToken, erlcloud_aws:default_config()).

-spec change_password(binary(), binary(), binary(), aws_config()) ->
    ok | {error, any()}.
change_password(OldPass, NewPass, AccessToken, Config) ->
    Payload = #{<<"AccessToken">> => AccessToken,
                <<"PreviousPassword">> => OldPass,
                <<"ProposedPassword">> => NewPass},
    request_no_resp(Config, "ChangePassword", Payload).
%% @doc List user pools with the default page size.
-spec list_user_pools() -> {ok, map()} | {error, any()}.
list_user_pools() ->
    list_user_pools(?MAX_RESULTS, undefined).

-spec list_user_pools(integer()) -> {ok, map()} | {error, any()}.
list_user_pools(MaxResult) ->
    list_user_pools(MaxResult, undefined).

-spec list_user_pools(integer(), binary() | undefined) ->
    {ok, map()} | {error, any()}.
list_user_pools(MaxResult, NextToken) ->
    list_user_pools(MaxResult, NextToken, erlcloud_aws:default_config()).

-spec list_user_pools(integer(), binary() | undefined, aws_config()) ->
    {ok, map()} | {error, any()}.
list_user_pools(MaxResult, NextToken, Config) ->
    %% An undefined NextToken (first page) is dropped before encoding.
    Params = #{<<"MaxResults">> => MaxResult,
               <<"NextToken">> => NextToken},
    request(Config, "ListUserPools", erlcloud_util:filter_undef(Params)).
%% @doc Fetch all user pools, following pagination transparently.
-spec list_all_user_pools() -> {ok, map()} | {error, any()}.
list_all_user_pools() ->
    list_all_user_pools(erlcloud_aws:default_config()).

-spec list_all_user_pools(aws_config()) -> {ok, map()} | {error, any()}.
list_all_user_pools(Config) ->
    list_all(fun list_user_pools/3, [], Config, <<"UserPools">>, <<"NextToken">>).
%% @doc Set a user's password as an administrator; the password is set as
%% temporary (Permanent = false).
%% Spec fixed: the chain ends in request_no_resp/3 which returns ok, not
%% {ok, map()}; the /3 spec now matches the /4 and /5 specs.
-spec admin_set_user_password(binary(), binary(), binary()) ->
    ok | {error, any()}.
admin_set_user_password(UserId, UserPoolId, Password) ->
    admin_set_user_password(UserId, UserPoolId, Password, false).

-spec admin_set_user_password(binary(), binary(), binary(), boolean()) ->
    ok | {error, any()}.
admin_set_user_password(UserId, UserPoolId, Password, Permanent) ->
    Config = erlcloud_aws:default_config(),
    admin_set_user_password(UserId, UserPoolId, Password, Permanent, Config).

-spec admin_set_user_password(binary(), binary(), binary(), boolean(),
                              aws_config()) ->
    ok | {error, any()}.
admin_set_user_password(UserId, UserPoolId, Password, Permanent, Config) ->
    Body = #{
        <<"Password">> => Password,
        <<"Username">> => UserId,
        <<"UserPoolId">> => UserPoolId,
        <<"Permanent">> => Permanent
    },
    request_no_resp(Config, "AdminSetUserPassword", Body).
%% @doc Describe a user pool's configuration.
-spec describe_user_pool(binary()) -> {ok, map()} | {error, any()}.
describe_user_pool(UserPoolId) ->
    describe_user_pool(UserPoolId, erlcloud_aws:default_config()).

-spec describe_user_pool(binary(), aws_config()) -> {ok, map()} | {error, any()}.
describe_user_pool(UserPoolId, Config) ->
    request(Config, "DescribeUserPool", #{<<"UserPoolId">> => UserPoolId}).
%% @doc Fetch the MFA configuration of a user pool.
-spec get_user_pool_mfa_config(binary()) -> {ok, map()} | {error, any()}.
get_user_pool_mfa_config(UserPoolId) ->
    get_user_pool_mfa_config(UserPoolId, erlcloud_aws:default_config()).

-spec get_user_pool_mfa_config(binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
get_user_pool_mfa_config(UserPoolId, Config) ->
    request(Config, "GetUserPoolMfaConfig", #{<<"UserPoolId">> => UserPoolId}).
%% @doc List identity providers of a pool with the default page size.
-spec list_identity_providers(binary()) -> {ok, map()} | {error, any()}.
list_identity_providers(UserPoolId) ->
    list_identity_providers(UserPoolId, ?MAX_RESULTS, undefined).

-spec list_identity_providers(binary(), integer(), binary() | undefined) ->
    {ok, map()} | {error, any()}.
list_identity_providers(UserPoolId, MaxResults, NextToken) ->
    list_identity_providers(UserPoolId, MaxResults, NextToken,
                            erlcloud_aws:default_config()).

-spec list_identity_providers(binary(),
                              integer(),
                              binary() | undefined,
                              aws_config()) ->
    {ok, map()} | {error, any()}.
list_identity_providers(UserPoolId, MaxResults, NextToken, Config) ->
    %% An undefined NextToken (first page) is dropped before encoding.
    Params = #{<<"MaxResults">> => MaxResults,
               <<"NextToken">> => NextToken,
               <<"UserPoolId">> => UserPoolId},
    request(Config, "ListIdentityProviders", erlcloud_util:filter_undef(Params)).
%% @doc Fetch all identity providers of a pool across every page.
-spec list_all_identity_providers(binary()) ->
    {ok, map()} | {error, any()}.
list_all_identity_providers(UserPoolId) ->
    list_all_identity_providers(UserPoolId, erlcloud_aws:default_config()).

-spec list_all_identity_providers(binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
list_all_identity_providers(UserPoolId, Config) ->
    list_all(fun list_identity_providers/4, [UserPoolId],
             Config, <<"Providers">>, <<"NextToken">>).
%% @doc Describe a single identity provider of a pool.
-spec describe_identity_provider(binary(), binary()) ->
    {ok, map()} | {error, any()}.
describe_identity_provider(UserPoolId, ProviderName) ->
    describe_identity_provider(UserPoolId, ProviderName,
                               erlcloud_aws:default_config()).

-spec describe_identity_provider(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
describe_identity_provider(UserPoolId, ProviderName, Config) ->
    Payload = #{<<"ProviderName">> => ProviderName,
                <<"UserPoolId">> => UserPoolId},
    request(Config, "DescribeIdentityProvider", Payload).
%% @doc Describe a user pool app client.
-spec describe_user_pool_client(binary(), binary()) ->
    {ok, map()} | {error, any()}.
describe_user_pool_client(UserPoolId, ClientId) ->
    Config = erlcloud_aws:default_config(),
    describe_user_pool_client(UserPoolId, ClientId, Config).

%% Added the previously missing spec, for symmetry with the other
%% describe_* functions in this module.
-spec describe_user_pool_client(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
describe_user_pool_client(UserPoolId, ClientId, Config) ->
    Body = #{
        <<"ClientId">> => ClientId,
        <<"UserPoolId">> => UserPoolId
    },
    request(Config, "DescribeUserPoolClient", Body).
%% @doc List app clients of a pool with the default page size.
-spec list_user_pool_clients(binary()) -> {ok, map()} | {error, any()}.
list_user_pool_clients(UserPoolId) ->
    list_user_pool_clients(UserPoolId, ?MAX_RESULTS, undefined).

-spec list_user_pool_clients(binary(), non_neg_integer(), binary() | undefined) ->
    {ok, map()} | {error, any()}.
list_user_pool_clients(UserPoolId, MaxResults, NextToken) ->
    list_user_pool_clients(UserPoolId, MaxResults, NextToken,
                           erlcloud_aws:default_config()).

-spec list_user_pool_clients(binary(), non_neg_integer(), binary() | undefined,
                             aws_config()) ->
    {ok, map()} | {error, any()}.
list_user_pool_clients(UserPoolId, MaxResults, NextToken, Config) ->
    %% An undefined NextToken (first page) is dropped before encoding.
    Params = #{<<"MaxResults">> => MaxResults,
               <<"NextToken">> => NextToken,
               <<"UserPoolId">> => UserPoolId},
    request(Config, "ListUserPoolClients", erlcloud_util:filter_undef(Params)).
%% @doc Fetch all app clients of a pool across every page.
-spec list_all_user_pool_clients(binary()) ->
    {ok, map()} | {error, any()}.
list_all_user_pool_clients(UserPoolId) ->
    list_all_user_pool_clients(UserPoolId, erlcloud_aws:default_config()).

-spec list_all_user_pool_clients(binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
list_all_user_pool_clients(UserPoolId, Config) ->
    list_all(fun list_user_pool_clients/4, [UserPoolId],
             Config, <<"UserPoolClients">>, <<"NextToken">>).
%% @doc List a user's remembered devices with defaults.
-spec admin_list_devices(binary(), binary()) -> {ok, map()} | {error, any()}.
admin_list_devices(UserPoolId, Username) ->
    admin_list_devices(UserPoolId, Username, erlcloud_aws:default_config()).

-spec admin_list_devices(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_list_devices(UserPoolId, Username, Config) ->
    admin_list_devices(UserPoolId, Username, ?MAX_RESULTS, undefined, Config).

-spec admin_list_devices(binary(), binary(), integer(), binary() | undefined,
                         aws_config()) ->
    {ok, map()} | {error, any()}.
admin_list_devices(UserPoolId, Username, Limit, PaginationToken, Config) ->
    %% An undefined PaginationToken (first page) is dropped before encoding.
    Params = #{<<"Limit">> => Limit,
               <<"PaginationToken">> => PaginationToken,
               <<"Username">> => Username,
               <<"UserPoolId">> => UserPoolId},
    request(Config, "AdminListDevices", erlcloud_util:filter_undef(Params)).
%% @doc Fetch all of a user's devices, following pagination transparently.
-spec admin_list_all_devices(binary(), binary()) ->
    {ok, map()} | {error, any()}.
admin_list_all_devices(UserPoolId, Username) ->
    admin_list_all_devices(UserPoolId, Username, erlcloud_aws:default_config()).

-spec admin_list_all_devices(binary(), binary(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_list_all_devices(UserPoolId, Username, Config) ->
    list_all(fun admin_list_devices/5, [UserPoolId, Username],
             Config, <<"Devices">>, <<"PaginationToken">>).
%% @doc Forget (unregister) one of a user's devices as an administrator.
-spec admin_forget_device(binary(), binary(), binary()) ->
    ok | {error, any()}.
admin_forget_device(UserPoolId, Username, DeviceKey) ->
    admin_forget_device(UserPoolId, Username, DeviceKey,
                        erlcloud_aws:default_config()).

-spec admin_forget_device(binary(), binary(), binary(), aws_config()) ->
    ok | {error, any()}.
admin_forget_device(UserPoolId, Username, DeviceKey, Config) ->
    Payload = #{<<"DeviceKey">> => DeviceKey,
                <<"Username">> => Username,
                <<"UserPoolId">> => UserPoolId},
    request_no_resp(Config, "AdminForgetDevice", Payload).
%% @doc Confirm a user's sign-up as an administrator.
-spec admin_confirm_signup(binary(), binary()) ->
    {ok, map()} | {error, any()}.
admin_confirm_signup(UserPoolId, Username) ->
    admin_confirm_signup(UserPoolId, Username, #{}).

%% Specs fixed: maps:map/0 is not a defined type (maps:map is a function);
%% use the builtin map() type instead.
-spec admin_confirm_signup(binary(), binary(), map()) ->
    {ok, map()} | {error, any()}.
admin_confirm_signup(UserPoolId, Username, ClientMetadata) ->
    Config = erlcloud_aws:default_config(),
    admin_confirm_signup(UserPoolId, Username, ClientMetadata, Config).

-spec admin_confirm_signup(binary(), binary(), map(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_confirm_signup(UserPoolId, Username, ClientMetadata, Config) ->
    Body = #{
        <<"UserPoolId">> => UserPoolId,
        <<"Username">> => Username,
        <<"ClientMetadata">> => ClientMetadata
    },
    request(Config, "AdminConfirmSignUp", Body).
%% @doc Initiate an authentication flow as an administrator.
%% Specs fixed: maps:map/0 is not a defined type; use the builtin map().
-spec admin_initiate_auth(binary(), binary(), binary(), map()) ->
    {ok, map()} | {error, any()}.
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams) ->
    Cfg = erlcloud_aws:default_config(),
    admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams, Cfg).

-spec admin_initiate_auth(binary(), binary(), binary(),
                          map(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams, Cfg) ->
    admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams,
                        #{}, #{}, #{}, Cfg).

-spec admin_initiate_auth(binary(), binary(), binary(), map(),
                          map(), map(), map(), aws_config()) ->
    {ok, map()} | {error, any()}.
admin_initiate_auth(PoolId, ClientId, AuthFlow, AuthParams,
                    AnalyticsMeta, ClientMeta, ContextData, Cfg) ->
    Mandatory = #{
        <<"AuthFlow">> => AuthFlow,
        <<"ClientId">> => ClientId,
        <<"UserPoolId">> => PoolId
    },
    %% Empty optional maps are stripped by make_request_body/2.
    Optional = #{
        <<"AnalyticsMetadata">> => AnalyticsMeta,
        <<"AuthParameters">> => AuthParams,
        <<"ClientMetadata">> => ClientMeta,
        <<"ContextData">> => ContextData
    },
    request(Cfg, "AdminInitiateAuth", make_request_body(Mandatory, Optional)).
%% @doc Answer an authentication challenge issued by InitiateAuth.
%% Specs fixed: maps:map/0 is not a defined type; use the builtin map().
-spec respond_to_auth_challenge(binary(), binary(), map(), binary()) ->
    {ok, map()} | {error, any()}.
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses, Session) ->
    Cfg = erlcloud_aws:default_config(),
    respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
                              Session, Cfg).

-spec respond_to_auth_challenge(binary(), binary(), map(), binary(),
                                aws_config()) ->
    {ok, map()} | {error, any()}.
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
                          Session, Cfg) ->
    respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
                              Session, #{}, #{}, #{}, Cfg).

-spec respond_to_auth_challenge(binary(), binary(), map(), binary(),
                                map(), map(), map(),
                                aws_config()) ->
    {ok, map()} | {error, any()}.
respond_to_auth_challenge(ClientId, ChallengeName, ChallengeResponses,
                          Session, AnalyticsMeta, ClientMeta, ContextData, Cfg) ->
    Mandatory = #{
        <<"ChallengeName">> => ChallengeName,
        <<"ChallengeResponses">> => ChallengeResponses,
        <<"ClientId">> => ClientId
    },
    %% Empty optional maps are stripped by make_request_body/2.
    Optional = #{
        <<"AnalyticsMetadata">> => AnalyticsMeta,
        <<"ClientMetadata">> => ClientMeta,
        <<"Session">> => Session,
        <<"UserContextData">> => ContextData
    },
    request(Cfg, "RespondToAuthChallenge", make_request_body(Mandatory, Optional)).
%% @doc Create an identity provider with no attribute mapping or IdP ids.
-spec create_identity_provider(binary(), binary(), binary(), map()) ->
    {ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType,
                         ProviderDetails) ->
    create_identity_provider(UserPoolId, ProviderName, ProviderType,
                             ProviderDetails, #{}).

-spec create_identity_provider(binary(), binary(), binary(), map(), map()) ->
    {ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType,
                         ProviderDetails, AttributeMapping) ->
    create_identity_provider(UserPoolId, ProviderName, ProviderType,
                             ProviderDetails, AttributeMapping, []).

-spec create_identity_provider(binary(), binary(), binary(),
                               map(), map(), list()) ->
    {ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType,
                         ProviderDetails, AttributeMapping, IdpIdentifiers) ->
    create_identity_provider(UserPoolId, ProviderName, ProviderType,
                             ProviderDetails, AttributeMapping, IdpIdentifiers,
                             erlcloud_aws:default_config()).

-spec create_identity_provider(binary(), binary(), binary(), map(), map(),
                               list(), aws_config()) ->
    {ok, map()} | {error, any()}.
create_identity_provider(UserPoolId, ProviderName, ProviderType, ProviderDetails,
                         AttributeMapping, IdpIdentifiers, Config) ->
    Required = #{<<"ProviderDetails">> => ProviderDetails,
                 <<"ProviderName">> => ProviderName,
                 <<"ProviderType">> => ProviderType,
                 <<"UserPoolId">> => UserPoolId},
    %% Empty optional values are stripped by make_request_body/2.
    Extras = #{<<"AttributeMapping">> => AttributeMapping,
               <<"IdpIdentifiers">> => IdpIdentifiers},
    request(Config, "CreateIdentityProvider", make_request_body(Required, Extras)).
%% @doc Delete an identity provider from a user pool.
-spec delete_identity_provider(binary(), binary()) ->
    ok | {error, any()}.
delete_identity_provider(UserPoolId, ProviderName) ->
    delete_identity_provider(UserPoolId, ProviderName,
                             erlcloud_aws:default_config()).

-spec delete_identity_provider(binary(), binary(), aws_config()) ->
    ok | {error, any()}.
delete_identity_provider(UserPoolId, ProviderName, Config) ->
    Payload = #{<<"ProviderName">> => ProviderName,
                <<"UserPoolId">> => UserPoolId},
    request_no_resp(Config, "DeleteIdentityProvider", Payload).
%% @doc Update an identity provider; every field beyond the name is optional.
-spec update_identity_provider(binary(), binary()) ->
    {ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName) ->
    update_identity_provider(UserPoolId, ProviderName, #{}).

-spec update_identity_provider(binary(), binary(), map()) ->
    {ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName, ProviderDetails) ->
    update_identity_provider(UserPoolId, ProviderName, ProviderDetails, #{}).

-spec update_identity_provider(binary(), binary(), map(), map()) ->
    {ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName,
                         ProviderDetails, AttributeMapping) ->
    update_identity_provider(UserPoolId, ProviderName,
                             ProviderDetails, AttributeMapping, []).

-spec update_identity_provider(binary(), binary(), map(), map(), list()) ->
    {ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName,
                         ProviderDetails, AttributeMapping, IdpIdentifiers) ->
    update_identity_provider(UserPoolId, ProviderName, ProviderDetails,
                             AttributeMapping, IdpIdentifiers,
                             erlcloud_aws:default_config()).

-spec update_identity_provider(binary(), binary(), map(), map(),
                               list(), aws_config()) ->
    {ok, map()} | {error, any()}.
update_identity_provider(UserPoolId, ProviderName, ProviderDetails,
                         AttributeMapping, IdpIdentifiers, Config) ->
    Required = #{<<"ProviderName">> => ProviderName,
                 <<"UserPoolId">> => UserPoolId},
    %% Empty optional values are stripped by make_request_body/2.
    Extras = #{<<"AttributeMapping">> => AttributeMapping,
               <<"IdpIdentifiers">> => IdpIdentifiers,
               <<"ProviderDetails">> => ProviderDetails},
    request(Config, "UpdateIdentityProvider", make_request_body(Required, Extras)).
%% Execute a prepared #aws_request{} with retry handling, then decode the
%% JSON response body; an empty success body becomes an empty map.
request(Config, Request) ->
    Raw = erlcloud_retry:request(Config, Request, fun handle_result/1),
    case erlcloud_aws:request_to_return(Raw) of
        {ok, {_Headers, <<>>}} -> {ok, #{}};
        {ok, {_Headers, Json}} -> {ok, jsx:decode(Json, [return_maps])};
        {error, _} = Error -> Error
    end.
%% Build a signed Cognito IDP JSON request for OperationName and send it.
request(Config0, OperationName, Request) ->
    case erlcloud_aws:update_config(Config0) of
        {error, Reason} ->
            {error, Reason};
        {ok, Config} ->
            Body = jsx:encode(Request),
            Target = "AWSCognitoIdentityProviderService." ++ OperationName,
            AwsRequest = #aws_request{service = 'cognito-idp',
                                      uri = get_url(Config),
                                      method = post,
                                      request_headers = get_headers(Config, Target, Body),
                                      request_body = Body},
            request(Config, AwsRequest)
    end.
%% Like request/3, but discards the (usually empty) success payload.
request_no_resp(Config, OperationName, Request) ->
    case request(Config, OperationName, Request) of
        {ok, _Ignored} -> ok;
        Error -> Error
    end.
%% Merge optional request fields (empty values removed) over the
%% mandatory ones.
make_request_body(Mandatory, Optional) ->
    Extras = erlcloud_util:filter_empty_map(Optional),
    maps:merge(Mandatory, Extras).
%% Build the SigV4-signed header list for a Cognito IDP call.
get_headers(#aws_config{cognito_user_pools_host = Host} = Config, Operation, Body) ->
    Region = erlcloud_aws:aws_region_from_host(Host),
    BaseHeaders = [{"host", Host},
                   {"x-amz-target", Operation},
                   {"version", ?API_VERSION},
                   {"content-type", "application/x-amz-json-1.1"}],
    erlcloud_aws:sign_v4_headers(Config, BaseHeaders, Body, Region, "cognito-idp").
%% Retry decision callback for erlcloud_retry: successful responses pass
%% through unchanged; AWS errors with a 5xx status are marked retryable;
%% all other AWS errors are not retried.
handle_result(#aws_request{response_type = ok} = Request) ->
    Request;
handle_result(#aws_request{response_type = error,
                           error_type = aws,
                           response_status = Status} = Request)
  when Status >= 500 ->
    Request#aws_request{should_retry = true};
handle_result(#aws_request{response_type = error,
                           error_type = aws} = Request) ->
    Request#aws_request{should_retry = false}.
%% Endpoint URL: the configured scheme (e.g. "https://") followed by host.
get_url(#aws_config{cognito_user_pools_scheme = Scheme,
                    cognito_user_pools_host = Host}) ->
    lists:append(Scheme, Host).
%% Repeatedly apply a paged listing fun, concatenating the values found
%% under Key until the service stops returning a pagination token.
list_all(Fun, Args, Config, Key, TokenAlias) ->
    list_all(Fun, Args, Config, Key, TokenAlias, undefined, []).

list_all(Fun, Args, Config, Key, TokenAlias, Token, Acc) ->
    %% Each page call receives the fixed Args plus page size, token, config.
    case erlang:apply(Fun, Args ++ [?MAX_RESULTS, Token, Config]) of
        {ok, Page} ->
            NewAcc = Acc ++ maps:get(Key, Page),
            case maps:get(TokenAlias, Page, undefined) of
                undefined ->
                    {ok, #{Key => NewAcc}};
                NextToken ->
                    list_all(Fun, Args, Config, Key, TokenAlias, NextToken, NewAcc)
            end;
        Error ->
            Error
    end.
|
b0d2567593b5b5a25555d621319fe9e48b5271a8f9de1429e9c227e87b21961e | finnishtransportagency/harja | kohteet_test.clj | (ns harja.palvelin.palvelut.kanavat.kohteet-test
(:require [clojure.test :refer :all]
[harja.palvelin.komponentit.tietokanta :as tietokanta]
[com.stuartsierra.component :as component]
[harja
[pvm :as pvm]
[testi :refer :all]]
[harja.kyselyt.konversio :as konv]
[harja.palvelin.palvelut.yllapito-toteumat :refer :all]
[harja.tyokalut.functor :refer [fmap]]
[taoensso.timbre :as log]
[clojure.spec.gen.alpha :as gen]
[clojure.spec.alpha :as s]
[harja.palvelin.palvelut.kanavat.kohteet :as kan-kohteet]
[clojure.string :as str]
[harja.domain.kanavat.kohdekokonaisuus :as kok]
[harja.domain.kanavat.kohde :as kohde]
[harja.domain.kanavat.kohteenosa :as osa]
[harja.domain.kanavat.kanavan-huoltokohde :as huoltokohde]
[harja.domain.urakka :as ur]
[harja.domain.muokkaustiedot :as m]))
(defn jarjestelma-fixture
  "Test fixture: starts a system with the test database, a test HTTP
  service and the kanava kohteet service, runs the tests, then stops
  the system."
  [testit]
  (let [sys (component/system-map
              :db (tietokanta/luo-tietokanta testitietokanta)
              :http-palvelin (testi-http-palvelin)
              :kan-kanavat (component/using
                             (kan-kohteet/->Kohteet)
                             [:http-palvelin :db]))]
    (alter-var-root #'jarjestelma (constantly (component/start sys)))
    (testit)
    (alter-var-root #'jarjestelma component/stop)))
;; Run the system fixture (outermost) combined with the urakka-data
;; fixture once for the whole namespace.
(use-fixtures :once (compose-fixtures
                      jarjestelma-fixture
                      urakkatieto-fixture))
(defn- jollain-kanavalla-nimi? [vastaus]
  ;; Truthy when at least one kohde of any kohdekokonaisuus has a name.
  (some some?
        (for [kanava vastaus
              kohde (::kok/kohteet kanava)]
          (::kohde/nimi kohde))))
(defn- pakolliset-kentat? [vastaus]
  ;; Restored the `;;` markers that were lost in extraction: the bare
  ;; Finnish words were being read as unresolved symbols. Logic unchanged.
  (every?
    (fn [kanava]
      ;; These values must be present.
      (and (every? some? ((juxt ::kok/id ::kok/nimi ::kok/kohteet) kanava))
           (every?
             (fn [kohde]
               ;; These values must be present.
               (and (every? some? ((juxt ::kohde/id) kohde))
                    ;; These keys must exist.
                    (every? (partial contains? kohde) [::kohde/urakat])
                    #_(every? (partial contains? kohde) [::kohde/kohteenosat])
                    #_(every?
                        (fn [osa]
                          ;; These values must be present.
                          (every? some? ((juxt ::osa/id ::osa/tyyppi) osa)))
                        (::kohde/kohteenosat kohde))
                    (every?
                      (fn [urakka]
                        ;; These values must be present.
                        (every? some? ((juxt ::ur/id ::ur/nimi) urakka)))
                      (::kohde/urakat kohde))))
             (::kok/kohteet kanava))))
    vastaus))
(deftest kanavien-haku
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:hae-kohdekokonaisuudet-ja-kohteet
+kayttaja-jvh+)]
(is (s/valid? ::kok/hae-kohdekokonaisuudet-ja-kohteet-vastaus vastaus))
(is (jollain-kanavalla-nimi? vastaus))
(is (pakolliset-kentat? vastaus))))
(deftest urakan-kohteiden-haku
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:hae-urakan-kohteet
+kayttaja-jvh+
{::ur/id (hae-urakan-id-nimella "Saimaan kanava")})]
(is (true? (every? (comp some? ::kohde/nimi) vastaus)))))
(deftest urakan-kohteiden-haku
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:hae-kanavien-huoltokohteet
+kayttaja-jvh+)]
(is (true? (every? (comp some? ::huoltokohde/nimi) vastaus)))))
(deftest kohteen-liittaminen-urakkaan
(testing "Uuden linkin lisääminen"
(let [kohde-id (hae-kohde-iisalmen-kanava)
urakka-id (hae-urakan-id-nimella "Saimaan kanava")
linkki (first (q (str "SELECT * FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))
_ (is (and (some? kohde-id) (some? urakka-id)))
_ (is (empty? linkki))
params {:liitokset {[kohde-id urakka-id] true}}
vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:liita-kohteet-urakkaan
+kayttaja-jvh+
params)]
(let [[ur koh poistettu?] (first (q (str "SELECT \"urakka-id\", \"kohde-id\", poistettu FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))]
(is (= ur urakka-id))
(is (= koh kohde-id))
(is (= poistettu? false)))))
(testing "Linkin poistaminen"
(let [kohde-id (hae-kohde-soskua)
urakka-id (hae-urakan-id-nimella "Saimaan kanava")
linkki (first (q (str "SELECT * FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))
_ (is (and (some? kohde-id) (some? urakka-id)))
_ (is (some? linkki))
params {:liitokset {[kohde-id urakka-id] false}}
vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:liita-kohteet-urakkaan
+kayttaja-jvh+
params)]
(let [[ur koh poistettu?] (first (q (str "SELECT \"urakka-id\", \"kohde-id\", poistettu FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))]
(is (= ur urakka-id))
(is (= koh kohde-id))
(is (= poistettu? true)))))
(testing "Linkin palauttaminen"
(let [kohde-id (hae-kohde-soskua)
urakka-id (hae-urakan-id-nimella "Saimaan kanava")
linkki (first (q (str "SELECT * FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))
_ (is (and (some? kohde-id) (some? urakka-id)))
_ (is (some? linkki))
params {:liitokset {[kohde-id urakka-id] true}}
vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:liita-kohteet-urakkaan
+kayttaja-jvh+
params)]
(let [[ur koh poistettu?] (first (q (str "SELECT \"urakka-id\", \"kohde-id\", poistettu FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))]
(is (= ur urakka-id))
(is (= koh kohde-id))
(is (= poistettu? false)))))) | null | https://raw.githubusercontent.com/finnishtransportagency/harja/c57d742beaff2bef7b30318819f07d4a13423404/test/clj/harja/palvelin/palvelut/kanavat/kohteet_test.clj | clojure | (ns harja.palvelin.palvelut.kanavat.kohteet-test
(:require [clojure.test :refer :all]
[harja.palvelin.komponentit.tietokanta :as tietokanta]
[com.stuartsierra.component :as component]
[harja
[pvm :as pvm]
[testi :refer :all]]
[harja.kyselyt.konversio :as konv]
[harja.palvelin.palvelut.yllapito-toteumat :refer :all]
[harja.tyokalut.functor :refer [fmap]]
[taoensso.timbre :as log]
[clojure.spec.gen.alpha :as gen]
[clojure.spec.alpha :as s]
[harja.palvelin.palvelut.kanavat.kohteet :as kan-kohteet]
[clojure.string :as str]
[harja.domain.kanavat.kohdekokonaisuus :as kok]
[harja.domain.kanavat.kohde :as kohde]
[harja.domain.kanavat.kohteenosa :as osa]
[harja.domain.kanavat.kanavan-huoltokohde :as huoltokohde]
[harja.domain.urakka :as ur]
[harja.domain.muokkaustiedot :as m]))
(defn jarjestelma-fixture [testit]
(alter-var-root #'jarjestelma
(fn [_]
(component/start
(component/system-map
:db (tietokanta/luo-tietokanta testitietokanta)
:http-palvelin (testi-http-palvelin)
:kan-kanavat (component/using
(kan-kohteet/->Kohteet)
[:http-palvelin :db])))))
(testit)
(alter-var-root #'jarjestelma component/stop))
(use-fixtures :once (compose-fixtures
jarjestelma-fixture
urakkatieto-fixture))
(defn- jollain-kanavalla-nimi? [vastaus]
(some
some?
(mapcat
(fn [kanava]
(map
(fn [kohde]
(::kohde/nimi kohde))
(::kok/kohteet kanava)))
vastaus)))
(defn- pakolliset-kentat? [vastaus]
(every?
(fn [kanava]
nämä arvot täytyy löytyä
(and (every? some? ((juxt ::kok/id ::kok/nimi ::kok/kohteet) kanava))
(every?
(fn [kohde]
nämä arvot täytyy löytyä
(and (every? some? ((juxt ::kohde/id) kohde))
Nämä avaimet pitää olla
(every? (partial contains? kohde) [::kohde/urakat])
#_(every? (partial contains? kohde) [::kohde/kohteenosat])
#_(every?
(fn [osa]
nämä arvot täytyy löytyä
(every? some? ((juxt ::osa/id ::osa/tyyppi) urakka)))
(::kohde/kohteenosat kohde))
(every?
(fn [urakka]
nämä arvot täytyy löytyä
(every? some? ((juxt ::ur/id ::ur/nimi) urakka)))
(::kohde/urakat kohde))))
(::kok/kohteet kanava))))
vastaus))
(deftest kanavien-haku
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:hae-kohdekokonaisuudet-ja-kohteet
+kayttaja-jvh+)]
(is (s/valid? ::kok/hae-kohdekokonaisuudet-ja-kohteet-vastaus vastaus))
(is (jollain-kanavalla-nimi? vastaus))
(is (pakolliset-kentat? vastaus))))
(deftest urakan-kohteiden-haku
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:hae-urakan-kohteet
+kayttaja-jvh+
{::ur/id (hae-urakan-id-nimella "Saimaan kanava")})]
(is (true? (every? (comp some? ::kohde/nimi) vastaus)))))
(deftest urakan-kohteiden-haku
(let [vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:hae-kanavien-huoltokohteet
+kayttaja-jvh+)]
(is (true? (every? (comp some? ::huoltokohde/nimi) vastaus)))))
(deftest kohteen-liittaminen-urakkaan
(testing "Uuden linkin lisääminen"
(let [kohde-id (hae-kohde-iisalmen-kanava)
urakka-id (hae-urakan-id-nimella "Saimaan kanava")
linkki (first (q (str "SELECT * FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))
_ (is (and (some? kohde-id) (some? urakka-id)))
_ (is (empty? linkki))
params {:liitokset {[kohde-id urakka-id] true}}
vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:liita-kohteet-urakkaan
+kayttaja-jvh+
params)]
(let [[ur koh poistettu?] (first (q (str "SELECT \"urakka-id\", \"kohde-id\", poistettu FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))]
(is (= ur urakka-id))
(is (= koh kohde-id))
(is (= poistettu? false)))))
(testing "Linkin poistaminen"
(let [kohde-id (hae-kohde-soskua)
urakka-id (hae-urakan-id-nimella "Saimaan kanava")
linkki (first (q (str "SELECT * FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))
_ (is (and (some? kohde-id) (some? urakka-id)))
_ (is (some? linkki))
params {:liitokset {[kohde-id urakka-id] false}}
vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:liita-kohteet-urakkaan
+kayttaja-jvh+
params)]
(let [[ur koh poistettu?] (first (q (str "SELECT \"urakka-id\", \"kohde-id\", poistettu FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))]
(is (= ur urakka-id))
(is (= koh kohde-id))
(is (= poistettu? true)))))
(testing "Linkin palauttaminen"
(let [kohde-id (hae-kohde-soskua)
urakka-id (hae-urakan-id-nimella "Saimaan kanava")
linkki (first (q (str "SELECT * FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))
_ (is (and (some? kohde-id) (some? urakka-id)))
_ (is (some? linkki))
params {:liitokset {[kohde-id urakka-id] true}}
vastaus (kutsu-palvelua (:http-palvelin jarjestelma)
:liita-kohteet-urakkaan
+kayttaja-jvh+
params)]
(let [[ur koh poistettu?] (first (q (str "SELECT \"urakka-id\", \"kohde-id\", poistettu FROM kan_kohde_urakka WHERE \"kohde-id\" = " kohde-id
" AND \"urakka-id\" =" urakka-id ";")))]
(is (= ur urakka-id))
(is (= koh kohde-id))
(is (= poistettu? false)))))) | |
0bc6586538d6a5bd63b704e84ee519c86deea7f49c7d17b6913dfb8bfd5622fb | khibino/haskell-relational-record | Session.hs | {-# LANGUAGE Rank2Types #-}
-- |
-- Module : Database.HDBC.Session
Copyright : 2013 - 2018
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : unknown
--
-- This module provides a base bracketed function
-- to call close correctly against opend DB connection.
module Database.HDBC.Session (
-- * Bracketed session
-- $bracketedSession
transaction,
withConnectionIO, withConnectionIO_,
bracketConnection,
-- * Show errors
-- $showErrors
showSqlError, handleSqlError',
-- * Deprecated
withConnection,
withConnectionIO',
withConnectionCommit,
) where
import Database.HDBC (IConnection, handleSql,
SqlError(seState, seNativeError, seErrorMsg))
import qualified Database.HDBC as HDBC
import Control.Exception (bracket)
{- $bracketedSession
Bracket function implementation is provided by several packages,
so this package provides base implementation which requires
bracket function and corresponding lift function.
-}
$ showErrors
Functions to show ' SqlError ' type not to show ' String ' fields .
Functions to show 'SqlError' type not to show 'String' fields.
-}
| show ' SqlError ' not to show ' String ' fields .
showSqlError :: SqlError -> String
showSqlError se = unlines
["seState: '" ++ seState se ++ "'",
"seNativeError: " ++ show (seNativeError se),
"seErrorMsg: '" ++ seErrorMsg se ++ "'"]
| Like ' handleSqlError ' , but not to show ' String ' fields of SqlError .
handleSqlError' :: IO a -> IO a
handleSqlError' = handleSql (fail . reformat . showSqlError) where
reformat = ("SQL error: \n" ++) . unlines . map (" " ++) . lines
| Generalized session with bracketed HDBC connection .
-- Run a transaction on a HDBC IConnection and close the connection.
bracketConnection :: (Monad m, IConnection conn)
=> (forall c. m c -> (c -> m ()) -> (c -> m a) -> m a) -- ^ bracket
-> (forall b. IO b -> m b) -- ^ lift
-> IO conn -- ^ Connect action
-> (conn -> m a) -- ^ Transaction body
-> m a
bracketConnection bracket_ lift connect tbody =
bracket_ (lift open) (lift . close) bodyWithRollback
where
open = handleSqlError' connect
close :: IConnection conn => conn -> IO ()
close = handleSqlError' . HDBC.disconnect
bodyWithRollback conn =
bracket_
(return ())
-- Do rollback independent from driver default behavior when disconnect.
(const . lift . handleSqlError' $ HDBC.rollback conn)
(const $ tbody conn)
# DEPRECATED withConnection " use ' bracketConnection ' instead of this . " #
-- | Deprecated. use 'bracketConnection' instead of this.
withConnection :: (Monad m, IConnection conn)
=> (forall c. m c -> (c -> m ()) -> (c -> m a) -> m a)
-> (forall b. IO b -> m b)
-> IO conn
-> (conn -> m a)
-> m a
withConnection = bracketConnection
-- | Same as 'withConnectionIO' other than not wrapping transaction body in 'handleSqlError''.
withConnectionIO_ :: IConnection conn
=> IO conn -- ^ Connect action
-> (conn -> IO a) -- ^ Transaction body
-> IO a -- ^ Result transaction action
withConnectionIO_ = bracketConnection bracket id
| Run a transaction on a HDBC ' IConnection ' and close the connection .
-- Not issuing commit at last, so if you need, issue commit manually in transaction body.
withConnectionIO :: IConnection conn
=> IO conn -- ^ Connect action
-> (conn -> IO a) -- ^ Transaction body
-> IO a -- ^ Result transaction action
withConnectionIO connect body = withConnectionIO_ connect $ handleSqlError' . body
{-# DEPRECATED withConnectionIO' "use 'withConnectionIO' instead of this." #-}
-- | Deprecated. use 'withConnectionIO' instead of this.
withConnectionIO' :: IConnection conn
=> IO conn -- ^ Connect action
-> (conn -> IO a) -- ^ Transaction body
-> IO a -- ^ Result transaction action
withConnectionIO' = withConnectionIO
| Run a transaction on a HDBC ' IConnection ' and commit at last , and then close the connection .
-- In other words, the transaction with no exception is committed.
-- Handy defintion for simple transactions.
transaction :: IConnection conn
=> IO conn -- ^ Connect action
-> (conn -> IO a) -- ^ Transaction body
-> IO a -- ^ Result transaction action
transaction conn body =
withConnectionIO conn $ \c -> do
x <- body c
HDBC.commit c
return x
{-# DEPRECATED withConnectionCommit "use 'transaction' instead of this." #-}
-- | Deprecated. use 'transaction' instead of this.
withConnectionCommit :: IConnection conn
=> IO conn -- ^ Connect action
-> (conn -> IO a) -- ^ Transaction body
-> IO a -- ^ Result transaction action
withConnectionCommit conn body =
withConnectionIO_ conn $ \c -> do
x <- body c
HDBC.commit c
return x
| null | https://raw.githubusercontent.com/khibino/haskell-relational-record/759b3d7cea207e64d2bd1cf195125182f73d2a52/HDBC-session/src/Database/HDBC/Session.hs | haskell | # LANGUAGE Rank2Types #
|
Module : Database.HDBC.Session
License : BSD3
Maintainer :
Stability : experimental
Portability : unknown
This module provides a base bracketed function
to call close correctly against opend DB connection.
* Bracketed session
$bracketedSession
* Show errors
$showErrors
* Deprecated
$bracketedSession
Bracket function implementation is provided by several packages,
so this package provides base implementation which requires
bracket function and corresponding lift function.
Run a transaction on a HDBC IConnection and close the connection.
^ bracket
^ lift
^ Connect action
^ Transaction body
Do rollback independent from driver default behavior when disconnect.
| Deprecated. use 'bracketConnection' instead of this.
| Same as 'withConnectionIO' other than not wrapping transaction body in 'handleSqlError''.
^ Connect action
^ Transaction body
^ Result transaction action
Not issuing commit at last, so if you need, issue commit manually in transaction body.
^ Connect action
^ Transaction body
^ Result transaction action
# DEPRECATED withConnectionIO' "use 'withConnectionIO' instead of this." #
| Deprecated. use 'withConnectionIO' instead of this.
^ Connect action
^ Transaction body
^ Result transaction action
In other words, the transaction with no exception is committed.
Handy defintion for simple transactions.
^ Connect action
^ Transaction body
^ Result transaction action
# DEPRECATED withConnectionCommit "use 'transaction' instead of this." #
| Deprecated. use 'transaction' instead of this.
^ Connect action
^ Transaction body
^ Result transaction action |
Copyright : 2013 - 2018
module Database.HDBC.Session (
transaction,
withConnectionIO, withConnectionIO_,
bracketConnection,
showSqlError, handleSqlError',
withConnection,
withConnectionIO',
withConnectionCommit,
) where
import Database.HDBC (IConnection, handleSql,
SqlError(seState, seNativeError, seErrorMsg))
import qualified Database.HDBC as HDBC
import Control.Exception (bracket)
$ showErrors
Functions to show ' SqlError ' type not to show ' String ' fields .
Functions to show 'SqlError' type not to show 'String' fields.
-}
| show ' SqlError ' not to show ' String ' fields .
showSqlError :: SqlError -> String
showSqlError se = unlines
["seState: '" ++ seState se ++ "'",
"seNativeError: " ++ show (seNativeError se),
"seErrorMsg: '" ++ seErrorMsg se ++ "'"]
| Like ' handleSqlError ' , but not to show ' String ' fields of SqlError .
handleSqlError' :: IO a -> IO a
handleSqlError' = handleSql (fail . reformat . showSqlError) where
reformat = ("SQL error: \n" ++) . unlines . map (" " ++) . lines
| Generalized session with bracketed HDBC connection .
bracketConnection :: (Monad m, IConnection conn)
-> m a
bracketConnection bracket_ lift connect tbody =
bracket_ (lift open) (lift . close) bodyWithRollback
where
open = handleSqlError' connect
close :: IConnection conn => conn -> IO ()
close = handleSqlError' . HDBC.disconnect
bodyWithRollback conn =
bracket_
(return ())
(const . lift . handleSqlError' $ HDBC.rollback conn)
(const $ tbody conn)
# DEPRECATED withConnection " use ' bracketConnection ' instead of this . " #
withConnection :: (Monad m, IConnection conn)
=> (forall c. m c -> (c -> m ()) -> (c -> m a) -> m a)
-> (forall b. IO b -> m b)
-> IO conn
-> (conn -> m a)
-> m a
withConnection = bracketConnection
withConnectionIO_ :: IConnection conn
withConnectionIO_ = bracketConnection bracket id
| Run a transaction on a HDBC ' IConnection ' and close the connection .
withConnectionIO :: IConnection conn
withConnectionIO connect body = withConnectionIO_ connect $ handleSqlError' . body
withConnectionIO' :: IConnection conn
withConnectionIO' = withConnectionIO
| Run a transaction on a HDBC ' IConnection ' and commit at last , and then close the connection .
transaction :: IConnection conn
transaction conn body =
withConnectionIO conn $ \c -> do
x <- body c
HDBC.commit c
return x
withConnectionCommit :: IConnection conn
withConnectionCommit conn body =
withConnectionIO_ conn $ \c -> do
x <- body c
HDBC.commit c
return x
|
852dc8fc87f9d6f4ad1dbd6a63f87f5a93229a2c6103066e4d46676ed426bcea | mirage/ocaml-matrix | identifier.ml | open Json_encoding
module User = struct
type t = {user: string} [@@deriving accessor]
let encoding =
let to_tuple t = t.user in
let of_tuple v =
let user = v in
{user} in
let with_tuple = obj1 (req "user" string) in
conv to_tuple of_tuple with_tuple
let pp ppf t = Fmt.(pf ppf "{ user: %s }" t.user)
end
module Thirdparty = struct
type t = {medium: string; address: string} [@@deriving accessor]
let encoding =
let to_tuple t = t.medium, t.address in
let of_tuple v =
let medium, address = v in
{medium; address} in
let with_tuple = obj2 (req "medium" string) (req "address" string) in
conv to_tuple of_tuple with_tuple
let pp ppf t = Fmt.(pf ppf "{ medium: %s ; address: %s }" t.medium t.address)
end
module Phone = struct
type t = {country: string; phone: string} [@@deriving accessor]
let encoding =
let to_tuple t = t.country, t.phone in
let of_tuple v =
let country, phone = v in
{country; phone} in
let with_tuple = obj2 (req "country" string) (req "phone" string) in
conv to_tuple of_tuple with_tuple
let pp ppf t = Fmt.(pf ppf "{ country: %s ; phone: %s }" t.country t.phone)
end
type t = User of User.t | Thirdparty of Thirdparty.t | Phone of Phone.t
let encoding =
let to_tuple t =
let get_type = function
| User _ -> "m.id.user"
| Thirdparty _ -> "m.id.thirdparty"
| Phone _ -> "m.id.phone" in
get_type t, t in
let of_tuple v =
let _, t = v in
t in
let with_tuple =
cond
(obj1 (req "type" string))
[
( "m.id.user",
case User.encoding
(function User t -> Some t | _ -> None)
(fun t -> User t) );
( "m.id.thirdparty",
case Thirdparty.encoding
(function Thirdparty t -> Some t | _ -> None)
(fun t -> Thirdparty t) );
( "m.id.phone",
case Phone.encoding
(function Phone t -> Some t | _ -> None)
(fun t -> Phone t) );
] in
conv to_tuple of_tuple with_tuple
let pp ppf = function
| User t -> Fmt.(pf ppf "User of %a" User.pp t)
| Thirdparty t -> Fmt.(pf ppf "Thirdparty of %a" Thirdparty.pp t)
| Phone t -> Fmt.(pf ppf "Phone of %a" Phone.pp t)
| null | https://raw.githubusercontent.com/mirage/ocaml-matrix/2a58d3d41c43404741f2dfdaf1d2d0f3757b2b69/lib/matrix-ctos/identifier.ml | ocaml | open Json_encoding
module User = struct
type t = {user: string} [@@deriving accessor]
let encoding =
let to_tuple t = t.user in
let of_tuple v =
let user = v in
{user} in
let with_tuple = obj1 (req "user" string) in
conv to_tuple of_tuple with_tuple
let pp ppf t = Fmt.(pf ppf "{ user: %s }" t.user)
end
module Thirdparty = struct
type t = {medium: string; address: string} [@@deriving accessor]
let encoding =
let to_tuple t = t.medium, t.address in
let of_tuple v =
let medium, address = v in
{medium; address} in
let with_tuple = obj2 (req "medium" string) (req "address" string) in
conv to_tuple of_tuple with_tuple
let pp ppf t = Fmt.(pf ppf "{ medium: %s ; address: %s }" t.medium t.address)
end
module Phone = struct
type t = {country: string; phone: string} [@@deriving accessor]
let encoding =
let to_tuple t = t.country, t.phone in
let of_tuple v =
let country, phone = v in
{country; phone} in
let with_tuple = obj2 (req "country" string) (req "phone" string) in
conv to_tuple of_tuple with_tuple
let pp ppf t = Fmt.(pf ppf "{ country: %s ; phone: %s }" t.country t.phone)
end
type t = User of User.t | Thirdparty of Thirdparty.t | Phone of Phone.t
let encoding =
let to_tuple t =
let get_type = function
| User _ -> "m.id.user"
| Thirdparty _ -> "m.id.thirdparty"
| Phone _ -> "m.id.phone" in
get_type t, t in
let of_tuple v =
let _, t = v in
t in
let with_tuple =
cond
(obj1 (req "type" string))
[
( "m.id.user",
case User.encoding
(function User t -> Some t | _ -> None)
(fun t -> User t) );
( "m.id.thirdparty",
case Thirdparty.encoding
(function Thirdparty t -> Some t | _ -> None)
(fun t -> Thirdparty t) );
( "m.id.phone",
case Phone.encoding
(function Phone t -> Some t | _ -> None)
(fun t -> Phone t) );
] in
conv to_tuple of_tuple with_tuple
let pp ppf = function
| User t -> Fmt.(pf ppf "User of %a" User.pp t)
| Thirdparty t -> Fmt.(pf ppf "Thirdparty of %a" Thirdparty.pp t)
| Phone t -> Fmt.(pf ppf "Phone of %a" Phone.pp t)
| |
2c104abeceea5609781afa54724a18fd35e5c462f41eb0136475e4b87945a1c1 | sebastian-philipp/r-tree | RTreeStrict.hs | # LANGUAGE BangPatterns #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE ScopedTypeVariables #
module Main
where
-- import qualified Data.RTree as Lazy -- just for dev.
import Prelude hiding (lookup, map, mapM,
null, succ)
import Control . Arrow ( second )
import Control.Applicative ((<$>))
import Control.DeepSeq (($!!))
import Data.RTree.Strict
import qualified Data.RTree as L
import Data.RTree.MBB
import qualified GHC.AssertNF as NF
-- import System.IO
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck.Arbitrary as QA (Arbitrary, arbitrary, shrink)
import Test.QuickCheck.Monadic as QM (PropertyM, monadicIO, pick, run, assert)
import Test.QuickCheck as Q (Property)
import Test.QuickCheck.Gen (suchThat)
import Test.HUnit hiding (Test, Testable)
newtype Attr = A [Int]
deriving (Show, Semigroup)
instance Monoid Attr where
mempty = mkA []
mappend (A xs) (A ys) = mkA (xs ++ ys)
evaluation of x ` mappend ` y to WHNF leads to NF
because of the $ ! ! in mkA
--
-- example
--
A [ 1,2 ] ` mappend ` A [ 3,4 ]
-- = { subst of mappend }
mkA ( [ 1,2 ] + + [ 3,4 ] )
-- = { subst of mkA }
A $ ! ! ( [ 1,2 ] + + [ 3,4 ] )
-- = { subst of $!! }
-- A [1,2,3,4]
--
-- in a call of Data.RTree.insert k (x `mappend` y) m
the attribute is forced to be in WHNF , and this leads to NF
type Map = RTree Attr
smart constructor for evaluation into NF
-- before calling the constructor A
mkA :: [Int] -> Attr
mkA xs = A $!! xs
mkA' :: Int -> Attr
mkA' x = mkA [0 .. x]
consA :: Int -> Attr -> Attr
consA n a = mkA [n] `mappend` a
default (Int)
main :: IO ()
main = defaultMain
[
testCase "isNF" test_isNF
, testCase "empty" (checkIsNF (empty :: RTree ()))
, testCase "t_1" (checkIsNF t_1)
, testCase "tu_1" (checkIsNF tu_1)
, testCase "tu_2" (checkIsNF tu_2)
, testCase "tu_2" (checkIsNF test_union)
, testCase "test_insertWith1" (checkIsNF test_insertWith1)
, testCase "test_insertWith" (checkIsNF test_insertWith)
, testCase "test_map" (checkIsNF test_map)
, testCase "test_toStrict" (checkIsNF test_toStrict)
, " m1 " ( checkIsNF m1 )
, testCase " m2 " ( checkIsNF m2 )
, testCase " m3 " ( checkIsNF m3 )
, " m5 " ( checkIsNF m3 )
, " m6 " ( checkIsNF m3 )
, " m7 ( map test ) " ( checkIsNF m7 )
, " fromList l4 " ( checkIsNF $ fromList l4 )
, " m8 ( fromList '' ' ll ) " ( checkIsNF m8 )
, " adjust m6 " ( checkIsNF $ adjust ( consA 42 ) " ab " m6 )
, " adjust m1 " ( checkIsNF $ adjust ( consA 42 ) " xx " m1 )
, " delete m6 " ( checkIsNF $ delete " ab " m6 )
, " delete m1 " ( checkIsNF $ delete " xx " m1 )
, testCase " m2 union m3 " ( checkIsNF $ m2 ` union ` m3 )
, testCase " m2 unionWith m2 " ( checkIsNF $ unionWith mappend m2 m2 )
, testProperty "prop_fromList" prop_fromList
, testProperty "prop_union" prop_union
]
test_isNF :: Assertion
test_isNF = fmap not (NF.isNF [(1::Int)..10]) @? "isNF"
checkIsNF :: (Show a) => RTree a -> Assertion
checkIsNF !m = NF.isNF m @? ("isNF " ++ show m)
-- some simple test data
-- ------------------------
t_mbb1, t_mbb2 , t_mbb3, t_mbb4, t_mbb5, t_mbb6 :: MBB
t_mbb1 = (MBB 0.0 0.0 1.0 1.0)
t_mbb2 = (MBB 5.0 0.0 6.0 1.0)
t_mbb3 = (MBB 1.0 2.0 2.0 3.0)
t_mbb4 = (MBB 6.0 2.0 7.0 3.0)
t_mbb5 = (MBB 3.0 3.0 4.0 4.0)
t_mbb6 = (MBB 0.0 0.0 0.0 0.0)
u_1, u_2 :: [(MBB, Attr)]
u_1 = [(t_mbb1, mkA' 1), (t_mbb2, mkA' 2),(t_mbb3, mkA' 3),(t_mbb4, mkA' 4)]
u_2 = [(t_mbb5, mkA' 5), (t_mbb6, mkA' 6)] ++ u_1
t_1, t_2, t_3, t_4, t_5, t_6 :: RTree Attr
[t_5, t_6, t_1, t_2, t_3, t_4] = (uncurry singleton) <$> u_2
tu_1, tu_2 :: RTree Attr
tu_1 = fromList u_1
tu_2 = fromList u_2
test_union :: RTree Attr
test_union = unionWith mappend tu_1 t_6
test_map :: RTree Attr
test_map = fmap id tu_1
test_insertWith1 :: RTree Attr
test_insertWith1 = insertWith mappend t_mbb1 (mkA' 4) t_1
test_insertWith :: RTree Attr
test_insertWith = insertWith mappend t_mbb6 (mkA' 6) tu_2
test_toStrict :: RTree Attr
test_toStrict = toStrict $ L.fromList u_2
# # # # # # # #
instance QA.Arbitrary MBB where
arbitrary = do
cx <- QA.arbitrary
cy <- QA.arbitrary
h <- QA.arbitrary `suchThat` (>=0)
w <- QA.arbitrary `suchThat` (>=0)
return $ MBB (cx - w) (cy - h) (cx + w) (cy + h)
shrink this_mbb@(MBB ulx uly brx bry)
| isPointMBB this_mbb = []
| otherwise = [MBB (mid ulx brx) (mid uly bry) (mid ulx brx) (mid uly bry)]
where
mid x y = (y - x) / 2
prop_fromList :: Q.Property
prop_fromList = QM.monadicIO $ do
l <- (QM.pick QA.arbitrary) :: QM.PropertyM IO [(MBB, Int)]
passed <- QM.run $ do
-- hPutStrLn stderr $ "\n" ++ show l
-- hPutStrLn stderr $ "\n" ++ show (fromList''' l)
NF.isNF $! fromList l
QM.assert passed
prop_union :: Q.Property
prop_union = QM.monadicIO $ do
l1 <- (QM.pick QA.arbitrary) :: QM.PropertyM IO [(MBB, Int)]
l2 <- (QM.pick QA.arbitrary) :: QM.PropertyM IO [(MBB, Int)]
passed <- QM.run $ do
let sm = fromList l1 `union` fromList l2
NF.isNF $! sm
QM.assert passed
| null | https://raw.githubusercontent.com/sebastian-philipp/r-tree/547c6fbda8827d8f17b3daaae3b7c9437664052a/test/RTreeStrict.hs | haskell | import qualified Data.RTree as Lazy -- just for dev.
import System.IO
example
= { subst of mappend }
= { subst of mkA }
= { subst of $!! }
A [1,2,3,4]
in a call of Data.RTree.insert k (x `mappend` y) m
before calling the constructor A
some simple test data
------------------------
hPutStrLn stderr $ "\n" ++ show l
hPutStrLn stderr $ "\n" ++ show (fromList''' l) | # LANGUAGE BangPatterns #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE ScopedTypeVariables #
module Main
where
import Prelude hiding (lookup, map, mapM,
null, succ)
import Control . Arrow ( second )
import Control.Applicative ((<$>))
import Control.DeepSeq (($!!))
import Data.RTree.Strict
import qualified Data.RTree as L
import Data.RTree.MBB
import qualified GHC.AssertNF as NF
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck.Arbitrary as QA (Arbitrary, arbitrary, shrink)
import Test.QuickCheck.Monadic as QM (PropertyM, monadicIO, pick, run, assert)
import Test.QuickCheck as Q (Property)
import Test.QuickCheck.Gen (suchThat)
import Test.HUnit hiding (Test, Testable)
newtype Attr = A [Int]
deriving (Show, Semigroup)
instance Monoid Attr where
mempty = mkA []
mappend (A xs) (A ys) = mkA (xs ++ ys)
evaluation of x ` mappend ` y to WHNF leads to NF
because of the $ ! ! in mkA
A [ 1,2 ] ` mappend ` A [ 3,4 ]
mkA ( [ 1,2 ] + + [ 3,4 ] )
A $ ! ! ( [ 1,2 ] + + [ 3,4 ] )
the attribute is forced to be in WHNF , and this leads to NF
type Map = RTree Attr
smart constructor for evaluation into NF
mkA :: [Int] -> Attr
mkA xs = A $!! xs
mkA' :: Int -> Attr
mkA' x = mkA [0 .. x]
consA :: Int -> Attr -> Attr
consA n a = mkA [n] `mappend` a
default (Int)
main :: IO ()
main = defaultMain
[
testCase "isNF" test_isNF
, testCase "empty" (checkIsNF (empty :: RTree ()))
, testCase "t_1" (checkIsNF t_1)
, testCase "tu_1" (checkIsNF tu_1)
, testCase "tu_2" (checkIsNF tu_2)
, testCase "tu_2" (checkIsNF test_union)
, testCase "test_insertWith1" (checkIsNF test_insertWith1)
, testCase "test_insertWith" (checkIsNF test_insertWith)
, testCase "test_map" (checkIsNF test_map)
, testCase "test_toStrict" (checkIsNF test_toStrict)
, " m1 " ( checkIsNF m1 )
, testCase " m2 " ( checkIsNF m2 )
, testCase " m3 " ( checkIsNF m3 )
, " m5 " ( checkIsNF m3 )
, " m6 " ( checkIsNF m3 )
, " m7 ( map test ) " ( checkIsNF m7 )
, " fromList l4 " ( checkIsNF $ fromList l4 )
, " m8 ( fromList '' ' ll ) " ( checkIsNF m8 )
, " adjust m6 " ( checkIsNF $ adjust ( consA 42 ) " ab " m6 )
, " adjust m1 " ( checkIsNF $ adjust ( consA 42 ) " xx " m1 )
, " delete m6 " ( checkIsNF $ delete " ab " m6 )
, " delete m1 " ( checkIsNF $ delete " xx " m1 )
, testCase " m2 union m3 " ( checkIsNF $ m2 ` union ` m3 )
, testCase " m2 unionWith m2 " ( checkIsNF $ unionWith mappend m2 m2 )
, testProperty "prop_fromList" prop_fromList
, testProperty "prop_union" prop_union
]
test_isNF :: Assertion
test_isNF = fmap not (NF.isNF [(1::Int)..10]) @? "isNF"
checkIsNF :: (Show a) => RTree a -> Assertion
checkIsNF !m = NF.isNF m @? ("isNF " ++ show m)
t_mbb1, t_mbb2 , t_mbb3, t_mbb4, t_mbb5, t_mbb6 :: MBB
t_mbb1 = (MBB 0.0 0.0 1.0 1.0)
t_mbb2 = (MBB 5.0 0.0 6.0 1.0)
t_mbb3 = (MBB 1.0 2.0 2.0 3.0)
t_mbb4 = (MBB 6.0 2.0 7.0 3.0)
t_mbb5 = (MBB 3.0 3.0 4.0 4.0)
t_mbb6 = (MBB 0.0 0.0 0.0 0.0)
u_1, u_2 :: [(MBB, Attr)]
u_1 = [(t_mbb1, mkA' 1), (t_mbb2, mkA' 2),(t_mbb3, mkA' 3),(t_mbb4, mkA' 4)]
u_2 = [(t_mbb5, mkA' 5), (t_mbb6, mkA' 6)] ++ u_1
t_1, t_2, t_3, t_4, t_5, t_6 :: RTree Attr
[t_5, t_6, t_1, t_2, t_3, t_4] = (uncurry singleton) <$> u_2
tu_1, tu_2 :: RTree Attr
tu_1 = fromList u_1
tu_2 = fromList u_2
test_union :: RTree Attr
test_union = unionWith mappend tu_1 t_6
test_map :: RTree Attr
test_map = fmap id tu_1
test_insertWith1 :: RTree Attr
test_insertWith1 = insertWith mappend t_mbb1 (mkA' 4) t_1
test_insertWith :: RTree Attr
test_insertWith = insertWith mappend t_mbb6 (mkA' 6) tu_2
test_toStrict :: RTree Attr
test_toStrict = toStrict $ L.fromList u_2
# # # # # # # #
instance QA.Arbitrary MBB where
arbitrary = do
cx <- QA.arbitrary
cy <- QA.arbitrary
h <- QA.arbitrary `suchThat` (>=0)
w <- QA.arbitrary `suchThat` (>=0)
return $ MBB (cx - w) (cy - h) (cx + w) (cy + h)
shrink this_mbb@(MBB ulx uly brx bry)
| isPointMBB this_mbb = []
| otherwise = [MBB (mid ulx brx) (mid uly bry) (mid ulx brx) (mid uly bry)]
where
mid x y = (y - x) / 2
prop_fromList :: Q.Property
prop_fromList = QM.monadicIO $ do
l <- (QM.pick QA.arbitrary) :: QM.PropertyM IO [(MBB, Int)]
passed <- QM.run $ do
NF.isNF $! fromList l
QM.assert passed
prop_union :: Q.Property
prop_union = QM.monadicIO $ do
l1 <- (QM.pick QA.arbitrary) :: QM.PropertyM IO [(MBB, Int)]
l2 <- (QM.pick QA.arbitrary) :: QM.PropertyM IO [(MBB, Int)]
passed <- QM.run $ do
let sm = fromList l1 `union` fromList l2
NF.isNF $! sm
QM.assert passed
|
a63d574e013276483fcc9b88efa49e6c0e420059c1625c5b63d319cf506f3bf4 | bytekid/mkbtt | dioMio.mli | val solve : (Rewriting.Variable.t list) * (Rewriting.Variable.t list) -> (Rewriting.Variable.t * int) list list
val minimize : (Rewriting.Variable.t * int) list list -> (Rewriting.Variable.t * int) list list
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mascott/src/dioMio.mli | ocaml | val solve : (Rewriting.Variable.t list) * (Rewriting.Variable.t list) -> (Rewriting.Variable.t * int) list list
val minimize : (Rewriting.Variable.t * int) list list -> (Rewriting.Variable.t * int) list list
| |
adbc9e26f63b178d4c5830895fab81064db765340d3e88ceb9f1cee0da60e59e | binsec/haunted | relse_insecurity.mli | type level = Path | Block | Instr
(** Module that handles all the insecurity checks.
Contains all the checks related to a path. *)
module Insecurity_State :
sig
type t
(** Create a new insecurity state with no insecurity checks *)
val create : unit -> t
* [ add_memory_check ps instr is ] Collects memory insecurity check
of hte instruction [ instr ] under the symbolic state [ ps ] and add
them to the insecurity state [ is ]
of hte instruction [instr] under the symbolic state [ps] and add
them to the insecurity state [is] *)
val add_memory_check : Relse_path.Path_state.t -> Dba.Instr.t -> t -> t
(** [add_memory_check r_expr is] Add a memory insecurity check to
the insecurity state [is] to check [r_expr] cannot leak secret
information *)
val add_cf_check : Rel_expr.rel_bv -> t -> t
* [ check_insecurity level is ] Perform a check ( according to the
[ level ] and [ fp ] parameters ) to ensure that no insecurity query is SAT
[level] and [fp] parameters) to ensure that no insecurity query is SAT *)
val check_insecurity : level -> Relse_path.Path_state.t -> t -> (Relse_path.Path_state.t * t)
end
| null | https://raw.githubusercontent.com/binsec/haunted/7ffc5f4072950fe138f53fe953ace98fff181c73/src/relse/relse_insecurity.mli | ocaml | * Module that handles all the insecurity checks.
Contains all the checks related to a path.
* Create a new insecurity state with no insecurity checks
* [add_memory_check r_expr is] Add a memory insecurity check to
the insecurity state [is] to check [r_expr] cannot leak secret
information | type level = Path | Block | Instr
module Insecurity_State :
sig
type t
val create : unit -> t
* [ add_memory_check ps instr is ] Collects memory insecurity check
of hte instruction [ instr ] under the symbolic state [ ps ] and add
them to the insecurity state [ is ]
of hte instruction [instr] under the symbolic state [ps] and add
them to the insecurity state [is] *)
val add_memory_check : Relse_path.Path_state.t -> Dba.Instr.t -> t -> t
val add_cf_check : Rel_expr.rel_bv -> t -> t
* [ check_insecurity level is ] Perform a check ( according to the
[ level ] and [ fp ] parameters ) to ensure that no insecurity query is SAT
[level] and [fp] parameters) to ensure that no insecurity query is SAT *)
val check_insecurity : level -> Relse_path.Path_state.t -> t -> (Relse_path.Path_state.t * t)
end
|
1c612bb9336e39745417106e643f737914db36998cefe2d393ac8692c510ab6a | avsm/mirage-duniverse | udp_packet.ml | type t = {
src_port : Cstruct.uint16;
dst_port : Cstruct.uint16;
}
let equal {src_port; dst_port} q =
src_port = q.src_port &&
dst_port = q.dst_port
let pp fmt t =
Format.fprintf fmt "UDP port %d -> %d" t.src_port t.dst_port
module Unmarshal = struct
type error = string
let of_cstruct buf =
let open Rresult in
let open Udp_wire in
let check_header_length () =
if Cstruct.len buf < sizeof_udp then Error "UDP header too short" else Ok ()
in
let check_payload_length length_from_header length_of_buffer =
if length_from_header < sizeof_udp then
Error "UDP header claimed a total length < the size of just the header"
else begin
let payload_len = length_from_header - sizeof_udp in
if payload_len > (length_of_buffer - sizeof_udp)
then Error (Printf.sprintf
"UDP header claimed a payload longer than the supplied buffer: %d vs %d."
payload_len length_of_buffer)
else Ok payload_len
end
in
check_header_length () >>= fun () ->
let total_length_from_header = get_udp_length buf in
check_payload_length total_length_from_header (Cstruct.len buf) >>= fun payload_len ->
let src_port = Udp_wire.get_udp_source_port buf in
let dst_port = Udp_wire.get_udp_dest_port buf in
let payload = Cstruct.sub buf Udp_wire.sizeof_udp payload_len in
Ok ({ src_port; dst_port; }, payload)
end
module Marshal = struct
open Rresult
type error = string
let unsafe_fill ~pseudoheader ~payload {src_port; dst_port} udp_buf len =
let open Udp_wire in
let udp_buf = Cstruct.sub udp_buf 0 sizeof_udp in
set_udp_source_port udp_buf src_port;
set_udp_dest_port udp_buf dst_port;
set_udp_length udp_buf len;
set_udp_checksum udp_buf 0;
(* if we've been passed a buffer larger than sizeof_udp, make sure we
* consider only the portion which will actually contain the header
* when calculating this bit of the checksum *)
let csum = Tcpip_checksum.ones_complement_list [ pseudoheader ; udp_buf ; payload ] in
Convert zero checksum to the equivalent 0xffff , to prevent it
* seeming like no checksum at all . From : " If the computed
* checksum is zero , it is transmitted as all ones ( the equivalent
* in one 's complement arithmetic ) . "
* seeming like no checksum at all. From RFC768: "If the computed
* checksum is zero, it is transmitted as all ones (the equivalent
* in one's complement arithmetic)." *)
let csum = if csum = 0 then 0xffff else csum in
set_udp_checksum udp_buf csum
let into_cstruct ~pseudoheader ~payload t udp_buf =
let open Udp_wire in
let check_header_len () =
if (Cstruct.len udp_buf) < sizeof_udp then Error "Not enough space for a UDP header"
else Ok ()
in
let check_overall_len () =
let needed = sizeof_udp in
let provided = Cstruct.len udp_buf in
if provided < needed then
Error (Printf.sprintf "Not enough space for UDP header: provided %d, need %d" provided needed)
else Ok ((Cstruct.len payload) + sizeof_udp)
in
check_header_len () >>= check_overall_len >>= fun len ->
let buf = Cstruct.sub udp_buf 0 Udp_wire.sizeof_udp in
unsafe_fill ~pseudoheader ~payload t buf len;
Ok ()
let make_cstruct ~pseudoheader ~payload t =
let buf = Cstruct.create Udp_wire.sizeof_udp in
let len = Udp_wire.sizeof_udp + Cstruct.len payload in
unsafe_fill ~pseudoheader ~payload t buf len;
buf
end
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/tcpip/src/udp/udp_packet.ml | ocaml | if we've been passed a buffer larger than sizeof_udp, make sure we
* consider only the portion which will actually contain the header
* when calculating this bit of the checksum | type t = {
src_port : Cstruct.uint16;
dst_port : Cstruct.uint16;
}
let equal {src_port; dst_port} q =
src_port = q.src_port &&
dst_port = q.dst_port
let pp fmt t =
Format.fprintf fmt "UDP port %d -> %d" t.src_port t.dst_port
module Unmarshal = struct
type error = string
let of_cstruct buf =
let open Rresult in
let open Udp_wire in
let check_header_length () =
if Cstruct.len buf < sizeof_udp then Error "UDP header too short" else Ok ()
in
let check_payload_length length_from_header length_of_buffer =
if length_from_header < sizeof_udp then
Error "UDP header claimed a total length < the size of just the header"
else begin
let payload_len = length_from_header - sizeof_udp in
if payload_len > (length_of_buffer - sizeof_udp)
then Error (Printf.sprintf
"UDP header claimed a payload longer than the supplied buffer: %d vs %d."
payload_len length_of_buffer)
else Ok payload_len
end
in
check_header_length () >>= fun () ->
let total_length_from_header = get_udp_length buf in
check_payload_length total_length_from_header (Cstruct.len buf) >>= fun payload_len ->
let src_port = Udp_wire.get_udp_source_port buf in
let dst_port = Udp_wire.get_udp_dest_port buf in
let payload = Cstruct.sub buf Udp_wire.sizeof_udp payload_len in
Ok ({ src_port; dst_port; }, payload)
end
module Marshal = struct
open Rresult
type error = string
let unsafe_fill ~pseudoheader ~payload {src_port; dst_port} udp_buf len =
let open Udp_wire in
let udp_buf = Cstruct.sub udp_buf 0 sizeof_udp in
set_udp_source_port udp_buf src_port;
set_udp_dest_port udp_buf dst_port;
set_udp_length udp_buf len;
set_udp_checksum udp_buf 0;
let csum = Tcpip_checksum.ones_complement_list [ pseudoheader ; udp_buf ; payload ] in
Convert zero checksum to the equivalent 0xffff , to prevent it
* seeming like no checksum at all . From : " If the computed
* checksum is zero , it is transmitted as all ones ( the equivalent
* in one 's complement arithmetic ) . "
* seeming like no checksum at all. From RFC768: "If the computed
* checksum is zero, it is transmitted as all ones (the equivalent
* in one's complement arithmetic)." *)
let csum = if csum = 0 then 0xffff else csum in
set_udp_checksum udp_buf csum
let into_cstruct ~pseudoheader ~payload t udp_buf =
let open Udp_wire in
let check_header_len () =
if (Cstruct.len udp_buf) < sizeof_udp then Error "Not enough space for a UDP header"
else Ok ()
in
let check_overall_len () =
let needed = sizeof_udp in
let provided = Cstruct.len udp_buf in
if provided < needed then
Error (Printf.sprintf "Not enough space for UDP header: provided %d, need %d" provided needed)
else Ok ((Cstruct.len payload) + sizeof_udp)
in
check_header_len () >>= check_overall_len >>= fun len ->
let buf = Cstruct.sub udp_buf 0 Udp_wire.sizeof_udp in
unsafe_fill ~pseudoheader ~payload t buf len;
Ok ()
let make_cstruct ~pseudoheader ~payload t =
let buf = Cstruct.create Udp_wire.sizeof_udp in
let len = Udp_wire.sizeof_udp + Cstruct.len payload in
unsafe_fill ~pseudoheader ~payload t buf len;
buf
end
|
2ad66fa131df83fc6e5451141d099ea51fb5dd38d656fe748b6259cdc0aee9c4 | tezos/tezos-mirror | main_wasm_debugger.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2022 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Wasm_utils
[ parse_binary_module module_name module_stream ] parses a binary encoded
module . Parsing outside of the PVM allows locations in case of errors .
module. Parsing outside of the PVM allows locations in case of errors. *)
let parse_binary_module name module_ =
let bytes = Tezos_lazy_containers.Chunked_byte_vector.of_string module_ in
Tezos_webassembly_interpreter.Decode.decode ~allow_floats:false ~name ~bytes
[ typecheck_module module_ast ] runs the typechecker on the module , which is
not done by the PVM .
not done by the PVM. *)
let typecheck_module module_ =
Repl_helpers.trap_exn (fun () ->
Tezos_webassembly_interpreter.Valid.check_module module_)
[ import_pvm_host_functions ( ) ] registers the host functions of the PVM .
let import_pvm_host_functions () =
let lookup name = Lwt.return (Tezos_scoru_wasm.Host_funcs.lookup name) in
Repl_helpers.trap_exn (fun () ->
Lwt.return
(Tezos_webassembly_interpreter.Import.register
~module_name:"smart_rollup_core"
lookup))
[ link module_ast ] checks a module actually uses the host functions with their
correct type , outside of the PVM .
correct type, outside of the PVM. *)
let link module_ =
Repl_helpers.trap_exn (fun () ->
Tezos_webassembly_interpreter.Import.link module_)
Starting point of the module after reading the kernel file : parsing ,
typechecking and linking for safety before feeding kernel to the PVM , then
installation into a tree for the PVM interpreter .
typechecking and linking for safety before feeding kernel to the PVM, then
installation into a tree for the PVM interpreter. *)
let handle_module binary name module_ =
let open Lwt_result_syntax in
let open Tezos_protocol_alpha.Protocol.Alpha_context.Sc_rollup in
let* ast =
Repl_helpers.trap_exn (fun () ->
if binary then parse_binary_module name module_
else Lwt.return (parse_module module_))
in
let* () = typecheck_module ast in
let* () = import_pvm_host_functions () in
let* _ = link ast in
let*! tree =
initial_tree
~version:V1
~ticks_per_snapshot:(Z.to_int64 Wasm_2_0_0PVM.ticks_per_snapshot)
~outbox_validity_period:Wasm_2_0_0PVM.outbox_validity_period
~outbox_message_limit:Wasm_2_0_0PVM.outbox_message_limit
~from_binary:binary
module_
in
let*! tree = eval_until_input_requested tree in
return tree
let start binary file =
let open Lwt_result_syntax in
let module_name = Filename.(file |> basename |> chop_extension) in
let*! buffer = Repl_helpers.read_file file in
handle_module binary module_name buffer
(* REPL main loop: reads an input, does something out of it, then loops. *)
let repl tree inboxes level config =
let open Lwt_result_syntax in
let rec loop tree inboxes level =
let*! () = Lwt_io.printf "> " in
let* input =
Lwt.catch
(fun () ->
let*! i = Lwt_io.read_line Lwt_io.stdin in
return_some i)
(fun _ -> return_none)
in
match input with
| Some command ->
let* tree, inboxes, level =
Commands.handle_command command config tree inboxes level
in
loop tree inboxes level
| None -> return tree
in
loop tree (List.to_seq inboxes) level
let file_parameter =
Tezos_clic.parameter (fun _ filename ->
Repl_helpers.(trap_exn (fun () -> read_file filename)))
let dir_parameter =
Tezos_clic.parameter (fun _ dirpath ->
if Sys.file_exists dirpath && Sys.is_directory dirpath then
Lwt.return_ok dirpath
else Error_monad.failwith "%s is not a valid directory" dirpath)
let wasm_param =
let open Lwt_result_syntax in
Tezos_clic.(
param
~name:"module"
~desc:"wasm or wast file"
(parameter (fun _ filename -> return filename)))
let input_arg =
let open Tezos_clic in
arg ~doc:"input file" ~long:"inputs" ~placeholder:"inputs.json" file_parameter
let rollup_parameter =
let open Lwt_result_syntax in
Tezos_clic.(
parameter (fun _ hash ->
let hash_opt =
Tezos_protocol_alpha.Protocol.Alpha_context.Sc_rollup.Address
.of_b58check_opt
hash
in
match hash_opt with
| Some hash -> return hash
| None ->
failwith
"Parameter '%s' is an invalid smart rollup address encoded in a \
base58 string."
hash))
let rollup_arg =
let open Tezos_clic in
arg
~doc:
(Format.asprintf
"The rollup address representing the current kernel. It is used on \
the reveal metadata channel and as the default destination for \
internal messages. If absent, it defaults to `%a`."
Tezos_protocol_alpha.Protocol.Alpha_context.Sc_rollup.Address.pp
Config.default_destination)
~long:"rollup"
~placeholder:"rollup address"
rollup_parameter
let preimage_directory_arg =
let open Tezos_clic in
arg
~doc:
(Format.sprintf
"Directory where the preimages can be read. If not specified, it \
defaults to `%s`."
Config.default_preimage_directory)
~long:"preimage-dir"
~placeholder:"preimage-dir"
dir_parameter
let main_command =
let open Tezos_clic in
let open Lwt_result_syntax in
command
~desc:"Start the eval loop"
(args3 input_arg rollup_arg preimage_directory_arg)
(wasm_param @@ stop)
(fun (inputs, rollup_arg, preimage_directory) wasm_file () ->
let config =
Config.config ?destination:rollup_arg ?preimage_directory ()
in
let*? binary =
if Filename.check_suffix wasm_file ".wasm" then Ok true
else if Filename.check_suffix wasm_file ".wast" then Ok false
else error_with "Kernels should have .wasm or .wast file extension"
in
let* tree = start binary wasm_file in
let* inboxes =
match inputs with
| Some inputs -> Messages.parse_inboxes inputs config
| None -> return []
in
let+ _tree = repl tree inboxes 0l config in
())
(* List of program commands *)
let commands = [main_command]
let global_options = Tezos_clic.no_options
let dispatch initial_ctx args =
let open Lwt_result_syntax in
let* ctx, remaining_args =
Tezos_clic.parse_global_options global_options initial_ctx args
in
Tezos_clic.dispatch commands ctx remaining_args
let () =
ignore
Tezos_clic.(
setup_formatter
Format.std_formatter
(if Unix.isatty Unix.stdout then Ansi else Plain)
Short) ;
let args = Array.to_list Sys.argv |> List.tl |> Option.value ~default:[] in
let result = Lwt_main.run (dispatch () args) in
match result with
| Ok _ -> ()
| Error [Tezos_clic.Version] ->
let version = Tezos_version.Bin_version.version_string in
Format.printf "%s\n" version ;
exit 0
| Error e ->
Format.eprintf
"%a\n%!"
Tezos_clic.(
fun ppf errs ->
pp_cli_errors
ppf
~executable_name:"octez-wasm-debugger"
~global_options:no_options
~default:pp
errs)
e ;
exit 1
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/195315d385d7e8e25fc599e7cb645b1429957183/src/bin_wasm_debugger/main_wasm_debugger.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
REPL main loop: reads an input, does something out of it, then loops.
List of program commands | Copyright ( c ) 2022 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Wasm_utils
[ parse_binary_module module_name module_stream ] parses a binary encoded
module . Parsing outside of the PVM allows locations in case of errors .
module. Parsing outside of the PVM allows locations in case of errors. *)
let parse_binary_module name module_ =
let bytes = Tezos_lazy_containers.Chunked_byte_vector.of_string module_ in
Tezos_webassembly_interpreter.Decode.decode ~allow_floats:false ~name ~bytes
[ typecheck_module module_ast ] runs the typechecker on the module , which is
not done by the PVM .
not done by the PVM. *)
let typecheck_module module_ =
Repl_helpers.trap_exn (fun () ->
Tezos_webassembly_interpreter.Valid.check_module module_)
[ import_pvm_host_functions ( ) ] registers the host functions of the PVM .
let import_pvm_host_functions () =
let lookup name = Lwt.return (Tezos_scoru_wasm.Host_funcs.lookup name) in
Repl_helpers.trap_exn (fun () ->
Lwt.return
(Tezos_webassembly_interpreter.Import.register
~module_name:"smart_rollup_core"
lookup))
[ link module_ast ] checks a module actually uses the host functions with their
correct type , outside of the PVM .
correct type, outside of the PVM. *)
let link module_ =
Repl_helpers.trap_exn (fun () ->
Tezos_webassembly_interpreter.Import.link module_)
Starting point of the module after reading the kernel file : parsing ,
typechecking and linking for safety before feeding kernel to the PVM , then
installation into a tree for the PVM interpreter .
typechecking and linking for safety before feeding kernel to the PVM, then
installation into a tree for the PVM interpreter. *)
let handle_module binary name module_ =
let open Lwt_result_syntax in
let open Tezos_protocol_alpha.Protocol.Alpha_context.Sc_rollup in
let* ast =
Repl_helpers.trap_exn (fun () ->
if binary then parse_binary_module name module_
else Lwt.return (parse_module module_))
in
let* () = typecheck_module ast in
let* () = import_pvm_host_functions () in
let* _ = link ast in
let*! tree =
initial_tree
~version:V1
~ticks_per_snapshot:(Z.to_int64 Wasm_2_0_0PVM.ticks_per_snapshot)
~outbox_validity_period:Wasm_2_0_0PVM.outbox_validity_period
~outbox_message_limit:Wasm_2_0_0PVM.outbox_message_limit
~from_binary:binary
module_
in
let*! tree = eval_until_input_requested tree in
return tree
let start binary file =
let open Lwt_result_syntax in
let module_name = Filename.(file |> basename |> chop_extension) in
let*! buffer = Repl_helpers.read_file file in
handle_module binary module_name buffer
let repl tree inboxes level config =
let open Lwt_result_syntax in
let rec loop tree inboxes level =
let*! () = Lwt_io.printf "> " in
let* input =
Lwt.catch
(fun () ->
let*! i = Lwt_io.read_line Lwt_io.stdin in
return_some i)
(fun _ -> return_none)
in
match input with
| Some command ->
let* tree, inboxes, level =
Commands.handle_command command config tree inboxes level
in
loop tree inboxes level
| None -> return tree
in
loop tree (List.to_seq inboxes) level
let file_parameter =
Tezos_clic.parameter (fun _ filename ->
Repl_helpers.(trap_exn (fun () -> read_file filename)))
let dir_parameter =
Tezos_clic.parameter (fun _ dirpath ->
if Sys.file_exists dirpath && Sys.is_directory dirpath then
Lwt.return_ok dirpath
else Error_monad.failwith "%s is not a valid directory" dirpath)
let wasm_param =
let open Lwt_result_syntax in
Tezos_clic.(
param
~name:"module"
~desc:"wasm or wast file"
(parameter (fun _ filename -> return filename)))
let input_arg =
let open Tezos_clic in
arg ~doc:"input file" ~long:"inputs" ~placeholder:"inputs.json" file_parameter
let rollup_parameter =
let open Lwt_result_syntax in
Tezos_clic.(
parameter (fun _ hash ->
let hash_opt =
Tezos_protocol_alpha.Protocol.Alpha_context.Sc_rollup.Address
.of_b58check_opt
hash
in
match hash_opt with
| Some hash -> return hash
| None ->
failwith
"Parameter '%s' is an invalid smart rollup address encoded in a \
base58 string."
hash))
let rollup_arg =
let open Tezos_clic in
arg
~doc:
(Format.asprintf
"The rollup address representing the current kernel. It is used on \
the reveal metadata channel and as the default destination for \
internal messages. If absent, it defaults to `%a`."
Tezos_protocol_alpha.Protocol.Alpha_context.Sc_rollup.Address.pp
Config.default_destination)
~long:"rollup"
~placeholder:"rollup address"
rollup_parameter
let preimage_directory_arg =
let open Tezos_clic in
arg
~doc:
(Format.sprintf
"Directory where the preimages can be read. If not specified, it \
defaults to `%s`."
Config.default_preimage_directory)
~long:"preimage-dir"
~placeholder:"preimage-dir"
dir_parameter
let main_command =
let open Tezos_clic in
let open Lwt_result_syntax in
command
~desc:"Start the eval loop"
(args3 input_arg rollup_arg preimage_directory_arg)
(wasm_param @@ stop)
(fun (inputs, rollup_arg, preimage_directory) wasm_file () ->
let config =
Config.config ?destination:rollup_arg ?preimage_directory ()
in
let*? binary =
if Filename.check_suffix wasm_file ".wasm" then Ok true
else if Filename.check_suffix wasm_file ".wast" then Ok false
else error_with "Kernels should have .wasm or .wast file extension"
in
let* tree = start binary wasm_file in
let* inboxes =
match inputs with
| Some inputs -> Messages.parse_inboxes inputs config
| None -> return []
in
let+ _tree = repl tree inboxes 0l config in
())
let commands = [main_command]
let global_options = Tezos_clic.no_options
let dispatch initial_ctx args =
let open Lwt_result_syntax in
let* ctx, remaining_args =
Tezos_clic.parse_global_options global_options initial_ctx args
in
Tezos_clic.dispatch commands ctx remaining_args
let () =
ignore
Tezos_clic.(
setup_formatter
Format.std_formatter
(if Unix.isatty Unix.stdout then Ansi else Plain)
Short) ;
let args = Array.to_list Sys.argv |> List.tl |> Option.value ~default:[] in
let result = Lwt_main.run (dispatch () args) in
match result with
| Ok _ -> ()
| Error [Tezos_clic.Version] ->
let version = Tezos_version.Bin_version.version_string in
Format.printf "%s\n" version ;
exit 0
| Error e ->
Format.eprintf
"%a\n%!"
Tezos_clic.(
fun ppf errs ->
pp_cli_errors
ppf
~executable_name:"octez-wasm-debugger"
~global_options:no_options
~default:pp
errs)
e ;
exit 1
|
a99690ebc5be1e7fc7a2f2c4aa2a61d1ee9d1aff0e8f64ce192e4dd7e301db7b | WormBase/wormbase_rest | homology.clj | (ns rest-api.classes.protein.widgets.homology
(:require
[datomic.api :as d]
[rest-api.db.main :refer [datomic-homology-conn datomic-conn]]
[rest-api.formatters.object :as obj :refer [pack-obj]]
[clojure.math.numeric-tower :as math]
[rest-api.classes.generic-fields :as generic]
[rest-api.classes.protein.core :as protein-core]
[rest-api.classes.protein.widgets.motif-details :as protein-motif-details]))
(defn ** [x n] (reduce * (repeat n x)))
(defn- score-to-evalue [score]
(let [evalue-str (format "%7.0e" (/ 1 (math/expt 10 score)))]
(if (= evalue-str " 0e+00")
" 0"
evalue-str)))
(defn- remove-introns-from-exons [exons]
(let [last-stop (atom 0)]
(flatten
(for [exon (sort-by :no exons)
:let [last-stop-position @last-stop
new-stop-position (+ last-stop-position (:len exon))
new-start-position (+ 1 last-stop-position)]]
(do (reset! last-stop new-stop-position)
{:no (:no exon)
:min (int (Math/floor (/ new-start-position 3)))
:max (int (Math/floor (/ new-stop-position 3)))})))))
This is likely going to be provided through the GFF files for JBrowse
(defn protein-homology [p]
{:data (let [hdb (d/db datomic-homology-conn)
db (d/db datomic-conn)]
{:match
(some->> (d/q '[:find ?p ?l
:in $ $hdb ?pid
:where
[$hdb ?hp :protein/id ?pid]
[$hdb ?l :locatable/parent ?hp]
[$hdb ?l :homology/protein ?pr]
[$hdb ?pr :protein/id ?hpid]
[$ ?p :protein/id ?hpid]
]
db hdb (:protein/id p))
(map (fn [ids]
(let [protein (d/entity db (first ids))
locatable (d/entity hdb (second ids))
score (:locatable/score locatable)]
{:source (pack-obj protein)
:species (pack-obj (:protein/species protein))
:method (->> locatable :locatable/method :method/id)
:score score
:evalue (when (some? score) (score-to-evalue score))
:min (when-let [lmin (:locatable/min locatable)]
(+ 1 lmin))
:max (when-let [lmax (:locatable/max locatable)]
(+ 1 lmax))}))))
:motif
(some->> (protein-core/get-motif-details p)
(map (fn [motif]
(if (nil? (:score motif))
motif
(conj motif
{:evalue (score-to-evalue (:score motif))})))))
:peptide
(when-let [peptide (:protein/peptide p)]
{:sequence (->> peptide
:protein.peptide/peptide
:peptide/sequence)
:min 1
:max (+ 1 (:protein.peptide/length peptide))})
:exons
(->> p
:cds.corresponding-protein/_protein
first
:cds/_corresponding-protein
generic/predicted-exon-structure
:data
remove-introns-from-exons)})
:description "Homologous proteins for the protein"})
(defn best-blastp-matches [p]
(let [hits (protein-core/get-best-blastp-matches p)]
{:data {:biggest (:protein/id p)
:hits hits}
:description (if hits
"best BLASTP hits from selected species"
"no homologous proteins found, no best blastp hits to display")}))
(defn homology-image [p]
{:data 1
:description "a dynamically generated image representing homologous regions of the protein"})
(defn homology-groups [p]
{:data (protein-core/get-homology-groups p)
:description "KOG homology groups of the protein"})
(def widget
{:name generic/name-field
:best_blastp_matches best-blastp-matches
; :protein_homology protein-homology
:homology_groups homology-groups
:homology_image homology-image
:schematic_parameters protein-motif-details/schematic-parameters})
| null | https://raw.githubusercontent.com/WormBase/wormbase_rest/e51026f35b87d96260b62ddb5458a81ee911bf3a/src/rest_api/classes/protein/widgets/homology.clj | clojure | :protein_homology protein-homology | (ns rest-api.classes.protein.widgets.homology
(:require
[datomic.api :as d]
[rest-api.db.main :refer [datomic-homology-conn datomic-conn]]
[rest-api.formatters.object :as obj :refer [pack-obj]]
[clojure.math.numeric-tower :as math]
[rest-api.classes.generic-fields :as generic]
[rest-api.classes.protein.core :as protein-core]
[rest-api.classes.protein.widgets.motif-details :as protein-motif-details]))
(defn ** [x n] (reduce * (repeat n x)))
(defn- score-to-evalue [score]
(let [evalue-str (format "%7.0e" (/ 1 (math/expt 10 score)))]
(if (= evalue-str " 0e+00")
" 0"
evalue-str)))
(defn- remove-introns-from-exons [exons]
(let [last-stop (atom 0)]
(flatten
(for [exon (sort-by :no exons)
:let [last-stop-position @last-stop
new-stop-position (+ last-stop-position (:len exon))
new-start-position (+ 1 last-stop-position)]]
(do (reset! last-stop new-stop-position)
{:no (:no exon)
:min (int (Math/floor (/ new-start-position 3)))
:max (int (Math/floor (/ new-stop-position 3)))})))))
This is likely going to be provided through the GFF files for JBrowse
(defn protein-homology [p]
{:data (let [hdb (d/db datomic-homology-conn)
db (d/db datomic-conn)]
{:match
(some->> (d/q '[:find ?p ?l
:in $ $hdb ?pid
:where
[$hdb ?hp :protein/id ?pid]
[$hdb ?l :locatable/parent ?hp]
[$hdb ?l :homology/protein ?pr]
[$hdb ?pr :protein/id ?hpid]
[$ ?p :protein/id ?hpid]
]
db hdb (:protein/id p))
(map (fn [ids]
(let [protein (d/entity db (first ids))
locatable (d/entity hdb (second ids))
score (:locatable/score locatable)]
{:source (pack-obj protein)
:species (pack-obj (:protein/species protein))
:method (->> locatable :locatable/method :method/id)
:score score
:evalue (when (some? score) (score-to-evalue score))
:min (when-let [lmin (:locatable/min locatable)]
(+ 1 lmin))
:max (when-let [lmax (:locatable/max locatable)]
(+ 1 lmax))}))))
:motif
(some->> (protein-core/get-motif-details p)
(map (fn [motif]
(if (nil? (:score motif))
motif
(conj motif
{:evalue (score-to-evalue (:score motif))})))))
:peptide
(when-let [peptide (:protein/peptide p)]
{:sequence (->> peptide
:protein.peptide/peptide
:peptide/sequence)
:min 1
:max (+ 1 (:protein.peptide/length peptide))})
:exons
(->> p
:cds.corresponding-protein/_protein
first
:cds/_corresponding-protein
generic/predicted-exon-structure
:data
remove-introns-from-exons)})
:description "Homologous proteins for the protein"})
(defn best-blastp-matches [p]
(let [hits (protein-core/get-best-blastp-matches p)]
{:data {:biggest (:protein/id p)
:hits hits}
:description (if hits
"best BLASTP hits from selected species"
"no homologous proteins found, no best blastp hits to display")}))
(defn homology-image [p]
{:data 1
:description "a dynamically generated image representing homologous regions of the protein"})
(defn homology-groups [p]
{:data (protein-core/get-homology-groups p)
:description "KOG homology groups of the protein"})
(def widget
{:name generic/name-field
:best_blastp_matches best-blastp-matches
:homology_groups homology-groups
:homology_image homology-image
:schematic_parameters protein-motif-details/schematic-parameters})
|
4a8c4809cc71144e1ff8edbddc067e0c5fc3a3881482a81f14ecbc774ecbfb0e | bgamari/ghc-debug | List.hs | # LANGUAGE DeriveFunctor #
# LANGUAGE DeriveGeneric #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
Authored by , copied from cursor package #
module Cursor.List
( ListCursor (..),
emptyListCursor,
makeListCursor,
makeListCursorWithSelection,
rebuildListCursor,
listCursorNull,
listCursorLength,
listCursorIndex,
listCursorSelectPrev,
listCursorSelectNext,
listCursorSelectIndex,
listCursorSelectStart,
listCursorSelectEnd,
listCursorPrevItem,
listCursorNextItem,
listCursorPrevUntil,
listCursorNextUntil,
listCursorInsert,
listCursorAppend,
listCursorInsertList,
listCursorAppendList,
listCursorRemove,
listCursorDelete,
listCursorSplit,
listCursorCombine,
traverseListCursor,
foldListCursor,
)
where
import Control.DeepSeq
import Cursor.Types
import GHC.Generics (Generic)
data ListCursor a = ListCursor
{ -- | In reverse order
listCursorPrev :: [a],
listCursorNext :: [a]
}
deriving (Show, Eq, Generic, Functor)
instance NFData a => NFData (ListCursor a)
emptyListCursor :: ListCursor a
emptyListCursor = ListCursor {listCursorPrev = [], listCursorNext = []}
makeListCursor :: [a] -> ListCursor a
makeListCursor as = ListCursor {listCursorPrev = [], listCursorNext = as}
makeListCursorWithSelection :: Int -> [a] -> Maybe (ListCursor a)
makeListCursorWithSelection i as
| i < 0 = Nothing
| i > length as = Nothing
| otherwise = Just ListCursor {listCursorPrev = reverse $ take i as, listCursorNext = drop i as}
rebuildListCursor :: ListCursor a -> [a]
rebuildListCursor ListCursor {..} = reverse listCursorPrev ++ listCursorNext
listCursorNull :: ListCursor a -> Bool
listCursorNull ListCursor {..} = null listCursorPrev && null listCursorNext
listCursorLength :: ListCursor a -> Int
listCursorLength = length . rebuildListCursor
listCursorIndex :: ListCursor a -> Int
listCursorIndex = length . listCursorPrev
listCursorSelectPrev :: ListCursor a -> Maybe (ListCursor a)
listCursorSelectPrev tc =
case listCursorPrev tc of
[] -> Nothing
(c : cs) -> Just ListCursor {listCursorPrev = cs, listCursorNext = c : listCursorNext tc}
listCursorSelectNext :: ListCursor a -> Maybe (ListCursor a)
listCursorSelectNext tc =
case listCursorNext tc of
[] -> Nothing
(c : cs) -> Just ListCursor {listCursorPrev = c : listCursorPrev tc, listCursorNext = cs}
listCursorSelectIndex :: Int -> ListCursor a -> ListCursor a
listCursorSelectIndex ix_ lc =
let ls = rebuildListCursor lc
in case splitAt ix_ ls of
(l, r) -> ListCursor {listCursorPrev = reverse l, listCursorNext = r}
listCursorSelectStart :: ListCursor a -> ListCursor a
listCursorSelectStart tc =
case listCursorSelectPrev tc of
Nothing -> tc
Just tc' -> listCursorSelectStart tc'
listCursorSelectEnd :: ListCursor a -> ListCursor a
listCursorSelectEnd tc =
case listCursorSelectNext tc of
Nothing -> tc
Just tc' -> listCursorSelectEnd tc'
listCursorPrevItem :: ListCursor a -> Maybe a
listCursorPrevItem lc =
case listCursorPrev lc of
[] -> Nothing
(c : _) -> Just c
listCursorNextItem :: ListCursor a -> Maybe a
listCursorNextItem lc =
case listCursorNext lc of
[] -> Nothing
(c : _) -> Just c
listCursorPrevUntil :: (a -> Bool) -> ListCursor a -> ListCursor a
listCursorPrevUntil p = go
where
go lc =
case listCursorPrev lc of
[] -> lc
(c : _)
| p c -> lc
_ -> maybe lc go (listCursorSelectPrev lc)
listCursorNextUntil :: (a -> Bool) -> ListCursor a -> ListCursor a
listCursorNextUntil p = go
where
go lc =
case listCursorNext lc of
[] -> lc
(c : _)
| p c -> lc
_ -> maybe lc go (listCursorSelectNext lc)
listCursorInsert :: a -> ListCursor a -> ListCursor a
listCursorInsert c lc = lc {listCursorPrev = c : listCursorPrev lc}
listCursorAppend :: a -> ListCursor a -> ListCursor a
listCursorAppend c lc = lc {listCursorNext = c : listCursorNext lc}
listCursorInsertList :: [a] -> ListCursor a -> ListCursor a
listCursorInsertList l lc = lc {listCursorPrev = reverse l ++ listCursorPrev lc}
listCursorAppendList :: [a] -> ListCursor a -> ListCursor a
listCursorAppendList l lc = lc {listCursorNext = l ++ listCursorNext lc}
listCursorRemove :: ListCursor a -> Maybe (DeleteOrUpdate (ListCursor a))
listCursorRemove tc =
case listCursorPrev tc of
[] ->
case listCursorNext tc of
[] -> Just Deleted
_ -> Nothing
(_ : prev) -> Just $ Updated $ tc {listCursorPrev = prev}
listCursorDelete :: ListCursor a -> Maybe (DeleteOrUpdate (ListCursor a))
listCursorDelete tc =
case listCursorNext tc of
[] ->
case listCursorPrev tc of
[] -> Just Deleted
_ -> Nothing
(_ : next) -> Just $ Updated $ tc {listCursorNext = next}
listCursorSplit :: ListCursor a -> (ListCursor a, ListCursor a)
listCursorSplit ListCursor {..} =
( ListCursor {listCursorPrev = listCursorPrev, listCursorNext = []},
ListCursor {listCursorPrev = [], listCursorNext = listCursorNext}
)
listCursorCombine :: ListCursor a -> ListCursor a -> ListCursor a
listCursorCombine lc1 lc2 =
ListCursor
{ listCursorPrev = reverse $ rebuildListCursor lc1,
listCursorNext = rebuildListCursor lc2
}
traverseListCursor :: ([a] -> [a] -> f b) -> ListCursor a -> f b
traverseListCursor = foldListCursor
foldListCursor :: ([a] -> [a] -> b) -> ListCursor a -> b
foldListCursor func ListCursor {..} = func (reverse listCursorPrev) listCursorNext
| null | https://raw.githubusercontent.com/bgamari/ghc-debug/c931fe52efbed1f4c3e47e27e81994ddd0b88f10/ghc-debug-brick/src/Cursor/List.hs | haskell | | In reverse order | # LANGUAGE DeriveFunctor #
# LANGUAGE DeriveGeneric #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
Authored by , copied from cursor package #
module Cursor.List
( ListCursor (..),
emptyListCursor,
makeListCursor,
makeListCursorWithSelection,
rebuildListCursor,
listCursorNull,
listCursorLength,
listCursorIndex,
listCursorSelectPrev,
listCursorSelectNext,
listCursorSelectIndex,
listCursorSelectStart,
listCursorSelectEnd,
listCursorPrevItem,
listCursorNextItem,
listCursorPrevUntil,
listCursorNextUntil,
listCursorInsert,
listCursorAppend,
listCursorInsertList,
listCursorAppendList,
listCursorRemove,
listCursorDelete,
listCursorSplit,
listCursorCombine,
traverseListCursor,
foldListCursor,
)
where
import Control.DeepSeq
import Cursor.Types
import GHC.Generics (Generic)
data ListCursor a = ListCursor
listCursorPrev :: [a],
listCursorNext :: [a]
}
deriving (Show, Eq, Generic, Functor)
instance NFData a => NFData (ListCursor a)
emptyListCursor :: ListCursor a
emptyListCursor = ListCursor {listCursorPrev = [], listCursorNext = []}
makeListCursor :: [a] -> ListCursor a
makeListCursor as = ListCursor {listCursorPrev = [], listCursorNext = as}
makeListCursorWithSelection :: Int -> [a] -> Maybe (ListCursor a)
makeListCursorWithSelection i as
| i < 0 = Nothing
| i > length as = Nothing
| otherwise = Just ListCursor {listCursorPrev = reverse $ take i as, listCursorNext = drop i as}
rebuildListCursor :: ListCursor a -> [a]
rebuildListCursor ListCursor {..} = reverse listCursorPrev ++ listCursorNext
listCursorNull :: ListCursor a -> Bool
listCursorNull ListCursor {..} = null listCursorPrev && null listCursorNext
listCursorLength :: ListCursor a -> Int
listCursorLength = length . rebuildListCursor
listCursorIndex :: ListCursor a -> Int
listCursorIndex = length . listCursorPrev
listCursorSelectPrev :: ListCursor a -> Maybe (ListCursor a)
listCursorSelectPrev tc =
case listCursorPrev tc of
[] -> Nothing
(c : cs) -> Just ListCursor {listCursorPrev = cs, listCursorNext = c : listCursorNext tc}
listCursorSelectNext :: ListCursor a -> Maybe (ListCursor a)
listCursorSelectNext tc =
case listCursorNext tc of
[] -> Nothing
(c : cs) -> Just ListCursor {listCursorPrev = c : listCursorPrev tc, listCursorNext = cs}
listCursorSelectIndex :: Int -> ListCursor a -> ListCursor a
listCursorSelectIndex ix_ lc =
let ls = rebuildListCursor lc
in case splitAt ix_ ls of
(l, r) -> ListCursor {listCursorPrev = reverse l, listCursorNext = r}
listCursorSelectStart :: ListCursor a -> ListCursor a
listCursorSelectStart tc =
case listCursorSelectPrev tc of
Nothing -> tc
Just tc' -> listCursorSelectStart tc'
listCursorSelectEnd :: ListCursor a -> ListCursor a
listCursorSelectEnd tc =
case listCursorSelectNext tc of
Nothing -> tc
Just tc' -> listCursorSelectEnd tc'
listCursorPrevItem :: ListCursor a -> Maybe a
listCursorPrevItem lc =
case listCursorPrev lc of
[] -> Nothing
(c : _) -> Just c
listCursorNextItem :: ListCursor a -> Maybe a
listCursorNextItem lc =
case listCursorNext lc of
[] -> Nothing
(c : _) -> Just c
listCursorPrevUntil :: (a -> Bool) -> ListCursor a -> ListCursor a
listCursorPrevUntil p = go
where
go lc =
case listCursorPrev lc of
[] -> lc
(c : _)
| p c -> lc
_ -> maybe lc go (listCursorSelectPrev lc)
listCursorNextUntil :: (a -> Bool) -> ListCursor a -> ListCursor a
listCursorNextUntil p = go
where
go lc =
case listCursorNext lc of
[] -> lc
(c : _)
| p c -> lc
_ -> maybe lc go (listCursorSelectNext lc)
listCursorInsert :: a -> ListCursor a -> ListCursor a
listCursorInsert c lc = lc {listCursorPrev = c : listCursorPrev lc}
listCursorAppend :: a -> ListCursor a -> ListCursor a
listCursorAppend c lc = lc {listCursorNext = c : listCursorNext lc}
listCursorInsertList :: [a] -> ListCursor a -> ListCursor a
listCursorInsertList l lc = lc {listCursorPrev = reverse l ++ listCursorPrev lc}
listCursorAppendList :: [a] -> ListCursor a -> ListCursor a
listCursorAppendList l lc = lc {listCursorNext = l ++ listCursorNext lc}
listCursorRemove :: ListCursor a -> Maybe (DeleteOrUpdate (ListCursor a))
listCursorRemove tc =
case listCursorPrev tc of
[] ->
case listCursorNext tc of
[] -> Just Deleted
_ -> Nothing
(_ : prev) -> Just $ Updated $ tc {listCursorPrev = prev}
listCursorDelete :: ListCursor a -> Maybe (DeleteOrUpdate (ListCursor a))
listCursorDelete tc =
case listCursorNext tc of
[] ->
case listCursorPrev tc of
[] -> Just Deleted
_ -> Nothing
(_ : next) -> Just $ Updated $ tc {listCursorNext = next}
listCursorSplit :: ListCursor a -> (ListCursor a, ListCursor a)
listCursorSplit ListCursor {..} =
( ListCursor {listCursorPrev = listCursorPrev, listCursorNext = []},
ListCursor {listCursorPrev = [], listCursorNext = listCursorNext}
)
listCursorCombine :: ListCursor a -> ListCursor a -> ListCursor a
listCursorCombine lc1 lc2 =
ListCursor
{ listCursorPrev = reverse $ rebuildListCursor lc1,
listCursorNext = rebuildListCursor lc2
}
traverseListCursor :: ([a] -> [a] -> f b) -> ListCursor a -> f b
traverseListCursor = foldListCursor
foldListCursor :: ([a] -> [a] -> b) -> ListCursor a -> b
foldListCursor func ListCursor {..} = func (reverse listCursorPrev) listCursorNext
|
fa4760f77255fa57d182652bbde531b7db7e762d2e84088bc714efe3de206f87 | exoscale/clojure-kubernetes-client | v1_replica_set.clj | (ns clojure-kubernetes-client.specs.v1-replica-set
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-status :refer :all]
)
(:import (java.io File)))
(declare v1-replica-set-data v1-replica-set)
(def v1-replica-set-data
{
(ds/opt :apiVersion) string?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/opt :spec) v1-replica-set-spec
(ds/opt :status) v1-replica-set-status
})
(def v1-replica-set
(ds/spec
{:name ::v1-replica-set
:spec v1-replica-set-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_replica_set.clj | clojure | (ns clojure-kubernetes-client.specs.v1-replica-set
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-spec :refer :all]
[clojure-kubernetes-client.specs.v1-replica-set-status :refer :all]
)
(:import (java.io File)))
(declare v1-replica-set-data v1-replica-set)
(def v1-replica-set-data
{
(ds/opt :apiVersion) string?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/opt :spec) v1-replica-set-spec
(ds/opt :status) v1-replica-set-status
})
(def v1-replica-set
(ds/spec
{:name ::v1-replica-set
:spec v1-replica-set-data}))
| |
321c8cafcaebbfae8ed20a99af834e3e70739f59e0428cb2e8031830ecacc53d | mransan/raft-udp | raft_utl_ratelimiter.ml | open Lwt.Infix
let wrap rate stream =
let events = Array.make rate (-. 1.) in
let t0 = Mtime.counter () in
let i = ref 0 in
let rate, min_delta =
let precision = 5 in
if rate <= precision
then rate, 1.
else (rate / precision), (1. /. (float_of_int precision))
in
let set_events ~from ~len =
let time = Mtime.(count t0 |> to_s) in
let next = from + len in
if next > rate
then begin
Array.fill events from (rate - from) time;
let next = next - rate in
Array.fill events 0 next time;
next
end
else begin
Array.fill events from len time;
next
end
in
fun () ->
Lwt_stream.get stream
>>= (function
| None -> Lwt.return []
| Some hd ->
let tl = Lwt_stream.get_available_up_to (rate - 1) stream in
let l = hd::tl in
let len = List.length l in
let time = Mtime.(count t0 |> to_s) in
let delta = time -. events.((!i + len - 1) mod rate) in
if delta < min_delta
then
Lwt_unix.sleep (min_delta -. delta)
>|= (fun () ->
i := set_events ~from:!i ~len;
l
)
else begin
i := set_events ~from:!i ~len;
Lwt.return l
end
)
| null | https://raw.githubusercontent.com/mransan/raft-udp/ffa307fa6d8bdaa3133f3cc66149ac7dfda5fc7c/src/utl/raft_utl_ratelimiter.ml | ocaml | open Lwt.Infix
let wrap rate stream =
let events = Array.make rate (-. 1.) in
let t0 = Mtime.counter () in
let i = ref 0 in
let rate, min_delta =
let precision = 5 in
if rate <= precision
then rate, 1.
else (rate / precision), (1. /. (float_of_int precision))
in
let set_events ~from ~len =
let time = Mtime.(count t0 |> to_s) in
let next = from + len in
if next > rate
then begin
Array.fill events from (rate - from) time;
let next = next - rate in
Array.fill events 0 next time;
next
end
else begin
Array.fill events from len time;
next
end
in
fun () ->
Lwt_stream.get stream
>>= (function
| None -> Lwt.return []
| Some hd ->
let tl = Lwt_stream.get_available_up_to (rate - 1) stream in
let l = hd::tl in
let len = List.length l in
let time = Mtime.(count t0 |> to_s) in
let delta = time -. events.((!i + len - 1) mod rate) in
if delta < min_delta
then
Lwt_unix.sleep (min_delta -. delta)
>|= (fun () ->
i := set_events ~from:!i ~len;
l
)
else begin
i := set_events ~from:!i ~len;
Lwt.return l
end
)
| |
5d8109fb0e891d083ad2f9003427badfe9df3f4e8b3aeccc06412dcae46d43d5 | JHU-PL-Lab/jaylang | append02.ml |
let rec bot _ = bot ()
let fail _ = assert false
let rec append_1030 append_without_checking_1072 x_DO_NOT_CARE_1074 x_DO_NOT_CARE_1075 x_DO_NOT_CARE_1076 xs_1031 prev_set_flag_append_1058 s_prev_append_xs_1056 s_prev_append_ys_1057 ys_1032 =
let u = if prev_set_flag_append_1058 then
if ((0 * 1) + (1 * s_prev_append_xs_1056)) +
(0 * s_prev_append_ys_1057) >
((0 * 1) + (1 * xs_1031)) + (0 * ys_1032) &&
((0 * 1) + (1 * xs_1031)) + (0 * ys_1032) >= 0 then
()
else
let u_3245 = fail ()
in
bot()
else () in
append_without_checking_1072 x_DO_NOT_CARE_1074 x_DO_NOT_CARE_1075
x_DO_NOT_CARE_1076 xs_1031 prev_set_flag_append_1058
s_prev_append_xs_1056 s_prev_append_ys_1057 ys_1032
let rec append_without_checking_1072 x_DO_NOT_CARE_1078 x_DO_NOT_CARE_1079 x_DO_NOT_CARE_1080 xs_1031 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055 ys_1032 =
let set_flag_append_1059 = true
in
let s_append_ys_1055 = ys_1032
in
let s_append_xs_1054 = xs_1031
in
if xs_1031 <= 0 then
ys_1032
else
let xs'_1033 = xs_1031 - 1
in
1 +
append_1030 append_without_checking_1072 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055
xs'_1033 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055
ys_1032
let main_1034 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055 u_1035 l1_1036 l2_1037 =
append_without_checking_1072 set_flag_append_1059 s_append_xs_1054
s_append_ys_1055 l1_1036 set_flag_append_1059 s_append_xs_1054
s_append_ys_1055 l2_1037
let main =
main_1034 false 0 0 ()
| null | https://raw.githubusercontent.com/JHU-PL-Lab/jaylang/484b3876986a515fb57b11768a1b3b50418cde0c/benchmark/cases/mochi_origin/termination/append02.ml | ocaml |
let rec bot _ = bot ()
let fail _ = assert false
let rec append_1030 append_without_checking_1072 x_DO_NOT_CARE_1074 x_DO_NOT_CARE_1075 x_DO_NOT_CARE_1076 xs_1031 prev_set_flag_append_1058 s_prev_append_xs_1056 s_prev_append_ys_1057 ys_1032 =
let u = if prev_set_flag_append_1058 then
if ((0 * 1) + (1 * s_prev_append_xs_1056)) +
(0 * s_prev_append_ys_1057) >
((0 * 1) + (1 * xs_1031)) + (0 * ys_1032) &&
((0 * 1) + (1 * xs_1031)) + (0 * ys_1032) >= 0 then
()
else
let u_3245 = fail ()
in
bot()
else () in
append_without_checking_1072 x_DO_NOT_CARE_1074 x_DO_NOT_CARE_1075
x_DO_NOT_CARE_1076 xs_1031 prev_set_flag_append_1058
s_prev_append_xs_1056 s_prev_append_ys_1057 ys_1032
let rec append_without_checking_1072 x_DO_NOT_CARE_1078 x_DO_NOT_CARE_1079 x_DO_NOT_CARE_1080 xs_1031 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055 ys_1032 =
let set_flag_append_1059 = true
in
let s_append_ys_1055 = ys_1032
in
let s_append_xs_1054 = xs_1031
in
if xs_1031 <= 0 then
ys_1032
else
let xs'_1033 = xs_1031 - 1
in
1 +
append_1030 append_without_checking_1072 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055
xs'_1033 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055
ys_1032
let main_1034 set_flag_append_1059 s_append_xs_1054 s_append_ys_1055 u_1035 l1_1036 l2_1037 =
append_without_checking_1072 set_flag_append_1059 s_append_xs_1054
s_append_ys_1055 l1_1036 set_flag_append_1059 s_append_xs_1054
s_append_ys_1055 l2_1037
let main =
main_1034 false 0 0 ()
| |
a1c86c5d0223586635f2f88069442fa5d8092a05fd070ed75a5bb7c450e8944b | con-kitty/categorifier-c | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Categorifier.C.Generate (writeCFiles)
import F (fCategorified)
-- This generates /tmp/recursive_types_2.c
main :: IO ()
main = writeCFiles "/tmp" "recursive_types_2" fCategorified
| null | https://raw.githubusercontent.com/con-kitty/categorifier-c/dd5ba3ac4d39629b99200bdffa0af54d124233f0/examples/recursive-types-2/Main.hs | haskell | # LANGUAGE OverloadedStrings #
This generates /tmp/recursive_types_2.c |
module Main (main) where
import Categorifier.C.Generate (writeCFiles)
import F (fCategorified)
main :: IO ()
main = writeCFiles "/tmp" "recursive_types_2" fCategorified
|
9e0e9dfd6e1794e666440de5ae2c9704570e91d2ff88d5105f020c204d3111d9 | openmusic-project/OMChroma | gen-model-data.lisp | ;=====================================================
; CHROMA
;=====================================================
part of the OMChroma library
- > High - level control of sound synthesis in OM
;=====================================================
;
;This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
;of the License, or (at your option) any later version.
;
;See file LICENSE for further informations on licensing terms.
;
;This program is distributed in the hope that it will be useful,
;but WITHOUT ANY WARRANTY; without even the implied warranty of
;MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;GNU General Public License for more details.
;
;=====================================================
(in-package :cr)
;ms_1109
(defmethod gen-model-data (lvals fun-list arg-list &key (interpolmode) (markers) (test)
(markermode 'delete) (timemode 'rel) (integeritp) (verbose))
"Process a list of values by sequentially applying all the functions in fun-list to each value,
with the arguments dynamically computed according to the rules specified in arg-list.
<lvals> : Any list to be modified
<fun-list> : List of functions sequentially applied to lvals
<arg-list> : List of dynamic control structures (see below)
The first argument of each function is each element of lvals. The other arguments depend on the function.
NB: the function must return an element of the same type as the input argument.
arg-list: (larg1... largN)
largN: control list for the nth function, specifying the way the parameters are dynamically computed from the beginning to the end of the list.
Main structure:
'(
(pos1 (val11... val1n))
(posN (valN1... valNn)))
where:
posN: position of the corresponding arguments in the list of values
if :timemode=relative
if integer = absolute positions in the list (0 = beginning), that is, access through (nth posN list)
if float = relative positions, scaled between the beginning and the end of the list (first number = beginning, last number = end)
if :timemode=absolute
posN refers to an absolute time with respect to the list of markers (which must be given)
val11...val1N: values of the arguments at the given position; the intermediate values will be computed via interpolation
:interpolmode = interpolation mode
() = the values will be held until the next position, that is, no interpolation
0.0 = linear interpolation
<0.0 = logarithmic interpolation (going up), and exponential interpolation (going down), the argument is the alpha
>0.0 = exponential interpolation (going up), and logarithmic interpolation (going down), the argument is the alpha
'(exp2 [float]) = symetric interpolation (log/exp up and down, >0.0 = exp, <0.0 = log)
sin/sin1 = complete sinusoidal interpolation (from -pi/2 to pi/2)
sin2 = 1/2 sin interpolation (from 0 to pi/2, slightly log up, exp down)
sin3 = same as sin2, but totally symetric.
At the end of the process, a further test is applied to the computed list.
If the test is positive, the element will not be returned.
:test 'fun = apply fun to each element of the list.
:test '(fun arg1... argN) = apply fun with the following arguments: list-element, arg1... argN
:markermode
If the test results in eliminating some elements, AND there are corresponding markers, they will also be processed:
delete = the marker correponding to the deleted element will also be deleted
firstn = keep only the first n markers in the order of definition (hence discarding the remaining ones)
() = do nothing, return all the markers
:integeritp
Flag, if t, the arguments computed by interpolation will be rounded to an integer.
:verbose
Flag, if t, print only the args passed to the modifying functions, NOT the result.
"
(flet ((list! (l) (if (listp l) l (list l))))
(let* ((fun-list (list! fun-list))
(num-vals (length lvals))
; if timemode is a list, keep only as many args as functions (if too little, repeat the last arg)
(timemode (prepare-arg (list! timemode) (length fun-list)))
(interpolmode (prepare-interpolmode (list! interpolmode) (length fun-list)))
(integeritp (prepare-arg (list! integeritp) (length fun-list)))
(res-arg-list))
(loop for args in arg-list do
(let ((time-mode (nextl timemode))
(interpol-mode (nextl interpolmode))
(integer-itp (nextl integeritp)))
(cond ((and (equal time-mode 'rel) (null interpol-mode))
; fixed list, no interpolation of arguments
(setf res-arg-list (cons (fixed-list args num-vals) res-arg-list)))
((equal time-mode 'rel)
(setf res-arg-list
(cons (interpolated-list
args num-vals
:itpmode interpol-mode
:intitp integer-itp)
res-arg-list)))
((and (equal time-mode 'abs))
(setf res-arg-list
(cons (interpolated-list-markers
args markers
:itpmode interpol-mode
:intitp integer-itp)
res-arg-list)))
(t (error "Unknown case, sir, timemode=~a, interpolmode=~a~%" timemode interpolmode)))))
(if verbose
(nreverse res-arg-list)
( print ( list lvals fun - list ( - list ) : markers markers : test test : ) ) ) ) ) )
(final-model-data lvals fun-list (nreverse res-arg-list) :markers markers :test test :markermode markermode))
)
))
(defun prepare-arg (arg n)
(if (consp arg)
(cr::l-val n arg)
(make-list n :initial-element arg)))
( prepare - arg ' ( 1 2 3 ) 10 )
( prepare - arg ' ( ) 10 )
(defun prepare-interpolmode (arg n)
(cond
((null arg) (make-list n :initial-element arg))
((and (listp arg) (symbolp (car arg)) (not (null (car arg)))) (make-list n :initial-element arg))
(t (cr::l-val n arg))))
( prepare - interpolmode ' ( exp 1.0 ) 10 )
(defun fixed-list (l length)
(let* ((result)
(curr-l (copy-list l))
(el1 (nextl curr-l))
(el2 (nextl curr-l)))
(loop for i = 0 then (+ i 1)
while (< i length) do
update to next ctl in list
(setf el1 el2)
(setf el2 (let ((tmpel (nextl curr-l))) (if (null tmpel) el2 tmpel)))) ; last el of list
(setf result (cons (cadr el1) result)))
(nreverse result)))
(defun interpolated-list (ctl-list length &key (intitp) (itpmode 0.0))
(let ((result ; rearrange data so that they can build a FUN object (thorny!!!)
(mapcar #'flat
(mat-trans
el = ( 0 ( 10 15 ) )
collect (mat-trans (list (cadr el)
(make-list (length (cadr el)) :initial-element (car el))
)))
))))
(mat-trans ; interpolate each argument alone, then mat-trans them
(if intitp
(loop for fun in result
collect
(mapcar
#'round
(cr::y-list_fun (cr::sample_fun (cr::make_fun fun) length itpmode))))
(loop for fun in result
collect (cr::y-list_fun (cr::sample_fun (cr::make_fun fun) length itpmode)))))
))
(defun interpolated-list-markers (ctl-list markers &key (intitp) (itpmode 0.0))
(let ((result ; rearrange data so that they can build a FUN object (thorny!!!)
(mapcar #'flat
(mat-trans
el = ( 0 ( 10 15 ) )
collect (mat-trans (list (cadr el)
(make-list (length (cadr el)) :initial-element (car el)))))
))))
(mat-trans ; interpolate each argument alone, then mat-trans them
(if intitp
(loop for fun in result
collect
(mapcar
#'round
(loop for marker in markers
collect (cr::y-val_fun (cr::make_fun fun) marker itpmode))))
(loop for fun in result
collect
(loop for marker in markers
collect (cr::y-val_fun (cr::make_fun fun) marker itpmode))))
)))
( interpolated - list - markers )
( interpolated - list - markers : 1.0 : intitp t )
( interpolated - list 9 : intitp t : itpmode 0.0 )
(defmethod final-model-data (lvals fun-list arg-list &key (markers) (test) (markermode 'delete))
; complete data already prepared, just loop through them and apply the functions + the test
(let ((rep lvals))
(loop for fun in fun-list
for args in arg-list do
( print ( format ( ) " Processing function with args ~a~% " args ) )
(setf rep (loop for elem in rep
for i = 0 then (+ i 1)
collect
(apply fun (append (list elem) (nth i args))))))
; deal with the test
(cond ((and test markers)
(if (equal markermode 'delete) ; delete corresponding marker in position
(let ((res-val) (res-mrk))
(loop for val in rep
for mrk in markers do
(unless (if (and (listp test) (not (null test))) (apply (car test) (append (list val) (cdr test))) (apply test (list val)))
(setf res-val (cons val res-val))
(setf res-mrk (cons mrk res-mrk))))
(list (nreverse res-val) (nreverse res-mrk)))
(let ((res-val)) ; otherwise compute result and...
(loop for val in rep do
(unless (if (and (listp test) (not (null test))) (apply (car test) (append (list val) (cdr test))) (apply test (list val)))
(setf res-val (cons val res-val))))
(if (equal markermode 'firstn) ; ...either return first N markers,...
(list (reverse res-val) (firstn markers (length res-val)))
(list (nreverse res-val) markers))))) ; ...or return all the markers
(test ; there are no markers
(loop for val in rep
unless (if (and (listp test) (not (null test))) (apply (car test) (append (list val) (cdr test))) (apply test (list val)))
collect val))
(t (if markers
(list rep markers)
rep))) ; there is no test
))
( defun mynth ( l pos ) ( nth pos l ) )
( defun my * ( l ) ( om * l 10 ) )
( defun mytest ( l ) ( null l ) )
;(defun mytest1 (l n) (print l) (print n) (print (equal l n)))
( final - model - data ' ( ( 1 2 ) ( 2 3 ) ( 3 4 ) ( 4 5 ) ( 5 6 ) ( 6 7 ) ( 7 8) ( 8 9 ) ( 10 11 12 ) ( 9 10 ) ) ' ( my * ) ' ( ( ) ( ( 0 ) ( 1 ) ( 2 ) ( 0 ) ( 1 ) ( 2 ) ( 0 ) ( 1 ) ( 2 ) ( 0 ) ( 1 ) ( 2 ) ) ) )
: test ' ( mytest ) : markers ( ) )
: test ' ( mytest ) : markers ' ( 0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 0.91 0.92 ) : markermode ' delete )
| null | https://raw.githubusercontent.com/openmusic-project/OMChroma/5ded34f22b59a1a93ea7b87e182c9dbdfa95e047/sources/chroma/models/gen-model-data.lisp | lisp | =====================================================
CHROMA
=====================================================
=====================================================
This program is free software; you can redistribute it and/or
either version 2
of the License, or (at your option) any later version.
See file LICENSE for further informations on licensing terms.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
=====================================================
ms_1109
the intermediate values will be computed via interpolation
if timemode is a list, keep only as many args as functions (if too little, repeat the last arg)
fixed list, no interpolation of arguments
last el of list
rearrange data so that they can build a FUN object (thorny!!!)
interpolate each argument alone, then mat-trans them
rearrange data so that they can build a FUN object (thorny!!!)
interpolate each argument alone, then mat-trans them
complete data already prepared, just loop through them and apply the functions + the test
deal with the test
delete corresponding marker in position
otherwise compute result and...
...either return first N markers,...
...or return all the markers
there are no markers
there is no test
(defun mytest1 (l n) (print l) (print n) (print (equal l n))) | part of the OMChroma library
- > High - level control of sound synthesis in OM
modify it under the terms of the GNU General Public License
(in-package :cr)
(defmethod gen-model-data (lvals fun-list arg-list &key (interpolmode) (markers) (test)
(markermode 'delete) (timemode 'rel) (integeritp) (verbose))
"Process a list of values by sequentially applying all the functions in fun-list to each value,
with the arguments dynamically computed according to the rules specified in arg-list.
<lvals> : Any list to be modified
<fun-list> : List of functions sequentially applied to lvals
<arg-list> : List of dynamic control structures (see below)
The first argument of each function is each element of lvals. The other arguments depend on the function.
NB: the function must return an element of the same type as the input argument.
arg-list: (larg1... largN)
largN: control list for the nth function, specifying the way the parameters are dynamically computed from the beginning to the end of the list.
Main structure:
'(
(pos1 (val11... val1n))
(posN (valN1... valNn)))
where:
posN: position of the corresponding arguments in the list of values
if :timemode=relative
if integer = absolute positions in the list (0 = beginning), that is, access through (nth posN list)
if float = relative positions, scaled between the beginning and the end of the list (first number = beginning, last number = end)
if :timemode=absolute
posN refers to an absolute time with respect to the list of markers (which must be given)
:interpolmode = interpolation mode
() = the values will be held until the next position, that is, no interpolation
0.0 = linear interpolation
<0.0 = logarithmic interpolation (going up), and exponential interpolation (going down), the argument is the alpha
>0.0 = exponential interpolation (going up), and logarithmic interpolation (going down), the argument is the alpha
'(exp2 [float]) = symetric interpolation (log/exp up and down, >0.0 = exp, <0.0 = log)
sin/sin1 = complete sinusoidal interpolation (from -pi/2 to pi/2)
sin2 = 1/2 sin interpolation (from 0 to pi/2, slightly log up, exp down)
sin3 = same as sin2, but totally symetric.
At the end of the process, a further test is applied to the computed list.
If the test is positive, the element will not be returned.
:test 'fun = apply fun to each element of the list.
:test '(fun arg1... argN) = apply fun with the following arguments: list-element, arg1... argN
:markermode
If the test results in eliminating some elements, AND there are corresponding markers, they will also be processed:
delete = the marker correponding to the deleted element will also be deleted
firstn = keep only the first n markers in the order of definition (hence discarding the remaining ones)
() = do nothing, return all the markers
:integeritp
Flag, if t, the arguments computed by interpolation will be rounded to an integer.
:verbose
Flag, if t, print only the args passed to the modifying functions, NOT the result.
"
(flet ((list! (l) (if (listp l) l (list l))))
(let* ((fun-list (list! fun-list))
(num-vals (length lvals))
(timemode (prepare-arg (list! timemode) (length fun-list)))
(interpolmode (prepare-interpolmode (list! interpolmode) (length fun-list)))
(integeritp (prepare-arg (list! integeritp) (length fun-list)))
(res-arg-list))
(loop for args in arg-list do
(let ((time-mode (nextl timemode))
(interpol-mode (nextl interpolmode))
(integer-itp (nextl integeritp)))
(cond ((and (equal time-mode 'rel) (null interpol-mode))
(setf res-arg-list (cons (fixed-list args num-vals) res-arg-list)))
((equal time-mode 'rel)
(setf res-arg-list
(cons (interpolated-list
args num-vals
:itpmode interpol-mode
:intitp integer-itp)
res-arg-list)))
((and (equal time-mode 'abs))
(setf res-arg-list
(cons (interpolated-list-markers
args markers
:itpmode interpol-mode
:intitp integer-itp)
res-arg-list)))
(t (error "Unknown case, sir, timemode=~a, interpolmode=~a~%" timemode interpolmode)))))
(if verbose
(nreverse res-arg-list)
( print ( list lvals fun - list ( - list ) : markers markers : test test : ) ) ) ) ) )
(final-model-data lvals fun-list (nreverse res-arg-list) :markers markers :test test :markermode markermode))
)
))
(defun prepare-arg (arg n)
(if (consp arg)
(cr::l-val n arg)
(make-list n :initial-element arg)))
( prepare - arg ' ( 1 2 3 ) 10 )
( prepare - arg ' ( ) 10 )
(defun prepare-interpolmode (arg n)
(cond
((null arg) (make-list n :initial-element arg))
((and (listp arg) (symbolp (car arg)) (not (null (car arg)))) (make-list n :initial-element arg))
(t (cr::l-val n arg))))
( prepare - interpolmode ' ( exp 1.0 ) 10 )
(defun fixed-list (l length)
(let* ((result)
(curr-l (copy-list l))
(el1 (nextl curr-l))
(el2 (nextl curr-l)))
(loop for i = 0 then (+ i 1)
while (< i length) do
update to next ctl in list
(setf el1 el2)
(setf result (cons (cadr el1) result)))
(nreverse result)))
(defun interpolated-list (ctl-list length &key (intitp) (itpmode 0.0))
(mapcar #'flat
(mat-trans
el = ( 0 ( 10 15 ) )
collect (mat-trans (list (cadr el)
(make-list (length (cadr el)) :initial-element (car el))
)))
))))
(if intitp
(loop for fun in result
collect
(mapcar
#'round
(cr::y-list_fun (cr::sample_fun (cr::make_fun fun) length itpmode))))
(loop for fun in result
collect (cr::y-list_fun (cr::sample_fun (cr::make_fun fun) length itpmode)))))
))
(defun interpolated-list-markers (ctl-list markers &key (intitp) (itpmode 0.0))
(mapcar #'flat
(mat-trans
el = ( 0 ( 10 15 ) )
collect (mat-trans (list (cadr el)
(make-list (length (cadr el)) :initial-element (car el)))))
))))
(if intitp
(loop for fun in result
collect
(mapcar
#'round
(loop for marker in markers
collect (cr::y-val_fun (cr::make_fun fun) marker itpmode))))
(loop for fun in result
collect
(loop for marker in markers
collect (cr::y-val_fun (cr::make_fun fun) marker itpmode))))
)))
( interpolated - list - markers )
( interpolated - list - markers : 1.0 : intitp t )
( interpolated - list 9 : intitp t : itpmode 0.0 )
(defmethod final-model-data (lvals fun-list arg-list &key (markers) (test) (markermode 'delete))
(let ((rep lvals))
(loop for fun in fun-list
for args in arg-list do
( print ( format ( ) " Processing function with args ~a~% " args ) )
(setf rep (loop for elem in rep
for i = 0 then (+ i 1)
collect
(apply fun (append (list elem) (nth i args))))))
(cond ((and test markers)
(let ((res-val) (res-mrk))
(loop for val in rep
for mrk in markers do
(unless (if (and (listp test) (not (null test))) (apply (car test) (append (list val) (cdr test))) (apply test (list val)))
(setf res-val (cons val res-val))
(setf res-mrk (cons mrk res-mrk))))
(list (nreverse res-val) (nreverse res-mrk)))
(loop for val in rep do
(unless (if (and (listp test) (not (null test))) (apply (car test) (append (list val) (cdr test))) (apply test (list val)))
(setf res-val (cons val res-val))))
(list (reverse res-val) (firstn markers (length res-val)))
(loop for val in rep
unless (if (and (listp test) (not (null test))) (apply (car test) (append (list val) (cdr test))) (apply test (list val)))
collect val))
(t (if markers
(list rep markers)
))
( defun mynth ( l pos ) ( nth pos l ) )
( defun my * ( l ) ( om * l 10 ) )
( defun mytest ( l ) ( null l ) )
( final - model - data ' ( ( 1 2 ) ( 2 3 ) ( 3 4 ) ( 4 5 ) ( 5 6 ) ( 6 7 ) ( 7 8) ( 8 9 ) ( 10 11 12 ) ( 9 10 ) ) ' ( my * ) ' ( ( ) ( ( 0 ) ( 1 ) ( 2 ) ( 0 ) ( 1 ) ( 2 ) ( 0 ) ( 1 ) ( 2 ) ( 0 ) ( 1 ) ( 2 ) ) ) )
: test ' ( mytest ) : markers ( ) )
: test ' ( mytest ) : markers ' ( 0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 0.91 0.92 ) : markermode ' delete )
|
9af33dab26b43ab46c501289e395e07654833972ff8d2cfcac1aa132ab113ac0 | ThoughtWorksInc/DeepDarkFantasy | Lang.hs | # LANGUAGE
NoImplicitPrelude ,
NoMonomorphismRestriction ,
MultiParamTypeClasses ,
FlexibleInstances ,
TypeFamilies ,
ScopedTypeVariables ,
FlexibleContexts ,
UndecidableInstances ,
TypeApplications ,
PartialTypeSignatures ,
UndecidableSuperClasses
#
NoImplicitPrelude,
NoMonomorphismRestriction,
MultiParamTypeClasses,
FlexibleInstances,
TypeFamilies,
ScopedTypeVariables,
FlexibleContexts,
UndecidableInstances,
TypeApplications,
PartialTypeSignatures,
UndecidableSuperClasses
#-}
module DDF.Lang (
module DDF.Lang,
module DDF.Bimap,
module DDF.Bool,
module DDF.Char,
module DDF.Double,
module DDF.Dual,
module DDF.Float,
module DDF.Meta.Diff,
module DDF.Ordering,
module DDF.Unit,
module DDF.Sum,
module DDF.Int,
module DDF.IO,
module DDF.DiffWrapper,
module DDF.Fix,
module DDF.FreeVector
) where
import DDF.Bool
import DDF.Char
import DDF.Double
import DDF.Float
import DDF.Bimap
import DDF.Dual
import DDF.Vector
import DDF.Meta.Diff
import DDF.Unit
import DDF.Sum
import DDF.Int
import DDF.IO
import DDF.DiffWrapper
import DDF.Fix
import DDF.FreeVector
import DDF.Ordering
import qualified DDF.VectorTF as VTF
import qualified DDF.Meta.VectorTF as M.VTF
import qualified DDF.Meta.Dual as M
import qualified Control.Monad.Writer as M (Writer)
import qualified GHC.Float as M
import qualified Prelude as M
import qualified Data.Map as M
import qualified DDF.Map as Map
import qualified Data.Map as M.Map
import qualified Data.Functor.Foldable as M
import qualified Data.Bimap as M.Bimap
import qualified DDF.Meta.FreeVector as M
type FreeVectorBuilder b = M.Map.Map b M.Double
type SVTFBuilder b = State (M.Bimap.Bimap (M.VTF.VectorTF b M.Int) M.Int) M.Int
class (Ordering r, Char r, Double r, Float r, Bimap r, Dual r, Unit r, Sum r, Int r, IO r, VTF.VectorTF r, DiffWrapper r, Fix r, FreeVector r) => Lang r where
exfalso :: r h (Void -> a)
writer :: r h ((a, w) -> M.Writer w a)
runWriter :: r h (M.Writer w a -> (a, w))
float2Double :: r h (M.Float -> M.Double)
double2Float :: r h (M.Double -> M.Float)
state :: r h ((x -> (y, x)) -> State x y)
runState :: r h (State x y -> (x -> (y, x)))
iterate :: r h ((x -> x) -> x -> [x])
iterate = lam $ \f -> y1 $ lam2 $ \fi x -> cons2 x (app fi (app f x))
buildFreeVector :: Ord r b => r h (FreeVectorBuilder b -> M.FreeVector b M.Double)
buildFreeVector = lam $ \fb -> freeVector1 $ lam $ \b -> optionMatch3 (double 0) id (Map.lookup2 fb b)
toSVTFBuilder :: forall h b. Ord r b => r h (M.VTF.VectorTF b M.Int -> SVTFBuilder b)
toSVTFBuilder =
lam $ \x -> state1 $ lam $ \m ->
optionMatch3
(let_2 (size1 m) (lam $ \si -> mkProd2 si (insert2 (mkProd2 x si) m)))
(lam $ \xid -> mkProd2 xid m)
(lookupL2 m x)
get :: r h (Maybe a -> a)
get = optionMatch2 undefined id
getVar :: r h (State x x)
getVar = state1 (dup1 mkProd)
update :: r h ((x -> x) -> State x ())
update = lam $ \f -> state1 $ lam $ \x -> mkProd2 unit (app f x)
updateWengert :: r h (M.Int -> M.Double -> M.Map.Map M.Int M.Double -> M.Map M.Int M.Double)
updateWengert = lam2 $ \i d -> Map.alter2 (optionMatch2 (just1 d) (just `com2` (plus1 d))) i
vtfCata :: r h ((M.VTF.VectorTF a b -> b) -> M.Fix (M.VTF.VectorTF a) -> b)
vtfCata = lam $ \f -> y1 $ lam $ \fx ->
VTF.vtfMatch4
(app f VTF.zero)
(f `com2` VTF.basis)
(lam2 $ \l r -> app f (VTF.plus2 (app fx l) (app fx r)))
(lam2 $ \d v -> app f (VTF.mult2 d (app fx v))) `com2` runFix
class Reify r x where
reify :: x -> r h x
instance Lang r => Reify r () where
reify _ = unit
instance Lang r => Reify r M.Double where
reify = double
instance (Lang repr, Reify repr l, Reify repr r) => Reify repr (l, r) where
reify (l, r) = mkProd2 (reify l) (reify r)
instance Lang r => Monoid r () where
zero = unit
plus = const1 $ const1 unit
instance Lang r => Group r () where
invert = const1 unit
minus = const1 $ const1 unit
instance Lang r => Vector r () where
type Basis () = Void
toFreeVector = const1 $ freeVector1 exfalso
mult = const1 $ const1 unit
divide = const1 $ const1 unit
instance Float r => Monoid r M.Float where
zero = floatZero
plus = floatPlus
instance Float r => Group r M.Float where
minus = floatMinus
instance Lang r => Vector r M.Float where
type Basis M.Float = ()
toFreeVector = freeVector `com2` const `com2` float2Double
mult = com2 floatMult double2Float
divide = com2 (flip2 com double2Float) floatDivide
instance Lang r => Functor r (M.VTF.VectorTF b) where
map = lam $ \f -> VTF.vtfMatch4 VTF.zero VTF.basis (lam2 $ \l r -> app f l `VTF.plus2` app f r) (lam2 $ \d x -> d `VTF.mult2` app f x)
instance (Prod repr, Monoid repr l, Monoid repr r) => Monoid repr (l, r) where
zero = mkProd2 zero zero
plus = lam2 $ \l r -> mkProd2 (plus2 (zro1 l) (zro1 r)) (plus2 (fst1 l) (fst1 r))
instance (Prod repr, Group repr l, Group repr r) => Group repr (l, r) where
invert = bimap2 invert invert
instance (Prod repr, Double repr, Sum repr, FreeVector repr, Vector repr l, Vector repr r) => Vector repr (l, r) where
type Basis (l, r) = M.Either (Basis l) (Basis r)
toFreeVector = lam $ \p -> let_2 (toFreeVector1 $ zro1 p) $ lam $ \lfv -> let_2 (toFreeVector1 $ fst1 p) $ lam $ \rfv ->
freeVector1 $ sumMatch2 (runFreeVector1 lfv) (runFreeVector1 rfv)
mult = lam $ \x -> bimap2 (mult1 x) (mult1 x)
instance (Double r, Monoid r v) => Monoid r (M.Double -> v) where
zero = const1 zero
plus = lam3 $ \l r x -> plus2 (app l x) (app r x)
instance (Lang r, Group r v) => Group r (M.Double -> v) where
invert = lam2 $ \l x -> app l (invert1 x)
instance (Lang r, Vector r v) => Vector r (M.Double -> v) where
type Basis (M.Double -> v) = Basis v
toFreeVector = lam $ \f -> toFreeVector1 $ app f (double 1)
mult = lam3 $ \l r x -> app r (mult2 l x)
instance Lang r => Monoid r [a] where
zero = nil
plus = listAppend
# INCOHERENT #
map = lam $ \f -> y1 $ lam $ \self -> listMatch2 nil (lam2 $ \x xs -> cons2 (app f x) $ app self xs)
instance Lang r => BiFunctor r M.Either where
bimap = lam2 $ \l r -> sumMatch2 (com2 left l) (com2 right r)
instance Prod r => BiFunctor r (,) where
bimap = lam3 $ \l r p -> mkProd2 (app l (zro1 p)) (app r (fst1 p))
instance Dual r => BiFunctor r M.Dual where
bimap = lam2 $ \l r -> dual `com2` bimap2 l r `com2` runDual
instance Lang r => Functor r (Writer w) where
map = lam $ \f -> com2 writer (com2 (bimap2 f id) runWriter)
instance Lang r => Functor r (M.Map k) where
map = Map.mapMap
instance (Lang r, Monoid r w) => Applicative r (Writer w) where
pure = com2 writer (flip2 mkProd zero)
ap = lam2 $ \f x -> writer1 (mkProd2 (app (zro1 (runWriter1 f)) (zro1 (runWriter1 x))) (plus2 (fst1 (runWriter1 f)) (fst1 (runWriter1 x))))
instance (Lang r, Monoid r w) => Monad r (Writer w) where
join = lam $ \x -> writer1 $ mkProd2 (zro1 $ runWriter1 $ zro1 $ runWriter1 x) (plus2 (fst1 $ runWriter1 $ zro1 $ runWriter1 x) (fst1 $ runWriter1 x))
instance Lang r => Functor r (State l) where
map = lam2 $ \f st -> state1 (com2 (bimap2 f id) (runState1 st))
instance Lang r => Applicative r (State l) where
pure = lam $ \x -> state1 (mkProd1 x)
ap = lam2 $ \f x -> state1 $ lam $ \st -> let_2 (runState2 f st) (lam $ \p -> bimap3 (zro1 p) id (runState2 x (fst1 p)))
instance Lang r => Monad r (State l) where
join = lam $ \x -> state1 $ lam $ \st -> let_2 (runState2 x st) (uncurry1 runState)
instance Lang r => Functor r M.Maybe where
map = lam $ \func -> optionMatch2 nothing (com2 just func)
instance Lang r => Applicative r M.Maybe where
pure = just
ap = optionMatch2 (const1 nothing) map
instance Lang r => Monad r M.Maybe where
bind = lam2 $ \x func -> optionMatch3 nothing func x
instance Lang r => Monoid r (M.FreeVector b M.Double) where
zero = freeVector1 $ const1 (double 0)
plus = lam2 $ \l r -> freeVector1 $ lam $ \x -> runFreeVector2 l x `plus2` runFreeVector2 r x
instance Lang r => Group r (M.FreeVector b M.Double) where
invert = lam $ \f -> freeVector1 $ lam $ \x -> invert1 (runFreeVector2 f x)
minus = lam2 $ \l r -> freeVector1 $ lam $ \x -> runFreeVector2 l x `minus2` runFreeVector2 r x
instance Lang r => Vector r (M.FreeVector b M.Double) where
type Basis (M.FreeVector b M.Double) = b
toFreeVector = id
mult = lam2 $ \d l -> freeVector1 $ lam $ \x -> d `mult2` runFreeVector2 l x
divide = lam2 $ \l d -> freeVector1 $ lam $ \x -> runFreeVector2 l x `divide2` d
instance (Ord r b, Lang r) => Monoid r (FreeVectorBuilder b) where
zero = Map.empty
plus = Map.unionWithKey1 (const1 plus)
instance (Ord r b, Lang r) => Group r (FreeVectorBuilder b) where
invert = Map.mapMap1 invert
instance (Ord r b, Lang r) => Vector r (FreeVectorBuilder b) where
type Basis (FreeVectorBuilder b) = b
toFreeVector = buildFreeVector
mult = Map.mapMap `com2` mult
divide = lam2 $ \m d -> Map.mapMap2 (lam $ \x -> divide2 x d) m
instance Lang r => Monoid r (M.Fix (M.VTF.VectorTF b)) where
zero = fix1 VTF.zero
plus = lam2 $ \l r -> fix1 $ l `VTF.plus2` r
instance (Ord r b, Lang r) => Group r (M.Fix (M.VTF.VectorTF b)) where
invert = mult1 (double (-1))
instance (Ord r b, Lang r) => Vector r (M.Fix (M.VTF.VectorTF b)) where
type Basis (M.Fix (M.VTF.VectorTF b)) = b
toFreeVector = buildFreeVector `com2` vtfCata1 (VTF.vtfMatch4 zero (flip2 Map.singleton (double 1)) plus mult)
mult = lam $ \d -> fix `com2` VTF.mult1 d
instance (Ord r b, Lang r) => Monoid r (SVTFBuilder b) where
zero = toSVTFBuilder1 VTF.zero
plus = lam2 $ \l r -> l `bind2` (lam $ \lid -> r `bind2` (lam $ \rid -> toSVTFBuilder1 (VTF.plus2 lid rid)))
instance (Ord r b, Lang r) => Group r (SVTFBuilder b) where
invert = mult1 (double (-1))
instance (Ord r b, Lang r) => Vector r (SVTFBuilder b) where
type Basis (SVTFBuilder b) = b
toFreeVector =
buildFreeVector `com2` flip2 id Map.empty `com2`
(lam $ \x -> zro `com2` (runState1 $ y2 (lam2 $ \fx i ->
map2 (lam $ \m -> mkProd2 (get1 $ Map.lookup2 (fst1 x) i) (get1 $ Map.lookup2 m i)) getVar `bind2`
(lam $ \p -> VTF.vtfMatch5
(return1 zero)
(lam $ \b -> return1 (Map.singleton2 b (fst1 p)))
(lam2 $ \lid rid -> map2 (const1 zero) (update1 (updateWengert2 lid (fst1 p) `com2` updateWengert2 rid (fst1 p))))
(lam2 $ \d xid -> map2 (const1 zero) (update1 (let_2 (d `mult2` (fst1 p)) (updateWengert1 xid))))
(zro1 p) `bind2` (lam $ \fvb -> ite3 (return1 fvb) (map2 (plus1 fvb) $ app fx (pred1 i)) (eq2 i (int 0))))) (zro1 x))
`com2` Map.insert2 (zro1 x) (double 0)) `com2` bimap2 id toMapR `com2` flip2 runState empty
mult = lam2 $ \d x -> x `bind2` (lam $ \xid -> toSVTFBuilder1 (VTF.mult2 d xid))
type instance DiffType v (M.VTF.VectorTF t f) = M.VTF.VectorTF (DiffType v t) (DiffType v f)
type instance DiffType v M.Int = M.Int
instance Double r => Monoid r M.Double where
zero = doubleZero
plus = doublePlus
instance Double r => Group r M.Double where
minus = doubleMinus
instance Lang r => Vector r M.Double where
type Basis M.Double = ()
toFreeVector = freeVector `com2` const
mult = doubleMult
divide = doubleDivide
optionMatch2 = app2 optionMatch
optionMatch3 = app3 optionMatch
writer1 = app writer
runWriter1 = app runWriter
float2Double1 = app float2Double
state1 = app state
runState1 = app runState
runState2 = app2 runState
toSVTFBuilder1 = app toSVTFBuilder
double2Float1 = app double2Float
get1 = app get
return1 = app return
update1 = app update
updateWengert1 = app updateWengert
updateWengert2 = app2 updateWengert
vtfCata1 = app vtfCata
| null | https://raw.githubusercontent.com/ThoughtWorksInc/DeepDarkFantasy/4c569aefc03a2bcfb6113b65367201d30077f2b6/DDF/Lang.hs | haskell | # LANGUAGE
NoImplicitPrelude ,
NoMonomorphismRestriction ,
MultiParamTypeClasses ,
FlexibleInstances ,
TypeFamilies ,
ScopedTypeVariables ,
FlexibleContexts ,
UndecidableInstances ,
TypeApplications ,
PartialTypeSignatures ,
UndecidableSuperClasses
#
NoImplicitPrelude,
NoMonomorphismRestriction,
MultiParamTypeClasses,
FlexibleInstances,
TypeFamilies,
ScopedTypeVariables,
FlexibleContexts,
UndecidableInstances,
TypeApplications,
PartialTypeSignatures,
UndecidableSuperClasses
#-}
module DDF.Lang (
module DDF.Lang,
module DDF.Bimap,
module DDF.Bool,
module DDF.Char,
module DDF.Double,
module DDF.Dual,
module DDF.Float,
module DDF.Meta.Diff,
module DDF.Ordering,
module DDF.Unit,
module DDF.Sum,
module DDF.Int,
module DDF.IO,
module DDF.DiffWrapper,
module DDF.Fix,
module DDF.FreeVector
) where
import DDF.Bool
import DDF.Char
import DDF.Double
import DDF.Float
import DDF.Bimap
import DDF.Dual
import DDF.Vector
import DDF.Meta.Diff
import DDF.Unit
import DDF.Sum
import DDF.Int
import DDF.IO
import DDF.DiffWrapper
import DDF.Fix
import DDF.FreeVector
import DDF.Ordering
import qualified DDF.VectorTF as VTF
import qualified DDF.Meta.VectorTF as M.VTF
import qualified DDF.Meta.Dual as M
import qualified Control.Monad.Writer as M (Writer)
import qualified GHC.Float as M
import qualified Prelude as M
import qualified Data.Map as M
import qualified DDF.Map as Map
import qualified Data.Map as M.Map
import qualified Data.Functor.Foldable as M
import qualified Data.Bimap as M.Bimap
import qualified DDF.Meta.FreeVector as M
type FreeVectorBuilder b = M.Map.Map b M.Double
type SVTFBuilder b = State (M.Bimap.Bimap (M.VTF.VectorTF b M.Int) M.Int) M.Int
class (Ordering r, Char r, Double r, Float r, Bimap r, Dual r, Unit r, Sum r, Int r, IO r, VTF.VectorTF r, DiffWrapper r, Fix r, FreeVector r) => Lang r where
exfalso :: r h (Void -> a)
writer :: r h ((a, w) -> M.Writer w a)
runWriter :: r h (M.Writer w a -> (a, w))
float2Double :: r h (M.Float -> M.Double)
double2Float :: r h (M.Double -> M.Float)
state :: r h ((x -> (y, x)) -> State x y)
runState :: r h (State x y -> (x -> (y, x)))
iterate :: r h ((x -> x) -> x -> [x])
iterate = lam $ \f -> y1 $ lam2 $ \fi x -> cons2 x (app fi (app f x))
buildFreeVector :: Ord r b => r h (FreeVectorBuilder b -> M.FreeVector b M.Double)
buildFreeVector = lam $ \fb -> freeVector1 $ lam $ \b -> optionMatch3 (double 0) id (Map.lookup2 fb b)
toSVTFBuilder :: forall h b. Ord r b => r h (M.VTF.VectorTF b M.Int -> SVTFBuilder b)
toSVTFBuilder =
lam $ \x -> state1 $ lam $ \m ->
optionMatch3
(let_2 (size1 m) (lam $ \si -> mkProd2 si (insert2 (mkProd2 x si) m)))
(lam $ \xid -> mkProd2 xid m)
(lookupL2 m x)
get :: r h (Maybe a -> a)
get = optionMatch2 undefined id
getVar :: r h (State x x)
getVar = state1 (dup1 mkProd)
update :: r h ((x -> x) -> State x ())
update = lam $ \f -> state1 $ lam $ \x -> mkProd2 unit (app f x)
updateWengert :: r h (M.Int -> M.Double -> M.Map.Map M.Int M.Double -> M.Map M.Int M.Double)
updateWengert = lam2 $ \i d -> Map.alter2 (optionMatch2 (just1 d) (just `com2` (plus1 d))) i
vtfCata :: r h ((M.VTF.VectorTF a b -> b) -> M.Fix (M.VTF.VectorTF a) -> b)
vtfCata = lam $ \f -> y1 $ lam $ \fx ->
VTF.vtfMatch4
(app f VTF.zero)
(f `com2` VTF.basis)
(lam2 $ \l r -> app f (VTF.plus2 (app fx l) (app fx r)))
(lam2 $ \d v -> app f (VTF.mult2 d (app fx v))) `com2` runFix
class Reify r x where
reify :: x -> r h x
instance Lang r => Reify r () where
reify _ = unit
instance Lang r => Reify r M.Double where
reify = double
instance (Lang repr, Reify repr l, Reify repr r) => Reify repr (l, r) where
reify (l, r) = mkProd2 (reify l) (reify r)
instance Lang r => Monoid r () where
zero = unit
plus = const1 $ const1 unit
instance Lang r => Group r () where
invert = const1 unit
minus = const1 $ const1 unit
instance Lang r => Vector r () where
type Basis () = Void
toFreeVector = const1 $ freeVector1 exfalso
mult = const1 $ const1 unit
divide = const1 $ const1 unit
instance Float r => Monoid r M.Float where
zero = floatZero
plus = floatPlus
instance Float r => Group r M.Float where
minus = floatMinus
instance Lang r => Vector r M.Float where
type Basis M.Float = ()
toFreeVector = freeVector `com2` const `com2` float2Double
mult = com2 floatMult double2Float
divide = com2 (flip2 com double2Float) floatDivide
instance Lang r => Functor r (M.VTF.VectorTF b) where
map = lam $ \f -> VTF.vtfMatch4 VTF.zero VTF.basis (lam2 $ \l r -> app f l `VTF.plus2` app f r) (lam2 $ \d x -> d `VTF.mult2` app f x)
instance (Prod repr, Monoid repr l, Monoid repr r) => Monoid repr (l, r) where
zero = mkProd2 zero zero
plus = lam2 $ \l r -> mkProd2 (plus2 (zro1 l) (zro1 r)) (plus2 (fst1 l) (fst1 r))
instance (Prod repr, Group repr l, Group repr r) => Group repr (l, r) where
invert = bimap2 invert invert
instance (Prod repr, Double repr, Sum repr, FreeVector repr, Vector repr l, Vector repr r) => Vector repr (l, r) where
type Basis (l, r) = M.Either (Basis l) (Basis r)
toFreeVector = lam $ \p -> let_2 (toFreeVector1 $ zro1 p) $ lam $ \lfv -> let_2 (toFreeVector1 $ fst1 p) $ lam $ \rfv ->
freeVector1 $ sumMatch2 (runFreeVector1 lfv) (runFreeVector1 rfv)
mult = lam $ \x -> bimap2 (mult1 x) (mult1 x)
instance (Double r, Monoid r v) => Monoid r (M.Double -> v) where
zero = const1 zero
plus = lam3 $ \l r x -> plus2 (app l x) (app r x)
instance (Lang r, Group r v) => Group r (M.Double -> v) where
invert = lam2 $ \l x -> app l (invert1 x)
instance (Lang r, Vector r v) => Vector r (M.Double -> v) where
type Basis (M.Double -> v) = Basis v
toFreeVector = lam $ \f -> toFreeVector1 $ app f (double 1)
mult = lam3 $ \l r x -> app r (mult2 l x)
instance Lang r => Monoid r [a] where
zero = nil
plus = listAppend
# INCOHERENT #
map = lam $ \f -> y1 $ lam $ \self -> listMatch2 nil (lam2 $ \x xs -> cons2 (app f x) $ app self xs)
instance Lang r => BiFunctor r M.Either where
bimap = lam2 $ \l r -> sumMatch2 (com2 left l) (com2 right r)
instance Prod r => BiFunctor r (,) where
bimap = lam3 $ \l r p -> mkProd2 (app l (zro1 p)) (app r (fst1 p))
instance Dual r => BiFunctor r M.Dual where
bimap = lam2 $ \l r -> dual `com2` bimap2 l r `com2` runDual
instance Lang r => Functor r (Writer w) where
map = lam $ \f -> com2 writer (com2 (bimap2 f id) runWriter)
instance Lang r => Functor r (M.Map k) where
map = Map.mapMap
instance (Lang r, Monoid r w) => Applicative r (Writer w) where
pure = com2 writer (flip2 mkProd zero)
ap = lam2 $ \f x -> writer1 (mkProd2 (app (zro1 (runWriter1 f)) (zro1 (runWriter1 x))) (plus2 (fst1 (runWriter1 f)) (fst1 (runWriter1 x))))
instance (Lang r, Monoid r w) => Monad r (Writer w) where
join = lam $ \x -> writer1 $ mkProd2 (zro1 $ runWriter1 $ zro1 $ runWriter1 x) (plus2 (fst1 $ runWriter1 $ zro1 $ runWriter1 x) (fst1 $ runWriter1 x))
instance Lang r => Functor r (State l) where
map = lam2 $ \f st -> state1 (com2 (bimap2 f id) (runState1 st))
instance Lang r => Applicative r (State l) where
pure = lam $ \x -> state1 (mkProd1 x)
ap = lam2 $ \f x -> state1 $ lam $ \st -> let_2 (runState2 f st) (lam $ \p -> bimap3 (zro1 p) id (runState2 x (fst1 p)))
instance Lang r => Monad r (State l) where
join = lam $ \x -> state1 $ lam $ \st -> let_2 (runState2 x st) (uncurry1 runState)
instance Lang r => Functor r M.Maybe where
map = lam $ \func -> optionMatch2 nothing (com2 just func)
instance Lang r => Applicative r M.Maybe where
pure = just
ap = optionMatch2 (const1 nothing) map
instance Lang r => Monad r M.Maybe where
bind = lam2 $ \x func -> optionMatch3 nothing func x
instance Lang r => Monoid r (M.FreeVector b M.Double) where
zero = freeVector1 $ const1 (double 0)
plus = lam2 $ \l r -> freeVector1 $ lam $ \x -> runFreeVector2 l x `plus2` runFreeVector2 r x
instance Lang r => Group r (M.FreeVector b M.Double) where
invert = lam $ \f -> freeVector1 $ lam $ \x -> invert1 (runFreeVector2 f x)
minus = lam2 $ \l r -> freeVector1 $ lam $ \x -> runFreeVector2 l x `minus2` runFreeVector2 r x
instance Lang r => Vector r (M.FreeVector b M.Double) where
type Basis (M.FreeVector b M.Double) = b
toFreeVector = id
mult = lam2 $ \d l -> freeVector1 $ lam $ \x -> d `mult2` runFreeVector2 l x
divide = lam2 $ \l d -> freeVector1 $ lam $ \x -> runFreeVector2 l x `divide2` d
instance (Ord r b, Lang r) => Monoid r (FreeVectorBuilder b) where
zero = Map.empty
plus = Map.unionWithKey1 (const1 plus)
instance (Ord r b, Lang r) => Group r (FreeVectorBuilder b) where
invert = Map.mapMap1 invert
instance (Ord r b, Lang r) => Vector r (FreeVectorBuilder b) where
type Basis (FreeVectorBuilder b) = b
toFreeVector = buildFreeVector
mult = Map.mapMap `com2` mult
divide = lam2 $ \m d -> Map.mapMap2 (lam $ \x -> divide2 x d) m
instance Lang r => Monoid r (M.Fix (M.VTF.VectorTF b)) where
zero = fix1 VTF.zero
plus = lam2 $ \l r -> fix1 $ l `VTF.plus2` r
instance (Ord r b, Lang r) => Group r (M.Fix (M.VTF.VectorTF b)) where
invert = mult1 (double (-1))
instance (Ord r b, Lang r) => Vector r (M.Fix (M.VTF.VectorTF b)) where
type Basis (M.Fix (M.VTF.VectorTF b)) = b
toFreeVector = buildFreeVector `com2` vtfCata1 (VTF.vtfMatch4 zero (flip2 Map.singleton (double 1)) plus mult)
mult = lam $ \d -> fix `com2` VTF.mult1 d
instance (Ord r b, Lang r) => Monoid r (SVTFBuilder b) where
zero = toSVTFBuilder1 VTF.zero
plus = lam2 $ \l r -> l `bind2` (lam $ \lid -> r `bind2` (lam $ \rid -> toSVTFBuilder1 (VTF.plus2 lid rid)))
instance (Ord r b, Lang r) => Group r (SVTFBuilder b) where
invert = mult1 (double (-1))
instance (Ord r b, Lang r) => Vector r (SVTFBuilder b) where
type Basis (SVTFBuilder b) = b
toFreeVector =
buildFreeVector `com2` flip2 id Map.empty `com2`
(lam $ \x -> zro `com2` (runState1 $ y2 (lam2 $ \fx i ->
map2 (lam $ \m -> mkProd2 (get1 $ Map.lookup2 (fst1 x) i) (get1 $ Map.lookup2 m i)) getVar `bind2`
(lam $ \p -> VTF.vtfMatch5
(return1 zero)
(lam $ \b -> return1 (Map.singleton2 b (fst1 p)))
(lam2 $ \lid rid -> map2 (const1 zero) (update1 (updateWengert2 lid (fst1 p) `com2` updateWengert2 rid (fst1 p))))
(lam2 $ \d xid -> map2 (const1 zero) (update1 (let_2 (d `mult2` (fst1 p)) (updateWengert1 xid))))
(zro1 p) `bind2` (lam $ \fvb -> ite3 (return1 fvb) (map2 (plus1 fvb) $ app fx (pred1 i)) (eq2 i (int 0))))) (zro1 x))
`com2` Map.insert2 (zro1 x) (double 0)) `com2` bimap2 id toMapR `com2` flip2 runState empty
mult = lam2 $ \d x -> x `bind2` (lam $ \xid -> toSVTFBuilder1 (VTF.mult2 d xid))
type instance DiffType v (M.VTF.VectorTF t f) = M.VTF.VectorTF (DiffType v t) (DiffType v f)
type instance DiffType v M.Int = M.Int
instance Double r => Monoid r M.Double where
zero = doubleZero
plus = doublePlus
instance Double r => Group r M.Double where
minus = doubleMinus
instance Lang r => Vector r M.Double where
type Basis M.Double = ()
toFreeVector = freeVector `com2` const
mult = doubleMult
divide = doubleDivide
optionMatch2 = app2 optionMatch
optionMatch3 = app3 optionMatch
writer1 = app writer
runWriter1 = app runWriter
float2Double1 = app float2Double
state1 = app state
runState1 = app runState
runState2 = app2 runState
toSVTFBuilder1 = app toSVTFBuilder
double2Float1 = app double2Float
get1 = app get
return1 = app return
update1 = app update
updateWengert1 = app updateWengert
updateWengert2 = app2 updateWengert
vtfCata1 = app vtfCata
| |
4f146b833f1287985b95b391ea56c68712e13f3d759ad46ee989968dd77aef3a | niquola/clojure-native-example | user.clj | (ns user (:require [cider-nrepl.main]))
(defn -main [& args]
(-> (Thread/currentThread)
(.setName "cider"))
(cider-nrepl.main/init
["refactor-nrepl.middleware/wrap-refactor"
"cider.nrepl/cider-middleware"]))
| null | https://raw.githubusercontent.com/niquola/clojure-native-example/90c171bcbec5ee7e72396243bf854c3d0f80f755/dev/user.clj | clojure | (ns user (:require [cider-nrepl.main]))
(defn -main [& args]
(-> (Thread/currentThread)
(.setName "cider"))
(cider-nrepl.main/init
["refactor-nrepl.middleware/wrap-refactor"
"cider.nrepl/cider-middleware"]))
| |
938b143cf6c41b1742b1bfc50cee23d2080c72b9289059ba84c134b057d50bac | 4clojure/4clojure | config.clj | {:wrap-reload false
:db-host "localhost"
:db-name "4clojure"
:db-user nil
:db-pwd nil
:jetty-port 8080
:host "smtp.googlemail.com"
:user ""
:problem-submission true
:advanced-user-count 50
:pass ""
:repo-url ""
:hosts {;; :static "static.4clojure.com"
;; :dynamic "www.4clojure.com"
:redirects ["4clojure.com"]}
:golfing-active true
set to , eg , [ 1 : hour ] for periodic heap information on stdout
;; this list is just for bootstrapping - the real DB is authoritative
:contributors ["amalloy" "dbyrne" "raynes" "cmeier" "devn" "amcnamara"
"citizen428" "daviddavis" "clinteger" "tclamb" "0x89"]}
| null | https://raw.githubusercontent.com/4clojure/4clojure/25dec057d9d6871ce52aee9e2c3de7efdab14373/resources/config.clj | clojure | :static "static.4clojure.com"
:dynamic "www.4clojure.com"
this list is just for bootstrapping - the real DB is authoritative | {:wrap-reload false
:db-host "localhost"
:db-name "4clojure"
:db-user nil
:db-pwd nil
:jetty-port 8080
:host "smtp.googlemail.com"
:user ""
:problem-submission true
:advanced-user-count 50
:pass ""
:repo-url ""
:redirects ["4clojure.com"]}
:golfing-active true
set to , eg , [ 1 : hour ] for periodic heap information on stdout
:contributors ["amalloy" "dbyrne" "raynes" "cmeier" "devn" "amcnamara"
"citizen428" "daviddavis" "clinteger" "tclamb" "0x89"]}
|
c6015c1ed97b423d012f8f37cdf85137933d8828389ab02cdfb206651a5ce1c0 | nuvla/ui | subs.cljs | (ns sixsq.nuvla.ui.clouds-detail.subs
(:require [re-frame.core :refer [reg-sub]]
[sixsq.nuvla.ui.clouds-detail.spec :as spec]
[sixsq.nuvla.ui.utils.general :as general-utils]))
(reg-sub
::loading?
(fn [db]
(::spec/loading? db)))
(reg-sub
::infrastructure-service
(fn [db]
(::spec/infrastructure-service db)))
(reg-sub
::can-edit?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-edit? infrastructure-service)))
(reg-sub
::can-delete?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-delete? infrastructure-service)))
(reg-sub
::can-terminate?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-terminate? infrastructure-service)))
(reg-sub
::can-stop?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-stop? infrastructure-service)))
(reg-sub
::can-start?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-start? infrastructure-service)))
(reg-sub
::infra-service-not-found?
(fn [db]
(::spec/infra-service-not-found? db)))
| null | https://raw.githubusercontent.com/nuvla/ui/c10704eabd339489722fa53bc99f11f21103c070/code/src/cljs/sixsq/nuvla/ui/clouds_detail/subs.cljs | clojure | (ns sixsq.nuvla.ui.clouds-detail.subs
(:require [re-frame.core :refer [reg-sub]]
[sixsq.nuvla.ui.clouds-detail.spec :as spec]
[sixsq.nuvla.ui.utils.general :as general-utils]))
(reg-sub
::loading?
(fn [db]
(::spec/loading? db)))
(reg-sub
::infrastructure-service
(fn [db]
(::spec/infrastructure-service db)))
(reg-sub
::can-edit?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-edit? infrastructure-service)))
(reg-sub
::can-delete?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-delete? infrastructure-service)))
(reg-sub
::can-terminate?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-terminate? infrastructure-service)))
(reg-sub
::can-stop?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-stop? infrastructure-service)))
(reg-sub
::can-start?
:<- [::infrastructure-service]
(fn [infrastructure-service _]
(general-utils/can-start? infrastructure-service)))
(reg-sub
::infra-service-not-found?
(fn [db]
(::spec/infra-service-not-found? db)))
| |
26348294ed99631c4c2e70d69a2ff74b826a77d0b772e588d11651771d8d5e62 | abdulapopoola/SICPBook | Ex2.34.scm | #lang planet neil/sicp
;; helpers
(define (accumulate op initial sequence)
(if (null? sequence)
initial
(op (car sequence)
(accumulate op
initial
(cdr sequence)))))
(define (horner-eval x coefficient-sequence)
(accumulate
(lambda (this-coeff higher-terms)
(+ this-coeff (* x higher-terms)))
0
coefficient-sequence))
(horner-eval 2 (list 1 3 0 5 0 1))
79
| null | https://raw.githubusercontent.com/abdulapopoola/SICPBook/c8a0228ebf66d9c1ddc5ef1fcc1d05d8684f090a/Chapter%202/2.2/Ex2.34.scm | scheme | helpers | #lang planet neil/sicp
(define (accumulate op initial sequence)
(if (null? sequence)
initial
(op (car sequence)
(accumulate op
initial
(cdr sequence)))))
(define (horner-eval x coefficient-sequence)
(accumulate
(lambda (this-coeff higher-terms)
(+ this-coeff (* x higher-terms)))
0
coefficient-sequence))
(horner-eval 2 (list 1 3 0 5 0 1))
79
|
7b7f8804f96e75d123dab7df4c9f055f8b4b21e11e88abe2e35329a9b775aa64 | slipstream/SlipStreamServer | deployment_parameter.clj | (ns com.sixsq.slipstream.ssclj.resources.deployment-parameter
(:require
[clojure.string :as s]
[com.sixsq.slipstream.auth.acl :as a]
[com.sixsq.slipstream.ssclj.resources.common.crud :as crud]
[com.sixsq.slipstream.ssclj.resources.common.schema :as c]
[com.sixsq.slipstream.ssclj.resources.common.std-crud :as std-crud]
[com.sixsq.slipstream.ssclj.resources.common.utils :as u]
[com.sixsq.slipstream.ssclj.resources.deployment :as d]
[com.sixsq.slipstream.ssclj.resources.event.utils :as event-utils]
[com.sixsq.slipstream.ssclj.resources.spec.deployment-parameter :as deployment-parameter]
[com.sixsq.slipstream.util.response :as r]
[superstring.core :as str]
[taoensso.timbre :as log]))
;; Resource/collection names, URL segment and schema URIs for the
;; DeploymentParameter resource; derived values keep the CIMI naming
;; conventions consistent across the namespace.
(def ^:const resource-name "DeploymentParameter")
(def ^:const resource-tag (keyword (str (str/camel-case resource-name) "s")))
(def ^:const resource-url (u/de-camelcase resource-name))
(def ^:const collection-name "DeploymentCollection")
(def ^:const resource-uri (str c/slipstream-schema-uri resource-name))
(def ^:const collection-uri (str c/slipstream-schema-uri collection-name))
;; Collection ACL: ADMIN owns the collection and may MODIFY; any
;; authenticated USER may VIEW.  (The original comment said "credentials" --
;; wording carried over from another resource namespace; this resource is
;; DeploymentParameter.)
(def collection-acl {:owner {:principal "ADMIN"
:type "ROLE"}
:rules [{:principal "ADMIN"
:type "ROLE"
:right "MODIFY"}
{:principal "USER"
:type "ROLE"
:right "VIEW"}]})
(defn parameter->uiid
  "Derives the deterministic resource UUID for a deployment parameter from
  its deployment href, node ID and parameter name, joined with ':'.
  (The 'uiid' spelling is kept: callers in this namespace use this symbol.)"
  [deployment-href nodeID name]
  (u/from-data-uuid (s/join ":" [deployment-href nodeID name])))
;; Linear deployment state machine: Provisioning -> Executing ->
;; SendingReports -> Ready.  Ready, Done, Aborted and Cancelled map to
;; themselves, so repeated "complete" signals in those states are no-ops.
(def next-state-machine-transition-map {"Provisioning" "Executing"
"Executing" "SendingReports"
"SendingReports" "Ready"
"Ready" "Ready"
"Done" "Done"
"Aborted" "Aborted"
"Cancelled" "Cancelled"})
(defn next-state
  "Returns the successor of current-state in the deployment state machine,
  throwing a 400 bad-request response when current-state has no defined
  transition.  (Safe as `or`: all map values are non-nil strings.)"
  [current-state]
  (or (get next-state-machine-transition-map current-state)
      (throw (r/ex-bad-request (str "complete state invalid: " current-state)))))
(defn is-complete-parameter?
  "True when the given parameter name denotes the special \"complete\"
  parameter that drives deployment state-machine transitions."
  [name]
  (= "complete" name))
(defn update-state
  "Advances the deployment's ss:state parameter from current-state to its
  successor (see next-state), editing the parameter under the internal
  identity.  Throws a 500 response when the edit does not return 200.
  When the state actually changes, records a state Event on the deployment;
  a transition to Aborted is logged with critical severity."
  [current-state deployment-href]
  (let [new-state (next-state current-state)
        ;; ss:state is a deployment-level parameter, hence nodeID nil
        uuid (parameter->uiid deployment-href nil "ss:state")
        content-request {:params {:resource-name resource-url
:uuid uuid}
:identity std-crud/internal-identity
:body {:value new-state}}
{:keys [status body] :as response} (-> content-request crud/edit)]
;; fail loudly: a broken state update would leave the deployment stuck
(when (not= status 200)
(log/error response)
(throw (r/ex-response (str "A failure happened during update of deployment state." response) 500)))
;; only emit an event on a real transition (terminal states are no-ops)
(when (not= current-state new-state)
(event-utils/create-event deployment-href new-state (:acl body)
:severity (if (= new-state "Aborted")
event-utils/severity-critical
event-utils/severity-medium)
:type event-utils/type-state))))
;;
;; multimethod for ACLs
;;
;; Attaches the standard ACL (from the request identity) to a newly created
;; DeploymentParameter.
(defmethod crud/add-acl resource-uri
[resource request]
(a/add-acl resource request))
;; Spec-based structural validation for DeploymentParameter documents.
(def validate-fn (u/create-spec-validation-fn ::deployment-parameter/deployment-parameter))
;; NOTE(review): this validate method doubles as a side-effecting hook --
;; depending on the parameter name it may advance the deployment state
;; machine or stop the deployment -- before running structural validation.
;;   "complete"  -> advance state only when a value is present (some->)
;;   "ss:abort"  -> force the Aborted state when value is truthy
;;   "ss:state"  -> consult the deployment's keepRunning policy ("Always" by
;;                  default) and trigger the "stop" action when the new state
;;                  says the run should not be kept alive
(defmethod crud/validate resource-uri
[{:keys [name value deployment] :as resource}]
(let [deployment-href (:href deployment)]
(case name
"complete" (some-> value
(update-state deployment-href))
"ss:abort" (when value (update-state "Aborted" deployment-href))
"ss:state" (let [deployment-request {:params {:resource-name d/resource-url
:uuid (u/document-id deployment-href)}
:cimi-params {:select #{"keepRunning"}}
:identity std-crud/internal-identity}
deployment-data (-> deployment-request crud/retrieve :body)
keep-running (get deployment-data :keepRunning "Always")]
;; stop the deployment when the keepRunning policy excludes
;; the state we just reached
(when (or (and (= keep-running "Never") (#{"Ready" "Aborted"} value))
(and (= keep-running "On Success") (= value "Aborted"))
(and (= keep-running "On Error") (= value "Ready")))
(crud/do-action {:params {:action "stop"
:resource-name d/resource-url
:uuid (u/document-id deployment-href)}
:identity std-crud/internal-identity})))
nil))
(validate-fn resource))
;;
;; set the resource identifier to "deployment-parameter/predictable-uuid3-from-string"
;;
;; The id is deterministic in (deployment href, nodeID, name), so the same
;; logical parameter always maps to the same resource id.
(defmethod crud/new-identifier resource-name
[{:keys [deployment nodeID name] :as parameter} resource-name]
(->> (parameter->uiid (:href deployment) nodeID name)
(str resource-url "/")
(assoc parameter :id)))
;;
;; CRUD operations
;;
(def add-impl (std-crud/add-fn resource-name collection-acl resource-uri))
;; Creating an ss:state parameter also records the initial state as an Event
;; on the owning deployment, then delegates to the standard add.
(defmethod crud/add resource-name
[{{:keys [name value deployment acl]} :body :as request}]
(when (= name "ss:state")
(event-utils/create-event (:href deployment) value acl
:severity event-utils/severity-medium
:type event-utils/type-state))
(add-impl request))
;; Remaining CRUD operations delegate directly to the std-crud
;; implementations with no resource-specific behavior.
(def edit-impl (std-crud/edit-fn resource-name))
(defmethod crud/edit resource-name
[request]
(edit-impl request))
(def retrieve-impl (std-crud/retrieve-fn resource-name))
(defmethod crud/retrieve resource-name
[request]
(retrieve-impl request))
(def delete-impl (std-crud/delete-fn resource-name))
(defmethod crud/delete resource-name
[request]
(delete-impl request))
;; query enforces the collection ACL and tags results under resource-tag
(def query-impl (std-crud/query-fn resource-name collection-acl collection-uri resource-tag))
(defmethod crud/query resource-name
[request]
(query-impl request))
;;
;; initialization
;;
(defn initialize
  "Registers the DeploymentParameter resource and its spec with the
  underlying storage layer; invoked once at server start-up."
  []
(std-crud/initialize resource-url ::deployment-parameter/deployment-parameter))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi-resources/src/com/sixsq/slipstream/ssclj/resources/deployment_parameter.clj | clojure | only authenticated users can view and create credentials
set the resource identifier to "deployment-parameter/predictable-uuid3-from-string"
CRUD operations
initialization
| (ns com.sixsq.slipstream.ssclj.resources.deployment-parameter
(:require
[clojure.string :as s]
[com.sixsq.slipstream.auth.acl :as a]
[com.sixsq.slipstream.ssclj.resources.common.crud :as crud]
[com.sixsq.slipstream.ssclj.resources.common.schema :as c]
[com.sixsq.slipstream.ssclj.resources.common.std-crud :as std-crud]
[com.sixsq.slipstream.ssclj.resources.common.utils :as u]
[com.sixsq.slipstream.ssclj.resources.deployment :as d]
[com.sixsq.slipstream.ssclj.resources.event.utils :as event-utils]
[com.sixsq.slipstream.ssclj.resources.spec.deployment-parameter :as deployment-parameter]
[com.sixsq.slipstream.util.response :as r]
[superstring.core :as str]
[taoensso.timbre :as log]))
(def ^:const resource-name "DeploymentParameter")
(def ^:const resource-tag (keyword (str (str/camel-case resource-name) "s")))
(def ^:const resource-url (u/de-camelcase resource-name))
(def ^:const collection-name "DeploymentCollection")
(def ^:const resource-uri (str c/slipstream-schema-uri resource-name))
(def ^:const collection-uri (str c/slipstream-schema-uri collection-name))
(def collection-acl {:owner {:principal "ADMIN"
:type "ROLE"}
:rules [{:principal "ADMIN"
:type "ROLE"
:right "MODIFY"}
{:principal "USER"
:type "ROLE"
:right "VIEW"}]})
(defn parameter->uiid
[deployment-href nodeID name]
(let [id (s/join ":" [deployment-href nodeID name])]
(u/from-data-uuid id)))
(def next-state-machine-transition-map {"Provisioning" "Executing"
"Executing" "SendingReports"
"SendingReports" "Ready"
"Ready" "Ready"
"Done" "Done"
"Aborted" "Aborted"
"Cancelled" "Cancelled"})
(defn next-state
[current-state]
(let [next-state (get next-state-machine-transition-map current-state)]
(if (nil? next-state)
(throw (r/ex-bad-request (str "complete state invalid: " current-state)))
next-state)))
(defn is-complete-parameter?
[name]
(= name "complete"))
(defn update-state
[current-state deployment-href]
(let [new-state (next-state current-state)
uuid (parameter->uiid deployment-href nil "ss:state")
content-request {:params {:resource-name resource-url
:uuid uuid}
:identity std-crud/internal-identity
:body {:value new-state}}
{:keys [status body] :as response} (-> content-request crud/edit)]
(when (not= status 200)
(log/error response)
(throw (r/ex-response (str "A failure happened during update of deployment state." response) 500)))
(when (not= current-state new-state)
(event-utils/create-event deployment-href new-state (:acl body)
:severity (if (= new-state "Aborted")
event-utils/severity-critical
event-utils/severity-medium)
:type event-utils/type-state))))
multimethod for ACLs
(defmethod crud/add-acl resource-uri
[resource request]
(a/add-acl resource request))
(def validate-fn (u/create-spec-validation-fn ::deployment-parameter/deployment-parameter))
(defmethod crud/validate resource-uri
[{:keys [name value deployment] :as resource}]
(let [deployment-href (:href deployment)]
(case name
"complete" (some-> value
(update-state deployment-href))
"ss:abort" (when value (update-state "Aborted" deployment-href))
"ss:state" (let [deployment-request {:params {:resource-name d/resource-url
:uuid (u/document-id deployment-href)}
:cimi-params {:select #{"keepRunning"}}
:identity std-crud/internal-identity}
deployment-data (-> deployment-request crud/retrieve :body)
keep-running (get deployment-data :keepRunning "Always")]
(when (or (and (= keep-running "Never") (#{"Ready" "Aborted"} value))
(and (= keep-running "On Success") (= value "Aborted"))
(and (= keep-running "On Error") (= value "Ready")))
(crud/do-action {:params {:action "stop"
:resource-name d/resource-url
:uuid (u/document-id deployment-href)}
:identity std-crud/internal-identity})))
nil))
(validate-fn resource))
(defmethod crud/new-identifier resource-name
[{:keys [deployment nodeID name] :as parameter} resource-name]
(->> (parameter->uiid (:href deployment) nodeID name)
(str resource-url "/")
(assoc parameter :id)))
(def add-impl (std-crud/add-fn resource-name collection-acl resource-uri))
(defmethod crud/add resource-name
[{{:keys [name value deployment acl]} :body :as request}]
(when (= name "ss:state")
(event-utils/create-event (:href deployment) value acl
:severity event-utils/severity-medium
:type event-utils/type-state))
(add-impl request))
(def edit-impl (std-crud/edit-fn resource-name))
(defmethod crud/edit resource-name
[request]
(edit-impl request))
(def retrieve-impl (std-crud/retrieve-fn resource-name))
(defmethod crud/retrieve resource-name
[request]
(retrieve-impl request))
(def delete-impl (std-crud/delete-fn resource-name))
(defmethod crud/delete resource-name
[request]
(delete-impl request))
(def query-impl (std-crud/query-fn resource-name collection-acl collection-uri resource-tag))
(defmethod crud/query resource-name
[request]
(query-impl request))
(defn initialize
[]
(std-crud/initialize resource-url ::deployment-parameter/deployment-parameter))
|
2b830b4d101c36d88c3447447f73f31b2e1b6ddc91d2d4c4fff536089c53808c | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
-- | Test entry point for the Portuguese (PT) Quantity dimension.
module Duckling.Quantity.PT.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Quantity.PT.Corpus
import Duckling.Testing.Asserts
-- | Runs every example in the PT Quantity corpus through the shared
-- corpus-test harness, grouped under a single Tasty tree.
tests :: TestTree
tests = testGroup "PT Tests"
[ makeCorpusTest [Seal Quantity] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Quantity/PT/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Quantity.PT.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Quantity.PT.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "PT Tests"
[ makeCorpusTest [Seal Quantity] corpus
]
|
6f18176ae9b0f6394383c517a7bde2819abe4e9b7a26e65cc0f7e6eba0d84dcc | UU-ComputerScience/uhc | t4.hs | module Main where
ids = []
ids :: [forall a . a -> a]
( :) : : a - > [ a ] - > [ a ]
un : : : [ a ] - > ( a,[a ] )
( f1 : g1 : _ ) = ids
( ( f2::Int->Int):g2 : _ ) = ids
( ( f3::Int->Int):(g3::forall a . a->a ): _ ) = ids
( f4 : g4 : _ ) = const ( const ids f4 ' ) g4 '
f4 ' : : forall a. a - > a
f4 ' = f4
g4 ' : : forall a. a - > a
g4 ' = g4
( f4 : g4 : _ ) = const ( const ids f4 ' ) g4 '
( f4',g4 ' ) = ( f4,g4 )
( ( f5::Int->Int):g5 : _ ) : : [ forall a . a - > a ] = ids
( ( f6::Int->Int):g6 : _ ) = ids : : [ forall a . a - > a ]
(:) :: a -> [a] -> [a]
un: :: [a] -> (a,[a])
( f1 :g1:_) = ids
((f2::Int->Int):g2:_) = ids
((f3::Int->Int):(g3::forall a . a->a):_) = ids
(f4:g4:_) = const (const ids f4') g4'
f4' :: forall a. a -> a
f4' = f4
g4' :: forall a. a -> a
g4' = g4
(f4:g4:_) = const (const ids f4') g4'
(f4',g4') = (f4,g4)
((f5::Int->Int):g5:_) :: [forall a . a -> a] = ids
((f6::Int->Int):g6:_) = ids :: [forall a . a -> a]
-}
f7g7 = const (const ids f7) g7
f7 :: Int -> Int
f7 = head f7g7
g7 :: forall a. a -> a
g7 = head (tail f7g7)
( ( f8::forall a . a->a):(g8::Int->Int ): _ ) = ids
((f8::forall a . a->a):(g8::Int->Int):_) = ids
-}
-- NOTE(review): const' is declared but no binding appears in this chunk;
-- presumably deliberate for this type-checker test (or defined in a part
-- of the file not shown) -- confirm before relying on it.
const' :: [a] -> a -> [a]
-- f9g9 ties f9 and g9 back into the polymorphic list ids; only f9 gets a
-- monomorphic signature here (g9's is commented out below).
f9g9 = const' (const' ids f9) g9
-- NOTE(review): the next line looks whitespace-mangled ("f9 : : Int - > Int");
-- presumably "f9 :: Int -> Int" in the original source -- confirm.
f9 : : Int - > Int
f9 = head f9g9
-- g9 :: forall a. a -> a
g9 = head (tail f9g9)
-- Same construction, but with both component signatures given explicitly.
f0g0 = const' (const' ids f0) g0
f0 :: Int -> Int
f0 = head f0g0
g0 :: forall a. a -> a
g0 = head (tail f0g0)
-- The interesting content of this file is its type checking; nothing runs.
main = return ()
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/lucilia/t4.hs | haskell | g9 :: forall a. a -> a | module Main where
ids = []
ids :: [forall a . a -> a]
( :) : : a - > [ a ] - > [ a ]
un : : : [ a ] - > ( a,[a ] )
( f1 : g1 : _ ) = ids
( ( f2::Int->Int):g2 : _ ) = ids
( ( f3::Int->Int):(g3::forall a . a->a ): _ ) = ids
( f4 : g4 : _ ) = const ( const ids f4 ' ) g4 '
f4 ' : : forall a. a - > a
f4 ' = f4
g4 ' : : forall a. a - > a
g4 ' = g4
( f4 : g4 : _ ) = const ( const ids f4 ' ) g4 '
( f4',g4 ' ) = ( f4,g4 )
( ( f5::Int->Int):g5 : _ ) : : [ forall a . a - > a ] = ids
( ( f6::Int->Int):g6 : _ ) = ids : : [ forall a . a - > a ]
(:) :: a -> [a] -> [a]
un: :: [a] -> (a,[a])
( f1 :g1:_) = ids
((f2::Int->Int):g2:_) = ids
((f3::Int->Int):(g3::forall a . a->a):_) = ids
(f4:g4:_) = const (const ids f4') g4'
f4' :: forall a. a -> a
f4' = f4
g4' :: forall a. a -> a
g4' = g4
(f4:g4:_) = const (const ids f4') g4'
(f4',g4') = (f4,g4)
((f5::Int->Int):g5:_) :: [forall a . a -> a] = ids
((f6::Int->Int):g6:_) = ids :: [forall a . a -> a]
-}
f7g7 = const (const ids f7) g7
f7 :: Int -> Int
f7 = head f7g7
g7 :: forall a. a -> a
g7 = head (tail f7g7)
( ( f8::forall a . a->a):(g8::Int->Int ): _ ) = ids
((f8::forall a . a->a):(g8::Int->Int):_) = ids
-}
const' :: [a] -> a -> [a]
f9g9 = const' (const' ids f9) g9
f9 : : Int - > Int
f9 = head f9g9
g9 = head (tail f9g9)
f0g0 = const' (const' ids f0) g0
f0 :: Int -> Int
f0 = head f0g0
g0 :: forall a. a -> a
g0 = head (tail f0g0)
main = return ()
|
35e8ef692ff3a731a431f17f32e1f833b334ba905973e27458fda0c07d752f9b | FranklinChen/learn-you-some-erlang | state_SUITE.erl | -module(state_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0, init_per_testcase/2, end_per_testcase/2]).
-export([ets_tests/1]).
all() -> [ets_tests].
init_per_testcase(ets_tests, Config) ->
TabId = ets:new(account, [ordered_set, public]),
ets:insert(TabId, {andy, 2131}),
ets:insert(TabId, {david, 12}),
ets:insert(TabId, {steve, 12943752}),
[{table,TabId} | Config].
end_per_testcase(ets_tests, Config) ->
ets:delete(?config(table, Config)).
ets_tests(Config) ->
TabId = ?config(table, Config),
[{david, 12}] = ets:lookup(TabId, david),
steve = ets:last(TabId),
true = ets:insert(TabId, {zachary, 99}),
zachary = ets:last(TabId).
| null | https://raw.githubusercontent.com/FranklinChen/learn-you-some-erlang/878c8bc2011a12862fe72dd7fdc6c921348c79d6/ct/demo/state_SUITE.erl | erlang | -module(state_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0, init_per_testcase/2, end_per_testcase/2]).
-export([ets_tests/1]).
all() -> [ets_tests].
init_per_testcase(ets_tests, Config) ->
TabId = ets:new(account, [ordered_set, public]),
ets:insert(TabId, {andy, 2131}),
ets:insert(TabId, {david, 12}),
ets:insert(TabId, {steve, 12943752}),
[{table,TabId} | Config].
end_per_testcase(ets_tests, Config) ->
ets:delete(?config(table, Config)).
ets_tests(Config) ->
TabId = ?config(table, Config),
[{david, 12}] = ets:lookup(TabId, david),
steve = ets:last(TabId),
true = ets:insert(TabId, {zachary, 99}),
zachary = ets:last(TabId).
| |
c7049a21f8f3907150495591ed9c9a6c529215317be8f4d46fb02aefb4524aaf | vikram/lisplibraries | wiki-make.lisp |
;; Load UCW together with every supported HTTP backend, plus qbook for
;; literate-program publishing.  (The original loaded :qbook twice; one
;; load-op suffices.)
(asdf:oos 'asdf:load-op :ucw)
(asdf:oos 'asdf:load-op :ucw.aserve)
(asdf:oos 'asdf:load-op :ucw.araneida)
(asdf:oos 'asdf:load-op :ucw.mod-lisp)
(asdf:oos 'asdf:load-op :qbook)

(in-package :it.bese.ucw-user)

;; Publish the wiki example twice: as LaTeX source for the PDF build, and
;; as a browsable HTML qbook.
(qbook:publish-qbook "../examples/wiki.lisp"
                     (make-instance 'qbook:latex-generator
                                    :title "Creating a Wiki with UCW"
                                    :output-file "./pdf/wiki.tex"))

(qbook:publish-qbook "../examples/wiki.lisp"
                     (make-instance 'qbook:html-generator
                                    :title "Creating a Wiki with UCW"
                                    :output-directory "./html/wiki/"))
| null | https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/ucw-boxset/ucw_ajax/docs/wiki-make.lisp | lisp |
(asdf:oos 'asdf:load-op :ucw)
(asdf:oos 'asdf:load-op :ucw.aserve)
(asdf:oos 'asdf:load-op :ucw.araneida)
(asdf:oos 'asdf:load-op :ucw.mod-lisp)
(asdf:oos 'asdf:load-op :qbook)
(asdf:oos 'asdf:load-op :qbook)
(in-package :it.bese.ucw-user)
(qbook:publish-qbook "../examples/wiki.lisp"
(make-instance 'qbook:latex-generator
:title "Creating a Wiki with UCW"
:output-file "./pdf/wiki.tex"))
(qbook:publish-qbook "../examples/wiki.lisp"
(make-instance 'qbook:html-generator
:title "Creating a Wiki with UCW"
:output-directory "./html/wiki/"))
| |
0258bf320814d74547d41e628820d59d525f895cc23580fdb99f2d8f20c86864 | SahilKang/cl-rdkafka | posix.lisp | Copyright ( C ) 2018 - 2020 < >
;;;
;;; This file is part of cl-rdkafka.
;;;
;;; cl-rdkafka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;;; (at your option) any later version.
;;;
;;; cl-rdkafka is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with cl-rdkafka. If not, see </>.
(in-package #:cl-rdkafka)
;; Foreign layout of <poll.h>'s struct pollfd: a monitored descriptor, the
;; requested event mask and the kernel-returned event mask.
(cffi:defcstruct pollfd
(fd :int)
(events :short)
(revents :short))
;; Binding for POSIX poll(2): waits up to TIMEOUT milliseconds for events on
;; the NFDS descriptors described by the pollfd array at FDS.
(cffi:defcfun ("poll" posix-poll) :int
(fds :pointer)
(nfds nfds-t)
(timeout :int))
;; Binding for pipe(2): FDS must point to an array of two ints that receive
;; the read and write ends of the new pipe.
(cffi:defcfun ("pipe" posix-pipe) :int
(fds :pointer))
;; Binding for close(2).
(cffi:defcfun ("close" posix-close) :int
(fd :int))
;; Binding for read(2): reads at most COUNT bytes from FD into BUF.
(cffi:defcfun ("read" posix-read) cl-rdkafka/ll:ssize-t
(fd :int)
(buf :pointer)
(count cl-rdkafka/ll:size-t))
;; Binding for write(2): writes at most COUNT bytes from BUF to FD.
(cffi:defcfun ("write" posix-write) cl-rdkafka/ll:ssize-t
(fd :int)
(buf :pointer)
(count cl-rdkafka/ll:size-t))
| null | https://raw.githubusercontent.com/SahilKang/cl-rdkafka/4d0b6f7f9b102769cb91a020e4e192a2ea066e0b/src/high-level/event-io/posix.lisp | lisp |
This file is part of cl-rdkafka.
cl-rdkafka is free software: you can redistribute it and/or modify
(at your option) any later version.
cl-rdkafka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with cl-rdkafka. If not, see </>. | Copyright ( C ) 2018 - 2020 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package #:cl-rdkafka)
(cffi:defcstruct pollfd
(fd :int)
(events :short)
(revents :short))
(cffi:defcfun ("poll" posix-poll) :int
(fds :pointer)
(nfds nfds-t)
(timeout :int))
(cffi:defcfun ("pipe" posix-pipe) :int
(fds :pointer))
(cffi:defcfun ("close" posix-close) :int
(fd :int))
(cffi:defcfun ("read" posix-read) cl-rdkafka/ll:ssize-t
(fd :int)
(buf :pointer)
(count cl-rdkafka/ll:size-t))
(cffi:defcfun ("write" posix-write) cl-rdkafka/ll:ssize-t
(fd :int)
(buf :pointer)
(count cl-rdkafka/ll:size-t))
|
c5626ba4097eae8d2273a1d1fd4adf2ed87c1ae3506e7e90ed86139ed51b59f7 | msp-strath/Mary | Examples.hs | module Shonkier.Examples where
import Data . Map ( singleton )
import Data . Semigroup ( ( < > ) ) -- needed for ghc versions < = 8.2.2
import Shonkier . Syntax
import Shonkier . Semantics
appendEnv : : GlobalEnv
appendEnv = singleton " append " $ singleton " . " $ VFun mempty [ ] [ ]
[ ( PValue < $ > [ PCell ( PBind " x " ) ( PBind " xs " )
, PBind " ys "
]
, Cell ( Var " x " ) ( App ( Var " append " ) [ Var " xs " , " ys " ] )
)
, ( PValue < $ > [ PAtom " "
, PBind " ys "
]
, " ys "
)
]
onetwo : : Term
onetwo = Cell ( Atom " 1 " ) ( Cell ( Atom " 2 " ) ( Atom " " ) )
threefour : : Term
threefour = Cell ( Atom " 3 " ) ( Cell ( Atom " 4 " ) ( Atom " " ) )
onetwothreefour : : Term
onetwothreefour = App ( Var " append " ) [ onetwo , ]
appendTest : : Computation
appendTest = shonkier appendEnv onetwothreefour
readerEnv : : GlobalEnv
readerEnv = singleton " runReader " $ singleton " . " $
VFun mempty [ ] [ [ ] , [ " ask " ] ]
[ ( PValue < $ > [ PBind " _ " , PBind " val " ]
, " val "
)
, ( [ PValue ( PBind " r " ) , PRequest ( " ask " , [ ] ) ( Just " k " ) ]
, App ( Var " runReader " ) [ Var " r "
, App ( Var " k " ) [ Var " r " ]
]
)
]
onetwoSquared : : Term
onetwoSquared = App ( Var " runReader " )
[ onetwo
, App ( Var " append " ) [ ask , ask ]
] where ask = App ( Atom " ask " ) [ ]
askTest : : Computation
askTest = ( appendEnv < > readerEnv ) onetwoSquared
stateEnv : : GlobalEnv
stateEnv = singleton " runState " $ singleton " . " $
VFun mempty [ ] [ [ ] , [ " get " , " put " ] ]
[ ( PValue < $ > [ PBind " _ " , PBind " val " ]
, " val "
)
, ( [ PValue ( PBind " s " ) , PRequest ( " get " , [ ] ) ( Just " k " ) ]
, App ( Var " runState " ) [ Var " s "
, App ( Var " k " ) [ Var " s " ]
]
)
, ( [ PValue ( PBind " _ " ) , PRequest ( " put " , [ PBind " s " ] ) ( Just " k " ) ]
, App ( Var " runState " ) [ Var " s "
, App ( Var " k " ) [ Atom " " ]
]
)
]
mapEnv : : GlobalEnv
mapEnv = singleton " map " $ singleton " . " $ VFun mempty [ ] [ ]
[ ( PValue < $ > [ PBind " f " , PAtom " " ]
, Atom " "
)
, ( PValue < $ > [ PBind " f " , PCell ( PBind " x " ) ( PBind " xs " ) ]
, Cell ( App ( Var " f " ) [ Var " x " ] ) ( App ( Var " map " ) ( Var < $ > [ " f " , " xs " ] ) )
)
]
lam : : Variable - > ( Term - > Term ) - > Term
lam x b = Fun [ ] [ ( [ PValue ( PBind x ) ]
, b ( Var x )
)
]
inc : : Term
inc = App f [ App ( Atom " get " ) [ ]
, App ( Atom " put " ) [ Cell ( Atom " bip " ) ( App ( Atom " get " ) [ ] ) ]
] where
f = Fun [ ] [ ( PValue . PBind < $ > [ " v " , " _ " ]
, " v "
)
]
bipping : : Term
bipping = App ( Var " runState " )
[ Atom " "
, App ( Var " map " ) [ lam " _ " ( \ _ - > inc )
, onetwothreefour
]
]
stateTest : : Computation
stateTest = ( mapEnv < > stateEnv < > appendEnv ) bipping
mkPrim : : String - > [ Literal ] - > Computation
mkPrim p ls = shonkier primEnv $ App ( Var p ) ( Lit < $ > ls )
strConcat : : [ Literal ] - > Computation
strConcat = mkPrim " primStringConcat "
helloworld : : Computation
helloworld = strConcat $ String " foo " < $ > [ " hello " , " world " , " ! " ]
helloworld ' : : Computation
helloworld ' = strConcat $ String " " < $ > [ " hello " , " world " , " ! " ]
foogoo : : Computation
foogoo = strConcat [ String " foo " " fo " , String " goo " " \"foo " , String " " " oof ! " ]
: : Computation
listConcat = shonkier primEnv $ App ( Var " primStringConcat " ) [ str ] where
str = Cell ( Cell ( TString " " " hello " )
( Cell ( TString " " " " ) ( TString " " " world " ) ) )
( Cell ( TString " " " ! " ) ( TString " " " \n " ) )
: : [ Literal ] - > Computation
numAdd = mkPrim " primNumAdd "
three : : Computation
three = ( Num < $ > [ 1 , 2 ] )
import Data.Map (singleton)
import Data.Semigroup ((<>)) -- needed for ghc versions <= 8.2.2
import Shonkier.Syntax
import Shonkier.Semantics
-- | Global environment holding one operator, "append": concatenates two
-- Shonkier lists by structural recursion on the first argument.
appendEnv :: GlobalEnv
appendEnv = singleton "append" $ singleton "." $ VFun mempty [] mempty []
[ ( PValue <$> [ PCell (PBind "x") (PBind "xs")
, PBind "ys"
]
, Cell (Var "x") (App (Var "append") [Var "xs", Var "ys"])
)
, ( PValue <$> [ PAtom ""
, PBind "ys"
]
, Var "ys"
)
]
-- Sample lists encoded as cells terminated by the empty atom.
onetwo :: Term
onetwo = Cell (Atom "1") (Cell (Atom "2") (Atom ""))
threefour :: Term
threefour = Cell (Atom "3") (Cell (Atom "4") (Atom ""))
-- append applied to the two sample lists (should yield [1,2,3,4]).
onetwothreefour :: Term
onetwothreefour = App (Var "append") [onetwo, threefour]
-- Evaluate the append example under appendEnv.
appendTest :: Computation
appendTest = shonkier appendEnv onetwothreefour
-- | Reader-effect handler: runReader r m answers every "ask" request raised
-- in m with the environment r, resuming the continuation k.
readerEnv :: GlobalEnv
readerEnv = singleton "runReader" $ singleton "." $
VFun mempty [] mempty [[],["ask"]]
[ ( PValue <$> [PBind "_", PBind "val"]
, Var "val"
)
, ( [PValue (PBind "r"), PRequest ("ask", []) (Just "k")]
, App (Var "runReader") [ Var "r"
, App (Var "k") [Var "r"]
]
)
]
-- Appends the ambient environment to itself, reading it twice via "ask".
onetwoSquared :: Term
onetwoSquared = App (Var "runReader")
[ onetwo
, App (Var "append") [ask, ask]
] where ask = App (Atom "ask") []
-- Runs the reader example; needs both append and runReader in scope.
askTest :: Computation
askTest = shonkier (appendEnv <> readerEnv) onetwoSquared
-- | State-effect handler: runState s m answers "get" requests with the
-- current state s and "put" requests by continuing with the new state
-- (resuming the continuation with the empty atom).
stateEnv :: GlobalEnv
stateEnv = singleton "runState" $ singleton "." $
VFun mempty [] mempty [[],["get", "put"]]
[ ( PValue <$> [PBind "_", PBind "val"]
, Var "val"
)
, ( [ PValue (PBind "s"), PRequest ("get", []) (Just "k")]
, App (Var "runState") [ Var "s"
, App (Var "k") [Var "s"]
]
)
, ( [ PValue (PBind "_"), PRequest ("put", [PBind "s"]) (Just "k")]
, App (Var "runState") [ Var "s"
, App (Var "k") [Atom ""]
]
)
]
-- | List map as a Shonkier operator: applies f to each element of a
-- cell-encoded list, preserving the empty-atom terminator.
mapEnv :: GlobalEnv
mapEnv = singleton "map" $ singleton "." $ VFun mempty [] mempty []
[ ( PValue <$> [ PBind "f", PAtom "" ]
, Atom ""
)
, ( PValue <$> [ PBind "f", PCell (PBind "x") (PBind "xs") ]
, Cell (App (Var "f") [Var "x"]) (App (Var "map") (Var <$> ["f", "xs"]))
)
]
-- | Host-side helper: builds a one-argument Shonkier lambda whose body is
-- produced from the bound variable by b.
lam :: Variable -> (Term -> Term) -> Term
lam x b = Fun [] [ ( [ PValue (PBind x)]
, b (Var x)
)
]
-- | Reads the current state, then replaces it by consing the atom "bip" in
-- front of a fresh "get"; the local f returns the first of its two
-- arguments, so inc evaluates to the pre-update state.
inc :: Term
inc = App f [ App (Atom "get") []
, App (Atom "put") [ Cell (Atom "bip") (App (Atom "get") []) ]
] where
f = Fun [] [ ( PValue . PBind <$> ["v", "_"]
, Var "v"
)
]
-- | Maps inc over [1,2,3,4] under runState seeded with the empty atom,
-- so each element observes one more "bip" than the last.
bipping :: Term
bipping = App (Var "runState")
[ Atom ""
, App (Var "map") [ lam "_" (\ _ -> inc)
, onetwothreefour
]
]
-- Runs the state example; needs map, runState and append in scope.
stateTest :: Computation
stateTest = shonkier (mapEnv <> stateEnv <> appendEnv) bipping
-- | Applies the named primitive to literal arguments under the primitive
-- environment.
mkPrim :: String -> [Literal] -> Computation
mkPrim p ls = shonkier primEnv $ App (Var p) (Lit <$> ls)
-- String-concatenation primitive specialised from mkPrim.
strConcat :: [Literal] -> Computation
strConcat = mkPrim "primStringConcat"
-- The String constructor's first field is the quoting keyword; these two
-- examples differ only in that keyword.
helloworld :: Computation
helloworld = strConcat $ String "foo" <$> ["hello ", "world", "!"]
helloworld' :: Computation
helloworld' = strConcat $ String "" <$> ["hello ", "world", "!"]
-- Mixes distinct keywords, including a payload containing a double quote.
foogoo :: Computation
foogoo = strConcat [String "foo" "fo", String "goo" "\"foo", String "" " oof!"]
-- primStringConcat applied to a nested cell structure of strings rather
-- than a flat literal list.
listConcat :: Computation
listConcat = shonkier primEnv $ App (Var "primStringConcat") [str] where
str = Cell (Cell (TString "" "hello")
(Cell (TString "" " ") (TString "" "world")))
(Cell (TString "" "!") (TString "" "\n"))
-- Numeric addition primitive; three should evaluate to 3.
numAdd :: [Literal] -> Computation
numAdd = mkPrim "primNumAdd"
three :: Computation
three = numAdd (Num <$> [1, 2])
-}
| null | https://raw.githubusercontent.com/msp-strath/Mary/d805849e6da4133093564077d2344815155d7e5e/src/Shonkier/Examples.hs | haskell | needed for ghc versions < = 8.2.2
needed for ghc versions <= 8.2.2 | module Shonkier.Examples where
import Data . Map ( singleton )
import Shonkier . Syntax
import Shonkier . Semantics
appendEnv : : GlobalEnv
appendEnv = singleton " append " $ singleton " . " $ VFun mempty [ ] [ ]
[ ( PValue < $ > [ PCell ( PBind " x " ) ( PBind " xs " )
, PBind " ys "
]
, Cell ( Var " x " ) ( App ( Var " append " ) [ Var " xs " , " ys " ] )
)
, ( PValue < $ > [ PAtom " "
, PBind " ys "
]
, " ys "
)
]
onetwo : : Term
onetwo = Cell ( Atom " 1 " ) ( Cell ( Atom " 2 " ) ( Atom " " ) )
threefour : : Term
threefour = Cell ( Atom " 3 " ) ( Cell ( Atom " 4 " ) ( Atom " " ) )
onetwothreefour : : Term
onetwothreefour = App ( Var " append " ) [ onetwo , ]
appendTest : : Computation
appendTest = shonkier appendEnv onetwothreefour
readerEnv : : GlobalEnv
readerEnv = singleton " runReader " $ singleton " . " $
VFun mempty [ ] [ [ ] , [ " ask " ] ]
[ ( PValue < $ > [ PBind " _ " , PBind " val " ]
, " val "
)
, ( [ PValue ( PBind " r " ) , PRequest ( " ask " , [ ] ) ( Just " k " ) ]
, App ( Var " runReader " ) [ Var " r "
, App ( Var " k " ) [ Var " r " ]
]
)
]
onetwoSquared : : Term
onetwoSquared = App ( Var " runReader " )
[ onetwo
, App ( Var " append " ) [ ask , ask ]
] where ask = App ( Atom " ask " ) [ ]
askTest : : Computation
askTest = ( appendEnv < > readerEnv ) onetwoSquared
stateEnv : : GlobalEnv
stateEnv = singleton " runState " $ singleton " . " $
VFun mempty [ ] [ [ ] , [ " get " , " put " ] ]
[ ( PValue < $ > [ PBind " _ " , PBind " val " ]
, " val "
)
, ( [ PValue ( PBind " s " ) , PRequest ( " get " , [ ] ) ( Just " k " ) ]
, App ( Var " runState " ) [ Var " s "
, App ( Var " k " ) [ Var " s " ]
]
)
, ( [ PValue ( PBind " _ " ) , PRequest ( " put " , [ PBind " s " ] ) ( Just " k " ) ]
, App ( Var " runState " ) [ Var " s "
, App ( Var " k " ) [ Atom " " ]
]
)
]
mapEnv : : GlobalEnv
mapEnv = singleton " map " $ singleton " . " $ VFun mempty [ ] [ ]
[ ( PValue < $ > [ PBind " f " , PAtom " " ]
, Atom " "
)
, ( PValue < $ > [ PBind " f " , PCell ( PBind " x " ) ( PBind " xs " ) ]
, Cell ( App ( Var " f " ) [ Var " x " ] ) ( App ( Var " map " ) ( Var < $ > [ " f " , " xs " ] ) )
)
]
lam : : Variable - > ( Term - > Term ) - > Term
lam x b = Fun [ ] [ ( [ PValue ( PBind x ) ]
, b ( Var x )
)
]
inc : : Term
inc = App f [ App ( Atom " get " ) [ ]
, App ( Atom " put " ) [ Cell ( Atom " bip " ) ( App ( Atom " get " ) [ ] ) ]
] where
f = Fun [ ] [ ( PValue . PBind < $ > [ " v " , " _ " ]
, " v "
)
]
bipping : : Term
bipping = App ( Var " runState " )
[ Atom " "
, App ( Var " map " ) [ lam " _ " ( \ _ - > inc )
, onetwothreefour
]
]
stateTest : : Computation
stateTest = ( mapEnv < > stateEnv < > appendEnv ) bipping
mkPrim : : String - > [ Literal ] - > Computation
mkPrim p ls = shonkier primEnv $ App ( Var p ) ( Lit < $ > ls )
strConcat : : [ Literal ] - > Computation
strConcat = mkPrim " primStringConcat "
helloworld : : Computation
helloworld = strConcat $ String " foo " < $ > [ " hello " , " world " , " ! " ]
helloworld ' : : Computation
helloworld ' = strConcat $ String " " < $ > [ " hello " , " world " , " ! " ]
foogoo : : Computation
foogoo = strConcat [ String " foo " " fo " , String " goo " " \"foo " , String " " " oof ! " ]
: : Computation
listConcat = shonkier primEnv $ App ( Var " primStringConcat " ) [ str ] where
str = Cell ( Cell ( TString " " " hello " )
( Cell ( TString " " " " ) ( TString " " " world " ) ) )
( Cell ( TString " " " ! " ) ( TString " " " \n " ) )
: : [ Literal ] - > Computation
numAdd = mkPrim " primNumAdd "
three : : Computation
three = ( Num < $ > [ 1 , 2 ] )
import Data.Map (singleton)
import Shonkier.Syntax
import Shonkier.Semantics
appendEnv :: GlobalEnv
appendEnv = singleton "append" $ singleton "." $ VFun mempty [] mempty []
[ ( PValue <$> [ PCell (PBind "x") (PBind "xs")
, PBind "ys"
]
, Cell (Var "x") (App (Var "append") [Var "xs", Var "ys"])
)
, ( PValue <$> [ PAtom ""
, PBind "ys"
]
, Var "ys"
)
]
onetwo :: Term
onetwo = Cell (Atom "1") (Cell (Atom "2") (Atom ""))
threefour :: Term
threefour = Cell (Atom "3") (Cell (Atom "4") (Atom ""))
onetwothreefour :: Term
onetwothreefour = App (Var "append") [onetwo, threefour]
appendTest :: Computation
appendTest = shonkier appendEnv onetwothreefour
readerEnv :: GlobalEnv
readerEnv = singleton "runReader" $ singleton "." $
VFun mempty [] mempty [[],["ask"]]
[ ( PValue <$> [PBind "_", PBind "val"]
, Var "val"
)
, ( [PValue (PBind "r"), PRequest ("ask", []) (Just "k")]
, App (Var "runReader") [ Var "r"
, App (Var "k") [Var "r"]
]
)
]
onetwoSquared :: Term
onetwoSquared = App (Var "runReader")
[ onetwo
, App (Var "append") [ask, ask]
] where ask = App (Atom "ask") []
askTest :: Computation
askTest = shonkier (appendEnv <> readerEnv) onetwoSquared
stateEnv :: GlobalEnv
stateEnv = singleton "runState" $ singleton "." $
VFun mempty [] mempty [[],["get", "put"]]
[ ( PValue <$> [PBind "_", PBind "val"]
, Var "val"
)
, ( [ PValue (PBind "s"), PRequest ("get", []) (Just "k")]
, App (Var "runState") [ Var "s"
, App (Var "k") [Var "s"]
]
)
, ( [ PValue (PBind "_"), PRequest ("put", [PBind "s"]) (Just "k")]
, App (Var "runState") [ Var "s"
, App (Var "k") [Atom ""]
]
)
]
mapEnv :: GlobalEnv
mapEnv = singleton "map" $ singleton "." $ VFun mempty [] mempty []
[ ( PValue <$> [ PBind "f", PAtom "" ]
, Atom ""
)
, ( PValue <$> [ PBind "f", PCell (PBind "x") (PBind "xs") ]
, Cell (App (Var "f") [Var "x"]) (App (Var "map") (Var <$> ["f", "xs"]))
)
]
lam :: Variable -> (Term -> Term) -> Term
lam x b = Fun [] [ ( [ PValue (PBind x)]
, b (Var x)
)
]
inc :: Term
inc = App f [ App (Atom "get") []
, App (Atom "put") [ Cell (Atom "bip") (App (Atom "get") []) ]
] where
f = Fun [] [ ( PValue . PBind <$> ["v", "_"]
, Var "v"
)
]
bipping :: Term
bipping = App (Var "runState")
[ Atom ""
, App (Var "map") [ lam "_" (\ _ -> inc)
, onetwothreefour
]
]
stateTest :: Computation
stateTest = shonkier (mapEnv <> stateEnv <> appendEnv) bipping
mkPrim :: String -> [Literal] -> Computation
mkPrim p ls = shonkier primEnv $ App (Var p) (Lit <$> ls)
strConcat :: [Literal] -> Computation
strConcat = mkPrim "primStringConcat"
helloworld :: Computation
helloworld = strConcat $ String "foo" <$> ["hello ", "world", "!"]
helloworld' :: Computation
helloworld' = strConcat $ String "" <$> ["hello ", "world", "!"]
foogoo :: Computation
foogoo = strConcat [String "foo" "fo", String "goo" "\"foo", String "" " oof!"]
listConcat :: Computation
listConcat = shonkier primEnv $ App (Var "primStringConcat") [str] where
str = Cell (Cell (TString "" "hello")
(Cell (TString "" " ") (TString "" "world")))
(Cell (TString "" "!") (TString "" "\n"))
numAdd :: [Literal] -> Computation
numAdd = mkPrim "primNumAdd"
three :: Computation
three = numAdd (Num <$> [1, 2])
-}
|
609628ac20e51cb013317b40cc9ba24a8a72c206d63e7b5a2161f72d33360399 | qrilka/xlsx | Types.hs | # LANGUAGE CPP #
# LANGUAGE NoMonomorphismRestriction #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE DeriveGeneric #
{-# LANGUAGE RankNTypes #-}
module Codec.Xlsx.Types (
-- * The main types
Xlsx(..)
, Styles(..)
, DefinedNames(..)
, ColumnsProperties(..)
, PageSetup(..)
, Worksheet(..)
, SheetState(..)
, CellMap
, CellValue(..)
, CellFormula(..)
, FormulaExpression(..)
, Cell.SharedFormulaIndex(..)
, Cell.SharedFormulaOptions(..)
, Cell(..)
, RowHeight(..)
, RowProperties (..)
-- * Lenses
-- ** Workbook
, xlSheets
, xlStyles
, xlDefinedNames
, xlCustomProperties
, xlDateBase
-- ** Worksheet
, wsColumnsProperties
, wsRowPropertiesMap
, wsCells
, wsDrawing
, wsMerges
, wsSheetViews
, wsPageSetup
, wsConditionalFormattings
, wsDataValidations
, wsPivotTables
, wsAutoFilter
, wsTables
, wsProtection
, wsSharedFormulas
, wsState
-- ** Cells
, Cell.cellValue
, Cell.cellStyle
, Cell.cellComment
, Cell.cellFormula
-- ** Row properties
, rowHeightLens
, _CustomHeight
, _AutomaticHeight
-- * Style helpers
, emptyStyles
, renderStyleSheet
, parseStyleSheet
-- * Misc
, simpleCellFormula
, sharedFormulaByIndex
, def
, toRows
, fromRows
, module X
) where
import Control.Exception (SomeException, toException)
#ifdef USE_MICROLENS
import Lens.Micro.TH
import Data.Profunctor(dimap)
import Data.Profunctor.Choice
#else
#endif
import Control.DeepSeq (NFData)
import qualified Data.ByteString.Lazy as L
import Data.Default
import Data.Function (on)
import Data.List (groupBy)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (catMaybes, isJust)
import Data.Text (Text)
import GHC.Generics (Generic)
import Text.XML (parseLBS, renderLBS)
import Text.XML.Cursor
import Codec.Xlsx.Parser.Internal
import Codec.Xlsx.Types.AutoFilter as X
import Codec.Xlsx.Types.Cell as Cell
import Codec.Xlsx.Types.Comment as X
import Codec.Xlsx.Types.Common as X
import Codec.Xlsx.Types.ConditionalFormatting as X
import Codec.Xlsx.Types.DataValidation as X
import Codec.Xlsx.Types.Drawing as X
import Codec.Xlsx.Types.Drawing.Chart as X
import Codec.Xlsx.Types.Drawing.Common as X
import Codec.Xlsx.Types.PageSetup as X
import Codec.Xlsx.Types.PivotTable as X
import Codec.Xlsx.Types.Protection as X
import Codec.Xlsx.Types.RichText as X
import Codec.Xlsx.Types.SheetViews as X
import Codec.Xlsx.Types.StyleSheet as X
import Codec.Xlsx.Types.Table as X
import Codec.Xlsx.Types.Variant as X
import Codec.Xlsx.Writer.Internal
#ifdef USE_MICROLENS
import Lens.Micro
#else
import Control.Lens (lens, Lens', makeLenses)
import Control.Lens.TH (makePrisms)
#endif
-- | Height of a row in points (1/72in)
data RowHeight
= CustomHeight !Double
-- ^ Row height is set by the user
| AutomaticHeight !Double
-- ^ Row height is set automatically by the program
deriving (Eq, Ord, Show, Read, Generic)
instance NFData RowHeight
#ifdef USE_MICROLENS
-- Since micro-lens denies the existence of prisms,
-- I pasted the splice that's generated from makePrisms,
then I copied over the definitions from Control . Lens for the prism
-- function as well.
type Prism s t a b = forall p f. (Choice p, Applicative f) => p a (f b) -> p s (f t)
type Prism' s a = Prism s s a a
prism :: (b -> t) -> (s -> Either t a) -> Prism s t a b
prism bt seta = dimap seta (either pure (fmap bt)) . right'
_CustomHeight :: Prism' RowHeight Double
_CustomHeight
= (prism (\ x1_a4xgd -> CustomHeight x1_a4xgd))
(\ x_a4xge
-> case x_a4xge of
CustomHeight y1_a4xgf -> Right y1_a4xgf
_ -> Left x_a4xge)
{-# INLINE _CustomHeight #-}
_AutomaticHeight :: Prism' RowHeight Double
_AutomaticHeight
= (prism (\ x1_a4xgg -> AutomaticHeight x1_a4xgg))
(\ x_a4xgh
-> case x_a4xgh of
AutomaticHeight y1_a4xgi -> Right y1_a4xgi
_ -> Left x_a4xgh)
# INLINE _ AutomaticHeight #
#else
makePrisms ''RowHeight
#endif
-- | Properties of a row. See §18.3.1.73 "row (Row)" for more details
data RowProperties = RowProps
{ rowHeight :: Maybe RowHeight
-- ^ Row height in points
, rowStyle :: Maybe Int
-- ^ Style to be applied to row
, rowHidden :: Bool
-- ^ Whether row is visible or not
} deriving (Eq, Ord, Show, Read, Generic)
instance NFData RowProperties
rowHeightLens :: Lens' RowProperties (Maybe RowHeight)
rowHeightLens = lens rowHeight $ \x y -> x{rowHeight=y}
instance Default RowProperties where
def = RowProps { rowHeight = Nothing
, rowStyle = Nothing
, rowHidden = False
}
| Column range ( from cwMin to ) properties
data ColumnsProperties = ColumnsProperties
{ cpMin :: Int
^ First column affected by this ' ColumnWidth ' record .
, cpMax :: Int
^ Last column affected by this ' ColumnWidth ' record .
, cpWidth :: Maybe Double
-- ^ Column width measured as the number of characters of the
maximum digit width of the numbers 0 , 1 , 2 , ... , 9 as rendered in
-- the normal style's font.
--
See longer description in Section 18.3.1.13 " col ( Column Width &
Formatting ) " ( p. 1605 )
, cpStyle :: Maybe Int
-- ^ Default style for the affected column(s). Affects cells not yet
-- allocated in the column(s). In other words, this style applies
-- to new columns.
, cpHidden :: Bool
-- ^ Flag indicating if the affected column(s) are hidden on this
-- worksheet.
, cpCollapsed :: Bool
-- ^ Flag indicating if the outlining of the affected column(s) is
-- in the collapsed state.
, cpBestFit :: Bool
-- ^ Flag indicating if the specified column(s) is set to 'best
-- fit'.
} deriving (Eq, Show, Generic)
instance NFData ColumnsProperties
instance FromCursor ColumnsProperties where
fromCursor c = do
cpMin <- fromAttribute "min" c
cpMax <- fromAttribute "max" c
cpWidth <- maybeAttribute "width" c
cpStyle <- maybeAttribute "style" c
cpHidden <- fromAttributeDef "hidden" False c
cpCollapsed <- fromAttributeDef "collapsed" False c
cpBestFit <- fromAttributeDef "bestFit" False c
return ColumnsProperties {..}
instance FromXenoNode ColumnsProperties where
fromXenoNode root = parseAttributes root $ do
cpMin <- fromAttr "min"
cpMax <- fromAttr "max"
cpWidth <- maybeAttr "width"
cpStyle <- maybeAttr "style"
cpHidden <- fromAttrDef "hidden" False
cpCollapsed <- fromAttrDef "collapsed" False
cpBestFit <- fromAttrDef "bestFit" False
return ColumnsProperties {..}
-- | Sheet visibility state
cf . Ecma Office Open XML Part 1 :
18.18.68 ST_SheetState ( Sheet Visibility Types )
-- * "visible"
-- Indicates the sheet is visible (default)
-- * "hidden"
-- Indicates the workbook window is hidden, but can be shown by the user via the user interface.
* " "
-- Indicates the sheet is hidden and cannot be shown in the user interface (UI). This state is only available programmatically.
data SheetState =
Visible -- ^ state="visible"
| Hidden -- ^ state="hidden"
| VeryHidden -- ^ state="veryHidden"
deriving (Eq, Show, Generic)
instance NFData SheetState
instance Default SheetState where
def = Visible
instance FromAttrVal SheetState where
fromAttrVal "visible" = readSuccess Visible
fromAttrVal "hidden" = readSuccess Hidden
fromAttrVal "veryHidden" = readSuccess VeryHidden
fromAttrVal t = invalidText "SheetState" t
instance FromAttrBs SheetState where
fromAttrBs "visible" = return Visible
fromAttrBs "hidden" = return Hidden
fromAttrBs "veryHidden" = return VeryHidden
fromAttrBs t = unexpectedAttrBs "SheetState" t
instance ToAttrVal SheetState where
toAttrVal Visible = "visible"
toAttrVal Hidden = "hidden"
toAttrVal VeryHidden = "veryHidden"
-- | Xlsx worksheet
data Worksheet = Worksheet
{ _wsColumnsProperties :: [ColumnsProperties] -- ^ column widths
, _wsRowPropertiesMap :: Map RowIndex RowProperties
-- ^ custom row properties (height, style) map
, _wsCells :: CellMap -- ^ data mapped by (row, column) pairs
^
, _wsMerges :: [Range] -- ^ list of cell merges
, _wsSheetViews :: Maybe [SheetView]
, _wsPageSetup :: Maybe PageSetup
, _wsConditionalFormattings :: Map SqRef ConditionalFormatting
, _wsDataValidations :: Map SqRef DataValidation
, _wsPivotTables :: [PivotTable]
, _wsAutoFilter :: Maybe AutoFilter
, _wsTables :: [Table]
, _wsProtection :: Maybe SheetProtection
, _wsSharedFormulas :: Map SharedFormulaIndex SharedFormulaOptions
, _wsState :: SheetState
} deriving (Eq, Show, Generic)
instance NFData Worksheet
makeLenses ''Worksheet
instance Default Worksheet where
def =
Worksheet
{ _wsColumnsProperties = []
, _wsRowPropertiesMap = M.empty
, _wsCells = M.empty
, _wsDrawing = Nothing
, _wsMerges = []
, _wsSheetViews = Nothing
, _wsPageSetup = Nothing
, _wsConditionalFormattings = M.empty
, _wsDataValidations = M.empty
, _wsPivotTables = []
, _wsAutoFilter = Nothing
, _wsTables = []
, _wsProtection = Nothing
, _wsSharedFormulas = M.empty
, _wsState = def
}
-- | Raw worksheet styles, for structured implementation see 'StyleSheet'
-- and functions in "Codec.Xlsx.Types.StyleSheet"
newtype Styles = Styles {unStyles :: L.ByteString}
deriving (Eq, Show, Generic)
instance NFData Styles
-- | Structured representation of Xlsx file (currently a subset of its contents)
data Xlsx = Xlsx
{ _xlSheets :: [(Text, Worksheet)]
, _xlStyles :: Styles
, _xlDefinedNames :: DefinedNames
, _xlCustomProperties :: Map Text Variant
, _xlDateBase :: DateBase
^ date base to use when converting serial value ( i.e. ' CellDouble d ' )
-- into date-time. Default value is 'DateBase1900'
--
See also 18.17.4.1 " Date Conversion for Serial Date - Times " ( p. 2067 )
} deriving (Eq, Show, Generic)
instance NFData Xlsx
-- | Defined names
--
-- Each defined name consists of a name, an optional local sheet ID, and a value.
--
-- This element defines the collection of defined names for this workbook.
-- Defined names are descriptive names to represent cells, ranges of cells,
-- formulas, or constant values. Defined names can be used to represent a range
-- on any worksheet.
--
Excel also defines a number of reserved names with a special interpretation :
--
-- * @_xlnm.Print_Area@ specifies the workbook's print area.
-- Example value: @SheetName!$A:$A,SheetName!$1:$4@
-- * @_xlnm.Print_Titles@ specifies the row(s) or column(s) to repeat
-- at the top of each printed page.
-- * @_xlnm.Sheet_Title@:refers to a sheet title.
--
and others . See Section 18.2.6 , " definedNames ( Defined Names ) " ( p. 1728 ) of
the spec ( second edition ) .
--
-- NOTE: Right now this is only a minimal implementation of defined names.
newtype DefinedNames = DefinedNames [(Text, Maybe Text, Text)]
deriving (Eq, Show, Generic)
instance NFData DefinedNames
makeLenses ''Xlsx
instance Default Xlsx where
def = Xlsx [] emptyStyles def M.empty DateBase1900
instance Default DefinedNames where
def = DefinedNames []
emptyStyles :: Styles
emptyStyles = Styles "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><styleSheet xmlns=\"\"></styleSheet>"
-- | Render 'StyleSheet'
--
-- This is used to render a structured 'StyleSheet' into a raw XML 'Styles'
-- document. Actually /replacing/ 'Styles' with 'StyleSheet' would mean we
-- would need to write a /parser/ for 'StyleSheet' as well (and would moreover
-- require that we support the full style sheet specification, which is still
-- quite a bit of work).
renderStyleSheet :: StyleSheet -> Styles
renderStyleSheet = Styles . renderLBS def . toDocument
-- | Parse 'StyleSheet'
--
This is used to parse raw ' Styles ' into structured ' StyleSheet '
-- currently not all of the style sheet specification is supported
-- so parser (and the data model) is to be completed
parseStyleSheet :: Styles -> Either SomeException StyleSheet
parseStyleSheet (Styles bs) = parseLBS def bs >>= parseDoc
where
parseDoc doc = case fromCursor (fromDocument doc) of
[stylesheet] -> Right stylesheet
_ -> Left . toException $ ParseException "Could not parse style sheets"
-- | converts cells mapped by (row, column) into rows which contain
-- row index and cells as pairs of column indices and cell values
toRows :: CellMap -> [(RowIndex, [(ColumnIndex, Cell)])]
toRows cells =
map extractRow $ groupBy ((==) `on` (fst . fst)) $ M.toList cells
where
extractRow row@(((x,_),_):_) =
(x, map (\((_,y),v) -> (y,v)) row)
extractRow _ = error "invalid CellMap row"
-- | reverse to 'toRows'
fromRows :: [(RowIndex, [(ColumnIndex, Cell)])] -> CellMap
fromRows rows = M.fromList $ concatMap mapRow rows
where
mapRow (r, cells) = map (\(c, v) -> ((r, c), v)) cells
instance ToElement ColumnsProperties where
toElement nm ColumnsProperties {..} = leafElement nm attrs
where
attrs =
["min" .= cpMin, "max" .= cpMax] ++
catMaybes
[ "style" .=? (justNonDef 0 =<< cpStyle)
, "width" .=? cpWidth
, "customWidth" .=? justTrue (isJust cpWidth)
, "hidden" .=? justTrue cpHidden
, "collapsed" .=? justTrue cpCollapsed
, "bestFit" .=? justTrue cpBestFit
] | null | https://raw.githubusercontent.com/qrilka/xlsx/ad3bfe13d89db1b519b4996f0cd82b1a1e47f0ab/src/Codec/Xlsx/Types.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
* The main types
* Lenses
** Workbook
** Worksheet
** Cells
** Row properties
* Style helpers
* Misc
| Height of a row in points (1/72in)
^ Row height is set by the user
^ Row height is set automatically by the program
Since micro-lens denies the existence of prisms,
I pasted the splice that's generated from makePrisms,
function as well.
# INLINE _CustomHeight #
| Properties of a row. See §18.3.1.73 "row (Row)" for more details
^ Row height in points
^ Style to be applied to row
^ Whether row is visible or not
^ Column width measured as the number of characters of the
the normal style's font.
^ Default style for the affected column(s). Affects cells not yet
allocated in the column(s). In other words, this style applies
to new columns.
^ Flag indicating if the affected column(s) are hidden on this
worksheet.
^ Flag indicating if the outlining of the affected column(s) is
in the collapsed state.
^ Flag indicating if the specified column(s) is set to 'best
fit'.
| Sheet visibility state
* "visible"
Indicates the sheet is visible (default)
* "hidden"
Indicates the workbook window is hidden, but can be shown by the user via the user interface.
Indicates the sheet is hidden and cannot be shown in the user interface (UI). This state is only available programmatically.
^ state="visible"
^ state="hidden"
^ state="veryHidden"
| Xlsx worksheet
^ column widths
^ custom row properties (height, style) map
^ data mapped by (row, column) pairs
^ list of cell merges
| Raw worksheet styles, for structured implementation see 'StyleSheet'
and functions in "Codec.Xlsx.Types.StyleSheet"
| Structured representation of Xlsx file (currently a subset of its contents)
into date-time. Default value is 'DateBase1900'
| Defined names
Each defined name consists of a name, an optional local sheet ID, and a value.
This element defines the collection of defined names for this workbook.
Defined names are descriptive names to represent cells, ranges of cells,
formulas, or constant values. Defined names can be used to represent a range
on any worksheet.
* @_xlnm.Print_Area@ specifies the workbook's print area.
Example value: @SheetName!$A:$A,SheetName!$1:$4@
* @_xlnm.Print_Titles@ specifies the row(s) or column(s) to repeat
at the top of each printed page.
* @_xlnm.Sheet_Title@:refers to a sheet title.
NOTE: Right now this is only a minimal implementation of defined names.
| Render 'StyleSheet'
This is used to render a structured 'StyleSheet' into a raw XML 'Styles'
document. Actually /replacing/ 'Styles' with 'StyleSheet' would mean we
would need to write a /parser/ for 'StyleSheet' as well (and would moreover
require that we support the full style sheet specification, which is still
quite a bit of work).
| Parse 'StyleSheet'
currently not all of the style sheet specification is supported
so parser (and the data model) is to be completed
| converts cells mapped by (row, column) into rows which contain
row index and cells as pairs of column indices and cell values
| reverse to 'toRows' | # LANGUAGE CPP #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE DeriveGeneric #
module Codec.Xlsx.Types (
Xlsx(..)
, Styles(..)
, DefinedNames(..)
, ColumnsProperties(..)
, PageSetup(..)
, Worksheet(..)
, SheetState(..)
, CellMap
, CellValue(..)
, CellFormula(..)
, FormulaExpression(..)
, Cell.SharedFormulaIndex(..)
, Cell.SharedFormulaOptions(..)
, Cell(..)
, RowHeight(..)
, RowProperties (..)
, xlSheets
, xlStyles
, xlDefinedNames
, xlCustomProperties
, xlDateBase
, wsColumnsProperties
, wsRowPropertiesMap
, wsCells
, wsDrawing
, wsMerges
, wsSheetViews
, wsPageSetup
, wsConditionalFormattings
, wsDataValidations
, wsPivotTables
, wsAutoFilter
, wsTables
, wsProtection
, wsSharedFormulas
, wsState
, Cell.cellValue
, Cell.cellStyle
, Cell.cellComment
, Cell.cellFormula
, rowHeightLens
, _CustomHeight
, _AutomaticHeight
, emptyStyles
, renderStyleSheet
, parseStyleSheet
, simpleCellFormula
, sharedFormulaByIndex
, def
, toRows
, fromRows
, module X
) where
import Control.Exception (SomeException, toException)
#ifdef USE_MICROLENS
import Lens.Micro.TH
import Data.Profunctor(dimap)
import Data.Profunctor.Choice
#else
#endif
import Control.DeepSeq (NFData)
import qualified Data.ByteString.Lazy as L
import Data.Default
import Data.Function (on)
import Data.List (groupBy)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (catMaybes, isJust)
import Data.Text (Text)
import GHC.Generics (Generic)
import Text.XML (parseLBS, renderLBS)
import Text.XML.Cursor
import Codec.Xlsx.Parser.Internal
import Codec.Xlsx.Types.AutoFilter as X
import Codec.Xlsx.Types.Cell as Cell
import Codec.Xlsx.Types.Comment as X
import Codec.Xlsx.Types.Common as X
import Codec.Xlsx.Types.ConditionalFormatting as X
import Codec.Xlsx.Types.DataValidation as X
import Codec.Xlsx.Types.Drawing as X
import Codec.Xlsx.Types.Drawing.Chart as X
import Codec.Xlsx.Types.Drawing.Common as X
import Codec.Xlsx.Types.PageSetup as X
import Codec.Xlsx.Types.PivotTable as X
import Codec.Xlsx.Types.Protection as X
import Codec.Xlsx.Types.RichText as X
import Codec.Xlsx.Types.SheetViews as X
import Codec.Xlsx.Types.StyleSheet as X
import Codec.Xlsx.Types.Table as X
import Codec.Xlsx.Types.Variant as X
import Codec.Xlsx.Writer.Internal
#ifdef USE_MICROLENS
import Lens.Micro
#else
import Control.Lens (lens, Lens', makeLenses)
import Control.Lens.TH (makePrisms)
#endif
data RowHeight
= CustomHeight !Double
| AutomaticHeight !Double
deriving (Eq, Ord, Show, Read, Generic)
instance NFData RowHeight
#ifdef USE_MICROLENS
then I copied over the definitions from Control . Lens for the prism
type Prism s t a b = forall p f. (Choice p, Applicative f) => p a (f b) -> p s (f t)
type Prism' s a = Prism s s a a
prism :: (b -> t) -> (s -> Either t a) -> Prism s t a b
prism bt seta = dimap seta (either pure (fmap bt)) . right'
_CustomHeight :: Prism' RowHeight Double
_CustomHeight
= (prism (\ x1_a4xgd -> CustomHeight x1_a4xgd))
(\ x_a4xge
-> case x_a4xge of
CustomHeight y1_a4xgf -> Right y1_a4xgf
_ -> Left x_a4xge)
_AutomaticHeight :: Prism' RowHeight Double
_AutomaticHeight
= (prism (\ x1_a4xgg -> AutomaticHeight x1_a4xgg))
(\ x_a4xgh
-> case x_a4xgh of
AutomaticHeight y1_a4xgi -> Right y1_a4xgi
_ -> Left x_a4xgh)
# INLINE _ AutomaticHeight #
#else
makePrisms ''RowHeight
#endif
data RowProperties = RowProps
{ rowHeight :: Maybe RowHeight
, rowStyle :: Maybe Int
, rowHidden :: Bool
} deriving (Eq, Ord, Show, Read, Generic)
instance NFData RowProperties
rowHeightLens :: Lens' RowProperties (Maybe RowHeight)
rowHeightLens = lens rowHeight $ \x y -> x{rowHeight=y}
instance Default RowProperties where
def = RowProps { rowHeight = Nothing
, rowStyle = Nothing
, rowHidden = False
}
| Column range ( from cwMin to ) properties
data ColumnsProperties = ColumnsProperties
{ cpMin :: Int
^ First column affected by this ' ColumnWidth ' record .
, cpMax :: Int
^ Last column affected by this ' ColumnWidth ' record .
, cpWidth :: Maybe Double
maximum digit width of the numbers 0 , 1 , 2 , ... , 9 as rendered in
See longer description in Section 18.3.1.13 " col ( Column Width &
Formatting ) " ( p. 1605 )
, cpStyle :: Maybe Int
, cpHidden :: Bool
, cpCollapsed :: Bool
, cpBestFit :: Bool
} deriving (Eq, Show, Generic)
instance NFData ColumnsProperties
instance FromCursor ColumnsProperties where
fromCursor c = do
cpMin <- fromAttribute "min" c
cpMax <- fromAttribute "max" c
cpWidth <- maybeAttribute "width" c
cpStyle <- maybeAttribute "style" c
cpHidden <- fromAttributeDef "hidden" False c
cpCollapsed <- fromAttributeDef "collapsed" False c
cpBestFit <- fromAttributeDef "bestFit" False c
return ColumnsProperties {..}
instance FromXenoNode ColumnsProperties where
fromXenoNode root = parseAttributes root $ do
cpMin <- fromAttr "min"
cpMax <- fromAttr "max"
cpWidth <- maybeAttr "width"
cpStyle <- maybeAttr "style"
cpHidden <- fromAttrDef "hidden" False
cpCollapsed <- fromAttrDef "collapsed" False
cpBestFit <- fromAttrDef "bestFit" False
return ColumnsProperties {..}
cf . Ecma Office Open XML Part 1 :
18.18.68 ST_SheetState ( Sheet Visibility Types )
* " "
data SheetState =
deriving (Eq, Show, Generic)
instance NFData SheetState
instance Default SheetState where
def = Visible
instance FromAttrVal SheetState where
fromAttrVal "visible" = readSuccess Visible
fromAttrVal "hidden" = readSuccess Hidden
fromAttrVal "veryHidden" = readSuccess VeryHidden
fromAttrVal t = invalidText "SheetState" t
instance FromAttrBs SheetState where
fromAttrBs "visible" = return Visible
fromAttrBs "hidden" = return Hidden
fromAttrBs "veryHidden" = return VeryHidden
fromAttrBs t = unexpectedAttrBs "SheetState" t
instance ToAttrVal SheetState where
toAttrVal Visible = "visible"
toAttrVal Hidden = "hidden"
toAttrVal VeryHidden = "veryHidden"
data Worksheet = Worksheet
, _wsRowPropertiesMap :: Map RowIndex RowProperties
^
, _wsSheetViews :: Maybe [SheetView]
, _wsPageSetup :: Maybe PageSetup
, _wsConditionalFormattings :: Map SqRef ConditionalFormatting
, _wsDataValidations :: Map SqRef DataValidation
, _wsPivotTables :: [PivotTable]
, _wsAutoFilter :: Maybe AutoFilter
, _wsTables :: [Table]
, _wsProtection :: Maybe SheetProtection
, _wsSharedFormulas :: Map SharedFormulaIndex SharedFormulaOptions
, _wsState :: SheetState
} deriving (Eq, Show, Generic)
instance NFData Worksheet
makeLenses ''Worksheet
instance Default Worksheet where
def =
Worksheet
{ _wsColumnsProperties = []
, _wsRowPropertiesMap = M.empty
, _wsCells = M.empty
, _wsDrawing = Nothing
, _wsMerges = []
, _wsSheetViews = Nothing
, _wsPageSetup = Nothing
, _wsConditionalFormattings = M.empty
, _wsDataValidations = M.empty
, _wsPivotTables = []
, _wsAutoFilter = Nothing
, _wsTables = []
, _wsProtection = Nothing
, _wsSharedFormulas = M.empty
, _wsState = def
}
newtype Styles = Styles {unStyles :: L.ByteString}
deriving (Eq, Show, Generic)
instance NFData Styles
data Xlsx = Xlsx
{ _xlSheets :: [(Text, Worksheet)]
, _xlStyles :: Styles
, _xlDefinedNames :: DefinedNames
, _xlCustomProperties :: Map Text Variant
, _xlDateBase :: DateBase
^ date base to use when converting serial value ( i.e. ' CellDouble d ' )
See also 18.17.4.1 " Date Conversion for Serial Date - Times " ( p. 2067 )
} deriving (Eq, Show, Generic)
instance NFData Xlsx
Excel also defines a number of reserved names with a special interpretation :
and others . See Section 18.2.6 , " definedNames ( Defined Names ) " ( p. 1728 ) of
the spec ( second edition ) .
newtype DefinedNames = DefinedNames [(Text, Maybe Text, Text)]
deriving (Eq, Show, Generic)
instance NFData DefinedNames
makeLenses ''Xlsx
instance Default Xlsx where
def = Xlsx [] emptyStyles def M.empty DateBase1900
instance Default DefinedNames where
def = DefinedNames []
emptyStyles :: Styles
emptyStyles = Styles "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><styleSheet xmlns=\"\"></styleSheet>"
renderStyleSheet :: StyleSheet -> Styles
renderStyleSheet = Styles . renderLBS def . toDocument
This is used to parse raw ' Styles ' into structured ' StyleSheet '
parseStyleSheet :: Styles -> Either SomeException StyleSheet
parseStyleSheet (Styles bs) = parseLBS def bs >>= parseDoc
where
parseDoc doc = case fromCursor (fromDocument doc) of
[stylesheet] -> Right stylesheet
_ -> Left . toException $ ParseException "Could not parse style sheets"
toRows :: CellMap -> [(RowIndex, [(ColumnIndex, Cell)])]
toRows cells =
map extractRow $ groupBy ((==) `on` (fst . fst)) $ M.toList cells
where
extractRow row@(((x,_),_):_) =
(x, map (\((_,y),v) -> (y,v)) row)
extractRow _ = error "invalid CellMap row"
fromRows :: [(RowIndex, [(ColumnIndex, Cell)])] -> CellMap
fromRows rows = M.fromList $ concatMap mapRow rows
where
mapRow (r, cells) = map (\(c, v) -> ((r, c), v)) cells
instance ToElement ColumnsProperties where
toElement nm ColumnsProperties {..} = leafElement nm attrs
where
attrs =
["min" .= cpMin, "max" .= cpMax] ++
catMaybes
[ "style" .=? (justNonDef 0 =<< cpStyle)
, "width" .=? cpWidth
, "customWidth" .=? justTrue (isJust cpWidth)
, "hidden" .=? justTrue cpHidden
, "collapsed" .=? justTrue cpCollapsed
, "bestFit" .=? justTrue cpBestFit
] |
ab190e477a35dba4cad196820d800aa96e2ba4d994eaf1dc88a7b3f30599f034 | haskellfoundation/matchmaker | ContributorCall.hs | module DB.ContributorCall where
import Data.Aeson (FromJSON, ToJSON)
import Data.Time (UTCTime)
import Data.UUID (UUID)
import Database.PostgreSQL.Entity
import Database.PostgreSQL.Simple (FromRow, Only (Only), ToRow)
import Database.PostgreSQL.Simple.FromField (FromField)
import Database.PostgreSQL.Simple.ToField (ToField)
import Database.PostgreSQL.Transact (DBT)
import DB.Repository (RepositoryId)
import Database.PostgreSQL.Entity.Types
newtype ContributorCallId
= ContributorCallId { getContributorCallId :: UUID }
deriving stock (Eq, Generic)
deriving newtype (FromField, FromJSON, Show, ToField, ToJSON)
data ContributorCall
= ContributorCall { contributorCallId :: ContributorCallId
, repositoryId :: RepositoryId
, title :: Text
, description :: Text
, createdAt :: UTCTime
, updatedAt :: UTCTime
}
deriving stock (Eq, Generic, Show)
deriving anyclass (FromRow, ToRow)
deriving (Entity)
via (GenericEntity '[TableName "contributor_calls"] ContributorCall)
insertContributorCall :: ContributorCall -> DBT IO ()
insertContributorCall cc = insert @ContributorCall cc
getContributorCall :: ContributorCallId -> DBT IO (Maybe ContributorCall)
getContributorCall ccId = selectById @ContributorCall (Only ccId)
deleteContributorCall :: ContributorCallId -> DBT IO ()
deleteContributorCall ccId = delete @ContributorCall (Only ccId)
| null | https://raw.githubusercontent.com/haskellfoundation/matchmaker/136fcb9ba866359c5efd4271a4d39e2b98fd1897/src/DB/ContributorCall.hs | haskell | module DB.ContributorCall where
import Data.Aeson (FromJSON, ToJSON)
import Data.Time (UTCTime)
import Data.UUID (UUID)
import Database.PostgreSQL.Entity
import Database.PostgreSQL.Simple (FromRow, Only (Only), ToRow)
import Database.PostgreSQL.Simple.FromField (FromField)
import Database.PostgreSQL.Simple.ToField (ToField)
import Database.PostgreSQL.Transact (DBT)
import DB.Repository (RepositoryId)
import Database.PostgreSQL.Entity.Types
newtype ContributorCallId
= ContributorCallId { getContributorCallId :: UUID }
deriving stock (Eq, Generic)
deriving newtype (FromField, FromJSON, Show, ToField, ToJSON)
data ContributorCall
= ContributorCall { contributorCallId :: ContributorCallId
, repositoryId :: RepositoryId
, title :: Text
, description :: Text
, createdAt :: UTCTime
, updatedAt :: UTCTime
}
deriving stock (Eq, Generic, Show)
deriving anyclass (FromRow, ToRow)
deriving (Entity)
via (GenericEntity '[TableName "contributor_calls"] ContributorCall)
insertContributorCall :: ContributorCall -> DBT IO ()
insertContributorCall cc = insert @ContributorCall cc
getContributorCall :: ContributorCallId -> DBT IO (Maybe ContributorCall)
getContributorCall ccId = selectById @ContributorCall (Only ccId)
deleteContributorCall :: ContributorCallId -> DBT IO ()
deleteContributorCall ccId = delete @ContributorCall (Only ccId)
| |
6b1d67cf4ba1c0fe674936e62971db428ffcae542582cc69ab9ff44a3b863f16 | AccelerateHS/accelerate-examples | Config.hs | {-# LANGUAGE CPP #-}
# LANGUAGE TemplateHaskell #
{-# LANGUAGE TypeOperators #-}
-- |
-- Module: : Data.Array.Accelerate.Examples.Internal.Criterion.Config
Copyright : [ 2014 .. 2020 ]
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
module Data.Array.Accelerate.Examples.Internal.Criterion.Config (
-- ** Criterion options
Config, defaultConfig,
module Data.Array.Accelerate.Examples.Internal.Criterion.Config
) where
import Prelude hiding ( (.), (<$>), id )
import Data.Char
import Data.Label
import Data.Label.Derive
import Control.Category ( (.), id )
import System.Console.GetOpt
import Text.Printf
import Text.PrettyPrint.ANSI.Leijen
import qualified Data.Map as M
import Criterion.Analysis ( validateAccessors )
import Criterion.Types ( Config, measureKeys, measureAccessors )
import Criterion.Main.Options ( defaultConfig )
-- Generate an fclabels lens for every field of Criterion's 'Config'
-- record, named identically to the field itself (the 'id' naming
-- function); these lenses are used by the option parsers below.
$(mkLabelsNamed id [''Config])
-- A GetOpt version of Criterion's command line options parser. It is
-- unfortunate that we need to do this to integrate with the other frameworks.
--
-- | Criterion's command line flags expressed as GetOpt descriptors.
-- Each option parses its argument and updates the corresponding field
-- of the Criterion 'Config' through the fclabels lens generated by
-- 'mkLabelsNamed' above.
defaultOptions :: [OptDescr (Config -> Config)]
defaultOptions =
  [ Option [] ["ci"]
      (ReqArg (set confInterval . read) "CI")
      (describe confInterval "confidence interval")
#if !MIN_VERSION_criterion(1,1,0)
  -- Only criterion releases before 1.1 expose a flag to skip garbage
  -- collection between iterations.
  , Option [] ["no-gc"]
      (NoArg (set forceGC False))
      "do not collect garbage between iterations"
#endif
  , Option [] ["time-limit"]
      (ReqArg (set timeLimit . read) "SECS")
      (describe timeLimit "time limit to run a benchmark")
  , Option [] ["resamples"]
      (ReqArg (set resamples . read) "INT")
      (describe resamples "number of bootstrap resamples to perform")
  -- '--regress' may be given several times; each occurrence prepends
  -- another (predictors, responder) pair to the accumulated list.
  , Option [] ["regress"]
      (ReqArg (\v -> modify regressions (regressParams v :)) "RESP:PRED..")
      "regressions to perform"
  , Option [] ["raw"]
      (OptArg (set rawDataFile) "FILE")
      (describe rawDataFile "file to write raw data to")
  , Option [] ["output"]
      (OptArg (set reportFile) "FILE")
      (describe reportFile "file to write report to")
  , Option [] ["csv"]
      (OptArg (set csvFile) "FILE")
      (describe csvFile "file to write CSV summary to")
  , Option [] ["junit"]
      (OptArg (set junitFile) "FILE")
      (describe junitFile "file to write JUnit summary to")
  -- Verbosity is parsed as an Int and validated against the enum's
  -- range [0,2] before conversion; out-of-range values abort.
  , Option [] ["verbosity"]
      (ReqArg (set verbosity . toEnum . range (0,2) . read) "LEVEL")
      (describe' fromEnum verbosity "verbosity level")
  , Option [] ["template"]
      (ReqArg (set template) "FILE")
      (describe template "template to use for report")
  ]
  where
    -- Help text with the field's default value appended, e.g.
    -- "confidence interval (0.95)".
    describe :: Show a => (Config :-> a) -> String -> String
    describe = describe' id

    -- As 'describe', but shows the field through a projection @p@
    -- (used to render 'verbosity' as its numeric code).
    describe' :: Show a => (b -> a) -> (Config :-> b) -> String -> String
    describe' p f msg
      = msg ++ " (" ++ show (p (get f defaultConfig)) ++ ")"

    -- Bounds check: abort with a readable message when the parsed
    -- value lies outside the closed interval [n,m].
    range (n,m) x
      | n <= x && x <= m = x
      | otherwise        = error $ printf "%d is outside range (%d,%d)" x n m
-- The following options are not part of the configuration structure, but will
-- be intercepted when calling 'defaultMainWith', and control the execution
-- mode. We include these extra options when generating the help text, but don't
-- include them when processing the 'Config' structure.
--
extraOptions :: [OptDescr (a -> a)]
extraOptions =
  [ Option [] ["match"]    (ReqArg ignoreArg "MATCH") "how to match benchmark names"
  , Option [] ["only-run"] (ReqArg ignoreArg "ITERS") "run benchmarks, don't analyse"
  , Option [] ["list"]     (NoArg id)                 "list benchmarks"
  , Option [] ["help"]     (NoArg id)                 "Shows this help text"
  ]
  where
    -- None of these flags touch the configuration value itself; they
    -- are intercepted and acted upon by the driver.
    ignoreArg :: String -> a -> a
    ignoreArg _ = id
-- Check and parse the arguments to '--regress'. Copied from
-- Criterion.Main.Options.
--
regressParams :: String -> ([String], String)
regressParams spec
  | null responder  = error "regression parameters: no responder specified"
  | null predictors = error "regression parameters: no predictors specified"
  | otherwise       = checked `seq` result
  where
    -- Everything before the first ':' names the responder; the rest
    -- (comma separated) names the predictors.
    (responder, predictors) = break (== ':') spec
    strip = reverse . dropWhile isSpace . reverse . dropWhile isSpace
    commaToSpace ',' = ' '
    commaToSpace c   = c
    result  = (words (map commaToSpace (drop 1 predictors)), strip responder)
    -- Forced before returning, so unknown accessor names abort here
    -- rather than deep inside the analysis pass.
    checked = either error id (uncurry validateAccessors result)
-- Generate the help string to describe the possible arguments to '--regress'.
-- Copied from Criterion.Main.Options.
--
regressHelp :: String
regressHelp = show (heading <$> table)
  where
    heading = text "Criterion regression metrics for use with --regress:"
    -- One row per measurable quantity: its key and the short
    -- description stored alongside its accessor.
    table   = tabulate [ (text key, text descr)
                       | key <- measureKeys
                       , let (_, descr) = measureAccessors M.! key ]
-- | Lay out (label, description) pairs in two columns, padding labels
-- to a fixed width (overlong labels push the description to the next
-- line via 'fillBreak').
tabulate :: [(Doc,Doc)] -> Doc
tabulate []   = empty
tabulate rows =
  vcat [ indent 2 (fillBreak labelWidth label <+> body) | (label, body) <- rows ]
  where
    labelWidth = 24
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate-examples/a973ee423b5eadda6ef2e2504d2383f625e49821/lib/Data/Array/Accelerate/Examples/Internal/Criterion/Config.hs | haskell | # LANGUAGE CPP #
# LANGUAGE TypeOperators #
|
Module: : Data.Array.Accelerate.Examples.Internal.Criterion.Config
License : BSD3
Stability : experimental
** Criterion options
unfortunate that we need to do this to integrate with the other frameworks.
The following options are not part of the configuration structure, but will
be intercepted when calling 'defaultMainWith', and control the execution
mode. We include these extra options when generating the help text, but don't
include them when processing the 'Config' structure.
Check and parse the arguments to '--regress'. Copied from
Generate the help string to describe the possible arguments to '--regress'.
| # LANGUAGE TemplateHaskell #
Copyright : [ 2014 .. 2020 ]
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.Examples.Internal.Criterion.Config (
Config, defaultConfig,
module Data.Array.Accelerate.Examples.Internal.Criterion.Config
) where
import Prelude hiding ( (.), (<$>), id )
import Data.Char
import Data.Label
import Data.Label.Derive
import Control.Category ( (.), id )
import System.Console.GetOpt
import Text.Printf
import Text.PrettyPrint.ANSI.Leijen
import qualified Data.Map as M
import Criterion.Analysis ( validateAccessors )
import Criterion.Types ( Config, measureKeys, measureAccessors )
import Criterion.Main.Options ( defaultConfig )
$(mkLabelsNamed id [''Config])
A GetOpt version of Criterion 's command line options parser . It is
defaultOptions :: [OptDescr (Config -> Config)]
defaultOptions =
[ Option [] ["ci"]
(ReqArg (set confInterval . read) "CI")
(describe confInterval "confidence interval")
#if !MIN_VERSION_criterion(1,1,0)
, Option [] ["no-gc"]
(NoArg (set forceGC False))
"do not collect garbage between iterations"
#endif
, Option [] ["time-limit"]
(ReqArg (set timeLimit . read) "SECS")
(describe timeLimit "time limit to run a benchmark")
, Option [] ["resamples"]
(ReqArg (set resamples . read) "INT")
(describe resamples "number of bootstrap resamples to perform")
, Option [] ["regress"]
(ReqArg (\v -> modify regressions (regressParams v :)) "RESP:PRED..")
"regressions to perform"
, Option [] ["raw"]
(OptArg (set rawDataFile) "FILE")
(describe rawDataFile "file to write raw data to")
, Option [] ["output"]
(OptArg (set reportFile) "FILE")
(describe reportFile "file to write report to")
, Option [] ["csv"]
(OptArg (set csvFile) "FILE")
(describe csvFile "file to write CSV summary to")
, Option [] ["junit"]
(OptArg (set junitFile) "FILE")
(describe junitFile "file to write JUnit summary to")
, Option [] ["verbosity"]
(ReqArg (set verbosity . toEnum . range (0,2) . read) "LEVEL")
(describe' fromEnum verbosity "verbosity level")
, Option [] ["template"]
(ReqArg (set template) "FILE")
(describe template "template to use for report")
]
where
describe :: Show a => (Config :-> a) -> String -> String
describe = describe' id
describe' :: Show a => (b -> a) -> (Config :-> b) -> String -> String
describe' p f msg
= msg ++ " (" ++ show (p (get f defaultConfig)) ++ ")"
range (n,m) x
| n <= x && x <= m = x
| otherwise = error $ printf "%d is outside range (%d,%d)" x n m
extraOptions :: [OptDescr (a -> a)]
extraOptions =
[ Option [] ["match"]
(ReqArg (flip const) "MATCH")
"how to match benchmark names"
, Option [] ["only-run"]
(ReqArg (flip const) "ITERS")
"run benchmarks, don't analyse"
, Option [] ["list"]
(NoArg id)
"list benchmarks"
, Option [] ["help"]
(NoArg id)
"Shows this help text"
]
Criterion . Main . Options
regressParams :: String -> ([String], String)
regressParams m
| null r = error "regression parameters: no responder specified"
| null ps = error "regression parameters: no predictors specified"
| otherwise = validate `seq` ret
where
repl ',' = ' '
repl c = c
tidy = reverse . dropWhile isSpace . reverse . dropWhile isSpace
(r,ps) = break (==':') m
ret = (words . map repl . drop 1 $ ps, tidy r)
validate = either error id $ uncurry validateAccessors ret
Copied from Criterion . Main . Options .
regressHelp :: String
regressHelp
= show
$ text "Criterion regression metrics for use with --regress:"
<$> tabulate [(text n, text d) | (n,(_,d)) <- map f measureKeys]
where
f k = (k, measureAccessors M.! k)
tabulate :: [(Doc,Doc)] -> Doc
tabulate = tabulate' 24
where
tabulate' _ [] = empty
tabulate' size table = vcat
[ indent 2 (fillBreak size key <+> value) | (key, value) <- table ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.