_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
50b39b2ba1a5830ad88321bb9a12a778f6cbe813e8a44d755f85122f09947d65 | mlcfp/zenacy-html | Tests.hs | {-# LANGUAGE OverloadedStrings #-}
module Zenacy.HTML.Internal.Oper.Tests
( testOper
) where
import Zenacy.HTML
import Control.Monad
( (>=>)
)
import Data.Map
( Map
)
import qualified Data.Map as Map
( fromList
, lookup
)
import Data.Maybe
( fromJust
)
import Test.Framework
( Test
, testGroup
)
import Test.Framework.Providers.HUnit
( testCase
)
import Test.HUnit
( assertBool
, assertEqual
, assertFailure
)
-- | Test group covering the HTML operation helpers in
-- "Zenacy.HTML.Internal.Oper".
testOper :: Test
testOper = testGroup "Zenacy.HTML.Internal.Oper" tests
  where
    tests = [ testStyle ]
-- | Shared fixture: the parsed @\<body\>@ element used by the test cases.
-- The paragraphs carry @style@ attributes covering the parser edge cases
-- exercised in 'testStyle': missing trailing semicolons, extra whitespace,
-- a property with no value, blank style strings, stray separators, and
-- @url()@ forms (empty and quoted-empty).
h = fromJust
  . htmlDocBody
  $ htmlParseEasy
    "<body>\
    \<h1></h1>\
    \<p><a href='bbb'>AAA</a><span></span><br><img></p>\
    \<p id=\"1\" style=\"display:none\"></p>\
    \<p id=\"2\" style=\"display:none;\"></p>\
    \<p id=\"3\" style=\"display: none;\"></p>\
    \<p id=\"4\" style=\" display: none; \"></p>\
    \<p id=\"5\" style=\" display\"></p>\
    \<p id=\"6\" style=\" \"></p>\
    \<p id=\"7\" style=\" display: ; \"></p>\
    \<p id=\"8\" style=\"; ;display: none;;\"></p>\
    \<div id=\"9\" style=\"background-image:url();\"></div>\
    \<div id=\"10\" style=\"background-image:url('');\"></div>\
    \</body>"
-- | Find the element with the given @id@ attribute inside a node tree.
-- The element is assumed to exist ('fromJust' otherwise throws).
f x = fromJust . htmlElemFindID x
-- | Exercises style-attribute parsing ('htmlElemStyles') and CSS url()
-- extraction ('htmlElemStyleParseURL') against the fixture body 'h'.
testStyle :: Test
testStyle = testCase "oper style" $ do
  -- Variants of "display:none" (optional semicolon, optional spaces,
  -- stray empty declarations) all parse to the same map.
  assertEqual "TEST 1" m0 $ g "1"
  assertEqual "TEST 2" m0 $ g "2"
  assertEqual "TEST 3" m0 $ g "3"
  assertEqual "TEST 4" m0 $ g "4"
  -- A property with no value parses as an empty value, and an
  -- all-whitespace style parses as the empty map.
  assertEqual "TEST 5" m1 $ g "5"
  assertEqual "TEST 6" m2 $ g "6"
  assertEqual "TEST 7" m1 $ g "7"
  assertEqual "TEST 8" m0 $ g "8"
  -- url() forms: the raw value is preserved, and htmlElemStyleParseURL
  -- extracts the (empty) URL from both url() and url('').
  assertEqual "TEST 9"
    (Map.fromList [("background-image","url()")])
    (g "9")
  assertEqual "TEST 10"
    (Just "")
    ((Map.lookup "background-image" >=> htmlElemStyleParseURL) (g "9"))
  assertEqual "TEST 11"
    (Just "")
    ((Map.lookup "background-image" >=> htmlElemStyleParseURL) (g "10"))
  where
    -- Parsed style map of the fixture element with the given id.
    g x = htmlElemStyles $ f x h
    m0 = Map.fromList [("display","none")]
    m1 = Map.fromList [("display","")]
    m2 = Map.fromList []
| null | https://raw.githubusercontent.com/mlcfp/zenacy-html/b4af86fecc6fbbe1c7501e2fa75e6aa28206ebcb/test/Zenacy/HTML/Internal/Oper/Tests.hs | haskell | # LANGUAGE OverloadedStrings # |
module Zenacy.HTML.Internal.Oper.Tests
( testOper
) where
import Zenacy.HTML
import Control.Monad
( (>=>)
)
import Data.Map
( Map
)
import qualified Data.Map as Map
( fromList
, lookup
)
import Data.Maybe
( fromJust
)
import Test.Framework
( Test
, testGroup
)
import Test.Framework.Providers.HUnit
( testCase
)
import Test.HUnit
( assertBool
, assertEqual
, assertFailure
)
testOper :: Test
testOper = testGroup "Zenacy.HTML.Internal.Oper"
[ testStyle
]
h = fromJust
. htmlDocBody
$ htmlParseEasy
"<body>\
\<h1></h1>\
\<p><a href='bbb'>AAA</a><span></span><br><img></p>\
\<p id=\"1\" style=\"display:none\"></p>\
\<p id=\"2\" style=\"display:none;\"></p>\
\<p id=\"3\" style=\"display: none;\"></p>\
\<p id=\"4\" style=\" display: none; \"></p>\
\<p id=\"5\" style=\" display\"></p>\
\<p id=\"6\" style=\" \"></p>\
\<p id=\"7\" style=\" display: ; \"></p>\
\<p id=\"8\" style=\"; ;display: none;;\"></p>\
\<div id=\"9\" style=\"background-image:url();\"></div>\
\<div id=\"10\" style=\"background-image:url('');\"></div>\
\</body>"
f x = fromJust . htmlElemFindID x
testStyle :: Test
testStyle = testCase "oper style" $ do
assertEqual "TEST 1" m0 $ g "1"
assertEqual "TEST 2" m0 $ g "2"
assertEqual "TEST 3" m0 $ g "3"
assertEqual "TEST 4" m0 $ g "4"
assertEqual "TEST 5" m1 $ g "5"
assertEqual "TEST 6" m2 $ g "6"
assertEqual "TEST 7" m1 $ g "7"
assertEqual "TEST 8" m0 $ g "8"
assertEqual "TEST 9"
(Map.fromList [("background-image","url()")])
(g "9")
assertEqual "TEST 10"
(Just "")
((Map.lookup "background-image" >=> htmlElemStyleParseURL) (g "9"))
assertEqual "TEST 11"
(Just "")
((Map.lookup "background-image" >=> htmlElemStyleParseURL) (g "10"))
where
g x = htmlElemStyles $ f x h
m0 = Map.fromList [("display","none")]
m1 = Map.fromList [("display","")]
m2 = Map.fromList []
|
2d6d242c043402f97eb7faf039f96830a7018ba0f925afd8d668fe2c1f793635 | awolven/cl-vulkan | memory-pool.lisp | (in-package :vk)
;; Base class for fixed-layout buffer memory pools.  A pool owns one
;; large VkBuffer backed by a single device-memory allocation, carved
;; into small/medium/large fixed-size regions that are handed out as
;; MEMORY-RESOURCE structs and returned via %RELEASE-MEMORY.
(defclass memory-pool-mixin ()
  ((name)                                  ; reserved; never set here -- TODO confirm intended use
   (lock :initform (bt:make-lock))         ; guards the free lists and ALLOCATED
   (memory-allocation :reader allocation)  ; backing ALLOCATED-MEMORY object
   (allocated :initform (make-hash-table)) ; resources currently checked out (resource -> resource)
   (small-free :initform nil :accessor memory-pool-small-free)
   (medium-free :initform nil :accessor memory-pool-medium-free)
   (large-free :initform nil :accessor memory-pool-large-free)
   (buffer :accessor memory-pool-buffer))) ; the pool-wide BUFFER object

;; Subclasses only select the Vulkan buffer-usage bit for the backing
;; buffer; see MEMORY-POOL-BUFFER-USAGE.
(defclass vertex-buffer-memory-pool (memory-pool-mixin) ())
(defclass index-buffer-memory-pool (memory-pool-mixin) ())
(defclass storage-buffer-memory-pool (memory-pool-mixin) ())
;; Map a pool class to the VkBufferUsageFlagBits value used when
;; creating its backing buffer.
(defmethod memory-pool-buffer-usage ((memory-pool vertex-buffer-memory-pool))
  VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)

(defmethod memory-pool-buffer-usage ((memory-pool index-buffer-memory-pool))
  VK_BUFFER_USAGE_INDEX_BUFFER_BIT)

(defmethod memory-pool-buffer-usage ((memory-pool storage-buffer-memory-pool))
  VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)
;; Before standard initialization: create the pool's single backing
;; buffer, allocate one VkDeviceMemory block of SIZE bytes, and bind the
;; buffer to it.  PROPERTIES defaults to host-visible + host-coherent so
;; that pool regions can be mapped and written from the CPU.
(defmethod initialize-instance :before ((instance memory-pool-mixin) &rest initargs
                                        &key device size
                                          (properties
                                            (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                                                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)))
  (declare (ignore initargs))
  (let ((big-buffer (create-buffer-1 device size (memory-pool-buffer-usage instance))))
    ;; Query the buffer's memory requirements to pick a compatible
    ;; memory type index for the allocation.
    (with-vk-struct (p-requirements VkMemoryRequirements)
      (vkGetBufferMemoryRequirements (h device) (h big-buffer) p-requirements)
      (with-vk-struct
          (p-alloc-info VkMemoryAllocateInfo)
        (with-foreign-slots ((%vk::allocationSize
                              %vk::memoryTypeIndex)
                             p-alloc-info
                             (:struct VkMemoryAllocateInfo))
          (setf %vk::allocationSize size
                %vk::memoryTypeIndex (find-memory-type
                                      (physical-device device)
                                      (foreign-slot-value
                                       p-requirements
                                       '(:struct VkMemoryRequirements)
                                       '%vk::memoryTypeBits)
                                      properties))
          ;; Allocate the device memory and wrap the raw handle in an
          ;; ALLOCATED-MEMORY object stored on the pool instance.
          (with-foreign-object (p-buffer-memory 'VkDeviceMemory)
            (check-vk-result (vkAllocateMemory (h device) p-alloc-info (h (allocator device)) p-buffer-memory))
            (setf (slot-value instance 'memory-allocation)
                  (make-instance 'allocated-memory :handle (mem-aref p-buffer-memory 'VkDeviceMemory)
                                 :device device
                                 :allocator (allocator device)
                                 :alignment (foreign-slot-value
                                             p-requirements
                                             '(:struct VkMemoryRequirements)
                                             '%vk::alignment)
                                 :size size)))
          ;; Bind the buffer to the allocation and remember both.
          (setf (allocated-memory big-buffer) (slot-value instance 'memory-allocation))
          (bind-buffer-memory device big-buffer (slot-value instance 'memory-allocation))
          (setf (memory-pool-buffer instance) big-buffer)))))
  (values))
;; One region of a pool's backing buffer: the owning pool, the byte
;; OFFSET into the pool buffer, and the region SIZE in bytes.
(defstruct memory-resource
  (memory-pool)
  (offset)
  (size))

;; The three size classes share the layout; the struct subtype alone
;; tells %RELEASE-MEMORY which free list a region belongs to.
(defstruct (memory-resource-small
             (:include memory-resource)))

(defstruct (memory-resource-medium
             (:include memory-resource)))

(defstruct (memory-resource-large
             (:include memory-resource)))
;; Region size classes: 128 KiB x 1024, 512 KiB x 512, and 2 MiB x 64
;; (512 MiB total per pool; see *MEMORY-POOL-SIZE*).
(defparameter *memory-pool-small-buffer-size* (expt 2 17))
(defparameter *memory-pool-small-buffer-count* 1024)
(defparameter *memory-pool-medium-buffer-size* (expt 2 19))
(defparameter *memory-pool-medium-buffer-count* 512)
(defparameter *memory-pool-large-buffer-size* (expt 2 21))
(defparameter *memory-pool-large-buffer-count* 64)
;; with these current numbers, for an index buffer and a vertex buffer
;; the system tries to allocate 1 G from the video card for the memory pools
;; Total bytes needed to hold every region of every size class.
(defparameter *memory-pool-size*
  (+ (* *memory-pool-small-buffer-count* *memory-pool-small-buffer-size*)
     (* *memory-pool-medium-buffer-count* *memory-pool-medium-buffer-size*)
     (* *memory-pool-large-buffer-count* *memory-pool-large-buffer-size*)))
;; The three initializers below differ only in the pool class and the
;; APP accessor they store into.  Each creates a *MEMORY-POOL-SIZE*
;; pool on the app's default logical device with host-visible,
;; host-coherent memory (the same value the pool class defaults to).
(defun initialize-vertex-buffer-memory-pool (app)
  (setf (vertex-buffer-memory-pool app)
        (make-instance 'vertex-buffer-memory-pool
                       :size *memory-pool-size*
                       :device (default-logical-device app)
                       :properties (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                                           VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))))

(defun initialize-index-buffer-memory-pool (app)
  (setf (index-buffer-memory-pool app)
        (make-instance 'index-buffer-memory-pool
                       :size *memory-pool-size*
                       :device (default-logical-device app)
                       :properties (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                                           VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))))

(defun initialize-storage-buffer-memory-pool (app)
  (setf (storage-buffer-memory-pool app)
        (make-instance 'storage-buffer-memory-pool
                       :size *memory-pool-size*
                       :device (default-logical-device app)
                       :properties (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
                                           VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))))
;; After standard initialization: populate the free lists.  The backing
;; buffer is partitioned contiguously: all small regions first, then all
;; medium regions, then all large regions, so each region's OFFSET is
;; its index times its class size plus the total bytes of the preceding
;; classes.
(defmethod initialize-instance :after ((instance memory-pool-mixin)
                                       &rest initargs)
  (declare (ignore initargs))
  (loop for i from 0 below *memory-pool-small-buffer-count*
        do (push (make-memory-resource-small
                  :memory-pool instance
                  :offset (* i *memory-pool-small-buffer-size*)
                  :size *memory-pool-small-buffer-size*)
                 (memory-pool-small-free instance)))
  (loop for i from 0 below *memory-pool-medium-buffer-count*
        do (push (make-memory-resource-medium
                  :memory-pool instance
                  :offset (+ (* i *memory-pool-medium-buffer-size*)
                             ;; skip past the small-region area
                             (* *memory-pool-small-buffer-count*
                                *memory-pool-small-buffer-size*))
                  :size *memory-pool-medium-buffer-size*)
                 (memory-pool-medium-free instance)))
  (loop for i from 0 below *memory-pool-large-buffer-count*
        do (push (make-memory-resource-large
                  :memory-pool instance
                  :offset (+ (* i *memory-pool-large-buffer-size*)
                             ;; skip past the small- and medium-region areas
                             (* *memory-pool-small-buffer-count*
                                *memory-pool-small-buffer-size*)
                             (* *memory-pool-medium-buffer-count*
                                *memory-pool-medium-buffer-size*))
                  :size *memory-pool-large-buffer-size*)
                 (memory-pool-large-free instance)))
  (values))
;; Each %ACQUIRE-MEMORY-* pops one region of its size class off the
;; pool's free list and records it in the ALLOCATED table, all under the
;; pool lock so concurrent callers never receive the same region.
;; Returns the MEMORY-RESOURCE, or NIL when that class is exhausted.
(defun %acquire-memory-small (memory-pool)
  (declare (type memory-pool-mixin memory-pool))
  (with-slots (lock allocated) memory-pool
    (bt:with-lock-held (lock)
      (let ((resource (pop (memory-pool-small-free memory-pool))))
        (when resource
          (setf (gethash resource allocated) resource))))))

(defun %acquire-memory-medium (memory-pool)
  (declare (type memory-pool-mixin memory-pool))
  (with-slots (lock allocated) memory-pool
    (bt:with-lock-held (lock)
      (let ((resource (pop (memory-pool-medium-free memory-pool))))
        (when resource
          (setf (gethash resource allocated) resource))))))

(defun %acquire-memory-large (memory-pool)
  (declare (type memory-pool-mixin memory-pool))
  (with-slots (lock allocated) memory-pool
    (bt:with-lock-held (lock)
      (let ((resource (pop (memory-pool-large-free memory-pool))))
        (when resource
          (setf (gethash resource allocated) resource))))))
(defun %release-memory (memory-pool memory)
  "Return MEMORY to MEMORY-POOL, pushing it back on the free list for
its size class.  Signals an error when MEMORY is not currently checked
out of this pool (double release, or a resource from another pool)."
  (declare (type memory-pool-mixin memory-pool))
  (declare (type memory-resource memory))
  (bt:with-lock-held ((slot-value memory-pool 'lock))
    (let ((result (gethash memory (slot-value memory-pool 'allocated))))
      (if result
          (progn
            (remhash memory (slot-value memory-pool 'allocated))
            ;; The struct subtype decides which free list the region rejoins.
            (etypecase memory
              (memory-resource-small (push memory (memory-pool-small-free memory-pool)))
              (memory-resource-medium (push memory (memory-pool-medium-free memory-pool)))
              (memory-resource-large (push memory (memory-pool-large-free memory-pool))))
            (values))
          ;; Fixed error text (was the ungrammatical "Memory was not
          ;; acquired out: ~S").
          (error "Memory resource was not acquired from this pool: ~S" memory)))))
(defun %acquire-memory-sized (memory-pool size properties)
  "Acquire a region of MEMORY-POOL at least SIZE bytes long.
Tries the smallest size class that fits and falls back to larger
classes when that class is exhausted.  PROPERTIES is accepted for
interface compatibility but unused: the pool's memory properties are
fixed at pool creation.  Signals an error when SIZE exceeds the
largest region size (previously this fell through to the generic
exhaustion error) or when every candidate class is exhausted."
  (declare (ignore properties))
  (when (> size *memory-pool-large-buffer-size*)
    (error "Requested size ~D exceeds the largest memory pool region (~D bytes)."
           size *memory-pool-large-buffer-size*))
  (let ((free
          (cond ((<= size *memory-pool-small-buffer-size*)
                 (or (%acquire-memory-small memory-pool)
                     (%acquire-memory-medium memory-pool)
                     (%acquire-memory-large memory-pool)))
                ((<= size *memory-pool-medium-buffer-size*)
                 (or (%acquire-memory-medium memory-pool)
                     (%acquire-memory-large memory-pool)))
                (t
                 (%acquire-memory-large memory-pool)))))
    (unless free (error "could not get memory."))
    free))
;; Thin wrappers dispatching to the app's per-usage memory pool.
(defun acquire-vertex-memory-sized (app size properties)
  (%acquire-memory-sized (vertex-buffer-memory-pool app) size properties))

(defun acquire-index-memory-sized (app size properties)
  (%acquire-memory-sized (index-buffer-memory-pool app) size properties))

(defun acquire-storage-memory-sized (app size properties)
  (%acquire-memory-sized (storage-buffer-memory-pool app) size properties))

(defun release-vertex-memory (app memory-resource)
  (%release-memory (vertex-buffer-memory-pool app) memory-resource))

(defun release-index-memory (app memory-resource)
  (%release-memory (index-buffer-memory-pool app) memory-resource))

(defun release-storage-memory (app memory-resource)
  (%release-memory (storage-buffer-memory-pool app) memory-resource))
(defun %destroy-memory-pool (pool)
  "Release POOL's Vulkan resources: destroy the backing buffer, then
free the device memory that backed it."
  (let ((buffer (memory-pool-buffer pool)))
    (%vk:vkdestroybuffer (h (device buffer))
                         (h buffer)
                         (h (allocator buffer)))
    (let ((memory (allocated-memory buffer)))
      (%vk:vkfreememory (h (device memory))
                        (h memory)
                        (h (allocator memory))))))

(defun destroy-memory-pools (app)
  "Tear down the memory pools owned by APP.
Fix: the storage-buffer pool is now destroyed as well; previously only
the vertex and index pools were released, leaking the storage pool's
buffer and device memory.  NOTE(review): assumes all three pools were
initialized before teardown -- confirm against the app lifecycle."
  (%destroy-memory-pool (vertex-buffer-memory-pool app))
  (%destroy-memory-pool (index-buffer-memory-pool app))
  (%destroy-memory-pool (storage-buffer-memory-pool app))
  (values))
| null | https://raw.githubusercontent.com/awolven/cl-vulkan/988e0a644a15a45298572d08a13bcc3f55a3de46/src/memory-pool.lisp | lisp | with these current numbers, for an index buffer and a vertex buffer
| (in-package :vk)
(defclass memory-pool-mixin ()
((name)
(lock :initform (bt:make-lock))
(memory-allocation :reader allocation)
(allocated :initform (make-hash-table))
(small-free :initform nil :accessor memory-pool-small-free)
(medium-free :initform nil :accessor memory-pool-medium-free)
(large-free :initform nil :accessor memory-pool-large-free)
(buffer :accessor memory-pool-buffer)))
(defclass vertex-buffer-memory-pool (memory-pool-mixin) ())
(defclass index-buffer-memory-pool (memory-pool-mixin) ())
(defclass storage-buffer-memory-pool (memory-pool-mixin) ())
(defmethod memory-pool-buffer-usage ((memory-pool vertex-buffer-memory-pool))
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
(defmethod memory-pool-buffer-usage ((memory-pool index-buffer-memory-pool))
VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
(defmethod memory-pool-buffer-usage ((memory-pool storage-buffer-memory-pool))
VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)
(defmethod initialize-instance :before ((instance memory-pool-mixin) &rest initargs
&key device size
(properties
(logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)))
(declare (ignore initargs))
(let ((big-buffer (create-buffer-1 device size (memory-pool-buffer-usage instance))))
(with-vk-struct (p-requirements VkMemoryRequirements)
(vkGetBufferMemoryRequirements (h device) (h big-buffer) p-requirements)
(with-vk-struct
(p-alloc-info VkMemoryAllocateInfo)
(with-foreign-slots ((%vk::allocationSize
%vk::memoryTypeIndex)
p-alloc-info
(:struct VkMemoryAllocateInfo))
(setf %vk::allocationSize size
%vk::memoryTypeIndex (find-memory-type
(physical-device device)
(foreign-slot-value
p-requirements
'(:struct VkMemoryRequirements)
'%vk::memoryTypeBits)
properties))
(with-foreign-object (p-buffer-memory 'VkDeviceMemory)
(check-vk-result (vkAllocateMemory (h device) p-alloc-info (h (allocator device)) p-buffer-memory))
(setf (slot-value instance 'memory-allocation)
(make-instance 'allocated-memory :handle (mem-aref p-buffer-memory 'VkDeviceMemory)
:device device
:allocator (allocator device)
:alignment (foreign-slot-value
p-requirements
'(:struct VkMemoryRequirements)
'%vk::alignment)
:size size)))
(setf (allocated-memory big-buffer) (slot-value instance 'memory-allocation))
(bind-buffer-memory device big-buffer (slot-value instance 'memory-allocation))
(setf (memory-pool-buffer instance) big-buffer)))))
(values))
(defstruct memory-resource
(memory-pool)
(offset)
(size))
(defstruct (memory-resource-small
(:include memory-resource)))
(defstruct (memory-resource-medium
(:include memory-resource)))
(defstruct (memory-resource-large
(:include memory-resource)))
(defparameter *memory-pool-small-buffer-size* (expt 2 17))
(defparameter *memory-pool-small-buffer-count* 1024)
(defparameter *memory-pool-medium-buffer-size* (expt 2 19))
(defparameter *memory-pool-medium-buffer-count* 512)
(defparameter *memory-pool-large-buffer-size* (expt 2 21))
(defparameter *memory-pool-large-buffer-count* 64)
the system tries to allocate 1 G from the video card for the memory pools
(defparameter *memory-pool-size*
(+ (* *memory-pool-small-buffer-count* *memory-pool-small-buffer-size*)
(* *memory-pool-medium-buffer-count* *memory-pool-medium-buffer-size*)
(* *memory-pool-large-buffer-count* *memory-pool-large-buffer-size*)))
(defun initialize-vertex-buffer-memory-pool (app)
(setf (vertex-buffer-memory-pool app)
(make-instance 'vertex-buffer-memory-pool
:size *memory-pool-size*
:device (default-logical-device app)
:properties (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))))
(defun initialize-index-buffer-memory-pool (app)
(setf (index-buffer-memory-pool app)
(make-instance 'index-buffer-memory-pool
:size *memory-pool-size*
:device (default-logical-device app)
:properties (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))))
(defun initialize-storage-buffer-memory-pool (app)
(setf (storage-buffer-memory-pool app)
(make-instance 'storage-buffer-memory-pool
:size *memory-pool-size*
:device (default-logical-device app)
:properties (logior VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))))
(defmethod initialize-instance :after ((instance memory-pool-mixin)
&rest initargs)
(declare (ignore initargs))
(loop for i from 0 below *memory-pool-small-buffer-count*
do (push (make-memory-resource-small
:memory-pool instance
:offset (* i *memory-pool-small-buffer-size*)
:size *memory-pool-small-buffer-size*)
(memory-pool-small-free instance)))
(loop for i from 0 below *memory-pool-medium-buffer-count*
do (push (make-memory-resource-medium
:memory-pool instance
:offset (+ (* i *memory-pool-medium-buffer-size*)
(* *memory-pool-small-buffer-count*
*memory-pool-small-buffer-size*))
:size *memory-pool-medium-buffer-size*)
(memory-pool-medium-free instance)))
(loop for i from 0 below *memory-pool-large-buffer-count*
do (push (make-memory-resource-large
:memory-pool instance
:offset (+ (* i *memory-pool-large-buffer-size*)
(* *memory-pool-small-buffer-count*
*memory-pool-small-buffer-size*)
(* *memory-pool-medium-buffer-count*
*memory-pool-medium-buffer-size*))
:size *memory-pool-large-buffer-size*)
(memory-pool-large-free instance)))
(values))
(defun %acquire-memory-small (memory-pool)
(declare (type memory-pool-mixin memory-pool))
(bt:with-lock-held ((slot-value memory-pool 'lock))
(let ((free (pop (memory-pool-small-free memory-pool))))
(when free
(setf (gethash free (slot-value memory-pool 'allocated)) free)))))
(defun %acquire-memory-medium (memory-pool)
(declare (type memory-pool-mixin memory-pool))
(bt:with-lock-held ((slot-value memory-pool 'lock))
(let ((free (pop (memory-pool-medium-free memory-pool))))
(when free
(setf (gethash free (slot-value memory-pool 'allocated)) free)))))
(defun %acquire-memory-large (memory-pool)
(declare (type memory-pool-mixin memory-pool))
(bt:with-lock-held ((slot-value memory-pool 'lock))
(let ((free (pop (memory-pool-large-free memory-pool))))
(when free
(setf (gethash free (slot-value memory-pool 'allocated)) free)))))
(defun %release-memory (memory-pool memory)
(declare (type memory-pool-mixin memory-pool))
(declare (type memory-resource memory))
(bt:with-lock-held ((slot-value memory-pool 'lock))
(let ((result (gethash memory (slot-value memory-pool 'allocated))))
(if result
(progn
(remhash memory (slot-value memory-pool 'allocated))
(etypecase memory
(memory-resource-small (push memory (memory-pool-small-free memory-pool)))
(memory-resource-medium (push memory (memory-pool-medium-free memory-pool)))
(memory-resource-large (push memory (memory-pool-large-free memory-pool))))
(values))
(error "Memory was not acquired out: ~S" memory)))))
(defun %acquire-memory-sized (memory-pool size properties)
(declare (ignore properties))
(let ((free
(cond ((<= size *memory-pool-small-buffer-size*)
(or (%acquire-memory-small memory-pool)
(%acquire-memory-medium memory-pool)
(%acquire-memory-large memory-pool)))
((<= size *memory-pool-medium-buffer-size*)
(or (%acquire-memory-medium memory-pool)
(%acquire-memory-large memory-pool)))
((<= size *memory-pool-large-buffer-size*)
(%acquire-memory-large memory-pool)))))
(unless free (error "could not get memory."))
free))
(defun acquire-vertex-memory-sized (app size properties)
(%acquire-memory-sized (vertex-buffer-memory-pool app) size properties))
(defun acquire-index-memory-sized (app size properties)
(%acquire-memory-sized (index-buffer-memory-pool app) size properties))
(defun acquire-storage-memory-sized (app size properties)
(%acquire-memory-sized (storage-buffer-memory-pool app) size properties))
(defun release-vertex-memory (app memory-resource)
(%release-memory (vertex-buffer-memory-pool app) memory-resource))
(defun release-index-memory (app memory-resource)
(%release-memory (index-buffer-memory-pool app) memory-resource))
(defun release-storage-memory (app memory-resource)
(%release-memory (storage-buffer-memory-pool app) memory-resource))
(defun destroy-memory-pools (app)
(let ((vmp (vertex-buffer-memory-pool app))
(imp (index-buffer-memory-pool app)))
(%vk:vkdestroybuffer (h (device (memory-pool-buffer vmp)))
(h (memory-pool-buffer vmp))
(h (allocator (memory-pool-buffer vmp))))
(%vk:vkfreememory (h (device (allocated-memory (memory-pool-buffer vmp))))
(h (allocated-memory (memory-pool-buffer vmp)))
(h (allocator (allocated-memory (memory-pool-buffer vmp)))))
(%vk:vkdestroybuffer (h (device (memory-pool-buffer imp)))
(h (memory-pool-buffer imp))
(h (allocator (memory-pool-buffer imp))))
(%vk:vkfreememory (h (device (allocated-memory (memory-pool-buffer imp))))
(h (allocated-memory (memory-pool-buffer imp)))
(h (allocator (allocated-memory (memory-pool-buffer imp)))))
(values)))
|
8a3e2f54fb1eb7d0499b96f0fd098383593657954bc53ef2dd7a66ad19fb3dfe | census-instrumentation/opencensus-erlang | oc_sweeper_SUITE.erl |
%%% ---------------------------------------------------------------------------
%%% @doc
%%% @end
%%% ---------------------------------------------------------------------------
-module(oc_sweeper_SUITE).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include("oc_test_utils.hrl").
-include("opencensus.hrl").
%% @doc Test cases executed by this suite, one per sweeper scenario.
all() ->
  [storage_size,
   drop,
   finish,
   failed_attribute_and_finish].
%% @doc Load (but do not start) opencensus so each test case can set
%% its sweeper environment before startup.
init_per_suite(Config) ->
  application:load(opencensus),
  Config.

end_per_suite(_Config) ->
  application:unload(opencensus),
  ok.
%% @doc Configure the span sweeper and start opencensus for each case.
%% storage_size uses the finish strategy with a bounded span table;
%% every other case uses the sweeper strategy named by the case itself.
%% The duplicated env/startup sequence is factored into
%% start_with_sweeper/1.
init_per_testcase(storage_size, Config) ->
  start_with_sweeper(#{interval => 250,
                       strategy => finish,
                       span_ttl => 500,
                       storage_size => 100}),
  Config;
init_per_testcase(Type, Config) ->
  start_with_sweeper(#{interval => 250,
                       strategy => Type,
                       span_ttl => 500}),
  Config.

%% Shared startup: install the sweeper config, report spans straight to
%% the test process, and start the application tree.
start_with_sweeper(Sweeper) ->
  application:set_env(opencensus, sweeper, Sweeper),
  application:set_env(opencensus, send_interval_ms, 1),
  application:set_env(opencensus, reporters, [{oc_reporter_pid, self()}]),
  {ok, _} = application:ensure_all_started(opencensus).

end_per_testcase(_, _Config) ->
  ok = application:stop(opencensus),
  ok.
%% @doc With a storage_size cap of 100, spans swept from the bounded
%% span table are not reported even under the finish strategy --
%% presumably the cap forces drops; TODO confirm against the sweeper
%% implementation.
storage_size(_Config) ->
  SpanName1 = <<"span-1">>,
  SpanCtx = oc_trace:start_span(SpanName1, undefined),
  ChildSpanName1 = <<"child-span-1">>,
  ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
  [ChildSpanData] = ets:lookup(?SPAN_TAB, ChildSpanCtx#span_ctx.span_id),
  ?assertEqual(ChildSpanName1, ChildSpanData#span.name),
  ?assertEqual(SpanCtx#span_ctx.span_id, ChildSpanData#span.parent_span_id),

  %% wait until the sweeper sweeps away the parent span
  ?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),

  %% sleep long enough that the reporter would have run again for sure
  timer:sleep(10),

  %% should be no reported spans
  ?assertEqual(no_span, receive
                          {span, #span{name=N}} when N =:= SpanName1 ->
                            got_span
                        after
                          0 ->
                            no_span
                        end).
%% @doc Drop strategy: a span finished normally (the child) is reported,
%% while a span swept away unfinished (the parent) is dropped silently.
%% Fix: the ct:fail message was ungrammatical ("Do not received any
%% message after 1s").
drop(_Config) ->
  SpanName1 = <<"span-1">>,
  SpanCtx = oc_trace:start_span(SpanName1, undefined),
  ChildSpanName1 = <<"child-span-1">>,
  ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
  [ChildSpanData] = ets:lookup(?SPAN_TAB, ChildSpanCtx#span_ctx.span_id),
  ?assertEqual(ChildSpanName1, ChildSpanData#span.name),
  ?assertEqual(SpanCtx#span_ctx.span_id, ChildSpanData#span.parent_span_id),

  oc_trace:finish_span(ChildSpanCtx),

  %% wait until the sweeper sweeps away the parent span
  ?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),

  %% finishing the already-swept parent must not produce a report
  oc_trace:finish_span(SpanCtx),

  receive
    {span, S=#span{name=Name}} when Name =:= ChildSpanName1 ->
      %% Verify the end time and duration are set when the span was finished
      ?assertMatch({ST, O} when is_integer(ST)
                                andalso is_integer(O), S#span.start_time),
      ?assertMatch({ST, O} when is_integer(ST)
                                andalso is_integer(O), S#span.end_time)
  after
    1000 -> ct:fail("Did not receive a reported span within 1s")
  end,

  %% sleep long enough that the reporter would have run again for sure
  timer:sleep(10),

  %% should be no reported span for span-1
  ?assertEqual(no_span, receive
                          {span, #span{name=N}} when N =:= SpanName1 ->
                            got_span
                        after
                          0 ->
                            no_span
                        end).
%% @doc Finish strategy: the sweeper finishes (and thus reports) the
%% expired parent span, so both parent and child arrive at the
%% reporter.  Fix: the ct:fail message was ungrammatical ("Do not
%% received any message after 1s").
finish(_Config) ->
  SpanName1 = <<"span-1">>,
  SpanCtx = oc_trace:start_span(SpanName1, undefined),
  ChildSpanName1 = <<"child-span-1">>,
  ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
  oc_trace:finish_span(ChildSpanCtx),

  %% wait until the sweeper sweeps away the parent span
  ?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),

  lists:foreach(fun(Name) ->
                    receive
                      {span, S=#span{name=Name}} ->
                        %% Verify the end time and duration are set when the span was finished
                        ?assertMatch({ST, O} when is_integer(ST)
                                                  andalso is_integer(O), S#span.start_time),
                        ?assertMatch({ST, O} when is_integer(ST)
                                                  andalso is_integer(O), S#span.end_time)
                    after
                      1000 -> ct:fail("Did not receive a reported span within 1s")
                    end
                end, [SpanName1, ChildSpanName1]).
%% @doc The failed_attribute_and_finish strategy tags the swept parent
%% span with the finished_by_sweeper attribute before reporting it.
%% Fix: the ct:fail message was ungrammatical ("Do not received any
%% message after 1s").
failed_attribute_and_finish(_Config) ->
  SpanName1 = <<"span-1">>,
  SpanCtx = oc_trace:start_span(SpanName1, undefined),
  ChildSpanName1 = <<"child-span-1">>,
  ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
  [ChildSpanData] = ets:lookup(?SPAN_TAB, ChildSpanCtx#span_ctx.span_id),
  ?assertEqual(ChildSpanName1, ChildSpanData#span.name),
  ?assertEqual(SpanCtx#span_ctx.span_id, ChildSpanData#span.parent_span_id),

  oc_trace:finish_span(ChildSpanCtx),

  %% wait until the sweeper sweeps away the parent span
  ?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),

  receive
    {span, S=#span{name=Name,
                   attributes=Attributes}} when Name =:= SpanName1 ->
      %% should have attribute finished_by_sweeper
      ?assertMatch(#{<<"finished_by_sweeper">> := true}, Attributes),
      %% Verify the end time and duration are set when the span was finished
      ?assertMatch({ST, O} when is_integer(ST)
                                andalso is_integer(O), S#span.start_time),
      ?assertMatch({ST, O} when is_integer(ST)
                                andalso is_integer(O), S#span.end_time)
  after
    1000 -> ct:fail("Did not receive a reported span within 1s")
  end.
| null | https://raw.githubusercontent.com/census-instrumentation/opencensus-erlang/7fb276ff73d677c00458922c9180df634f45e018/test/oc_sweeper_SUITE.erl | erlang | ---------------------------------------------------------------------------
@doc
@end
---------------------------------------------------------------------------
wait until the sweeper sweeps away the parent span
sleep long enough that the reporter would have run again for sure
should be no reported spans
wait until the sweeper sweeps away the parent span
Verify the end time and duration are set when the span was finished
sleep long enough that the reporter would have run again for sure
should be no reported span for span-1
wait until the sweeper sweeps away the parent span
Verify the end time and duration are set when the span was finished
wait until the sweeper sweeps away the parent span
should have attribute finished_by_sweeper
Verify the end time and duration are set when the span was finished |
-module(oc_sweeper_SUITE).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include("oc_test_utils.hrl").
-include("opencensus.hrl").
all() ->
[storage_size,
drop,
finish,
failed_attribute_and_finish].
init_per_suite(Config) ->
application:load(opencensus),
Config.
end_per_suite(_Config) ->
application:unload(opencensus),
ok.
init_per_testcase(storage_size, Config) ->
application:set_env(opencensus, sweeper, #{interval => 250,
strategy => finish,
span_ttl => 500,
storage_size => 100}),
application:set_env(opencensus, send_interval_ms, 1),
application:set_env(opencensus, reporters, [{oc_reporter_pid, self()}]),
{ok, _} = application:ensure_all_started(opencensus),
Config;
init_per_testcase(Type, Config) ->
application:set_env(opencensus, sweeper, #{interval => 250,
strategy => Type,
span_ttl => 500}),
application:set_env(opencensus, send_interval_ms, 1),
application:set_env(opencensus, reporters, [{oc_reporter_pid, self()}]),
{ok, _} = application:ensure_all_started(opencensus),
Config.
end_per_testcase(_, _Config) ->
ok = application:stop(opencensus),
ok.
storage_size(_Config) ->
SpanName1 = <<"span-1">>,
SpanCtx = oc_trace:start_span(SpanName1, undefined),
ChildSpanName1 = <<"child-span-1">>,
ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
[ChildSpanData] = ets:lookup(?SPAN_TAB, ChildSpanCtx#span_ctx.span_id),
?assertEqual(ChildSpanName1, ChildSpanData#span.name),
?assertEqual(SpanCtx#span_ctx.span_id, ChildSpanData#span.parent_span_id),
?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),
timer:sleep(10),
?assertEqual(no_span, receive
{span, #span{name=N}} when N =:= SpanName1 ->
got_span
after
0 ->
no_span
end).
drop(_Config) ->
SpanName1 = <<"span-1">>,
SpanCtx = oc_trace:start_span(SpanName1, undefined),
ChildSpanName1 = <<"child-span-1">>,
ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
[ChildSpanData] = ets:lookup(?SPAN_TAB, ChildSpanCtx#span_ctx.span_id),
?assertEqual(ChildSpanName1, ChildSpanData#span.name),
?assertEqual(SpanCtx#span_ctx.span_id, ChildSpanData#span.parent_span_id),
oc_trace:finish_span(ChildSpanCtx),
?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),
oc_trace:finish_span(SpanCtx),
receive
{span, S=#span{name=Name}} when Name =:= ChildSpanName1 ->
?assertMatch({ST, O} when is_integer(ST)
andalso is_integer(O), S#span.start_time),
?assertMatch({ST, O} when is_integer(ST)
andalso is_integer(O), S#span.end_time)
after
1000 -> ct:fail("Do not received any message after 1s")
end,
timer:sleep(10),
?assertEqual(no_span, receive
{span, #span{name=N}} when N =:= SpanName1 ->
got_span
after
0 ->
no_span
end).
finish(_Config) ->
SpanName1 = <<"span-1">>,
SpanCtx = oc_trace:start_span(SpanName1, undefined),
ChildSpanName1 = <<"child-span-1">>,
ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
oc_trace:finish_span(ChildSpanCtx),
?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),
lists:foreach(fun(Name) ->
receive
{span, S=#span{name=Name}} ->
?assertMatch({ST, O} when is_integer(ST)
andalso is_integer(O), S#span.start_time),
?assertMatch({ST, O} when is_integer(ST)
andalso is_integer(O), S#span.end_time)
after
1000 -> ct:fail("Do not received any message after 1s")
end
end, [SpanName1, ChildSpanName1]).
failed_attribute_and_finish(_Config) ->
SpanName1 = <<"span-1">>,
SpanCtx = oc_trace:start_span(SpanName1, undefined),
ChildSpanName1 = <<"child-span-1">>,
ChildSpanCtx = oc_trace:start_span(ChildSpanName1, SpanCtx),
[ChildSpanData] = ets:lookup(?SPAN_TAB, ChildSpanCtx#span_ctx.span_id),
?assertEqual(ChildSpanName1, ChildSpanData#span.name),
?assertEqual(SpanCtx#span_ctx.span_id, ChildSpanData#span.parent_span_id),
oc_trace:finish_span(ChildSpanCtx),
?UNTIL(ets:tab2list(?SPAN_TAB) =:= []),
receive
{span, S=#span{name=Name,
attributes=Attributes}} when Name =:= SpanName1 ->
?assertMatch(#{<<"finished_by_sweeper">> := true}, Attributes),
?assertMatch({ST, O} when is_integer(ST)
andalso is_integer(O), S#span.start_time),
?assertMatch({ST, O} when is_integer(ST)
andalso is_integer(O), S#span.end_time)
after
1000 -> ct:fail("Do not received any message after 1s")
end.
|
6d908a9e50e8ba8ecb9329441f4a36054cdfb39dc625b13e7b44530fb70f8420 | cyverse-archive/DiscoveryEnvironmentBackend | util.clj | (ns mescal.util
(:use [clojure.java.io :only [reader]]
[medley.core :only [find-first]]
[slingshot.slingshot :only [throw+]])
(:require [cheshire.core :as cheshire]
[clj-time.format :as tf]
[clojure.tools.logging :as log]
[clojure-commons.error-codes :as ce]))
(defn- assert-defined*
"Ensures that a symbol is non-nil."
[symbol-name symbol-value]
(when (nil? symbol-value)
(throw+ {:error_code ce/ERR_ILLEGAL_ARGUMENT
:reason (str symbol-name " is nil")})))
(defmacro assert-defined
"Ensures that zero or more symbols are defined."
[& syms]
`(do ~@(map (fn [sym] `(@#'assert-defined* ~(name sym) ~sym)) syms)))
(defn decode-json
"Parses a JSON stream or string."
[source]
(if (string? source)
(cheshire/decode source true)
(cheshire/decode-stream (reader source) true)))
(defn to-utc
"Converts a formatted timestamp to UTC."
[timestamp]
(when-not (nil? timestamp)
(->> (tf/parse (:date-time tf/formatters) timestamp)
(tf/unparse (:date-time tf/formatters)))))
(defn get-boolean
[value default]
(cond (nil? value) default
(string? value) (Boolean/parseBoolean value)
:else value))
(defn find-value
"Finds the value associated with a key in a map. The first non-nil value associated with one
of the given keys is returned. With the current implementation, the keys provided must be
keywords."
[m ks]
(find-first (complement nil?) ((apply juxt ks) m)))
(defn get-enum-values
[value-obj]
(find-value value-obj [:enumValues :enum_values]))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/libs/mescal/src/mescal/util.clj | clojure | (ns mescal.util
(:use [clojure.java.io :only [reader]]
[medley.core :only [find-first]]
[slingshot.slingshot :only [throw+]])
(:require [cheshire.core :as cheshire]
[clj-time.format :as tf]
[clojure.tools.logging :as log]
[clojure-commons.error-codes :as ce]))
(defn- assert-defined*
"Ensures that a symbol is non-nil."
[symbol-name symbol-value]
(when (nil? symbol-value)
(throw+ {:error_code ce/ERR_ILLEGAL_ARGUMENT
:reason (str symbol-name " is nil")})))
(defmacro assert-defined
"Ensures that zero or more symbols are defined."
[& syms]
`(do ~@(map (fn [sym] `(@#'assert-defined* ~(name sym) ~sym)) syms)))
(defn decode-json
"Parses a JSON stream or string."
[source]
(if (string? source)
(cheshire/decode source true)
(cheshire/decode-stream (reader source) true)))
(defn to-utc
"Converts a formatted timestamp to UTC."
[timestamp]
(when-not (nil? timestamp)
(->> (tf/parse (:date-time tf/formatters) timestamp)
(tf/unparse (:date-time tf/formatters)))))
(defn get-boolean
[value default]
(cond (nil? value) default
(string? value) (Boolean/parseBoolean value)
:else value))
(defn find-value
"Finds the value associated with a key in a map. The first non-nil value associated with one
of the given keys is returned. With the current implementation, the keys provided must be
keywords."
[m ks]
(find-first (complement nil?) ((apply juxt ks) m)))
(defn get-enum-values
[value-obj]
(find-value value-obj [:enumValues :enum_values]))
| |
cf106583e742b4befda43a637ad701b4c8a1ce0aca468b684d390c97fb292752 | armedbear/abcl | closure-serialization.lisp | compiler-tests.lisp
;;;
Copyright ( C ) 2010
;;;
$ Id$
;;;
;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
of the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
(in-package #:abcl.test.lisp)
(require '#:java)
(defun f (x)
(flet ((g (y) (cons x y)))
(let* ((b (java:jnew "java.io.ByteArrayOutputStream"))
(o (java:jnew "java.io.ObjectOutputStream" b)))
(java:jcall "writeObject" o #'g)
(java:jcall "flush" o)
(java:jcall "toByteArray" b))))
(deftest serialization-of-closure
(let* ((b (java:jnew "java.io.ByteArrayInputStream" (f 3)))
(i (java:jnew "java.io.ObjectInputStream" b)))
(fmakunbound 'f)
(funcall (java:jcall "readObject" i) T))
'(3 . T)) | null | https://raw.githubusercontent.com/armedbear/abcl/36a4b5994227d768882ff6458b3df9f79caac664/test/lisp/abcl/closure-serialization.lisp | lisp |
This program is free software; you can redistribute it and/or
either version 2
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software | compiler-tests.lisp
Copyright ( C ) 2010
$ Id$
modify it under the terms of the GNU General Public License
of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
(in-package #:abcl.test.lisp)
(require '#:java)
(defun f (x)
(flet ((g (y) (cons x y)))
(let* ((b (java:jnew "java.io.ByteArrayOutputStream"))
(o (java:jnew "java.io.ObjectOutputStream" b)))
(java:jcall "writeObject" o #'g)
(java:jcall "flush" o)
(java:jcall "toByteArray" b))))
(deftest serialization-of-closure
(let* ((b (java:jnew "java.io.ByteArrayInputStream" (f 3)))
(i (java:jnew "java.io.ObjectInputStream" b)))
(fmakunbound 'f)
(funcall (java:jcall "readObject" i) T))
'(3 . T)) |
6014ddd7d22e137970e0608bf3f7aaea378d197d5d99dc2282d72360b0d4144c | musically-ut/haskell-dgim | DGIM.hs | module Data.Stream.Algorithms.DGIM (
-- * Type (no constructors)
DGIM
-- * External interface
, mkDGIM
, insert
, insert_
, querySince
, queryAll
, queryLen
) where
import Data.Stream.Algorithms.DGIM.Internal
| null | https://raw.githubusercontent.com/musically-ut/haskell-dgim/3efa4e49020ebd04d429bafb59e26e02284b6599/src/Data/Stream/Algorithms/DGIM.hs | haskell | * Type (no constructors)
* External interface | module Data.Stream.Algorithms.DGIM (
DGIM
, mkDGIM
, insert
, insert_
, querySince
, queryAll
, queryLen
) where
import Data.Stream.Algorithms.DGIM.Internal
|
a86e0ec0d906e2292af3616a73831f905c97e41d5635042757b23f87f13f17cb | european-lisp-symposium/els-web | global.lisp | (in-package #:els-web-editions)
(define-edition "global")
(in-package "global")
(define-person (:family-name "Verna"
:given-name "Didier")
:role (:administrator)
:email ""
:website "-lisp-symposium.org"
:organization "EPITA"
:address (:country "France"))
(define-person (:family-name "Hafner"
:given-name "Nicolas")
:photo ""
:email ""
:website ""
:nickname "Shinmera"
:organization "Shirakumo.org"
:address (:country "Switzerland"))
(define-sponsor acm
:website "/"
:logo "../static/logos/acm.svg")
(define-sponsor cl-foundation
:website "-foundation.org/"
:logo "../static/logos/cl-foundation.png")
(define-sponsor epita
:website "/"
:logo "../static/logos/epita.png")
(define-sponsor lispworks
:website "/"
:logo "../static/logos/lispworks.png")
(define-sponsor franz
:website "/"
:logo "../static/logos/franz.png")
(define-sponsor brunner
:website "-brunner.de/"
:logo "../static/logos/brunner.png")
| null | https://raw.githubusercontent.com/european-lisp-symposium/els-web/b29b460cb4dae62db92d244394c4529ba0f19544/editions/global.lisp | lisp | (in-package #:els-web-editions)
(define-edition "global")
(in-package "global")
(define-person (:family-name "Verna"
:given-name "Didier")
:role (:administrator)
:email ""
:website "-lisp-symposium.org"
:organization "EPITA"
:address (:country "France"))
(define-person (:family-name "Hafner"
:given-name "Nicolas")
:photo ""
:email ""
:website ""
:nickname "Shinmera"
:organization "Shirakumo.org"
:address (:country "Switzerland"))
(define-sponsor acm
:website "/"
:logo "../static/logos/acm.svg")
(define-sponsor cl-foundation
:website "-foundation.org/"
:logo "../static/logos/cl-foundation.png")
(define-sponsor epita
:website "/"
:logo "../static/logos/epita.png")
(define-sponsor lispworks
:website "/"
:logo "../static/logos/lispworks.png")
(define-sponsor franz
:website "/"
:logo "../static/logos/franz.png")
(define-sponsor brunner
:website "-brunner.de/"
:logo "../static/logos/brunner.png")
| |
e9c4c796d12b70cf29b438bba616f0b5b0cbca747674027128716c8e36d716f9 | softlab-ntua/bencherl | file_utils.erl | Copyright ( C ) 2003 - 2014
%
This file is part of the Ceylan Erlang library .
%
% This library is free software: you can redistribute it and/or modify
% it under the terms of the GNU Lesser General Public License or
the GNU General Public License , as they are published by the Free Software
Foundation , either version 3 of these Licenses , or ( at your option )
% any later version.
% You can also redistribute it and/or modify it under the terms of the
Mozilla Public License , version 1.1 or later .
%
% This library is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License and the GNU General Public License
% for more details.
%
You should have received a copy of the GNU Lesser General Public
License , of the GNU General Public License and of the Mozilla Public License
% along with this library.
% If not, see </> and
% </>.
%
Author : ( )
Creation date : Saturday , July 12 , 2008 .
% Gathering of various convenient facilities regarding files.
%
% See file_utils_test.erl for the corresponding test.
%
-module(file_utils).
% Related standard modules: file, filename.
% Filename-related operations.
%
-export([ join/1, join/2, convert_to_filename/1, replace_extension/3,
exists/1, get_type_of/1, is_file/1,
is_existing_file/1, is_existing_file_or_link/1,
is_executable/1, is_directory/1, is_existing_directory/1,
list_dir_elements/1,
get_current_directory/0, set_current_directory/1,
filter_by_extension/2, filter_by_extensions/2,
filter_by_included_suffixes/2, filter_by_excluded_suffixes/2,
find_files_from/1, find_files_with_extension_from/2,
find_files_with_excluded_dirs/2,
find_files_with_excluded_suffixes/2,
find_files_with_excluded_dirs_and_suffixes/3,
find_directories_from/1,
create_directory/1, create_directory/2,
create_directory_if_not_existing/1,
create_temporary_directory/0,
remove_file/1, remove_file_if_existing/1,
remove_files/1, remove_files_if_existing/1,
remove_directory/1,
copy_file/2, copy_file_if_existing/2,
rename/2, move_file/2,
is_absolute_path/1,
ensure_path_is_absolute/1, ensure_path_is_absolute/2,
normalise_path/1,
path_to_variable_name/1, path_to_variable_name/2,
get_image_extensions/0, get_image_file_png/1, get_image_file_gif/1 ]).
% I/O section.
%
-export([ open/2, open/3, close/1, close/2, read/2, write/2, write/3,
read_whole/1, write_whole/2, read_terms/1 ]).
% Compression-related operations.
%
-export([ get_extension_for/1, compress/2, decompress/2,
file_to_zipped_term/1, zipped_term_to_unzipped_file/1,
zipped_term_to_unzipped_file/2,
files_to_zipped_term/1, files_to_zipped_term/2,
zipped_term_to_unzipped_files/1, zipped_term_to_unzipped_files/2 ]).
% For the file_info record:
-include_lib("kernel/include/file.hrl").
% Type declarations:
% A path may designate either a file or a directory.
-type path() :: string().
-type bin_path() :: binary().
-type file_name() :: path().
-type bin_file_name() :: binary().
-type directory_name() :: path().
-type bin_directory_name() :: binary().
-type extension() :: string().
% All known types of file entries:
-type entry_type() :: 'device' | 'directory' | 'other' | 'regular' | 'symlink'.
% The supported compression formats:
-type compression_format() :: 'zip' | 'bzip2' | 'xz'.
-export_type([ path/0, bin_path/0,
file_name/0, bin_file_name/0,
directory_name/0, bin_directory_name/0,
extension/0,
entry_type/0,
compression_format/0
]).
% Filename-related operations.
% Joins the specified list of path elements.
%
Note : added back to file_utils , filename : join ( Components ) can be used
% instead. However filename:join( [ "", "my_dir" ] ) results in "/my_dir",
whereas often we would want " my_dir " , which is returned by file_utils : .
%
-spec join( [ path() ] ) -> path().
join( _ComponentList=[ "" | T ] ) ->
filename:join( T );
join( ComponentList ) ->
filename:join( ComponentList ).
Joins the two specified path elements .
%
Note : join/2 added back to file_utils , filename : join ( Name1 , ) can be
% used instead. However filename:join( "", "my_dir" ) results in "/my_dir",
% whereas often we would want "my_dir", which is returned by file_utils:join/2.
%
-spec join( path(), path() ) -> path().
join( _FirstPath="", SecondPath ) ->
SecondPath ;
join( FirstPath, SecondPath ) ->
filename:join( FirstPath, SecondPath ).
% Converts specified name to an acceptable filename, filesystem-wise.
%
-spec convert_to_filename( string() ) ->
none ( ) in case of erlang : error/2
convert_to_filename( Name ) ->
% Currently we use exactly the same translation rules both for node names
% and file names (see net_utils:generate_valid_node_name_from/1).
% Note however that now we duplicate the code instead of calling the
net_utils module from here , as otherwise there would be one more module
% to deploy under some circumstances.
% Replaces each series of spaces (' '), lower than ('<'), greater than
% ('>'), comma (','), left ('(') and right (')') parentheses, single (''')
% and double ('"') quotes, forward ('/') and backward ('\') slashes,
% ampersand ('&'), tilde ('~'), sharp ('#'), at sign ('@'), all other kinds
% of brackets ('{', '}', '[', ']'), pipe ('|'), dollar ('$'), star ('*'),
% marks ('?' and '!'), plus ('+'), other punctation signs (';' and ':') by
% exactly one underscore:
%
% (see also: net_utils:generate_valid_node_name_from/1)
%
re:replace( lists:flatten(Name),
"( |<|>|,|\\(|\\)|'|\"|/|\\\\|\&|~|"
"#|@|{|}|\\[|\\]|\\||\\$|\\*|\\?|!|\\+|;|:)+", "_",
[ global, { return, list } ] ).
% Returns a new filename whose extension has been updated.
%
Ex : replace_extension ( " /home / jack / rosie.ttf " , " .ttf " , " .wav " ) should return
% "/home/jack/rosie.wav".
%
-spec replace_extension( file_name(), extension(), extension() ) -> file_name().
replace_extension( Filename, SourceExtension, TargetExtension ) ->
case string:rstr( Filename, SourceExtension ) of
0 ->
throw( { extension_not_found, SourceExtension, Filename } );
Index ->
string:substr( Filename, 1, Index-1 ) ++ TargetExtension
end.
% Tells whether specified file entry exists, regardless of its type.
%
-spec exists( file_name() ) -> boolean().
exists( EntryName ) ->
case file:read_file_info( EntryName ) of
{ ok, _FileInfo } ->
true;
{ error, _Reason } ->
false
end.
% Returns the type of the specified file entry.
%
-spec get_type_of( file_name() ) -> entry_type().
get_type_of( EntryName ) ->
% We used to rely on file:read_file_info/1, but an existing symlink pointing
% to a non-existing entry was triggering the enoent error, while we just
% wanted to know that the specified entry is an existing (yet dead) symlink.
% Some tools (e.g. emacs) used thus to get in the way, as apparently they
% create dead symlinks on purpose, to store information.
case file:read_link_info( EntryName ) of
{ ok, FileInfo } ->
#file_info{ type=FileType } = FileInfo,
FileType;
{ error, eloop } ->
% Probably a recursive symlink:
throw( { too_many_symlink_levels, EntryName } );
{ error, enoent } ->
throw( { non_existing_entry, EntryName } )
end.
% Returns whether the specified entry, supposedly existing, is a regular file.
%
% If the specified entry happens not to exist, a
% '{ non_existing_entry, EntryName }' exception will be thrown.
%
-spec is_file( file_name() ) -> boolean().
is_file( EntryName ) ->
case get_type_of( EntryName ) of
regular ->
true ;
_ ->
false
end.
% Returns whether the specified entry exists and is a regular file.
%
% Returns true or false, and cannot trigger an exception.
%
-spec is_existing_file( file_name() ) -> boolean().
is_existing_file( EntryName ) ->
case exists( EntryName ) andalso get_type_of( EntryName ) of
regular ->
true ;
_ ->
false
end.
% Returns whether the specified entry exists and is either a regular file or a
% symbolic link.
%
% Returns true or false, and cannot trigger an exception.
%
-spec is_existing_file_or_link( file_name() ) -> boolean().
is_existing_file_or_link( EntryName ) ->
case exists( EntryName ) andalso get_type_of( EntryName ) of
regular ->
true ;
symlink ->
true ;
_ ->
false
end.
% Returns whether the specified entry exists and is executable for its current
% owner (can be either a regular file or a symbolic link).
%
% Returns true or false, and cannot trigger an exception.
%
-spec is_executable( file_name() ) -> boolean().
is_executable( ExecutableName ) ->
case file:read_file_info( ExecutableName ) of
{ ok, FileInfo } ->
#file_info{ type=FileType, mode=Mode } = FileInfo,
case FileType of
regular ->
OwnerExecMask = 8#00100,
case Mode band OwnerExecMask of
0 ->
% Not executable:
false;
_ ->
One positive case :
true
end;
_ ->
false
end;
_ ->
false
end.
% Returns whether the specified entry, supposedly existing, is a directory.
%
% If the specified entry happens not to exist, a
% '{ non_existing_entry, EntryName }' exception will be thrown.
%
-spec is_directory( directory_name() ) -> boolean().
is_directory( EntryName ) ->
case get_type_of( EntryName ) of
directory ->
true ;
_ ->
false
end.
% Returns whether the specified entry exists and is a directory.
%
% Returns true or false, and cannot trigger an exception.
%
-spec is_existing_directory( directory_name() ) -> boolean().
is_existing_directory( EntryName ) ->
case exists( EntryName ) andalso get_type_of( EntryName ) of
directory ->
true ;
_ ->
false
end.
Returns a tuple made of a four lists describing the file elements found in
% specified directory: { Files, Directories, OtherFiles, Devices }.
%
% Note that Files include symbolic links (dead or not).
%
-spec list_dir_elements( directory_name() ) ->
{ [ file_name() ], [ directory_name() ], [ file_name() ], [ file_name() ] }.
list_dir_elements( Dirname ) ->
%io:format( "list_dir_elements for '~s'.~n", [ Dirname ] ),
{ ok, LocalDirElements } = file:list_dir( Dirname ),
classify_dir_elements( Dirname, LocalDirElements, _Devices=[],
_Directories=[], _Files=[], _OtherFiles=[] ).
% Returns the current directory, as a plain string.
%
% Throws an exception on failure.
%
-spec get_current_directory() -> directory_name().
get_current_directory() ->
case file:get_cwd() of
{ ok, Dir } ->
Dir;
{ error, Reason } ->
throw( { failed_to_determine_current_directory, Reason } )
end.
% Sets the specified directory as current directory.
%
% Throws an exception on failure.
%
-spec set_current_directory( directory_name() ) -> basic_utils:void().
set_current_directory( DirName ) ->
% For more detail of { 'error', atom() }, refer to type specifications of
% erlang files: file.erl and file.hrl.
case file:set_cwd( DirName ) of
ok ->
ok;
{ error, Error } ->
throw( { set_current_directory_failed, DirName, Error } )
end.
% Helper function.
%
Returns a tuple containing four lists corresponding to the sorting of all
% file elements: { Directories, Files, Devices, OtherFiles }.
%
% Note that Files include symbolic links (dead or not).
%
classify_dir_elements( _Dirname, _Elements=[], Devices, Directories, Files,
OtherFiles ) ->
% Note the reordering:
{ Files, Directories, OtherFiles, Devices };
classify_dir_elements( Dirname, _Elements=[ H | T ],
Devices, Directories, Files, OtherFiles ) ->
case get_type_of( filename:join( Dirname, H ) ) of
device ->
classify_dir_elements( Dirname, T, [ H | Devices ], Directories,
Files, OtherFiles ) ;
directory ->
classify_dir_elements( Dirname, T, Devices, [ H | Directories ],
Files, OtherFiles ) ;
regular ->
classify_dir_elements( Dirname, T, Devices, Directories,
[ H | Files ], OtherFiles ) ;
% Managed as regular files:
symlink ->
classify_dir_elements( Dirname, T, Devices, Directories,
[ H | Files ], OtherFiles ) ;
other ->
classify_dir_elements( Dirname, T, Devices, Directories,
Files, [ H | OtherFiles ] )
end.
% Regarding extensions: we could canonicalise their case, so that ".png" and
% ".PNG" are treated the same.
Returns a list containing all elements of Filenames list whose extension is
the specified one ( ex : " .dat " ) .
%
-spec filter_by_extension( [ file_name() ], extension() ) -> [ file_name() ].
filter_by_extension( Filenames, Extension ) ->
filter_by_extension( Filenames, Extension, _Acc=[] ).
filter_by_extension( _Filenames=[], _Extension, Acc ) ->
Acc ;
filter_by_extension( _Filenames=[ H | T ], Extension, Acc ) ->
case filename:extension( H ) of
Extension ->
filter_by_extension( T, Extension, [ H | Acc ] ) ;
_Other ->
filter_by_extension( T, Extension, Acc )
end.
Returns a list containing all elements of Filenames list whose extension
corresponds to one of the specified extensions ( ex : [ " .dat " , " .png " ] ) .
%
-spec filter_by_extensions( [ file_name() ], [ extension() ] ) ->
[ file_name() ].
filter_by_extensions( Filenames, Extensions ) ->
filter_by_extensions( Filenames, Extensions, _Acc=[] ).
filter_by_extensions( _Filenames=[], _Extensions, Acc ) ->
Acc ;
filter_by_extensions( _Filenames=[ F | T ], Extensions, Acc ) ->
case lists:member( filename:extension( F ), Extensions ) of
true ->
filter_by_extensions( T, Extensions, [ F | Acc ] ) ;
false ->
filter_by_extensions( T, Extensions, Acc )
end.
Returns a list containing all elements of the Filenames list which match any
% of the specified suffixes.
%
-spec filter_by_included_suffixes( [ file_name() ], [ string() ] ) ->
[ file_name() ].
filter_by_included_suffixes( Filenames, IncludedSuffixes ) ->
[ F || F <- Filenames, has_matching_suffix( F, IncludedSuffixes ) ].
Returns a list containing all elements of the Filenames list which do not
% match any of the specified suffixes.
%
-spec filter_by_excluded_suffixes( [ file_name() ], [ string() ] ) ->
[ file_name() ].
filter_by_excluded_suffixes( Filenames, ExcludedSuffixes ) ->
[ F || F <- Filenames, not has_matching_suffix( F, ExcludedSuffixes ) ].
-spec has_matching_suffix( file_name(), [ string() ] ) -> boolean().
has_matching_suffix( _Filename, _ExcludedSuffixes=[] ) ->
false;
has_matching_suffix( Filename, [ S | OtherS ] ) ->
% We have to avoid feeding string:substr/2 with a start position that is not
% strictly positive, otherwise we would trigger a function clause error:
LenFile = length( Filename ),
LenSuffix = length( S ),
case LenFile - LenSuffix + 1 of
StartPos when StartPos > 0 ->
case string:substr( Filename, StartPos ) of
S ->
true;
_ ->
has_matching_suffix( Filename, OtherS )
end;
_ ->
has_matching_suffix( Filename, OtherS )
end.
% Section dedicated to the look-up of files, with various variations (with or
% without extensions, with or without excluded directories, etc.)
% Returns the list of all regular files found from the root, in the whole
% subtree (i.e. recursively).
%
% All extensions and suffixes accepted, no excluded directories.
%
% All returned pathnames are relative to this root.
Ex : [ " ./a.txt " , " ./tmp / b.txt " ] .
%
-spec find_files_from( directory_name() ) -> [ file_name() ].
find_files_from( RootDir ) ->
find_files_from( RootDir, _CurrentRelativeDir="", _Acc=[] ).
% Helper.
find_files_from( RootDir, CurrentRelativeDir, Acc ) ->
%io:format( "find_files_from with root = '~s', current = '~s'.~n",
% [ RootDir, CurrentRelativeDir ] ),
{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
join( RootDir, CurrentRelativeDir ) ),
Acc ++ list_files_in_subdirs( Directories, RootDir, CurrentRelativeDir, [] )
++ prefix_files_with( CurrentRelativeDir, RegularFiles ).
% Specific helper for find_files_from/3 above:
list_files_in_subdirs( _Dirs=[], _RootDir, _CurrentRelativeDir, Acc ) ->
Acc;
list_files_in_subdirs( _Dirs=[ H | T ], RootDir, CurrentRelativeDir, Acc ) ->
%io:format( "list_files_in_subdirs with root = '~s', current = '~s' "
% "and H='~s'.~n", [ RootDir, CurrentRelativeDir, H ] ),
list_files_in_subdirs( T, RootDir, CurrentRelativeDir,
find_files_from( RootDir, join( CurrentRelativeDir, H ), [] ) ++ Acc ).
% Returns the list of all regular files found from the root with specified
% extension, in the whole subtree (i.e. recursively).
%
% All returned pathnames are relative to this root.
Ex : [ " ./a.txt " , " ./tmp / b.txt " ] .
%
-spec find_files_with_extension_from( directory_name(), extension() )
-> [ file_name() ].
find_files_with_extension_from( RootDir, Extension ) ->
find_files_with_extension_from( RootDir, "", Extension, [] ).
% Helper.
find_files_with_extension_from( RootDir, CurrentRelativeDir, Extension, Acc ) ->
%io:format( "find_files_from in ~s.~n", [ CurrentRelativeDir ] ),
{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
join( RootDir, CurrentRelativeDir ) ),
Acc ++ list_files_in_subdirs_with_extension( Directories, Extension,
RootDir, CurrentRelativeDir, [] )
++ prefix_files_with( CurrentRelativeDir,
filter_by_extension( RegularFiles, Extension ) ).
% Helper for find_files_with_extension_from/4:
list_files_in_subdirs_with_extension( _Dirs=[], _Extension, _RootDir,
_CurrentRelativeDir, Acc) ->
Acc;
list_files_in_subdirs_with_extension( _Dirs=[ H | T ], Extension, RootDir,
CurrentRelativeDir, Acc ) ->
list_files_in_subdirs_with_extension( T, Extension, RootDir,
CurrentRelativeDir,
find_files_with_extension_from( RootDir, join( CurrentRelativeDir, H ),
Extension, [] ) ++ Acc ).
% Returns the list of all regular files found from the root, in the whole
% subtree (i.e. recursively), with specified directories excluded.
%
% Note that the excluded directories can be specified as a full path (ex:
% "foo/bar/not-wanted"), for just as a final directory name (ex:
% "my-excluded-name"). In the latter case, all directories bearing that name
% (ex: "foo/bar/my-excluded-name") will be excluded as well.
%
% Thus when a directory D is specified in the excluded list, each traversed
% directory T will be compared twice to D: T will be matched against D, and
against filename : basename(T ) , i.e. its final name , as well . As soon as one
% matches, T will be excluded.
%
% All extensions accepted.
%
% All returned pathnames are relative to this root.
Ex : [ " ./a.txt " , " ./tmp / b.txt " ] .
%
-spec find_files_with_excluded_dirs( directory_name(), [ directory_name() ] )
-> [ file_name() ].
find_files_with_excluded_dirs( RootDir, ExcludedDirList ) ->
find_files_with_excluded_dirs( RootDir, _CurrentRelativeDir="",
ExcludedDirList, _Acc=[] ).
% Helper.
% Helper: recursive traversal; CurrentRelativeDir is relative to RootDir.
%
% Fix: the commented-out debug trace and an explanatory comment had lost their
% '%' prefix, breaking compilation.
find_files_with_excluded_dirs( RootDir, CurrentRelativeDir, ExcludedDirList,
        Acc ) ->

    %io:format( "find_files_with_excluded_dirs in ~s.~n",
    %   [ CurrentRelativeDir ] ),

    { RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
        join( RootDir, CurrentRelativeDir ) ),

    % If for example ExcludedDirList=[ ".svn" ], we want to eliminate not only
    % ".svn" but also all "foo/bar/.svn", i.e. all directories having the same
    % (last) name:
    %
    FilteredDirectories = [ D || D <- Directories,
        not ( lists:member( join( CurrentRelativeDir, D ), ExcludedDirList )
            or lists:member( D, ExcludedDirList ) ) ],

    % Recurses first in the kept subdirectories, then adds the local files:
    Acc ++ list_files_in_subdirs_excluded_dirs( FilteredDirectories, RootDir,
            CurrentRelativeDir, ExcludedDirList, _Acc=[] )
        ++ prefix_files_with( CurrentRelativeDir, RegularFiles ).
% Specific helper for find_files_with_excluded_dirs/4 above: accumulates the
% files found under each of the specified subdirectories.
list_files_in_subdirs_excluded_dirs( Dirs, RootDir, CurrentRelativeDir,
        ExcludedDirList, Acc ) ->
    lists:foldl(
        fun( Dir, FileAcc ) ->
            find_files_with_excluded_dirs( RootDir,
                join( CurrentRelativeDir, Dir ), ExcludedDirList, [] )
                ++ FileAcc
        end,
        Acc, Dirs ).
% Returns the list of all regular files found from the root which do not match
% any of the specified suffixes, in the whole subtree (i.e. recursively).
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
%
-spec find_files_with_excluded_suffixes( directory_name(), [ string() ])
    -> [ file_name() ].
find_files_with_excluded_suffixes( RootDir, ExcludedSuffixes ) ->
    % Starts the recursion from the root, with an empty relative path:
    find_files_with_excluded_suffixes( RootDir, _CurrentRelativeDir="",
        ExcludedSuffixes, _Acc=[] ).
% Helper: recursive traversal; returns the files not matching any of the
% excluded suffixes, with paths relative to RootDir.
find_files_with_excluded_suffixes( RootDir, CurrentRelativeDir,
        ExcludedSuffixes, Acc ) ->
    %io:format( "find_files_with_excluded_suffixes in ~s.~n",
    % [ CurrentRelativeDir ] ),
    % Splits the current directory content by nature:
    { RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
        join( RootDir, CurrentRelativeDir ) ),
    % Recurses first in subdirectories, then adds the local, kept files:
    Acc ++ list_files_in_subdirs_with_excluded_suffixes( Directories,
        ExcludedSuffixes, RootDir, CurrentRelativeDir, [] )
        ++ prefix_files_with( CurrentRelativeDir,
            filter_by_excluded_suffixes( RegularFiles, ExcludedSuffixes ) ).
% Helper for find_files_with_excluded_suffixes/4: accumulates, for each listed
% subdirectory, the non-excluded files found in its subtree.
-spec list_files_in_subdirs_with_excluded_suffixes( list(), [ string() ],
    directory_name(), directory_name(), [ file_name() ]) -> [ file_name() ].
list_files_in_subdirs_with_excluded_suffixes( Dirs, ExcludedSuffixes, RootDir,
        CurrentRelativeDir, Acc ) ->
    lists:foldl(
        fun( Dir, FileAcc ) ->
            find_files_with_excluded_suffixes( RootDir,
                join( CurrentRelativeDir, Dir ), ExcludedSuffixes, [] )
                ++ FileAcc
        end,
        Acc, Dirs ).
% Returns the list of all regular files found from the root with specified
% suffix, in the whole subtree (i.e. recursively), with specified directories
% excluded.
%
% Note that the excluded directories can be specified as a full path (ex:
% "foo/bar/not-wanted"), for just as a final directory name (ex:
% "my-excluded-name"). In the latter case, all directories bearing that name
% (ex: "foo/bar/my-excluded-name") will be excluded as well.
%
% Thus when a directory D is specified in the excluded list, each traversed
% directory T will be compared twice to D: T will be matched against D, and
% against filename:basename(T), i.e. its final name, as well. As soon as one
% matches, T will be excluded.
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
%
-spec find_files_with_excluded_dirs_and_suffixes( directory_name(),
        [ directory_name() ], [ string() ] ) -> [ file_name() ].
% Fix: part of a commented-out debug trace had lost its '%' prefix, breaking
% compilation; restored as a comment.
find_files_with_excluded_dirs_and_suffixes( RootDir, ExcludedDirList,
        ExcludedSuffixes ) ->

    %{ ok, CurrentDir } = file:get_cwd(),
    %io:format( "find_files_with_excluded_dirs_and_suffixes: current is ~s, "
    %   "root is ~s.~n", [ CurrentDir, RootDir ] ),

    % Starts the recursion at the root, with an empty relative path:
    find_files_with_excluded_dirs_and_suffixes( RootDir,
        _CurrentRelativeDir="", ExcludedDirList, ExcludedSuffixes, _Acc=[] ).
% Helper: recursive traversal, excluding both directories and suffixes.
%
% Fix: the commented-out debug trace and an explanatory comment had lost their
% '%' prefix, breaking compilation.
find_files_with_excluded_dirs_and_suffixes( RootDir, CurrentRelativeDir,
        ExcludedDirList, ExcludedSuffixes, Acc ) ->

    %io:format( "find_files_with_excluded_dirs_and_suffixes in ~s/~s.~n",
    %   [ RootDir, CurrentRelativeDir ] ),

    { RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
        join( RootDir, CurrentRelativeDir ) ),

    % If for example ExcludedDirList=[ ".svn" ], we want to eliminate not only
    % ".svn" but also all "foo/bar/.svn", i.e. all directories having the same
    % (last) name:
    %
    FilteredDirectories = [ D || D <- Directories,
        not ( lists:member( join( CurrentRelativeDir, D ), ExcludedDirList )
            or lists:member( D, ExcludedDirList ) ) ],

    % Recurses first in the kept subdirectories, then adds the kept files:
    Acc ++ list_files_in_subdirs_excluded_dirs_and_suffixes(
            FilteredDirectories, RootDir, CurrentRelativeDir,
            ExcludedDirList, ExcludedSuffixes, _Acc=[] )
        ++ prefix_files_with( CurrentRelativeDir,
            filter_by_excluded_suffixes( RegularFiles, ExcludedSuffixes ) ).
% Specific helper for find_files_with_excluded_dirs_and_suffixes/5 above:
% accumulates the matching files found under each specified subdirectory.
list_files_in_subdirs_excluded_dirs_and_suffixes( Dirs, RootDir,
        CurrentRelativeDir, ExcludedDirList, ExcludedSuffixes, Acc ) ->
    lists:foldl(
        fun( Dir, FileAcc ) ->
            find_files_with_excluded_dirs_and_suffixes( RootDir,
                join( CurrentRelativeDir, Dir ), ExcludedDirList,
                ExcludedSuffixes, [] ) ++ FileAcc
        end,
        Acc, Dirs ).
% Prefixes each of the specified filenames with the specified root directory.
%
% Note: the resulting list is built in reverse order of the input (see the
% arity-3 helper, which prepends to its accumulator).
%
% Fix: the debug trace below had lost its '%' prefix and was thus executed
% (printing on every call); restored as a comment.
%
-spec prefix_files_with( directory_name(), [ file_name() ] ) -> [ file_name() ].
prefix_files_with( RootDir, Files ) ->
    %io:format( "Prefixing ~p with '~s'.~n", [ Files, RootDir ] ),
    prefix_files_with( RootDir, Files, _Acc=[] ).
% Helper: prepends each prefixed filename to the accumulator (hence the
% resulting order is the reverse of the input one).
prefix_files_with( RootDir, Files, Acc ) ->
    lists:foldl(
        fun( File, FileAcc ) ->
            [ join( RootDir, File ) | FileAcc ]
        end,
        Acc, Files ).
% Returns the list of all directories found from the root, in the whole subtree
% (i.e. recursively).
%
% All returned pathnames are relative to this root.
% Ex: [ "./my-dir", "./tmp/other-dir" ].
%
-spec find_directories_from( directory_name() ) -> [ directory_name() ].
find_directories_from( RootDir ) ->
    % Starts at the root itself, with an empty relative path:
    find_directories_from( RootDir, "", _Acc=[] ).
% Helper: recursive traversal, accumulating directories relative to RootDir.
find_directories_from( RootDir, CurrentRelativeDir, Acc ) ->
    %io:format( "find_directories_from in ~s.~n", [ CurrentRelativeDir ] ),
    { _RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
        join( RootDir, CurrentRelativeDir ) ),
    % Recurses first in subdirectories, then adds the local ones:
    Acc ++ list_directories_in_subdirs( Directories,
        RootDir, CurrentRelativeDir, _Acc=[] )
        ++ prefix_files_with( CurrentRelativeDir, Directories ).
% Helper: accumulates the directories found under each specified subdirectory.
list_directories_in_subdirs( Dirs, RootDir, CurrentRelativeDir, Acc ) ->
    lists:foldl(
        fun( Dir, DirAcc ) ->
            find_directories_from( RootDir,
                join( CurrentRelativeDir, Dir ), _Acc=[] ) ++ DirAcc
        end,
        Acc, Dirs ).
% Creates specified directory, without creating any intermediate (parent)
% directory that would not exist.
%
% Throws an exception if the operation failed.
%
-spec create_directory( directory_name() ) -> basic_utils:void().
create_directory( Dirname ) ->
    % Defaults to not creating missing parent directories:
    create_directory( Dirname, create_no_parent ).
% Creates the specified directory.
%
% If 'create_no_parent' is specified, no intermediate (parent) directory will be
% created.
%
% If 'create_parents' is specified, any non-existing intermediate (parent)
% directory will be created.
%
% Throws an exception if the operation fails.
%
-spec create_directory( directory_name(),
        'create_no_parent' | 'create_parents' ) -> basic_utils:void().
create_directory( Dirname, create_no_parent ) ->
    % A single mkdir attempt; any error is promoted to an exception:
    MkdirResult = file:make_dir( Dirname ),
    case MkdirResult of

        ok ->
            ok;

        { error, Reason } ->
            throw( { create_directory_failed, Dirname, Reason } )

    end;

create_directory( Dirname, create_parents ) ->
    % Creates each path component in turn, from the left:
    create_dir_elem( filename:split( Dirname ), "" ).
% Creates specified directory (but not any parents), if not already existing.
%
% Throws an exception if the operation fails.
%
-spec create_directory_if_not_existing( directory_name() ) ->
    basic_utils:void().
create_directory_if_not_existing( Dirname ) ->
    case is_existing_directory( Dirname ) of

        false ->
            create_directory( Dirname );

        true ->
            ok

    end.
% Helper: creates, from left to right, each remaining path element under the
% already-secured Prefix.
create_dir_elem( _Elems=[], _Prefix ) ->
    ok;

create_dir_elem( _Elems=[ Elem | Others ], Prefix ) ->
    NewPrefix = join( Prefix, Elem ),
    % Only creates this level if not already there:
    exists( NewPrefix ) orelse
        create_directory( NewPrefix, create_no_parent ),
    create_dir_elem( Others, NewPrefix ).
% Creates a non previously existing temporary directory, and returns its full
% path.
%
-spec create_temporary_directory() -> directory_name().
create_temporary_directory() ->
    % A per-user, UUID-based path under /tmp:
    TmpDir = join( [ "/tmp", system_utils:get_user_name(),
        basic_utils:generate_uuid() ] ),
    case exists( TmpDir ) of
        true ->
            % Very bad luck apparently, or same random root:
            create_temporary_directory();
        false ->
            create_directory( TmpDir, create_parents ),
            TmpDir
    end.
% Removes specified file, specified as a plain string.
%
% Throws an exception if any problem occurs.
%
% Fix: the debug trace below had lost its '%' prefix and was thus executed
% (printing on every removal); restored as a comment.
%
-spec remove_file( file_name() ) -> basic_utils:void().
remove_file( Filename ) ->

    %io:format( "## Removing file '~s'.~n", [ Filename ] ),

    case file:delete( Filename ) of

        ok ->
            ok;

        Error ->
            throw( { remove_file_failed, Filename, Error } )

    end.
% Removes specified files, specified as a list of plain strings.
%
% Throws an exception as soon as one removal fails.
%
-spec remove_files( [ file_name() ] ) -> basic_utils:void().
remove_files( FilenameList ) ->
    [ remove_file( F ) || F <- FilenameList ].
% Removes specified file, specified as a plain string, iff it is already
% existing, otherwise does nothing.
%
-spec remove_file_if_existing( file_name() ) -> basic_utils:void().
remove_file_if_existing( Filename ) ->
    case is_existing_file( Filename ) of

        false ->
            ok;

        true ->
            remove_file( Filename )

    end.
% Removes each specified file, in specified list of plain strings, iff it is
% already existing.
%
-spec remove_files_if_existing( [ file_name() ] ) -> basic_utils:void().
remove_files_if_existing( FilenameList ) ->
    [ remove_file_if_existing( F ) || F <- FilenameList ].
% Removes specified directory, which must be empty.
%
% Throws an exception if the removal fails.
%
-spec remove_directory( directory_name() ) -> basic_utils:void().
remove_directory( DirectoryName ) ->
    DelResult = file:del_dir( DirectoryName ),
    case DelResult of

        ok ->
            ok;

        { error, Reason } ->
            throw( { remove_directory_failed, Reason, DirectoryName } )

    end.
% Copies a specified file to a given destination.
%
% Note: content is copied and permissions are preserved (ex: the copy of an
% executable file will be itself executable).
%
% Fix: an explanatory comment had lost its '%' prefix, breaking compilation.
%
-spec copy_file( file_name(), file_name() ) -> basic_utils:void().
copy_file( SourceFilename, DestinationFilename ) ->

    % First, checks the source file exists and retrieves its meta-information:
    case file:read_file_info( SourceFilename ) of

        { ok, #file_info{ mode=Mode } } ->

            case file:copy( SourceFilename, DestinationFilename ) of

                { ok, _ByteCount } ->
                    % Now sets the permissions of the copy:
                    ok = file:change_mode( DestinationFilename, Mode );

                Error ->
                    throw( { copy_file_failed, SourceFilename,
                        DestinationFilename, Error } )

            end;

        { error, Reason } ->
            throw( { copy_file_failed, SourceFilename, Reason } )

    end.
% Copies a specified file to a given destination iff it is already existing.
%
% Note: content is copied and permissions are preserved (ex: the copy of an
% executable file will be itself executable).
%
-spec copy_file_if_existing( file_name(), file_name() ) -> basic_utils:void().
copy_file_if_existing( SourceFilename, DestinationFilename ) ->
    case is_existing_file( SourceFilename ) of

        false ->
            ok;

        true ->
            copy_file( SourceFilename, DestinationFilename )

    end.
% Renames specified file: a mere alias for move_file/2.
%
% Fix: this header comment had lost its '%' prefix, breaking compilation.
%
rename( SourceFilename, DestinationFilename ) ->
    move_file( SourceFilename, DestinationFilename ).
% Moves specified file so that it is now designated by specified filename.
%
% Throws an exception if the renaming fails.
%
-spec move_file( file_name(), file_name() ) -> basic_utils:void().
move_file( SourceFilename, DestinationFilename ) ->
    %io:format( "## Moving file '~s' to '~s'.~n",
    % [ SourceFilename, DestinationFilename ] ),
    %copy_file( SourceFilename, DestinationFilename ),
    %remove_file( SourceFilename ).
    % Simpler, better:
    case file:rename( SourceFilename, DestinationFilename ) of
        ok ->
            ok;
        Error ->
            throw( { move_file_failed, Error, SourceFilename,
                DestinationFilename } )
    end.
% Tells whether the specified path is an absolute one.
%
% A path is deemed absolute iff it starts with "/".
%
-spec is_absolute_path( path() ) -> boolean().
is_absolute_path( [ $/ | _Rest ] ) ->
    true;

is_absolute_path( _AnyOtherPath ) ->
    false.
% Returns an absolute path corresponding to specified path.
%
% If it is not already absolute, it will be made so by using the current
% working directory.
%
ensure_path_is_absolute( Path ) ->
    case is_absolute_path( Path ) of
        true ->
            % Already absolute:
            Path;
        false ->
            % Relative, using current directory as base:
            join( get_current_directory(), Path )
    end.
% Returns an absolute path corresponding to specified path, using base path as
% root directory (this must be an absolute path).
%
% Ex: ensure_path_is_absolute( "tmp/foo", "/home/dalton" ) will return
% "/home/dalton/tmp/foo".
%
ensure_path_is_absolute( TargetPath, BasePath ) ->
    case is_absolute_path( TargetPath ) of

        true ->
            % Already absolute:
            TargetPath;

        false ->
            % Relative: requires an absolute base to anchor to.
            is_absolute_path( BasePath )
                orelse throw( { base_path_not_absolute, BasePath } ),
            join( BasePath, TargetPath )

    end.
% Normalises path, by translating it so that no '.' or '..' is present
% afterwards.
%
% For example, "/home/garfield/../lisa/./src/.././tube" shall be normalised in
% "/home/lisa/tube".
%
% Splits the path into its elements, filters out "." and resolves "..", then
% reassembles the result:
normalise_path( Path ) ->
    PathElements = filename:split( Path ),
    join( filter_elems( PathElements, _Acc=[] ) ).
% Helper: drops "." elements, makes ".." consume the previously kept element,
% and keeps all others, preserving their order.
filter_elems( _ElemList=[], Acc ) ->
    lists:reverse( Acc );

filter_elems( _ElemList=[ Elem | Others ], Acc ) ->
    NewAcc = case Elem of

        "." ->
            Acc;

        ".." ->
            tl( Acc );

        _ ->
            [ Elem | Acc ]

    end,
    filter_elems( Others, NewAcc ).
% The approach below would not work with, for example, "X/Y/Z/../../A":
%
%RevElemList = lists:reverse( filename:split( Path ) ),
%
%% Returns in the right order:
%join( filter_elems( RevElemList, _Acc=[] ) ).
%
% filter_elems( _Elems=[], Acc ) ->
%	Acc;
% filter_elems( _Elems=[ "." | T ], Acc ) ->
%	filter_elems( T, Acc );
% filter_elems( _Elems=[ "..", _E | T ], Acc ) ->
%	filter_elems( T, Acc );
% filter_elems( _Elems=[ E | T ], Acc ) ->
% filter_elems( T, [ E | Acc ] ).
% Converts specified path (full filename, like '/home/jack/test.txt' or
% './media/test.txt') into a variable name licit in most programming languages
% (ex: C/C++).
%
% Rule here is:
% - variable name starts with a prefix, user-supplied or the default one
% - any leading './' is removed
% - '-' becomes '_'
% - '.' becomes '_'
% - '/' becomes '_'
%
-spec path_to_variable_name( path() ) -> string().
path_to_variable_name( Filename ) ->
    % Relies on the default prefix:
    path_to_variable_name( Filename, "File_" ).
% Removes any leading './', then performs the character translations.
%
-spec path_to_variable_name( path(), string() ) -> string().
path_to_variable_name( Path, Prefix ) ->
    Stripped = case Path of

        [ $., $/ | Rest ] ->
            Rest;

        _ ->
            Path

    end,
    convert( Stripped, Prefix ).
% Helper function: applies, in turn, the dash, dot and slash to underscore
% translations, then prepends the prefix.
%
convert( Filename, Prefix ) ->
    Translated = lists:foldl(
        fun( Pattern, Acc ) ->
            re:replace( Acc, Pattern, "_", [ global, { return, list } ] )
        end,
        lists:flatten( Filename ),
        [ "-+", "\\.+", "/+" ] ),
    Prefix ++ Translated.
% Returns a list of the known file extensions that refer image files.
%
% Fix: the in-body comment had lost its '%' prefix, breaking compilation.
%
-spec get_image_extensions() -> [ extension() ].
get_image_extensions() ->
    % TIFF, TGA and al deemed deprecated:
    [ ".png", ".jpg", ".jpeg", ".bmp" ].
-define(ResourceDir,"resources").
% Returns the image path corresponding to the specified file.
%
-spec get_image_file_png( file_name() ) -> path().
get_image_file_png( Image ) ->
    % Located under the 'images' subdirectory of the resource directory:
    filename:join( [ ?ResourceDir, "images", Image ++ ".png" ] ).
% Returns the image path corresponding to the specified file.
%
-spec get_image_file_gif( file_name() ) -> path().
get_image_file_gif( Image ) ->
    % Located under the 'images' subdirectory of the resource directory:
    filename:join( [ ?ResourceDir, "images", Image ++ ".gif" ] ).
% I/O section.
% Opens the file corresponding to the specified filename, with specified list of
% options (as listed in file:open/2).
%
% Returns the file reference, or throws an exception.
%
% Will attempt to open the specified file only once, as looping endlessly does
% not seem a viable solution right now (risk of exhausting the descriptors,
% making the VM fail for example when loading a new BEAM).
%
-spec open( file_name(), list() ) -> file:io_device().
open( Filename, Options ) ->
    % Single attempt by default (see open/3 for the rationale):
    open( Filename, Options, _Default=try_once ).
% Opens the file corresponding to specified filename (first parameter) with
% specified list of options (second parameter; refer to file:open/2 for detailed
% documentation).
%
% Third parameter is the "attempt mode", either 'try_once', 'try_endlessly' or
% 'try_endlessly_safer', depending respectively on whether we want to try to
% open the file once (no other attempt will be made), endlessly (until a file
% descriptor can be gained), possibly with a safer setting.
%
% Returns the file reference, or throws an exception.
%
% Will try to obtain a file descriptor iteratively (and endlessly) with
% process-specific random waitings, should no descriptor be available.
%
% A risk of that approach is that all available file descriptors will be
% taken, thus potentially preventing other processes (including the VM itself)
% to perform any file operation, like loading a new BEAM, ex:
% """
% File operation error: system_limit. Target:
% lib/erlang/lib/kernel-x.y.z/ebin/timer.beam. Function: get_file.
% Process: code_server.
% """
% %
% This is done in order to support situations where potentially more Erlang
% processes than available file descriptors try to access to files. An effort is
% made to desynchronize these processes to smooth the use of descriptors.
%
% (file:mode() not exported currently unfortunately)
%
-spec open( file_name(), [ file:mode() | 'ram' ],
'try_once' | 'try_endlessly' | 'try_endlessly_safer' )
-> file:io_device().
% For the contents in above tuple: reference to type #file_descriptor
% of erlang module: file.hrl
% Fix: several explanatory comments in this function had lost their '%' prefix,
% breaking compilation; restored as comments.
open( Filename, Options, _AttemptMode=try_endlessly_safer ) ->

    File = open( Filename, Options, try_endlessly ),

    % We could check here that at least one descriptor remains, by adding a
    % dummy file open/close and catching emfile, however one could need more
    % than one spare descriptor.
    %
    % The correct solution would involve knowing the number of descriptors
    % for that process and the current number of open ones, no information we
    % seems able to know.
    %
    % So for the moment we do not do anything more than 'try_endlessly':
    File;

open( Filename, Options, _AttemptMode=try_endlessly ) ->

    case file:open( Filename, Options ) of

        { ok, File } ->
            File;

        { error, FileError } when FileError == emfile
                orelse FileError == system_limit ->

            % File descriptors exhausted for this OS process.
            % Waiting, to desynchronize file opening in order to remain below
            % the limit of file descriptors opened:
            Duration = basic_utils:get_process_specific_value(
                _Min=50, _Max=200 ),

            % Attempt not to use timer:sleep (anyway will trigger errors
            % afterwards when the system will try to look-up some BEAMs):
            receive

            after Duration ->

                open( Filename, Options, try_endlessly )

            end;

        { error, OtherFileError } ->
            throw( { open_failed, { Filename, Options }, OtherFileError } )

    end;

open( Filename, Options, _AttemptMode=try_once ) ->

    case file:open( Filename, Options ) of

        { ok, File } ->
            File;

        { error, emfile } ->
            throw( { too_many_open_files, { Filename, Options } } );

        { error, system_limit } ->
            % Never had system_limit without this cause (yet!):
            throw( { too_many_open_files, { Filename, Options },
                system_limit } );

        { error, OtherError } ->
            throw( { open_failed, { Filename, Options }, OtherError } )

    end.
% Closes specified file reference.
%
% Throws an exception on failure.
%
-spec close( file:io_device() ) -> basic_utils:void().
close( File ) ->
    % Defaults to the strict failure mode:
    close( File, throw_if_failed ).
% Closes specified file reference.
%
% Throws an exception on failure or not, depending on specified failure mode.
%
-spec close( file:io_device(), 'overcome_failure' | 'throw_if_failed' )
    -> basic_utils:void().
close( File, _FailureMode=overcome_failure ) ->
    % Any closing error is simply ignored:
    file:close( File );

close( File, _FailureMode=throw_if_failed ) ->
    case file:close( File ) of

        ok ->
            ok;

        { error, Reason } ->
            throw( { file_closing_failed, Reason } )

    end.
% Reads specified number of bytes/characters from the specified file.
%
% Returns either { ok, Data } if at least some data could be read, or eof if at
% least one element was to read and end of file was reached before anything at
% all could be read.
%
% Throws an exception on failure.
%
-spec read( file:io_device(), basic_utils:count() ) ->
    { 'ok', string() | binary() } | 'eof'.
read( File, Count ) ->
    case file:read( File, Count ) of
        R={ ok, _Data } ->
            R;
        eof ->
            eof;
        { error, Reason } ->
            throw( { read_failed, Reason } )
    end.
% Writes specified content into specified file.
%
% Throws an exception on failure.
%
-spec write( file:io_device(), iodata() ) -> basic_utils:void().
write( File, Content ) ->
    WriteResult = file:write( File, Content ),
    case WriteResult of

        ok ->
            ok;

        { error, Reason } ->
            throw( { write_failed, Reason } )

    end.
% Writes specified formatted content into specified file.
%
% Throws an exception on failure.
%
-spec write( file:io_device(), text_utils:format_string(), [ term() ] ) ->
    basic_utils:void().
write( File, FormatString, Values ) ->
    FormattedText = io_lib:format( FormatString, Values ),
    WriteResult = file:write( File, FormattedText ),
    case WriteResult of

        ok ->
            ok;

        { error, Reason } ->
            throw( { write_failed, Reason } )

    end.
% Reads the content of the specified file, based on its filename specified as a
% plain string, and returns the corresponding binary, or throws an exception on
% failure.
%
% See also: read_terms/1 to read directly Erlang terms.
%
% Fix: the "See also" comment line had lost its '%' prefix, breaking
% compilation.
%
-spec read_whole( file_name() ) -> binary().
read_whole( Filename ) ->
    case file:read_file( Filename ) of

        { ok, Binary } ->
            Binary;

        { error, Error } ->
            throw( { read_whole_failed, Filename, Error } )

    end.
% Writes the specified binary in specified file, whose filename is specified as
% a plain string. Throws an exception on failure.
%
-spec write_whole( file_name(), binary() ) -> basic_utils:void().
write_whole( Filename, Binary ) ->
    WriteResult = file:write_file( Filename, Binary ),
    case WriteResult of

        ok ->
            ok;

        { error, Error } ->
            throw( { write_whole_failed, Filename, Error } )

    end.
% Reads specified file, tries to read a list of terms from it, and returns it.
%
% Throws an exception on error.
%
read_terms( Filename ) ->
    case file:consult( Filename ) of
        { ok, Terms } ->
            Terms;
        { error, Error } when is_atom( Error ) ->
            % An atom here comes straight from the file access itself:
            throw( { reading_failed, Filename, Error } );
        { error, Error } ->
            % Otherwise a parsing issue, described by file:format_error/1:
            Reason = file:format_error( Error ),
            throw( { interpretation_failed, Filename, Reason } )
    end.
% Compression-related operations.
% Returns the file extension corresponding to filenames compressed with
% specified format.
%
-spec get_extension_for( compression_format() ) -> extension().
get_extension_for( zip ) ->
    ".zip";

get_extension_for( bzip2 ) ->
    ".bz2";

get_extension_for( xz ) ->
    ".xz".
% Compresses specified file: creates a new, compressed version thereof, whose
% filename, established based on usual conventions, is returned. If a file with
% that name already exists, it will be overwritten.
%
% For example, compress( "hello.png", zip ) will generate a "hello.png.zip"
% file.
%
% The original file remain as is.
%
% Note: this function just takes care of compressing a single file, even if some
% compressors (ex: zip) include features to create an archive of multiple files
% first.
%
% Fix: several commented-out lines (including a call to executable_utils that
% would otherwise be executed) had lost their '%' prefix; restored as comments.
%
-spec compress( file_name(), compression_format() ) -> file_name().
compress( Filename, _CompressionFormat=zip ) ->

    % Rather than using a standalone zip tool, we use the Erlang support here:

    %ZipExec = executable_utils:get_default_zip_compress_tool(),

    ZipFilename = Filename ++ get_extension_for( zip ),

    % Exactly this one file in the archive:
    %Command = ZipExec ++ " --quiet " ++ ZipFilename ++ " " ++ Filename,
    %[] = os:cmd( Command ),

    zip:zip( ZipFilename, [ Filename ] ),

    % Check:
    true = is_existing_file( ZipFilename ),

    ZipFilename;

compress( Filename, _CompressionFormat=bzip2 ) ->

    Bzip2Exec = executable_utils:get_default_bzip2_compress_tool(),

    % --keep allows to avoid that the compressor removes the original file:
    [] = os:cmd( Bzip2Exec ++ " --keep --force --quiet " ++ Filename ),

    % Check:
    Bzip2Filename = Filename ++ get_extension_for( bzip2 ),
    true = is_existing_file( Bzip2Filename ),

    Bzip2Filename;

compress( Filename, _CompressionFormat=xz ) ->

    XZExec = executable_utils:get_default_xz_compress_tool(),

    Command = XZExec ++ " --keep --force --quiet " ++ Filename,
    [] = os:cmd( Command ),

    % Check:
    XZFilename = Filename ++ get_extension_for( xz ),
    true = is_existing_file( XZFilename ),

    XZFilename;

compress( _Filename, CompressionFormat ) ->
    throw( { unsupported_compression_format, CompressionFormat } ).
% Decompresses specified compressed file, expected to bear the extension
% corresponding to the specified format: recreates the original, decompressed
% version thereof, whose filename, established based on usual conventions, is
% returned: the name of the input file without its extension.
%
% This function works in pair with compress/2, and as such expects that each
% compressed file contains exactly one file, bear the same filename except the
% compressor extension.
%
% Typically, when a format MY_FORMAT is specified, converts a compressed file
% name foo.extension_of(MY_FORMAT) into an uncompressed version of it named
% 'foo'.
%
% So, for example, decompress( "foo.xz", xz ) will generate a "foo" file.
%
% If a file with that name already exists, it will be overwritten.
%
% The compressed file remains as is.
%
% Fix: several commented-out lines and comments had lost their '%' prefix,
% breaking compilation; restored as comments.
%
-spec decompress( file_name(), compression_format() ) -> file_name().
decompress( ZipFilename, _CompressionFormat=zip ) ->

    % An annoying problem with zip is that the name of the (single) file in the
    % archive might differ from the filename deduced from the archive name (ex:
    % "foo.zi"p might contain "bar" instead of "foo"). We need to return "bar",
    % not "foo".

    % Rather than using a standalone zip tool, we use the Erlang support here:

    %UnzipExec = executable_utils:get_default_zip_decompress_tool(),

    % Checks and removes extension:
    %Filename = replace_extension( ZipFilename, get_extension_for( zip ), "" ),

    % Quiet, overwrite:
    %Command = UnzipExec ++ " -q -o " ++ ZipFilename,
    %[] = os:cmd( Command ),

    % Exactly one file per such archives:
    { ok, [ Filename ] } = zip:unzip( ZipFilename ),

    % We expect here than only the compression feature (not the archive-making
    % feature) of zip has been used, as for all other compressors:
    %
    true = is_existing_file( Filename ),

    Filename;

decompress( Bzip2Filename, _CompressionFormat=bzip2 ) ->

    Bzip2Exec = executable_utils:get_default_bzip2_decompress_tool(),

    % Checks and removes extension:
    Filename = replace_extension( Bzip2Filename, get_extension_for( bzip2 ),
        "" ),

    % The result will be named Filename by the decompressor:
    [] = os:cmd( Bzip2Exec ++ " --keep --force --quiet " ++ Bzip2Filename ),

    % Check:
    true = is_existing_file( Filename ),

    Filename;

decompress( XzFilename, _CompressionFormat=xz ) ->

    XZExec = executable_utils:get_default_xz_decompress_tool(),

    % Checks and removes extension:
    Filename = replace_extension( XzFilename, get_extension_for( xz ), "" ),

    % The result will be named Filename by unxz:
    [] = os:cmd( XZExec ++ " --keep --force --quiet " ++ XzFilename ),

    % Check:
    true = is_existing_file( Filename ),

    Filename;

decompress( _Filename, CompressionFormat ) ->
    throw( { unsupported_compression_format, CompressionFormat } ).
% Reads in memory the file specified from its filename, zips the corresponding
% term, and returns it, as a compressed binary.
%
% Note: useful for network transfers of small files.
%
% Larger ones should be transferred with TCP/IP and by chunks.
%
% Returns a binary.
%
% Fix: a commented-out verbose variant had lost its '%' prefix, so the archive
% was created TWICE (once with the verbose option); restored as a comment.
%
-spec file_to_zipped_term( file_name() ) -> binary().
file_to_zipped_term( Filename ) ->

    DummyFileName = "dummy",

    { ok, { _DummyFileName, Bin } } =
        %zip:zip( DummyFileName, [ Filename ], [ verbose, memory ] ),
        zip:zip( DummyFileName, [ Filename ], [ memory ] ),

    Bin.
% Reads specified binary, extracts the zipped file in it and writes it on disk,
% in current directory.
%
% Returns the filename of the unzipped file.
%
-spec zipped_term_to_unzipped_file( binary() ) -> file_name().
zipped_term_to_unzipped_file( ZippedTerm ) ->
    %zip:unzip( ZippedTerm, [ verbose ] ).
    % Exactly one file is expected in the archive:
    { ok, [ FileName ] } = zip:unzip( ZippedTerm ),
    FileName.
% Reads specified binary, extracts the zipped file in it and writes it on disk,
% in current directory, under specified filename instead of under filename
% stored in the zip archive.
%
% Any pre-existing file will be overwritten.
%
% Note: only one file is expected to be stored in the specified archive.
%
-spec zipped_term_to_unzipped_file( binary(), file_name() )
    -> basic_utils:void().
zipped_term_to_unzipped_file( ZippedTerm, TargetFilename ) ->
    % A single file is expected in the archive; its in-archive name is ignored:
    { ok, [ { _AFilename, Binary } ] } = zip:unzip( ZippedTerm, [ memory ] ),
    % { ok, File } = file:open( TargetFilename, [ write ] ),
    % ok = io:format( File, "~s", [ binary_to_list(Binary) ] ),
    % ok = file:write_file( File, "~s", [ binary_to_list(Binary) ] ),
    % ok = file:close( File ).
    write_whole( TargetFilename, Binary ).
% Reads in memory the files specified from their filenames, zips the
% corresponding term, and returns it.
%
% Note: useful for network transfers of small files.
%
% Larger ones should be transferred with TCP/IP and by chunks.
%
% Returns a binary.
%
-spec files_to_zipped_term( [file_name()] ) -> binary().
files_to_zipped_term( FilenameList ) ->
    % The archive name is irrelevant, as it only lives in memory:
    DummyFileName = "dummy",
    { ok, { _DummyFileName, Bin } } = zip:zip( DummyFileName, FilenameList,
        [ memory ] ),
    Bin.
% Reads in memory the files specified from their filenames, assuming their path
% is relative to the specified base directory, zips the corresponding term, and
% returns it.
%
% Note: useful for network transfers of small files.
%
% Larger ones should be transferred with TCP/IP and by chunks.
%
% Returns a binary.
%
% Fix: commented-out debug traces had lost their '%' prefix, breaking
% compilation; restored as comments.
%
-spec files_to_zipped_term( [ file_name() ], directory_name() ) -> binary().
files_to_zipped_term( FilenameList, BaseDirectory ) ->

    DummyFileName = "dummy",

    %io:format( "files_to_zipped_term operating from ~s on files: ~p.~n",
    %   [ BaseDirectory, FilenameList ] ),

    case zip:zip( DummyFileName, FilenameList,
            [ memory, { cwd, BaseDirectory } ] ) of

        { ok, { _DummyFileName, Bin } } ->
            Bin;

        { error, enoent } ->

            % Such a short error might be difficult to diagnose:

            %io:format( "~nfiles_to_zipped_term/2 failed from '~s':~n"
            %   "~n - directory '~p' exists? ~p",
            %   [ get_current_directory(), BaseDirectory,
            %     is_existing_directory( BaseDirectory ) ] ),

            % [ io:format( "~n - file '~p' exists? ~p", [ F,
            % is_existing_file( F ) ] ) || F <- FilenameList ],

            throw( { zip_failed, BaseDirectory, FilenameList } );

        { error, Other } ->
            throw( { zip_failed, Other, BaseDirectory, FilenameList } )

    end.
% Reads specified binary, extracts the zipped files stored in it and writes them
% on disk, in current directory.
%
% Returns the list of filenames corresponding to the unzipped files.
%
% Fix: a commented-out verbose variant had lost its '%' prefix, so the archive
% was extracted TWICE (once with the verbose option); restored as a comment.
%
-spec zipped_term_to_unzipped_files( binary() ) -> [ file_name() ].
zipped_term_to_unzipped_files( ZippedTerm ) ->
    %{ ok, FileNames } = zip:unzip( ZippedTerm, [ verbose ] ),
    { ok, FileNames } = zip:unzip( ZippedTerm ),
    FileNames.
% Reads specified binary, extracts the zipped files in it and writes them on
% disk, in specified directory.
%
% Returns the list of filenames corresponding to the unzipped files.
%
% Fix: a commented-out verbose variant had lost its '%' prefix, so the archive
% was extracted a first time in the current directory before the target-aware
% extraction; restored as a comment.
%
-spec zipped_term_to_unzipped_files( binary(), directory_name() )
    -> [ file_name() ].
zipped_term_to_unzipped_files( ZippedTerm, TargetDirectory ) ->

    %{ ok, FileNames } = zip:unzip( ZippedTerm, [ verbose ] ),

    case is_existing_directory( TargetDirectory ) of

        true ->
            { ok, FileNames } = zip:unzip( ZippedTerm,
                [ { cwd, TargetDirectory } ] ),
            FileNames;

        false ->
            throw( { non_existing_unzip_directory, TargetDirectory } )

    end.
| null | https://raw.githubusercontent.com/softlab-ntua/bencherl/317bdbf348def0b2f9ed32cb6621e21083b7e0ca/app/sim-diasca/common/src/utils/file_utils.erl | erlang |
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License or
any later version.
You can also redistribute it and/or modify it under the terms of the
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
for more details.
along with this library.
If not, see </> and
</>.
Gathering of various convenient facilities regarding files.
See file_utils_test.erl for the corresponding test.
Related standard modules: file, filename.
Filename-related operations.
I/O section.
Compression-related operations.
For the file_info record:
Type declarations:
A path may designate either a file or a directory.
All known types of file entries:
The supported compression formats:
Filename-related operations.
Joins the specified list of path elements.
instead. However filename:join( [ "", "my_dir" ] ) results in "/my_dir",
used instead. However filename:join( "", "my_dir" ) results in "/my_dir",
whereas often we would want "my_dir", which is returned by file_utils:join/2.
Converts specified name to an acceptable filename, filesystem-wise.
Currently we use exactly the same translation rules both for node names
and file names (see net_utils:generate_valid_node_name_from/1).
Note however that now we duplicate the code instead of calling the
to deploy under some circumstances.
Replaces each series of spaces (' '), lower than ('<'), greater than
('>'), comma (','), left ('(') and right (')') parentheses, single (''')
and double ('"') quotes, forward ('/') and backward ('\') slashes,
ampersand ('&'), tilde ('~'), sharp ('#'), at sign ('@'), all other kinds
of brackets ('{', '}', '[', ']'), pipe ('|'), dollar ('$'), star ('*'),
marks ('?' and '!'), plus ('+'), other punctation signs (';' and ':') by
exactly one underscore:
(see also: net_utils:generate_valid_node_name_from/1)
Returns a new filename whose extension has been updated.
"/home/jack/rosie.wav".
Tells whether specified file entry exists, regardless of its type.
Returns the type of the specified file entry.
We used to rely on file:read_file_info/1, but an existing symlink pointing
to a non-existing entry was triggering the enoent error, while we just
wanted to know that the specified entry is an existing (yet dead) symlink.
Some tools (e.g. emacs) used thus to get in the way, as apparently they
create dead symlinks on purpose, to store information.
Probably a recursive symlink:
Returns whether the specified entry, supposedly existing, is a regular file.
If the specified entry happens not to exist, a
'{ non_existing_entry, EntryName }' exception will be thrown.
Returns whether the specified entry exists and is a regular file.
Returns true or false, and cannot trigger an exception.
Returns whether the specified entry exists and is either a regular file or a
symbolic link.
Returns true or false, and cannot trigger an exception.
Returns whether the specified entry exists and is executable for its current
owner (can be either a regular file or a symbolic link).
Returns true or false, and cannot trigger an exception.
Not executable:
Returns whether the specified entry, supposedly existing, is a directory.
If the specified entry happens not to exist, a
'{ non_existing_entry, EntryName }' exception will be thrown.
Returns whether the specified entry exists and is a directory.
Returns true or false, and cannot trigger an exception.
specified directory: { Files, Directories, OtherFiles, Devices }.
Note that Files include symbolic links (dead or not).
io:format( "list_dir_elements for '~s'.~n", [ Dirname ] ),
Returns the current directory, as a plain string.
Throws an exception on failure.
Sets the specified directory as current directory.
Throws an exception on failure.
For more detail of { 'error', atom() }, refer to type specifications of
erlang files: file.erl and file.hrl.
Helper function.
file elements: { Directories, Files, Devices, OtherFiles }.
Note that Files include symbolic links (dead or not).
Note the reordering:
Managed as regular files:
Regarding extensions: we could canonicalise their case, so that ".png" and
".PNG" are treated the same.
of the specified suffixes.
match any of the specified suffixes.
We have to avoid feeding string:substr/2 with a start position that is not
strictly positive, otherwise we would trigger a function clause error:
Section dedicated to the look-up of files, with various variations (with or
without extensions, with or without excluded directories, etc.)
Returns the list of all regular files found from the root, in the whole
subtree (i.e. recursively).
All extensions and suffixes accepted, no excluded directories.
All returned pathnames are relative to this root.
Helper.
io:format( "find_files_from with root = '~s', current = '~s'.~n",
[ RootDir, CurrentRelativeDir ] ),
Specific helper for find_files_from/3 above:
io:format( "list_files_in_subdirs with root = '~s', current = '~s' "
"and H='~s'.~n", [ RootDir, CurrentRelativeDir, H ] ),
Returns the list of all regular files found from the root with specified
extension, in the whole subtree (i.e. recursively).
All returned pathnames are relative to this root.
Helper.
io:format( "find_files_from in ~s.~n", [ CurrentRelativeDir ] ),
Helper for find_files_with_extension_from/4:
Returns the list of all regular files found from the root, in the whole
subtree (i.e. recursively), with specified directories excluded.
Note that the excluded directories can be specified as a full path (ex:
"foo/bar/not-wanted"), for just as a final directory name (ex:
"my-excluded-name"). In the latter case, all directories bearing that name
(ex: "foo/bar/my-excluded-name") will be excluded as well.
Thus when a directory D is specified in the excluded list, each traversed
directory T will be compared twice to D: T will be matched against D, and
matches, T will be excluded.
All extensions accepted.
All returned pathnames are relative to this root.
Helper.
[ CurrentRelativeDir ] ),
".svn" but also all "foo/bar/.svn", i.e. all directories having the same
(last) name:
Specific helper for find_files_with_excluded_dirs/4 above:
Returns the list of all regular files found from the root which do not match
any of the specified suffixes, in the whole subtree (i.e. recursively).
All returned pathnames are relative to this root.
Helper:
io:format( "find_files_with_excluded_suffixes in ~s.~n",
[ CurrentRelativeDir ] ),
Helper for find_files_with_excluded_suffixes/4:
Returns the list of all regular files found from the root with specified
suffix, in the whole subtree (i.e. recursively), with specified directories
excluded.
Note that the excluded directories can be specified as a full path (ex:
"foo/bar/not-wanted"), for just as a final directory name (ex:
"my-excluded-name"). In the latter case, all directories bearing that name
(ex: "foo/bar/my-excluded-name") will be excluded as well.
Thus when a directory D is specified in the excluded list, each traversed
directory T will be compared twice to D: T will be matched against D, and
matches, T will be excluded.
All returned pathnames are relative to this root.
{ ok, CurrentDir } = file:get_cwd(),
io:format( "find_files_with_excluded_dirs_and_suffixes: current is ~s, "
Helper:
[ RootDir, CurrentRelativeDir ] ),
".svn" but also all "foo/bar/.svn", i.e. all directories having the same
(last) name:
Specific helper for find_files_with_excluded_dirs_and_suffixes/5 above:
Helper:
Returns the list of all directories found from the root, in the whole subtree
(i.e. recursively).
All returned pathnames are relative to this root.
Ex: [ "./my-dir", "./tmp/other-dir" ].
Helper:
io:format( "find_directories_from in ~s.~n", [ CurrentRelativeDir ] ),
Helper:
Creates specified directory, without creating any intermediate (parent)
directory that would not exist.
Throws an exception if the operation failed.
Creates the specified directory.
If 'create_no_parent' is specified, no intermediate (parent) directory will be
created.
If 'create_parents' is specified, any non-existing intermediate (parent)
directory will be created.
Throws an exception if the operation fails.
Creates specified directory (but not any parents), if not already existing.
Throws an exception if the operation fails.
Helper:
Creates a non previously existing temporary directory, and returs its full
path.
Very bad luck apparently, or same random root:
Removes specified file, specified as a plain string.
Throws an exception if any problem occurs.
Removes specified files, specified as a list of plain strings.
Removes specified file, specified as a plain string, iff it is already
existing, otherwise does nothing.
Removes each specified file, in specified list of plain strings, iff it is
already existing.
Removes specified directory, which must be empty.
Copies a specified file to a given destination.
Note: content is copied and permissions are preserved (ex: the copy of an
executable file will be itself executable).
Now sets the permissions of the copy:
Copies a specified file to a given destination iff it is already existing.
Note: content is copied and permissions are preserved (ex: the copy of an
executable file will be itself executable).
Moves specified file so that it is now designated by specified filename.
io:format( "## Moving file '~s' to '~s'.~n",
[ SourceFilename, DestinationFilename ] ),
copy_file( SourceFilename, DestinationFilename ),
remove_file( SourceFilename ).
Simpler, better:
Tells whether the specified path is an absolute one.
A path is deemed absolute iff it starts with "/".
Returns an absolute path corresponding to specified path.
If it is not already abolute, it will made so by using the current working
directory.
Already absolute:
Relative, using current directory as base:
Returns an absolute path corresponding to specified path, using base path as
root directory (this must be an absolute path).
Ex: ensure_path_is_absolute( "tmp/foo", "/home/dalton" ) will return
"/home/dalton/tmp/foo".
Already absolute:
Relative, using specified base directory:
afterwards.
For example, "/home/garfield/../lisa/./src/.././tube" shall be normalised in
"/home/lisa/tube".
The approach below would not work with, for example, "X/Y/Z/../../A":
% Returns in the right order:
filter_elems( _Elems=[], Acc ) ->
filter_elems( _Elems=[ "." | T ], Acc ) ->
filter_elems( _Elems=[ "..", _E | T ], Acc ) ->
filter_elems( _Elems=[ E | T ], Acc ) ->
filter_elems( T, [ E | Acc ] ).
Converts specified path (full filename, like '/home/jack/test.txt' or
'./media/test.txt') into a variable name licit in most programming languages
(ex: C/C++).
Rule here is:
- any leading './' is removed
- '-' becomes '_'
- '.' becomes '_'
- '/' becomes '_'
Removes any leading './'.
Helper function:
Returns a list of the known file extensions that refer image files.
Returns the image path corresponding to the specified file.
Returns the image path corresponding to the specified file.
I/O section.
options (as listed in file:open/2).
Returns the file reference, or throws an exception.
Will attempt to open the specified file only once, as looping endlessly does
not seem a viable solution right now (risk of exhausting the descriptors,
documentation).
'try_endlessly_safer', depending respectively on whether we want to try to
open the file once (no other attempt will be made), endlessly (until a file
descriptor can be gained), possibly with a safer setting.
Returns the file reference, or throws an exception.
Will try to obtain a file descriptor iteratively (and endlessly) with
A risk of that approach is that all available file descriptors will be
taken, thus potentially preventing other processes (including the VM itself)
"""
lib/erlang/lib/kernel-x.y.z/ebin/timer.beam. Function: get_file.
Process: code_server.
"""
%
processes than available file descriptors try to access to files. An effort is
made to desynchronize these processes to smooth the use of descriptors.
(file:mode() not exported currently unfortunately)
dummy file open/close and catching emfile, however one could need more
than one spare descriptor.
for that process and the current number of open ones, no information we
seems able to know.
So for the moment we do not do anything more than 'try_endlessly':
File descriptors exhausted for this OS process.
file descriptor opened:
Attempt not to use timer:sleep (anyway will trigger errors
afterwards when the system will try to look-up some BEAMs):
Never had system_limit without this cause (yet!):
Closes specified file reference.
Throws an exception on failure.
Closes specified file reference.
Throws an exception on failure or not, depending on specified failure mode.
Reads specified number of bytes/characters from the specified file.
Returns either { ok, Data } if at least some data could be read, or eof if at
least one element was to read and end of file was reached before anything at
all could be read.
Throws an exception on failure.
Writes specified content into specified file.
Throws an exception on failure.
Writes specified formatted content into specified file.
Throws an exception on failure.
Reads the content of the specified file, based on its filename specified as a
plain string, and returns the corresponding binary, or throws an exception on
failure.
Writes the specified binary in specified file, whose filename is specified as
a plain string. Throws an exception on failure.
Reads specified file, tries to read a list of terms from it, and returns it.
Throws an exception on error.
Compression-related operations.
Returns the file extension corresponding to filenames compressed with
specified format.
filename, established based on usual conventions, is returned. If a file with
that name already exists, it will be overwritten.
file.
The original file remain as is.
Note: this function just takes care of compressing a single file, even if some
compressors (ex: zip) include features to create an archive of multiple files
first.
Command = ZipExec ++ " --quiet " ++ ZipFilename ++ " " ++ Filename,
[] = os:cmd( Command ),
Check:
Check:
Check:
Decompresses specified compressed file, expected to bear the extension
corresponding to the specified format: recreates the original, decompressed
version thereof, whose filename, established based on usual conventions, is
returned: the name of the input file without its extension.
This function works in pair with compress/2, and as such expects that each
compressor extension.
name foo.extension_of(MY_FORMAT) into an uncompressed version of it named
'foo'.
So, for example, decompress( "foo.xz", xz ) will generate a "foo" file.
If a file with that name already exists, it will be overwritten.
The compressed file remains as is.
An annoying problem with zip is that the name of the (single) file in the
archive might differ from the filename deduced from the archive name (ex:
"foo.zi"p might contain "bar" instead of "foo"). We need to return "bar",
not "foo".
Checks and removes extension:
Quiet, overwrite:
[] = os:cmd( Command ),
We expect here than only the compression feature (not the archive-making
feature) of zip has been used, as for all other compressors:
Checks and removes extension:
Check:
Checks and removes extension:
Check:
Reads in memory the file specified from its filename, zips the corresponding
term, and returns it, as a compressed binary.
Note: useful for network transfers of small files.
Larger ones should be transferred with TCP/IP and by chunks.
Returns a binary.
Reads specified binary, extracts the zipped file in it and writes it on disk,
in current directory.
Returns the filename of the unzipped file.
zip:unzip( ZippedTerm, [ verbose ] ).
Reads specified binary, extracts the zipped file in it and writes it on disk,
in current directory, under specified filename instead of under filename
stored in the zip archive.
Any pre-existing file will be overwritten.
{ ok, File } = file:open( TargetFilename, [ write ] ),
ok = io:format( File, "~s", [ binary_to_list(Binary) ] ),
ok = file:write_file( File, "~s", [ binary_to_list(Binary) ] ),
ok = file:close( File ).
Reads in memory the files specified from their filenames, zips the
corresponding term, and returns it.
Note: useful for network transfers of small files.
Larger ones should be transferred with TCP/IP and by chunks.
Returns a binary.
Reads in memory the files specified from their filenames, assuming their path
is relative to the specified base directory, zips the corresponding term, and
returns it.
Note: useful for network transfers of small files.
Larger ones should be transferred with TCP/IP and by chunks.
Returns a binary.
io:format( "files_to_zipped_term operating from ~s on files: ~p.~n",
Such a short error might be difficult to diagnose:
"~n - directory '~p' exists? ~p",
[ io:format( "~n - file '~p' exists? ~p", [ F,
is_existing_file( F ) ] ) || F <- FilenameList ],
Reads specified binary, extracts the zipped files stored in it and writes them
on disk, in current directory.
Returns the list of filenames corresponding to the unzipped files.
Reads specified binary, extracts the zipped files in it and writes them on
disk, in specified directory.
Returns the list of filenames corresponding to the unzipped files.
| Copyright ( C ) 2003 - 2014
This file is part of the Ceylan Erlang library .
the GNU General Public License , as they are published by the Free Software
Foundation , either version 3 of these Licenses , or ( at your option )
Mozilla Public License , version 1.1 or later .
GNU Lesser General Public License and the GNU General Public License
You should have received a copy of the GNU Lesser General Public
License , of the GNU General Public License and of the Mozilla Public License
Author : ( )
Creation date : Saturday , July 12 , 2008 .
-module(file_utils).
-export([ join/1, join/2, convert_to_filename/1, replace_extension/3,
exists/1, get_type_of/1, is_file/1,
is_existing_file/1, is_existing_file_or_link/1,
is_executable/1, is_directory/1, is_existing_directory/1,
list_dir_elements/1,
get_current_directory/0, set_current_directory/1,
filter_by_extension/2, filter_by_extensions/2,
filter_by_included_suffixes/2, filter_by_excluded_suffixes/2,
find_files_from/1, find_files_with_extension_from/2,
find_files_with_excluded_dirs/2,
find_files_with_excluded_suffixes/2,
find_files_with_excluded_dirs_and_suffixes/3,
find_directories_from/1,
create_directory/1, create_directory/2,
create_directory_if_not_existing/1,
create_temporary_directory/0,
remove_file/1, remove_file_if_existing/1,
remove_files/1, remove_files_if_existing/1,
remove_directory/1,
copy_file/2, copy_file_if_existing/2,
rename/2, move_file/2,
is_absolute_path/1,
ensure_path_is_absolute/1, ensure_path_is_absolute/2,
normalise_path/1,
path_to_variable_name/1, path_to_variable_name/2,
get_image_extensions/0, get_image_file_png/1, get_image_file_gif/1 ]).
-export([ open/2, open/3, close/1, close/2, read/2, write/2, write/3,
read_whole/1, write_whole/2, read_terms/1 ]).
-export([ get_extension_for/1, compress/2, decompress/2,
file_to_zipped_term/1, zipped_term_to_unzipped_file/1,
zipped_term_to_unzipped_file/2,
files_to_zipped_term/1, files_to_zipped_term/2,
zipped_term_to_unzipped_files/1, zipped_term_to_unzipped_files/2 ]).
-include_lib("kernel/include/file.hrl").
-type path() :: string().
-type bin_path() :: binary().
-type file_name() :: path().
-type bin_file_name() :: binary().
-type directory_name() :: path().
-type bin_directory_name() :: binary().
-type extension() :: string().
-type entry_type() :: 'device' | 'directory' | 'other' | 'regular' | 'symlink'.
-type compression_format() :: 'zip' | 'bzip2' | 'xz'.
-export_type([ path/0, bin_path/0,
file_name/0, bin_file_name/0,
directory_name/0, bin_directory_name/0,
extension/0,
entry_type/0,
compression_format/0
]).
% Note: join/1 added back to file_utils; filename:join( Components ) could be
% used instead. However filename:join( [ "", "my_dir" ] ) results in "/my_dir",
% whereas often we would want "my_dir", which is what file_utils:join/1 returns.
-spec join( [ path() ] ) -> path().
% Joins the specified list of path elements; a leading empty component is
% dropped, so that the result stays relative instead of becoming absolute.
join( [ "" | RemainingComponents ] ) ->
	filename:join( RemainingComponents );

join( Components ) ->
	filename:join( Components ).
% Joins the two specified path elements.
%
% Note: join/2 added back to file_utils; filename:join( Name1, Name2 ) could be
% used instead. However filename:join( "", "my_dir" ) results in "/my_dir",
% whereas often we would want "my_dir", which is what file_utils:join/2 returns.
-spec join( path(), path() ) -> path().
% Joins the two specified path elements; an empty first element yields the
% second one unchanged, so the result is not made absolute.
join( "", SecondPath ) ->
	SecondPath;

join( FirstPath, SecondPath ) ->
	filename:join( FirstPath, SecondPath ).
% Converts the specified name into an acceptable filename, filesystem-wise:
% each series of problematic characters (spaces, angle brackets, commas,
% parentheses, quotes, slashes, ampersand, tilde, sharp, at-sign, braces,
% square brackets, pipe, dollar, star, question/exclamation marks, plus,
% semicolon, colon) is replaced by exactly one underscore.
-spec convert_to_filename( string() ) -> file_name().
convert_to_filename( Name ) ->

	% Note: currently exactly the same translation rules as
	% net_utils:generate_valid_node_name_from/1; the code is duplicated
	% (instead of calling that module) to avoid one more module to deploy.
	re:replace( lists:flatten(Name),
		"( |<|>|,|\\(|\\)|'|\"|/|\\\\|\&|~|"
		"#|@|{|}|\\[|\\]|\\||\\$|\\*|\\?|!|\\+|;|:)+", "_",
		[ global, { return, list } ] ).
% Returns a new filename whose extension has been updated.
%
% Ex: replace_extension( "/home/jack/rosie.ttf", ".ttf", ".wav" ) should return
% "/home/jack/rosie.wav".
-spec replace_extension( file_name(), extension(), extension() ) -> file_name().
% Replaces, in the specified filename, the last occurrence of the source
% extension with the target one; throws { extension_not_found, ... } if the
% source extension does not appear in the filename.
replace_extension( Filename, SourceExtension, TargetExtension ) ->

	case string:rstr( Filename, SourceExtension ) of

		0 ->
			throw( { extension_not_found, SourceExtension, Filename } );

		Index ->
			Prefix = string:substr( Filename, 1, Index - 1 ),
			Prefix ++ TargetExtension

	end.
-spec exists( file_name() ) -> boolean().
% Tells whether the specified file entry exists, regardless of its type;
% never throws.
exists( EntryName ) ->
	case file:read_file_info( EntryName ) of

		{ ok, _Info } ->
			true;

		{ error, _Reason } ->
			false

	end.
% Returns the type of the specified file entry.
%
% Note: relies on file:read_link_info/1 (not file:read_file_info/1), so that
% an existing yet dead symbolic link is reported as a symlink instead of
% triggering enoent.
-spec get_type_of( file_name() ) -> entry_type().
get_type_of( EntryName ) ->
	case file:read_link_info( EntryName ) of

		{ ok, FileInfo } ->
			#file_info{ type=FileType } = FileInfo,
			FileType;

		{ error, eloop } ->
			% Probably a recursive symlink:
			throw( { too_many_symlink_levels, EntryName } );

		{ error, enoent } ->
			throw( { non_existing_entry, EntryName } )

	end.
-spec is_file( file_name() ) -> boolean().
% Returns whether the specified entry, supposedly existing, is a regular
% file; throws (from get_type_of/1) if the entry does not exist.
is_file( EntryName ) ->
	get_type_of( EntryName ) =:= regular.
-spec is_existing_file( file_name() ) -> boolean().
% Returns whether the specified entry exists and is a regular file; never
% throws (a non-existing entry simply yields false).
is_existing_file( EntryName ) ->
	exists( EntryName ) andalso get_type_of( EntryName ) =:= regular.
-spec is_existing_file_or_link( file_name() ) -> boolean().
% Returns whether the specified entry exists and is either a regular file or
% a symbolic link; never throws (a non-existing entry simply yields false).
is_existing_file_or_link( EntryName ) ->
	exists( EntryName ) andalso
		lists:member( get_type_of( EntryName ), [ regular, symlink ] ).
% Returns whether the specified entry exists and is executable for its
% current owner (may be a regular file or a symbolic link); never throws.
-spec is_executable( file_name() ) -> boolean().
is_executable( ExecutableName ) ->

	case file:read_file_info( ExecutableName ) of

		{ ok, FileInfo } ->

			#file_info{ type=FileType, mode=Mode } = FileInfo,

			case FileType of

				regular ->

					% Tests the owner-executable permission bit:
					OwnerExecMask = 8#00100,
					case Mode band OwnerExecMask of

						0 ->
							% Not executable:
							false;

						_ ->
							% One positive case:
							true

					end;

				_ ->
					false

			end;

		_ ->
			false

	end.
-spec is_directory( directory_name() ) -> boolean().
% Returns whether the specified entry, supposedly existing, is a directory;
% throws (from get_type_of/1) if the entry does not exist.
is_directory( EntryName ) ->
	get_type_of( EntryName ) =:= directory.
-spec is_existing_directory( directory_name() ) -> boolean().
% Returns whether the specified entry exists and is a directory; never
% throws (a non-existing entry simply yields false).
is_existing_directory( EntryName ) ->
	exists( EntryName ) andalso get_type_of( EntryName ) =:= directory.
% Returns a tuple made of four lists describing the file elements found in
% the specified directory: { Files, Directories, OtherFiles, Devices }.
% Lists the content of the specified directory and sorts it into
% { Files, Directories, OtherFiles, Devices }.
%
% Note: Files include symbolic links (dead or not); see
% classify_dir_elements/6.
-spec list_dir_elements( directory_name() ) ->
	{ [ file_name() ], [ directory_name() ], [ file_name() ], [ file_name() ] }.
list_dir_elements( Dirname ) ->
	%io:format( "list_dir_elements for '~s'.~n", [ Dirname ] ),
	{ ok, LocalDirElements } = file:list_dir( Dirname ),
	classify_dir_elements( Dirname, LocalDirElements, _Devices=[],
		_Directories=[], _Files=[], _OtherFiles=[] ).
% Returns the current working directory, as a plain string.
%
% Throws { failed_to_determine_current_directory, Reason } on failure.
-spec get_current_directory() -> directory_name().
get_current_directory() ->
	case file:get_cwd() of

		{ ok, Dir } ->
			Dir;

		{ error, Reason } ->
			throw( { failed_to_determine_current_directory, Reason } )

	end.
% Sets the specified directory as the current working directory.
%
% Throws { set_current_directory_failed, DirName, Error } on failure; for
% more detail about the possible Error values, refer to the file:set_cwd/1
% documentation (file.erl and file.hrl).
-spec set_current_directory( directory_name() ) -> basic_utils:void().
set_current_directory( DirName ) ->
	case file:set_cwd( DirName ) of

		ok ->
			ok;

		{ error, Error } ->
			throw( { set_current_directory_failed, DirName, Error } )

	end.
% Returns a tuple containing four lists corresponding to the sorting of all
% the file elements of a directory: { Files, Directories, OtherFiles, Devices }.
% Recursive helper sorting directory elements by type; accumulates each
% element into the list matching its get_type_of/1 result.
classify_dir_elements( _Dirname, _Elements=[], Devices, Directories, Files,
		OtherFiles ) ->
	% Note the reordering of the accumulators in the result:
	{ Files, Directories, OtherFiles, Devices };

classify_dir_elements( Dirname, _Elements=[ H | T ],
		Devices, Directories, Files, OtherFiles ) ->

	case get_type_of( filename:join( Dirname, H ) ) of

		device ->
			classify_dir_elements( Dirname, T, [ H | Devices ], Directories,
				Files, OtherFiles ) ;

		directory ->
			classify_dir_elements( Dirname, T, Devices, [ H | Directories ],
				Files, OtherFiles ) ;

		regular ->
			classify_dir_elements( Dirname, T, Devices, Directories,
				[ H | Files ], OtherFiles ) ;

		symlink ->
			% Symbolic links are managed as regular files:
			classify_dir_elements( Dirname, T, Devices, Directories,
				[ H | Files ], OtherFiles ) ;

		other ->
			classify_dir_elements( Dirname, T, Devices, Directories,
				Files, [ H | OtherFiles ] )

	end.
% Returns a list containing all elements of the Filenames list whose
% extension is the specified one (ex: ".dat").
-spec filter_by_extension( [ file_name() ], extension() ) -> [ file_name() ].
% Keeps only the filenames bearing the specified extension; the result is in
% reverse order of the input list.
filter_by_extension( Filenames, Extension ) ->
	lists:foldl(
		fun( Filename, Acc ) ->

			case filename:extension( Filename ) of

				Extension ->
					[ Filename | Acc ];

				_Other ->
					Acc

			end

		end,
		_Acc0=[],
		Filenames ).
% Returns a list containing all elements of the Filenames list whose
% extension corresponds to one of the specified extensions (ex: [ ".dat",
% ".png" ]).
-spec filter_by_extensions( [ file_name() ], [ extension() ] ) ->
		[ file_name() ].
% Keeps only the filenames bearing one of the specified extensions; the
% result is in reverse order of the input list.
filter_by_extensions( Filenames, Extensions ) ->
	lists:foldl(
		fun( Filename, Acc ) ->

			case lists:member( filename:extension( Filename ), Extensions ) of

				true ->
					[ Filename | Acc ];

				false ->
					Acc

			end

		end,
		_Acc0=[],
		Filenames ).
% Returns a list containing all elements of the Filenames list which match
% any of the specified (included) suffixes.
-spec filter_by_included_suffixes( [ file_name() ], [ string() ] ) ->
		[ file_name() ].
% Keeps only the filenames ending with at least one of the specified
% suffixes; input order is preserved.
filter_by_included_suffixes( Filenames, IncludedSuffixes ) ->
	lists:filter(
		fun( Filename ) ->
			has_matching_suffix( Filename, IncludedSuffixes )
		end,
		Filenames ).
% Returns a list containing all elements of the Filenames list which do not
% match any of the specified (excluded) suffixes.
-spec filter_by_excluded_suffixes( [ file_name() ], [ string() ] ) ->
		[ file_name() ].
% Keeps only the filenames ending with none of the specified suffixes; input
% order is preserved.
filter_by_excluded_suffixes( Filenames, ExcludedSuffixes ) ->
	lists:filter(
		fun( Filename ) ->
			not has_matching_suffix( Filename, ExcludedSuffixes )
		end,
		Filenames ).
% Tells whether the specified filename ends with at least one of the
% specified suffixes (false for an empty suffix list, or when every suffix
% is longer than the filename).
%
% Uses the idiomatic lists:suffix/2 instead of the previous manual
% length/substr computation; behavior is unchanged.
-spec has_matching_suffix( file_name(), [ string() ] ) -> boolean().
has_matching_suffix( Filename, Suffixes ) ->
	lists:any( fun( Suffix ) -> lists:suffix( Suffix, Filename ) end,
		Suffixes ).
% Returns the list of all regular files found from the root, in the whole
% subtree (i.e. recursively); all extensions and suffixes are accepted, no
% directory is excluded.
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
% Recursive scan for regular files; see the overview comment above.
-spec find_files_from( directory_name() ) -> [ file_name() ].
find_files_from( RootDir ) ->
	find_files_from( RootDir, _CurrentRelativeDir="", _Acc=[] ).

% Helper: accumulates, for the given relative directory, the files found in
% its subdirectories then its own regular files.
find_files_from( RootDir, CurrentRelativeDir, Acc ) ->
	{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
		join( RootDir, CurrentRelativeDir ) ),
	Acc ++ list_files_in_subdirs( Directories, RootDir, CurrentRelativeDir, [] )
		++ prefix_files_with( CurrentRelativeDir, RegularFiles ).

% Specific helper for find_files_from/3 above: recurses into each
% subdirectory in turn.
list_files_in_subdirs( _Dirs=[], _RootDir, _CurrentRelativeDir, Acc ) ->
	Acc;

list_files_in_subdirs( _Dirs=[ H | T ], RootDir, CurrentRelativeDir, Acc ) ->
	list_files_in_subdirs( T, RootDir, CurrentRelativeDir,
		find_files_from( RootDir, join( CurrentRelativeDir, H ), [] ) ++ Acc ).
% Returns the list of all regular files found from the root with the
% specified extension, in the whole subtree (i.e. recursively).
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
% Recursive scan for regular files bearing a given extension; see the
% overview comment above.
-spec find_files_with_extension_from( directory_name(), extension() )
	-> [ file_name() ].
find_files_with_extension_from( RootDir, Extension ) ->
	find_files_with_extension_from( RootDir, "", Extension, [] ).

% Helper: accumulates subdirectory matches, then the local regular files
% filtered by extension.
find_files_with_extension_from( RootDir, CurrentRelativeDir, Extension, Acc ) ->
	{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
		join( RootDir, CurrentRelativeDir ) ),
	Acc ++ list_files_in_subdirs_with_extension( Directories, Extension,
			RootDir, CurrentRelativeDir, [] )
		++ prefix_files_with( CurrentRelativeDir,
			filter_by_extension( RegularFiles, Extension ) ).

% Specific helper for find_files_with_extension_from/4 above: recurses into
% each subdirectory in turn.
list_files_in_subdirs_with_extension( _Dirs=[], _Extension, _RootDir,
		_CurrentRelativeDir, Acc) ->
	Acc;

list_files_in_subdirs_with_extension( _Dirs=[ H | T ], Extension, RootDir,
		CurrentRelativeDir, Acc ) ->
	list_files_in_subdirs_with_extension( T, Extension, RootDir,
		CurrentRelativeDir,
		find_files_with_extension_from( RootDir, join( CurrentRelativeDir, H ),
			Extension, [] ) ++ Acc ).
% Returns the list of all regular files found from the root, in the whole
% subtree (i.e. recursively), with the specified directories excluded.
%
% Each traversed directory T is compared to each excluded directory D both as
% a full relative path and against filename:basename( T ), i.e. its final
% name, as well. As soon as one matches, T is excluded.
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
% Recursive scan for regular files with directories excluded; see the
% overview comment above.
-spec find_files_with_excluded_dirs( directory_name(), [ directory_name() ] )
		-> [ file_name() ].
find_files_with_excluded_dirs( RootDir, ExcludedDirList ) ->
	find_files_with_excluded_dirs( RootDir, _CurrentRelativeDir="",
		ExcludedDirList, _Acc=[] ).

% Helper:
find_files_with_excluded_dirs( RootDir, CurrentRelativeDir, ExcludedDirList,
		Acc ) ->

	%io:format( "find_files_with_excluded_dirs in ~s.~n",
	%           [ CurrentRelativeDir ] ),

	{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
		join( RootDir, CurrentRelativeDir ) ),

	% If for example ExcludedDirList=[ ".svn" ], we want to eliminate not
	% only ".svn" but also all "foo/bar/.svn", i.e. all directories having
	% the same (last) name:
	FilteredDirectories = [ D || D <- Directories,
		not ( lists:member( join( CurrentRelativeDir, D ), ExcludedDirList )
			or lists:member( D, ExcludedDirList ) ) ],

	Acc ++ list_files_in_subdirs_excluded_dirs( FilteredDirectories, RootDir,
			CurrentRelativeDir, ExcludedDirList, _NestedAcc=[] )
		++ prefix_files_with( CurrentRelativeDir, RegularFiles ).

% Specific helper for find_files_with_excluded_dirs/4 above: recurses into
% each (non-excluded) subdirectory in turn.
list_files_in_subdirs_excluded_dirs( _Dirs=[], _RootDir,
		_CurrentRelativeDir, _ExcludedDirList, Acc ) ->
	Acc;

list_files_in_subdirs_excluded_dirs( _Dirs=[ H | T ], RootDir,
		CurrentRelativeDir, ExcludedDirList, Acc ) ->
	list_files_in_subdirs_excluded_dirs( T, RootDir, CurrentRelativeDir,
		ExcludedDirList,
		find_files_with_excluded_dirs( RootDir, join( CurrentRelativeDir, H ),
			ExcludedDirList, [] ) ++ Acc ).
% Returns the list of all regular files found from the root which do not
% match any of the specified suffixes, in the whole subtree (i.e.
% recursively).
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
% Recursive scan for regular files with suffixes excluded; see the overview
% comment above.
-spec find_files_with_excluded_suffixes( directory_name(), [ string() ])
	-> [ file_name() ].
find_files_with_excluded_suffixes( RootDir, ExcludedSuffixes ) ->
	find_files_with_excluded_suffixes( RootDir, _CurrentRelativeDir="",
		ExcludedSuffixes, _Acc=[] ).

% Helper: accumulates subdirectory matches, then the local regular files
% whose name does not bear any of the excluded suffixes.
find_files_with_excluded_suffixes( RootDir, CurrentRelativeDir,
		ExcludedSuffixes, Acc ) ->
	{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
		join( RootDir, CurrentRelativeDir ) ),
	Acc ++ list_files_in_subdirs_with_excluded_suffixes( Directories,
			ExcludedSuffixes, RootDir, CurrentRelativeDir, [] )
		++ prefix_files_with( CurrentRelativeDir,
			filter_by_excluded_suffixes( RegularFiles, ExcludedSuffixes ) ).

% Specific helper for find_files_with_excluded_suffixes/4 above: recurses
% into each subdirectory in turn.
-spec list_files_in_subdirs_with_excluded_suffixes( list(), [ string() ],
	directory_name(), directory_name(), [ file_name() ]) -> [ file_name() ].
list_files_in_subdirs_with_excluded_suffixes( [], _ExcludedSuffixes, _RootDir,
		_CurrentRelativeDir, Acc ) ->
	Acc;

list_files_in_subdirs_with_excluded_suffixes( [ H | T ], ExcludedSuffixes,
		RootDir, CurrentRelativeDir, Acc ) ->
	list_files_in_subdirs_with_excluded_suffixes( T, ExcludedSuffixes, RootDir,
		CurrentRelativeDir,
		find_files_with_excluded_suffixes( RootDir,
			join( CurrentRelativeDir, H ), ExcludedSuffixes, [] ) ++ Acc ).
% Returns the list of all regular files found from the root with the
% specified directories and suffixes excluded, in the whole subtree (i.e.
% recursively).
%
% Each traversed directory T is compared to each excluded directory D both as
% a full relative path and against filename:basename( T ), i.e. its final
% name, as well. As soon as one matches, T is excluded.
%
% All returned pathnames are relative to this root.
% Ex: [ "./a.txt", "./tmp/b.txt" ].
-spec find_files_with_excluded_dirs_and_suffixes( directory_name(),
		[ directory_name() ], [ string() ] ) -> [ file_name() ].
find_files_with_excluded_dirs_and_suffixes( RootDir, ExcludedDirList,
		ExcludedSuffixes ) ->
	% Disabled debug trace (this residue line was breaking the build):
	%io:format( "find_files_with_excluded_dirs_and_suffixes: "
	%	"root is ~s.~n", [ RootDir ] ),
	find_files_with_excluded_dirs_and_suffixes( RootDir,
		_CurrentRelativeDir="", ExcludedDirList, ExcludedSuffixes, _Acc=[]
	).
% Helper: lists matching files of RootDir/CurrentRelativeDir, recursing into
% the non-excluded subdirectories and filtering out the excluded suffixes.
find_files_with_excluded_dirs_and_suffixes( RootDir, CurrentRelativeDir,
		ExcludedDirList, ExcludedSuffixes, Acc ) ->
	% Disabled debug trace (the bare residue line here broke compilation):
	%io:format( "find_files_with_excluded_dirs_and_suffixes in ~s/~s.~n",
	%	[ RootDir, CurrentRelativeDir ] ),
	{ RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
		join( RootDir, CurrentRelativeDir ) ),
	% If for example ExcludedDirList=[ ".svn" ], we want to eliminate such a
	% directory both as a relative path and as a plain name:
	FilteredDirectories = [ D || D <- Directories,
		not ( lists:member( join( CurrentRelativeDir, D ), ExcludedDirList )
			or lists:member( D, ExcludedDirList ) ) ],
	Acc ++ list_files_in_subdirs_excluded_dirs_and_suffixes(
			FilteredDirectories, RootDir, CurrentRelativeDir,
			ExcludedDirList, ExcludedSuffixes, _Acc=[] )
		++ prefix_files_with( CurrentRelativeDir,
			filter_by_excluded_suffixes( RegularFiles, ExcludedSuffixes ) ).
% Helper: lists recursively the files under each of the specified
% subdirectories, with both excluded directories and suffixes filtered-out.
list_files_in_subdirs_excluded_dirs_and_suffixes( _Dirs=[], _RootDir,
		_CurrentRelativeDir, _ExcludedDirList, _ExcludedSuffixes, Acc ) ->
	Acc;

list_files_in_subdirs_excluded_dirs_and_suffixes( _Dirs=[ H | T ], RootDir,
		CurrentRelativeDir, ExcludedDirList, ExcludedSuffixes, Acc ) ->
	% Recurses into subdirectory H, then proceeds with the remaining ones:
	list_files_in_subdirs_excluded_dirs_and_suffixes( T, RootDir,
		CurrentRelativeDir, ExcludedDirList, ExcludedSuffixes,
		find_files_with_excluded_dirs_and_suffixes( RootDir,
			join(CurrentRelativeDir,H), ExcludedDirList, ExcludedSuffixes, [] )
		++ Acc ).
% Prefixes each of the specified file paths with the specified root directory.
-spec prefix_files_with( directory_name(), [ file_name() ] ) -> [ file_name() ].
prefix_files_with( RootDir, Files ) ->
	% Disabled debug trace (was an uncommented leftover, printing on each call):
	%io:format( "Prefixing ~p with '~s'.~n", [ Files, RootDir ] ),
	prefix_files_with( RootDir, Files, _Acc=[] ).
% Helper: prepends each joined path onto the accumulator (hence the result is
% in reverse order of the input list).
prefix_files_with( _RootDir, _Files=[], Acc ) ->
	Acc;

prefix_files_with( RootDir, _Files=[ File | MoreFiles ], Acc ) ->
	prefix_files_with( RootDir, MoreFiles, [ join( RootDir, File ) | Acc ] ).
% Returns the list of all directories found from the root, recursively.
-spec find_directories_from( directory_name() ) -> [ directory_name() ].
find_directories_from( RootDir ) ->
	find_directories_from( RootDir, "", _Acc=[] ).

% Helper: accumulates the directories found in RootDir/CurrentRelativeDir and,
% recursively, in all of its subdirectories.
find_directories_from( RootDir, CurrentRelativeDir, Acc ) ->
	{ _RegularFiles, Directories, _OtherFiles, _Devices } = list_dir_elements(
		join( RootDir, CurrentRelativeDir ) ),
	Acc ++ list_directories_in_subdirs( Directories,
			RootDir, CurrentRelativeDir, _Acc=[] )
		++ prefix_files_with( CurrentRelativeDir, Directories ).
% Helper: accumulates the directories found under each specified subdirectory.
list_directories_in_subdirs( Dirs, RootDir, CurrentRelativeDir, Acc ) ->
	case Dirs of

		[] ->
			Acc;

		[ Dir | MoreDirs ] ->
			NewAcc = find_directories_from( RootDir,
				join( CurrentRelativeDir, Dir ), _Acc=[] ) ++ Acc,
			list_directories_in_subdirs( MoreDirs, RootDir,
				CurrentRelativeDir, NewAcc )

	end.
% Creates the specified directory ("mkdir"), without creating any intermediate
% (parent) directory; throws on failure.
-spec create_directory( directory_name() ) -> basic_utils:void().
create_directory( Dirname ) ->
	create_directory( Dirname, create_no_parent ).

% Creates the specified directory; if create_parents is specified, any missing
% intermediate (parent) directory is created as well; throws on failure.
-spec create_directory( directory_name(),
		'create_no_parent' | 'create_parents' ) -> basic_utils:void().
create_directory( Dirname, create_no_parent ) ->
	case file:make_dir( Dirname ) of

		ok ->
			ok;

		{ error, Reason } ->
			throw( { create_directory_failed, Dirname, Reason } )

	end;

create_directory( Dirname, create_parents ) ->
	% Creates each path element in turn:
	create_dir_elem( filename:split( Dirname ), "" ).
% Creates the specified directory, unless it already exists.
-spec create_directory_if_not_existing( directory_name() ) ->
		basic_utils:void().
create_directory_if_not_existing( Dirname ) ->
	case is_existing_directory( Dirname ) of

		false ->
			create_directory( Dirname );

		true ->
			ok

	end.
% Helper: creates, one level at a time, each path element that does not exist
% yet, accumulating the already-handled prefix.
create_dir_elem( _Elems=[], _Prefix ) ->
	ok;

create_dir_elem( _Elems=[ H | T ], Prefix ) ->
	NewPrefix = join( Prefix, H ),
	case exists( NewPrefix ) of

		true ->
			ok ;

		false ->
			create_directory( NewPrefix, create_no_parent )

	end,
	create_dir_elem( T, NewPrefix ).
% Creates a non-previously existing temporary directory (under /tmp, in a
% per-user, UUID-named subdirectory) and returns its full path.
-spec create_temporary_directory() -> directory_name().
create_temporary_directory() ->
	TmpDir = join( [ "/tmp", system_utils:get_user_name(),
		basic_utils:generate_uuid() ] ),
	case exists( TmpDir ) of

		true ->
			% Very low probability of collision, but still possible; retries:
			create_temporary_directory();

		false ->
			create_directory( TmpDir, create_parents ),
			TmpDir

	end.
% Removes the specified file; throws if the removal fails.
-spec remove_file( file_name() ) -> basic_utils:void().
remove_file( Filename ) ->
	% Disabled debug trace (was an uncommented leftover, printing on each call):
	%io:format( "## Removing file '~s'.~n", [ Filename ] ),
	case file:delete( Filename ) of

		ok ->
			ok;

		Error ->
			throw( { remove_file_failed, Filename, Error } )

	end.
% Removes each of the specified files; throws if any removal fails.
-spec remove_files( [ file_name() ] ) -> basic_utils:void().
remove_files( FilenameList ) ->
	[ remove_file( Filename ) || Filename <- FilenameList ].

% Removes the specified file, iff it already exists; does nothing otherwise.
-spec remove_file_if_existing( file_name() ) -> basic_utils:void().
remove_file_if_existing( Filename ) ->
	case is_existing_file( Filename ) of

		true ->
			remove_file( Filename );

		false ->
			ok

	end.

% Removes each of the specified files, for those that exist.
-spec remove_files_if_existing( [ file_name() ] ) -> basic_utils:void().
remove_files_if_existing( FilenameList ) ->
	[ remove_file_if_existing( Filename ) || Filename <- FilenameList ].
% Removes the specified (expectedly empty) directory; throws on failure.
-spec remove_directory( directory_name() ) -> basic_utils:void().
remove_directory( DirectoryName ) ->
	case file:del_dir( DirectoryName ) of

		ok ->
			ok;

		{ error, Reason } ->
			throw( { remove_directory_failed, Reason, DirectoryName } )

	end.
% Copies the specified file to the specified destination, preserving its
% permission mode; throws on failure.
-spec copy_file( file_name(), file_name() ) -> basic_utils:void().
copy_file( SourceFilename, DestinationFilename ) ->
	% First, checks the source file exists and retrieves its meta-information
	% (this comment line had lost its marker, breaking compilation):
	case file:read_file_info( SourceFilename ) of

		{ ok, #file_info{ mode=Mode } } ->
			case file:copy( SourceFilename, DestinationFilename ) of

				{ ok, _ByteCount } ->
					% Replicates the permissions of the original:
					ok = file:change_mode( DestinationFilename, Mode );

				Error ->
					throw( { copy_file_failed, SourceFilename,
							DestinationFilename, Error } )

			end;

		{ error, Reason } ->
			throw( { copy_file_failed, SourceFilename, Reason } )

	end.
% Copies the specified file to the specified destination, iff the source file
% exists; does nothing otherwise.
-spec copy_file_if_existing( file_name(), file_name() ) -> basic_utils:void().
copy_file_if_existing( SourceFilename, DestinationFilename ) ->
	case is_existing_file( SourceFilename ) of

		true ->
			copy_file( SourceFilename, DestinationFilename );

		false ->
			ok

	end.
% Renames specified file.
% Renames the specified file: a mere alias for move_file/2.
rename( SourceFilename, DestinationFilename ) ->
	move_file( SourceFilename, DestinationFilename ).
% Moves (renames) the specified file; throws on failure.
-spec move_file( file_name(), file_name() ) -> basic_utils:void().
move_file( SourceFilename, DestinationFilename ) ->
	case file:rename( SourceFilename, DestinationFilename ) of

		ok ->
			ok;

		Error ->
			throw( { move_file_failed, Error, SourceFilename,
					DestinationFilename } )

	end.
% Tells whether the specified path is absolute, i.e. begins with "/".
-spec is_absolute_path( path() ) -> boolean().
is_absolute_path( Path ) ->
	case Path of

		[ $/ | _ ] ->
			true;

		_ ->
			false

	end.
% Returns an absolute version of the specified path: a relative path is
% interpreted against the current directory.
ensure_path_is_absolute( Path ) ->
	case is_absolute_path( Path ) of

		true ->
			Path;

		false ->
			% Relative path, interpreted from the current directory:
			join( get_current_directory(), Path )

	end.

% Returns an absolute version of TargetPath: a relative target path is
% interpreted against BasePath, which must itself be absolute (otherwise an
% exception is thrown).
ensure_path_is_absolute( TargetPath, BasePath ) ->
	case is_absolute_path( TargetPath ) of

		true ->
			TargetPath;

		false ->
			case is_absolute_path( BasePath ) of

				true ->
					join( BasePath, TargetPath );

				false ->
					throw( { base_path_not_absolute, BasePath } )

			end

	end.
% Normalises path, by translating it so that no '.' or '..' is present
% afterwards.
% Normalises the specified path: removes any "." component, and resolves each
% ".." component against its preceding element.
% Ex: normalise_path( "a/b/../c" ) returns "a/c".
normalise_path( Path ) ->
	ElemList = filename:split( Path ),
	join( filter_elems( ElemList, _Acc=[] ) ).


% Helper: filters out "." elements and resolves ".." ones (Acc is reversed).
filter_elems( _ElemList=[], Acc ) ->
	lists:reverse( Acc );

filter_elems( _ElemList=[ "." | T ], Acc ) ->
	filter_elems( T, Acc );

% A ".." that cannot be resolved (nothing accumulated before it, or only
% unresolved ".." elements) must be kept as is; the previous unconditional
% tl/1 call crashed (badarg) on paths like "../foo" or "a/../../b":
filter_elems( _ElemList=[ ".." | T ], Acc ) when Acc =:= []
		orelse hd( Acc ) =:= ".." ->
	filter_elems( T, [ ".." | Acc ] );

filter_elems( _ElemList=[ ".." | T ], Acc ) ->
	filter_elems( T, tl( Acc ) );

filter_elems( _ElemList=[ E | T ], Acc ) ->
	filter_elems( T, [ E | Acc ] ).
% (previous implementation, kept commented-out for reference:)
%   RevElemList = lists:reverse( filename:split( Path ) ),
%   join( filter_elems( RevElemList, _Acc=[] ) ).
%   Acc;
%   filter_elems( T, Acc );
%   filter_elems( T, Acc );
% - variable name starts with a prefix, user-supplied or the default one
% Converts the specified path into a (legal) variable name, using the default
% "File_" prefix.
-spec path_to_variable_name( path() ) -> string().
path_to_variable_name( Filename ) ->
	path_to_variable_name( Filename, "File_" ).

% Converts the specified path into a (legal) variable name, using the
% specified prefix; any leading "./" is dropped first.
-spec path_to_variable_name( path(), string() ) -> string().
path_to_variable_name( [ $.,$/ | T ], Prefix ) ->
	convert( T, Prefix );

path_to_variable_name( Filename, Prefix ) ->
	convert( Filename, Prefix ).
% Helper: replaces every run of dashes, dots and slashes in the (flattened)
% filename by a single underscore, then prepends the specified prefix.
convert( Filename, Prefix ) ->
	Flattened = lists:flatten( Filename ),
	ReOptions = [ global, { return, list } ],
	NoDashName = re:replace( Flattened, "-+", "_", ReOptions ),
	NoDotName = re:replace( NoDashName, "\\.+", "_", ReOptions ),
	Prefix ++ re:replace( NoDotName, "/+", "_", ReOptions ).
% Returns the list of the supported image file extensions.
-spec get_image_extensions() -> [ extension() ].
get_image_extensions() ->
	% TIFF, TGA and al deemed deprecated (this comment line had lost its
	% marker, breaking compilation):
	[ ".png", ".jpg", ".jpeg", ".bmp" ].
-define(ResourceDir,"resources").
% Returns the path, under the resource directory, of the specified PNG image.
-spec get_image_file_png( file_name() ) -> path().
get_image_file_png( Image ) ->
	filename:join( [ ?ResourceDir, "images", Image ++ ".png" ] ).

% Returns the path, under the resource directory, of the specified GIF image.
-spec get_image_file_gif( file_name() ) -> path().
get_image_file_gif( Image ) ->
	filename:join( [ ?ResourceDir, "images", Image ++ ".gif" ] ).
% Opens the file corresponding to the specified filename, with specified list
% of options; resists the lack of file descriptors (which could otherwise end
% up making the VM fail for example when loading a new BEAM).
% Opens the specified file with the specified options, using the default
% attempt mode (try_once); throws on failure.
-spec open( file_name(), list() ) -> file:io_device().
open( Filename, Options ) ->
	open( Filename, Options, _Default=try_once ).
% Opens the file corresponding to specified filename (first parameter) with
% specified list of options (second parameter; refer to file:open/2 for
% detailed documentation).
% Third parameter is the "attempt mode", either 'try_once', 'try_endlessly' or
% 'try_endlessly_safer', relying on process-specific random waitings, should
% no descriptor be available.
% Otherwise the VM could be unable to perform any file operation, like loading
% a new BEAM, ex: "File operation error: system_limit. Target: ...".
% This is done in order to support situations where potentially more Erlang
% processes than available file descriptors request files concurrently.
-spec open( file_name(), [ file:mode() | 'ram' ],
'try_once' | 'try_endlessly' | 'try_endlessly_safer' )
-> file:io_device().
% For the contents in the above tuple: refer to the #file_descriptor record
% of the Erlang 'file' module (file.hrl).
open( Filename, Options, _AttemptMode=try_endlessly_safer ) ->

	File = open( Filename, Options, try_endlessly ),

	% We could check here that at least one descriptor remains available; the
	% correct solution would involve knowing the number of used descriptors,
	% which is not readily available (these two comment lines had lost their
	% markers, breaking compilation):
	File;


open( Filename, Options, _AttemptMode=try_endlessly ) ->

	case file:open( Filename, Options ) of

		{ ok, File } ->
			File;

		{ error, FileError } when FileError == emfile
				orelse FileError == system_limit ->

			% Waits a process-specific random duration, to desynchronize file
			% opening attempts and remain below the descriptor limit:
			Duration = basic_utils:get_process_specific_value(
				_Min=50, _Max=200 ),

			receive

			after Duration ->

				open( Filename, Options, try_endlessly )

			end;

		{ error, OtherFileError } ->
			throw( { open_failed, { Filename, Options }, OtherFileError } )

	end;


open( Filename, Options, _AttemptMode=try_once ) ->

	case file:open( Filename, Options ) of

		{ ok, File } ->
			File;

		{ error, emfile } ->
			throw( { too_many_open_files, { Filename, Options } } );

		{ error, system_limit } ->
			throw( { too_many_open_files, { Filename, Options },
					system_limit } );

		{ error, OtherError } ->
			throw( { open_failed, { Filename, Options }, OtherError } )

	end.
% Closes the specified file, throwing an exception on failure.
-spec close( file:io_device() ) -> basic_utils:void().
close( File ) ->
	close( File, throw_if_failed ).

% Closes the specified file, with the specified failure mode: either throws on
% error, or silently overcomes any closing failure.
-spec close( file:io_device(), 'overcome_failure' | 'throw_if_failed' )
		-> basic_utils:void().
close( File, _FailureMode=throw_if_failed ) ->
	case file:close( File ) of

		ok ->
			ok;

		{ error, Reason } ->
			throw( { file_closing_failed, Reason } )

	end;

close( File, _FailureMode=overcome_failure ) ->
	file:close( File ).
% Reads up to Count elements from the specified opened file; returns either
% { ok, Data } or eof, and throws on error.
-spec read( file:io_device(), basic_utils:count() ) ->
		{ 'ok', string() | binary() } | 'eof'.
read( File, Count ) ->
	case file:read( File, Count ) of

		{ error, Reason } ->
			throw( { read_failed, Reason } );

		ReadOutcome ->
			% Either { ok, Data } or eof, returned verbatim:
			ReadOutcome

	end.
% Writes the specified content into the specified opened file; throws on
% failure.
-spec write( file:io_device(), iodata() ) -> basic_utils:void().
write( File, Content ) ->
	case file:write( File, Content ) of

		ok ->
			ok;

		{ error, Reason } ->
			throw( { write_failed, Reason } )

	end.
% Writes the specified formatted content into the specified opened file;
% throws on failure.
-spec write( file:io_device(), text_utils:format_string(), [ term() ] ) ->
		basic_utils:void().
write( File, FormatString, Values ) ->
	Text = io_lib:format( FormatString, Values ),
	% Delegates to write/2 rather than duplicating its error handling:
	write( File, Text ).
% See also: read_terms/1 to read directly Erlang terms.
% Reads the whole content of the specified file, and returns it as a binary;
% throws on failure.
-spec read_whole( file_name() ) -> binary().
read_whole( Filename ) ->
	case file:read_file( Filename ) of

		{ ok, Binary } ->
			Binary;

		{ error, Error } ->
			throw( { read_whole_failed, Filename, Error } )

	end.
% Writes the specified binary into the specified file, whose previous content
% (if any) is thus overwritten; throws on failure.
-spec write_whole( file_name(), binary() ) -> basic_utils:void().
write_whole( Filename, Binary ) ->
	case file:write_file( Filename, Binary ) of

		{ error, Error } ->
			throw( { write_whole_failed, Filename, Error } );

		ok ->
			ok

	end.
% Reads the specified file, expected to contain Erlang terms (one per
% dot-terminated expression); returns the list of terms, otherwise throws.
read_terms( Filename ) ->
	case file:consult( Filename ) of

		{ ok, Terms } ->
			Terms;

		{ error, Error } when is_atom( Error ) ->
			% I/O-level error (ex: enoent):
			throw( { reading_failed, Filename, Error } );

		{ error, Error } ->
			% Parse error, translated into a readable string:
			Reason = file:format_error( Error ),
			throw( { interpretation_failed, Filename, Reason } )

	end.
% Returns the file extension corresponding to the specified compression
% format.
-spec get_extension_for( compression_format() ) -> extension().
get_extension_for( _CompressionFormat=zip ) ->
	".zip";

get_extension_for( _CompressionFormat=bzip2 ) ->
	".bz2";

get_extension_for( _CompressionFormat=xz ) ->
	".xz".
% Compresses specified file: creates a new, compressed version thereof, whose
% filename (the original one with the format-specific extension suffixed) is
% returned. For example, compress( "hello.png", zip ) will generate a
% "hello.png.zip" file.
-spec compress( file_name(), compression_format() ) -> file_name().
compress( Filename, _CompressionFormat=zip ) ->

	% Rather than using a standalone zip tool, we use the Erlang support here
	% (the following residue lines had lost their comment markers):
	%ZipExec = executable_utils:get_default_zip_compress_tool(),

	ZipFilename = Filename ++ get_extension_for( zip ),

	% Exactly this one file in the archive:
	zip:zip( ZipFilename, [ Filename ] ),

	true = is_existing_file( ZipFilename ),
	ZipFilename;


compress( Filename, _CompressionFormat=bzip2 ) ->

	Bzip2Exec = executable_utils:get_default_bzip2_compress_tool(),

	% --keep prevents bzip2 from removing the original file:
	[] = os:cmd( Bzip2Exec ++ " --keep --force --quiet " ++ Filename ),

	Bzip2Filename = Filename ++ get_extension_for( bzip2 ),
	true = is_existing_file( Bzip2Filename ),
	Bzip2Filename;


compress( Filename, _CompressionFormat=xz ) ->

	XZExec = executable_utils:get_default_xz_compress_tool(),

	Command = XZExec ++ " --keep --force --quiet " ++ Filename,
	[] = os:cmd( Command ),

	XZFilename = Filename ++ get_extension_for( xz ),
	true = is_existing_file( XZFilename ),
	XZFilename;


compress( _Filename, CompressionFormat ) ->
	throw( { unsupported_compression_format, CompressionFormat } ).
% Decompresses specified file: the compressed file is expected to contain
% exactly one file, bearing the same filename except the format-specific
% extension. Typically, when a format MY_FORMAT is specified, converts a
% compressed foo.MY_FORMAT file into an uncompressed foo one.
-spec decompress( file_name(), compression_format() ) -> file_name().
decompress( ZipFilename, _CompressionFormat=zip ) ->

	% Rather than using a standalone unzip tool, we use the Erlang support
	% here (the following residue lines had lost their comment markers and
	% were partly garbled):
	%UnzipExec = executable_utils:get_default_zip_decompress_tool(),
	%Filename = replace_extension( ZipFilename, get_extension_for( zip ), "" ),
	%Command = UnzipExec ++ " -q -o " ++ ZipFilename,

	% Exactly one file per such archives:
	{ ok, [ Filename ] } = zip:unzip( ZipFilename ),

	true = is_existing_file( Filename ),
	Filename;


decompress( Bzip2Filename, _CompressionFormat=bzip2 ) ->

	Bzip2Exec = executable_utils:get_default_bzip2_decompress_tool(),

	Filename = replace_extension( Bzip2Filename, get_extension_for( bzip2 ),
		"" ),

	% The result will be named Filename by bunzip2:
	[] = os:cmd( Bzip2Exec ++ " --keep --force --quiet " ++ Bzip2Filename ),

	true = is_existing_file( Filename ),
	Filename;


decompress( XzFilename, _CompressionFormat=xz ) ->

	XZExec = executable_utils:get_default_xz_decompress_tool(),

	Filename = replace_extension( XzFilename, get_extension_for( xz ), "" ),

	% The result will be named Filename by unxz:
	[] = os:cmd( XZExec ++ " --keep --force --quiet " ++ XzFilename ),

	true = is_existing_file( Filename ),
	Filename;


decompress( _Filename, CompressionFormat ) ->
	throw( { unsupported_compression_format, CompressionFormat } ).
% Reads the specified file and returns its content as a zipped binary term.
-spec file_to_zipped_term( file_name() ) -> binary().
file_to_zipped_term( Filename ) ->
	% The archive name is irrelevant, as it only exists in memory:
	DummyFileName = "dummy",
	% Formerly-commented verbose variant, left active by mistake (would have
	% built the archive twice, with verbose output):
	%{ ok, { _DummyFileName, Bin } } =
	%	zip:zip( DummyFileName, [ Filename ], [ verbose, memory ] ),
	{ ok, { _DummyFileName, Bin } } =
		zip:zip( DummyFileName, [ Filename ], [ memory ] ),
	Bin.
% Extracts, in the current directory, the (single) file stored in the
% specified zipped binary term; returns its name.
-spec zipped_term_to_unzipped_file( binary() ) -> file_name().
zipped_term_to_unzipped_file( ZippedTerm ) ->
	{ ok, [ FileName ] } = zip:unzip( ZippedTerm ),
	FileName.
% Note: only one file is expected to be stored in the specified archive.
% Extracts the (single) file stored in the specified zipped binary term, and
% writes its content under the specified target filename.
-spec zipped_term_to_unzipped_file( binary(), file_name() )
		-> basic_utils:void().
zipped_term_to_unzipped_file( ZippedTerm, TargetFilename ) ->
	% In-memory extraction, then explicit write:
	{ ok, [ { _AFilename, Binary } ] } = zip:unzip( ZippedTerm, [ memory ] ),
	write_whole( TargetFilename, Binary ).
% Returns a zipped binary term corresponding to the specified list of files.
-spec files_to_zipped_term( [file_name()] ) -> binary().
files_to_zipped_term( FilenameList ) ->
	% The archive name is irrelevant, as it only exists in memory:
	DummyFileName = "dummy",
	{ ok, { _DummyFileName, Bin } } = zip:zip( DummyFileName, FilenameList,
		[ memory ] ),
	Bin.
% Returns a zipped binary term containing the specified files, which are
% looked-up relatively to the specified base directory.
-spec files_to_zipped_term( [ file_name() ], directory_name() ) -> binary().
files_to_zipped_term( FilenameList, BaseDirectory ) ->

	DummyFileName = "dummy",

	% Disabled debug traces (these residue lines had lost their comment
	% markers, breaking compilation):
	%io:format( "files_to_zipped_term: base directory is '~s'.~n",
	%	[ BaseDirectory ] ),

	case zip:zip( DummyFileName, FilenameList,
			[ memory, { cwd, BaseDirectory } ] ) of

		{ ok, { _DummyFileName, Bin } } ->
			Bin;

		{ error, enoent } ->
			% A file or the base directory is missing:
			%io:format( "~nfiles_to_zipped_term/2 failed from '~s'; "
			%	"base directory '~s' existing: ~p.~n",
			%	[ get_current_directory(), BaseDirectory,
			%	  is_existing_directory( BaseDirectory ) ] ),
			throw( { zip_failed, BaseDirectory, FilenameList } );

		{ error, Other } ->
			throw( { zip_failed, Other, BaseDirectory, FilenameList } )

	end.
% Extracts, in the current directory, the files stored in the specified
% zipped binary term; returns their names.
-spec zipped_term_to_unzipped_files( binary() ) -> [ file_name() ].
zipped_term_to_unzipped_files( ZippedTerm ) ->
	% Formerly-commented verbose variant, left active by mistake (would have
	% extracted the archive twice):
	%{ ok, FileNames } = zip:unzip( ZippedTerm, [ verbose ] ),
	{ ok, FileNames } = zip:unzip( ZippedTerm ),
	FileNames.
% Extracts, in the specified (pre-existing) target directory, the files stored
% in the specified zipped binary term; returns their names.
-spec zipped_term_to_unzipped_files( binary(), directory_name() )
		-> [ file_name() ].
zipped_term_to_unzipped_files( ZippedTerm, TargetDirectory ) ->
	% Formerly-commented verbose variant, left active by mistake (would have
	% extracted the archive into the current directory before the target
	% directory was even checked):
	%{ ok, FileNames } = zip:unzip( ZippedTerm, [ verbose ] ),
	case is_existing_directory( TargetDirectory ) of

		true ->
			{ ok, FileNames } = zip:unzip( ZippedTerm,
				[ { cwd, TargetDirectory } ] ),
			FileNames;

		false ->
			throw( { non_existing_unzip_directory, TargetDirectory } )

	end.
|
58e87c9d203e6ead9692b1239ebfad7c26a8b05f96578ce347626cd3a0383346 | michaelklishin/monger | util_test.clj | (ns monger.test.util-test
(:import com.mongodb.DBObject)
(:require [monger util conversion]
[clojure.test :refer :all]))
;; Verifies that monger.util/get-id extracts the same :_id value from a
;; Clojure map and from its DBObject conversion.
(deftest get-object-id
  (let [clj-map { :_id (monger.util/object-id) }
        db-object ^DBObject (monger.conversion/to-db-object clj-map)
        _id (:_id clj-map)]
    (is (= _id (monger.util/get-id clj-map)))
    (is (= _id (monger.util/get-id db-object)))))
| null | https://raw.githubusercontent.com/michaelklishin/monger/9f3d192dffb16da011f805355b87ae172c584a69/test/monger/test/util_test.clj | clojure | (ns monger.test.util-test
(:import com.mongodb.DBObject)
(:require [monger util conversion]
[clojure.test :refer :all]))
(deftest get-object-id
(let [clj-map { :_id (monger.util/object-id) }
db-object ^DBObject (monger.conversion/to-db-object clj-map)
_id (:_id clj-map)]
(is (= _id (monger.util/get-id clj-map)))
(is (= _id (monger.util/get-id db-object)))))
| |
c8d50ac62fb16f21b63f1073f4f8f0b88ea1026ee66370f289a5dc3128b1f117 | xldenis/ill | DesugarDebug.hs | module DesugarDebug where
import Thrill.Syntax
import Thrill.Syntax.Core (bindings)
import Thrill.Infer
import Thrill.Infer.Monad
import Thrill.Options
import Thrill.Renamer
import Data.Function
import Control.Monad.State (runStateT)
import Control.Monad.Except (runExcept)
import Control.Monad
import Thrill.BindingGroup
import Thrill.Desugar
import Prelude hiding (putStrLn, putStr)
import Data.Text.Lazy.IO
import Data.Text.Lazy hiding (map)
import Thrill.Syntax.Pretty
import Data.Bifunctor (first, bimap)
-- | Type-checks the renamed module, then runs the desugaring pipeline for the
-- requested stage and prints the pretty-printed result (or the type error)
-- to stdout.
desugar :: String -> GlobalOptions -> RenamedModule SourceSpan -> IO ()
desugar stage gOpts ast = case (typeCheckModule) ast of
  Left err -> putStrLn . render gOpts $ prettyError err
  Right (mod, env) -> do
    let pipeline = stageToPipeline stage
        desugared = pipeline env mod
    putStrLn $ render gOpts (pretty desugared)
  where
  -- NOTE(review): unused here; presumably a leftover default for CLI
  -- rendering width — confirm before removing
  cliRenderArgs = defaultRenderArgs { width = 50}
-- | Maps a stage name to the cumulative desugaring pipeline ending at that
-- stage ("binop" < "traits" < "cases"); any other name yields the identity.
stageToPipeline :: String -> (Environment -> Module QualifiedName TypedAnn -> Module QualifiedName TypedAnn)
stageToPipeline "binop" e = desugarBinOps
stageToPipeline "traits" e = desugarTraits e . stageToPipeline "binop" e
stageToPipeline "cases" e = desugarPatterns . stageToPipeline "traits" e
stageToPipeline _ _ = id
-- | Renders a document using the render arguments derived from the global
-- options.
render :: GlobalOptions -> Doc AnsiStyle -> Text
render opts = renderThrill (renderArgs opts)
| null | https://raw.githubusercontent.com/xldenis/ill/46bb41bf5c82cd6fc4ad6d0d8d33cda9e87a671c/app/DesugarDebug.hs | haskell | module DesugarDebug where
import Thrill.Syntax
import Thrill.Syntax.Core (bindings)
import Thrill.Infer
import Thrill.Infer.Monad
import Thrill.Options
import Thrill.Renamer
import Data.Function
import Control.Monad.State (runStateT)
import Control.Monad.Except (runExcept)
import Control.Monad
import Thrill.BindingGroup
import Thrill.Desugar
import Prelude hiding (putStrLn, putStr)
import Data.Text.Lazy.IO
import Data.Text.Lazy hiding (map)
import Thrill.Syntax.Pretty
import Data.Bifunctor (first, bimap)
desugar :: String -> GlobalOptions -> RenamedModule SourceSpan -> IO ()
desugar stage gOpts ast = case (typeCheckModule) ast of
Left err -> putStrLn . render gOpts $ prettyError err
Right (mod, env) -> do
let pipeline = stageToPipeline stage
desugared = pipeline env mod
putStrLn $ render gOpts (pretty desugared)
where
cliRenderArgs = defaultRenderArgs { width = 50}
stageToPipeline :: String -> (Environment -> Module QualifiedName TypedAnn -> Module QualifiedName TypedAnn)
stageToPipeline "binop" e = desugarBinOps
stageToPipeline "traits" e = desugarTraits e . stageToPipeline "binop" e
stageToPipeline "cases" e = desugarPatterns . stageToPipeline "traits" e
stageToPipeline _ _ = id
render :: GlobalOptions -> Doc AnsiStyle -> Text
render opts = renderThrill (renderArgs opts)
| |
951b4b59732b59f79173ba4d22b88e474cc72fde48a07a3bf46891c67ace36ba | OpenC2-org/ocas | act_allow.erl | @author
%%% @copyright (C) 2017, sFractal Consulting LLC
-module(act_allow).
%%%-------------------------------------------------------------------
%%%
%%% All rights reserved.
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are
%%% met:
%%%
%%% * Redistributions of source code must retain the above copyright
%%% notice, this list of conditions and the following disclaimer.
%%%
%%% * Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%%
%%% * The names of its contributors may not be used to endorse or promote
%%% products derived from this software without specific prior written
%%% permission.
%%%
%%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
%%% LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
%%% A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
%%% OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
%%% SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
%%% LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES. LOSS OF USE,
%%% DATA, OR PROFITS. OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
%%% THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
%%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
%%% OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
%%%-------------------------------------------------------------------
-behaviour(gen_server).
-behaviour(oc_env). % for calling start modules
-author("Duncan Sparrell").
-license("Apache 2.0").
%% gen_server callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
%% interface calls
-export([ start/1
, stop/0
, keepalive/0
]).
-ignore_xref({start, 1}). % to keep xref happy
-ignore_xref({keepalive, 0}). % to keep xref happy
%% This routine API handles all the actions that can be taken
%% Starts the server, linked to the caller and locally registered as ?MODULE.
start(State) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [State], []).

%% Asks the server to shut down (asynchronous cast).
stop() ->
    gen_server:cast(?MODULE, shutdown).

%% Synchronous liveness check; returns {keepalive_received, ?MODULE}.
keepalive() ->
    gen_server:call(?MODULE, keepalive).
%% initialize server with the state supplied at start/1
init( [State] ) ->
    lager:debug( "starting ~p with ~p", [?MODULE, State] ),
    { ok, State }.
%% synchronous calls
handle_call( keepalive, From, State ) ->
    lager:debug( "~p got keepalive from ~p", [?MODULE, From] ),
    %% reply to keepalive (this comment had lost its marker, breaking the
    %% build)
    Response = {keepalive_received, ?MODULE},
    {reply, Response, State};

%% handle unknown call messages
handle_call(Message, From, State) ->
    lager:info( "~p got unknown ~p from ~p", [?MODULE, Message, From] ),
    {reply, error, State}.
%% async calls
handle_cast(shutdown, State) ->
    lager:info( "~p got shutdown", [?MODULE] ),
    %% stops normally; terminate/2 runs before the process exits
    {stop, normal, State};

%% handle unknown cast messages
handle_cast(Message, State) ->
    lager:info( "~p got unknown ~p", [?MODULE, Message] ),
    {noreply, State}.

%% handle unknown info messages
handle_info(Message, State) ->
    lager:info( "~p got unknown ~p", [?MODULE, Message] ),
    {noreply, State}.

%% handle terminate (normal shutdown only)
terminate(normal, _State) ->
    ok.

%% don't really handle code change yet
code_change(_OldVsn, State, _Extra) ->
    %% No change planned. The function is there for behaviour sanity,
    %% but will not be used; only a version bump is expected next.
    {ok, State}.
| null | https://raw.githubusercontent.com/OpenC2-org/ocas/c15132d9f37b1e0e29884456a520557c25b22f38/apps/ocas/src/action_servers/act_allow.erl | erlang | -------------------------------------------------------------------
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* The names of its contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
DATA, OR PROFITS. OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-------------------------------------------------------------------
for calling start modules
gen_server callbacks
interface calls
to keep xref happy
to keep xref happy
This routine API handles all the actions that can be taken
initialize server with state
synchronous calls
handle unknown call messages
async calls
handle unknown cast messages
handle unknown info messages
handle terminate
don't really handle code change yet
No change planned. The function is there for behaviour sanity,
but will not be used. Only a version on the next | @author
( C ) 2017 , sFractal Consulting LLC
-module(act_allow).
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES . LOSS OF USE ,
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
-behaviour(gen_server).
-author("Duncan Sparrell").
-license("Apache 2.0").
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-export([ start/1
, stop/0
, keepalive/0
]).
start(State) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [State], []).
stop() ->
gen_server:cast(?MODULE, shutdown).
keepalive() ->
gen_server:call(?MODULE, keepalive).
init( [State] ) ->
lager:debug( "starting ~p with ~p", [?MODULE, State] ),
{ ok, State }.
handle_call( keepalive, From, State ) ->
lager:debug( "~p got keepalive from ~p", [?MODULE, From] ),
reply to
Response = {keepalive_received, ?MODULE},
{reply, Response, State};
handle_call(Message, From, State) ->
lager:info( "~p got unknown ~p from ~p", [?MODULE, Message, From] ),
{reply, error, State}.
handle_cast(shutdown, State) ->
lager:info( "~p got shutdown", [?MODULE] ),
{stop, normal, State};
handle_cast(Message, State) ->
lager:info( "~p got unknown ~p", [?MODULE, Message] ),
{noreply, State}.
handle_info(Message, State) ->
lager:info( "~p got unknown ~p", [?MODULE, Message] ),
{noreply, State}.
terminate(normal, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
3b5e207ae10571b54f1238eb60703659afc5ac148c64edc23e7b0a6563610c1d | burtonsamograd/med | file.lisp | (in-package :med)
(defun find-file (path)
(setf path (merge-pathnames path))
(dolist (buffer (buffer-list))
(when (equal (buffer-property buffer 'path) path)
(setf (last-buffer *editor*) (current-buffer *editor*))
(switch-to-buffer buffer)
(setf (buffer-property buffer 'default-pathname-defaults)
(make-pathname :name nil :type nil :version :newest :defaults path))
(return-from find-file buffer)))
(let ((buffer (make-instance 'buffer)))
(if (pathname-name path)
;; read file
(with-open-file (s path :if-does-not-exist nil)
(cond (s
(loop
(multiple-value-bind (line missing-newline-p)
(read-line s nil)
(when (not line)
(return))
(insert buffer line)
(when (not missing-newline-p)
(insert buffer #\Newline)))))
(t (setf (buffer-property buffer 'new-file) t)))
(rename-buffer buffer (file-namestring path)))
;; read directory
(progn
(insert buffer (format nil "Directory: ~A~%~%" path))
(mapc (lambda (file)
(let* ((file-name (file-namestring file))
(name (if file-name file-name (directory-namestring file))))
(insert buffer name)
(insert buffer #\Newline)))
(directory (merge-pathnames "*.*" path)))
(setf (buffer-property buffer 'new-file) t)
(rename-buffer buffer (directory-namestring path))))
(push buffer (buffer-list))
(setf (buffer-property buffer 'path) path)
(move-beginning-of-buffer buffer)
;; Loading the file will set the modified flag.
(setf (last-buffer *editor*) (current-buffer *editor*))
(setf (buffer-modified buffer) nil)
(switch-to-buffer buffer)
(setf (buffer-property buffer 'default-pathname-defaults)
(make-pathname :name nil :type nil :version :newest :defaults path))
buffer))
;; FIME: should be regexes and use ppcre to search the
;; list rather then just strings and search
(defvar *file-completion-ignore-filetype-list* '(".llf" "~"))
(defun any (&rest args)
(dolist (a args)
(when a (return-from any t))))
(defun file-completer (text)
(let (results)
(dolist (path (directory (merge-pathnames "*.*" (pathname text))))
(let ((file (namestring path)))
(when (and (search text file)
(not (apply #'any
(mapcar (lambda (ignore) (search ignore file))
*file-completion-ignore-filetype-list*))))
(push file results))))
results))
(defun find-file-command ()
(find-file (read-from-minibuffer "Find file: "
:default (namestring
(or (buffer-property (current-buffer *editor*) 'default-pathname-defaults)
*default-pathname-defaults*))
:completer #'file-completer)))
TODO : factor out the buffer saving from the below 3 functions into defun save - buffer
(defun save-buffer-command ()
(let ((buffer (current-buffer *editor*)))
(when (not (buffer-property buffer 'path))
(let* ((path (read-from-minibuffer (format nil "Write file (default ~S): "
:default (buffer-property buffer 'default-pathname-defaults))))
(filespec (merge-pathnames path)))
(rename-buffer buffer (file-namestring filespec))
(setf (buffer-property buffer 'path) filespec)))
(with-open-file (s (buffer-property buffer 'path)
:direction :output
:if-exists :new-version
:if-does-not-exist :create)
(do ((line (first-line buffer) (next-line line)))
((not line))
(write-sequence (map 'string #'car (data line)) s)
(when (next-line line)
(terpri s))))
(setf (buffer-property buffer 'new-file) nil
(buffer-modified buffer) nil)
(format t "Wrote ~S~%" (buffer-property buffer 'path))))
(defun save-some-buffers-command ()
(dolist (buffer (buffer-list))
(when (and (buffer-modified buffer)
(minibuffer-y-or-n-p
(format nil "Save buffer ~A?" (buffer-property buffer 'name)))
(buffer-property buffer 'path))
(with-open-file (s (buffer-property buffer 'path)
:direction :output
:if-exists :new-version
:if-does-not-exist :create)
(do ((line (first-line buffer) (next-line line)))
((not line))
(write-sequence (map 'string #'car (data line)) s)
(when (next-line line)
(terpri s))))
(setf (buffer-property buffer 'new-file) nil
(buffer-modified buffer) nil)
(format t "Wrote ~S~%" (buffer-property buffer 'path)))))
(defun write-file-command ()
(let* ((buffer (current-buffer *editor*))
(*default-pathname-defaults* (or (buffer-property buffer 'path)
(buffer-property buffer 'default-pathname-defaults)
*default-pathname-defaults*))
(path (read-from-minibuffer "Write file: "
:default (namestring *default-pathname-defaults*)))
(filespec (merge-pathnames path)))
(rename-buffer buffer (file-namestring filespec))
(setf (buffer-property buffer 'path) filespec)
(with-open-file (s (buffer-property buffer 'path)
:direction :output
:if-exists :new-version
:if-does-not-exist :create)
(do ((line (first-line buffer) (next-line line)))
((not line))
(write-sequence (map 'string #'car (data line)) s)
(terpri s)))
(setf (buffer-property buffer 'new-file) nil
(buffer-modified buffer) nil)
(format t "Wrote ~S~%" (buffer-property buffer 'path))))
| null | https://raw.githubusercontent.com/burtonsamograd/med/667c45032f60831447ad0eafd4d5c9a9748b4366/commands/file.lisp | lisp | read file
read directory
Loading the file will set the modified flag.
FIME: should be regexes and use ppcre to search the
list rather then just strings and search | (in-package :med)
(defun find-file (path)
(setf path (merge-pathnames path))
(dolist (buffer (buffer-list))
(when (equal (buffer-property buffer 'path) path)
(setf (last-buffer *editor*) (current-buffer *editor*))
(switch-to-buffer buffer)
(setf (buffer-property buffer 'default-pathname-defaults)
(make-pathname :name nil :type nil :version :newest :defaults path))
(return-from find-file buffer)))
(let ((buffer (make-instance 'buffer)))
(if (pathname-name path)
(with-open-file (s path :if-does-not-exist nil)
(cond (s
(loop
(multiple-value-bind (line missing-newline-p)
(read-line s nil)
(when (not line)
(return))
(insert buffer line)
(when (not missing-newline-p)
(insert buffer #\Newline)))))
(t (setf (buffer-property buffer 'new-file) t)))
(rename-buffer buffer (file-namestring path)))
(progn
(insert buffer (format nil "Directory: ~A~%~%" path))
(mapc (lambda (file)
(let* ((file-name (file-namestring file))
(name (if file-name file-name (directory-namestring file))))
(insert buffer name)
(insert buffer #\Newline)))
(directory (merge-pathnames "*.*" path)))
(setf (buffer-property buffer 'new-file) t)
(rename-buffer buffer (directory-namestring path))))
(push buffer (buffer-list))
(setf (buffer-property buffer 'path) path)
(move-beginning-of-buffer buffer)
(setf (last-buffer *editor*) (current-buffer *editor*))
(setf (buffer-modified buffer) nil)
(switch-to-buffer buffer)
(setf (buffer-property buffer 'default-pathname-defaults)
(make-pathname :name nil :type nil :version :newest :defaults path))
buffer))
(defvar *file-completion-ignore-filetype-list* '(".llf" "~"))
(defun any (&rest args)
(dolist (a args)
(when a (return-from any t))))
(defun file-completer (text)
(let (results)
(dolist (path (directory (merge-pathnames "*.*" (pathname text))))
(let ((file (namestring path)))
(when (and (search text file)
(not (apply #'any
(mapcar (lambda (ignore) (search ignore file))
*file-completion-ignore-filetype-list*))))
(push file results))))
results))
(defun find-file-command ()
(find-file (read-from-minibuffer "Find file: "
:default (namestring
(or (buffer-property (current-buffer *editor*) 'default-pathname-defaults)
*default-pathname-defaults*))
:completer #'file-completer)))
TODO : factor out the buffer saving from the below 3 functions into defun save - buffer
(defun save-buffer-command ()
(let ((buffer (current-buffer *editor*)))
(when (not (buffer-property buffer 'path))
(let* ((path (read-from-minibuffer (format nil "Write file (default ~S): "
:default (buffer-property buffer 'default-pathname-defaults))))
(filespec (merge-pathnames path)))
(rename-buffer buffer (file-namestring filespec))
(setf (buffer-property buffer 'path) filespec)))
(with-open-file (s (buffer-property buffer 'path)
:direction :output
:if-exists :new-version
:if-does-not-exist :create)
(do ((line (first-line buffer) (next-line line)))
((not line))
(write-sequence (map 'string #'car (data line)) s)
(when (next-line line)
(terpri s))))
(setf (buffer-property buffer 'new-file) nil
(buffer-modified buffer) nil)
(format t "Wrote ~S~%" (buffer-property buffer 'path))))
(defun save-some-buffers-command ()
(dolist (buffer (buffer-list))
(when (and (buffer-modified buffer)
(minibuffer-y-or-n-p
(format nil "Save buffer ~A?" (buffer-property buffer 'name)))
(buffer-property buffer 'path))
(with-open-file (s (buffer-property buffer 'path)
:direction :output
:if-exists :new-version
:if-does-not-exist :create)
(do ((line (first-line buffer) (next-line line)))
((not line))
(write-sequence (map 'string #'car (data line)) s)
(when (next-line line)
(terpri s))))
(setf (buffer-property buffer 'new-file) nil
(buffer-modified buffer) nil)
(format t "Wrote ~S~%" (buffer-property buffer 'path)))))
(defun write-file-command ()
(let* ((buffer (current-buffer *editor*))
(*default-pathname-defaults* (or (buffer-property buffer 'path)
(buffer-property buffer 'default-pathname-defaults)
*default-pathname-defaults*))
(path (read-from-minibuffer "Write file: "
:default (namestring *default-pathname-defaults*)))
(filespec (merge-pathnames path)))
(rename-buffer buffer (file-namestring filespec))
(setf (buffer-property buffer 'path) filespec)
(with-open-file (s (buffer-property buffer 'path)
:direction :output
:if-exists :new-version
:if-does-not-exist :create)
(do ((line (first-line buffer) (next-line line)))
((not line))
(write-sequence (map 'string #'car (data line)) s)
(terpri s)))
(setf (buffer-property buffer 'new-file) nil
(buffer-modified buffer) nil)
(format t "Wrote ~S~%" (buffer-property buffer 'path))))
|
c78d5188ad9bd1a77fe5029363604bb82618927a68cbead527e3b156d1428515 | jimpil/hotel-nlp | tools_test.clj | (ns hotel-nlp.tools_test
(:require [clojure.test :refer :all]
[hotel_nlp.tools.normalito.core :refer :all]
[clojure.data.generators :refer [collection]])) ;;might come in handy
(defn- === [x y]
(and (= x y)
(= (class x)
(class y))))
(deftest normalito-test
(testing "String normalisation..."
(is (= "eat" (normalise "eating" transform-by-porter)))
(is (= "kiss" (normalise "kissing" transform-by-porter)))
(is (= "danc" (normalise "dancing" transform-by-porter)))
(is (let [should ["eat" "kiss" "danc"]
res (normalise ["eating" "kissing" "dancing"] #(transform-by-porter % %2 "english"))]
(=== should res)))
(is (let [should ["eat" "kiss" "danc"]
res (normalise ["eating" "kissing" "dancing"] transform-by-porter)]
(=== should res)))
(is (= ["finish" "dinner" "quick"] (normalise (enumeration-seq (java.util.StringTokenizer. "finished dinner quickly")) transform-by-porter)))
(is (let [should #{"eat" "kiss" "danc"}
res (normalise #{"eating" "kissing" "dancing"} #(transform-by-porter % %2 "english"))]
(=== should res)))
(is (let [should '("eat" "kiss" "danc")
res (normalise '("eating" "kissing" "dancing") #(transform-by-porter % %2 "english"))]
(= should res))) ;;not the same classes with persistent list, cannot use ===
(testing "Number normalisation..."
(is (= {[-1 -3/4 -1/2 -1/4 0N 1/4 1/2 3/4 1] [-3/13 -11/13 -7/13 -1 1]}
(normalise {(java.util.ArrayList. (range -4 5)) [-3 -7 -5 -8 5]} in-range-formula)))
(is (= {[-5 -15/4 -5/2 -5/4 0N 5/4 5/2 15/4 5] [-15/13 -55/13 -35/13 -5 5]}
(normalise {(java.util.ArrayList. (range -4 5)) [-3 -7 -5 -8 5]} #(in-range-formula %1 %2 [-5 5]))))
(is (= '(1/4 1/5 1/6 1/7 1/8 1/9 1/10 1/11 1/12 1/13 1/14)
(normalise (range 4 15) #(transform-reciprocal %1 %2 [-1 1]))))
(is (= [-2 -28/15 -26/15 -8/5 -22/15 -4/3 -6/5 -16/15 -14/15 -4/5 -2/3 -8/15 -2/5 -4/15 -2/15 0N 2/15 4/15 2/5 8/15 2/3 4/5 14/15 16/15 6/5 4/3 22/15 8/5 26/15 28/15 2]
(normalise (vec (range -15 16)) #(in-range-formula %1 %2 [-2 2]))))
(is (= [0 1/100 1/50 3/100 1/25 1/20 3/50 7/100 2/25 9/100 1/10 11/100 3/25 13/100 7/50 3/20 4/25 17/100 9/50 19/100 1/5 21/100 11/50 23/100 6/25]
(normalise (vec (range 25)) divide-by-value-formula))) )
(testing "Correlation..."
(is (= [-1.0] (getCorrelation {[1 2 3 4] [4 3 2 1]} nil)))
(is (= [-0.75] (getCorrelation {[1 2 3 4] [4 3 2 1]} nil true)))
)
)
(let [should #{"eat" "kiss" "danc"}
res (normalise #{"eating" "kissing" "dancing"} #(transform-by-porter % %2 "english"))]
(=== should res)))
| null | https://raw.githubusercontent.com/jimpil/hotel-nlp/2647f999b5998595a5edac1900e850140f374de6/test/hotel_nlp/tools_test.clj | clojure | might come in handy
not the same classes with persistent list, cannot use === | (ns hotel-nlp.tools_test
(:require [clojure.test :refer :all]
[hotel_nlp.tools.normalito.core :refer :all]
(defn- === [x y]
(and (= x y)
(= (class x)
(class y))))
(deftest normalito-test
(testing "String normalisation..."
(is (= "eat" (normalise "eating" transform-by-porter)))
(is (= "kiss" (normalise "kissing" transform-by-porter)))
(is (= "danc" (normalise "dancing" transform-by-porter)))
(is (let [should ["eat" "kiss" "danc"]
res (normalise ["eating" "kissing" "dancing"] #(transform-by-porter % %2 "english"))]
(=== should res)))
(is (let [should ["eat" "kiss" "danc"]
res (normalise ["eating" "kissing" "dancing"] transform-by-porter)]
(=== should res)))
(is (= ["finish" "dinner" "quick"] (normalise (enumeration-seq (java.util.StringTokenizer. "finished dinner quickly")) transform-by-porter)))
(is (let [should #{"eat" "kiss" "danc"}
res (normalise #{"eating" "kissing" "dancing"} #(transform-by-porter % %2 "english"))]
(=== should res)))
(is (let [should '("eat" "kiss" "danc")
res (normalise '("eating" "kissing" "dancing") #(transform-by-porter % %2 "english"))]
(testing "Number normalisation..."
(is (= {[-1 -3/4 -1/2 -1/4 0N 1/4 1/2 3/4 1] [-3/13 -11/13 -7/13 -1 1]}
(normalise {(java.util.ArrayList. (range -4 5)) [-3 -7 -5 -8 5]} in-range-formula)))
(is (= {[-5 -15/4 -5/2 -5/4 0N 5/4 5/2 15/4 5] [-15/13 -55/13 -35/13 -5 5]}
(normalise {(java.util.ArrayList. (range -4 5)) [-3 -7 -5 -8 5]} #(in-range-formula %1 %2 [-5 5]))))
(is (= '(1/4 1/5 1/6 1/7 1/8 1/9 1/10 1/11 1/12 1/13 1/14)
(normalise (range 4 15) #(transform-reciprocal %1 %2 [-1 1]))))
(is (= [-2 -28/15 -26/15 -8/5 -22/15 -4/3 -6/5 -16/15 -14/15 -4/5 -2/3 -8/15 -2/5 -4/15 -2/15 0N 2/15 4/15 2/5 8/15 2/3 4/5 14/15 16/15 6/5 4/3 22/15 8/5 26/15 28/15 2]
(normalise (vec (range -15 16)) #(in-range-formula %1 %2 [-2 2]))))
(is (= [0 1/100 1/50 3/100 1/25 1/20 3/50 7/100 2/25 9/100 1/10 11/100 3/25 13/100 7/50 3/20 4/25 17/100 9/50 19/100 1/5 21/100 11/50 23/100 6/25]
(normalise (vec (range 25)) divide-by-value-formula))) )
(testing "Correlation..."
(is (= [-1.0] (getCorrelation {[1 2 3 4] [4 3 2 1]} nil)))
(is (= [-0.75] (getCorrelation {[1 2 3 4] [4 3 2 1]} nil true)))
)
)
(let [should #{"eat" "kiss" "danc"}
res (normalise #{"eating" "kissing" "dancing"} #(transform-by-porter % %2 "english"))]
(=== should res)))
|
e52a92c469bc00aac5fcddcef78488d38a44c642e54f7f97ab92e03cf7bb9294 | eglaysher/rldev | app.ml |
: RealLive archiver and disassembler
Copyright ( C ) 2006 Haeleth
This program is free software ; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 2 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program ; if not , write to the Free Software Foundation , Inc. , 59 Temple
Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
Kprl: RealLive archiver and disassembler
Copyright (C) 2006 Haeleth
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open Optpp
let app =
{ Config.app_info with
exe_name = "kprl";
name = "Kprl";
description = "archiver/disassembler for RealLive";
usage =
"<options> <files or ranges>\n\
\n\
When adding to an archive, the first filename must be the archive name and \
the rest must be names of files to add. When disassembling or decompressing, \
if the first filename is NOT an archive, all files are taken as names of \
separate bytecode files. In all other cases, the first filename is the name \
of an archive and the rest should be a list of numerical ranges of files to \
be processed (e.g. `50 60 100-150'); if omitted, all files in the archive \
will be processed." }
let verbose = ref 0
let outdir = ref ""
let names_opt = ref false
let enc = ref Config.default_encoding
let bom = ref false
let auto_target = ref true
and target_version = ref (0, 0, 0, 0)
and target_interpreter = ref ""
and force_meta : [`Chinese | `None | `Western | `Korean ] option ref = ref None
| null | https://raw.githubusercontent.com/eglaysher/rldev/e59103b165e1c20bd940942405b2eee767933c96/src/kprl/app.ml | ocaml |
: RealLive archiver and disassembler
Copyright ( C ) 2006 Haeleth
This program is free software ; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 2 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program ; if not , write to the Free Software Foundation , Inc. , 59 Temple
Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
Kprl: RealLive archiver and disassembler
Copyright (C) 2006 Haeleth
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open Optpp
let app =
{ Config.app_info with
exe_name = "kprl";
name = "Kprl";
description = "archiver/disassembler for RealLive";
usage =
"<options> <files or ranges>\n\
\n\
When adding to an archive, the first filename must be the archive name and \
the rest must be names of files to add. When disassembling or decompressing, \
if the first filename is NOT an archive, all files are taken as names of \
separate bytecode files. In all other cases, the first filename is the name \
of an archive and the rest should be a list of numerical ranges of files to \
be processed (e.g. `50 60 100-150'); if omitted, all files in the archive \
will be processed." }
let verbose = ref 0
let outdir = ref ""
let names_opt = ref false
let enc = ref Config.default_encoding
let bom = ref false
let auto_target = ref true
and target_version = ref (0, 0, 0, 0)
and target_interpreter = ref ""
and force_meta : [`Chinese | `None | `Western | `Korean ] option ref = ref None
| |
5749ff6a82e77fd435c378f9dc463a3a08b3944dc965e2107283754fb3074f18 | janestreet/accessor_base | accessor_bool.mli | open! Base
open! Import
(** Accesses [()] iff the boolean is [true]. *)
val true_ : (_, unit, bool, [< variant ]) Accessor.t
(** Accesses [()] iff the boolean is [false]. *)
val false_ : (_, unit, bool, [< variant ]) Accessor.t
(** Access a boolean as its inverse. *)
val negated : (_, bool, bool, [< isomorphism ]) Accessor.t
| null | https://raw.githubusercontent.com/janestreet/accessor_base/8384c29a37e557168ae8a43b2a5a531f0ffc16e4/src/accessor_bool.mli | ocaml | * Accesses [()] iff the boolean is [true].
* Accesses [()] iff the boolean is [false].
* Access a boolean as its inverse. | open! Base
open! Import
val true_ : (_, unit, bool, [< variant ]) Accessor.t
val false_ : (_, unit, bool, [< variant ]) Accessor.t
val negated : (_, bool, bool, [< isomorphism ]) Accessor.t
|
35e1ec4054b82c7e966ceaf6b8d0c83540357518e87caa74004f574abfc998e2 | haskell-tools/haskell-tools | Infix.hs | module Pattern.Infix where
( a : b : c : d ) = [ " 1","2","3","4 " ]
Nothing:_ = undefined | null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/refactor/examples/Pattern/Infix.hs | haskell | module Pattern.Infix where
( a : b : c : d ) = [ " 1","2","3","4 " ]
Nothing:_ = undefined | |
49d8ded92c22f70f137bcc14823354cd59f87da0d9800a02ce713e5bf2ce34f2 | clojure-interop/google-cloud-clients | GroupServiceStubSettings.clj | (ns com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings
"Settings class to configure an instance of GroupServiceStub.
The default instance has everything set to sensible defaults:
The default service address (monitoring.googleapis.com) and default port (443) are used.
Credentials are acquired automatically through Application Default Credentials.
Retries are configured for idempotent methods but not for non-idempotent methods.
The builder of this class is recursive, so contained classes are themselves builders. When
build() is called, the tree of builders is called to create the complete settings object. For
example, to set the total timeout of getGroup to 30 seconds:
GroupServiceStubSettings.Builder groupServiceSettingsBuilder =
GroupServiceStubSettings.newBuilder();
groupServiceSettingsBuilder.getGroupSettings().getRetrySettings().toBuilder()
.setTotalTimeout(Duration.ofSeconds(30));
GroupServiceStubSettings groupServiceSettings = groupServiceSettingsBuilder.build();"
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.monitoring.v3.stub GroupServiceStubSettings]))
(defn *default-executor-provider-builder
"Returns a builder for the default ExecutorProvider for this service.
returns: `com.google.api.gax.core.InstantiatingExecutorProvider.Builder`"
(^com.google.api.gax.core.InstantiatingExecutorProvider.Builder []
(GroupServiceStubSettings/defaultExecutorProviderBuilder )))
(defn *get-default-endpoint
"Returns the default service endpoint.
returns: `java.lang.String`"
(^java.lang.String []
(GroupServiceStubSettings/getDefaultEndpoint )))
(defn *get-default-service-scopes
"Returns the default service scopes.
returns: `java.util.List<java.lang.String>`"
(^java.util.List []
(GroupServiceStubSettings/getDefaultServiceScopes )))
(defn *default-credentials-provider-builder
"Returns a builder for the default credentials for this service.
returns: `com.google.api.gax.core.GoogleCredentialsProvider.Builder`"
(^com.google.api.gax.core.GoogleCredentialsProvider.Builder []
(GroupServiceStubSettings/defaultCredentialsProviderBuilder )))
(defn *default-grpc-transport-provider-builder
"Returns a builder for the default ChannelProvider for this service.
returns: `com.google.api.gax.grpc.InstantiatingGrpcChannelProvider.Builder`"
(^com.google.api.gax.grpc.InstantiatingGrpcChannelProvider.Builder []
(GroupServiceStubSettings/defaultGrpcTransportProviderBuilder )))
(defn *default-transport-channel-provider
"returns: `com.google.api.gax.rpc.TransportChannelProvider`"
(^com.google.api.gax.rpc.TransportChannelProvider []
(GroupServiceStubSettings/defaultTransportChannelProvider )))
(defn *default-api-client-header-provider-builder
"returns: `(value="The surface for customizing headers is not stable yet and may change in the future.") com.google.api.gax.rpc.ApiClientHeaderProvider.Builder`"
([]
(GroupServiceStubSettings/defaultApiClientHeaderProviderBuilder )))
(defn *new-builder
"Returns a new builder for this class.
client-context - `com.google.api.gax.rpc.ClientContext`
returns: `com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder`"
(^com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder [^com.google.api.gax.rpc.ClientContext client-context]
(GroupServiceStubSettings/newBuilder client-context))
(^com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder []
(GroupServiceStubSettings/newBuilder )))
(defn list-groups-settings
"Returns the object with the settings used for calls to listGroups.
returns: `com.google.api.gax.rpc.PagedCallSettings<com.google.monitoring.v3.ListGroupsRequest,com.google.monitoring.v3.ListGroupsResponse,com.google.cloud.monitoring.v3.GroupServiceClient$ListGroupsPagedResponse>`"
(^com.google.api.gax.rpc.PagedCallSettings [^GroupServiceStubSettings this]
(-> this (.listGroupsSettings))))
(defn get-group-settings
"Returns the object with the settings used for calls to getGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.GetGroupRequest,com.google.monitoring.v3.Group>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.getGroupSettings))))
(defn create-group-settings
"Returns the object with the settings used for calls to createGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.CreateGroupRequest,com.google.monitoring.v3.Group>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.createGroupSettings))))
(defn update-group-settings
"Returns the object with the settings used for calls to updateGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.UpdateGroupRequest,com.google.monitoring.v3.Group>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.updateGroupSettings))))
(defn delete-group-settings
"Returns the object with the settings used for calls to deleteGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.DeleteGroupRequest,com.google.protobuf.Empty>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.deleteGroupSettings))))
(defn list-group-members-settings
"Returns the object with the settings used for calls to listGroupMembers.
returns: `com.google.api.gax.rpc.PagedCallSettings<com.google.monitoring.v3.ListGroupMembersRequest,com.google.monitoring.v3.ListGroupMembersResponse,com.google.cloud.monitoring.v3.GroupServiceClient$ListGroupMembersPagedResponse>`"
(^com.google.api.gax.rpc.PagedCallSettings [^GroupServiceStubSettings this]
(-> this (.listGroupMembersSettings))))
(defn create-stub
"returns: `(value="A restructuring of stub classes is planned, so this may break in the future") com.google.cloud.monitoring.v3.stub.GroupServiceStub`
throws: java.io.IOException"
([^GroupServiceStubSettings this]
(-> this (.createStub))))
(defn to-builder
"Returns a builder containing all the values of this settings class.
returns: `com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder`"
(^com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder [^GroupServiceStubSettings this]
(-> this (.toBuilder))))
| null | https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.monitoring/src/com/google/cloud/monitoring/v3/stub/GroupServiceStubSettings.clj | clojure |
" | (ns com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings
"Settings class to configure an instance of GroupServiceStub.
The default instance has everything set to sensible defaults:
The default service address (monitoring.googleapis.com) and default port (443) are used.
Credentials are acquired automatically through Application Default Credentials.
Retries are configured for idempotent methods but not for non-idempotent methods.
The builder of this class is recursive, so contained classes are themselves builders. When
build() is called, the tree of builders is called to create the complete settings object. For
example, to set the total timeout of getGroup to 30 seconds:
GroupServiceStubSettings.Builder groupServiceSettingsBuilder =
groupServiceSettingsBuilder.getGroupSettings().getRetrySettings().toBuilder()
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.monitoring.v3.stub GroupServiceStubSettings]))
(defn *default-executor-provider-builder
"Returns a builder for the default ExecutorProvider for this service.
returns: `com.google.api.gax.core.InstantiatingExecutorProvider.Builder`"
(^com.google.api.gax.core.InstantiatingExecutorProvider.Builder []
(GroupServiceStubSettings/defaultExecutorProviderBuilder )))
(defn *get-default-endpoint
"Returns the default service endpoint.
returns: `java.lang.String`"
(^java.lang.String []
(GroupServiceStubSettings/getDefaultEndpoint )))
(defn *get-default-service-scopes
"Returns the default service scopes.
returns: `java.util.List<java.lang.String>`"
(^java.util.List []
(GroupServiceStubSettings/getDefaultServiceScopes )))
(defn *default-credentials-provider-builder
"Returns a builder for the default credentials for this service.
returns: `com.google.api.gax.core.GoogleCredentialsProvider.Builder`"
(^com.google.api.gax.core.GoogleCredentialsProvider.Builder []
(GroupServiceStubSettings/defaultCredentialsProviderBuilder )))
(defn *default-grpc-transport-provider-builder
"Returns a builder for the default ChannelProvider for this service.
returns: `com.google.api.gax.grpc.InstantiatingGrpcChannelProvider.Builder`"
(^com.google.api.gax.grpc.InstantiatingGrpcChannelProvider.Builder []
(GroupServiceStubSettings/defaultGrpcTransportProviderBuilder )))
(defn *default-transport-channel-provider
"returns: `com.google.api.gax.rpc.TransportChannelProvider`"
(^com.google.api.gax.rpc.TransportChannelProvider []
(GroupServiceStubSettings/defaultTransportChannelProvider )))
(defn *default-api-client-header-provider-builder
"returns: `(value="The surface for customizing headers is not stable yet and may change in the future.") com.google.api.gax.rpc.ApiClientHeaderProvider.Builder`"
([]
(GroupServiceStubSettings/defaultApiClientHeaderProviderBuilder )))
(defn *new-builder
"Returns a new builder for this class.
client-context - `com.google.api.gax.rpc.ClientContext`
returns: `com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder`"
(^com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder [^com.google.api.gax.rpc.ClientContext client-context]
(GroupServiceStubSettings/newBuilder client-context))
(^com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder []
(GroupServiceStubSettings/newBuilder )))
(defn list-groups-settings
"Returns the object with the settings used for calls to listGroups.
returns: `com.google.api.gax.rpc.PagedCallSettings<com.google.monitoring.v3.ListGroupsRequest,com.google.monitoring.v3.ListGroupsResponse,com.google.cloud.monitoring.v3.GroupServiceClient$ListGroupsPagedResponse>`"
(^com.google.api.gax.rpc.PagedCallSettings [^GroupServiceStubSettings this]
(-> this (.listGroupsSettings))))
(defn get-group-settings
"Returns the object with the settings used for calls to getGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.GetGroupRequest,com.google.monitoring.v3.Group>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.getGroupSettings))))
(defn create-group-settings
"Returns the object with the settings used for calls to createGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.CreateGroupRequest,com.google.monitoring.v3.Group>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.createGroupSettings))))
(defn update-group-settings
"Returns the object with the settings used for calls to updateGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.UpdateGroupRequest,com.google.monitoring.v3.Group>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.updateGroupSettings))))
(defn delete-group-settings
"Returns the object with the settings used for calls to deleteGroup.
returns: `com.google.api.gax.rpc.UnaryCallSettings<com.google.monitoring.v3.DeleteGroupRequest,com.google.protobuf.Empty>`"
(^com.google.api.gax.rpc.UnaryCallSettings [^GroupServiceStubSettings this]
(-> this (.deleteGroupSettings))))
(defn list-group-members-settings
"Returns the object with the settings used for calls to listGroupMembers.
returns: `com.google.api.gax.rpc.PagedCallSettings<com.google.monitoring.v3.ListGroupMembersRequest,com.google.monitoring.v3.ListGroupMembersResponse,com.google.cloud.monitoring.v3.GroupServiceClient$ListGroupMembersPagedResponse>`"
(^com.google.api.gax.rpc.PagedCallSettings [^GroupServiceStubSettings this]
(-> this (.listGroupMembersSettings))))
(defn create-stub
"returns: `(value="A restructuring of stub classes is planned, so this may break in the future") com.google.cloud.monitoring.v3.stub.GroupServiceStub`
throws: java.io.IOException"
([^GroupServiceStubSettings this]
(-> this (.createStub))))
(defn to-builder
"Returns a builder containing all the values of this settings class.
returns: `com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder`"
(^com.google.cloud.monitoring.v3.stub.GroupServiceStubSettings$Builder [^GroupServiceStubSettings this]
(-> this (.toBuilder))))
|
63a71e3b5d836ffb8bd35627420cd220354e92b6977b949928be802f25d19d86 | realworldocaml/book | version.mli | * Version numbers for ocamlc and ocamlopt
type t
val make : int * int * int -> t
val of_ocaml_config : Ocaml_config.t -> t
(** Does this support [-no-keep-locs]? *)
val supports_no_keep_locs : t -> bool
(** Does this support [-opaque] for [.mli] files? *)
val supports_opaque_for_mli : t -> bool
(** Does it read the [.cmi] file of module alias even when [-no-alias-deps] is
passed? *)
val always_reads_alias_cmi : t -> bool
* Does this support [ ' color ' ] in [ OCAMLPARAM ] ?
val supports_color_in_ocamlparam : t -> bool
(** Does this support [OCAML_COLOR]? *)
val supports_ocaml_color : t -> bool
(** Does this this support [-args0]? *)
val supports_response_file : t -> bool
(** Does ocamlmklib support [-args0]? *)
val ocamlmklib_supports_response_file : t -> bool
(** Whether the standard library includes the [Bigarray] module *)
val stdlib_includes_bigarray : t -> bool
(** Whether ocamlobjinfo supports -no-approx*)
val ooi_supports_no_approx : t -> bool
(** Whether ocamlobjinfo supports -no-code*)
val ooi_supports_no_code : t -> bool
(** Whether the language supports custom let operators *)
val supports_let_syntax : t -> bool
(** Does this support [-output-complete-exe]? *)
val supports_output_complete_exe : t -> bool
(** Whether the compiler supports options for splitting compilation at emit:
[-stop-after scheduling] [-save-ir-after scheduling] [-start-from emit] *)
val supports_split_at_emit : t -> bool
(** Whether the compiler supports -function-sections *)
val supports_function_sections : t -> bool
(** [-custom] or [-output-complete-exe] depending on the version of OCaml *)
val custom_or_output_complete_exe : t -> string
(** ocamlopt -a always calls the native C linker, even for empty archives *)
val ocamlopt_always_calls_library_linker : t -> bool
(** Whether [Sys.opaque_identity] is in the standard library *)
val has_sys_opaque_identity : t -> bool
(** Whether [vmthreads] exists *)
val has_vmthreads : t -> bool
(** Whether [bigarray] {e library} exists *)
val has_bigarray_library : t -> bool
(** Whether the compiler supports alerts and the corresponding [-alert] option *)
val supports_alerts : t -> bool
(** Whether [dynlink], [str] and [unix] are in subdirectories of the standard
library *)
val has_sandboxed_otherlibs : t -> bool
* Whether the compiler distributes META files independently of ocamlfind
val has_META_files : t -> bool
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/dune_/src/ocaml/version.mli | ocaml | * Does this support [-no-keep-locs]?
* Does this support [-opaque] for [.mli] files?
* Does it read the [.cmi] file of module alias even when [-no-alias-deps] is
passed?
* Does this support [OCAML_COLOR]?
* Does this this support [-args0]?
* Does ocamlmklib support [-args0]?
* Whether the standard library includes the [Bigarray] module
* Whether ocamlobjinfo supports -no-approx
* Whether ocamlobjinfo supports -no-code
* Whether the language supports custom let operators
* Does this support [-output-complete-exe]?
* Whether the compiler supports options for splitting compilation at emit:
[-stop-after scheduling] [-save-ir-after scheduling] [-start-from emit]
* Whether the compiler supports -function-sections
* [-custom] or [-output-complete-exe] depending on the version of OCaml
* ocamlopt -a always calls the native C linker, even for empty archives
* Whether [Sys.opaque_identity] is in the standard library
* Whether [vmthreads] exists
* Whether [bigarray] {e library} exists
* Whether the compiler supports alerts and the corresponding [-alert] option
* Whether [dynlink], [str] and [unix] are in subdirectories of the standard
library | * Version numbers for ocamlc and ocamlopt
type t
val make : int * int * int -> t
val of_ocaml_config : Ocaml_config.t -> t
val supports_no_keep_locs : t -> bool
val supports_opaque_for_mli : t -> bool
val always_reads_alias_cmi : t -> bool
* Does this support [ ' color ' ] in [ OCAMLPARAM ] ?
val supports_color_in_ocamlparam : t -> bool
val supports_ocaml_color : t -> bool
val supports_response_file : t -> bool
val ocamlmklib_supports_response_file : t -> bool
val stdlib_includes_bigarray : t -> bool
val ooi_supports_no_approx : t -> bool
val ooi_supports_no_code : t -> bool
val supports_let_syntax : t -> bool
val supports_output_complete_exe : t -> bool
val supports_split_at_emit : t -> bool
val supports_function_sections : t -> bool
val custom_or_output_complete_exe : t -> string
val ocamlopt_always_calls_library_linker : t -> bool
val has_sys_opaque_identity : t -> bool
val has_vmthreads : t -> bool
val has_bigarray_library : t -> bool
val supports_alerts : t -> bool
val has_sandboxed_otherlibs : t -> bool
* Whether the compiler distributes META files independently of ocamlfind
val has_META_files : t -> bool
|
ecf80a4b92750495214b61140f5c0d975400bd428c28b7588548a448fe258388 | slyrus/mcclim-old | beagle-slider-pane.lisp |
(in-package :beagle)
;;; Limitations:
;;;
- ignores different NSControl sizes
- never restricts user to tickmark values only ( does ? )
;;; - no way to show current value
- no ' title ' ( not sure CLIM supports this anyway )
;;; - inherits from the 'standard' slider-pane, rather than from the abstract slider
(defclass beagle-slider-pane (slider-pane)
((tk-obj :initform (%null-ptr) :accessor toolkit-object)))
(defmethod initialize-instance :after ((sp beagle-slider-pane) &rest args)
(declare (ignore args))
sp)
(defmethod realize-mirror ((port beagle-port)
(sheet beagle-slider-pane))
;; Orientation is defined by the longer relative dimension in
Cocoa ; if > maxy - miny , we will get a : horizontal
;; bar; otherwise we get a :vertical bar.
(let* ((q (compose-space sheet))
(rect (make-ns-rect 0.0
0.0
(space-requirement-width q)
(space-requirement-height q)))
(mirror (make-instance 'lisp-slider :with-frame rect)))
(send mirror 'retain)
;; Sliders are disabled by default; enable it (otherwise nothing
is displayed ) . Looks like this is standard for any NSControl
;; subclass.
(send mirror :set-enabled #$YES)
(setf (toolkit-object sheet) mirror)
(setf (view-lisp-slider mirror) sheet)
;; Set slider up to handle _actions_ from the user.
(send mirror :set-target mirror)
(send mirror :set-action (ccl::@selector "takeSliderAction:"))
;; check if the slider works in discrete steps. There appears to be
no way in CLIM to restrict slider values only to these ticks , so
;; we make no use of 'setAllowsTickMarkValuesOnly:(BOOL)flag'.
;; This should automatically draw the tick marks, and change the style
;; of the scroller to have a little pointy bit.
(when (climi::slider-number-of-quanta sheet)
(send mirror :set-number-of-tick-marks (climi::slider-number-of-quanta sheet)))
(port-register-mirror (port sheet) sheet mirror)
(%beagle-mirror->sheet-assoc port mirror sheet)
(send (sheet-mirror (sheet-parent sheet)) :add-subview mirror)
(#_free rect)
mirror))
(defmethod handle-repaint :around ((pane beagle-slider-pane) region)
(declare (ignore region))
;; send a 'mark view dirty' message so it will be redrawn at the right
;; time.
(send (toolkit-object pane) 'set-needs-display))
#+nil
(defmethod compose-space ((sb beagle-scroll-bar-pane) &key width height)
(declare (ignore width height))
(let ((width (send (@class ns-scroller)
:scroller-width-for-control-size
#$NSRegularControlSize)))
;; For vertical scroll bars, ensure y > x. For horizontal, ensure
;; x > y.
(if (eq (gadget-orientation sb) :vertical)
(make-space-requirement :min-width width
:width width
:min-height (* 3 width)
:height (* 4 width))
(make-space-requirement :min-width (* 3 width)
:width (* 4 width)
:min-height width
:height width))))
;;; Not sure we want to implement all of these as separate methods...
;;; would be better to use a generic function, specialized on pane
;;; type?
(defun slider-action-handler (pane sender)
0.0 - 1.0
range ; 0.0 - > max extent ...
(gadget-min-value pane))))) ; ... (probably)
;; ::FIXME:: I don't like invoking the drag-callback directly...
(drag-callback pane
(gadget-client pane)
(gadget-id pane)
value))) | null | https://raw.githubusercontent.com/slyrus/mcclim-old/354cdf73c1a4c70e619ccd7d390cb2f416b21c1a/Backends/beagle/native-panes/beagle-slider-pane.lisp | lisp | Limitations:
- no way to show current value
- inherits from the 'standard' slider-pane, rather than from the abstract slider
Orientation is defined by the longer relative dimension in
if > maxy - miny , we will get a : horizontal
bar; otherwise we get a :vertical bar.
Sliders are disabled by default; enable it (otherwise nothing
subclass.
Set slider up to handle _actions_ from the user.
check if the slider works in discrete steps. There appears to be
we make no use of 'setAllowsTickMarkValuesOnly:(BOOL)flag'.
This should automatically draw the tick marks, and change the style
of the scroller to have a little pointy bit.
send a 'mark view dirty' message so it will be redrawn at the right
time.
For vertical scroll bars, ensure y > x. For horizontal, ensure
x > y.
Not sure we want to implement all of these as separate methods...
would be better to use a generic function, specialized on pane
type?
0.0 - > max extent ...
... (probably)
::FIXME:: I don't like invoking the drag-callback directly... |
(in-package :beagle)
- ignores different NSControl sizes
- never restricts user to tickmark values only ( does ? )
- no ' title ' ( not sure CLIM supports this anyway )
(defclass beagle-slider-pane (slider-pane)
((tk-obj :initform (%null-ptr) :accessor toolkit-object)))
(defmethod initialize-instance :after ((sp beagle-slider-pane) &rest args)
(declare (ignore args))
sp)
(defmethod realize-mirror ((port beagle-port)
(sheet beagle-slider-pane))
(let* ((q (compose-space sheet))
(rect (make-ns-rect 0.0
0.0
(space-requirement-width q)
(space-requirement-height q)))
(mirror (make-instance 'lisp-slider :with-frame rect)))
(send mirror 'retain)
is displayed ) . Looks like this is standard for any NSControl
(send mirror :set-enabled #$YES)
(setf (toolkit-object sheet) mirror)
(setf (view-lisp-slider mirror) sheet)
(send mirror :set-target mirror)
(send mirror :set-action (ccl::@selector "takeSliderAction:"))
no way in CLIM to restrict slider values only to these ticks , so
(when (climi::slider-number-of-quanta sheet)
(send mirror :set-number-of-tick-marks (climi::slider-number-of-quanta sheet)))
(port-register-mirror (port sheet) sheet mirror)
(%beagle-mirror->sheet-assoc port mirror sheet)
(send (sheet-mirror (sheet-parent sheet)) :add-subview mirror)
(#_free rect)
mirror))
(defmethod handle-repaint :around ((pane beagle-slider-pane) region)
(declare (ignore region))
(send (toolkit-object pane) 'set-needs-display))
#+nil
(defmethod compose-space ((sb beagle-scroll-bar-pane) &key width height)
(declare (ignore width height))
(let ((width (send (@class ns-scroller)
:scroller-width-for-control-size
#$NSRegularControlSize)))
(if (eq (gadget-orientation sb) :vertical)
(make-space-requirement :min-width width
:width width
:min-height (* 3 width)
:height (* 4 width))
(make-space-requirement :min-width (* 3 width)
:width (* 4 width)
:min-height width
:height width))))
(defun slider-action-handler (pane sender)
0.0 - 1.0
(drag-callback pane
(gadget-client pane)
(gadget-id pane)
value))) |
4a25cd028261a500fb71c2b4c22f9d2de227e796de6b65810eea050cb8bdc7bf | vernemq/vernemq | vmq_swc_app.erl | Copyright 2018 Octavo Labs AG Zurich Switzerland ( )
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%%-------------------------------------------------------------------
%% @doc vmq_swc public API
%% @end
%%%-------------------------------------------------------------------
-module(vmq_swc_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%%====================================================================
%% API
%%====================================================================
start(_StartType, _StartArgs) ->
vmq_swc_sup:start_link().
%%--------------------------------------------------------------------
stop(_State) ->
ok.
%%====================================================================
Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/vernemq/vernemq/eb1a262035af47e90d9edf07f36c1b1503557c1f/apps/vmq_swc/src/vmq_swc_app.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------
@doc vmq_swc public API
@end
-------------------------------------------------------------------
Application callbacks
====================================================================
API
====================================================================
--------------------------------------------------------------------
====================================================================
==================================================================== | Copyright 2018 Octavo Labs AG Zurich Switzerland ( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(vmq_swc_app).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
vmq_swc_sup:start_link().
stop(_State) ->
ok.
Internal functions
|
f0b87dea224bcf0eb28bbbfd1a2683696fa8403e0207dcb623db3201325ed244 | aymanosman/cl-missile-command | sound.lisp | (in-package :raylib)
(defun sine-wave (x)
(sin x))
(defun square-wave (x)
(signum (sin x)))
(defun sawtooth-wave (x)
(let ((y (/ x (* 2 pi))))
(* 2 (- y (floor (+ y 0.5))))))
(defun noise-wave (x)
(* (random 1.0) (signum (sin x))))
(defun sweep (start end)
(lambda (x)
(- start (* x (- start end)))) )
(defun fade-out-envelope ()
(lambda (time)
(min 1.0 (max 0.0 (- 1.0 time)))))
(defun f-wave (f curve sample-rate duration envelope)
(flet ((float-to-s16 (f)
(assert (<= -1.0 f 1.0))
(if (plusp f)
(floor (* f (- (expt 2 15) 1)))
(floor (* f (expt 2 15))))))
(let* ((block-align 2)
(total-samples (ceiling (* sample-rate duration)))
(size (* total-samples block-align))
(data (make-array size :element-type '(unsigned-byte 8))))
(loop :for i :from 0 :below size :by block-align
:do (setf (nibbles:sb16ref/le data i)
(float-to-s16
(* (funcall f (/ (* i pi (funcall curve (/ i size))) sample-rate))
(funcall envelope (/ i size))))))
data)))
(defun make-sound (curve duration instrument envelope)
(let ((sample-rate 4000))
(load-sound-from-wave
(make-wave-from-memory ".wav"
(wav:make-wav :channels 1
:sample-rate sample-rate
:bits-per-sample 16
:data (f-wave instrument curve sample-rate duration envelope))))))
| null | https://raw.githubusercontent.com/aymanosman/cl-missile-command/5f99bc8cec1ff06a2450b9408feb2c57df1561b9/sound.lisp | lisp | (in-package :raylib)
(defun sine-wave (x)
(sin x))
(defun square-wave (x)
(signum (sin x)))
(defun sawtooth-wave (x)
(let ((y (/ x (* 2 pi))))
(* 2 (- y (floor (+ y 0.5))))))
(defun noise-wave (x)
(* (random 1.0) (signum (sin x))))
(defun sweep (start end)
(lambda (x)
(- start (* x (- start end)))) )
(defun fade-out-envelope ()
(lambda (time)
(min 1.0 (max 0.0 (- 1.0 time)))))
(defun f-wave (f curve sample-rate duration envelope)
(flet ((float-to-s16 (f)
(assert (<= -1.0 f 1.0))
(if (plusp f)
(floor (* f (- (expt 2 15) 1)))
(floor (* f (expt 2 15))))))
(let* ((block-align 2)
(total-samples (ceiling (* sample-rate duration)))
(size (* total-samples block-align))
(data (make-array size :element-type '(unsigned-byte 8))))
(loop :for i :from 0 :below size :by block-align
:do (setf (nibbles:sb16ref/le data i)
(float-to-s16
(* (funcall f (/ (* i pi (funcall curve (/ i size))) sample-rate))
(funcall envelope (/ i size))))))
data)))
(defun make-sound (curve duration instrument envelope)
(let ((sample-rate 4000))
(load-sound-from-wave
(make-wave-from-memory ".wav"
(wav:make-wav :channels 1
:sample-rate sample-rate
:bits-per-sample 16
:data (f-wave instrument curve sample-rate duration envelope))))))
| |
554a440f70a505ac7beeca56970c49cd2f374fefc5c80bb8eea159a893d77df9 | avsm/mirage-duniverse | tls_mirage.mli | * Effectful operations using Mirage for pure TLS .
open Result
(** TLS module given a flow *)
module Make (F : Mirage_flow_lwt.S) : sig
module FLOW : Mirage_flow_lwt.S
(** possible errors: incoming alert, processing failure, or a
problem in the underlying flow. *)
type error = [ `Tls_alert of Tls.Packet.alert_type
| `Tls_failure of Tls.Engine.failure
| `Read of F.error
| `Write of F.write_error ]
type write_error = [ `Closed | error ]
(** The type for write errors. *)
type buffer = Cstruct.t
type +'a io = 'a Lwt.t
type tracer = Sexplib.Sexp.t -> unit
(** we provide the FLOW interface *)
include Mirage_flow_lwt.S
with type 'a io := 'a io
and type buffer := buffer
and type error := error
and type write_error := write_error
* [ reneg ~drop t ] renegotiates the
session , and blocks until the renegotiation finished . Optionally , a new
[ authenticator ] and [ acceptable_cas ] can be used . The own certificate can
be adjusted by [ cert ] . If [ drop ] is [ true ] ( the default ) ,
application data received before the renegotiation finished is dropped .
session, and blocks until the renegotiation finished. Optionally, a new
[authenticator] and [acceptable_cas] can be used. The own certificate can
be adjusted by [cert]. If [drop] is [true] (the default),
application data received before the renegotiation finished is dropped. *)
val reneg : ?authenticator:X509.Authenticator.a ->
?acceptable_cas:X509.distinguished_name list -> ?cert:Tls.Config.own_cert ->
?drop:bool -> flow -> (unit, write_error) result Lwt.t
* [ client_of_flow ~trace client ~host flow ] upgrades the existing connection
to TLS using the [ client ] configuration , using [ host ] as peer name .
to TLS using the [client] configuration, using [host] as peer name. *)
val client_of_flow :
?trace:tracer -> Tls.Config.client -> ?host:string -> FLOW.flow ->
(flow, write_error) result Lwt.t
* [ server_of_flow ? tracer server flow ] upgrades the flow to a TLS
connection using the [ server ] configuration .
connection using the [server] configuration. *)
val server_of_flow :
?trace:tracer -> Tls.Config.server -> FLOW.flow ->
(flow, write_error) result Lwt.t
(** [epoch flow] extracts information of the established session. *)
val epoch : flow -> (Tls.Core.epoch_data, unit) result
end
with module FLOW = F
(** X.509 handling given a key value store and a clock *)
module X509 (KV : Mirage_kv_lwt.RO) (C : Mirage_clock.PCLOCK) : sig
(** [authenticator store clock typ] creates an [authenticator], either
using the given certificate authorities in the [store] or
null. *)
val authenticator : KV.t -> C.t -> [< `Noop | `CAs ] -> X509.Authenticator.a Lwt.t
(** [certificate store typ] unmarshals a certificate chain and
private key material from the [store]. *)
val certificate : KV.t -> [< `Default | `Name of string ]
-> (X509.t list * Nocrypto.Rsa.priv) Lwt.t
end
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/tls/mirage/tls_mirage.mli | ocaml | * TLS module given a flow
* possible errors: incoming alert, processing failure, or a
problem in the underlying flow.
* The type for write errors.
* we provide the FLOW interface
* [epoch flow] extracts information of the established session.
* X.509 handling given a key value store and a clock
* [authenticator store clock typ] creates an [authenticator], either
using the given certificate authorities in the [store] or
null.
* [certificate store typ] unmarshals a certificate chain and
private key material from the [store]. | * Effectful operations using Mirage for pure TLS .
open Result
module Make (F : Mirage_flow_lwt.S) : sig
module FLOW : Mirage_flow_lwt.S
type error = [ `Tls_alert of Tls.Packet.alert_type
| `Tls_failure of Tls.Engine.failure
| `Read of F.error
| `Write of F.write_error ]
type write_error = [ `Closed | error ]
type buffer = Cstruct.t
type +'a io = 'a Lwt.t
type tracer = Sexplib.Sexp.t -> unit
include Mirage_flow_lwt.S
with type 'a io := 'a io
and type buffer := buffer
and type error := error
and type write_error := write_error
* [ reneg ~drop t ] renegotiates the
session , and blocks until the renegotiation finished . Optionally , a new
[ authenticator ] and [ acceptable_cas ] can be used . The own certificate can
be adjusted by [ cert ] . If [ drop ] is [ true ] ( the default ) ,
application data received before the renegotiation finished is dropped .
session, and blocks until the renegotiation finished. Optionally, a new
[authenticator] and [acceptable_cas] can be used. The own certificate can
be adjusted by [cert]. If [drop] is [true] (the default),
application data received before the renegotiation finished is dropped. *)
val reneg : ?authenticator:X509.Authenticator.a ->
?acceptable_cas:X509.distinguished_name list -> ?cert:Tls.Config.own_cert ->
?drop:bool -> flow -> (unit, write_error) result Lwt.t
* [ client_of_flow ~trace client ~host flow ] upgrades the existing connection
to TLS using the [ client ] configuration , using [ host ] as peer name .
to TLS using the [client] configuration, using [host] as peer name. *)
val client_of_flow :
?trace:tracer -> Tls.Config.client -> ?host:string -> FLOW.flow ->
(flow, write_error) result Lwt.t
* [ server_of_flow ? tracer server flow ] upgrades the flow to a TLS
connection using the [ server ] configuration .
connection using the [server] configuration. *)
val server_of_flow :
?trace:tracer -> Tls.Config.server -> FLOW.flow ->
(flow, write_error) result Lwt.t
val epoch : flow -> (Tls.Core.epoch_data, unit) result
end
with module FLOW = F
module X509 (KV : Mirage_kv_lwt.RO) (C : Mirage_clock.PCLOCK) : sig
val authenticator : KV.t -> C.t -> [< `Noop | `CAs ] -> X509.Authenticator.a Lwt.t
val certificate : KV.t -> [< `Default | `Name of string ]
-> (X509.t list * Nocrypto.Rsa.priv) Lwt.t
end
|
cca74e4802e37e4ea6ec9c1de017ff0a6fdbfada3d9f5a68218d6b33388ee3ee | gonzojive/elephant | elet-package.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Base : 10 -*-
;;;
;;; tests.lisp -- package definition
;;;
Initial version 9/02/2004 by
;;; <>
;;;
;;; part of
;;;
Elephant : an object - oriented database for Common Lisp
;;;
Copyright ( c ) 2004 by and
;;; <> <>
;;;
;;; Elephant users are granted the rights to distribute and use this software
as governed by the terms of the Lisp Lesser GNU Public License
;;; (), also known as the LLGPL.
(defpackage elephant-tests
(:nicknames :ele-tests)
#+use-fiveam
(:use :common-lisp :elephant :5am :bordeaux-threads)
#-use-fiveam
(:use :common-lisp :elephant :regression-test :bordeaux-threads)
(:import-from :elephant
with-buffer-streams
serialize
deserialize)
#+cmu
(:import-from :pcl
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+sbcl
(:import-from :sb-mop
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+allegro
(:import-from :clos
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+openmcl
(:import-from :ccl
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+lispworks
(:import-from :clos
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
)
| null | https://raw.githubusercontent.com/gonzojive/elephant/b29a012ab75ccea2fc7fc4f1e9d5e821f0bd60bf/tests/elet-package.lisp | lisp | Syntax : ANSI - Common - Lisp ; Base : 10 -*-
tests.lisp -- package definition
<>
part of
<> <>
Elephant users are granted the rights to distribute and use this software
(), also known as the LLGPL. | Initial version 9/02/2004 by
Elephant : an object - oriented database for Common Lisp
Copyright ( c ) 2004 by and
as governed by the terms of the Lisp Lesser GNU Public License
(defpackage elephant-tests
(:nicknames :ele-tests)
#+use-fiveam
(:use :common-lisp :elephant :5am :bordeaux-threads)
#-use-fiveam
(:use :common-lisp :elephant :regression-test :bordeaux-threads)
(:import-from :elephant
with-buffer-streams
serialize
deserialize)
#+cmu
(:import-from :pcl
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+sbcl
(:import-from :sb-mop
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+allegro
(:import-from :clos
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+openmcl
(:import-from :ccl
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
#+lispworks
(:import-from :clos
finalize-inheritance
slot-definition-name
slot-makunbound-using-class
class-slots)
)
|
13bc40062e922543e0d819d4a01adf6f7c92aa11f06430ba52d08f16bbd886d4 | clash-lang/clash-compiler | ALU.hs | module ALU where
import Clash.Prelude
data OPC = ADD | MUL | SUB
topEntity :: OPC -> Integer -> Integer -> Integer
topEntity SUB = (-)
topEntity ADD = (+)
topEntity MUL = (*)
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/e0a532a9bbd0379c61b5387c05d01e262f4acf4d/examples/ALU.hs | haskell | module ALU where
import Clash.Prelude
data OPC = ADD | MUL | SUB
topEntity :: OPC -> Integer -> Integer -> Integer
topEntity SUB = (-)
topEntity ADD = (+)
topEntity MUL = (*)
| |
a4a29595cc5111506c2f7a7383cec1f67cd035ec732c06a38f27dad254a1c32f | GumTreeDiff/cgum | suffix_tree_ext.mli | (**
Generalized suffix trees (GSTs).
Computes generalized suffix trees from list of strings. A terminal symbol is
implicitly added to them, but is not returned in the word labeling nodes and
leaves. This should allow a rather transparent handling of GSTs.
Node-based accesses are provided (sequences, root, children, suffix links,
node labels, index), as well as a functional for synthesizing attributes from
a GST. A readable representation of GSTs is derived from the later.
*)
made by
extension by : the function add , allowing to extend a gst
( internally use now a DynArray instead of an Array )
(internally use now a DynArray instead of an Array)
*)
type node
(** Type of nodes in GSTs. *)
type t
(** Type of GSTs. *)
val make : string list -> t
(** [make l_str] computes a GST based on the set of strings given in [l_str]. *)
val add : string -> t -> unit
(** [add l_str gst] add a new string in the GST. Does it via a side effect. *)
val string_list : t -> string list
(** [string_list gst] returns the list of strings from which [gst] was computed. *)
val string : t -> int -> string
(** [string gst k] returns the sequence number [k] (starting from 0). *)
val root : t -> node
(** [root gst] returns the root node of the gst. *)
val word : t -> node -> string
(** [word gst n] returns the word labeling node [n] in [gst]. *)
val children : t -> node -> node list
(** [children gst n] returns a list of the children nodes of [n] in [gst]. *)
val linked_node : t -> node -> node
(** [linked_node gst n] returns the node pointed by the suffix link from [n] in [gst]. *)
val index : t -> node -> int * int
(** [index gst n] returns the index of a leaf [n] in [gst].
This index is a pair [(k,i)], where [k] is the number of the sequence (as used by [string]), and
[i] is the position of the related suffix (starting from [0] as usual in strings).
@raise Invalid_argument "Suffix_tree.index: not a leaf" if [n] is not a leaf (has some child). *)
val implicit_node : t -> string -> node * string * node
* [ implicit_node gst word ] returns an implicit_node [ ( node , ) ] , where [ node ] is the lowest
node in the suffix tre such that the concatenation of the word recognized by [ node ] and [ word ' ] is
equal to [ word ] , if [ word ' ] is not the empty string , then [ child ] is the child node of [ node ] , whose
label has [ word ' ] as a prefix .
@raise Not_found when [ word ] is not a substring of [ string_list gst ] .
node in the suffix tre such that the concatenation of the word recognized by [node] and [word'] is
equal to [word], if [word'] is not the empty string, then [child] is the child node of [node], whose
label has [word'] as a prefix.
@raise Not_found when [word] is not a substring of [string_list gst]. *)
val fold : t -> ('h -> node -> bool) -> ('h -> node -> 'h) -> ('s list -> 'h -> node -> 's) -> 'h -> 's
* [ fold gst filter herit synth init ] computes some attribute(s ) over a GST by using the 3 functions
[ filter ] , [ herit ] , [ synth ] , and the initial value [ init ] inherited by the root node . [ ' h ] is the type
of inherited attributes , and [ 's ] is the type of synthesized attributes , and so the type of the result .
The meaning of 3 functions is as follows :
- [ filter h child ] returns [ true ] if the node [ child ] must be explored given the inherited value of the current
node ( parent of [ child ] ) ,
- [ herit h child ] returns the value inherited by [ child ] given the inherited value of the current node
( parent of [ child ] ) ,
- [ synth l h node ] returns the synthesized value of the current node , given its inherited value [ h ] , and
the list [ l ] of synthesized values of explored children of [ node ] ( according to [ filter ] ) .
[filter], [herit], [synth], and the initial value [init] inherited by the root node. ['h] is the type
of inherited attributes, and ['s] is the type of synthesized attributes, and so the type of the result.
The meaning of 3 functions is as follows:
- [filter h child] returns [true] if the node [child] must be explored given the inherited value of the current
node (parent of [child]),
- [herit h child] returns the value inherited by [child] given the inherited value of the current node
(parent of [child]),
- [synth l h node] returns the synthesized value of the current node, given its inherited value [h], and
the list [l] of synthesized values of explored children of [node] (according to [filter]).
*)
val fold_node : t -> ('h -> node -> bool) -> ('h -> node -> 'h) -> ('s list -> 'h -> node -> 's) -> 'h -> node -> 's
(** Same as [fold], except the computation starts and finishes at the last argument node. *)
val fold_s : t -> ('s list -> node -> 's) -> 's
* [ fold_s gst synth ] is equivalent to [ fold gst filter herit synth init ] , where there is no filtering , and
no inherited values : purely synthetic .
no inherited values: purely synthetic. *)
val fold_s_node : t -> ('s list -> node -> 's) -> node -> 's
(** Same as [fold_s], except the computation starts and finishes at the last argument node. *)
val fold_fs : t -> (node -> bool) -> ('s list -> node -> 's) -> 's
* [ fold_fs gst filter synth ] is equivalent to [ fold gst filter herit synth init ] , where there is no inherited
values .
values. *)
type tree = Node of string * tree list | Leaf of string * (int * int)
val readable : t -> tree
(** [readable gst] returns a (more) readable representation of [gst].
Each node and leaf is decorated by its word label, and leaves are
also decorated by their index. *)
val exact_matches : t -> string -> (int * int) list
| null | https://raw.githubusercontent.com/GumTreeDiff/cgum/8521aa80fcf4873a19e60ce8c846c886aaefb41b/commons/ocamlextra/suffix_tree_ext.mli | ocaml | *
Generalized suffix trees (GSTs).
Computes generalized suffix trees from list of strings. A terminal symbol is
implicitly added to them, but is not returned in the word labeling nodes and
leaves. This should allow a rather transparent handling of GSTs.
Node-based accesses are provided (sequences, root, children, suffix links,
node labels, index), as well as a functional for synthesizing attributes from
a GST. A readable representation of GSTs is derived from the later.
* Type of nodes in GSTs.
* Type of GSTs.
* [make l_str] computes a GST based on the set of strings given in [l_str].
* [add l_str gst] add a new string in the GST. Does it via a side effect.
* [string_list gst] returns the list of strings from which [gst] was computed.
* [string gst k] returns the sequence number [k] (starting from 0).
* [root gst] returns the root node of the gst.
* [word gst n] returns the word labeling node [n] in [gst].
* [children gst n] returns a list of the children nodes of [n] in [gst].
* [linked_node gst n] returns the node pointed by the suffix link from [n] in [gst].
* [index gst n] returns the index of a leaf [n] in [gst].
This index is a pair [(k,i)], where [k] is the number of the sequence (as used by [string]), and
[i] is the position of the related suffix (starting from [0] as usual in strings).
@raise Invalid_argument "Suffix_tree.index: not a leaf" if [n] is not a leaf (has some child).
* Same as [fold], except the computation starts and finishes at the last argument node.
* Same as [fold_s], except the computation starts and finishes at the last argument node.
* [readable gst] returns a (more) readable representation of [gst].
Each node and leaf is decorated by its word label, and leaves are
also decorated by their index. | made by
extension by : the function add , allowing to extend a gst
( internally use now a DynArray instead of an Array )
(internally use now a DynArray instead of an Array)
*)
type node
type t
val make : string list -> t
val add : string -> t -> unit
val string_list : t -> string list
val string : t -> int -> string
val root : t -> node
val word : t -> node -> string
val children : t -> node -> node list
val linked_node : t -> node -> node
val index : t -> node -> int * int
val implicit_node : t -> string -> node * string * node
* [ implicit_node gst word ] returns an implicit_node [ ( node , ) ] , where [ node ] is the lowest
node in the suffix tre such that the concatenation of the word recognized by [ node ] and [ word ' ] is
equal to [ word ] , if [ word ' ] is not the empty string , then [ child ] is the child node of [ node ] , whose
label has [ word ' ] as a prefix .
@raise Not_found when [ word ] is not a substring of [ string_list gst ] .
node in the suffix tre such that the concatenation of the word recognized by [node] and [word'] is
equal to [word], if [word'] is not the empty string, then [child] is the child node of [node], whose
label has [word'] as a prefix.
@raise Not_found when [word] is not a substring of [string_list gst]. *)
val fold : t -> ('h -> node -> bool) -> ('h -> node -> 'h) -> ('s list -> 'h -> node -> 's) -> 'h -> 's
* [ fold gst filter herit synth init ] computes some attribute(s ) over a GST by using the 3 functions
[ filter ] , [ herit ] , [ synth ] , and the initial value [ init ] inherited by the root node . [ ' h ] is the type
of inherited attributes , and [ 's ] is the type of synthesized attributes , and so the type of the result .
The meaning of 3 functions is as follows :
- [ filter h child ] returns [ true ] if the node [ child ] must be explored given the inherited value of the current
node ( parent of [ child ] ) ,
- [ herit h child ] returns the value inherited by [ child ] given the inherited value of the current node
( parent of [ child ] ) ,
- [ synth l h node ] returns the synthesized value of the current node , given its inherited value [ h ] , and
the list [ l ] of synthesized values of explored children of [ node ] ( according to [ filter ] ) .
[filter], [herit], [synth], and the initial value [init] inherited by the root node. ['h] is the type
of inherited attributes, and ['s] is the type of synthesized attributes, and so the type of the result.
The meaning of 3 functions is as follows:
- [filter h child] returns [true] if the node [child] must be explored given the inherited value of the current
node (parent of [child]),
- [herit h child] returns the value inherited by [child] given the inherited value of the current node
(parent of [child]),
- [synth l h node] returns the synthesized value of the current node, given its inherited value [h], and
the list [l] of synthesized values of explored children of [node] (according to [filter]).
*)
val fold_node : t -> ('h -> node -> bool) -> ('h -> node -> 'h) -> ('s list -> 'h -> node -> 's) -> 'h -> node -> 's
val fold_s : t -> ('s list -> node -> 's) -> 's
* [ fold_s gst synth ] is equivalent to [ fold gst filter herit synth init ] , where there is no filtering , and
no inherited values : purely synthetic .
no inherited values: purely synthetic. *)
val fold_s_node : t -> ('s list -> node -> 's) -> node -> 's
val fold_fs : t -> (node -> bool) -> ('s list -> node -> 's) -> 's
* [ fold_fs gst filter synth ] is equivalent to [ fold gst filter herit synth init ] , where there is no inherited
values .
values. *)
type tree = Node of string * tree list | Leaf of string * (int * int)
val readable : t -> tree
val exact_matches : t -> string -> (int * int) list
|
59d9104429abdb5a4550d1690f344add27959590e5c6d50e840c7f7898eb9472 | CSCfi/rems | duo.cljc | (ns rems.common.duo
(:require [medley.core :refer [distinct-by]]))
(def duo-restriction-label
{:collaboration :t.duo.restriction/collaboration
:date :t.duo.restriction/date
:institute :t.duo.restriction/institute
:location :t.duo.restriction/location
:mondo :t.duo.restriction/mondo
:months :t.duo.restriction/months
:project :t.duo.restriction/project
:topic :t.duo.restriction/topic
:users :t.duo.restriction/users})
(defn duo-validation-summary [statuses]
(when-let [statuses (not-empty (remove #{:duo/not-found} statuses))]
(or (some #{:duo/not-compatible} statuses)
(some #{:duo/needs-manual-validation} statuses)
:duo/compatible)))
(defn unmatched-duos [duo-matches]
(->> duo-matches
(filter (comp #{:duo/not-found} :validity :duo/validation))
(distinct-by :duo/id)))
| null | https://raw.githubusercontent.com/CSCfi/rems/6c34d51199289c395b0cd0b9a3176bc5f0e83758/src/cljc/rems/common/duo.cljc | clojure | (ns rems.common.duo
(:require [medley.core :refer [distinct-by]]))
(def duo-restriction-label
{:collaboration :t.duo.restriction/collaboration
:date :t.duo.restriction/date
:institute :t.duo.restriction/institute
:location :t.duo.restriction/location
:mondo :t.duo.restriction/mondo
:months :t.duo.restriction/months
:project :t.duo.restriction/project
:topic :t.duo.restriction/topic
:users :t.duo.restriction/users})
(defn duo-validation-summary [statuses]
(when-let [statuses (not-empty (remove #{:duo/not-found} statuses))]
(or (some #{:duo/not-compatible} statuses)
(some #{:duo/needs-manual-validation} statuses)
:duo/compatible)))
(defn unmatched-duos [duo-matches]
(->> duo-matches
(filter (comp #{:duo/not-found} :validity :duo/validation))
(distinct-by :duo/id)))
| |
9de9a6656f32ea7b1f84b807bb27eecc71cd747d6e7b1dd43a10e58e3f8377ed | shayne-fletcher/zen | snoc.ml | type 'a snoc_list =
| Empty | Snoc of 'a snoc_list * 'a
let hd : 'a snoc_list -> 'a = function
| Empty -> raise (Failure "hd") | Snoc (_, s) -> s
let tl : 'a snoc_list -> 'a snoc_list = function
| Empty -> raise (Failure "tl") | Snoc (l, _) -> l
let rec list_of_snoc_list : 'a snoc_list -> 'a list = function
| Empty -> [] | Snoc (t, h) -> h :: list_of_snoc_list t
let rec snoc_list_of_list : 'a list -> 'a snoc_list = function
| [] -> Empty | (h :: t) -> Snoc (snoc_list_of_list t, h)
| null | https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/snoc/snoc.ml | ocaml | type 'a snoc_list =
| Empty | Snoc of 'a snoc_list * 'a
let hd : 'a snoc_list -> 'a = function
| Empty -> raise (Failure "hd") | Snoc (_, s) -> s
let tl : 'a snoc_list -> 'a snoc_list = function
| Empty -> raise (Failure "tl") | Snoc (l, _) -> l
let rec list_of_snoc_list : 'a snoc_list -> 'a list = function
| Empty -> [] | Snoc (t, h) -> h :: list_of_snoc_list t
let rec snoc_list_of_list : 'a list -> 'a snoc_list = function
| [] -> Empty | (h :: t) -> Snoc (snoc_list_of_list t, h)
| |
1df26acb73771ec5ee512c5ffe6711142a12e411170d72d601410aa53b7c528e | eponai/sulolive | shipping.cljc | (ns eponai.web.ui.checkout.shipping
(:require
#?(:cljs [cljs.spec.alpha :as s]
:clj
[clojure.spec.alpha :as s])
#?(:cljs
[eponai.web.utils :as web-utils])
#?(:cljs
[eponai.web.google-places :as places])
[taoensso.timbre :refer [debug]]))
(def form-inputs
{:shipping/name "sulo-shipping-full-name"
:shipping.address/street "sulo-shipping-street-address-1"
:shipping.address/street2 "sulo-shipping-street-address-2"
:shipping.address/postal "sulo-shipping-postal-code"
:shipping.address/locality "sulo-shipping-locality"
:shipping.address/region "sulo-shipping-region"
:shipping.address/country "sulo-shipping-country"})
(s/def :country/code (s/and string? #(re-matches #"\w{2}" %)))
(s/def :shipping/name (s/and string? #(not-empty %)))
(s/def :shipping.address/street (s/and string? #(not-empty %)))
(s/def :shipping.address/street2 (s/or :value string? :empty nil?))
(s/def :shipping.address/postal (s/and string? #(not-empty %)))
(s/def :shipping.address/locality (s/and string? #(not-empty %)))
(s/def :shipping.address/region (s/or :value #(string? (not-empty %)) :empty nil?))
(s/def :shipping.address/country (s/keys :req [:country/code]))
(s/def :shipping/address (s/keys :req [:shipping.address/street
:shipping.address/postal
:shipping.address/locality]
:opt [
;:shipping.address/street2
:shipping.address/region]))
(s/def ::shipping (s/keys :req [:shipping/address
:shipping/name]))
(def regions
{"CA" ["ON" "QC" "NS" "NB" "MB" "BC" "PE" "SK" "AB" "NL"]
"US" ["AL" "AK" "AZ" "AR" "CA" "CO" "CT" "DE" "FL" "GA"
"HI" "ID" "IL" "IN" "IA" "KS" "KY" "LA" "ME" "MD"
"MA" "MI" "MN" "MS" "MO" "MT" "NE" "NV" "NH" "NJ"
"NM" "NY" "NC" "ND" "OH" "OK" "OR" "PA" "RI" "SC"
"SD" "TN" "TX" "UT" "VT" "VA" "WA" "WV" "WI" "WY"]
})
(def region-names
{"CA" "Province"
"US" "State"})
| null | https://raw.githubusercontent.com/eponai/sulolive/7a70701bbd3df6bbb92682679dcedb53f8822c18/src/eponai/web/ui/checkout/shipping.cljc | clojure | :shipping.address/street2 | (ns eponai.web.ui.checkout.shipping
(:require
#?(:cljs [cljs.spec.alpha :as s]
:clj
[clojure.spec.alpha :as s])
#?(:cljs
[eponai.web.utils :as web-utils])
#?(:cljs
[eponai.web.google-places :as places])
[taoensso.timbre :refer [debug]]))
(def form-inputs
{:shipping/name "sulo-shipping-full-name"
:shipping.address/street "sulo-shipping-street-address-1"
:shipping.address/street2 "sulo-shipping-street-address-2"
:shipping.address/postal "sulo-shipping-postal-code"
:shipping.address/locality "sulo-shipping-locality"
:shipping.address/region "sulo-shipping-region"
:shipping.address/country "sulo-shipping-country"})
(s/def :country/code (s/and string? #(re-matches #"\w{2}" %)))
(s/def :shipping/name (s/and string? #(not-empty %)))
(s/def :shipping.address/street (s/and string? #(not-empty %)))
(s/def :shipping.address/street2 (s/or :value string? :empty nil?))
(s/def :shipping.address/postal (s/and string? #(not-empty %)))
(s/def :shipping.address/locality (s/and string? #(not-empty %)))
(s/def :shipping.address/region (s/or :value #(string? (not-empty %)) :empty nil?))
(s/def :shipping.address/country (s/keys :req [:country/code]))
(s/def :shipping/address (s/keys :req [:shipping.address/street
:shipping.address/postal
:shipping.address/locality]
:opt [
:shipping.address/region]))
(s/def ::shipping (s/keys :req [:shipping/address
:shipping/name]))
(def regions
{"CA" ["ON" "QC" "NS" "NB" "MB" "BC" "PE" "SK" "AB" "NL"]
"US" ["AL" "AK" "AZ" "AR" "CA" "CO" "CT" "DE" "FL" "GA"
"HI" "ID" "IL" "IN" "IA" "KS" "KY" "LA" "ME" "MD"
"MA" "MI" "MN" "MS" "MO" "MT" "NE" "NV" "NH" "NJ"
"NM" "NY" "NC" "ND" "OH" "OK" "OR" "PA" "RI" "SC"
"SD" "TN" "TX" "UT" "VT" "VA" "WA" "WV" "WI" "WY"]
})
(def region-names
{"CA" "Province"
"US" "State"})
|
35a39917f7f72f5b93d2cc0929e38ebaaf589609e93eb8b38d5df6b91fd43979 | theodormoroianu/SecondYearCourses | lab6-sol_20210115150941.hs | -- /
import Data.Char
import Data.List
import Test.QuickCheck
1 .
rotate :: Int -> [Char] -> [Char]
rotate n l
| n > 0
, n < length l
= suf ++ pre
where
(pre, suf) = splitAt n l
rotate _ _ = error "număr negativ sau prea mare"
2 .
prop_rotate :: Int -> String -> Bool
prop_rotate k str = rotate (l + 1 - m) (rotate m str') == str'
where
str' = "ab" ++ str
l = length str + 1
m = 1 + k `mod` l
3 .
makeKey :: Int -> [(Char, Char)]
makeKey n = zip alphabet (rotate n alphabet)
where
alphabet = ['A'..'Z']
4 .
lookUp :: Char -> [(Char, Char)] -> Char
lookUp c l = head $ [y | (x, y) <- l, x == c] ++ [c]
5 .
encipher :: Int -> Char -> Char
encipher n c = lookUp c (makeKey n)
6 .
normalize :: String -> String
normalize = map toUpper . filter isAlphaNum
7 .
encipherStr :: Int -> String -> String
encipherStr n = map (encipher n) . normalize
8 .
reverseKey :: [(Char, Char)] -> [(Char, Char)]
reverseKey = map (\(x,y) -> (y,x))
9 .
decipher :: Int -> Char -> Char
decipher n c = lookUp c (reverseKey (makeKey n))
decipherStr :: Int -> String -> String
decipherStr = map . decipher
data Fruct
= Mar String Bool
| Portocala String Int
ionatanFaraVierme = Mar "Ionatan" False
goldenCuVierme = Mar "Golden Delicious" True
portocalaSicilia10 = Portocala "Sanguinello" 10
listaFructe =
[ Mar "Ionatan" False
, Portocala "Sanguinello" 10
, Portocala "Valencia" 22
, Mar "Golden Delicious" True
, Portocala "Sanguinello" 15
, Portocala "Moro" 12
, Portocala "Tarocco" 3
, Portocala "Moro" 12
, Portocala "Valencia" 2
, Mar "Golden Delicious" False
, Mar "Golden" False
, Mar "Golden" True
]
ePortocalaDeSicilia :: Fruct -> Bool
ePortocalaDeSicilia (Portocala soi _) =
soi `elem` ["Tarocco", "Moro", "Sanguinello"]
ePortocalaDeSicilia _ = False
nrFeliiSicilia :: [Fruct] -> Int
nrFeliiSicilia fructe = sum [felii | p@(Portocala _ felii) <- fructe, ePortocalaDeSicilia p]
nrMereViermi :: [Fruct] -> Int
nrMereViermi fructe = length [1 | Mar _ True <- fructe]
data Linie = L [Int]
deriving Show
data Matrice = M [Linie]
verifica :: Num n => Matrice -> n -> Bool | null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/99185b0e97119135e7301c2c7be0f07ae7258006/Haskell/l/.history/lab6/lab6-sol_20210115150941.hs | haskell | / |
import Data.Char
import Data.List
import Test.QuickCheck
1 .
rotate :: Int -> [Char] -> [Char]
rotate n l
| n > 0
, n < length l
= suf ++ pre
where
(pre, suf) = splitAt n l
rotate _ _ = error "număr negativ sau prea mare"
2 .
prop_rotate :: Int -> String -> Bool
prop_rotate k str = rotate (l + 1 - m) (rotate m str') == str'
where
str' = "ab" ++ str
l = length str + 1
m = 1 + k `mod` l
3 .
makeKey :: Int -> [(Char, Char)]
makeKey n = zip alphabet (rotate n alphabet)
where
alphabet = ['A'..'Z']
4 .
lookUp :: Char -> [(Char, Char)] -> Char
lookUp c l = head $ [y | (x, y) <- l, x == c] ++ [c]
5 .
encipher :: Int -> Char -> Char
encipher n c = lookUp c (makeKey n)
6 .
normalize :: String -> String
normalize = map toUpper . filter isAlphaNum
7 .
encipherStr :: Int -> String -> String
encipherStr n = map (encipher n) . normalize
8 .
reverseKey :: [(Char, Char)] -> [(Char, Char)]
reverseKey = map (\(x,y) -> (y,x))
9 .
decipher :: Int -> Char -> Char
decipher n c = lookUp c (reverseKey (makeKey n))
decipherStr :: Int -> String -> String
decipherStr = map . decipher
data Fruct
= Mar String Bool
| Portocala String Int
ionatanFaraVierme = Mar "Ionatan" False
goldenCuVierme = Mar "Golden Delicious" True
portocalaSicilia10 = Portocala "Sanguinello" 10
listaFructe =
[ Mar "Ionatan" False
, Portocala "Sanguinello" 10
, Portocala "Valencia" 22
, Mar "Golden Delicious" True
, Portocala "Sanguinello" 15
, Portocala "Moro" 12
, Portocala "Tarocco" 3
, Portocala "Moro" 12
, Portocala "Valencia" 2
, Mar "Golden Delicious" False
, Mar "Golden" False
, Mar "Golden" True
]
ePortocalaDeSicilia :: Fruct -> Bool
ePortocalaDeSicilia (Portocala soi _) =
soi `elem` ["Tarocco", "Moro", "Sanguinello"]
ePortocalaDeSicilia _ = False
nrFeliiSicilia :: [Fruct] -> Int
nrFeliiSicilia fructe = sum [felii | p@(Portocala _ felii) <- fructe, ePortocalaDeSicilia p]
nrMereViermi :: [Fruct] -> Int
nrMereViermi fructe = length [1 | Mar _ True <- fructe]
data Linie = L [Int]
deriving Show
data Matrice = M [Linie]
verifica :: Num n => Matrice -> n -> Bool |
2fe21b6898dd99773e1206967c67ff2c9b98ec6392b37ececa47f2a0bb1bd207 | unbounce/clojure-dogstatsd-client | project.clj | (defproject com.unbounce/clojure-dogstatsd-client "0.8.0-SNAPSHOT"
:description "A thin veneer over java-dogstatsd-client"
:url "-dogstatsd-client"
:license {:name "The MIT License (MIT)"
:url ""
:comments "Copyright (c) 2018 Unbounce Marketing Solutions Inc."}
:profiles {:dev {:dependencies [[org.clojure/test.check "0.9.0"]]}}
:dependencies [[org.clojure/clojure "1.9.0"]
[com.datadoghq/java-dogstatsd-client "2.11.0"]]
:global-vars {*warn-on-reflection* true}
:deploy-repositories {"releases" {:url "" :creds :gpg}})
| null | https://raw.githubusercontent.com/unbounce/clojure-dogstatsd-client/ef44e313aa1d36464d5ee72de16d35feac34187b/project.clj | clojure | (defproject com.unbounce/clojure-dogstatsd-client "0.8.0-SNAPSHOT"
:description "A thin veneer over java-dogstatsd-client"
:url "-dogstatsd-client"
:license {:name "The MIT License (MIT)"
:url ""
:comments "Copyright (c) 2018 Unbounce Marketing Solutions Inc."}
:profiles {:dev {:dependencies [[org.clojure/test.check "0.9.0"]]}}
:dependencies [[org.clojure/clojure "1.9.0"]
[com.datadoghq/java-dogstatsd-client "2.11.0"]]
:global-vars {*warn-on-reflection* true}
:deploy-repositories {"releases" {:url "" :creds :gpg}})
| |
40ac6162494c72ba1b856c355c51be88d2e911a247f607ae926fbf5923c6c1ed | zcaudate/hara | listener.clj | (ns hara.lib.aether.listener
(:require [hara.print :as print]
[hara.io.file :as fs]
[hara.module.artifact :as artifact]
[hara.object :as object])
(:import (org.eclipse.aether RepositoryEvent RepositoryListener)
(org.eclipse.aether.transfer TransferEvent TransferListener TransferResource)))
(def ^:dynamic *progress* nil)
(def ^:dynamic *pairs* nil)
(defonce ^:dynamic *current* (atom nil))
(defn- print-params
[color]
{:padding 0
:spacing 1
:columns [{:key :type
:align :left
:length 15
:color color}
{:key :key
:align :left
:length 49
:color color}
{:key :version
:align :left
:length 9
:color color}
{:key :repo
:align :left
:length 10
:color color}
{:key :info
:align :left
:length 10
:color color}]})
(def category {:deploying :deploy
:downloading :download
:installing :install
:resolving :resolve
:deployed :deploy
:downloaded :download
:installed :install
:resolved :resolve})
(defn event->rep
"converts the event to a map representation"
{:added "3.0"}
[val]
(if-let [artifact (-> val :event :artifact)]
(artifact/artifact artifact)
(assoc (artifact/artifact (-> val :event :metadata))
:version "<meta>")))
(defn record
"adds an event to the recorder"
{:added "3.0"}
([type event]
(record *progress* type event))
([progress type event]
(let [out {:type type
:event event
:time (System/currentTimeMillis)}]
(if progress
(swap! progress conj out))
(reset! *current* out))))
(defn aggregate
"summarises all events that have been processed"
{:added "3.0"}
[events]
(->> events
(reduce (fn [{:keys [finished initiated timeline] :as out} {:keys [type event time]}]
(let [{:keys [artifact]} event
init-types #{:deploying :downloading :installing :resolving}
end-types #{:deployed :downloaded :installed :resolved}]
(cond (init-types type)
(assoc-in out [:initiated artifact (category type)] time)
(end-types type)
(let [cat (category type)
start (get-in initiated [artifact cat] time)
initiated (update-in initiated [artifact] dissoc cat)
entry {:type cat :start start :total (- time start) :artifact artifact}
finished (update-in finished [artifact] (fnil #(conj % entry) []))]
{:initiated initiated
:finished finished}))))
{:finished {}
:initiated {}})
:finished))
(object/map-like
TransferEvent
{:tag "transfer.event"
:read :class
:exclude [:session]}
TransferResource
{:tag "transfer.resource"
:read :class
:exclude [:trace]})
(defonce +no-op+ (fn [_]))
(deftype TransferListenerProxy [fns]
TransferListener
(transferCorrupted [_ e] ((or (:corrupted fns) +no-op+) (object/to-map e)))
(transferFailed [_ e] ((or (:failed fns) +no-op+) (object/to-map e)))
(transferInitiated [_ e] ((or (:initiated fns) +no-op+) (object/to-map e)))
(transferProgressed [_ e] ((or (:progressed fns) +no-op+) (object/to-map e)))
(transferStarted [_ e] ((or (:started fns) +no-op+) (object/to-map e)))
(transferSucceeded [_ e] ((or (:succeded fns) +no-op+) (object/to-map e))))
(def default-transfer-listener
(TransferListenerProxy.
{:initiated (fn [{:keys [resource request-type] :as m}]
(println (str request-type ": " (:resource-name resource) " has INITIATED")))
:progressed (fn [{:keys [resource request-type] :as m}])
:succeded (fn [{:keys [resource request-type] :as m}]
(println (str request-type ": " (:resource-name resource) " has SUCCEEDED")))
:failed (fn [{:keys [resource request-type] :as m}]
(println (str request-type ": " (:resource-name resource) " has FAILED")))}))
(def blank-transfer-listener
(TransferListenerProxy. {}))
(object/map-like
RepositoryEvent
{:tag "repo.event"
:read :class
:exclude [:session :trace]})
(deftype RepositoryListenerProxy [fns]
RepositoryListener
(artifactDeployed [_ e] ((or (-> fns :artifact :deployed) +no-op+) (object/to-map e)))
(artifactDeploying [_ e] ((or (-> fns :artifact :deploying) +no-op+) (object/to-map e)))
(artifactDescriptorInvalid [_ e] ((or (-> fns :artifact :invalid) +no-op+) (object/to-map e)))
(artifactDescriptorMissing [_ e] ((or (-> fns :artifact :missing) +no-op+) (object/to-map e)))
(artifactDownloaded [_ e] ((or (-> fns :artifact :downloaded) +no-op+) (object/to-map e)))
(artifactDownloading [_ e] ((or (-> fns :artifact :downloading) +no-op+) (object/to-map e)))
(artifactInstalled [_ e] ((or (-> fns :artifact :installed) +no-op+) (object/to-map e)))
(artifactInstalling [_ e] ((or (-> fns :artifact :installing) +no-op+) (object/to-map e)))
(artifactResolved [_ e] ((or (-> fns :artifact :resolved) +no-op+) (object/to-map e)))
(artifactResolving [_ e] ((or (-> fns :artifact :resolving) +no-op+) (object/to-map e)))
(metadataDeployed [_ e] ((or (-> fns :metadata :deployed) +no-op+) (object/to-map e)))
(metadataDeploying [_ e] ((or (-> fns :metadata :deploying) +no-op+) (object/to-map e)))
(metadataDownloaded [_ e] ((or (-> fns :metadata :downloaded) +no-op+) (object/to-map e)))
(metadataDownloading [_ e] ((or (-> fns :metadata :downloading) +no-op+) (object/to-map e)))
(metadataInstalled [_ e] ((or (-> fns :metadata :installed) +no-op+) (object/to-map e)))
(metadataInstalling [_ e] ((or (-> fns :metadata :installing) +no-op+) (object/to-map e)))
(metadataResolved [_ e] ((or (-> fns :metadata :resolved) +no-op+) (object/to-map e)))
(metadataResolving [_ e] ((or (-> fns :metadata :resolving) +no-op+) (object/to-map e))))
(def +default-repository-listener+
(RepositoryListenerProxy. {:artifact {:deploying (fn [{:keys [artifact] :as m}]
(record :deploying m))
:deployed (fn [{:keys [artifact] :as m}]
(record :deployed m))
:downloading (fn [{:keys [artifact] :as m}]
(record :downloading m))
:downloaded (fn [{:keys [artifact] :as m}]
(record :downloaded m))
:installing (fn [{:keys [artifact] :as m}]
(record :installing m))
:installed (fn [{:keys [artifact] :as m}]
(record :installed m))
:resolving (fn [{:keys [artifact] :as m}]
(record :resolving m))
:resolved (fn [{:keys [artifact] :as m}]
(record :resolved m))}
:metadata {:deploying (fn [{:keys [artifact] :as m}]
(record :deploying m))
:deployed (fn [{:keys [artifact] :as m}]
(record :deployed m))
:downloading (fn [{:keys [artifact] :as m}]
(record :downloading m))
:downloaded (fn [{:keys [artifact] :as m}]
(record :downloaded m))
:installing (fn [{:keys [artifact] :as m}]
(record :installing m))
:installed (fn [{:keys [artifact] :as m}]
(record :installed m))
:resolving (fn [{:keys [artifact] :as m}]
(record :resolving m))
:resolved (fn [{:keys [artifact] :as m}]
(record :resolved m))}}))
(def blank-repository-listener
(RepositoryListenerProxy. {}))
(defn process-event
"processes a recorded event"
{:added "3.0"}
[v]
(if *pairs*
(let [rep (event->rep v)
label (category (:type v))
id (-> v :event :artifact)
p (if-let [p (get-in @*pairs* [label id])]
p
(do (swap! *pairs* assoc-in [label id] v)
nil))]
(print/print-row [(str " " (name (:type v)))
(str (:group rep) "/" (:artifact rep)
(if (:extension rep) (str ":" (:extension rep))))
(:version rep)
(if p
[(symbol (or (-> v :event :repository :id)
"local"))]
"")
(if p
(format "(%.2fs, %d bytes)"
(/ (- (:time v) (:time p)) 1000.0)
(-> v :event :file fs/file (.length)))
"")]
(print-params (if p #{:bold} #{:red}))))))
(add-watch *current* :print-listener
(fn [_ _ _ v] (process-event v)))
| null | https://raw.githubusercontent.com/zcaudate/hara/481316c1f5c2aeba5be6e01ae673dffc46a63ec9/src/hara/lib/aether/listener.clj | clojure | (ns hara.lib.aether.listener
(:require [hara.print :as print]
[hara.io.file :as fs]
[hara.module.artifact :as artifact]
[hara.object :as object])
(:import (org.eclipse.aether RepositoryEvent RepositoryListener)
(org.eclipse.aether.transfer TransferEvent TransferListener TransferResource)))
(def ^:dynamic *progress* nil)
(def ^:dynamic *pairs* nil)
(defonce ^:dynamic *current* (atom nil))
(defn- print-params
[color]
{:padding 0
:spacing 1
:columns [{:key :type
:align :left
:length 15
:color color}
{:key :key
:align :left
:length 49
:color color}
{:key :version
:align :left
:length 9
:color color}
{:key :repo
:align :left
:length 10
:color color}
{:key :info
:align :left
:length 10
:color color}]})
(def category {:deploying :deploy
:downloading :download
:installing :install
:resolving :resolve
:deployed :deploy
:downloaded :download
:installed :install
:resolved :resolve})
(defn event->rep
"converts the event to a map representation"
{:added "3.0"}
[val]
(if-let [artifact (-> val :event :artifact)]
(artifact/artifact artifact)
(assoc (artifact/artifact (-> val :event :metadata))
:version "<meta>")))
(defn record
"adds an event to the recorder"
{:added "3.0"}
([type event]
(record *progress* type event))
([progress type event]
(let [out {:type type
:event event
:time (System/currentTimeMillis)}]
(if progress
(swap! progress conj out))
(reset! *current* out))))
(defn aggregate
"summarises all events that have been processed"
{:added "3.0"}
[events]
(->> events
(reduce (fn [{:keys [finished initiated timeline] :as out} {:keys [type event time]}]
(let [{:keys [artifact]} event
init-types #{:deploying :downloading :installing :resolving}
end-types #{:deployed :downloaded :installed :resolved}]
(cond (init-types type)
(assoc-in out [:initiated artifact (category type)] time)
(end-types type)
(let [cat (category type)
start (get-in initiated [artifact cat] time)
initiated (update-in initiated [artifact] dissoc cat)
entry {:type cat :start start :total (- time start) :artifact artifact}
finished (update-in finished [artifact] (fnil #(conj % entry) []))]
{:initiated initiated
:finished finished}))))
{:finished {}
:initiated {}})
:finished))
(object/map-like
TransferEvent
{:tag "transfer.event"
:read :class
:exclude [:session]}
TransferResource
{:tag "transfer.resource"
:read :class
:exclude [:trace]})
(defonce +no-op+ (fn [_]))
(deftype TransferListenerProxy [fns]
TransferListener
(transferCorrupted [_ e] ((or (:corrupted fns) +no-op+) (object/to-map e)))
(transferFailed [_ e] ((or (:failed fns) +no-op+) (object/to-map e)))
(transferInitiated [_ e] ((or (:initiated fns) +no-op+) (object/to-map e)))
(transferProgressed [_ e] ((or (:progressed fns) +no-op+) (object/to-map e)))
(transferStarted [_ e] ((or (:started fns) +no-op+) (object/to-map e)))
(transferSucceeded [_ e] ((or (:succeded fns) +no-op+) (object/to-map e))))
(def default-transfer-listener
(TransferListenerProxy.
{:initiated (fn [{:keys [resource request-type] :as m}]
(println (str request-type ": " (:resource-name resource) " has INITIATED")))
:progressed (fn [{:keys [resource request-type] :as m}])
:succeded (fn [{:keys [resource request-type] :as m}]
(println (str request-type ": " (:resource-name resource) " has SUCCEEDED")))
:failed (fn [{:keys [resource request-type] :as m}]
(println (str request-type ": " (:resource-name resource) " has FAILED")))}))
(def blank-transfer-listener
(TransferListenerProxy. {}))
(object/map-like
RepositoryEvent
{:tag "repo.event"
:read :class
:exclude [:session :trace]})
(deftype RepositoryListenerProxy [fns]
RepositoryListener
(artifactDeployed [_ e] ((or (-> fns :artifact :deployed) +no-op+) (object/to-map e)))
(artifactDeploying [_ e] ((or (-> fns :artifact :deploying) +no-op+) (object/to-map e)))
(artifactDescriptorInvalid [_ e] ((or (-> fns :artifact :invalid) +no-op+) (object/to-map e)))
(artifactDescriptorMissing [_ e] ((or (-> fns :artifact :missing) +no-op+) (object/to-map e)))
(artifactDownloaded [_ e] ((or (-> fns :artifact :downloaded) +no-op+) (object/to-map e)))
(artifactDownloading [_ e] ((or (-> fns :artifact :downloading) +no-op+) (object/to-map e)))
(artifactInstalled [_ e] ((or (-> fns :artifact :installed) +no-op+) (object/to-map e)))
(artifactInstalling [_ e] ((or (-> fns :artifact :installing) +no-op+) (object/to-map e)))
(artifactResolved [_ e] ((or (-> fns :artifact :resolved) +no-op+) (object/to-map e)))
(artifactResolving [_ e] ((or (-> fns :artifact :resolving) +no-op+) (object/to-map e)))
(metadataDeployed [_ e] ((or (-> fns :metadata :deployed) +no-op+) (object/to-map e)))
(metadataDeploying [_ e] ((or (-> fns :metadata :deploying) +no-op+) (object/to-map e)))
(metadataDownloaded [_ e] ((or (-> fns :metadata :downloaded) +no-op+) (object/to-map e)))
(metadataDownloading [_ e] ((or (-> fns :metadata :downloading) +no-op+) (object/to-map e)))
(metadataInstalled [_ e] ((or (-> fns :metadata :installed) +no-op+) (object/to-map e)))
(metadataInstalling [_ e] ((or (-> fns :metadata :installing) +no-op+) (object/to-map e)))
(metadataResolved [_ e] ((or (-> fns :metadata :resolved) +no-op+) (object/to-map e)))
(metadataResolving [_ e] ((or (-> fns :metadata :resolving) +no-op+) (object/to-map e))))
(def +default-repository-listener+
(RepositoryListenerProxy. {:artifact {:deploying (fn [{:keys [artifact] :as m}]
(record :deploying m))
:deployed (fn [{:keys [artifact] :as m}]
(record :deployed m))
:downloading (fn [{:keys [artifact] :as m}]
(record :downloading m))
:downloaded (fn [{:keys [artifact] :as m}]
(record :downloaded m))
:installing (fn [{:keys [artifact] :as m}]
(record :installing m))
:installed (fn [{:keys [artifact] :as m}]
(record :installed m))
:resolving (fn [{:keys [artifact] :as m}]
(record :resolving m))
:resolved (fn [{:keys [artifact] :as m}]
(record :resolved m))}
:metadata {:deploying (fn [{:keys [artifact] :as m}]
(record :deploying m))
:deployed (fn [{:keys [artifact] :as m}]
(record :deployed m))
:downloading (fn [{:keys [artifact] :as m}]
(record :downloading m))
:downloaded (fn [{:keys [artifact] :as m}]
(record :downloaded m))
:installing (fn [{:keys [artifact] :as m}]
(record :installing m))
:installed (fn [{:keys [artifact] :as m}]
(record :installed m))
:resolving (fn [{:keys [artifact] :as m}]
(record :resolving m))
:resolved (fn [{:keys [artifact] :as m}]
(record :resolved m))}}))
(def blank-repository-listener
(RepositoryListenerProxy. {}))
(defn process-event
"processes a recorded event"
{:added "3.0"}
[v]
(if *pairs*
(let [rep (event->rep v)
label (category (:type v))
id (-> v :event :artifact)
p (if-let [p (get-in @*pairs* [label id])]
p
(do (swap! *pairs* assoc-in [label id] v)
nil))]
(print/print-row [(str " " (name (:type v)))
(str (:group rep) "/" (:artifact rep)
(if (:extension rep) (str ":" (:extension rep))))
(:version rep)
(if p
[(symbol (or (-> v :event :repository :id)
"local"))]
"")
(if p
(format "(%.2fs, %d bytes)"
(/ (- (:time v) (:time p)) 1000.0)
(-> v :event :file fs/file (.length)))
"")]
(print-params (if p #{:bold} #{:red}))))))
(add-watch *current* :print-listener
(fn [_ _ _ v] (process-event v)))
| |
d2bf2c5f792042adf31b60123770e01eeace073081f39713099412088cb520c4 | ghcjs/jsaddle-dom | IDBFactory.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.IDBFactory
(open, open_, deleteDatabase, deleteDatabase_, cmp, cmp_,
IDBFactory(..), gTypeIDBFactory)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/IDBFactory.open Mozilla IDBFactory.open documentation >
open ::
(MonadDOM m, ToJSString name) =>
IDBFactory -> name -> Maybe Word64 -> m IDBOpenDBRequest
open self name version
= liftDOM
((self ^. jsf "open"
[toJSVal name,
maybe (return jsUndefined) integralToDoubleToJSVal version])
>>= fromJSValUnchecked)
| < -US/docs/Web/API/IDBFactory.open Mozilla IDBFactory.open documentation >
open_ ::
(MonadDOM m, ToJSString name) =>
IDBFactory -> name -> Maybe Word64 -> m ()
open_ self name version
= liftDOM
(void
(self ^. jsf "open"
[toJSVal name,
maybe (return jsUndefined) integralToDoubleToJSVal version]))
| < -US/docs/Web/API/IDBFactory.deleteDatabase Mozilla IDBFactory.deleteDatabase documentation >
deleteDatabase ::
(MonadDOM m, ToJSString name) =>
IDBFactory -> name -> m IDBOpenDBRequest
deleteDatabase self name
= liftDOM
((self ^. jsf "deleteDatabase" [toJSVal name]) >>=
fromJSValUnchecked)
| < -US/docs/Web/API/IDBFactory.deleteDatabase Mozilla IDBFactory.deleteDatabase documentation >
deleteDatabase_ ::
(MonadDOM m, ToJSString name) => IDBFactory -> name -> m ()
deleteDatabase_ self name
= liftDOM (void (self ^. jsf "deleteDatabase" [toJSVal name]))
| < -US/docs/Web/API/IDBFactory.cmp Mozilla IDBFactory.cmp documentation >
cmp ::
(MonadDOM m, ToJSVal first, ToJSVal second) =>
IDBFactory -> first -> second -> m Int
cmp self first second
= liftDOM
(round <$>
((self ^. jsf "cmp" [toJSVal first, toJSVal second]) >>=
valToNumber))
| < -US/docs/Web/API/IDBFactory.cmp Mozilla IDBFactory.cmp documentation >
cmp_ ::
(MonadDOM m, ToJSVal first, ToJSVal second) =>
IDBFactory -> first -> second -> m ()
cmp_ self first second
= liftDOM
(void (self ^. jsf "cmp" [toJSVal first, toJSVal second]))
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/IDBFactory.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.IDBFactory
(open, open_, deleteDatabase, deleteDatabase_, cmp, cmp_,
IDBFactory(..), gTypeIDBFactory)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/IDBFactory.open Mozilla IDBFactory.open documentation >
open ::
(MonadDOM m, ToJSString name) =>
IDBFactory -> name -> Maybe Word64 -> m IDBOpenDBRequest
open self name version
= liftDOM
((self ^. jsf "open"
[toJSVal name,
maybe (return jsUndefined) integralToDoubleToJSVal version])
>>= fromJSValUnchecked)
| < -US/docs/Web/API/IDBFactory.open Mozilla IDBFactory.open documentation >
open_ ::
(MonadDOM m, ToJSString name) =>
IDBFactory -> name -> Maybe Word64 -> m ()
open_ self name version
= liftDOM
(void
(self ^. jsf "open"
[toJSVal name,
maybe (return jsUndefined) integralToDoubleToJSVal version]))
| < -US/docs/Web/API/IDBFactory.deleteDatabase Mozilla IDBFactory.deleteDatabase documentation >
deleteDatabase ::
(MonadDOM m, ToJSString name) =>
IDBFactory -> name -> m IDBOpenDBRequest
deleteDatabase self name
= liftDOM
((self ^. jsf "deleteDatabase" [toJSVal name]) >>=
fromJSValUnchecked)
| < -US/docs/Web/API/IDBFactory.deleteDatabase Mozilla IDBFactory.deleteDatabase documentation >
deleteDatabase_ ::
(MonadDOM m, ToJSString name) => IDBFactory -> name -> m ()
deleteDatabase_ self name
= liftDOM (void (self ^. jsf "deleteDatabase" [toJSVal name]))
| < -US/docs/Web/API/IDBFactory.cmp Mozilla IDBFactory.cmp documentation >
cmp ::
(MonadDOM m, ToJSVal first, ToJSVal second) =>
IDBFactory -> first -> second -> m Int
cmp self first second
= liftDOM
(round <$>
((self ^. jsf "cmp" [toJSVal first, toJSVal second]) >>=
valToNumber))
| < -US/docs/Web/API/IDBFactory.cmp Mozilla IDBFactory.cmp documentation >
cmp_ ::
(MonadDOM m, ToJSVal first, ToJSVal second) =>
IDBFactory -> first -> second -> m ()
cmp_ self first second
= liftDOM
(void (self ^. jsf "cmp" [toJSVal first, toJSVal second]))
|
2b00185bd356f75e02f0504c243a48b5c78fff1c125177fed554c3d04325fd54 | AnyChart/export-server | templates.clj | (ns export-server.browser.templates
(:require [clojure.java.io :as io]
[export-server.data.config :as config]))
(def anychart-binary (slurp (io/resource "js/anychart-bundle.min.js")))
(defn create-svg-html [svg]
(str "<html lang=\"en\">
<head>
<meta charset=\"UTF-8\">
<style>
body {
margin: 0;
}
</style>
</head>
<body>"
svg
"</body></html>"))
(defn create-script-html [options script]
(str "<html lang=\"en\">
<head>
<meta charset=\"UTF-8\">
<script>"
anychart-binary
"</script>
<style>
body{margin:0;}
.anychart-credits{display:none;}
</style>
</head>
<body>"
"<div id='" (:container-id options) "' style='width:"
(str (config/min-size (:image-width options) (:container-width options)))
";height:"
(str (config/min-size (:image-height options) (:container-height options)))
":'></div>"
"<script>"
script
"</script>"
"</body></html>"))
| null | https://raw.githubusercontent.com/AnyChart/export-server/0ef2f084e07aaa144e38dff30e3283f686de1108/src/export_server/browser/templates.clj | clojure |
}
} | (ns export-server.browser.templates
(:require [clojure.java.io :as io]
[export-server.data.config :as config]))
(def anychart-binary (slurp (io/resource "js/anychart-bundle.min.js")))
(defn create-svg-html [svg]
(str "<html lang=\"en\">
<head>
<meta charset=\"UTF-8\">
<style>
body {
}
</style>
</head>
<body>"
svg
"</body></html>"))
(defn create-script-html [options script]
(str "<html lang=\"en\">
<head>
<meta charset=\"UTF-8\">
<script>"
anychart-binary
"</script>
<style>
</style>
</head>
<body>"
"<div id='" (:container-id options) "' style='width:"
(str (config/min-size (:image-width options) (:container-width options)))
";height:"
(str (config/min-size (:image-height options) (:container-height options)))
":'></div>"
"<script>"
script
"</script>"
"</body></html>"))
|
1b73ed4ceb18041e0ee00cdc2215f97be235a127ad24abe6a7421e3b34685a05 | someodd/waffle | Main.hs | module Main where
import qualified Data.Text as T
import System.Environment
import BrickApp
import Config
import Config.ConfigOpen
import Config.Bookmarks
import Config.Homepage
import Config.Theme
handleArgs :: [String] -> IO ()
handleArgs [] = uiMain Nothing
handleArgs (host:port:resource:[]) = uiMain (Just (T.pack host, read port :: Int, T.pack resource))
handleArgs (_) = error "Error! Need to supply host, port, and selector (or no args!). For empty selector you can use \"\"."
main :: IO ()
main = do
First do config file check
-- FIXME: hacky doing setup here...
-- maybe could have a cli option to reset even
setupConfigDirectory
setupDefaultOpenConfig
setupDefaultBookmarks
setupDefaultHomepageConfig
setupDefaultTheme
-- Now run!
args <- getArgs
handleArgs args
| null | https://raw.githubusercontent.com/someodd/waffle/71586b271878df4cb2368ae3bd6e2961cb79fa15/app/Main.hs | haskell | FIXME: hacky doing setup here...
maybe could have a cli option to reset even
Now run! | module Main where
import qualified Data.Text as T
import System.Environment
import BrickApp
import Config
import Config.ConfigOpen
import Config.Bookmarks
import Config.Homepage
import Config.Theme
handleArgs :: [String] -> IO ()
handleArgs [] = uiMain Nothing
handleArgs (host:port:resource:[]) = uiMain (Just (T.pack host, read port :: Int, T.pack resource))
handleArgs (_) = error "Error! Need to supply host, port, and selector (or no args!). For empty selector you can use \"\"."
main :: IO ()
main = do
First do config file check
setupConfigDirectory
setupDefaultOpenConfig
setupDefaultBookmarks
setupDefaultHomepageConfig
setupDefaultTheme
args <- getArgs
handleArgs args
|
0d44d66362d760c01d18eecb3ee1e106234934c1afd292297fe7fd39108bd27b | ktakashi/sagittarius-scheme | request.scm | -*- mode : scheme;coding : utf-8 -*-
;;;
;;; net/http-client/request.scm - HTTP request/response of HTTP client
;;;
Copyright ( c ) 2021 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
#!nounbound
(library (net http-client request)
(export http:request? http:request-builder
(rename (http:request <http:request>))
http:request-uri http:request-method
http:request-content-type
http:request-auth
http:request-headers
http:request-cookies
http:request-body
http:request-basic-auth
http:request-bearer-auth
;; helper
http:request->request-uri
http:response? http:response-builder
(rename (http:response <http:response>))
http:response-status http:response-headers
http:response-cookies http:response-body
+http:managed-headers+
http:make-headers http:headers?
http:headers-ref* http:headers-ref http:headers-contains?
http:headers-set! http:headers-add!
http:headers-names
http:headers->alist
http:method
http-method-set
http:no-body-method?
)
(import (rnrs)
(record builder)
(rfc base64)
(rfc cookie)
(net uri)
(util hashtables))
;;; TODO maybe should make a record for this
(define (->headers l)
(if (http:headers? l)
(hashtable-copy l #t)
(let ((ht (http:make-headers)))
(for-each (lambda (kv)
(let ((n (car kv))
(v* (cdr kv)))
(if (pair? v*)
(for-each (lambda (v)
(http:headers-add! ht n v)) v*)
(http:headers-set! ht n v*))))
l)
ht)))
(define (->cookies l)
(define (->cookie v)
(or (and (cookie? v) v)
(assertion-violation '->cookies "Unknown type" v)))
(map ->cookie l))
(define (http:make-headers) (make-hashtable string-ci-hash string-ci=?))
(define http:headers? hashtable?)
(define (http:headers-ref* h k) (hashtable-ref h k '()))
(define (http:headers-ref h k) (cond ((hashtable-ref h k #f) => car)
(else #f)))
(define (http:headers-contains? h k) (hashtable-contains? h k))
(define (http:headers-set! h k v) (hashtable-set! h k (list v)))
(define (http:headers-add! h k v)
(hashtable-update! h k (lambda (v*) (cons v v*)) '()))
(define (http:headers-names header) (vector->list (hashtable-keys header)))
(define (http:headers->alist header) (hashtable->alist header))
(define-enumeration http:method
(CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE)
http-method-set)
(define *http:no-body-methods*
(http-method-set CONNECT GET HEAD OPTIONS TRACE))
(define (http:no-body-method? method)
(enum-set-member? method *http:no-body-methods*))
(define-record-type http:request
(fields uri
method
content-type
auth
headers
cookies
body))
(define (->uri uri)
(if (uri? uri)
uri
(string->uri uri)))
(define-syntax http:request-builder
(make-record-builder http:request
((method 'GET)
(uri #f ->uri)
(content-type "application/octet-stream")
(body #f)
(headers '() ->headers)
(cookies '() ->cookies))))
(define (http:request-basic-auth username password)
(let* ((cred (base64-encode-string (string-append username ":" password)))
(value (string-append "Basic " cred)))
(lambda () value)))
(define (http:request-bearer-auth token)
(let ((value (string-append "Bearer " token)))
(lambda () value)))
(define (http:request->request-uri request)
(define uri (http:request-uri request))
(let ((path (or (uri-path uri) "/"))
(query (uri-query uri)))
;; encode?
(if query
(string-append path "?" query)
path)))
(define-record-type http:response
(fields status
headers
cookies
body))
(define-syntax http:response-builder
(make-record-builder http:response
((body #f)
;; let it fail if no header is provided...
(headers #f ->headers)
(cookies '()))))
;; Managed headers (these headers are ignored if user set)
;; Host is not listed here deliberately
(define +http:managed-headers+
'("host" ;; this is handled separately but user can stil specify ;)
"content-length"
"content-type"
"transfer-encoding"
"connection"))
)
| null | https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/bf27a91a3de6b94a62956f5ca509496459464329/sitelib/net/http-client/request.scm | scheme | coding : utf-8 -*-
net/http-client/request.scm - HTTP request/response of HTTP client
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
helper
TODO maybe should make a record for this
encode?
let it fail if no header is provided...
Managed headers (these headers are ignored if user set)
Host is not listed here deliberately
this is handled separately but user can stil specify ;) | Copyright ( c ) 2021 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
#!nounbound
(library (net http-client request)
(export http:request? http:request-builder
(rename (http:request <http:request>))
http:request-uri http:request-method
http:request-content-type
http:request-auth
http:request-headers
http:request-cookies
http:request-body
http:request-basic-auth
http:request-bearer-auth
http:request->request-uri
http:response? http:response-builder
(rename (http:response <http:response>))
http:response-status http:response-headers
http:response-cookies http:response-body
+http:managed-headers+
http:make-headers http:headers?
http:headers-ref* http:headers-ref http:headers-contains?
http:headers-set! http:headers-add!
http:headers-names
http:headers->alist
http:method
http-method-set
http:no-body-method?
)
(import (rnrs)
(record builder)
(rfc base64)
(rfc cookie)
(net uri)
(util hashtables))
(define (->headers l)
(if (http:headers? l)
(hashtable-copy l #t)
(let ((ht (http:make-headers)))
(for-each (lambda (kv)
(let ((n (car kv))
(v* (cdr kv)))
(if (pair? v*)
(for-each (lambda (v)
(http:headers-add! ht n v)) v*)
(http:headers-set! ht n v*))))
l)
ht)))
(define (->cookies l)
(define (->cookie v)
(or (and (cookie? v) v)
(assertion-violation '->cookies "Unknown type" v)))
(map ->cookie l))
(define (http:make-headers) (make-hashtable string-ci-hash string-ci=?))
(define http:headers? hashtable?)
(define (http:headers-ref* h k) (hashtable-ref h k '()))
(define (http:headers-ref h k) (cond ((hashtable-ref h k #f) => car)
(else #f)))
(define (http:headers-contains? h k) (hashtable-contains? h k))
(define (http:headers-set! h k v) (hashtable-set! h k (list v)))
(define (http:headers-add! h k v)
(hashtable-update! h k (lambda (v*) (cons v v*)) '()))
(define (http:headers-names header) (vector->list (hashtable-keys header)))
(define (http:headers->alist header) (hashtable->alist header))
(define-enumeration http:method
(CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE)
http-method-set)
(define *http:no-body-methods*
(http-method-set CONNECT GET HEAD OPTIONS TRACE))
(define (http:no-body-method? method)
(enum-set-member? method *http:no-body-methods*))
(define-record-type http:request
(fields uri
method
content-type
auth
headers
cookies
body))
(define (->uri uri)
(if (uri? uri)
uri
(string->uri uri)))
(define-syntax http:request-builder
(make-record-builder http:request
((method 'GET)
(uri #f ->uri)
(content-type "application/octet-stream")
(body #f)
(headers '() ->headers)
(cookies '() ->cookies))))
(define (http:request-basic-auth username password)
(let* ((cred (base64-encode-string (string-append username ":" password)))
(value (string-append "Basic " cred)))
(lambda () value)))
(define (http:request-bearer-auth token)
(let ((value (string-append "Bearer " token)))
(lambda () value)))
(define (http:request->request-uri request)
(define uri (http:request-uri request))
(let ((path (or (uri-path uri) "/"))
(query (uri-query uri)))
(if query
(string-append path "?" query)
path)))
(define-record-type http:response
(fields status
headers
cookies
body))
(define-syntax http:response-builder
(make-record-builder http:response
((body #f)
(headers #f ->headers)
(cookies '()))))
(define +http:managed-headers+
"content-length"
"content-type"
"transfer-encoding"
"connection"))
)
|
5797bfed66ff12b3d61a876f69dcb3d41ef0d91ba9778ac5f6dfbe0610a3ddb2 | gwathlobal/CotD | window-messages.lisp | (in-package :cotd)
(defclass message-window (window)
((cur-str :accessor cur-str)
(message-box :initarg :message-box :accessor message-window/message-box)
(header-str :initarg :header-str :accessor message-window/header-str)
))
(defmethod initialize-instance :after ((win message-window) &key)
(with-slots (cur-str message-box) win
(setf cur-str (message-list-length message-box))))
(defmethod make-output ((win message-window))
(with-slots (cur-str message-box header-str) win
(sdl:with-rectangle (a-rect (sdl:rectangle :x 0 :y 0 :w *window-width* :h *window-height*))
(sdl:fill-surface sdl:*black* :template a-rect))
(sdl:draw-string-solid-* header-str (truncate *window-width* 2) 0 :justify :center)
(sdl:with-rectangle (rect (sdl:rectangle :x 10 :y (+ 10 (sdl:char-height sdl:*default-font*)) :w (- *window-width* 10) :h (- *window-height* (+ 10 (sdl:char-height sdl:*default-font*)) (sdl:char-height sdl:*default-font*) 20)))
(let ((max-str (write-colored-text (colored-txt-list (message-box-strings message-box)) rect :count-only t)))
(when (< max-str (+ cur-str (truncate (sdl:height rect) (sdl:char-height sdl:*default-font*))))
(setf cur-str (- max-str (truncate (sdl:height rect) (sdl:char-height sdl:*default-font*)))))
(when (< cur-str 0)
(setf cur-str 0))
(write-colored-text (colored-txt-list (message-box-strings message-box)) rect :start-line cur-str)
)
)
(sdl:draw-string-solid-* (format nil "[Shift+Up/Down] Scroll page [Up/Down] Scroll text [Esc] Exit")
10 (- *window-height* 10 (sdl:char-height sdl:*default-font*)))
(sdl:update-display)))
(defmethod run-window ((win message-window))
(with-slots (cur-str) win
(sdl:with-events ()
(:quit-event () (funcall (quit-func win)) t)
(:key-down-event (:key key :mod mod :unicode unicode)
(declare (ignore unicode))
;; normalize mod
(loop while (>= mod sdl-key-mod-num) do
(decf mod sdl-key-mod-num))
(cond
((and (sdl:key= key :sdl-key-up) (= mod 0))
(decf cur-str))
((and (sdl:key= key :sdl-key-down) (= mod 0))
(incf cur-str))
((and (or (sdl:key= key :sdl-key-up) (sdl:key= key :sdl-key-kp8)) (/= (logand mod sdl-cffi::sdl-key-mod-shift) 0))
(decf cur-str 30))
((and (or (sdl:key= key :sdl-key-down) (sdl:key= key :sdl-key-kp2)) (/= (logand mod sdl-cffi::sdl-key-mod-shift) 0))
(incf cur-str 30))
;; escape - quit
((sdl:key= key :sdl-key-escape)
(setf *current-window* (return-to win))
(make-output *current-window*)
(return-from run-window nil))
)
(make-output *current-window*))
(:video-expose-event () (make-output *current-window*)))
))
| null | https://raw.githubusercontent.com/gwathlobal/CotD/d01ef486cc1d3b21d2ad670ebdb443e957290aa2/src/windows/window-messages.lisp | lisp | normalize mod
escape - quit
| (in-package :cotd)
(defclass message-window (window)
((cur-str :accessor cur-str)
(message-box :initarg :message-box :accessor message-window/message-box)
(header-str :initarg :header-str :accessor message-window/header-str)
))
(defmethod initialize-instance :after ((win message-window) &key)
(with-slots (cur-str message-box) win
(setf cur-str (message-list-length message-box))))
(defmethod make-output ((win message-window))
(with-slots (cur-str message-box header-str) win
(sdl:with-rectangle (a-rect (sdl:rectangle :x 0 :y 0 :w *window-width* :h *window-height*))
(sdl:fill-surface sdl:*black* :template a-rect))
(sdl:draw-string-solid-* header-str (truncate *window-width* 2) 0 :justify :center)
(sdl:with-rectangle (rect (sdl:rectangle :x 10 :y (+ 10 (sdl:char-height sdl:*default-font*)) :w (- *window-width* 10) :h (- *window-height* (+ 10 (sdl:char-height sdl:*default-font*)) (sdl:char-height sdl:*default-font*) 20)))
(let ((max-str (write-colored-text (colored-txt-list (message-box-strings message-box)) rect :count-only t)))
(when (< max-str (+ cur-str (truncate (sdl:height rect) (sdl:char-height sdl:*default-font*))))
(setf cur-str (- max-str (truncate (sdl:height rect) (sdl:char-height sdl:*default-font*)))))
(when (< cur-str 0)
(setf cur-str 0))
(write-colored-text (colored-txt-list (message-box-strings message-box)) rect :start-line cur-str)
)
)
(sdl:draw-string-solid-* (format nil "[Shift+Up/Down] Scroll page [Up/Down] Scroll text [Esc] Exit")
10 (- *window-height* 10 (sdl:char-height sdl:*default-font*)))
(sdl:update-display)))
(defmethod run-window ((win message-window))
(with-slots (cur-str) win
(sdl:with-events ()
(:quit-event () (funcall (quit-func win)) t)
(:key-down-event (:key key :mod mod :unicode unicode)
(declare (ignore unicode))
(loop while (>= mod sdl-key-mod-num) do
(decf mod sdl-key-mod-num))
(cond
((and (sdl:key= key :sdl-key-up) (= mod 0))
(decf cur-str))
((and (sdl:key= key :sdl-key-down) (= mod 0))
(incf cur-str))
((and (or (sdl:key= key :sdl-key-up) (sdl:key= key :sdl-key-kp8)) (/= (logand mod sdl-cffi::sdl-key-mod-shift) 0))
(decf cur-str 30))
((and (or (sdl:key= key :sdl-key-down) (sdl:key= key :sdl-key-kp2)) (/= (logand mod sdl-cffi::sdl-key-mod-shift) 0))
(incf cur-str 30))
((sdl:key= key :sdl-key-escape)
(setf *current-window* (return-to win))
(make-output *current-window*)
(return-from run-window nil))
)
(make-output *current-window*))
(:video-expose-event () (make-output *current-window*)))
))
|
aa3ae4596dc9eb51869b9946e015c71096f85fe7a25ff589b092a4f5753918bb | static-analysis-engineering/codehawk | bCHTrace.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 - 2021 ) 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020-2021 Henny Sipma
Copyright (c) 2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHLanguage
(* xprlib *)
open XprTypes
(* bchlib *)
open BCHLibTypes
(* bchlibx86 *)
open BCHLibx86Types
val get_callers: doubleword_int -> floc_int list
val get_callees: doubleword_int -> floc_int list
val get_app_callees: doubleword_int -> doubleword_int list
val record_fpcallback_arguments: assembly_function_int -> unit
val var_is_referenced: function_info_int -> xpr_t -> variable_t -> bool
val se_address_is_referenced:
function_info_int -> floc_int -> xpr_t -> variable_t -> bool
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/c1b3158e0d73cda7cfc10d75f6173f4297991a82/CodeHawk/CHB/bchanalyze/bCHTrace.mli | ocaml | xprlib
bchlib
bchlibx86 | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 - 2021 ) 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020-2021 Henny Sipma
Copyright (c) 2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHLanguage
open XprTypes
open BCHLibTypes
open BCHLibx86Types
val get_callers: doubleword_int -> floc_int list
val get_callees: doubleword_int -> floc_int list
val get_app_callees: doubleword_int -> doubleword_int list
val record_fpcallback_arguments: assembly_function_int -> unit
val var_is_referenced: function_info_int -> xpr_t -> variable_t -> bool
val se_address_is_referenced:
function_info_int -> floc_int -> xpr_t -> variable_t -> bool
|
6a95e738ef8336a4a241b10f77990b6f1d892ab1a55fb0926d2a81834deea953 | coding-robots/iwl | bayes.rkt | #lang racket
(require srfi/1
srfi/13
racket/serialize
racket/runtime-path)
(define *categories* (make-vector 0))
(define *totals* (make-hash))
(define *tokens* (make-hash))
(define *readabilities* (make-hash))
(define words-re (pregexp "(?>\\p{L}+-\\p{L}+|\\p{L}+)"))
(define sentences-re (pregexp "(?>[\\.\\?!]\\\"?(?:\\s|--)+?)"))
(define syllables-re (pregexp "(?i:[AEIOUÄÖÜ]+)"))
(define (get-words s)
(map bytes->string/utf-8
(regexp-match* words-re (string->bytes/utf-8 s))))
(define (get-sentences s)
(map bytes->string/utf-8
(regexp-split sentences-re (string->bytes/utf-8 s))))
(define (word-count s)
(length (get-words s)))
(define (substring-count sub s)
(length (regexp-match* (regexp-quote sub) s)))
(define (comma-count s)
(substring-count "," s))
(define (semicolon-count s)
(substring-count ":" s))
(define (has-substring sub s)
(regexp-match? (regexp-quote sub) s))
(define (has-quote s)
(has-substring "\"" s))
(define (has-dashes s)
(or (has-substring "- " s)
(has-substring "--" s)))
(define (word-count-token s)
(let ([wc (word-count s)])
(and (> wc 4)
(format "**S_WC=~a" wc))))
(define (comma-count-token s)
(let ([cc (comma-count s)])
(and (> cc 0)
(format "**S_CC=~a" cc))))
(define (semicolon-count-token s)
(let ([sc (semicolon-count s)])
(and (> sc 0)
(format "**S_SCC=~a" sc))))
(define (quote-token s)
(and (has-quote s)
"**S_HQ"))
(define (dashes-token s)
(and (has-dashes s)
"**S_HD"))
(define (get-special-tokens text)
(foldl (lambda (s lst)
(append lst
(remq* '(#f)
(list
(word-count-token s)
(comma-count-token s)
(semicolon-count-token s)
(quote-token s)
(dashes-token s)))))
null (get-sentences text)))
(define (safe-substring s start end)
(substring s start (min end (string-length s))))
(define (get-tokens text)
(append (map (lambda (x) (string-upcase (safe-substring x 0 26)))
(get-words text))
(get-special-tokens text)))
(define (syllables-count s)
(length (regexp-match* syllables-re s)))
(define (readability-score text)
; Flesch Reading Ease
(let* ([words (get-words text)]
[word-count (length words)])
(- 206.876
(* 1.015 (/ word-count (length (get-sentences text))))
(* 84.6 (/ (apply + (map syllables-count words)) word-count)))))
(define (hash-inc! hash key)
(hash-update! hash key add1 0))
(define-syntax-rule (vector-expand! vec val)
; Expands vector with value val and returns its index
(begin
(set! vec (vector-append vec (vector val)))
(sub1 (vector-length vec))))
(define (train! text cat)
(let ([idx (or (vector-member cat *categories*)
(vector-expand! *categories* cat))])
; Tokens
(for-each (lambda (w)
(hash-inc! *totals* idx)
(hash-inc! (hash-ref! *tokens* w (make-hasheqv)) idx))
(get-tokens text))
Readabilities
(let ([cur-rdb (readability-score text)])
(hash-update! *readabilities* idx
(lambda (x) (/ (+ cur-rdb x) 2)) cur-rdb))))
(define (hash-sum hash)
(apply + (hash-values hash)))
(define (list-top-bottom num lst)
return list with only num top and num bottom elements of sorted lst
(if (> (length lst) (* 2 num))
(let ([slst (sort lst <)])
(append (take slst num) (take-right slst num)))
lst))
(define (lim-frac x)
(max (min x 0.99) 0.01))
(define (fold-ratings probs)
(let* ([fr (/ 1 (length probs))]
[P (- 1 (expt (reduce * 1 (map (lambda (p) (- 1 p)) probs)) fr))]
[Q (- 1 (expt (reduce * 1 probs) fr))]
[S (/ (- P Q) (+ P Q))])
(/ (+ 1 S) 2)))
(define (readability-prob maxr cat current)
(lim-frac (/ (- maxr (abs (- current cat))) maxr)))
(define (get-ratings text)
(let ([ratings (make-hash)]
[all-totals (hash-sum *totals*)])
; Generate list of probabilities per category for each token in text
(for-each
(lambda (w)
(let* ([token-counts (hash-ref *tokens* w (make-hash))]
[all-count (hash-sum token-counts)])
(hash-for-each
*totals*
(lambda (cat cat-total)
(let* ([cnt (hash-ref token-counts cat 0)]
[this-prob (/ cnt cat-total)]
[other-prob (/ (- all-count cnt)
(- all-totals cat-total))]
[rating (if (> all-count 0)
(lim-frac (/ this-prob (+ this-prob other-prob)))
0.4)])
(hash-update! ratings cat (lambda (x) (cons rating x)) null))))))
(get-tokens text))
; Calculate single "rating" value from list of probabilities (including
; readabilities) for each category for which we generated probabilities
(let ([cur-readability (readability-score text)]
[max-readability (reduce max 0 (hash-values *readabilities*))])
(for/hash ([cat (hash-keys ratings)])
(values
cat
(fold-ratings (cons
(readability-prob max-readability
(hash-ref *readabilities* cat 0)
cur-readability)
(list-top-bottom 10 (hash-ref ratings cat)))))))))
(define (get-category text)
(with-handlers ([exn:fail:contract:divide-by-zero? (lambda (_) #f)])
(vector-ref *categories*
(car (argmax cdr (hash->list (get-ratings text)))))))
; Data saving and loading
(define-runtime-paths
(categories-file
totals-file
tokens-file
readabilities-file)
(values
"data/categories.dat"
"data/totals.dat"
"data/tokens.dat"
"data/readabilities.dat"))
; I tried fasl and just plain dumping of s-exprs, but while it's
; faster to dump and load, for some reason it uses a lot more memory
; after loading, plus it's less portable, so stick with serialize.
(define (dump-data)
(define (dump-var var file)
(write-to-file (serialize var) file #:exists 'replace))
(dump-var *categories* categories-file)
(dump-var *totals* totals-file)
(dump-var *tokens* tokens-file)
(dump-var *readabilities* readabilities-file))
(define (load-data!)
(define (load-var file)
(deserialize (file->value file)))
(set! *categories* (load-var categories-file))
(set! *totals* (load-var totals-file))
(set! *tokens* (load-var tokens-file))
(set! *readabilities* (load-var readabilities-file))
(collect-garbage)
collects better when used two times
; File trainer
(define (train-on-file! file category)
(train! (file->string file) category))
(define (train-path file)
(build-path (current-directory) "train-data" file))
(define (train-on-authors! lst)
(displayln "Training...")
(for-each (lambda (x)
(train-on-file! (train-path (cdr x)) (car x))
(displayln (car x)))
lst))
(provide (all-defined-out))
| null | https://raw.githubusercontent.com/coding-robots/iwl/bf13ab3f75aff0fe63c07555a41574e919bb11db/bayes.rkt | racket | Flesch Reading Ease
Expands vector with value val and returns its index
Tokens
Generate list of probabilities per category for each token in text
Calculate single "rating" value from list of probabilities (including
readabilities) for each category for which we generated probabilities
Data saving and loading
I tried fasl and just plain dumping of s-exprs, but while it's
faster to dump and load, for some reason it uses a lot more memory
after loading, plus it's less portable, so stick with serialize.
File trainer | #lang racket
(require srfi/1
srfi/13
racket/serialize
racket/runtime-path)
(define *categories* (make-vector 0))
(define *totals* (make-hash))
(define *tokens* (make-hash))
(define *readabilities* (make-hash))
(define words-re (pregexp "(?>\\p{L}+-\\p{L}+|\\p{L}+)"))
(define sentences-re (pregexp "(?>[\\.\\?!]\\\"?(?:\\s|--)+?)"))
(define syllables-re (pregexp "(?i:[AEIOUÄÖÜ]+)"))
(define (get-words s)
(map bytes->string/utf-8
(regexp-match* words-re (string->bytes/utf-8 s))))
(define (get-sentences s)
(map bytes->string/utf-8
(regexp-split sentences-re (string->bytes/utf-8 s))))
(define (word-count s)
(length (get-words s)))
(define (substring-count sub s)
(length (regexp-match* (regexp-quote sub) s)))
(define (comma-count s)
(substring-count "," s))
(define (semicolon-count s)
(substring-count ":" s))
(define (has-substring sub s)
(regexp-match? (regexp-quote sub) s))
(define (has-quote s)
(has-substring "\"" s))
(define (has-dashes s)
(or (has-substring "- " s)
(has-substring "--" s)))
(define (word-count-token s)
(let ([wc (word-count s)])
(and (> wc 4)
(format "**S_WC=~a" wc))))
(define (comma-count-token s)
(let ([cc (comma-count s)])
(and (> cc 0)
(format "**S_CC=~a" cc))))
(define (semicolon-count-token s)
(let ([sc (semicolon-count s)])
(and (> sc 0)
(format "**S_SCC=~a" sc))))
(define (quote-token s)
(and (has-quote s)
"**S_HQ"))
(define (dashes-token s)
(and (has-dashes s)
"**S_HD"))
(define (get-special-tokens text)
(foldl (lambda (s lst)
(append lst
(remq* '(#f)
(list
(word-count-token s)
(comma-count-token s)
(semicolon-count-token s)
(quote-token s)
(dashes-token s)))))
null (get-sentences text)))
(define (safe-substring s start end)
(substring s start (min end (string-length s))))
(define (get-tokens text)
(append (map (lambda (x) (string-upcase (safe-substring x 0 26)))
(get-words text))
(get-special-tokens text)))
(define (syllables-count s)
(length (regexp-match* syllables-re s)))
(define (readability-score text)
(let* ([words (get-words text)]
[word-count (length words)])
(- 206.876
(* 1.015 (/ word-count (length (get-sentences text))))
(* 84.6 (/ (apply + (map syllables-count words)) word-count)))))
(define (hash-inc! hash key)
(hash-update! hash key add1 0))
(define-syntax-rule (vector-expand! vec val)
(begin
(set! vec (vector-append vec (vector val)))
(sub1 (vector-length vec))))
(define (train! text cat)
(let ([idx (or (vector-member cat *categories*)
(vector-expand! *categories* cat))])
(for-each (lambda (w)
(hash-inc! *totals* idx)
(hash-inc! (hash-ref! *tokens* w (make-hasheqv)) idx))
(get-tokens text))
Readabilities
(let ([cur-rdb (readability-score text)])
(hash-update! *readabilities* idx
(lambda (x) (/ (+ cur-rdb x) 2)) cur-rdb))))
(define (hash-sum hash)
(apply + (hash-values hash)))
(define (list-top-bottom num lst)
return list with only num top and num bottom elements of sorted lst
(if (> (length lst) (* 2 num))
(let ([slst (sort lst <)])
(append (take slst num) (take-right slst num)))
lst))
(define (lim-frac x)
(max (min x 0.99) 0.01))
(define (fold-ratings probs)
(let* ([fr (/ 1 (length probs))]
[P (- 1 (expt (reduce * 1 (map (lambda (p) (- 1 p)) probs)) fr))]
[Q (- 1 (expt (reduce * 1 probs) fr))]
[S (/ (- P Q) (+ P Q))])
(/ (+ 1 S) 2)))
(define (readability-prob maxr cat current)
(lim-frac (/ (- maxr (abs (- current cat))) maxr)))
(define (get-ratings text)
(let ([ratings (make-hash)]
[all-totals (hash-sum *totals*)])
(for-each
(lambda (w)
(let* ([token-counts (hash-ref *tokens* w (make-hash))]
[all-count (hash-sum token-counts)])
(hash-for-each
*totals*
(lambda (cat cat-total)
(let* ([cnt (hash-ref token-counts cat 0)]
[this-prob (/ cnt cat-total)]
[other-prob (/ (- all-count cnt)
(- all-totals cat-total))]
[rating (if (> all-count 0)
(lim-frac (/ this-prob (+ this-prob other-prob)))
0.4)])
(hash-update! ratings cat (lambda (x) (cons rating x)) null))))))
(get-tokens text))
(let ([cur-readability (readability-score text)]
[max-readability (reduce max 0 (hash-values *readabilities*))])
(for/hash ([cat (hash-keys ratings)])
(values
cat
(fold-ratings (cons
(readability-prob max-readability
(hash-ref *readabilities* cat 0)
cur-readability)
(list-top-bottom 10 (hash-ref ratings cat)))))))))
(define (get-category text)
(with-handlers ([exn:fail:contract:divide-by-zero? (lambda (_) #f)])
(vector-ref *categories*
(car (argmax cdr (hash->list (get-ratings text)))))))
(define-runtime-paths
(categories-file
totals-file
tokens-file
readabilities-file)
(values
"data/categories.dat"
"data/totals.dat"
"data/tokens.dat"
"data/readabilities.dat"))
(define (dump-data)
(define (dump-var var file)
(write-to-file (serialize var) file #:exists 'replace))
(dump-var *categories* categories-file)
(dump-var *totals* totals-file)
(dump-var *tokens* tokens-file)
(dump-var *readabilities* readabilities-file))
(define (load-data!)
(define (load-var file)
(deserialize (file->value file)))
(set! *categories* (load-var categories-file))
(set! *totals* (load-var totals-file))
(set! *tokens* (load-var tokens-file))
(set! *readabilities* (load-var readabilities-file))
(collect-garbage)
collects better when used two times
(define (train-on-file! file category)
(train! (file->string file) category))
(define (train-path file)
(build-path (current-directory) "train-data" file))
(define (train-on-authors! lst)
(displayln "Training...")
(for-each (lambda (x)
(train-on-file! (train-path (cdr x)) (car x))
(displayln (car x)))
lst))
(provide (all-defined-out))
|
e1c47507eb87b137f1e5420d7b7afc07a88e2e6a7bfbd610239e9c3e7b6c93fc | chicken-mobile/chicken-sdl2-android-builder | event-array.scm | ;;
chicken - sdl2 : CHICKEN Scheme bindings to Simple DirectMedia Layer 2
;;
Copyright © 2013 , 2015 - 2016 .
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions
;; are met:
;;
;; - Redistributions of source code must retain the above copyright
;; notice, this list of conditions and the following disclaimer.
;;
;; - Redistributions in binary form must reproduce the above copyright
;; notice, this list of conditions and the following disclaimer in
;; the documentation and/or other materials provided with the
;; distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
;; FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR FOR ANY DIRECT ,
INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT ,
;; STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
;; OF THE POSSIBILITY OF SUCH DAMAGE.
(export %allocate-event-array
%read-event-array
%write-event-array
%event-array->list
%event-list->array)
(define-allocator %allocate-event-array "SDL_Event")
(define-array-reader %read-event-array
"SDL_Event*" SDL_Event*)
(define-array-writer %write-event-array
"SDL_Event*" SDL_Event*)
(define-array->list %event-array->list
%read-event-array (alloc-event))
(define-list->array %event-list->array
%allocate-event-array %write-event-array)
| null | https://raw.githubusercontent.com/chicken-mobile/chicken-sdl2-android-builder/90ef1f0ff667737736f1932e204d29ae615a00c4/eggs/sdl2/lib/sdl2-internals/arrays/event-array.scm | scheme |
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE. | chicken - sdl2 : CHICKEN Scheme bindings to Simple DirectMedia Layer 2
Copyright © 2013 , 2015 - 2016 .
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
COPYRIGHT HOLDER OR FOR ANY DIRECT ,
INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT ,
(export %allocate-event-array
%read-event-array
%write-event-array
%event-array->list
%event-list->array)
(define-allocator %allocate-event-array "SDL_Event")
(define-array-reader %read-event-array
"SDL_Event*" SDL_Event*)
(define-array-writer %write-event-array
"SDL_Event*" SDL_Event*)
(define-array->list %event-array->list
%read-event-array (alloc-event))
(define-list->array %event-list->array
%allocate-event-array %write-event-array)
|
0ed7bcd539f379a34dc9cd1868b9a2f40cd90641fc1935ae6820b823190f922e | parapluu/Concuerror | main_and_backup.erl | -module(main_and_backup).
-export([main_and_backup/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
main_and_backup() ->
register(parent, self()),
spawn(fun spawner/0),
spawn(fun spawner/0),
receive
ok ->
receive
ok -> ok
end
end,
server_backup ! ok,
server ! ok.
spawner() ->
case whereis(server) of
undefined ->
Spid = spawn(fun server/0),
In race condition there can be only one that succeeds to register
try
register(server, Spid) % We are it
catch
_:_ -> % Server registered, register as backup
register(server_backup, Spid)
end;
_ -> % Server registered, start backup
Bpid = spawn(fun server/0),
register(server_backup, Bpid)
end,
parent ! ok.
server() ->
receive
ok -> ok
end.
| null | https://raw.githubusercontent.com/parapluu/Concuerror/152a5ccee0b6e97d8c3329c2167166435329d261/tests/suites/basic_tests/src/main_and_backup.erl | erlang | We are it
Server registered, register as backup
Server registered, start backup | -module(main_and_backup).
-export([main_and_backup/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
main_and_backup() ->
register(parent, self()),
spawn(fun spawner/0),
spawn(fun spawner/0),
receive
ok ->
receive
ok -> ok
end
end,
server_backup ! ok,
server ! ok.
spawner() ->
case whereis(server) of
undefined ->
Spid = spawn(fun server/0),
In race condition there can be only one that succeeds to register
try
catch
register(server_backup, Spid)
end;
Bpid = spawn(fun server/0),
register(server_backup, Bpid)
end,
parent ! ok.
server() ->
receive
ok -> ok
end.
|
8bb60c869bd88d0d15abadc3a4b84fd418278fe5220e1fbe7e58a9e62af6762a | conal/lambda-ccc | Run.hs | # LANGUAGE CPP #
# LANGUAGE TypeOperators , ExistentialQuantification , FlexibleContexts #
{-# LANGUAGE ConstraintKinds, StandaloneDeriving #-}
-- Okay
# LANGUAGE FlexibleInstances , UndecidableInstances #
# OPTIONS_GHC -Wall #
{ - # OPTIONS_GHC -fno - warn - unused - imports # - } -- TEMP
{ - # OPTIONS_GHC -fno - warn - unused - binds # - } -- TEMP
#define MealyAsFun
----------------------------------------------------------------------
-- |
-- Module : Run
Copyright : ( c ) 2014 Tabula , Inc.
--
-- Maintainer :
-- Stability : experimental
--
Run a test : reify , CCC , circuit
----------------------------------------------------------------------
module LambdaCCC.Run
( go,go',goSep,run,goM,goM',goMSep
, goNew, goNew'
) where
import Prelude
import LambdaCCC.Lambda (EP,reifyEP)
import LambdaCCC.ToCCC (toCCC)
import Circat.Category (Uncurriable)
import Circat.Circuit (Attr,mkGraph,UU,outDotG,unitize',(:>))
import Circat.Netlist (saveAsVerilog)
import Circat.Mealy (Mealy(..))
#if defined MealyAsFun
import Circat.Mealy (asFun)
#else
import Circat.Circuit (MealyC(..),unitizeMealyC)
import Control.Arrow (first)
#endif
ranksep :: Double -> Attr
ranksep n = ("ranksep",show n)
type Okay = Uncurriable (:>) ()
go' :: Okay a => String -> [Attr] -> a -> IO ()
#if defined MealyAsFun
go' = goNew' -- Tidy up later
#else
go' name attrs f = goM' name attrs (Mealy (first f) ())
#endif
{-# INLINE go' #-}
go :: Okay a => String -> a -> IO ()
go name = go' name []
# INLINE go #
goSep :: Okay a => String -> Double -> a -> IO ()
goSep name s = go' name [ranksep s]
Run an example : reify , CCC , circuit .
run :: Okay a => String -> [Attr] -> EP a -> IO ()
run name attrs e = do print e
outGV name attrs (unitize' (toCCC e))
# NOINLINE run #
goNew' :: Okay a => String -> [Attr] -> a -> IO ()
goNew' name attrs f = run name attrs (reifyEP f)
{-# INLINE goNew' #-}
goNew :: Okay a => String -> a -> IO ()
goNew name = goNew' name []
# INLINE goNew #
Diagram and Verilog
outGV :: String -> [Attr] -> UU -> IO ()
outGV name attrs circ =
do outD ("pdf","")
-- outD ("svg","")
-- outD ("png","-Gdpi=200")
outV
where
g = mkGraph name circ
outD ss = outDotG ss attrs g
outV = saveAsVerilog g
# NOINLINE outGV #
TODO : Move file - saving code from outD and saveVerilog to here .
{--------------------------------------------------------------------
State machines
--------------------------------------------------------------------}
goM :: Okay (a -> b) => String -> Mealy a b -> IO ()
goM name = goM' name []
# INLINE goM #
goMSep :: Okay (a -> b) => String -> Double -> Mealy a b -> IO ()
goMSep name s = goM' name [ranksep s]
# INLINE goMSep #
goM' :: Okay (a -> b) => String -> [Attr] -> Mealy a b -> IO ()
# INLINE goM ' #
#if defined MealyAsFun
goM' name attrs = go' name attrs . asFun
#else
goM' name attrs m = putStrLn ("Compiling " ++ name) >>
runM name attrs (reifyMealy m)
Reified Mealy machine
data MealyE a b =
forall s. (GenBuses s, Show s) => MealyE (EP ((a,s) -> (b,s))) s
-- The Show constraint is just for the following Show, which is handy for debugging.
-- (See the 'print' in toMealyC.)
deriving instance Show (MealyE a b)
reifyMealy :: Mealy a b -> MealyE a b
reifyMealy (Mealy f s) = MealyE (reifyEP f) s
# INLINE reifyMealy #
toMealyC :: MealyE a b -> MealyC a b
toMealyC (MealyE f s) = MealyC (toCCC' f) s
runM :: Okay a => String -> [Attr] -> MealyE a b -> IO ()
runM name attrs e = do print e
outGV name attrs (unitizeMealyC (toMealyC e))
TODO : When mealyAsArrow works , rewrite goM ' via go ' instead of vice versa
Despite INLINE pragmas , I still have to explicitly tell to unfold
-- definitions from this module:
--
-- try (any-td (unfold ['go,'go','goM,'goM','reifyMealy]))
TODO : Maybe pull unitizeMealyC into toMealyC , renaming to " toMealyU "
#endif
| null | https://raw.githubusercontent.com/conal/lambda-ccc/141a713456d447d27dbe440fa27a9372cd44dc7f/src/LambdaCCC/Run.hs | haskell | # LANGUAGE ConstraintKinds, StandaloneDeriving #
Okay
TEMP
TEMP
--------------------------------------------------------------------
|
Module : Run
Maintainer :
Stability : experimental
--------------------------------------------------------------------
Tidy up later
# INLINE go' #
# INLINE goNew' #
outD ("svg","")
outD ("png","-Gdpi=200")
-------------------------------------------------------------------
State machines
-------------------------------------------------------------------
The Show constraint is just for the following Show, which is handy for debugging.
(See the 'print' in toMealyC.)
definitions from this module:
try (any-td (unfold ['go,'go','goM,'goM','reifyMealy])) | # LANGUAGE CPP #
# LANGUAGE TypeOperators , ExistentialQuantification , FlexibleContexts #
# LANGUAGE FlexibleInstances , UndecidableInstances #
# OPTIONS_GHC -Wall #
#define MealyAsFun
Copyright : ( c ) 2014 Tabula , Inc.
Run a test : reify , CCC , circuit
module LambdaCCC.Run
( go,go',goSep,run,goM,goM',goMSep
, goNew, goNew'
) where
import Prelude
import LambdaCCC.Lambda (EP,reifyEP)
import LambdaCCC.ToCCC (toCCC)
import Circat.Category (Uncurriable)
import Circat.Circuit (Attr,mkGraph,UU,outDotG,unitize',(:>))
import Circat.Netlist (saveAsVerilog)
import Circat.Mealy (Mealy(..))
#if defined MealyAsFun
import Circat.Mealy (asFun)
#else
import Circat.Circuit (MealyC(..),unitizeMealyC)
import Control.Arrow (first)
#endif
ranksep :: Double -> Attr
ranksep n = ("ranksep",show n)
type Okay = Uncurriable (:>) ()
go' :: Okay a => String -> [Attr] -> a -> IO ()
#if defined MealyAsFun
#else
go' name attrs f = goM' name attrs (Mealy (first f) ())
#endif
go :: Okay a => String -> a -> IO ()
go name = go' name []
# INLINE go #
goSep :: Okay a => String -> Double -> a -> IO ()
goSep name s = go' name [ranksep s]
Run an example : reify , CCC , circuit .
run :: Okay a => String -> [Attr] -> EP a -> IO ()
run name attrs e = do print e
outGV name attrs (unitize' (toCCC e))
# NOINLINE run #
goNew' :: Okay a => String -> [Attr] -> a -> IO ()
goNew' name attrs f = run name attrs (reifyEP f)
goNew :: Okay a => String -> a -> IO ()
goNew name = goNew' name []
# INLINE goNew #
Diagram and Verilog
outGV :: String -> [Attr] -> UU -> IO ()
outGV name attrs circ =
do outD ("pdf","")
outV
where
g = mkGraph name circ
outD ss = outDotG ss attrs g
outV = saveAsVerilog g
# NOINLINE outGV #
TODO : Move file - saving code from outD and saveVerilog to here .
goM :: Okay (a -> b) => String -> Mealy a b -> IO ()
goM name = goM' name []
# INLINE goM #
goMSep :: Okay (a -> b) => String -> Double -> Mealy a b -> IO ()
goMSep name s = goM' name [ranksep s]
# INLINE goMSep #
goM' :: Okay (a -> b) => String -> [Attr] -> Mealy a b -> IO ()
# INLINE goM ' #
#if defined MealyAsFun
goM' name attrs = go' name attrs . asFun
#else
goM' name attrs m = putStrLn ("Compiling " ++ name) >>
runM name attrs (reifyMealy m)
Reified Mealy machine
data MealyE a b =
forall s. (GenBuses s, Show s) => MealyE (EP ((a,s) -> (b,s))) s
deriving instance Show (MealyE a b)
reifyMealy :: Mealy a b -> MealyE a b
reifyMealy (Mealy f s) = MealyE (reifyEP f) s
# INLINE reifyMealy #
toMealyC :: MealyE a b -> MealyC a b
toMealyC (MealyE f s) = MealyC (toCCC' f) s
runM :: Okay a => String -> [Attr] -> MealyE a b -> IO ()
runM name attrs e = do print e
outGV name attrs (unitizeMealyC (toMealyC e))
TODO : When mealyAsArrow works , rewrite goM ' via go ' instead of vice versa
Despite INLINE pragmas , I still have to explicitly tell to unfold
TODO : Maybe pull unitizeMealyC into toMealyC , renaming to " toMealyU "
#endif
|
0e456758cd50c50ae063eb9971d2dc8de61e9e654620f3a9a0628e7d8ce67ef9 | divipp/lensref | Elerea.hs | # LANGUAGE NoMonomorphismRestriction #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE LambdaCase #
# LANGUAGE ViewPatterns #
# LANGUAGE RecordWildCards #
# LANGUAGE RecursiveDo #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE StandaloneDeriving #
# LANGUAGE DeriveFunctor #
{-# LANGUAGE GADTs #-}
# LANGUAGE TypeFamilies #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TemplateHaskell #
module LensRef.Elerea
(
elerea
, IO'
, neutral
, postpone
, rev
-- * The signal abstraction
, Signal
, SignalGen
-- * Embedding into I/O
, start
, external
, externalMulti
-- * Basic building blocks
, delay
, snapshot
, generator
, memo
, until
-- * Derived combinators
, stateful
, transfer
, transfer2
, transfer3
, transfer4
-- * Signals with side effects
, execute
, effectful
, effectful1
, effectful2
, effectful3
, effectful4
, test, test2
) where
import Data.IORef
import Control.Applicative
import Control.Arrow
import Control.Monad.Reader
import Lens .
import Lens . Family2.State
import Lens . Family2.Unchecked
import Lens . Family2.Stock
--import Debug.Trace
import Prelude hiding (until)
import LensRef
Monad '
,
, -}MonadTrace (..), SimpleRefs(..), modSimpleRef
, Reversible(..), ReversibleT, runRev, postponed, reversible, neut
, Ref
, RefWriter, writeRef, creatorToWriter
, RefReader, readRef
, RefCreator, readerToCreator, newRef, delayPrev, generator', joinCreator -- , stabilize_
, runRefCreator
)
infixl 1 <&>
(<&>) = flip (<$>)
type Signal = RefReader IO_
type SignalGen = ReaderT (Ref IO_ ()) (RefCreator IO_)
type IO' = RefWriter IO_
newtype IO _ a = IO _ { runBase : : ReaderT BaseRun IO a }
newtype IO_ a = IO_ { runBase :: ReaderT BaseRun (ReversibleT IO) a }
deriving (Functor, Applicative, Monad, MonadFix)
newtype BaseRun = BaseRun { runBaseRun :: forall a . IO' a -> IO_ a }
instance MonadTrace IO_ where
traceM = IO_ . traceM
instance Reversible IO_ where
restore m = IO_ $ restore $ runBase m <&> (runBase +++ id)
neutral :: IO a -> IO' a
neutral = lift . IO_ . lift . neut
postpone :: IO () -> IO' ()
postpone = lift . IO_ . lift . postponed
rev :: IO (a, IO ()) -> IO' a
rev = lift . IO_ . lift . reversible
instance SimpleRefs IO_ where
type SimpleRef IO_ = IORef
newSimpleRef = IO_ . newSimpleRef
readSimpleRef = IO_ . readSimpleRef
writeSimpleRef r = IO_ . writeSimpleRef r
down_ :: IO' a -> IO_ a
down_ m = do
br <- IO_ ask
runBaseRun br m
elerea :: IO' a -> IO a
elerea m = mdo
let run :: forall a . IO_ a -> IO a
run = runRev . flip runReaderT br . runBase
br <- run $ runRefCreator $ \w -> return $ BaseRun w
run $ down_ m
-------------------------------------------------------------------------------- core
start :: SignalGen (Signal a) -> IO' (IO' a)
start s = creatorToWriter $ do
t <- newRef ()
x <- runReaderT s t
i <- newSimpleRef True
return $ do
init <- modSimpleRef i $ \x -> return (False, x)
when (not init) $ writeRef t ()
creatorToWriter $ readerToCreator x
external :: a -> IO' (Signal a, a -> IO' ())
external init = creatorToWriter $ do
r <- newRef init
return (readRef r, writeRef r)
externalMulti :: IO' (SignalGen (Signal [a]), a -> IO' ())
externalMulti = creatorToWriter $ do
r <- newSimpleRef []
let mk = ReaderT $ \t -> joinCreator $ readRef t <&> \() -> modSimpleRef r $ \x -> return ([], x)
return (mk, \a -> modSimpleRef r $ \as -> return (a:as, ()))
delay :: a -> Signal a -> SignalGen (Signal a)
delay x s = ReaderT $ \t -> delayPrev x (const id <$> readRef t) s
snapshot :: Signal a -> SignalGen a
snapshot = lift . readerToCreator
generator :: Signal (SignalGen a) -> SignalGen (Signal a)
generator s = ReaderT $ \t -> generator' $ readRef t <&> \() -> join $ flip runReaderT t <$> readerToCreator s
-- TODO: try to break this
execute :: IO' a -> SignalGen a
execute = lift . lift . down_
-------------------------------------------------------------------------------- for efficiency
-- TODO: optimize
until :: Signal Bool -> SignalGen (Signal Bool)
until s = do
step <- transfer False (||) s
dstep <- delay False step
memo (liftA2 (/=) step dstep)
-- TODO: optimize
memo :: Signal a -> SignalGen (Signal a)
lift r > > = \a - > readRef < $ > stabilize _ ( \ _ _ - > False ) a r
-------------------------------------------------------------------------------- derived
stateful :: a -> (a -> a) -> SignalGen (Signal a)
stateful x0 f = mfix $ \sig -> delay x0 (f <$> sig)
transfer :: a -> (t -> a -> a) -> Signal t -> SignalGen (Signal a)
transfer x0 f s = mfix $ \sig -> do
sig' <- delay x0 sig
memo $ f <$> s <*> sig' -- TODO: why memo?
transfer2 :: a -> (t1 -> t2 -> a -> a) -> Signal t1 -> Signal t2 -> SignalGen (Signal a)
transfer2 x0 f s1 s2 = transfer x0 ($) $ f <$> s1 <*> s2
transfer3 :: a -> (t1 -> t2 -> t3 -> a -> a) -> Signal t1 -> Signal t2 -> Signal t3 -> SignalGen (Signal a)
transfer3 x0 f s1 s2 s3 = transfer x0 ($) $ f <$> s1 <*> s2 <*> s3
transfer4 :: a -> (t1 -> t2 -> t3 -> t4 -> a -> a) -> Signal t1 -> Signal t2 -> Signal t3 -> Signal t4 -> SignalGen (Signal a)
transfer4 x0 f s1 s2 s3 s4 = transfer x0 ($) $ f <$> s1 <*> s2 <*> s3 <*> s4
effectful :: IO' a -> SignalGen (Signal a)
effectful = generator . return . execute
effectful1 :: (t -> IO' a) -> Signal t -> SignalGen (Signal a)
effectful1 m s = generator $ execute . m <$> s
effectful2 :: (t1 -> t2 -> IO' a) -> Signal t1 -> Signal t2 -> SignalGen (Signal a)
effectful2 m s1 s2 = generator $ execute <$> (m <$> s1 <*> s2)
effectful3 :: (t1 -> t2 -> t3 -> IO' a) -> Signal t1 -> Signal t2 -> Signal t3 -> SignalGen (Signal a)
effectful3 m s1 s2 s3 = generator $ execute <$> (m <$> s1 <*> s2 <*> s3)
effectful4 :: (t1 -> t2 -> t3 -> t4 -> IO' a) -> Signal t1 -> Signal t2 -> Signal t3 -> Signal t4 -> SignalGen (Signal a)
effectful4 m s1 s2 s3 s4 = generator $ execute <$> (m <$> s1 <*> s2 <*> s3 <*> s4)
--------------------------------------------------------------------------------
test = elerea $ do
smp <- start $ mdo
let fib'' = liftA2 (+) fib' fib
fib' <- delay 1 fib''
fib <- delay 1 fib'
return fib
res <- replicateM 8 smp
neutral $ print res
test2 = elerea $ do
smp <- start $ do
keys <- fmap head <$> flip stateful tail (map Just $ "d d d\n" ++ repeat ' ')
game (pure 0) renderMenu close keys
res <- replicateM 8 smp
sequence_ res
where
close = neutral $ print "close!"
renderMenu _score items i = do
neutral $ print ("item", items !! i)
game
:: Functor f =>
Signal a1
-> (a1 -> [[Char]] -> Int -> f ())
-> f a
-> Signal (Maybe Char)
-> SignalGen (Signal (f ()))
game highScore renderMenu closeAction keys = do
let firstTrue s = do
mask <- delay False =<< transfer False (||) s
return (liftA2 (&&) (not <$> mask) s)
mkGame 0 = error "evaluated"
mkGame _ = return (pure (void closeAction),pure True)
items = ["QUIT","ONE PLAYER GAME","TWO PLAYER GAME","QUIT"]
(output,_) <- switcher . flip fmap highScore $ \score -> do
pick <- displayMenu (length items) keys
let menu = (renderMenu score items <$> pick, pure False)
picked <- firstTrue ((== Just '\n') <$> keys)
gameSource <- generator (toMaybe <$> picked <*> (mkGame <$> pick))
fullOutput <- menu --> gameSource
return (fst =<< fullOutput,snd =<< fullOutput)
return output
displayMenu
:: Int -> Signal (Maybe Char) -> SignalGen (Signal Int)
displayMenu n keys = do
up <- edge ((==Just 'u') <$> keys)
down <- edge ((==Just 'd') <$> keys)
item <- transfer2 0 (\u d i -> (i + fromEnum d - fromEnum u) `mod` n) up down
return item
edge :: Signal Bool -> SignalGen (Signal Bool)
edge s = do
s' <- delay False s
return $ s' >>= \x -> if x then return False else s
infix 2 -->
> ) : : a - > Signal ( Maybe a ) - > ( Signal a )
> s = transfer
where store Nothing x = x
store (Just x) _ = x
collection
:: Signal [Signal b]
-> Signal (b -> Bool) -> SignalGen (Signal [b])
collection source isAlive = mdo
sig <- delay [] (map snd <$> collWithVals')
coll <- memo (liftA2 (++) source sig)
let collWithVals = zip <$> (sequence =<< coll) <*> coll
collWithVals' <- memo (filter <$> ((.fst) <$> isAlive) <*> collWithVals)
return $ map fst <$> collWithVals'
switcher
:: Signal (SignalGen (Signal b, Signal Bool))
-> SignalGen (Signal b, Signal Bool)
switcher gen = mdo
trig <- memo (snd =<< pw)
trig' <- delay True trig
ss <- generator (toMaybe <$> trig' <*> gen)
pw <- undefined --> ss
return (fst =<< pw,trig)
toMaybe :: Applicative f => Bool -> f a -> f (Maybe a)
toMaybe b s = if b then Just <$> s else pure Nothing
| null | https://raw.githubusercontent.com/divipp/lensref/2f0b9a36ac8853780e2b09ad0769464dd3837dab/src/LensRef/Elerea.hs | haskell | # LANGUAGE TypeSynonymInstances #
# LANGUAGE GADTs #
# LANGUAGE RankNTypes #
* The signal abstraction
* Embedding into I/O
* Basic building blocks
* Derived combinators
* Signals with side effects
import Debug.Trace
, stabilize_
------------------------------------------------------------------------------ core
TODO: try to break this
------------------------------------------------------------------------------ for efficiency
TODO: optimize
TODO: optimize
------------------------------------------------------------------------------ derived
TODO: why memo?
------------------------------------------------------------------------------
> gameSource
>
> ss | # LANGUAGE NoMonomorphismRestriction #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE LambdaCase #
# LANGUAGE ViewPatterns #
# LANGUAGE RecordWildCards #
# LANGUAGE RecursiveDo #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE StandaloneDeriving #
# LANGUAGE DeriveFunctor #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TemplateHaskell #
module LensRef.Elerea
(
elerea
, IO'
, neutral
, postpone
, rev
, Signal
, SignalGen
, start
, external
, externalMulti
, delay
, snapshot
, generator
, memo
, until
, stateful
, transfer
, transfer2
, transfer3
, transfer4
, execute
, effectful
, effectful1
, effectful2
, effectful3
, effectful4
, test, test2
) where
import Data.IORef
import Control.Applicative
import Control.Arrow
import Control.Monad.Reader
import Lens .
import Lens . Family2.State
import Lens . Family2.Unchecked
import Lens . Family2.Stock
import Prelude hiding (until)
import LensRef
Monad '
,
, -}MonadTrace (..), SimpleRefs(..), modSimpleRef
, Reversible(..), ReversibleT, runRev, postponed, reversible, neut
, Ref
, RefWriter, writeRef, creatorToWriter
, RefReader, readRef
, runRefCreator
)
infixl 1 <&>
(<&>) = flip (<$>)
type Signal = RefReader IO_
type SignalGen = ReaderT (Ref IO_ ()) (RefCreator IO_)
type IO' = RefWriter IO_
newtype IO _ a = IO _ { runBase : : ReaderT BaseRun IO a }
newtype IO_ a = IO_ { runBase :: ReaderT BaseRun (ReversibleT IO) a }
deriving (Functor, Applicative, Monad, MonadFix)
newtype BaseRun = BaseRun { runBaseRun :: forall a . IO' a -> IO_ a }
instance MonadTrace IO_ where
traceM = IO_ . traceM
instance Reversible IO_ where
restore m = IO_ $ restore $ runBase m <&> (runBase +++ id)
neutral :: IO a -> IO' a
neutral = lift . IO_ . lift . neut
postpone :: IO () -> IO' ()
postpone = lift . IO_ . lift . postponed
rev :: IO (a, IO ()) -> IO' a
rev = lift . IO_ . lift . reversible
instance SimpleRefs IO_ where
type SimpleRef IO_ = IORef
newSimpleRef = IO_ . newSimpleRef
readSimpleRef = IO_ . readSimpleRef
writeSimpleRef r = IO_ . writeSimpleRef r
down_ :: IO' a -> IO_ a
down_ m = do
br <- IO_ ask
runBaseRun br m
elerea :: IO' a -> IO a
elerea m = mdo
let run :: forall a . IO_ a -> IO a
run = runRev . flip runReaderT br . runBase
br <- run $ runRefCreator $ \w -> return $ BaseRun w
run $ down_ m
start :: SignalGen (Signal a) -> IO' (IO' a)
start s = creatorToWriter $ do
t <- newRef ()
x <- runReaderT s t
i <- newSimpleRef True
return $ do
init <- modSimpleRef i $ \x -> return (False, x)
when (not init) $ writeRef t ()
creatorToWriter $ readerToCreator x
external :: a -> IO' (Signal a, a -> IO' ())
external init = creatorToWriter $ do
r <- newRef init
return (readRef r, writeRef r)
externalMulti :: IO' (SignalGen (Signal [a]), a -> IO' ())
externalMulti = creatorToWriter $ do
r <- newSimpleRef []
let mk = ReaderT $ \t -> joinCreator $ readRef t <&> \() -> modSimpleRef r $ \x -> return ([], x)
return (mk, \a -> modSimpleRef r $ \as -> return (a:as, ()))
delay :: a -> Signal a -> SignalGen (Signal a)
delay x s = ReaderT $ \t -> delayPrev x (const id <$> readRef t) s
snapshot :: Signal a -> SignalGen a
snapshot = lift . readerToCreator
generator :: Signal (SignalGen a) -> SignalGen (Signal a)
generator s = ReaderT $ \t -> generator' $ readRef t <&> \() -> join $ flip runReaderT t <$> readerToCreator s
execute :: IO' a -> SignalGen a
execute = lift . lift . down_
until :: Signal Bool -> SignalGen (Signal Bool)
until s = do
step <- transfer False (||) s
dstep <- delay False step
memo (liftA2 (/=) step dstep)
memo :: Signal a -> SignalGen (Signal a)
lift r > > = \a - > readRef < $ > stabilize _ ( \ _ _ - > False ) a r
stateful :: a -> (a -> a) -> SignalGen (Signal a)
stateful x0 f = mfix $ \sig -> delay x0 (f <$> sig)
transfer :: a -> (t -> a -> a) -> Signal t -> SignalGen (Signal a)
transfer x0 f s = mfix $ \sig -> do
sig' <- delay x0 sig
transfer2 :: a -> (t1 -> t2 -> a -> a) -> Signal t1 -> Signal t2 -> SignalGen (Signal a)
transfer2 x0 f s1 s2 = transfer x0 ($) $ f <$> s1 <*> s2
transfer3 :: a -> (t1 -> t2 -> t3 -> a -> a) -> Signal t1 -> Signal t2 -> Signal t3 -> SignalGen (Signal a)
transfer3 x0 f s1 s2 s3 = transfer x0 ($) $ f <$> s1 <*> s2 <*> s3
transfer4 :: a -> (t1 -> t2 -> t3 -> t4 -> a -> a) -> Signal t1 -> Signal t2 -> Signal t3 -> Signal t4 -> SignalGen (Signal a)
transfer4 x0 f s1 s2 s3 s4 = transfer x0 ($) $ f <$> s1 <*> s2 <*> s3 <*> s4
effectful :: IO' a -> SignalGen (Signal a)
effectful = generator . return . execute
effectful1 :: (t -> IO' a) -> Signal t -> SignalGen (Signal a)
effectful1 m s = generator $ execute . m <$> s
effectful2 :: (t1 -> t2 -> IO' a) -> Signal t1 -> Signal t2 -> SignalGen (Signal a)
effectful2 m s1 s2 = generator $ execute <$> (m <$> s1 <*> s2)
effectful3 :: (t1 -> t2 -> t3 -> IO' a) -> Signal t1 -> Signal t2 -> Signal t3 -> SignalGen (Signal a)
effectful3 m s1 s2 s3 = generator $ execute <$> (m <$> s1 <*> s2 <*> s3)
effectful4 :: (t1 -> t2 -> t3 -> t4 -> IO' a) -> Signal t1 -> Signal t2 -> Signal t3 -> Signal t4 -> SignalGen (Signal a)
effectful4 m s1 s2 s3 s4 = generator $ execute <$> (m <$> s1 <*> s2 <*> s3 <*> s4)
test = elerea $ do
smp <- start $ mdo
let fib'' = liftA2 (+) fib' fib
fib' <- delay 1 fib''
fib <- delay 1 fib'
return fib
res <- replicateM 8 smp
neutral $ print res
test2 = elerea $ do
smp <- start $ do
keys <- fmap head <$> flip stateful tail (map Just $ "d d d\n" ++ repeat ' ')
game (pure 0) renderMenu close keys
res <- replicateM 8 smp
sequence_ res
where
close = neutral $ print "close!"
renderMenu _score items i = do
neutral $ print ("item", items !! i)
game
:: Functor f =>
Signal a1
-> (a1 -> [[Char]] -> Int -> f ())
-> f a
-> Signal (Maybe Char)
-> SignalGen (Signal (f ()))
game highScore renderMenu closeAction keys = do
let firstTrue s = do
mask <- delay False =<< transfer False (||) s
return (liftA2 (&&) (not <$> mask) s)
mkGame 0 = error "evaluated"
mkGame _ = return (pure (void closeAction),pure True)
items = ["QUIT","ONE PLAYER GAME","TWO PLAYER GAME","QUIT"]
(output,_) <- switcher . flip fmap highScore $ \score -> do
pick <- displayMenu (length items) keys
let menu = (renderMenu score items <$> pick, pure False)
picked <- firstTrue ((== Just '\n') <$> keys)
gameSource <- generator (toMaybe <$> picked <*> (mkGame <$> pick))
return (fst =<< fullOutput,snd =<< fullOutput)
return output
displayMenu
:: Int -> Signal (Maybe Char) -> SignalGen (Signal Int)
displayMenu n keys = do
up <- edge ((==Just 'u') <$> keys)
down <- edge ((==Just 'd') <$> keys)
item <- transfer2 0 (\u d i -> (i + fromEnum d - fromEnum u) `mod` n) up down
return item
edge :: Signal Bool -> SignalGen (Signal Bool)
edge s = do
s' <- delay False s
return $ s' >>= \x -> if x then return False else s
> ) : : a - > Signal ( Maybe a ) - > ( Signal a )
> s = transfer
where store Nothing x = x
store (Just x) _ = x
collection
:: Signal [Signal b]
-> Signal (b -> Bool) -> SignalGen (Signal [b])
collection source isAlive = mdo
sig <- delay [] (map snd <$> collWithVals')
coll <- memo (liftA2 (++) source sig)
let collWithVals = zip <$> (sequence =<< coll) <*> coll
collWithVals' <- memo (filter <$> ((.fst) <$> isAlive) <*> collWithVals)
return $ map fst <$> collWithVals'
switcher
:: Signal (SignalGen (Signal b, Signal Bool))
-> SignalGen (Signal b, Signal Bool)
switcher gen = mdo
trig <- memo (snd =<< pw)
trig' <- delay True trig
ss <- generator (toMaybe <$> trig' <*> gen)
return (fst =<< pw,trig)
toMaybe :: Applicative f => Bool -> f a -> f (Maybe a)
toMaybe b s = if b then Just <$> s else pure Nothing
|
e1fadf0ea3d81d4fed41d99b12c946214f287525294ca84c8e7827e6a661fbef | Naproche-SAD/Naproche-SAD | Definition.hs | module SAD.Data.Definition where
import SAD.Data.Formula (Formula)
import qualified SAD.Data.Structures.DisTree as DT
import Data.IntMap (IntMap)
import Data.Maybe
data DefType = Signature | Definition deriving (Eq, Show)
data DefEntry = DE {
guards :: [Formula], -- guards of the definitions
formula :: Formula, -- defining formula
kind :: DefType, -- proper definition or only sig extension
term :: Formula, -- defined term
evidence :: [Formula], -- evidence from the defining formula
typeLikes :: [[Formula]] -- type-likes of the definition
} deriving Show
{- yields information as to what can be unfolded -}
isDefinition :: DefEntry -> Bool
isDefinition = (==) Definition . kind
{- storage of definitions by term id -}
type Definitions = IntMap DefEntry
--- guards
type Guards = DT.DisTree Bool
isGuard :: Formula -> Guards -> Bool
isGuard f = head . fromMaybe [False] . DT.lookup f
| null | https://raw.githubusercontent.com/Naproche-SAD/Naproche-SAD/da131a6eaf65d4e02e82082a50a4febb6d42db3d/src/SAD/Data/Definition.hs | haskell | guards of the definitions
defining formula
proper definition or only sig extension
defined term
evidence from the defining formula
type-likes of the definition
yields information as to what can be unfolded
storage of definitions by term id
- guards | module SAD.Data.Definition where
import SAD.Data.Formula (Formula)
import qualified SAD.Data.Structures.DisTree as DT
import Data.IntMap (IntMap)
import Data.Maybe
data DefType = Signature | Definition deriving (Eq, Show)
data DefEntry = DE {
} deriving Show
isDefinition :: DefEntry -> Bool
isDefinition = (==) Definition . kind
type Definitions = IntMap DefEntry
type Guards = DT.DisTree Bool
isGuard :: Formula -> Guards -> Bool
isGuard f = head . fromMaybe [False] . DT.lookup f
|
9f0c987cd62df3ddad197545bdec7dc8580e94a6e392661d4da3e27d4e01a9ec | arttuka/reagent-material-ui | view_compact_outlined.cljs | (ns reagent-mui.icons.view-compact-outlined
"Imports @mui/icons-material/ViewCompactOutlined as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def view-compact-outlined (create-svg-icon (e "path" #js {"d" "M2 4v16h20V4H2zm4.5 14H4v-2.5h2.5V18zm0-4.75H4v-2.5h2.5v2.5zm0-4.75H4V6h2.5v2.5zM11 18H8.5v-2.5H11V18zm0-4.75H8.5v-2.5H11v2.5zm0-4.75H8.5V6H11v2.5zm4.5 9.5H13v-2.5h2.5V18zm0-4.75H13v-2.5h2.5v2.5zm0-4.75H13V6h2.5v2.5zM20 18h-2.5v-2.5H20V18zm0-4.75h-2.5v-2.5H20v2.5zm0-4.75h-2.5V6H20v2.5z"})
"ViewCompactOutlined"))
| null | https://raw.githubusercontent.com/arttuka/reagent-material-ui/14103a696c41c0eb67fc07fc67cd8799efd88cb9/src/icons/reagent_mui/icons/view_compact_outlined.cljs | clojure | (ns reagent-mui.icons.view-compact-outlined
"Imports @mui/icons-material/ViewCompactOutlined as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def view-compact-outlined (create-svg-icon (e "path" #js {"d" "M2 4v16h20V4H2zm4.5 14H4v-2.5h2.5V18zm0-4.75H4v-2.5h2.5v2.5zm0-4.75H4V6h2.5v2.5zM11 18H8.5v-2.5H11V18zm0-4.75H8.5v-2.5H11v2.5zm0-4.75H8.5V6H11v2.5zm4.5 9.5H13v-2.5h2.5V18zm0-4.75H13v-2.5h2.5v2.5zm0-4.75H13V6h2.5v2.5zM20 18h-2.5v-2.5H20V18zm0-4.75h-2.5v-2.5H20v2.5zm0-4.75h-2.5V6H20v2.5z"})
"ViewCompactOutlined"))
| |
fa4c15ea0042b4f7e625ec516863bb563ec9c024343582a7f2aec598658fc784 | bamos/snowglobe | Main.hs | This is the main entry point for SnowGlobe analytics .
-- The command-line options are parsed and functionality is
-- delegated to other portions of the code.
--
< >
-- 2015.05.08
import Data.Char(ord)
import Data.Csv (HasHeader(NoHeader), decodeWith, decDelimiter,
defaultDecodeOptions)
import Data.List(isInfixOf)
import Data.Time(defaultTimeLocale, getCurrentTime, getCurrentTimeZone,
utcToLocalTime, parseTimeOrError, LocalTime)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import Options.Applicative
import SnowGlobe.EnrichedEvent
import SnowGlobe.Queries(daySummary, dayReport, weekReport)
data Args = Args
{ events :: String
, day :: String
, mode :: Mode
} deriving Show
-- Argument syntax and help functions.
args :: Parser Args
args = Args
<$> strOption (long "events" <> metavar "FILE" <>
help "Location of events.tsv" )
<*> strOption (long "day" <> metavar "YYYY-MM-DD" <>
help "Day (or end of week) to obtain analytics for." <>
value "today")
<*> modeParser
data Mode
= DaySummary
| DayReport
| WeekReport
deriving Show
modeParser :: Parser Mode
modeParser = subparser
( command "DaySummary" dsInfo
<> command "DayReport" drInfo
<> command "WeekReport" wrInfo
)
where
mInfo :: Mode -> String -> ParserInfo Mode
mInfo m d = info (pure m) (progDesc d)
dsInfo = mInfo DaySummary "Print a concise summary for the current day."
drInfo = mInfo DayReport "Print a report for the current day."
wrInfo = mInfo WeekReport "Print a report for the past week."
Parse the raw TSV events file into a vector of Snowplow EnrichedEvents .
parseEvents:: BL.ByteString -> Either String (V.Vector EnrichedEvent)
parseEvents = decodeWith opts NoHeader
where opts = defaultDecodeOptions {decDelimiter = fromIntegral $ ord '\t'}
-- Load data from files and delegate functionality.
run:: Args -> IO()
run args = do
rawEvents <- BL.readFile $ events args
tz <- getCurrentTimeZone
now <- utcToLocalTime tz <$> getCurrentTime
let parsedTime = parseTimeOrError True defaultTimeLocale
"%Y-%m-%d" (day args) :: LocalTime
case parseEvents rawEvents of
Left err -> putStrLn err
Right eventsV ->
case mode args of
DaySummary -> putStrLn $ daySummary tz queryDay events
DayReport -> putStrLn $ dayReport tz queryDay events
WeekReport -> putStrLn $ weekReport tz queryDay events
where events = filter isMine $ V.toList eventsV
isMine e = any (\domain -> isInfixOf domain $ pageUrl e) whitelist
whitelist = [ "bamos.github.io"
, "derecho.elijah"
, "cmusatyalab.github.io/openface"
]
queryDay = if day args == "today" then now else parsedTime
main :: IO ()
main = execParser opts >>= run
where opts = info (helper <*> args)
(fullDesc <> header "SnowGlobe Analytics")
| null | https://raw.githubusercontent.com/bamos/snowglobe/b5211bbc2029b52339175af413aea52cf9ab2eb9/analysis/Main.hs | haskell | The command-line options are parsed and functionality is
delegated to other portions of the code.
2015.05.08
Argument syntax and help functions.
Load data from files and delegate functionality. | This is the main entry point for SnowGlobe analytics .
< >
import Data.Char(ord)
import Data.Csv (HasHeader(NoHeader), decodeWith, decDelimiter,
defaultDecodeOptions)
import Data.List(isInfixOf)
import Data.Time(defaultTimeLocale, getCurrentTime, getCurrentTimeZone,
utcToLocalTime, parseTimeOrError, LocalTime)
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import Options.Applicative
import SnowGlobe.EnrichedEvent
import SnowGlobe.Queries(daySummary, dayReport, weekReport)
data Args = Args
{ events :: String
, day :: String
, mode :: Mode
} deriving Show
args :: Parser Args
args = Args
<$> strOption (long "events" <> metavar "FILE" <>
help "Location of events.tsv" )
<*> strOption (long "day" <> metavar "YYYY-MM-DD" <>
help "Day (or end of week) to obtain analytics for." <>
value "today")
<*> modeParser
data Mode
= DaySummary
| DayReport
| WeekReport
deriving Show
modeParser :: Parser Mode
modeParser = subparser
( command "DaySummary" dsInfo
<> command "DayReport" drInfo
<> command "WeekReport" wrInfo
)
where
mInfo :: Mode -> String -> ParserInfo Mode
mInfo m d = info (pure m) (progDesc d)
dsInfo = mInfo DaySummary "Print a concise summary for the current day."
drInfo = mInfo DayReport "Print a report for the current day."
wrInfo = mInfo WeekReport "Print a report for the past week."
Parse the raw TSV events file into a vector of Snowplow EnrichedEvents .
parseEvents:: BL.ByteString -> Either String (V.Vector EnrichedEvent)
parseEvents = decodeWith opts NoHeader
where opts = defaultDecodeOptions {decDelimiter = fromIntegral $ ord '\t'}
run:: Args -> IO()
run args = do
rawEvents <- BL.readFile $ events args
tz <- getCurrentTimeZone
now <- utcToLocalTime tz <$> getCurrentTime
let parsedTime = parseTimeOrError True defaultTimeLocale
"%Y-%m-%d" (day args) :: LocalTime
case parseEvents rawEvents of
Left err -> putStrLn err
Right eventsV ->
case mode args of
DaySummary -> putStrLn $ daySummary tz queryDay events
DayReport -> putStrLn $ dayReport tz queryDay events
WeekReport -> putStrLn $ weekReport tz queryDay events
where events = filter isMine $ V.toList eventsV
isMine e = any (\domain -> isInfixOf domain $ pageUrl e) whitelist
whitelist = [ "bamos.github.io"
, "derecho.elijah"
, "cmusatyalab.github.io/openface"
]
queryDay = if day args == "today" then now else parsedTime
main :: IO ()
main = execParser opts >>= run
where opts = info (helper <*> args)
(fullDesc <> header "SnowGlobe Analytics")
|
8f3370f40ace340755cf2949d1befc72e87f13e023f856000f93be578f37c168 | kelsey-sorrels/robinson | actor_protocol.clj | (ns robinson.actor-protocol)
(defprotocol Actor
(receive [this state]))
| null | https://raw.githubusercontent.com/kelsey-sorrels/robinson/337fd2646882708331257d1f3db78a3074ccc67a/src/robinson/actor_protocol.clj | clojure | (ns robinson.actor-protocol)
(defprotocol Actor
(receive [this state]))
| |
43fd9103d10b65c841d24268772ebe2d25c8dc0a5e0fed7a35ed60da3d33bafd | VisionsGlobalEmpowerment/webchange | state.cljs | (ns webchange.admin.pages.student-add.state
(:require
[re-frame.core :as re-frame]
[re-frame.std-interceptors :as i]
[webchange.admin.routes :as routes]))
(def path-to-db :page/student-add)
(re-frame/reg-sub
path-to-db
(fn [db]
(get db path-to-db)))
(re-frame/reg-event-fx
::init
[(i/path path-to-db)]
(fn [{:keys [db]} [_ {:keys [school-id]}]]
{:db (-> db (assoc :school-id school-id))}))
(re-frame/reg-event-fx
::open-students-list
[(i/path path-to-db)]
(fn [{:keys [db]} [_]]
(let [school-id (:school-id db)]
{:dispatch [::routes/redirect :students :school-id school-id]})))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/a6846777664ff58d613235444893e0ac09fa563b/src/cljs/webchange/admin/pages/student_add/state.cljs | clojure | (ns webchange.admin.pages.student-add.state
(:require
[re-frame.core :as re-frame]
[re-frame.std-interceptors :as i]
[webchange.admin.routes :as routes]))
(def path-to-db :page/student-add)
(re-frame/reg-sub
path-to-db
(fn [db]
(get db path-to-db)))
(re-frame/reg-event-fx
::init
[(i/path path-to-db)]
(fn [{:keys [db]} [_ {:keys [school-id]}]]
{:db (-> db (assoc :school-id school-id))}))
(re-frame/reg-event-fx
::open-students-list
[(i/path path-to-db)]
(fn [{:keys [db]} [_]]
(let [school-id (:school-id db)]
{:dispatch [::routes/redirect :students :school-id school-id]})))
| |
4abadad601e657cafe6351b0bfa857a44cc4360e066fd1410ac8600663548cfb | stchang/macrotypes | more-utils.rkt | #lang turnstile
turnstile library of extra stx helpers
(provide (for-syntax x+τ stx-parse/fold transfer-type)
define-nested/R define-nested/L)
(begin-for-syntax
(define (transfer-type from to)
(if (typeof from)
(syntax-property to ': (typeof from))
to))
;; syntax class matching [x : τ], ie a parameter with type annotation
;; TODO: generalize to arbitrary tags (not always :)?
(define-syntax-class x+τ
(pattern [(~var x id) (~datum :) τ]))
returns a flattened list of stx objs , outermost first
;; usage: (stx-parse/fold stx pattern)
- where pattern has shape ( pexpander element remainder )
(define-syntax stx-parse/fold ; foldl
(syntax-parser
[(_ e (pexpander x rst))
#:with L (generate-temporary 'L)
#:with e-rst (generate-temporary #'e)
#:with acc (generate-temporary 'acc)
#`(let L ([e-rst e][acc null])
(syntax-parse e-rst
[(pexpander x rst) (L #'rst (cons #'x acc))]
[last (reverse (cons #'last acc))]))])))
;; R = like foldr, eg λ
;; L = like foldl, eg app
;; usage: (define-nested name name/1)
;; name = name of the curried form, eg λ/c
;; name/1 = name of the unit form, eg λ/1
;; TODO: specify more specific path? eg, can do (λ (x) x) with grouped binders
(define-syntax define-nested/R
(syntax-parser
[(_ name:id name/1) #'(define-nested/R name name/1 #:as (λ (x) x))]
[(_ name:id name/1 #:as wrap-fn)
#'(define-syntax name
(wrap-fn ; eg pattern-expander
(syntax-parser
[(_ e) #'e]
[(_ x . rst)
(quasisyntax/loc this-syntax
(name/1 x #,(syntax/loc this-syntax (name . rst))))])))]))
(define-syntax define-nested/L
(syntax-parser
[(_ name:id name/1) #'(define-nested/L name name/1 #:as (λ (x) x))]
[(_ name:id name/1 #:as wrap-fn)
#'(define-syntax name
(wrap-fn
(syntax-parser
[(_ e) (transfer-type this-syntax #'e)]
[(_ f e . rst)
(quasisyntax/loc this-syntax
(name
#,(syntax/loc this-syntax (name/1 f e)) . rst))])))]))
| null | https://raw.githubusercontent.com/stchang/macrotypes/05ec31f2e1fe0ddd653211e041e06c6c8071ffa6/turnstile-lib/turnstile/more-utils.rkt | racket | syntax class matching [x : τ], ie a parameter with type annotation
TODO: generalize to arbitrary tags (not always :)?
usage: (stx-parse/fold stx pattern)
foldl
R = like foldr, eg λ
L = like foldl, eg app
usage: (define-nested name name/1)
name = name of the curried form, eg λ/c
name/1 = name of the unit form, eg λ/1
TODO: specify more specific path? eg, can do (λ (x) x) with grouped binders
eg pattern-expander | #lang turnstile
turnstile library of extra stx helpers
(provide (for-syntax x+τ stx-parse/fold transfer-type)
define-nested/R define-nested/L)
(begin-for-syntax
(define (transfer-type from to)
(if (typeof from)
(syntax-property to ': (typeof from))
to))
(define-syntax-class x+τ
(pattern [(~var x id) (~datum :) τ]))
returns a flattened list of stx objs , outermost first
- where pattern has shape ( pexpander element remainder )
(syntax-parser
[(_ e (pexpander x rst))
#:with L (generate-temporary 'L)
#:with e-rst (generate-temporary #'e)
#:with acc (generate-temporary 'acc)
#`(let L ([e-rst e][acc null])
(syntax-parse e-rst
[(pexpander x rst) (L #'rst (cons #'x acc))]
[last (reverse (cons #'last acc))]))])))
(define-syntax define-nested/R
(syntax-parser
[(_ name:id name/1) #'(define-nested/R name name/1 #:as (λ (x) x))]
[(_ name:id name/1 #:as wrap-fn)
#'(define-syntax name
(syntax-parser
[(_ e) #'e]
[(_ x . rst)
(quasisyntax/loc this-syntax
(name/1 x #,(syntax/loc this-syntax (name . rst))))])))]))
(define-syntax define-nested/L
(syntax-parser
[(_ name:id name/1) #'(define-nested/L name name/1 #:as (λ (x) x))]
[(_ name:id name/1 #:as wrap-fn)
#'(define-syntax name
(wrap-fn
(syntax-parser
[(_ e) (transfer-type this-syntax #'e)]
[(_ f e . rst)
(quasisyntax/loc this-syntax
(name
#,(syntax/loc this-syntax (name/1 f e)) . rst))])))]))
|
2452044599c53e9e2c55997a7f9bd0b3d1e98587293066991aba643d119a4adb | dradtke/Lisp-Text-Editor | icon.lisp | (in-package :gtk-cffi)
(defcenum icon-size
:invalid
:menu
:small-toolbar
:large-toolbar
:button
:dnd
:dialog)
(defcenum state
:normal :active :prelight :selected :insensitive :inconsistent :focused)
(defclass icon-source (object) ())
(defcfun "gtk_icon_source_new" :pointer)
(defmethod gconstructor ((icon-source icon-source) &rest rest)
(declare (ignore icon-source rest))
(gtk-icon-source-new))
(defgtkslots icon-source
direction text-direction
direction-wildcarded :boolean
filename gtk-string
pixbuf pobject
icon-name gtk-string
size icon-size
size-wildcarded :boolean
state state
state-wildcarded :boolean) | null | https://raw.githubusercontent.com/dradtke/Lisp-Text-Editor/b0947828eda82d7edd0df8ec2595e7491a633580/quicklisp/dists/quicklisp/software/gtk-cffi-20120208-cvs/gtk/icon.lisp | lisp | (in-package :gtk-cffi)
(defcenum icon-size
:invalid
:menu
:small-toolbar
:large-toolbar
:button
:dnd
:dialog)
(defcenum state
:normal :active :prelight :selected :insensitive :inconsistent :focused)
(defclass icon-source (object) ())
(defcfun "gtk_icon_source_new" :pointer)
(defmethod gconstructor ((icon-source icon-source) &rest rest)
(declare (ignore icon-source rest))
(gtk-icon-source-new))
(defgtkslots icon-source
direction text-direction
direction-wildcarded :boolean
filename gtk-string
pixbuf pobject
icon-name gtk-string
size icon-size
size-wildcarded :boolean
state state
state-wildcarded :boolean) | |
42dd729b5585790d1dac27c10e306b18c1448375548e3045da735f75f989df8d | input-output-hk/cardano-wallet | PartialOrd.hs | # LANGUAGE ScopedTypeVariables #
-- |
Copyright : © 2018 - 2020 IOHK
-- License: Apache-2.0
--
-- Provides laws for the 'PartialOrd' class.
--
module Test.Utils.Laws.PartialOrd
( partialOrdLaws
) where
import Prelude
import Algebra.PartialOrd
( PartialOrd (..) )
import Data.Proxy
( Proxy )
import Test.QuickCheck
( Arbitrary, Property, property )
import Test.QuickCheck.Classes
( Laws (..) )
partialOrdLaws :: (PartialOrd a, Arbitrary a, Show a) => Proxy a -> Laws
partialOrdLaws p = Laws "PartialOrd"
[ ( "Antisymmetry"
, partialOrdAntisymmetric p)
, ( "Reflexivity"
, partialOrdReflexive p)
, ( "Transitivity"
, partialOrdTransitive p)
]
partialOrdAntisymmetric
:: forall a. (Show a, PartialOrd a, Arbitrary a) => Proxy a -> Property
partialOrdAntisymmetric _ = property $
\(a :: a) b -> ((a `leq` b) && (b `leq` a)) == (a == b)
partialOrdReflexive
:: forall a. (Show a, PartialOrd a, Arbitrary a) => Proxy a -> Property
partialOrdReflexive _ = property $
\(a :: a) -> a `leq` a
partialOrdTransitive
:: forall a. (Show a, PartialOrd a, Arbitrary a) => Proxy a -> Property
partialOrdTransitive _ = property test
where
test (a :: a) b c
| a `leq` b && b `leq` c = a `leq` c
| a `leq` c && c `leq` b = a `leq` b
| b `leq` a && a `leq` c = b `leq` c
| b `leq` c && c `leq` a = b `leq` a
| c `leq` a && a `leq` b = c `leq` b
| c `leq` b && b `leq` a = c `leq` a
| otherwise = True
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/4dd33e7842f5cf3d33ffdfde35a3398504cc7dc0/lib/test-utils/src/Test/Utils/Laws/PartialOrd.hs | haskell | |
License: Apache-2.0
Provides laws for the 'PartialOrd' class.
| # LANGUAGE ScopedTypeVariables #
Copyright : © 2018 - 2020 IOHK
module Test.Utils.Laws.PartialOrd
( partialOrdLaws
) where
import Prelude
import Algebra.PartialOrd
( PartialOrd (..) )
import Data.Proxy
( Proxy )
import Test.QuickCheck
( Arbitrary, Property, property )
import Test.QuickCheck.Classes
( Laws (..) )
partialOrdLaws :: (PartialOrd a, Arbitrary a, Show a) => Proxy a -> Laws
partialOrdLaws p = Laws "PartialOrd"
[ ( "Antisymmetry"
, partialOrdAntisymmetric p)
, ( "Reflexivity"
, partialOrdReflexive p)
, ( "Transitivity"
, partialOrdTransitive p)
]
partialOrdAntisymmetric
:: forall a. (Show a, PartialOrd a, Arbitrary a) => Proxy a -> Property
partialOrdAntisymmetric _ = property $
\(a :: a) b -> ((a `leq` b) && (b `leq` a)) == (a == b)
partialOrdReflexive
:: forall a. (Show a, PartialOrd a, Arbitrary a) => Proxy a -> Property
partialOrdReflexive _ = property $
\(a :: a) -> a `leq` a
partialOrdTransitive
:: forall a. (Show a, PartialOrd a, Arbitrary a) => Proxy a -> Property
partialOrdTransitive _ = property test
where
test (a :: a) b c
| a `leq` b && b `leq` c = a `leq` c
| a `leq` c && c `leq` b = a `leq` b
| b `leq` a && a `leq` c = b `leq` c
| b `leq` c && c `leq` a = b `leq` a
| c `leq` a && a `leq` b = c `leq` b
| c `leq` b && b `leq` a = c `leq` a
| otherwise = True
|
5a1615c2896931f7d4868189be09df0aa7f9cd03030141cf467abcabb4018c79 | ctford/Idris-Elba-Dev | Colours.hs | module Idris.Colours (
IdrisColour(..),
ColourTheme(..),
defaultTheme,
colouriseKwd, colouriseBound, colouriseImplicit,
colouriseType, colouriseFun, colouriseData,
colourisePrompt,
ColourType(..)) where
import System.Console.ANSI
data IdrisColour = IdrisColour { colour :: Maybe Color
, vivid :: Bool
, underline :: Bool
, bold :: Bool
, italic :: Bool
}
deriving (Eq, Show)
mkColour :: Color -> IdrisColour
mkColour c = IdrisColour (Just c) True False False False
data ColourTheme = ColourTheme { keywordColour :: IdrisColour
, boundVarColour :: IdrisColour
, implicitColour :: IdrisColour
, functionColour :: IdrisColour
, typeColour :: IdrisColour
, dataColour :: IdrisColour
, promptColour :: IdrisColour
}
deriving (Eq, Show)
defaultTheme :: ColourTheme
defaultTheme = ColourTheme { keywordColour = IdrisColour Nothing True True True False
, boundVarColour = mkColour Magenta
, implicitColour = IdrisColour (Just Magenta) True True False False
, functionColour = mkColour Green
, typeColour = mkColour Blue
, dataColour = mkColour Red
, promptColour = IdrisColour Nothing True False True False
}
-- Set the colour of a string using POSIX escape codes
colourise :: IdrisColour -> String -> String
colourise (IdrisColour c v u b i) str = setSGRCode sgr ++ str ++ setSGRCode [Reset]
where sgr = fg c ++
(if u then [SetUnderlining SingleUnderline] else []) ++
(if b then [SetConsoleIntensity BoldIntensity] else []) ++
(if i then [SetItalicized True] else [])
fg Nothing = []
fg (Just c) = [SetColor Foreground (if v then Vivid else Dull) c]
colouriseKwd :: ColourTheme -> String -> String
colouriseKwd t = colourise (keywordColour t)
colouriseBound :: ColourTheme -> String -> String
colouriseBound t = colourise (boundVarColour t)
colouriseImplicit :: ColourTheme -> String -> String
colouriseImplicit t = colourise (implicitColour t)
colouriseFun :: ColourTheme -> String -> String
colouriseFun t = colourise (functionColour t)
colouriseType :: ColourTheme -> String -> String
colouriseType t = colourise (typeColour t)
colouriseData :: ColourTheme -> String -> String
colouriseData t = colourise (dataColour t)
colourisePrompt :: ColourTheme -> String -> String
colourisePrompt t = colourise (promptColour t)
data ColourType = KeywordColour
| BoundVarColour
| ImplicitColour
| FunctionColour
| TypeColour
| DataColour
| PromptColour
deriving (Eq, Show, Bounded, Enum)
| null | https://raw.githubusercontent.com/ctford/Idris-Elba-Dev/e915e1d6b7a5921ba43d2572a9ad9b980619b8ee/src/Idris/Colours.hs | haskell | Set the colour of a string using POSIX escape codes | module Idris.Colours (
IdrisColour(..),
ColourTheme(..),
defaultTheme,
colouriseKwd, colouriseBound, colouriseImplicit,
colouriseType, colouriseFun, colouriseData,
colourisePrompt,
ColourType(..)) where
import System.Console.ANSI
data IdrisColour = IdrisColour { colour :: Maybe Color
, vivid :: Bool
, underline :: Bool
, bold :: Bool
, italic :: Bool
}
deriving (Eq, Show)
mkColour :: Color -> IdrisColour
mkColour c = IdrisColour (Just c) True False False False
data ColourTheme = ColourTheme { keywordColour :: IdrisColour
, boundVarColour :: IdrisColour
, implicitColour :: IdrisColour
, functionColour :: IdrisColour
, typeColour :: IdrisColour
, dataColour :: IdrisColour
, promptColour :: IdrisColour
}
deriving (Eq, Show)
defaultTheme :: ColourTheme
defaultTheme = ColourTheme { keywordColour = IdrisColour Nothing True True True False
, boundVarColour = mkColour Magenta
, implicitColour = IdrisColour (Just Magenta) True True False False
, functionColour = mkColour Green
, typeColour = mkColour Blue
, dataColour = mkColour Red
, promptColour = IdrisColour Nothing True False True False
}
colourise :: IdrisColour -> String -> String
colourise (IdrisColour c v u b i) str = setSGRCode sgr ++ str ++ setSGRCode [Reset]
where sgr = fg c ++
(if u then [SetUnderlining SingleUnderline] else []) ++
(if b then [SetConsoleIntensity BoldIntensity] else []) ++
(if i then [SetItalicized True] else [])
fg Nothing = []
fg (Just c) = [SetColor Foreground (if v then Vivid else Dull) c]
colouriseKwd :: ColourTheme -> String -> String
colouriseKwd t = colourise (keywordColour t)
colouriseBound :: ColourTheme -> String -> String
colouriseBound t = colourise (boundVarColour t)
colouriseImplicit :: ColourTheme -> String -> String
colouriseImplicit t = colourise (implicitColour t)
colouriseFun :: ColourTheme -> String -> String
colouriseFun t = colourise (functionColour t)
colouriseType :: ColourTheme -> String -> String
colouriseType t = colourise (typeColour t)
colouriseData :: ColourTheme -> String -> String
colouriseData t = colourise (dataColour t)
colourisePrompt :: ColourTheme -> String -> String
colourisePrompt t = colourise (promptColour t)
data ColourType = KeywordColour
| BoundVarColour
| ImplicitColour
| FunctionColour
| TypeColour
| DataColour
| PromptColour
deriving (Eq, Show, Bounded, Enum)
|
e8cf17e5dddf9137a9c249914094a7dbbdc5a44a386b81fcc8dcbc130b0e5a25 | dinosaure/art | atomic.ml | let parse s = Scanf.sscanf s "%d.%d" (fun major minor -> (major, minor))
let () =
let version = parse Sys.ocaml_version in
if version >= (4, 12) && version < (5, 0)
then print_string "atomic_stdlib.ml"
else print_string "atomic_pre412.ml"
| null | https://raw.githubusercontent.com/dinosaure/art/0c134cd24b28aaae2a6f7d22d5ad0b73e74ae375/conf/atomic.ml | ocaml | let parse s = Scanf.sscanf s "%d.%d" (fun major minor -> (major, minor))
let () =
let version = parse Sys.ocaml_version in
if version >= (4, 12) && version < (5, 0)
then print_string "atomic_stdlib.ml"
else print_string "atomic_pre412.ml"
| |
8a21ffc5a984c4ab96f834dd4d397617e4fc7273d51007ba6c2ef60fd27ec09d | ocaml/dune | env.ml | module Sys = Stdlib.Sys
module Var = struct
module T = struct
type t = string
let compare =
if Sys.win32 then fun a b ->
String.compare (String.lowercase a) (String.lowercase b)
else String.compare
let to_dyn = Dyn.string
end
let temp_dir = if Sys.win32 then "TEMP" else "TMPDIR"
include Comparable.Make (T)
include T
end
module Set = Var.Set
module Map = Var.Map
(* The use of [mutable] here is safe, since we never call (back) to the
memoization framework when computing [unix]. *)
type t =
{ vars : string Map.t
; mutable unix : string list option
}
let equal t { vars; unix = _ } = Map.equal ~equal:String.equal t.vars vars
let hash { vars; unix = _ } = Poly.hash vars
let make vars = { vars; unix = None }
let empty = make Map.empty
let vars t = Var.Set.of_keys t.vars
let get t k = Map.find t.vars k
let to_unix t =
match t.unix with
| Some v -> v
| None ->
let res =
Map.foldi ~init:[]
~f:(fun k v acc -> Printf.sprintf "%s=%s" k v :: acc)
t.vars
in
t.unix <- Some res;
res
let of_unix arr =
Array.to_list arr
|> List.map ~f:(fun s ->
match String.lsplit2 s ~on:'=' with
| None ->
Code_error.raise
"Env.of_unix: entry without '=' found in the environment"
[ ("var", String s) ]
| Some (k, v) -> (k, v))
|> Map.of_list_multi
|> Map.map ~f:(function
| [] -> assert false
| x :: _ -> x)
let initial = make (of_unix (Unix.environment ()))
let of_unix u = make (of_unix u)
let add t ~var ~value = make (Map.set t.vars var value)
let remove t ~var = make (Map.remove t.vars var)
let extend t ~vars =
if Map.is_empty vars then t else make (Map.superpose t.vars vars)
let extend_env x y = if Map.is_empty x.vars then y else extend x ~vars:y.vars
let to_dyn t =
let open Dyn in
Map.to_dyn string t.vars
let diff x y =
Map.merge x.vars y.vars ~f:(fun _k vx vy ->
match vy with
| Some _ -> None
| None -> vx)
|> make
let update t ~var ~f = make (Map.update t.vars var ~f)
let of_string_map m =
make (String.Map.foldi ~init:Map.empty ~f:(fun k v acc -> Map.set acc k v) m)
let iter t = Map.iteri t.vars
let to_map t = t.vars
| null | https://raw.githubusercontent.com/ocaml/dune/714626f4d408e5c71c24ba91d0d520588702ec52/otherlibs/stdune/src/env.ml | ocaml | The use of [mutable] here is safe, since we never call (back) to the
memoization framework when computing [unix]. | module Sys = Stdlib.Sys
module Var = struct
module T = struct
type t = string
let compare =
if Sys.win32 then fun a b ->
String.compare (String.lowercase a) (String.lowercase b)
else String.compare
let to_dyn = Dyn.string
end
let temp_dir = if Sys.win32 then "TEMP" else "TMPDIR"
include Comparable.Make (T)
include T
end
module Set = Var.Set
module Map = Var.Map
type t =
{ vars : string Map.t
; mutable unix : string list option
}
let equal t { vars; unix = _ } = Map.equal ~equal:String.equal t.vars vars
let hash { vars; unix = _ } = Poly.hash vars
let make vars = { vars; unix = None }
let empty = make Map.empty
let vars t = Var.Set.of_keys t.vars
let get t k = Map.find t.vars k
let to_unix t =
match t.unix with
| Some v -> v
| None ->
let res =
Map.foldi ~init:[]
~f:(fun k v acc -> Printf.sprintf "%s=%s" k v :: acc)
t.vars
in
t.unix <- Some res;
res
let of_unix arr =
Array.to_list arr
|> List.map ~f:(fun s ->
match String.lsplit2 s ~on:'=' with
| None ->
Code_error.raise
"Env.of_unix: entry without '=' found in the environment"
[ ("var", String s) ]
| Some (k, v) -> (k, v))
|> Map.of_list_multi
|> Map.map ~f:(function
| [] -> assert false
| x :: _ -> x)
let initial = make (of_unix (Unix.environment ()))
let of_unix u = make (of_unix u)
let add t ~var ~value = make (Map.set t.vars var value)
let remove t ~var = make (Map.remove t.vars var)
let extend t ~vars =
if Map.is_empty vars then t else make (Map.superpose t.vars vars)
let extend_env x y = if Map.is_empty x.vars then y else extend x ~vars:y.vars
let to_dyn t =
let open Dyn in
Map.to_dyn string t.vars
let diff x y =
Map.merge x.vars y.vars ~f:(fun _k vx vy ->
match vy with
| Some _ -> None
| None -> vx)
|> make
let update t ~var ~f = make (Map.update t.vars var ~f)
let of_string_map m =
make (String.Map.foldi ~init:Map.empty ~f:(fun k v acc -> Map.set acc k v) m)
let iter t = Map.iteri t.vars
let to_map t = t.vars
|
bd7d4f1e6994ab7c7aad6bfabb0c6bc36793e0fab6bc7dee5e0e5ccb7f4c27aa | elaforge/karya | Serialize.hs | Copyright 2013
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
# LANGUAGE ScopedTypeVariables #
| This module implements a Serialize class and serializers for basic types .
It duplicates a lot from the standard Serialize class , but this one at
least is under my control . The other one is not guaranteed to remain
compatible . Of course , it 's unlikely to change incompatibly and I use it
myself rather than re - implementing and Integer encoding , but that 's
the theory anyway .
At the least it lets me use a direct float encoding rather than hacking
around the large and buggy default implementation .
It duplicates a lot from the standard Serialize class, but this one at
least is under my control. The other one is not guaranteed to remain
compatible. Of course, it's unlikely to change incompatibly and I use it
myself rather than re-implementing String and Integer encoding, but that's
the theory anyway.
At the least it lets me use a direct float encoding rather than hacking
around the large and buggy default implementation.
-}
module Util.Serialize (
encode, decode
, Serialize(..)
, Get, Put
-- * magic
, Magic(..)
, magicBytes
, serialize, serialize_rotate
, UnserializeError(..)
, unserialize
-- * util
, get_tag, put_tag, bad_tag
, get_enum, put_enum, bad_enum
, get_enum_unsafe, put_enum_unsafe
-- * versions
, get_version, put_version, bad_version
) where
import qualified Control.Exception as Exception
import qualified Data.Array.IArray as IArray
import qualified Data.ByteString as ByteString
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8
import qualified Data.Int as Int
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import qualified Data.Serialize as Serialize
import Data.Serialize (getWord8, putWord8, Get, Put)
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text.Encoding
import qualified Data.Time as Time
import qualified Data.Vector as Vector
import qualified Data.Vector.Storable as Vector.Storable
import qualified Data.Vector.Unboxed as Unboxed
import qualified Data.Word as Word
import qualified Foreign
import qualified GHC.Float as Float
import qualified System.Directory as Directory
import qualified System.FilePath as FilePath
import qualified System.IO.Error as IO.Error
import qualified Util.CallStack as CallStack
import qualified Util.File as File
import Global
encode :: Serialize a => a -> ByteString
encode = Serialize.runPut . put
decode :: Serialize a => ByteString -> Either String a
decode = Serialize.runGet get
class Serialize a where
put :: Serialize.Putter a
get :: Get a
-- * magic
| This is a four byte prefix to identify a particular file type , tagged with
the serialized type . are just for syntactic convenience only , and
-- must be ASCII.
--
-- The constructor is not exported, so all magics have to be defined here,
-- which should make it easy to avoid collisions.
data Magic a = Magic !Char !Char !Char !Char deriving (Show)
magicBytes :: Magic a -> ByteString
magicBytes (Magic c1 c2 c3 c4) = Char8.pack [c1, c2, c3, c4]
magicLength :: Int
magicLength = 4
serialize :: Serialize a => Magic a -> FilePath -> a -> IO Bool
-- ^ result of 'File.writeGz'.
serialize = serialize_rotate 1
serialize_rotate :: Serialize a => Int -> Magic a -> FilePath -> a -> IO Bool
serialize_rotate rotations magic fname state = do
Directory.createDirectoryIfMissing True $ FilePath.takeDirectory fname
File.writeGz rotations fname $ magicBytes magic <> encode state
data UnserializeError = BadMagic ByteString ByteString
| IOError IO.Error.IOError | UnserializeError String
deriving (Show)
unserialize :: Serialize a => Magic a -> FilePath
-> IO (Either UnserializeError a)
unserialize magic fname = catch $ do
bytes <- either (Exception.throw . IO.Error.userError) return
=<< File.readGz fname
let (file_magic, rest) = ByteString.splitAt magicLength bytes
if file_magic /= magicBytes magic
then return $ Left $ BadMagic (magicBytes magic) file_magic
else first UnserializeError <$> Exception.evaluate (decode rest)
-- Apparently decode can still throw an exception unless
-- the contents of the Either is forced to whnf.
where catch = fmap (either (Left . IOError) id) . Exception.try
instance Pretty UnserializeError where
pretty e = case e of
BadMagic expected got -> "expected file magic " <> showt expected
<> " but got " <> showt got
IOError exc -> "io error: " <> showt exc
UnserializeError err -> "unserialize error: " <> txt err
-- * numeric
instance Serialize Integer where
put = Serialize.put
get = Serialize.get
instance Serialize Int where
put i = put (fromIntegral i :: Int.Int64)
get = fromIntegral <$> (get :: Get Int.Int64)
instance Serialize Int.Int64 where
put i = put (fromIntegral i :: Word.Word64)
get = fromIntegral <$> (get :: Get Word.Word64)
instance Serialize Word.Word8 where
put = putWord8
get = getWord8
instance Serialize Word.Word32 where
put = Serialize.putWord32le
get = Serialize.getWord32le
instance Serialize Word.Word64 where
put = Serialize.putWord64le
get = Serialize.getWord64le
instance Serialize Double where
put = put . Float.castDoubleToWord64
get = Float.castWord64ToDouble <$> get
instance Serialize Float where
put = put . Float.castFloatToWord32
get = Float.castWord32ToFloat <$> get
-- * util
get_tag :: Get Word.Word8
get_tag = getWord8
put_tag :: Word.Word8 -> Put
put_tag = putWord8
bad_tag :: String -> Word.Word8 -> Get a
bad_tag typ tag = fail $ "unknown tag for " ++ typ ++ ": " ++ show tag
-- | These are convenient but dangerous. If they are are used in a context
where backward compatibility matters ( " Cmd . Serialize " ) then it 's too easy
-- to break compatibility by adding or removing an enum.
--
-- But they're fine if used in an enum that will never change, or where
-- compatibility doesn't matter.
get_enum_unsafe :: (Bounded a, Enum a) => Serialize.Get a
get_enum_unsafe = get >>= \n ->
maybe (fail $ "enum value out of range: " ++ show n) return (to_enum n)
put_enum_unsafe :: Enum a => a -> Serialize.Put
put_enum_unsafe = put . fromEnum
-- | A safe version of 'toEnum'.
to_enum :: forall a. (Enum a, Bounded a) => Int -> Maybe a
to_enum n
| fromEnum (minBound :: a) <= n && n <= fromEnum (maxBound :: a) =
Just (toEnum n)
| otherwise = Nothing
get_enum :: Serialize.Get Word.Word8
get_enum = get
-- | It's just put, but make sure it's using Int. Word8 would be more
-- suitable.
put_enum :: Word.Word8 -> Serialize.Put
put_enum = put
bad_enum :: String -> Word.Word8 -> Get a
bad_enum name val = fail $ "unknown enum val for " <> name <> ": " <> show val
-- * basic types
instance Serialize () where
put () = return ()
get = return ()
instance Serialize Bool where
put False = put_tag 0
put True = put_tag 1
get = (/= 0) <$> get_tag
instance Serialize Char where
put = Serialize.put
get = Serialize.get
instance Serialize Time.UTCTime where
put time = put (show time)
get = get >>= return . read
-- * sums and products
instance (Serialize a, Serialize b) => Serialize (Either a b) where
put (Left a) = put_tag 0 >> put a
put (Right b) = put_tag 1 >> put b
get = get_tag >>= \case
0 -> Left <$> get
1 -> Right <$> get
tag -> bad_tag "Either" tag
instance Serialize a => Serialize (Maybe a) where
put Nothing = put_tag 0
put (Just a) = put_tag 1 >> put a
get = get_tag >>= \case
0 -> return Nothing
1 -> Just <$> get
tag -> bad_tag "Maybe" tag
instance (Serialize a, Serialize b) => Serialize (a, b) where
put (a, b) = put a >> put b
get = (,) <$> get <*> get
instance (Serialize a, Serialize b, Serialize c) => Serialize (a, b, c) where
put (a, b, c) = put a >> put b >> put c
get = (,,) <$> get <*> get <*> get
-- * containers
instance Serialize a => Serialize [a] where
put = Serialize.putListOf put
get = Serialize.getListOf get
instance Serialize a => Serialize (NonEmpty a) where
put = put . NonEmpty.toList
get = fmap NonEmpty.fromList get
instance (Ord a, Serialize a) => Serialize (Set a) where
put = put . Set.toAscList
get = Set.fromAscList <$> get
instance (Ord k, Serialize k, Serialize v) => Serialize (Map k v) where
put = put . Map.toAscList
get = Map.fromAscList <$> get
instance (Serialize i, IArray.Ix i, Serialize e) =>
Serialize (IArray.Array i e) where
put = Serialize.putIArrayOf put put
get = Serialize.getIArrayOf get get
instance Serialize ByteString where
put bs = do
put $ ByteString.length bs
Serialize.putByteString bs
get = get >>= Serialize.getByteString
instance Serialize Text.Text where
put = put . Text.Encoding.encodeUtf8
get = Text.Encoding.decodeUtf8 <$> get
instance (Serialize a, Unboxed.Unbox a) => Serialize (Unboxed.Vector a) where
put v = do
put (Unboxed.length v)
Unboxed.mapM_ put v
get = do
len :: Int <- get
Unboxed.replicateM len get
instance Serialize a => Serialize (Vector.Vector a) where
put v = do
put (Vector.length v)
Vector.mapM_ put v
get = do
len :: Int <- get
Vector.replicateM len get
instance (Serialize a, Foreign.Storable a) =>
Serialize (Vector.Storable.Vector a) where
put v = do
put (Vector.Storable.length v)
Vector.Storable.mapM_ put v
get = do
len :: Int <- get
Vector.Storable.replicateM len get
This has to be here instead of in CallStack to avoid a circular import .
instance Serialize CallStack.Caller where
put (CallStack.Caller a b) = put_tag 0 >> put a >> put b
put CallStack.NoCaller = put_tag 1
get = get_tag >>= \case
0 -> CallStack.Caller <$> get <*> get
1 -> return CallStack.NoCaller
tag -> bad_tag "Caller" tag
-- * versions
get_version :: Get Word.Word8
get_version = getWord8
put_version :: Word.Word8 -> Put
put_version = putWord8
bad_version :: CallStack.Stack => String -> Word.Word8 -> a
bad_version typ ver = errorStack $
"unknown version " <> showt ver <> " for " <> showt typ
| null | https://raw.githubusercontent.com/elaforge/karya/89d1651424c35e564138d93424a157ff87457245/Util/Serialize.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
* magic
* util
* versions
* magic
must be ASCII.
The constructor is not exported, so all magics have to be defined here,
which should make it easy to avoid collisions.
^ result of 'File.writeGz'.
Apparently decode can still throw an exception unless
the contents of the Either is forced to whnf.
* numeric
* util
| These are convenient but dangerous. If they are are used in a context
to break compatibility by adding or removing an enum.
But they're fine if used in an enum that will never change, or where
compatibility doesn't matter.
| A safe version of 'toEnum'.
| It's just put, but make sure it's using Int. Word8 would be more
suitable.
* basic types
* sums and products
* containers
* versions | Copyright 2013
# LANGUAGE ScopedTypeVariables #
| This module implements a Serialize class and serializers for basic types .
It duplicates a lot from the standard Serialize class , but this one at
least is under my control . The other one is not guaranteed to remain
compatible . Of course , it 's unlikely to change incompatibly and I use it
myself rather than re - implementing and Integer encoding , but that 's
the theory anyway .
At the least it lets me use a direct float encoding rather than hacking
around the large and buggy default implementation .
It duplicates a lot from the standard Serialize class, but this one at
least is under my control. The other one is not guaranteed to remain
compatible. Of course, it's unlikely to change incompatibly and I use it
myself rather than re-implementing String and Integer encoding, but that's
the theory anyway.
At the least it lets me use a direct float encoding rather than hacking
around the large and buggy default implementation.
-}
module Util.Serialize (
encode, decode
, Serialize(..)
, Get, Put
, Magic(..)
, magicBytes
, serialize, serialize_rotate
, UnserializeError(..)
, unserialize
, get_tag, put_tag, bad_tag
, get_enum, put_enum, bad_enum
, get_enum_unsafe, put_enum_unsafe
, get_version, put_version, bad_version
) where
import qualified Control.Exception as Exception
import qualified Data.Array.IArray as IArray
import qualified Data.ByteString as ByteString
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8
import qualified Data.Int as Int
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import qualified Data.Serialize as Serialize
import Data.Serialize (getWord8, putWord8, Get, Put)
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text.Encoding
import qualified Data.Time as Time
import qualified Data.Vector as Vector
import qualified Data.Vector.Storable as Vector.Storable
import qualified Data.Vector.Unboxed as Unboxed
import qualified Data.Word as Word
import qualified Foreign
import qualified GHC.Float as Float
import qualified System.Directory as Directory
import qualified System.FilePath as FilePath
import qualified System.IO.Error as IO.Error
import qualified Util.CallStack as CallStack
import qualified Util.File as File
import Global
encode :: Serialize a => a -> ByteString
encode = Serialize.runPut . put
decode :: Serialize a => ByteString -> Either String a
decode = Serialize.runGet get
class Serialize a where
put :: Serialize.Putter a
get :: Get a
| This is a four byte prefix to identify a particular file type , tagged with
the serialized type . are just for syntactic convenience only , and
data Magic a = Magic !Char !Char !Char !Char deriving (Show)
magicBytes :: Magic a -> ByteString
magicBytes (Magic c1 c2 c3 c4) = Char8.pack [c1, c2, c3, c4]
magicLength :: Int
magicLength = 4
serialize :: Serialize a => Magic a -> FilePath -> a -> IO Bool
serialize = serialize_rotate 1
serialize_rotate :: Serialize a => Int -> Magic a -> FilePath -> a -> IO Bool
serialize_rotate rotations magic fname state = do
Directory.createDirectoryIfMissing True $ FilePath.takeDirectory fname
File.writeGz rotations fname $ magicBytes magic <> encode state
data UnserializeError = BadMagic ByteString ByteString
| IOError IO.Error.IOError | UnserializeError String
deriving (Show)
unserialize :: Serialize a => Magic a -> FilePath
-> IO (Either UnserializeError a)
unserialize magic fname = catch $ do
bytes <- either (Exception.throw . IO.Error.userError) return
=<< File.readGz fname
let (file_magic, rest) = ByteString.splitAt magicLength bytes
if file_magic /= magicBytes magic
then return $ Left $ BadMagic (magicBytes magic) file_magic
else first UnserializeError <$> Exception.evaluate (decode rest)
where catch = fmap (either (Left . IOError) id) . Exception.try
instance Pretty UnserializeError where
pretty e = case e of
BadMagic expected got -> "expected file magic " <> showt expected
<> " but got " <> showt got
IOError exc -> "io error: " <> showt exc
UnserializeError err -> "unserialize error: " <> txt err
instance Serialize Integer where
put = Serialize.put
get = Serialize.get
instance Serialize Int where
put i = put (fromIntegral i :: Int.Int64)
get = fromIntegral <$> (get :: Get Int.Int64)
instance Serialize Int.Int64 where
put i = put (fromIntegral i :: Word.Word64)
get = fromIntegral <$> (get :: Get Word.Word64)
instance Serialize Word.Word8 where
put = putWord8
get = getWord8
instance Serialize Word.Word32 where
put = Serialize.putWord32le
get = Serialize.getWord32le
instance Serialize Word.Word64 where
put = Serialize.putWord64le
get = Serialize.getWord64le
instance Serialize Double where
put = put . Float.castDoubleToWord64
get = Float.castWord64ToDouble <$> get
instance Serialize Float where
put = put . Float.castFloatToWord32
get = Float.castWord32ToFloat <$> get
get_tag :: Get Word.Word8
get_tag = getWord8
put_tag :: Word.Word8 -> Put
put_tag = putWord8
bad_tag :: String -> Word.Word8 -> Get a
bad_tag typ tag = fail $ "unknown tag for " ++ typ ++ ": " ++ show tag
where backward compatibility matters ( " Cmd . Serialize " ) then it 's too easy
get_enum_unsafe :: (Bounded a, Enum a) => Serialize.Get a
get_enum_unsafe = get >>= \n ->
maybe (fail $ "enum value out of range: " ++ show n) return (to_enum n)
put_enum_unsafe :: Enum a => a -> Serialize.Put
put_enum_unsafe = put . fromEnum
to_enum :: forall a. (Enum a, Bounded a) => Int -> Maybe a
to_enum n
| fromEnum (minBound :: a) <= n && n <= fromEnum (maxBound :: a) =
Just (toEnum n)
| otherwise = Nothing
get_enum :: Serialize.Get Word.Word8
get_enum = get
put_enum :: Word.Word8 -> Serialize.Put
put_enum = put
bad_enum :: String -> Word.Word8 -> Get a
bad_enum name val = fail $ "unknown enum val for " <> name <> ": " <> show val
instance Serialize () where
put () = return ()
get = return ()
instance Serialize Bool where
put False = put_tag 0
put True = put_tag 1
get = (/= 0) <$> get_tag
instance Serialize Char where
put = Serialize.put
get = Serialize.get
instance Serialize Time.UTCTime where
put time = put (show time)
get = get >>= return . read
instance (Serialize a, Serialize b) => Serialize (Either a b) where
put (Left a) = put_tag 0 >> put a
put (Right b) = put_tag 1 >> put b
get = get_tag >>= \case
0 -> Left <$> get
1 -> Right <$> get
tag -> bad_tag "Either" tag
instance Serialize a => Serialize (Maybe a) where
put Nothing = put_tag 0
put (Just a) = put_tag 1 >> put a
get = get_tag >>= \case
0 -> return Nothing
1 -> Just <$> get
tag -> bad_tag "Maybe" tag
instance (Serialize a, Serialize b) => Serialize (a, b) where
put (a, b) = put a >> put b
get = (,) <$> get <*> get
instance (Serialize a, Serialize b, Serialize c) => Serialize (a, b, c) where
put (a, b, c) = put a >> put b >> put c
get = (,,) <$> get <*> get <*> get
instance Serialize a => Serialize [a] where
put = Serialize.putListOf put
get = Serialize.getListOf get
instance Serialize a => Serialize (NonEmpty a) where
put = put . NonEmpty.toList
get = fmap NonEmpty.fromList get
instance (Ord a, Serialize a) => Serialize (Set a) where
put = put . Set.toAscList
get = Set.fromAscList <$> get
instance (Ord k, Serialize k, Serialize v) => Serialize (Map k v) where
put = put . Map.toAscList
get = Map.fromAscList <$> get
instance (Serialize i, IArray.Ix i, Serialize e) =>
Serialize (IArray.Array i e) where
put = Serialize.putIArrayOf put put
get = Serialize.getIArrayOf get get
instance Serialize ByteString where
put bs = do
put $ ByteString.length bs
Serialize.putByteString bs
get = get >>= Serialize.getByteString
instance Serialize Text.Text where
put = put . Text.Encoding.encodeUtf8
get = Text.Encoding.decodeUtf8 <$> get
instance (Serialize a, Unboxed.Unbox a) => Serialize (Unboxed.Vector a) where
put v = do
put (Unboxed.length v)
Unboxed.mapM_ put v
get = do
len :: Int <- get
Unboxed.replicateM len get
instance Serialize a => Serialize (Vector.Vector a) where
put v = do
put (Vector.length v)
Vector.mapM_ put v
get = do
len :: Int <- get
Vector.replicateM len get
instance (Serialize a, Foreign.Storable a) =>
Serialize (Vector.Storable.Vector a) where
put v = do
put (Vector.Storable.length v)
Vector.Storable.mapM_ put v
get = do
len :: Int <- get
Vector.Storable.replicateM len get
This has to be here instead of in CallStack to avoid a circular import .
instance Serialize CallStack.Caller where
put (CallStack.Caller a b) = put_tag 0 >> put a >> put b
put CallStack.NoCaller = put_tag 1
get = get_tag >>= \case
0 -> CallStack.Caller <$> get <*> get
1 -> return CallStack.NoCaller
tag -> bad_tag "Caller" tag
get_version :: Get Word.Word8
get_version = getWord8
put_version :: Word.Word8 -> Put
put_version = putWord8
bad_version :: CallStack.Stack => String -> Word.Word8 -> a
bad_version typ ver = errorStack $
"unknown version " <> showt ver <> " for " <> showt typ
|
35b0b89d9fbb9dfb63797bf12a0886ea600c3df169699481738b3b5b3686d3a1 | softwarelanguageslab/maf | logm.scm | (define NumSeries (int-top))
(define NumComputers NumSeries)
(define NumWorkers NumSeries)
(define StartRate (int-top))
(define Increment (int-top))
(define (build-vector1 n f)
(letrec ((v (make-vector n (f 0)))
(loop1 (lambda (i)
(if (< i n)
(begin
(vector-set! v i (f i))
(loop1 (+ i 1)))
v))))
(loop1 1)))
(define (build-vector2 n f)
(letrec ((v (make-vector n (f 0)))
(loop2 (lambda (i)
(if (< i n)
(begin
(vector-set! v i (f i))
(loop2 (+ i 1)))
v))))
(loop2 1)))
(define (vector-foreach f v)
(letrec ((loop (lambda (i)
(if (< i (vector-length v))
(begin
(f (vector-ref v i))
(loop (+ i 1)))
'done))))
(loop 0)))
(define master-init
(actor "master-init" ()
(start ()
(let* ((computers
(build-vector1 NumComputers
(lambda (i)
(let* ((rate (+ StartRate (* i Increment))))
(create rate-computer rate)))))
(workers
(build-vector2 NumWorkers
(lambda (i)
(let* ((rate-computer
(vector-ref computers (modulo i NumComputers)))
(start-term (* i Increment))
(actorRef (create series-worker a/self rate-computer start-term)))
(send actorRef next-term)
(send actorRef get-term)
actorRef
)))))
(become master computers workers NumWorkers 0 0)))))
(define master
(actor "master" (computers workers num-work-requested num-work-received terms-sum)
(result (term)
(if (= (+ num-work-received 1) num-work-requested)
(begin
(vector-foreach (lambda (a) (send a stop)) computers)
(vector-foreach (lambda (a) (send a stop)) workers)
(terminate))
(become master computers workers num-work-requested (+ num-work-received 1) (+ terms-sum term))))))
(define series-worker-wait
(actor "series-worker-wait" (master computer)
(next-term ()
(send a/self next-term)
(become series-worker-wait master computer))
(result (term)
(become series-worker master computer term))
(get-term ()
(send a/self get-term)
(become series-worker-wait master computer))
(stop ()
(send a/self stop)
(become series-worker-wait master computer))))
(define series-worker
(actor "series-worker" (master computer cur-term)
(next-term ()
(send computer compute cur-term a/self)
;; (become series-worker master computer cur-term)
(become series-worker-wait master computer)
)
(result (term)
(become series-worker master computer term))
(get-term ()
(send master result cur-term)
(become series-worker master computer cur-term))
(stop () (terminate))))
(define (compute-next-term cur rate)
(* rate cur (- 1 cur)))
(define rate-computer
(actor "rate-computer" (rate)
(compute (term sender)
(send sender result (compute-next-term term rate))
(become rate-computer rate))
(stop ()
(terminate))))
;; (define master-actor (create master #f #f 0 0 0))
(define master-actor (create master-init))
(send master-actor start)
| null | https://raw.githubusercontent.com/softwarelanguageslab/maf/be58e02c63d25cab5b48fdf7b737b68b882e9dca/test/concurrentScheme/actors/contracts/savina/logm.scm | scheme | (become series-worker master computer cur-term)
(define master-actor (create master #f #f 0 0 0)) | (define NumSeries (int-top))
(define NumComputers NumSeries)
(define NumWorkers NumSeries)
(define StartRate (int-top))
(define Increment (int-top))
(define (build-vector1 n f)
(letrec ((v (make-vector n (f 0)))
(loop1 (lambda (i)
(if (< i n)
(begin
(vector-set! v i (f i))
(loop1 (+ i 1)))
v))))
(loop1 1)))
(define (build-vector2 n f)
(letrec ((v (make-vector n (f 0)))
(loop2 (lambda (i)
(if (< i n)
(begin
(vector-set! v i (f i))
(loop2 (+ i 1)))
v))))
(loop2 1)))
(define (vector-foreach f v)
(letrec ((loop (lambda (i)
(if (< i (vector-length v))
(begin
(f (vector-ref v i))
(loop (+ i 1)))
'done))))
(loop 0)))
(define master-init
(actor "master-init" ()
(start ()
(let* ((computers
(build-vector1 NumComputers
(lambda (i)
(let* ((rate (+ StartRate (* i Increment))))
(create rate-computer rate)))))
(workers
(build-vector2 NumWorkers
(lambda (i)
(let* ((rate-computer
(vector-ref computers (modulo i NumComputers)))
(start-term (* i Increment))
(actorRef (create series-worker a/self rate-computer start-term)))
(send actorRef next-term)
(send actorRef get-term)
actorRef
)))))
(become master computers workers NumWorkers 0 0)))))
(define master
(actor "master" (computers workers num-work-requested num-work-received terms-sum)
(result (term)
(if (= (+ num-work-received 1) num-work-requested)
(begin
(vector-foreach (lambda (a) (send a stop)) computers)
(vector-foreach (lambda (a) (send a stop)) workers)
(terminate))
(become master computers workers num-work-requested (+ num-work-received 1) (+ terms-sum term))))))
(define series-worker-wait
(actor "series-worker-wait" (master computer)
(next-term ()
(send a/self next-term)
(become series-worker-wait master computer))
(result (term)
(become series-worker master computer term))
(get-term ()
(send a/self get-term)
(become series-worker-wait master computer))
(stop ()
(send a/self stop)
(become series-worker-wait master computer))))
(define series-worker
(actor "series-worker" (master computer cur-term)
(next-term ()
(send computer compute cur-term a/self)
(become series-worker-wait master computer)
)
(result (term)
(become series-worker master computer term))
(get-term ()
(send master result cur-term)
(become series-worker master computer cur-term))
(stop () (terminate))))
(define (compute-next-term cur rate)
(* rate cur (- 1 cur)))
(define rate-computer
(actor "rate-computer" (rate)
(compute (term sender)
(send sender result (compute-next-term term rate))
(become rate-computer rate))
(stop ()
(terminate))))
(define master-actor (create master-init))
(send master-actor start)
|
c18f447b8771e71660dad8b4ecf62d2ac07fe1a8ae71faa957268b383410e8ad | dhleong/wish | sheets.cljs | (ns ^{:author "Daniel Leong"
:doc "sheets"}
wish.sheets
(:require [archetype.views.error-boundary :refer [error-boundary]]
[wish.sheets.dnd5e :as dnd5e]
[wish.sheets.dnd5e.builder :as dnd5e-builder]
[wish.sheets.dnd5e.campaign :as dnd5e-campaign]
[wish.sheets.dnd5e.engine :as dnd5e-engine]
[wish.sheets.dnd5e.keymaps :as dnd5e-key]
[wish.providers :refer [create-file-with-data
error-resolver-view]]
[wish.util :refer [click>evt <sub >evt]]
[wish.util.nav :refer [sheet-url]]
[wish.views.widgets :as widgets :refer [link link>evt]]))
; ======= const data =======================================
(def compiler-version 2)
; TODO we could use code splitting here to avoid loading
; sheet templates that we don't care about
(def sheets
{:dnd5e {:name "D&D 5E"
:fn #'dnd5e/sheet
:builder #'dnd5e-builder/view
:campaign #'dnd5e-campaign/view
:v 1
:default-sources [:wish/wdnd5e-srd]
:engine (delay
(dnd5e-engine/create-engine))
:keymaps dnd5e-key/maps}})
; ======= Public interface =================================
(defn get-engine
[sheet-kind]
(deref (get-in sheets [sheet-kind :engine])))
(defn get-keymaps
[sheet-kind]
(get-in sheets [sheet-kind :keymaps]))
(defn stub-campaign
"Create the initial data for a new campaign"
[kind campaign-name]
(let [kind-meta (get sheets kind)]
(when-not kind-meta
(throw (js/Error.
(str "Unable to get sheet meta for kind: " kind))))
{:v [compiler-version (:v kind-meta)] ; wish + sheet version numbers
:updated (.getTime (js/Date.)) ; date
:kind kind
:name campaign-name
:sources (:default-sources kind-meta)
:players #{}
}))
(defn stub-sheet
"Create the initial data for a new sheet"
[kind sheet-name]
(let [kind-meta (get sheets kind)]
(when-not kind-meta
(throw (js/Error.
(str "Unable to get sheet meta for kind: " kind))))
{:v [compiler-version (:v kind-meta)] ; wish + sheet version numbers
:updated (.getTime (js/Date.)) ; date
:kind kind
:name sheet-name
:sources (:default-sources kind-meta)
:classes {}
:races []
:inventory {}
:items {}
:equipped #{}
}))
(defn create-campaign!
"Returns a channel that emits [err sheet-id] on success"
[campaign-name provider-id sheet-kind]
{:pre [(not (nil? provider-id))
(not (nil? sheet-kind))]}
(create-file-with-data :campaign campaign-name provider-id
(stub-campaign sheet-kind campaign-name)))
(defn create-sheet!
"Returns a channel that emits [err sheet-id] on success"
[sheet-name provider-id sheet-kind]
{:pre [(not (nil? provider-id))
(not (nil? sheet-kind))]}
(create-file-with-data :sheet sheet-name provider-id
(stub-sheet sheet-kind sheet-name)))
; ======= Views ============================================
(defn- sheet-error-widget [what]
(when-let [{:keys [err retry-evt]} (<sub [:sheet-error-info])]
[:div.sheet.error
[:p "Error loading " what]
(if-let [data (ex-data err)]
[error-resolver-view data]
(if (keyword? err)
[error-resolver-view {:state err}]
; unknown error; something went wrong
[widgets/error-box err]))
[:div
[:a {:href "#"
:on-click (click>evt retry-evt)}
"Try again"]]
[:div
[link {:href "/sheets"}
"Pick another sheet"]]]))
(defn sheet-loader [?sheet]
(let [{sheet-name :name} ?sheet]
(if-let [err-widget (sheet-error-widget (if sheet-name
sheet-name
"Sheet"))]
err-widget
(if sheet-name
[:div (str "Loading " sheet-name "...")]
[:div "Loading..."]))))
(defn sources-loader
[sheet]
(if-let [err-widget (sheet-error-widget (str "Data for " (:name sheet)))]
err-widget
[:div "Loading data for " (:name sheet) "..."]))
(defn sheet-unknown [kind]
[:div (str "`" kind "`") " is not a type of sheet we know about"])
(defn- safe-sheet-content [sheet-id content]
(try
; eager evaluate class, race, etc. to ensure that
; we can inflate everything without error
(<sub [:all-attrs])
; the actual content view, wrapped in an error boundary; any
; errors it catches *should* be rendering-related, and not
; something we can do anything baout here
[error-boundary content]
(catch :default err
[:div.sheet.error
[:p "Error inflating sheet"]
[:div
[link {:href (sheet-url sheet-id :builder :home)}
"Adjust sheet sources"]]
[:div.nav-link
[link>evt [:load-sheet! sheet-id]
"Reload sheet"]]
[:div
[link {:href "/sheets"}
"Pick another sheet"]]
[widgets/error-box err]])))
(defn- ensuring-loaded
[sheet-id content-fn]
(let [sheet (<sub [:provided-sheet sheet-id])]
(if (:sources sheet)
(let [kind (:kind sheet)]
(if-let [sheet-info (get sheets (keyword kind))]
(if (<sub [:sheet-source sheet-id])
; sheet is ready; render!
[error-boundary
(content-fn sheet-info)]
(do
(>evt [:load-sheet-source! sheet (:sources sheet)])
[sources-loader sheet]))
; unknown sheet kind
[sheet-unknown kind]))
; either we don't have the sheet at all, or it's just
; a stub with no actual data; either way, load it!
(do
(>evt [:load-sheet! sheet-id])
[sheet-loader sheet]))))
(defn builder
[[sheet-id section]]
(ensuring-loaded
sheet-id
(fn [{view :builder}]
[view section])))
(defn campaign
[[campaign-id section]]
(ensuring-loaded
campaign-id
(fn [{view :campaign}]
[view section])))
(defn viewer
[sheet-id]
(ensuring-loaded
sheet-id
(fn [{view :fn}]
[safe-sheet-content
sheet-id
[view]])))
| null | https://raw.githubusercontent.com/dhleong/wish/9036f9da3706bfcc1e4b4736558b6f7309f53b7b/src/cljs/wish/sheets.cljs | clojure | ======= const data =======================================
TODO we could use code splitting here to avoid loading
sheet templates that we don't care about
======= Public interface =================================
wish + sheet version numbers
date
wish + sheet version numbers
date
======= Views ============================================
unknown error; something went wrong
eager evaluate class, race, etc. to ensure that
we can inflate everything without error
the actual content view, wrapped in an error boundary; any
errors it catches *should* be rendering-related, and not
something we can do anything baout here
sheet is ready; render!
unknown sheet kind
either we don't have the sheet at all, or it's just
a stub with no actual data; either way, load it! | (ns ^{:author "Daniel Leong"
:doc "sheets"}
wish.sheets
(:require [archetype.views.error-boundary :refer [error-boundary]]
[wish.sheets.dnd5e :as dnd5e]
[wish.sheets.dnd5e.builder :as dnd5e-builder]
[wish.sheets.dnd5e.campaign :as dnd5e-campaign]
[wish.sheets.dnd5e.engine :as dnd5e-engine]
[wish.sheets.dnd5e.keymaps :as dnd5e-key]
[wish.providers :refer [create-file-with-data
error-resolver-view]]
[wish.util :refer [click>evt <sub >evt]]
[wish.util.nav :refer [sheet-url]]
[wish.views.widgets :as widgets :refer [link link>evt]]))
(def compiler-version 2)
(def sheets
{:dnd5e {:name "D&D 5E"
:fn #'dnd5e/sheet
:builder #'dnd5e-builder/view
:campaign #'dnd5e-campaign/view
:v 1
:default-sources [:wish/wdnd5e-srd]
:engine (delay
(dnd5e-engine/create-engine))
:keymaps dnd5e-key/maps}})
(defn get-engine
[sheet-kind]
(deref (get-in sheets [sheet-kind :engine])))
(defn get-keymaps
[sheet-kind]
(get-in sheets [sheet-kind :keymaps]))
(defn stub-campaign
"Create the initial data for a new campaign"
[kind campaign-name]
(let [kind-meta (get sheets kind)]
(when-not kind-meta
(throw (js/Error.
(str "Unable to get sheet meta for kind: " kind))))
:kind kind
:name campaign-name
:sources (:default-sources kind-meta)
:players #{}
}))
(defn stub-sheet
"Create the initial data for a new sheet"
[kind sheet-name]
(let [kind-meta (get sheets kind)]
(when-not kind-meta
(throw (js/Error.
(str "Unable to get sheet meta for kind: " kind))))
:kind kind
:name sheet-name
:sources (:default-sources kind-meta)
:classes {}
:races []
:inventory {}
:items {}
:equipped #{}
}))
(defn create-campaign!
"Returns a channel that emits [err sheet-id] on success"
[campaign-name provider-id sheet-kind]
{:pre [(not (nil? provider-id))
(not (nil? sheet-kind))]}
(create-file-with-data :campaign campaign-name provider-id
(stub-campaign sheet-kind campaign-name)))
(defn create-sheet!
"Returns a channel that emits [err sheet-id] on success"
[sheet-name provider-id sheet-kind]
{:pre [(not (nil? provider-id))
(not (nil? sheet-kind))]}
(create-file-with-data :sheet sheet-name provider-id
(stub-sheet sheet-kind sheet-name)))
(defn- sheet-error-widget [what]
(when-let [{:keys [err retry-evt]} (<sub [:sheet-error-info])]
[:div.sheet.error
[:p "Error loading " what]
(if-let [data (ex-data err)]
[error-resolver-view data]
(if (keyword? err)
[error-resolver-view {:state err}]
[widgets/error-box err]))
[:div
[:a {:href "#"
:on-click (click>evt retry-evt)}
"Try again"]]
[:div
[link {:href "/sheets"}
"Pick another sheet"]]]))
(defn sheet-loader [?sheet]
(let [{sheet-name :name} ?sheet]
(if-let [err-widget (sheet-error-widget (if sheet-name
sheet-name
"Sheet"))]
err-widget
(if sheet-name
[:div (str "Loading " sheet-name "...")]
[:div "Loading..."]))))
(defn sources-loader
[sheet]
(if-let [err-widget (sheet-error-widget (str "Data for " (:name sheet)))]
err-widget
[:div "Loading data for " (:name sheet) "..."]))
(defn sheet-unknown [kind]
[:div (str "`" kind "`") " is not a type of sheet we know about"])
(defn- safe-sheet-content [sheet-id content]
(try
(<sub [:all-attrs])
[error-boundary content]
(catch :default err
[:div.sheet.error
[:p "Error inflating sheet"]
[:div
[link {:href (sheet-url sheet-id :builder :home)}
"Adjust sheet sources"]]
[:div.nav-link
[link>evt [:load-sheet! sheet-id]
"Reload sheet"]]
[:div
[link {:href "/sheets"}
"Pick another sheet"]]
[widgets/error-box err]])))
(defn- ensuring-loaded
[sheet-id content-fn]
(let [sheet (<sub [:provided-sheet sheet-id])]
(if (:sources sheet)
(let [kind (:kind sheet)]
(if-let [sheet-info (get sheets (keyword kind))]
(if (<sub [:sheet-source sheet-id])
[error-boundary
(content-fn sheet-info)]
(do
(>evt [:load-sheet-source! sheet (:sources sheet)])
[sources-loader sheet]))
[sheet-unknown kind]))
(do
(>evt [:load-sheet! sheet-id])
[sheet-loader sheet]))))
(defn builder
[[sheet-id section]]
(ensuring-loaded
sheet-id
(fn [{view :builder}]
[view section])))
(defn campaign
[[campaign-id section]]
(ensuring-loaded
campaign-id
(fn [{view :campaign}]
[view section])))
(defn viewer
[sheet-id]
(ensuring-loaded
sheet-id
(fn [{view :fn}]
[safe-sheet-content
sheet-id
[view]])))
|
642c79cfcfa16ec3421a25a9660a39bfad58b9d46ab927701e803077fd1554ec | bsansouci/bsb-native | errors.ml | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
(* This module should be removed. We keep it for now, to avoid
breaking external tools depending on it. *)
let report_error = Location.report_exception
| null | https://raw.githubusercontent.com/bsansouci/bsb-native/9a89457783d6e80deb0fba9ca7372c10a768a9ea/vendor/ocaml/driver/errors.ml | ocaml | *********************************************************************
OCaml
*********************************************************************
This module should be removed. We keep it for now, to avoid
breaking external tools depending on it. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
let report_error = Location.report_exception
|
2f9ed4d864cd2ddb1f0299139898868b857377ed8bda1b66dce11e520ed28ceb | sjoerdvisscher/data-category | Functor.hs | # LANGUAGE
GADTs
, PolyKinds
, RankNTypes
, , NoImplicitPrelude
, TypeOperators
, TypeFamilies
, PatternSynonyms
, FlexibleContexts
, FlexibleInstances
, UndecidableInstances
, GeneralizedNewtypeDeriving
#
GADTs
, PolyKinds
, RankNTypes
, ConstraintKinds
, NoImplicitPrelude
, TypeOperators
, TypeFamilies
, PatternSynonyms
, FlexibleContexts
, FlexibleInstances
, UndecidableInstances
, GeneralizedNewtypeDeriving
#-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Category.Functor
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer :
-- Stability : experimental
-- Portability : non-portable
-----------------------------------------------------------------------------
module Data.Category.Functor (
-- * Cat
Cat(..)
-- * Functors
, Functor(..)
, FunctorOf
-- ** Functor instances
, Id(..)
, (:.:)(..)
, Const(..), ConstF
, OpOp(..)
, OpOpInv(..)
, Any(..)
-- *** Related to the product category
, Proj1(..)
, Proj2(..)
, (:***:)(..)
, DiagProd(..)
, Tuple1, pattern Tuple1
, Tuple2, pattern Tuple2
, Swap, pattern Swap
* * * functors
, Hom(..)
, (:*-:), pattern HomX_
, (:-*:), pattern Hom_X
) where
import Data.Kind (Type)
import Data.Category
import Data.Category.Product
infixr 9 %
infixr 9 :%
-- | Functors map objects and arrows.
class (Category (Dom ftag), Category (Cod ftag)) => Functor ftag where
-- | The domain, or source category, of the functor.
type Dom ftag :: Type -> Type -> Type
-- | The codomain, or target category, of the functor.
type Cod ftag :: Type -> Type -> Type
-- | @:%@ maps objects.
type ftag :% a :: Type
-- | @%@ maps arrows.
(%) :: ftag -> Dom ftag a b -> Cod ftag (ftag :% a) (ftag :% b)
type FunctorOf a b t = (Functor t, Dom t ~ a, Cod t ~ b)
-- | Functors are arrows in the category Cat.
data Cat :: (Type -> Type -> Type) -> (Type -> Type -> Type) -> Type where
CatA :: (Functor ftag, Category (Dom ftag), Category (Cod ftag)) => ftag -> Cat (Dom ftag) (Cod ftag)
-- | @Cat@ is the category with categories as objects and funtors as arrows.
instance Category Cat where
src (CatA _) = CatA Id
tgt (CatA _) = CatA Id
CatA f1 . CatA f2 = CatA (f1 :.: f2)
data Id (k :: Type -> Type -> Type) = Id
-- | The identity functor on k
instance Category k => Functor (Id k) where
type Dom (Id k) = k
type Cod (Id k) = k
type Id k :% a = a
_ % f = f
data (g :.: h) where
(:.:) :: (Functor g, Functor h, Cod h ~ Dom g) => g -> h -> g :.: h
| The composition of two functors .
instance (Category (Cod g), Category (Dom h)) => Functor (g :.: h) where
type Dom (g :.: h) = Dom h
type Cod (g :.: h) = Cod g
type (g :.: h) :% a = g :% (h :% a)
(g :.: h) % f = g % (h % f)
data Const (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) x where
Const :: Obj c2 x -> Const c1 c2 x
-- | The constant functor.
instance (Category c1, Category c2) => Functor (Const c1 c2 x) where
type Dom (Const c1 c2 x) = c1
type Cod (Const c1 c2 x) = c2
type Const c1 c2 x :% a = x
Const x % _ = x
-- | The constant functor with the same domain and codomain as f.
type ConstF f = Const (Dom f) (Cod f)
data OpOp (k :: Type -> Type -> Type) = OpOp
| The @Op ( Op x ) = functor .
instance Category k => Functor (OpOp k) where
type Dom (OpOp k) = Op (Op k)
type Cod (OpOp k) = k
type OpOp k :% a = a
OpOp % Op (Op f) = f
data OpOpInv (k :: Type -> Type -> Type) = OpOpInv
-- | The @x = Op (Op x)@ functor.
instance Category k => Functor (OpOpInv k) where
type Dom (OpOpInv k) = k
type Cod (OpOpInv k) = Op (Op k)
type OpOpInv k :% a = a
OpOpInv % f = Op (Op f)
-- | A functor wrapper in case of conflicting family instance declarations
newtype Any f = Any f deriving Functor
data Proj1 (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) = Proj1
| ' Proj1 ' is a bifunctor that projects out the first component of a product .
instance (Category c1, Category c2) => Functor (Proj1 c1 c2) where
type Dom (Proj1 c1 c2) = c1 :**: c2
type Cod (Proj1 c1 c2) = c1
type Proj1 c1 c2 :% (a1, a2) = a1
Proj1 % (f1 :**: _) = f1
data Proj2 (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) = Proj2
| ' Proj2 ' is a bifunctor that projects out the second component of a product .
instance (Category c1, Category c2) => Functor (Proj2 c1 c2) where
type Dom (Proj2 c1 c2) = c1 :**: c2
type Cod (Proj2 c1 c2) = c2
type Proj2 c1 c2 :% (a1, a2) = a2
Proj2 % (_ :**: f2) = f2
data f1 :***: f2 where (:***:) :: (Functor f1, Functor f2) => f1 -> f2 -> f1 :***: f2
| @f1 :* * * : is the product of the functors @f1@ and @f2@.
instance (Functor f1, Functor f2) => Functor (f1 :***: f2) where
type Dom (f1 :***: f2) = Dom f1 :**: Dom f2
type Cod (f1 :***: f2) = Cod f1 :**: Cod f2
type (f1 :***: f2) :% (a1, a2) = (f1 :% a1, f2 :% a2)
(g1 :***: g2) % (f1 :**: f2) = (g1 % f1) :**: (g2 % f2)
data DiagProd (k :: Type -> Type -> Type) = DiagProd
-- | 'DiagProd' is the diagonal functor for products.
instance Category k => Functor (DiagProd k) where
type Dom (DiagProd k) = k
type Cod (DiagProd k) = k :**: k
type DiagProd k :% a = (a, a)
DiagProd % f = f :**: f
type Tuple1 c1 c2 a = (Const c2 c1 a :***: Id c2) :.: DiagProd c2
-- | 'Tuple1' tuples with a fixed object on the left.
pattern Tuple1 :: (Category c1, Category c2) => Obj c1 a -> Tuple1 c1 c2 a
pattern Tuple1 a = (Const a :***: Id) :.: DiagProd
type Swap (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) = (Proj2 c1 c2 :***: Proj1 c1 c2) :.: DiagProd (c1 :**: c2)
| ' swap ' swaps the 2 categories of the product of categories .
pattern Swap :: (Category c1, Category c2) => Swap c1 c2
pattern Swap = (Proj2 :***: Proj1) :.: DiagProd
type Tuple2 c1 c2 a = Swap c2 c1 :.: Tuple1 c2 c1 a
-- | 'Tuple2' tuples with a fixed object on the right.
pattern Tuple2 :: (Category c1, Category c2) => Obj c2 a -> Tuple2 c1 c2 a
pattern Tuple2 a = Swap :.: Tuple1 a
data Hom (k :: Type -> Type -> Type) = Hom
| The functor , Hom(--,-- ) , a bifunctor contravariant in its first argument and covariant in its second argument .
instance Category k => Functor (Hom k) where
type Dom (Hom k) = Op k :**: k
type Cod (Hom k) = (->)
type (Hom k) :% (a1, a2) = k a1 a2
Hom % (Op f1 :**: f2) = \g -> f2 . g . f1
type x :*-: k = Hom k :.: Tuple1 (Op k) k x
-- | The covariant functor Hom(X,--)
pattern HomX_ :: Category k => Obj k x -> x :*-: k
pattern HomX_ x = Hom :.: Tuple1 (Op x)
type k :-*: x = Hom k :.: Tuple2 (Op k) k x
| The contravariant functor Hom(--,X )
pattern Hom_X :: Category k => Obj k x -> k :-*: x
pattern Hom_X x = Hom :.: Tuple2 x
| null | https://raw.githubusercontent.com/sjoerdvisscher/data-category/ee9960c9aa3cd5a8cc20c5f430ab63dc8004632d/Data/Category/Functor.hs | haskell | ---------------------------------------------------------------------------
|
Module : Data.Category.Functor
License : BSD-style (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
---------------------------------------------------------------------------
* Cat
* Functors
** Functor instances
*** Related to the product category
| Functors map objects and arrows.
| The domain, or source category, of the functor.
| The codomain, or target category, of the functor.
| @:%@ maps objects.
| @%@ maps arrows.
| Functors are arrows in the category Cat.
| @Cat@ is the category with categories as objects and funtors as arrows.
| The identity functor on k
| The constant functor.
| The constant functor with the same domain and codomain as f.
| The @x = Op (Op x)@ functor.
| A functor wrapper in case of conflicting family instance declarations
| 'DiagProd' is the diagonal functor for products.
| 'Tuple1' tuples with a fixed object on the left.
| 'Tuple2' tuples with a fixed object on the right.
,-- ) , a bifunctor contravariant in its first argument and covariant in its second argument .
| The covariant functor Hom(X,--)
,X ) | # LANGUAGE
GADTs
, PolyKinds
, RankNTypes
, , NoImplicitPrelude
, TypeOperators
, TypeFamilies
, PatternSynonyms
, FlexibleContexts
, FlexibleInstances
, UndecidableInstances
, GeneralizedNewtypeDeriving
#
GADTs
, PolyKinds
, RankNTypes
, ConstraintKinds
, NoImplicitPrelude
, TypeOperators
, TypeFamilies
, PatternSynonyms
, FlexibleContexts
, FlexibleInstances
, UndecidableInstances
, GeneralizedNewtypeDeriving
#-}
module Data.Category.Functor (
Cat(..)
, Functor(..)
, FunctorOf
, Id(..)
, (:.:)(..)
, Const(..), ConstF
, OpOp(..)
, OpOpInv(..)
, Any(..)
, Proj1(..)
, Proj2(..)
, (:***:)(..)
, DiagProd(..)
, Tuple1, pattern Tuple1
, Tuple2, pattern Tuple2
, Swap, pattern Swap
* * * functors
, Hom(..)
, (:*-:), pattern HomX_
, (:-*:), pattern Hom_X
) where
import Data.Kind (Type)
import Data.Category
import Data.Category.Product
infixr 9 %
infixr 9 :%
class (Category (Dom ftag), Category (Cod ftag)) => Functor ftag where
type Dom ftag :: Type -> Type -> Type
type Cod ftag :: Type -> Type -> Type
type ftag :% a :: Type
(%) :: ftag -> Dom ftag a b -> Cod ftag (ftag :% a) (ftag :% b)
type FunctorOf a b t = (Functor t, Dom t ~ a, Cod t ~ b)
data Cat :: (Type -> Type -> Type) -> (Type -> Type -> Type) -> Type where
CatA :: (Functor ftag, Category (Dom ftag), Category (Cod ftag)) => ftag -> Cat (Dom ftag) (Cod ftag)
instance Category Cat where
src (CatA _) = CatA Id
tgt (CatA _) = CatA Id
CatA f1 . CatA f2 = CatA (f1 :.: f2)
data Id (k :: Type -> Type -> Type) = Id
instance Category k => Functor (Id k) where
type Dom (Id k) = k
type Cod (Id k) = k
type Id k :% a = a
_ % f = f
data (g :.: h) where
(:.:) :: (Functor g, Functor h, Cod h ~ Dom g) => g -> h -> g :.: h
| The composition of two functors .
instance (Category (Cod g), Category (Dom h)) => Functor (g :.: h) where
type Dom (g :.: h) = Dom h
type Cod (g :.: h) = Cod g
type (g :.: h) :% a = g :% (h :% a)
(g :.: h) % f = g % (h % f)
data Const (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) x where
Const :: Obj c2 x -> Const c1 c2 x
instance (Category c1, Category c2) => Functor (Const c1 c2 x) where
type Dom (Const c1 c2 x) = c1
type Cod (Const c1 c2 x) = c2
type Const c1 c2 x :% a = x
Const x % _ = x
type ConstF f = Const (Dom f) (Cod f)
data OpOp (k :: Type -> Type -> Type) = OpOp
| The @Op ( Op x ) = functor .
instance Category k => Functor (OpOp k) where
type Dom (OpOp k) = Op (Op k)
type Cod (OpOp k) = k
type OpOp k :% a = a
OpOp % Op (Op f) = f
data OpOpInv (k :: Type -> Type -> Type) = OpOpInv
instance Category k => Functor (OpOpInv k) where
type Dom (OpOpInv k) = k
type Cod (OpOpInv k) = Op (Op k)
type OpOpInv k :% a = a
OpOpInv % f = Op (Op f)
newtype Any f = Any f deriving Functor
data Proj1 (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) = Proj1
| ' Proj1 ' is a bifunctor that projects out the first component of a product .
instance (Category c1, Category c2) => Functor (Proj1 c1 c2) where
type Dom (Proj1 c1 c2) = c1 :**: c2
type Cod (Proj1 c1 c2) = c1
type Proj1 c1 c2 :% (a1, a2) = a1
Proj1 % (f1 :**: _) = f1
data Proj2 (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) = Proj2
| ' Proj2 ' is a bifunctor that projects out the second component of a product .
instance (Category c1, Category c2) => Functor (Proj2 c1 c2) where
type Dom (Proj2 c1 c2) = c1 :**: c2
type Cod (Proj2 c1 c2) = c2
type Proj2 c1 c2 :% (a1, a2) = a2
Proj2 % (_ :**: f2) = f2
data f1 :***: f2 where (:***:) :: (Functor f1, Functor f2) => f1 -> f2 -> f1 :***: f2
| @f1 :* * * : is the product of the functors @f1@ and @f2@.
instance (Functor f1, Functor f2) => Functor (f1 :***: f2) where
type Dom (f1 :***: f2) = Dom f1 :**: Dom f2
type Cod (f1 :***: f2) = Cod f1 :**: Cod f2
type (f1 :***: f2) :% (a1, a2) = (f1 :% a1, f2 :% a2)
(g1 :***: g2) % (f1 :**: f2) = (g1 % f1) :**: (g2 % f2)
data DiagProd (k :: Type -> Type -> Type) = DiagProd
instance Category k => Functor (DiagProd k) where
type Dom (DiagProd k) = k
type Cod (DiagProd k) = k :**: k
type DiagProd k :% a = (a, a)
DiagProd % f = f :**: f
type Tuple1 c1 c2 a = (Const c2 c1 a :***: Id c2) :.: DiagProd c2
pattern Tuple1 :: (Category c1, Category c2) => Obj c1 a -> Tuple1 c1 c2 a
pattern Tuple1 a = (Const a :***: Id) :.: DiagProd
type Swap (c1 :: Type -> Type -> Type) (c2 :: Type -> Type -> Type) = (Proj2 c1 c2 :***: Proj1 c1 c2) :.: DiagProd (c1 :**: c2)
| ' swap ' swaps the 2 categories of the product of categories .
pattern Swap :: (Category c1, Category c2) => Swap c1 c2
pattern Swap = (Proj2 :***: Proj1) :.: DiagProd
type Tuple2 c1 c2 a = Swap c2 c1 :.: Tuple1 c2 c1 a
pattern Tuple2 :: (Category c1, Category c2) => Obj c2 a -> Tuple2 c1 c2 a
pattern Tuple2 a = Swap :.: Tuple1 a
data Hom (k :: Type -> Type -> Type) = Hom
instance Category k => Functor (Hom k) where
type Dom (Hom k) = Op k :**: k
type Cod (Hom k) = (->)
type (Hom k) :% (a1, a2) = k a1 a2
Hom % (Op f1 :**: f2) = \g -> f2 . g . f1
type x :*-: k = Hom k :.: Tuple1 (Op k) k x
pattern HomX_ :: Category k => Obj k x -> x :*-: k
pattern HomX_ x = Hom :.: Tuple1 (Op x)
type k :-*: x = Hom k :.: Tuple2 (Op k) k x
pattern Hom_X :: Category k => Obj k x -> k :-*: x
pattern Hom_X x = Hom :.: Tuple2 x
|
036e22b39f98a9718e4f8def3fa5d6692752df3667407dcc610507cdfd23032f | philnguyen/soft-contract | ex-4.rkt | #lang racket/base
(require soft-contract/fake-contract)
(define (p m n r)
(cond [(> r 0) (p m (- r 1) n)]
[(> n 0) (p r (- n 1) m)]
[else m]))
(provide
(contract-out
[p (exact-nonnegative-integer? exact-nonnegative-integer? exact-nonnegative-integer? . -> . exact-nonnegative-integer? #:total? #t)]))
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/safe/termination/fo-sc/ex-4.rkt | racket | #lang racket/base
(require soft-contract/fake-contract)
(define (p m n r)
(cond [(> r 0) (p m (- r 1) n)]
[(> n 0) (p r (- n 1) m)]
[else m]))
(provide
(contract-out
[p (exact-nonnegative-integer? exact-nonnegative-integer? exact-nonnegative-integer? . -> . exact-nonnegative-integer? #:total? #t)]))
| |
d9fb5fbd52a5044cba85b0d215275290a0b76ef8e36be18b15a00a8571e3c266 | Eonblast/Scalaxis | api_tx_SUITE.erl | 2011 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
@author < >
@author < >
%% @version $Id$
-module(api_tx_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
all() -> [new_tlog_0,
req_list_2,
read_2,
write_3,
commit_1,
read_1,
write_2,
test_and_set_3,
conflicting_tx,
conflicting_tx2,
write2_read2,
multi_write,
write_test_race_mult_rings,
tester_encode_decode
].
suite() -> [ {timetrap, {seconds, 40}} ].
init_per_suite(Config) ->
unittest_helper:init_per_suite(Config).
end_per_suite(Config) ->
_ = unittest_helper:end_per_suite(Config),
ok.
init_per_testcase(TestCase, Config) ->
case TestCase of
write_test_race_mult_rings -> %% this case creates its own ring
Config;
tester_encode_decode -> %% this case does not need a ring
Config;
_ ->
%% stop ring from previous test case (it may have run into a timeout
unittest_helper:stop_ring(),
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(4, [{config, [{log_path, PrivDir}]}]),
Config
end.
end_per_testcase(_TestCase, Config) ->
unittest_helper:stop_ring(),
Config.
new_tlog_0(_Config) ->
?equals(api_tx:new_tlog(), []),
ok.
req_list_2(_Config) ->
EmptyTLog = api_tx:new_tlog(),
%% execute empty request list
?equals(api_tx:req_list(EmptyTLog, []), {[], []}),
%% write new item
?equals_pattern(api_tx:req_list(EmptyTLog,
[{write, "req_list_2_B", 7}, {commit}]),
{_TLog, [_WriteRes = {ok}, _CommitRes = {ok}]}),
%% read existing item
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "req_list_2_B"}, {commit}]),
{_TLog, [_ReadRes = {ok, _ReadVal=7}, _CommitRes = {ok}]}),
%% read non-existing item
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "non-existing"}, {commit}]),
{_TLog, [_ReadRes = {fail, not_found},
%% allow test for existance of a key to be ok
_CommitRes = {ok}]}),
%% read non-existing item and write to that item afterwards
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "non-existing1"},
{write, "non-existing1", "value"},
{commit}]),
{_TLog, [_ReadRes = {fail, not_found},
_WriteRes = {ok},
_CommitRes = {ok}]}),
%% exec more complex transaction with repeated requests
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "B"}, {read, "B"},
{write, "A", 8}, {read, "A"}, {read, "A"},
{read, "A"}, {write, "B", 9},
{commit}]),
{_TLog, [{fail,not_found}, {fail,not_found},
{ok}, {ok, 8}, {ok, 8},
{ok, 8}, {ok},
{ok}]}),
%% exec empty commit
?equals_pattern(api_tx:req_list(EmptyTLog, [{commit}]),
{_TLog, [{ok}]}),
%% exec empty double commit
?equals_pattern(api_tx:req_list(EmptyTLog, [{commit}, {commit}]),
{_TLog, [{fail, abort}, {fail, abort}]}),
%% try commit not as last operation in request list
?equals_pattern(api_tx:req_list(EmptyTLog, [{commit}, {read, "A"}]),
{_TLog, [{fail, abort}, {ok, 8}]}),
%% try commit not as last operation in request list with longer list
?equals_pattern(api_tx:req_list(EmptyTLog,
[{commit}, {read, "A"}, {read, "B"}]),
{_TLog, [{fail, abort}, {ok, 8}, {ok,9}]}),
%% ops based on tlog
{NonExistReadTLog, _Res1} = api_tx:read(EmptyTLog, "req_list_2_C"),
%% write new item which is already in tlog
?equals_pattern(api_tx:req_list(NonExistReadTLog,
[{write, "req_list_2_C", 42}, {commit}]),
{_TLog, [_WriteRes = {ok}, _CommitRes = {ok}]}),
%% read existing item which is already in tlog
{ExistReadTLog, _Res2} = api_tx:read(EmptyTLog, "req_list_2_C"),
?equals_pattern(api_tx:req_list(ExistReadTLog,
[{read, "req_list_2_C"}, {commit}]),
{_TLog, [_ReadRes = {ok, _ReadVal=42}, _CommitRes = {ok}]}),
%% read non-existing item
{NonExistReadTLog2, _Res3} = api_tx:read(EmptyTLog, "non-existing"),
?equals_pattern(api_tx:req_list(NonExistReadTLog2,
[{read, "non-existing"}, {commit}]),
{_TLog, [_ReadRes = {fail, not_found},
%% allow test for existance of a key to be ok
_CommitRes = {ok}]}),
ok.
read_2(_Config) ->
_ = api_tx:write("A", 7),
%% read existing key
?equals_pattern(api_tx:read(api_tx:new_tlog(), "A"),
{_, {ok, 7}}),
%% read non existing key
?equals_pattern(api_tx:read(api_tx:new_tlog(), "non-existing"),
{_, {fail, not_found}}),
ok.
write_3(_Config) ->
%% write new key
?equals_pattern(api_tx:write(api_tx:new_tlog(), "write_3_newkey", 7),
{_, {ok}}),
%% modify existing key
?equals_pattern(api_tx:write(api_tx:new_tlog(), "write_3_newkey", 8),
{_, {ok}}),
%% write a key that is already in tlog
{TLogA, _} = api_tx:read(api_tx:new_tlog(), "write_3_newkey"),
?equals_pattern(api_tx:write(TLogA, "write_3_newkey", 9), {_, {ok}}),
%% write key that does not exist and the read in tlog failed
{TLogB, {fail, not_found}} =
api_tx:read(api_tx:new_tlog(), "write_3_newkey2"),
?equals_pattern(api_tx:write(TLogB, "write_3_newkey2", 9), {_, {ok}}),
ok.
commit_1(_Config) ->
EmptyTLog = api_tx:new_tlog(),
%% commit empty tlog
?equals(api_tx:commit(EmptyTLog), {ok}),
%% commit a tlog
{WriteTLog, _} = api_tx:write(api_tx:new_tlog(), "commit_1_A", 7),
?equals(api_tx:commit(WriteTLog), {ok}),
_ = api_tx:write("commit_1_B", 7),
{ReadTLog, _} = api_tx:read(api_tx:new_tlog(), "commit_1_B"),
?equals(api_tx:commit(ReadTLog), {ok}),
%% commit a timedout TLog
TimeoutReadTLog =
[ tx_tlog:set_entry_status(X, {fail, timeout}) || X <- ReadTLog ],
?equals(api_tx:commit(TimeoutReadTLog), {fail, abort}),
{WriteTLog2, _} = api_tx:write(api_tx:new_tlog(), "commit_1_C", 7),
TimeoutWriteTLog =
[ tx_tlog:set_entry_status(X, {fail, timeout}) || X <- WriteTLog2 ],
?equals(api_tx:commit(TimeoutWriteTLog), {fail, abort}),
%% commit a non-existing tlog
{NonExistReadTLog, _} = api_tx:read(EmptyTLog, "non-existing"),
%% allow test for existance of a key to be ok
?equals(api_tx:commit(NonExistReadTLog), {ok}),
ok.
read_1(_Config) ->
?equals(api_tx:read("non-existing"), {fail, not_found}),
?equals(api_tx:read("read_1_ReadKey"), {fail, not_found}),
?equals(api_tx:write("read_1_ReadKey", "IsSet"), {ok}),
?equals(api_tx:read("read_1_ReadKey"), {ok, "IsSet"}),
ok.
write_2(_Config) ->
?equals(api_tx:write("write_2_WriteKey", "Value"), {ok}),
?equals(api_tx:read("write_2_WriteKey"), {ok, "Value"}),
?equals(api_tx:write("write_2_WriteKey", "Value2"), {ok}),
?equals(api_tx:read("write_2_WriteKey"), {ok, "Value2"}),
%% invalid key
try ?RT:hash_key([a,b,c]) of
_ -> ?equals(catch api_tx:write([a,b,c], "Value"), {ok})
catch
error:badarg ->
?equals_pattern(catch api_tx:write([a,b,c], "Value"), {'EXIT',{badarg, _}})
end,
ok.
test_and_set_3(_Config) ->
?equals(api_tx:test_and_set("test_and_set_3", "Value", "NextValue"),
{fail, not_found}),
?equals(api_tx:write("test_and_set_3", "Value"), {ok}),
?equals(api_tx:test_and_set("test_and_set_3", "Value", "NextValue"), {ok}),
?equals(api_tx:test_and_set("test_and_set_3", "wrong", "NewValue"),
{fail, {key_changed, "NextValue"}}),
ok.
conflicting_tx(_Config) ->
EmptyTLog = api_tx:new_tlog(),
ops with other interleaving tx
%% prepare an account
_ = api_tx:write("Account A", 100),
%% Tx1: read the balance and later try to modify it
{Tx1TLog, {ok, Bal1}} = api_tx:read(EmptyTLog, "Account A"),
%% Tx3: read the balance and later try to commit the read
{Tx3TLog, {ok, _Bal3}} = api_tx:read(EmptyTLog, "Account A"),
%% Tx2 reads the balance and increases it
{Tx2TLog, {ok, Bal2}} = api_tx:read(EmptyTLog, "Account A"),
?equals_pattern(
api_tx:req_list(Tx2TLog, [{write, "Account A", Bal2 + 100}, {commit}]),
{_, [_WriteRes = {ok}, _CommitRes = {ok}]}),
%% Tx1 tries to increases it atomically and fails
?equals_pattern(
api_tx:req_list(Tx1TLog, [{write, "Account A", Bal1 + 100}, {commit}]),
{_, [_WriteRes = {ok}, _CommitRes = {fail, abort}]}),
%% Tx3: try to commit the read and fail (value changed in the meantime)
?equals_pattern(api_tx:commit(Tx3TLog), {fail, abort}),
check that two reading transactions can coexist
Tx4 : read the balance and later try to commit the read
{Tx4TLog, {ok, _Bal4}} = api_tx:read(EmptyTLog, "Account A"),
%% Tx5: read the balance and commit the read
{Tx5TLog, {ok, _Bal5}} = api_tx:read(EmptyTLog, "Account A"),
?equals_pattern(api_tx:commit(Tx5TLog), {ok}),
Tx4 : try to commit a read and succeed ( no updates in the meantime )
?equals_pattern(api_tx:commit(Tx4TLog), {ok}),
ok.
conflicting_tx2(_Config) ->
%% read non-existing item
{TLog1a, [ReadRes1a]} =
api_tx:req_list([{read, "conflicting_tx2_non-existing"}]),
?equals(ReadRes1a, {fail, not_found}),
?equals(api_tx:commit(TLog1a), {ok}),
_ = api_tx:write("conflicting_tx2_non-existing", "Value"),
%% verify not_found of tlog in commit phase? key now exists!
?equals(api_tx:commit(TLog1a), {fail, abort}),
?equals_pattern(api_tx:req_list(TLog1a,
[{write, "conflicting_tx2_non-existing", "NewValue"},
{commit}]),
{_TLog, [_WriteRes = {ok},
_CommitRes = {fail, abort}]}),
?equals(api_tx:read("conflicting_tx2_non-existing"), {ok, "Value"}),
ok.
write2_read2(_Config) ->
KeyA = "KeyA",
KeyB = "KeyB",
ValueA = "Value1",
ValueB = "Value2",
{TLog1, _} = api_tx:write(api_tx:new_tlog(), KeyA, ValueA),
{TLog2, _} = api_tx:write(TLog1, KeyB, ValueB),
{ok} = api_tx:commit(TLog2),
?equals_pattern(api_tx:req_list([{read, KeyA}, {read, KeyB}, {commit}]),
{_TLog4, [{ok, ValueA}, {ok, ValueB}, {ok}]}),
ok.
multi_write(_Config) ->
Key = "MultiWrite",
Value1 = "Value1",
Value2 = "Value2",
{TLog1, _} = api_tx:write(api_tx:new_tlog(), Key, Value1),
{TLog2, _} = api_tx:write(TLog1, Key, Value2),
?equals(api_tx:commit(TLog2), {ok}),
?equals(api_tx:read(Key), {ok, Value2}),
ok.
@doc Test for api_tx : write taking at least 2s after stopping a ring
%% and starting a new one.
write_test_race_mult_rings(Config) ->
% first ring:
write_test(Config),
% second ring and more:
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config).
-spec write_test(Config::[tuple()]) -> ok.
write_test(Config) ->
OldRegistered = erlang:registered(),
OldProcesses = unittest_helper:get_processes(),
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(1, [{config, [{log_path, PrivDir}, {monitor_perf_interval, 0}]}]),
Self = self(),
BenchPid1 = erlang:spawn(fun() ->
{Time, _} = util:tc(api_tx, write, ["1", 1]),
comm:send_local(Self, {time, Time}),
ct:pal("~.0pus~n", [Time])
end),
receive {time, FirstWriteTime} -> ok
end,
util:wait_for_process_to_die(BenchPid1),
BenchPid2 = erlang:spawn(fun() ->
{Time, _} = util:tc(api_tx, write, ["2", 2]),
comm:send_local(Self, {time, Time}),
ct:pal("~.0pus~n", [Time])
end),
receive {time, SecondWriteTime} -> ok
end,
util:wait_for_process_to_die(BenchPid2),
unittest_helper:check_ring_load(4 * 2),
unittest_helper:check_ring_data(),
unittest_helper:stop_ring(),
%% randoms:stop(), %doesn't matter
_ = inets:stop(),
unittest_helper:kill_new_processes(OldProcesses),
{_, _, OnlyNewReg} =
util:split_unique(OldRegistered, erlang:registered()),
ct:pal("NewReg: ~.0p~n", [OnlyNewReg]),
?equals_pattern(FirstWriteTime, X when X =< 1000000),
?equals_pattern(SecondWriteTime, X when X =< 1000000).
-spec prop_encode_decode(Value::client_value()) -> boolean().
prop_encode_decode(Value) ->
Value =:= api_tx:decode_value(api_tx:encode_value(Value)).
tester_encode_decode(_Config) ->
tester:test(?MODULE, prop_encode_decode, 1, 10000).
| null | https://raw.githubusercontent.com/Eonblast/Scalaxis/10287d11428e627dca8c41c818745763b9f7e8d4/test/api_tx_SUITE.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@version $Id$
this case creates its own ring
this case does not need a ring
stop ring from previous test case (it may have run into a timeout
execute empty request list
write new item
read existing item
read non-existing item
allow test for existance of a key to be ok
read non-existing item and write to that item afterwards
exec more complex transaction with repeated requests
exec empty commit
exec empty double commit
try commit not as last operation in request list
try commit not as last operation in request list with longer list
ops based on tlog
write new item which is already in tlog
read existing item which is already in tlog
read non-existing item
allow test for existance of a key to be ok
read existing key
read non existing key
write new key
modify existing key
write a key that is already in tlog
write key that does not exist and the read in tlog failed
commit empty tlog
commit a tlog
commit a timedout TLog
commit a non-existing tlog
allow test for existance of a key to be ok
invalid key
prepare an account
Tx1: read the balance and later try to modify it
Tx3: read the balance and later try to commit the read
Tx2 reads the balance and increases it
Tx1 tries to increases it atomically and fails
Tx3: try to commit the read and fail (value changed in the meantime)
Tx5: read the balance and commit the read
read non-existing item
verify not_found of tlog in commit phase? key now exists!
and starting a new one.
first ring:
second ring and more:
randoms:stop(), %doesn't matter | 2011 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
@author < >
@author < >
-module(api_tx_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
all() -> [new_tlog_0,
req_list_2,
read_2,
write_3,
commit_1,
read_1,
write_2,
test_and_set_3,
conflicting_tx,
conflicting_tx2,
write2_read2,
multi_write,
write_test_race_mult_rings,
tester_encode_decode
].
suite() -> [ {timetrap, {seconds, 40}} ].
init_per_suite(Config) ->
unittest_helper:init_per_suite(Config).
end_per_suite(Config) ->
_ = unittest_helper:end_per_suite(Config),
ok.
init_per_testcase(TestCase, Config) ->
case TestCase of
Config;
Config;
_ ->
unittest_helper:stop_ring(),
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(4, [{config, [{log_path, PrivDir}]}]),
Config
end.
end_per_testcase(_TestCase, Config) ->
unittest_helper:stop_ring(),
Config.
new_tlog_0(_Config) ->
?equals(api_tx:new_tlog(), []),
ok.
req_list_2(_Config) ->
EmptyTLog = api_tx:new_tlog(),
?equals(api_tx:req_list(EmptyTLog, []), {[], []}),
?equals_pattern(api_tx:req_list(EmptyTLog,
[{write, "req_list_2_B", 7}, {commit}]),
{_TLog, [_WriteRes = {ok}, _CommitRes = {ok}]}),
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "req_list_2_B"}, {commit}]),
{_TLog, [_ReadRes = {ok, _ReadVal=7}, _CommitRes = {ok}]}),
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "non-existing"}, {commit}]),
{_TLog, [_ReadRes = {fail, not_found},
_CommitRes = {ok}]}),
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "non-existing1"},
{write, "non-existing1", "value"},
{commit}]),
{_TLog, [_ReadRes = {fail, not_found},
_WriteRes = {ok},
_CommitRes = {ok}]}),
?equals_pattern(api_tx:req_list(EmptyTLog,
[{read, "B"}, {read, "B"},
{write, "A", 8}, {read, "A"}, {read, "A"},
{read, "A"}, {write, "B", 9},
{commit}]),
{_TLog, [{fail,not_found}, {fail,not_found},
{ok}, {ok, 8}, {ok, 8},
{ok, 8}, {ok},
{ok}]}),
?equals_pattern(api_tx:req_list(EmptyTLog, [{commit}]),
{_TLog, [{ok}]}),
?equals_pattern(api_tx:req_list(EmptyTLog, [{commit}, {commit}]),
{_TLog, [{fail, abort}, {fail, abort}]}),
?equals_pattern(api_tx:req_list(EmptyTLog, [{commit}, {read, "A"}]),
{_TLog, [{fail, abort}, {ok, 8}]}),
?equals_pattern(api_tx:req_list(EmptyTLog,
[{commit}, {read, "A"}, {read, "B"}]),
{_TLog, [{fail, abort}, {ok, 8}, {ok,9}]}),
{NonExistReadTLog, _Res1} = api_tx:read(EmptyTLog, "req_list_2_C"),
?equals_pattern(api_tx:req_list(NonExistReadTLog,
[{write, "req_list_2_C", 42}, {commit}]),
{_TLog, [_WriteRes = {ok}, _CommitRes = {ok}]}),
{ExistReadTLog, _Res2} = api_tx:read(EmptyTLog, "req_list_2_C"),
?equals_pattern(api_tx:req_list(ExistReadTLog,
[{read, "req_list_2_C"}, {commit}]),
{_TLog, [_ReadRes = {ok, _ReadVal=42}, _CommitRes = {ok}]}),
{NonExistReadTLog2, _Res3} = api_tx:read(EmptyTLog, "non-existing"),
?equals_pattern(api_tx:req_list(NonExistReadTLog2,
[{read, "non-existing"}, {commit}]),
{_TLog, [_ReadRes = {fail, not_found},
_CommitRes = {ok}]}),
ok.
read_2(_Config) ->
_ = api_tx:write("A", 7),
?equals_pattern(api_tx:read(api_tx:new_tlog(), "A"),
{_, {ok, 7}}),
?equals_pattern(api_tx:read(api_tx:new_tlog(), "non-existing"),
{_, {fail, not_found}}),
ok.
write_3(_Config) ->
?equals_pattern(api_tx:write(api_tx:new_tlog(), "write_3_newkey", 7),
{_, {ok}}),
?equals_pattern(api_tx:write(api_tx:new_tlog(), "write_3_newkey", 8),
{_, {ok}}),
{TLogA, _} = api_tx:read(api_tx:new_tlog(), "write_3_newkey"),
?equals_pattern(api_tx:write(TLogA, "write_3_newkey", 9), {_, {ok}}),
{TLogB, {fail, not_found}} =
api_tx:read(api_tx:new_tlog(), "write_3_newkey2"),
?equals_pattern(api_tx:write(TLogB, "write_3_newkey2", 9), {_, {ok}}),
ok.
commit_1(_Config) ->
EmptyTLog = api_tx:new_tlog(),
?equals(api_tx:commit(EmptyTLog), {ok}),
{WriteTLog, _} = api_tx:write(api_tx:new_tlog(), "commit_1_A", 7),
?equals(api_tx:commit(WriteTLog), {ok}),
_ = api_tx:write("commit_1_B", 7),
{ReadTLog, _} = api_tx:read(api_tx:new_tlog(), "commit_1_B"),
?equals(api_tx:commit(ReadTLog), {ok}),
TimeoutReadTLog =
[ tx_tlog:set_entry_status(X, {fail, timeout}) || X <- ReadTLog ],
?equals(api_tx:commit(TimeoutReadTLog), {fail, abort}),
{WriteTLog2, _} = api_tx:write(api_tx:new_tlog(), "commit_1_C", 7),
TimeoutWriteTLog =
[ tx_tlog:set_entry_status(X, {fail, timeout}) || X <- WriteTLog2 ],
?equals(api_tx:commit(TimeoutWriteTLog), {fail, abort}),
{NonExistReadTLog, _} = api_tx:read(EmptyTLog, "non-existing"),
?equals(api_tx:commit(NonExistReadTLog), {ok}),
ok.
read_1(_Config) ->
?equals(api_tx:read("non-existing"), {fail, not_found}),
?equals(api_tx:read("read_1_ReadKey"), {fail, not_found}),
?equals(api_tx:write("read_1_ReadKey", "IsSet"), {ok}),
?equals(api_tx:read("read_1_ReadKey"), {ok, "IsSet"}),
ok.
write_2(_Config) ->
?equals(api_tx:write("write_2_WriteKey", "Value"), {ok}),
?equals(api_tx:read("write_2_WriteKey"), {ok, "Value"}),
?equals(api_tx:write("write_2_WriteKey", "Value2"), {ok}),
?equals(api_tx:read("write_2_WriteKey"), {ok, "Value2"}),
try ?RT:hash_key([a,b,c]) of
_ -> ?equals(catch api_tx:write([a,b,c], "Value"), {ok})
catch
error:badarg ->
?equals_pattern(catch api_tx:write([a,b,c], "Value"), {'EXIT',{badarg, _}})
end,
ok.
test_and_set_3(_Config) ->
?equals(api_tx:test_and_set("test_and_set_3", "Value", "NextValue"),
{fail, not_found}),
?equals(api_tx:write("test_and_set_3", "Value"), {ok}),
?equals(api_tx:test_and_set("test_and_set_3", "Value", "NextValue"), {ok}),
?equals(api_tx:test_and_set("test_and_set_3", "wrong", "NewValue"),
{fail, {key_changed, "NextValue"}}),
ok.
conflicting_tx(_Config) ->
EmptyTLog = api_tx:new_tlog(),
ops with other interleaving tx
_ = api_tx:write("Account A", 100),
{Tx1TLog, {ok, Bal1}} = api_tx:read(EmptyTLog, "Account A"),
{Tx3TLog, {ok, _Bal3}} = api_tx:read(EmptyTLog, "Account A"),
{Tx2TLog, {ok, Bal2}} = api_tx:read(EmptyTLog, "Account A"),
?equals_pattern(
api_tx:req_list(Tx2TLog, [{write, "Account A", Bal2 + 100}, {commit}]),
{_, [_WriteRes = {ok}, _CommitRes = {ok}]}),
?equals_pattern(
api_tx:req_list(Tx1TLog, [{write, "Account A", Bal1 + 100}, {commit}]),
{_, [_WriteRes = {ok}, _CommitRes = {fail, abort}]}),
?equals_pattern(api_tx:commit(Tx3TLog), {fail, abort}),
check that two reading transactions can coexist
Tx4 : read the balance and later try to commit the read
{Tx4TLog, {ok, _Bal4}} = api_tx:read(EmptyTLog, "Account A"),
{Tx5TLog, {ok, _Bal5}} = api_tx:read(EmptyTLog, "Account A"),
?equals_pattern(api_tx:commit(Tx5TLog), {ok}),
Tx4 : try to commit a read and succeed ( no updates in the meantime )
?equals_pattern(api_tx:commit(Tx4TLog), {ok}),
ok.
conflicting_tx2(_Config) ->
{TLog1a, [ReadRes1a]} =
api_tx:req_list([{read, "conflicting_tx2_non-existing"}]),
?equals(ReadRes1a, {fail, not_found}),
?equals(api_tx:commit(TLog1a), {ok}),
_ = api_tx:write("conflicting_tx2_non-existing", "Value"),
?equals(api_tx:commit(TLog1a), {fail, abort}),
?equals_pattern(api_tx:req_list(TLog1a,
[{write, "conflicting_tx2_non-existing", "NewValue"},
{commit}]),
{_TLog, [_WriteRes = {ok},
_CommitRes = {fail, abort}]}),
?equals(api_tx:read("conflicting_tx2_non-existing"), {ok, "Value"}),
ok.
write2_read2(_Config) ->
KeyA = "KeyA",
KeyB = "KeyB",
ValueA = "Value1",
ValueB = "Value2",
{TLog1, _} = api_tx:write(api_tx:new_tlog(), KeyA, ValueA),
{TLog2, _} = api_tx:write(TLog1, KeyB, ValueB),
{ok} = api_tx:commit(TLog2),
?equals_pattern(api_tx:req_list([{read, KeyA}, {read, KeyB}, {commit}]),
{_TLog4, [{ok, ValueA}, {ok, ValueB}, {ok}]}),
ok.
multi_write(_Config) ->
Key = "MultiWrite",
Value1 = "Value1",
Value2 = "Value2",
{TLog1, _} = api_tx:write(api_tx:new_tlog(), Key, Value1),
{TLog2, _} = api_tx:write(TLog1, Key, Value2),
?equals(api_tx:commit(TLog2), {ok}),
?equals(api_tx:read(Key), {ok, Value2}),
ok.
@doc Test for api_tx : write taking at least 2s after stopping a ring
write_test_race_mult_rings(Config) ->
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config),
write_test(Config).
-spec write_test(Config::[tuple()]) -> ok.
write_test(Config) ->
OldRegistered = erlang:registered(),
OldProcesses = unittest_helper:get_processes(),
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(1, [{config, [{log_path, PrivDir}, {monitor_perf_interval, 0}]}]),
Self = self(),
BenchPid1 = erlang:spawn(fun() ->
{Time, _} = util:tc(api_tx, write, ["1", 1]),
comm:send_local(Self, {time, Time}),
ct:pal("~.0pus~n", [Time])
end),
receive {time, FirstWriteTime} -> ok
end,
util:wait_for_process_to_die(BenchPid1),
BenchPid2 = erlang:spawn(fun() ->
{Time, _} = util:tc(api_tx, write, ["2", 2]),
comm:send_local(Self, {time, Time}),
ct:pal("~.0pus~n", [Time])
end),
receive {time, SecondWriteTime} -> ok
end,
util:wait_for_process_to_die(BenchPid2),
unittest_helper:check_ring_load(4 * 2),
unittest_helper:check_ring_data(),
unittest_helper:stop_ring(),
_ = inets:stop(),
unittest_helper:kill_new_processes(OldProcesses),
{_, _, OnlyNewReg} =
util:split_unique(OldRegistered, erlang:registered()),
ct:pal("NewReg: ~.0p~n", [OnlyNewReg]),
?equals_pattern(FirstWriteTime, X when X =< 1000000),
?equals_pattern(SecondWriteTime, X when X =< 1000000).
-spec prop_encode_decode(Value::client_value()) -> boolean().
prop_encode_decode(Value) ->
Value =:= api_tx:decode_value(api_tx:encode_value(Value)).
tester_encode_decode(_Config) ->
tester:test(?MODULE, prop_encode_decode, 1, 10000).
|
f60da64a81753365c1ca9afd88d9766b30f8a0111af316a549f8adb75e206988 | clj-kondo/clj-kondo | test.clj | (ns clj-kondo.impl.analyzer.test
{:no-doc true}
(:require
[clj-kondo.impl.analyzer.common :as common]
[clj-kondo.impl.macroexpand :as macros]
[clj-kondo.impl.utils :as utils]))
(defn analyze-deftest [ctx expr defined-by resolved-as-ns resolved-as-name]
(common/analyze-defn
ctx
(-> expr
(update
:children
(fn [[_ name-expr & body]]
(list*
(utils/token-node 'clojure.core/defn)
(when name-expr (vary-meta name-expr
assoc
:linted-as (symbol (str resolved-as-ns) (str resolved-as-name))
:defined-by defined-by
:test true))
(utils/vector-node [])
body))))
defined-by))
(defn analyze-cljs-test-async [ctx expr]
(let [[binding-expr & rest-children] (rest (:children expr))
binding-name (:value binding-expr)
ctx (utils/ctx-with-bindings ctx {binding-name (meta binding-expr)})
ctx (assoc-in ctx [:arities binding-name]
{:fixed-arities #{0}})]
(common/analyze-children ctx rest-children)))
(defn analyze-are [ctx resolved-namespace expr]
(let [[_ argv expr & args] (:children expr)
is-expr (utils/list-node [(utils/token-node (symbol (str resolved-namespace) "is")) expr])
new-node (macros/expand-do-template ctx
(utils/list-node (list* nil
argv
is-expr
args)))]
(common/analyze-expression** ctx new-node)))
(defn testing-hook [{:keys [node]}]
(let [[testing testing-str & children] (:children node)
new-node (assoc node :children
(list* (assoc-in testing [:context :clojure.test :testing-str] (utils/sexpr testing-str))
testing-str children))]
{:node new-node}))
| null | https://raw.githubusercontent.com/clj-kondo/clj-kondo/851020d86cb58094f25518a4416d5e7362b4b3d7/src/clj_kondo/impl/analyzer/test.clj | clojure | (ns clj-kondo.impl.analyzer.test
{:no-doc true}
(:require
[clj-kondo.impl.analyzer.common :as common]
[clj-kondo.impl.macroexpand :as macros]
[clj-kondo.impl.utils :as utils]))
(defn analyze-deftest [ctx expr defined-by resolved-as-ns resolved-as-name]
(common/analyze-defn
ctx
(-> expr
(update
:children
(fn [[_ name-expr & body]]
(list*
(utils/token-node 'clojure.core/defn)
(when name-expr (vary-meta name-expr
assoc
:linted-as (symbol (str resolved-as-ns) (str resolved-as-name))
:defined-by defined-by
:test true))
(utils/vector-node [])
body))))
defined-by))
(defn analyze-cljs-test-async [ctx expr]
(let [[binding-expr & rest-children] (rest (:children expr))
binding-name (:value binding-expr)
ctx (utils/ctx-with-bindings ctx {binding-name (meta binding-expr)})
ctx (assoc-in ctx [:arities binding-name]
{:fixed-arities #{0}})]
(common/analyze-children ctx rest-children)))
(defn analyze-are [ctx resolved-namespace expr]
(let [[_ argv expr & args] (:children expr)
is-expr (utils/list-node [(utils/token-node (symbol (str resolved-namespace) "is")) expr])
new-node (macros/expand-do-template ctx
(utils/list-node (list* nil
argv
is-expr
args)))]
(common/analyze-expression** ctx new-node)))
(defn testing-hook [{:keys [node]}]
(let [[testing testing-str & children] (:children node)
new-node (assoc node :children
(list* (assoc-in testing [:context :clojure.test :testing-str] (utils/sexpr testing-str))
testing-str children))]
{:node new-node}))
| |
2d245e2eff28019a4375d30b5526c22e3528303c33cedb47f522101958f2c983 | mirage/ocaml-matrix | query.ml | open Json_encoding
module Directory = struct
module Response = struct
type t = {room_id: string; servers: string list} [@@deriving accessor]
let encoding =
let to_tuple t = t.room_id, t.servers in
let of_tuple v =
let room_id, servers = v in
{room_id; servers} in
let with_tuple =
obj2 (req "room_id" string) (req "servers" (list string)) in
conv to_tuple of_tuple with_tuple
end
end
module Profile = struct
module Response = struct
type t = {avatar_url: string option; displayname: string option}
[@@deriving accessor]
let encoding =
let to_tuple t = t.avatar_url, t.displayname in
let of_tuple v =
let avatar_url, displayname = v in
{avatar_url; displayname} in
let with_tuple =
obj2 (opt "avatar_url" string) (opt "displayname" string) in
conv to_tuple of_tuple with_tuple
end
end
| null | https://raw.githubusercontent.com/mirage/ocaml-matrix/2a58d3d41c43404741f2dfdaf1d2d0f3757b2b69/lib/matrix-stos/query.ml | ocaml | open Json_encoding
module Directory = struct
module Response = struct
type t = {room_id: string; servers: string list} [@@deriving accessor]
let encoding =
let to_tuple t = t.room_id, t.servers in
let of_tuple v =
let room_id, servers = v in
{room_id; servers} in
let with_tuple =
obj2 (req "room_id" string) (req "servers" (list string)) in
conv to_tuple of_tuple with_tuple
end
end
module Profile = struct
module Response = struct
type t = {avatar_url: string option; displayname: string option}
[@@deriving accessor]
let encoding =
let to_tuple t = t.avatar_url, t.displayname in
let of_tuple v =
let avatar_url, displayname = v in
{avatar_url; displayname} in
let with_tuple =
obj2 (opt "avatar_url" string) (opt "displayname" string) in
conv to_tuple of_tuple with_tuple
end
end
| |
1e4b9f2bfa548c6aff1634dc9bb2b12f8576e88efa086da97b743aad2cfe08a4 | Tim-ats-d/Tim-lang | main.ml | let () = Core.Compiler.from_file "bin/foo.applescript"
| null | https://raw.githubusercontent.com/Tim-ats-d/Tim-lang/005d04de07871fe464fadbb80c3050b9bc9b0ace/bin/main.ml | ocaml | let () = Core.Compiler.from_file "bin/foo.applescript"
| |
0733a0811e375c9eada1e82607036fe01f1533fcdcd0a350ebaba6ddd5bbe0e0 | madgen/exalog | Logger.hs | # LANGUAGE GeneralizedNewtypeDeriving #
module Language.Exalog.Logger
( LoggerT
, Logger
, LoggerEnv(..)
, vanillaEnv
, runLoggerT
, whisper
, scold
, scream
, Err.Error
, Err.Severity(..)
) where
import Protolude hiding (log)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import Language.Exalog.Pretty (pp)
import qualified Language.Exalog.Error as Err
import Language.Exalog.SrcLoc (SrcSpan)
newtype LoggerEnv = LoggerEnv
{ -- |Optional because test cases don't have source code
_mSource :: Maybe Text
}
vanillaEnv :: LoggerEnv
vanillaEnv = LoggerEnv
{ _mSource = Nothing
}
newtype LoggerT m a = LoggerT (ReaderT LoggerEnv (MaybeT m) a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader LoggerEnv)
type Logger = LoggerT IO
instance MonadTrans LoggerT where
lift m = LoggerT (lift (lift m))
runLoggerT :: Monad m => LoggerEnv -> LoggerT m a -> m (Maybe a)
runLoggerT env (LoggerT act) = runMaybeT (runReaderT act env)
whisper :: MonadIO m => SrcSpan -> Text -> LoggerT m ()
whisper = common Err.Warning
scold :: MonadIO m => SrcSpan -> Text -> LoggerT m a
scold mSpan msg = do
common Err.User mSpan msg
LoggerT (lift $ MaybeT (pure Nothing))
scream :: MonadIO m => SrcSpan -> Text -> LoggerT m a
scream mSpan msg = do
common Err.Impossible mSpan msg
LoggerT (lift $ MaybeT (pure Nothing))
common :: MonadIO m => Err.Severity -> SrcSpan -> Text -> LoggerT m ()
common severity mSpan msg = do
mSrc <- _mSource <$> ask
let renderedErr = pp $ Err.Error severity mSrc mSpan msg
liftIO $ putStrLn renderedErr
| null | https://raw.githubusercontent.com/madgen/exalog/7d169b066c5c08f2b8e44f5e078df264731ac177/src/Language/Exalog/Logger.hs | haskell | |Optional because test cases don't have source code | # LANGUAGE GeneralizedNewtypeDeriving #
module Language.Exalog.Logger
( LoggerT
, Logger
, LoggerEnv(..)
, vanillaEnv
, runLoggerT
, whisper
, scold
, scream
, Err.Error
, Err.Severity(..)
) where
import Protolude hiding (log)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import Language.Exalog.Pretty (pp)
import qualified Language.Exalog.Error as Err
import Language.Exalog.SrcLoc (SrcSpan)
newtype LoggerEnv = LoggerEnv
_mSource :: Maybe Text
}
vanillaEnv :: LoggerEnv
vanillaEnv = LoggerEnv
{ _mSource = Nothing
}
newtype LoggerT m a = LoggerT (ReaderT LoggerEnv (MaybeT m) a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader LoggerEnv)
type Logger = LoggerT IO
instance MonadTrans LoggerT where
lift m = LoggerT (lift (lift m))
runLoggerT :: Monad m => LoggerEnv -> LoggerT m a -> m (Maybe a)
runLoggerT env (LoggerT act) = runMaybeT (runReaderT act env)
whisper :: MonadIO m => SrcSpan -> Text -> LoggerT m ()
whisper = common Err.Warning
scold :: MonadIO m => SrcSpan -> Text -> LoggerT m a
scold mSpan msg = do
common Err.User mSpan msg
LoggerT (lift $ MaybeT (pure Nothing))
scream :: MonadIO m => SrcSpan -> Text -> LoggerT m a
scream mSpan msg = do
common Err.Impossible mSpan msg
LoggerT (lift $ MaybeT (pure Nothing))
common :: MonadIO m => Err.Severity -> SrcSpan -> Text -> LoggerT m ()
common severity mSpan msg = do
mSrc <- _mSource <$> ask
let renderedErr = pp $ Err.Error severity mSrc mSpan msg
liftIO $ putStrLn renderedErr
|
ad695e19a9d6e85c0480207a5e6b6d7f9f14222e0b2722b3f61d6ec8d7b30790 | akazukin5151/kpxhs | Utils.hs | -- | Utils to navigate around the list
{-# LANGUAGE OverloadedStrings #-}
module ViewEvents.BrowserEvents.Utils
( listMoveWith
, listMovePageUp
, listMovePageDown ) where
import Brick.Util (clamp)
import qualified Brick.Widgets.List as L
import Data.Maybe (fromMaybe)
listMoveWith :: (L.Splittable t, Foldable t)
=> (Int -> Int) -> L.GenericList n t a -> L.GenericList n t a
listMoveWith f l = L.listMoveTo clamped l
where
clamped = clamp 0 (length $ L.listElements l) num
num = f (fromMaybe 0 $ L.listSelected l)
-- Default page up and down functions too fast for me
listMovePageUp :: (L.Splittable t, Foldable t)
=> L.GenericList n t a -> L.GenericList n t a
listMovePageUp = listMoveWith (subtract 5)
listMovePageDown :: (L.Splittable t, Foldable t)
=> L.GenericList n t a -> L.GenericList n t a
listMovePageDown = listMoveWith (5 +)
| null | https://raw.githubusercontent.com/akazukin5151/kpxhs/e3195b0ef5b8bee37593c7a5967ae476b1497fb7/src/kpxhs/ViewEvents/BrowserEvents/Utils.hs | haskell | | Utils to navigate around the list
# LANGUAGE OverloadedStrings #
Default page up and down functions too fast for me |
module ViewEvents.BrowserEvents.Utils
( listMoveWith
, listMovePageUp
, listMovePageDown ) where
import Brick.Util (clamp)
import qualified Brick.Widgets.List as L
import Data.Maybe (fromMaybe)
listMoveWith :: (L.Splittable t, Foldable t)
=> (Int -> Int) -> L.GenericList n t a -> L.GenericList n t a
listMoveWith f l = L.listMoveTo clamped l
where
clamped = clamp 0 (length $ L.listElements l) num
num = f (fromMaybe 0 $ L.listSelected l)
listMovePageUp :: (L.Splittable t, Foldable t)
=> L.GenericList n t a -> L.GenericList n t a
listMovePageUp = listMoveWith (subtract 5)
listMovePageDown :: (L.Splittable t, Foldable t)
=> L.GenericList n t a -> L.GenericList n t a
listMovePageDown = listMoveWith (5 +)
|
82e330687ccdaf5d36a9bdb55e14417c73828e8633000da6ae96dd72d84ebea0 | ucsd-progsys/dsolve | nestedmatch.ml | type t = Node of int
type tt = Pode of (int * int) * (int * int)
let printer z =
match z with Pode (z,z') ->
let (x,y) = z in
let (x',y') = z' in
x + y + x' + y'
let make x =
match x with None -> None | Some x -> Some (Node x)
let check x =
match x with
| Some y -> (match y with Node z -> assert (z>0))
(* UNCOMMENT TO SEE CRASH:
| Some (Node x) -> assert (x > 0) *)
| Some (Node x) -> assert (x > 0)
| _ -> ()
let rec go z =
let x = read_int () in
let y = if x > 0 then Some x else None in
check (make y); go z
let _ = go 12
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/tests/POPL2008/nestedmatch.ml | ocaml | UNCOMMENT TO SEE CRASH:
| Some (Node x) -> assert (x > 0) | type t = Node of int
type tt = Pode of (int * int) * (int * int)
let printer z =
match z with Pode (z,z') ->
let (x,y) = z in
let (x',y') = z' in
x + y + x' + y'
let make x =
match x with None -> None | Some x -> Some (Node x)
let check x =
match x with
| Some y -> (match y with Node z -> assert (z>0))
| Some (Node x) -> assert (x > 0)
| _ -> ()
let rec go z =
let x = read_int () in
let y = if x > 0 then Some x else None in
check (make y); go z
let _ = go 12
|
2d9d4d007083a7f4f5aa3e9cbe25f44855f7798a44adbdd6a72882d6f63e1293 | patricoferris/ocaml-multicore-monorepo | linear_algebra.ml | Code under Apache License 2.0 - Jane Street Group , LLC < >
let col_norm a column =
let acc = ref 0. in
for i = 0 to Array.length a - 1 do
let entry = a.(i).(column) in
acc := !acc +. (entry *. entry)
done ;
sqrt !acc
let col_inner_prod t j1 j2 =
let acc = ref 0. in
for i = 0 to Array.length t - 1 do
acc := !acc +. (t.(i).(j1) *. t.(i).(j2))
done ;
!acc
let qr_in_place a =
let m = Array.length a in
if m = 0 then ([||], [||])
else
let n = Array.length a.(0) in
let r = Array.make_matrix n n 0. in
for j = 0 to n - 1 do
let alpha = col_norm a j in
r.(j).(j) <- alpha ;
let one_over_alpha = 1. /. alpha in
for i = 0 to m - 1 do
a.(i).(j) <- a.(i).(j) *. one_over_alpha
done ;
for j2 = j + 1 to n - 1 do
let c = col_inner_prod a j j2 in
r.(j).(j2) <- c ;
for i = 0 to m - 1 do
a.(i).(j2) <- a.(i).(j2) -. (c *. a.(i).(j))
done
done
done ;
(a, r)
let qr ?(in_place = false) a =
let a = if in_place then a else Array.map Array.copy a in
qr_in_place a
let mul_mv ?(trans = false) a x =
let rows = Array.length a in
if rows = 0 then [||]
else
let cols = Array.length a.(0) in
let m, n, get =
if trans then
let get i j = a.(j).(i) in
(cols, rows, get)
else
let get i j = a.(i).(j) in
(rows, cols, get)
in
if n <> Array.length x then failwith "Dimension mismatch" ;
let result = Array.make m 0. in
for i = 0 to m - 1 do
let v, _ =
Array.fold_left
(fun (acc, j) x -> (acc +. (get i j *. x), succ j))
(0., 0) x
in
result.(i) <- v
done ;
result
let is_nan v = match classify_float v with FP_nan -> true | _ -> false
let error_msg msg = Error (`Msg msg)
let triu_solve r b =
let m = Array.length b in
if m <> Array.length r then
error_msg
"triu_solve R b requires R to be square with same number of rows as b"
else if m = 0 then Ok [||]
else if m <> Array.length r.(0) then
error_msg "triu_solve R b requires R to be a square"
else
let sol = Array.copy b in
for i = m - 1 downto 0 do
sol.(i) <- sol.(i) /. r.(i).(i) ;
for j = 0 to i - 1 do
sol.(j) <- sol.(j) -. (r.(j).(i) *. sol.(i))
done
done ;
if Array.exists is_nan sol then
error_msg "triu_solve detected NaN result"
else Ok sol
let ols ?(in_place = false) a b =
let q, r = qr ~in_place a in
triu_solve r (mul_mv ~trans:true q b)
let make_lr_inputs responder predictors m =
Array.init (Array.length m) (fun i -> Array.map (fun a -> a m.(i)) predictors),
Array.init (Array.length m) (fun i -> responder m.(i))
let r_square m responder predictors r =
let predictors_matrix, responder_vector =
make_lr_inputs responder predictors m
in
let sum_responder = Array.fold_left ( +. ) 0. responder_vector in
let mean = sum_responder /. float (Array.length responder_vector) in
let tot_ss = ref 0. in
let res_ss = ref 0. in
let predicted i =
let x = ref 0. in
for j = 0 to Array.length r - 1 do
x := !x +. (predictors_matrix.(i).(j) *. r.(j))
done ;
!x
in
for i = 0 to Array.length responder_vector - 1 do
tot_ss := !tot_ss +. ((responder_vector.(i) -. mean) ** 2.) ;
res_ss := !res_ss +. ((responder_vector.(i) -. predicted i) ** 2.)
done ;
1. -. (!res_ss /. !tot_ss)
let ols responder predictors m =
let matrix, vector = make_lr_inputs responder predictors m in
match ols ~in_place:true matrix vector with
| Ok estimates ->
let r_square = r_square m responder predictors estimates in
Ok (estimates, r_square)
| Error _ as err -> err
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/eqaf/check/linear_algebra.ml | ocaml | Code under Apache License 2.0 - Jane Street Group , LLC < >
let col_norm a column =
let acc = ref 0. in
for i = 0 to Array.length a - 1 do
let entry = a.(i).(column) in
acc := !acc +. (entry *. entry)
done ;
sqrt !acc
let col_inner_prod t j1 j2 =
let acc = ref 0. in
for i = 0 to Array.length t - 1 do
acc := !acc +. (t.(i).(j1) *. t.(i).(j2))
done ;
!acc
let qr_in_place a =
let m = Array.length a in
if m = 0 then ([||], [||])
else
let n = Array.length a.(0) in
let r = Array.make_matrix n n 0. in
for j = 0 to n - 1 do
let alpha = col_norm a j in
r.(j).(j) <- alpha ;
let one_over_alpha = 1. /. alpha in
for i = 0 to m - 1 do
a.(i).(j) <- a.(i).(j) *. one_over_alpha
done ;
for j2 = j + 1 to n - 1 do
let c = col_inner_prod a j j2 in
r.(j).(j2) <- c ;
for i = 0 to m - 1 do
a.(i).(j2) <- a.(i).(j2) -. (c *. a.(i).(j))
done
done
done ;
(a, r)
let qr ?(in_place = false) a =
let a = if in_place then a else Array.map Array.copy a in
qr_in_place a
let mul_mv ?(trans = false) a x =
let rows = Array.length a in
if rows = 0 then [||]
else
let cols = Array.length a.(0) in
let m, n, get =
if trans then
let get i j = a.(j).(i) in
(cols, rows, get)
else
let get i j = a.(i).(j) in
(rows, cols, get)
in
if n <> Array.length x then failwith "Dimension mismatch" ;
let result = Array.make m 0. in
for i = 0 to m - 1 do
let v, _ =
Array.fold_left
(fun (acc, j) x -> (acc +. (get i j *. x), succ j))
(0., 0) x
in
result.(i) <- v
done ;
result
let is_nan v = match classify_float v with FP_nan -> true | _ -> false
let error_msg msg = Error (`Msg msg)
let triu_solve r b =
let m = Array.length b in
if m <> Array.length r then
error_msg
"triu_solve R b requires R to be square with same number of rows as b"
else if m = 0 then Ok [||]
else if m <> Array.length r.(0) then
error_msg "triu_solve R b requires R to be a square"
else
let sol = Array.copy b in
for i = m - 1 downto 0 do
sol.(i) <- sol.(i) /. r.(i).(i) ;
for j = 0 to i - 1 do
sol.(j) <- sol.(j) -. (r.(j).(i) *. sol.(i))
done
done ;
if Array.exists is_nan sol then
error_msg "triu_solve detected NaN result"
else Ok sol
let ols ?(in_place = false) a b =
let q, r = qr ~in_place a in
triu_solve r (mul_mv ~trans:true q b)
let make_lr_inputs responder predictors m =
Array.init (Array.length m) (fun i -> Array.map (fun a -> a m.(i)) predictors),
Array.init (Array.length m) (fun i -> responder m.(i))
let r_square m responder predictors r =
let predictors_matrix, responder_vector =
make_lr_inputs responder predictors m
in
let sum_responder = Array.fold_left ( +. ) 0. responder_vector in
let mean = sum_responder /. float (Array.length responder_vector) in
let tot_ss = ref 0. in
let res_ss = ref 0. in
let predicted i =
let x = ref 0. in
for j = 0 to Array.length r - 1 do
x := !x +. (predictors_matrix.(i).(j) *. r.(j))
done ;
!x
in
for i = 0 to Array.length responder_vector - 1 do
tot_ss := !tot_ss +. ((responder_vector.(i) -. mean) ** 2.) ;
res_ss := !res_ss +. ((responder_vector.(i) -. predicted i) ** 2.)
done ;
1. -. (!res_ss /. !tot_ss)
(* [ols responder predictors m] fits ordinary least squares over the
   observations in [m], returning [Ok (estimates, r_square)] or the error
   propagated from the solver. Shadows the low-level [ols] above;
   [~in_place:true] is safe because the matrix was freshly built here. *)
let ols responder predictors m =
  let matrix, vector = make_lr_inputs responder predictors m in
  match ols ~in_place:true matrix vector with
  | Ok estimates ->
      let r_square = r_square m responder predictors estimates in
      Ok (estimates, r_square)
  | Error _ as err -> err
| |
8f44894d48f475a7f6a9ce7889f06c99e0274637d321d59eb024696b784cee86 | kadena-io/chainweb-node | InMemTypes.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
| An in - memory backend that does not persist to disk .
module Chainweb.Mempool.InMemTypes
( InMemConfig(..)
, InMemoryMempool(..)
, InMemoryMempoolData(..)
, PendingEntry(..)
, PendingMap
, RecentItem
, RecentLog(..)
, MempoolStats(..)
, BadMap
) where
------------------------------------------------------------------------------
import Control.Concurrent.MVar (MVar)
import Control.DeepSeq
import Data.Aeson
import qualified Data.ByteString.Short as SB
import Data.Function (on)
import Data.HashMap.Strict (HashMap)
import Data.IORef (IORef)
import Data.Ord
import qualified Data.Vector as V
import GHC.Generics
import Numeric.Natural
-- internal imports
import Chainweb.Mempool.CurrentTxs
import Chainweb.Mempool.Mempool
import Chainweb.Time (Micros(..), Time(..))
import Chainweb.Utils (T2)
------------------------------------------------------------------------------
data PendingEntry = PendingEntry
{ _inmemPeGasPrice :: !GasPrice
, _inmemPeGasLimit :: !GasLimit
, _inmemPeBytes :: !SB.ShortByteString
, _inmemPeExpires :: !(Time Micros)
} deriving (Eq, Generic, Show, NFData)
-- | Entries are ordered by gas price wrapped in 'Down': a higher gas
-- price compares as smaller, so higher-paying transactions sort first.
instance Ord PendingEntry where
  compare = compare `on` (Down . _inmemPeGasPrice)
type PendingMap = HashMap TransactionHash PendingEntry
------------------------------------------------------------------------------
| Configuration for in - memory .
data InMemConfig t = InMemConfig {
_inmemTxCfg :: {-# UNPACK #-} !(TransactionConfig t)
, _inmemTxBlockSizeLimit :: !GasLimit
, _inmemTxMinGasPrice :: !GasPrice
, _inmemMaxRecentItems :: {-# UNPACK #-} !Int
, _inmemPreInsertPureChecks :: t -> Either InsertError t
, _inmemPreInsertBatchChecks
:: V.Vector (T2 TransactionHash t)
-> IO (V.Vector (Either (T2 TransactionHash InsertError) (T2 TransactionHash t)))
, _inmemCurrentTxsSize :: !Natural
-- ^ The number of active transactions in validated blocks that can be
-- distinguished. If there are more txs than this number, checks can
-- return false negatives (and a very small amount of false positives).
--
The set uses 16 bytes per entry .
}
------------------------------------------------------------------------------
data InMemoryMempool t = InMemoryMempool {
_inmemCfg :: !(InMemConfig t)
, _inmemDataLock :: !(MVar (InMemoryMempoolData t))
, _inmemNonce :: !ServerNonce
}
------------------------------------------------------------------------------
type BadMap = HashMap TransactionHash (Time Micros)
------------------------------------------------------------------------------
data InMemoryMempoolData t = InMemoryMempoolData {
_inmemPending :: !(IORef PendingMap)
-- ^ The set of pending transactions
, _inmemRecentLog :: !(IORef RecentLog)
-- ^ The log of all recently added transactions. This is used to compute the
-- highwater mark for synchronization with remote mempools.
, _inmemBadMap :: !(IORef BadMap)
-- ^ Non-expired transactions that failed during pact validation and are
-- known to be bad. Those must not be attempted again because the user would
-- possibly have to pay gas for it several times.
, _inmemCurrentTxs :: !(IORef CurrentTxs)
-- ^ The set of non-expired transactions that have been addeded to a block.
-- Transactions are remove from the set of pending transactions when they
-- are added to a block. This set is used to prevent transactions from being
-- re-inserts when synchronizing with nodes that haven't yet validated the
-- block.
}
------------------------------------------------------------------------------
type RecentItem = T2 MempoolTxId TransactionHash
data RecentLog = RecentLog {
_rlNext :: {-# UNPACK #-} !MempoolTxId
, _rlRecent :: !(V.Vector RecentItem)
}
------------------------------------------------------------------------------
data MempoolStats = MempoolStats
{ _mStatsPendingCount :: {-# UNPACK #-} !Int
, _mStatsRecentCount :: {-# UNPACK #-} !Int
, _mStatsBadlistCount :: {-# UNPACK #-} !Int
, _mStatsCurrentTxsCount :: {-# UNPACK #-} !Int
}
deriving (Show, Eq, Ord, Generic)
deriving anyclass (ToJSON, NFData)
| null | https://raw.githubusercontent.com/kadena-io/chainweb-node/17d69b99d79c56e282b72e3f19222974de784e18/src/Chainweb/Mempool/InMemTypes.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE RankNTypes #
----------------------------------------------------------------------------
internal imports
----------------------------------------------------------------------------
----------------------------------------------------------------------------
# UNPACK #
# UNPACK #
^ The number of active transactions in validated blocks that can be
distinguished. If there are more txs than this number, checks can
return false negatives (and a very small amount of false positives).
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
^ The set of pending transactions
^ The log of all recently added transactions. This is used to compute the
highwater mark for synchronization with remote mempools.
^ Non-expired transactions that failed during pact validation and are
known to be bad. Those must not be attempted again because the user would
possibly have to pay gas for it several times.
^ The set of non-expired transactions that have been addeded to a block.
Transactions are remove from the set of pending transactions when they
are added to a block. This set is used to prevent transactions from being
re-inserts when synchronizing with nodes that haven't yet validated the
block.
----------------------------------------------------------------------------
# UNPACK #
----------------------------------------------------------------------------
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK # | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
| An in - memory backend that does not persist to disk .
module Chainweb.Mempool.InMemTypes
( InMemConfig(..)
, InMemoryMempool(..)
, InMemoryMempoolData(..)
, PendingEntry(..)
, PendingMap
, RecentItem
, RecentLog(..)
, MempoolStats(..)
, BadMap
) where
import Control.Concurrent.MVar (MVar)
import Control.DeepSeq
import Data.Aeson
import qualified Data.ByteString.Short as SB
import Data.Function (on)
import Data.HashMap.Strict (HashMap)
import Data.IORef (IORef)
import Data.Ord
import qualified Data.Vector as V
import GHC.Generics
import Numeric.Natural
import Chainweb.Mempool.CurrentTxs
import Chainweb.Mempool.Mempool
import Chainweb.Time (Micros(..), Time(..))
import Chainweb.Utils (T2)
data PendingEntry = PendingEntry
{ _inmemPeGasPrice :: !GasPrice
, _inmemPeGasLimit :: !GasLimit
, _inmemPeBytes :: !SB.ShortByteString
, _inmemPeExpires :: !(Time Micros)
} deriving (Eq, Generic, Show, NFData)
instance Ord PendingEntry where
compare = compare `on` (Down . _inmemPeGasPrice)
type PendingMap = HashMap TransactionHash PendingEntry
| Configuration for in - memory .
data InMemConfig t = InMemConfig {
, _inmemTxBlockSizeLimit :: !GasLimit
, _inmemTxMinGasPrice :: !GasPrice
, _inmemPreInsertPureChecks :: t -> Either InsertError t
, _inmemPreInsertBatchChecks
:: V.Vector (T2 TransactionHash t)
-> IO (V.Vector (Either (T2 TransactionHash InsertError) (T2 TransactionHash t)))
, _inmemCurrentTxsSize :: !Natural
The set uses 16 bytes per entry .
}
data InMemoryMempool t = InMemoryMempool {
_inmemCfg :: !(InMemConfig t)
, _inmemDataLock :: !(MVar (InMemoryMempoolData t))
, _inmemNonce :: !ServerNonce
}
type BadMap = HashMap TransactionHash (Time Micros)
data InMemoryMempoolData t = InMemoryMempoolData {
_inmemPending :: !(IORef PendingMap)
, _inmemRecentLog :: !(IORef RecentLog)
, _inmemBadMap :: !(IORef BadMap)
, _inmemCurrentTxs :: !(IORef CurrentTxs)
}
type RecentItem = T2 MempoolTxId TransactionHash
data RecentLog = RecentLog {
, _rlRecent :: !(V.Vector RecentItem)
}
data MempoolStats = MempoolStats
}
deriving (Show, Eq, Ord, Generic)
deriving anyclass (ToJSON, NFData)
|
a805ba345a669b41462c64ef5e54876e7b5dc735b449665ff07be2286f32c209 | input-output-hk/cardano-ledger-byron | ProtocolVersion.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Cardano.Chain.Update.ProtocolVersion
( ProtocolVersion(..)
)
where
import Cardano.Prelude
import Data.Aeson (ToJSON)
import Formatting (bprint, shown)
import Formatting.Buildable (Buildable(..))
import qualified Prelude
import Cardano.Binary (FromCBOR(..), ToCBOR(..), encodeListLen, enforceSize)
-- | Communication protocol version
data ProtocolVersion = ProtocolVersion
{ pvMajor :: !Word16
, pvMinor :: !Word16
, pvAlt :: !Word8
} deriving (Eq, Generic, Ord)
deriving anyclass (NFData, NoUnexpectedThunks)
instance Show ProtocolVersion where
show pv =
intercalate "." [show (pvMajor pv), show (pvMinor pv), show (pvAlt pv)]
instance Buildable ProtocolVersion where
build = bprint shown
-- Used for debugging purposes only
instance ToJSON ProtocolVersion where
instance ToCBOR ProtocolVersion where
toCBOR pv =
encodeListLen 3 <> toCBOR (pvMajor pv) <> toCBOR (pvMinor pv) <> toCBOR
(pvAlt pv)
encodedSizeExpr f pv = 1
+ encodedSizeExpr f (pvMajor <$> pv)
+ encodedSizeExpr f (pvMinor <$> pv)
+ encodedSizeExpr f (pvAlt <$> pv)
instance FromCBOR ProtocolVersion where
fromCBOR = do
enforceSize "ProtocolVersion" 3
ProtocolVersion <$> fromCBOR <*> fromCBOR <*> fromCBOR
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger-byron/d309449e6c303a9f0dcc8dcf172df6f0b3195ed5/cardano-ledger/src/Cardano/Chain/Update/ProtocolVersion.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
# LANGUAGE TemplateHaskell #
| Communication protocol version
Used for debugging purposes only | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
module Cardano.Chain.Update.ProtocolVersion
( ProtocolVersion(..)
)
where
import Cardano.Prelude
import Data.Aeson (ToJSON)
import Formatting (bprint, shown)
import Formatting.Buildable (Buildable(..))
import qualified Prelude
import Cardano.Binary (FromCBOR(..), ToCBOR(..), encodeListLen, enforceSize)
data ProtocolVersion = ProtocolVersion
{ pvMajor :: !Word16
, pvMinor :: !Word16
, pvAlt :: !Word8
} deriving (Eq, Generic, Ord)
deriving anyclass (NFData, NoUnexpectedThunks)
instance Show ProtocolVersion where
show pv =
intercalate "." [show (pvMajor pv), show (pvMinor pv), show (pvAlt pv)]
instance Buildable ProtocolVersion where
build = bprint shown
instance ToJSON ProtocolVersion where
instance ToCBOR ProtocolVersion where
toCBOR pv =
encodeListLen 3 <> toCBOR (pvMajor pv) <> toCBOR (pvMinor pv) <> toCBOR
(pvAlt pv)
encodedSizeExpr f pv = 1
+ encodedSizeExpr f (pvMajor <$> pv)
+ encodedSizeExpr f (pvMinor <$> pv)
+ encodedSizeExpr f (pvAlt <$> pv)
instance FromCBOR ProtocolVersion where
fromCBOR = do
enforceSize "ProtocolVersion" 3
ProtocolVersion <$> fromCBOR <*> fromCBOR <*> fromCBOR
|
991660fabb810737131f8526f066d5f9e5d3c63018f65f9b7c6271adce205378 | Gbury/dolmen | map.ml |
(* This file is free software, part of dolmen. See file "LICENSE" for more information *)
module type S = sig
type key
type 'a t
val empty : _ t
val find_exn : key -> 'a t -> 'a
(** Exception-raising find function.
@raise Not_found *)
val find_opt : key -> 'a t -> 'a option
(** Option-returning find function. *)
val add : key -> 'a -> 'a t -> 'a t
  (** Add a new binding, shadowing any earlier binding to the same key. *)
val find_add : key -> ('a option -> 'a) -> 'a t -> 'a t
(** Update the value bound to a key. *)
val iter : (key -> 'a -> unit) -> 'a t -> unit
(** Iter on the map. *)
val fold : (key -> 'a -> 'acc -> 'acc) -> 'a t -> 'acc -> 'acc
(** Fold on the map. *)
end
| null | https://raw.githubusercontent.com/Gbury/dolmen/21bfd8d221b39dfd72a7938a09a6b2ae85b3c391/src/interface/map.ml | ocaml | This file is free software, part of dolmen. See file "LICENSE" for more information
* Exception-raising find function.
@raise Not_found
* Option-returning find function.
* Add a new binding, shadowing any earlier binding to the same key.
* Update the value bound to a key.
* Iter on the map.
* Fold on the map. |
module type S = sig
type key
type 'a t
val empty : _ t
val find_exn : key -> 'a t -> 'a
val find_opt : key -> 'a t -> 'a option
val add : key -> 'a -> 'a t -> 'a t
val find_add : key -> ('a option -> 'a) -> 'a t -> 'a t
val iter : (key -> 'a -> unit) -> 'a t -> unit
val fold : (key -> 'a -> 'acc -> 'acc) -> 'a t -> 'acc -> 'acc
end
|
cb0dc8fa47db965392bd468e3b7a573ee27068822ca92fff848809f5ce9aa9e1 | job-streamer/job-streamer-control-bus | token_test.clj | (ns job-streamer.control-bus.component.token-test
(:require [job-streamer.control-bus.component.token :refer :all]
[com.stuartsierra.component :as component]
[clojure.pprint :refer :all]
[clojure.test :refer :all]))
(deftest token-provider-test
(testing "New token"
(let [token-provider (component/start (token-provider-component {:session-timeout (* 30 60)}))
token1 (new-token token-provider "user1")
token2 (new-token token-provider "user1")]
(is (instance? java.util.UUID token1))
(is (instance? java.util.UUID token2))
(is (not= token1 token2))))
(testing "Authenticate"
(let [token-provider (component/start (token-provider-component {:session-timeout (* 30 60)}))
token (new-token token-provider "user1")]
(is (= "user1" (auth-by token-provider token)))))
(testing "Violates the precondition of `auth-by`"
(let [token-provider (component/start (token-provider-component {:session-timeout (* 30 60)}))
token (new-token token-provider "user1")]
(is (thrown? IllegalArgumentException (auth-by token-provider 123))))))
| null | https://raw.githubusercontent.com/job-streamer/job-streamer-control-bus/af38bc8838366c230e4f53f08b447eecc3e794b7/test/clj/job_streamer/control_bus/component/token_test.clj | clojure | (ns job-streamer.control-bus.component.token-test
(:require [job-streamer.control-bus.component.token :refer :all]
[com.stuartsierra.component :as component]
[clojure.pprint :refer :all]
[clojure.test :refer :all]))
(deftest token-provider-test
(testing "New token"
(let [token-provider (component/start (token-provider-component {:session-timeout (* 30 60)}))
token1 (new-token token-provider "user1")
token2 (new-token token-provider "user1")]
(is (instance? java.util.UUID token1))
(is (instance? java.util.UUID token2))
(is (not= token1 token2))))
(testing "Authenticate"
(let [token-provider (component/start (token-provider-component {:session-timeout (* 30 60)}))
token (new-token token-provider "user1")]
(is (= "user1" (auth-by token-provider token)))))
(testing "Violates the precondition of `auth-by`"
(let [token-provider (component/start (token-provider-component {:session-timeout (* 30 60)}))
token (new-token token-provider "user1")]
(is (thrown? IllegalArgumentException (auth-by token-provider 123))))))
| |
45ec3322d5f22e988ebcc88b5ec5bfb47ead9e0a06904434dad4b189ac02bbbc | mks-m/wower | dbc.erl | -module(dbc).
-export([import/1, import_all/0]).
-import(packet_helper, [read_cstring/1]).
-include_lib("kernel/include/file.hrl").
-include("dbc_records.hrl").
-define(I, :32/integer-little).
-define(F, :32/float-little).
%% @type field_atom() = int | float | cstring.
%% @type field_type() = int() | float() | string().
%% @type field_desc() = {atom(), field_atom(), int()}.
%% @spec import_all() -> ok.
import_all() ->
mnesia:start(),
ok = import(dbc_chr_race),
ok.
%% @spec import(atom()) -> ok.
import(Atom) ->
{ok, Name, Struct} = file_info(Atom),
Fields = lists:map(fun({N, _, _}) -> N end, Struct),
mnesia:delete_table(Atom),
mnesia:create_table(Atom, [{attributes, Fields}, {disc_copies, [node()]}]),
ok = import(Atom, Name, Struct),
ok.
%% @spec import(atom(), string(), [field_desc()]) -> ok.
import(Atom, File, Struct) ->
FileName = "dbc/" ++ File ++ ".dbc",
{ok, FileInfo} = file:read_file_info(FileName),
{ok, Dbc} = file:open(FileName, [read, raw, binary]),
{ok, <<(16#43424457)?I>>} = file:read(Dbc, 4),
{ok, <<Records?I, _?I, Size?I, StringSize?I>>} = file:read(Dbc, 16),
{ok, BinData} = file:read(Dbc, FileInfo#file_info.size - StringSize - 20),
{ok, StringData} = file:read(Dbc, StringSize),
read_records(Atom, Records, Struct, Size, BinData, StringData).
%% @spec read_records(atom(), int(), [field_desc()], int(), binary(), binary()) -> ok.
%% Peel one fixed-size record off the front of BinData, decode it with
%% read_record/4, write it to the mnesia table Atom, and recurse until
%% RecordsLeft reaches zero.
read_records(_, 0, _, _, _, _) ->
    ok;
read_records(Atom, RecordsLeft, Struct, Size, BinData, StringData) ->
    {Row, NewBin} = erlang:split_binary(BinData, Size),
    Record = read_record({Atom}, Struct, Row, StringData),
    mnesia:dirty_write(Atom, Record),
    read_records(Atom, RecordsLeft-1, Struct, Size, NewBin, StringData).
%% @spec read_record(tuple(), [field_desc()], binary(), binary()) -> tuple().
read_record(Element, [], _, _) ->
Element;
read_record(Element, [{_, Type, Index}|Struct], Bin, String) ->
Value = read_field(Type, Index, Bin, String),
NewElement = erlang:append_element(Element, Value),
read_record(NewElement, Struct, Bin, String).
%% @spec read_field(field_atom(), int(), binary(), binary()) -> field_type().
%% Fields are 32-bit little-endian slots; Index is the slot number, so
%% the byte offset into the record is Index * 4.
read_field(int, Index, Bin, _) ->
    Offset = Index * 4,
    <<_:Offset/binary, Value?I, _/binary>> = Bin,
    Value;
read_field(float, Index, Bin, _) ->
    Offset = Index * 4,
    <<_:Offset/binary, Value?F, _/binary>> = Bin,
    Value;
%% A cstring field stores an offset into the string block; follow it and
%% read up to the NUL terminator.
read_field(cstring, Index, Bin, String) ->
    Offset = Index * 4,
    <<_:Offset/binary, Value?I, _/binary>> = Bin,
    {_, StringStart} = erlang:split_binary(String, Value),
    {StringValue, _} = read_cstring(StringStart),
    StringValue.
%% @spec file_info(atom()) -> {ok, string(), [field_desc()]}.
file_info(dbc_chr_race) ->
{ok, "ChrRaces",
[{id, int, 0}, {faction_template_id, int, 2},
{male_model_id, int, 4}, {female_model_id, int, 5},
{team_id, int, 8}, {cinematic_id, int, 13},
{name, cstring, 14}, {expansion_required, int, 34}]}.
| null | https://raw.githubusercontent.com/mks-m/wower/ce9724876cf57b67ce72f2a9a6f74bb1ebffd53a/realm/src/dbc.erl | erlang | @type field_atom() = int | float | cstring.
@type field_type() = int() | float() | string().
@type field_desc() = {atom(), field_atom(), int()}.
@spec import(atom()) -> ok.
@spec import(atom(), string(), [field_desc()]) -> ok.
@spec read_records(atom(), int(), [field_desc()], int(), binary(), binary()) -> ok.
@spec file_info(atom()) -> {ok, string(), [field_desc()]}. | -module(dbc).
-export([import/1, import_all/0]).
-import(packet_helper, [read_cstring/1]).
-include_lib("kernel/include/file.hrl").
-include("dbc_records.hrl").
-define(I, :32/integer-little).
-define(F, :32/float-little).
( ) - > ok .
import_all() ->
mnesia:start(),
ok = import(dbc_chr_race),
ok.
import(Atom) ->
{ok, Name, Struct} = file_info(Atom),
Fields = lists:map(fun({N, _, _}) -> N end, Struct),
mnesia:delete_table(Atom),
mnesia:create_table(Atom, [{attributes, Fields}, {disc_copies, [node()]}]),
ok = import(Atom, Name, Struct),
ok.
import(Atom, File, Struct) ->
FileName = "dbc/" ++ File ++ ".dbc",
{ok, FileInfo} = file:read_file_info(FileName),
{ok, Dbc} = file:open(FileName, [read, raw, binary]),
{ok, <<(16#43424457)?I>>} = file:read(Dbc, 4),
{ok, <<Records?I, _?I, Size?I, StringSize?I>>} = file:read(Dbc, 16),
{ok, BinData} = file:read(Dbc, FileInfo#file_info.size - StringSize - 20),
{ok, StringData} = file:read(Dbc, StringSize),
read_records(Atom, Records, Struct, Size, BinData, StringData).
read_records(_, 0, _, _, _, _) ->
ok;
read_records(Atom, RecordsLeft, Struct, Size, BinData, StringData) ->
{Row, NewBin} = erlang:split_binary(BinData, Size),
Record = read_record({Atom}, Struct, Row, StringData),
mnesia:dirty_write(Atom, Record),
read_records(Atom, RecordsLeft-1, Struct, Size, NewBin, StringData).
( ) , [ field_desc ( ) ] , binary ( ) , binary ( ) ) - > tuple ( ) .
read_record(Element, [], _, _) ->
Element;
read_record(Element, [{_, Type, Index}|Struct], Bin, String) ->
Value = read_field(Type, Index, Bin, String),
NewElement = erlang:append_element(Element, Value),
read_record(NewElement, Struct, Bin, String).
( ) , int ( ) , binary ( ) , binary ( ) ) - > field_type ( ) .
read_field(int, Index, Bin, _) ->
Offset = Index * 4,
<<_:Offset/binary, Value?I, _/binary>> = Bin,
Value;
read_field(float, Index, Bin, _) ->
Offset = Index * 4,
<<_:Offset/binary, Value?F, _/binary>> = Bin,
Value;
read_field(cstring, Index, Bin, String) ->
Offset = Index * 4,
<<_:Offset/binary, Value?I, _/binary>> = Bin,
{_, StringStart} = erlang:split_binary(String, Value),
{StringValue, _} = read_cstring(StringStart),
StringValue.
file_info(dbc_chr_race) ->
{ok, "ChrRaces",
[{id, int, 0}, {faction_template_id, int, 2},
{male_model_id, int, 4}, {female_model_id, int, 5},
{team_id, int, 8}, {cinematic_id, int, 13},
{name, cstring, 14}, {expansion_required, int, 34}]}.
|
431f6c009c3df323453bcea7d7a44c8991c4b649990a784f419d9e10abca596b | mirage/qubes-mirage-firewall | unikernel.ml | Copyright ( C ) 2015 , < >
See the README file for details .
See the README file for details. *)
open Lwt
open Qubes
let src = Logs.Src.create "unikernel" ~doc:"Main unikernel code"
module Log = (val Logs.src_log src : Logs.LOG)
module Main (R : Mirage_random.S)(Clock : Mirage_clock.MCLOCK)(Time : Mirage_time.S) = struct
module Uplink = Uplink.Make(R)(Clock)(Time)
module Dns_transport = My_dns.Transport(R)(Clock)(Time)
module Dns_client = Dns_client.Make(Dns_transport)
(* Set up networking and listen for incoming packets. *)
  (* Data plane of the firewall: clear any stale iptables error flag in
     QubesDB, then run the client-network listener and the uplink
     listener concurrently; [Lwt.choose] returns when either does. *)
  let network dns_client dns_responses dns_servers uplink qubesDB router =
    (* Report success *)
    Dao.set_iptables_error qubesDB "" >>= fun () ->
    (* Handle packets from both networks *)
    Lwt.choose [
      Client_net.listen Clock.elapsed_ns dns_client dns_servers qubesDB router;
      Uplink.listen uplink Clock.elapsed_ns dns_responses router
    ]
Main unikernel entry point ( called from auto - generated main.ml ) .
let start _random _clock _time =
let start_time = Clock.elapsed_ns () in
(* Start qrexec agent and QubesDB agent in parallel *)
let qrexec = RExec.connect ~domid:0 () in
let qubesDB = DB.connect ~domid:0 () in
(* Wait for clients to connect *)
qrexec >>= fun qrexec ->
let agent_listener = RExec.listen qrexec Command.handler in
qubesDB >>= fun qubesDB ->
let startup_time =
let (-) = Int64.sub in
let time_in_ns = Clock.elapsed_ns () - start_time in
Int64.to_float time_in_ns /. 1e9
in
Log.info (fun f -> f "QubesDB and qrexec agents connected in %.3f s" startup_time);
(* Watch for shutdown requests from Qubes *)
let shutdown_rq =
Xen_os.Lifecycle.await_shutdown_request () >>= fun (`Poweroff | `Reboot) ->
Lwt.return_unit in
(* Set up networking *)
let max_entries = Key_gen.nat_table_size () in
let nat = My_nat.create ~max_entries in
(* Read network configuration from QubesDB *)
Dao.read_network_config qubesDB >>= fun config ->
Uplink.connect config >>= fun uplink ->
(* Set up client-side networking *)
let client_eth = Client_eth.create
~client_gw:config.Dao.clients_our_ip in
(* Set up routing between networks and hosts *)
let router = Router.create
~client_eth
~uplink:(Uplink.interface uplink)
~nat
in
let send_dns_query = Uplink.send_dns_client_query uplink in
let dns_mvar = Lwt_mvar.create_empty () in
let nameservers = `Udp, [ config.Dao.dns, 53 ; config.Dao.dns2, 53 ] in
let dns_client = Dns_client.create ~nameservers (router, send_dns_query, dns_mvar) in
let dns_servers = [ config.Dao.dns ; config.Dao.dns2 ] in
let net_listener = network (Dns_client.getaddrinfo dns_client Dns.Rr_map.A) dns_mvar dns_servers uplink qubesDB router in
Report memory usage to
Memory_pressure.init ();
(* Run until something fails or we get a shutdown request. *)
Lwt.choose [agent_listener; net_listener; shutdown_rq] >>= fun () ->
(* Give the console daemon time to show any final log messages. *)
Time.sleep_ns (1.0 *. 1e9 |> Int64.of_float)
end
| null | https://raw.githubusercontent.com/mirage/qubes-mirage-firewall/065c8bb69a0bc1d77c07a99ab276782fc4264bae/unikernel.ml | ocaml | Set up networking and listen for incoming packets.
Report success
Handle packets from both networks
Start qrexec agent and QubesDB agent in parallel
Wait for clients to connect
Watch for shutdown requests from Qubes
Set up networking
Read network configuration from QubesDB
Set up client-side networking
Set up routing between networks and hosts
Run until something fails or we get a shutdown request.
Give the console daemon time to show any final log messages. | Copyright ( C ) 2015 , < >
See the README file for details .
See the README file for details. *)
open Lwt
open Qubes
let src = Logs.Src.create "unikernel" ~doc:"Main unikernel code"
module Log = (val Logs.src_log src : Logs.LOG)
module Main (R : Mirage_random.S)(Clock : Mirage_clock.MCLOCK)(Time : Mirage_time.S) = struct
module Uplink = Uplink.Make(R)(Clock)(Time)
module Dns_transport = My_dns.Transport(R)(Clock)(Time)
module Dns_client = Dns_client.Make(Dns_transport)
let network dns_client dns_responses dns_servers uplink qubesDB router =
Dao.set_iptables_error qubesDB "" >>= fun () ->
Lwt.choose [
Client_net.listen Clock.elapsed_ns dns_client dns_servers qubesDB router;
Uplink.listen uplink Clock.elapsed_ns dns_responses router
]
Main unikernel entry point ( called from auto - generated main.ml ) .
let start _random _clock _time =
let start_time = Clock.elapsed_ns () in
let qrexec = RExec.connect ~domid:0 () in
let qubesDB = DB.connect ~domid:0 () in
qrexec >>= fun qrexec ->
let agent_listener = RExec.listen qrexec Command.handler in
qubesDB >>= fun qubesDB ->
let startup_time =
let (-) = Int64.sub in
let time_in_ns = Clock.elapsed_ns () - start_time in
Int64.to_float time_in_ns /. 1e9
in
Log.info (fun f -> f "QubesDB and qrexec agents connected in %.3f s" startup_time);
let shutdown_rq =
Xen_os.Lifecycle.await_shutdown_request () >>= fun (`Poweroff | `Reboot) ->
Lwt.return_unit in
let max_entries = Key_gen.nat_table_size () in
let nat = My_nat.create ~max_entries in
Dao.read_network_config qubesDB >>= fun config ->
Uplink.connect config >>= fun uplink ->
let client_eth = Client_eth.create
~client_gw:config.Dao.clients_our_ip in
let router = Router.create
~client_eth
~uplink:(Uplink.interface uplink)
~nat
in
let send_dns_query = Uplink.send_dns_client_query uplink in
let dns_mvar = Lwt_mvar.create_empty () in
let nameservers = `Udp, [ config.Dao.dns, 53 ; config.Dao.dns2, 53 ] in
let dns_client = Dns_client.create ~nameservers (router, send_dns_query, dns_mvar) in
let dns_servers = [ config.Dao.dns ; config.Dao.dns2 ] in
let net_listener = network (Dns_client.getaddrinfo dns_client Dns.Rr_map.A) dns_mvar dns_servers uplink qubesDB router in
Report memory usage to
Memory_pressure.init ();
Lwt.choose [agent_listener; net_listener; shutdown_rq] >>= fun () ->
Time.sleep_ns (1.0 *. 1e9 |> Int64.of_float)
end
|
950a5105e709f3ef5ae988f0e6d6905351f1523040f99ea1edaa72b273e3c8f6 | LexiFi/menhir | infer.mli | (******************************************************************************)
(* *)
(* *)
, Paris
, PPS , Université Paris Diderot
(* *)
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
(* file LICENSE. *)
(* *)
(******************************************************************************)
open BasicSyntax
(* [ntvar symbol] is the name of the type variable associated with a
nonterminal symbol. *)
val ntvar: string -> IL.typ
(* [infer grammar] analyzes the grammar [grammar] and returns a new
grammar, augmented with a [%type] declaration for every nonterminal
symbol. The [ocamlc] compiler is used to infer types. *)
val infer: grammar -> grammar
(* [depend postprocess grammar] prints (on the standard output channel) the
OCaml dependencies induced by the semantic actions. If [postprocess] is
[true], then ocamldep's output is postprocessed, otherwise it is echoed
unchanged. This function does not return; it terminates the program. *)
val depend: bool -> grammar -> 'never_returns
[ write_query filename grammar ] writes the grammar 's semantic actions to a
mock [ .ml ] file named [ filename ] . This file can then be submitted to
[ ocamlc ] for type inference . See [ --infer - write - query < filename > ] in the
manual .
mock [.ml] file named [filename]. This file can then be submitted to
[ocamlc] for type inference. See [--infer-write-query <filename>] in the
manual. *)
val write_query: string -> grammar -> 'never_returns
[ read_reply filename grammar ] reads the types inferred by OCaml for the
mock [ .ml ] file described above , and returns a new grammar , augmented with
a [ % type ] declaration for every nonterminal symbol .
mock [.ml] file described above, and returns a new grammar, augmented with
a [%type] declaration for every nonterminal symbol. *)
val read_reply: string -> grammar -> grammar
| null | https://raw.githubusercontent.com/LexiFi/menhir/794e64e7997d4d3f91d36dd49aaecc942ea858b7/src/infer.mli | ocaml | ****************************************************************************
file LICENSE.
****************************************************************************
[ntvar symbol] is the name of the type variable associated with a
nonterminal symbol.
[infer grammar] analyzes the grammar [grammar] and returns a new
grammar, augmented with a [%type] declaration for every nonterminal
symbol. The [ocamlc] compiler is used to infer types.
[depend postprocess grammar] prints (on the standard output channel) the
OCaml dependencies induced by the semantic actions. If [postprocess] is
[true], then ocamldep's output is postprocessed, otherwise it is echoed
unchanged. This function does not return; it terminates the program. |
, Paris
, PPS , Université Paris Diderot
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
open BasicSyntax
val ntvar: string -> IL.typ
val infer: grammar -> grammar
val depend: bool -> grammar -> 'never_returns
[ write_query filename grammar ] writes the grammar 's semantic actions to a
mock [ .ml ] file named [ filename ] . This file can then be submitted to
[ ocamlc ] for type inference . See [ --infer - write - query < filename > ] in the
manual .
mock [.ml] file named [filename]. This file can then be submitted to
[ocamlc] for type inference. See [--infer-write-query <filename>] in the
manual. *)
val write_query: string -> grammar -> 'never_returns
[ read_reply filename grammar ] reads the types inferred by OCaml for the
mock [ .ml ] file described above , and returns a new grammar , augmented with
a [ % type ] declaration for every nonterminal symbol .
mock [.ml] file described above, and returns a new grammar, augmented with
a [%type] declaration for every nonterminal symbol. *)
val read_reply: string -> grammar -> grammar
|
e5754e85c43426b5c942bde71f732e7424d4797be1f426a7a56db7632d688f97 | lab-79/dspec | util.clj | (ns dspec.util
(:import (datomic.db DbId)))
(defn db-id?
  "True when x can serve as a database id: a plain integer or a
   datomic.db.DbId tempid."
  [x]
  (or (integer? x)
      (instance? DbId x)))
| null | https://raw.githubusercontent.com/lab-79/dspec/26f88e74066e381c8569d175c1bd5948a8005bd0/test/dspec/util.clj | clojure | (ns dspec.util
(:import (datomic.db DbId)))
(def db-id? #(or (integer? %)
(instance? DbId %)))
| |
2a8c5461580f9fe10b4d8d99f6997eb604b64e3cd6ad2a9d05d33f9ade906cb8 | atolab/apero-net | lcodec.ml | open Locator
open Apero
let decode_locator buf =
let s = decode_string buf in
Locator.of_string s
let encode_locator l =
encode_string (Locator.to_string l)
(* Decode a sequence of locators from [buf] into a [Locators.t].
   NOTE(review): the decoded options are flattened and [Option.get] is
   applied unconditionally, so this presumably fails on a malformed
   locator rather than skipping it — confirm intended behavior. *)
let decode_locators buf =
  let ols = decode_seq decode_locator buf in
  let ls = Option.get @@ Option.flatten ols in
  Locators.of_list ls
let encode_locators ls buf =
let locs = Locators.to_list ls in
encode_seq encode_locator locs buf
| null | https://raw.githubusercontent.com/atolab/apero-net/d21290610d06a36f1ee571dea475f25ed5735151/lib/lcodec.ml | ocaml | open Locator
open Apero
let decode_locator buf =
let s = decode_string buf in
Locator.of_string s
let encode_locator l =
encode_string (Locator.to_string l)
let decode_locators buf =
let ols = decode_seq decode_locator buf in
let ls = Option.get @@ Option.flatten ols in
Locators.of_list ls
let encode_locators ls buf =
let locs = Locators.to_list ls in
encode_seq encode_locator locs buf
| |
47ae9ef9eccbb0a87308dc5f323d55f27433165f9477929b0297a81fc54348e5 | polytypic/par-ml | FibParCutoff.ml | open Par
let n = try int_of_string Sys.argv.(2) with _ -> 30
let cutoff = try int_of_string Sys.argv.(3) with _ -> 20
let rec fib_ser n = if n <= 1 then n else fib_ser (n - 1) + fib_ser (n - 2)
let rec fib n =
if n <= cutoff then fib_ser n
else
let n1, n2 = par (fun () -> fib (n - 1)) (fun () -> fib (n - 2)) in
n1 + n2
let () = Printf.printf "fib %d = %d\n" n (run @@ fun () -> fib n)
| null | https://raw.githubusercontent.com/polytypic/par-ml/17e2ef61b334073cadc4b1d7de823a486bd2e51d/src/test/FibParCutoff.ml | ocaml | open Par
let n = try int_of_string Sys.argv.(2) with _ -> 30
let cutoff = try int_of_string Sys.argv.(3) with _ -> 20
let rec fib_ser n = if n <= 1 then n else fib_ser (n - 1) + fib_ser (n - 2)
let rec fib n =
if n <= cutoff then fib_ser n
else
let n1, n2 = par (fun () -> fib (n - 1)) (fun () -> fib (n - 2)) in
n1 + n2
let () = Printf.printf "fib %d = %d\n" n (run @@ fun () -> fib n)
| |
fa5ddf0d6f8f3a653b696cadb6702711e86eccdb2f72a786b0d4bf45850c79e4 | Breezeemr/hitch | mutable_var.cljc | (ns hitch.selectors.mutable-var
(:require [hitch.oldprotocols :as oldproto]
[hitch.protocol :as proto]))
(defrecord MutableVar [name]
proto/StatefulSelector
(create [selector]
(proto/->StateEffect oldproto/NOT-FOUND-SENTINEL nil nil))
(destroy [selector state]
nil)
proto/CommandableSelector
(command-accumulator [_ state]
state)
(command-step [_ _ event]
(case (first event)
:set-value (second event)
:clear oldproto/NOT-FOUND-SENTINEL))
(command-result [_ state]
(proto/->StateEffect state nil nil))
proto/Selector
(value [_ _ state]
(if (identical? state oldproto/NOT-FOUND-SENTINEL)
(proto/->SelectorUnresolved nil)
(proto/->SelectorValue state nil))))
(def mutable-var ->MutableVar)
| null | https://raw.githubusercontent.com/Breezeemr/hitch/79efea3cbf601a56060237fd9add2c0b68024a03/src/hitch/selectors/mutable_var.cljc | clojure | (ns hitch.selectors.mutable-var
(:require [hitch.oldprotocols :as oldproto]
[hitch.protocol :as proto]))
(defrecord MutableVar [name]
proto/StatefulSelector
(create [selector]
(proto/->StateEffect oldproto/NOT-FOUND-SENTINEL nil nil))
(destroy [selector state]
nil)
proto/CommandableSelector
(command-accumulator [_ state]
state)
(command-step [_ _ event]
(case (first event)
:set-value (second event)
:clear oldproto/NOT-FOUND-SENTINEL))
(command-result [_ state]
(proto/->StateEffect state nil nil))
proto/Selector
(value [_ _ state]
(if (identical? state oldproto/NOT-FOUND-SENTINEL)
(proto/->SelectorUnresolved nil)
(proto/->SelectorValue state nil))))
(def mutable-var ->MutableVar)
| |
26f373efa4f65aeb927fd03f602c70962d46c67e8af111a3337097c0dc7cfb30 | facebook/infer | Source.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
let all_formals_untainted pdesc =
let make_untainted (name, typ, _) = (name, typ, None) in
List.map ~f:make_untainted (Procdesc.get_formals pdesc)
module type Kind = sig
include TaintTraceElem.Kind
val get :
caller_pname:Procname.t -> Procname.t -> HilExp.t list -> Tenv.t -> (t * int option) list
val get_tainted_formals : Procdesc.t -> Tenv.t -> (Mangled.t * Typ.t * t option) list
end
module type S = sig
include TaintTraceElem.S
type spec = {source: t; index: int option}
val get : caller_pname:Procname.t -> CallSite.t -> HilExp.t list -> Tenv.t -> spec list
val get_tainted_formals : Procdesc.t -> Tenv.t -> (Mangled.t * Typ.t * t option) list
end
module Make (Kind : Kind) = struct
module Kind = Kind
type t = {kind: Kind.t; site: CallSite.t} [@@deriving compare, equal]
type spec = {source: t; index: int option}
let call_site t = t.site
let kind t = t.kind
let make ?indexes:_ kind site = {site; kind}
let get ~caller_pname site actuals tenv =
Kind.get ~caller_pname (CallSite.pname site) actuals tenv
|> List.rev_map ~f:(fun (kind, index) ->
let source = make kind site in
{source; index} )
let get_tainted_formals pdesc tenv =
let site = CallSite.make (Procdesc.get_proc_name pdesc) (Procdesc.get_loc pdesc) in
List.map
~f:(fun (name, typ, kind_opt) ->
(name, typ, Option.map kind_opt ~f:(fun kind -> make kind site)) )
(Kind.get_tainted_formals pdesc tenv)
let pp fmt s = F.fprintf fmt "%a(%a)" Kind.pp s.kind CallSite.pp s.site
let with_callsite t callee_site = {t with site= callee_site}
module Set = PrettyPrintable.MakePPSet (struct
type nonrec t = t [@@deriving compare]
let pp = pp
end)
end
module Dummy = struct
type t = unit [@@deriving compare, equal]
type spec = {source: t; index: int option}
let call_site _ = CallSite.dummy
let kind t = t
let make ?indexes:_ kind _ = kind
let pp _ () = ()
let get ~caller_pname:_ _ _ _ = []
let get_tainted_formals pdesc _ =
List.map ~f:(fun (name, typ, _) -> (name, typ, None)) (Procdesc.get_formals pdesc)
module Kind = struct
type nonrec t = t [@@deriving compare, equal]
let matches ~caller ~callee = Int.equal 0 (compare caller callee)
let pp = pp
end
module Set = PrettyPrintable.MakePPSet (struct
type nonrec t = t [@@deriving compare]
let pp = pp
end)
let with_callsite t _ = t
end
| null | https://raw.githubusercontent.com/facebook/infer/d2e59e6df24858729129debcc2813ae3915c4f0a/infer/src/absint/Source.ml | ocaml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
let all_formals_untainted pdesc =
let make_untainted (name, typ, _) = (name, typ, None) in
List.map ~f:make_untainted (Procdesc.get_formals pdesc)
module type Kind = sig
include TaintTraceElem.Kind
val get :
caller_pname:Procname.t -> Procname.t -> HilExp.t list -> Tenv.t -> (t * int option) list
val get_tainted_formals : Procdesc.t -> Tenv.t -> (Mangled.t * Typ.t * t option) list
end
module type S = sig
include TaintTraceElem.S
type spec = {source: t; index: int option}
val get : caller_pname:Procname.t -> CallSite.t -> HilExp.t list -> Tenv.t -> spec list
val get_tainted_formals : Procdesc.t -> Tenv.t -> (Mangled.t * Typ.t * t option) list
end
module Make (Kind : Kind) = struct
module Kind = Kind
type t = {kind: Kind.t; site: CallSite.t} [@@deriving compare, equal]
type spec = {source: t; index: int option}
let call_site t = t.site
let kind t = t.kind
let make ?indexes:_ kind site = {site; kind}
let get ~caller_pname site actuals tenv =
Kind.get ~caller_pname (CallSite.pname site) actuals tenv
|> List.rev_map ~f:(fun (kind, index) ->
let source = make kind site in
{source; index} )
let get_tainted_formals pdesc tenv =
let site = CallSite.make (Procdesc.get_proc_name pdesc) (Procdesc.get_loc pdesc) in
List.map
~f:(fun (name, typ, kind_opt) ->
(name, typ, Option.map kind_opt ~f:(fun kind -> make kind site)) )
(Kind.get_tainted_formals pdesc tenv)
let pp fmt s = F.fprintf fmt "%a(%a)" Kind.pp s.kind CallSite.pp s.site
let with_callsite t callee_site = {t with site= callee_site}
module Set = PrettyPrintable.MakePPSet (struct
type nonrec t = t [@@deriving compare]
let pp = pp
end)
end
module Dummy = struct
type t = unit [@@deriving compare, equal]
type spec = {source: t; index: int option}
let call_site _ = CallSite.dummy
let kind t = t
let make ?indexes:_ kind _ = kind
let pp _ () = ()
let get ~caller_pname:_ _ _ _ = []
let get_tainted_formals pdesc _ =
List.map ~f:(fun (name, typ, _) -> (name, typ, None)) (Procdesc.get_formals pdesc)
module Kind = struct
type nonrec t = t [@@deriving compare, equal]
let matches ~caller ~callee = Int.equal 0 (compare caller callee)
let pp = pp
end
module Set = PrettyPrintable.MakePPSet (struct
type nonrec t = t [@@deriving compare]
let pp = pp
end)
let with_callsite t _ = t
end
| |
ba0ef132b164a3c5d9d9e05cdf820c26c0e2d54a4a62b5db5a30d388482e272f | yetanalytics/dave | path.clj | (ns com.yetanalytics.dave.filter-scratch.json.path
(:require [blancas.kern.core :as k]
[blancas.kern.lexer.basic :as kl]
[clojure.spec.alpha :as s]
[com.yetanalytics.dave.filter-scratch.json :as json]
[clojure.math.combinatorics :as combo]))
(s/def ::root
#{'$})
(def root
(k/<$>
(constantly '$)
(k/sym* \$)))
(s/def ::wildcard
#{'*})
(def wildcard
(k/<$>
(constantly '*)
(k/sym* \*)))
(s/def ::keyset
(s/or :keys (s/every string?
:type set?
:into #{}
:min-count 1)
:indices (s/every int?
:type set?
:into #{}
:min-count 1)))
(def index
(k/<$>
(fn [^String x]
(Long/parseLong x))
(k/<+>
(k/optional (k/sym* \-))
kl/dec-lit)))
(s/def :range/start
int?)
(s/def :range/end
int?)
(s/def :range/step
int?)
(s/def :range/bounded? ;; was this range bounded, or does it use a MAX_VALUE?
boolean?)
(defrecord RangeSpec [start end step bounded?])
(s/fdef range-spec?
:args (s/cat :item any?)
:ret boolean?)
(defn range-spec?
[item]
(instance? RangeSpec item))
(s/def ::range
(s/keys :req-un [:range/start
:range/end
:range/step
:range/bounded?]))
(def ^:const max-long-str
(str Long/MAX_VALUE))
(def index-range
(k/bind [start (k/option 0
index)
_ kl/colon
end (k/option Long/MAX_VALUE
index)
_ (k/optional kl/colon)
step (k/option 1
index)]
(k/return
(->RangeSpec (if (number? start)
start
(Long/parseLong start))
(if (number? end)
end
(Long/parseLong end))
(if (number? step)
step
(Long/parseLong step))
;; if the option is used, we're unbounded
(if (#{Long/MAX_VALUE
max-long-str} end)
false
true)))))
(defn escaped-by
[c & [charset-p]]
(k/>> (k/sym* c)
(or charset-p k/any-char)))
(def escaped-char
(escaped-by (char 92)))
(def safe-char
(k/none-of* (str
;; double-quote
(char 34)
;; single quote
(char 39)
;; escape char
(char 92))))
(def json-key
(k/<+>
(k/many
(k/<|>
escaped-char
safe-char))))
(def json-key-lit
(k/<+>
(k/between (k/sym* \') json-key)))
(defn union-of1
"A comma separated union of at least 1 of something.
Returns a set."
[p]
(k/<$>
set
(k/sep-by1 (k/sym* \,) p)))
(def child-normal
"Normal bracket-style child"
(kl/brackets
(k/<|>
wildcard
(k/<:> index-range)
(union-of1 index)
(union-of1 json-key-lit))))
(def child-dot
"Stupid dot syntax"
(k/>>
kl/dot
(k/<|>
;; normal key
(k/<$>
(partial hash-set)
(k/<+> (k/many1 k/alpha-num)))
;; dot wildcard
wildcard
;; double-dot wildcard
(k/<$>
(constantly '*)
(k/look-ahead
(k/>> kl/dot
(k/many1 k/alpha-num)))))))
(def json-path
(k/>> root
(k/many (k/<|>
child-normal
child-dot))))
(s/def ::json-path
(s/every (s/or :keyset ::keyset
:wildcard ::wildcard
:range ::range)))
(s/fdef parse
:args (s/cat :path string?)
:ret ::json-path)
(defn parse
"Given a JSON-path, parse it into data"
[path]
(:value (k/parse json-path
path)))
(s/def :enumerate/limit
number?)
(s/fdef enumerate
:args (s/cat :path ::json-path
:options (s/keys* :opt-un [:enumerate/limit]))
:ret (s/every ::json/key-path))
(defn enumerate
"Given a json path, return a lazy seq of concrete key paths. wildcards/ranges
will be enumerated up to :limit, which defaults to 10"
[path & {:keys [limit]}]
(map vec
(apply combo/cartesian-product
(map
(fn [element]
(cond
(set? element)
element
(= '* element)
(if limit
(range limit)
(range))
;; otherwise, itsa range spec
:else
(let [{:keys [start end step]} element]
(cond->> (range start end step)
limit (take limit)))))
path))))
(s/fdef path-seq
:args (s/cat :json ::json/any
:path ::json-path)
:ret (s/every (s/tuple ::json/key-path
::json/any)))
(defn path-seq*
[json path]
(lazy-seq
(let [path-enum (map vec
(apply combo/cartesian-product
path))
hits (for [p path-enum
:let [hit (get-in json p)]
:while (some? hit)]
[p hit])
]
(when (not-empty hits)
(concat
hits
;; if the enum continues,
(when-let [first-fail (first (drop (count hits) path-enum))]
(let [last-pass (first (last hits))
fail-idx
(some
(fn [[idx pv fv]]
(when (not= pv fv)
idx))
(map vector
(range)
last-pass
first-fail))]
(when-let [[edit-idx v]
(and (< 0 fail-idx)
(some
(fn [idx]
(let [seg (get path idx)]
(if (set? seg)
(when (< 1 (count seg))
[idx (disj seg
(get first-fail
idx))])
(let [re-ranged
(drop-while
#(<= % (get first-fail idx))
seg)]
(when (first re-ranged)
[idx re-ranged])))))
(reverse (range fail-idx))))]
(path-seq*
json
(assoc path edit-idx v))))))))))
(defn path-seq
[json path]
(path-seq* json (mapv
(fn [element]
(cond
(set? element)
element
(= '* element)
(range)
;; otherwise, itsa range spec
:else
(let [{:keys [start end step]} element]
(range start end step))))
path)))
(s/fdef select
:args (s/cat :json ::json/any
:path ::json-path)
:ret (s/every ::json/any))
(defn select
"Given json data and a parsed path, return a selection vector."
[json path]
(let [ps (path-seq json path)]
(vary-meta (into []
(map second ps))
assoc :paths (map first ps))))
(s/fdef select-paths
:args (s/cat :json ::json/any
:path ::json-path)
:ret (s/map-of ::json/key-path
::json/any))
(defn select-paths
"Given json data and a parsed path, return a selection map of key paths to values"
[json path]
(into {}
(path-seq json path)))
(s/def :excise/prune-empty?
boolean?)
(s/fdef excise
:args (s/cat :json ::json/any
:path ::json-path
:options (s/keys* :opt-un [:excise/prune-empty?]))
:ret (s/every ::json/any)
:fn (fn [{:keys [ret]
{path :path
json :json} :args}]
(empty? (select json path))))
(defn- cut [prune-empty? j key-path]
(if (some? (get-in j key-path))
(if (= 1 (count key-path))
;; we don't prune at the top level, so this is simple
(let [[k] key-path
j-after (if (string? k)
(dissoc j k)
(into []
(let [[before [_ & after]] (split-at k j)]
(concat before after))))]
j-after)
(let [last-k (peek key-path)
parent-key-path (into [] (butlast key-path))
parent (get-in j parent-key-path)
j-after (update-in j
parent-key-path
(partial cut prune-empty?)
[last-k])]
(if (and prune-empty?
(empty? (get-in j-after parent-key-path)))
(recur prune-empty? j-after parent-key-path)
j-after)))
j))
(defn excise
"Given json data and a parsed path, return the data without the selection, and
any empty container.
If :prune-empty? is true, will remove empty arrays and maps"
[json path & {:keys [prune-empty?
]}]
(let [ps (path-seq json path)
psk (map first ps)]
(vary-meta
(reduce
(partial cut prune-empty?)
json
;; reverse the paths so the indices stay correct!
;; TODO: probably doesn't handle array slices
(reverse psk))
assoc :paths (into #{} psk))))
(s/fdef discrete?
:args (s/cat :path ::json-path)
:ret boolean?)
(defn discrete?
"Is the path free of wildcards?"
[path]
(not
(some (fn [x]
(or (= '* x)
(and (range-spec? x)
(not (:bounded? x)))))
path)))
(s/fdef apply-values
:args (s/cat :json ::json/any
:path ::json-path
:values (s/every ::json/any))
:ret (s/every ::json/any)
:fn (fn [{:keys [ret]
{path :path
json :json
values :values} :args}]
(= (set values)
(set (select ret path)))))
(defn apply-values
"Given json data, path and values, apply them to the structure.
If there is no place to put a value, enumerate further possible paths and use
those."
[json path values & {:keys [enum-limit]}]
;; TODO: probably doesn't handle array slices
(let [ps (map first (path-seq json path))
[splice-vals append-vals] (split-at (count ps) values)
json-spliced (reduce
(fn [j [p v]]
(assoc-in j p v))
json
(map vector
ps
splice-vals))]
(if (not-empty append-vals)
;; if there are still vals to append, we should do so
(loop [key-paths (remove
(partial contains? (set ps))
(enumerate
path
:limit (or enum-limit 3)))
vs values
j json
applied-paths #{}]
(if-some [v (first vs)]
(if-let [key-path (first key-paths)]
(recur (rest key-paths)
(rest vs)
(json/jassoc-in j key-path v)
(conj applied-paths key-path))
(throw (ex-info "Couldn't make enough paths"
{:type ::out-of-paths
:path path
:json json
:json-mod j
:values values
:values-remaining vs
})))
(vary-meta j assoc :paths applied-paths)))
;; extra hanging paths should be removed.
(let [spliced-count (count json-spliced)]
(vary-meta
(if-let [extra-paths (not-empty (drop spliced-count
ps))]
(reduce
(partial cut true)
json-spliced
(reverse extra-paths))
json-spliced)
assoc :paths (into #{}
(take spliced-count
ps)))))))
| null | https://raw.githubusercontent.com/yetanalytics/dave/7a71c2017889862b2fb567edc8196b4382d01beb/scratch/filter-scratch/src/main/com/yetanalytics/dave/filter_scratch/json/path.clj | clojure | was this range bounded, or does it use a MAX_VALUE?
if the option is used, we're unbounded
double-quote
single quote
escape char
normal key
dot wildcard
double-dot wildcard
otherwise, itsa range spec
if the enum continues,
otherwise, itsa range spec
we don't prune at the top level, so this is simple
reverse the paths so the indices stay correct!
TODO: probably doesn't handle array slices
TODO: probably doesn't handle array slices
if there are still vals to append, we should do so
extra hanging paths should be removed. | (ns com.yetanalytics.dave.filter-scratch.json.path
(:require [blancas.kern.core :as k]
[blancas.kern.lexer.basic :as kl]
[clojure.spec.alpha :as s]
[com.yetanalytics.dave.filter-scratch.json :as json]
[clojure.math.combinatorics :as combo]))
(s/def ::root
#{'$})
(def root
(k/<$>
(constantly '$)
(k/sym* \$)))
(s/def ::wildcard
#{'*})
(def wildcard
(k/<$>
(constantly '*)
(k/sym* \*)))
(s/def ::keyset
(s/or :keys (s/every string?
:type set?
:into #{}
:min-count 1)
:indices (s/every int?
:type set?
:into #{}
:min-count 1)))
(def index
(k/<$>
(fn [^String x]
(Long/parseLong x))
(k/<+>
(k/optional (k/sym* \-))
kl/dec-lit)))
(s/def :range/start
int?)
(s/def :range/end
int?)
(s/def :range/step
int?)
boolean?)
(defrecord RangeSpec [start end step bounded?])
(s/fdef range-spec?
:args (s/cat :item any?)
:ret boolean?)
(defn range-spec?
[item]
(instance? RangeSpec item))
(s/def ::range
(s/keys :req-un [:range/start
:range/end
:range/step
:range/bounded?]))
(def ^:const max-long-str
(str Long/MAX_VALUE))
(def index-range
(k/bind [start (k/option 0
index)
_ kl/colon
end (k/option Long/MAX_VALUE
index)
_ (k/optional kl/colon)
step (k/option 1
index)]
(k/return
(->RangeSpec (if (number? start)
start
(Long/parseLong start))
(if (number? end)
end
(Long/parseLong end))
(if (number? step)
step
(Long/parseLong step))
(if (#{Long/MAX_VALUE
max-long-str} end)
false
true)))))
(defn escaped-by
[c & [charset-p]]
(k/>> (k/sym* c)
(or charset-p k/any-char)))
(def escaped-char
(escaped-by (char 92)))
(def safe-char
(k/none-of* (str
(char 34)
(char 39)
(char 92))))
(def json-key
(k/<+>
(k/many
(k/<|>
escaped-char
safe-char))))
(def json-key-lit
(k/<+>
(k/between (k/sym* \') json-key)))
(defn union-of1
"A comma separated union of at least 1 of something.
Returns a set."
[p]
(k/<$>
set
(k/sep-by1 (k/sym* \,) p)))
(def child-normal
"Normal bracket-style child"
(kl/brackets
(k/<|>
wildcard
(k/<:> index-range)
(union-of1 index)
(union-of1 json-key-lit))))
(def child-dot
"Stupid dot syntax"
(k/>>
kl/dot
(k/<|>
(k/<$>
(partial hash-set)
(k/<+> (k/many1 k/alpha-num)))
wildcard
(k/<$>
(constantly '*)
(k/look-ahead
(k/>> kl/dot
(k/many1 k/alpha-num)))))))
(def json-path
(k/>> root
(k/many (k/<|>
child-normal
child-dot))))
(s/def ::json-path
(s/every (s/or :keyset ::keyset
:wildcard ::wildcard
:range ::range)))
(s/fdef parse
:args (s/cat :path string?)
:ret ::json-path)
(defn parse
"Given a JSON-path, parse it into data"
[path]
(:value (k/parse json-path
path)))
(s/def :enumerate/limit
number?)
(s/fdef enumerate
:args (s/cat :path ::json-path
:options (s/keys* :opt-un [:enumerate/limit]))
:ret (s/every ::json/key-path))
(defn enumerate
"Given a json path, return a lazy seq of concrete key paths. wildcards/ranges
will be enumerated up to :limit, which defaults to 10"
[path & {:keys [limit]}]
(map vec
(apply combo/cartesian-product
(map
(fn [element]
(cond
(set? element)
element
(= '* element)
(if limit
(range limit)
(range))
:else
(let [{:keys [start end step]} element]
(cond->> (range start end step)
limit (take limit)))))
path))))
(s/fdef path-seq
:args (s/cat :json ::json/any
:path ::json-path)
:ret (s/every (s/tuple ::json/key-path
::json/any)))
(defn path-seq*
[json path]
(lazy-seq
(let [path-enum (map vec
(apply combo/cartesian-product
path))
hits (for [p path-enum
:let [hit (get-in json p)]
:while (some? hit)]
[p hit])
]
(when (not-empty hits)
(concat
hits
(when-let [first-fail (first (drop (count hits) path-enum))]
(let [last-pass (first (last hits))
fail-idx
(some
(fn [[idx pv fv]]
(when (not= pv fv)
idx))
(map vector
(range)
last-pass
first-fail))]
(when-let [[edit-idx v]
(and (< 0 fail-idx)
(some
(fn [idx]
(let [seg (get path idx)]
(if (set? seg)
(when (< 1 (count seg))
[idx (disj seg
(get first-fail
idx))])
(let [re-ranged
(drop-while
#(<= % (get first-fail idx))
seg)]
(when (first re-ranged)
[idx re-ranged])))))
(reverse (range fail-idx))))]
(path-seq*
json
(assoc path edit-idx v))))))))))
(defn path-seq
[json path]
(path-seq* json (mapv
(fn [element]
(cond
(set? element)
element
(= '* element)
(range)
:else
(let [{:keys [start end step]} element]
(range start end step))))
path)))
(s/fdef select
:args (s/cat :json ::json/any
:path ::json-path)
:ret (s/every ::json/any))
(defn select
"Given json data and a parsed path, return a selection vector."
[json path]
(let [ps (path-seq json path)]
(vary-meta (into []
(map second ps))
assoc :paths (map first ps))))
(s/fdef select-paths
:args (s/cat :json ::json/any
:path ::json-path)
:ret (s/map-of ::json/key-path
::json/any))
(defn select-paths
"Given json data and a parsed path, return a selection map of key paths to values"
[json path]
(into {}
(path-seq json path)))
(s/def :excise/prune-empty?
boolean?)
(s/fdef excise
:args (s/cat :json ::json/any
:path ::json-path
:options (s/keys* :opt-un [:excise/prune-empty?]))
:ret (s/every ::json/any)
:fn (fn [{:keys [ret]
{path :path
json :json} :args}]
(empty? (select json path))))
(defn- cut [prune-empty? j key-path]
(if (some? (get-in j key-path))
(if (= 1 (count key-path))
(let [[k] key-path
j-after (if (string? k)
(dissoc j k)
(into []
(let [[before [_ & after]] (split-at k j)]
(concat before after))))]
j-after)
(let [last-k (peek key-path)
parent-key-path (into [] (butlast key-path))
parent (get-in j parent-key-path)
j-after (update-in j
parent-key-path
(partial cut prune-empty?)
[last-k])]
(if (and prune-empty?
(empty? (get-in j-after parent-key-path)))
(recur prune-empty? j-after parent-key-path)
j-after)))
j))
(defn excise
"Given json data and a parsed path, return the data without the selection, and
any empty container.
If :prune-empty? is true, will remove empty arrays and maps"
[json path & {:keys [prune-empty?
]}]
(let [ps (path-seq json path)
psk (map first ps)]
(vary-meta
(reduce
(partial cut prune-empty?)
json
(reverse psk))
assoc :paths (into #{} psk))))
(s/fdef discrete?
:args (s/cat :path ::json-path)
:ret boolean?)
(defn discrete?
"Is the path free of wildcards?"
[path]
(not
(some (fn [x]
(or (= '* x)
(and (range-spec? x)
(not (:bounded? x)))))
path)))
(s/fdef apply-values
:args (s/cat :json ::json/any
:path ::json-path
:values (s/every ::json/any))
:ret (s/every ::json/any)
:fn (fn [{:keys [ret]
{path :path
json :json
values :values} :args}]
(= (set values)
(set (select ret path)))))
(defn apply-values
"Given json data, path and values, apply them to the structure.
If there is no place to put a value, enumerate further possible paths and use
those."
[json path values & {:keys [enum-limit]}]
(let [ps (map first (path-seq json path))
[splice-vals append-vals] (split-at (count ps) values)
json-spliced (reduce
(fn [j [p v]]
(assoc-in j p v))
json
(map vector
ps
splice-vals))]
(if (not-empty append-vals)
(loop [key-paths (remove
(partial contains? (set ps))
(enumerate
path
:limit (or enum-limit 3)))
vs values
j json
applied-paths #{}]
(if-some [v (first vs)]
(if-let [key-path (first key-paths)]
(recur (rest key-paths)
(rest vs)
(json/jassoc-in j key-path v)
(conj applied-paths key-path))
(throw (ex-info "Couldn't make enough paths"
{:type ::out-of-paths
:path path
:json json
:json-mod j
:values values
:values-remaining vs
})))
(vary-meta j assoc :paths applied-paths)))
(let [spliced-count (count json-spliced)]
(vary-meta
(if-let [extra-paths (not-empty (drop spliced-count
ps))]
(reduce
(partial cut true)
json-spliced
(reverse extra-paths))
json-spliced)
assoc :paths (into #{}
(take spliced-count
ps)))))))
|
70fae2544587dbd385ede929d26ac15e50b03e99186b8836eb7f631009dae604 | lehins/hip | Filter.hs | # LANGUAGE FlexibleContexts #
module Main where
import Criterion.Main
import Graphics.Image as I
import Graphics.Image.Processing.Canny as I
import Prelude as P
main :: IO ()
main = do
imgY <- readImageY "images/downloaded/frog-1280x824.jpg"
imgRGBD <- readImageRGB "images/downloaded/frog-1280x824.jpg"
defaultMain
[ env (return imgRGBD) $ \img ->
bgroup
"Gaussian"
[ bgroup
"StdDev=derived"
[ bench "3x3" $ whnf (gaussianBlur3x3 Edge) img
, bench "5x5" $ whnf (gaussianBlur5x5 Edge) img
, bench "7x7" $ whnf (gaussianBlur7x7 Edge) img
, bench "9x9" $ whnf (gaussianBlur 4 Nothing Edge) img
, bench "11x11" $ whnf (gaussianBlur 5 Nothing Edge) img
]
, bgroup
"StdDev=1"
[ bench "3x3" $ whnf (gaussianBlur 1 (Just 1) Edge) img
, bench "5x5" $ whnf (gaussianBlur 2 (Just 1) Edge) img
, bench "7x7" $ whnf (gaussianBlur 3 (Just 1) Edge) img
, bench "9x9" $ whnf (gaussianBlur 4 (Just 1) Edge) img
, bench "11x11" $ whnf (gaussianBlur 5 (Just 1) Edge) img
]
]
, env (return imgRGBD) $ \img ->
bgroup
"Average"
[ bench "3x3" $ whnf (averageBlur3x3 Edge) img
, bench "5x5" $ whnf (averageBlur5x5 Edge) img
, bench "7x7" $ whnf (averageBlur7x7 Edge) img
, bench "9x9" $ whnf (averageBlur 4 Edge) img
, bench "11x11" $ whnf (averageBlur 5 Edge) img
]
, env (return imgRGBD) $ \img ->
bgroup
"Sobel Operator"
[ bench "Not Normalized" $ whnf (mapFilter Edge sobelOperator) img
, bench "Normalized" $ whnf (mapFilter Edge sobelOperatorNormal) img
]
-- , env (return imgY) $ \img ->
-- bgroup
" Sobel Operator Y "
[ bench " No Normalization " $ whnf ( applyFilter Edge sobelOperator ) img
-- ]
, bench "Canny" $ whnf (canny 0.2 0.4) imgY
]
| null | https://raw.githubusercontent.com/lehins/hip/1386e83ad4c3e1bdb01f3d6bdec6fe40489dabd1/hip/bench/Filter.hs | haskell | , env (return imgY) $ \img ->
bgroup
] | # LANGUAGE FlexibleContexts #
module Main where
import Criterion.Main
import Graphics.Image as I
import Graphics.Image.Processing.Canny as I
import Prelude as P
main :: IO ()
main = do
imgY <- readImageY "images/downloaded/frog-1280x824.jpg"
imgRGBD <- readImageRGB "images/downloaded/frog-1280x824.jpg"
defaultMain
[ env (return imgRGBD) $ \img ->
bgroup
"Gaussian"
[ bgroup
"StdDev=derived"
[ bench "3x3" $ whnf (gaussianBlur3x3 Edge) img
, bench "5x5" $ whnf (gaussianBlur5x5 Edge) img
, bench "7x7" $ whnf (gaussianBlur7x7 Edge) img
, bench "9x9" $ whnf (gaussianBlur 4 Nothing Edge) img
, bench "11x11" $ whnf (gaussianBlur 5 Nothing Edge) img
]
, bgroup
"StdDev=1"
[ bench "3x3" $ whnf (gaussianBlur 1 (Just 1) Edge) img
, bench "5x5" $ whnf (gaussianBlur 2 (Just 1) Edge) img
, bench "7x7" $ whnf (gaussianBlur 3 (Just 1) Edge) img
, bench "9x9" $ whnf (gaussianBlur 4 (Just 1) Edge) img
, bench "11x11" $ whnf (gaussianBlur 5 (Just 1) Edge) img
]
]
, env (return imgRGBD) $ \img ->
bgroup
"Average"
[ bench "3x3" $ whnf (averageBlur3x3 Edge) img
, bench "5x5" $ whnf (averageBlur5x5 Edge) img
, bench "7x7" $ whnf (averageBlur7x7 Edge) img
, bench "9x9" $ whnf (averageBlur 4 Edge) img
, bench "11x11" $ whnf (averageBlur 5 Edge) img
]
, env (return imgRGBD) $ \img ->
bgroup
"Sobel Operator"
[ bench "Not Normalized" $ whnf (mapFilter Edge sobelOperator) img
, bench "Normalized" $ whnf (mapFilter Edge sobelOperatorNormal) img
]
" Sobel Operator Y "
[ bench " No Normalization " $ whnf ( applyFilter Edge sobelOperator ) img
, bench "Canny" $ whnf (canny 0.2 0.4) imgY
]
|
24467efeb6853cd9db524ff12dfd4ac37468bc220bff974a60b6a93574c0a4a8 | int-index/kalium | Valueficate.hs | # LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE NoMonomorphismRestriction #
module Kalium.Nucleus.Scalar.Valueficate where
import Kalium.Prelude
import Control.Monad.Reader
import qualified Data.Map as M
import Kalium.Nucleus.Scalar.Program
class ValueficateSubstitute a where
valueficateSubstitute
:: (MonadReader ReferenceInfo m)
=> a (Configuration param0 Pattern Atom)
-> m (a (Configuration param1 Pattern Expression))
ap1On f a = f <$> valueficateSubstitute a
ap2On f a b = ap1On f a <*> valueficateSubstitute b
instance ValueficateSubstitute Exec where
valueficateSubstitute (Exec ret op tyArgs args) = do
referenceInfo <- ask
let ret' = foldr1 PTuple (ret:rets)
rets = case M.lookup op referenceInfo of
Nothing -> []
Just currentReferenceInfo -> do
(keep, arg) <- zip currentReferenceInfo args
guard keep
return $ case arg of
Access name -> PAccess name
Primary _ -> error "non-variables by-ref"
return $ Exec ret' op tyArgs (map Atom args)
instance ValueficateSubstitute Statement where
valueficateSubstitute = \case
Follow a1 a2 -> ap2On Follow a1 a2
ScopeStatement a -> ap1On ScopeStatement a
IfStatement a -> ap1On IfStatement a
ForStatement a -> ap1On ForStatement a
Execute a -> ap1On Execute a
Pass -> return Pass
instance ValueficateSubstitute obj
=> ValueficateSubstitute (Scope vars obj) where
valueficateSubstitute = scopeElem valueficateSubstitute
instance ValueficateSubstitute If where
valueficateSubstitute (If ifCond ifThen ifElse)
= ap2On (If (Atom ifCond)) ifThen ifElse
instance ValueficateSubstitute ForCycle where
valueficateSubstitute (ForCycle forName forRange forStatement)
= ap1On (ForCycle forName (Atom forRange)) forStatement
valueficate
:: Program (Configuration ByType Pattern Atom)
-> Program (Configuration Type Pattern Expression)
valueficate program =
let referenceInfo = gather program
in program & programFuncs %~ imap (valueficateFunc referenceInfo)
valueficateFunc
:: ReferenceInfo
-> Name
-> Func (Configuration ByType Pattern Atom)
-> Func (Configuration Type Pattern Expression)
valueficateFunc referenceInfo name
(Func ty (Scope params (Scope vars (Body statement result))))
= let currentReferenceInfo = referenceInfo M.! name
params' = params & map (\(name, (_by, ty)) -> (name, ty))
ty' = foldr1 (TypeApp2 TypePair) (ty : tys)
result' = foldr1 (\x y -> Call (NameSpecial OpPair) [] [x, y])
$ Atom result : map (Atom . Access) results
(results, tys) = unzip $ do
(keep, param) <- zip currentReferenceInfo params'
guard keep
return param
statement' = runReader (valueficateSubstitute statement) referenceInfo
in Func ty' (Scope params' (Scope vars (Body statement' result')))
type ReferenceInfo = Map Name [Bool]
gather :: Program (Configuration ByType pat expr) -> ReferenceInfo
gather = fmap inspect . view programFuncs where
inspect = map check . view (funcScope . scopeVars)
check (_name, (by, _ty)) = by == ByReference
| null | https://raw.githubusercontent.com/int-index/kalium/0653b4229001880322acf3016de595360de726ec/src/Kalium/Nucleus/Scalar/Valueficate.hs | haskell | # LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE NoMonomorphismRestriction #
module Kalium.Nucleus.Scalar.Valueficate where
import Kalium.Prelude
import Control.Monad.Reader
import qualified Data.Map as M
import Kalium.Nucleus.Scalar.Program
class ValueficateSubstitute a where
valueficateSubstitute
:: (MonadReader ReferenceInfo m)
=> a (Configuration param0 Pattern Atom)
-> m (a (Configuration param1 Pattern Expression))
ap1On f a = f <$> valueficateSubstitute a
ap2On f a b = ap1On f a <*> valueficateSubstitute b
instance ValueficateSubstitute Exec where
valueficateSubstitute (Exec ret op tyArgs args) = do
referenceInfo <- ask
let ret' = foldr1 PTuple (ret:rets)
rets = case M.lookup op referenceInfo of
Nothing -> []
Just currentReferenceInfo -> do
(keep, arg) <- zip currentReferenceInfo args
guard keep
return $ case arg of
Access name -> PAccess name
Primary _ -> error "non-variables by-ref"
return $ Exec ret' op tyArgs (map Atom args)
instance ValueficateSubstitute Statement where
valueficateSubstitute = \case
Follow a1 a2 -> ap2On Follow a1 a2
ScopeStatement a -> ap1On ScopeStatement a
IfStatement a -> ap1On IfStatement a
ForStatement a -> ap1On ForStatement a
Execute a -> ap1On Execute a
Pass -> return Pass
instance ValueficateSubstitute obj
=> ValueficateSubstitute (Scope vars obj) where
valueficateSubstitute = scopeElem valueficateSubstitute
instance ValueficateSubstitute If where
valueficateSubstitute (If ifCond ifThen ifElse)
= ap2On (If (Atom ifCond)) ifThen ifElse
instance ValueficateSubstitute ForCycle where
valueficateSubstitute (ForCycle forName forRange forStatement)
= ap1On (ForCycle forName (Atom forRange)) forStatement
valueficate
:: Program (Configuration ByType Pattern Atom)
-> Program (Configuration Type Pattern Expression)
valueficate program =
let referenceInfo = gather program
in program & programFuncs %~ imap (valueficateFunc referenceInfo)
valueficateFunc
:: ReferenceInfo
-> Name
-> Func (Configuration ByType Pattern Atom)
-> Func (Configuration Type Pattern Expression)
valueficateFunc referenceInfo name
(Func ty (Scope params (Scope vars (Body statement result))))
= let currentReferenceInfo = referenceInfo M.! name
params' = params & map (\(name, (_by, ty)) -> (name, ty))
ty' = foldr1 (TypeApp2 TypePair) (ty : tys)
result' = foldr1 (\x y -> Call (NameSpecial OpPair) [] [x, y])
$ Atom result : map (Atom . Access) results
(results, tys) = unzip $ do
(keep, param) <- zip currentReferenceInfo params'
guard keep
return param
statement' = runReader (valueficateSubstitute statement) referenceInfo
in Func ty' (Scope params' (Scope vars (Body statement' result')))
type ReferenceInfo = Map Name [Bool]
gather :: Program (Configuration ByType pat expr) -> ReferenceInfo
gather = fmap inspect . view programFuncs where
inspect = map check . view (funcScope . scopeVars)
check (_name, (by, _ty)) = by == ByReference
| |
f723f9715e8cc8a1f122fee8d6307dfc93be205f4c87a1dca472ad9bffb46de8 | cedlemo/OCaml-GI-ctypes-bindings-generator | Event_box_private.ml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Event_box_private"
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Event_box_private.ml | ocaml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Event_box_private"
| |
279f6eab9878bc5a8db87f1ddebcbd6273b4df0239c1b0b41f858cdc1e094f2b | geo2a/redfin-lib | Parser.hs | |
Module : ISA.Types . Symbolic . Parser
Copyright : ( c ) 2019
License : MIT ( see the file LICENSE )
Maintainer :
Stability : experimental
Parse symbolic expressions
Module : ISA.Types.Symbolic.Parser
Copyright : (c) Georgy Lukyanov 2019
License : MIT (see the file LICENSE)
Maintainer :
Stability : experimental
Parse symbolic expressions
-}
module ISA.Types.Symbolic.Parser (parseSym, pSym, pSAny, pAddress) where
import Control.Monad.Combinators.Expr
import Data.Text (Text)
import qualified Data.Text as Text
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import ISA.Types
import ISA.Types.Parser
import ISA.Types.Symbolic
import ISA.Types.Symbolic.Address
parseSym :: String -> Text -> Either Text Sym
parseSym symName =
either (Left . Text.pack . errorBundlePretty) (Right . id)
. parse pSym symName
pPredicate : : ( Sym - > Sym )
-- pPredicate = do
( SAny var ) < - char ' \\ ' * > pSAny
-- symbol "->"
-- property <- pSym
-- pure (\expr -> subst expr var property)
pCAddress :: Parser CAddress
pCAddress = CAddress <$> lexeme L.decimal <?> "concrete memory address"
pAddress :: Parser Address
pAddress =
MkAddress
<$> choice
[ Left <$> pCAddress
, Right <$> pSym
]
pInt32 :: Parser Concrete
pInt32 =
CInt32
<$> L.signed sc (lexeme L.decimal) <?> "Int32 literal"
pWord16 :: Parser Concrete
pWord16 =
CWord
<$> lexeme L.decimal <?> "Word 16 literal"
pBool :: Parser Concrete
pBool =
CBool
<$> lexeme
( (string "true" *> pure True)
<|> (string "false" *> pure False)
)
<?> "boolean literal"
pConcrete :: Parser Concrete
pConcrete = pInt32 <|> pWord16 <|> pBool
pSConst :: Parser Sym
pSConst = SConst <$> pConcrete
pSAny :: Parser Sym
pSAny = do
_ <- char '$'
name <- lexeme ((:) <$> letterChar <*> many (alphaNumChar <|> char '_') <?> "variable")
pure (SAny . Text.pack $ name)
pTerm :: Parser Sym
pTerm =
choice
[ parens pSym
, pSAny
, pSConst
]
pSym :: Parser Sym
pSym = makeExprParser pTerm operatorTable
operatorTable :: [[Operator Parser Sym]]
operatorTable =
[
[ prefix "&" SPointer
, prefix "not" SNot
, prefix "abs" SAbs
]
,
[ binary "&&" SAnd
, binary "||" SOr
]
,
[ binary "*" SMul
, binary "/" SDiv
, binary "%" SMod
]
,
[ binary "+" SAdd
, binary "-" SSub
]
,
[ binary "==" SEq
, binary "<" SLt
, binary ">" SGt
]
]
binary :: Text -> (Sym -> Sym -> Sym) -> Operator Parser Sym
binary name f = InfixL (f <$ symbol name)
prefix :: Text -> (Sym -> Sym) -> Operator Parser Sym
prefix name f = Prefix (f <$ symbol name)
| null | https://raw.githubusercontent.com/geo2a/redfin-lib/55fa785283b1a33976958a63cea557e38d76c5e3/src/ISA/Types/Symbolic/Parser.hs | haskell | pPredicate = do
symbol "->"
property <- pSym
pure (\expr -> subst expr var property) | |
Module : ISA.Types . Symbolic . Parser
Copyright : ( c ) 2019
License : MIT ( see the file LICENSE )
Maintainer :
Stability : experimental
Parse symbolic expressions
Module : ISA.Types.Symbolic.Parser
Copyright : (c) Georgy Lukyanov 2019
License : MIT (see the file LICENSE)
Maintainer :
Stability : experimental
Parse symbolic expressions
-}
module ISA.Types.Symbolic.Parser (parseSym, pSym, pSAny, pAddress) where
import Control.Monad.Combinators.Expr
import Data.Text (Text)
import qualified Data.Text as Text
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import ISA.Types
import ISA.Types.Parser
import ISA.Types.Symbolic
import ISA.Types.Symbolic.Address
parseSym :: String -> Text -> Either Text Sym
parseSym symName =
either (Left . Text.pack . errorBundlePretty) (Right . id)
. parse pSym symName
pPredicate : : ( Sym - > Sym )
( SAny var ) < - char ' \\ ' * > pSAny
pCAddress :: Parser CAddress
pCAddress = CAddress <$> lexeme L.decimal <?> "concrete memory address"
pAddress :: Parser Address
pAddress =
MkAddress
<$> choice
[ Left <$> pCAddress
, Right <$> pSym
]
pInt32 :: Parser Concrete
pInt32 =
CInt32
<$> L.signed sc (lexeme L.decimal) <?> "Int32 literal"
pWord16 :: Parser Concrete
pWord16 =
CWord
<$> lexeme L.decimal <?> "Word 16 literal"
pBool :: Parser Concrete
pBool =
CBool
<$> lexeme
( (string "true" *> pure True)
<|> (string "false" *> pure False)
)
<?> "boolean literal"
pConcrete :: Parser Concrete
pConcrete = pInt32 <|> pWord16 <|> pBool
pSConst :: Parser Sym
pSConst = SConst <$> pConcrete
pSAny :: Parser Sym
pSAny = do
_ <- char '$'
name <- lexeme ((:) <$> letterChar <*> many (alphaNumChar <|> char '_') <?> "variable")
pure (SAny . Text.pack $ name)
pTerm :: Parser Sym
pTerm =
choice
[ parens pSym
, pSAny
, pSConst
]
pSym :: Parser Sym
pSym = makeExprParser pTerm operatorTable
operatorTable :: [[Operator Parser Sym]]
operatorTable =
[
[ prefix "&" SPointer
, prefix "not" SNot
, prefix "abs" SAbs
]
,
[ binary "&&" SAnd
, binary "||" SOr
]
,
[ binary "*" SMul
, binary "/" SDiv
, binary "%" SMod
]
,
[ binary "+" SAdd
, binary "-" SSub
]
,
[ binary "==" SEq
, binary "<" SLt
, binary ">" SGt
]
]
binary :: Text -> (Sym -> Sym -> Sym) -> Operator Parser Sym
binary name f = InfixL (f <$ symbol name)
prefix :: Text -> (Sym -> Sym) -> Operator Parser Sym
prefix name f = Prefix (f <$ symbol name)
|
245824024448fd655e2c99f4fc4b6a77d4fdca1fa0bad557524e02f393de51b9 | nilern/monnit | reader_macros.clj | (ns monnit.impl.reader-macros
(:require [monnit.core :as m]))
(defmacro defreadertype [name fields & impls]
(concat
`(deftype ~name ~fields ~@impls)
'(m/Functor
(-fmap [self f] (->FMap1 f self))
(-fmap [self f b] (->FMap2 f self b))
(-fmap [self f b c] (->FMap3 f self b c))
(-fmap [self f b c d] (->FMap4 f self b c d))
(-fmap [self f b c d args] (->FMapN f self b c d args))
m/Monad
(bind [self f] (->Bind self f)))))
| null | https://raw.githubusercontent.com/nilern/monnit/cc5fe2a031ef8540c4f77bb620d05a0f0564292b/src/monnit/impl/reader_macros.clj | clojure | (ns monnit.impl.reader-macros
(:require [monnit.core :as m]))
(defmacro defreadertype [name fields & impls]
(concat
`(deftype ~name ~fields ~@impls)
'(m/Functor
(-fmap [self f] (->FMap1 f self))
(-fmap [self f b] (->FMap2 f self b))
(-fmap [self f b c] (->FMap3 f self b c))
(-fmap [self f b c d] (->FMap4 f self b c d))
(-fmap [self f b c d args] (->FMapN f self b c d args))
m/Monad
(bind [self f] (->Bind self f)))))
| |
a57225cbf522eb6d6f342defe32b5c5c844c8bc19299717f2d8fb509b6fb9d83 | dvcrn/proton | core.cljs | (ns proton.layers.frameworks.django.core
(:use [proton.layers.base :only [init-layer! register-layer-dependencies]]))
(defmethod init-layer! :frameworks/django []
(register-layer-dependencies :lang/python [:atom-django]))
| null | https://raw.githubusercontent.com/dvcrn/proton/427d83ffdb61d84a04e3d30032b792a9cfbfc53f/src/cljs/proton/layers/frameworks/django/core.cljs | clojure | (ns proton.layers.frameworks.django.core
(:use [proton.layers.base :only [init-layer! register-layer-dependencies]]))
(defmethod init-layer! :frameworks/django []
(register-layer-dependencies :lang/python [:atom-django]))
| |
0ef2bf50121ccb5dda06b56cfce67bdadfeaa35be714489e4b3e42ae3161bbfe | finnishtransportagency/harja | roolit.cljc | (ns harja.domain.roolit
"Harjan käyttäjäroolit"
(:require
[clojure.set :refer [intersection]]
#?(:cljs [harja.tiedot.istunto :as istunto])
[taoensso.timbre :as log]
#?(:clj [slingshot.slingshot :refer [throw+]])))
(defrecord EiOikeutta [syy])
(defn ei-oikeutta? [arvo]
(instance? EiOikeutta arvo))
varten
(def tilaajan-rakennuttajakonsultti "tilaajan rakennuttajakonsultti")
(def ely-rakennuttajakonsultti "rakennuttajakonsultti")
UHA : uv
( " vaylamuodon vastuuhenkilo " )
(def liikennepaivystaja "liikennepaivystaja")
(def tilaajan-laadunvalvontakonsultti "tilaajan laadunvalvontakonsultti")
(def urakoitsijan-paakayttaja "urakoitsijan paakayttaja")
(def urakoitsijan-urakan-vastuuhenkilo "urakoitsijan urakan vastuuhenkilo")
(def urakoitsijan-laatuvastaava "urakoitsijan laatuvastaava")
(def jarjestelmavastaava "Jarjestelmavastaava")
olla osittain vanhentuneita ( 2021 ) .
Nykyiset tarkastaa roolit.xlsx - tiedoston otsikoista
tai kutsumalla ( keys ) REPListä .
(def tilaajan-urakanvalvoja "Tilaajan_Urakanvalvoja")
(def ely-urakanvalvoja "ELY_Urakanvalvoja")
(def ely-paakayttaja "ELY_Paakayttaja")
(def toteumien-kirjaus
"Roolit, joilla on oikeus kirjoittaa urakkaan toteumatietoja."
#{urakanvalvoja
urakoitsijan-paakayttaja
urakoitsijan-urakan-vastuuhenkilo
urakoitsijan-laatuvastaava})
(def laadunseuranta-kirjaus
"Roolit, joilla on oikeus kirjata laadunseurantaa urakkaan."
#{urakanvalvoja
urakoitsijan-paakayttaja
urakoitsijan-urakan-vastuuhenkilo
urakoitsijan-laatuvastaava
tilaajan-laadunvalvontakonsultti})
Tietokannan rooli enumin selvempi kuvaus
(def +rooli->kuvaus+
{"jarjestelmavastuuhenkilo" "Järjestelmävastuuhenkilö"
"tilaajan kayttaja" " Tilaajan käyttäjä"
"urakanvalvoja" "Urakanvalvoja"
" " " Väylämuodon vastuuhenkilö "
"hallintayksikon vastuuhenkilo" "Hallintayksikön vastuuhenkilö"
"liikennepäivystäjä" "Liikennepäivystäjä"
"tilaajan asiantuntija" "Tilaajan asiantuntija"
"tilaajan laadunvalvontakonsultti" "Tilaajan laadunvalvontakonsultti"
"urakoitsijan paakayttaja" "Urakoitsijan pääkäyttäjä"
"urakoitsijan urakan vastuuhenkilo" "Urakoitsijan urakan vastuuhenkilö"
"urakoitsijan kayttaja" "Urakoitsijan käyttäjä"
"urakoitsijan laatuvastaava" "Urakoitsijan laatuvastaava"})
(defn rooli->kuvaus
"Antaa roolin ihmisen luettavan kuvauksen käyttöliittymää varten."
[rooli]
(get +rooli->kuvaus+ rooli))
(defn urakkaroolit
"Palauttaa setin rooleja, joita käyttäjällä on annetussa urakassa."
#?(:cljs ([urakka-id] (urakkaroolit @istunto/kayttaja urakka-id)))
([{r :urakkaroolit} urakka-id]
(or (get r urakka-id) #{})))
(defn organisaatioroolit
"Palauttaa setin rooleja, joita käyttäjällä on annetussa organisaatiossa."
([kayttaja]
(organisaatioroolit kayttaja (get-in kayttaja [:organisaatio :id])))
([kayttaja organisaatio-id]
(get (:organisaatioroolit kayttaja) organisaatio-id)))
(defn organisaation-urakka?
"Tarkistaa onko annettu urakka käyttäjän organisaation oma urakka.
Oma urakka on urakka, jossa käyttäjän organisaatio on hallintayksikkö tai
urakoitsija."
[{urakat :organisaation-urakat} urakka-id]
(and urakat
(urakat urakka-id)))
(defn roolissa?
"Tarkistaa onko käyttäjällä tietty rooli. Rooli voi olla joko yksittäinen rooli
tai setti rooleja. Jos annetaan setti, tarkistetaan onko käyttäjällä joku annetuista
rooleista."
#?(:cljs ([rooli] (roolissa? @istunto/kayttaja rooli)))
([kayttaja rooli]
(let [roolit (if (set? rooli)
rooli
#{rooli})]
(if (or (some roolit (:roolit kayttaja))
(some (fn [organisaatioroolit]
(some roolit organisaatioroolit))
(vals (:organisaatioroolit kayttaja))))
true
false))))
(defn jvh? [kayttaja]
(roolissa? kayttaja jarjestelmavastaava))
(defn rooli-urakassa?
"Tarkistaa onko käyttäjällä tietty rooli urakassa."
#?(:cljs ([rooli urakka-id] (rooli-urakassa? @istunto/kayttaja rooli urakka-id)))
([kayttaja rooli urakka-id]
(let [roolit (if (set? rooli)
rooli
#{rooli})]
(or
on
(some roolit (urakkaroolit kayttaja urakka-id))
Tai käyttäjällä on on organisaation urakka
(and (organisaation-urakka? kayttaja urakka-id)
(some roolit (organisaatioroolit kayttaja)))))))
(defn rooli-jossain-urakassa?
"Tarkistaa onko käyttäjällä tietty rooli jossain urakassa (ei väliä missä)."
#?(:cljs ([rooli] (rooli-jossain-urakassa? @istunto/kayttaja rooli)))
([kayttaja rooli]
(some (partial rooli-urakassa? kayttaja rooli)
(keys (:urakkaroolit kayttaja)))))
;; VAIN BACKILLÄ
#?(:clj
(defn vaadi-rooli
[kayttaja rooli]
(when-not (roolissa? kayttaja rooli)
(let [viesti (format "Käyttäjällä '%1$s' ei vaadittua roolia '%2$s'", (:kayttajanimi kayttaja) rooli)]
(log/warn viesti)
(throw+ (->EiOikeutta viesti))))))
#?(:clj
(defn vaadi-rooli-urakassa
[kayttaja rooli urakka-id]
(when-not (rooli-urakassa? kayttaja rooli urakka-id)
(let [viesti (format "Käyttäjällä '%1$s' ei vaadittua roolia '%2$s' urakassa jonka id on %3$s",
(:kayttajanimi kayttaja) rooli urakka-id)]
(log/warn viesti)
(throw+ (->EiOikeutta viesti))))))
(defn voi-kirjata-toteumia?
"Käyttäjä voi kirjata toteumia, jos hänellä on toteumien kirjauksen rooli
tai jos hän on urakan urakoitsijaorganisaation pääkäyttäjä"
#?(:cljs ([urakka-id] (voi-kirjata-toteumia? @istunto/kayttaja urakka-id)))
([kayttaja urakka-id]
(or (rooli-urakassa? kayttaja toteumien-kirjaus urakka-id)
(and (organisaation-urakka? kayttaja urakka-id)
(roolissa? kayttaja urakoitsijan-paakayttaja)))))
(defn lukuoikeus-kaikkiin-urakoihin?
"Käyttäjä voi nähdä kaikki urakat, jos hän on tilaajaorganisaation edustaja (ELY tai LIVI)"
#?(:cljs ([] (lukuoikeus-kaikkiin-urakoihin? @istunto/kayttaja)))
([kayttaja]
(roolissa? kayttaja jarjestelmavastaava)))
(defn osapuoli
"Päättelee kuka osapuoli on kyseessä käyttäjän organisaation perusteella.
Palauttaa avainsanan :urakoitsija, :konsultti tai :tilaaja."
[kayttaja]
(case (name (or (get-in kayttaja [:organisaatio :tyyppi]) "tilaaja"))
"liikennevirasto" :tilaaja
"hallintayksikko" :tilaaja
"tilaajan-konsultti" :konsultti
"urakoitsija" :urakoitsija
:urakoitsija))
(defn tilaajan-kayttaja?
[kayttaja]
(= :tilaaja (osapuoli kayttaja)))
(defn liikenneviraston-kayttaja? [kayttaja]
(= "liikennevirasto" (get-in kayttaja [:organisaatio :tyyppi])))
(defn ely-kayttaja? [kayttaja]
(= "hallintayksikko" (get-in kayttaja [:organisaatio :tyyppi])))
(defn urakoitsija?
[kayttaja]
(= :urakoitsija (osapuoli kayttaja)))
(defn kayttaja-on-laajasti-ottaen-tilaaja?
"Poikkeuksellisen laajasti katsottuna joissakin tilanteissa ELY_Urakanvalvoja katsotaan tilaajaksi. Näin
ainakin paikkauksissa ja lupauksissa. Tämän funktion olemassaolo on ristiriidassa roolit excelin kanssa ja
tämä tulisi poistaa ja asia korjata jotenkin muuten. Mutta koska kiire, niin mennään nyt tällä."
[roolit kayttaja]
(let [roolit (if (set? roolit)
roolit
#{roolit})
tilaajaroolit #{"Jarjestelmavastaava"
"Tilaajan_Asiantuntija"
"Tilaajan_Kayttaja"
"Tilaajan_Urakanvalvoja"
"Tilaajan_laadunvalvoja"
"Tilaajan_turvallisuusvastaava"
"Tilaajan_Rakennuttajakonsultti"
"ELY_Urakanvalvoja"}]
Järjestelmävastaava on on päällystysurakka tyyppinä on
;; myös aina tilaaja
(if (or
(jvh? kayttaja)
(= :tilaaja (osapuoli kayttaja)))
true
(some (fn [rooli]
(true?
(some #(= rooli %) tilaajaroolit)))
roolit))))
(defn voi-nahda-raportit?
"Käyttäjä voi nähdä raportit, jos hän on tilaajaorganisaation edustaja (ELY tai LIVI)"
#?(:cljs ([] (voi-nahda-raportit? @istunto/kayttaja)))
([kayttaja]
(tilaajan-kayttaja? kayttaja)))
;;VAIN FRONTILLA
#?(:cljs
(defn jos-rooli-urakassa
"Palauttaa komponentin käyttöliittymään jos käyttäjän rooli sallii.
Palauttaa muutoin-komponentin jos ei kyseistä roolia."
([rooli urakka-id sitten] (jos-rooli-urakassa rooli urakka-id sitten nil))
([rooli urakka-id sitten muutoin]
ei onnistunut 2
(if (rooli-urakassa? @istunto/kayttaja rooli urakka-id)
sitten
(let [viesti (str "Käyttäjällä '" (:kayttajanimi @istunto/kayttaja) "' ei vaadittua roolia '" rooli "' urakassa " urakka-id)]
(log/debug viesti)
muutoin)))))
#?(:cljs
(defn jos-rooli
"Palauttaa komponentin käyttöliittymään jos käyttäjän rooli sallii.
Palauttaa muutoin-komponentin jos ei kyseistä roolia. Annettu rooli voi olla
joko yksittäinen rooli tai joukko useita rooleja. Jos joukko, tarkistetaan että
käyttäjällä on joku annetuista rooleista."
([rooli sitten] (jos-rooli rooli sitten nil))
([rooli sitten muutoin]
(if (and @istunto/kayttaja
(or (and (set? rooli)
(some roolissa? rooli))
(roolissa? rooli)))
sitten
(let [viesti (str "Käyttäjällä '" (:kayttajanimi @istunto/kayttaja) "' ei vaadittua roolia '" rooli)]
(log/debug viesti)
muutoin)))))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/23a2e6a38b3482e3af0fb988ff567640091d25e2/src/cljc/harja/domain/roolit.cljc | clojure | VAIN BACKILLÄ
myös aina tilaaja
VAIN FRONTILLA | (ns harja.domain.roolit
"Harjan käyttäjäroolit"
(:require
[clojure.set :refer [intersection]]
#?(:cljs [harja.tiedot.istunto :as istunto])
[taoensso.timbre :as log]
#?(:clj [slingshot.slingshot :refer [throw+]])))
(defrecord EiOikeutta [syy])
(defn ei-oikeutta? [arvo]
(instance? EiOikeutta arvo))
varten
(def tilaajan-rakennuttajakonsultti "tilaajan rakennuttajakonsultti")
(def ely-rakennuttajakonsultti "rakennuttajakonsultti")
UHA : uv
( " vaylamuodon vastuuhenkilo " )
(def liikennepaivystaja "liikennepaivystaja")
(def tilaajan-laadunvalvontakonsultti "tilaajan laadunvalvontakonsultti")
(def urakoitsijan-paakayttaja "urakoitsijan paakayttaja")
(def urakoitsijan-urakan-vastuuhenkilo "urakoitsijan urakan vastuuhenkilo")
(def urakoitsijan-laatuvastaava "urakoitsijan laatuvastaava")
(def jarjestelmavastaava "Jarjestelmavastaava")
olla osittain vanhentuneita ( 2021 ) .
Nykyiset tarkastaa roolit.xlsx - tiedoston otsikoista
tai kutsumalla ( keys ) REPListä .
(def tilaajan-urakanvalvoja "Tilaajan_Urakanvalvoja")
(def ely-urakanvalvoja "ELY_Urakanvalvoja")
(def ely-paakayttaja "ELY_Paakayttaja")
(def toteumien-kirjaus
"Roolit, joilla on oikeus kirjoittaa urakkaan toteumatietoja."
#{urakanvalvoja
urakoitsijan-paakayttaja
urakoitsijan-urakan-vastuuhenkilo
urakoitsijan-laatuvastaava})
(def laadunseuranta-kirjaus
"Roolit, joilla on oikeus kirjata laadunseurantaa urakkaan."
#{urakanvalvoja
urakoitsijan-paakayttaja
urakoitsijan-urakan-vastuuhenkilo
urakoitsijan-laatuvastaava
tilaajan-laadunvalvontakonsultti})
Tietokannan rooli enumin selvempi kuvaus
(def +rooli->kuvaus+
{"jarjestelmavastuuhenkilo" "Järjestelmävastuuhenkilö"
"tilaajan kayttaja" " Tilaajan käyttäjä"
"urakanvalvoja" "Urakanvalvoja"
" " " Väylämuodon vastuuhenkilö "
"hallintayksikon vastuuhenkilo" "Hallintayksikön vastuuhenkilö"
"liikennepäivystäjä" "Liikennepäivystäjä"
"tilaajan asiantuntija" "Tilaajan asiantuntija"
"tilaajan laadunvalvontakonsultti" "Tilaajan laadunvalvontakonsultti"
"urakoitsijan paakayttaja" "Urakoitsijan pääkäyttäjä"
"urakoitsijan urakan vastuuhenkilo" "Urakoitsijan urakan vastuuhenkilö"
"urakoitsijan kayttaja" "Urakoitsijan käyttäjä"
"urakoitsijan laatuvastaava" "Urakoitsijan laatuvastaava"})
(defn rooli->kuvaus
"Antaa roolin ihmisen luettavan kuvauksen käyttöliittymää varten."
[rooli]
(get +rooli->kuvaus+ rooli))
(defn urakkaroolit
"Palauttaa setin rooleja, joita käyttäjällä on annetussa urakassa."
#?(:cljs ([urakka-id] (urakkaroolit @istunto/kayttaja urakka-id)))
([{r :urakkaroolit} urakka-id]
(or (get r urakka-id) #{})))
(defn organisaatioroolit
"Palauttaa setin rooleja, joita käyttäjällä on annetussa organisaatiossa."
([kayttaja]
(organisaatioroolit kayttaja (get-in kayttaja [:organisaatio :id])))
([kayttaja organisaatio-id]
(get (:organisaatioroolit kayttaja) organisaatio-id)))
(defn organisaation-urakka?
"Tarkistaa onko annettu urakka käyttäjän organisaation oma urakka.
Oma urakka on urakka, jossa käyttäjän organisaatio on hallintayksikkö tai
urakoitsija."
[{urakat :organisaation-urakat} urakka-id]
(and urakat
(urakat urakka-id)))
(defn roolissa?
"Tarkistaa onko käyttäjällä tietty rooli. Rooli voi olla joko yksittäinen rooli
tai setti rooleja. Jos annetaan setti, tarkistetaan onko käyttäjällä joku annetuista
rooleista."
#?(:cljs ([rooli] (roolissa? @istunto/kayttaja rooli)))
([kayttaja rooli]
(let [roolit (if (set? rooli)
rooli
#{rooli})]
(if (or (some roolit (:roolit kayttaja))
(some (fn [organisaatioroolit]
(some roolit organisaatioroolit))
(vals (:organisaatioroolit kayttaja))))
true
false))))
(defn jvh? [kayttaja]
(roolissa? kayttaja jarjestelmavastaava))
(defn rooli-urakassa?
"Tarkistaa onko käyttäjällä tietty rooli urakassa."
#?(:cljs ([rooli urakka-id] (rooli-urakassa? @istunto/kayttaja rooli urakka-id)))
([kayttaja rooli urakka-id]
(let [roolit (if (set? rooli)
rooli
#{rooli})]
(or
on
(some roolit (urakkaroolit kayttaja urakka-id))
Tai käyttäjällä on on organisaation urakka
(and (organisaation-urakka? kayttaja urakka-id)
(some roolit (organisaatioroolit kayttaja)))))))
(defn rooli-jossain-urakassa?
"Tarkistaa onko käyttäjällä tietty rooli jossain urakassa (ei väliä missä)."
#?(:cljs ([rooli] (rooli-jossain-urakassa? @istunto/kayttaja rooli)))
([kayttaja rooli]
(some (partial rooli-urakassa? kayttaja rooli)
(keys (:urakkaroolit kayttaja)))))
#?(:clj
(defn vaadi-rooli
[kayttaja rooli]
(when-not (roolissa? kayttaja rooli)
(let [viesti (format "Käyttäjällä '%1$s' ei vaadittua roolia '%2$s'", (:kayttajanimi kayttaja) rooli)]
(log/warn viesti)
(throw+ (->EiOikeutta viesti))))))
#?(:clj
(defn vaadi-rooli-urakassa
[kayttaja rooli urakka-id]
(when-not (rooli-urakassa? kayttaja rooli urakka-id)
(let [viesti (format "Käyttäjällä '%1$s' ei vaadittua roolia '%2$s' urakassa jonka id on %3$s",
(:kayttajanimi kayttaja) rooli urakka-id)]
(log/warn viesti)
(throw+ (->EiOikeutta viesti))))))
(defn voi-kirjata-toteumia?
"Käyttäjä voi kirjata toteumia, jos hänellä on toteumien kirjauksen rooli
tai jos hän on urakan urakoitsijaorganisaation pääkäyttäjä"
#?(:cljs ([urakka-id] (voi-kirjata-toteumia? @istunto/kayttaja urakka-id)))
([kayttaja urakka-id]
(or (rooli-urakassa? kayttaja toteumien-kirjaus urakka-id)
(and (organisaation-urakka? kayttaja urakka-id)
(roolissa? kayttaja urakoitsijan-paakayttaja)))))
(defn lukuoikeus-kaikkiin-urakoihin?
"Käyttäjä voi nähdä kaikki urakat, jos hän on tilaajaorganisaation edustaja (ELY tai LIVI)"
#?(:cljs ([] (lukuoikeus-kaikkiin-urakoihin? @istunto/kayttaja)))
([kayttaja]
(roolissa? kayttaja jarjestelmavastaava)))
(defn osapuoli
"Päättelee kuka osapuoli on kyseessä käyttäjän organisaation perusteella.
Palauttaa avainsanan :urakoitsija, :konsultti tai :tilaaja."
[kayttaja]
(case (name (or (get-in kayttaja [:organisaatio :tyyppi]) "tilaaja"))
"liikennevirasto" :tilaaja
"hallintayksikko" :tilaaja
"tilaajan-konsultti" :konsultti
"urakoitsija" :urakoitsija
:urakoitsija))
(defn tilaajan-kayttaja?
[kayttaja]
(= :tilaaja (osapuoli kayttaja)))
(defn liikenneviraston-kayttaja? [kayttaja]
(= "liikennevirasto" (get-in kayttaja [:organisaatio :tyyppi])))
(defn ely-kayttaja? [kayttaja]
(= "hallintayksikko" (get-in kayttaja [:organisaatio :tyyppi])))
(defn urakoitsija?
[kayttaja]
(= :urakoitsija (osapuoli kayttaja)))
(defn kayttaja-on-laajasti-ottaen-tilaaja?
"Poikkeuksellisen laajasti katsottuna joissakin tilanteissa ELY_Urakanvalvoja katsotaan tilaajaksi. Näin
ainakin paikkauksissa ja lupauksissa. Tämän funktion olemassaolo on ristiriidassa roolit excelin kanssa ja
tämä tulisi poistaa ja asia korjata jotenkin muuten. Mutta koska kiire, niin mennään nyt tällä."
[roolit kayttaja]
(let [roolit (if (set? roolit)
roolit
#{roolit})
tilaajaroolit #{"Jarjestelmavastaava"
"Tilaajan_Asiantuntija"
"Tilaajan_Kayttaja"
"Tilaajan_Urakanvalvoja"
"Tilaajan_laadunvalvoja"
"Tilaajan_turvallisuusvastaava"
"Tilaajan_Rakennuttajakonsultti"
"ELY_Urakanvalvoja"}]
Järjestelmävastaava on on päällystysurakka tyyppinä on
(if (or
(jvh? kayttaja)
(= :tilaaja (osapuoli kayttaja)))
true
(some (fn [rooli]
(true?
(some #(= rooli %) tilaajaroolit)))
roolit))))
(defn voi-nahda-raportit?
"Käyttäjä voi nähdä raportit, jos hän on tilaajaorganisaation edustaja (ELY tai LIVI)"
#?(:cljs ([] (voi-nahda-raportit? @istunto/kayttaja)))
([kayttaja]
(tilaajan-kayttaja? kayttaja)))
#?(:cljs
(defn jos-rooli-urakassa
"Palauttaa komponentin käyttöliittymään jos käyttäjän rooli sallii.
Palauttaa muutoin-komponentin jos ei kyseistä roolia."
([rooli urakka-id sitten] (jos-rooli-urakassa rooli urakka-id sitten nil))
([rooli urakka-id sitten muutoin]
ei onnistunut 2
(if (rooli-urakassa? @istunto/kayttaja rooli urakka-id)
sitten
(let [viesti (str "Käyttäjällä '" (:kayttajanimi @istunto/kayttaja) "' ei vaadittua roolia '" rooli "' urakassa " urakka-id)]
(log/debug viesti)
muutoin)))))
#?(:cljs
(defn jos-rooli
"Palauttaa komponentin käyttöliittymään jos käyttäjän rooli sallii.
Palauttaa muutoin-komponentin jos ei kyseistä roolia. Annettu rooli voi olla
joko yksittäinen rooli tai joukko useita rooleja. Jos joukko, tarkistetaan että
käyttäjällä on joku annetuista rooleista."
([rooli sitten] (jos-rooli rooli sitten nil))
([rooli sitten muutoin]
(if (and @istunto/kayttaja
(or (and (set? rooli)
(some roolissa? rooli))
(roolissa? rooli)))
sitten
(let [viesti (str "Käyttäjällä '" (:kayttajanimi @istunto/kayttaja) "' ei vaadittua roolia '" rooli)]
(log/debug viesti)
muutoin)))))
|
77ddc8271ba4a41573fc128329828a7d2a251389a01d946c3bf3d467080a195c | clojure-interop/java-jdk | XPathException.clj | (ns javax.xml.xpath.XPathException
"XPathException represents a generic XPath exception."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.xpath XPathException]))
(defn ->x-path-exception
"Constructor.
Constructs a new XPathException
with the specified detail message.
The cause is not initialized.
If message is null,
then a NullPointerException is thrown.
message - The detail message. - `java.lang.String`
throws: java.lang.NullPointerException - When message is null."
(^XPathException [^java.lang.String message]
(new XPathException message)))
(defn get-cause
"Get the cause of this XPathException.
returns: Cause of this XPathException. - `java.lang.Throwable`"
(^java.lang.Throwable [^XPathException this]
(-> this (.getCause))))
(defn print-stack-trace
"Print stack trace to specified PrintStream.
s - Print stack trace to this PrintStream. - `java.io.PrintStream`"
([^XPathException this ^java.io.PrintStream s]
(-> this (.printStackTrace s)))
([^XPathException this]
(-> this (.printStackTrace))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.xml/src/javax/xml/xpath/XPathException.clj | clojure | (ns javax.xml.xpath.XPathException
"XPathException represents a generic XPath exception."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.xpath XPathException]))
(defn ->x-path-exception
"Constructor.
Constructs a new XPathException
with the specified detail message.
The cause is not initialized.
If message is null,
then a NullPointerException is thrown.
message - The detail message. - `java.lang.String`
throws: java.lang.NullPointerException - When message is null."
(^XPathException [^java.lang.String message]
(new XPathException message)))
(defn get-cause
"Get the cause of this XPathException.
returns: Cause of this XPathException. - `java.lang.Throwable`"
(^java.lang.Throwable [^XPathException this]
(-> this (.getCause))))
(defn print-stack-trace
"Print stack trace to specified PrintStream.
s - Print stack trace to this PrintStream. - `java.io.PrintStream`"
([^XPathException this ^java.io.PrintStream s]
(-> this (.printStackTrace s)))
([^XPathException this]
(-> this (.printStackTrace))))
| |
b6145f95ac9ff51f185de3c06247eddd06095360bd871dfecba51a4f52a04a4f | soulomoon/haskell-katas | Isomorphism.hs | module Katas.Isomorphism where
import Kyu3.Isomorphism
import Test.Hspec
import Test.QuickCheck
import Data.Either
bISO :: ISO Bool Bool
bISO = (not, not)
lrl :: ISO a b -> (a -> a)
lrl (ab, ba) = ba . ab
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "IsomorphString" $ do
describe "subst" $ do
it "substL" $ do
substL bISO True `shouldBe` False
substL bISO False `shouldBe` True
substL isoBool False `shouldBe` False
substL isoBool True `shouldBe` True
it "substR" $ do
substR bISO True `shouldBe` False
substR bISO False `shouldBe` True
substR isoBool True `shouldBe` True
substR isoBool False `shouldBe` False
it "isoEU" $ do
isLeft (substL isoEU (Right ())) `shouldBe` True
it "lrl isoEU (Left (replicate n ())) == Left (replicate n ())" $
property $ \(NonNegative n) ->
lrl isoEU (Left (replicate n ())) == Left (replicate n ())
| null | https://raw.githubusercontent.com/soulomoon/haskell-katas/0861338e945e5cbaadf98138cf8f5f24a6ca8bb3/test/Katas/Isomorphism.hs | haskell | module Katas.Isomorphism where
import Kyu3.Isomorphism
import Test.Hspec
import Test.QuickCheck
import Data.Either
bISO :: ISO Bool Bool
bISO = (not, not)
lrl :: ISO a b -> (a -> a)
lrl (ab, ba) = ba . ab
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "IsomorphString" $ do
describe "subst" $ do
it "substL" $ do
substL bISO True `shouldBe` False
substL bISO False `shouldBe` True
substL isoBool False `shouldBe` False
substL isoBool True `shouldBe` True
it "substR" $ do
substR bISO True `shouldBe` False
substR bISO False `shouldBe` True
substR isoBool True `shouldBe` True
substR isoBool False `shouldBe` False
it "isoEU" $ do
isLeft (substL isoEU (Right ())) `shouldBe` True
it "lrl isoEU (Left (replicate n ())) == Left (replicate n ())" $
property $ \(NonNegative n) ->
lrl isoEU (Left (replicate n ())) == Left (replicate n ())
| |
d1e55cd76fabd751225bb8326c970ddad5f248e05e71a75cd35f26506fdc8592 | eugenehr/erlyconv | iso8859_8.erl | %% THIS FILE WAS AUTOMATICALLY GENERATED BY gen_src.pl
FROM mappings / ISO8859/8859 - 8.TXT AT 2016 - 08 - 19
-module(iso8859_8).
-vsn(20160819).
-export([to_unicode/1, from_unicode/1]).
%% Public functions

%% Map an ISO 8859-8 (Latin/Hebrew) byte to its Unicode code point.
%% Also accepts a list of bytes (mapped element-wise) or a binary
%% (converted to a UTF-8 binary).  Bytes with no explicit clause map
%% to themselves (final Other clause).
to_unicode(16#AA) -> 16#00D7;
to_unicode(16#BA) -> 16#00F7;
to_unicode(16#DF) -> 16#2017;
to_unicode(16#E0) -> 16#05D0;
to_unicode(16#E1) -> 16#05D1;
to_unicode(16#E2) -> 16#05D2;
to_unicode(16#E3) -> 16#05D3;
to_unicode(16#E4) -> 16#05D4;
to_unicode(16#E5) -> 16#05D5;
to_unicode(16#E6) -> 16#05D6;
to_unicode(16#E7) -> 16#05D7;
to_unicode(16#E8) -> 16#05D8;
to_unicode(16#E9) -> 16#05D9;
to_unicode(16#EA) -> 16#05DA;
to_unicode(16#EB) -> 16#05DB;
to_unicode(16#EC) -> 16#05DC;
to_unicode(16#ED) -> 16#05DD;
to_unicode(16#EE) -> 16#05DE;
to_unicode(16#EF) -> 16#05DF;
to_unicode(16#F0) -> 16#05E0;
to_unicode(16#F1) -> 16#05E1;
to_unicode(16#F2) -> 16#05E2;
to_unicode(16#F3) -> 16#05E3;
to_unicode(16#F4) -> 16#05E4;
to_unicode(16#F5) -> 16#05E5;
to_unicode(16#F6) -> 16#05E6;
to_unicode(16#F7) -> 16#05E7;
to_unicode(16#F8) -> 16#05E8;
to_unicode(16#F9) -> 16#05E9;
to_unicode(16#FA) -> 16#05EA;
to_unicode(16#FD) -> 16#200E;
to_unicode(16#FE) -> 16#200F;
to_unicode(List) when is_list(List) -> [to_unicode(C) || C <- List];
to_unicode(Bin) when is_binary(Bin) -> bin_to_unicode(Bin, <<>>);
to_unicode(Other) -> Other.

%% Inverse mapping: Unicode code point(s) back to ISO 8859-8 bytes.
%% Accepts a code point, a list of code points, or a UTF-8 binary.
%% Code points with no explicit clause pass through unchanged.
from_unicode(16#00D7) -> 16#AA;
from_unicode(16#00F7) -> 16#BA;
from_unicode(16#2017) -> 16#DF;
from_unicode(16#05D0) -> 16#E0;
from_unicode(16#05D1) -> 16#E1;
from_unicode(16#05D2) -> 16#E2;
from_unicode(16#05D3) -> 16#E3;
from_unicode(16#05D4) -> 16#E4;
from_unicode(16#05D5) -> 16#E5;
from_unicode(16#05D6) -> 16#E6;
from_unicode(16#05D7) -> 16#E7;
from_unicode(16#05D8) -> 16#E8;
from_unicode(16#05D9) -> 16#E9;
from_unicode(16#05DA) -> 16#EA;
from_unicode(16#05DB) -> 16#EB;
from_unicode(16#05DC) -> 16#EC;
from_unicode(16#05DD) -> 16#ED;
from_unicode(16#05DE) -> 16#EE;
from_unicode(16#05DF) -> 16#EF;
from_unicode(16#05E0) -> 16#F0;
from_unicode(16#05E1) -> 16#F1;
from_unicode(16#05E2) -> 16#F2;
from_unicode(16#05E3) -> 16#F3;
from_unicode(16#05E4) -> 16#F4;
from_unicode(16#05E5) -> 16#F5;
from_unicode(16#05E6) -> 16#F6;
from_unicode(16#05E7) -> 16#F7;
from_unicode(16#05E8) -> 16#F8;
from_unicode(16#05E9) -> 16#F9;
from_unicode(16#05EA) -> 16#FA;
from_unicode(16#200E) -> 16#FD;
from_unicode(16#200F) -> 16#FE;
from_unicode(List) when is_list(List) -> [from_unicode(C) || C <- List];
from_unicode(Bin) when is_binary(Bin) -> bin_from_unicode(Bin, <<>>);
from_unicode(Other) -> Other.
%% Private functions

%% Translate each byte of Bin via to_unicode/1 and append it to the
%% accumulator as UTF-8.
bin_to_unicode(<<>>, Bin) -> Bin;
bin_to_unicode(<<C, Rest/binary>>, Acc) ->
    U = to_unicode(C),
    bin_to_unicode(Rest, <<Acc/binary, U/utf8>>).

%% Decode each UTF-8 code point of the input and append its single
%% ISO 8859-8 byte to the accumulator.
bin_from_unicode(<<>>, Bin) -> Bin;
bin_from_unicode(<<U/utf8, Rest/binary>>, Acc) ->
    C = from_unicode(U),
    bin_from_unicode(Rest, <<Acc/binary, C>>).
| null | https://raw.githubusercontent.com/eugenehr/erlyconv/ecdcd7db8f785c9638cd1ebad37ccd426c050cdf/src/iso8859_8.erl | erlang | THIS FILE WAS AUTOMATICALLY GENERATED BY gen_src.pl
Public functions
Private functions | FROM mappings / ISO8859/8859 - 8.TXT AT 2016 - 08 - 19
-module(iso8859_8).
-vsn(20160819).
-export([to_unicode/1, from_unicode/1]).
to_unicode(16#AA) -> 16#00D7;
to_unicode(16#BA) -> 16#00F7;
to_unicode(16#DF) -> 16#2017;
to_unicode(16#E0) -> 16#05D0;
to_unicode(16#E1) -> 16#05D1;
to_unicode(16#E2) -> 16#05D2;
to_unicode(16#E3) -> 16#05D3;
to_unicode(16#E4) -> 16#05D4;
to_unicode(16#E5) -> 16#05D5;
to_unicode(16#E6) -> 16#05D6;
to_unicode(16#E7) -> 16#05D7;
to_unicode(16#E8) -> 16#05D8;
to_unicode(16#E9) -> 16#05D9;
to_unicode(16#EA) -> 16#05DA;
to_unicode(16#EB) -> 16#05DB;
to_unicode(16#EC) -> 16#05DC;
to_unicode(16#ED) -> 16#05DD;
to_unicode(16#EE) -> 16#05DE;
to_unicode(16#EF) -> 16#05DF;
to_unicode(16#F0) -> 16#05E0;
to_unicode(16#F1) -> 16#05E1;
to_unicode(16#F2) -> 16#05E2;
to_unicode(16#F3) -> 16#05E3;
to_unicode(16#F4) -> 16#05E4;
to_unicode(16#F5) -> 16#05E5;
to_unicode(16#F6) -> 16#05E6;
to_unicode(16#F7) -> 16#05E7;
to_unicode(16#F8) -> 16#05E8;
to_unicode(16#F9) -> 16#05E9;
to_unicode(16#FA) -> 16#05EA;
to_unicode(16#FD) -> 16#200E;
to_unicode(16#FE) -> 16#200F;
to_unicode(List) when is_list(List) -> [to_unicode(C) || C <- List];
to_unicode(Bin) when is_binary(Bin) -> bin_to_unicode(Bin, <<>>);
to_unicode(Other) -> Other.
from_unicode(16#00D7) -> 16#AA;
from_unicode(16#00F7) -> 16#BA;
from_unicode(16#2017) -> 16#DF;
from_unicode(16#05D0) -> 16#E0;
from_unicode(16#05D1) -> 16#E1;
from_unicode(16#05D2) -> 16#E2;
from_unicode(16#05D3) -> 16#E3;
from_unicode(16#05D4) -> 16#E4;
from_unicode(16#05D5) -> 16#E5;
from_unicode(16#05D6) -> 16#E6;
from_unicode(16#05D7) -> 16#E7;
from_unicode(16#05D8) -> 16#E8;
from_unicode(16#05D9) -> 16#E9;
from_unicode(16#05DA) -> 16#EA;
from_unicode(16#05DB) -> 16#EB;
from_unicode(16#05DC) -> 16#EC;
from_unicode(16#05DD) -> 16#ED;
from_unicode(16#05DE) -> 16#EE;
from_unicode(16#05DF) -> 16#EF;
from_unicode(16#05E0) -> 16#F0;
from_unicode(16#05E1) -> 16#F1;
from_unicode(16#05E2) -> 16#F2;
from_unicode(16#05E3) -> 16#F3;
from_unicode(16#05E4) -> 16#F4;
from_unicode(16#05E5) -> 16#F5;
from_unicode(16#05E6) -> 16#F6;
from_unicode(16#05E7) -> 16#F7;
from_unicode(16#05E8) -> 16#F8;
from_unicode(16#05E9) -> 16#F9;
from_unicode(16#05EA) -> 16#FA;
from_unicode(16#200E) -> 16#FD;
from_unicode(16#200F) -> 16#FE;
from_unicode(List) when is_list(List) -> [from_unicode(C) || C <- List];
from_unicode(Bin) when is_binary(Bin) -> bin_from_unicode(Bin, <<>>);
from_unicode(Other) -> Other.
bin_to_unicode(<<>>, Bin) -> Bin;
bin_to_unicode(<<C, Rest/binary>>, Acc) ->
U = to_unicode(C),
bin_to_unicode(Rest, <<Acc/binary, U/utf8>>).
bin_from_unicode(<<>>, Bin) -> Bin;
bin_from_unicode(<<U/utf8, Rest/binary>>, Acc) ->
C = from_unicode(U),
bin_from_unicode(Rest, <<Acc/binary, C>>).
|
b2095cca92e836ba920476ae3ee7967c75b854c77485383dc33aa543c39ab146 | okeuday/erlbench | ntree.erl | -*-Mode : erlang;coding : utf-8;tab - width:4;c - basic - offset:4;indent - tabs - mode:()-*-
ex : set utf-8 sts=4 ts=4 sw=4 et nomod :
%%%
%%%------------------------------------------------------------------------
%%% @doc
= = N - tree data structure.==
Attempting to exploit flat tree hierarchies with an adjustable size N.
%%% @end
%%%
MIT License
%%%
Copyright ( c ) 2012 - 2017 < mjtruog at protonmail dot com >
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining a
%%% copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction , including without limitation
%%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software , and to permit persons to whom the
%%% Software is furnished to do so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
%%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
%%% DEALINGS IN THE SOFTWARE.
%%%
@author < mjtruog at protonmail dot com >
2012 - 2017
%%% @version 0.0.1 {@date} {@time}
%%%------------------------------------------------------------------------
-module(ntree).
-author('mjtruog at protonmail dot com').
%% external interface
-export([find/2,
new/0,
new/1,
store/3,
test/0]).
%%%------------------------------------------------------------------------
%%% External interface functions
%%%------------------------------------------------------------------------
%%-------------------------------------------------------------------------
%% @doc
%% ===Find a value.===
%% @end
%%-------------------------------------------------------------------------
%% A root element container is a plain list while the node is sparse,
%% and a tuple (binary-searched) once it has filled to N entries.
find(K, {N, _, Elements}) when is_list(Elements) ->
    find_value_list(K, Elements, N);
find(K, {N, _, Elements}) when is_tuple(Elements) ->
    find_value_tuple(erlang:round(N / 2), 1, N,
                     K, Elements, N).
%% Linear scan of a sorted element list.  Entries are either leaf pairs
%% {Key, Value} or subtree descriptors {KLow, KHigh, Count, Children}.
find_value_list(K, [{K, V} | _], _) ->
    {ok, V};
%% Leaf key below K: keep scanning rightwards.
find_value_list(K, [{K1, _} | Elements], N) when K1 < K ->
    find_value_list(K, Elements, N);
%% K lies inside a subtree's [KT1, KT2] range: descend into it.
find_value_list(K, [{KT1, KT2, _, ElementsNext} | _], N)
        when K >= KT1, K =< KT2 ->
    if
        is_list(ElementsNext) ->
            find_value_list(K, ElementsNext, N);
        is_tuple(ElementsNext) ->
            find_value_tuple(erlang:round(N / 2), 1, N, K, ElementsNext, N)
    end;
%% Subtree entirely below K: skip it.
find_value_list(K, [{_, KT2, _, _} | Elements], N) when KT2 < K ->
    find_value_list(K, Elements, N);
%% List exhausted, or the next entry already exceeds K: not present.
find_value_list(_, _, _) ->
    error.
%% Binary search for K inside a full (tuple) node of width N.
%% I is the probe index and [LeftI, RightI] the current search window;
%% once the probe sits on a window edge, that element is decisive.
%% NOTE(review): the clause head "{K1, _} when K1 < K ->" below had
%% been reduced to the bare guard text "K1 < K" in this copy of the
%% source (comment-extraction artefact).  It is reconstructed here to
%% mirror the K1 > K clause, narrowing the window to the right half.
find_value_tuple(I, LeftI, RightI, K, Elements, N)
        when I =:= LeftI; I =:= RightI ->
    case erlang:element(I, Elements) of
        {K, V} ->
            {ok, V};
        %% Decisive leaf with a different key: not present.
        {_, _} ->
            error;
        %% Decisive subtree whose range excludes K: not present.
        {KT1, KT2, _, _} when K < KT1; K > KT2 ->
            error;
        {_, _, _, ElementsNext} when is_list(ElementsNext) ->
            find_value_list(K, ElementsNext, N);
        {_, _, _, ElementsNext} when is_tuple(ElementsNext) ->
            find_value_tuple(erlang:round(N / 2), 1, N,
                             K, ElementsNext, N)
    end;
find_value_tuple(I, LeftI, RightI, K, Elements, N) ->
    case erlang:element(I, Elements) of
        {K, V} ->
            {ok, V};
        {K1, _} when K1 > K ->
            if
                LeftI =:= I - 1 ->
                    find_value_tuple(LeftI, LeftI, I, K, Elements, N);
                true ->
                    find_value_tuple(erlang:round((LeftI + I) / 2),
                                     LeftI, I, K, Elements, N)
            end;
        {K1, _} when K1 < K ->
            if
                RightI =:= I + 1 ->
                    find_value_tuple(RightI, I, RightI, K, Elements, N);
                true ->
                    find_value_tuple(erlang:round((I + RightI) / 2),
                                     I, RightI, K, Elements, N)
            end;
        {KT1, _, _, _} when KT1 > K ->
            if
                LeftI =:= I - 1 ->
                    find_value_tuple(LeftI, LeftI, I, K, Elements, N);
                true ->
                    find_value_tuple(erlang:round((LeftI + I) / 2),
                                     LeftI, I, K, Elements, N)
            end;
        {_, KT2, _, _} when KT2 < K ->
            if
                RightI =:= I + 1 ->
                    find_value_tuple(RightI, I, RightI, K, Elements, N);
                true ->
                    find_value_tuple(erlang:round((I + RightI) / 2),
                                     I, RightI, K, Elements, N)
            end;
        {_, _, _, ElementsNext} when is_list(ElementsNext) ->
            find_value_list(K, ElementsNext, N);
        {_, _, _, ElementsNext} when is_tuple(ElementsNext) ->
            find_value_tuple(erlang:round(N / 2), 1, N,
                             K, ElementsNext, N)
    end.
%%-------------------------------------------------------------------------
%% @doc
%% ===Create a new instance.===
%% tree:
{ N , L_length , L_list }
%% each element of L:
%% {K, V} (or)
{ K_low , K_high , L_length , L_list }
%% @end
%%-------------------------------------------------------------------------
%% Default branching factor is 10.
new() ->
    new(10).

%% Empty tree {N, Count, Elements} with branching factor N (>= 2).
new(N) when is_integer(N), N >= 2 ->
    {N, 0, []}.
%%-------------------------------------------------------------------------
%% @doc
%% ===Store a key-value pair.===
%% @end
%%-------------------------------------------------------------------------
%% Insert or replace {K, V}.  A root that has reached N entries is
%% handled by the tuple path; a still-list root is first converted with
%% list_to_tuple so subsequent probes can binary-search it.
store(K, V, {N, N, Elements}) when is_list(Elements) ->
    {N, N,
     store_element_1_tuple(erlang:round(N / 2), 1, N,
                           erlang:list_to_tuple(Elements), K, V, N)};
store(K, V, {N, N, Elements}) when is_tuple(Elements) ->
    {N, N,
     store_element_1_tuple(erlang:round(N / 2), 1, N,
                           Elements, K, V, N)};
%% First insertion into an empty tree.
store(K, V, {N, 0, []}) ->
    {N, 1, [{K, V}]};
%% Sparse root: insert into the list.  The exact count is recomputed
%% only when the list may have just reached capacity (a replacement
%% does not grow it), otherwise it is bumped optimistically.
store(K, V, {N, ElementCount, Elements}) ->
    ElementsNew = store_element_0_list(Elements, K, V, N),
    ElementCountNew = if
        ElementCount =:= N - 1 ->
            erlang:length(ElementsNew);
        true ->
            ElementCount + 1
    end,
    {N, ElementCountNew, ElementsNew}.
%% Insert {K1, V1} into a node list that is below capacity (the "0"
%% variant: the caller guarantees room at this level, so the list may
%% simply grow).  The list stays sorted; leaves are {Key, Value} and
%% subtrees are {KLow, KHigh, Count, Children}.
store_element_0_list([], K1, V1, _) ->
    [{K1, V1}];
%% Same key: replace the value in place.
store_element_0_list([{K1, _} | Elements], K1, V1, _) ->
    [{K1, V1} | Elements];
store_element_0_list([{K2, _} = E | Elements], K1, V1, N) when K2 < K1 ->
    [E | store_element_0_list(Elements, K1, V1, N)];
%% Next leaf key is larger: insert here.
store_element_0_list([{_, _} | _] = Elements, K1, V1, _) ->
    [{K1, V1} | Elements];
store_element_0_list([{_, KT2, _, _} = E |
                      Elements], K1, V1, N) when KT2 < K1 ->
    [E | store_element_0_list(Elements, K1, V1, N)];
%% Next subtree starts above K1: insert as a new leaf before it.
store_element_0_list([{KT1, _, _, _} |
                      _] = Elements, K1, V1, _) when KT1 > K1 ->
    [{K1, V1} | Elements];
%% Full subtree (Count =:= N): descend via the tuple path, converting
%% a list payload first.
store_element_0_list([{KT1, KT2, N, ElementsNext} |
                      Elements], K1, V1, N) ->
    if
        is_list(ElementsNext) ->
            [{KT1, KT2, N,
              store_element_1_tuple(erlang:round(N / 2), 1, N,
                                    erlang:list_to_tuple(ElementsNext),
                                    K1, V1, N)} | Elements];
        is_tuple(ElementsNext) ->
            [{KT1, KT2, N,
              store_element_1_tuple(erlang:round(N / 2), 1, N,
                                    ElementsNext,
                                    K1, V1, N)} | Elements]
    end;
%% Subtree with room: descend, recounting lazily as in store/3.
store_element_0_list([{KT1, KT2, ElementCountNext, ElementsNext} |
                      Elements], K1, V1, N) ->
    ElementsNextNew = store_element_0_list(ElementsNext, K1, V1, N),
    ElementCountNextNew = if
        ElementCountNext =:= N - 1 ->
            erlang:length(ElementsNextNew);
        true ->
            ElementCountNext + 1
    end,
    [{KT1, KT2, ElementCountNextNew, ElementsNextNew} | Elements].
%% Insert {K1, V1} into a node list that is already at capacity (the
%% "1" variant): instead of growing the list, overflow at the right
%% edge is absorbed by wrapping elements into a new subtree.
store_element_1_list([{K1, _} | Elements], K1, V1, _) ->
    [{K1, V1} | Elements];
store_element_1_list([{K2, _} = E | Elements], K1, V1, N) when K2 < K1 ->
    if
        Elements =:= [] ->
            %% Rightmost slot: pair the old leaf with the new one in a
            %% fresh 2-element subtree instead of lengthening the list.
            [{K2, K1, 2, [E, {K1, V1}]}];
        true ->
            [E | store_element_1_list(Elements, K1, V1, N)]
    end;
%% K1 sorts before the current leaf: wrap both into a subtree here.
store_element_1_list([{K2, _} = E | Elements], K1, V1, _) ->
    [{K1, K2, 2, [{K1, V1}, E]} | Elements];
store_element_1_list([{KT1, KT2, ElementCountNext, ElementsNext} = E |
                      Elements], K1, V1, N) when KT2 < K1 ->
    if
        Elements =:= [] ->
            %% Rightmost subtree: extend its upper bound to K1 and push
            %% the new pair down, recursing with the "1" variant when it
            %% is itself full.
            if
                ElementCountNext =:= N ->
                    [{KT1, K1, N,
                      store_element_1_list(ElementsNext, K1, V1, N)}];
                true ->
                    ElementsNextNew = store_element_0_list(ElementsNext,
                                                           K1, V1, N),
                    ElementCountNextNew = if
                        ElementCountNext =:= N - 1 ->
                            erlang:length(ElementsNextNew);
                        true ->
                            ElementCountNext + 1
                    end,
                    [{KT1, K1, ElementCountNextNew, ElementsNextNew}]
            end;
        true ->
            [E | store_element_1_list(Elements, K1, V1, N)]
    end;
%% Subtree containing (or adjacent below) K1 and full: descend with the
%% overflow-absorbing variant, widening the lower bound if needed.
store_element_1_list([{KT1, KT2, N, ElementsNext} |
                      Elements], K1, V1, N) ->
    [{erlang:min(KT1, K1), KT2, N,
      store_element_1_list(ElementsNext, K1, V1, N)} | Elements];
%% Subtree with room: plain descent plus lazy recount.
store_element_1_list([{KT1, KT2, ElementCountNext, ElementsNext} |
                      Elements], K1, V1, N) ->
    ElementsNextNew = store_element_0_list(ElementsNext, K1, V1, N),
    ElementCountNextNew = if
        ElementCountNext =:= N - 1 ->
            erlang:length(ElementsNextNew);
        true ->
            ElementCountNext + 1
    end,
    [{erlang:min(KT1, K1), KT2,
      ElementCountNextNew, ElementsNextNew} | Elements].
%% Insert {K1, V1} into a full (tuple) node by binary search; the slot
%% found either gets its value replaced, becomes a 2-element subtree,
%% or has the pair pushed into its existing subtree.
%% NOTE(review): two clause heads in this copy of the source had been
%% reduced to bare guard text ("K2 < K1" twice; a comment-extraction
%% artefact).  They are reconstructed by symmetry with their K2 > K1
%% counterparts: the first binds E (its body embeds the old leaf in a
%% new subtree), the second does not (it only narrows the window).
store_element_1_tuple(I, LeftI, RightI, Elements, K1, V1, N)
        when I =:= LeftI; I =:= RightI ->
    case erlang:element(I, Elements) of
        {K1, _} ->
            erlang:setelement(I, Elements, {K1, V1});
        {K2, _} = E when K2 > K1 ->
            erlang:setelement(I, Elements, {K1, K2, 2, [{K1, V1}, E]});
        {K2, _} = E when K2 < K1 ->
            erlang:setelement(I, Elements, {K2, K1, 2, [E, {K1, V1}]});
        {KT1, KT2, N, ElementsNext} when is_list(ElementsNext) ->
            erlang:setelement(I, Elements,
                {erlang:min(K1, KT1), erlang:max(K1, KT2), N,
                 store_element_1_tuple(erlang:round(N / 2), 1, N,
                                       erlang:list_to_tuple(ElementsNext),
                                       K1, V1, N)});
        {KT1, KT2, N, ElementsNext} when is_tuple(ElementsNext) ->
            erlang:setelement(I, Elements,
                {erlang:min(K1, KT1), erlang:max(K1, KT2), N,
                 store_element_1_tuple(erlang:round(N / 2), 1, N,
                                       ElementsNext, K1, V1, N)});
        {KT1, KT2, ElementCountNext, ElementsNext} ->
            ElementsNextNew = store_element_1_list(ElementsNext, K1, V1, N),
            ElementCountNextNew = if
                ElementCountNext =:= N - 1 ->
                    erlang:length(ElementsNextNew);
                true ->
                    ElementCountNext + 1
            end,
            erlang:setelement(I, Elements,
                {erlang:min(K1, KT1), erlang:max(K1, KT2),
                 ElementCountNextNew, ElementsNextNew})
    end;
store_element_1_tuple(I, LeftI, RightI, Elements, K1, V1, N) ->
    case erlang:element(I, Elements) of
        {K1, _} ->
            erlang:setelement(I, Elements, {K1, V1});
        {K2, _} when K2 > K1 ->
            if
                LeftI =:= I - 1 ->
                    store_element_1_tuple(LeftI, LeftI, I,
                                          Elements, K1, V1, N);
                true ->
                    store_element_1_tuple(erlang:round((LeftI + I) / 2),
                                          LeftI, I,
                                          Elements, K1, V1, N)
            end;
        {K2, _} when K2 < K1 ->
            if
                RightI =:= I + 1 ->
                    store_element_1_tuple(RightI, I, RightI,
                                          Elements, K1, V1, N);
                true ->
                    store_element_1_tuple(erlang:round((I + RightI) / 2),
                                          I, RightI,
                                          Elements, K1, V1, N)
            end;
        {KT1, _, _, _} when KT1 > K1 ->
            if
                LeftI =:= I - 1 ->
                    store_element_1_tuple(LeftI, LeftI, I,
                                          Elements, K1, V1, N);
                true ->
                    store_element_1_tuple(erlang:round((LeftI + I) / 2),
                                          LeftI, I,
                                          Elements, K1, V1, N)
            end;
        {_, KT2, _, _} when KT2 < K1 ->
            if
                RightI =:= I + 1 ->
                    store_element_1_tuple(RightI, I, RightI,
                                          Elements, K1, V1, N);
                true ->
                    store_element_1_tuple(erlang:round((I + RightI) / 2),
                                          I, RightI,
                                          Elements, K1, V1, N)
            end;
        {KT1, KT2, N, ElementsNext} when is_list(ElementsNext) ->
            erlang:setelement(I, Elements,
                {erlang:min(K1, KT1), erlang:max(K1, KT2), N,
                 store_element_1_tuple(erlang:round(N / 2), 1, N,
                                       erlang:list_to_tuple(ElementsNext),
                                       K1, V1, N)});
        {KT1, KT2, N, ElementsNext} when is_tuple(ElementsNext) ->
            erlang:setelement(I, Elements,
                {erlang:min(K1, KT1), erlang:max(K1, KT2), N,
                 store_element_1_tuple(erlang:round(N / 2), 1, N,
                                       ElementsNext, K1, V1, N)});
        {KT1, KT2, ElementCountNext, ElementsNext} ->
            ElementsNextNew = store_element_1_list(ElementsNext, K1, V1, N),
            ElementCountNextNew = if
                ElementCountNext =:= N - 1 ->
                    erlang:length(ElementsNextNew);
                true ->
                    ElementCountNext + 1
            end,
            erlang:setelement(I, Elements,
                {erlang:min(K1, KT1), erlang:max(K1, KT2),
                 ElementCountNextNew, ElementsNextNew})
    end.
%%-------------------------------------------------------------------------
%% @doc
%% ===Internal test.===
%% @end
%%-------------------------------------------------------------------------
%% Self-test with branching factor 3: exercises empty/list/tuple root
%% states, subtree creation on overflow, and both hit and miss lookups
%% against a fully pinned final tree shape.
test() ->
    {3,0,[]} = A0 = ntree:new(3),
    {3,1,[{50,50}]} = A1 = ntree:store(50, 50, A0),
    {3,2,[{50,50},{100,100}]} = A2 = ntree:store(100, 100, A1),
    {3,3,[{25,25},{50,50},{100,100}]} = A3 = ntree:store(25, 25, A2),
    A4 = ntree:store(26, 26, A3),
    A5 = ntree:store(27, 27, A4),
    A6 = ntree:store(24, 24, A5),
    A7 = ntree:store(200, 200, A6),
    A8 = ntree:store(300, 300, A7),
    %% Expected shape after all eight insertions.
    {3,3,
     {{24,27,2,[{24,25,2,[{24,24},{25,25}]},{26,27,2,[{26,26},{27,27}]}]},
      {50,50},
      {100,300,2,[{100,100},{200,300,2,[{200,200},{300,300}]}]}}} = A8,
    error = ntree:find(23, A8),
    {ok, 24} = ntree:find(24, A8),
    {ok, 25} = ntree:find(25, A8),
    {ok, 26} = ntree:find(26, A8),
    error = ntree:find(28, A8),
    {ok, 50} = ntree:find(50, A8),
    error = ntree:find(400, A8),
    {ok, 300} = ntree:find(300, A8),
    ok.
| null | https://raw.githubusercontent.com/okeuday/erlbench/9fc02a2e748b287b85f6e9641db6b2ca68791fa4/src/ntree.erl | erlang |
------------------------------------------------------------------------
@doc
@end
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@version 0.0.1 {@date} {@time}
------------------------------------------------------------------------
external interface
------------------------------------------------------------------------
External interface functions
------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc
===Find a value.===
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc
===Create a new instance.===
tree:
each element of L:
{K, V} (or)
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc
===Store a key-value pair.===
@end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc
===Internal test.===
@end
------------------------------------------------------------------------- | -*-Mode : erlang;coding : utf-8;tab - width:4;c - basic - offset:4;indent - tabs - mode:()-*-
ex : set utf-8 sts=4 ts=4 sw=4 et nomod :
= = N - tree data structure.==
Attempting to exploit flat tree hierarchies with an adjustable size N.
MIT License
Copyright ( c ) 2012 - 2017 < mjtruog at protonmail dot com >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
@author < mjtruog at protonmail dot com >
2012 - 2017
-module(ntree).
-author('mjtruog at protonmail dot com').
-export([find/2,
new/0,
new/1,
store/3,
test/0]).
find(K, {N, _, Elements}) when is_list(Elements) ->
find_value_list(K, Elements, N);
find(K, {N, _, Elements}) when is_tuple(Elements) ->
find_value_tuple(erlang:round(N / 2), 1, N,
K, Elements, N).
find_value_list(K, [{K, V} | _], _) ->
{ok, V};
find_value_list(K, [{K1, _} | Elements], N) when K1 < K ->
find_value_list(K, Elements, N);
find_value_list(K, [{KT1, KT2, _, ElementsNext} | _], N)
when K >= KT1, K =< KT2 ->
if
is_list(ElementsNext) ->
find_value_list(K, ElementsNext, N);
is_tuple(ElementsNext) ->
find_value_tuple(erlang:round(N / 2), 1, N, K, ElementsNext, N)
end;
find_value_list(K, [{_, KT2, _, _} | Elements], N) when KT2 < K ->
find_value_list(K, Elements, N);
find_value_list(_, _, _) ->
error.
find_value_tuple(I, LeftI, RightI, K, Elements, N)
when I =:= LeftI; I =:= RightI ->
case erlang:element(I, Elements) of
{K, V} ->
{ok, V};
{_, _} ->
error;
{KT1, KT2, _, _} when K < KT1; K > KT2 ->
error;
{_, _, _, ElementsNext} when is_list(ElementsNext) ->
find_value_list(K, ElementsNext, N);
{_, _, _, ElementsNext} when is_tuple(ElementsNext) ->
find_value_tuple(erlang:round(N / 2), 1, N,
K, ElementsNext, N)
end;
find_value_tuple(I, LeftI, RightI, K, Elements, N) ->
case erlang:element(I, Elements) of
{K, V} ->
{ok, V};
{K1, _} when K1 > K ->
if
LeftI =:= I - 1 ->
find_value_tuple(LeftI, LeftI, I, K, Elements, N);
true ->
find_value_tuple(erlang:round((LeftI + I) / 2),
LeftI, I, K, Elements, N)
end;
K1 < K
if
RightI =:= I + 1 ->
find_value_tuple(RightI, I, RightI, K, Elements, N);
true ->
find_value_tuple(erlang:round((I + RightI) / 2),
I, RightI, K, Elements, N)
end;
{KT1, _, _, _} when KT1 > K ->
if
LeftI =:= I - 1 ->
find_value_tuple(LeftI, LeftI, I, K, Elements, N);
true ->
find_value_tuple(erlang:round((LeftI + I) / 2),
LeftI, I, K, Elements, N)
end;
{_, KT2, _, _} when KT2 < K ->
if
RightI =:= I + 1 ->
find_value_tuple(RightI, I, RightI, K, Elements, N);
true ->
find_value_tuple(erlang:round((I + RightI) / 2),
I, RightI, K, Elements, N)
end;
{_, _, _, ElementsNext} when is_list(ElementsNext) ->
find_value_list(K, ElementsNext, N);
{_, _, _, ElementsNext} when is_tuple(ElementsNext) ->
find_value_tuple(erlang:round(N / 2), 1, N,
K, ElementsNext, N)
end.
{ N , L_length , L_list }
{ K_low , K_high , L_length , L_list }
new() ->
new(10).
new(N) when is_integer(N), N >= 2 ->
{N, 0, []}.
store(K, V, {N, N, Elements}) when is_list(Elements) ->
{N, N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
erlang:list_to_tuple(Elements), K, V, N)};
store(K, V, {N, N, Elements}) when is_tuple(Elements) ->
{N, N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
Elements, K, V, N)};
store(K, V, {N, 0, []}) ->
{N, 1, [{K, V}]};
store(K, V, {N, ElementCount, Elements}) ->
ElementsNew = store_element_0_list(Elements, K, V, N),
ElementCountNew = if
ElementCount =:= N - 1 ->
erlang:length(ElementsNew);
true ->
ElementCount + 1
end,
{N, ElementCountNew, ElementsNew}.
store_element_0_list([], K1, V1, _) ->
[{K1, V1}];
store_element_0_list([{K1, _} | Elements], K1, V1, _) ->
[{K1, V1} | Elements];
store_element_0_list([{K2, _} = E | Elements], K1, V1, N) when K2 < K1 ->
[E | store_element_0_list(Elements, K1, V1, N)];
store_element_0_list([{_, _} | _] = Elements, K1, V1, _) ->
[{K1, V1} | Elements];
store_element_0_list([{_, KT2, _, _} = E |
Elements], K1, V1, N) when KT2 < K1 ->
[E | store_element_0_list(Elements, K1, V1, N)];
store_element_0_list([{KT1, _, _, _} |
_] = Elements, K1, V1, _) when KT1 > K1 ->
[{K1, V1} | Elements];
store_element_0_list([{KT1, KT2, N, ElementsNext} |
Elements], K1, V1, N) ->
if
is_list(ElementsNext) ->
[{KT1, KT2, N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
erlang:list_to_tuple(ElementsNext),
K1, V1, N)} | Elements];
is_tuple(ElementsNext) ->
[{KT1, KT2, N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
ElementsNext,
K1, V1, N)} | Elements]
end;
store_element_0_list([{KT1, KT2, ElementCountNext, ElementsNext} |
Elements], K1, V1, N) ->
ElementsNextNew = store_element_0_list(ElementsNext, K1, V1, N),
ElementCountNextNew = if
ElementCountNext =:= N - 1 ->
erlang:length(ElementsNextNew);
true ->
ElementCountNext + 1
end,
[{KT1, KT2, ElementCountNextNew, ElementsNextNew} | Elements].
store_element_1_list([{K1, _} | Elements], K1, V1, _) ->
[{K1, V1} | Elements];
store_element_1_list([{K2, _} = E | Elements], K1, V1, N) when K2 < K1 ->
if
Elements =:= [] ->
[{K2, K1, 2, [E, {K1, V1}]}];
true ->
[E | store_element_1_list(Elements, K1, V1, N)]
end;
store_element_1_list([{K2, _} = E | Elements], K1, V1, _) ->
[{K1, K2, 2, [{K1, V1}, E]} | Elements];
store_element_1_list([{KT1, KT2, ElementCountNext, ElementsNext} = E |
Elements], K1, V1, N) when KT2 < K1 ->
if
Elements =:= [] ->
if
ElementCountNext =:= N ->
[{KT1, K1, N,
store_element_1_list(ElementsNext, K1, V1, N)}];
true ->
ElementsNextNew = store_element_0_list(ElementsNext,
K1, V1, N),
ElementCountNextNew = if
ElementCountNext =:= N - 1 ->
erlang:length(ElementsNextNew);
true ->
ElementCountNext + 1
end,
[{KT1, K1, ElementCountNextNew, ElementsNextNew}]
end;
true ->
[E | store_element_1_list(Elements, K1, V1, N)]
end;
store_element_1_list([{KT1, KT2, N, ElementsNext} |
Elements], K1, V1, N) ->
[{erlang:min(KT1, K1), KT2, N,
store_element_1_list(ElementsNext, K1, V1, N)} | Elements];
store_element_1_list([{KT1, KT2, ElementCountNext, ElementsNext} |
Elements], K1, V1, N) ->
ElementsNextNew = store_element_0_list(ElementsNext, K1, V1, N),
ElementCountNextNew = if
ElementCountNext =:= N - 1 ->
erlang:length(ElementsNextNew);
true ->
ElementCountNext + 1
end,
[{erlang:min(KT1, K1), KT2,
ElementCountNextNew, ElementsNextNew} | Elements].
store_element_1_tuple(I, LeftI, RightI, Elements, K1, V1, N)
when I =:= LeftI; I =:= RightI ->
case erlang:element(I, Elements) of
{K1, _} ->
erlang:setelement(I, Elements, {K1, V1});
{K2, _} = E when K2 > K1 ->
erlang:setelement(I, Elements, {K1, K2, 2, [{K1, V1}, E]});
K2 < K1
erlang:setelement(I, Elements, {K2, K1, 2, [E, {K1, V1}]});
{KT1, KT2, N, ElementsNext} when is_list(ElementsNext) ->
erlang:setelement(I, Elements,
{erlang:min(K1, KT1), erlang:max(K1, KT2), N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
erlang:list_to_tuple(ElementsNext),
K1, V1, N)});
{KT1, KT2, N, ElementsNext} when is_tuple(ElementsNext) ->
erlang:setelement(I, Elements,
{erlang:min(K1, KT1), erlang:max(K1, KT2), N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
ElementsNext, K1, V1, N)});
{KT1, KT2, ElementCountNext, ElementsNext} ->
ElementsNextNew = store_element_1_list(ElementsNext, K1, V1, N),
ElementCountNextNew = if
ElementCountNext =:= N - 1 ->
erlang:length(ElementsNextNew);
true ->
ElementCountNext + 1
end,
erlang:setelement(I, Elements,
{erlang:min(K1, KT1), erlang:max(K1, KT2),
ElementCountNextNew, ElementsNextNew})
end;
store_element_1_tuple(I, LeftI, RightI, Elements, K1, V1, N) ->
case erlang:element(I, Elements) of
{K1, _} ->
erlang:setelement(I, Elements, {K1, V1});
{K2, _} when K2 > K1 ->
if
LeftI =:= I - 1 ->
store_element_1_tuple(LeftI, LeftI, I,
Elements, K1, V1, N);
true ->
store_element_1_tuple(erlang:round((LeftI + I) / 2),
LeftI, I,
Elements, K1, V1, N)
end;
K2 < K1
if
RightI =:= I + 1 ->
store_element_1_tuple(RightI, I, RightI,
Elements, K1, V1, N);
true ->
store_element_1_tuple(erlang:round((I + RightI) / 2),
I, RightI,
Elements, K1, V1, N)
end;
{KT1, _, _, _} when KT1 > K1 ->
if
LeftI =:= I - 1 ->
store_element_1_tuple(LeftI, LeftI, I,
Elements, K1, V1, N);
true ->
store_element_1_tuple(erlang:round((LeftI + I) / 2),
LeftI, I,
Elements, K1, V1, N)
end;
{_, KT2, _, _} when KT2 < K1 ->
if
RightI =:= I + 1 ->
store_element_1_tuple(RightI, I, RightI,
Elements, K1, V1, N);
true ->
store_element_1_tuple(erlang:round((I + RightI) / 2),
I, RightI,
Elements, K1, V1, N)
end;
{KT1, KT2, N, ElementsNext} when is_list(ElementsNext) ->
erlang:setelement(I, Elements,
{erlang:min(K1, KT1), erlang:max(K1, KT2), N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
erlang:list_to_tuple(ElementsNext),
K1, V1, N)});
{KT1, KT2, N, ElementsNext} when is_tuple(ElementsNext) ->
erlang:setelement(I, Elements,
{erlang:min(K1, KT1), erlang:max(K1, KT2), N,
store_element_1_tuple(erlang:round(N / 2), 1, N,
ElementsNext, K1, V1, N)});
{KT1, KT2, ElementCountNext, ElementsNext} ->
ElementsNextNew = store_element_1_list(ElementsNext, K1, V1, N),
ElementCountNextNew = if
ElementCountNext =:= N - 1 ->
erlang:length(ElementsNextNew);
true ->
ElementCountNext + 1
end,
erlang:setelement(I, Elements,
{erlang:min(K1, KT1), erlang:max(K1, KT2),
ElementCountNextNew, ElementsNextNew})
end.
test() ->
{3,0,[]} = A0 = ntree:new(3),
{3,1,[{50,50}]} = A1 = ntree:store(50, 50, A0),
{3,2,[{50,50},{100,100}]} = A2 = ntree:store(100, 100, A1),
{3,3,[{25,25},{50,50},{100,100}]} = A3 = ntree:store(25, 25, A2),
A4 = ntree:store(26, 26, A3),
A5 = ntree:store(27, 27, A4),
A6 = ntree:store(24, 24, A5),
A7 = ntree:store(200, 200, A6),
A8 = ntree:store(300, 300, A7),
{3,3,
{{24,27,2,[{24,25,2,[{24,24},{25,25}]},{26,27,2,[{26,26},{27,27}]}]},
{50,50},
{100,300,2,[{100,100},{200,300,2,[{200,200},{300,300}]}]}}} = A8,
error = ntree:find(23, A8),
{ok, 24} = ntree:find(24, A8),
{ok, 25} = ntree:find(25, A8),
{ok, 26} = ntree:find(26, A8),
error = ntree:find(28, A8),
{ok, 50} = ntree:find(50, A8),
error = ntree:find(400, A8),
{ok, 300} = ntree:find(300, A8),
ok.
|
390c47dec8b7316ea59db02fc1402e774e772de5023911dc997eeb696fa95ef4 | nushio3/learn-haskell | Vec.hs | module Data.Vec where
data Vec a = Vec a a a
(・) :: Vec a -> Vec a -> a
(Vec ax ay az) ・ (Vec bx by bz) = undefined
(×) :: Vec a -> Vec a -> Vec a
(Vec ax ay az) × (Vec bx by bz) = undefined
| null | https://raw.githubusercontent.com/nushio3/learn-haskell/eda0fd0b33e9c4b7552afd24c6a25a105cca5f94/exercise-6-1-data-Vec/src/Data/Vec.hs | haskell | module Data.Vec where
data Vec a = Vec a a a
(・) :: Vec a -> Vec a -> a
(Vec ax ay az) ・ (Vec bx by bz) = undefined
(×) :: Vec a -> Vec a -> Vec a
(Vec ax ay az) × (Vec bx by bz) = undefined
| |
dd4c5eb5377c09d1089b7d97254f5eb25e781cf89947335d6df3f23c2d6a46da | ndmitchell/catch | Evaluate.hs |
module Typey.Evaluate(evaluate) where
import Typey.Type
import Typey.Abstract
import Hite
import General.General
import Data.Maybe
import Data.List
import Typey.Faster
import Control.Monad
import Data.IORef
import qualified Data.Map as Map
-- | Abstract values whose leaves carry abstract expressions.
type AbstractA = Abstract AExp

-- | Evaluation environment: program, datatypes, per-function info and
-- the memoisation cache (component roles per their use below).
type Env = (Hite, DataM SmallT, FunctionM, Cache)

-- | Call signatures paired with abstract results — presumably the
-- in-progress evaluation stack for recursion detection; TODO confirm
-- against the (not visible) evaluator body.
type Stack = [((FuncName, [AbstractA]), AbstractA)]

-- | Memo table: function name -> list of (arguments, result) pairs,
-- held in a mutable IORef so it survives across evaluations.
type Cache = IORef (Map.Map FuncName [([AbstractA], AbstractA)])
-- | Allocate a fresh, empty memoisation cache.
newCache :: IO Cache
newCache = newIORef Map.empty
-- | Look up a previously computed result for @func@ applied to @args@.
-- 'Nothing' on a miss (unknown function, or known function with unseen
-- arguments).
getCache :: Cache -> FuncName -> [AbstractA] -> IO (Maybe AbstractA)
getCache cache func args = do
    c <- readIORef cache
    case Map.lookup func c of
        Nothing -> return Nothing
        Just x -> case lookup args x of
            Nothing -> return Nothing
            -- NB: inner 'x' shadows the entry list; the 'do return ()'
            -- exists only to host the disabled trace below.
            Just x -> do return () -- putStrLn $ "Cache hit, " ++ show (func,args,x)
                         return $ Just x
-- | Record @func args -> res@ in the cache.
-- NOTE(review): the first clause matches every call and returns (), so the
-- real implementation below it is unreachable and cache writes are
-- effectively disabled -- presumably left in deliberately (debugging /
-- disabling memoisation); confirm intent before removing either clause.
addCache :: Cache -> FuncName -> [AbstractA] -> AbstractA -> IO ()
addCache cache func args res = return ( )
addCache cache func args res = do
    -- putStrLn $ "Cache add, " ++ show (func,args,res)
    c <- readIORef cache
    -- prepend the new (args, result) pair onto this function's entry list
    let c2 = Map.insertWith (++) func [(args,res)] c
    writeIORef cache c2
-- | Emit the entire cache contents through the supplied logger.
dumpCache :: (String -> IO ()) -> Cache -> IO ()
dumpCache logger cache =
    readIORef cache >>= logger . show . Map.toList
-- | Lookup table from function name to its static analysis record.
type FunctionM = [(FuncName,Function)]

-- | Static per-function information, computed once by 'generateFunctions'
-- and consulted on every abstract call.
data Function =
    Function {
        funArgLen :: Int,         -- ^ number of declared parameters
        funArgs :: [String],      -- ^ the parameter names
        funCallset :: [FuncName], -- the functions i call
        funFixpoint :: Bool, -- do I need to fixed point
        funArgsPower :: [Bool], -- do I need to take the power of these arguments
        funBody :: Expr,          -- ^ the function body
        funFast :: Bool           -- ^ eligible for 'fastEval'
    }
-- | Build the static 'Function' table for every function in the program.
generateFunctions :: Hite -> Func2M -> FunctionM
generateFunctions hite types = map f (funcs hite)
    where
        f (Func name args bod _) = (name,
            Function largs args callSet requiresFix (map f args) bod fast
            )
            where
                fast = canFastEval name
                largs = length args
                -- NB: this 'f' shadows the outer 'f'. True when the named
                -- argument occurs more than once in the body (its abstract
                -- value must then be permuted, see 'evalCall'), unless the
                -- function is fast-evaluated.
                f x = (length [() | Var a <- allExpr bod, a == x] > 1) && (not fast)
                typ = lookupJust name types
                -- a higher-order argument also forces fixpoint iteration
                isHigher = case typ of
                    Arr2T xs res -> any isHigherOrder xs
                    x -> False
                requiresFix = isHigher || (name `elem` callSet)
                -- transitive closure of the call graph reachable from here
                callSet = fixSet g (g name)
                g name = [x | CallFunc x <- allExpr $ body $ getFunc hite name]
-- | All function names mentioned anywhere inside an abstract value,
-- recursing through the residual expression structure.
absFuncAExp :: AbstractA -> [FuncName]
absFuncAExp x = absFunc f x
    where
        f (Value x) = absFuncAExp x
        f (ASel x y) = f x
        f (AMake x y) = concatMap f y
        f (AFunc x) = [x]
        f (ACall x xs) = concatMap f (x:xs)
        f (Union xs) = concatMap f xs
        f (ACase x y) = concatMap f (x:map snd y)
-- | Abstract expressions: residual syntax carried inside abstract values
-- while evaluation is in progress.
data AExp = Value AbstractA                  -- ^ an already-evaluated value
          | ASel AExp CtorArg                -- ^ field selection
          | AMake CtorName [AExp]            -- ^ constructor application
          | AFunc FuncName                   -- ^ a named function
          | ACall AExp [AExp]                -- ^ application
          | Union [AExp]                     -- ^ union of alternatives
          | ACase AExp [(CtorName, AExp)]    -- ^ case analysis
          deriving (Show,Eq)
-- | Merge abstract expressions into one, flattening nested 'Union's and
-- combining adjacent 'Value's via 'unionAbs'; the empty list is void.
unionAExp :: [AExp] -> AExp
unionAExp [] = Value AbsVoid
unionAExp xs = foldr1 f xs
    where
        f (Union a) (Union b) = Union (a++b)
        f (Union a) b = Union (b:a)
        f a (Union b) = Union (a:b)
        f (Value a) (Value b) = Value $ unionAbs [a,b]
        f x y = Union [x,y]
-- | Partial: project the payload out of a 'Value'; errors on any other form.
fromValue (Value x) = x
-- | Entry point (the module's only export): abstractly evaluate @main@ of
-- the given program on the supplied argument abstractions, logging progress
-- and dumping the final cache through @logger@.
evaluate :: (String -> IO ()) -> Hite -> DataM SmallT -> Func2M -> [Abstract ()] -> IO (Abstract ())
evaluate logger hite datam funcm args = do
    c <- newCache
    res <- evalCall logger (hite, datam, generateFunctions hite funcm, c) [] "main" (map liftAbs args)
    dumpCache logger c
    return $ liftAbs res
-- | All permutations of an abstract expression: only a 'Value' has more
-- than one (via 'permuteAbs'); every other form is returned unchanged.
permuteAExp :: AExp -> [AExp]
permuteAExp e = case e of
    Value v -> Value <$> permuteAbs v
    other   -> [other]
-- | Abstractly evaluate a call of @func@ on @args@, iterating to a
-- fixpoint when required. A frame on the stack for the same (func, args)
-- short-circuits to its current approximation; \"_\" is fully unknown.
evalCall :: (String -> IO ()) -> Env -> Stack -> FuncName -> [AbstractA] -> IO AbstractA
evalCall logger env@(hite,datam,funcm,cache) stack func args
    | isJust prev = return $ fromJust prev
    | func == "_" = return AbsAny
    | funFast fun = solveFast
    | otherwise = solveSlow
    where
        -- caching is only safe when nothing we (transitively) call is
        -- currently mid-fixpoint on the stack
        cacheSafe = not $ any (`elem` stackSet) thisCallset
            where
                stackSet = map (fst . fst) stack
                thisCallset = nub $ funCallset fun ++ concatMap absFuncAExp args
        solveFast = do
            logger $ msg 0 ++ "(fast)"
            res <- fastEval doEval func args
            logger $ pad ++ "= " ++ show res
            return res
        solveSlow = if cacheSafe then solveCache else solveNoCache
        -- one permuted argument tuple: iterate the body directly ('f');
        -- several: iterate over the tuples ('g')
        solveNoCache = if length args2 == 1 then f 0 AbsVoid else g 0 AbsVoid
        solveCache = do
            ca <- getCache cache func args
            case ca of
                Just r -> return r
                Nothing -> do
                    res <- solveNoCache
                    addCache cache func args res
                    return res
        -- the fast eval helper
        doEval :: [AbstractA] -> IO AbstractA
        doEval (x:xs) = evalExpr logger env (((func,args),AbsVoid):stack) (ACall (addValue x) (map addValue xs))
        -- every combination of permuted arguments (per 'funArgsPower')
        args2 = crossProduct $ zipWithEq perm (funArgsPower fun) args
        perm True x = permuteAbs x
        perm False x = [x]
        pad = replicate (length stack * 2) ' '
        msg n = pad ++ func ++ ":" ++ show n ++ " " ++ show args ++ " = "
        -- fixpoint over all permuted argument tuples; n is the iteration
        -- counter, x the current approximation
        g n x = do
            logger $ msg n ++ show x
            res <- mapM (evalCall logger env (((func,args),x):stack) func) args2
            let res2 = unionAbs (x:res)
            if norep || res2 == x
                then logger (pad ++ "= " ++ show res2) >> return res2
                else g (n+1) res2
        -- fixpoint over the translated body for the single argument tuple
        -- (NB: logs 'res' but returns the widened 'res2', unlike 'g')
        f n x = do
            logger $ msg n ++ show x
            res <- evalExpr logger env (((func,args),x):stack) abody
            let res2 = unionAbs (x:res:[])
            if norep || res2 == x
                then logger (pad ++ "= " ++ show res) >> return res2
                else f (n+1) res2
        abody = exprToAExp (zip (funArgs fun) args) (funBody fun)
        -- no repetition needed when the function does not require a fixpoint
        norep = not $ funFixpoint fun
        fun = lookupJust func funcm
        prev = lookup (func,args) stack
-- | Abstractly evaluate one abstract expression under the given stack.
evalExpr :: (String -> IO ()) -> Env -> Stack -> AExp -> IO AbstractA
evalExpr logger env@(hite,datam,funcm,cache) stack x =
    case x of
        -- saturated call: evaluate; over-application: feed the result the
        -- leftover arguments; under-application: leave residual
        ACall (AFunc name) args -> do
            args2 <- mapM f args
            let largs = funArgLen $ lookupJust name funcm
                (argsNow, argsLater) = splitAt largs args2
            if length argsNow == largs then do
                res <- evalCall logger env stack name argsNow
                if null argsLater
                    then return res
                    else f (ACall (addValue res) $ map addValue argsLater)
            else if null args then
                return $ AbsOther [AFunc name]
            else
                return $ AbsOther [ACall (AFunc name) $ map addValue args2]
        -- flatten curried application
        ACall (ACall x xs) ys -> f (ACall x (xs ++ ys))
        AFunc x -> return $ AbsOther [AFunc x]
        Value x -> return $ x
        -- union of the reachable branches, plus bottom if the scrutinee
        -- may be bottom
        ACase x alts -> do
            x2 <- f x
            alts2 <- mapM (g x2) alts
            return $ unionAbs $ [AbsBottom | headBottom x2] ++ concat alts2
            where
                -- keep a branch only when the scrutinee can match its ctor
                g x2 (opt,expr) = if hasCtorAbs datam x2 opt
                    then f expr >>= return . (:[])
                    else return []
        ASel x y -> do
            x2 <- f x
            return $ followSelAbs hite datam x2 y
        AMake name xs -> do
            xs2 <- mapM f xs
            return $ makeAbs datam name xs2
        _ -> error $ "evalExpr, todo: " ++ show x
    where
        f x = evalExpr logger env stack x
-- | Translate a concrete 'Expr' into an abstract expression, substituting
-- the given abstract values for its free variables.
exprToAExp :: [(String, AbstractA)] -> Expr -> AExp
exprToAExp args x =
    case x of
        Call a as -> ACall (f a) (map f as)
        CallFunc a -> AFunc a
        Sel a b -> ASel (f a) b
        Error _ -> Value AbsBottom
        Case a as -> ACase (f a) [(a,f b) | (a,b) <- as]
        Var a -> addValue $ lookupJust a args
        Make a as -> AMake a (map f as)
        _ -> error $ "exprToAExp, todo: " -- ++ show x
    where
        f x = exprToAExp args x
-- | Embed an abstract value back into expression syntax, unwrapping a
-- singleton residual rather than double-wrapping it.
addValue :: AbstractA -> AExp
addValue a = case a of
    AbsOther [e] -> e
    _            -> Value a
-- | Pure, non-fixpointing sibling of 'evalCall'/'evalExpr'.
-- NOTE(review): not exported (the module exports only 'evaluate') and not
-- referenced by the visible code -- possibly legacy; confirm before relying
-- on it.
eval :: Env -> [(String, AbstractA )] -> Expr -> AbstractA
eval env@(hite,datam,funcm,cache) args expr =
    case expr of
        -- direct unfolding of the callee body, no termination check
        Call (CallFunc name) params -> eval env (zip (funArgs func) args2) (funBody func)
            where
                func = lookupJust name funcm
                args2 = map (eval env args) params
        Case x alts -> unionAbs $ concatMap f alts
            where
                x2 = eval env args x
                f (opt,expr) = if hasCtorAbs datam x2 opt then [eval env args expr] else []
        Var x -> lookupJust x args
        Sel x y -> followSelAbs hite datam (eval env args x) y
        Make name xs -> makeAbs datam name (map (eval env args) xs)
        Error _ -> AbsBottom
        x -> error $ "eval: " ++ show x
| null | https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/src/Typey/Evaluate.hs | haskell | putStrLn $ "Cache hit, " ++ show (func,args,x)
putStrLn $ "Cache add, " ++ show (func,args,res)
the functions i call
do I need to fixed point
do I need to take the power of these arguments
the fast eval helper
++ show x
|
module Typey.Evaluate(evaluate) where
import Typey.Type
import Typey.Abstract
import Hite
import General.General
import Data.Maybe
import Data.List
import Typey.Faster
import Control.Monad
import Data.IORef
import qualified Data.Map as Map
type AbstractA = Abstract AExp
type Env = (Hite, DataM SmallT, FunctionM, Cache)
type Stack = [((FuncName, [AbstractA]), AbstractA)]
type Cache = IORef (Map.Map FuncName [([AbstractA], AbstractA)])
newCache :: IO Cache
newCache = newIORef Map.empty
getCache :: Cache -> FuncName -> [AbstractA] -> IO (Maybe AbstractA)
getCache cache func args = do
c <- readIORef cache
case Map.lookup func c of
Nothing -> return Nothing
Just x -> case lookup args x of
Nothing -> return Nothing
return $ Just x
addCache :: Cache -> FuncName -> [AbstractA] -> AbstractA -> IO ()
addCache cache func args res = return ( )
addCache cache func args res = do
c <- readIORef cache
let c2 = Map.insertWith (++) func [(args,res)] c
writeIORef cache c2
dumpCache :: (String -> IO ()) -> Cache -> IO ()
dumpCache logger cache = do
c <- readIORef cache
logger $ show $ Map.toList c
type FunctionM = [(FuncName,Function)]
data Function =
Function {
funArgLen :: Int,
funArgs :: [String],
funBody :: Expr,
funFast :: Bool
}
generateFunctions :: Hite -> Func2M -> FunctionM
generateFunctions hite types = map f (funcs hite)
where
f (Func name args bod _) = (name,
Function largs args callSet requiresFix (map f args) bod fast
)
where
fast = canFastEval name
largs = length args
f x = (length [() | Var a <- allExpr bod, a == x] > 1) && (not fast)
typ = lookupJust name types
isHigher = case typ of
Arr2T xs res -> any isHigherOrder xs
x -> False
requiresFix = isHigher || (name `elem` callSet)
callSet = fixSet g (g name)
g name = [x | CallFunc x <- allExpr $ body $ getFunc hite name]
absFuncAExp :: AbstractA -> [FuncName]
absFuncAExp x = absFunc f x
where
f (Value x) = absFuncAExp x
f (ASel x y) = f x
f (AMake x y) = concatMap f y
f (AFunc x) = [x]
f (ACall x xs) = concatMap f (x:xs)
f (Union xs) = concatMap f xs
f (ACase x y) = concatMap f (x:map snd y)
data AExp = Value AbstractA
| ASel AExp CtorArg
| AMake CtorName [AExp]
| AFunc FuncName
| ACall AExp [AExp]
| Union [AExp]
| ACase AExp [(CtorName, AExp)]
deriving (Show,Eq)
unionAExp :: [AExp] -> AExp
unionAExp [] = Value AbsVoid
unionAExp xs = foldr1 f xs
where
f (Union a) (Union b) = Union (a++b)
f (Union a) b = Union (b:a)
f a (Union b) = Union (a:b)
f (Value a) (Value b) = Value $ unionAbs [a,b]
f x y = Union [x,y]
fromValue (Value x) = x
evaluate :: (String -> IO ()) -> Hite -> DataM SmallT -> Func2M -> [Abstract ()] -> IO (Abstract ())
evaluate logger hite datam funcm args = do
c <- newCache
res <- evalCall logger (hite, datam, generateFunctions hite funcm, c) [] "main" (map liftAbs args)
dumpCache logger c
return $ liftAbs res
permuteAExp :: AExp -> [AExp]
permuteAExp (Value x) = map Value $ permuteAbs x
permuteAExp x = [x]
evalCall :: (String -> IO ()) -> Env -> Stack -> FuncName -> [AbstractA] -> IO AbstractA
evalCall logger env@(hite,datam,funcm,cache) stack func args
| isJust prev = return $ fromJust prev
| func == "_" = return AbsAny
| funFast fun = solveFast
| otherwise = solveSlow
where
cacheSafe = not $ any (`elem` stackSet) thisCallset
where
stackSet = map (fst . fst) stack
thisCallset = nub $ funCallset fun ++ concatMap absFuncAExp args
solveFast = do
logger $ msg 0 ++ "(fast)"
res <- fastEval doEval func args
logger $ pad ++ "= " ++ show res
return res
solveSlow = if cacheSafe then solveCache else solveNoCache
solveNoCache = if length args2 == 1 then f 0 AbsVoid else g 0 AbsVoid
solveCache = do
ca <- getCache cache func args
case ca of
Just r -> return r
Nothing -> do
res <- solveNoCache
addCache cache func args res
return res
doEval :: [AbstractA] -> IO AbstractA
doEval (x:xs) = evalExpr logger env (((func,args),AbsVoid):stack) (ACall (addValue x) (map addValue xs))
args2 = crossProduct $ zipWithEq perm (funArgsPower fun) args
perm True x = permuteAbs x
perm False x = [x]
pad = replicate (length stack * 2) ' '
msg n = pad ++ func ++ ":" ++ show n ++ " " ++ show args ++ " = "
g n x = do
logger $ msg n ++ show x
res <- mapM (evalCall logger env (((func,args),x):stack) func) args2
let res2 = unionAbs (x:res)
if norep || res2 == x
then logger (pad ++ "= " ++ show res2) >> return res2
else g (n+1) res2
f n x = do
logger $ msg n ++ show x
res <- evalExpr logger env (((func,args),x):stack) abody
let res2 = unionAbs (x:res:[])
if norep || res2 == x
then logger (pad ++ "= " ++ show res) >> return res2
else f (n+1) res2
abody = exprToAExp (zip (funArgs fun) args) (funBody fun)
norep = not $ funFixpoint fun
fun = lookupJust func funcm
prev = lookup (func,args) stack
evalExpr :: (String -> IO ()) -> Env -> Stack -> AExp -> IO AbstractA
evalExpr logger env@(hite,datam,funcm,cache) stack x =
case x of
ACall (AFunc name) args -> do
args2 <- mapM f args
let largs = funArgLen $ lookupJust name funcm
(argsNow, argsLater) = splitAt largs args2
if length argsNow == largs then do
res <- evalCall logger env stack name argsNow
if null argsLater
then return res
else f (ACall (addValue res) $ map addValue argsLater)
else if null args then
return $ AbsOther [AFunc name]
else
return $ AbsOther [ACall (AFunc name) $ map addValue args2]
ACall (ACall x xs) ys -> f (ACall x (xs ++ ys))
AFunc x -> return $ AbsOther [AFunc x]
Value x -> return $ x
ACase x alts -> do
x2 <- f x
alts2 <- mapM (g x2) alts
return $ unionAbs $ [AbsBottom | headBottom x2] ++ concat alts2
where
g x2 (opt,expr) = if hasCtorAbs datam x2 opt
then f expr >>= return . (:[])
else return []
ASel x y -> do
x2 <- f x
return $ followSelAbs hite datam x2 y
AMake name xs -> do
xs2 <- mapM f xs
return $ makeAbs datam name xs2
_ -> error $ "evalExpr, todo: " ++ show x
where
f x = evalExpr logger env stack x
exprToAExp :: [(String, AbstractA)] -> Expr -> AExp
exprToAExp args x =
case x of
Call a as -> ACall (f a) (map f as)
CallFunc a -> AFunc a
Sel a b -> ASel (f a) b
Error _ -> Value AbsBottom
Case a as -> ACase (f a) [(a,f b) | (a,b) <- as]
Var a -> addValue $ lookupJust a args
Make a as -> AMake a (map f as)
where
f x = exprToAExp args x
addValue :: AbstractA -> AExp
addValue (AbsOther [x]) = x
addValue x = Value x
eval :: Env -> [(String, AbstractA )] -> Expr -> AbstractA
eval env@(hite,datam,funcm,cache) args expr =
case expr of
Call (CallFunc name) params -> eval env (zip (funArgs func) args2) (funBody func)
where
func = lookupJust name funcm
args2 = map (eval env args) params
Case x alts -> unionAbs $ concatMap f alts
where
x2 = eval env args x
f (opt,expr) = if hasCtorAbs datam x2 opt then [eval env args expr] else []
Var x -> lookupJust x args
Sel x y -> followSelAbs hite datam (eval env args x) y
Make name xs -> makeAbs datam name (map (eval env args) xs)
Error _ -> AbsBottom
x -> error $ "eval: " ++ show x
|
2c4de620d280bf3af016ff8f5bfcd9598c7c15fac5f696813f8ef9c7fb932177 | ideas-edu/ideas | EncoderXML.hs | {-# LANGUAGE GADTs #-}
-----------------------------------------------------------------------------
Copyright 2019 , Ideas project team . This file is distributed under the
terms of the Apache License 2.0 . For more information , see the files
" LICENSE.txt " and " NOTICE.txt " , which are included in the distribution .
-----------------------------------------------------------------------------
-- |
-- Maintainer :
-- Stability : provisional
Portability : portable ( depends on ghc )
--
Services using XML notation
--
-----------------------------------------------------------------------------
module Ideas.Encoding.EncoderXML
( XMLEncoder
, xmlEncoder, encodeState
) where
import Data.Char
import Data.List
import Data.Maybe
import Data.Monoid
import Ideas.Common.Library hiding (exerciseId)
import Ideas.Encoding.Encoder
import Ideas.Encoding.OpenMathSupport
import Ideas.Encoding.Request hiding (XML)
import Ideas.Encoding.RulesInfo (rulesInfoXML)
import Ideas.Encoding.StrategyInfo
import Ideas.Service.BasicServices (StepInfo, tStepInfo)
import Ideas.Service.Diagnose
import Ideas.Service.FeedbackScript.Syntax
import Ideas.Service.State
import Ideas.Service.Types
import Ideas.Text.OpenMath.Object
import Ideas.Text.XML
import Ideas.Utils.Prelude (munless)
import qualified Ideas.Service.FeedbackText as FeedbackText
import qualified Ideas.Service.ProblemDecomposition as PD
-----------------
-- | An encoder from service values over exercise type @a@ to XML builders.
type XMLEncoder a = EncoderX a XMLBuilder
-- | Top-level typed encoder: special-cases the service types that have a
-- dedicated XML shape, then falls through to a structural traversal of the
-- type representation.
xmlEncoder :: TypedEncoder a XMLBuilder
xmlEncoder =
    (encodeDiagnosis, tDiagnosis) <?>
    (encodeDecompositionReply, PD.tReply) <?>
    (encodeDerivation, tDerivation tStepInfo tContext) <?>
    (encodeFirsts, tList tFirst) <?>
    (encodeFirst, tFirst) <?>
    (encodeDerivationText, tDerivation tString tContext) <?>
    (encodeDifficulty, tDifficulty) <?>
    (encodeMessage, FeedbackText.tMessage) <?>
    \(val ::: tp) ->
    case tp of
        -- meta-information
        Tag "RuleShortInfo" t ->
            case equal t (Const Rule) of
                Just f -> ruleShortInfo (f val)
                Nothing -> fail "rule short info"
        Tag "RulesInfo" _ -> do
            ex <- getExercise
            useOM <- useOpenMath <$> getRequest
            return (rulesInfoXML ex (buildExpression useOM ex))
        Tag "elem" t ->
            tag "elem" (xmlEncoder (val ::: t))
        -- special cases for lists
        List (Const Rule) ->
            encodeAsList [ ruleShortInfo r | r <- val ]
        List t ->
            encodeAsList [ xmlEncoder (a ::: t) | a <- val ]
        -- standard
        Tag _ t -> xmlEncoder (val ::: t)
        Iso iso t -> xmlEncoder (to iso val ::: t)
        Pair t1 t2 -> xmlEncoder (fst val ::: t1) <>
                      xmlEncoder (snd val ::: t2)
        t1 :|: t2 -> case val of
            Left a -> xmlEncoder (a ::: t1)
            Right b -> xmlEncoder (b ::: t2)
        Unit -> mempty
        Const t -> xmlEncoderConst (val ::: t)
        _ -> fail $ show tp
-- | Encode a value of an atomic (Const) service type; anything without a
-- dedicated case is rendered as plain text.
xmlEncoderConst :: TypedValue (Const a) -> XMLEncoder a
xmlEncoderConst tv@(val ::: tp) =
    case tp of
        SomeExercise -> case val of
            Some a -> exerciseInfo a
        Strategy -> builder (strategyToXML val)
        Rule -> "ruleid" .=. show val
        Constraint -> "constraint" .=. show val
        State -> encodeState val
        Context -> encodeContext val
        Location -> encodeLocation val
        Environment -> encodeEnvironment val
        Term -> builderXML (toOMOBJ val)
        Text -> encodeText val
        Bool -> string (showBool val)
        XML -> builder val
        _ -> text tv
-- | Encode an exercise state: its strategy prefix (omitted when the state
-- carries none) followed by its context.
encodeState :: State a -> XMLEncoder a
encodeState st = element "state"
    [ if withoutPrefix st
        then mempty
        else element "prefix" [string (show (statePrefix st))]
    , encodeContext (stateContext st)
    ]
-- | Encode a context: the term itself (OpenMath or pretty-printed,
-- depending on the request) followed by its environment bindings, with the
-- current location injected as an extra binding when non-trivial.
encodeContext :: Context a -> XMLEncoder a
encodeContext ctx = do
    ex <- getExercise
    useOM <- useOpenMath <$> getRequest
    -- a context without a term cannot be encoded
    maybe (error "encodeContext") (buildExpression useOM ex) (fromContext ctx)
     <>
     let values = bindings (withLoc ctx)
         loc = fromLocation (location ctx)
         withLoc
            | null loc = id
            | otherwise = insertRef (makeRef "location") loc
     in munless (null values) $ element "context"
           [ element "item"
                [ "name" .=. showId tb
                , case getTermValue tb of
                     term | useOM ->
                        builder (omobj2xml (toOMOBJ term))
                     _ -> "value" .=. showValue tb
                ]
           | tb <- values
           ]
-- | Render an exercise term: as OpenMath when requested (failing loudly if
-- conversion is impossible), otherwise as the exercise's pretty-print
-- inside an @expr@ tag.
buildExpression :: BuildXML b => Bool -> Exercise a -> a -> b
buildExpression useOM ex
    | useOM = either msg builderXML . toOpenMath ex
    | otherwise = tag "expr" . string . prettyPrinter ex
    where
        msg s = error ("Error encoding term in OpenMath: " ++ s)
-- | Encode a strategy location as a @location@ attribute.
encodeLocation :: Location -> XMLEncoder a
encodeLocation loc = "location" .=. show loc
-- | Encode every binding of an environment, concatenated in order.
encodeEnvironment :: HasEnvironment env => env -> XMLEncoder a
encodeEnvironment = mconcat . map encodeTypedBinding . bindings
-- | Encode one binding as an @argument@ element; the value is OpenMath
-- when the request asks for it, plain text otherwise.
encodeTypedBinding :: Binding -> XMLEncoder a
encodeTypedBinding tb = do
    useOM <- useOpenMath <$> getRequest
    tag "argument" $
        ("description" .=. showId tb) <>
        case getTermValue tb of
            term | useOM -> builder $
                omobj2xml $ toOMOBJ term
            _ -> string (showValue tb)
-- | Encode a derivation by projecting each step to ((rule, environment),
-- resulting term) and reusing the structural list encoder.
encodeDerivation :: Derivation (StepInfo a) (Context a) -> XMLEncoder a
encodeDerivation d =
    let xs = [ ((r, env), a) | (_, (r, _, env), a) <- triples d ]
    in xmlEncoder (xs ::: tList (tPair (tPair tRule tEnvironment) tContext))
-- | Encode a textual derivation: each step becomes a rule text plus the
-- context it produced.
encodeDerivationText :: Derivation String (Context a) -> XMLEncoder a
encodeDerivationText d = encodeAsList
    [ ("ruletext" .=. s) <> encodeContext a
    | (_, s, a) <- triples d
    ]
-- | Type representation for a \"first\" result: one step with its state.
tFirst :: Type a (StepInfo a, State a)
tFirst = Tag "first" (tPair tStepInfo tState)
-- | Encode one (step, state) pair inside an @elem@ tag.
encodeFirst :: (StepInfo a, State a) -> XMLEncoder a
encodeFirst (step, st) =
    tag "elem" (xmlEncoder (step ::: tStepInfo) <> encodeState st)
-- | Encode a list of (step, state) pairs as a @list@ of @elem@s.
encodeFirsts :: [(StepInfo a, State a)] -> XMLEncoder a
encodeFirsts xs = element "list" [ encodeFirst x | x <- xs ]
-- | Summary attributes for a rule: name, buggy flag, argument count and
-- whether it is a rewrite rule.
ruleShortInfo :: Rule (Context a) -> XMLEncoder a
ruleShortInfo r = mconcat
    [ "name" .=. showId r
    , "buggy" .=. showBool (isBuggy r)
    , "arguments" .=. show (length (getRefs r))
    , "rewriterule" .=. showBool (isRewriteRule r)
    ]
-- | Encode a difficulty level as a @difficulty@ attribute.
encodeDifficulty :: Difficulty -> XMLEncoder a
encodeDifficulty d =
    "difficulty" .=. show d
-- | Encode feedback text, space-separating the items. Embedded terms are
-- rendered through the exercise's term view when one matches; everything
-- else falls back to plain text.
encodeText :: Text -> XMLEncoder a
encodeText txt = do
    ex <- getExercise
    useOM <- useOpenMath <$> getRequest
    mconcat (intersperse (string " ") [ encodeItem ex useOM item | item <- textItems txt ])
    where
        encodeItem ex useOM item =
            case item of
                TextTerm a -> fromMaybe (text item) $ do
                    v <- hasTermView ex
                    b <- match v a
                    return (buildExpression useOM ex b)
                _ -> text item
-- | Encode a feedback message, with an optional @accept@ attribute when
-- the message carries an accept/reject verdict.
encodeMessage :: FeedbackText.Message -> XMLEncoder a
encodeMessage msg =
    element "message"
        [ case FeedbackText.accept msg of
            Just b -> "accept" .=. showBool b
            Nothing -> mempty
        , encodeText (FeedbackText.text msg)
        ]
-- | Encode a diagnosis result: one element per diagnosis constructor,
-- carrying readiness, the resulting state and the rule involved where
-- applicable.
encodeDiagnosis :: Diagnosis a -> XMLEncoder a
encodeDiagnosis diagnosis =
    case diagnosis of
        SyntaxError s -> element "syntaxerror" [string s]
        Buggy env r -> element "buggy"
            [encodeEnvironment env, "ruleid" .=. showId r]
        NotEquivalent s ->
            if null s then emptyTag "notequiv"
                      else element "notequiv" [ "reason" .=. s ]
        -- rule id is optional for similar/wrongrule
        Similar b st mr -> element "similar" $
            ["ready" .=. showBool b, encodeState st] ++
            maybe [] (\r -> ["ruleid" .=. showId r]) mr
        WrongRule b st mr -> element "wrongrule" $
            [ "ready" .=. showBool b, encodeState st ] ++
            maybe [] (\r -> ["ruleid" .=. showId r]) mr
        Expected b st r -> element "expected"
            ["ready" .=. showBool b, encodeState st, "ruleid" .=. showId r]
        Detour b st env r -> element "detour"
            [ "ready" .=. showBool b, encodeState st
            , encodeEnvironment env, "ruleid" .=. showId r
            ]
        Correct b st -> element "correct"
            ["ready" .=. showBool b, encodeState st]
        Unknown b st -> element "unknown"
            ["ready" .=. showBool b, encodeState st]
-- | Encode a problem-decomposition reply as @correct@ or @incorrect@,
-- the latter with its equivalence verdict and environment.
encodeDecompositionReply :: PD.Reply a -> XMLEncoder a
encodeDecompositionReply reply =
    case reply of
        PD.Ok loc st ->
            element "correct" [encLoc loc, encodeState st]
        PD.Incorrect eq loc st env ->
            element "incorrect"
                [ "equivalent" .=. showBool eq
                , encLoc loc
                , encodeState st
                , encodeEnvironment env
                ]
    where
        encLoc = tag "location" . text
-- | Summary attributes for an exercise: id, description and status.
exerciseInfo :: Exercise b -> XMLEncoder a
exerciseInfo ex = mconcat
    [ "exerciseid" .=. showId ex
    , "description" .=. description ex
    , "status" .=. show (status ex)
    ]
------------------------------------------------
-- helpers
-- | Wrap each encoder in an @elem@ tag inside a single @list@ element.
encodeAsList :: [XMLEncoder a] -> XMLEncoder a
encodeAsList items = element "list" [ tag "elem" i | i <- items ]
-- | Render a 'Bool' in the lower-case form XML clients expect
-- (\"true\" / \"false\").
showBool :: Bool -> String
showBool b = if b then "true" else "false"
---------------------------------------------------------------------------
---------------------------------------------------------------------------
|
Maintainer :
Stability : provisional
---------------------------------------------------------------------------
---------------
meta-information
special cases for lists
standard
----------------------------------------------
helpers
| Copyright 2019 , Ideas project team . This file is distributed under the
terms of the Apache License 2.0 . For more information , see the files
" LICENSE.txt " and " NOTICE.txt " , which are included in the distribution .
Portability : portable ( depends on ghc )
Services using XML notation
module Ideas.Encoding.EncoderXML
( XMLEncoder
, xmlEncoder, encodeState
) where
import Data.Char
import Data.List
import Data.Maybe
import Data.Monoid
import Ideas.Common.Library hiding (exerciseId)
import Ideas.Encoding.Encoder
import Ideas.Encoding.OpenMathSupport
import Ideas.Encoding.Request hiding (XML)
import Ideas.Encoding.RulesInfo (rulesInfoXML)
import Ideas.Encoding.StrategyInfo
import Ideas.Service.BasicServices (StepInfo, tStepInfo)
import Ideas.Service.Diagnose
import Ideas.Service.FeedbackScript.Syntax
import Ideas.Service.State
import Ideas.Service.Types
import Ideas.Text.OpenMath.Object
import Ideas.Text.XML
import Ideas.Utils.Prelude (munless)
import qualified Ideas.Service.FeedbackText as FeedbackText
import qualified Ideas.Service.ProblemDecomposition as PD
type XMLEncoder a = EncoderX a XMLBuilder
xmlEncoder :: TypedEncoder a XMLBuilder
xmlEncoder =
(encodeDiagnosis, tDiagnosis) <?>
(encodeDecompositionReply, PD.tReply) <?>
(encodeDerivation, tDerivation tStepInfo tContext) <?>
(encodeFirsts, tList tFirst) <?>
(encodeFirst, tFirst) <?>
(encodeDerivationText, tDerivation tString tContext) <?>
(encodeDifficulty, tDifficulty) <?>
(encodeMessage, FeedbackText.tMessage) <?>
\(val ::: tp) ->
case tp of
Tag "RuleShortInfo" t ->
case equal t (Const Rule) of
Just f -> ruleShortInfo (f val)
Nothing -> fail "rule short info"
Tag "RulesInfo" _ -> do
ex <- getExercise
useOM <- useOpenMath <$> getRequest
return (rulesInfoXML ex (buildExpression useOM ex))
Tag "elem" t ->
tag "elem" (xmlEncoder (val ::: t))
List (Const Rule) ->
encodeAsList [ ruleShortInfo r | r <- val ]
List t ->
encodeAsList [ xmlEncoder (a ::: t) | a <- val ]
Tag _ t -> xmlEncoder (val ::: t)
Iso iso t -> xmlEncoder (to iso val ::: t)
Pair t1 t2 -> xmlEncoder (fst val ::: t1) <>
xmlEncoder (snd val ::: t2)
t1 :|: t2 -> case val of
Left a -> xmlEncoder (a ::: t1)
Right b -> xmlEncoder (b ::: t2)
Unit -> mempty
Const t -> xmlEncoderConst (val ::: t)
_ -> fail $ show tp
xmlEncoderConst :: TypedValue (Const a) -> XMLEncoder a
xmlEncoderConst tv@(val ::: tp) =
case tp of
SomeExercise -> case val of
Some a -> exerciseInfo a
Strategy -> builder (strategyToXML val)
Rule -> "ruleid" .=. show val
Constraint -> "constraint" .=. show val
State -> encodeState val
Context -> encodeContext val
Location -> encodeLocation val
Environment -> encodeEnvironment val
Term -> builderXML (toOMOBJ val)
Text -> encodeText val
Bool -> string (showBool val)
XML -> builder val
_ -> text tv
encodeState :: State a -> XMLEncoder a
encodeState st = element "state"
[ if withoutPrefix st
then mempty
else element "prefix" [string (show (statePrefix st))]
, encodeContext (stateContext st)
]
encodeContext :: Context a -> XMLEncoder a
encodeContext ctx = do
ex <- getExercise
useOM <- useOpenMath <$> getRequest
maybe (error "encodeContext") (buildExpression useOM ex) (fromContext ctx)
<>
let values = bindings (withLoc ctx)
loc = fromLocation (location ctx)
withLoc
| null loc = id
| otherwise = insertRef (makeRef "location") loc
in munless (null values) $ element "context"
[ element "item"
[ "name" .=. showId tb
, case getTermValue tb of
term | useOM ->
builder (omobj2xml (toOMOBJ term))
_ -> "value" .=. showValue tb
]
| tb <- values
]
buildExpression :: BuildXML b => Bool -> Exercise a -> a -> b
buildExpression useOM ex
| useOM = either msg builderXML . toOpenMath ex
| otherwise = tag "expr" . string . prettyPrinter ex
where
msg s = error ("Error encoding term in OpenMath: " ++ s)
encodeLocation :: Location -> XMLEncoder a
encodeLocation loc = "location" .=. show loc
encodeEnvironment :: HasEnvironment env => env -> XMLEncoder a
encodeEnvironment env = mconcat [ encodeTypedBinding b | b <- bindings env ]
encodeTypedBinding :: Binding -> XMLEncoder a
encodeTypedBinding tb = do
useOM <- useOpenMath <$> getRequest
tag "argument" $
("description" .=. showId tb) <>
case getTermValue tb of
term | useOM -> builder $
omobj2xml $ toOMOBJ term
_ -> string (showValue tb)
encodeDerivation :: Derivation (StepInfo a) (Context a) -> XMLEncoder a
encodeDerivation d =
let xs = [ ((r, env), a) | (_, (r, _, env), a) <- triples d ]
in xmlEncoder (xs ::: tList (tPair (tPair tRule tEnvironment) tContext))
encodeDerivationText :: Derivation String (Context a) -> XMLEncoder a
encodeDerivationText d = encodeAsList
[ ("ruletext" .=. s) <> encodeContext a
| (_, s, a) <- triples d
]
tFirst :: Type a (StepInfo a, State a)
tFirst = Tag "first" (tPair tStepInfo tState)
encodeFirst :: (StepInfo a, State a) -> XMLEncoder a
encodeFirst (step, st) =
tag "elem" (xmlEncoder (step ::: tStepInfo) <> encodeState st)
encodeFirsts :: [(StepInfo a, State a)] -> XMLEncoder a
encodeFirsts =
element "list" . map encodeFirst
ruleShortInfo :: Rule (Context a) -> XMLEncoder a
ruleShortInfo r = mconcat
[ "name" .=. showId r
, "buggy" .=. showBool (isBuggy r)
, "arguments" .=. show (length (getRefs r))
, "rewriterule" .=. showBool (isRewriteRule r)
]
encodeDifficulty :: Difficulty -> XMLEncoder a
encodeDifficulty d =
"difficulty" .=. show d
encodeText :: Text -> XMLEncoder a
encodeText txt = do
ex <- getExercise
useOM <- useOpenMath <$> getRequest
mconcat (intersperse (string " ") [ encodeItem ex useOM item | item <- textItems txt ])
where
encodeItem ex useOM item =
case item of
TextTerm a -> fromMaybe (text item) $ do
v <- hasTermView ex
b <- match v a
return (buildExpression useOM ex b)
_ -> text item
encodeMessage :: FeedbackText.Message -> XMLEncoder a
encodeMessage msg =
element "message"
[ case FeedbackText.accept msg of
Just b -> "accept" .=. showBool b
Nothing -> mempty
, encodeText (FeedbackText.text msg)
]
encodeDiagnosis :: Diagnosis a -> XMLEncoder a
encodeDiagnosis diagnosis =
case diagnosis of
SyntaxError s -> element "syntaxerror" [string s]
Buggy env r -> element "buggy"
[encodeEnvironment env, "ruleid" .=. showId r]
NotEquivalent s ->
if null s then emptyTag "notequiv"
else element "notequiv" [ "reason" .=. s ]
Similar b st mr -> element "similar" $
["ready" .=. showBool b, encodeState st] ++
maybe [] (\r -> ["ruleid" .=. showId r]) mr
WrongRule b st mr -> element "wrongrule" $
[ "ready" .=. showBool b, encodeState st ] ++
maybe [] (\r -> ["ruleid" .=. showId r]) mr
Expected b st r -> element "expected"
["ready" .=. showBool b, encodeState st, "ruleid" .=. showId r]
Detour b st env r -> element "detour"
[ "ready" .=. showBool b, encodeState st
, encodeEnvironment env, "ruleid" .=. showId r
]
Correct b st -> element "correct"
["ready" .=. showBool b, encodeState st]
Unknown b st -> element "unknown"
["ready" .=. showBool b, encodeState st]
encodeDecompositionReply :: PD.Reply a -> XMLEncoder a
encodeDecompositionReply reply =
case reply of
PD.Ok loc st ->
element "correct" [encLoc loc, encodeState st]
PD.Incorrect eq loc st env ->
element "incorrect"
[ "equivalent" .=. showBool eq
, encLoc loc
, encodeState st
, encodeEnvironment env
]
where
encLoc = tag "location" . text
exerciseInfo :: Exercise b -> XMLEncoder a
exerciseInfo ex = mconcat
[ "exerciseid" .=. showId ex
, "description" .=. description ex
, "status" .=. show (status ex)
]
encodeAsList :: [XMLEncoder a] -> XMLEncoder a
encodeAsList = element "list" . map (tag "elem")
showBool :: Bool -> String
showBool = map toLower . show |
1da6ae368233d6b6b0bc1eb09c1590f702ef7b2565552d6b013d9b577243ed6f | mokus0/junkbox | invertible.hs | class Invertible f a b where
apply :: (Invertible f a b) => f -> a -> b
invert :: forall a b. forall g. (Invertible f a b, Invertible g b a) => f -> g
compose :: forall a b c. forall g h. (Invertible f a b, Invertible g b c, Invertible h a c) => f -> g -> h
data InvFunc a b = Inv (a->b) (b->a)
instance forall a b. Invertible (InvFunc a b) a b where
apply (Inv f g) x = f x
invert (Inv x y) = (Inv y x)
compose (Inv x1 y1) (Inv x2 y2) = (Inv (x2.x1) (y1.y2))
| null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/TypeExperiments/invertible.hs | haskell | class Invertible f a b where
apply :: (Invertible f a b) => f -> a -> b
invert :: forall a b. forall g. (Invertible f a b, Invertible g b a) => f -> g
compose :: forall a b c. forall g h. (Invertible f a b, Invertible g b c, Invertible h a c) => f -> g -> h
data InvFunc a b = Inv (a->b) (b->a)
instance forall a b. Invertible (InvFunc a b) a b where
apply (Inv f g) x = f x
invert (Inv x y) = (Inv y x)
compose (Inv x1 y1) (Inv x2 y2) = (Inv (x2.x1) (y1.y2))
| |
cce97b014464a45f647165a42433470bfa8d6c77997cb51efa544db4c06e0170 | jeffshrager/biobike | jhelp-structs.lisp | -*- Package : help ; mode : lisp ; base : 10 ; Syntax : Common - Lisp ; -*-
(in-package :help)
;; Move here to avoid compiler warning -- JP.
(defparameter *jpage-count* 0)
(defparameter *jsymbol-count* 0)
(defun stringify (mess)
(if mess
(format nil "~a" mess)
"" ;; not "NIL".
)
)
Version 2 . Input side .
;; Currently used for incoming probe.
;;
Structure of a bag : ( ( wordID . count ) ( wordID . count ) ... ) i.e. , Assoc - list ; and wordIDs are sorted .
;;
(defstruct wordbag bag total bigrams) ;;deprecated. Delete soon.
;; (wordbag-bag myWB) (wordbag-total myWB) (wordbag-bigrams myWB)
( setf ( wordbag - bag myWB ) newList )
( make - wordbag : bag NIL : total 0 : )
(defvar *Bundle-count* 0) ;;deprecated. Delete soon.
(defstruct Bundle ID titleWB descriptionWB keywordsWB textWB DocObj) ;;deprecated. Delete soon.
;; inBundles is an unsorted list of Bundles that have that word contained in them.
(defstruct dicwordentry wordstring wordID inBundles) ;;deprecated. Delete soon.
( defstruct JDictionary wordentries )
;;(defvar *the-jdictionary* (make-JDictionary :wordentries NIL))
(defvar *the-jdictionary* (make-hash-table)) ;;deprecated. Delete soon.
;;---------------------------------------------------------------------
Version 3 . Run side .
(defstruct JDocument ID doctype thedoc functor-url text)
;; Main output item of system.
;; This is a wrapper that holds whatever the real contents should be.
;; Atomic entity.
;; The ID is unique (a positive integer), and allows it to be sorted.
: ' documentation - file ' function - documentation ' glossary - entry ' module
;; 'symbol-doc 'topic 'tutorial 'jpage
thedoc : stores an arbitrary documentation object , often the legacy docobj
;; functor-url is an expression that holds the documentation URL, that must be eval'd at run-time.
;; It is not a box command but is in fact a true URL that will often go to the legacy doc page. But quoted.
;;
;; New slot: text. Possibly null string that lists all interesting stuff in Document.
;; Used for searching on output to give context lines.
;; This is redundant with the document itself, but various flavors of document have different texts,
so it 's easier to keep a copy . Sigh . Next time do this with 10 different flavors of methods .
;; Primarily used for function documentation.
(defparameter *JDocument-count* 0)
Singleton array for keeping track of all of the known unique Documents . Make this a Hash soon .
(defparameter *JDocuments* (make-array 10000 :fill-pointer 0 :adjustable T))
(defun add-JDocument (JDoc) (vector-push-extend JDoc *JDocuments*))
(defun JDocuments-count () (length *JDocuments*)) ;;this is the boots-on-ground version. We also keep track in a var.
(defun get-JDocument (JDocID) (elt *JDocuments* JDocID)) ; throws error if out of bounds, fix this. 0-based.
JKM Mar 1 ' 13 Have to add fields " name " and " parameters " so as to shadow the functions ' slots .
;; "name" stores a simple symbol; "parameters" stores a list of parameter objects, get it from the function.
;; no, won't work, needs function "name" to get stuff, not JPage-name.
;; (defstruct JPage name title subtitle keywords logical-form summary text parameters )
(defstruct JPage title subtitle keywords logical-form summary text )
Barrel Entry is an assoc couplet of ( JDoc - IDint . score[/count ] ) .
;; It is simple enough that we do not declare a struct for it.
;;
Note that Barrels sort on the ID , whereas Scoring will later sort on the score , which will be reversed .
;; Score is absolute and is a float; it should be normalized for relevance, frequency, specificity.
;; We'll compare apples against other apples from other Words' barrels.
(defstruct JSummary results barrels)
A temporary holding system for the summarized results of the system . * One per query * , so sort of a singleton .
;; Summary is necessary as the merge interleaving must trickily be done later, after gathering, not inline.
" results " is a single list of SWAPPED Barrel Entries , with score in front ,
;; that will get MERGED, sorted/ranked, truncated, and used to return.
;; As the final step in currying this, the scores are thrown away, and instead of a dual-level list of conses,
it is boiled down into a single list of . But this is done as the last step .
" barrels " is a list of 0 or more Barrel structures , which each contain a list of Barrel Entries ( JDocID . score ) .
We own the results . We do * not * own the Barrels . Result entries must be * copied * from Barrel lists .
Results must be GC'd in a system that worries about such . Barrels do * not * get GC'd .
;;
;; (make-JSummary :results nil :barrels nil)
(defstruct JBarrel entries count)
entries is a list of Barrel Entries . count is the length of the list , for speed .
So a Barrel is effectively a list of JDocuments -- all the that have this Word / Concept in them .
The Barrels are compiled permanently at the beginning of JHelp startup ,
;; and are fixed constant thereafter (assuming documentation is constant).
A Barrel does not get added to , after it has been put together .
Barrels are result entries that get hashed by Word , Bigram , or Concept .
Google probably means something different with their Barrels
as to what I 'm calling mine this year , caveat . Got ta call these something , though .
;;
;; (make-JBarrel :entries nil :count 0)
(defstruct JHelp-Engine wordhash bigramhash categorieshash learnedhash)
;; Definition of the singleton object that runs everything.
(defparameter *JHelp-Engine* nil)
(defun clear-JHelp-Engine ()
(setq *JDocuments* (make-array 10000 :fill-pointer 0 :adjustable T))
(setq *JDocument-count* 0)
(setq *jpage-count* 0)
(setq *JHelp-Engine*
(make-JHelp-Engine :wordhash (make-hash-table :test #'equalp)
:bigramhash (make-hash-table :test #'equalp)
;; Extend system here if you need more tables.
:categorieshash (make-hash-table :test #'equalp) ;; Stub. Not supported yet.
:learnedhash (make-hash-table :test #'equalp) ;; " ".
)))
;; Note that equalp is agnostic w/r/t lower and upper case, whereas equal is strict.
;; So we don't need to smallify. Fix this if this becomes significant for this application.
(defstruct stemmer tree)
(defparameter *JHelp-suffix-taster* nil)
(clear-JHelp-Engine) ;;now called inside (jhelp-init). Also, MUST be called here, as some params get reset.
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Help/jhelp-structs.lisp | lisp | mode : lisp ; base : 10 ; Syntax : Common - Lisp ; -*-
Move here to avoid compiler warning -- JP.
not "NIL".
Currently used for incoming probe.
and wordIDs are sorted .
deprecated. Delete soon.
(wordbag-bag myWB) (wordbag-total myWB) (wordbag-bigrams myWB)
deprecated. Delete soon.
deprecated. Delete soon.
inBundles is an unsorted list of Bundles that have that word contained in them.
deprecated. Delete soon.
(defvar *the-jdictionary* (make-JDictionary :wordentries NIL))
deprecated. Delete soon.
---------------------------------------------------------------------
Main output item of system.
This is a wrapper that holds whatever the real contents should be.
Atomic entity.
The ID is unique (a positive integer), and allows it to be sorted.
'symbol-doc 'topic 'tutorial 'jpage
functor-url is an expression that holds the documentation URL, that must be eval'd at run-time.
It is not a box command but is in fact a true URL that will often go to the legacy doc page. But quoted.
New slot: text. Possibly null string that lists all interesting stuff in Document.
Used for searching on output to give context lines.
This is redundant with the document itself, but various flavors of document have different texts,
Primarily used for function documentation.
this is the boots-on-ground version. We also keep track in a var.
throws error if out of bounds, fix this. 0-based.
"name" stores a simple symbol; "parameters" stores a list of parameter objects, get it from the function.
no, won't work, needs function "name" to get stuff, not JPage-name.
(defstruct JPage name title subtitle keywords logical-form summary text parameters )
It is simple enough that we do not declare a struct for it.
Score is absolute and is a float; it should be normalized for relevance, frequency, specificity.
We'll compare apples against other apples from other Words' barrels.
Summary is necessary as the merge interleaving must trickily be done later, after gathering, not inline.
that will get MERGED, sorted/ranked, truncated, and used to return.
As the final step in currying this, the scores are thrown away, and instead of a dual-level list of conses,
(make-JSummary :results nil :barrels nil)
and are fixed constant thereafter (assuming documentation is constant).
(make-JBarrel :entries nil :count 0)
Definition of the singleton object that runs everything.
Extend system here if you need more tables.
Stub. Not supported yet.
" ".
Note that equalp is agnostic w/r/t lower and upper case, whereas equal is strict.
So we don't need to smallify. Fix this if this becomes significant for this application.
now called inside (jhelp-init). Also, MUST be called here, as some params get reset. | (in-package :help)
(defparameter *jpage-count* 0)
(defparameter *jsymbol-count* 0)
(defun stringify (mess)
(if mess
(format nil "~a" mess)
)
)
Version 2 . Input side .
( setf ( wordbag - bag myWB ) newList )
( make - wordbag : bag NIL : total 0 : )
( defstruct JDictionary wordentries )
Version 3 . Run side .
(defstruct JDocument ID doctype thedoc functor-url text)
: ' documentation - file ' function - documentation ' glossary - entry ' module
thedoc : stores an arbitrary documentation object , often the legacy docobj
so it 's easier to keep a copy . Sigh . Next time do this with 10 different flavors of methods .
(defparameter *JDocument-count* 0)
Singleton array for keeping track of all of the known unique Documents . Make this a Hash soon .
(defparameter *JDocuments* (make-array 10000 :fill-pointer 0 :adjustable T))
(defun add-JDocument (JDoc) (vector-push-extend JDoc *JDocuments*))
JKM Mar 1 ' 13 Have to add fields " name " and " parameters " so as to shadow the functions ' slots .
(defstruct JPage title subtitle keywords logical-form summary text )
Barrel Entry is an assoc couplet of ( JDoc - IDint . score[/count ] ) .
Note that Barrels sort on the ID , whereas Scoring will later sort on the score , which will be reversed .
(defstruct JSummary results barrels)
A temporary holding system for the summarized results of the system . * One per query * , so sort of a singleton .
" results " is a single list of SWAPPED Barrel Entries , with score in front ,
it is boiled down into a single list of . But this is done as the last step .
" barrels " is a list of 0 or more Barrel structures , which each contain a list of Barrel Entries ( JDocID . score ) .
We own the results . We do * not * own the Barrels . Result entries must be * copied * from Barrel lists .
Results must be GC'd in a system that worries about such . Barrels do * not * get GC'd .
(defstruct JBarrel entries count)
entries is a list of Barrel Entries . count is the length of the list , for speed .
So a Barrel is effectively a list of JDocuments -- all the that have this Word / Concept in them .
The Barrels are compiled permanently at the beginning of JHelp startup ,
A Barrel does not get added to , after it has been put together .
Barrels are result entries that get hashed by Word , Bigram , or Concept .
Google probably means something different with their Barrels
as to what I 'm calling mine this year , caveat . Got ta call these something , though .
(defstruct JHelp-Engine wordhash bigramhash categorieshash learnedhash)
(defparameter *JHelp-Engine* nil)
(defun clear-JHelp-Engine ()
(setq *JDocuments* (make-array 10000 :fill-pointer 0 :adjustable T))
(setq *JDocument-count* 0)
(setq *jpage-count* 0)
(setq *JHelp-Engine*
(make-JHelp-Engine :wordhash (make-hash-table :test #'equalp)
:bigramhash (make-hash-table :test #'equalp)
)))
(defstruct stemmer tree)
(defparameter *JHelp-suffix-taster* nil)
|
f32e0c526d70ab802780968937eea69bf4d5d89526d8a86caa2ec21801c5a243 | coq-community/coqffi | m2.mli | val to_bool : M1.t -> bool
| null | https://raw.githubusercontent.com/coq-community/coqffi/9307403abe0919665cd38dcc8f71582e950aa7c1/examples/src/m2.mli | ocaml | val to_bool : M1.t -> bool
| |
0f8a143aca78c633d59e4b1a1202f3aba9cf4eeb89474ca0223c3c4fcae33b1f | Soyn/sicp | Ex2.39.rkt | #lang racket
Ex 2.39
(define(accumulate op init seqs)
(if (null? seqs)
init
(op (car seqs)
(accumulate op init (cdr seqs)))))
(define (fold-right op init sequence)
(if (null? sequence)
init
(op (car sequence)
(accumulate op init (cdr sequence)))))
(define (fold-left op init sequence)
(define (iter result rest)
(if (null? rest)
result
(iter (op result (car rest))
(cdr rest))))
(iter init sequence))
(define (reverse-using-right sequence)
(fold-right (lambda(first already-reversed)
(append already-reversed (list first)))
null
sequence))
(define (reverse-using-left sequence)
(fold-left (lambda (result first) (cons first result))
null
sequence))
;test usage
(define my-list (list 1 2 3 4))
(reverse-using-left my-list)
(reverse-using-right my-list) | null | https://raw.githubusercontent.com/Soyn/sicp/d2aa6e3b053f6d4c8150ab1b033a18f61fca7e1b/CH2/CH2.2/Ex2.39.rkt | racket | test usage | #lang racket
Ex 2.39
(define(accumulate op init seqs)
(if (null? seqs)
init
(op (car seqs)
(accumulate op init (cdr seqs)))))
(define (fold-right op init sequence)
(if (null? sequence)
init
(op (car sequence)
(accumulate op init (cdr sequence)))))
(define (fold-left op init sequence)
(define (iter result rest)
(if (null? rest)
result
(iter (op result (car rest))
(cdr rest))))
(iter init sequence))
(define (reverse-using-right sequence)
(fold-right (lambda(first already-reversed)
(append already-reversed (list first)))
null
sequence))
(define (reverse-using-left sequence)
(fold-left (lambda (result first) (cons first result))
null
sequence))
(define my-list (list 1 2 3 4))
(reverse-using-left my-list)
(reverse-using-right my-list) |
e53446ba51eee8b9d80a1306d32f77df5e52bfd00587884b81b2b98eed42fa7d | ocaml-multicore/tezos | main.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
* Tezos Protocol Implementation - Protocol Signature Instance
This module is the entrypoint to the protocol for shells and other
embedders . This signature is an instance of
{ { ! Tezos_protocol_environment_sigs . V3.T.Updater . PROTOCOL } the
[ Updater . PROTOCOL ] signature } from the
{ { : #the-economic-protocol-environment-and-compiler }
Protocol Environment } .
Each Protocol depends on a version of the Protocol Environment . For the
currently developed protocol , this is normally the latest version . You can
see { { ! Tezos_protocol_environment_sigs } the full list of versions here } .
For details on how Protocol and Environment interact , see
{ { : } this overview } .
This module is the entrypoint to the protocol for shells and other
embedders. This signature is an instance of
{{!Tezos_protocol_environment_sigs.V3.T.Updater.PROTOCOL} the
[Updater.PROTOCOL] signature} from the
{{:#the-economic-protocol-environment-and-compiler}
Protocol Environment}.
Each Protocol depends on a version of the Protocol Environment. For the
currently developed protocol, this is normally the latest version. You can
see {{!Tezos_protocol_environment_sigs} the full list of versions here}.
For details on how Protocol and Environment interact, see
{{:} this overview}.
*)
type validation_mode =
| Application of {
block_header : Alpha_context.Block_header.t;
fitness : Alpha_context.Fitness.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
predecessor_round : Alpha_context.Round.t;
predecessor_level : Alpha_context.Level.t;
}
| Partial_application of {
block_header : Alpha_context.Block_header.t;
fitness : Alpha_context.Fitness.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
(* Mempool only *)
| Partial_construction of {
predecessor : Block_hash.t;
predecessor_fitness : Fitness.t;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
only
| Full_construction of {
predecessor : Block_hash.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
protocol_data_contents : Alpha_context.Block_header.contents;
level : Int32.t;
round : Alpha_context.Round.t;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
type validation_state = {
mode : validation_mode;
chain_id : Chain_id.t;
ctxt : Alpha_context.t;
op_count : int;
migration_balance_updates : Alpha_context.Receipt.balance_updates;
liquidity_baking_escape_ema : Int32.t;
implicit_operations_results :
Apply_results.packed_successful_manager_operation_result list;
}
type operation_data = Alpha_context.packed_protocol_data
type operation = Alpha_context.packed_operation = {
shell : Operation.shell_header;
protocol_data : operation_data;
}
* [ check_manager_signature validation_state op raw_operation ]
The function starts by retrieving the public key hash [ pkh ] of the manager
operation . In case the operation is batched , the function also checks that
the sources are all the same .
Once the [ pkh ] is retrieved , the function looks for its associated public
key . For that , the manager operation is inspected to check if it contains
a public key revelation . If not , the public key is searched in the context .
@return [ Error Invalid_signature ] if the signature check fails
@return [ Error Unrevealed_manager_key ] if the manager has not yet been
revealed
@return [ Error Failure " get_manager_key " ] if the key is not found in the
context
@return [ Error Inconsistent_sources ] if the operations in a batch are not
from the same manager
The function starts by retrieving the public key hash [pkh] of the manager
operation. In case the operation is batched, the function also checks that
the sources are all the same.
Once the [pkh] is retrieved, the function looks for its associated public
key. For that, the manager operation is inspected to check if it contains
a public key revelation. If not, the public key is searched in the context.
@return [Error Invalid_signature] if the signature check fails
@return [Error Unrevealed_manager_key] if the manager has not yet been
revealed
@return [Error Failure "get_manager_key"] if the key is not found in the
context
@return [Error Inconsistent_sources] if the operations in a batch are not
from the same manager *)
val check_manager_signature :
validation_state ->
'b Alpha_context.Kind.manager Alpha_context.contents_list ->
'a Alpha_context.operation ->
unit tzresult Lwt.t
* [ precheck_manager validation_state op ] returns [ ( ) ] if the manager operation
[ op ] is solveable , returns an error otherwise . An operation is solveable if
it is well - formed and can pay the fees to be included in a block with either
a success or a failure status .
This function uses [ Apply.precheck_manager_contents_list ] but discard the
context and balance update
[op] is solveable, returns an error otherwise. An operation is solveable if
it is well-formed and can pay the fees to be included in a block with either
a success or a failure status.
This function uses [Apply.precheck_manager_contents_list] but discard the
context and balance update *)
val precheck_manager :
validation_state ->
'a Alpha_context.Kind.manager Alpha_context.contents_list ->
unit tzresult Lwt.t
include
Updater.PROTOCOL
with type block_header_data = Alpha_context.Block_header.protocol_data
and type block_header_metadata = Apply_results.block_metadata
and type block_header = Alpha_context.Block_header.t
and type operation_data := operation_data
and type operation_receipt = Apply_results.packed_operation_metadata
and type operation := operation
and type validation_state := validation_state
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_alpha/lib_protocol/main.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
Mempool only | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
* Tezos Protocol Implementation - Protocol Signature Instance
This module is the entrypoint to the protocol for shells and other
embedders . This signature is an instance of
{ { ! Tezos_protocol_environment_sigs . V3.T.Updater . PROTOCOL } the
[ Updater . PROTOCOL ] signature } from the
{ { : #the-economic-protocol-environment-and-compiler }
Protocol Environment } .
Each Protocol depends on a version of the Protocol Environment . For the
currently developed protocol , this is normally the latest version . You can
see { { ! Tezos_protocol_environment_sigs } the full list of versions here } .
For details on how Protocol and Environment interact , see
{ { : } this overview } .
This module is the entrypoint to the protocol for shells and other
embedders. This signature is an instance of
{{!Tezos_protocol_environment_sigs.V3.T.Updater.PROTOCOL} the
[Updater.PROTOCOL] signature} from the
{{:#the-economic-protocol-environment-and-compiler}
Protocol Environment}.
Each Protocol depends on a version of the Protocol Environment. For the
currently developed protocol, this is normally the latest version. You can
see {{!Tezos_protocol_environment_sigs} the full list of versions here}.
For details on how Protocol and Environment interact, see
{{:} this overview}.
*)
type validation_mode =
| Application of {
block_header : Alpha_context.Block_header.t;
fitness : Alpha_context.Fitness.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
predecessor_round : Alpha_context.Round.t;
predecessor_level : Alpha_context.Level.t;
}
| Partial_application of {
block_header : Alpha_context.Block_header.t;
fitness : Alpha_context.Fitness.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
| Partial_construction of {
predecessor : Block_hash.t;
predecessor_fitness : Fitness.t;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
only
| Full_construction of {
predecessor : Block_hash.t;
payload_producer : Alpha_context.public_key_hash;
block_producer : Alpha_context.public_key_hash;
protocol_data_contents : Alpha_context.Block_header.contents;
level : Int32.t;
round : Alpha_context.Round.t;
predecessor_level : Alpha_context.Level.t;
predecessor_round : Alpha_context.Round.t;
}
type validation_state = {
mode : validation_mode;
chain_id : Chain_id.t;
ctxt : Alpha_context.t;
op_count : int;
migration_balance_updates : Alpha_context.Receipt.balance_updates;
liquidity_baking_escape_ema : Int32.t;
implicit_operations_results :
Apply_results.packed_successful_manager_operation_result list;
}
type operation_data = Alpha_context.packed_protocol_data
type operation = Alpha_context.packed_operation = {
shell : Operation.shell_header;
protocol_data : operation_data;
}
* [ check_manager_signature validation_state op raw_operation ]
The function starts by retrieving the public key hash [ pkh ] of the manager
operation . In case the operation is batched , the function also checks that
the sources are all the same .
Once the [ pkh ] is retrieved , the function looks for its associated public
key . For that , the manager operation is inspected to check if it contains
a public key revelation . If not , the public key is searched in the context .
@return [ Error Invalid_signature ] if the signature check fails
@return [ Error Unrevealed_manager_key ] if the manager has not yet been
revealed
@return [ Error Failure " get_manager_key " ] if the key is not found in the
context
@return [ Error Inconsistent_sources ] if the operations in a batch are not
from the same manager
The function starts by retrieving the public key hash [pkh] of the manager
operation. In case the operation is batched, the function also checks that
the sources are all the same.
Once the [pkh] is retrieved, the function looks for its associated public
key. For that, the manager operation is inspected to check if it contains
a public key revelation. If not, the public key is searched in the context.
@return [Error Invalid_signature] if the signature check fails
@return [Error Unrevealed_manager_key] if the manager has not yet been
revealed
@return [Error Failure "get_manager_key"] if the key is not found in the
context
@return [Error Inconsistent_sources] if the operations in a batch are not
from the same manager *)
val check_manager_signature :
validation_state ->
'b Alpha_context.Kind.manager Alpha_context.contents_list ->
'a Alpha_context.operation ->
unit tzresult Lwt.t
* [ precheck_manager validation_state op ] returns [ ( ) ] if the manager operation
[ op ] is solveable , returns an error otherwise . An operation is solveable if
it is well - formed and can pay the fees to be included in a block with either
a success or a failure status .
This function uses [ Apply.precheck_manager_contents_list ] but discard the
context and balance update
[op] is solveable, returns an error otherwise. An operation is solveable if
it is well-formed and can pay the fees to be included in a block with either
a success or a failure status.
This function uses [Apply.precheck_manager_contents_list] but discard the
context and balance update *)
val precheck_manager :
validation_state ->
'a Alpha_context.Kind.manager Alpha_context.contents_list ->
unit tzresult Lwt.t
include
Updater.PROTOCOL
with type block_header_data = Alpha_context.Block_header.protocol_data
and type block_header_metadata = Apply_results.block_metadata
and type block_header = Alpha_context.Block_header.t
and type operation_data := operation_data
and type operation_receipt = Apply_results.packed_operation_metadata
and type operation := operation
and type validation_state := validation_state
|
a21e08ea81e2488bccd4761cec69cb29d38ea2fd1456b53ffaed1c94a67e3c3b | den1k/vimsical | branches.cljc | (ns vimsical.vcs.state.branches
"Keep track of the deltas for a branch"
(:require
[clojure.spec.alpha :as s]
[vimsical.vcs.alg.topo :as topo]
[vimsical.vcs.branch :as branch]
[vimsical.vcs.data.indexed.vector :as indexed]
[vimsical.vcs.delta :as delta]
[vimsical.vcs.data.splittable :as splittable]))
;;
;; * Spec
;;
;; A single branch's deltas: an indexed vector of deltas kept in topological order.
(s/def ::deltas (s/and ::indexed/vector (s/every ::delta/delta) topo/sorted?))
;; The state tracked by this namespace: branch uid -> that branch's deltas.
(s/def ::deltas-by-branch-uid (s/every-kv ::branch/uid ::deltas))
;; Initial (empty) value for ::deltas-by-branch-uid.
(def empty-deltas-by-branch-uid {})
;;
;; * Internal
;;
(defn- new-vector
  "Return an indexed vector keyed by :uid; when given `ds`, seed it from
  those deltas."
  ([]
   (indexed/vector-by :uid))
  ([ds]
   (indexed/vec-by :uid ds)))
;;
;; * API
;;
;; conj that tolerates a missing branch entry by starting from a fresh
;; indexed vector.
(def ^:private conj-deltas (fnil conj (new-vector)))
;; NOTE: the second s/cat tag was `:deltas` although it specs a single
;; ::delta/delta — renamed to `:delta` so conformed args read correctly.
(s/fdef add-delta
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :delta ::delta/delta)
  :ret ::deltas-by-branch-uid)
(defn add-delta
  "Append a single delta to its branch's deltas, creating the branch entry on
  first use. The delta must carry a :branch-uid (enforced by the :pre check)."
  [deltas-by-branch-uid {:keys [branch-uid] :as delta}]
  {:pre [branch-uid]}
  (update deltas-by-branch-uid branch-uid conj-deltas delta))
;; NOTE: both s/cat tags were `:deltas-by-branch-uid`; duplicate tags in a
;; spec regex op collide in the conformed map, so the second argument is
;; tagged `:new-deltas-by-branch-uid` instead.
(s/fdef add-deltas-by-branch-uid
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid
               :new-deltas-by-branch-uid (s/every-kv ::branch/uid (s/every ::delta/delta)))
  :ret ::deltas-by-branch-uid)
(defn add-deltas-by-branch-uid
  "Merge `deltas-by-branch-uid'` into `deltas-by-branch-uid`, appending each
  branch's incoming deltas after any deltas already recorded for that branch."
  [deltas-by-branch-uid deltas-by-branch-uid']
  (reduce-kv
   (fn [acc branch-uid deltas]
     ;; Index the incoming deltas once, then either append to the branch's
     ;; existing vector or install them as its first deltas.
     (let [deltas' (new-vector deltas)]
       (assoc acc branch-uid
              (if-some [prev-deltas (get acc branch-uid)]
                (splittable/append prev-deltas deltas')
                deltas'))))
   deltas-by-branch-uid
   deltas-by-branch-uid'))
(s/fdef add-deltas
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :deltas (s/nilable (s/every ::delta/delta)))
  :ret ::deltas-by-branch-uid)
(defn add-deltas
  "Add a (possibly nil or empty) sequence of deltas, grouping them by their
  :branch-uid before merging them into the per-branch state."
  [deltas-by-branch-uid deltas]
  (let [grouped (group-by :branch-uid deltas)]
    (add-deltas-by-branch-uid deltas-by-branch-uid grouped)))
(s/fdef get-deltas
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid
               :branch (s/or :branch ::branch/branch :uuid uuid?))
  :ret ::deltas)
(defn get-deltas
  "Look up a branch's deltas given either a branch entity map (keyed by
  :db/uid) or the branch uid itself. Returns nil for an unknown branch."
  [deltas-by-branch-uid branch-or-branch-uid]
  (let [branch-uid (if (map? branch-or-branch-uid)
                     (:db/uid branch-or-branch-uid)
                     branch-or-branch-uid)]
    (get deltas-by-branch-uid branch-uid)))
(s/fdef index-of-delta
  :args
  (s/or :delta (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :delta ::delta/delta)
        :params (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :branch-uid ::branch/uid :delta-uid ::delta/prev-uid))
  :ret (s/nilable number?))
(defn index-of-delta
  "Return the position of a delta within its branch's deltas, or nil when the
  branch has no deltas recorded."
  ([deltas-by-branch-uid {:keys [branch-uid uid]}]
   (index-of-delta deltas-by-branch-uid branch-uid uid))
  ([deltas-by-branch-uid branch-uid delta-uid]
   (when-some [deltas (get-deltas deltas-by-branch-uid branch-uid)]
     (indexed/index-of deltas delta-uid))))
| null | https://raw.githubusercontent.com/den1k/vimsical/1e4a1f1297849b1121baf24bdb7a0c6ba3558954/src/vcs/vimsical/vcs/state/branches.cljc | clojure |
* Spec
* Internal
* API
(ns vimsical.vcs.state.branches
  "Keep track of the deltas for a branch"
  (:require
   [clojure.spec.alpha :as s]
   [vimsical.vcs.alg.topo :as topo]
   [vimsical.vcs.branch :as branch]
   [vimsical.vcs.data.indexed.vector :as indexed]
   [vimsical.vcs.delta :as delta]
   [vimsical.vcs.data.splittable :as splittable]))
;; A single branch's deltas: an indexed vector of deltas in topological order.
(s/def ::deltas (s/and ::indexed/vector (s/every ::delta/delta) topo/sorted?))
;; The state tracked by this namespace: branch uid -> that branch's deltas.
(s/def ::deltas-by-branch-uid (s/every-kv ::branch/uid ::deltas))
;; Initial (empty) value for ::deltas-by-branch-uid.
(def empty-deltas-by-branch-uid {})
(defn- new-vector
  "Return an indexed vector keyed by :uid, optionally seeded with `deltas`."
  ([] (indexed/vector-by :uid))
  ([deltas] (indexed/vec-by :uid deltas)))
;; conj that tolerates a missing branch entry by starting a fresh vector.
(def ^:private conj-deltas (fnil conj (new-vector)))
;; Fixed: the second s/cat tag specs a single ::delta/delta, so it is tagged
;; `:delta` (was the misleading plural `:deltas`).
(s/fdef add-delta
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :delta ::delta/delta)
  :ret ::deltas-by-branch-uid)
(defn add-delta
  "Append a single delta to its branch's deltas, creating the branch entry on
  first use. The delta must carry a :branch-uid."
  [deltas-by-branch-uid {:keys [branch-uid] :as delta}]
  {:pre [branch-uid]}
  (update deltas-by-branch-uid branch-uid conj-deltas delta))
;; Fixed: both s/cat tags were `:deltas-by-branch-uid`; duplicate tags in a
;; spec regex op collide in the conformed map, so the second argument gets a
;; distinct tag.
(s/fdef add-deltas-by-branch-uid
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid
               :new-deltas-by-branch-uid (s/every-kv ::branch/uid (s/every ::delta/delta)))
  :ret ::deltas-by-branch-uid)
(defn add-deltas-by-branch-uid
  "Merge `deltas-by-branch-uid'` into `deltas-by-branch-uid`, appending each
  branch's incoming deltas after any deltas already recorded for that branch."
  [deltas-by-branch-uid deltas-by-branch-uid']
  (reduce-kv
   (fn [acc branch-uid deltas]
     (let [deltas' (new-vector deltas)]
       (assoc acc branch-uid
              (if-some [prev-deltas (get acc branch-uid)]
                (splittable/append prev-deltas deltas')
                deltas'))))
   deltas-by-branch-uid
   deltas-by-branch-uid'))
(s/fdef add-deltas
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :deltas (s/nilable (s/every ::delta/delta)))
  :ret ::deltas-by-branch-uid)
(defn add-deltas
  "Add a (possibly nil or empty) sequence of deltas, grouping them by their
  :branch-uid before merging them into the per-branch state."
  [deltas-by-branch-uid deltas]
  (add-deltas-by-branch-uid deltas-by-branch-uid (group-by :branch-uid deltas)))
(s/fdef get-deltas
  :args (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid
               :branch (s/or :branch ::branch/branch :uuid uuid?))
  :ret ::deltas)
(defn get-deltas
  "Look up a branch's deltas given either a branch entity map (keyed by
  :db/uid) or the branch uid itself."
  [deltas-by-branch-uid branch-or-branch-uid]
  (cond->> branch-or-branch-uid
    (map? branch-or-branch-uid) (:db/uid)
    true (get deltas-by-branch-uid)))
(s/fdef index-of-delta
  :args
  (s/or :delta (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :delta ::delta/delta)
        :params (s/cat :deltas-by-branch-uid ::deltas-by-branch-uid :branch-uid ::branch/uid :delta-uid ::delta/prev-uid))
  :ret (s/nilable number?))
(defn index-of-delta
  "Return the position of a delta within its branch's deltas, or nil when the
  branch has no deltas recorded."
  ([deltas-by-branch-uid {:keys [branch-uid uid] :as delta}]
   (index-of-delta deltas-by-branch-uid branch-uid uid))
  ([deltas-by-branch-uid branch-uid delta-uid]
   (some-> deltas-by-branch-uid
           (get-deltas branch-uid)
           (indexed/index-of delta-uid))))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.