_id
stringlengths
64
64
repository
stringlengths
6
84
name
stringlengths
4
110
content
stringlengths
0
248k
license
null
download_url
stringlengths
89
454
language
stringclasses
7 values
comments
stringlengths
0
74.6k
code
stringlengths
0
248k
911e6db95f196c3eedaae9a7ea52f27c8521bb0cadee7bdfc6fce266764a46c8
thierry-martinez/pyast
asdl.ml
include Ast module Err = Err let of_lexbuf lexbuf = try Parser.module_ Lexer.lexer lexbuf with Parser.Error -> Err.syntax (Loc.of_lexbuf lexbuf) let of_channel ?filename channel = let lexbuf = Lexing.from_channel channel in Option.iter (Lexing.set_filename lexbuf) filename; of_lexbuf lexbuf let of_file filename = let channel = open_in filename in Redirect.read_and_close channel (fun () -> of_channel ~filename channel)
null
https://raw.githubusercontent.com/thierry-martinez/pyast/2400e1a487daf3eadccf3385d39dbfb4cd7f7a8d/asdl/asdl.ml
ocaml
include Ast module Err = Err let of_lexbuf lexbuf = try Parser.module_ Lexer.lexer lexbuf with Parser.Error -> Err.syntax (Loc.of_lexbuf lexbuf) let of_channel ?filename channel = let lexbuf = Lexing.from_channel channel in Option.iter (Lexing.set_filename lexbuf) filename; of_lexbuf lexbuf let of_file filename = let channel = open_in filename in Redirect.read_and_close channel (fun () -> of_channel ~filename channel)
1579c47195b566c8f25601c74e2a469ee53f61dc137519ee1577bc839b77f595
KestrelInstitute/Specware
Tests.lisp
(test-directories ".") (test ("Bug 0016 : An incorrect SWPATH produces no error or warning" :path "/loser/loser/loser" :output '((:alternatives "Warning: Directory does not exist: /loser/loser/loser" "Warning: Directory does not exist: /loser/loser/loser/" "WARNING: Directory does not exist: /loser/loser/loser/") "Keeping old path:" (:alternatives "/:$SPECWARE/" "$SPECWARE:.:$SPECWARE/" "$SPECWARE/:./:/" "$SPECWARE/:./" "$SPECWARE/:/" "$SPECWARE/" "/:./:$SPECWARE/" "./:/:$SPECWARE/" ))) )
null
https://raw.githubusercontent.com/KestrelInstitute/Specware/2be6411c55f26432bf5c9e2f7778128898220c24/TestSuite/Bugs/Bug_0016/Tests.lisp
lisp
(test-directories ".") (test ("Bug 0016 : An incorrect SWPATH produces no error or warning" :path "/loser/loser/loser" :output '((:alternatives "Warning: Directory does not exist: /loser/loser/loser" "Warning: Directory does not exist: /loser/loser/loser/" "WARNING: Directory does not exist: /loser/loser/loser/") "Keeping old path:" (:alternatives "/:$SPECWARE/" "$SPECWARE:.:$SPECWARE/" "$SPECWARE/:./:/" "$SPECWARE/:./" "$SPECWARE/:/" "$SPECWARE/" "/:./:$SPECWARE/" "./:/:$SPECWARE/" ))) )
21e906d7441be43e84726e49031ae5fe46be45fbebfbc0a4c357ff50279643a1
tfausak/patrol
GeoSpec.hs
# LANGUAGE QuasiQuotes # module Patrol.Type.GeoSpec where import qualified Data.Aeson as Aeson import qualified Data.Aeson.QQ.Simple as Aeson import qualified Data.Text as Text import qualified Patrol.Type.Geo as Geo import qualified Test.Hspec as Hspec spec :: Hspec.Spec spec = Hspec.describe "Patrol.Type.Geo" $ do Hspec.describe "ToJSON" $ do Hspec.it "works" $ do let geo = Geo.empty json = [Aeson.aesonQQ| {} |] Aeson.toJSON geo `Hspec.shouldBe` json Hspec.it "works with a city" $ do let geo = Geo.empty {Geo.city = Text.pack "example-city"} json = [Aeson.aesonQQ| { "city": "example-city" } |] Aeson.toJSON geo `Hspec.shouldBe` json Hspec.it "works with a country code" $ do let geo = Geo.empty {Geo.countryCode = Text.pack "example-country-code"} json = [Aeson.aesonQQ| { "country_code": "example-country-code" } |] Aeson.toJSON geo `Hspec.shouldBe` json Hspec.it "works with a region" $ do let geo = Geo.empty {Geo.region = Text.pack "example-region"} json = [Aeson.aesonQQ| { "region": "example-region" } |] Aeson.toJSON geo `Hspec.shouldBe` json
null
https://raw.githubusercontent.com/tfausak/patrol/1cae55b3840b328cda7de85ea424333fcab434cb/source/test-suite/Patrol/Type/GeoSpec.hs
haskell
# LANGUAGE QuasiQuotes # module Patrol.Type.GeoSpec where import qualified Data.Aeson as Aeson import qualified Data.Aeson.QQ.Simple as Aeson import qualified Data.Text as Text import qualified Patrol.Type.Geo as Geo import qualified Test.Hspec as Hspec spec :: Hspec.Spec spec = Hspec.describe "Patrol.Type.Geo" $ do Hspec.describe "ToJSON" $ do Hspec.it "works" $ do let geo = Geo.empty json = [Aeson.aesonQQ| {} |] Aeson.toJSON geo `Hspec.shouldBe` json Hspec.it "works with a city" $ do let geo = Geo.empty {Geo.city = Text.pack "example-city"} json = [Aeson.aesonQQ| { "city": "example-city" } |] Aeson.toJSON geo `Hspec.shouldBe` json Hspec.it "works with a country code" $ do let geo = Geo.empty {Geo.countryCode = Text.pack "example-country-code"} json = [Aeson.aesonQQ| { "country_code": "example-country-code" } |] Aeson.toJSON geo `Hspec.shouldBe` json Hspec.it "works with a region" $ do let geo = Geo.empty {Geo.region = Text.pack "example-region"} json = [Aeson.aesonQQ| { "region": "example-region" } |] Aeson.toJSON geo `Hspec.shouldBe` json
cf6c1d04bc67524ce2d8bb6c38d6319abdd7a9204fe36c85e4023ae71cffa75a
eclipse-archived/agent
evals.ml
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright ( c ) 2020 ADLINK Technology Inc. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * -2.0 , or the Apache Software License 2.0 * which is available at -2.0 . * * SPDX - License - Identifier : EPL-2.0 OR Apache-2.0 * Contributors : 1 * ( gabriele ( dot ) baldoni ( at ) adlinktech ( dot ) com ) - 0.2.0 Development iteration * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright (c) 2020 ADLINK Technology Inc. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * -2.0, or the Apache Software License 2.0 * which is available at -2.0. 
* * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 * Contributors: 1 * Gabriele Baldoni (gabriele (dot) baldoni (at) adlinktech (dot) com ) - 0.2.0 Development iteration *********************************************************************************) open Lwt.Infix open Fos_sdk open Fos_sdk.Errors open Agent_state open Utils (* Evals *) let eval_get_fdu_info self (props:Apero.properties) = MVar.read self >>= fun state -> let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_uuid" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id fdu_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ User.Descriptors.FDU.string_of_descriptor descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_get_image_info self (props:Apero.properties) = MVar.read self >>= fun state -> let image_uuid = Apero.Option.get @@ Apero.Properties.get "image_uuid" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_image (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id image_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ User.Descriptors.FDU.string_of_image descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_get_node_fdu_info self 
(props:Apero.properties) = MVar.read self >>= fun state -> let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_uuid" props in let node_uuid = Apero.Option.get @@ Apero.Properties.get "node_uuid" props in let instanceid = Apero.Option.get @@ Apero.Properties.get "instance_uuid" props in try%lwt Logs.debug (fun m -> m "[eval_get_node_fdu_info] - Search for FDU Info"); let%lwt descriptor = Yaks_connector.Global.Actual.get_node_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id node_uuid fdu_uuid instanceid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ Infra.Descriptors.FDU.string_of_record descriptor in Logs.debug (fun m -> m "[eval_get_node_fdu_info] - INFO %s" (FAgentTypes.string_of_json js)); let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_get_network_info self (props:Apero.properties) = MVar.read self >>= fun state -> let net_uuid = Apero.Option.get @@ Apero.Properties.get "uuid" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_network (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id net_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ FTypes.string_of_virtual_network descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 22; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_get_port_info self (props:Apero.properties) = MVar.read self 
>>= fun state -> let cp_uuid = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in Logs.debug (fun m -> m "[eval_get_port_info] - Getting info for port %s" cp_uuid ); try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_port (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id cp_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ User.Descriptors.Network.string_of_connection_point_descriptor descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | _ -> Logs.debug (fun m -> m "[eval_get_port_info] - Search port on FDU"); let%lwt fdu_ids = Yaks_connector.Global.Actual.get_catalog_all_fdus (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id state.yaks in let%lwt cps = Lwt_list.filter_map_p (fun e -> let%lwt fdu = Yaks_connector.Global.Actual.get_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id e state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let%lwt c = Lwt_list.filter_map_p (fun (cp:User.Descriptors.Network.connection_point_descriptor) -> Logs.debug (fun m -> m "[eval_get_port_info] - %s == %s ? 
%d " cp.id cp_uuid (String.compare cp.id cp_uuid)); if (String.compare cp.id cp_uuid) == 0 then Lwt.return @@ Some cp else Lwt.return None ) fdu.connection_points in Lwt.return @@ List.nth_opt c 0 ) fdu_ids in try%lwt let cp = List.hd cps in let js = FAgentTypes.json_of_string @@ User.Descriptors.Network.string_of_connection_point_descriptor cp in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_get_node_mgmt_address self (props:Apero.properties) = MVar.read self >>= fun state -> let node_uuid = Apero.Option.get @@ Apero.Properties.get "node_uuid" props in try%lwt let%lwt nconf = Yaks_connector.Global.Actual.get_node_configuration (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id node_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let%lwt descriptor = Yaks_connector.Global.Actual.get_node_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id node_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let nws = descriptor.network in let%lwt addr = (Lwt_list.filter_map_p ( fun (e:FTypes.network_spec_type) -> if (String.compare e.intf_name nconf.agent.mgmt_interface) == 0 then Lwt.return @@ Some e.intf_configuration else Lwt.return None ) nws) >>= fun l -> Lwt.return @@ List.hd l in let js = FAgentTypes.json_of_string @@ FTypes.string_of_intf_conf_type addr in let eval_res = FAgentTypes.{result = Some js; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res NM Evals let 
eval_create_net self (props:Apero.properties) = MVar.read self >>= fun state -> try%lwt let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_create_net] - ##############"); Logs.debug (fun m -> m "[eval_create_net] - Properties: %s" (Apero.Properties.to_string props) ); let descriptor = FTypes.virtual_network_of_string @@ Apero.Option.get @@ Apero.Properties.get "descriptor" props in let record = FTypesRecord.{uuid = descriptor.uuid; status = `CREATE; properties = None; ip_configuration = descriptor.ip_configuration; overlay = None; vni = None; mcast_addr = None; vlan_id = None; face = None} in Yaks_connector.Local.Desired.add_node_network (Apero.Option.get state.configuration.agent.uuid) net_p descriptor.uuid record state.yaks >>= fun _ -> let js = JSON.of_string @@ FTypesRecord.string_of_virtual_network record in let eval_res = FAgentTypes.{result = Some js ; error=None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let bt = Printexc.get_backtrace () in Logs.err (fun m -> m "[eval_create_net] - Exception: %s Trace %s " (Printexc.to_string exn) bt); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_remove_net self (props:Apero.properties) = MVar.read self >>= fun state -> try%lwt let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_remove_net] - ##############"); Logs.debug (fun m -> m "[eval_remove_net] - Properties: %s" (Apero.Properties.to_string props)); let net_id =Apero.Option.get @@ Apero.Properties.get "net_id" props in let%lwt record = Yaks_connector.Local.Actual.get_node_network (Apero.Option.get state.configuration.agent.uuid) net_p net_id state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let record = {record with status = `DESTROY} in let%lwt _ = Yaks_connector.Local.Desired.add_node_network (Apero.Option.get 
state.configuration.agent.uuid) net_p net_id record state.yaks in Yaks_connector.Global.Actual.remove_network (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id record.uuid state.yaks >>= Lwt.return >>= fun _ -> let js = JSON.of_string @@ FTypesRecord.string_of_virtual_network record in let eval_res = FAgentTypes.{result = Some js ; error=None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> Logs.err (fun m -> m "[eval_remove_net] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_create_cp self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_create_cp] - ##############"); Logs.debug (fun m -> m "[eval_create_cp] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let descriptor = User.Descriptors.Network.connection_point_descriptor_of_string @@ Apero.Option.get @@ Apero.Properties.get "descriptor" props in Logs.debug (fun m -> m "[eval_create_cp] - # NetManager: %s" net_p); try%lwt let parameters = [("descriptor",User.Descriptors.Network.string_of_connection_point_descriptor descriptor)] in let fname = "create_port_agent" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot connect create cp %s") descriptor.id ),503))) with | exn -> Logs.err (fun m -> m "[eval_create_cp] Exception : %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_remove_cp 
self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_remove_cp] - ##############"); Logs.debug (fun m -> m "[eval_remove_cp] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_id" props in Logs.debug (fun m -> m "[eval_remove_cp] - # NetManager: %s" net_p); try%lwt let parameters = [("cp_id", cp_id)] in let fname = "destroy_port_agent" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot destroy create cp %s") cp_id ),503))) with | exn -> Logs.err (fun m -> m "[eval_remove_cp] Exception %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_connect_cp_to_fdu_face self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - ##############"); Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_id" props in let instance_id = Apero.Option.get @@ Apero.Properties.get "instance_id" props in let interface = Apero.Option.get @@ Apero.Properties.get "interface" props in try%lwt let%lwt record = Yaks_connector.Global.Actual.get_node_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) "*" instance_id state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in Logs.debug (fun m -> m "[[eval_connect_cp_to_fdu_face] - FDU Record: %s" (Infra.Descriptors.FDU.string_of_record record)); 
(* Find Correct Plugin *) let fdu_type = Fos_sdk.string_of_hv_type record.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return @@ Some pl else Lwt.return None ) plugins in let pl = match matching_plugins with | [] -> Logs.err (fun m -> m "Cannot find a plugin for this FDU even if it is present in the node WTF!! %s" instance_id ); None | _ -> Some ((List.hd matching_plugins).uuid) in (* Create Record * Add UUID for each component * Fix references with UUIDs *) (match pl with | Some plid -> Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - Plugin ID: %s" plid); let parameters = [("cpid", cp_id);("instanceid", instance_id);("iface",interface)] in let fname = "connect_interface_to_cp" in Yaks_connector.Local.Actual.exec_plugin_eval (Apero.Option.get state.configuration.agent.uuid) plid fname parameters state.yaks >>= fun res -> (match res with | Some r -> Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - Connected!"); Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot connect cp to interface %s") cp_id ),503))) ) | None -> Lwt.fail @@ FException (`PluginNotFound (`MsgCode ((Printf.sprintf ("CRITICAL!!!! Cannot find a plugin for this FDU even if it is present in the node!! 
%s") instance_id ),404)))) with | exn -> Logs.err (fun m -> m "[eval_connect_cp_to_fdu_face] Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_disconnect_cp_from_fdu_face self (props:Apero.properties) = Logs.debug (fun m -> m "[[eval_disconnect_cp_from_fdu_face] - ##############"); Logs.debug (fun m -> m "[[eval_disconnect_cp_from_fdu_face] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let face = Apero.Option.get @@ Apero.Properties.get "interface" props in let instance_id = Apero.Option.get @@ Apero.Properties.get "instance_id" props in try%lwt let%lwt record = Yaks_connector.Global.Actual.get_node_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) "*" instance_id state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in (* Find Correct Plugin *) let fdu_type = Fos_sdk.string_of_hv_type record.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return @@ Some pl else Lwt.return None ) plugins in let pl = match matching_plugins with | [] -> Logs.err (fun m -> m "[eval_disconnect_cp_from_fdu_face] - Cannot find a plugin for this FDU even if it is present in the node WTF!! 
%s" instance_id ); None | _ -> Some ((List.hd matching_plugins).uuid) in (* Create Record * Add UUID for each component * Fix references with UUIDs *) (match pl with | Some plid -> let parameters = [("iface", face);("instanceid", instance_id)] in let fname = "disconnect_interface_from_cp" in Yaks_connector.Local.Actual.exec_plugin_eval (Apero.Option.get state.configuration.agent.uuid) plid fname parameters state.yaks >>= fun res -> (match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot disconnect cp from interface %s") face ),503))) ) | None -> Lwt.fail @@ FException (`PluginNotFound (`MsgCode ((Printf.sprintf ("CRITICAL!!!! Cannot find a plugin for this FDU even if it is present in the node WTF!! %s") instance_id ),404)))) with | exn -> Logs.err (fun m -> m "[eval_disconnect_cp_from_fdu_face] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_connect_cp_to_network self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_connect_cp_to_network] - ##############"); Logs.debug (fun m -> m "[eval_connect_cp_to_network] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in let net_id = Apero.Option.get @@ Apero.Properties.get "network_uuid" props in Logs.debug (fun m -> m "[eval_connect_cp_to_network] - # NetManager: %s" net_p); try%lwt let parameters = [("cp_id",cp_id);("vnet_id", net_id)] in let fname = "connect_cp_to_vnetwork" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None 
-> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot connect cp %s to netwokr %s") cp_id net_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_remove_cp_from_network self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_remove_cp_from_network] - ##############"); Logs.debug (fun m -> m "[eval_remove_cp_from_network] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in Logs.debug (fun m -> m "[eval_remove_cp_from_network] - # NetManager: %s" net_p); try%lwt let parameters = [("cp_id",cp_id)] in let fname = "disconnect_cp" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot Remove cp %s from netwokr") cp_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res FDU schedule let eval_schedule_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_schedule_fdu] - ##############"); Logs.debug (fun m -> m "[eval_schedule_fdu] - Properties: %s" (Apero.Properties.to_string props)); try%lwt MVar.read self >>= fun state -> let sysid = (Apero.Option.get @@ state.configuration.agent.system) in let tenantid = Yaks_connector.default_tenant_id in let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_id" props in let%lwt descriptor = match%lwt Yaks_connector.Global.Actual.get_catalog_fdu_info sysid tenantid fdu_uuid state.yaks with | Some d -> Lwt.return d 
| None -> Logs.err (fun m -> m "[eval_schedule_fdu] - FDU not found"); Lwt.fail @@ FException (`NotFound (`MsgCode (( Printf.sprintf ("FDU %s not found in catalog") fdu_uuid),404) )) in let%lwt res = Yaks_connector.Global.Actual.call_multi_node_check sysid Yaks_connector.default_tenant_id descriptor state.yaks in match res with | [] -> Logs.err (fun m -> m "[eval_schedule_fdu] - No node found for this FDU"); Lwt.fail @@ FException (`NotFound (`MsgCode ("No node found for this FDU",404) )) | nodes -> let%lwt lst = Lwt_list.filter_map_p (fun (e:FAgentTypes.eval_result) -> match e.result with | Some r -> let r = (FAgentTypes.compatible_node_response_of_string (JSON.to_string r)) in (match r.is_compatible with | true -> Logs.info (fun m -> m "[eval_schedule_fdu] - Node %s is compatible" r.uuid ); Lwt.return (Some r.uuid) | false -> Lwt.return None) | None -> Lwt.return None ) nodes in match lst with | [] -> Lwt.fail @@ FException (`NotFound (`MsgCode ("No node found for this FDU",404) )) | compatibles -> let destination = List.nth compatibles (Random.int (List.length compatibles)) in Logs.info (fun m -> m "[eval_schedule_fdu] - Node %s is random selected as destination for %s " destination descriptor.id); let%lwt record = Fos_fim_api.FDU.define fdu_uuid destination state.fim_api in let eval_res = FAgentTypes.{result = Some (JSON.of_string (Infra.Descriptors.FDU.string_of_record record)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | FException ex -> Logs.err (fun m -> m "[eval_schedule_fdu] - EXCEPTION: %s" (Fos_errors.show_ferror ex)); (match ex with | `NotFound ei -> (match ei with | `MsgCode (err,code) -> let eval_res = FAgentTypes.{result = None ; error = Some code; error_msg = Some err} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | _ -> let eval_res = FAgentTypes.{result = None ; error = Some 500; error_msg = Some (Fos_errors.show_ferror ex)} in Lwt.return @@ FAgentTypes.string_of_eval_result 
eval_res) | _ -> let eval_res = FAgentTypes.{result = None ; error = Some 500; error_msg = Some (Fos_errors.show_ferror ex)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res) | exn -> Logs.err (fun m -> m "[eval_schedule_fdu] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 500; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res FDU Onboard in Catalog -- this may be moved to FOrcE let eval_onboard_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[[eval_onboard_fdu] - ##############"); Logs.debug (fun m -> m "[eval_onboard_fdu] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let descriptor = Apero.Option.get @@ Apero.Properties.get "descriptor" props in try%lwt let descriptor = User.Descriptors.FDU.descriptor_of_string descriptor in let descriptor = match descriptor.uuid with | Some _ -> descriptor | None -> let fduid = Apero.Uuid.to_string @@ Apero.Uuid.make_from_alias descriptor.id in {descriptor with uuid = Some fduid} in Yaks_connector.Global.Actual.add_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get descriptor.uuid) descriptor state.yaks >>= fun _ -> let js = JSON.of_string (User.Descriptors.FDU.string_of_descriptor descriptor) in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> Logs.err (fun m -> m "[eval_onboard_fdu] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res FDU Definition in Node let eval_define_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_define_fdu] - ##############"); Logs.debug (fun m -> m "[eval_define_fdu] - Properties: %s" 
(Apero.Properties.to_string props)); MVar.read self >>= fun state -> let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_id" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id fdu_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in (* Find Correct Plugin *) let fdu_type = Fos_sdk.string_of_hv_type descriptor.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return @@ Some pl else Lwt.return None ) plugins in let pl = match matching_plugins with | [] -> None | _ -> Some ((List.hd matching_plugins).uuid) in (* Create Record * Add UUID for each component * Fix references with UUIDs *) let instanceid = Apero.Uuid.to_string @@ Apero.Uuid.make () in let cp_records = List.map ( fun (e:User.Descriptors.Network.connection_point_descriptor) -> let cpuuid = Apero.Uuid.to_string @@ Apero.Uuid.make () in Infra.Descriptors.Network.{ uuid = cpuuid; status = `CREATE; cp_id = e.id; cp_type = e.cp_type; port_security_enabled = e.port_security_enabled; properties = None; veth_face_name = None; br_name = None; vld_ref = e.vld_ref } ) descriptor.connection_points in let interface_records = List.map (fun (e:User.Descriptors.FDU.interface) -> let cp_new_id = match e.cp_id with | Some cp_id -> let cp = List.find (fun (cp:Infra.Descriptors.FDU.connection_point_record) -> cp_id = cp.cp_id ) cp_records in Some cp.uuid | None -> None in match e.virtual_interface.intf_type with | `PHYSICAL | `BRIDGED -> Logs.debug (fun m -> m "[eval_define_fdu] - THIS FDU HAS PHYSICAL INTERFACE"); let r = 
Infra.Descriptors.FDU.{name = e.name; is_mgmt = e.is_mgmt; if_type = e.if_type; mac_address = e.mac_address; virtual_interface = e.virtual_interface; cp_id = cp_new_id; ext_cp_id = e.ext_cp_id; vintf_name = e.name; status = `CREATE; phy_face = Some e.virtual_interface.vpci; veth_face_name = None; properties = None} in Logs.debug (fun m -> m "[eval_define_fdu] - THIS FDU HAS PHYSICAL INTERFACE RECORD: %s" (Infra.Descriptors.FDU.string_of_interface r)); r | _ -> Infra.Descriptors.FDU.{name = e.name; is_mgmt = e.is_mgmt; if_type = e.if_type; mac_address = e.mac_address; virtual_interface = e.virtual_interface; cp_id = cp_new_id; ext_cp_id = e.ext_cp_id; vintf_name = e.name; status = `CREATE; phy_face = None; veth_face_name = None; properties = None} ) descriptor.interfaces in let storage_records = List.map (fun (e:User.Descriptors.FDU.storage_descriptor) -> let st_uuid = Apero.Uuid.to_string @@ Apero.Uuid.make () in let cp_new_id = match e.cp_id with | Some cp_id -> let cp = List.find (fun (cp:Infra.Descriptors.FDU.connection_point_record) -> cp_id = cp.cp_id ) cp_records in Some cp.cp_id | None -> None in Infra.Descriptors.FDU.{uuid = st_uuid; storage_id = e.id; storage_type = e.storage_type; size = e.size; file_system_protocol = e.file_system_protocol; cp_id = cp_new_id} ) descriptor.storage in let record = Infra.Descriptors.FDU.{ uuid = instanceid; fdu_id = Apero.Option.get @@ descriptor.uuid; status = `DEFINE; image = descriptor.image; command = descriptor.command; storage = storage_records; computation_requirements = descriptor.computation_requirements; geographical_requirements = descriptor.geographical_requirements; energy_requirements = descriptor.energy_requirements; hypervisor = descriptor.hypervisor; migration_kind = descriptor.migration_kind; configuration = descriptor.configuration; interfaces = interface_records; io_ports = descriptor.io_ports; connection_points = cp_records; depends_on = descriptor.depends_on; error_code = None; error_msg = None; 
migration_properties = None; hypervisor_info = JSON.create_empty () } in (match pl with | Some plid -> Yaks_connector.Local.Desired.add_node_fdu (Apero.Option.get state.configuration.agent.uuid) plid fdu_uuid instanceid record state.yaks >>= fun _ -> let js = JSON.of_string (Infra.Descriptors.FDU.string_of_record record) in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`PluginNotFound (`MsgCode ((Printf.sprintf ("Node %s has no plugin for %s") (Apero.Option.get state.configuration.agent.uuid) fdu_uuid ),404)))) with | exn -> Logs.err (fun m -> m "[eval_define_fdu] - Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res FDU Requirements Checks At Intitial implementation just checks cpu architecture , ram and if there is a plugin for the FDU * Checks : * Plugin : done * CPU : done * CPU Count : done * CPU : done * RAM size : done * Disk space : done * Image in case the image file starts with file:// : done * Command if starts with / : done * Interfaces in case they are bridged or physical : done ( * Following requirements can not be checked today because of lack of discovery from OS Plugins * Checks: * Plugin: done * CPU Arch: done * CPU Count : done * CPU Freq : done * RAM size: done * Disk space : done * Image in case the image file starts with file:// : done * Command if starts with / : done * Interfaces in case they are bridged or physical : done (* Following requirements cannot be checked today because of lack of discovery from OS Plugins *) * GPUs * FPGAs * I/O Devices And idea can be having some filters functions that return boolean value and run this function one after the other using AND logical operation eg. 
let compatible = true in
let compatible = compatible and run_cpu_filter fdu node_info in
let compatible = compatible and run_ram_filter fdu node_info in
let compatible = compatible and run_disk_filter fdu node_info in
.... *)
(* FDU compatibility check.
   Reads "descriptor" from [props] (a JSON-encoded User.Descriptors.FDU.descriptor)
   and decides whether THIS node can host that FDU: matching hypervisor plugin,
   CPU arch/count/frequency, RAM, disk on "/", local existence of file:// images
   and absolute command binaries, and presence of every PHYSICAL/BRIDGED interface.
   Returns a JSON-encoded FAgentTypes.eval_result whose [result] is a
   compatible_node_response {uuid; is_compatible}. Never raises: any failure
   is reported either as is_compatible=false or as eval_result.error = 11. *)
let eval_check_fdu self (props:Apero.properties) =
  Logs.debug (fun m -> m "[eval_check_fdu] - ##############");
  Logs.debug (fun m -> m "[eval_check_fdu] - Properties: %s" (Apero.Properties.to_string props));
  MVar.read self >>= fun state ->
  let descriptor = Apero.Option.get @@ Apero.Properties.get "descriptor" props in
  try%lwt
    let descriptor = User.Descriptors.FDU.descriptor_of_string descriptor in
    let%lwt node_info =
      Yaks_connector.Global.Actual.get_node_info
        (Apero.Option.get @@ state.configuration.agent.system)
        Yaks_connector.default_tenant_id
        (Apero.Option.get state.configuration.agent.uuid)
        state.yaks
      >>= fun x -> Lwt.return @@ Apero.Option.get x
    in
    (* [compare] runs each requirement check and ANDs the results. Any exception
       while checking (e.g. no disk mounted on "/") marks the node incompatible. *)
    let compare (fdu:User.Descriptors.FDU.descriptor) (ninfo:FTypes.node_info) =
      try%lwt
        let fdu_cp = fdu.computation_requirements in
        let fdu_net = fdu.interfaces in
        let ncpu = List.hd ninfo.cpu in
        (* Disk hosting "/" -- Option.get raises if absent; caught by the handler below. *)
        let ndisk = List.find_opt (fun (e:FTypes.disks_spec_type) -> (String.compare e.mount_point "/")==0 ) ninfo.disks |> Apero.Option.get in
        (* Plugin check: at least one local plugin whose name matches the FDU hypervisor. *)
        let fdu_type = Fos_sdk.string_of_hv_type descriptor.hypervisor in
        let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in
        let%lwt matching_plugins = Lwt_list.filter_map_p (fun e ->
            let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in
            if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then
              Lwt.return (Some pl.uuid)
            else
              Lwt.return None
          ) plugins
        in
        let has_plugin = match matching_plugins with | [] -> false | _ -> true in
        (* Computation requirements against the node's discovered hardware. *)
        let cpu_arch_check = ((String.compare fdu_cp.cpu_arch ncpu.arch) == 0) in
        let cpu_number_check = (fdu_cp.cpu_min_count <= (List.length ninfo.cpu)) in
        let cpu_freq_check = (fdu_cp.cpu_min_freq <= (Float.to_int ncpu.frequency)) in
        let ram_size_check = (fdu_cp.ram_size_mb <= ninfo.ram.size) in
        let disk_size_check = fdu_cp.storage_size_gb <= ndisk.dimension in
        (* Image check: a file:// URI must point to a file that exists locally. *)
        let image_check =
          match descriptor.image with
          | Some i ->
            let file_re = Str.regexp "file://*" in
            (match Str.string_match file_re i.uri 0 with
             | false -> true
             | true ->
               let fname = String.sub i.uri 7 ((String.length i.uri)-7) in
               Sys.file_exists fname)
          | None -> true
        in
        (* Command check: an absolute binary path must exist on this node. *)
        let command_check =
          match descriptor.command with
          | Some i ->
            let file_re = Str.regexp "/*" in
            (match Str.string_match file_re i.binary 0 with
             | false -> true
             | true -> Sys.file_exists i.binary)
          | None -> true
        in
        (* Interface check: each PHYSICAL/BRIDGED interface of the FDU must name
           (via vpci) an interface that the node actually exposes. *)
        let interfaces_check =
          let ninterfaces = ninfo.network in
          let interfaces = List.filter (fun (e:User.Descriptors.FDU.interface) ->
              match e.virtual_interface.intf_type with
              | `PHYSICAL | `BRIDGED -> true
              | _ -> false
            ) fdu_net
          in
          let checks = List.map (fun (e:User.Descriptors.FDU.interface) ->
              let face_name = e.virtual_interface.vpci in
              match List.find_opt (fun (ne:FTypes.network_spec_type) -> (String.compare ne.intf_name face_name)==0 ) ninterfaces with
              | Some _ -> true
              | None -> false
            ) interfaces
          in
          List.fold_left (fun i j -> i && j) true checks
        in
        let res = has_plugin && cpu_arch_check && cpu_freq_check && cpu_number_check && ram_size_check && disk_size_check && image_check && command_check && interfaces_check in
        Logs.debug (fun m -> m "[eval_check_fdu] - Plugin Check: %b" has_plugin );
        Logs.debug (fun m -> m "[eval_check_fdu] - CPU Arch Check: %s = %s ? %b" fdu_cp.cpu_arch ncpu.arch cpu_arch_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - CPU Number Check: %d <= %d ? %b" fdu_cp.cpu_min_count (List.length ninfo.cpu) cpu_number_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - CPU Freq Check: %d <= %d ? %b" fdu_cp.cpu_min_freq (Float.to_int ncpu.frequency) cpu_freq_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - RAM Size Check: %f <= %f ? %b" fdu_cp.ram_size_mb ninfo.ram.size ram_size_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - Disk Size Check: %f <= %f ? %b" fdu_cp.storage_size_gb ndisk.dimension disk_size_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - Image Check: %b" image_check);
        (* BUG FIX: this log line previously printed [image_check] under the
           "Command Check" label, hiding the real command-check outcome. *)
        Logs.debug (fun m -> m "[eval_check_fdu] - Command Check: %b" command_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - Interfaces Check: %b" interfaces_check);
        Logs.debug (fun m -> m "[eval_check_fdu] - Is compatible? %b" res);
        Lwt.return res
      with
      | exn ->
        Logs.err (fun m -> m "[eval_check_fdu] - Exception: %s" (Printexc.to_string exn));
        Lwt.return false
    in
    let%lwt res = compare descriptor node_info in
    let res =
      match res with
      | true -> FAgentTypes.{uuid = (Apero.Option.get state.configuration.agent.uuid); is_compatible=true }
      | false -> FAgentTypes.{uuid = (Apero.Option.get state.configuration.agent.uuid); is_compatible=false }
    in
    let js = JSON.of_string (FAgentTypes.string_of_compatible_node_response res) in
    let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in
    Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
  with
  | exn ->
    Logs.err (fun m -> m "[eval_check_fdu] - Exception: %s" (Printexc.to_string exn));
    let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = None} in
    Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* NM Floating IPs *)
let eval_create_floating_ip self (props:Apero.properties) =
  ignore props;
  Logs.debug (fun m -> m "[eval_create_floating_ip] - ##############");
  Logs.debug (fun m -> m "[eval_create_floating_ip] - Properties: %s" (Apero.Properties.to_string props));
  MVar.read self >>= fun state
-> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_create_floating_ip] - # NetManager: %s" net_p); try%lwt let fname = "create_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname [] state.yaks >>= fun res -> match res with | Some r -> Logs.debug (fun m -> m "[eval_create_floating_ip] - Eval Result %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ("Cannot create floating ip %s not found",503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string exn)} in Logs.err (fun m -> m "[eval_create_floating_ip] - # Error when creating floating IP: %s" (Printexc.to_string exn)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_delete_floating_ip self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_delete_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_delete_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_delete_floating_ip- # NetManager: %s" net_p); try%lwt let ip_id = Apero.Option.get @@ Apero.Properties.get "floating_uuid" props in let parameters = 
[("ip_id",ip_id)] in let fname = "delete_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with Some r -> Logs.debug (fun m -> m "[eval_delete_floating_ip] - Eval Result %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`NotFound (`MsgCode ((Printf.sprintf ("Floating IP %s not found") ip_id ),404))) with e -> let msg = Printexc.to_string e and stack = Printexc.get_backtrace () in Logs.err (fun m -> m "[eval_delete_floating_ip]- Error: %s %s" msg stack); let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string e)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_assign_floating_ip self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_assign_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_assign_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in try%lwt let ip_id = Apero.Option.get @@ Apero.Properties.get "floating_uuid" props in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in let parameters = [("ip_id",ip_id);("cp_id",cp_id)] in let fname = "assign_floating_ip" in 
Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error=None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`NotFound (`MsgCode ((Printf.sprintf ("Cannot assing IP %s to cp %s") ip_id cp_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 33; error_msg = Some (Printexc.to_string exn)} in Logs.err (fun m -> m "[eval_assign_floating_ip] - Exception: %s" (Printexc.to_string exn)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_remove_floating_ip self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_remove_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_remove_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in try%lwt let ip_id = Apero.Option.get @@ Apero.Properties.get "floating_uuid" props in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in let parameters = [("ip_id",ip_id);("cp_id",cp_id)] in let fname = "remove_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Convertion from record let floating_r = 
FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot remove floating IP %s not found") ip_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 33; error_msg = Some (Printexc.to_string exn)} in Logs.err (fun m -> m "[eval_remove_floating_ip] - Exception: %s" (Printexc.to_string exn)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_add_router_port self (props:Apero.properties) = ignore props; Logs.debug (fun m -> m "[eval_add_router_port] - ##############"); Logs.debug (fun m -> m "[eval_add_router_port] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_add_router_port] - # NetManager: %s" net_p); try%lwt let fname = "add_router_port" in let rid = Apero.Option.get @@ Apero.Properties.get "router_id" props in let port_type = Apero.Option.get @@ Apero.Properties.get "port_type" props in let parameters = [("router_id", rid); ("port_type", port_type)] in let parameters = match Apero.Properties.get "vnet_id" props with | Some vid -> parameters @ [("vnet_id",vid)] | None -> parameters in let parameters = match Apero.Properties.get "ip_address" props with | Some ip -> parameters @ [("ip_address",ip)] | None -> parameters in Yaks_connector.Local.Actual.exec_nm_eval 
(Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Logs.debug (fun m -> m "[eval_add_router_port] - Eval Result: %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let router = Router.record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let%lwt ports = Lwt_list.map_p (fun (e:Router.router_port_record) -> Lwt.return Router.{port_type = e.port_type; vnet_id = e.vnet_id; ip_address = Some e.ip_address} ) router.ports in let router_desc = Router.{uuid = Some router.uuid; ports = ports; } in (* *) Yaks_connector.Global.Actual.add_node_router (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) router.uuid router_desc state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (Router.string_of_record router)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot create to router %s") rid ),503))) with | exn -> Logs.err (fun m -> m "[eval_add_router_port] Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_remove_router_port self (props:Apero.properties) = ignore props; Logs.debug (fun m -> m "[eval_remove_router_port]- ##############"); Logs.debug (fun m -> m "[eval_remove_router_port] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_remove_router_port] - # NetManager: %s" net_p); try%lwt let fname = "remove_router_port" in let rid = Apero.Option.get @@ Apero.Properties.get "router_id" props in let vid = Apero.Option.get @@ Apero.Properties.get "vnet_id" props in 
let parameters = [("router_id", rid); ("vnet_id", vid)] in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Logs.debug (fun m -> m "[eval_remove_router_port] Eval Result: %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let router = Router.record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let%lwt ports = Lwt_list.map_p (fun (e:Router.router_port_record) -> Lwt.return Router.{port_type = e.port_type; vnet_id = e.vnet_id; ip_address = Some e.ip_address} ) router.ports in let router_desc = Router.{uuid = Some router.uuid; ports = ports; } in (* *) Yaks_connector.Global.Actual.add_node_router (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) router.uuid router_desc state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (Router.string_of_record router)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot remove port from router %s") rid ),503))) with | exn -> Logs.err (fun m -> m "[eval_remove_router_port] Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res (* *) let eval_heartbeat myuuid self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_heartbeat]- ##############"); Logs.debug (fun m -> m "[eval_heartbeat] - Properties: %s" (Apero.Properties.to_string props)); let _ = MVar.guarded self (fun state -> let source_id = Apero.Option.get @@ Apero.Properties.get "node_id" props in let timestamp = Unix.gettimeofday () in let current_available = state.available_nodes in let new_available = match List.find_opt (fun (n,_) -> 
String.compare n source_id == 0 ) current_available with
        | Some _ ->
          Logs.debug (fun m -> m "[eval_heartbeat] - Updating heartbeat information for %s" source_id);
          List.append (List.filter (fun (n,_) -> String.compare n source_id != 0) current_available) [(source_id,timestamp)]
        | None ->
          Logs.debug (fun m -> m "[eval_heartbeat] - Adding heartbeat information for %s" source_id);
          List.append current_available [(source_id,timestamp)]
      in
      let state = {state with available_nodes = new_available} in
      MVar.return () state)
  in
  let result = FTypes.{nodeid = myuuid } in
  let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_heartbeat_info result)) ; error = None; error_msg = None} in
  Logs.debug (fun m -> m "[eval_heartbeat] - Returning: %s" (FAgentTypes.string_of_eval_result eval_res));
  Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Forwards a start request for FDU instance [instanceid] (with environment
   string [env]) to the local node plugin via the YAKS connector and returns
   the plugin's eval_result as a JSON string. *)
let eval_start_fdu myuuid instanceid self env =
  Logs.debug (fun m -> m "[eval_start_fdu]- ##############");
  Logs.debug (fun m -> m "[eval_start_fdu]- InstanceID : %s Env: %s" instanceid env);
  MVar.read self >>= fun state ->
  let%lwt res = Yaks_connector.Local.Actual.start_fdu_in_node myuuid instanceid env state.yaks in
  Lwt.return @@ FAgentTypes.string_of_eval_result res
(* Forwards a run request for FDU instance [instanceid] (with environment
   string [env]) to the local node plugin and returns its eval_result. *)
let eval_run_fdu myuuid instanceid self env =
  Logs.debug (fun m -> m "[eval_run_fdu]- ##############");
  (* BUG FIX: this log line previously carried the [eval_start_fdu] tag. *)
  Logs.debug (fun m -> m "[eval_run_fdu]- InstanceID : %s Env: %s" instanceid env);
  MVar.read self >>= fun state ->
  let%lwt res = Yaks_connector.Local.Actual.run_fdu_in_node myuuid instanceid env state.yaks in
  Lwt.return @@ FAgentTypes.string_of_eval_result res
(* Fetches the log output of FDU instance [instanceid] from the local node
   plugin and returns it as a JSON-encoded eval_result. *)
let eval_log_fdu myuuid instanceid self _ =
  Logs.debug (fun m -> m "[eval_log_fdu]- ##############");
  Logs.debug (fun m -> m "[eval_log_fdu]- InstanceID : %s" instanceid);
  MVar.read self >>= fun state ->
  let%lwt res = Yaks_connector.Local.Actual.log_fdu_in_node myuuid instanceid state.yaks in
  Lwt.return @@ FAgentTypes.string_of_eval_result res
(* Lists the files of FDU instance [instanceid] via the local node plugin
   and returns the listing as a JSON-encoded eval_result. *)
let eval_ls_fdu myuuid instanceid self _ =
  Logs.debug (fun m -> m "[eval_ls_fdu]- ##############");
  Logs.debug (fun m -> m "[eval_ls_fdu]- InstanceID : %s" instanceid);
  MVar.read self >>= fun state ->
  let%lwt res = Yaks_connector.Local.Actual.ls_fdu_in_node myuuid instanceid state.yaks in
  Lwt.return @@ FAgentTypes.string_of_eval_result res
(* Retrieves file [filename] belonging to FDU instance [instanceid] via the
   local node plugin and returns its content as a JSON-encoded eval_result. *)
let eval_file_fdu myuuid instanceid self filename =
  (* BUG FIX: both log lines previously carried the [eval_ls_fdu] tag. *)
  Logs.debug (fun m -> m "[eval_file_fdu]- ##############");
  Logs.debug (fun m -> m "[eval_file_fdu]- InstanceID : %s" instanceid);
  MVar.read self >>= fun state ->
  let%lwt res = Yaks_connector.Local.Actual.file_fdu_in_node myuuid instanceid filename state.yaks in
  Lwt.return @@ FAgentTypes.string_of_eval_result res
null
https://raw.githubusercontent.com/eclipse-archived/agent/e2ee78050157eba3aa35f52496b590806c6d1d88/fos-agent/evals.ml
ocaml
Evals Find Correct Plugin Create Record * Add UUID for each component * Fix references with UUIDs Find Correct Plugin Create Record * Add UUID for each component * Fix references with UUIDs Find Correct Plugin Create Record * Add UUID for each component * Fix references with UUIDs Following requirements cannot be checked today because of lack of discovery from OS Plugins NM Floating IPs
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright ( c ) 2020 ADLINK Technology Inc. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * -2.0 , or the Apache Software License 2.0 * which is available at -2.0 . * * SPDX - License - Identifier : EPL-2.0 OR Apache-2.0 * Contributors : 1 * ( gabriele ( dot ) baldoni ( at ) adlinktech ( dot ) com ) - 0.2.0 Development iteration * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright (c) 2020 ADLINK Technology Inc. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * -2.0, or the Apache Software License 2.0 * which is available at -2.0. 
* * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 * Contributors: 1 * Gabriele Baldoni (gabriele (dot) baldoni (at) adlinktech (dot) com ) - 0.2.0 Development iteration *********************************************************************************) open Lwt.Infix open Fos_sdk open Fos_sdk.Errors open Agent_state open Utils let eval_get_fdu_info self (props:Apero.properties) = MVar.read self >>= fun state -> let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_uuid" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id fdu_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ User.Descriptors.FDU.string_of_descriptor descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_get_image_info self (props:Apero.properties) = MVar.read self >>= fun state -> let image_uuid = Apero.Option.get @@ Apero.Properties.get "image_uuid" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_image (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id image_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ User.Descriptors.FDU.string_of_image descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_get_node_fdu_info self (props:Apero.properties) = 
MVar.read self >>= fun state -> let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_uuid" props in let node_uuid = Apero.Option.get @@ Apero.Properties.get "node_uuid" props in let instanceid = Apero.Option.get @@ Apero.Properties.get "instance_uuid" props in try%lwt Logs.debug (fun m -> m "[eval_get_node_fdu_info] - Search for FDU Info"); let%lwt descriptor = Yaks_connector.Global.Actual.get_node_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id node_uuid fdu_uuid instanceid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ Infra.Descriptors.FDU.string_of_record descriptor in Logs.debug (fun m -> m "[eval_get_node_fdu_info] - INFO %s" (FAgentTypes.string_of_json js)); let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_get_network_info self (props:Apero.properties) = MVar.read self >>= fun state -> let net_uuid = Apero.Option.get @@ Apero.Properties.get "uuid" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_network (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id net_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ FTypes.string_of_virtual_network descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 22; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_get_port_info self (props:Apero.properties) = MVar.read self >>= fun state -> let cp_uuid = 
Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in Logs.debug (fun m -> m "[eval_get_port_info] - Getting info for port %s" cp_uuid ); try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_port (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id cp_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let js = FAgentTypes.json_of_string @@ User.Descriptors.Network.string_of_connection_point_descriptor descriptor in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | _ -> Logs.debug (fun m -> m "[eval_get_port_info] - Search port on FDU"); let%lwt fdu_ids = Yaks_connector.Global.Actual.get_catalog_all_fdus (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id state.yaks in let%lwt cps = Lwt_list.filter_map_p (fun e -> let%lwt fdu = Yaks_connector.Global.Actual.get_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id e state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let%lwt c = Lwt_list.filter_map_p (fun (cp:User.Descriptors.Network.connection_point_descriptor) -> Logs.debug (fun m -> m "[eval_get_port_info] - %s == %s ? 
%d " cp.id cp_uuid (String.compare cp.id cp_uuid)); if (String.compare cp.id cp_uuid) == 0 then Lwt.return @@ Some cp else Lwt.return None ) fdu.connection_points in Lwt.return @@ List.nth_opt c 0 ) fdu_ids in try%lwt let cp = List.hd cps in let js = FAgentTypes.json_of_string @@ User.Descriptors.Network.string_of_connection_point_descriptor cp in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_get_node_mgmt_address self (props:Apero.properties) = MVar.read self >>= fun state -> let node_uuid = Apero.Option.get @@ Apero.Properties.get "node_uuid" props in try%lwt let%lwt nconf = Yaks_connector.Global.Actual.get_node_configuration (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id node_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let%lwt descriptor = Yaks_connector.Global.Actual.get_node_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id node_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let nws = descriptor.network in let%lwt addr = (Lwt_list.filter_map_p ( fun (e:FTypes.network_spec_type) -> if (String.compare e.intf_name nconf.agent.mgmt_interface) == 0 then Lwt.return @@ Some e.intf_configuration else Lwt.return None ) nws) >>= fun l -> Lwt.return @@ List.hd l in let js = FAgentTypes.json_of_string @@ FTypes.string_of_intf_conf_type addr in let eval_res = FAgentTypes.{result = Some js; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res NM Evals let 
(* Eval callback (header [let] is on the previous line): creates a virtual
   network.  Parses the "descriptor" property, builds a network record with
   status `CREATE and writes it to the local *desired* store under the network
   plugin, then returns the record as JSON.  Failures -> error code 11. *)
eval_create_net self (props:Apero.properties) = MVar.read self >>= fun state -> try%lwt let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_create_net] - ##############"); Logs.debug (fun m -> m "[eval_create_net] - Properties: %s" (Apero.Properties.to_string props) ); let descriptor = FTypes.virtual_network_of_string @@ Apero.Option.get @@ Apero.Properties.get "descriptor" props in let record = FTypesRecord.{uuid = descriptor.uuid; status = `CREATE; properties = None; ip_configuration = descriptor.ip_configuration; overlay = None; vni = None; mcast_addr = None; vlan_id = None; face = None} in Yaks_connector.Local.Desired.add_node_network (Apero.Option.get state.configuration.agent.uuid) net_p descriptor.uuid record state.yaks >>= fun _ -> let js = JSON.of_string @@ FTypesRecord.string_of_virtual_network record in let eval_res = FAgentTypes.{result = Some js ; error=None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> let bt = Printexc.get_backtrace () in Logs.err (fun m -> m "[eval_create_net] - Exception: %s Trace %s " (Printexc.to_string exn) bt); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: removes the virtual network named by the "net_id" property.
   Fetches the current local record, marks it `DESTROY in the local desired
   store, and deletes the network from the global actual store. *)
let eval_remove_net self (props:Apero.properties) = MVar.read self >>= fun state -> try%lwt let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_remove_net] - ##############"); Logs.debug (fun m -> m "[eval_remove_net] - Properties: %s" (Apero.Properties.to_string props)); let net_id =Apero.Option.get @@ Apero.Properties.get "net_id" props in let%lwt record = Yaks_connector.Local.Actual.get_node_network (Apero.Option.get state.configuration.agent.uuid) net_p net_id state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let record = {record with status = `DESTROY} in let%lwt _ = Yaks_connector.Local.Desired.add_node_network (Apero.Option.get
state.configuration.agent.uuid) net_p net_id record state.yaks in Yaks_connector.Global.Actual.remove_network (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id record.uuid state.yaks >>= Lwt.return >>= fun _ -> let js = JSON.of_string @@ FTypesRecord.string_of_virtual_network record in let eval_res = FAgentTypes.{result = Some js ; error=None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> Logs.err (fun m -> m "[eval_remove_net] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: creates a connection point by delegating to the network
   manager eval [create_port_agent], forwarding the serialized descriptor. *)
let eval_create_cp self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_create_cp] - ##############"); Logs.debug (fun m -> m "[eval_create_cp] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let descriptor = User.Descriptors.Network.connection_point_descriptor_of_string @@ Apero.Option.get @@ Apero.Properties.get "descriptor" props in Logs.debug (fun m -> m "[eval_create_cp] - # NetManager: %s" net_p); try%lwt let parameters = [("descriptor",User.Descriptors.Network.string_of_connection_point_descriptor descriptor)] in let fname = "create_port_agent" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot connect create cp %s") descriptor.id ),503))) with | exn -> Logs.err (fun m -> m "[eval_create_cp] Exception : %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Header of [eval_remove_cp]; its parameter list continues on the next line. *)
let eval_remove_cp
(* Body of [eval_remove_cp] (its [let] is on the previous line): destroys the
   connection point named by the "cp_id" property via the network manager eval
   [destroy_port_agent].  Failures -> eval_result with error code 11. *)
self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_remove_cp] - ##############"); Logs.debug (fun m -> m "[eval_remove_cp] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_id" props in Logs.debug (fun m -> m "[eval_remove_cp] - # NetManager: %s" net_p); try%lwt let parameters = [("cp_id", cp_id)] in let fname = "destroy_port_agent" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot destroy create cp %s") cp_id ),503))) with | exn -> Logs.err (fun m -> m "[eval_remove_cp] Exception %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: attaches connection point "cp_id" to interface "interface"
   of FDU instance "instance_id".  Looks up the instance record, selects the
   plugin whose name matches the record's hypervisor (case-insensitive), and
   delegates to that plugin's [connect_interface_to_cp] eval. *)
let eval_connect_cp_to_fdu_face self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - ##############"); Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_id" props in let instance_id = Apero.Option.get @@ Apero.Properties.get "instance_id" props in let interface = Apero.Option.get @@ Apero.Properties.get "interface" props in try%lwt let%lwt record = Yaks_connector.Global.Actual.get_node_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) "*" instance_id state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in Logs.debug (fun m -> m "[[eval_connect_cp_to_fdu_face] - FDU Record: %s" (Infra.Descriptors.FDU.string_of_record record));
(* Select the handling plugin by matching plugin name against the record's
   hypervisor type (case-insensitive comparison). *)
let fdu_type = Fos_sdk.string_of_hv_type record.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return @@ Some pl else Lwt.return None ) plugins in let pl = match matching_plugins with | [] -> Logs.err (fun m -> m "Cannot find a plugin for this FDU even if it is present in the node WTF!! %s" instance_id ); None | _ -> Some ((List.hd matching_plugins).uuid) in (match pl with | Some plid -> Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - Plugin ID: %s" plid); let parameters = [("cpid", cp_id);("instanceid", instance_id);("iface",interface)] in let fname = "connect_interface_to_cp" in Yaks_connector.Local.Actual.exec_plugin_eval (Apero.Option.get state.configuration.agent.uuid) plid fname parameters state.yaks >>= fun res -> (match res with | Some r -> Logs.debug (fun m -> m "[eval_connect_cp_to_fdu_face] - Connected!"); Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot connect cp to interface %s") cp_id ),503))) ) | None -> Lwt.fail @@ FException (`PluginNotFound (`MsgCode ((Printf.sprintf ("CRITICAL!!!! Cannot find a plugin for this FDU even if it is present in the node!! 
%s") instance_id ),404)))) with | exn -> Logs.err (fun m -> m "[eval_connect_cp_to_fdu_face] Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: inverse of the above — detaches interface "interface" of FDU
   instance "instance_id" from its connection point via the matching plugin's
   [disconnect_interface_from_cp] eval. *)
let eval_disconnect_cp_from_fdu_face self (props:Apero.properties) = Logs.debug (fun m -> m "[[eval_disconnect_cp_from_fdu_face] - ##############"); Logs.debug (fun m -> m "[[eval_disconnect_cp_from_fdu_face] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let face = Apero.Option.get @@ Apero.Properties.get "interface" props in let instance_id = Apero.Option.get @@ Apero.Properties.get "instance_id" props in try%lwt let%lwt record = Yaks_connector.Global.Actual.get_node_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) "*" instance_id state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let fdu_type = Fos_sdk.string_of_hv_type record.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return @@ Some pl else Lwt.return None ) plugins in let pl = match matching_plugins with | [] -> Logs.err (fun m -> m "[eval_disconnect_cp_from_fdu_face] - Cannot find a plugin for this FDU even if it is present in the node WTF!! 
%s" instance_id ); None | _ -> Some ((List.hd matching_plugins).uuid) in (match pl with | Some plid -> let parameters = [("iface", face);("instanceid", instance_id)] in let fname = "disconnect_interface_from_cp" in Yaks_connector.Local.Actual.exec_plugin_eval (Apero.Option.get state.configuration.agent.uuid) plid fname parameters state.yaks >>= fun res -> (match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot disconnect cp from interface %s") face ),503))) ) | None -> Lwt.fail @@ FException (`PluginNotFound (`MsgCode ((Printf.sprintf ("CRITICAL!!!! Cannot find a plugin for this FDU even if it is present in the node WTF!! %s") instance_id ),404)))) with | exn -> Logs.err (fun m -> m "[eval_disconnect_cp_from_fdu_face] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: attaches connection point "cp_uuid" to virtual network
   "network_uuid" via the network manager eval [connect_cp_to_vnetwork].
   The error string below continues on the next physical line. *)
let eval_connect_cp_to_network self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_connect_cp_to_network] - ##############"); Logs.debug (fun m -> m "[eval_connect_cp_to_network] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in let net_id = Apero.Option.get @@ Apero.Properties.get "network_uuid" props in Logs.debug (fun m -> m "[eval_connect_cp_to_network] - # NetManager: %s" net_p); try%lwt let parameters = [("cp_id",cp_id);("vnet_id", net_id)] in let fname = "connect_cp_to_vnetwork" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot 
connect cp %s to network %s") cp_id net_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: detaches connection point "cp_uuid" from its virtual network
   by delegating to the network manager eval [disconnect_cp].
   Failures are reported as an eval_result with error code 11.
   Fixed: both error messages below misspelled "network" as "netwokr". *)
let eval_remove_cp_from_network self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_remove_cp_from_network] - ##############"); Logs.debug (fun m -> m "[eval_remove_cp_from_network] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in Logs.debug (fun m -> m "[eval_remove_cp_from_network] - # NetManager: %s" net_p); try%lwt let parameters = [("cp_id",cp_id)] in let fname = "disconnect_cp" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Lwt.return @@ FAgentTypes.string_of_eval_result r | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot Remove cp %s from network") cp_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* NOTE(review): "FDU schedule" below appears to be a section-header comment
   whose delimiters were lost during extraction. *)
FDU schedule
(* Eval callback: schedules an FDU onto a node.  Looks up the descriptor named
   by "fdu_id" in the global catalog, asks every node for a compatibility
   check, collects the compatible node uuids, picks one uniformly at random,
   and defines the FDU there.  The body continues on the following lines. *)
let eval_schedule_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_schedule_fdu] - ##############"); Logs.debug (fun m -> m "[eval_schedule_fdu] - Properties: %s" (Apero.Properties.to_string props)); try%lwt MVar.read self >>= fun state -> let sysid = (Apero.Option.get @@ state.configuration.agent.system) in let tenantid = Yaks_connector.default_tenant_id in let fdu_uuid = Apero.Option.get @@ Apero.Properties.get "fdu_id" props in let%lwt descriptor = match%lwt Yaks_connector.Global.Actual.get_catalog_fdu_info sysid tenantid fdu_uuid state.yaks with | Some d -> Lwt.return d | None -> Logs.err (fun m -> m "[eval_schedule_fdu] - FDU not found"); Lwt.fail @@ 
(* Continuation of [eval_schedule_fdu]: multi-node compatibility check,
   random choice among compatible nodes, then FDU definition on the winner.
   `NotFound errors keep their own code/message; anything else maps to 500. *)
FException (`NotFound (`MsgCode (( Printf.sprintf ("FDU %s not found in catalog") fdu_uuid),404) )) in let%lwt res = Yaks_connector.Global.Actual.call_multi_node_check sysid Yaks_connector.default_tenant_id descriptor state.yaks in match res with | [] -> Logs.err (fun m -> m "[eval_schedule_fdu] - No node found for this FDU"); Lwt.fail @@ FException (`NotFound (`MsgCode ("No node found for this FDU",404) )) | nodes -> let%lwt lst = Lwt_list.filter_map_p (fun (e:FAgentTypes.eval_result) -> match e.result with | Some r -> let r = (FAgentTypes.compatible_node_response_of_string (JSON.to_string r)) in (match r.is_compatible with | true -> Logs.info (fun m -> m "[eval_schedule_fdu] - Node %s is compatible" r.uuid ); Lwt.return (Some r.uuid) | false -> Lwt.return None) | None -> Lwt.return None ) nodes in match lst with | [] -> Lwt.fail @@ FException (`NotFound (`MsgCode ("No node found for this FDU",404) )) | compatibles -> let destination = List.nth compatibles (Random.int (List.length compatibles)) in Logs.info (fun m -> m "[eval_schedule_fdu] - Node %s is random selected as destination for %s " destination descriptor.id); let%lwt record = Fos_fim_api.FDU.define fdu_uuid destination state.fim_api in let eval_res = FAgentTypes.{result = Some (JSON.of_string (Infra.Descriptors.FDU.string_of_record record)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | FException ex -> Logs.err (fun m -> m "[eval_schedule_fdu] - EXCEPTION: %s" (Fos_errors.show_ferror ex)); (match ex with | `NotFound ei -> (match ei with | `MsgCode (err,code) -> let eval_res = FAgentTypes.{result = None ; error = Some code; error_msg = Some err} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | _ -> let eval_res = FAgentTypes.{result = None ; error = Some 500; error_msg = Some (Fos_errors.show_ferror ex)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res) | _ -> let eval_res = FAgentTypes.{result = None ; error = Some 500; 
error_msg = Some (Fos_errors.show_ferror ex)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res) | exn -> Logs.err (fun m -> m "[eval_schedule_fdu] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 500; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* NOTE(review): "FDU Onboard in Catalog -- this may be moved to FOrcE" below
   appears to be a comment whose delimiters were lost during extraction. *)
FDU Onboard in Catalog -- this may be moved to FOrcE
(* Eval callback: onboards an FDU descriptor into the global catalog.  Parses
   the "descriptor" property; if the descriptor carries no uuid, one is derived
   deterministically from its [id] via [Apero.Uuid.make_from_alias]. *)
let eval_onboard_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[[eval_onboard_fdu] - ##############"); Logs.debug (fun m -> m "[eval_onboard_fdu] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let descriptor = Apero.Option.get @@ Apero.Properties.get "descriptor" props in try%lwt let descriptor = User.Descriptors.FDU.descriptor_of_string descriptor in let descriptor = match descriptor.uuid with | Some _ -> descriptor | None -> let fduid = Apero.Uuid.to_string @@ Apero.Uuid.make_from_alias descriptor.id in {descriptor with uuid = Some fduid} in Yaks_connector.Global.Actual.add_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get descriptor.uuid) descriptor state.yaks >>= fun _ -> let js = JSON.of_string (User.Descriptors.FDU.string_of_descriptor descriptor) in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> Logs.err (fun m -> m "[eval_onboard_fdu] - EXCEPTION: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* NOTE(review): "FDU Definition in Node" below appears to be a section-header
   comment whose delimiters were lost during extraction. *)
FDU Definition in Node
(* Eval callback: defines an FDU instance on this node.  Resolves the catalog
   descriptor named by "fdu_id"; the body continues on the following lines. *)
let eval_define_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_define_fdu] - ##############"); Logs.debug (fun m -> m "[eval_define_fdu] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let fdu_uuid = 
(* Continuation of [eval_define_fdu]: fetches the catalog descriptor, picks the
   plugin whose name matches the descriptor's hypervisor (case-insensitive),
   mints a fresh instance uuid, and materializes record-level copies of the
   descriptor's connection points, interfaces and storage. *)
Apero.Option.get @@ Apero.Properties.get "fdu_id" props in try%lwt let%lwt descriptor = Yaks_connector.Global.Actual.get_catalog_fdu_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id fdu_uuid state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in let fdu_type = Fos_sdk.string_of_hv_type descriptor.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return @@ Some pl else Lwt.return None ) plugins in let pl = match matching_plugins with | [] -> None | _ -> Some ((List.hd matching_plugins).uuid) in let instanceid = Apero.Uuid.to_string @@ Apero.Uuid.make () in
(* Each descriptor connection point gets a freshly minted uuid in its record;
   interface records then reference the new cp uuids through [cp_new_id]. *)
let cp_records = List.map ( fun (e:User.Descriptors.Network.connection_point_descriptor) -> let cpuuid = Apero.Uuid.to_string @@ Apero.Uuid.make () in Infra.Descriptors.Network.{ uuid = cpuuid; status = `CREATE; cp_id = e.id; cp_type = e.cp_type; port_security_enabled = e.port_security_enabled; properties = None; veth_face_name = None; br_name = None; vld_ref = e.vld_ref } ) descriptor.connection_points in let interface_records = List.map (fun (e:User.Descriptors.FDU.interface) -> let cp_new_id = match e.cp_id with | Some cp_id -> let cp = List.find (fun (cp:Infra.Descriptors.FDU.connection_point_record) -> cp_id = cp.cp_id ) cp_records in Some cp.uuid | None -> None in match e.virtual_interface.intf_type with | `PHYSICAL | `BRIDGED -> Logs.debug (fun m -> m "[eval_define_fdu] - THIS FDU HAS PHYSICAL INTERFACE"); let r = Infra.Descriptors.FDU.{name = e.name; is_mgmt = e.is_mgmt; if_type = e.if_type; mac_address = e.mac_address; virtual_interface = e.virtual_interface; cp_id = cp_new_id; ext_cp_id = e.ext_cp_id; 
vintf_name = e.name; status = `CREATE; phy_face = Some e.virtual_interface.vpci; veth_face_name = None; properties = None} in Logs.debug (fun m -> m "[eval_define_fdu] - THIS FDU HAS PHYSICAL INTERFACE RECORD: %s" (Infra.Descriptors.FDU.string_of_interface r)); r | _ -> Infra.Descriptors.FDU.{name = e.name; is_mgmt = e.is_mgmt; if_type = e.if_type; mac_address = e.mac_address; virtual_interface = e.virtual_interface; cp_id = cp_new_id; ext_cp_id = e.ext_cp_id; vintf_name = e.name; status = `CREATE; phy_face = None; veth_face_name = None; properties = None} ) descriptor.interfaces in let storage_records = List.map (fun (e:User.Descriptors.FDU.storage_descriptor) -> let st_uuid = Apero.Uuid.to_string @@ Apero.Uuid.make () in let cp_new_id = match e.cp_id with | Some cp_id -> let cp = List.find (fun (cp:Infra.Descriptors.FDU.connection_point_record) -> cp_id = cp.cp_id ) cp_records in Some cp.cp_id | None -> None in Infra.Descriptors.FDU.{uuid = st_uuid; storage_id = e.id; storage_type = e.storage_type; size = e.size; file_system_protocol = e.file_system_protocol; cp_id = cp_new_id} ) descriptor.storage in
(* Assemble the instance record (status `DEFINE) and hand it to the selected
   plugin through the local desired store; no plugin -> PluginNotFound/404. *)
let record = Infra.Descriptors.FDU.{ uuid = instanceid; fdu_id = Apero.Option.get @@ descriptor.uuid; status = `DEFINE; image = descriptor.image; command = descriptor.command; storage = storage_records; computation_requirements = descriptor.computation_requirements; geographical_requirements = descriptor.geographical_requirements; energy_requirements = descriptor.energy_requirements; hypervisor = descriptor.hypervisor; migration_kind = descriptor.migration_kind; configuration = descriptor.configuration; interfaces = interface_records; io_ports = descriptor.io_ports; connection_points = cp_records; depends_on = descriptor.depends_on; error_code = None; error_msg = None; migration_properties = None; hypervisor_info = JSON.create_empty () } in (match pl with | Some plid -> Yaks_connector.Local.Desired.add_node_fdu (Apero.Option.get state.configuration.agent.uuid) plid 
fdu_uuid instanceid record state.yaks >>= fun _ -> let js = JSON.of_string (Infra.Descriptors.FDU.string_of_record record) in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`PluginNotFound (`MsgCode ((Printf.sprintf ("Node %s has no plugin for %s") (Apero.Option.get state.configuration.agent.uuid) fdu_uuid ),404)))) with | exn -> Logs.err (fun m -> m "[eval_define_fdu] - Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* NOTE(review): the text below is a long design comment about FDU requirement
   checks whose opening delimiter was lost during extraction; the matching "*)"
   appears at the start of the next physical line. *)
FDU Requirements Checks At Intitial implementation just checks cpu architecture , ram and if there is a plugin for the FDU * Checks : * Plugin : done * CPU : done * CPU Count : done * CPU : done * RAM size : done * Disk space : done * Image in case the image file starts with file:// : done * Command if starts with / : done * Interfaces in case they are bridged or physical : done ( * Following requirements can not be checked today because of lack of discovery from OS Plugins * Checks: * Plugin: done * CPU Arch: done * CPU Count : done * CPU Freq : done * RAM size: done * Disk space : done * Image in case the image file starts with file:// : done * Command if starts with / : done * Interfaces in case they are bridged or physical : done * GPUs * FPGAs * I/O Devices And idea can be having some filters functions that return boolean value and run this function one after the other using AND logical operation eg. let compatible = true in let compatible = compatible and run_cpu_filter fdu node_info in let compatible = compatible and run_ram_filter fdu node_info in let compatible = compatible and run_disk_filter fdu node_info in .... 
*)
(* Eval callback: answers a scheduler compatibility query for this node.
   Parses the "descriptor" property and checks, against the node's info:
   plugin availability for the hypervisor, CPU arch/count/frequency, RAM,
   root-disk size, local existence of file:// images and absolute-path
   commands, and presence of PHYSICAL/BRIDGED interfaces.  Returns a
   [compatible_node_response] as an eval_result.
   Fixed: the "Command Check" debug line logged [image_check] instead of
   [command_check]. *)
let eval_check_fdu self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_check_fdu] - ##############"); Logs.debug (fun m -> m "[eval_check_fdu] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let descriptor = Apero.Option.get @@ Apero.Properties.get "descriptor" props in try%lwt let descriptor = User.Descriptors.FDU.descriptor_of_string descriptor in let%lwt node_info = Yaks_connector.Global.Actual.get_node_info (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) state.yaks >>= fun x -> Lwt.return @@ Apero.Option.get x in
(* [compare fdu ninfo] computes all individual checks and ANDs them; any
   exception (e.g. no "/" mount point found) yields [false]. *)
let compare (fdu:User.Descriptors.FDU.descriptor) (ninfo:FTypes.node_info) = try%lwt let fdu_cp = fdu.computation_requirements in let fdu_net = fdu.interfaces in let ncpu = List.hd ninfo.cpu in let ndisk = List.find_opt (fun (e:FTypes.disks_spec_type) -> (String.compare e.mount_point "/")==0 ) ninfo.disks |> Apero.Option.get in let fdu_type = Fos_sdk.string_of_hv_type descriptor.hypervisor in let%lwt plugins = Yaks_connector.Local.Actual.get_node_plugins (Apero.Option.get state.configuration.agent.uuid) state.yaks in let%lwt matching_plugins = Lwt_list.filter_map_p (fun e -> let%lwt pl = Yaks_connector.Local.Actual.get_node_plugin (Apero.Option.get state.configuration.agent.uuid) e state.yaks in if String.uppercase_ascii (pl.name) = String.uppercase_ascii (fdu_type) then Lwt.return (Some pl.uuid) else Lwt.return None ) plugins in let has_plugin = match matching_plugins with | [] -> false | _ -> true in let cpu_arch_check = ((String.compare fdu_cp.cpu_arch ncpu.arch) == 0) in let cpu_number_check = (fdu_cp.cpu_min_count <= (List.length ninfo.cpu)) in let cpu_freq_check = (fdu_cp.cpu_min_freq <= (Float.to_int ncpu.frequency)) in let ram_size_check = (fdu_cp.ram_size_mb <= ninfo.ram.size) in let disk_size_check = fdu_cp.storage_size_gb <= ndisk.dimension in let image_check = match descriptor.image with | Some i -> 
let file_re = Str.regexp "file://*" in (match Str.string_match file_re i.uri 0 with | false -> true | true -> let fname = String.sub i.uri 7 ((String.length i.uri)-7) in Sys.file_exists fname ) | None -> true in let command_check = match descriptor.command with | Some i -> let file_re = Str.regexp "/*" in (match Str.string_match file_re i.binary 0 with | false -> true | true -> Sys.file_exists i.binary ) | None -> true in let interfaces_check = let ninterfaces = ninfo.network in let interfaces = List.filter (fun (e:User.Descriptors.FDU.interface) -> match e.virtual_interface.intf_type with | `PHYSICAL | `BRIDGED -> true | _ -> false ) fdu_net in let checks = List.map (fun (e:User.Descriptors.FDU.interface) -> let face_name = e.virtual_interface.vpci in match List.find_opt (fun (ne:FTypes.network_spec_type) -> (String.compare ne.intf_name face_name)==0 ) ninterfaces with | Some _ -> true | None -> false ) interfaces in List.fold_left (fun i j -> i && j) true checks in let res = has_plugin && cpu_arch_check && cpu_freq_check && cpu_number_check && ram_size_check && disk_size_check && image_check && command_check && interfaces_check in Logs.debug (fun m -> m "[eval_check_fdu] - Plugin Check: %b" has_plugin ); Logs.debug (fun m -> m "[eval_check_fdu] - CPU Arch Check: %s = %s ? %b" fdu_cp.cpu_arch ncpu.arch cpu_arch_check); Logs.debug (fun m -> m "[eval_check_fdu] - CPU Number Check: %d <= %d ? %b" fdu_cp.cpu_min_count (List.length ninfo.cpu) cpu_number_check); Logs.debug (fun m -> m "[eval_check_fdu] - CPU Freq Check: %d <= %d ? %b" fdu_cp.cpu_min_freq (Float.to_int ncpu.frequency) cpu_freq_check); Logs.debug (fun m -> m "[eval_check_fdu] - RAM Size Check: %f <= %f ? %b" fdu_cp.ram_size_mb ninfo.ram.size ram_size_check); Logs.debug (fun m -> m "[eval_check_fdu] - Disk Size Check: %f <= %f ? 
%b" fdu_cp.storage_size_gb ndisk.dimension disk_size_check); Logs.debug (fun m -> m "[eval_check_fdu] - Image Check: %b" image_check); Logs.debug (fun m -> m "[eval_check_fdu] - Command Check: %b" command_check); Logs.debug (fun m -> m "[eval_check_fdu] - Interfaces Check: %b" interfaces_check); Logs.debug (fun m -> m "[eval_check_fdu] - Is compatible? %b" res); Lwt.return res
(* NOTE(review): the text below is a commented-out older implementation whose
   opening delimiter was lost during extraction; its closing "*)" survives. *)
match ( has_plugin , cpu_arch_check , , cpu_number_check , ram_size_check , disk_size_check , image_check ) with | ( true , true , true , true , true , true , true ) - > Lwt.return true | ( _ , _ , _ , _ , _ , _ , _ ) - > Lwt.return false | (true, true, true, true, true, true, true) -> Lwt.return true | (_,_,_,_,_,_,_) -> Lwt.return false *) with | exn -> Logs.err (fun m -> m "[eval_check_fdu] - Exception: %s" (Printexc.to_string exn)); Lwt.return false in let%lwt res = compare descriptor node_info in let res = match res with | true -> FAgentTypes.{uuid = (Apero.Option.get state.configuration.agent.uuid); is_compatible=true } | false -> FAgentTypes.{uuid = (Apero.Option.get state.configuration.agent.uuid); is_compatible=false } in let js = JSON.of_string (FAgentTypes.string_of_compatible_node_response res) in let eval_res = FAgentTypes.{result = Some js ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res with | exn -> Logs.err (fun m -> m "[eval_check_fdu] - Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error=Some 11; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res
(* Eval callback: creates a floating IP via the network manager.  NOTE(review):
   this definition is cut off at the end of this chunk. *)
let eval_create_floating_ip self (props:Apero.properties) = ignore props; Logs.debug (fun m -> m "[eval_create_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_create_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_create_floating_ip] - # NetManager: %s" net_p); try%lwt let fname = 
"create_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname [] state.yaks >>= fun res -> match res with | Some r -> Logs.debug (fun m -> m "[eval_create_floating_ip] - Eval Result %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ("Cannot create floating ip %s not found",503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string exn)} in Logs.err (fun m -> m "[eval_create_floating_ip] - # Error when creating floating IP: %s" (Printexc.to_string exn)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_delete_floating_ip self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_delete_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_delete_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_delete_floating_ip- # NetManager: %s" net_p); try%lwt let ip_id = Apero.Option.get @@ Apero.Properties.get "floating_uuid" props in let parameters = [("ip_id",ip_id)] in let fname = "delete_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) 
net_p fname parameters state.yaks >>= fun res -> match res with Some r -> Logs.debug (fun m -> m "[eval_delete_floating_ip] - Eval Result %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`NotFound (`MsgCode ((Printf.sprintf ("Floating IP %s not found") ip_id ),404))) with e -> let msg = Printexc.to_string e and stack = Printexc.get_backtrace () in Logs.err (fun m -> m "[eval_delete_floating_ip]- Error: %s %s" msg stack); let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string e)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_assign_floating_ip self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_assign_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_assign_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in try%lwt let ip_id = Apero.Option.get @@ Apero.Properties.get "floating_uuid" props in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in let parameters = [("ip_id",ip_id);("cp_id",cp_id)] in let fname = "assign_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | 
Some r -> Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error=None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`NotFound (`MsgCode ((Printf.sprintf ("Cannot assing IP %s to cp %s") ip_id cp_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 33; error_msg = Some (Printexc.to_string exn)} in Logs.err (fun m -> m "[eval_assign_floating_ip] - Exception: %s" (Printexc.to_string exn)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_remove_floating_ip self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_remove_floating_ip] - ##############"); Logs.debug (fun m -> m "[eval_remove_floating_ip] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in try%lwt let ip_id = Apero.Option.get @@ Apero.Properties.get "floating_uuid" props in let cp_id = Apero.Option.get @@ Apero.Properties.get "cp_uuid" props in let parameters = [("ip_id",ip_id);("cp_id",cp_id)] in let fname = "remove_floating_ip" in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Convertion from record let floating_r = FTypes.floating_ip_record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let floating = FTypes.{uuid = floating_r.uuid; ip_version = 
floating_r.ip_version; address = floating_r.address} in Yaks_connector.Global.Actual.add_node_floating_ip (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) floating.uuid floating state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_floating_ip floating)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot remove floating IP %s not found") ip_id ),503))) with | exn -> let eval_res = FAgentTypes.{result = None ; error = Some 33; error_msg = Some (Printexc.to_string exn)} in Logs.err (fun m -> m "[eval_remove_floating_ip] - Exception: %s" (Printexc.to_string exn)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_add_router_port self (props:Apero.properties) = ignore props; Logs.debug (fun m -> m "[eval_add_router_port] - ##############"); Logs.debug (fun m -> m "[eval_add_router_port] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_add_router_port] - # NetManager: %s" net_p); try%lwt let fname = "add_router_port" in let rid = Apero.Option.get @@ Apero.Properties.get "router_id" props in let port_type = Apero.Option.get @@ Apero.Properties.get "port_type" props in let parameters = [("router_id", rid); ("port_type", port_type)] in let parameters = match Apero.Properties.get "vnet_id" props with | Some vid -> parameters @ [("vnet_id",vid)] | None -> parameters in let parameters = match Apero.Properties.get "ip_address" props with | Some ip -> parameters @ [("ip_address",ip)] | None -> parameters in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname parameters state.yaks >>= fun res -> match res with | Some r -> Logs.debug (fun m -> m 
"[eval_add_router_port] - Eval Result: %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let router = Router.record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let%lwt ports = Lwt_list.map_p (fun (e:Router.router_port_record) -> Lwt.return Router.{port_type = e.port_type; vnet_id = e.vnet_id; ip_address = Some e.ip_address} ) router.ports in let router_desc = Router.{uuid = Some router.uuid; ports = ports; } in Yaks_connector.Global.Actual.add_node_router (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) router.uuid router_desc state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (Router.string_of_record router)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot create to router %s") rid ),503))) with | exn -> Logs.err (fun m -> m "[eval_add_router_port] Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_remove_router_port self (props:Apero.properties) = ignore props; Logs.debug (fun m -> m "[eval_remove_router_port]- ##############"); Logs.debug (fun m -> m "[eval_remove_router_port] - Properties: %s" (Apero.Properties.to_string props)); MVar.read self >>= fun state -> let%lwt net_p = get_network_plugin self in Logs.debug (fun m -> m "[eval_remove_router_port] - # NetManager: %s" net_p); try%lwt let fname = "remove_router_port" in let rid = Apero.Option.get @@ Apero.Properties.get "router_id" props in let vid = Apero.Option.get @@ Apero.Properties.get "vnet_id" props in let parameters = [("router_id", rid); ("vnet_id", vid)] in Yaks_connector.Local.Actual.exec_nm_eval (Apero.Option.get state.configuration.agent.uuid) net_p fname 
parameters state.yaks >>= fun res -> match res with | Some r -> Logs.debug (fun m -> m "[eval_remove_router_port] Eval Result: %s" (FAgentTypes.string_of_eval_result r)); Convertion from record let router = Router.record_of_string @@ JSON.to_string (Apero.Option.get r.result) in let%lwt ports = Lwt_list.map_p (fun (e:Router.router_port_record) -> Lwt.return Router.{port_type = e.port_type; vnet_id = e.vnet_id; ip_address = Some e.ip_address} ) router.ports in let router_desc = Router.{uuid = Some router.uuid; ports = ports; } in Yaks_connector.Global.Actual.add_node_router (Apero.Option.get @@ state.configuration.agent.system) Yaks_connector.default_tenant_id (Apero.Option.get state.configuration.agent.uuid) router.uuid router_desc state.yaks >>= fun _ -> let eval_res = FAgentTypes.{result = Some (JSON.of_string (Router.string_of_record router)) ; error = None; error_msg = None} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res | None -> Lwt.fail @@ FException (`InternalError (`MsgCode ((Printf.sprintf ("Cannot remove port from router %s") rid ),503))) with | exn -> Logs.err (fun m -> m "[eval_remove_router_port] Exception: %s" (Printexc.to_string exn)); let eval_res = FAgentTypes.{result = None ; error = Some 22; error_msg = Some (Printexc.to_string exn)} in Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_heartbeat myuuid self (props:Apero.properties) = Logs.debug (fun m -> m "[eval_heartbeat]- ##############"); Logs.debug (fun m -> m "[eval_heartbeat] - Properties: %s" (Apero.Properties.to_string props)); let _ = MVar.guarded self (fun state -> let source_id = Apero.Option.get @@ Apero.Properties.get "node_id" props in let timestamp = Unix.gettimeofday () in let current_available = state.available_nodes in let new_available = match List.find_opt (fun (n,_) -> String.compare n source_id == 0 ) current_available with | Some _ -> Logs.debug (fun m -> m "[eval_heartbeat] - Updating heartbeat information for %s" source_id); List.append 
(List.filter (fun (n,_) -> String.compare n source_id != 0) current_available) [(source_id,timestamp)] | None -> Logs.debug (fun m -> m "[eval_heartbeat] - Adding heartbeat information for %s" source_id); List.append current_available [(source_id,timestamp)] in let state = {state with available_nodes = new_available} in MVar.return () state) in let result = FTypes.{nodeid = myuuid } in let eval_res = FAgentTypes.{result = Some (JSON.of_string (FTypes.string_of_heartbeat_info result)) ; error = None; error_msg = None} in Logs.debug (fun m -> m "[eval_heartbeat] - Returning: %s" (FAgentTypes.string_of_eval_result eval_res)); Lwt.return @@ FAgentTypes.string_of_eval_result eval_res let eval_start_fdu myuuid instanceid self env = Logs.debug (fun m -> m "[eval_start_fdu]- ##############"); Logs.debug (fun m -> m "[eval_start_fdu]- InstanceID : %s Env: %s" instanceid env); MVar.read self >>= fun state -> let%lwt res = Yaks_connector.Local.Actual.start_fdu_in_node myuuid instanceid env state.yaks in Lwt.return @@ FAgentTypes.string_of_eval_result res let eval_run_fdu myuuid instanceid self env = Logs.debug (fun m -> m "[eval_run_fdu]- ##############"); Logs.debug (fun m -> m "[eval_start_fdu]- InstanceID : %s Env: %s" instanceid env); MVar.read self >>= fun state -> let%lwt res = Yaks_connector.Local.Actual.run_fdu_in_node myuuid instanceid env state.yaks in Lwt.return @@ FAgentTypes.string_of_eval_result res let eval_log_fdu myuuid instanceid self _ = Logs.debug (fun m -> m "[eval_log_fdu]- ##############"); Logs.debug (fun m -> m "[eval_log_fdu]- InstanceID : %s" instanceid); MVar.read self >>= fun state -> let%lwt res = Yaks_connector.Local.Actual.log_fdu_in_node myuuid instanceid state.yaks in Lwt.return @@ FAgentTypes.string_of_eval_result res let eval_ls_fdu myuuid instanceid self _ = Logs.debug (fun m -> m "[eval_ls_fdu]- ##############"); Logs.debug (fun m -> m "[eval_ls_fdu]- InstanceID : %s" instanceid); MVar.read self >>= fun state -> let%lwt res = 
Yaks_connector.Local.Actual.ls_fdu_in_node myuuid instanceid state.yaks in Lwt.return @@ FAgentTypes.string_of_eval_result res let eval_file_fdu myuuid instanceid self filename = Logs.debug (fun m -> m "[eval_ls_fdu]- ##############"); Logs.debug (fun m -> m "[eval_ls_fdu]- InstanceID : %s" instanceid); MVar.read self >>= fun state -> let%lwt res = Yaks_connector.Local.Actual.file_fdu_in_node myuuid instanceid filename state.yaks in Lwt.return @@ FAgentTypes.string_of_eval_result res
a5431e12a941b0be9cb5da74ee987baf6ad9adbbb95835ad5f20bc31ba318ed1
huangz1990/SICP-answers
test-8-cube.scm
(load "test-manager/load.scm") (load "8-cube.scm") (define-each-check (= (* 3 3 3) (cube 3)) (= (* 8 8 8) (cube 8)) ) (run-registered-tests)
null
https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp1/code/test-8-cube.scm
scheme
(load "test-manager/load.scm") (load "8-cube.scm") (define-each-check (= (* 3 3 3) (cube 3)) (= (* 8 8 8) (cube 8)) ) (run-registered-tests)
74729cd995efae2aa458247dfb90c5537b6e997852836013e3f30bdafdb828ae
shentufoundation/deepsea
FramesLabelsCintptr.ml
open StmtClinear open TempModelLow (** val ftype : ftemps **) let ftype = { f_temps = (Obj.magic StmtClinear.fn_temps); f_args = (Obj.magic StmtClinear.fn_params) }
null
https://raw.githubusercontent.com/shentufoundation/deepsea/970576a97c8992655ed2f173f576502d73b827e1/src/backend/extraction/FramesLabelsCintptr.ml
ocaml
* val ftype : ftemps *
open StmtClinear open TempModelLow let ftype = { f_temps = (Obj.magic StmtClinear.fn_temps); f_args = (Obj.magic StmtClinear.fn_params) }
4f3a7f265a9afc0bf177c51268b78fc6a86f2b921f41e5aad98b8792cdea339e
ocaml-ppx/ppxlib
ast_traverse.mli
* This module provides AST traversal classes , such as maps , iterations , folds , etc . on the { ! types . { 1 Link to the tutorial } For a detailed explanation on this module , refer to the { { ! " ast - traversal " } relevant } part of the manual . { 1 API } etc. on the {!Parsetree} types. {1 Link to the tutorial} For a detailed explanation on this module, refer to the {{!"ast-traversal"} relevant} part of the manual. {1 API} *) open! Import * To use these classes , inherit from them and override the methods corresponding to the types from [ Parsetree ] you want to process . For instance to collect all the string constants in a structure : { [ let string_constants_of = object inherit [ string list ] Ast_traverse.fold as super method ! expression e acc = let acc = super#expression e acc in match e.pexp_desc with | Pexp_constant ( Const_string ( s , _ ) ) - > s : : acc | _ - > acc method ! pattern p acc = let acc = super#pattern p acc in match p.ppat_desc with | Ppat_constant ( Const_string ( s , _ ) ) - > s : : acc | _ - > acc end let string_constants_of_structure = string_constants_of#structure ] } corresponding to the types from [Parsetree] you want to process. For instance to collect all the string constants in a structure: {[ let string_constants_of = object inherit [string list] Ast_traverse.fold as super method! expression e acc = let acc = super#expression e acc in match e.pexp_desc with | Pexp_constant (Const_string (s, _)) -> s :: acc | _ -> acc method! 
pattern p acc = let acc = super#pattern p acc in match p.ppat_desc with | Ppat_constant (Const_string (s, _)) -> s :: acc | _ -> acc end let string_constants_of_structure = string_constants_of#structure ]} *) class map : object inherit Ppxlib_traverse_builtins.map inherit Ast.map end class iter : object inherit Ppxlib_traverse_builtins.iter inherit Ast.iter end class ['acc] fold : object inherit ['acc] Ppxlib_traverse_builtins.fold inherit ['acc] Ast.fold end class ['acc] fold_map : object inherit ['acc] Ppxlib_traverse_builtins.fold_map inherit ['acc] Ast.fold_map end class ['ctx] map_with_context : object inherit ['ctx] Ppxlib_traverse_builtins.map_with_context inherit ['ctx] Ast.map_with_context end class map_with_path : [string] map_with_context val enter_value : (expression, string loc) Attribute.t val enter_module : (module_expr, string loc) Attribute.t val do_not_enter_value_binding : (value_binding, unit) Attribute.t val do_not_enter_value_description : (value_description, unit) Attribute.t val do_not_enter_module_binding : (module_binding, unit) Attribute.t val do_not_enter_module_declaration : (module_declaration, unit) Attribute.t val do_not_enter_module_type_declaration : (module_type_declaration, unit) Attribute.t val do_not_enter_let_module : (expression, unit) Attribute.t class virtual ['res] lift : object inherit ['res] Ppxlib_traverse_builtins.lift inherit ['res] Ast.lift end class virtual ['ctx, 'res] lift_map_with_context : object inherit ['ctx, 'res] Ppxlib_traverse_builtins.lift_map_with_context inherit ['ctx, 'res] Ast.lift_map_with_context end class map_with_expansion_context_and_errors : object inherit [Expansion_context.Base.t, Location.Error.t list] Ppxlib_traverse_builtins .std_lift_mappers_with_context inherit [Expansion_context.Base.t, Location.Error.t list] Ast .lift_map_with_context end class sexp_of : object inherit [Sexp.t] Ppxlib_traverse_builtins.std_lifters inherit [Sexp.t] Ast.lift end val sexp_of : sexp_of
null
https://raw.githubusercontent.com/ocaml-ppx/ppxlib/1110af2ea18f351cc3f2ccbee8444bb2a4b257b7/src/ast_traverse.mli
ocaml
* This module provides AST traversal classes , such as maps , iterations , folds , etc . on the { ! types . { 1 Link to the tutorial } For a detailed explanation on this module , refer to the { { ! " ast - traversal " } relevant } part of the manual . { 1 API } etc. on the {!Parsetree} types. {1 Link to the tutorial} For a detailed explanation on this module, refer to the {{!"ast-traversal"} relevant} part of the manual. {1 API} *) open! Import * To use these classes , inherit from them and override the methods corresponding to the types from [ Parsetree ] you want to process . For instance to collect all the string constants in a structure : { [ let string_constants_of = object inherit [ string list ] Ast_traverse.fold as super method ! expression e acc = let acc = super#expression e acc in match e.pexp_desc with | Pexp_constant ( Const_string ( s , _ ) ) - > s : : acc | _ - > acc method ! pattern p acc = let acc = super#pattern p acc in match p.ppat_desc with | Ppat_constant ( Const_string ( s , _ ) ) - > s : : acc | _ - > acc end let string_constants_of_structure = string_constants_of#structure ] } corresponding to the types from [Parsetree] you want to process. For instance to collect all the string constants in a structure: {[ let string_constants_of = object inherit [string list] Ast_traverse.fold as super method! expression e acc = let acc = super#expression e acc in match e.pexp_desc with | Pexp_constant (Const_string (s, _)) -> s :: acc | _ -> acc method! 
pattern p acc = let acc = super#pattern p acc in match p.ppat_desc with | Ppat_constant (Const_string (s, _)) -> s :: acc | _ -> acc end let string_constants_of_structure = string_constants_of#structure ]} *) class map : object inherit Ppxlib_traverse_builtins.map inherit Ast.map end class iter : object inherit Ppxlib_traverse_builtins.iter inherit Ast.iter end class ['acc] fold : object inherit ['acc] Ppxlib_traverse_builtins.fold inherit ['acc] Ast.fold end class ['acc] fold_map : object inherit ['acc] Ppxlib_traverse_builtins.fold_map inherit ['acc] Ast.fold_map end class ['ctx] map_with_context : object inherit ['ctx] Ppxlib_traverse_builtins.map_with_context inherit ['ctx] Ast.map_with_context end class map_with_path : [string] map_with_context val enter_value : (expression, string loc) Attribute.t val enter_module : (module_expr, string loc) Attribute.t val do_not_enter_value_binding : (value_binding, unit) Attribute.t val do_not_enter_value_description : (value_description, unit) Attribute.t val do_not_enter_module_binding : (module_binding, unit) Attribute.t val do_not_enter_module_declaration : (module_declaration, unit) Attribute.t val do_not_enter_module_type_declaration : (module_type_declaration, unit) Attribute.t val do_not_enter_let_module : (expression, unit) Attribute.t class virtual ['res] lift : object inherit ['res] Ppxlib_traverse_builtins.lift inherit ['res] Ast.lift end class virtual ['ctx, 'res] lift_map_with_context : object inherit ['ctx, 'res] Ppxlib_traverse_builtins.lift_map_with_context inherit ['ctx, 'res] Ast.lift_map_with_context end class map_with_expansion_context_and_errors : object inherit [Expansion_context.Base.t, Location.Error.t list] Ppxlib_traverse_builtins .std_lift_mappers_with_context inherit [Expansion_context.Base.t, Location.Error.t list] Ast .lift_map_with_context end class sexp_of : object inherit [Sexp.t] Ppxlib_traverse_builtins.std_lifters inherit [Sexp.t] Ast.lift end val sexp_of : sexp_of
a5b9b94508993734d8588f761e4f3f3009204eed97f5a4fa4302ab7a21a58c7d
haskus/haskus-system
Frame.hs
# LANGUAGE RecordWildCards # # LANGUAGE DataKinds # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE ScopedTypeVariables # # LANGUAGE BlockArguments # -- | Frame -- -- A frame is a picture in memory. Its pixel components may be scattered into -- different frame buffers though. module Haskus.System.Linux.Graphics.Frame ( Frame(..) , FrameBuffer (..) , handleCreateFrame , freeFrame , dirtyFrame , SwitchFrameFlag (..) , SwitchFrameFlags , DirtyAnnotation (..) , Clip (..) , forEachFrameLine , forEachFrameColumn , forEachFramePixel , frameBufferPixelOffset ) where import Haskus.System.Linux.ErrorCode import Haskus.System.Linux.Handle import Haskus.System.Linux.Graphics.PixelFormat import Haskus.System.Linux.Graphics.Entities import Haskus.System.Linux.Graphics.KIO import Haskus.Binary.Vector as Vector import Haskus.Number.Word import Foreign.Ptr import Haskus.Binary.Storable import Haskus.Utils.Tuple import Haskus.Utils.Flow import Haskus.Utils.List (zip5) fromFrame :: forall b. Frame b -> StructFrameCommand2 fromFrame Frame{..} = StructFrameCommand2 (unEntityID frameID) frameWidth frameHeight framePixelFormat frameFlags (g fbBufferHandle) (g fbPitch) (g fbOffset) (g fbModifiers) where g :: (Num a,Storable a) => (FrameBuffer b -> a) -> Vector 4 a g f = Vector.fromFilledList 0 (fmap f frameBuffers) toFrame :: Handle -> [FrameBuffer b] -> StructFrameCommand2 -> Frame b toFrame hdl fbs StructFrameCommand2{..} = s where bufs = uncurry5 FrameBuffer <$> zip5 (fmap fbBuffer fbs) (Vector.toList fc2Handles) (Vector.toList fc2Pitches) (Vector.toList fc2Offsets) (Vector.toList fc2Modifiers) s = Frame (EntityID fc2FbId) fc2Width fc2Height fc2PixelFormat fc2Flags bufs hdl -- | Create a frame handleCreateFrame :: MonadInIO m => Handle -> Word32 -> Word32 -> PixelFormat -> FrameFlags -> [FrameBuffer b] -> Excepts '[ErrorCode] m (Frame b) handleCreateFrame hdl width height fmt flags frameBuffers = do let s = Frame (EntityID 0) width height fmt flags frameBuffers hdl ioctlAddFrame 
(fromFrame s) hdl ||> toFrame hdl frameBuffers -- | Release a frame freeFrame :: MonadInIO m => Frame b -> Excepts '[ErrorCode] m () freeFrame frame = do void <| ioctlRemoveFrame (unEntityID (frameID frame)) (frameCardHandle frame) -- | Indicate dirty parts of a frame source dirtyFrame :: MonadInIO m => Frame b -> DirtyAnnotation -> Excepts '[ErrorCode] m () dirtyFrame frame mode = do let (color,flags,clips) = case mode of Dirty cs -> (0,0,cs) DirtyCopy cs -> (0,1, concatMap (\(a,b) -> [a,b]) cs) DirtyFill c cs -> (c,2,cs) void $ withArray clips $ \clipPtr -> do let s = StructFrameDirty { fdFbId = unEntityID (frameID frame) , fdFlags = flags , fdColor = color , fdNumClips = fromIntegral (length clips) , fdClipsPtr = fromIntegral (ptrToWordPtr clipPtr) } ioctlDirtyFrame s (frameCardHandle frame) -- | Do something for each line of the frame (top to bottom) forEachFrameLine :: Monad m => Frame b -> (Word32 -> m ()) -> m () # INLINABLE forEachFrameLine # forEachFrameLine frame action = forLoopM_ 0 (< frameHeight frame) (+1) action -- | Do something for each column of the frame (left to right) forEachFrameColumn :: Monad m => Frame b -> (Word32 -> m ()) -> m () # INLINABLE forEachFrameColumn # forEachFrameColumn frame action = forLoopM_ 0 (< frameWidth frame) (+1) action -- | Do something for each pixel (x,y) of the frame forEachFramePixel :: Monad m => Frame b -> (Word32 -> Word32 -> m ()) -> m () # INLINABLE forEachFramePixel # forEachFramePixel frame action = forEachFrameLine frame \y -> forEachFrameColumn frame \x -> action x y | Compute an offset in a FrameBuffer from pixel component size in bytes and -- pixel coordinates (x,y) frameBufferPixelOffset :: FrameBuffer b -> Word32 -> Word32 -> Word32 -> Word32 {-# INLINABLE frameBufferPixelOffset #-} frameBufferPixelOffset fb pixelSize x y = fbOffset fb + x*pixelSize + y*(fbPitch fb)
null
https://raw.githubusercontent.com/haskus/haskus-system/38b3a363c26bc4d82e3493d8638d46bc35678616/haskus-system/src/lib/Haskus/System/Linux/Graphics/Frame.hs
haskell
| Frame A frame is a picture in memory. Its pixel components may be scattered into different frame buffers though. | Create a frame | Release a frame | Indicate dirty parts of a frame source | Do something for each line of the frame (top to bottom) | Do something for each column of the frame (left to right) | Do something for each pixel (x,y) of the frame pixel coordinates (x,y) # INLINABLE frameBufferPixelOffset #
# LANGUAGE RecordWildCards # # LANGUAGE DataKinds # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE ScopedTypeVariables # # LANGUAGE BlockArguments # module Haskus.System.Linux.Graphics.Frame ( Frame(..) , FrameBuffer (..) , handleCreateFrame , freeFrame , dirtyFrame , SwitchFrameFlag (..) , SwitchFrameFlags , DirtyAnnotation (..) , Clip (..) , forEachFrameLine , forEachFrameColumn , forEachFramePixel , frameBufferPixelOffset ) where import Haskus.System.Linux.ErrorCode import Haskus.System.Linux.Handle import Haskus.System.Linux.Graphics.PixelFormat import Haskus.System.Linux.Graphics.Entities import Haskus.System.Linux.Graphics.KIO import Haskus.Binary.Vector as Vector import Haskus.Number.Word import Foreign.Ptr import Haskus.Binary.Storable import Haskus.Utils.Tuple import Haskus.Utils.Flow import Haskus.Utils.List (zip5) fromFrame :: forall b. Frame b -> StructFrameCommand2 fromFrame Frame{..} = StructFrameCommand2 (unEntityID frameID) frameWidth frameHeight framePixelFormat frameFlags (g fbBufferHandle) (g fbPitch) (g fbOffset) (g fbModifiers) where g :: (Num a,Storable a) => (FrameBuffer b -> a) -> Vector 4 a g f = Vector.fromFilledList 0 (fmap f frameBuffers) toFrame :: Handle -> [FrameBuffer b] -> StructFrameCommand2 -> Frame b toFrame hdl fbs StructFrameCommand2{..} = s where bufs = uncurry5 FrameBuffer <$> zip5 (fmap fbBuffer fbs) (Vector.toList fc2Handles) (Vector.toList fc2Pitches) (Vector.toList fc2Offsets) (Vector.toList fc2Modifiers) s = Frame (EntityID fc2FbId) fc2Width fc2Height fc2PixelFormat fc2Flags bufs hdl handleCreateFrame :: MonadInIO m => Handle -> Word32 -> Word32 -> PixelFormat -> FrameFlags -> [FrameBuffer b] -> Excepts '[ErrorCode] m (Frame b) handleCreateFrame hdl width height fmt flags frameBuffers = do let s = Frame (EntityID 0) width height fmt flags frameBuffers hdl ioctlAddFrame (fromFrame s) hdl ||> toFrame hdl frameBuffers freeFrame :: MonadInIO m => Frame b -> Excepts '[ErrorCode] m () freeFrame frame = do void <| 
ioctlRemoveFrame (unEntityID (frameID frame)) (frameCardHandle frame) dirtyFrame :: MonadInIO m => Frame b -> DirtyAnnotation -> Excepts '[ErrorCode] m () dirtyFrame frame mode = do let (color,flags,clips) = case mode of Dirty cs -> (0,0,cs) DirtyCopy cs -> (0,1, concatMap (\(a,b) -> [a,b]) cs) DirtyFill c cs -> (c,2,cs) void $ withArray clips $ \clipPtr -> do let s = StructFrameDirty { fdFbId = unEntityID (frameID frame) , fdFlags = flags , fdColor = color , fdNumClips = fromIntegral (length clips) , fdClipsPtr = fromIntegral (ptrToWordPtr clipPtr) } ioctlDirtyFrame s (frameCardHandle frame) forEachFrameLine :: Monad m => Frame b -> (Word32 -> m ()) -> m () # INLINABLE forEachFrameLine # forEachFrameLine frame action = forLoopM_ 0 (< frameHeight frame) (+1) action forEachFrameColumn :: Monad m => Frame b -> (Word32 -> m ()) -> m () # INLINABLE forEachFrameColumn # forEachFrameColumn frame action = forLoopM_ 0 (< frameWidth frame) (+1) action forEachFramePixel :: Monad m => Frame b -> (Word32 -> Word32 -> m ()) -> m () # INLINABLE forEachFramePixel # forEachFramePixel frame action = forEachFrameLine frame \y -> forEachFrameColumn frame \x -> action x y | Compute an offset in a FrameBuffer from pixel component size in bytes and frameBufferPixelOffset :: FrameBuffer b -> Word32 -> Word32 -> Word32 -> Word32 frameBufferPixelOffset fb pixelSize x y = fbOffset fb + x*pixelSize + y*(fbPitch fb)
b58171e9499ad7e133cbcb738a64fcf4b9fee238ff66f95973dcfb4934a23b30
gelisam/giggles-is-you
Simulate.hs
{-# LANGUAGE RankNTypes #-} module Graphics.Gloss.Internals.Interface.Simulate (simulateWithBackendIO) where import Graphics.Gloss.Data.Display import Graphics.Gloss.Data.Color import Graphics.Gloss.Data.Picture import Graphics.Gloss.Data.ViewPort import Graphics.Gloss.Data.ViewState import Graphics.Gloss.Rendering import Graphics.Gloss.Internals.Interface.Backend import Graphics.Gloss.Internals.Interface.Window import Graphics.Gloss.Internals.Interface.Common.Exit import Graphics.Gloss.Internals.Interface.ViewState.KeyMouse import Graphics.Gloss.Internals.Interface.ViewState.Motion import Graphics.Gloss.Internals.Interface.ViewState.Reshape import Graphics.Gloss.Internals.Interface.Animate.Timing import Graphics.Gloss.Internals.Interface.Simulate.Idle import qualified Graphics.Gloss.Internals.Interface.Callback as Callback import qualified Graphics.Gloss.Internals.Interface.Simulate.State as SM import qualified Graphics.Gloss.Internals.Interface.Animate.State as AN import Data.IORef import System.Mem simulateWithBackendIO :: forall model a . Backend a => a -- ^ Initial state of the backend -> Display -- ^ Display mode. -> Color -- ^ Background color. ^ Number of simulation steps to take for each second of real time . -> model -- ^ The initial model. -> (model -> IO Picture) -- ^ A function to convert the model to a picture. -> (ViewPort -> Float -> model -> IO model) ^ A function to step the model one iteration . It is passed the -- current viewport and the amount of time for this simulation step ( in seconds ) . 
-> IO () simulateWithBackendIO backend display backgroundColor simResolution worldStart worldToPicture worldAdvance = do let singleStepTime = 1 -- make the simulation state stateSR <- newIORef $ SM.stateInit simResolution -- make a reference to the initial world worldSR <- newIORef worldStart make the initial GL view and render states viewSR <- newIORef viewStateInit animateSR <- newIORef AN.stateInit renderS_ <- initState renderSR <- newIORef renderS_ let displayFun backendRef = do -- convert the world to a picture world <- readIORef worldSR port <- viewStateViewPort <$> readIORef viewSR picture <- worldToPicture world -- display the picture in the current view renderS <- readIORef renderSR windowSize <- getWindowDimensions backendRef -- render the frame displayPicture windowSize backgroundColor renderS (viewPortScale port) (applyViewPortToPicture port picture) perform GC every frame to try and avoid long pauses performGC let callbacks = [ Callback.Display (animateBegin animateSR) , Callback.Display displayFun , Callback.Display (animateEnd animateSR) , Callback.Idle (callback_simulate_idle stateSR animateSR (viewStateViewPort <$> readIORef viewSR) worldSR worldAdvance singleStepTime) , callback_exit () , callback_viewState_keyMouse viewSR , callback_viewState_motion viewSR , callback_viewState_reshape ] createWindow backend display backgroundColor callbacks (const (return ()))
null
https://raw.githubusercontent.com/gelisam/giggles-is-you/6487120b219bad80ff87a43f1d7d9fb97d17dfb5/gloss/Graphics/Gloss/Internals/Interface/Simulate.hs
haskell
# LANGUAGE RankNTypes # ^ Initial state of the backend ^ Display mode. ^ Background color. ^ The initial model. ^ A function to convert the model to a picture. current viewport and the amount of time for this simulation make the simulation state make a reference to the initial world convert the world to a picture display the picture in the current view render the frame
module Graphics.Gloss.Internals.Interface.Simulate (simulateWithBackendIO) where import Graphics.Gloss.Data.Display import Graphics.Gloss.Data.Color import Graphics.Gloss.Data.Picture import Graphics.Gloss.Data.ViewPort import Graphics.Gloss.Data.ViewState import Graphics.Gloss.Rendering import Graphics.Gloss.Internals.Interface.Backend import Graphics.Gloss.Internals.Interface.Window import Graphics.Gloss.Internals.Interface.Common.Exit import Graphics.Gloss.Internals.Interface.ViewState.KeyMouse import Graphics.Gloss.Internals.Interface.ViewState.Motion import Graphics.Gloss.Internals.Interface.ViewState.Reshape import Graphics.Gloss.Internals.Interface.Animate.Timing import Graphics.Gloss.Internals.Interface.Simulate.Idle import qualified Graphics.Gloss.Internals.Interface.Callback as Callback import qualified Graphics.Gloss.Internals.Interface.Simulate.State as SM import qualified Graphics.Gloss.Internals.Interface.Animate.State as AN import Data.IORef import System.Mem simulateWithBackendIO :: forall model a . Backend a ^ Number of simulation steps to take for each second of real time . -> (model -> IO Picture) -> (ViewPort -> Float -> model -> IO model) ^ A function to step the model one iteration . It is passed the step ( in seconds ) . 
-> IO () simulateWithBackendIO backend display backgroundColor simResolution worldStart worldToPicture worldAdvance = do let singleStepTime = 1 stateSR <- newIORef $ SM.stateInit simResolution worldSR <- newIORef worldStart make the initial GL view and render states viewSR <- newIORef viewStateInit animateSR <- newIORef AN.stateInit renderS_ <- initState renderSR <- newIORef renderS_ let displayFun backendRef = do world <- readIORef worldSR port <- viewStateViewPort <$> readIORef viewSR picture <- worldToPicture world renderS <- readIORef renderSR windowSize <- getWindowDimensions backendRef displayPicture windowSize backgroundColor renderS (viewPortScale port) (applyViewPortToPicture port picture) perform GC every frame to try and avoid long pauses performGC let callbacks = [ Callback.Display (animateBegin animateSR) , Callback.Display displayFun , Callback.Display (animateEnd animateSR) , Callback.Idle (callback_simulate_idle stateSR animateSR (viewStateViewPort <$> readIORef viewSR) worldSR worldAdvance singleStepTime) , callback_exit () , callback_viewState_keyMouse viewSR , callback_viewState_motion viewSR , callback_viewState_reshape ] createWindow backend display backgroundColor callbacks (const (return ()))
3d45e63c83d43585b04891b7e4a695111d990adfe07e5bb37fb62f9103a19cce
haskell-tools/haskell-tools
PatternImport.hs
# LANGUAGE PatternSynonyms # module Module.PatternImport where import Decl.PatternSynonym (pattern Arrow)
null
https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/refactor/examples/Module/PatternImport.hs
haskell
# LANGUAGE PatternSynonyms # module Module.PatternImport where import Decl.PatternSynonym (pattern Arrow)
874cc94562b0bca03b1dd6aa1162e54f8f277ff1fff2b88eed3b613e17979b0e
janestreet/resource_cache
address_config.ml
module Stable = struct open! Core.Core_stable module V3 = struct type t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.V2.t ; max_connections_per_address : int ; max_connection_reuse : int ; close_idle_connections_when_at_limit : bool ; close_connection_on_unhandled_exn : bool } [@@deriving bin_io, sexp] let%expect_test _ = print_endline [%bin_digest: t]; [%expect {| 03db3ce55a1aa6bbda6faa70d3b3e86b |}] ;; end module V2 = struct type t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.V2.t ; max_connections_per_address : int ; max_connection_reuse : int ; close_idle_connections_when_at_limit : bool } [@@deriving bin_io, sexp, stable_record ~version:V3.t ~add:[ close_connection_on_unhandled_exn ]] let of_v3 = of_V3_t let to_v3 = to_V3_t ~close_connection_on_unhandled_exn:false let%expect_test _ = print_endline [%bin_digest: t]; [%expect {| c8122cfd57d06e0d9201489d1d070af5 |}] ;; end module V1 = struct type t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.V2.t ; max_connections_per_address : int ; max_connection_reuse : int } [@@deriving bin_io , sexp , stable_record ~version:V2.t ~add:[ close_idle_connections_when_at_limit ]] let of_v2 = of_V2_t let to_v2 = to_V2_t ~close_idle_connections_when_at_limit:false let%expect_test _ = print_endline [%bin_digest: t]; [%expect {| f6909d04e51fd189259fe7dbe513e5a5 |}] ;; end end open! Core open! Async_kernel open! Import type t = Stable.V3.t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.t ; max_connections_per_address : int ; max_connection_reuse : int ; close_idle_connections_when_at_limit : bool ; close_connection_on_unhandled_exn : bool } [@@deriving compare, fields, sexp_of] let create = Fields.create let default = { max_open_connections = 500 ; cleanup_idle_connection_after = Time_ns.Span.of_sec 5. 
; max_connections_per_address = 10 ; max_connection_reuse = 10 ; close_idle_connections_when_at_limit = false ; close_connection_on_unhandled_exn = true } ;; let to_cache_config t = Config.create ~max_resources:t.max_open_connections ~idle_cleanup_after:t.cleanup_idle_connection_after ~max_resources_per_id:t.max_connections_per_address ~max_resource_reuse:t.max_connection_reuse ~close_idle_resources_when_at_limit:t.close_idle_connections_when_at_limit ~close_resource_on_unhandled_exn:t.close_connection_on_unhandled_exn ;; let of_cache_config (cache_config : Config.t) = create ~max_open_connections:cache_config.max_resources ~cleanup_idle_connection_after:cache_config.idle_cleanup_after ~max_connections_per_address:cache_config.max_resources_per_id ~max_connection_reuse:cache_config.max_resource_reuse ~close_idle_connections_when_at_limit:cache_config.close_idle_resources_when_at_limit ~close_connection_on_unhandled_exn:cache_config.close_resource_on_unhandled_exn ;;
null
https://raw.githubusercontent.com/janestreet/resource_cache/e0693f18ad1c66a822a0487e9385a7240dcac99e/src/address_config.ml
ocaml
module Stable = struct open! Core.Core_stable module V3 = struct type t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.V2.t ; max_connections_per_address : int ; max_connection_reuse : int ; close_idle_connections_when_at_limit : bool ; close_connection_on_unhandled_exn : bool } [@@deriving bin_io, sexp] let%expect_test _ = print_endline [%bin_digest: t]; [%expect {| 03db3ce55a1aa6bbda6faa70d3b3e86b |}] ;; end module V2 = struct type t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.V2.t ; max_connections_per_address : int ; max_connection_reuse : int ; close_idle_connections_when_at_limit : bool } [@@deriving bin_io, sexp, stable_record ~version:V3.t ~add:[ close_connection_on_unhandled_exn ]] let of_v3 = of_V3_t let to_v3 = to_V3_t ~close_connection_on_unhandled_exn:false let%expect_test _ = print_endline [%bin_digest: t]; [%expect {| c8122cfd57d06e0d9201489d1d070af5 |}] ;; end module V1 = struct type t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.V2.t ; max_connections_per_address : int ; max_connection_reuse : int } [@@deriving bin_io , sexp , stable_record ~version:V2.t ~add:[ close_idle_connections_when_at_limit ]] let of_v2 = of_V2_t let to_v2 = to_V2_t ~close_idle_connections_when_at_limit:false let%expect_test _ = print_endline [%bin_digest: t]; [%expect {| f6909d04e51fd189259fe7dbe513e5a5 |}] ;; end end open! Core open! Async_kernel open! Import type t = Stable.V3.t = { max_open_connections : int ; cleanup_idle_connection_after : Time_ns.Span.t ; max_connections_per_address : int ; max_connection_reuse : int ; close_idle_connections_when_at_limit : bool ; close_connection_on_unhandled_exn : bool } [@@deriving compare, fields, sexp_of] let create = Fields.create let default = { max_open_connections = 500 ; cleanup_idle_connection_after = Time_ns.Span.of_sec 5. 
; max_connections_per_address = 10 ; max_connection_reuse = 10 ; close_idle_connections_when_at_limit = false ; close_connection_on_unhandled_exn = true } ;; let to_cache_config t = Config.create ~max_resources:t.max_open_connections ~idle_cleanup_after:t.cleanup_idle_connection_after ~max_resources_per_id:t.max_connections_per_address ~max_resource_reuse:t.max_connection_reuse ~close_idle_resources_when_at_limit:t.close_idle_connections_when_at_limit ~close_resource_on_unhandled_exn:t.close_connection_on_unhandled_exn ;; let of_cache_config (cache_config : Config.t) = create ~max_open_connections:cache_config.max_resources ~cleanup_idle_connection_after:cache_config.idle_cleanup_after ~max_connections_per_address:cache_config.max_resources_per_id ~max_connection_reuse:cache_config.max_resource_reuse ~close_idle_connections_when_at_limit:cache_config.close_idle_resources_when_at_limit ~close_connection_on_unhandled_exn:cache_config.close_resource_on_unhandled_exn ;;
44dc3a360c034242fca7cb05341c2e32ed2eeed84766aec01db7b4426c77a625
patricoferris/ocaml-multicore-monorepo
caqti_mult.mli
Copyright ( C ) 2017 - -2018 Petter A. Urkedal < > * * This library is free software ; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation , either version 3 of the License , or ( at your * option ) any later version , with the OCaml static compilation exception . * * This library is distributed in the hope that it will be useful , but WITHOUT * ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public * License for more details . * * You should have received a copy of the GNU Lesser General Public License * along with this library . If not , see < / > . * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at your * option) any later version, with the OCaml static compilation exception. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this library. If not, see </>. *) (** Row multiplicity. *) type +'m t constraint 'm = [< `Zero | `One | `Many] type zero = [`Zero] type one = [`One] type zero_or_one = [`Zero | `One] type zero_or_more = [`Zero | `One | `Many] val zero : [> `Zero] t val one : [> `One] t val zero_or_one : [> `Zero | `One] t val zero_or_more : [> `Zero | `One | `Many] t val only_zero : [< `Zero] t -> unit val only_one : [< `One] t -> unit val only_zero_or_one : [< `Zero | `One] t -> unit val expose : 'm t -> [`Zero | `One | `Zero_or_one | `Zero_or_more]
null
https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/ocaml-caqti/lib/caqti_mult.mli
ocaml
* Row multiplicity.
Copyright ( C ) 2017 - -2018 Petter A. Urkedal < > * * This library is free software ; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation , either version 3 of the License , or ( at your * option ) any later version , with the OCaml static compilation exception . * * This library is distributed in the hope that it will be useful , but WITHOUT * ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public * License for more details . * * You should have received a copy of the GNU Lesser General Public License * along with this library . If not , see < / > . * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at your * option) any later version, with the OCaml static compilation exception. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this library. If not, see </>. *) type +'m t constraint 'm = [< `Zero | `One | `Many] type zero = [`Zero] type one = [`One] type zero_or_one = [`Zero | `One] type zero_or_more = [`Zero | `One | `Many] val zero : [> `Zero] t val one : [> `One] t val zero_or_one : [> `Zero | `One] t val zero_or_more : [> `Zero | `One | `Many] t val only_zero : [< `Zero] t -> unit val only_one : [< `One] t -> unit val only_zero_or_one : [< `Zero | `One] t -> unit val expose : 'm t -> [`Zero | `One | `Zero_or_one | `Zero_or_more]
a61efa94c5593ae36990a58ebb30c85bce98d61a7d4db4fc2055a7a06b051440
r-willis/biten
hmac_test.erl
@author %% @doc tests for hmac erlsha2 wrappers %% See also %% <a href=""> Identifiers and Test Vectors for HMAC - SHA * %% </a>. -module(hmac_test). -include_lib("eunit/include/eunit.hrl"). hex_int(Binary) -> list_to_integer(hmac:hexlify(Binary), 16). wikipedia_test() -> ?assertMatch(16#fbdb1d1b18aa6c08324b7d64b71fb76370690e1d, hex_int(hmac:hmac("", ""))), ?assertMatch(16#b613679a0814d9ec772f95d778c35fc5ff1697c493715653c6c712144292c5ad, hex_int(hmac:hmac256("", ""))), ?assertMatch(16#b936cee86c9f87aa5d3c6f2e84cb5a4239a5fe50480a6ec66b70ab5b1f4ac6730c6c515421b327ec1d69402e53dfb49ad7381eb067b338fd7b0cb22247225d47, hex_int(hmac:hmac512("", ""))), ?assertMatch(16#de7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9, hex_int(hmac:hmac("key", "The quick brown fox jumps over the lazy dog"))), ?assertMatch(16#f7bc83f430538424b13298e6aa6fb143ef4d59a14946175997479dbc2d1a3cd8, hex_int(hmac:hmac256("key", "The quick brown fox jumps over the lazy dog"))), ?assertMatch(16#b42af09057bac1e2d41708e48a902e09b5ff7f12ab428a4fe86653c73dd248fb82f948a549f7b791a5b41915ee4d1ec3935357e4e2317250d0372afa2ebeeb3a, hex_int(hmac:hmac512("key", "The quick brown fox jumps over the lazy dog"))), ok. rfc_4231_1_test() -> Key = binary:copy(<<16#0b>>, 20), Val = "Hi There", ?assertMatch(16#896fb1128abbdf196832107cd49df33f47b4b1169912ba4f53684b22, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#afd03944d84895626b0825f4ab46907f15f9dadbe4101ec682aa034c7cebc59cfaea9ea9076ede7f4af152e8b2fa9cb6, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#87aa7cdea5ef619d4ff0b4241a1d6cb02379f4e2ce4ec2787ad0b30545e17cdedaa833b7d6b8a702038b274eaea3f4e4be9d914eeb61f1702e696c203a126854, hex_int(hmac:hmac512(Key, Val))), ok. Test with a key shorter than the length of the HMAC output . 
rfc_4231_2_test() -> Key = "Jefe", Val = "what do ya want for nothing?", ?assertMatch(16#a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737, hex_int(hmac:hmac512(Key, Val))), ok. Test with a combined length of key and data that is larger than 64 bytes (= block - size of SHA-224 and SHA-256 ) . rfc_4231_3_test() -> Key = binary:copy(<<16#aa>>, 20), Val = binary:copy(<<16#dd>>, 50), ?assertMatch(16#7fb3cb3588c6c1f6ffa9694d7d6ad2649365b0c1f65d69d1ec8333ea, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#88062608d3e6ad8a0aa2ace014c8a86f0aa635d947ac9febe83ef4e55966144b2a5ab39dc13814b94e3ab6e101a34f27, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#fa73b0089d56a284efb0f0756c890be9b1b5dbdd8ee81a3655f83e33b2279d39bf3e848279a722c806b485a47e67c807b946a337bee8942674278859e13292fb, hex_int(hmac:hmac512(Key, Val))), ok. Test with a combined length of key and data that is larger than 64 bytes (= block - size of SHA-224 and SHA-256 ) . 
rfc_4231_4_test() -> Key = list_to_binary(lists:seq(1, 16#19)), Val = binary:copy(<<16#cd>>, 50), ?assertMatch(16#6c11506874013cac6a2abc1bb382627cec6a90d86efc012de7afec5a, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#3e8a69b7783c25851933ab6290af6ca77a9981480850009cc5577c6e1f573b4e6801dd23c4a7d679ccf8a386c674cffb, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#b0ba465637458c6990e5a8c5f61d4af7e576d97ff94b872de76f8050361ee3dba91ca5c11aa25eb4d679275cc5788063a5f19741120c4f2de2adebeb10a298dd, hex_int(hmac:hmac512(Key, Val))), ok. Test with a truncation of output to 128 bits . rfc_4231_5_test() -> Key = binary:copy(<<16#0c>>, 20), Val = "Test With Truncation", <<Left224:16/binary, _Rest224/binary>> = hmac:hmac224(Key, Val), <<Left256:16/binary, _Rest256/binary>> = hmac:hmac256(Key, Val), <<Left384:16/binary, _Rest384/binary>> = hmac:hmac384(Key, Val), <<Left512:16/binary, _Rest512/binary>> = hmac:hmac512(Key, Val), ?assertMatch(16#0e2aea68a90c8d37c988bcdb9fca6fa8, hex_int(Left224)), ?assertMatch(16#a3b6167473100ee06e0c796c2955552b, hex_int(Left256)), ?assertMatch(16#3abf34c3503b2a23a46efc619baef897, hex_int(Left384)), ?assertMatch(16#415fad6271580a531d4179bc891d87a6, hex_int(Left512)), ok. Test with a key larger than 128 bytes (= block - size of SHA-384 and SHA-512 ) . 
rfc_4231_6_test() -> Key = binary:copy(<<16#aa>>, 131), Val = "Test Using Larger Than Block-Size Key - Hash Key First", ?assertMatch(16#95e9a0db962095adaebe9b2d6f0dbce2d499f112f2d2b7273fa6870e, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#4ece084485813e9088d2c63a041bc5b44f9ef1012a2b588f3cd11f05033ac4c60c2ef6ab4030fe8296248df163f44952, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#80b24263c7c1a3ebb71493c1dd7be8b49b46d1f41b4aeec1121b013783f8f3526b56d037e05f2598bd0fd2215d6a1e5295e64f73f63f0aec8b915a985d786598, hex_int(hmac:hmac512(Key, Val))), ok. Test with a key and data that is larger than 128 bytes (= block - size of SHA-384 and SHA-512 ) . rfc_4231_7_test() -> Key = binary:copy(<<16#aa>>, 131), Val = "This is a test using a larger than block-size key and a larger than block-size data. " "The key needs to be hashed before being used by the HMAC algorithm.", ?assertMatch(16#3a854166ac5d9f023f54d517d0b39dbd946770db9c2b95c9f6f565d1, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#6617178e941f020d351e2f254e8fd32c602420feb0b8fb9adccebb82461e99c5a678cc31e799176d3860e6110c46523e, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#e37b6a775dc87dbaa4dfa9f96e5e3ffddebd71f8867289865df5a32d20cdc944b6022cac3c4982b10d5eeb55c3e4de15134676fb6de0446065c97440fa8c6a58, hex_int(hmac:hmac512(Key, Val))), ok.
null
https://raw.githubusercontent.com/r-willis/biten/75b13ea296992f8fa749646b9d7c15c5ef23d94d/apps/erlsha2/test/hmac_test.erl
erlang
@doc tests for hmac erlsha2 wrappers See also <a href=""> </a>.
@author Identifiers and Test Vectors for HMAC - SHA * -module(hmac_test). -include_lib("eunit/include/eunit.hrl"). hex_int(Binary) -> list_to_integer(hmac:hexlify(Binary), 16). wikipedia_test() -> ?assertMatch(16#fbdb1d1b18aa6c08324b7d64b71fb76370690e1d, hex_int(hmac:hmac("", ""))), ?assertMatch(16#b613679a0814d9ec772f95d778c35fc5ff1697c493715653c6c712144292c5ad, hex_int(hmac:hmac256("", ""))), ?assertMatch(16#b936cee86c9f87aa5d3c6f2e84cb5a4239a5fe50480a6ec66b70ab5b1f4ac6730c6c515421b327ec1d69402e53dfb49ad7381eb067b338fd7b0cb22247225d47, hex_int(hmac:hmac512("", ""))), ?assertMatch(16#de7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9, hex_int(hmac:hmac("key", "The quick brown fox jumps over the lazy dog"))), ?assertMatch(16#f7bc83f430538424b13298e6aa6fb143ef4d59a14946175997479dbc2d1a3cd8, hex_int(hmac:hmac256("key", "The quick brown fox jumps over the lazy dog"))), ?assertMatch(16#b42af09057bac1e2d41708e48a902e09b5ff7f12ab428a4fe86653c73dd248fb82f948a549f7b791a5b41915ee4d1ec3935357e4e2317250d0372afa2ebeeb3a, hex_int(hmac:hmac512("key", "The quick brown fox jumps over the lazy dog"))), ok. rfc_4231_1_test() -> Key = binary:copy(<<16#0b>>, 20), Val = "Hi There", ?assertMatch(16#896fb1128abbdf196832107cd49df33f47b4b1169912ba4f53684b22, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#afd03944d84895626b0825f4ab46907f15f9dadbe4101ec682aa034c7cebc59cfaea9ea9076ede7f4af152e8b2fa9cb6, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#87aa7cdea5ef619d4ff0b4241a1d6cb02379f4e2ce4ec2787ad0b30545e17cdedaa833b7d6b8a702038b274eaea3f4e4be9d914eeb61f1702e696c203a126854, hex_int(hmac:hmac512(Key, Val))), ok. Test with a key shorter than the length of the HMAC output . 
rfc_4231_2_test() -> Key = "Jefe", Val = "what do ya want for nothing?", ?assertMatch(16#a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737, hex_int(hmac:hmac512(Key, Val))), ok. Test with a combined length of key and data that is larger than 64 bytes (= block - size of SHA-224 and SHA-256 ) . rfc_4231_3_test() -> Key = binary:copy(<<16#aa>>, 20), Val = binary:copy(<<16#dd>>, 50), ?assertMatch(16#7fb3cb3588c6c1f6ffa9694d7d6ad2649365b0c1f65d69d1ec8333ea, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#88062608d3e6ad8a0aa2ace014c8a86f0aa635d947ac9febe83ef4e55966144b2a5ab39dc13814b94e3ab6e101a34f27, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#fa73b0089d56a284efb0f0756c890be9b1b5dbdd8ee81a3655f83e33b2279d39bf3e848279a722c806b485a47e67c807b946a337bee8942674278859e13292fb, hex_int(hmac:hmac512(Key, Val))), ok. Test with a combined length of key and data that is larger than 64 bytes (= block - size of SHA-224 and SHA-256 ) . 
rfc_4231_4_test() -> Key = list_to_binary(lists:seq(1, 16#19)), Val = binary:copy(<<16#cd>>, 50), ?assertMatch(16#6c11506874013cac6a2abc1bb382627cec6a90d86efc012de7afec5a, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#3e8a69b7783c25851933ab6290af6ca77a9981480850009cc5577c6e1f573b4e6801dd23c4a7d679ccf8a386c674cffb, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#b0ba465637458c6990e5a8c5f61d4af7e576d97ff94b872de76f8050361ee3dba91ca5c11aa25eb4d679275cc5788063a5f19741120c4f2de2adebeb10a298dd, hex_int(hmac:hmac512(Key, Val))), ok. Test with a truncation of output to 128 bits . rfc_4231_5_test() -> Key = binary:copy(<<16#0c>>, 20), Val = "Test With Truncation", <<Left224:16/binary, _Rest224/binary>> = hmac:hmac224(Key, Val), <<Left256:16/binary, _Rest256/binary>> = hmac:hmac256(Key, Val), <<Left384:16/binary, _Rest384/binary>> = hmac:hmac384(Key, Val), <<Left512:16/binary, _Rest512/binary>> = hmac:hmac512(Key, Val), ?assertMatch(16#0e2aea68a90c8d37c988bcdb9fca6fa8, hex_int(Left224)), ?assertMatch(16#a3b6167473100ee06e0c796c2955552b, hex_int(Left256)), ?assertMatch(16#3abf34c3503b2a23a46efc619baef897, hex_int(Left384)), ?assertMatch(16#415fad6271580a531d4179bc891d87a6, hex_int(Left512)), ok. Test with a key larger than 128 bytes (= block - size of SHA-384 and SHA-512 ) . 
rfc_4231_6_test() -> Key = binary:copy(<<16#aa>>, 131), Val = "Test Using Larger Than Block-Size Key - Hash Key First", ?assertMatch(16#95e9a0db962095adaebe9b2d6f0dbce2d499f112f2d2b7273fa6870e, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#4ece084485813e9088d2c63a041bc5b44f9ef1012a2b588f3cd11f05033ac4c60c2ef6ab4030fe8296248df163f44952, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#80b24263c7c1a3ebb71493c1dd7be8b49b46d1f41b4aeec1121b013783f8f3526b56d037e05f2598bd0fd2215d6a1e5295e64f73f63f0aec8b915a985d786598, hex_int(hmac:hmac512(Key, Val))), ok. Test with a key and data that is larger than 128 bytes (= block - size of SHA-384 and SHA-512 ) . rfc_4231_7_test() -> Key = binary:copy(<<16#aa>>, 131), Val = "This is a test using a larger than block-size key and a larger than block-size data. " "The key needs to be hashed before being used by the HMAC algorithm.", ?assertMatch(16#3a854166ac5d9f023f54d517d0b39dbd946770db9c2b95c9f6f565d1, hex_int(hmac:hmac224(Key, Val))), ?assertMatch(16#9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2, hex_int(hmac:hmac256(Key, Val))), ?assertMatch(16#6617178e941f020d351e2f254e8fd32c602420feb0b8fb9adccebb82461e99c5a678cc31e799176d3860e6110c46523e, hex_int(hmac:hmac384(Key, Val))), ?assertMatch(16#e37b6a775dc87dbaa4dfa9f96e5e3ffddebd71f8867289865df5a32d20cdc944b6022cac3c4982b10d5eeb55c3e4de15134676fb6de0446065c97440fa8c6a58, hex_int(hmac:hmac512(Key, Val))), ok.
e0bba40fe9c8e7676301f02dbf4a3c0f0e38b6461c5c788e6b28daee813b6a1b
2600hz/kazoo
kazoo_endpoint_app.erl
%%%----------------------------------------------------------------------------- ( C ) 2010 - 2020 , 2600Hz %%% @doc @author This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. %%% %%% @end %%%----------------------------------------------------------------------------- -module(kazoo_endpoint_app). -behaviour(application). -include_lib("kazoo_stdlib/include/kz_types.hrl"). -export([start/2, stop/1]). %%============================================================================== %% Application callbacks %%============================================================================== %%------------------------------------------------------------------------------ %% @doc Implement the application start behaviour. %% @end %%------------------------------------------------------------------------------ -spec start(application:start_type(), any()) -> kz_types:startapp_ret(). start(_Type, _Args) -> kazoo_endpoint_sup:start_link(). %%------------------------------------------------------------------------------ %% @doc Implement the application stop behaviour. %% @end %%------------------------------------------------------------------------------ -spec stop(any()) -> 'ok'. stop(_State) -> 'ok'.
null
https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_endpoint/src/kazoo_endpoint_app.erl
erlang
----------------------------------------------------------------------------- @doc @end ----------------------------------------------------------------------------- ============================================================================== Application callbacks ============================================================================== ------------------------------------------------------------------------------ @doc Implement the application start behaviour. @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc Implement the application stop behaviour. @end ------------------------------------------------------------------------------
( C ) 2010 - 2020 , 2600Hz @author This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. -module(kazoo_endpoint_app). -behaviour(application). -include_lib("kazoo_stdlib/include/kz_types.hrl"). -export([start/2, stop/1]). -spec start(application:start_type(), any()) -> kz_types:startapp_ret(). start(_Type, _Args) -> kazoo_endpoint_sup:start_link(). -spec stop(any()) -> 'ok'. stop(_State) -> 'ok'.
de654f51c18d4c4ba88c69ab73ebc192cb3a3759395f5d3282b1f89fa1b9994c
janestreet/core_unix
command_test_helpers.mli
(** Functions to help test [Command]. *) open! Core open! Import (** [parse_command_line param] returns a function which evaluates [param] against a string list as if those were the arguments passed to [param]. No shelling out takes place. However, the [param] is evaluated, and side effects of that evaluation do occur. See [validate_command_line] below for a less accurate but generally safer test that does not evaluate the param. (Of course if your param is side-effect free, there's no reason to shy away from this one.) If the command-line fails to parse, an error will be printed. If the command-line parsing code exits for any reason (e.g. you passed "-help"), the exit code is printed. *) val parse_command_line : ?path:string list -> ?summary:string -> ?readme:(unit -> string) -> 'a Command.Param.t -> (?on_error:(unit -> unit) -> ?on_success:('a -> unit) -> string list -> unit) Staged.t val parse_command_line_or_error : ?path:string list -> ?summary:string -> ?readme:(unit -> string) -> 'a Command.Param.t -> (string list -> 'a Or_error.t) Staged.t (** [validate_command command] provides a function [f] s.t. [f args] will parse the args against [command] without executing the body of [command] if parsing succeeds. [f args] will raise if [args] goes through an [Exec _]. This will trigger any side-effects caused by parsing the args but it does guarentee the the args provided are completely valid. [validate_command command] does not work in top-level expect tests. *) val validate_command : Command.t -> string list -> unit Or_error.t * [ validate_command_line shape ] provides a function [ f ] s.t . [ f args ] is best - effort check of [ args ] against the command described by [ shape ] , without actual execution of that command . [ validate_command_line ] raises if any subcommand of [ shape ] would exec another command binary . This prevents us from introducing unexpected external dependencies into tests . What we check : 1 . 
[ args ] refers to a valid subcommand of [ shape ] . 2 . [ args ] passes an acceptable number of anonymous arguments . 3 . [ args ] passes flags that exist , an acceptable number of times , and with arguments where they are expected . What we do not check : 1 . Whether argument have acceptable values . E.g. , it falsely accepts floats where ints are expected . 2 . Side effects during argument parsing , including aborting further parsing of the command line . E.g. , it does not handle [ -help ] or [ escape ] flags correctly . 3 . Aliases excluded from help . E.g. , [ --help ] . 4 . [ full_flag_required ] . We assume every flag can be passed by prefix . check of [args] against the command described by [shape], without actual execution of that command. [validate_command_line] raises if any subcommand of [shape] would exec another command binary. This prevents us from introducing unexpected external dependencies into tests. What we check: 1. [args] refers to a valid subcommand of [shape]. 2. [args] passes an acceptable number of anonymous arguments. 3. [args] passes flags that exist, an acceptable number of times, and with arguments where they are expected. What we do not check: 1. Whether argument have acceptable values. E.g., it falsely accepts floats where ints are expected. 2. Side effects during argument parsing, including aborting further parsing of the command line. E.g., it does not handle [-help] or [escape] flags correctly. 3. Aliases excluded from help. E.g., [--help]. 4. [full_flag_required]. We assume every flag can be passed by prefix. *) val validate_command_line : Command.Shape.t -> (string list -> unit Or_error.t) Or_error.t * [ complete ? which_arg param ~args ] prints the completion suggestions to stderr . Thread safety : [ complete ] is not in general thread - safe . It sets and then restores the environment variable [ COMP_CWORD ] . 
However , the cooperative multi - threading semantics of [ Async ] mean that other async jobs will not see the altered environments . Side effects : [ complete ] will not perform the side effects of the param proper ( e.g. , due to the [ f ] of a [ Param.map ~f ] ) . [ complete ] will perform side effects of completion ( e.g. , due to the [ complete ] of [ Arg_type.create ~complete ] ) . Thread safety: [complete] is not in general thread-safe. It sets and then restores the environment variable [COMP_CWORD]. However, the cooperative multi-threading semantics of [Async] mean that other async jobs will not see the altered environments. Side effects: [complete] will not perform the side effects of the param proper (e.g., due to the [f] of a [Param.map ~f]). [complete] will perform side effects of completion (e.g., due to the [complete] of [Arg_type.create ~complete]). *) val complete * zero - indexed . Default : the last arg -> _ Command.Param.t -> args:string list -> unit (** As [complete] but applies to an intact [Command]. *) val complete_command : ?complete_subcommands: (path:string list -> part:string -> string list list -> string list option) -> ?which_arg:int -> Command.t -> args:string list -> unit
null
https://raw.githubusercontent.com/janestreet/core_unix/abfad608bb4ab04d16478a081cc284a88c3b3184/command_test_helpers/src/command_test_helpers.mli
ocaml
* Functions to help test [Command]. * [parse_command_line param] returns a function which evaluates [param] against a string list as if those were the arguments passed to [param]. No shelling out takes place. However, the [param] is evaluated, and side effects of that evaluation do occur. See [validate_command_line] below for a less accurate but generally safer test that does not evaluate the param. (Of course if your param is side-effect free, there's no reason to shy away from this one.) If the command-line fails to parse, an error will be printed. If the command-line parsing code exits for any reason (e.g. you passed "-help"), the exit code is printed. * [validate_command command] provides a function [f] s.t. [f args] will parse the args against [command] without executing the body of [command] if parsing succeeds. [f args] will raise if [args] goes through an [Exec _]. This will trigger any side-effects caused by parsing the args but it does guarentee the the args provided are completely valid. [validate_command command] does not work in top-level expect tests. * As [complete] but applies to an intact [Command].
open! Core open! Import val parse_command_line : ?path:string list -> ?summary:string -> ?readme:(unit -> string) -> 'a Command.Param.t -> (?on_error:(unit -> unit) -> ?on_success:('a -> unit) -> string list -> unit) Staged.t val parse_command_line_or_error : ?path:string list -> ?summary:string -> ?readme:(unit -> string) -> 'a Command.Param.t -> (string list -> 'a Or_error.t) Staged.t val validate_command : Command.t -> string list -> unit Or_error.t * [ validate_command_line shape ] provides a function [ f ] s.t . [ f args ] is best - effort check of [ args ] against the command described by [ shape ] , without actual execution of that command . [ validate_command_line ] raises if any subcommand of [ shape ] would exec another command binary . This prevents us from introducing unexpected external dependencies into tests . What we check : 1 . [ args ] refers to a valid subcommand of [ shape ] . 2 . [ args ] passes an acceptable number of anonymous arguments . 3 . [ args ] passes flags that exist , an acceptable number of times , and with arguments where they are expected . What we do not check : 1 . Whether argument have acceptable values . E.g. , it falsely accepts floats where ints are expected . 2 . Side effects during argument parsing , including aborting further parsing of the command line . E.g. , it does not handle [ -help ] or [ escape ] flags correctly . 3 . Aliases excluded from help . E.g. , [ --help ] . 4 . [ full_flag_required ] . We assume every flag can be passed by prefix . check of [args] against the command described by [shape], without actual execution of that command. [validate_command_line] raises if any subcommand of [shape] would exec another command binary. This prevents us from introducing unexpected external dependencies into tests. What we check: 1. [args] refers to a valid subcommand of [shape]. 2. [args] passes an acceptable number of anonymous arguments. 3. 
[args] passes flags that exist, an acceptable number of times, and with arguments where they are expected. What we do not check: 1. Whether argument have acceptable values. E.g., it falsely accepts floats where ints are expected. 2. Side effects during argument parsing, including aborting further parsing of the command line. E.g., it does not handle [-help] or [escape] flags correctly. 3. Aliases excluded from help. E.g., [--help]. 4. [full_flag_required]. We assume every flag can be passed by prefix. *) val validate_command_line : Command.Shape.t -> (string list -> unit Or_error.t) Or_error.t * [ complete ? which_arg param ~args ] prints the completion suggestions to stderr . Thread safety : [ complete ] is not in general thread - safe . It sets and then restores the environment variable [ COMP_CWORD ] . However , the cooperative multi - threading semantics of [ Async ] mean that other async jobs will not see the altered environments . Side effects : [ complete ] will not perform the side effects of the param proper ( e.g. , due to the [ f ] of a [ Param.map ~f ] ) . [ complete ] will perform side effects of completion ( e.g. , due to the [ complete ] of [ Arg_type.create ~complete ] ) . Thread safety: [complete] is not in general thread-safe. It sets and then restores the environment variable [COMP_CWORD]. However, the cooperative multi-threading semantics of [Async] mean that other async jobs will not see the altered environments. Side effects: [complete] will not perform the side effects of the param proper (e.g., due to the [f] of a [Param.map ~f]). [complete] will perform side effects of completion (e.g., due to the [complete] of [Arg_type.create ~complete]). *) val complete * zero - indexed . Default : the last arg -> _ Command.Param.t -> args:string list -> unit val complete_command : ?complete_subcommands: (path:string list -> part:string -> string list list -> string list option) -> ?which_arg:int -> Command.t -> args:string list -> unit
dbfbcd613b62dd188e131d3beba976fff3a0f2ec20f2b92c6393fdb2a4e1bcae
airalab/hs-web3
ERC20.hs
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} # LANGUAGE FlexibleInstances # # LANGUAGE MultiParamTypeClasses # # LANGUAGE OverloadedStrings # {-# LANGUAGE QuasiQuotes #-} module ERC20 where import Network.Ethereum.Contract.TH [abiFrom|ERC20.json|]
null
https://raw.githubusercontent.com/airalab/hs-web3/e6719ae384d2371a342f03afa3634921f4a8cd37/examples/erc20/ERC20.hs
haskell
# LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE FlexibleContexts # # LANGUAGE QuasiQuotes #
# LANGUAGE FlexibleInstances # # LANGUAGE MultiParamTypeClasses # # LANGUAGE OverloadedStrings # module ERC20 where import Network.Ethereum.Contract.TH [abiFrom|ERC20.json|]
6a90c8852ea2877d4b718e38273b540fa5eb4ccbff790afa96263cd53fe2c532
victorvianna/mini-gcc
linear.ml
open X86_64 open Format exception Error of string let visited_labels = Hashtbl.create 17 type instr = Code of X86_64.text | Label of Label.t let code = ref [] let emit l instr = code := Code instr :: Label l :: !code let emit_wl instr = code := Code instr :: !code let emit_label l = code := Label l :: !code let labels = Hashtbl.create 17 let need_label l = Hashtbl.add labels l () let funs = ref [] produces an 64 - bit operand if possible let operand ltl_operand = match ltl_operand with | Ltltree.Reg r -> reg (register64 r) | Ltltree.Spilled offset -> ind ~ofs:(offset) rbp produces an 8 - bit operand if possible let operand8 ltl_operand = match ltl_operand with | Ltltree.Reg r -> reg (register8 (register64 r)) | Ltltree.Spilled offset -> ind ~ofs:(offset) rbp let get_fun_entry fun_name = let rec aux = function | [] -> raise (Error "function not found") | (fun_def : Ltltree.deffun) :: fun_def_list -> if fun_def.fun_name = fun_name then fun_def else aux fun_def_list in aux !funs let rec lin ltl_map l = if not (Hashtbl.mem visited_labels l) then begin Hashtbl.add visited_labels l (); instr ltl_map l (Label.M.find l ltl_map) end else begin need_label l; emit_wl (jmp (l :> string)) end and instr ltl_map l = function | Ltltree.Econst (n, r, l1) -> emit l (movq (imm32 n) (operand r)); lin ltl_map l1 | Ltltree.Eload (r1, i, r2, l1) -> let op1 = ind ~ofs:i (register64 r1) in let op2 = reg (register64 r2) in emit l (movq op1 op2); lin ltl_map l1 | Ltltree.Estore (r1, r2, i, l1) -> let op1 = reg (register64 r1) in let op2 = ind ~ofs:i (register64 r2) in emit l (movq op1 op2); lin ltl_map l1 | Ltltree.Egoto l1 -> if Hashtbl.mem visited_labels l1 then begin need_label l1; emit l (jmp (l1 :> string)) end else begin emit_label l; lin ltl_map l1 end | Ltltree.Ereturn -> emit l ret | Ltltree.Emunop (unop, op, l1) -> begin match unop with | Maddi i32 -> let op1 = imm32 i32 in let op2 = operand op in emit l (addq op1 op2); lin ltl_map l1 | Msetei i32 -> let op1 = imm32 i32 in let op2 
= operand op in let op2_8bits = operand8 op in emit l (cmpq op1 op2); emit_wl (sete op2_8bits); lin ltl_map l1 | Msetnei i32 -> let op1 = imm32 i32 in let op2 = operand op in let op2_8bits = operand8 op in emit l (cmpq op1 op2); emit_wl (setne op2_8bits); lin ltl_map l1 end | Ltltree.Embinop (binop, op1, op2, l1) -> let op2_8bits = operand8 op2 in let op1 = operand op1 in let op2 = operand op2 in begin match binop with | Mmov -> emit l (movq op1 op2); lin ltl_map l1 | Madd -> emit l (addq op1 op2); lin ltl_map l1 | Msub -> emit l (subq op1 op2); lin ltl_map l1 | Mmul -> emit l (imulq op1 op2); lin ltl_map l1 | Mdiv -> emit l cqto; emit_wl (idivq op1); lin ltl_map l1 | Msete -> emit l (cmpq op1 op2); emit_wl (sete op2_8bits); lin ltl_map l1 | Msetne -> emit l (cmpq op1 op2); emit_wl (setne op2_8bits); lin ltl_map l1 | Msetl -> emit l (cmpq op1 op2); emit_wl (setl op2_8bits); lin ltl_map l1 | Msetle -> emit l (cmpq op1 op2); emit_wl (setle op2_8bits); lin ltl_map l1 | Msetg -> emit l (cmpq op1 op2); emit_wl (setg op2_8bits); lin ltl_map l1 | Msetge -> emit l (cmpq op1 op2); emit_wl (setge op2_8bits); lin ltl_map l1 end | Emubranch (branch, op, l1, l2) -> begin match branch with | Mjz -> let op1 = operand8 op in let tmp_op = reg (register8 (register64 Register.tmp1)) in emit l (movb op1 tmp_op); emit_wl (testb tmp_op tmp_op); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jz (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jnz (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jz (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjnz -> let op1 = operand8 op in let tmp_op = reg (register8 (register64 Register.tmp1)) in emit l (movb op1 tmp_op); emit_wl (testb tmp_op tmp_op); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jnz (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else 
if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jz (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jnz (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjlei i32 -> let op1 = operand op in emit l (cmpq (imm32 i32) op1); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jle (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jg (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jle (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjgi i32 -> let op1 = operand op in emit l (cmpq (imm32 i32) op1); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jg (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jle (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jg (l1 :> string)); emit_wl (jmp (l2 :> string)) end end | Embbranch (branch, op1, op2, l1, l2) -> let op1 = operand op1 in let op2 = operand op2 in emit l (cmpq op1 op2); begin match branch with | Mjl -> if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jl (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jge (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jl (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjle -> if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jle (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jg (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jle (l1 :> string)); emit_wl (jmp (l2 :> string)) 
end end | Epush (op, l1) -> let op = operand op in emit l (pushq op); lin ltl_map l1 | Ecall (id, l1) -> let fun_name = try let fun_def = get_fun_entry id in fun_def.fun_name with Error _ -> if id = "sbrk" || id = "putchar" then id else raise (Error "Call to undefined function") in emit l (call fun_name); lin ltl_map l1 | Epop (r, l1) -> emit l (popq (register64 r)); lin ltl_map l1 let necessary_label = function | Code _ -> true | Label l -> Hashtbl.mem labels l let translate_function (fun_def : Ltltree.deffun) = code := []; lin fun_def.fun_body fun_def.fun_entry; code := List.rev !code; code := List.filter necessary_label !code; !code let concatenate_asm_text asm_text = function | Code c -> asm_text ++ c | Label l -> asm_text ++ (label (l :> string)) let program (file : Ltltree.file) = funs := file.funs; let code_text = ref (nop ++ (globl "main")) in let add_function_code (fun_def : Ltltree.deffun) = let new_code_text_fragment = translate_function fun_def in code_text := !code_text ++ (label fun_def.fun_name); code_text := List.fold_left concatenate_asm_text !code_text new_code_text_fragment in List.iter add_function_code !funs; {text = !code_text; data = nop}
null
https://raw.githubusercontent.com/victorvianna/mini-gcc/04659816d0a1097ae12b57243572fff9e91c0b13/linear.ml
ocaml
open X86_64 open Format exception Error of string let visited_labels = Hashtbl.create 17 type instr = Code of X86_64.text | Label of Label.t let code = ref [] let emit l instr = code := Code instr :: Label l :: !code let emit_wl instr = code := Code instr :: !code let emit_label l = code := Label l :: !code let labels = Hashtbl.create 17 let need_label l = Hashtbl.add labels l () let funs = ref [] produces an 64 - bit operand if possible let operand ltl_operand = match ltl_operand with | Ltltree.Reg r -> reg (register64 r) | Ltltree.Spilled offset -> ind ~ofs:(offset) rbp produces an 8 - bit operand if possible let operand8 ltl_operand = match ltl_operand with | Ltltree.Reg r -> reg (register8 (register64 r)) | Ltltree.Spilled offset -> ind ~ofs:(offset) rbp let get_fun_entry fun_name = let rec aux = function | [] -> raise (Error "function not found") | (fun_def : Ltltree.deffun) :: fun_def_list -> if fun_def.fun_name = fun_name then fun_def else aux fun_def_list in aux !funs let rec lin ltl_map l = if not (Hashtbl.mem visited_labels l) then begin Hashtbl.add visited_labels l (); instr ltl_map l (Label.M.find l ltl_map) end else begin need_label l; emit_wl (jmp (l :> string)) end and instr ltl_map l = function | Ltltree.Econst (n, r, l1) -> emit l (movq (imm32 n) (operand r)); lin ltl_map l1 | Ltltree.Eload (r1, i, r2, l1) -> let op1 = ind ~ofs:i (register64 r1) in let op2 = reg (register64 r2) in emit l (movq op1 op2); lin ltl_map l1 | Ltltree.Estore (r1, r2, i, l1) -> let op1 = reg (register64 r1) in let op2 = ind ~ofs:i (register64 r2) in emit l (movq op1 op2); lin ltl_map l1 | Ltltree.Egoto l1 -> if Hashtbl.mem visited_labels l1 then begin need_label l1; emit l (jmp (l1 :> string)) end else begin emit_label l; lin ltl_map l1 end | Ltltree.Ereturn -> emit l ret | Ltltree.Emunop (unop, op, l1) -> begin match unop with | Maddi i32 -> let op1 = imm32 i32 in let op2 = operand op in emit l (addq op1 op2); lin ltl_map l1 | Msetei i32 -> let op1 = imm32 i32 in let op2 
= operand op in let op2_8bits = operand8 op in emit l (cmpq op1 op2); emit_wl (sete op2_8bits); lin ltl_map l1 | Msetnei i32 -> let op1 = imm32 i32 in let op2 = operand op in let op2_8bits = operand8 op in emit l (cmpq op1 op2); emit_wl (setne op2_8bits); lin ltl_map l1 end | Ltltree.Embinop (binop, op1, op2, l1) -> let op2_8bits = operand8 op2 in let op1 = operand op1 in let op2 = operand op2 in begin match binop with | Mmov -> emit l (movq op1 op2); lin ltl_map l1 | Madd -> emit l (addq op1 op2); lin ltl_map l1 | Msub -> emit l (subq op1 op2); lin ltl_map l1 | Mmul -> emit l (imulq op1 op2); lin ltl_map l1 | Mdiv -> emit l cqto; emit_wl (idivq op1); lin ltl_map l1 | Msete -> emit l (cmpq op1 op2); emit_wl (sete op2_8bits); lin ltl_map l1 | Msetne -> emit l (cmpq op1 op2); emit_wl (setne op2_8bits); lin ltl_map l1 | Msetl -> emit l (cmpq op1 op2); emit_wl (setl op2_8bits); lin ltl_map l1 | Msetle -> emit l (cmpq op1 op2); emit_wl (setle op2_8bits); lin ltl_map l1 | Msetg -> emit l (cmpq op1 op2); emit_wl (setg op2_8bits); lin ltl_map l1 | Msetge -> emit l (cmpq op1 op2); emit_wl (setge op2_8bits); lin ltl_map l1 end | Emubranch (branch, op, l1, l2) -> begin match branch with | Mjz -> let op1 = operand8 op in let tmp_op = reg (register8 (register64 Register.tmp1)) in emit l (movb op1 tmp_op); emit_wl (testb tmp_op tmp_op); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jz (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jnz (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jz (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjnz -> let op1 = operand8 op in let tmp_op = reg (register8 (register64 Register.tmp1)) in emit l (movb op1 tmp_op); emit_wl (testb tmp_op tmp_op); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jnz (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else 
if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jz (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jnz (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjlei i32 -> let op1 = operand op in emit l (cmpq (imm32 i32) op1); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jle (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jg (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jle (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjgi i32 -> let op1 = operand op in emit l (cmpq (imm32 i32) op1); if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jg (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jle (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jg (l1 :> string)); emit_wl (jmp (l2 :> string)) end end | Embbranch (branch, op1, op2, l1, l2) -> let op1 = operand op1 in let op2 = operand op2 in emit l (cmpq op1 op2); begin match branch with | Mjl -> if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jl (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jge (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jl (l1 :> string)); emit_wl (jmp (l2 :> string)) end | Mjle -> if not (Hashtbl.mem visited_labels l2) then begin need_label l1; emit_wl (jle (l1 :> string)); lin ltl_map l2; lin ltl_map l1 end else if not (Hashtbl.mem visited_labels l1) then begin need_label l2; emit_wl (jg (l2 :> string)); lin ltl_map l1; lin ltl_map l2 end else begin need_label l1; need_label l2; emit_wl (jle (l1 :> string)); emit_wl (jmp (l2 :> string)) 
end end | Epush (op, l1) -> let op = operand op in emit l (pushq op); lin ltl_map l1 | Ecall (id, l1) -> let fun_name = try let fun_def = get_fun_entry id in fun_def.fun_name with Error _ -> if id = "sbrk" || id = "putchar" then id else raise (Error "Call to undefined function") in emit l (call fun_name); lin ltl_map l1 | Epop (r, l1) -> emit l (popq (register64 r)); lin ltl_map l1 let necessary_label = function | Code _ -> true | Label l -> Hashtbl.mem labels l let translate_function (fun_def : Ltltree.deffun) = code := []; lin fun_def.fun_body fun_def.fun_entry; code := List.rev !code; code := List.filter necessary_label !code; !code let concatenate_asm_text asm_text = function | Code c -> asm_text ++ c | Label l -> asm_text ++ (label (l :> string)) let program (file : Ltltree.file) = funs := file.funs; let code_text = ref (nop ++ (globl "main")) in let add_function_code (fun_def : Ltltree.deffun) = let new_code_text_fragment = translate_function fun_def in code_text := !code_text ++ (label fun_def.fun_name); code_text := List.fold_left concatenate_asm_text !code_text new_code_text_fragment in List.iter add_function_code !funs; {text = !code_text; data = nop}
dddd433457478d8a395fb3c8ffed9f487a2b64fe736341402fe67f3070083a6c
kaos/ecapnp
ecapnp_promise_sup.erl
%%%------------------------------------------------------------------- @author < > ( C ) 2014 , %%% @doc %%% %%% @end Created : 31 May 2014 by < > %%%------------------------------------------------------------------- -module(ecapnp_promise_sup). -behaviour(supervisor). %% API -export([start_link/0, start_promise/0, start_promise/1]). %% Supervisor callbacks -export([init/1]). -define(SERVER, ?MODULE). %%%=================================================================== %%% API functions %%%=================================================================== %%-------------------------------------------------------------------- %% @doc %% Starts the supervisor %% ( ) - > { ok , Pid } | ignore | { error , Error } %% @end %%-------------------------------------------------------------------- start_link() -> supervisor:start_link({local, ?SERVER}, ?MODULE, []). %%-------------------------------------------------------------------- start_promise() -> start_promise([]). start_promise(Opts) -> supervisor:start_child(?SERVER, [Opts]). %%%=================================================================== %%% Supervisor callbacks %%%=================================================================== %%-------------------------------------------------------------------- @private %% @doc %% Whenever a supervisor is started using supervisor:start_link/[2,3], %% this function is called by the new process to find out about %% restart strategy, maximum restart frequency and child %% specifications. 
%% ) - > { ok , { SupFlags , [ ChildSpec ] } } | %% ignore | %% {error, Reason} %% @end %%-------------------------------------------------------------------- init([]) -> RestartStrategy = simple_one_for_one, MaxRestarts = 10, MaxSecondsBetweenRestarts = 3600, SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts}, Restart = transient, Shutdown = 2000, Type = worker, AChild = {promise, {ecapnp_promise, start_link, []}, Restart, Shutdown, Type, [ecapnp_promise]}, {ok, {SupFlags, [AChild]}}. %%%=================================================================== Internal functions %%%===================================================================
null
https://raw.githubusercontent.com/kaos/ecapnp/f351c07730ba134b41b137dd6a6b93069a0908fc/src/ecapnp_promise_sup.erl
erlang
------------------------------------------------------------------- @doc @end ------------------------------------------------------------------- API Supervisor callbacks =================================================================== API functions =================================================================== -------------------------------------------------------------------- @doc Starts the supervisor @end -------------------------------------------------------------------- -------------------------------------------------------------------- =================================================================== Supervisor callbacks =================================================================== -------------------------------------------------------------------- @doc Whenever a supervisor is started using supervisor:start_link/[2,3], this function is called by the new process to find out about restart strategy, maximum restart frequency and child specifications. ignore | {error, Reason} @end -------------------------------------------------------------------- =================================================================== ===================================================================
@author < > ( C ) 2014 , Created : 31 May 2014 by < > -module(ecapnp_promise_sup). -behaviour(supervisor). -export([start_link/0, start_promise/0, start_promise/1]). -export([init/1]). -define(SERVER, ?MODULE). ( ) - > { ok , Pid } | ignore | { error , Error } start_link() -> supervisor:start_link({local, ?SERVER}, ?MODULE, []). start_promise() -> start_promise([]). start_promise(Opts) -> supervisor:start_child(?SERVER, [Opts]). @private ) - > { ok , { SupFlags , [ ChildSpec ] } } | init([]) -> RestartStrategy = simple_one_for_one, MaxRestarts = 10, MaxSecondsBetweenRestarts = 3600, SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts}, Restart = transient, Shutdown = 2000, Type = worker, AChild = {promise, {ecapnp_promise, start_link, []}, Restart, Shutdown, Type, [ecapnp_promise]}, {ok, {SupFlags, [AChild]}}. Internal functions
de2400bb2822c723b08bb64bc3577d29ac1c0e90da9e543a6afdcf59719399e3
amperity/vault-clj
project.clj
(defproject amperity/vault-clj "1.1.4-SNAPSHOT" :description "Clojure client for the Vault secret management system." :url "-clj" :license {:name "Apache License" :url "-2.0"} :deploy-branches ["master"] :pedantic? :abort :plugins [[lein-cloverage "1.2.2"]] :dependencies [[org.clojure/clojure "1.11.1"] [org.clojure/tools.logging "1.2.4"] [amperity/envoy "1.0.0"] [cheshire "5.11.0"] [http-kit "2.5.3"] [com.stuartsierra/component "1.1.0"]] :test-selectors {:default (complement :integration) :integration :integration} :profiles {:dev {:dependencies [[org.clojure/tools.trace "0.7.11"] [ch.qos.logback/logback-classic "1.2.11"]] :jvm-opts ["-Dclojure.main.report=stderr"]} :repl {:source-paths ["dev"] :dependencies [[org.clojure/tools.namespace "1.3.0"]] :jvm-opts ["-Dvault.log.appender=repl"]}})
null
https://raw.githubusercontent.com/amperity/vault-clj/43ea213822440766df6b1fae4aa1282ab9d65f7e/project.clj
clojure
(defproject amperity/vault-clj "1.1.4-SNAPSHOT" :description "Clojure client for the Vault secret management system." :url "-clj" :license {:name "Apache License" :url "-2.0"} :deploy-branches ["master"] :pedantic? :abort :plugins [[lein-cloverage "1.2.2"]] :dependencies [[org.clojure/clojure "1.11.1"] [org.clojure/tools.logging "1.2.4"] [amperity/envoy "1.0.0"] [cheshire "5.11.0"] [http-kit "2.5.3"] [com.stuartsierra/component "1.1.0"]] :test-selectors {:default (complement :integration) :integration :integration} :profiles {:dev {:dependencies [[org.clojure/tools.trace "0.7.11"] [ch.qos.logback/logback-classic "1.2.11"]] :jvm-opts ["-Dclojure.main.report=stderr"]} :repl {:source-paths ["dev"] :dependencies [[org.clojure/tools.namespace "1.3.0"]] :jvm-opts ["-Dvault.log.appender=repl"]}})
4eb71f16c4ce55a4c2b70f0c0e42def8d670cc00af5fd2445fd7f053638b9fa3
mirage/mirage-tcpip
tcpv4v6_socket.ml
* Copyright ( c ) 2014 Anil Madhavapeddy < > * Copyright ( c ) 2014 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2014 Anil Madhavapeddy <> * Copyright (c) 2014 Nicolas Ojeda Bar <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*) let src = Logs.Src.create "tcpv4v6-socket" ~doc:"TCP socket v4v6 (platform native)" module Log = (val Logs.src_log src : Logs.LOG) open Lwt.Infix type ipaddr = Ipaddr.t type flow = Lwt_unix.file_descr type t = { interface: [ `Any | `Ip of Unix.inet_addr * Unix.inet_addr | `V4_only of Unix.inet_addr | `V6_only of Unix.inet_addr ]; (* source ip to bind to *) mutable active_connections : Lwt_unix.file_descr list; listen_sockets : (int, Lwt_unix.file_descr list) Hashtbl.t; mutable switched_off : unit Lwt.t; } let set_switched_off t switched_off = t.switched_off <- Lwt.pick [ switched_off; t.switched_off ] let any_v6 = Ipaddr_unix.V6.to_inet_addr Ipaddr.V6.unspecified include Tcp_socket let connect ~ipv4_only ~ipv6_only ipv4 ipv6 = let interface = let v4 = Ipaddr.V4.Prefix.address ipv4 in let v4_unix = Ipaddr_unix.V4.to_inet_addr v4 in if ipv4_only then `V4_only v4_unix else if ipv6_only then `V6_only (match ipv6 with | None -> any_v6 | Some x -> Ipaddr_unix.V6.to_inet_addr (Ipaddr.V6.Prefix.address x)) else match ipv6, Ipaddr.V4.(compare v4 any) with | None, 0 -> `Any | None, _ -> `Ip (v4_unix, any_v6) | Some x, v4_any -> let v6 = Ipaddr.V6.Prefix.address x in if Ipaddr.V6.(compare v6 unspecified = 0) && v4_any = 0 then `Any else `Ip (v4_unix, Ipaddr_unix.V6.to_inet_addr v6) in Lwt.return {interface; active_connections = []; listen_sockets = Hashtbl.create 7; switched_off = fst (Lwt.wait ())} let disconnect t = Lwt_list.iter_p close t.active_connections >>= fun () -> Lwt_list.iter_p close (Hashtbl.fold (fun _ fd acc -> fd @ acc) t.listen_sockets []) >>= fun () -> Lwt.cancel t.switched_off ; Lwt.return_unit let dst fd = match Lwt_unix.getpeername fd with | Unix.ADDR_UNIX _ -> raise (Failure "unexpected: got a unix instead of tcp sock") | Unix.ADDR_INET (ia,port) -> let ip = Ipaddr_unix.of_inet_addr ia in let ip = match Ipaddr.to_v4 ip with | None -> ip | Some v4 -> Ipaddr.V4 v4 in ip, port let create_connection ?keepalive t (dst,dst_port) = match match dst, 
t.interface with | Ipaddr.V4 _, (`Any | `Ip _ | `V4_only _) -> Ok (Lwt_unix.PF_INET, fst) | Ipaddr.V6 _, (`Any | `Ip _ | `V6_only _) -> Ok (Lwt_unix.PF_INET6, snd) | Ipaddr.V4 _, `V6_only _ -> Error (`Msg "Attempted to connect to an IPv4 host, but stack is IPv6 only") | Ipaddr.V6 _, `V4_only _ -> Error (`Msg "Attempted to connect to an IPv6 host, but stack is IPv4 only") with | Error (`Msg m) -> Lwt.return (Error (`Exn (Invalid_argument m))) | Ok (family, proj) -> let fd = Lwt_unix.(socket family SOCK_STREAM 0) in Lwt.catch (fun () -> (match t.interface with | `Any -> Lwt.return_unit | `Ip p -> Lwt_unix.bind fd (Lwt_unix.ADDR_INET (proj p, 0)) | `V4_only ip -> Lwt_unix.bind fd (Lwt_unix.ADDR_INET (ip, 0)) | `V6_only ip -> Lwt_unix.bind fd (Lwt_unix.ADDR_INET (ip, 0))) >>= fun () -> Lwt_unix.connect fd (Lwt_unix.ADDR_INET ((Ipaddr_unix.to_inet_addr dst), dst_port)) >>= fun () -> ( match keepalive with | None -> () | Some { Tcpip.Tcp.Keepalive.after; interval; probes } -> Tcp_socket_options.enable_keepalive ~fd ~after ~interval ~probes ); t.active_connections <- fd :: t.active_connections; Lwt.return (Ok fd)) (fun exn -> close fd >>= fun () -> Lwt.return (Error (`Exn exn))) let unlisten t ~port = match Hashtbl.find_opt t.listen_sockets port with | None -> () | Some fds -> Hashtbl.remove t.listen_sockets port; try List.iter (fun fd -> Unix.close (Lwt_unix.unix_file_descr fd)) fds with _ -> () let listen t ~port ?keepalive callback = if port < 0 || port > 65535 then raise (Invalid_argument (Printf.sprintf "invalid port number (%d)" port)); unlisten t ~port; let fds = match t.interface with | `Any -> let fd = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt fd SO_REUSEADDR true); Lwt_unix.(setsockopt fd IPV6_ONLY false); [ (fd, Lwt_unix.ADDR_INET (any_v6, port)) ] | `Ip (v4, v6) -> let fd = Lwt_unix.(socket PF_INET SOCK_STREAM 0) in Lwt_unix.(setsockopt fd SO_REUSEADDR true); let fd' = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt fd' 
SO_REUSEADDR true); Lwt_unix.(setsockopt fd' IPV6_ONLY true); [ (fd, Lwt_unix.ADDR_INET (v4, port)) ; (fd', Lwt_unix.ADDR_INET (v6, port)) ] | `V4_only ip -> let fd = Lwt_unix.(socket PF_INET SOCK_STREAM 0) in Lwt_unix.setsockopt fd Lwt_unix.SO_REUSEADDR true; [ (fd, Lwt_unix.ADDR_INET (ip, port)) ] | `V6_only ip -> let fd = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt fd SO_REUSEADDR true); Lwt_unix.(setsockopt fd IPV6_ONLY true); [ (fd, Lwt_unix.ADDR_INET (ip, port)) ] in List.iter (fun (fd, addr) -> Unix.bind (Lwt_unix.unix_file_descr fd) addr; Hashtbl.replace t.listen_sockets port (List.map fst fds); Lwt_unix.listen fd 10; (* FIXME: we should not ignore the result *) Lwt.async (fun () -> (* TODO cancellation *) let rec loop () = if not (Lwt.is_sleeping t.switched_off) then raise Lwt.Canceled ; Lwt.catch (fun () -> Lwt_unix.accept fd >|= fun (afd, _) -> t.active_connections <- afd :: t.active_connections; (match keepalive with | None -> () | Some { Tcpip.Tcp.Keepalive.after; interval; probes } -> Tcp_socket_options.enable_keepalive ~fd:afd ~after ~interval ~probes); Lwt.async (fun () -> Lwt.catch (fun () -> callback afd) (fun exn -> Log.warn (fun m -> m "error %s in callback" (Printexc.to_string exn)) ; close afd)); `Continue) (function | Unix.Unix_error (Unix.EBADF, _, _) -> Log.warn (fun m -> m "error bad file descriptor in accept") ; Lwt.return `Stop | exn -> Log.warn (fun m -> m "error %s in accept" (Printexc.to_string exn)) ; Lwt.return `Continue) >>= function | `Continue -> loop () | `Stop -> Lwt.return_unit in Lwt.catch loop ignore_canceled >>= fun () -> close fd)) fds
null
https://raw.githubusercontent.com/mirage/mirage-tcpip/805d75c433bb764d8cfa434ffd1411b8704497b2/src/stack-unix/tcpv4v6_socket.ml
ocaml
source ip to bind to FIXME: we should not ignore the result TODO cancellation
* Copyright ( c ) 2014 Anil Madhavapeddy < > * Copyright ( c ) 2014 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2014 Anil Madhavapeddy <> * Copyright (c) 2014 Nicolas Ojeda Bar <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*) let src = Logs.Src.create "tcpv4v6-socket" ~doc:"TCP socket v4v6 (platform native)" module Log = (val Logs.src_log src : Logs.LOG) open Lwt.Infix type ipaddr = Ipaddr.t type flow = Lwt_unix.file_descr type t = { mutable active_connections : Lwt_unix.file_descr list; listen_sockets : (int, Lwt_unix.file_descr list) Hashtbl.t; mutable switched_off : unit Lwt.t; } let set_switched_off t switched_off = t.switched_off <- Lwt.pick [ switched_off; t.switched_off ] let any_v6 = Ipaddr_unix.V6.to_inet_addr Ipaddr.V6.unspecified include Tcp_socket let connect ~ipv4_only ~ipv6_only ipv4 ipv6 = let interface = let v4 = Ipaddr.V4.Prefix.address ipv4 in let v4_unix = Ipaddr_unix.V4.to_inet_addr v4 in if ipv4_only then `V4_only v4_unix else if ipv6_only then `V6_only (match ipv6 with | None -> any_v6 | Some x -> Ipaddr_unix.V6.to_inet_addr (Ipaddr.V6.Prefix.address x)) else match ipv6, Ipaddr.V4.(compare v4 any) with | None, 0 -> `Any | None, _ -> `Ip (v4_unix, any_v6) | Some x, v4_any -> let v6 = Ipaddr.V6.Prefix.address x in if Ipaddr.V6.(compare v6 unspecified = 0) && v4_any = 0 then `Any else `Ip (v4_unix, Ipaddr_unix.V6.to_inet_addr v6) in Lwt.return {interface; active_connections = []; listen_sockets = Hashtbl.create 7; switched_off = fst (Lwt.wait ())} let disconnect t = Lwt_list.iter_p close t.active_connections >>= fun () -> Lwt_list.iter_p close (Hashtbl.fold (fun _ fd acc -> fd @ acc) t.listen_sockets []) >>= fun () -> Lwt.cancel t.switched_off ; Lwt.return_unit let dst fd = match Lwt_unix.getpeername fd with | Unix.ADDR_UNIX _ -> raise (Failure "unexpected: got a unix instead of tcp sock") | Unix.ADDR_INET (ia,port) -> let ip = Ipaddr_unix.of_inet_addr ia in let ip = match Ipaddr.to_v4 ip with | None -> ip | Some v4 -> Ipaddr.V4 v4 in ip, port let create_connection ?keepalive t (dst,dst_port) = match match dst, t.interface with | Ipaddr.V4 _, (`Any | `Ip _ | `V4_only _) -> Ok (Lwt_unix.PF_INET, fst) | Ipaddr.V6 _, (`Any | `Ip _ | `V6_only _) -> Ok 
(Lwt_unix.PF_INET6, snd) | Ipaddr.V4 _, `V6_only _ -> Error (`Msg "Attempted to connect to an IPv4 host, but stack is IPv6 only") | Ipaddr.V6 _, `V4_only _ -> Error (`Msg "Attempted to connect to an IPv6 host, but stack is IPv4 only") with | Error (`Msg m) -> Lwt.return (Error (`Exn (Invalid_argument m))) | Ok (family, proj) -> let fd = Lwt_unix.(socket family SOCK_STREAM 0) in Lwt.catch (fun () -> (match t.interface with | `Any -> Lwt.return_unit | `Ip p -> Lwt_unix.bind fd (Lwt_unix.ADDR_INET (proj p, 0)) | `V4_only ip -> Lwt_unix.bind fd (Lwt_unix.ADDR_INET (ip, 0)) | `V6_only ip -> Lwt_unix.bind fd (Lwt_unix.ADDR_INET (ip, 0))) >>= fun () -> Lwt_unix.connect fd (Lwt_unix.ADDR_INET ((Ipaddr_unix.to_inet_addr dst), dst_port)) >>= fun () -> ( match keepalive with | None -> () | Some { Tcpip.Tcp.Keepalive.after; interval; probes } -> Tcp_socket_options.enable_keepalive ~fd ~after ~interval ~probes ); t.active_connections <- fd :: t.active_connections; Lwt.return (Ok fd)) (fun exn -> close fd >>= fun () -> Lwt.return (Error (`Exn exn))) let unlisten t ~port = match Hashtbl.find_opt t.listen_sockets port with | None -> () | Some fds -> Hashtbl.remove t.listen_sockets port; try List.iter (fun fd -> Unix.close (Lwt_unix.unix_file_descr fd)) fds with _ -> () let listen t ~port ?keepalive callback = if port < 0 || port > 65535 then raise (Invalid_argument (Printf.sprintf "invalid port number (%d)" port)); unlisten t ~port; let fds = match t.interface with | `Any -> let fd = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt fd SO_REUSEADDR true); Lwt_unix.(setsockopt fd IPV6_ONLY false); [ (fd, Lwt_unix.ADDR_INET (any_v6, port)) ] | `Ip (v4, v6) -> let fd = Lwt_unix.(socket PF_INET SOCK_STREAM 0) in Lwt_unix.(setsockopt fd SO_REUSEADDR true); let fd' = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt fd' SO_REUSEADDR true); Lwt_unix.(setsockopt fd' IPV6_ONLY true); [ (fd, Lwt_unix.ADDR_INET (v4, port)) ; (fd', Lwt_unix.ADDR_INET (v6, port)) 
] | `V4_only ip -> let fd = Lwt_unix.(socket PF_INET SOCK_STREAM 0) in Lwt_unix.setsockopt fd Lwt_unix.SO_REUSEADDR true; [ (fd, Lwt_unix.ADDR_INET (ip, port)) ] | `V6_only ip -> let fd = Lwt_unix.(socket PF_INET6 SOCK_STREAM 0) in Lwt_unix.(setsockopt fd SO_REUSEADDR true); Lwt_unix.(setsockopt fd IPV6_ONLY true); [ (fd, Lwt_unix.ADDR_INET (ip, port)) ] in List.iter (fun (fd, addr) -> Unix.bind (Lwt_unix.unix_file_descr fd) addr; Hashtbl.replace t.listen_sockets port (List.map fst fds); Lwt_unix.listen fd 10; Lwt.async (fun () -> let rec loop () = if not (Lwt.is_sleeping t.switched_off) then raise Lwt.Canceled ; Lwt.catch (fun () -> Lwt_unix.accept fd >|= fun (afd, _) -> t.active_connections <- afd :: t.active_connections; (match keepalive with | None -> () | Some { Tcpip.Tcp.Keepalive.after; interval; probes } -> Tcp_socket_options.enable_keepalive ~fd:afd ~after ~interval ~probes); Lwt.async (fun () -> Lwt.catch (fun () -> callback afd) (fun exn -> Log.warn (fun m -> m "error %s in callback" (Printexc.to_string exn)) ; close afd)); `Continue) (function | Unix.Unix_error (Unix.EBADF, _, _) -> Log.warn (fun m -> m "error bad file descriptor in accept") ; Lwt.return `Stop | exn -> Log.warn (fun m -> m "error %s in accept" (Printexc.to_string exn)) ; Lwt.return `Continue) >>= function | `Continue -> loop () | `Stop -> Lwt.return_unit in Lwt.catch loop ignore_canceled >>= fun () -> close fd)) fds
fd538b3adafb038912710f6181b2a904d4f807e777b82743b492389a88511167
nekketsuuu/tapis
constraintsT.ml
module ConstraintsT = Set.Make (struct type t = Type.t * Type.t let compare = compare end) include ConstraintsT open Sbst (* sbst : Type.t sbst -> t -> t *) let sbst (sigma : Type.t sbst) ct = (* (* * Set.map has a bug in OCaml 4.04.0 * *) let sbst' (t1, t2) = (Type.sbst sigma t1, Type.sbst sigma t2) in map sbst' ct *) let sbst' (t1, t2) ct = ConstraintsT.add (Type.sbst sigma t1, Type.sbst sigma t2) ct in ConstraintsT.fold sbst' ct ConstraintsT.empty (* for debug *) let rec to_list c = print_endline @@ string_of_int ; if ConstraintsT.is_empty c then [] else let elm = ConstraintsT.choose c in elm :: (to_list @@ ConstraintsT.remove elm c)
null
https://raw.githubusercontent.com/nekketsuuu/tapis/a61ecff95eaf2af27a85290d2a5f99341d28b43c/src/constraintsT.ml
ocaml
sbst : Type.t sbst -> t -> t (* * Set.map has a bug in OCaml 4.04.0 * for debug
module ConstraintsT = Set.Make (struct type t = Type.t * Type.t let compare = compare end) include ConstraintsT open Sbst let sbst (sigma : Type.t sbst) ct = let sbst' (t1, t2) = (Type.sbst sigma t1, Type.sbst sigma t2) in map sbst' ct *) let sbst' (t1, t2) ct = ConstraintsT.add (Type.sbst sigma t1, Type.sbst sigma t2) ct in ConstraintsT.fold sbst' ct ConstraintsT.empty let rec to_list c = print_endline @@ string_of_int ; if ConstraintsT.is_empty c then [] else let elm = ConstraintsT.choose c in elm :: (to_list @@ ConstraintsT.remove elm c)
4d99bf59b04c883427eff91ee5c69c4767b7e623cb2bdbb59521b0f1ac80021e
wilbowma/cur
Tactics3.rkt
#lang cur (require cur/stdlib/sugar cur/stdlib/equality cur/ntac/base cur/ntac/standard cur/ntac/rewrite rackunit/turnstile+ "../rackunit-ntac.rkt") Software Foundations Tactics.v , part 3 of 5 ;; copied from Poly-pairs.rkt (data bool : 0 Type (true : bool) (false : bool)) (data nat : 0 Type (O : nat) ; letter capital "O" (S : (-> nat nat))) (define/rec/match plus : nat [m : nat] -> nat [O => m] [(S n-1) => (S (plus n-1 m))]) (define/rec/match beq-nat : nat nat -> bool [O O => true] [O (S _) => false] [(S _) O => false] [(S n*) (S m*) => (beq-nat n* m*)]) re - define # % datum to use the new ` nat ` (define-syntax #%datum (syntax-parser [(_ . n:exact-nonnegative-integer) #:when (zero? (syntax-e #'n)) #'O] [(_ . n:exact-nonnegative-integer) #`(S (#%datum . #,(- (syntax-e #'n) 1)))])) continuing Tactics.v -------------------- (define-theorem f-equal (∀ [A B : Type] [f : (-> A B)] [x y : A] (-> (== x y) (== (f x) (f y)))) (by-intros A B f x y H) (by-rewrite H) reflexivity) (define-theorem S_inj (forall (n m : nat) (b : bool) (-> (== (beq-nat (S n) (S m)) b) (== (beq-nat n m) b))) (by-intros n m b H) ;; simpl in H ; unneeded (by-apply H)) (define-theorem silly3b (forall (n : nat) (-> (-> (== (beq-nat n 5) true) ; eq (== (beq-nat (S (S n)) 7) true)) (== true (beq-nat n 5)) ; H (== true (beq-nat (S (S n)) 7)))) (by-intros n eq H) (by-symmetry #:in H) (by-apply eq #:in H) (by-symmetry #:in H) by-assumption) (check-type silly3b : (forall (n : nat) (-> (-> (== (beq-nat n 5) true) ; eq (== (beq-nat (S (S n)) 7) true)) (== true (beq-nat n 5)) ; H (== true (beq-nat (S (S n)) 7))))) (define-theorem plus-n-Sm (∀ [n : nat] [m : nat] (== nat (S (plus n m)) (plus n (S m)))) (by-intro n) (by-intro m) simpl (by-induction n #:as [() (n-1 IH)]) subgoal 1 simpl reflexivity subgoal 1 simpl (by-rewrite IH) reflexivity) (define-theorem plus-n-n-injective (∀ [n m : nat] (-> (== (plus n n) (plus m m)) (== n m))) (by-intro n) (by-induction n #:as [() (n-1 IH)]) n = 0 (by-intros m H) 
(by-destruct m #:as [() (m-1)]) reflexivity (by-inversion H) ;; n = S n-1 (by-intros m H) (by-destruct m #:as [() (m-1)]) (by-inversion H) (by-rewriteL plus-n-Sm #:in H) (by-rewriteL plus-n-Sm #:in H) (by-inversion H #:as H2) (by-apply IH #:in H2) (by-rewrite H2) reflexivity) (check-type plus-n-n-injective : (∀ [n m : nat] (-> (== (plus n n) (plus m m)) (== n m)))) (require cur/stdlib/prop) ;; plus-n-n-injective CORRECT raw term: (λ (n : nat) ((new-elim n (λ n (Π (Π (m : nat) (→ (== nat (plus n n) (plus m m)) (== nat n m))))) (λ (λ (λ (m : nat) (λ (H : (== nat O (plus m m))) ((match m #:as m #:with-indices #:in nat #:return (Π (H : (== nat O (plus m m))) (== nat O m)) (O (λ (H : (== nat O (plus (O) (O)))) (λ (refl nat O)))) ((S m-1) (λ (H : (== nat O (plus (S m-1) (S m-1)))) ((new-elim H (λ y61 H (-> (== y61 (S (plus m-1 (S m-1)))) (== nat O (S m-1)))) (λ eq52 (new-elim (elim-== eq52 (λ y54 _ (match y54 #:return Type ((O) True) ((S X7) False))) I) (λ _ (== nat O (S m-1)))))) (refl nat (S (plus m-1 (S m-1)))))))) H))))) (λ n-1 IH (λ (λ (m : nat) (λ (H : (== nat (S (plus n-1 (S n-1))) (plus m m))) ((match m #:as m #:with-indices #:in nat #:return (Π (H : (== nat (S (plus n-1 (S n-1))) (plus m m))) (== nat (S n-1) m)) (O (λ (H : (== nat (S (plus n-1 (S n-1))) (plus (O) (O)))) ((new-elim H (λ y62 H (-> (== y62 O) (== nat (S n-1) O))) (λ eq55 (new-elim (elim-== eq55 (λ y57 _ (match y57 #:return Type ((O) False) ((S X7) True))) I) (λ _ (== nat (S n-1) O))))) (refl nat O)))) ((S m-1) (λ (H : (== nat (S (plus n-1 (S n-1))) (plus (S m-1) (S m-1)))) ((λ (H : (== nat (S (S (plus n-1 n-1))) (S (plus m-1 (S m-1))))) ((λ (H : (== nat (S (S (plus n-1 n-1))) (S (S (plus m-1 m-1))))) ((new-elim H (λ y65 H (-> (== y65 (S (S (plus m-1 m-1)))) (== nat (S n-1) (S m-1)))) (λ eq58 ((λ (H2 : (== nat (plus n-1 n-1) (plus m-1 m-1))) ((λ (H2 : (== nat n-1 m-1)) (new-elim (sym nat n-1 m-1 (H2)) (λ (g63 : nat) (λ (g64 : (== nat m-1 g63)) (== nat (S g63) (S m-1)))) (λ (refl nat (S m-1))))) (IH 
m-1 H2))) (f-equal nat nat (λ x59 (match (match x59 #:return nat ((O) (S (plus n-1 n-1))) ((S X7) X7)) #:return nat ((O) (plus n-1 n-1)) ((S X7) X7))) (S (S (plus n-1 n-1))) (S (S (plus m-1 m-1))) eq58)))) (refl nat (S (S (plus m-1 m-1)))))) WANT : ( = = ( S ( S ( plus n-1 n-1 ) ) ) ( S ( S ( plus m-1 m-1 ) ) ) ) (= = nat ( S ( plus m-1 m-1 ) ) ( plus m-1 ( S m-1 ) ) ) (λ (g66 : nat) (λ (g67 : (== nat (plus m-1 (S m-1)) g66)) (== nat (S (S (plus n-1 n-1))) (S g66)))) (= = nat ( S ( S ( plus n-1 n-1 ) ) ) ( S ( plus m-1 ( S m-1 ) ) ) ) ))) WANT : ( = = ( S ( S ( plus n-1 n-1 ) ) ) ( S ( plus m-1 ( S m-1 ) ) ) ) (sym nat (S (plus n-1 n-1)) (plus n-1 (S n-1)) (plus-n-Sm n-1 n-1)) ; (== nat (S (plus n-1 n-1)) (plus n-1 (S n-1))) (λ (g68 : nat) (λ (g69 : (== nat (plus n-1 (S n-1)) g68)) (== nat (S g68) (S (plus m-1 (S m-1)))))) (= = nat ( S ( plus n-1 ( S n-1 ) ) ) ( plus ( S m-1 ) ( S m-1 ) ) ) simpl (= = nat ( S ( plus n-1 ( S n-1 ) ) ) ( S ( plus m-1 ( S m-1 ) ) ) ) ))))) H)))))))) (define/rec/match double : nat -> nat [O => O] [(S n-1) => (S (S (double n-1)))]) ;; tests inversion of H with non-id base cases (define-theorem double-injective (forall (n m : nat) (-> (== (double n) (double m)) (== n m))) (by-intro n) (by-induction n #:as [() (n-1 IH)]) 1 (by-destruct m #:as [() (m-1)]) 1a 1b 2 (by-destruct m #:as [() (m-1)]) 2a 2b ; unify does nt find f - equal 's A arg (by-apply IH) (by-inversion H #:as H2) (by-rewrite H2) reflexivity) (check-type double-injective : (forall (n m : nat) (-> (== (double n) (double m)) (== n m)))) (define-theorem beq-nat-true (∀ [n m : nat] (-> (== (beq-nat n m) true) (== n m))) (by-intro n) (by-induction n #:as [() (n-1 IH)]) 1 (by-destruct m #:as [() (m-1)]) 1a 1b 2 (by-destruct m #:as [() (m-1)]) 2a 2b (by-apply IH) (by-inversion H #:as H1) (by-rewrite H1) reflexivity) (define-theorem double-injective-take2 (∀ [n m : nat] (-> (== (double n) (double m)) (== n m))) (by-intros n m) (by-generalize n) (by-induction m #:as [() (m-1 IH)]) 1 
(by-destruct n #:as [() (n-1)]) 1a 1b 2 (by-destruct n #:as [() (n-1)]) 2a 2b (by-apply IH) (by-inversion H #:as H1) (by-rewrite H1) reflexivity) (data id : 0 Type [Id : (-> nat id)]) (define/rec/match beq-id : id id -> bool [(Id n1) (Id n2) => (beq-nat n1 n2)]) ;; TODO: support auto-destructing by-intros (define-theorem beq-id-true (forall [x y : id] (-> (== (beq-id x y) true) (== x y))) (by-intros x y) (by-destruct x #:as [(m)]) (by-destruct y #:as [(n)]) (by-intro H) (by-assert H1 (== m n)) (by-apply beq-nat-true) ; prove m = n (by-apply H) (by-rewrite H1) ; return to orig goal reflexivity)
null
https://raw.githubusercontent.com/wilbowma/cur/e039c98941b3d272c6e462387df22846e10b0128/cur-test/cur/tests/ntac/software-foundations/Tactics3.rkt
racket
copied from Poly-pairs.rkt letter capital "O" simpl in H ; unneeded eq H eq H n = S n-1 plus-n-n-injective CORRECT raw term: (== nat (S (plus n-1 n-1)) (plus n-1 (S n-1))) tests inversion of H with non-id base cases unify does nt find f - equal 's A arg TODO: support auto-destructing by-intros prove m = n return to orig goal
#lang cur (require cur/stdlib/sugar cur/stdlib/equality cur/ntac/base cur/ntac/standard cur/ntac/rewrite rackunit/turnstile+ "../rackunit-ntac.rkt") Software Foundations Tactics.v , part 3 of 5 (data bool : 0 Type (true : bool) (false : bool)) (data nat : 0 Type (S : (-> nat nat))) (define/rec/match plus : nat [m : nat] -> nat [O => m] [(S n-1) => (S (plus n-1 m))]) (define/rec/match beq-nat : nat nat -> bool [O O => true] [O (S _) => false] [(S _) O => false] [(S n*) (S m*) => (beq-nat n* m*)]) re - define # % datum to use the new ` nat ` (define-syntax #%datum (syntax-parser [(_ . n:exact-nonnegative-integer) #:when (zero? (syntax-e #'n)) #'O] [(_ . n:exact-nonnegative-integer) #`(S (#%datum . #,(- (syntax-e #'n) 1)))])) continuing Tactics.v -------------------- (define-theorem f-equal (∀ [A B : Type] [f : (-> A B)] [x y : A] (-> (== x y) (== (f x) (f y)))) (by-intros A B f x y H) (by-rewrite H) reflexivity) (define-theorem S_inj (forall (n m : nat) (b : bool) (-> (== (beq-nat (S n) (S m)) b) (== (beq-nat n m) b))) (by-intros n m b H) (by-apply H)) (define-theorem silly3b (forall (n : nat) (== (beq-nat (S (S n)) 7) true)) (== true (beq-nat (S (S n)) 7)))) (by-intros n eq H) (by-symmetry #:in H) (by-apply eq #:in H) (by-symmetry #:in H) by-assumption) (check-type silly3b : (forall (n : nat) (== (beq-nat (S (S n)) 7) true)) (== true (beq-nat (S (S n)) 7))))) (define-theorem plus-n-Sm (∀ [n : nat] [m : nat] (== nat (S (plus n m)) (plus n (S m)))) (by-intro n) (by-intro m) simpl (by-induction n #:as [() (n-1 IH)]) subgoal 1 simpl reflexivity subgoal 1 simpl (by-rewrite IH) reflexivity) (define-theorem plus-n-n-injective (∀ [n m : nat] (-> (== (plus n n) (plus m m)) (== n m))) (by-intro n) (by-induction n #:as [() (n-1 IH)]) n = 0 (by-intros m H) (by-destruct m #:as [() (m-1)]) reflexivity (by-inversion H) (by-intros m H) (by-destruct m #:as [() (m-1)]) (by-inversion H) (by-rewriteL plus-n-Sm #:in H) (by-rewriteL plus-n-Sm #:in H) (by-inversion H #:as H2) (by-apply IH 
#:in H2) (by-rewrite H2) reflexivity) (check-type plus-n-n-injective : (∀ [n m : nat] (-> (== (plus n n) (plus m m)) (== n m)))) (require cur/stdlib/prop) (λ (n : nat) ((new-elim n (λ n (Π (Π (m : nat) (→ (== nat (plus n n) (plus m m)) (== nat n m))))) (λ (λ (λ (m : nat) (λ (H : (== nat O (plus m m))) ((match m #:as m #:with-indices #:in nat #:return (Π (H : (== nat O (plus m m))) (== nat O m)) (O (λ (H : (== nat O (plus (O) (O)))) (λ (refl nat O)))) ((S m-1) (λ (H : (== nat O (plus (S m-1) (S m-1)))) ((new-elim H (λ y61 H (-> (== y61 (S (plus m-1 (S m-1)))) (== nat O (S m-1)))) (λ eq52 (new-elim (elim-== eq52 (λ y54 _ (match y54 #:return Type ((O) True) ((S X7) False))) I) (λ _ (== nat O (S m-1)))))) (refl nat (S (plus m-1 (S m-1)))))))) H))))) (λ n-1 IH (λ (λ (m : nat) (λ (H : (== nat (S (plus n-1 (S n-1))) (plus m m))) ((match m #:as m #:with-indices #:in nat #:return (Π (H : (== nat (S (plus n-1 (S n-1))) (plus m m))) (== nat (S n-1) m)) (O (λ (H : (== nat (S (plus n-1 (S n-1))) (plus (O) (O)))) ((new-elim H (λ y62 H (-> (== y62 O) (== nat (S n-1) O))) (λ eq55 (new-elim (elim-== eq55 (λ y57 _ (match y57 #:return Type ((O) False) ((S X7) True))) I) (λ _ (== nat (S n-1) O))))) (refl nat O)))) ((S m-1) (λ (H : (== nat (S (plus n-1 (S n-1))) (plus (S m-1) (S m-1)))) ((λ (H : (== nat (S (S (plus n-1 n-1))) (S (plus m-1 (S m-1))))) ((λ (H : (== nat (S (S (plus n-1 n-1))) (S (S (plus m-1 m-1))))) ((new-elim H (λ y65 H (-> (== y65 (S (S (plus m-1 m-1)))) (== nat (S n-1) (S m-1)))) (λ eq58 ((λ (H2 : (== nat (plus n-1 n-1) (plus m-1 m-1))) ((λ (H2 : (== nat n-1 m-1)) (new-elim (sym nat n-1 m-1 (H2)) (λ (g63 : nat) (λ (g64 : (== nat m-1 g63)) (== nat (S g63) (S m-1)))) (λ (refl nat (S m-1))))) (IH m-1 H2))) (f-equal nat nat (λ x59 (match (match x59 #:return nat ((O) (S (plus n-1 n-1))) ((S X7) X7)) #:return nat ((O) (plus n-1 n-1)) ((S X7) X7))) (S (S (plus n-1 n-1))) (S (S (plus m-1 m-1))) eq58)))) (refl nat (S (S (plus m-1 m-1)))))) WANT : ( = = ( S ( S ( plus n-1 n-1 ) 
) ) ( S ( S ( plus m-1 m-1 ) ) ) ) (= = nat ( S ( plus m-1 m-1 ) ) ( plus m-1 ( S m-1 ) ) ) (λ (g66 : nat) (λ (g67 : (== nat (plus m-1 (S m-1)) g66)) (== nat (S (S (plus n-1 n-1))) (S g66)))) (= = nat ( S ( S ( plus n-1 n-1 ) ) ) ( S ( plus m-1 ( S m-1 ) ) ) ) ))) WANT : ( = = ( S ( S ( plus n-1 n-1 ) ) ) ( S ( plus m-1 ( S m-1 ) ) ) ) (λ (g68 : nat) (λ (g69 : (== nat (plus n-1 (S n-1)) g68)) (== nat (S g68) (S (plus m-1 (S m-1)))))) (= = nat ( S ( plus n-1 ( S n-1 ) ) ) ( plus ( S m-1 ) ( S m-1 ) ) ) simpl (= = nat ( S ( plus n-1 ( S n-1 ) ) ) ( S ( plus m-1 ( S m-1 ) ) ) ) ))))) H)))))))) (define/rec/match double : nat -> nat [O => O] [(S n-1) => (S (S (double n-1)))]) (define-theorem double-injective (forall (n m : nat) (-> (== (double n) (double m)) (== n m))) (by-intro n) (by-induction n #:as [() (n-1 IH)]) 1 (by-destruct m #:as [() (m-1)]) 1a 1b 2 (by-destruct m #:as [() (m-1)]) 2a (by-apply IH) (by-inversion H #:as H2) (by-rewrite H2) reflexivity) (check-type double-injective : (forall (n m : nat) (-> (== (double n) (double m)) (== n m)))) (define-theorem beq-nat-true (∀ [n m : nat] (-> (== (beq-nat n m) true) (== n m))) (by-intro n) (by-induction n #:as [() (n-1 IH)]) 1 (by-destruct m #:as [() (m-1)]) 1a 1b 2 (by-destruct m #:as [() (m-1)]) 2a 2b (by-apply IH) (by-inversion H #:as H1) (by-rewrite H1) reflexivity) (define-theorem double-injective-take2 (∀ [n m : nat] (-> (== (double n) (double m)) (== n m))) (by-intros n m) (by-generalize n) (by-induction m #:as [() (m-1 IH)]) 1 (by-destruct n #:as [() (n-1)]) 1a 1b 2 (by-destruct n #:as [() (n-1)]) 2a 2b (by-apply IH) (by-inversion H #:as H1) (by-rewrite H1) reflexivity) (data id : 0 Type [Id : (-> nat id)]) (define/rec/match beq-id : id id -> bool [(Id n1) (Id n2) => (beq-nat n1 n2)]) (define-theorem beq-id-true (forall [x y : id] (-> (== (beq-id x y) true) (== x y))) (by-intros x y) (by-destruct x #:as [(m)]) (by-destruct y #:as [(n)]) (by-intro H) (by-assert H1 (== m n)) (by-apply H) reflexivity)
07656b1fa34a5f2bb59b46c97d4288f2596f142ac567cd41010fb44be09d11cb
juxt/joplin
project.clj
(defproject joplin.datomic "0.3.12-SNAPSHOT" :description "Datomic support for Joplin" :url "" :scm {:name "git" :url ""} :license {:name "Eclipse Public License" :url "-v10.html"} :dependencies [[org.clojure/clojure "1.8.0"] [joplin.core "0.3.12-SNAPSHOT"] [com.datomic/datomic-free "0.9.5394" :exclusions [joda-time]]])
null
https://raw.githubusercontent.com/juxt/joplin/d190f883c47b11efafc5154237a80d36af0087d3/joplin.datomic/project.clj
clojure
(defproject joplin.datomic "0.3.12-SNAPSHOT" :description "Datomic support for Joplin" :url "" :scm {:name "git" :url ""} :license {:name "Eclipse Public License" :url "-v10.html"} :dependencies [[org.clojure/clojure "1.8.0"] [joplin.core "0.3.12-SNAPSHOT"] [com.datomic/datomic-free "0.9.5394" :exclusions [joda-time]]])
a4055b632e0c44af3b559282e2f1a82b4ff080a2f903f244d2065d98a2cf6fef
Decentralized-Pictures/T4L3NT
client_baking_scheduling.mli
(*****************************************************************************) (* *) (* Open Source License *) Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < > (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) to deal in the Software without restriction , including without limitation (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) and/or sell copies of the Software , and to permit persons to whom the (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. 
*) (* *) (*****************************************************************************) val sleep_until : Time.Protocol.t -> unit Lwt.t option val wait_for_first_event : * name : string - > ' event event * * val main : * name : string - > * cctxt:(#Protocol_client_context.full as ' a ) - > * stream:'event tzresult Lwt_stream.t - > * state_maker:('event - > ' state ) - > * pre_loop:('a - > ' state - > ' event - > unit ) - > * compute_timeout:('state - > ' ) - > * timeout_k:('a - > ' state - > ' - > unit ) - > * event_k:('a - > ' state - > ' event - > unit ) - > * finalizer:('state - > unit * unit tzresult Lwt.t * name:string -> 'event tzresult Lwt_stream.t -> 'event Lwt.t * * val main : * name:string -> * cctxt:(#Protocol_client_context.full as 'a) -> * stream:'event tzresult Lwt_stream.t -> * state_maker:('event -> 'state tzresult Lwt.t) -> * pre_loop:('a -> 'state -> 'event -> unit tzresult Lwt.t) -> * compute_timeout:('state -> 'timesup Lwt.t) -> * timeout_k:('a -> 'state -> 'timesup -> unit tzresult Lwt.t) -> * event_k:('a -> 'state -> 'event -> unit tzresult Lwt.t) -> * finalizer:('state -> unit Lwt.t) -> * unit tzresult Lwt.t *) * [ main ~name ~cctxt ~stream ~state_maker ~pre_loop ~timeout_maker ~timeout_k ~event_k ] is an infinitely running loop that monitors new events arriving on [ stream ] . The loop exits when the [ stream ] gives an error . The function [ pre_loop ] is called before the loop starts . The loop maintains a state ( of type [ ' state ] ) initialized by [ state_maker ] and passed to the callbacks [ timeout_maker ] ( used to set up waking - up timeouts ) , [ timeout_k ] ( when a computed timeout happens ) , and [ event_k ] ( when a new event arrives on the stream ) . ~event_k] is an infinitely running loop that monitors new events arriving on [stream]. The loop exits when the [stream] gives an error. The function [pre_loop] is called before the loop starts. 
The loop maintains a state (of type ['state]) initialized by [state_maker] and passed to the callbacks [timeout_maker] (used to set up waking-up timeouts), [timeout_k] (when a computed timeout happens), and [event_k] (when a new event arrives on the stream). *)
null
https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_012_Psithaca/lib_delegate/client_baking_scheduling.mli
ocaml
*************************************************************************** Open Source License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), the rights to use, copy, modify, merge, publish, distribute, sublicense, Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ***************************************************************************
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < > to deal in the Software without restriction , including without limitation and/or sell copies of the Software , and to permit persons to whom the THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING val sleep_until : Time.Protocol.t -> unit Lwt.t option val wait_for_first_event : * name : string - > ' event event * * val main : * name : string - > * cctxt:(#Protocol_client_context.full as ' a ) - > * stream:'event tzresult Lwt_stream.t - > * state_maker:('event - > ' state ) - > * pre_loop:('a - > ' state - > ' event - > unit ) - > * compute_timeout:('state - > ' ) - > * timeout_k:('a - > ' state - > ' - > unit ) - > * event_k:('a - > ' state - > ' event - > unit ) - > * finalizer:('state - > unit * unit tzresult Lwt.t * name:string -> 'event tzresult Lwt_stream.t -> 'event Lwt.t * * val main : * name:string -> * cctxt:(#Protocol_client_context.full as 'a) -> * stream:'event tzresult Lwt_stream.t -> * state_maker:('event -> 'state tzresult Lwt.t) -> * pre_loop:('a -> 'state -> 'event -> unit tzresult Lwt.t) -> * compute_timeout:('state -> 'timesup Lwt.t) -> * timeout_k:('a -> 'state -> 'timesup -> unit tzresult Lwt.t) -> * event_k:('a -> 'state -> 'event -> unit tzresult Lwt.t) -> * finalizer:('state -> unit Lwt.t) -> * unit tzresult Lwt.t *) * [ main ~name ~cctxt ~stream ~state_maker ~pre_loop ~timeout_maker ~timeout_k ~event_k ] is an infinitely running loop that monitors new events arriving on [ stream ] . The loop exits when the [ stream ] gives an error . The function [ pre_loop ] is called before the loop starts . The loop maintains a state ( of type [ ' state ] ) initialized by [ state_maker ] and passed to the callbacks [ timeout_maker ] ( used to set up waking - up timeouts ) , [ timeout_k ] ( when a computed timeout happens ) , and [ event_k ] ( when a new event arrives on the stream ) . 
~event_k] is an infinitely running loop that monitors new events arriving on [stream]. The loop exits when the [stream] gives an error. The function [pre_loop] is called before the loop starts. The loop maintains a state (of type ['state]) initialized by [state_maker] and passed to the callbacks [timeout_maker] (used to set up waking-up timeouts), [timeout_k] (when a computed timeout happens), and [event_k] (when a new event arrives on the stream). *)
fa29a0948fd77a7b4c79b42813a7207d5789992958c74146e8444ffae5633edf
circuithub/rel8
Table.hs
# language AllowAmbiguousTypes # # language DataKinds # {-# language FlexibleContexts #-} # language FlexibleInstances # # language MultiParamTypeClasses # # language RankNTypes # {-# language ScopedTypeVariables #-} # language StandaloneKindSignatures # # language TypeApplications # {-# language TypeFamilies #-} # language TypeOperators # {-# language UndecidableInstances #-} module Rel8.Generic.Table ( GGSerialize, GGColumns, ggfromResult, ggtoResult , GAlgebra ) where -- base import Data.Kind ( Constraint, Type ) import GHC.Generics ( (:+:), (:*:), K1, M1, U1, V1 ) import Prelude () -- rel8 import Rel8.FCF ( Eval, Exp ) import Rel8.Generic.Table.ADT ( GSerializeADT, GColumnsADT, gtoResultADT, gfromResultADT ) import Rel8.Generic.Table.Record ( GSerialize, GColumns, gtoResult, gfromResult ) import Rel8.Kind.Algebra ( Algebra( Product, Sum ) , SAlgebra( SProduct, SSum ) , KnownAlgebra, algebraSing ) import qualified Rel8.Schema.Kind as K import Rel8.Schema.Result ( Result ) data GGSerialize :: Algebra -> (Type -> Type -> Exp Constraint) -> (Type -> Exp K.HTable) -> (Type -> Type) -> (Type -> Type) -> Exp Constraint type instance Eval (GGSerialize 'Product _Serialize _Columns exprs rep) = GSerialize _Serialize _Columns exprs rep type instance Eval (GGSerialize 'Sum _Serialize _Columns exprs rep) = GSerializeADT _Serialize _Columns exprs rep data GGColumns :: Algebra -> (Type -> Exp K.HTable) -> (Type -> Type) -> Exp K.HTable type instance Eval (GGColumns 'Product _Columns rep) = GColumns _Columns rep type instance Eval (GGColumns 'Sum _Columns rep) = GColumnsADT _Columns rep type GAlgebra :: (Type -> Type) -> Algebra type family GAlgebra rep where GAlgebra (M1 _ _ rep) = GAlgebra rep GAlgebra V1 = 'Sum GAlgebra (_ :+: _) = 'Sum GAlgebra U1 = 'Sum GAlgebra (_ :*: _) = 'Product GAlgebra (K1 _ _) = 'Product ggfromResult :: forall algebra _Serialize _Columns exprs rep x. 
( KnownAlgebra algebra , Eval (GGSerialize algebra _Serialize _Columns exprs rep) ) => (forall expr a proxy. Eval (_Serialize expr a) => proxy expr -> Eval (_Columns expr) Result -> a) -> Eval (GGColumns algebra _Columns exprs) Result -> rep x ggfromResult f x = case algebraSing @algebra of SProduct -> gfromResult @_Serialize @_Columns @exprs @rep f x SSum -> gfromResultADT @_Serialize @_Columns @exprs @rep f x ggtoResult :: forall algebra _Serialize _Columns exprs rep x. ( KnownAlgebra algebra , Eval (GGSerialize algebra _Serialize _Columns exprs rep) ) => (forall expr a proxy. Eval (_Serialize expr a) => proxy expr -> a -> Eval (_Columns expr) Result) -> rep x -> Eval (GGColumns algebra _Columns exprs) Result ggtoResult f x = case algebraSing @algebra of SProduct -> gtoResult @_Serialize @_Columns @exprs @rep f x SSum -> gtoResultADT @_Serialize @_Columns @exprs @rep f x
null
https://raw.githubusercontent.com/circuithub/rel8/119c825e552b9ee3728992f545afda61e07d7625/src/Rel8/Generic/Table.hs
haskell
# language FlexibleContexts # # language ScopedTypeVariables # # language TypeFamilies # # language UndecidableInstances # base rel8
# language AllowAmbiguousTypes # # language DataKinds # # language FlexibleInstances # # language MultiParamTypeClasses # # language RankNTypes # # language StandaloneKindSignatures # # language TypeApplications # # language TypeOperators # module Rel8.Generic.Table ( GGSerialize, GGColumns, ggfromResult, ggtoResult , GAlgebra ) where import Data.Kind ( Constraint, Type ) import GHC.Generics ( (:+:), (:*:), K1, M1, U1, V1 ) import Prelude () import Rel8.FCF ( Eval, Exp ) import Rel8.Generic.Table.ADT ( GSerializeADT, GColumnsADT, gtoResultADT, gfromResultADT ) import Rel8.Generic.Table.Record ( GSerialize, GColumns, gtoResult, gfromResult ) import Rel8.Kind.Algebra ( Algebra( Product, Sum ) , SAlgebra( SProduct, SSum ) , KnownAlgebra, algebraSing ) import qualified Rel8.Schema.Kind as K import Rel8.Schema.Result ( Result ) data GGSerialize :: Algebra -> (Type -> Type -> Exp Constraint) -> (Type -> Exp K.HTable) -> (Type -> Type) -> (Type -> Type) -> Exp Constraint type instance Eval (GGSerialize 'Product _Serialize _Columns exprs rep) = GSerialize _Serialize _Columns exprs rep type instance Eval (GGSerialize 'Sum _Serialize _Columns exprs rep) = GSerializeADT _Serialize _Columns exprs rep data GGColumns :: Algebra -> (Type -> Exp K.HTable) -> (Type -> Type) -> Exp K.HTable type instance Eval (GGColumns 'Product _Columns rep) = GColumns _Columns rep type instance Eval (GGColumns 'Sum _Columns rep) = GColumnsADT _Columns rep type GAlgebra :: (Type -> Type) -> Algebra type family GAlgebra rep where GAlgebra (M1 _ _ rep) = GAlgebra rep GAlgebra V1 = 'Sum GAlgebra (_ :+: _) = 'Sum GAlgebra U1 = 'Sum GAlgebra (_ :*: _) = 'Product GAlgebra (K1 _ _) = 'Product ggfromResult :: forall algebra _Serialize _Columns exprs rep x. ( KnownAlgebra algebra , Eval (GGSerialize algebra _Serialize _Columns exprs rep) ) => (forall expr a proxy. 
Eval (_Serialize expr a) => proxy expr -> Eval (_Columns expr) Result -> a) -> Eval (GGColumns algebra _Columns exprs) Result -> rep x ggfromResult f x = case algebraSing @algebra of SProduct -> gfromResult @_Serialize @_Columns @exprs @rep f x SSum -> gfromResultADT @_Serialize @_Columns @exprs @rep f x ggtoResult :: forall algebra _Serialize _Columns exprs rep x. ( KnownAlgebra algebra , Eval (GGSerialize algebra _Serialize _Columns exprs rep) ) => (forall expr a proxy. Eval (_Serialize expr a) => proxy expr -> a -> Eval (_Columns expr) Result) -> rep x -> Eval (GGColumns algebra _Columns exprs) Result ggtoResult f x = case algebraSing @algebra of SProduct -> gtoResult @_Serialize @_Columns @exprs @rep f x SSum -> gtoResultADT @_Serialize @_Columns @exprs @rep f x
d336c50b4df18f2acf61644eff60c1cbdb9ba3acb4adb082711e1cf1e1e6d022
ilyasergey/monadic-cfa
Concrete.hs
# LANGUAGE MultiParamTypeClasses # # LANGUAGE FlexibleInstances # # LANGUAGE FunctionalDependencies # # LANGUAGE TypeOperators # # LANGUAGE ParallelListComp # {-# LANGUAGE TypeSynonymInstances #-} # LANGUAGE GeneralizedNewtypeDeriving # -- TODO: get rid of this -- {-# LANGUAGE UndecidableInstances #-} module CFA.CPS.Analysis.Concrete where import Data.Map as Map import Data.Set as Set import Data.List as List import Data.Foldable as Foldable import Control.Monad.State import Control.Monad.Identity import Control.Applicative import Util import CFA.CPS import CFA.Lattice import CFA.Store import CFA.CFAMonads import CFA.CPS.Analysis import CFA.CPS.Analysis.Runner data CAddr = CBind Var Int deriving (Eq, Ord, Show) type DStore a = a :-> (Val a) type ΣC = (DStore CAddr, Int) alterStore = mapFst increaseTime = mapSnd (+1) -- is a monad type Concrete = State ΣC readA :: CAddr -> Concrete (Val CAddr) readA a = gets $ (! a) . fst getTime :: Concrete Int getTime = gets snd instance Analysis Concrete CAddr where fun ρ (Lam l) = return $ Clo (l, ρ) fun ρ (Ref v) = readA (ρ!v) arg ρ (Lam l) = let proc = Clo(l, ρ) in return proc arg ρ (Ref v) = readA (ρ!v) a $= d = modify $ alterStore $ Map.insert a d alloc v = CBind v <$> getTime tick _ _ go = modify increaseTime >> go initialΣC :: ΣC initialΣC = (Map.empty, 0) injectConcrete :: CExp -> (PΣ CAddr, ΣC) injectConcrete call = ((call, ρ0), initialΣC) -- Add Garbage Collection instance GarbageCollector Concrete (PΣ CAddr) instance AddStepToFP Concrete (PΣ CAddr) (ℙ (PΣ CAddr, ΣC)) where applyStep step states = Set.map (uncurry $ runState . step) states inject s = Set.singleton (s, initialΣC) exploreConcrete :: CExp -> ℙ (PΣ CAddr, ΣC) exploreConcrete = exploreFP
null
https://raw.githubusercontent.com/ilyasergey/monadic-cfa/caeb9e5375affe9c3cdee0753ae2ba489cdc328a/CFA/CPS/Analysis/Concrete.hs
haskell
# LANGUAGE TypeSynonymInstances # TODO: get rid of this {-# LANGUAGE UndecidableInstances #-} is a monad Add Garbage Collection
# LANGUAGE MultiParamTypeClasses # # LANGUAGE FlexibleInstances # # LANGUAGE FunctionalDependencies # # LANGUAGE TypeOperators # # LANGUAGE ParallelListComp # # LANGUAGE GeneralizedNewtypeDeriving # module CFA.CPS.Analysis.Concrete where import Data.Map as Map import Data.Set as Set import Data.List as List import Data.Foldable as Foldable import Control.Monad.State import Control.Monad.Identity import Control.Applicative import Util import CFA.CPS import CFA.Lattice import CFA.Store import CFA.CFAMonads import CFA.CPS.Analysis import CFA.CPS.Analysis.Runner data CAddr = CBind Var Int deriving (Eq, Ord, Show) type DStore a = a :-> (Val a) type ΣC = (DStore CAddr, Int) alterStore = mapFst increaseTime = mapSnd (+1) type Concrete = State ΣC readA :: CAddr -> Concrete (Val CAddr) readA a = gets $ (! a) . fst getTime :: Concrete Int getTime = gets snd instance Analysis Concrete CAddr where fun ρ (Lam l) = return $ Clo (l, ρ) fun ρ (Ref v) = readA (ρ!v) arg ρ (Lam l) = let proc = Clo(l, ρ) in return proc arg ρ (Ref v) = readA (ρ!v) a $= d = modify $ alterStore $ Map.insert a d alloc v = CBind v <$> getTime tick _ _ go = modify increaseTime >> go initialΣC :: ΣC initialΣC = (Map.empty, 0) injectConcrete :: CExp -> (PΣ CAddr, ΣC) injectConcrete call = ((call, ρ0), initialΣC) instance GarbageCollector Concrete (PΣ CAddr) instance AddStepToFP Concrete (PΣ CAddr) (ℙ (PΣ CAddr, ΣC)) where applyStep step states = Set.map (uncurry $ runState . step) states inject s = Set.singleton (s, initialΣC) exploreConcrete :: CExp -> ℙ (PΣ CAddr, ΣC) exploreConcrete = exploreFP
306c52cf0c3768af78a10aeb15ccbb033657882024b90745be02f5736389ab39
andelf/erlang-proxy
proxy_client_worker.erl
%%%------------------------------------------------------------------- @author < > ( C ) 2013 , %%% @doc %%% %%% @end Created : 8 Apr 2013 by < > %%%------------------------------------------------------------------- -module(proxy_client_worker). -behaviour(gen_server). %% API -export([start_link/1]). %% gen_server callbacks -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -define(SERVER, ?MODULE). -record(state, {server_ip, server_port, server_sock, client_ip, client_port, client_sock }). -define(SOCK_OPTIONS, [binary, {reuseaddr, true}, {active, false}, {nodelay, true} ]). -include("proxy_defs.hrl"). %%%=================================================================== %%% API %%%=================================================================== %%-------------------------------------------------------------------- %% @doc %% Starts the server %% ( ) - > { ok , Pid } | ignore | { error , Error } %% @end %%-------------------------------------------------------------------- % Leave name {local, name} so that the process % remains unregistered. This enables us to start mutliple processes using the pr_sup : ( ) call start_link(ClientSock) -> ConfFile = filename:join(code:priv_dir(proxy_client), "client.conf"), case file:consult(ConfFile) of {ok, Conf} -> gen_server:start_link(?MODULE, [{client_sock, ClientSock}|Conf], []); {error, _Reason} -> {error, conf_file_error} end. 
%%%=================================================================== %%% gen_server callbacks %%%=================================================================== %%-------------------------------------------------------------------- @private %% @doc %% Initializes the server %% ) - > { ok , State } | { ok , State , Timeout } | %% ignore | %% {stop, Reason} %% @end %%-------------------------------------------------------------------- init(Conf) -> ServerIP = proplists:get_value(server_ip, Conf), ServerPort = proplists:get_value(server_port, Conf), ClientIP = proplists:get_value(listen_ip, Conf), ClientPort = proplists:get_value(listen_port, Conf), Client = proplists:get_value(client_sock, Conf), case gen_tcp:connect(getaddr_or_fail(ServerIP), ServerPort, ?SOCK_OPTIONS) of {ok, RemoteSocket} -> ok = inet:setopts(RemoteSocket, [{active, true}]), {ok, #state{server_ip=ServerIP, server_port=ServerPort, server_sock=RemoteSocket, client_ip=ClientIP, client_port=ClientPort, client_sock=Client}, 0}; %%communicate(Client, RemoteSocket); {error, Error} -> ?LOG("Connect error, ~p. ~p:~p~n", [Error, ServerIP, ServerPort]), gen_tcp:close(Client), {stop, server_connect_fail} end. %%-------------------------------------------------------------------- @private %% @doc %% Handling call messages %% , From , State ) - > %% {reply, Reply, State} | { reply , Reply , State , Timeout } | { noreply , State } | { noreply , State , Timeout } | %% {stop, Reason, Reply, State} | %% {stop, Reason, State} %% @end %%-------------------------------------------------------------------- handle_call(_Request, _From, State) -> Reply = ok, {reply, Reply, State}. 
%%-------------------------------------------------------------------- @private %% @doc %% Handling cast messages %% @spec handle_cast(Msg , State ) - > { noreply , State } | { noreply , State , Timeout } | %% {stop, Reason, State} %% @end %%-------------------------------------------------------------------- handle_cast(_Msg, State) -> {noreply, State}. %%-------------------------------------------------------------------- @private %% @doc %% Handling all non call/cast messages %% , State ) - > { noreply , State } | { noreply , State , Timeout } | %% {stop, Reason, State} %% @end %%-------------------------------------------------------------------- handle_info(timeout, #state{server_sock=RemoteSocket, client_sock=Client, client_ip=LocalIP, client_port=LocalPort} = State) -> % try case find_target(Client) of {ok, Mod, {connect, Addr}} -> Target = encode_addr(Addr), ok = gen_tcp:send(RemoteSocket, proxy_transform:transform(Target)), ok = inet:setopts(Client, [{active, true}]), IP = list_to_binary(tuple_to_list(getaddr_or_fail(LocalIP))), ok = gen_tcp:send(Client, Mod:unparse_connection_response({granted, {ipv4, IP, LocalPort}})), {noreply, State}; {error, client_closed} -> {stop, normal, State}; {error, Reason} -> ?LOG("client communication init error: ~p~n", [Reason]), {stop, Reason, State} %% end %% catch %% error:{badmatch,_} -> { stop , normal , State } ; %% _Error:_Reason -> %% ?LOG("client recv error, ~p: ~p~n", [_Error, _Reason]), { stop , normal , State } end; handle_info({tcp, Client, Request}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case gen_tcp:send(RemoteSocket, proxy_transform:transform(Request)) of ok -> {noreply, State}; {error, _Error} -> {stop, _Error, State} end; handle_info({tcp, RemoteSocket, Response}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case gen_tcp:send(Client, proxy_transform:transform(Response)) of ok -> {noreply, State}; {error, _Error} -> {stop, _Error, State} end; 
handle_info({tcp_closed, ASocket}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case ASocket of Client -> {stop, normal, State}; RemoteSocket -> {stop, normal, State} end; handle_info({tcp_error, ASocket, _Reason}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case ASocket of Client -> ?LOG("~p client tcp error~n", [ASocket]), {stop, _Reason, State}; RemoteSocket -> ?LOG("~p server tcp error~n", [ASocket]), {stop, _Reason, State} end; handle_info(_Info, State) -> {noreply, State}. %%-------------------------------------------------------------------- @private %% @doc %% This function is called by a gen_server when it is about to %% terminate. It should be the opposite of Module:init/1 and do any %% necessary cleaning up. When it returns, the gen_server terminates with . The return value is ignored . %% , State ) - > void ( ) %% @end %%-------------------------------------------------------------------- terminate(_Reason, #state{server_sock=RemoteSocket, client_sock=Client}) -> gen_tcp:close(RemoteSocket), gen_tcp:close(Client), ok; terminate(_Reason, _State) -> ok. %%-------------------------------------------------------------------- @private %% @doc %% Convert process state when code is changed %% , State , Extra ) - > { ok , NewState } %% @end %%-------------------------------------------------------------------- code_change(_OldVsn, State, _Extra) -> {ok, State}. %%%=================================================================== Internal functions %%%=================================================================== getaddr_or_fail(IP) -> {ok, Addr} = inet:getaddr(IP, inet), Addr. find_target(Client) -> %% 0x05:version case gen_tcp:recv(Client, 0) of {ok, <<Version:8, _/binary>> = Greeting} -> socks_proxy_handshake(Client, Version, Greeting); {error, closed} -> {error, client_closed}; {error, Reason} -> {error, Reason} end. 
socks_proxy_handshake(Client, Version, Greeting) -> case Version of %% SOCKS4 16#04 -> case proxy_proto_socks4:parse_greeting_request(Greeting) of {connect, _UserId, Addr} -> {ok, proxy_proto_socks4, {connect, Addr}}; {error, Reason} -> {error, Reason} end; 16#05 -> {auth_methods, _} = proxy_proto_socks5:parse_greeting_request(Greeting), gen_tcp:send(Client, proxy_proto_socks5:unparse_greeting_response(no_auth)), {ok, ConnReq} = gen_tcp:recv(Client, 0), case proxy_proto_socks5:parse_connection_request(ConnReq) of {connect, Addr} -> {ok, proxy_proto_socks5, {connect, Addr}}; {error, Reason} -> {error, Reason} end end. encode_addr({ipv4, Address, Port}) -> <<?IPV4, Port:16, Address:32>>; encode_addr({ipv6, Address, Port}) -> <<?IPV6, Port:16, Address:128>>; encode_addr({domain, DomainBin, Port}) -> <<?DOMAIN, Port:16, (byte_size(DomainBin)):8, DomainBin/binary>>; encode_addr(_) -> error.
null
https://raw.githubusercontent.com/andelf/erlang-proxy/d3c6d7ba8825ba7fd438bef5d1e41aa5eae5d872/apps/proxy_client/src/proxy_client_worker.erl
erlang
------------------------------------------------------------------- @doc @end ------------------------------------------------------------------- API gen_server callbacks =================================================================== API =================================================================== -------------------------------------------------------------------- @doc Starts the server @end -------------------------------------------------------------------- Leave name {local, name} so that the process remains unregistered. This enables us to start =================================================================== gen_server callbacks =================================================================== -------------------------------------------------------------------- @doc Initializes the server ignore | {stop, Reason} @end -------------------------------------------------------------------- communicate(Client, RemoteSocket); -------------------------------------------------------------------- @doc Handling call messages {reply, Reply, State} | {stop, Reason, Reply, State} | {stop, Reason, State} @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc Handling cast messages {stop, Reason, State} @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc Handling all non call/cast messages {stop, Reason, State} @end -------------------------------------------------------------------- try end catch error:{badmatch,_} -> _Error:_Reason -> ?LOG("client recv error, ~p: ~p~n", [_Error, _Reason]), -------------------------------------------------------------------- @doc This function is called by a gen_server when it is about to terminate. It should be the opposite of Module:init/1 and do any necessary cleaning up. 
When it returns, the gen_server terminates @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc Convert process state when code is changed @end -------------------------------------------------------------------- =================================================================== =================================================================== 0x05:version SOCKS4
@author < > ( C ) 2013 , Created : 8 Apr 2013 by < > -module(proxy_client_worker). -behaviour(gen_server). -export([start_link/1]). -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -define(SERVER, ?MODULE). -record(state, {server_ip, server_port, server_sock, client_ip, client_port, client_sock }). -define(SOCK_OPTIONS, [binary, {reuseaddr, true}, {active, false}, {nodelay, true} ]). -include("proxy_defs.hrl"). ( ) - > { ok , Pid } | ignore | { error , Error } mutliple processes using the pr_sup : ( ) call start_link(ClientSock) -> ConfFile = filename:join(code:priv_dir(proxy_client), "client.conf"), case file:consult(ConfFile) of {ok, Conf} -> gen_server:start_link(?MODULE, [{client_sock, ClientSock}|Conf], []); {error, _Reason} -> {error, conf_file_error} end. @private ) - > { ok , State } | { ok , State , Timeout } | init(Conf) -> ServerIP = proplists:get_value(server_ip, Conf), ServerPort = proplists:get_value(server_port, Conf), ClientIP = proplists:get_value(listen_ip, Conf), ClientPort = proplists:get_value(listen_port, Conf), Client = proplists:get_value(client_sock, Conf), case gen_tcp:connect(getaddr_or_fail(ServerIP), ServerPort, ?SOCK_OPTIONS) of {ok, RemoteSocket} -> ok = inet:setopts(RemoteSocket, [{active, true}]), {ok, #state{server_ip=ServerIP, server_port=ServerPort, server_sock=RemoteSocket, client_ip=ClientIP, client_port=ClientPort, client_sock=Client}, 0}; {error, Error} -> ?LOG("Connect error, ~p. ~p:~p~n", [Error, ServerIP, ServerPort]), gen_tcp:close(Client), {stop, server_connect_fail} end. @private , From , State ) - > { reply , Reply , State , Timeout } | { noreply , State } | { noreply , State , Timeout } | handle_call(_Request, _From, State) -> Reply = ok, {reply, Reply, State}. @private @spec handle_cast(Msg , State ) - > { noreply , State } | { noreply , State , Timeout } | handle_cast(_Msg, State) -> {noreply, State}. 
@private , State ) - > { noreply , State } | { noreply , State , Timeout } | handle_info(timeout, #state{server_sock=RemoteSocket, client_sock=Client, client_ip=LocalIP, client_port=LocalPort} = State) -> case find_target(Client) of {ok, Mod, {connect, Addr}} -> Target = encode_addr(Addr), ok = gen_tcp:send(RemoteSocket, proxy_transform:transform(Target)), ok = inet:setopts(Client, [{active, true}]), IP = list_to_binary(tuple_to_list(getaddr_or_fail(LocalIP))), ok = gen_tcp:send(Client, Mod:unparse_connection_response({granted, {ipv4, IP, LocalPort}})), {noreply, State}; {error, client_closed} -> {stop, normal, State}; {error, Reason} -> ?LOG("client communication init error: ~p~n", [Reason]), {stop, Reason, State} { stop , normal , State } ; { stop , normal , State } end; handle_info({tcp, Client, Request}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case gen_tcp:send(RemoteSocket, proxy_transform:transform(Request)) of ok -> {noreply, State}; {error, _Error} -> {stop, _Error, State} end; handle_info({tcp, RemoteSocket, Response}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case gen_tcp:send(Client, proxy_transform:transform(Response)) of ok -> {noreply, State}; {error, _Error} -> {stop, _Error, State} end; handle_info({tcp_closed, ASocket}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case ASocket of Client -> {stop, normal, State}; RemoteSocket -> {stop, normal, State} end; handle_info({tcp_error, ASocket, _Reason}, #state{server_sock=RemoteSocket, client_sock=Client} = State) -> case ASocket of Client -> ?LOG("~p client tcp error~n", [ASocket]), {stop, _Reason, State}; RemoteSocket -> ?LOG("~p server tcp error~n", [ASocket]), {stop, _Reason, State} end; handle_info(_Info, State) -> {noreply, State}. @private with . The return value is ignored . 
, State ) - > void ( ) terminate(_Reason, #state{server_sock=RemoteSocket, client_sock=Client}) -> gen_tcp:close(RemoteSocket), gen_tcp:close(Client), ok; terminate(_Reason, _State) -> ok. @private , State , Extra ) - > { ok , NewState } code_change(_OldVsn, State, _Extra) -> {ok, State}. Internal functions getaddr_or_fail(IP) -> {ok, Addr} = inet:getaddr(IP, inet), Addr. find_target(Client) -> case gen_tcp:recv(Client, 0) of {ok, <<Version:8, _/binary>> = Greeting} -> socks_proxy_handshake(Client, Version, Greeting); {error, closed} -> {error, client_closed}; {error, Reason} -> {error, Reason} end. socks_proxy_handshake(Client, Version, Greeting) -> case Version of 16#04 -> case proxy_proto_socks4:parse_greeting_request(Greeting) of {connect, _UserId, Addr} -> {ok, proxy_proto_socks4, {connect, Addr}}; {error, Reason} -> {error, Reason} end; 16#05 -> {auth_methods, _} = proxy_proto_socks5:parse_greeting_request(Greeting), gen_tcp:send(Client, proxy_proto_socks5:unparse_greeting_response(no_auth)), {ok, ConnReq} = gen_tcp:recv(Client, 0), case proxy_proto_socks5:parse_connection_request(ConnReq) of {connect, Addr} -> {ok, proxy_proto_socks5, {connect, Addr}}; {error, Reason} -> {error, Reason} end end. encode_addr({ipv4, Address, Port}) -> <<?IPV4, Port:16, Address:32>>; encode_addr({ipv6, Address, Port}) -> <<?IPV6, Port:16, Address:128>>; encode_addr({domain, DomainBin, Port}) -> <<?DOMAIN, Port:16, (byte_size(DomainBin)):8, DomainBin/binary>>; encode_addr(_) -> error.
6a74e5eea436405e687c8cfc4e5430f4930540fcd648fae28439ccd945ed48ea
joinr/spork
fringe.clj
(ns spork.data.fringe "Stock implementations of the IFringe protocol from spork.protocols.fringe, as inspired by Bob Sedgewick's Graph Algorithms in C." (:require [spork.data [priorityq :as pq] [randq :as rq]] [spork.protocols [core :as generic]]) (:import [java.util PriorityQueue])) (def ^:constant emptyq clojure.lang.PersistentQueue/EMPTY) (def ^:constant emptylist (list)) ;;Now using mutable priority queues for search fringe. This ends up being faster than my persistent implementation . (defn entry-comparer [^clojure.lang.MapEntry l ^clojure.lang.MapEntry r] (let [pl (.key l) pr (.key r)] (cond (< pl pr) -1 (> pl pr) 1 :else 0))) (defn ^PriorityQueue make-pq ([] (PriorityQueue. 11 entry-comparer)) ([compf] (PriorityQueue. 11 compf))) (defn ^PriorityQueue pq [xs] (reduce (fn [^PriorityQueue acc x] (doto acc (.add x))) (make-pq) xs)) (defn ^PriorityQueue add-pq [^PriorityQueue q obj] (doto q (.add obj))) (defn ^PriorityQueue pop-pq [^PriorityQueue q ] (do (.poll q) q)) (deftype qset [^clojure.lang.PersistentHashSet enqueued ^clojure.lang.PersistentQueue q] clojure.lang.ISeq (seq [obj] (seq q)) generic/IFringe (conj-fringe [fringe n w] (if (enqueued n) fringe (qset. (.cons enqueued n) (.cons q n)))) (next-fringe [fringe] (.peek q)) (pop-fringe [fringe] (qset. (.disjoin enqueued (.peek q)) (.pop q)))) (def emptyqset (qset. #{} emptyq)) # note # Now , years later , I realize this is going to be slow ;;since we're not using inlined method invokation. ;;__TODO__ re-evaluate the use of entries as a standard api choice. ;;Do we really need access to the node weights? Can't we just look them up? ;;The current scheme is fine if the cost of a weight function is high, but ;;typically it'll just be a graph lookup...We might be introducing some ;;overhead due to all the garbage creation for the entry objects. Merits ;;re-looking. Implementations of basic stack ( depth first ) and queue ( breadth first ) ;;fringes. 
(extend-protocol generic/IFringe nil (conj-fringe [fringe n w] (conj '() n)) (next-fringe [fringe] nil) (pop-fringe [fringe] nil) clojure.lang.PersistentQueue (conj-fringe [fringe n w] (.cons fringe n)) (next-fringe [fringe] (.peek fringe)) (pop-fringe [fringe] (.pop fringe)) clojure.lang.PersistentList (conj-fringe [fringe n w] (conj fringe n)) (next-fringe [fringe] (.first fringe)) (pop-fringe [fringe] (.next fringe)) clojure.lang.PersistentList$EmptyList (conj-fringe [fringe n w] (conj fringe n)) (next-fringe [fringe] nil) (pop-fringe [fringe] nil) spork.data.randq.randomq (conj-fringe [fringe n w] (.cons fringe n)) (next-fringe [fringe] (.peek fringe)) (pop-fringe [fringe] (.pop fringe)) ;;TODO# Add a better priorityfringe ;;Note -> Using on a priority queue, we have a possible space leak. ;;I was approaching it much like the depth and breadth fringe ;;implementations. Specifically, we still keep the properties of Dijkstra 's algorithm ( or PFS ) , but we do n't update the weights on the PQ . We just queue the same item at a higher priority . ;;Because of the priority-order of visit, we still get the ;;properties of PFS, we just ignore duplicate items that occur later ;;(they don't decrease distance), and they have already been ;;visited. In large state spaces, like combinatorial search, we ;;pay a significant penalty here in both memory and computation ;;speed because we're not updating existing nodes on the fringe, and ;;allowing lots of garbage to accumulate. Instead, we should be ;;using an indexed priority queue, and upon discovering that a ;;node exists, we should re-weight the node and rebalance the pq. 
java.util.PriorityQueue (conj-fringe [fringe n w] (doto fringe (.add (generic/entry w n)))) (next-fringe [fringe] (when-let [^clojure.lang.MapEntry e (.peek ^PriorityQueue fringe)] (.val e))) (pop-fringe [fringe] (doto fringe (.poll)))) (extend-protocol generic/IClearable nil (-clear [x] x) clojure.lang.PersistentQueue (-clear [x] emptyq) clojure.lang.PersistentList (-clear [x] '()) clojure.lang.PersistentList$EmptyList (-clear [x] x) clojure.lang.Cons (-clear [x] '()) spork.data.randq.randomq (-clear [x] rq/emptyrq) java.util.PriorityQueue (-clear [x] (doto x (.clear)))) we wrap a priorityq with a map to get a priority fringe . Acts as an associative fringe , i.e. keeps exactly one instance of a value ;;on the fringe at any time. Could be supplanted by a priority map, or a ;;cheaplist, or a stock priority queue that doesn't bother to eliminate stale ;;values when re-weighing. ;;OBSOLETE (defrecord pfringe [priorities ^spork.data.priorityq.pqueue fringe] generic/IFringe (conj-fringe [pf n w] (let [w (or w 0)] (pfringe. (assoc priorities n w) (if-let [wold (get priorities n)] update the entry in the priorityq . (pq/alter-value fringe n wold w) (pq/push-node fringe n w))))) (next-fringe [pf] (peek fringe)) (pop-fringe [pf] (if (empty? priorities) fringe (pfringe. (dissoc priorities (peek fringe)) (pop fringe))))) The four primitive fringes . Just aliases for provided implementations . (def breadth-fringe "Builds a fringe that stores [node weight] entries in first-in-first-out FIFO order. Backed by a persistent queue" emptyq) (def bellman-fringe "Builds a fringe that stores [node weight] entries in first-in-first-out FIFO order. Backed by a persistent queue" emptyqset) Currently not used , in favor of mutable priority queue . May find ;;use again, if I can profile it and make it competitive. It's not ;;terrible, but the implementation is weak compared to the mutable pq. 
(def priority-fringe "Builds a fringe that stores [node weight] entries in priority order, according to minimal weight. Backed by a sorted map." (->pfringe {} pq/minq)) (def random-fringe "Builds a fringe that stores [node weight] entries in random order. Backed by a spork.data.randq.randomq" rq/emptyrq) (def depth-fringe "Builds a fringe that stores [node weight] entries in last-in-first-out LIFO order. Backed by a persistent list." emptylist) ;;Testing (comment (def nodes [[:a 2] [:b 3] [:c 10] [:d 11] [:e 0]]) (defn load-fringe [f &{:keys [xs] :or {xs nodes}}] (reduce (fn [acc [n w]] (generic/conj-fringe acc n w)) f xs)) (assert (= (generic/fringe-seq (load-fringe priority-fringe)) '([:e 0] [:a 2] [:b 3] [:c 10] [:d 11]))) (assert (= (generic/fringe-seq (load-fringe depth-fringe)) '([:e 0] [:d 11] [:c 10] [:b 3] [:a 2]))) (assert (= (generic/fringe-seq (load-fringe breadth-fringe)) '([:a 2] [:b 3] [:c 10] [:d 11] [:e 0]))) )
null
https://raw.githubusercontent.com/joinr/spork/bb80eddadf90bf92745bf5315217e25a99fbf9d6/src/spork/data/fringe.clj
clojure
Now using mutable priority queues for search fringe. This ends up since we're not using inlined method invokation. __TODO__ re-evaluate the use of entries as a standard api choice. Do we really need access to the node weights? Can't we just look them up? The current scheme is fine if the cost of a weight function is high, but typically it'll just be a graph lookup...We might be introducing some overhead due to all the garbage creation for the entry objects. Merits re-looking. fringes. TODO# Add a better priorityfringe Note -> Using on a priority queue, we have a possible space leak. I was approaching it much like the depth and breadth fringe implementations. Specifically, we still keep the properties of Because of the priority-order of visit, we still get the properties of PFS, we just ignore duplicate items that occur later (they don't decrease distance), and they have already been visited. In large state spaces, like combinatorial search, we pay a significant penalty here in both memory and computation speed because we're not updating existing nodes on the fringe, and allowing lots of garbage to accumulate. Instead, we should be using an indexed priority queue, and upon discovering that a node exists, we should re-weight the node and rebalance the pq. on the fringe at any time. Could be supplanted by a priority map, or a cheaplist, or a stock priority queue that doesn't bother to eliminate stale values when re-weighing. OBSOLETE use again, if I can profile it and make it competitive. It's not terrible, but the implementation is weak compared to the mutable pq. Testing
(ns spork.data.fringe "Stock implementations of the IFringe protocol from spork.protocols.fringe, as inspired by Bob Sedgewick's Graph Algorithms in C." (:require [spork.data [priorityq :as pq] [randq :as rq]] [spork.protocols [core :as generic]]) (:import [java.util PriorityQueue])) (def ^:constant emptyq clojure.lang.PersistentQueue/EMPTY) (def ^:constant emptylist (list)) being faster than my persistent implementation . (defn entry-comparer [^clojure.lang.MapEntry l ^clojure.lang.MapEntry r] (let [pl (.key l) pr (.key r)] (cond (< pl pr) -1 (> pl pr) 1 :else 0))) (defn ^PriorityQueue make-pq ([] (PriorityQueue. 11 entry-comparer)) ([compf] (PriorityQueue. 11 compf))) (defn ^PriorityQueue pq [xs] (reduce (fn [^PriorityQueue acc x] (doto acc (.add x))) (make-pq) xs)) (defn ^PriorityQueue add-pq [^PriorityQueue q obj] (doto q (.add obj))) (defn ^PriorityQueue pop-pq [^PriorityQueue q ] (do (.poll q) q)) (deftype qset [^clojure.lang.PersistentHashSet enqueued ^clojure.lang.PersistentQueue q] clojure.lang.ISeq (seq [obj] (seq q)) generic/IFringe (conj-fringe [fringe n w] (if (enqueued n) fringe (qset. (.cons enqueued n) (.cons q n)))) (next-fringe [fringe] (.peek q)) (pop-fringe [fringe] (qset. (.disjoin enqueued (.peek q)) (.pop q)))) (def emptyqset (qset. 
#{} emptyq)) # note # Now , years later , I realize this is going to be slow Implementations of basic stack ( depth first ) and queue ( breadth first ) (extend-protocol generic/IFringe nil (conj-fringe [fringe n w] (conj '() n)) (next-fringe [fringe] nil) (pop-fringe [fringe] nil) clojure.lang.PersistentQueue (conj-fringe [fringe n w] (.cons fringe n)) (next-fringe [fringe] (.peek fringe)) (pop-fringe [fringe] (.pop fringe)) clojure.lang.PersistentList (conj-fringe [fringe n w] (conj fringe n)) (next-fringe [fringe] (.first fringe)) (pop-fringe [fringe] (.next fringe)) clojure.lang.PersistentList$EmptyList (conj-fringe [fringe n w] (conj fringe n)) (next-fringe [fringe] nil) (pop-fringe [fringe] nil) spork.data.randq.randomq (conj-fringe [fringe n w] (.cons fringe n)) (next-fringe [fringe] (.peek fringe)) (pop-fringe [fringe] (.pop fringe)) Dijkstra 's algorithm ( or PFS ) , but we do n't update the weights on the PQ . We just queue the same item at a higher priority . java.util.PriorityQueue (conj-fringe [fringe n w] (doto fringe (.add (generic/entry w n)))) (next-fringe [fringe] (when-let [^clojure.lang.MapEntry e (.peek ^PriorityQueue fringe)] (.val e))) (pop-fringe [fringe] (doto fringe (.poll)))) (extend-protocol generic/IClearable nil (-clear [x] x) clojure.lang.PersistentQueue (-clear [x] emptyq) clojure.lang.PersistentList (-clear [x] '()) clojure.lang.PersistentList$EmptyList (-clear [x] x) clojure.lang.Cons (-clear [x] '()) spork.data.randq.randomq (-clear [x] rq/emptyrq) java.util.PriorityQueue (-clear [x] (doto x (.clear)))) we wrap a priorityq with a map to get a priority fringe . Acts as an associative fringe , i.e. keeps exactly one instance of a value (defrecord pfringe [priorities ^spork.data.priorityq.pqueue fringe] generic/IFringe (conj-fringe [pf n w] (let [w (or w 0)] (pfringe. (assoc priorities n w) (if-let [wold (get priorities n)] update the entry in the priorityq . 
(pq/alter-value fringe n wold w) (pq/push-node fringe n w))))) (next-fringe [pf] (peek fringe)) (pop-fringe [pf] (if (empty? priorities) fringe (pfringe. (dissoc priorities (peek fringe)) (pop fringe))))) The four primitive fringes . Just aliases for provided implementations . (def breadth-fringe "Builds a fringe that stores [node weight] entries in first-in-first-out FIFO order. Backed by a persistent queue" emptyq) (def bellman-fringe "Builds a fringe that stores [node weight] entries in first-in-first-out FIFO order. Backed by a persistent queue" emptyqset) Currently not used , in favor of mutable priority queue . May find (def priority-fringe "Builds a fringe that stores [node weight] entries in priority order, according to minimal weight. Backed by a sorted map." (->pfringe {} pq/minq)) (def random-fringe "Builds a fringe that stores [node weight] entries in random order. Backed by a spork.data.randq.randomq" rq/emptyrq) (def depth-fringe "Builds a fringe that stores [node weight] entries in last-in-first-out LIFO order. Backed by a persistent list." emptylist) (comment (def nodes [[:a 2] [:b 3] [:c 10] [:d 11] [:e 0]]) (defn load-fringe [f &{:keys [xs] :or {xs nodes}}] (reduce (fn [acc [n w]] (generic/conj-fringe acc n w)) f xs)) (assert (= (generic/fringe-seq (load-fringe priority-fringe)) '([:e 0] [:a 2] [:b 3] [:c 10] [:d 11]))) (assert (= (generic/fringe-seq (load-fringe depth-fringe)) '([:e 0] [:d 11] [:c 10] [:b 3] [:a 2]))) (assert (= (generic/fringe-seq (load-fringe breadth-fringe)) '([:a 2] [:b 3] [:c 10] [:d 11] [:e 0]))) )
340b2b773abec30000450b9a9d379a33d202ddd6201c4956bb26f87f3654f39a
mikera/clojure-utils
hex.clj
(ns mikera.cljutils.hex "Utilities for handling hexadecimal strings." (:require [clojure.string :as str]) (:require [mikera.cljutils.text :as text])) (set! *warn-on-reflection* true) (set! *unchecked-math* :warn-on-boxed) (defn hex-string "Converts an an integer value to a hexadecimal string representing the unsigned value. The length of the output depends on the value of the integer." ([n] (cond (instance? Long n) (Long/toHexString (unchecked-long n)) (instance? Integer n) (java.lang.Integer/toHexString (unchecked-int n)) (instance? Character n) (.substring (java.lang.Integer/toHexString (unchecked-int (char n))) 0 4) (instance? Byte n) (java.lang.Integer/toHexString (unchecked-byte n)) :else (Long/toHexString (unchecked-long n)))) ([n zero-pad-length] (text/pad-left (hex-string n) zero-pad-length "0"))) (defn hex-string-from-long "Converts an long value to a hexadecimal string representing the unsigned value of the long." ([^long n] (Long/toHexString n)) ([^long n zero-pad-length] (text/pad-left (hex-string-from-long n) zero-pad-length "0"))) (defn hex-string-from-byte "Converts an byte value to a hexadecimal string representing the unsigned value of the byte." ([b] (let [hs (Long/toHexString (+ 256 (long b))) n (.length hs)] (.substring hs (int (- n 2)))))) (defn bytes-from-hex-string "Converts a string of hex digits into a byte array." ([^String s] (let [s (str/replace s #"\s+" "") ^String s (str/replace s "0x" "") cc (.length s) n (quot cc 2) res (byte-array n)] (dotimes [i n] (aset res i (byte (+ (bit-and 0xF0 (bit-shift-left (Character/getNumericValue (char (.charAt s (int (* 2 i))))) 4)) (bit-and 0x0F (long (Character/getNumericValue (.charAt s (int (+ (* 2 i) 1)))))))))) res)))
null
https://raw.githubusercontent.com/mikera/clojure-utils/92f7fd7a40c9cf22ab7004a304303e45ea4d4284/src/main/clojure/mikera/cljutils/hex.clj
clojure
(ns mikera.cljutils.hex "Utilities for handling hexadecimal strings." (:require [clojure.string :as str]) (:require [mikera.cljutils.text :as text])) (set! *warn-on-reflection* true) (set! *unchecked-math* :warn-on-boxed) (defn hex-string "Converts an an integer value to a hexadecimal string representing the unsigned value. The length of the output depends on the value of the integer." ([n] (cond (instance? Long n) (Long/toHexString (unchecked-long n)) (instance? Integer n) (java.lang.Integer/toHexString (unchecked-int n)) (instance? Character n) (.substring (java.lang.Integer/toHexString (unchecked-int (char n))) 0 4) (instance? Byte n) (java.lang.Integer/toHexString (unchecked-byte n)) :else (Long/toHexString (unchecked-long n)))) ([n zero-pad-length] (text/pad-left (hex-string n) zero-pad-length "0"))) (defn hex-string-from-long "Converts an long value to a hexadecimal string representing the unsigned value of the long." ([^long n] (Long/toHexString n)) ([^long n zero-pad-length] (text/pad-left (hex-string-from-long n) zero-pad-length "0"))) (defn hex-string-from-byte "Converts an byte value to a hexadecimal string representing the unsigned value of the byte." ([b] (let [hs (Long/toHexString (+ 256 (long b))) n (.length hs)] (.substring hs (int (- n 2)))))) (defn bytes-from-hex-string "Converts a string of hex digits into a byte array." ([^String s] (let [s (str/replace s #"\s+" "") ^String s (str/replace s "0x" "") cc (.length s) n (quot cc 2) res (byte-array n)] (dotimes [i n] (aset res i (byte (+ (bit-and 0xF0 (bit-shift-left (Character/getNumericValue (char (.charAt s (int (* 2 i))))) 4)) (bit-and 0x0F (long (Character/getNumericValue (.charAt s (int (+ (* 2 i) 1)))))))))) res)))
b34ea6b1f469190f309459cad4bb68b9d17f354bf0950c614bfc571cc3c16096
bos/rwh
actions.hs
{-- snippet all --} str2action :: String -> IO () str2action input = putStrLn ("Data: " ++ input) list2actions :: [String] -> [IO ()] list2actions = map str2action numbers :: [Int] numbers = [1..10] strings :: [String] strings = map show numbers actions :: [IO ()] actions = list2actions strings printitall :: IO () printitall = runall actions -- Take a list of actions, and execute each of them in turn. runall :: [IO ()] -> IO () runall [] = return () runall (firstelem:remainingelems) = do firstelem runall remainingelems main = do str2action "Start of the program" printitall str2action "Done!" {-- /snippet all --}
null
https://raw.githubusercontent.com/bos/rwh/7fd1e467d54aef832f5476ebf5f4f6a898a895d1/examples/ch07/actions.hs
haskell
- snippet all - Take a list of actions, and execute each of them in turn. - /snippet all -
str2action :: String -> IO () str2action input = putStrLn ("Data: " ++ input) list2actions :: [String] -> [IO ()] list2actions = map str2action numbers :: [Int] numbers = [1..10] strings :: [String] strings = map show numbers actions :: [IO ()] actions = list2actions strings printitall :: IO () printitall = runall actions runall :: [IO ()] -> IO () runall [] = return () runall (firstelem:remainingelems) = do firstelem runall remainingelems main = do str2action "Start of the program" printitall str2action "Done!"
c2661c059161d17aaa982bcfb0fa1bb54bd939733c835c834269c937ed1ffcc0
johnridesabike/acutis
dagmap.mli
(**************************************************************************) (* *) Copyright ( c ) 2022 . (* *) This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. (* *) (**************************************************************************) * A utility to help { ! } and { ! Compile } enforce that templates form a directed acyclic graph . a directed acyclic graph. *) type ('a, 'b) t (** ['a] is the type of values before linking into the graph. ['b] is the type of values after they've been linked.*) val make : f:(('a, 'b) t -> 'a -> 'b) -> ?root:string -> 'a Map.String.t -> ('a, 'b) t (** Use {!get} inside the [f] function. @param f Transforms ['a] to ['b]. @param root The root component, if one is defined. *) val get : string -> ('a, 'b) t -> 'b (** Gets the ['b] value associated with a string key. @raise Error.Acutis_error When a key does not exist or if the graph forms a cycle.*) val prelinked : string -> 'a Map.String.t -> ('b, 'a) t val link_all : ('a, 'b) t -> 'b Map.String.t
null
https://raw.githubusercontent.com/johnridesabike/acutis/5c352a4892bee60b9bdc7e60ff57f2ddb950d6e1/lib/dagmap.mli
ocaml
************************************************************************ ************************************************************************ * ['a] is the type of values before linking into the graph. ['b] is the type of values after they've been linked. * Use {!get} inside the [f] function. @param f Transforms ['a] to ['b]. @param root The root component, if one is defined. * Gets the ['b] value associated with a string key. @raise Error.Acutis_error When a key does not exist or if the graph forms a cycle.
Copyright ( c ) 2022 . This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. * A utility to help { ! } and { ! Compile } enforce that templates form a directed acyclic graph . a directed acyclic graph. *) type ('a, 'b) t val make : f:(('a, 'b) t -> 'a -> 'b) -> ?root:string -> 'a Map.String.t -> ('a, 'b) t val get : string -> ('a, 'b) t -> 'b val prelinked : string -> 'a Map.String.t -> ('b, 'a) t val link_all : ('a, 'b) t -> 'b Map.String.t
9da364a2257f10fd0a0398569184b59f32877810c5f35f0d6988ad4219c50eb7
scrintal/heroicons-reagent
bars_3_bottom_left.cljs
(ns com.scrintal.heroicons.mini.bars-3-bottom-left) (defn render [] [:svg {:xmlns "" :viewBox "0 0 20 20" :fill "currentColor" :aria-hidden "true"} [:path {:fillRule "evenodd" :d "M2 4.75A.75.75 0 012.75 4h14.5a.75.75 0 010 1.5H2.75A.75.75 0 012 4.75zm0 10.5a.75.75 0 01.75-.75h7.5a.75.75 0 010 1.5h-7.5a.75.75 0 01-.75-.75zM2 10a.75.75 0 01.75-.75h14.5a.75.75 0 010 1.5H2.75A.75.75 0 012 10z" :clipRule "evenodd"}]])
null
https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/mini/bars_3_bottom_left.cljs
clojure
(ns com.scrintal.heroicons.mini.bars-3-bottom-left) (defn render [] [:svg {:xmlns "" :viewBox "0 0 20 20" :fill "currentColor" :aria-hidden "true"} [:path {:fillRule "evenodd" :d "M2 4.75A.75.75 0 012.75 4h14.5a.75.75 0 010 1.5H2.75A.75.75 0 012 4.75zm0 10.5a.75.75 0 01.75-.75h7.5a.75.75 0 010 1.5h-7.5a.75.75 0 01-.75-.75zM2 10a.75.75 0 01.75-.75h14.5a.75.75 0 010 1.5H2.75A.75.75 0 012 10z" :clipRule "evenodd"}]])
bc1683348c4804144f3948f1f421894a03c0f69f0113b05003fe58fc73751479
WorksHub/client
primitives.cljc
(ns wh.common.specs.primitives (:require [#?(:clj clojure.spec.alpha :cljs cljs.spec.alpha) :as s] [clojure.string :as str] #?(:clj [spec-tools.core :as st]) #?(:clj [clojure.spec.gen.alpha :as gen]) [wh.common.url :as url])) (defn valid-email? [email] (re-matches #"^[^@]+@[^@\\.]+[\\.].+" email)) (defn valid-url? [url] (or (str/starts-with? url "http://") (str/starts-with? url "https://"))) (defn valid-domain? [domain] (re-matches #"([a-z0-9-]+\.)+[a-z]+" domain)) (s/def ::email (s/and string? valid-email?)) (s/def ::url (s/and string? url/has-domain?)) (s/def ::domain (s/and string? valid-domain?)) (defn problematic-paths [spec val] (let [expl (s/explain-data spec val)] (map :path (::s/problems expl)))) (defn alphanumeric? [s] (not (re-find #"[^a-zA-Z0-9]" s))) (defn alphanumeric-slug? [s] (not (re-find #"[^a-zA-Z0-9\-]" s))) (s/def ::non-empty-string (s/and string? (complement str/blank?))) (s/def ::non-empty-alphanumeric-string (s/and ::non-empty-string alphanumeric?)) (s/def ::non-empty-slug (s/and ::non-empty-string alphanumeric-slug?)) (s/def :http.path/params (s/nilable (s/map-of keyword? string?))) (s/def :http/query-params (s/nilable ::url/query-params)) (s/def ::percentage (s/double-in :min 0 :max 100)) #?(:clj (s/def :wh/input-stream (st/spec (s/with-gen (s/conformer #(instance? java.io.InputStream %)) #(gen/return (java.io.ByteArrayInputStream. (.getBytes ""))))))) #?(:clj (s/def :wh/content-type #{"application/msword" "application/vnd.openxmlformats-officedocument.wordprocessingml.document" "application/pdf" "application/json" "audio/ogg" "image/apng" "image/avif" "image/flif"}))
null
https://raw.githubusercontent.com/WorksHub/client/77e4212a69dad049a9e784143915058acd918982/common/src/wh/common/specs/primitives.cljc
clojure
(ns wh.common.specs.primitives (:require [#?(:clj clojure.spec.alpha :cljs cljs.spec.alpha) :as s] [clojure.string :as str] #?(:clj [spec-tools.core :as st]) #?(:clj [clojure.spec.gen.alpha :as gen]) [wh.common.url :as url])) (defn valid-email? [email] (re-matches #"^[^@]+@[^@\\.]+[\\.].+" email)) (defn valid-url? [url] (or (str/starts-with? url "http://") (str/starts-with? url "https://"))) (defn valid-domain? [domain] (re-matches #"([a-z0-9-]+\.)+[a-z]+" domain)) (s/def ::email (s/and string? valid-email?)) (s/def ::url (s/and string? url/has-domain?)) (s/def ::domain (s/and string? valid-domain?)) (defn problematic-paths [spec val] (let [expl (s/explain-data spec val)] (map :path (::s/problems expl)))) (defn alphanumeric? [s] (not (re-find #"[^a-zA-Z0-9]" s))) (defn alphanumeric-slug? [s] (not (re-find #"[^a-zA-Z0-9\-]" s))) (s/def ::non-empty-string (s/and string? (complement str/blank?))) (s/def ::non-empty-alphanumeric-string (s/and ::non-empty-string alphanumeric?)) (s/def ::non-empty-slug (s/and ::non-empty-string alphanumeric-slug?)) (s/def :http.path/params (s/nilable (s/map-of keyword? string?))) (s/def :http/query-params (s/nilable ::url/query-params)) (s/def ::percentage (s/double-in :min 0 :max 100)) #?(:clj (s/def :wh/input-stream (st/spec (s/with-gen (s/conformer #(instance? java.io.InputStream %)) #(gen/return (java.io.ByteArrayInputStream. (.getBytes ""))))))) #?(:clj (s/def :wh/content-type #{"application/msword" "application/vnd.openxmlformats-officedocument.wordprocessingml.document" "application/pdf" "application/json" "audio/ogg" "image/apng" "image/avif" "image/flif"}))
20f7ce64e5edffa59dc409319418753fa90e518f23583a9aa6d5f9e2ba3d9ff0
keera-studios/keera-hails
HelloWorld.hs
import Data.ReactiveValue import Graphics.UI.Gtk import Graphics.UI.Gtk.Reactive import Graphics.UI.Gtk.Reactive.Gtk2 main = do -- View initGUI window <- windowNew set window [windowTitle := "Text Entry", containerBorderWidth := 10] vb <- vBoxNew False 0 containerAdd window vb txtfield <- entryNew boxPackStart vb txtfield PackNatural 0 lbl <- labelNew (Nothing :: Maybe String) boxPackStart vb lbl PackNatural 0 widgetShowAll window -- Controller Rules (printMsg <^> entryTextReactive txtfield) =:> labelTextReactive lbl objectDestroyReactive window =:> mainQuit -- Run! mainGUI -- Pure controller functions that can be debugged independently printMsg "" = "" printMsg txt = "\"" ++ txt ++ "\" is " ++ qual ++ " to its reverse" where qual | txt == reverse txt = "equal" | otherwise = "not equal"
null
https://raw.githubusercontent.com/keera-studios/keera-hails/bf069e5aafc85a1f55fa119ae45a025a2bd4a3d0/demos/keera-hails-demos-gtk/src/HelloWorld.hs
haskell
View Controller Rules Run! Pure controller functions that can be debugged independently
import Data.ReactiveValue import Graphics.UI.Gtk import Graphics.UI.Gtk.Reactive import Graphics.UI.Gtk.Reactive.Gtk2 main = do initGUI window <- windowNew set window [windowTitle := "Text Entry", containerBorderWidth := 10] vb <- vBoxNew False 0 containerAdd window vb txtfield <- entryNew boxPackStart vb txtfield PackNatural 0 lbl <- labelNew (Nothing :: Maybe String) boxPackStart vb lbl PackNatural 0 widgetShowAll window (printMsg <^> entryTextReactive txtfield) =:> labelTextReactive lbl objectDestroyReactive window =:> mainQuit mainGUI printMsg "" = "" printMsg txt = "\"" ++ txt ++ "\" is " ++ qual ++ " to its reverse" where qual | txt == reverse txt = "equal" | otherwise = "not equal"
7a574a15a8de79a4ab01c9092b564f6e6bd57175f2becb30ef52537582b005c2
dalaing/little-languages
SmallStep.hs
# LANGUAGE TemplateHaskell # # LANGUAGE MultiParamTypeClasses # # LANGUAGE FunctionalDependencies # # LANGUAGE FlexibleInstances # module Common.Term.Eval.SmallStep ( SmallStepInput(..) , HasSmallStepInput(..) , SmallStepOutput(..) , HasSmallStepOutput(..) , mkSmallStep ) where import Control.Lens.TH (makeClassy) import Control.Monad.Reader import Common.Recursion data SmallStepInput tm = SmallStepInput { _smallSteps :: [Step (ReaderT tm Maybe tm)] } makeClassy ''SmallStepInput instance Monoid (SmallStepInput tm) where mempty = SmallStepInput mempty mappend (SmallStepInput s1) (SmallStepInput s2) = SmallStepInput (mappend s1 s2) data SmallStepOutput tm = SmallStepOutput { _smallStepRules :: [tm -> Maybe tm] , _smallStep :: tm -> Maybe tm , _smallStepEval :: tm -> tm } makeClassy ''SmallStepOutput mkSmallStep :: SmallStepInput tm -> SmallStepOutput tm mkSmallStep (SmallStepInput i) = let ss = runReaderT $ combineSteps i in SmallStepOutput (runReaderT <$> mkSteps i) ss (mkSmallStepEval ss) mkSmallStepEval :: (tm -> Maybe tm) -> tm -> tm mkSmallStepEval step = eval where eval s = case step s of Just s' -> eval s' Nothing -> s
null
https://raw.githubusercontent.com/dalaing/little-languages/9f089f646a5344b8f7178700455a36a755d29b1f/code/old/multityped/nb-modular/src/Common/Term/Eval/SmallStep.hs
haskell
# LANGUAGE TemplateHaskell # # LANGUAGE MultiParamTypeClasses # # LANGUAGE FunctionalDependencies # # LANGUAGE FlexibleInstances # module Common.Term.Eval.SmallStep ( SmallStepInput(..) , HasSmallStepInput(..) , SmallStepOutput(..) , HasSmallStepOutput(..) , mkSmallStep ) where import Control.Lens.TH (makeClassy) import Control.Monad.Reader import Common.Recursion data SmallStepInput tm = SmallStepInput { _smallSteps :: [Step (ReaderT tm Maybe tm)] } makeClassy ''SmallStepInput instance Monoid (SmallStepInput tm) where mempty = SmallStepInput mempty mappend (SmallStepInput s1) (SmallStepInput s2) = SmallStepInput (mappend s1 s2) data SmallStepOutput tm = SmallStepOutput { _smallStepRules :: [tm -> Maybe tm] , _smallStep :: tm -> Maybe tm , _smallStepEval :: tm -> tm } makeClassy ''SmallStepOutput mkSmallStep :: SmallStepInput tm -> SmallStepOutput tm mkSmallStep (SmallStepInput i) = let ss = runReaderT $ combineSteps i in SmallStepOutput (runReaderT <$> mkSteps i) ss (mkSmallStepEval ss) mkSmallStepEval :: (tm -> Maybe tm) -> tm -> tm mkSmallStepEval step = eval where eval s = case step s of Just s' -> eval s' Nothing -> s
387a920a8ea8d1fa3f1e29171034c580f60db6e277924e28abd930e93fbeb8ec
greghendershott/frog
paths.rkt
#lang at-exp racket/base (require racket/require net/uri-codec (multi-in racket/contract (base region)) (multi-in racket (match string)) threading scribble/srcdoc (for-doc racket/base scribble/manual) "params.rkt" (multi-in "private" ("define-doc.rkt" "util.rkt"))) (module+ test (require rackunit racket/function) ;; For testing, define some root directory (define root (if (eq? 'windows (system-path-convention-type)) "C:\\" "/"))) (define top (make-parameter #f)) (provide (parameter-doc top (parameter/c (or/c #f absolute-path?)) path @{The project directory root. Frog sets the value after it has initialized and found the location of @secref["config"]. Many other functions in this module expect @racket[top] to be non-@racket[#f], so for example in unit tests you may need to set this yourself.})) ;; Composition of build-path, expand-user-path, simplify-path, and ;; path->directory-path. (define (build-path* . xs) (~> (apply build-path xs) expand-user-path ;expand things like ~ simplify-path ;factor out things like . and .. path->directory-path)) ;ensure trailing slash ;;; Source directories (define/doc (src-path absolute-path?) @{Resolved location of @racket[current-source-dir].} (define src (current-source-dir)) (cond [(relative-path? src) (build-path* (top) src)] [else (build-path* src)])) (define/doc (src/posts-path absolute-path?) @{The @filepath{posts} subdirectory of @racket[src-path].} (build-path* (src-path) "posts")) (module+ test (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-source-dir "_src"]) (src-path)) (path->directory-path (build-path root "projects" "blog" "_src"))) (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-source-dir (build-path 'up "source")]) (src-path)) (path->directory-path (build-path root "projects" "source")))) ;; some specific source files (define/doc (post-template.html absolute-path?) 
@{The @filepath{post-template.html} file in @racket[src-path].} (build-path (src-path) "post-template.html")) (define/doc (page-template.html absolute-path?) @{The @filepath{page-template.html} file in @racket[src-path].} (build-path (src-path) "page-template.html")) (define/doc (index-template.html absolute-path?) @{The @filepath{index-template.html} in @racket[src-path].} (build-path (src-path) "index-template.html")) ;;; Intermediate file directory (define/doc (obj-path absolute-path?) @{The @filepath{.frog} build cache subdirectory.} (build-path (top) ".frog")) ;;; Output directories (define/doc (www-path absolute-path?) @{Root of the output files, as determined by @racket[current-output-dir].} (define out (current-output-dir)) (cond [(relative-path? out) (build-path* (top) out)] [else (build-path* out)])) (module+ test (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-output-dir (build-path 'same)]) (www-path)) (path->directory-path (build-path root "projects" "blog"))) (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-output-dir (build-path 'up "build" "stuff")]) (www-path)) (path->directory-path (build-path root "projects" "build" "stuff")))) (define/doc (www/tags-path absolute-path?) @{The @filepath{tags/} subdirectory of @racket[www-path].} (build-path* (www-path) "tags")) (define/doc (www/feeds-path absolute-path?) @{The @filepath{feeds/} subdirectory of @racket[www-path].} (build-path* (www-path) "feeds")) (define/doc (www/img-path absolute-path?) @{The @filepath{img/} subdirectory of @racket[www-path].} (build-path* (www-path) "img")) (define/doc (www/index-pathname absolute-path?) @{Resolves @racket[current-posts-index-uri] regardless of it being any of @filepath{/path/index.html}, @filepath{\path\index.html}, or @filepath{c:\path\index.html}} (build-path (www-path) (path->relative-path (current-posts-index-uri)))) (define (path->relative-path p) ;; path-string? -> path-string? 
(if (absolute-path? p) remove leading / \ or C:\ p)) (module+ test (parameterize ([top (build-path root "projects" "blog")] [current-output-dir (build-path 'up "build" "stuff")]) ;; absolute (check-equal? (parameterize ([current-posts-index-uri "/index.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "index.html")) (check-equal? (parameterize ([current-posts-index-uri "/foo/bar.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "foo" "bar.html")) ;; relative (check-equal? (parameterize ([current-posts-index-uri "index.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "index.html")) (check-equal? (parameterize ([current-posts-index-uri "foo/bar.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "foo" "bar.html")))) ;;; abs->rel/* (define/doc (abs->rel/www [path absolute-path?] string?) @{Convert an absolute local path to a URI path string relative to @racket[www-path] --- which in turn is relative to @racket[current-output-dir]. The result is always in Unix style (even on Windows) so it is suitable for use as a URI path. For example if @racket[top] is @filepath{/project/blog} and @racket[current-output-dir] is @filepath{../build}, then given @filepath{/project/build/css} this should return @filepath{/css}. Same result if on Windows and @racket[top] is @filepath{c:\project\blog} and @racket[current-output-dir] is @filepath{..\build}. NOTE: If you're creating a URI that a client will use to make an HTTP request --- e.g. you will write it in an HTML, feed, or sitemap file --- this result isn't sufficient. 
You should run the result through @racket[canonical-uri], and if you need @racket[current-scheme/host] prepended, in turn through @racket[full-uri].} #:ex [(require frog/paths frog/params)] #:ex [(parameterize ([top "/projects/blog"] [current-output-dir "."]) (abs->rel/www (string->path "/projects/blog/css"))) "/css"] #:ex [(parameterize ([top "/projects/blog"] [current-output-dir "../build"]) (abs->rel/www (string->path "/projects/build/css"))) "/css"] (define segments (abs->rel 'abs->rel/www (simplify-path path) (www-path))) (string-append "/" (string-join (map path->string segments) "/"))) (module+ test (check-exn #rx"No common prefix: #<path:/not/projects/build/css> and #<path:/projects/build/>" (λ () (parameterize ([top "/projects/blog"] [current-output-dir "../build"]) (abs->rel/www (string->path "/not/projects/build/css")))))) (define/doc (abs->rel/src [path absolute-path?] path-string?) @{Convert an absolute local path to a local path-string relative to @racket[src-path].} (define segments (abs->rel 'abs->rel/src (simplify-path path) (src-path))) (path->string (apply build-path segments))) (module+ test (check-equal? (parameterize ([top "/projects/blog"] [current-source-dir "source"]) (abs->rel/src (string->path "/projects/blog/source/foo.md"))) "foo.md") (check-equal? (parameterize ([top "/projects/blog"] [current-source-dir "../blog-source"]) (abs->rel/src (string->path "/projects/blog-source/foo.md"))) "foo.md")) (define/doc (abs->rel/top [path absolute-path?] path-string?) @{Convert an absolute local path to a local path-string relative to @racket[top].} (define segments (abs->rel 'abs->rel/top (simplify-path path) (build-path* (top)))) (path->string (apply build-path segments))) (module+ test (check-equal? (parameterize ([top "/projects/blog"]) (abs->rel/top (string->path "/projects/blog/foo.md"))) "foo.md") (check-equal? (parameterize ([top "/projects/blog"]) (abs->rel/top (string->path "/projects/blog/source/foo.md"))) "source/foo.md")) symbol ? 
path ? path ? - > ( listof path ? ) (define as (explode-path a)) (define bs (explode-path b)) (define-values (prefix tail _) (split-common-prefix as bs)) (unless (equal? prefix bs) (error who "No common prefix: ~v and ~v" a b)) tail) (define/doc (canonical-uri [uri-path string?] string?) @{Possibly rewrite a URI path to take account of non-@racket[#f] @racket[current-uri-prefix] and @racket[uri-path-segment-encode] it.} #:ex [(require frog/paths frog/params)] #:ex [(canonical-uri "relative/λ/path") "relative/%CE%BB/path"] #:ex [(parameterize ([current-uri-prefix #f]) (canonical-uri "/absolute/λ/path")) "/absolute/%CE%BB/path"] #:ex [(parameterize ([current-uri-prefix "/prefix"]) (canonical-uri "/absolute/λ/path")) "/prefix/absolute/%CE%BB/path"] (define (reroot p) ;; (-> string? path-string?) (if (and (current-uri-prefix) (absolute-path? p)) (build-path (current-uri-prefix) (path->relative-path p)) p)) (define (encode p) ;; (-> path-string? string?) (define encode-seg (compose1 uri-path-segment-encode path->string)) (define segs do n't encode the leading / or c:\ (match (explode-path p) [(cons x xs) (cons x (map encode-seg xs))]) (map encode-seg (explode-path p)))) (path->string (apply build-path segs))) (define (dir? p) (define-values (_p _b dir?) (split-path p)) dir?) (define (preserve-trailing-slash orig new) ;; Restore trailing slash lost by path->relative-path, ;; explode-path, etc. (string-append new (if (dir? orig) "/" ""))) (preserve-trailing-slash uri-path (encode (reroot uri-path)))) (module+ test (parameterize ([current-uri-prefix "/prefix///"]) (check-equal? (canonical-uri "/a/λ/p") "/prefix/a/%CE%BB/p") (check-equal? (canonical-uri "/a/λ/p/") "/prefix/a/%CE%BB/p/"))) (define/doc (full-uri [uri-path string?] string?) 
@{Given a URI path string, prepend the scheme & host to make a full URI.} #:ex [(require frog/paths frog/params)] #:ex [(parameterize ([current-scheme/host ""]) (full-uri "/absolute/path/to/file.html")) ""] (match uri-path [(pregexp #px"^/") (string-append (current-scheme/host) uri-path)] [_ (raise-user-error 'full-uri "can't attach host/scheme to relative path")])) (module+ test (parameterize ([current-scheme/host ""]) (check-exn exn:fail? (λ () (full-uri "relative/path/to/file.html"))))) (define/doc (slug [s string?] string?) @{Convert a string into a "slug", in which: @itemlist[@item{The string is Unicode normalized to NFC form using @racket[string-normalize-nfc]} @item{Consecutive characters that are neither @racket[char-alphabetic?] nor @racket[char-numeric?] are replaced by hyphens.} @item{The string is Unicode normalized to NFD form using @racket[string-normalize-nfd]}]} #:ex [(require frog/paths)] #:ex [(slug "Foo? Bar. Baz.") "Foo-Bar-Baz"] #:ex [(slug "Here's a question--how many hyphens???") "Here-s-a-question-how-many-hyphens"] #:ex [(slug "La biblioteca está en el estómago de Godzilla") "La-biblioteca-está-en-el-estómago-de-Godzilla"] ;; WARNING: Changing this will break blog post permalink patterns ;;that use the {title} variable. Even if this could be improved, ;;doing so would break backward compatability. (~> First normalize string to Unicode composite form , so e.g. á will ;; be a single char for which char-alphabetic? is true. (In the decomposed form á would be a plain a char followed by an accent ;; char, and the latter is not char-alphabetic? and would get ;; slugged to a hyphen.) (for/list ([c (in-string (string-normalize-nfc s))]) (cond [(or (char-alphabetic? c) (char-numeric? c)) c] [else #\-])) list->string ;; Only one consecutive hyphen (regexp-replace* #px"-{2,}" _ "-") ;; No trailing hyphen (regexp-replace #px"-{1,}$" _ "") ;; Finally normalize to decomposed form. 
The rationale is that if ;; you use this result in a filename it will (hopefully) be consistent across filesystems like Linux vs macOS . string-normalize-nfd)) (module+ test (require rackunit) (check-equal? (slug "?") "") ;; Sadly we don't trim leading hyphens, because we didn't from day ;; one therefore changing it now would break old URIs -- both link ;; URLs and feed URNs. So the following test is, alas, correct: (check-equal? (slug "'Foo, bar'") "-Foo-bar"))
null
https://raw.githubusercontent.com/greghendershott/frog/93d8b442c2e619334612b7e2d091e4eb33995021/frog/paths.rkt
racket
For testing, define some root directory Composition of build-path, expand-user-path, simplify-path, and path->directory-path. expand things like ~ factor out things like . and .. ensure trailing slash Source directories some specific source files Intermediate file directory Output directories path-string? -> path-string? absolute relative abs->rel/* (-> string? path-string?) (-> path-string? string?) Restore trailing slash lost by path->relative-path, explode-path, etc. WARNING: Changing this will break blog post permalink patterns that use the {title} variable. Even if this could be improved, doing so would break backward compatability. be a single char for which char-alphabetic? is true. (In the char, and the latter is not char-alphabetic? and would get slugged to a hyphen.) Only one consecutive hyphen No trailing hyphen Finally normalize to decomposed form. The rationale is that if you use this result in a filename it will (hopefully) be Sadly we don't trim leading hyphens, because we didn't from day one therefore changing it now would break old URIs -- both link URLs and feed URNs. So the following test is, alas, correct:
#lang at-exp racket/base (require racket/require net/uri-codec (multi-in racket/contract (base region)) (multi-in racket (match string)) threading scribble/srcdoc (for-doc racket/base scribble/manual) "params.rkt" (multi-in "private" ("define-doc.rkt" "util.rkt"))) (module+ test (require rackunit racket/function) (define root (if (eq? 'windows (system-path-convention-type)) "C:\\" "/"))) (define top (make-parameter #f)) (provide (parameter-doc top (parameter/c (or/c #f absolute-path?)) path @{The project directory root. Frog sets the value after it has initialized and found the location of @secref["config"]. Many other functions in this module expect @racket[top] to be non-@racket[#f], so for example in unit tests you may need to set this yourself.})) (define (build-path* . xs) (~> (apply build-path xs) (define/doc (src-path absolute-path?) @{Resolved location of @racket[current-source-dir].} (define src (current-source-dir)) (cond [(relative-path? src) (build-path* (top) src)] [else (build-path* src)])) (define/doc (src/posts-path absolute-path?) @{The @filepath{posts} subdirectory of @racket[src-path].} (build-path* (src-path) "posts")) (module+ test (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-source-dir "_src"]) (src-path)) (path->directory-path (build-path root "projects" "blog" "_src"))) (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-source-dir (build-path 'up "source")]) (src-path)) (path->directory-path (build-path root "projects" "source")))) (define/doc (post-template.html absolute-path?) @{The @filepath{post-template.html} file in @racket[src-path].} (build-path (src-path) "post-template.html")) (define/doc (page-template.html absolute-path?) @{The @filepath{page-template.html} file in @racket[src-path].} (build-path (src-path) "page-template.html")) (define/doc (index-template.html absolute-path?) 
@{The @filepath{index-template.html} in @racket[src-path].} (build-path (src-path) "index-template.html")) (define/doc (obj-path absolute-path?) @{The @filepath{.frog} build cache subdirectory.} (build-path (top) ".frog")) (define/doc (www-path absolute-path?) @{Root of the output files, as determined by @racket[current-output-dir].} (define out (current-output-dir)) (cond [(relative-path? out) (build-path* (top) out)] [else (build-path* out)])) (module+ test (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-output-dir (build-path 'same)]) (www-path)) (path->directory-path (build-path root "projects" "blog"))) (check-equal? (parameterize ([top (build-path root "projects" "blog")] [current-output-dir (build-path 'up "build" "stuff")]) (www-path)) (path->directory-path (build-path root "projects" "build" "stuff")))) (define/doc (www/tags-path absolute-path?) @{The @filepath{tags/} subdirectory of @racket[www-path].} (build-path* (www-path) "tags")) (define/doc (www/feeds-path absolute-path?) @{The @filepath{feeds/} subdirectory of @racket[www-path].} (build-path* (www-path) "feeds")) (define/doc (www/img-path absolute-path?) @{The @filepath{img/} subdirectory of @racket[www-path].} (build-path* (www-path) "img")) (define/doc (www/index-pathname absolute-path?) @{Resolves @racket[current-posts-index-uri] regardless of it being any of @filepath{/path/index.html}, @filepath{\path\index.html}, or @filepath{c:\path\index.html}} (build-path (www-path) (path->relative-path (current-posts-index-uri)))) (if (absolute-path? p) remove leading / \ or C:\ p)) (module+ test (parameterize ([top (build-path root "projects" "blog")] [current-output-dir (build-path 'up "build" "stuff")]) (check-equal? (parameterize ([current-posts-index-uri "/index.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "index.html")) (check-equal? 
(parameterize ([current-posts-index-uri "/foo/bar.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "foo" "bar.html")) (check-equal? (parameterize ([current-posts-index-uri "index.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "index.html")) (check-equal? (parameterize ([current-posts-index-uri "foo/bar.html"]) (www/index-pathname)) (build-path root "projects" "build" "stuff" "foo" "bar.html")))) (define/doc (abs->rel/www [path absolute-path?] string?) @{Convert an absolute local path to a URI path string relative to @racket[www-path] --- which in turn is relative to @racket[current-output-dir]. The result is always in Unix style (even on Windows) so it is suitable for use as a URI path. For example if @racket[top] is @filepath{/project/blog} and @racket[current-output-dir] is @filepath{../build}, then given @filepath{/project/build/css} this should return @filepath{/css}. Same result if on Windows and @racket[top] is @filepath{c:\project\blog} and @racket[current-output-dir] is @filepath{..\build}. NOTE: If you're creating a URI that a client will use to make an HTTP request --- e.g. you will write it in an HTML, feed, or sitemap file --- this result isn't sufficient. 
You should run the result through @racket[canonical-uri], and if you need @racket[current-scheme/host] prepended, in turn through @racket[full-uri].} #:ex [(require frog/paths frog/params)] #:ex [(parameterize ([top "/projects/blog"] [current-output-dir "."]) (abs->rel/www (string->path "/projects/blog/css"))) "/css"] #:ex [(parameterize ([top "/projects/blog"] [current-output-dir "../build"]) (abs->rel/www (string->path "/projects/build/css"))) "/css"] (define segments (abs->rel 'abs->rel/www (simplify-path path) (www-path))) (string-append "/" (string-join (map path->string segments) "/"))) (module+ test (check-exn #rx"No common prefix: #<path:/not/projects/build/css> and #<path:/projects/build/>" (λ () (parameterize ([top "/projects/blog"] [current-output-dir "../build"]) (abs->rel/www (string->path "/not/projects/build/css")))))) (define/doc (abs->rel/src [path absolute-path?] path-string?) @{Convert an absolute local path to a local path-string relative to @racket[src-path].} (define segments (abs->rel 'abs->rel/src (simplify-path path) (src-path))) (path->string (apply build-path segments))) (module+ test (check-equal? (parameterize ([top "/projects/blog"] [current-source-dir "source"]) (abs->rel/src (string->path "/projects/blog/source/foo.md"))) "foo.md") (check-equal? (parameterize ([top "/projects/blog"] [current-source-dir "../blog-source"]) (abs->rel/src (string->path "/projects/blog-source/foo.md"))) "foo.md")) (define/doc (abs->rel/top [path absolute-path?] path-string?) @{Convert an absolute local path to a local path-string relative to @racket[top].} (define segments (abs->rel 'abs->rel/top (simplify-path path) (build-path* (top)))) (path->string (apply build-path segments))) (module+ test (check-equal? (parameterize ([top "/projects/blog"]) (abs->rel/top (string->path "/projects/blog/foo.md"))) "foo.md") (check-equal? (parameterize ([top "/projects/blog"]) (abs->rel/top (string->path "/projects/blog/source/foo.md"))) "source/foo.md")) symbol ? 
path ? path ? - > ( listof path ? ) (define as (explode-path a)) (define bs (explode-path b)) (define-values (prefix tail _) (split-common-prefix as bs)) (unless (equal? prefix bs) (error who "No common prefix: ~v and ~v" a b)) tail) (define/doc (canonical-uri [uri-path string?] string?) @{Possibly rewrite a URI path to take account of non-@racket[#f] @racket[current-uri-prefix] and @racket[uri-path-segment-encode] it.} #:ex [(require frog/paths frog/params)] #:ex [(canonical-uri "relative/λ/path") "relative/%CE%BB/path"] #:ex [(parameterize ([current-uri-prefix #f]) (canonical-uri "/absolute/λ/path")) "/absolute/%CE%BB/path"] #:ex [(parameterize ([current-uri-prefix "/prefix"]) (canonical-uri "/absolute/λ/path")) "/prefix/absolute/%CE%BB/path"] (if (and (current-uri-prefix) (absolute-path? p)) (build-path (current-uri-prefix) (path->relative-path p)) p)) (define encode-seg (compose1 uri-path-segment-encode path->string)) (define segs do n't encode the leading / or c:\ (match (explode-path p) [(cons x xs) (cons x (map encode-seg xs))]) (map encode-seg (explode-path p)))) (path->string (apply build-path segs))) (define (dir? p) (define-values (_p _b dir?) (split-path p)) dir?) (define (preserve-trailing-slash orig new) (string-append new (if (dir? orig) "/" ""))) (preserve-trailing-slash uri-path (encode (reroot uri-path)))) (module+ test (parameterize ([current-uri-prefix "/prefix///"]) (check-equal? (canonical-uri "/a/λ/p") "/prefix/a/%CE%BB/p") (check-equal? (canonical-uri "/a/λ/p/") "/prefix/a/%CE%BB/p/"))) (define/doc (full-uri [uri-path string?] string?) 
@{Given a URI path string, prepend the scheme & host to make a full URI.} #:ex [(require frog/paths frog/params)] #:ex [(parameterize ([current-scheme/host ""]) (full-uri "/absolute/path/to/file.html")) ""] (match uri-path [(pregexp #px"^/") (string-append (current-scheme/host) uri-path)] [_ (raise-user-error 'full-uri "can't attach host/scheme to relative path")])) (module+ test (parameterize ([current-scheme/host ""]) (check-exn exn:fail? (λ () (full-uri "relative/path/to/file.html"))))) (define/doc (slug [s string?] string?) @{Convert a string into a "slug", in which: @itemlist[@item{The string is Unicode normalized to NFC form using @racket[string-normalize-nfc]} @item{Consecutive characters that are neither @racket[char-alphabetic?] nor @racket[char-numeric?] are replaced by hyphens.} @item{The string is Unicode normalized to NFD form using @racket[string-normalize-nfd]}]} #:ex [(require frog/paths)] #:ex [(slug "Foo? Bar. Baz.") "Foo-Bar-Baz"] #:ex [(slug "Here's a question--how many hyphens???") "Here-s-a-question-how-many-hyphens"] #:ex [(slug "La biblioteca está en el estómago de Godzilla") "La-biblioteca-está-en-el-estómago-de-Godzilla"] (~> First normalize string to Unicode composite form , so e.g. á will decomposed form á would be a plain a char followed by an accent (for/list ([c (in-string (string-normalize-nfc s))]) (cond [(or (char-alphabetic? c) (char-numeric? c)) c] [else #\-])) list->string (regexp-replace* #px"-{2,}" _ "-") (regexp-replace #px"-{1,}$" _ "") consistent across filesystems like Linux vs macOS . string-normalize-nfd)) (module+ test (require rackunit) (check-equal? (slug "?") "") (check-equal? (slug "'Foo, bar'") "-Foo-bar"))
9dbf8f7ac466affe19d972160ce073e2786e4950aa5e3e94ef88a99172aaec1d
cram-code/cram_core
with-policy.lisp
Copyright ( c ) 2013 , < > ;;; All rights reserved. ;;; ;;; Redistribution and use in source and binary forms, with or without ;;; modification, are permitted provided that the following conditions are met: ;;; ;;; * Redistributions of source code must retain the above copyright ;;; notice, this list of conditions and the following disclaimer. ;;; * Redistributions in binary form must reproduce the above copyright ;;; notice, this list of conditions and the following disclaimer in the ;;; documentation and/or other materials provided with the distribution. * Neither the name of the Institute for Artificial Intelligence/ ;;; Universität Bremen nor the names of its contributors ;;; may be used to endorse or promote products derived from this software ;;; without specific prior written permission. ;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " ;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR ;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF ;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN ;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ;;; POSSIBILITY OF SUCH DAMAGE. (in-package :cpl) ;;; Example on how to use policies (for function semantics, see the ;;; respective function doc strings): ;;; ;;; (define-policy my-policy (max-num match-num) ;;; "This is an example policy." 
;;; (:init (format t "Initializing policy~%") ;;; t) (: check ( format t " Checking if random number from 0 to ~a equals ~a~% " max - num match - num ) ( let ( ( rnd ( random max - num ) ) ) ( format t " Got number ~a~% " ) ( cond ( ( eql rnd match - num ) ;;; (format t "Match~%") ;;; t) ( t ( sleep 1 ) ) ) ) ) ;;; (:recover (format t "Running recovery mechanisms~%")) ;;; (:clean-up (format t "Running clean-up~%"))) ;;; ;;; (top-level ( with - named - policy ' my - policy ( 10 5 ) ;;; (loop do (format t "Main loop cycle.~%") ( sleep 2 ) ) ) ) (defclass policy () ((name :reader name :initarg :name) (parameters :reader parameters :initarg :parameters) (description :reader description :initarg :description) (init :reader init :initarg :init) (check :reader check :initarg :check) (recover :reader recover :initarg :recover) (clean-up :reader clean-up :initarg :clean-up))) (defvar *policies* nil "List of defined policies") (defparameter *policy-check-consolidation-duration* 0.01) (define-condition policy-condition () ((name :initarg :name :reader policy-name))) (define-condition policy-not-found (policy-condition) () (:report (lambda (condition stream) (format stream "Policy ~a not found.~%" (policy-name condition))))) (define-condition policy-failure (policy-condition) ((parameters :initarg :parameters :reader policy-parameters))) (define-condition policy-init-failed (policy-failure) () (:report (lambda (condition stream) (format stream "Initialization of policy ~a failed.~%Parameters: ~a~%" (policy-name condition) (policy-parameters condition))))) (define-condition policy-check-condition-met (policy-failure) () (:report (lambda (condition stream) (format stream "Policy check condition met for policy ~a.~%Parameters: ~a.~%" (policy-name condition) (policy-parameters condition))))) (defmacro make-policy (name parameters &rest doc-and-properties) "Generates a policy based on the information supplied. 
`name' specifies an internal name for the policy, to be used with `named-policy', or `with-named-policy'. `parameters' is a list of parameter symbols to be used by code inside the policy. Every time, a piece of code inside the policy is executed, these parameters (with assigned values from the `with-policy', or `with-named-policy' call) are passed to the code segments. The `properties' variable holds a list of labelled code segments for execution during certain phases. An example would look like this: > (make-policy policy-1 (param-1 param-2) \"The documentation string for this policy\" (:init (do-initialization-here)) (:check (do-checking-here)) (:recover (do-recovering-here)) (:clean-up (do-cleaning-up-here))) This returns a policy object to be used with `with-policy'. For further information about when each function block is executed, see `with-policy'. The semantics of the `properties' variable are like this: - Forms given via `:init' are executed exactly once, when the policy is initialized for usage. In case this function returns `nil', execution of the `body' code is not started, none of the other policy code blocks are executed, and a failure of type `policy-init-failed' is thrown. - A function given under the label `:check' is executed every time `with-policy' checks if the policy condition is met or not. If this function returns `t', the condition is met, the `:recover' function block is executed, and the execution of both, the policy, and the wrapped body code of `with-policy' is stopped. - Either when the policy (due to a met policy condition), or the wrapped body of `with-policy' code stopped execution of the current code block, the `:clean-up' function block is executed to perform clean-up procedures." 
(multiple-value-bind (properties declarations doc-string) (alexandria:parse-body doc-and-properties :documentation t) (declare (ignore declarations)) (let* ((block-identifiers `(:init :check :recover :clean-up))) `(make-instance 'policy :name ',name :parameters ',parameters :description ,doc-string ,@(loop for identifier in block-identifiers for prop = (rest (find identifier properties :test (lambda (x y) (common-lisp:eql x (first y))))) collect identifier collect (when prop `(lambda ,(append `(policy-symbol-storage) parameters) (flet ((policy-setf (symbol value) (setf (cpl:value policy-symbol-storage) (remove symbol (cpl:value policy-symbol-storage) :test (lambda (x y) (eql x (car y))))) (push (cons symbol value) (cpl:value policy-symbol-storage))) (policy-get (symbol) (let ((asc (assoc symbol (cpl:value policy-symbol-storage)))) (cdr asc)))) ,@prop)))))))) (defmacro define-policy (name parameters &rest properties) "This macro implicitly calls `make-policy', and pushes the generated policy onto the list of defined policies, thus making it accessible to `named-policy' and `with-named-policy' by its name. The usage is the same as for `make-policy': > (define-policy policy-1 (param-1 param-2) \"The documentation string for this policy\" (:init (do-initialization-here)) (:check (do-checking-here)) (:recover (do-recovering-here)) (:clean-up (do-cleaning-up-here)))" `(progn (setf *policies* (remove ',name *policies* :test (lambda (x y) (common-lisp:eql x (name y))))) (let ((new-policy (make-policy ,name ,parameters ,@properties))) (push new-policy *policies*) (defparameter ,name new-policy) new-policy))) (defun named-policy (policy-name) "Returns the policy by the name `policy-name' from the list of defined policies. If the policy by this name is not in the list, the `policy-not-found' condition is signalled. 
Usage: > (named-policy 'policy-name)" (let ((policy (find policy-name *policies* :test (lambda (x y) (common-lisp:eql x (name y)))))) (cond (policy policy) (t (fail 'policy-not-found :name policy-name))))) (defmacro with-named-policy (policy-name policy-parameters &body body) "Performs the same as `with-policy', but accepts a policy name instead of the policy object itself. This calls an implicit `named-policy' to acquire the policy object. Otherwise, it has the same semantics as `with-policy'. Usage: > (with-named-policy 'policy-name (param-value-1 param-value-2) (body-code))" (let ((policy `(named-policy ,policy-name))) `(with-policy ,policy ,policy-parameters ,@body))) (cut:define-hook cram-language::on-with-policy-begin (name parameters)) (cut:define-hook cram-language::on-with-policy-end (id success)) (defmacro with-policy (policy policy-parameters &body body) "Wraps the code given as `body' into a `pursue' construct together with monitoring code supplied by the policy `policy', and given the parameters `policy-parameters'. The `policy-parameters' allow for custom parameterization of policies. First, the policy is initialized via the optional `:init' code block. In case this block returns `nil', execution of the `body' code or other policy-related code blocks is not started. An exception of type `policy-init-failed' is thrown. Otherwise, the `:check' code block of the policy is executed in a loop in parallel to the `body' code. If the `:check' code returns `t', the policy condition is met and the `:recover' code block is executed. The execution of both, the policy, and the `body' code is the stopped, and the `:clean-up' policy code is executed. If the policy condition is never met, `body' finishes and returns normally. 
To clarify the order of code execution here: - Initialization of policy is executed (`:init') - `pursue' code form is started, with up to two forms inside: - The policy `:check' code block (if present) - The `body' code - `:check' is evaluated continuously, in parallel to the normal execution of `body'. If it returns `nil', nothing happens. In any other case (i.e. return value is unequal to `nil'), the execution of the `body' code is interrupted, and `:check' is not performed again anymore. The policy code block given in `:recover' is executed (if present). This means (explicitly) that the `:recover' code is performed *after* the `body' code got interrupted. - If `:check' always returns `nil' until the `body' code execution finishes, `:recover' is never executed. - In either case (with or without `:recover'), the policy `:clean-up' code is performed (if present). In each of the phases, `policy-setf' and `policy-get' are available. They can be used to store variables associated with given symbols for the course of action of the current policy. 
Example: > (define-policy var-test-policy () \"Variable Test Policy\" (:init (policy-setf 'some-var 10) t) (:check (policy-setf 'some-other-var (+ (policy-get 'some-var) 2)) nil) (:clean-up (format t \"Variables: ~a ~a~%\" (policy-get 'some-var) (policy-get 'some-other-var)))) Usage of `with-policy': > (with-policy policy-object (param-value-1 param-value-2) (body-code))" (let ((init `(init ,policy)) (check `(check ,policy)) (clean-up `(clean-up ,policy)) (recover `(recover ,policy)) (name `(name ,policy)) (params `(parameters ,policy))) `(let ((log-id (first (cram-language::on-with-policy-begin ,name ())))) (let ((policy-symbol-storage (make-fluent))) (when ,init (unless (funcall ,init policy-symbol-storage ,@policy-parameters) (fail 'policy-init-failed :name ,name :parameters ',policy-parameters))) (let ((flag-do-recovery nil)) (unwind-protect (pursue (when ,check (loop while (not (funcall ,check policy-symbol-storage ,@policy-parameters)) do (sleep* *policy-check-consolidation-duration*)) (setf flag-do-recovery t)) (progn ,@body)) (when (and ,recover flag-do-recovery) (funcall ,recover policy-symbol-storage ,@policy-parameters)) (when ,clean-up (funcall ,clean-up policy-symbol-storage ,@policy-parameters)) (unwind-protect (when flag-do-recovery (cpl:fail 'policy-check-condition-met :name ,name :parameters ',policy-parameters)) (cram-language::on-with-policy-end log-id (not flag-do-recovery))))))))) (defmacro with-policies (policies-and-parameters-list &body body) "Allows for running a given `body' code segment wrapped in a list of policies (together with individual parameters for each of them). The `policies-and-parameters-list' parameter describes a list of policy/parameter-list pairs. 
The usage is as follows: > (with-policies ((my-policy-object (3 1)) (my-policy-object (100 4)) (my-other-policy-object (\"Test\"))) (body-code))" (cond (policies-and-parameters-list (let* ((current (first policies-and-parameters-list)) (the-rest (rest policies-and-parameters-list)) (current-policy (first current)) (current-parameters (second current))) (cond (the-rest `(with-policy ,current-policy ,current-parameters (with-policies ,the-rest ,@body))) (t `(with-policy ,current-policy ,current-parameters ,@body))))) (t `(progn ,@body)))) (defmacro with-named-policies (policies-and-parameters-list &body body) "The semantics of `with-named-policies' are the same as for `with-policies', except that instead of policy-objects, policy names are used: > (with-named-policies (('my-policy (3 1)) ('my-policy (100 4)) ('my-other-policy (\"Test\"))) (body-code))" (let* ((current (first policies-and-parameters-list)) (the-rest (rest policies-and-parameters-list)) (current-policy (first current)) (current-parameters (second current))) (cond (the-rest `(with-named-policy ,current-policy ,current-parameters (with-named-policies ,the-rest ,@body))) (t `(with-named-policy ,current-policy ,current-parameters ,@body)))))
null
https://raw.githubusercontent.com/cram-code/cram_core/984046abe2ec9e25b63e52007ed3b857c3d9a13c/cram_language/src/with-policy.lisp
lisp
All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Universität Bremen nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Example on how to use policies (for function semantics, see the respective function doc strings): (define-policy my-policy (max-num match-num) "This is an example policy." (:init (format t "Initializing policy~%") t) (format t "Match~%") t) (:recover (format t "Running recovery mechanisms~%")) (:clean-up (format t "Running clean-up~%"))) (top-level (loop do (format t "Main loop cycle.~%")
Copyright ( c ) 2013 , < > * Neither the name of the Institute for Artificial Intelligence/ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN (in-package :cpl) (: check ( format t " Checking if random number from 0 to ~a equals ~a~% " max - num match - num ) ( let ( ( rnd ( random max - num ) ) ) ( format t " Got number ~a~% " ) ( cond ( ( eql rnd match - num ) ( t ( sleep 1 ) ) ) ) ) ( with - named - policy ' my - policy ( 10 5 ) ( sleep 2 ) ) ) ) (defclass policy () ((name :reader name :initarg :name) (parameters :reader parameters :initarg :parameters) (description :reader description :initarg :description) (init :reader init :initarg :init) (check :reader check :initarg :check) (recover :reader recover :initarg :recover) (clean-up :reader clean-up :initarg :clean-up))) (defvar *policies* nil "List of defined policies") (defparameter *policy-check-consolidation-duration* 0.01) (define-condition policy-condition () ((name :initarg :name :reader policy-name))) (define-condition policy-not-found (policy-condition) () (:report (lambda (condition stream) (format stream "Policy ~a not found.~%" (policy-name condition))))) (define-condition policy-failure (policy-condition) ((parameters :initarg :parameters :reader policy-parameters))) (define-condition policy-init-failed (policy-failure) () (:report (lambda (condition stream) (format stream "Initialization of policy ~a failed.~%Parameters: ~a~%" (policy-name condition) (policy-parameters condition))))) (define-condition policy-check-condition-met (policy-failure) () (:report (lambda (condition stream) (format stream "Policy check condition met for policy ~a.~%Parameters: ~a.~%" (policy-name condition) 
(policy-parameters condition))))) (defmacro make-policy (name parameters &rest doc-and-properties) "Generates a policy based on the information supplied. `name' specifies an internal name for the policy, to be used with `named-policy', or `with-named-policy'. `parameters' is a list of parameter symbols to be used by code inside the policy. Every time, a piece of code inside the policy is executed, these parameters (with assigned values from the `with-policy', or `with-named-policy' call) are passed to the code segments. The `properties' variable holds a list of labelled code segments for execution during certain phases. An example would look like this: > (make-policy policy-1 (param-1 param-2) \"The documentation string for this policy\" (:init (do-initialization-here)) (:check (do-checking-here)) (:recover (do-recovering-here)) (:clean-up (do-cleaning-up-here))) This returns a policy object to be used with `with-policy'. For further information about when each function block is executed, see `with-policy'. The semantics of the `properties' variable are like this: - Forms given via `:init' are executed exactly once, when the policy is initialized for usage. In case this function returns `nil', execution of the `body' code is not started, none of the other policy code blocks are executed, and a failure of type `policy-init-failed' is thrown. - A function given under the label `:check' is executed every time `with-policy' checks if the policy condition is met or not. If this function returns `t', the condition is met, the `:recover' function block is executed, and the execution of both, the policy, and the wrapped body code of `with-policy' is stopped. - Either when the policy (due to a met policy condition), or the wrapped body of `with-policy' code stopped execution of the current code block, the `:clean-up' function block is executed to perform clean-up procedures." 
(multiple-value-bind (properties declarations doc-string) (alexandria:parse-body doc-and-properties :documentation t) (declare (ignore declarations)) (let* ((block-identifiers `(:init :check :recover :clean-up))) `(make-instance 'policy :name ',name :parameters ',parameters :description ,doc-string ,@(loop for identifier in block-identifiers for prop = (rest (find identifier properties :test (lambda (x y) (common-lisp:eql x (first y))))) collect identifier collect (when prop `(lambda ,(append `(policy-symbol-storage) parameters) (flet ((policy-setf (symbol value) (setf (cpl:value policy-symbol-storage) (remove symbol (cpl:value policy-symbol-storage) :test (lambda (x y) (eql x (car y))))) (push (cons symbol value) (cpl:value policy-symbol-storage))) (policy-get (symbol) (let ((asc (assoc symbol (cpl:value policy-symbol-storage)))) (cdr asc)))) ,@prop)))))))) (defmacro define-policy (name parameters &rest properties) "This macro implicitly calls `make-policy', and pushes the generated policy onto the list of defined policies, thus making it accessible to `named-policy' and `with-named-policy' by its name. The usage is the same as for `make-policy': > (define-policy policy-1 (param-1 param-2) \"The documentation string for this policy\" (:init (do-initialization-here)) (:check (do-checking-here)) (:recover (do-recovering-here)) (:clean-up (do-cleaning-up-here)))" `(progn (setf *policies* (remove ',name *policies* :test (lambda (x y) (common-lisp:eql x (name y))))) (let ((new-policy (make-policy ,name ,parameters ,@properties))) (push new-policy *policies*) (defparameter ,name new-policy) new-policy))) (defun named-policy (policy-name) "Returns the policy by the name `policy-name' from the list of defined policies. If the policy by this name is not in the list, the `policy-not-found' condition is signalled. 
Usage: > (named-policy 'policy-name)" (let ((policy (find policy-name *policies* :test (lambda (x y) (common-lisp:eql x (name y)))))) (cond (policy policy) (t (fail 'policy-not-found :name policy-name))))) (defmacro with-named-policy (policy-name policy-parameters &body body) "Performs the same as `with-policy', but accepts a policy name instead of the policy object itself. This calls an implicit `named-policy' to acquire the policy object. Otherwise, it has the same semantics as `with-policy'. Usage: > (with-named-policy 'policy-name (param-value-1 param-value-2) (body-code))" (let ((policy `(named-policy ,policy-name))) `(with-policy ,policy ,policy-parameters ,@body))) (cut:define-hook cram-language::on-with-policy-begin (name parameters)) (cut:define-hook cram-language::on-with-policy-end (id success)) (defmacro with-policy (policy policy-parameters &body body) "Wraps the code given as `body' into a `pursue' construct together with monitoring code supplied by the policy `policy', and given the parameters `policy-parameters'. The `policy-parameters' allow for custom parameterization of policies. First, the policy is initialized via the optional `:init' code block. In case this block returns `nil', execution of the `body' code or other policy-related code blocks is not started. An exception of type `policy-init-failed' is thrown. Otherwise, the `:check' code block of the policy is executed in a loop in parallel to the `body' code. If the `:check' code returns `t', the policy condition is met and the `:recover' code block is executed. The execution of both, the policy, and the `body' code is the stopped, and the `:clean-up' policy code is executed. If the policy condition is never met, `body' finishes and returns normally. 
To clarify the order of code execution here: - Initialization of policy is executed (`:init') - `pursue' code form is started, with up to two forms inside: - The policy `:check' code block (if present) - The `body' code - `:check' is evaluated continuously, in parallel to the normal execution of `body'. If it returns `nil', nothing happens. In any other case (i.e. return value is unequal to `nil'), the execution of the `body' code is interrupted, and `:check' is not performed again anymore. The policy code block given in `:recover' is executed (if present). This means (explicitly) that the `:recover' code is performed *after* the `body' code got interrupted. - If `:check' always returns `nil' until the `body' code execution finishes, `:recover' is never executed. - In either case (with or without `:recover'), the policy `:clean-up' code is performed (if present). In each of the phases, `policy-setf' and `policy-get' are available. They can be used to store variables associated with given symbols for the course of action of the current policy. 
Example: > (define-policy var-test-policy () \"Variable Test Policy\" (:init (policy-setf 'some-var 10) t) (:check (policy-setf 'some-other-var (+ (policy-get 'some-var) 2)) nil) (:clean-up (format t \"Variables: ~a ~a~%\" (policy-get 'some-var) (policy-get 'some-other-var)))) Usage of `with-policy': > (with-policy policy-object (param-value-1 param-value-2) (body-code))" (let ((init `(init ,policy)) (check `(check ,policy)) (clean-up `(clean-up ,policy)) (recover `(recover ,policy)) (name `(name ,policy)) (params `(parameters ,policy))) `(let ((log-id (first (cram-language::on-with-policy-begin ,name ())))) (let ((policy-symbol-storage (make-fluent))) (when ,init (unless (funcall ,init policy-symbol-storage ,@policy-parameters) (fail 'policy-init-failed :name ,name :parameters ',policy-parameters))) (let ((flag-do-recovery nil)) (unwind-protect (pursue (when ,check (loop while (not (funcall ,check policy-symbol-storage ,@policy-parameters)) do (sleep* *policy-check-consolidation-duration*)) (setf flag-do-recovery t)) (progn ,@body)) (when (and ,recover flag-do-recovery) (funcall ,recover policy-symbol-storage ,@policy-parameters)) (when ,clean-up (funcall ,clean-up policy-symbol-storage ,@policy-parameters)) (unwind-protect (when flag-do-recovery (cpl:fail 'policy-check-condition-met :name ,name :parameters ',policy-parameters)) (cram-language::on-with-policy-end log-id (not flag-do-recovery))))))))) (defmacro with-policies (policies-and-parameters-list &body body) "Allows for running a given `body' code segment wrapped in a list of policies (together with individual parameters for each of them). The `policies-and-parameters-list' parameter describes a list of policy/parameter-list pairs. 
The usage is as follows: > (with-policies ((my-policy-object (3 1)) (my-policy-object (100 4)) (my-other-policy-object (\"Test\"))) (body-code))" (cond (policies-and-parameters-list (let* ((current (first policies-and-parameters-list)) (the-rest (rest policies-and-parameters-list)) (current-policy (first current)) (current-parameters (second current))) (cond (the-rest `(with-policy ,current-policy ,current-parameters (with-policies ,the-rest ,@body))) (t `(with-policy ,current-policy ,current-parameters ,@body))))) (t `(progn ,@body)))) (defmacro with-named-policies (policies-and-parameters-list &body body) "The semantics of `with-named-policies' are the same as for `with-policies', except that instead of policy-objects, policy names are used: > (with-named-policies (('my-policy (3 1)) ('my-policy (100 4)) ('my-other-policy (\"Test\"))) (body-code))" (let* ((current (first policies-and-parameters-list)) (the-rest (rest policies-and-parameters-list)) (current-policy (first current)) (current-parameters (second current))) (cond (the-rest `(with-named-policy ,current-policy ,current-parameters (with-named-policies ,the-rest ,@body))) (t `(with-named-policy ,current-policy ,current-parameters ,@body)))))
d90fd59287ef33592d5545ebfea6a18ff4437402cace4ae30d28c268c6280081
mikpe/pdp10-tools
sim_ea_tests.erl
-*- erlang - indent - level : 2 -*- %%% simulator for pdp10 - elf Copyright ( C ) 2020 %%% This file is part of pdp10 - tools . %%% pdp10 - tools is free software : you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or %%% (at your option) any later version. %%% pdp10 - tools is distributed in the hope that it will be useful , %%% but WITHOUT ANY WARRANTY; without even the implied warranty of %%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the %%% GNU General Public License for more details. %%% You should have received a copy of the GNU General Public License along with pdp10 - tools . If not , see < / > . %%% %%%============================================================================= %%% %%% Test cases for PDP10 Effective Address Calculation, taken from " Extended Addressing " , Rev. 5 , Jul. 1983 , KC10 / Project Jupiter docs . -module(sim_ea_tests). -include("../src/sim_core.hrl"). -include_lib("eunit/include/eunit.hrl"). -define(LOW18(X), ((X) band ((1 bsl 18) - 1))). -define(INSN(OP, AC, I, X, Y), (((OP) bsl (35 - 8)) bor ((AC) bsl (35 - 12)) bor ((I) bsl (35 - 13)) bor ((X) bsl (35 - 17)) bor ?LOW18(Y))). -define(COMMA2(LEFT, RIGHT), ((?LOW18(LEFT) bsl 18) bor ?LOW18(RIGHT))). % LEFT,,RIGHT in MACRO-10 -define(OP_INVALID, 0). -define(OP_MOVE, 8#200). -define(OP_MOVEI, 8#201). no_indexing_5_1_1_test() -> Prog1 = 1,,100/ MOVE 1,200 ], expect(Prog1, [], {1, 8#100}, #ea{section = 1, offset = 8#200, islocal = true}), Prog2 = 1,,100/ MOVE 1,@150 , {1, 8#150, ?COMMA2(8#400000, 8#200)} % 1,,150/ 400000,,200 ; IFIW ], expect(Prog2, [], {1, 8#100}, #ea{section = 1, offset = 8#200, islocal = true}), Prog3 = 1,,100/ MOVE 1,@150 , {1, 8#150, ?COMMA2(8#1, 8#200)} % 1,,150/ 1,,200 ; EFIW ], expect(Prog3, [], {1, 8#100}, #ea{section = 1, offset = 8#200, islocal = false}). 
ifiw_with_local_index_5_1_2_test() -> Prog = 1,,100/ MOVE 1,150 1,,101/ MOVE 2,@151 , {1, 8#150, ?COMMA2(-1, 8#10)} % 1,,150/ -1,,10 ; local index 1,,151/ 400001,,200 ; IFIW ], expect(Prog, [], {1, 8#101}, #ea{section = 1, offset = 8#210, islocal = true}). ifiw_with_global_index_5_1_3_test() -> Prog = 1,,100/ MOVE 1,150 1,,101/ MOVE 2,-2(1 ) 1,,150/ [ 2,,10 ] ; global index ], expect(Prog, [], {1, 8#101}, #ea{section = 2, offset = 6, islocal = false}). efiw_with_global_index_5_1_4_test() -> Prog = 1,,100/ MOVE 1,150 1,,101/ MOVE 2,@151 1,,150/ 2,,10 1,,151/ 010002,,200 ; EFIW ], expect(Prog, [], {1, 8#101}, #ea{section = 4, offset = 8#210, islocal = false}). simple_ea_calc_examples_5_3_test() -> Prog1 = 0,,200/ MOVE 1,100 ], expect(Prog1, [], {0, 8#200}, #ea{section = 0, offset = 8#100, islocal = true}), Prog2 = [ {1, 8#200, ?INSN(?OP_INVALID, 1, 1, 0, 8#300)} % 1,,200/ MOVE1,@300 1,,300/ 400000,,100 ], expect(Prog2, [], {1, 8#200}, #ea{section = 1, offset = 8#100, islocal = true}), Prog3 = 1,,100/ MOVE 1,@300 , {1, 8#300, ?COMMA2(2, 8#200)} % 1,,300/ 2,,200 ], expect(Prog3, [], {1, 8#100}, #ea{section = 2, offset = 8#200, islocal = false}). ac_references_6_1_test() -> Prog1 = 2,,100/ MOVE 1,@150 2,,150/ 400000,,5 ], expect(Prog1, [], {2, 8#100}, #ea{section = 2, offset = 5, islocal = true}), Prog2 = 2,,100/ MOVE 1,@150 2,,150/ 2,,5 ], expect(Prog2, [], {2, 8#100}, #ea{section = 2, offset = 5, islocal = false}), Prog3 = 2,,100/ MOVE 1,@150 2,,150/ 1,,5 ], expect(Prog3, [], {2, 8#100}, #ea{section = 1, offset = 5, islocal = false}). incrementing_ea_6_2_test() -> Prog1 = 2,,100/ DMOVE 1,@150 2,,150/ 400000,,777777 ], expect(Prog1, [], {2, 8#100}, #ea{section = 2, offset = 8#777777, islocal = true}), Prog2 = 2,,100/ DMOVE 1,@150 , {2, 8#150, ?COMMA2(2, 8#777777)} % 2,,150/ 2,,777777 ], expect(Prog2, [], {2, 8#100}, #ea{section = 2, offset = 8#777777, islocal = false}). 
multi_section_ea_calcs_7_0_test() -> Prog1 = 3,,100/ MOVE 1,@150 3,,150/ 200002,,100 2,,100/ 3,,200 ], expect(Prog1, [], {3, 8#100}, #ea{section = 3, offset = 8#200, islocal = false}), Prog2 = 3,,100/ MOVE 1,@150 3,,150/ 200002,,100 2,,100/ 400000,,200 ], expect(Prog2, [], {3, 8#100}, #ea{section = 2, offset = 8#200, islocal = true}), Prog3 = 3,,077/ MOVEI 3,1 3,,100/ MOVE 1,@150 3,,150/ 200000,,100 0,,100/ 3,,200 ], expect(Prog3, [], {3, 8#100}, #ea{section = 0, offset = 8#201, islocal = true}). xmovei_and_xhlli_8_10_test() -> Prog1 = 2,,100/ XMOVEI 1,6 ], expect(Prog1, [], {2, 8#100}, #ea{section = 2, offset = 6, islocal = true}), The second example in 8.10 is broken , in that the EA - calculcation follows an indirect EFIW into section zero expecting to find an IFIW at 0,,6 . Compare this with the third example in 7.0 which also defines the IFIW that the indirect EFIW points to . Prog2 = 2,,100/ XMOVEI 1,@150 2,,150/ 200000,,100 ; indirect EFIW , {0, 8#100, ?COMMA2(0, 6)} % 0,,100/ 0,,6 ; IFIW ], expect(Prog2, [], {2, 8#100}, #ea{section = 0, offset = 6, islocal = true}). Remaining examples from the " Extended Addressing " document relate to instructions using the EA not the initial EA calculation itself , and %% they will be added as those instructions are implemented. Common code to run short sequences and check final EA = = = = = = = = = = = = = = = = = = = = = = = expect(Prog, ACs, ExpectedPC, ExpectedEA) -> {Core, Mem} = init(Prog, ACs), {_Core, _Mem, {error, {sim_core, {dispatch, PC, _IR, ActualEA}}}} = sim_core:run(Core, Mem), ActualPC = {PC bsr 18, PC band ((1 bsl 18) - 1)}, ?assertEqual(ExpectedPC, ActualPC), ?assertEqual(ExpectedEA, ActualEA), sim_mem:delete(Mem). init(Prog, ACs) -> {PCSection, PCOffset} = prog_pc(Prog), Mem = init_mem(Prog), Core = init_core(PCSection, PCOffset, ACs), {Core, Mem}. prog_pc([{Section, Offset, _Word} | _Rest]) -> {Section, Offset}. init_mem(Prog) -> init_mem(Prog, sim_mem:new()). 
init_mem([], Mem) -> Mem; init_mem([{Section, Offset, Word} | Rest], Mem) -> init_word(Section, Offset, Word, Mem), init_mem(Rest, Mem). init_word(Section, Offset, Word, Mem) -> Address = (Section bsl 18) bor Offset, PFN = Address bsr 9, case sim_mem:mquery(Mem, PFN) of false -> sim_mem:mmap(Mem, PFN, 4+2, core); {_Prot, _What} -> ok end, ok = sim_mem:write_word(Mem, Address, Word). init_core(PCSection, PCOffset, ACs) -> Flags = (1 bsl ?PDP10_PF_USER), #core{ pc_section = PCSection , pc_offset = PCOffset , acs = init_acs(ACs, list_to_tuple(lists:duplicate(16, 0))) , flags = Flags }. init_acs([], ACS) -> ACS; init_acs([{AC, Val} | Rest], ACS) -> init_acs(Rest, setelement(AC + 1, ACS, Val)).
null
https://raw.githubusercontent.com/mikpe/pdp10-tools/99216b63317fe5b5ac18f1a0d3c81b464f8b8f40/erlang/apps/sim/test/sim_ea_tests.erl
erlang
(at your option) any later version. but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. ============================================================================= Test cases for PDP10 Effective Address Calculation, taken from LEFT,,RIGHT in MACRO-10 1,,150/ 400000,,200 ; IFIW 1,,150/ 1,,200 ; EFIW 1,,150/ -1,,10 ; local index 1,,200/ MOVE1,@300 1,,300/ 2,,200 2,,150/ 2,,777777 0,,100/ 0,,6 ; IFIW they will be added as those instructions are implemented.
-*- erlang - indent - level : 2 -*- simulator for pdp10 - elf Copyright ( C ) 2020 This file is part of pdp10 - tools . pdp10 - tools is free software : you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or pdp10 - tools is distributed in the hope that it will be useful , You should have received a copy of the GNU General Public License along with pdp10 - tools . If not , see < / > . " Extended Addressing " , Rev. 5 , Jul. 1983 , KC10 / Project Jupiter docs . -module(sim_ea_tests). -include("../src/sim_core.hrl"). -include_lib("eunit/include/eunit.hrl"). -define(LOW18(X), ((X) band ((1 bsl 18) - 1))). -define(INSN(OP, AC, I, X, Y), (((OP) bsl (35 - 8)) bor ((AC) bsl (35 - 12)) bor ((I) bsl (35 - 13)) bor ((X) bsl (35 - 17)) bor ?LOW18(Y))). -define(OP_INVALID, 0). -define(OP_MOVE, 8#200). -define(OP_MOVEI, 8#201). no_indexing_5_1_1_test() -> Prog1 = 1,,100/ MOVE 1,200 ], expect(Prog1, [], {1, 8#100}, #ea{section = 1, offset = 8#200, islocal = true}), Prog2 = 1,,100/ MOVE 1,@150 ], expect(Prog2, [], {1, 8#100}, #ea{section = 1, offset = 8#200, islocal = true}), Prog3 = 1,,100/ MOVE 1,@150 ], expect(Prog3, [], {1, 8#100}, #ea{section = 1, offset = 8#200, islocal = false}). ifiw_with_local_index_5_1_2_test() -> Prog = 1,,100/ MOVE 1,150 1,,101/ MOVE 2,@151 1,,151/ 400001,,200 ; IFIW ], expect(Prog, [], {1, 8#101}, #ea{section = 1, offset = 8#210, islocal = true}). ifiw_with_global_index_5_1_3_test() -> Prog = 1,,100/ MOVE 1,150 1,,101/ MOVE 2,-2(1 ) 1,,150/ [ 2,,10 ] ; global index ], expect(Prog, [], {1, 8#101}, #ea{section = 2, offset = 6, islocal = false}). efiw_with_global_index_5_1_4_test() -> Prog = 1,,100/ MOVE 1,150 1,,101/ MOVE 2,@151 1,,150/ 2,,10 1,,151/ 010002,,200 ; EFIW ], expect(Prog, [], {1, 8#101}, #ea{section = 4, offset = 8#210, islocal = false}). 
simple_ea_calc_examples_5_3_test() -> Prog1 = 0,,200/ MOVE 1,100 ], expect(Prog1, [], {0, 8#200}, #ea{section = 0, offset = 8#100, islocal = true}), Prog2 = 1,,300/ 400000,,100 ], expect(Prog2, [], {1, 8#200}, #ea{section = 1, offset = 8#100, islocal = true}), Prog3 = 1,,100/ MOVE 1,@300 ], expect(Prog3, [], {1, 8#100}, #ea{section = 2, offset = 8#200, islocal = false}). ac_references_6_1_test() -> Prog1 = 2,,100/ MOVE 1,@150 2,,150/ 400000,,5 ], expect(Prog1, [], {2, 8#100}, #ea{section = 2, offset = 5, islocal = true}), Prog2 = 2,,100/ MOVE 1,@150 2,,150/ 2,,5 ], expect(Prog2, [], {2, 8#100}, #ea{section = 2, offset = 5, islocal = false}), Prog3 = 2,,100/ MOVE 1,@150 2,,150/ 1,,5 ], expect(Prog3, [], {2, 8#100}, #ea{section = 1, offset = 5, islocal = false}). incrementing_ea_6_2_test() -> Prog1 = 2,,100/ DMOVE 1,@150 2,,150/ 400000,,777777 ], expect(Prog1, [], {2, 8#100}, #ea{section = 2, offset = 8#777777, islocal = true}), Prog2 = 2,,100/ DMOVE 1,@150 ], expect(Prog2, [], {2, 8#100}, #ea{section = 2, offset = 8#777777, islocal = false}). multi_section_ea_calcs_7_0_test() -> Prog1 = 3,,100/ MOVE 1,@150 3,,150/ 200002,,100 2,,100/ 3,,200 ], expect(Prog1, [], {3, 8#100}, #ea{section = 3, offset = 8#200, islocal = false}), Prog2 = 3,,100/ MOVE 1,@150 3,,150/ 200002,,100 2,,100/ 400000,,200 ], expect(Prog2, [], {3, 8#100}, #ea{section = 2, offset = 8#200, islocal = true}), Prog3 = 3,,077/ MOVEI 3,1 3,,100/ MOVE 1,@150 3,,150/ 200000,,100 0,,100/ 3,,200 ], expect(Prog3, [], {3, 8#100}, #ea{section = 0, offset = 8#201, islocal = true}). xmovei_and_xhlli_8_10_test() -> Prog1 = 2,,100/ XMOVEI 1,6 ], expect(Prog1, [], {2, 8#100}, #ea{section = 2, offset = 6, islocal = true}), The second example in 8.10 is broken , in that the EA - calculcation follows an indirect EFIW into section zero expecting to find an IFIW at 0,,6 . Compare this with the third example in 7.0 which also defines the IFIW that the indirect EFIW points to . 
Prog2 = 2,,100/ XMOVEI 1,@150 2,,150/ 200000,,100 ; indirect EFIW ], expect(Prog2, [], {2, 8#100}, #ea{section = 0, offset = 6, islocal = true}). Remaining examples from the " Extended Addressing " document relate to instructions using the EA not the initial EA calculation itself , and Common code to run short sequences and check final EA = = = = = = = = = = = = = = = = = = = = = = = expect(Prog, ACs, ExpectedPC, ExpectedEA) -> {Core, Mem} = init(Prog, ACs), {_Core, _Mem, {error, {sim_core, {dispatch, PC, _IR, ActualEA}}}} = sim_core:run(Core, Mem), ActualPC = {PC bsr 18, PC band ((1 bsl 18) - 1)}, ?assertEqual(ExpectedPC, ActualPC), ?assertEqual(ExpectedEA, ActualEA), sim_mem:delete(Mem). init(Prog, ACs) -> {PCSection, PCOffset} = prog_pc(Prog), Mem = init_mem(Prog), Core = init_core(PCSection, PCOffset, ACs), {Core, Mem}. prog_pc([{Section, Offset, _Word} | _Rest]) -> {Section, Offset}. init_mem(Prog) -> init_mem(Prog, sim_mem:new()). init_mem([], Mem) -> Mem; init_mem([{Section, Offset, Word} | Rest], Mem) -> init_word(Section, Offset, Word, Mem), init_mem(Rest, Mem). init_word(Section, Offset, Word, Mem) -> Address = (Section bsl 18) bor Offset, PFN = Address bsr 9, case sim_mem:mquery(Mem, PFN) of false -> sim_mem:mmap(Mem, PFN, 4+2, core); {_Prot, _What} -> ok end, ok = sim_mem:write_word(Mem, Address, Word). init_core(PCSection, PCOffset, ACs) -> Flags = (1 bsl ?PDP10_PF_USER), #core{ pc_section = PCSection , pc_offset = PCOffset , acs = init_acs(ACs, list_to_tuple(lists:duplicate(16, 0))) , flags = Flags }. init_acs([], ACS) -> ACS; init_acs([{AC, Val} | Rest], ACS) -> init_acs(Rest, setelement(AC + 1, ACS, Val)).
dbef6488f4eeb82395b2909bf914f6d267d8f58bda005605f7cd0d73f3876e14
samply/blaze
util.clj
(ns blaze.interaction.util (:require [blaze.anomaly :as ba] [blaze.db.api :as d] [blaze.handler.fhir.util :as fhir-util] [blaze.luid :as luid] [clojure.string :as str] [cuerdas.core :as c-str])) (defn etag->t [etag] (let [[_ t] (re-find #"W/\"(\d+)\"" etag)] (some-> t parse-long))) (defn- remove-query-param? [[k]] (and (str/starts-with? k "_") (not (#{"_id" "_list" "_profile" "_lastUpdated"} k)) (not (str/starts-with? k "_has")))) (defn- query-param->clauses "Takes a query param with possible multiple values and returns possible multiple clauses one for each query param." [[k v]] (map #(into [k] (map str/trim) (str/split % #",")) (fhir-util/to-seq v))) (def ^:private query-params->clauses-xf (comp (remove remove-query-param?) (mapcat query-param->clauses))) (defn- sort-clauses [sort] (let [[param & params] (str/split sort #",") param (str/trim param)] (if params (ba/unsupported "More than one sort parameter is unsupported.") [[:sort (c-str/ltrim param "-") (if (str/starts-with? param "-") :desc :asc)]]))) (defn clauses [{:strs [_sort] :as query-params}] (into (if (str/blank? _sort) [] (sort-clauses _sort)) query-params->clauses-xf query-params)) (defn search-clauses [query-params] (into [] query-params->clauses-xf query-params)) (defn luid [{:keys [clock rng-fn]}] (luid/luid clock (rng-fn))) (defn successive-luids [{:keys [clock rng-fn]}] (luid/successive-luids clock (rng-fn))) (defn t [db] (or (d/as-of-t db) (d/basis-t db))) (defn- prep-if-none-match [if-none-match] (if (= "*" if-none-match) :any (etag->t if-none-match))) (defn put-tx-op [resource if-match if-none-match] (let [if-match (some-> if-match etag->t) if-none-match (some-> if-none-match prep-if-none-match)] (cond if-match [:put resource [:if-match if-match]] if-none-match [:put resource [:if-none-match if-none-match]] :else [:put resource]))) (defn subsetted? [{:keys [system code]}] (and (= #fhir/uri"-ObservationValue" system) (= #fhir/code"SUBSETTED" code)))
null
https://raw.githubusercontent.com/samply/blaze/948eee38021467fa343c522a644a7fd4b24b6467/modules/interaction/src/blaze/interaction/util.clj
clojure
(ns blaze.interaction.util (:require [blaze.anomaly :as ba] [blaze.db.api :as d] [blaze.handler.fhir.util :as fhir-util] [blaze.luid :as luid] [clojure.string :as str] [cuerdas.core :as c-str])) (defn etag->t [etag] (let [[_ t] (re-find #"W/\"(\d+)\"" etag)] (some-> t parse-long))) (defn- remove-query-param? [[k]] (and (str/starts-with? k "_") (not (#{"_id" "_list" "_profile" "_lastUpdated"} k)) (not (str/starts-with? k "_has")))) (defn- query-param->clauses "Takes a query param with possible multiple values and returns possible multiple clauses one for each query param." [[k v]] (map #(into [k] (map str/trim) (str/split % #",")) (fhir-util/to-seq v))) (def ^:private query-params->clauses-xf (comp (remove remove-query-param?) (mapcat query-param->clauses))) (defn- sort-clauses [sort] (let [[param & params] (str/split sort #",") param (str/trim param)] (if params (ba/unsupported "More than one sort parameter is unsupported.") [[:sort (c-str/ltrim param "-") (if (str/starts-with? param "-") :desc :asc)]]))) (defn clauses [{:strs [_sort] :as query-params}] (into (if (str/blank? _sort) [] (sort-clauses _sort)) query-params->clauses-xf query-params)) (defn search-clauses [query-params] (into [] query-params->clauses-xf query-params)) (defn luid [{:keys [clock rng-fn]}] (luid/luid clock (rng-fn))) (defn successive-luids [{:keys [clock rng-fn]}] (luid/successive-luids clock (rng-fn))) (defn t [db] (or (d/as-of-t db) (d/basis-t db))) (defn- prep-if-none-match [if-none-match] (if (= "*" if-none-match) :any (etag->t if-none-match))) (defn put-tx-op [resource if-match if-none-match] (let [if-match (some-> if-match etag->t) if-none-match (some-> if-none-match prep-if-none-match)] (cond if-match [:put resource [:if-match if-match]] if-none-match [:put resource [:if-none-match if-none-match]] :else [:put resource]))) (defn subsetted? [{:keys [system code]}] (and (= #fhir/uri"-ObservationValue" system) (= #fhir/code"SUBSETTED" code)))
b61f08e87d143d97b523176efa43c49d7163f5d874938381f6e37ddcc51024c3
SimonJF/monitored-session-erlang
ssa_gen_server.erl
-module(ssa_gen_server). -behaviour(gen_server2). -compile(export_all). -record(actor_state, {actor_type_name, monitor_pid, user_state}). % This is the behaviour for basic session actors. The behaviour requires two callbacks : % * ssactor_init, which returns the initial user state % * ssactor_handle_msg, which handles incoming messages. % % The session actor itself has certain bits of internal state, which we % leverage in order to perform monitoring and message routing. This is: % * Conversation process PID: We use this in order to relay messages to % the conversation process, which performs Role |-> Endpoint routing. % % * Roles the actor is playing in the current conversation % % * The currently-active role in the actor % % * And finally, a Role |-> Monitor mapping. % ssactor_init returns some state given some input args % ssactor_handle_message handles a message % ssactor_join is called when the actor is invited to participate in a % conversation. The user can decide to accept or decline this invitation. % ssactor_conversation_established is called when all actors have been invited. % ssactor_conversation_error is called when there was an error establishing the % conversation. behaviour_info(callbacks) -> [{ssactor_init,2}, {ssactor_join,4}, {ssactor_handle_message, 7}, {ssactor_become, 5}, {ssactor_conversation_established, 5}, {ssactor_conversation_error, 4}, {ssactor_conversation_ended, 3}, Name , Result , State , ConvKey Name , FailureName , State , ConvKey {ssactor_subsession_setup_failed, 4}, {handle_call, 3}, {handle_cast, 3}, {handle_info, 3}, {terminate, 2} ]; behaviour_info(_Other) -> undefined. update_user_state(SystemState, NewUserState) -> SystemState#actor_state{user_state = NewUserState}. log_msg(Func, Format, Args, State) -> InfoStr = "SSACTOR: Actor ~p, actor PID ~p, monitor PID ~p.", InfoArgs = [State#actor_state.actor_type_name, self(), State#actor_state.monitor_pid], Func(Format ++ "~n" ++ InfoStr, Args ++ InfoArgs). 
actor_warn(Format, Args, State) -> log_msg(fun error_logger:warning_msg/2, Format, Args, State). actor_error(Format, Args, State) -> log_msg(fun error_logger:error_msg/2, Format, Args, State). actor_info(Format, Args, State) -> log_msg(fun error_logger:info_msg/2, Format, Args, State). % gen_server2 callbacks init([Module, UserArgs, MonitorPID]) -> actor_registry:register_actor(Module, MonitorPID), UserState = Module:ssactor_init(UserArgs, MonitorPID), {ok, #actor_state{actor_type_name=Module, monitor_pid=MonitorPID, user_state=UserState}}. % Delegate calls, casts (other than ssa internal messages), info messages % and termination messages to the actor. delegate_async(Fun, Msg, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, UserResult = case Fun of handle_cast -> Module:handle_cast(Msg, UserState); handle_info -> Module:handle_info(Msg, UserState) end, % Propagate user state changes without losing system state case UserResult of {noreply, NewUserState} -> NewState = update_user_state(State, NewUserState), {noreply, NewState}; {noreply, NewUserState, Arg} -> NewState = update_user_state(State, NewUserState), {noreply, NewState, Arg}; {stop, Reason, NewUserState} -> NewState = update_user_state(State, NewUserState), {stop, Reason, NewState} end. 
% Same for synchronous messages, but there are a couple more things we need % to handle, in particular re: replies handle_call(ssa_get_monitor_id, _From, State) -> {reply, State#actor_state.monitor_pid, State}; handle_call({ssa_join_conversation, ProtocolName, RoleName, ConversationID}, _From, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, UserResult = Module:ssactor_join(ProtocolName, RoleName, ConversationID, UserState), case UserResult of {accept, NewUserState} -> NewState = update_user_state(State, NewUserState), {reply, accept, NewState}; {decline, NewUserState} -> NewState = update_user_state(State, NewUserState), {reply, decline, NewState} end; handle_call({delegate_call, From, Msg}, _From, State) -> Spoof From value handle_call(Msg, From, State); handle_call(Request, From, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, UserResult = Module:handle_call(Request, From, UserState), case UserResult of {reply, Reply, NewUserState} -> NewState = update_user_state(State, NewUserState), {reply, Reply, NewState}; {reply, Reply, NewUserState, Arg} -> NewState = update_user_state(State, NewUserState), {reply, Reply, NewState, Arg}; {noreply, NewUserState} -> NewState = update_user_state(State, NewUserState), {noreply, NewState}; {noreply, NewUserState, Arg} -> NewState = update_user_state(State, NewUserState), {noreply, NewState, Arg}; {stop, Reason, Reply, NewUserState} -> NewState = update_user_state(State, NewUserState), {stop, Reason, Reply, NewState}; {stop, Reason, NewUserState} -> NewState = update_user_state(State, NewUserState), {stop, Reason, NewState} end. make_conv_key(Protocol, Role, ConvID, MonitorID) -> {Protocol, Role, ConvID, MonitorID}. 
handle_subsession_setup_failure(SubsessionName, ProtocolName, RoleName, ConvID, Reason, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, MonitorPID = State#actor_state.monitor_pid, ConvKey = make_conv_key(ProtocolName, RoleName, ConvID, MonitorPID), {ok, NewUserState} = Module:ssactor_subsession_setup_failed(SubsessionName, Reason, UserState, ConvKey), {noreply, State#actor_state{user_state=NewUserState}}. handle_subsession_success(SubsessionName, ProtocolName, RoleName, ConvID, SubsessionResult, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, MonitorPID = State#actor_state.monitor_pid, ConvKey = make_conv_key(ProtocolName, RoleName, ConvID, MonitorPID), {ok, NewUserState} = Module:ssactor_subsession_complete(SubsessionName, SubsessionResult, UserState, ConvKey), {noreply, State#actor_state{user_state=NewUserState}}. handle_subsession_failure(SubsessionName, ProtocolName, RoleName, ConvID, FailureName, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, MonitorPID = State#actor_state.monitor_pid, ConvKey = make_conv_key(ProtocolName, RoleName, ConvID, MonitorPID), {ok, NewUserState} = Module:ssactor_subsession_failed(SubsessionName, FailureName, UserState, ConvKey), {noreply, State#actor_state{user_state=NewUserState}}. % Handle incoming user messages. These have been checked by the monitor to % ensure that they conform to the MPST. handle_cast({ssa_msg, Protocol, Role, ConversationID, MsgData}, State) -> { message , _ , Sender , _ , , Types , Payload } } , State ) - > actor_info("Processing message ~p " , [ MsgData ] , State ) , Sender = message:message_sender(MsgData), Op = message:message_name(MsgData), Payload = message:message_payload(MsgData), Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, % TODO: ssactor_handle_message currently just returns a new state. 
% Should we have some more complex callback here instead? {ok, NewUserState} = Module:ssactor_handle_message( Protocol, Role, ConversationID, Sender, Op, Payload, UserState, {Protocol, Role, ConversationID, State#actor_state.monitor_pid}), {noreply, State#actor_state{user_state=NewUserState}}; % Become handle_cast(_Msg = {become, Protocol, Role, Operation, Arguments, CID}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_become(Protocol, Role, Operation, Arguments, {Protocol, Role, CID, State#actor_state.monitor_pid}, UserState), {noreply, State#actor_state{user_state=NewUserState}}; % Setup failed handle_cast(_Msg = {ssa_conversation_setup_failed, Protocol, Role, Err}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_conversation_error(Protocol, Role, Err, UserState), {noreply, State#actor_state{user_state=NewUserState}}; % Setup successful handle_cast(_Msg = {ssa_session_established, Protocol, Role, CID}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, ConvKey = {Protocol, Role, CID, State#actor_state.monitor_pid}, {ok, NewUserState} = Module:ssactor_conversation_established(Protocol, Role, CID, ConvKey, UserState), {noreply, State#actor_state{user_state=NewUserState}}; % Conversation ended handle_cast({conversation_ended, CID, Reason}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_conversation_ended(CID, Reason, UserState), {noreply, State#actor_state{user_state=NewUserState}}; % In-session synchronous call handle_cast({ssa_call_req, MonitorPID, ProtocolName, RoleName, ConversationID, MsgData, From}, State) -> actor_info("Processing synchronous call ~p ~n", [MsgData], State), Sender = message:message_sender(MsgData), Op = message:message_name(MsgData), Payload = 
message:message_payload(MsgData), Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, % Notify monitor that we've started processing the message HandleRes = Module:ssactor_handle_call( ProtocolName, RoleName, ConversationID, Sender, Op, Payload, UserState, {ProtocolName, RoleName, ConversationID, State#actor_state.monitor_pid}), % Save role state, reset handler state to idle, and grab new user state NewUserState = case HandleRes of {reply, Reply, NewState} -> % Send the reply back to the caller error_logger:info_msg("Sending reply back to caller~n"), actor_monitor:outgoing_call_response(MonitorPID, ProtocolName, RoleName, ConversationID, Sender, Op, Reply, From), NewState; {noreply, NewState} -> NewState; {stop, NewState} -> NewState; _Other -> exit(wrong_return_value) end, {noreply, State#actor_state{user_state=NewUserState}}; handle_cast({ssa_subsession_setup_failure, SubsessionName, ProtocolName, RoleName, ConvID, Reason}, State) -> handle_subsession_setup_failure(SubsessionName, ProtocolName, RoleName, ConvID, Reason, State); handle_cast({ssa_subsession_failure, SubsessionName, ProtocolName, RoleName, ConvID, FailureName}, State) -> handle_subsession_failure(SubsessionName, ProtocolName, RoleName, ConvID, FailureName, State); handle_cast({ssa_subsession_success, SubsessionName, ProtocolName, RoleName, ConvID, Result}, State) -> handle_subsession_success(SubsessionName, ProtocolName, RoleName, ConvID, Result, State); handle_cast(Msg, State) -> delegate_async(handle_cast, Msg, State). % Info messages -- we don't do anything with these handle_info(Msg, State) -> delegate_async(handle_info, Msg, State). % We don't need this. code_change(_PreviousVersion, State, _Extra) -> {ok, State}. 
terminate(Reason, State) -> actor_error("Actor terminating for reason ~p~n", [Reason], State), Module = State#actor_state.actor_type_name, MonitorPID = State#actor_state.monitor_pid, UserState = State#actor_state.user_state, actor_registry:deregister_actor(Module, MonitorPID), Module:terminate(Reason, UserState), ok. % Internal API message(ActorPID, ProtocolName, RoleName, ConvID, Msg) -> gen_server2:cast(ActorPID, {ssa_msg, ProtocolName, RoleName, ConvID, Msg}). conversation_success(ActorPID, ProtocolName, RoleName, ConvID) -> gen_server2:cast(ActorPID, {ssa_session_established, ProtocolName, RoleName, ConvID}). incoming_call_request(ActorPID, ProtocolName, RoleName, ConvID, MonitorPID, Message, From) -> gen_server2:cast(ActorPID, {ssa_call_req, MonitorPID, ProtocolName, RoleName, ConvID, Message, From}). join_conversation_request(ActorPID, ProtocolName, RoleName, ConvID) -> %io:format("JCR: Actor: ~p, PN: ~p, RN: ~p, CID: ~p~n", [ ActorPID , ProtocolName , RoleName , ConvID ] ) , gen_server2:call(ActorPID, {ssa_join_conversation, ProtocolName, RoleName, ConvID}). subsession_setup_failure(ActorPID, SubsessionName, ProtocolName, RoleName, ConvID, Reason) -> gen_server2:cast(ActorPID, {ssa_subsession_setup_failure, SubsessionName, ProtocolName, RoleName, ConvID, Reason}). subsession_failure(ActorPID, SubsessionName, ProtocolName, RoleName, ConvID, FailureName) -> gen_server2:cast(ActorPID, {ssa_subsession_failure, SubsessionName, ProtocolName, RoleName, ConvID, FailureName}). subsession_success(ActorPID, SubsessionName, ProtocolName, RoleName, ConvID, SubsessionResult) -> gen_server2:cast(ActorPID, {ssa_subsession_success, SubsessionName, ProtocolName, RoleName, ConvID, SubsessionResult}). %%%%%%%%%%%%% %%%% API %%%% %%%%%%%%%%%%% conversation_ended(ActorPID, CID, Reason) -> gen_server2:cast(ActorPID, {conversation_ended, CID, Reason}). call(ServerRef, Message) -> gen_server2:call(ServerRef, Message). 
call(ServerRef, Message, Timeout) -> gen_server2:call(ServerRef, Message, Timeout). cast(ServerRef, Message) -> gen_server2:cast(ServerRef, Message). reply(ServerRef, Message) -> gen_server2:reply(ServerRef, Message). start_link(ModuleName, Args, Options) -> actor_monitor:start_link(ModuleName, Args, Options). io : format("SSA Gen server start called for ~p ~ n " , [ ModuleName ] ) , Res = gen_server2 : start_link(ssa_gen_server , [ ModuleName , ] , Options ) , % unwrap_start_result(Res). start_link(RegName, ModuleName, Args, Options) -> actor_monitor:start_link(RegName, ModuleName, Args, Options). Res = gen_server2 : start_link(ssa_gen_server , [ RegName , ModuleName , ] , Options ) , % unwrap_start_result(Res). start(ModuleName, Args, Options) -> actor_monitor:start_link(ModuleName, Args, Options). Res = gen_server2 : start(ssa_gen_server , [ ModuleName , ] , Options ) , % unwrap_start_result(Res). start(RegName, ModuleName, Args, Options) -> actor_monitor:start_link(RegName, ModuleName, Args, Options). Res = gen_server2 : start(ssa_gen_server , [ RegName , ModuleName , ] , Options ) , % unwrap_start_result(Res). start_actor_process(ModuleName, Args, MonitorPID) -> gen_server2:start_link(ssa_gen_server, [ModuleName, Args, MonitorPID], []).
null
https://raw.githubusercontent.com/SimonJF/monitored-session-erlang/5ec76f327d3d2da4050d4c5a468ac343207f4521/src/behaviours/ssa_gen_server.erl
erlang
This is the behaviour for basic session actors. * ssactor_init, which returns the initial user state * ssactor_handle_msg, which handles incoming messages. The session actor itself has certain bits of internal state, which we leverage in order to perform monitoring and message routing. This is: * Conversation process PID: We use this in order to relay messages to the conversation process, which performs Role |-> Endpoint routing. * Roles the actor is playing in the current conversation * The currently-active role in the actor * And finally, a Role |-> Monitor mapping. ssactor_init returns some state given some input args ssactor_handle_message handles a message ssactor_join is called when the actor is invited to participate in a conversation. The user can decide to accept or decline this invitation. ssactor_conversation_established is called when all actors have been invited. ssactor_conversation_error is called when there was an error establishing the conversation. gen_server2 callbacks Delegate calls, casts (other than ssa internal messages), info messages and termination messages to the actor. Propagate user state changes without losing system state Same for synchronous messages, but there are a couple more things we need to handle, in particular re: replies Handle incoming user messages. These have been checked by the monitor to ensure that they conform to the MPST. TODO: ssactor_handle_message currently just returns a new state. Should we have some more complex callback here instead? Become Setup failed Setup successful Conversation ended In-session synchronous call Notify monitor that we've started processing the message Save role state, reset handler state to idle, and grab new user state Send the reply back to the caller Info messages -- we don't do anything with these We don't need this. Internal API io:format("JCR: Actor: ~p, PN: ~p, RN: ~p, CID: ~p~n", API %%%% unwrap_start_result(Res). unwrap_start_result(Res). unwrap_start_result(Res). 
unwrap_start_result(Res).
-module(ssa_gen_server). -behaviour(gen_server2). -compile(export_all). -record(actor_state, {actor_type_name, monitor_pid, user_state}). The behaviour requires two callbacks : behaviour_info(callbacks) -> [{ssactor_init,2}, {ssactor_join,4}, {ssactor_handle_message, 7}, {ssactor_become, 5}, {ssactor_conversation_established, 5}, {ssactor_conversation_error, 4}, {ssactor_conversation_ended, 3}, Name , Result , State , ConvKey Name , FailureName , State , ConvKey {ssactor_subsession_setup_failed, 4}, {handle_call, 3}, {handle_cast, 3}, {handle_info, 3}, {terminate, 2} ]; behaviour_info(_Other) -> undefined. update_user_state(SystemState, NewUserState) -> SystemState#actor_state{user_state = NewUserState}. log_msg(Func, Format, Args, State) -> InfoStr = "SSACTOR: Actor ~p, actor PID ~p, monitor PID ~p.", InfoArgs = [State#actor_state.actor_type_name, self(), State#actor_state.monitor_pid], Func(Format ++ "~n" ++ InfoStr, Args ++ InfoArgs). actor_warn(Format, Args, State) -> log_msg(fun error_logger:warning_msg/2, Format, Args, State). actor_error(Format, Args, State) -> log_msg(fun error_logger:error_msg/2, Format, Args, State). actor_info(Format, Args, State) -> log_msg(fun error_logger:info_msg/2, Format, Args, State). init([Module, UserArgs, MonitorPID]) -> actor_registry:register_actor(Module, MonitorPID), UserState = Module:ssactor_init(UserArgs, MonitorPID), {ok, #actor_state{actor_type_name=Module, monitor_pid=MonitorPID, user_state=UserState}}. 
delegate_async(Fun, Msg, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, UserResult = case Fun of handle_cast -> Module:handle_cast(Msg, UserState); handle_info -> Module:handle_info(Msg, UserState) end, case UserResult of {noreply, NewUserState} -> NewState = update_user_state(State, NewUserState), {noreply, NewState}; {noreply, NewUserState, Arg} -> NewState = update_user_state(State, NewUserState), {noreply, NewState, Arg}; {stop, Reason, NewUserState} -> NewState = update_user_state(State, NewUserState), {stop, Reason, NewState} end. handle_call(ssa_get_monitor_id, _From, State) -> {reply, State#actor_state.monitor_pid, State}; handle_call({ssa_join_conversation, ProtocolName, RoleName, ConversationID}, _From, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, UserResult = Module:ssactor_join(ProtocolName, RoleName, ConversationID, UserState), case UserResult of {accept, NewUserState} -> NewState = update_user_state(State, NewUserState), {reply, accept, NewState}; {decline, NewUserState} -> NewState = update_user_state(State, NewUserState), {reply, decline, NewState} end; handle_call({delegate_call, From, Msg}, _From, State) -> Spoof From value handle_call(Msg, From, State); handle_call(Request, From, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, UserResult = Module:handle_call(Request, From, UserState), case UserResult of {reply, Reply, NewUserState} -> NewState = update_user_state(State, NewUserState), {reply, Reply, NewState}; {reply, Reply, NewUserState, Arg} -> NewState = update_user_state(State, NewUserState), {reply, Reply, NewState, Arg}; {noreply, NewUserState} -> NewState = update_user_state(State, NewUserState), {noreply, NewState}; {noreply, NewUserState, Arg} -> NewState = update_user_state(State, NewUserState), {noreply, NewState, Arg}; {stop, Reason, Reply, NewUserState} -> NewState = 
update_user_state(State, NewUserState), {stop, Reason, Reply, NewState}; {stop, Reason, NewUserState} -> NewState = update_user_state(State, NewUserState), {stop, Reason, NewState} end. make_conv_key(Protocol, Role, ConvID, MonitorID) -> {Protocol, Role, ConvID, MonitorID}. handle_subsession_setup_failure(SubsessionName, ProtocolName, RoleName, ConvID, Reason, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, MonitorPID = State#actor_state.monitor_pid, ConvKey = make_conv_key(ProtocolName, RoleName, ConvID, MonitorPID), {ok, NewUserState} = Module:ssactor_subsession_setup_failed(SubsessionName, Reason, UserState, ConvKey), {noreply, State#actor_state{user_state=NewUserState}}. handle_subsession_success(SubsessionName, ProtocolName, RoleName, ConvID, SubsessionResult, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, MonitorPID = State#actor_state.monitor_pid, ConvKey = make_conv_key(ProtocolName, RoleName, ConvID, MonitorPID), {ok, NewUserState} = Module:ssactor_subsession_complete(SubsessionName, SubsessionResult, UserState, ConvKey), {noreply, State#actor_state{user_state=NewUserState}}. handle_subsession_failure(SubsessionName, ProtocolName, RoleName, ConvID, FailureName, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, MonitorPID = State#actor_state.monitor_pid, ConvKey = make_conv_key(ProtocolName, RoleName, ConvID, MonitorPID), {ok, NewUserState} = Module:ssactor_subsession_failed(SubsessionName, FailureName, UserState, ConvKey), {noreply, State#actor_state{user_state=NewUserState}}. 
handle_cast({ssa_msg, Protocol, Role, ConversationID, MsgData}, State) -> { message , _ , Sender , _ , , Types , Payload } } , State ) - > actor_info("Processing message ~p " , [ MsgData ] , State ) , Sender = message:message_sender(MsgData), Op = message:message_name(MsgData), Payload = message:message_payload(MsgData), Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_handle_message( Protocol, Role, ConversationID, Sender, Op, Payload, UserState, {Protocol, Role, ConversationID, State#actor_state.monitor_pid}), {noreply, State#actor_state{user_state=NewUserState}}; handle_cast(_Msg = {become, Protocol, Role, Operation, Arguments, CID}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_become(Protocol, Role, Operation, Arguments, {Protocol, Role, CID, State#actor_state.monitor_pid}, UserState), {noreply, State#actor_state{user_state=NewUserState}}; handle_cast(_Msg = {ssa_conversation_setup_failed, Protocol, Role, Err}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_conversation_error(Protocol, Role, Err, UserState), {noreply, State#actor_state{user_state=NewUserState}}; handle_cast(_Msg = {ssa_session_established, Protocol, Role, CID}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, ConvKey = {Protocol, Role, CID, State#actor_state.monitor_pid}, {ok, NewUserState} = Module:ssactor_conversation_established(Protocol, Role, CID, ConvKey, UserState), {noreply, State#actor_state{user_state=NewUserState}}; handle_cast({conversation_ended, CID, Reason}, State) -> Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, {ok, NewUserState} = Module:ssactor_conversation_ended(CID, Reason, UserState), {noreply, State#actor_state{user_state=NewUserState}}; 
handle_cast({ssa_call_req, MonitorPID, ProtocolName, RoleName, ConversationID, MsgData, From}, State) -> actor_info("Processing synchronous call ~p ~n", [MsgData], State), Sender = message:message_sender(MsgData), Op = message:message_name(MsgData), Payload = message:message_payload(MsgData), Module = State#actor_state.actor_type_name, UserState = State#actor_state.user_state, HandleRes = Module:ssactor_handle_call( ProtocolName, RoleName, ConversationID, Sender, Op, Payload, UserState, {ProtocolName, RoleName, ConversationID, State#actor_state.monitor_pid}), NewUserState = case HandleRes of {reply, Reply, NewState} -> error_logger:info_msg("Sending reply back to caller~n"), actor_monitor:outgoing_call_response(MonitorPID, ProtocolName, RoleName, ConversationID, Sender, Op, Reply, From), NewState; {noreply, NewState} -> NewState; {stop, NewState} -> NewState; _Other -> exit(wrong_return_value) end, {noreply, State#actor_state{user_state=NewUserState}}; handle_cast({ssa_subsession_setup_failure, SubsessionName, ProtocolName, RoleName, ConvID, Reason}, State) -> handle_subsession_setup_failure(SubsessionName, ProtocolName, RoleName, ConvID, Reason, State); handle_cast({ssa_subsession_failure, SubsessionName, ProtocolName, RoleName, ConvID, FailureName}, State) -> handle_subsession_failure(SubsessionName, ProtocolName, RoleName, ConvID, FailureName, State); handle_cast({ssa_subsession_success, SubsessionName, ProtocolName, RoleName, ConvID, Result}, State) -> handle_subsession_success(SubsessionName, ProtocolName, RoleName, ConvID, Result, State); handle_cast(Msg, State) -> delegate_async(handle_cast, Msg, State). handle_info(Msg, State) -> delegate_async(handle_info, Msg, State). code_change(_PreviousVersion, State, _Extra) -> {ok, State}. 
terminate(Reason, State) -> actor_error("Actor terminating for reason ~p~n", [Reason], State), Module = State#actor_state.actor_type_name, MonitorPID = State#actor_state.monitor_pid, UserState = State#actor_state.user_state, actor_registry:deregister_actor(Module, MonitorPID), Module:terminate(Reason, UserState), ok. message(ActorPID, ProtocolName, RoleName, ConvID, Msg) -> gen_server2:cast(ActorPID, {ssa_msg, ProtocolName, RoleName, ConvID, Msg}). conversation_success(ActorPID, ProtocolName, RoleName, ConvID) -> gen_server2:cast(ActorPID, {ssa_session_established, ProtocolName, RoleName, ConvID}). incoming_call_request(ActorPID, ProtocolName, RoleName, ConvID, MonitorPID, Message, From) -> gen_server2:cast(ActorPID, {ssa_call_req, MonitorPID, ProtocolName, RoleName, ConvID, Message, From}). join_conversation_request(ActorPID, ProtocolName, RoleName, ConvID) -> [ ActorPID , ProtocolName , RoleName , ConvID ] ) , gen_server2:call(ActorPID, {ssa_join_conversation, ProtocolName, RoleName, ConvID}). subsession_setup_failure(ActorPID, SubsessionName, ProtocolName, RoleName, ConvID, Reason) -> gen_server2:cast(ActorPID, {ssa_subsession_setup_failure, SubsessionName, ProtocolName, RoleName, ConvID, Reason}). subsession_failure(ActorPID, SubsessionName, ProtocolName, RoleName, ConvID, FailureName) -> gen_server2:cast(ActorPID, {ssa_subsession_failure, SubsessionName, ProtocolName, RoleName, ConvID, FailureName}). subsession_success(ActorPID, SubsessionName, ProtocolName, RoleName, ConvID, SubsessionResult) -> gen_server2:cast(ActorPID, {ssa_subsession_success, SubsessionName, ProtocolName, RoleName, ConvID, SubsessionResult}). conversation_ended(ActorPID, CID, Reason) -> gen_server2:cast(ActorPID, {conversation_ended, CID, Reason}). call(ServerRef, Message) -> gen_server2:call(ServerRef, Message). call(ServerRef, Message, Timeout) -> gen_server2:call(ServerRef, Message, Timeout). cast(ServerRef, Message) -> gen_server2:cast(ServerRef, Message). 
reply(ServerRef, Message) -> gen_server2:reply(ServerRef, Message). start_link(ModuleName, Args, Options) -> actor_monitor:start_link(ModuleName, Args, Options). io : format("SSA Gen server start called for ~p ~ n " , [ ModuleName ] ) , Res = gen_server2 : start_link(ssa_gen_server , [ ModuleName , ] , Options ) , start_link(RegName, ModuleName, Args, Options) -> actor_monitor:start_link(RegName, ModuleName, Args, Options). Res = gen_server2 : start_link(ssa_gen_server , [ RegName , ModuleName , ] , Options ) , start(ModuleName, Args, Options) -> actor_monitor:start_link(ModuleName, Args, Options). Res = gen_server2 : start(ssa_gen_server , [ ModuleName , ] , Options ) , start(RegName, ModuleName, Args, Options) -> actor_monitor:start_link(RegName, ModuleName, Args, Options). Res = gen_server2 : start(ssa_gen_server , [ RegName , ModuleName , ] , Options ) , start_actor_process(ModuleName, Args, MonitorPID) -> gen_server2:start_link(ssa_gen_server, [ModuleName, Args, MonitorPID], []).
ae458a40dbfc114203f7d3247aa28e554252bfd1c40e7cadb6ca82af52cf6497
racket/pkg-build
thread.rkt
#lang racket/base (provide thread/chunk-output wait-chunk-output flush-chunk-output) ;; Run `thunk` in a thread, capturing output to deliver ;; in chunks. (define (thread/chunk-output thunk) (define (make-port e?) (make-output-port (if e? 'stderr/chunked 'stdout/chunked) always-evt (lambda (bstr s e buffer? break?) (thread-send manager (vector t (subbytes bstr s e) e?)) (- e s)) void)) (define go (make-semaphore)) (define t (parameterize ([current-error-port (make-port #t)] [current-output-port (make-port #f)]) (thread (lambda () (semaphore-wait go) (thunk))))) (thread-send manager t) (semaphore-post go) t) ;; ---------------------------------------- (define no-threads-ch (make-channel)) (define manager (thread (lambda () (define (show-output t output) (define e (current-error-port)) (define o (current-output-port)) (define es (hash-ref output t '())) (for ([i (in-list (reverse es))]) (write-bytes (cdr i) (if (car i) e o)))) (let loop ([output (hash)]) (define (do-message msg-evt) (define msg (thread-receive)) (cond [(thread? msg) (loop (hash-set output msg null))] [(pair? msg) (define t (car msg)) (define s (cdr msg)) (cond [(hash-ref output t #f) (show-output t output) (semaphore-post s) (loop (hash-set output t null))] [else (semaphore-post s) (loop output)])] [else (define-values (t o e?) (vector->values msg)) (loop (hash-set output t (cons (cons e? o) (hash-ref output t null))))])) (sync/timeout (lambda () (apply sync (handle-evt (thread-receive-evt) do-message) (if (zero? 
(hash-count output)) (handle-evt (channel-put-evt no-threads-ch (void)) (lambda (_) (loop output))) never-evt) (map (lambda (t) (handle-evt t (lambda (_) (show-output t output) (loop (hash-remove output t))))) (hash-keys output)))) (handle-evt (thread-receive-evt) do-message)))))) (define (flush-chunk-output) (define s (make-semaphore)) (thread-send manager (cons (current-thread) s)) (semaphore-wait s)) (define (wait-chunk-output) (channel-get no-threads-ch)) ;; -------------------------------------------------- (module test racket/base (define o (open-output-bytes)) (parameterize ([current-output-port o] [current-error-port o]) (define-syntax-rule (def id) (define id (dynamic-require (module-path-index-join `(submod "..") (variable-reference->module-path-index (#%variable-reference))) 'id))) (def thread/chunk-output) (def flush-chunk-output) (def wait-chunk-output) (define t1 (thread/chunk-output (lambda () (printf "hi\n") (eprintf "bye\n") (flush-chunk-output) (sync (system-idle-evt)) (printf "HI\n") (eprintf "BYE\n")))) (define t2 (thread/chunk-output (lambda () (printf "hola\n") (eprintf "adios\n") (flush-chunk-output) (sync (system-idle-evt)) (printf "HOLA\n") (eprintf "ADIOS\n")))) (wait-chunk-output)) (let ([l '("hi\nbye" "hola\nadios")] [s (get-output-string o)] [sa (lambda (a b) (string-append (car a) "\n" (cadr a) "\n" (car b) "\n" (cadr b) "\n"))] [r reverse] [u (lambda (l) (map string-upcase l))]) (unless (or (equal? s (sa l (u l))) (equal? s (sa (r l) (u l))) (equal? s (sa (r l) (u (r l)))) (equal? s (sa l (u (r l))))) (error "mismatch: " s))))
null
https://raw.githubusercontent.com/racket/pkg-build/31fea3651b501e2ad333cf6133527290abd2eed1/private/thread.rkt
racket
Run `thunk` in a thread, capturing output to deliver in chunks. ---------------------------------------- --------------------------------------------------
#lang racket/base (provide thread/chunk-output wait-chunk-output flush-chunk-output) (define (thread/chunk-output thunk) (define (make-port e?) (make-output-port (if e? 'stderr/chunked 'stdout/chunked) always-evt (lambda (bstr s e buffer? break?) (thread-send manager (vector t (subbytes bstr s e) e?)) (- e s)) void)) (define go (make-semaphore)) (define t (parameterize ([current-error-port (make-port #t)] [current-output-port (make-port #f)]) (thread (lambda () (semaphore-wait go) (thunk))))) (thread-send manager t) (semaphore-post go) t) (define no-threads-ch (make-channel)) (define manager (thread (lambda () (define (show-output t output) (define e (current-error-port)) (define o (current-output-port)) (define es (hash-ref output t '())) (for ([i (in-list (reverse es))]) (write-bytes (cdr i) (if (car i) e o)))) (let loop ([output (hash)]) (define (do-message msg-evt) (define msg (thread-receive)) (cond [(thread? msg) (loop (hash-set output msg null))] [(pair? msg) (define t (car msg)) (define s (cdr msg)) (cond [(hash-ref output t #f) (show-output t output) (semaphore-post s) (loop (hash-set output t null))] [else (semaphore-post s) (loop output)])] [else (define-values (t o e?) (vector->values msg)) (loop (hash-set output t (cons (cons e? o) (hash-ref output t null))))])) (sync/timeout (lambda () (apply sync (handle-evt (thread-receive-evt) do-message) (if (zero? 
(hash-count output)) (handle-evt (channel-put-evt no-threads-ch (void)) (lambda (_) (loop output))) never-evt) (map (lambda (t) (handle-evt t (lambda (_) (show-output t output) (loop (hash-remove output t))))) (hash-keys output)))) (handle-evt (thread-receive-evt) do-message)))))) (define (flush-chunk-output) (define s (make-semaphore)) (thread-send manager (cons (current-thread) s)) (semaphore-wait s)) (define (wait-chunk-output) (channel-get no-threads-ch)) (module test racket/base (define o (open-output-bytes)) (parameterize ([current-output-port o] [current-error-port o]) (define-syntax-rule (def id) (define id (dynamic-require (module-path-index-join `(submod "..") (variable-reference->module-path-index (#%variable-reference))) 'id))) (def thread/chunk-output) (def flush-chunk-output) (def wait-chunk-output) (define t1 (thread/chunk-output (lambda () (printf "hi\n") (eprintf "bye\n") (flush-chunk-output) (sync (system-idle-evt)) (printf "HI\n") (eprintf "BYE\n")))) (define t2 (thread/chunk-output (lambda () (printf "hola\n") (eprintf "adios\n") (flush-chunk-output) (sync (system-idle-evt)) (printf "HOLA\n") (eprintf "ADIOS\n")))) (wait-chunk-output)) (let ([l '("hi\nbye" "hola\nadios")] [s (get-output-string o)] [sa (lambda (a b) (string-append (car a) "\n" (cadr a) "\n" (car b) "\n" (cadr b) "\n"))] [r reverse] [u (lambda (l) (map string-upcase l))]) (unless (or (equal? s (sa l (u l))) (equal? s (sa (r l) (u l))) (equal? s (sa (r l) (u (r l)))) (equal? s (sa l (u (r l))))) (error "mismatch: " s))))
1f059083be6bc724c8dae4e817b0746bf90e5fc09edac7a39723a235236df27e
ghc/testsuite
TH_bracket3.hs
# LANGUAGE MultiParamTypeClasses # module TH_bracket3 where d_class = [d| class Classy a b where f :: a -> b instance Classy Int Bool where f x = if x == 0 then True else False |]
null
https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/th/TH_bracket3.hs
haskell
# LANGUAGE MultiParamTypeClasses # module TH_bracket3 where d_class = [d| class Classy a b where f :: a -> b instance Classy Int Bool where f x = if x == 0 then True else False |]
9144260a4a47632ea73266852b90caad50432d68307756153079c4c02224bb80
YoshikuniJujo/test_haskell
Main.hs
# LANGUAGE BlockArguments # # OPTIONS_GHC -Wall -fno - warn - tabs # module Main where import System.Environment import Codec.Picture import qualified Data.ByteString as BS import qualified Data.Vector.Storable as V import Lib main :: IO () main = do fp : _ <- getArgs img <- readImageRGB8 fp print . BS.length . BS.pack . V.toList $ imageData img BS.writeFile "../texture0/tire.raw" . BS.pack . V.toList $ imageData img img' <- readImageRGBA8 fp print . BS.length . BS.pack . V.toList $ imageData img' BS.writeFile "../texture0/tire2.raw" . BS.pack . V.toList $ imageData img'
null
https://raw.githubusercontent.com/YoshikuniJujo/test_haskell/781a95b49ade603c438e021e8a3c1fdd9e9ccc44/other_language/c/opengl/glut/texture/try-raw-format/app/Main.hs
haskell
# LANGUAGE BlockArguments # # OPTIONS_GHC -Wall -fno - warn - tabs # module Main where import System.Environment import Codec.Picture import qualified Data.ByteString as BS import qualified Data.Vector.Storable as V import Lib main :: IO () main = do fp : _ <- getArgs img <- readImageRGB8 fp print . BS.length . BS.pack . V.toList $ imageData img BS.writeFile "../texture0/tire.raw" . BS.pack . V.toList $ imageData img img' <- readImageRGBA8 fp print . BS.length . BS.pack . V.toList $ imageData img' BS.writeFile "../texture0/tire2.raw" . BS.pack . V.toList $ imageData img'
47b67a0e995a2aabc68ab7fa84e1cb822f357a3252b10509326fe76c3dca63e6
novalabsxyz/BEAMCoin
beamcoin.erl
%%%------------------------------------------------------------------- %% @doc %% == BEAMCoin == %% @end %%%------------------------------------------------------------------- -module(beamcoin). -behaviour(gen_server). %% hard, but not too hard -define(LIMIT, math:pow(2, 240)). -define(SERVER, ?MODULE). %% ------------------------------------------------------------------ %% API Function Exports %% ------------------------------------------------------------------ -export([ start_link/1 ,genesis/0 ,status/1 ,get_blocks/3 ,spend/1 ,connect/1 ]). %% ------------------------------------------------------------------ gen_server Function Exports %% ------------------------------------------------------------------ -export([ init/1 ,handle_call/3 ,handle_cast/2 ,handle_info/2 ]). -record(coinbase_txn, { payee :: libp2p_crypto:address() ,amount :: pos_integer() }). -record(payment_txn, { payer :: libp2p_crypto:address() ,payee :: libp2p_crypto:address() ,amount :: pos_integer() ,nonce :: non_neg_integer() ,signature :: binary() }). -type hash() :: <<_:256>>. %% SHA256 digest -type transaction() :: #coinbase_txn{} | #payment_txn{}. -record(block, { prev_hash :: hash() ,height = 0 :: non_neg_integer() ,transactions = [] :: [transaction()] ,magic = <<>> :: binary() }). -record(ledger_entry, { nonce = 0 :: non_neg_integer() ,balance = 0 :: non_neg_integer() }). -type ledger() :: #{libp2p_crypto:address() => #ledger_entry{}}. -record(blockchain, { genesis_hash :: hash() ,blocks = #{} :: #{hash() => #block{}} ,ledger = #{} :: ledger() ,head :: hash() }). -record(state, { blockchain :: #blockchain{} ,address :: libp2p_crypto:address() ,swarm :: pid() ,miner :: pid() ,mempool = [] :: [#payment_txn{}] }). -include_lib("public_key/include/public_key.hrl"). -type block() :: #block{}. -type private_key() :: #'ECPrivateKey'{}. -type public_key() :: {#'ECPoint'{}, {namedCurve, ?secp256r1}}. 
%% ------------------------------------------------------------------ %% API Function Definitions %% ------------------------------------------------------------------ %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- get_blocks(Pid, Height, Hash) -> gen_server:call(Pid, {get_blocks, Height, Hash}). %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- status([Node]) -> pong = net_adm:ping(Node), {ok, State} = gen_server:call({?MODULE, Node}, status), CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), io:format("blockchain is of height ~p with head ~s~n", [CurrentHead#block.height, beamcoin_sync_handler:hexdump(hash_block(CurrentHead))]), io:format("listen addresses are ~s~n", [lists:join(" ", libp2p_swarm:listen_addrs(State#state.swarm))]), io:format("miner address is ~s~n", [libp2p_crypto:address_to_b58(State#state.address)]), io:format("ledger ~n"), riak_core_console_table:print( [{address, 50}, {balance, 10}, {nonce, 6}] ,[[libp2p_crypto:address_to_b58(Address), Balance, Nonce] || {Address, #ledger_entry{nonce=Nonce, balance=Balance}} <- maps:to_list(State#state.blockchain#blockchain.ledger)] ), io:format("peers ~n"), Peers = libp2p_peerbook:values(libp2p_swarm:peerbook(State#state.swarm)), Rows = [[libp2p_crypto:address_to_b58(libp2p_peer:address(Peer)), lists:join("\n", libp2p_peer:listen_addrs(Peer)), lists:join("\n", [libp2p_crypto:address_to_b58(P) || P <- libp2p_peer:connected_peers(Peer)]), erlang:system_time(seconds) - libp2p_peer:timestamp(Peer) ] || Peer <- Peers], riak_core_console_table:print([{address, 50}, {'listening on', 30}, {peers, 50}, {age, 8}], Rows), ok. 
%%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- connect([NodeStr|MultiAddrs]) -> Node = erlang:list_to_atom(NodeStr), pong = net_adm:ping(Node), gen_server:cast({?MODULE, Node}, {connect, MultiAddrs}). %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- spend([Node, Amount, Recipient]) -> Address = libp2p_crypto:b58_to_address(Recipient), {ok, Txn} = gen_server:call({?MODULE, erlang:list_to_atom(Node)}, {spend, erlang:list_to_integer(Amount), Address}), io:format("transaction ~p submitted ~n", [Txn]), ok. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- genesis() -> Name = erlang:node(), FileName = erlang:atom_to_list(Name) ++ "-genesis.block", NewBlock = case load_genesis_block(FileName) of {ok, GenesisBlock} -> GenesisBlock; {error, _E} -> lager:info("could not load a genesis block: ~p, creating instead", [_E]), {_PrivKey, PubKey} = load_keys(Name), Address = libp2p_crypto:pubkey_to_address(PubKey), CoinBase = #coinbase_txn{payee=Address, amount=reward_amount(0)}, Block = #block{prev_hash = <<0:256>>, height=0, transactions=[CoinBase]}, {ok, _Pid} = start_miner(Block, self()), lager:info("mining genesis block"), receive {mined_block, MinedBlock, self} -> ok = file:write_file(FileName, erlang:term_to_binary(MinedBlock)), MinedBlock end end, lager:info("genesis block ~p", [NewBlock]), gen_server:start_link({local, ?MODULE}, ?MODULE, [NewBlock, []], []). 
%%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- start_link([FileName | SeedNodes]=Args) -> case load_genesis_block(FileName) of {ok, GenesisBlock} -> gen_server:start_link({local, ?MODULE}, ?MODULE, [GenesisBlock, SeedNodes], []); {error, _E} -> lager:warning("fail to start ~p retrying in 5s", [_E]), timer:sleep(5000), ?MODULE:start_link(Args) end. %% ------------------------------------------------------------------ gen_server Function Definitions %% ------------------------------------------------------------------ init([GenesisBlock, SeedNodes]) -> application:ensure_all_started(ranch), % Create swarm / connect to peers Name = erlang:node(), {PrivKey, PubKey} = load_keys(Name), Swarm = start_swarm_server(Name, SeedNodes, {PrivKey, PubKey}), GenesisHash = hash_block(GenesisBlock), %% add any transactions in the genesis block to our ledger {ok, Ledger} = absorb_transactions(GenesisBlock#block.transactions, #{}), Blockchain = #blockchain{ genesis_hash=GenesisHash ,blocks=#{GenesisHash => GenesisBlock} ,ledger=Ledger ,head=GenesisHash }, {ok, PubKey, _} = libp2p_swarm:keys(Swarm), Address = libp2p_crypto:pubkey_to_address(PubKey), State = #state{ blockchain=Blockchain ,swarm=Swarm ,address=Address }, self() ! {start_mining, GenesisBlock}, {ok, State}. 
handle_call({get_blocks, _Height, _Hash}, _From, State) -> CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), {ok, Blocks} = parent_blocks([CurrentHead], State#state.blockchain), {reply, {ok, Blocks}, State}; handle_call(status, _From, State) -> {reply, {ok, State}, State}; handle_call({spend, Amount, Recipient}, _From, State) -> #ledger_entry{nonce=Nonce} = maps:get(State#state.address, State#state.blockchain#blockchain.ledger, #ledger_entry{}), Txn0 = #payment_txn{payer=State#state.address, payee=Recipient, amount=Amount, nonce=Nonce+1, signature= <<>>}, {PrivKey, _PubKey} = libp2p_swarm:keys(State#state.swarm), Signature = public_key:sign(term_to_binary(Txn0), sha256, PrivKey), Txn = Txn0#payment_txn{signature=Signature}, {reply, {ok, base64:encode(term_to_binary(Txn))}, State#state{mempool=[Txn|State#state.mempool]}}; handle_call(_Msg, _From, State) -> lager:warning("unhandled call ~p", [_Msg]), {reply, ok, State}. handle_cast(_Msg, State) -> lager:warning("unhandled cast ~p", [_Msg]), {noreply, State}. 
handle_info({start_mining, Block}, #state{swarm=Swarm}=State) -> {ok, Miner} = start_mining(Block, Swarm, []), {noreply, State#state{miner=Miner}}; handle_info({mined_block, NewBlock, Addr}, State) -> CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), case validate_chain(NewBlock, State#state.blockchain) of {error, {missing_block, _Hash}} when Addr /= self -> Path = "beamcoin_sync/1.0.0/" ++ erlang:integer_to_list(CurrentHead#block.height) ++ "/" ++ beamcoin_sync_handler:hexdump(hash_block(CurrentHead)), case libp2p_swarm:dial(State#state.swarm, Addr, Path) of {ok, Conn} -> libp2p_framed_stream:client(beamcoin_sync_handler, Conn, [self()]); Other -> lager:notice("Failed to dial sync service on ~p : ~p", [Addr, Other]) end, {noreply, State}; {error, Error} -> lager:info("block error ~p ~p", [NewBlock, Error]), {noreply, State}; {NewLedger, ProposedHead} when CurrentHead#block.height < ProposedHead#block.height -> case get_miner(ProposedHead) == State#state.address of true -> lager:info("mined a new block!"); false -> lager:info("received a new block!"), erlang:unlink(State#state.miner), erlang:exit(State#state.miner, kill) end, lager:info("Head is now ~w", [beamcoin_sync_handler:hexdump(hash_block(ProposedHead))]), catch [ M ! 
{block, ProposedHead} || M <- pg2:get_members(self())], Mempool = State#state.mempool -- ProposedHead#block.transactions, {ok, Miner} = start_mining(ProposedHead, State#state.swarm, Mempool), Blockchain = State#state.blockchain, NewHash = hash_block(ProposedHead), Blocks = maps:put(NewHash, ProposedHead, Blockchain#blockchain.blocks), {noreply, State#state{miner=Miner, mempool=Mempool, blockchain=Blockchain#blockchain{ledger=NewLedger, head=NewHash, blocks=Blocks}}}; {_NewLedger, _} -> case get_miner(NewBlock) == State#state.address andalso Addr == self of true -> lager:debug("mined sibling block, ignoring"), {ok, Miner} = start_mining(NewBlock, State#state.swarm, State#state.mempool), {noreply, State#state{miner=Miner}}; false -> lager:debug("received sibling block, ignoring"), {noreply, State} end end; handle_info({blocks, Blocks, From}, State) -> %% speculatively add the blocks to our blockchain and see what we get NewChain = add_blocks(Blocks, State#state.blockchain), %% Assume the blocks are in ascending order CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), case validate_chain(lists:last(Blocks), NewChain) of {error, Reason} -> lager:warning("block sync with ~p failed: ~p", [From, Reason]), {noreply, State}; {NewLedger, ProposedHead} when CurrentHead#block.height < ProposedHead#block.height -> lager:info("received a block sync from ~p!", [From]), unlink(State#state.miner), exit(State#state.miner, kill), lager:info("Head is now ~w", [hash_block(ProposedHead)]), catch [ M ! 
{block, ProposedHead} || M <- pg2:get_members(self())], Mempool = State#state.mempool -- lists:flatten([ Transactions || #block{transactions=Transactions} <- Blocks]), {ok, Miner} = start_mining(ProposedHead, State#state.swarm, Mempool), NewHash = hash_block(ProposedHead), {noreply, State#state{miner=Miner, mempool=Mempool, blockchain=NewChain#blockchain{ledger=NewLedger, head=NewHash}}}; {_NewLedger, _} -> lager:info("got a stale block sync with ~p", [From]), %% send the peer our current head, in response, so they can know to sync with us %% TODO ideally we'd not have to broadcast it catch [ M ! {block, CurrentHead} || M <- pg2:get_members(self())], %% the blocks might be useful, stash them since we know they're valid Blockchain = State#state.blockchain, {noreply, State#state{blockchain=Blockchain#blockchain{blocks=NewChain#blockchain.blocks}}} end; handle_info(_Msg, State) -> lager:warning("unhandled info message ~p", [_Msg]), {noreply, State}. %% ------------------------------------------------------------------ %% Internal Function Definitions %% ------------------------------------------------------------------ %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- -spec load_genesis_block(string()) -> {ok, block()} | {error, any()}. load_genesis_block(FileName) -> case file:read_file(FileName) of {ok, <<>>} -> {error, empty_file}; {ok, Bin} -> try erlang:binary_to_term(Bin) of GenesisBlock -> case erlang:is_record(GenesisBlock, block) of true -> {ok, GenesisBlock}; _ -> {error, not_record} end catch Error -> {error, Error} end; Error -> Error end. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- -spec start_swarm_server(atom(), [string(), ...], {private_key(), public_key()}) -> pid(). 
start_swarm_server(Name, [], {PrivKey, PubKey}) -> Port = os:getenv("PORT", "0"), Opts = [ {key, {PubKey, libp2p_crypto:mk_sig_fun(PrivKey)}} ], {ok, Swarm} = libp2p_swarm:start(Name, Opts), lager:info("started swarm ~p with ~p", [Swarm, Opts]), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin/1.0.0", {libp2p_framed_stream, server, [beamcoin_handler, self()]}), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin_sync/1.0.0", {libp2p_framed_stream, server, [beamcoin_sync_handler, self()]}), ok = libp2p_swarm:listen(Swarm, "/ip4/0.0.0.0/tcp/" ++ Port), ok = libp2p_swarm:listen(Swarm, "/ip6/::/tcp/" ++ Port), ok = pg2:create(self()), Swarm; start_swarm_server(Name, SeedNodes, {PrivKey, PubKey}) -> Port = os:getenv("PORT", "0"), Opts = [ {key, {PubKey, libp2p_crypto:mk_sig_fun(PrivKey)}} ,{libp2p_group_gossip, [ {stream_clients, [ {"beamcoin/1.0.0", {beamcoin_handler, [self()]}} ]} ,{seed_nodes, SeedNodes} ]} ], {ok, Swarm} = libp2p_swarm:start(Name, Opts), lager:info("started swarm ~p with ~p", [Swarm, Opts]), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin/1.0.0", {libp2p_framed_stream, server, [beamcoin_handler, self()]}), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin_sync/1.0.0", {libp2p_framed_stream, server, [beamcoin_sync_handler, self()]}), ok = libp2p_swarm:listen(Swarm, "/ip4/0.0.0.0/tcp/" ++ Port), ok = libp2p_swarm:listen(Swarm, "/ip6/::/tcp/" ++ Port), ok = pg2:create(self()), Swarm. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- -spec load_keys(atom()) -> {private_key(), public_key()}. load_keys(Name) -> ok = filelib:ensure_dir("keys/"), KeyFile = "keys/" ++ erlang:atom_to_list(Name) ++ ".pem", case libp2p_crypto:load_keys(KeyFile) of {ok, PrivKey, PubKey} -> {PrivKey, PubKey}; {error, _} -> Keys = {PrivKey, PubKey} = libp2p_crypto:generate_keys(), ok = libp2p_crypto:save_keys(Keys, KeyFile), {PrivKey, PubKey} end. 
%%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- add_blocks([], Blockchain) -> Blockchain; add_blocks([Block|Tail], Blockchain=#blockchain{blocks=Blocks}) -> add_blocks(Tail, Blockchain#blockchain{blocks=maps:put(hash_block(Block), Block, Blocks)}). %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- get_miner(#block{transactions=[#coinbase_txn{payee=Account}|_]}) -> Account. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- -spec absorb_transactions([transaction(), ...], ledger()) -> {ok, ledger()} | {error, bad_transaction | bad_signature}. absorb_transactions([], Ledger) -> {ok, Ledger}; absorb_transactions([#coinbase_txn{payee=Address, amount=Amount}|Tail], Ledger) -> absorb_transactions(Tail, credit_account(Address, Amount, Ledger)); absorb_transactions([#payment_txn{amount=Amount}|_Tail], _Ledger) when Amount =< 0 -> {error, bad_transaction}; absorb_transactions([Txn=#payment_txn{payer=Payer, payee=Payee, amount=Amount, nonce=Nonce, signature=Sig}|Tail], Ledger) -> PubKey = libp2p_crypto:address_to_pubkey(Payer), case public_key:verify(term_to_binary(Txn#payment_txn{signature= <<>>}), sha256, Sig, PubKey) of true -> case credit_account(Payee, Amount, debit_account(Payer, Amount, Nonce, Ledger)) of error -> {error, bad_transaction}; NewLedger -> absorb_transactions(Tail, NewLedger) end; false -> {error, bad_signature} end. 
%%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- credit_account(_Address, _Amount, error) -> error; credit_account(Address, Amount, Ledger) -> Entry = maps:get(Address, Ledger, #ledger_entry{}), maps:put(Address, Entry#ledger_entry{balance = Entry#ledger_entry.balance + Amount}, Ledger). %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- debit_account(Address, Amount, Nonce, Ledger) -> Entry = maps:get(Address, Ledger, #ledger_entry{}), %% check things look OK case Nonce == Entry#ledger_entry.nonce + 1 andalso (Entry#ledger_entry.balance - Amount) >= 0 of true -> maps:put(Address, Entry#ledger_entry{balance = Entry#ledger_entry.balance - Amount, nonce=Nonce}, Ledger); _ -> error end. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- start_mining(ParentBlock, Swarm, Mempool) -> {ok, PubKey, _} = libp2p_swarm:keys(Swarm), Address = libp2p_crypto:pubkey_to_address(PubKey), NextHeight = ParentBlock#block.height + 1, CoinBase = #coinbase_txn{payee=Address, amount=reward_amount(NextHeight)}, NewBlock = #block{prev_hash=hash_block(ParentBlock), height=NextHeight, transactions=[CoinBase|lists:reverse(Mempool)]}, start_miner(NewBlock, self()). 
%%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- validate_chain(Block, Blockchain) -> check the hash is under the limit and there 's only one coinbase transaction <<I:256/integer-unsigned-little>> = hash_block(Block), case I < ?LIMIT of false -> {error, insufficent_hash}; true -> case length(lists:filter(fun(E) -> is_record(E, coinbase_txn) end, Block#block.transactions)) == 1 of true -> %% construct the chain back to the genesis block case parent_blocks([Block], Blockchain) of {error, Reason} -> {error, Reason}; {ok, Blocks} -> %% attempt to compute a new ledger for this chain try ValidatedLedger = lists:foldl(fun(_Block, {error, _}=Acc) -> Acc; (ABlock, Ledger) -> case absorb_transactions(ABlock#block.transactions, Ledger) of {ok, NewLedger} -> NewLedger; %% return the ledger till a "bad block" is found _ -> throw({Ledger, ABlock}) end end, #{}, Blocks), {ValidatedLedger, Block} catch throw:{Ledger, B} -> {Ledger, B} end end; _ -> {error, incorrect_coinbase_txn} end end. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- -spec hash_block(block()) -> binary(). hash_block(Block) -> crypto:hash(sha256, erlang:term_to_binary(Block)). %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- parent_blocks([Head|Tail], Blockchain) -> Hash = Head#block.prev_hash, case maps:find(Hash, Blockchain#blockchain.blocks) of error -> GenesisHash = Blockchain#blockchain.genesis_hash, case hash_block(Head) == GenesisHash of true -> {ok, [Head|Tail]}; false -> {error, {missing_block, Hash}} end; {ok, PrevBlock} -> parent_blocks([PrevBlock, Head|Tail], Blockchain) end. 
%%-------------------------------------------------------------------- %% @doc Reward amounts start at 2018 and decrement by one every height until they reach 0 . %% This is to make very clear this is a toy blockchain you should not use for a long time. %% @end %%-------------------------------------------------------------------- reward_amount(Height) -> max(0, 2018 - Height). %%-------------------------------------------------------------------- %% @doc %% Mining functions %% @end %%-------------------------------------------------------------------- -spec start_miner(block(), pid()) -> {'ok', pid()}. start_miner(Block, Parent) -> {ok, erlang:spawn_link(fun() -> mine(Block, Parent) end)}. %%-------------------------------------------------------------------- %% @doc %% @end %%-------------------------------------------------------------------- mine(Block, Parent) -> <<I:256/integer-unsigned-little>> = hash_block(Block), case I < ?LIMIT of true -> Parent ! {mined_block, Block, self}, catch [ M ! {block, Block} || M <- pg2:get_members(Parent)]; false -> mine(Block#block{magic=crypto:strong_rand_bytes(16)}, Parent) end.
null
https://raw.githubusercontent.com/novalabsxyz/BEAMCoin/c460bf450fdcc9fb849ff338d077dd9d8271391e/src/beamcoin.erl
erlang
------------------------------------------------------------------- @doc == BEAMCoin == @end ------------------------------------------------------------------- hard, but not too hard ------------------------------------------------------------------ API Function Exports ------------------------------------------------------------------ ------------------------------------------------------------------ ------------------------------------------------------------------ SHA256 digest ------------------------------------------------------------------ API Function Definitions ------------------------------------------------------------------ -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- ------------------------------------------------------------------ ------------------------------------------------------------------ Create swarm / connect to peers add any transactions in the genesis block to our ledger speculatively add the blocks to our blockchain and see what we get Assume the blocks are in ascending order send the peer our current head, in response, so they can know to sync with us TODO ideally we'd not have to broadcast it the blocks 
might be useful, stash them since we know they're valid ------------------------------------------------------------------ Internal Function Definitions ------------------------------------------------------------------ -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- check things look OK -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- construct the chain back to the genesis block attempt to compute a new ledger for this chain return the ledger till a "bad block" is found -------------------------------------------------------------------- @doc @end 
-------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc This is to make very clear this is a toy blockchain you should not use for a long time. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc Mining functions @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc @end --------------------------------------------------------------------
-module(beamcoin). -behaviour(gen_server). -define(LIMIT, math:pow(2, 240)). -define(SERVER, ?MODULE). -export([ start_link/1 ,genesis/0 ,status/1 ,get_blocks/3 ,spend/1 ,connect/1 ]). gen_server Function Exports -export([ init/1 ,handle_call/3 ,handle_cast/2 ,handle_info/2 ]). -record(coinbase_txn, { payee :: libp2p_crypto:address() ,amount :: pos_integer() }). -record(payment_txn, { payer :: libp2p_crypto:address() ,payee :: libp2p_crypto:address() ,amount :: pos_integer() ,nonce :: non_neg_integer() ,signature :: binary() }). -type transaction() :: #coinbase_txn{} | #payment_txn{}. -record(block, { prev_hash :: hash() ,height = 0 :: non_neg_integer() ,transactions = [] :: [transaction()] ,magic = <<>> :: binary() }). -record(ledger_entry, { nonce = 0 :: non_neg_integer() ,balance = 0 :: non_neg_integer() }). -type ledger() :: #{libp2p_crypto:address() => #ledger_entry{}}. -record(blockchain, { genesis_hash :: hash() ,blocks = #{} :: #{hash() => #block{}} ,ledger = #{} :: ledger() ,head :: hash() }). -record(state, { blockchain :: #blockchain{} ,address :: libp2p_crypto:address() ,swarm :: pid() ,miner :: pid() ,mempool = [] :: [#payment_txn{}] }). -include_lib("public_key/include/public_key.hrl"). -type block() :: #block{}. -type private_key() :: #'ECPrivateKey'{}. -type public_key() :: {#'ECPoint'{}, {namedCurve, ?secp256r1}}. get_blocks(Pid, Height, Hash) -> gen_server:call(Pid, {get_blocks, Height, Hash}). 
status([Node]) -> pong = net_adm:ping(Node), {ok, State} = gen_server:call({?MODULE, Node}, status), CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), io:format("blockchain is of height ~p with head ~s~n", [CurrentHead#block.height, beamcoin_sync_handler:hexdump(hash_block(CurrentHead))]), io:format("listen addresses are ~s~n", [lists:join(" ", libp2p_swarm:listen_addrs(State#state.swarm))]), io:format("miner address is ~s~n", [libp2p_crypto:address_to_b58(State#state.address)]), io:format("ledger ~n"), riak_core_console_table:print( [{address, 50}, {balance, 10}, {nonce, 6}] ,[[libp2p_crypto:address_to_b58(Address), Balance, Nonce] || {Address, #ledger_entry{nonce=Nonce, balance=Balance}} <- maps:to_list(State#state.blockchain#blockchain.ledger)] ), io:format("peers ~n"), Peers = libp2p_peerbook:values(libp2p_swarm:peerbook(State#state.swarm)), Rows = [[libp2p_crypto:address_to_b58(libp2p_peer:address(Peer)), lists:join("\n", libp2p_peer:listen_addrs(Peer)), lists:join("\n", [libp2p_crypto:address_to_b58(P) || P <- libp2p_peer:connected_peers(Peer)]), erlang:system_time(seconds) - libp2p_peer:timestamp(Peer) ] || Peer <- Peers], riak_core_console_table:print([{address, 50}, {'listening on', 30}, {peers, 50}, {age, 8}], Rows), ok. connect([NodeStr|MultiAddrs]) -> Node = erlang:list_to_atom(NodeStr), pong = net_adm:ping(Node), gen_server:cast({?MODULE, Node}, {connect, MultiAddrs}). spend([Node, Amount, Recipient]) -> Address = libp2p_crypto:b58_to_address(Recipient), {ok, Txn} = gen_server:call({?MODULE, erlang:list_to_atom(Node)}, {spend, erlang:list_to_integer(Amount), Address}), io:format("transaction ~p submitted ~n", [Txn]), ok. 
genesis() -> Name = erlang:node(), FileName = erlang:atom_to_list(Name) ++ "-genesis.block", NewBlock = case load_genesis_block(FileName) of {ok, GenesisBlock} -> GenesisBlock; {error, _E} -> lager:info("could not load a genesis block: ~p, creating instead", [_E]), {_PrivKey, PubKey} = load_keys(Name), Address = libp2p_crypto:pubkey_to_address(PubKey), CoinBase = #coinbase_txn{payee=Address, amount=reward_amount(0)}, Block = #block{prev_hash = <<0:256>>, height=0, transactions=[CoinBase]}, {ok, _Pid} = start_miner(Block, self()), lager:info("mining genesis block"), receive {mined_block, MinedBlock, self} -> ok = file:write_file(FileName, erlang:term_to_binary(MinedBlock)), MinedBlock end end, lager:info("genesis block ~p", [NewBlock]), gen_server:start_link({local, ?MODULE}, ?MODULE, [NewBlock, []], []). start_link([FileName | SeedNodes]=Args) -> case load_genesis_block(FileName) of {ok, GenesisBlock} -> gen_server:start_link({local, ?MODULE}, ?MODULE, [GenesisBlock, SeedNodes], []); {error, _E} -> lager:warning("fail to start ~p retrying in 5s", [_E]), timer:sleep(5000), ?MODULE:start_link(Args) end. gen_server Function Definitions init([GenesisBlock, SeedNodes]) -> application:ensure_all_started(ranch), Name = erlang:node(), {PrivKey, PubKey} = load_keys(Name), Swarm = start_swarm_server(Name, SeedNodes, {PrivKey, PubKey}), GenesisHash = hash_block(GenesisBlock), {ok, Ledger} = absorb_transactions(GenesisBlock#block.transactions, #{}), Blockchain = #blockchain{ genesis_hash=GenesisHash ,blocks=#{GenesisHash => GenesisBlock} ,ledger=Ledger ,head=GenesisHash }, {ok, PubKey, _} = libp2p_swarm:keys(Swarm), Address = libp2p_crypto:pubkey_to_address(PubKey), State = #state{ blockchain=Blockchain ,swarm=Swarm ,address=Address }, self() ! {start_mining, GenesisBlock}, {ok, State}. 
handle_call({get_blocks, _Height, _Hash}, _From, State) -> CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), {ok, Blocks} = parent_blocks([CurrentHead], State#state.blockchain), {reply, {ok, Blocks}, State}; handle_call(status, _From, State) -> {reply, {ok, State}, State}; handle_call({spend, Amount, Recipient}, _From, State) -> #ledger_entry{nonce=Nonce} = maps:get(State#state.address, State#state.blockchain#blockchain.ledger, #ledger_entry{}), Txn0 = #payment_txn{payer=State#state.address, payee=Recipient, amount=Amount, nonce=Nonce+1, signature= <<>>}, {PrivKey, _PubKey} = libp2p_swarm:keys(State#state.swarm), Signature = public_key:sign(term_to_binary(Txn0), sha256, PrivKey), Txn = Txn0#payment_txn{signature=Signature}, {reply, {ok, base64:encode(term_to_binary(Txn))}, State#state{mempool=[Txn|State#state.mempool]}}; handle_call(_Msg, _From, State) -> lager:warning("unhandled call ~p", [_Msg]), {reply, ok, State}. handle_cast(_Msg, State) -> lager:warning("unhandled cast ~p", [_Msg]), {noreply, State}. 
handle_info({start_mining, Block}, #state{swarm=Swarm}=State) -> {ok, Miner} = start_mining(Block, Swarm, []), {noreply, State#state{miner=Miner}}; handle_info({mined_block, NewBlock, Addr}, State) -> CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), case validate_chain(NewBlock, State#state.blockchain) of {error, {missing_block, _Hash}} when Addr /= self -> Path = "beamcoin_sync/1.0.0/" ++ erlang:integer_to_list(CurrentHead#block.height) ++ "/" ++ beamcoin_sync_handler:hexdump(hash_block(CurrentHead)), case libp2p_swarm:dial(State#state.swarm, Addr, Path) of {ok, Conn} -> libp2p_framed_stream:client(beamcoin_sync_handler, Conn, [self()]); Other -> lager:notice("Failed to dial sync service on ~p : ~p", [Addr, Other]) end, {noreply, State}; {error, Error} -> lager:info("block error ~p ~p", [NewBlock, Error]), {noreply, State}; {NewLedger, ProposedHead} when CurrentHead#block.height < ProposedHead#block.height -> case get_miner(ProposedHead) == State#state.address of true -> lager:info("mined a new block!"); false -> lager:info("received a new block!"), erlang:unlink(State#state.miner), erlang:exit(State#state.miner, kill) end, lager:info("Head is now ~w", [beamcoin_sync_handler:hexdump(hash_block(ProposedHead))]), catch [ M ! 
{block, ProposedHead} || M <- pg2:get_members(self())], Mempool = State#state.mempool -- ProposedHead#block.transactions, {ok, Miner} = start_mining(ProposedHead, State#state.swarm, Mempool), Blockchain = State#state.blockchain, NewHash = hash_block(ProposedHead), Blocks = maps:put(NewHash, ProposedHead, Blockchain#blockchain.blocks), {noreply, State#state{miner=Miner, mempool=Mempool, blockchain=Blockchain#blockchain{ledger=NewLedger, head=NewHash, blocks=Blocks}}}; {_NewLedger, _} -> case get_miner(NewBlock) == State#state.address andalso Addr == self of true -> lager:debug("mined sibling block, ignoring"), {ok, Miner} = start_mining(NewBlock, State#state.swarm, State#state.mempool), {noreply, State#state{miner=Miner}}; false -> lager:debug("received sibling block, ignoring"), {noreply, State} end end; handle_info({blocks, Blocks, From}, State) -> NewChain = add_blocks(Blocks, State#state.blockchain), CurrentHead = maps:get(State#state.blockchain#blockchain.head, State#state.blockchain#blockchain.blocks), case validate_chain(lists:last(Blocks), NewChain) of {error, Reason} -> lager:warning("block sync with ~p failed: ~p", [From, Reason]), {noreply, State}; {NewLedger, ProposedHead} when CurrentHead#block.height < ProposedHead#block.height -> lager:info("received a block sync from ~p!", [From]), unlink(State#state.miner), exit(State#state.miner, kill), lager:info("Head is now ~w", [hash_block(ProposedHead)]), catch [ M ! {block, ProposedHead} || M <- pg2:get_members(self())], Mempool = State#state.mempool -- lists:flatten([ Transactions || #block{transactions=Transactions} <- Blocks]), {ok, Miner} = start_mining(ProposedHead, State#state.swarm, Mempool), NewHash = hash_block(ProposedHead), {noreply, State#state{miner=Miner, mempool=Mempool, blockchain=NewChain#blockchain{ledger=NewLedger, head=NewHash}}}; {_NewLedger, _} -> lager:info("got a stale block sync with ~p", [From]), catch [ M ! 
{block, CurrentHead} || M <- pg2:get_members(self())], Blockchain = State#state.blockchain, {noreply, State#state{blockchain=Blockchain#blockchain{blocks=NewChain#blockchain.blocks}}} end; handle_info(_Msg, State) -> lager:warning("unhandled info message ~p", [_Msg]), {noreply, State}. -spec load_genesis_block(string()) -> {ok, block()} | {error, any()}. load_genesis_block(FileName) -> case file:read_file(FileName) of {ok, <<>>} -> {error, empty_file}; {ok, Bin} -> try erlang:binary_to_term(Bin) of GenesisBlock -> case erlang:is_record(GenesisBlock, block) of true -> {ok, GenesisBlock}; _ -> {error, not_record} end catch Error -> {error, Error} end; Error -> Error end. -spec start_swarm_server(atom(), [string(), ...], {private_key(), public_key()}) -> pid(). start_swarm_server(Name, [], {PrivKey, PubKey}) -> Port = os:getenv("PORT", "0"), Opts = [ {key, {PubKey, libp2p_crypto:mk_sig_fun(PrivKey)}} ], {ok, Swarm} = libp2p_swarm:start(Name, Opts), lager:info("started swarm ~p with ~p", [Swarm, Opts]), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin/1.0.0", {libp2p_framed_stream, server, [beamcoin_handler, self()]}), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin_sync/1.0.0", {libp2p_framed_stream, server, [beamcoin_sync_handler, self()]}), ok = libp2p_swarm:listen(Swarm, "/ip4/0.0.0.0/tcp/" ++ Port), ok = libp2p_swarm:listen(Swarm, "/ip6/::/tcp/" ++ Port), ok = pg2:create(self()), Swarm; start_swarm_server(Name, SeedNodes, {PrivKey, PubKey}) -> Port = os:getenv("PORT", "0"), Opts = [ {key, {PubKey, libp2p_crypto:mk_sig_fun(PrivKey)}} ,{libp2p_group_gossip, [ {stream_clients, [ {"beamcoin/1.0.0", {beamcoin_handler, [self()]}} ]} ,{seed_nodes, SeedNodes} ]} ], {ok, Swarm} = libp2p_swarm:start(Name, Opts), lager:info("started swarm ~p with ~p", [Swarm, Opts]), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin/1.0.0", {libp2p_framed_stream, server, [beamcoin_handler, self()]}), ok = libp2p_swarm:add_stream_handler(Swarm, "beamcoin_sync/1.0.0", 
{libp2p_framed_stream, server, [beamcoin_sync_handler, self()]}), ok = libp2p_swarm:listen(Swarm, "/ip4/0.0.0.0/tcp/" ++ Port), ok = libp2p_swarm:listen(Swarm, "/ip6/::/tcp/" ++ Port), ok = pg2:create(self()), Swarm. -spec load_keys(atom()) -> {private_key(), public_key()}. load_keys(Name) -> ok = filelib:ensure_dir("keys/"), KeyFile = "keys/" ++ erlang:atom_to_list(Name) ++ ".pem", case libp2p_crypto:load_keys(KeyFile) of {ok, PrivKey, PubKey} -> {PrivKey, PubKey}; {error, _} -> Keys = {PrivKey, PubKey} = libp2p_crypto:generate_keys(), ok = libp2p_crypto:save_keys(Keys, KeyFile), {PrivKey, PubKey} end. add_blocks([], Blockchain) -> Blockchain; add_blocks([Block|Tail], Blockchain=#blockchain{blocks=Blocks}) -> add_blocks(Tail, Blockchain#blockchain{blocks=maps:put(hash_block(Block), Block, Blocks)}). get_miner(#block{transactions=[#coinbase_txn{payee=Account}|_]}) -> Account. -spec absorb_transactions([transaction(), ...], ledger()) -> {ok, ledger()} | {error, bad_transaction | bad_signature}. absorb_transactions([], Ledger) -> {ok, Ledger}; absorb_transactions([#coinbase_txn{payee=Address, amount=Amount}|Tail], Ledger) -> absorb_transactions(Tail, credit_account(Address, Amount, Ledger)); absorb_transactions([#payment_txn{amount=Amount}|_Tail], _Ledger) when Amount =< 0 -> {error, bad_transaction}; absorb_transactions([Txn=#payment_txn{payer=Payer, payee=Payee, amount=Amount, nonce=Nonce, signature=Sig}|Tail], Ledger) -> PubKey = libp2p_crypto:address_to_pubkey(Payer), case public_key:verify(term_to_binary(Txn#payment_txn{signature= <<>>}), sha256, Sig, PubKey) of true -> case credit_account(Payee, Amount, debit_account(Payer, Amount, Nonce, Ledger)) of error -> {error, bad_transaction}; NewLedger -> absorb_transactions(Tail, NewLedger) end; false -> {error, bad_signature} end. 
credit_account(_Address, _Amount, error) -> error; credit_account(Address, Amount, Ledger) -> Entry = maps:get(Address, Ledger, #ledger_entry{}), maps:put(Address, Entry#ledger_entry{balance = Entry#ledger_entry.balance + Amount}, Ledger). debit_account(Address, Amount, Nonce, Ledger) -> Entry = maps:get(Address, Ledger, #ledger_entry{}), case Nonce == Entry#ledger_entry.nonce + 1 andalso (Entry#ledger_entry.balance - Amount) >= 0 of true -> maps:put(Address, Entry#ledger_entry{balance = Entry#ledger_entry.balance - Amount, nonce=Nonce}, Ledger); _ -> error end. start_mining(ParentBlock, Swarm, Mempool) -> {ok, PubKey, _} = libp2p_swarm:keys(Swarm), Address = libp2p_crypto:pubkey_to_address(PubKey), NextHeight = ParentBlock#block.height + 1, CoinBase = #coinbase_txn{payee=Address, amount=reward_amount(NextHeight)}, NewBlock = #block{prev_hash=hash_block(ParentBlock), height=NextHeight, transactions=[CoinBase|lists:reverse(Mempool)]}, start_miner(NewBlock, self()). validate_chain(Block, Blockchain) -> check the hash is under the limit and there 's only one coinbase transaction <<I:256/integer-unsigned-little>> = hash_block(Block), case I < ?LIMIT of false -> {error, insufficent_hash}; true -> case length(lists:filter(fun(E) -> is_record(E, coinbase_txn) end, Block#block.transactions)) == 1 of true -> case parent_blocks([Block], Blockchain) of {error, Reason} -> {error, Reason}; {ok, Blocks} -> try ValidatedLedger = lists:foldl(fun(_Block, {error, _}=Acc) -> Acc; (ABlock, Ledger) -> case absorb_transactions(ABlock#block.transactions, Ledger) of {ok, NewLedger} -> NewLedger; _ -> throw({Ledger, ABlock}) end end, #{}, Blocks), {ValidatedLedger, Block} catch throw:{Ledger, B} -> {Ledger, B} end end; _ -> {error, incorrect_coinbase_txn} end end. -spec hash_block(block()) -> binary(). hash_block(Block) -> crypto:hash(sha256, erlang:term_to_binary(Block)). 
parent_blocks([Head|Tail], Blockchain) -> Hash = Head#block.prev_hash, case maps:find(Hash, Blockchain#blockchain.blocks) of error -> GenesisHash = Blockchain#blockchain.genesis_hash, case hash_block(Head) == GenesisHash of true -> {ok, [Head|Tail]}; false -> {error, {missing_block, Hash}} end; {ok, PrevBlock} -> parent_blocks([PrevBlock, Head|Tail], Blockchain) end. Reward amounts start at 2018 and decrement by one every height until they reach 0 . reward_amount(Height) -> max(0, 2018 - Height). -spec start_miner(block(), pid()) -> {'ok', pid()}. start_miner(Block, Parent) -> {ok, erlang:spawn_link(fun() -> mine(Block, Parent) end)}. mine(Block, Parent) -> <<I:256/integer-unsigned-little>> = hash_block(Block), case I < ?LIMIT of true -> Parent ! {mined_block, Block, self}, catch [ M ! {block, Block} || M <- pg2:get_members(Parent)]; false -> mine(Block#block{magic=crypto:strong_rand_bytes(16)}, Parent) end.
7ce3cd9e18e4efd5efc6d07f19e201deff539a619115a47dedd9bd8dc28ee697
FlowerWrong/mblog
bad.erl
%% --- Excerpted from " Programming Erlang , Second Edition " , published by The Pragmatic Bookshelf . %% Copyrights apply to this code. It may not be used to create training material, %% courses, books, articles, and the like. Contact us if you are in doubt. %% We make no guarantees that this code is fit for any purpose. %% Visit for more book information. %%--- -module(bad). %% There are lots's of delibeate errors in this file %% so it's not in the makefile foo(1,2) -> a; foo(2,3,a) -> b. foo(A, B) -> bar(A, dothis(X), B), baz(Y, X). foo() -> case bar() of 1 -> X = 1, Y = 2; 2 -> X = 3 end, b(X, Y). foo() -> case bar() of 1 -> X = 1, Y = 2; 2 -> X = 3 end, b(X). foo(X, L) -> lists:map(fun(X) -> 2*X end, L). foo(X, L) -> lists:map(fun(Z) -> 2*Z end, L). foo(X) -> io:format("hello ~p~n, [X]).
null
https://raw.githubusercontent.com/FlowerWrong/mblog/3233ede938d2019a7b57391405197ac19c805b27/categories/erlang/demo/jaerlang2_code/bad.erl
erlang
--- Copyrights apply to this code. It may not be used to create training material, courses, books, articles, and the like. Contact us if you are in doubt. We make no guarantees that this code is fit for any purpose. Visit for more book information. --- There are lots's of delibeate errors in this file so it's not in the makefile
Excerpted from " Programming Erlang , Second Edition " , published by The Pragmatic Bookshelf . -module(bad). foo(1,2) -> a; foo(2,3,a) -> b. foo(A, B) -> bar(A, dothis(X), B), baz(Y, X). foo() -> case bar() of 1 -> X = 1, Y = 2; 2 -> X = 3 end, b(X, Y). foo() -> case bar() of 1 -> X = 1, Y = 2; 2 -> X = 3 end, b(X). foo(X, L) -> lists:map(fun(X) -> 2*X end, L). foo(X, L) -> lists:map(fun(Z) -> 2*Z end, L). foo(X) -> io:format("hello ~p~n, [X]).
e9ee3f7057f03746fb9d009a30a2ad4c064c751a33928dba50c729e3e0794800
simplegeo/erlang
test_server.erl
%% %% %CopyrightBegin% %% Copyright Ericsson AB 1996 - 2009 . All Rights Reserved . %% The contents of this file are subject to the Erlang Public License , Version 1.1 , ( the " License " ) ; you may not use this file except in %% compliance with the License. You should have received a copy of the %% Erlang Public License along with this software. If not, it can be %% retrieved online at /. %% Software distributed under the License is distributed on an " AS IS " %% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See %% the License for the specific language governing rights and limitations %% under the License. %% %% %CopyrightEnd% %% -module(test_server). -define(DEFAULT_TIMETRAP_SECS, 60). START % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % -export([start/1,start/2]). %%% TEST_SERVER_CTRL INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -export([run_test_case_apply/1,init_target_info/0,init_purify/0]). -export([cover_compile/1,cover_analyse/2]). %%% TEST_SERVER_SUP INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -export([get_loc/1]). %%% TEST SUITE INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -export([lookup_config/2]). -export([fail/0,fail/1,format/1,format/2,format/3]). -export([capture_start/0,capture_stop/0,capture_get/0]). -export([messages_get/0]). -export([hours/1,minutes/1,seconds/1,sleep/1,timecall/3]). -export([timetrap_scale_factor/0,timetrap/1,timetrap_cancel/1]). -export([m_out_of_n/3,do_times/4,do_times/2]). -export([call_crash/3,call_crash/4,call_crash/5]). -export([temp_name/1]). -export([start_node/3, stop_node/1, wait_for_node/1, is_release_available/1]). -export([app_test/1, app_test/2]). -export([is_native/1]). -export([comment/1]). -export([os_type/0]). -export([run_on_shielded_node/2]). -export([is_cover/0,is_debug/0,is_commercial/0]). -export([break/1,continue/0]). 
DEBUGGER INTERFACE % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % -export([purify_new_leaks/0, purify_format/2, purify_new_fds_inuse/0, purify_is_running/0]). %%% PRIVATE EXPORTED %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -export([]). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -record(state,{controller,jobs=[]}). -include("test_server_internal.hrl"). -include_lib("kernel/include/file.hrl"). -define(pl2a(M), test_server_sup:package_atom(M)). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% %% **** START *** CODE FOR REMOTE TARGET ONLY *** %% test_server %% This process is started only if the test is to be run on a remote target %% The process is then started on target A socket connection is established with the test_server_ctrl process %% on host, and information about target is sent to host. start([ControllerHost]) when is_atom(ControllerHost) -> start(atom_to_list(ControllerHost)); start(ControllerHost) when is_list(ControllerHost) -> start(ControllerHost,?MAIN_PORT). start(ControllerHost,ControllerPort) -> S = self(), Pid = spawn(fun() -> init(ControllerHost,ControllerPort,S) end), receive {Pid,started} -> {ok,Pid}; {Pid,Error} -> Error end. init(Host,Port,Starter) -> global:register_name(?MODULE,self()), process_flag(trap_exit,true), test_server_sup:cleanup_crash_dumps(), case gen_tcp:connect(Host,Port, [binary, {reuseaddr,true}, {packet,2}]) of {ok,MainSock} -> Starter ! {self(),started}, request(MainSock,{target_info,init_target_info()}), loop(#state{controller={Host,MainSock}}); Error -> Starter ! {self(),{error, {could_not_contact_controller,Error}}} end. 
init_target_info() -> [$.|Emu] = code:objfile_extension(), {_, OTPRel} = init:script_id(), TestServerDir = filename:absname(filename:dirname(code:which(?MODULE))), #target_info{os_family=test_server_sup:get_os_family(), os_type=os:type(), version=erlang:system_info(version), system_version=erlang:system_info(system_version), root_dir=code:root_dir(), test_server_dir=TestServerDir, emulator=Emu, otp_release=OTPRel, username=test_server_sup:get_username(), cookie=atom_to_list(erlang:get_cookie())}. loop(#state{controller={_,MainSock}} = State) -> receive {tcp, MainSock, <<1,Request/binary>>} -> State1 = decode_main(binary_to_term(Request),State), loop(State1); {tcp_closed, MainSock} -> gen_tcp:close(MainSock), halt(); {'EXIT',Pid,Reason} -> case lists:keysearch(Pid,1,State#state.jobs) of {value,{Pid,Name}} -> case Reason of normal -> ignore; _other -> request(MainSock,{job_proc_killed,Name,Reason}) end, NewJobs = lists:keydelete(Pid,1,State#state.jobs), loop(State#state{jobs = NewJobs}); false -> loop(State) end end. %% Decode request on main socket decode_main({job,Port,Name},#state{controller={Host,_},jobs=Jobs}=State) -> S = self(), NewJob = spawn_link(fun() -> job(Host,Port,S) end), receive {NewJob,started} -> State#state{jobs=[{NewJob,Name}|Jobs]}; {NewJob,_Error} -> State end. init_purify() -> purify_new_leaks(). %% Temporary job process on target %% This process will live while all test cases in the job are executed. %% A socket connection is established with the job process on host. job(Host,Port,Starter) -> process_flag(trap_exit,true), init_purify(), case gen_tcp:connect(Host,Port, [binary, {reuseaddr,true}, {packet,4}, {active,false}]) of {ok,JobSock} -> Starter ! {self(),started}, job(JobSock); Error -> Starter ! {self(),{error, {could_not_contact_controller,Error}}} end. 
%% job(JobSock) -> ok
%% Run one test job: create a fresh job directory (with a priv_dir
%% subdirectory), serve requests on JobSock until job_done, then ship the
%% contents of priv_dir back to the controller and clean up.
job(JobSock) ->
    JobDir = get_jobdir(),
    ok = file:make_dir(JobDir),
    ok = file:make_dir(filename:join(JobDir,?priv_dir)),
    put(test_server_job_sock,JobSock),
    put(test_server_job_dir,JobDir),
    {ok,Cwd} = file:get_cwd(),
    job_loop(JobSock),
    ok = file:set_cwd(Cwd),
    %% NOTE(review): this call was lost in a garbled comment in the source
    %% ("also recursively removes ok."); restored from context - send_privdir/2
    %% is defined below and is otherwise never called.
    send_privdir(JobDir,JobSock), % also recursively removes JobDir
    ok.

%% Build a unique, absolute job directory name from the local wall-clock
%% time plus the microsecond part of now() for uniqueness.
get_jobdir() ->
    Now = now(),
    {{Y,M,D},{H,Mi,S}} = calendar:now_to_local_time(Now),
    Basename = io_lib:format("~w-~2.2.0w-~2.2.0w_~2.2.0w.~2.2.0w.~2.2.0w_~w",
                             [Y,M,D,H,Mi,S,element(3,Now)]),
    %% if target has a file master, don't use the file server to look up cwd
    case lists:keymember(master,1,init:get_arguments()) of
        true ->
            {ok,Cwd} = file:get_cwd(),
            Cwd ++ "/" ++ Basename;
        false ->
            filename:absname(Basename)
    end.

%% Pack the job's priv_dir into a gzipped tarball and send it to the
%% controller, then remove the whole job directory. An empty priv_dir is
%% reported as empty_priv_dir and nothing is transferred.
send_privdir(JobDir,JobSock) ->
    LocalPrivDir = filename:join(JobDir,?priv_dir),
    case file:list_dir(LocalPrivDir) of
        {ok,List} when List/=[] ->
            Tarfile0 = ?priv_dir ++ ".tar.gz",
            Tarfile = filename:join(JobDir,Tarfile0),
            {ok,Tar} = erl_tar:open(Tarfile,[write,compressed,cooked]),
            ok = erl_tar:add(Tar,LocalPrivDir,?priv_dir,[]),
            ok = erl_tar:close(Tar),
            {ok,TarBin} = file:read_file(Tarfile),
            file:delete(Tarfile),
            ok = del_dir(JobDir),
            request(JobSock,{{privdir,Tarfile0},TarBin});
        _ ->
            ok = del_dir(JobDir),
            request(JobSock,{privdir,empty_priv_dir})
    end.

%% Recursively delete a file or directory tree. Anything that cannot be
%% stat'ed (e.g. a dangling symlink) gets a best-effort file:delete/1.
del_dir(Dir) ->
    case file:read_file_info(Dir) of
        {ok,#file_info{type=directory}} ->
            {ok,Cont} = file:list_dir(Dir),
            lists:foreach(fun(F) -> del_dir(filename:join(Dir,F)) end, Cont),
            ok = file:del_dir(Dir);
        {ok,#file_info{}} ->
            ok = file:delete(Dir);
        _r ->
            %% This might be a symlink - let's try to delete it!
            catch file:delete(Dir),
            ok
    end.

%%
%% Receive and decode request on job socket
%%
job_loop(JobSock) ->
    Request = recv(JobSock),
    case decode_job(Request) of
        ok -> job_loop(JobSock);
        {stop,R} -> R
    end.

%% Load a beam sent from the host into this node's code server.
decode_job({{beam,Mod,Which},Beam}) ->
    %% FIXME, shared directory structure on host and target required,
    %% "Library beams" are not loaded from ... /Patrik
    code:add_patha(filename:dirname(Which)),
    %% End of Patriks uglyness...
    {module,Mod} = code:load_binary(Mod,Which,Beam),
    ok;
%% Unpack a data_dir tarball sent from the host into the job directory.
decode_job({{datadir,Tarfile0},Archive}) ->
    JobDir = get(test_server_job_dir),
    Tarfile = filename:join(JobDir,Tarfile0),
    ok = file:write_file(Tarfile,Archive),
    % Cooked is temporary removed/broken
    % ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir},cooked]),
    ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir}]),
    ok = file:delete(Tarfile),
    ok;
%% Run one test case and report its result, plus any crash dumps produced.
decode_job({test_case,Case}) ->
    Result = run_test_case_apply(Case),
    JobSock = get(test_server_job_sock),
    request(JobSock,{test_case_result,Result}),
    case test_server_sup:tar_crash_dumps() of
        {error,no_crash_dumps} -> request(JobSock,{crash_dumps,no_crash_dumps});
        {ok,TarFile} ->
            {ok,TarBin} = file:read_file(TarFile),
            file:delete(TarFile),
            request(JobSock,{{crash_dumps,filename:basename(TarFile)},TarBin})
    end,
    ok;
%% Apply an MFA on behalf of the host and send back the result.
decode_job({sync_apply,{M,F,A}}) ->
    R = apply(M,F,A),
    request(get(test_server_job_sock),{sync_result,R}),
    ok;
decode_job(job_done) ->
    {stop,stopped}.

%%
%% **** STOP *** CODE FOR REMOTE TARGET ONLY ***
%%

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_compile({App,Exclude,Include,Cross}) ->
%%  {ok,AnalyseModules} | {error,Reason}
%%
%% App = atom() , name of application to be compiled
%% Exclude = [atom()], list of modules to exclude
%% Include = [atom()], list of modules outside of App that should be included
%%                in the cover compilation
%% Cross = [atoms()], list of modules outside of App that should be included
%%                in the cover compilation, but that shall not be part of
%%                the cover analysis for this application.
%%
%% Cover compile the given application. Return {ok,AnalyseMods} if application
%% is found, else {error,application_not_found}.
%% No application given: cover compile only the Include and Cross modules.
cover_compile({none,_Exclude,Include,Cross}) ->
    CompileMods = Include++Cross,
    case length(CompileMods) of
        0 ->
            io:fwrite("WARNING: No modules to cover compile!\n\n",[]),
            cover:start(),                      % start cover server anyway
            {ok,[]};
        N ->
            io:fwrite("Cover compiling ~w modules - "
                      "this may take some time... ",[N]),
            do_cover_compile(CompileMods),
            io:fwrite("done\n\n",[]),
            {ok,Include}
    end;
%% All modules of App excluded: compile only Include and Cross modules.
cover_compile({App,all,Include,Cross}) ->
    CompileMods = Include++Cross,
    case length(CompileMods) of
        0 ->
            io:fwrite("WARNING: No modules to cover compile!\n\n",[]),
            cover:start(),                      % start cover server anyway
            {ok,[]};
        N ->
            io:fwrite("Cover compiling '~w' (~w files) - "
                      "this may take some time... ",[App,N]),
            io:format("\nWARNING: All modules in \'~w\' are excluded\n"
                      "Only cover compiling modules in include list "
                      "and the modules\nin the cross cover file:\n"
                      "~p\n", [App,CompileMods]),
            do_cover_compile(CompileMods),
            io:fwrite("done\n\n",[]),
            {ok,Include}
    end;
%% Normal case: all modules in App's ebin, plus Include, minus Exclude,
%% plus Cross (Cross modules are compiled but not analysed for this app).
cover_compile({App,Exclude,Include,Cross}) ->
    case code:lib_dir(App) of
        {error,bad_name} ->
            case Include++Cross of
                [] ->
                    io:format("\nWARNING: Can't find lib_dir for \'~w\'\n"
                              "Not cover compiling!\n\n",[App]),
                    {error,application_not_found};
                CompileMods ->
                    io:fwrite("Cover compiling '~w' (~w files) - "
                              "this may take some time... ",
                              [App,length(CompileMods)]),
                    io:format("\nWARNING: Can't find lib_dir for \'~w\'\n"
                              "Only cover compiling modules in include list: "
                              "~p\n", [App,Include]),
                    do_cover_compile(CompileMods),
                    io:fwrite("done\n\n",[]),
                    {ok,Include}
            end;
        LibDir ->
            EbinDir = filename:join([LibDir,"ebin"]),
            WC = filename:join(EbinDir,"*.beam"),
            AllMods = module_names(filelib:wildcard(WC)),
            AnalyseMods = (AllMods ++ Include) -- Exclude,
            CompileMods = AnalyseMods ++ Cross,
            case length(CompileMods) of
                0 ->
                    io:fwrite("WARNING: No modules to cover compile!\n\n",[]),
                    cover:start(),              % start cover server anyway
                    {ok,[]};
                N ->
                    io:fwrite("Cover compiling '~w' (~w files) - "
                              "this may take some time... ",[App,N]),
                    do_cover_compile(CompileMods),
                    io:fwrite("done\n\n",[]),
                    {ok,AnalyseMods}
            end
    end.

%% Beam file names -> module name atoms ("x/y/foo.beam" -> foo).
module_names(Beams) ->
    [list_to_atom(filename:basename(filename:rootname(Beam))) || Beam <- Beams].

do_cover_compile(Modules) ->
    do_cover_compile1(lists:usort(Modules)).    % remove duplicates

%% Never cover compile the cover tool or the test server itself -
%% that would interfere with the ongoing run.
do_cover_compile1([Dont|Rest]) when Dont=:=cover;
                                    Dont=:=test_server;
                                    Dont=:=test_server_ctrl ->
    do_cover_compile1(Rest);
do_cover_compile1([M|Rest]) ->
    case {code:is_sticky(M),code:is_loaded(M)} of
        {true,_} ->
            %% temporarily unstick to allow cover to reload the module
            code:unstick_mod(M),
            case cover:compile_beam(M) of
                {ok,_} ->
                    ok;
                Error ->
                    io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
                              [M,Error])
            end,
            code:stick_mod(M),
            do_cover_compile1(Rest);
        {false,false} ->
            %% load first so stickiness can be determined on the retry
            case code:load_file(M) of
                {module,_} ->
                    do_cover_compile1([M|Rest]);
                Error ->
                    io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]),
                    do_cover_compile1(Rest)
            end;
        {false,_} ->
            case cover:compile_beam(M) of
                {ok,_} ->
                    ok;
                Error ->
                    io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
                              [M,Error])
            end,
            do_cover_compile1(Rest)
    end;
do_cover_compile1([]) ->
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_analyse(Analyse,Modules) -> [{M,{Cov,NotCov,Details}}]
%%
%% Analyse = {details,Dir} | details | {overview,Dir} | overview
%% Modules = [atom()], the modules to analyse
%%
%% Cover analysis. If this is a remote target, analyse_to_file can not be
%% used. In that case the analyse level 'line' is used instead if
%% Analyse==details.
%%
%% If this is a local target, the test directory is given
%% (Analyse=={details,Dir}) and analyse_to_file can be used directly.
%%
%% If Analyse==overview | {overview,Dir} analyse_to_file is not used, only
%% an overview containing the number of covered/not covered lines in each
%% module.
%%
%% Also, if a Dir exists, cover data will be exported to a file called
%% all.coverdata in that directory.
%% See the header comment above for the contract. Builds a per-module
%% details fun according to the Analyse mode, maps cover:analyse/2 over
%% Modules, and finally stops the cover server (restoring stickiness of
%% any modules that had to be unstuck).
cover_analyse(Analyse,Modules) ->
    io:fwrite("Cover analysing...\n",[]),
    DetailsFun =
        case Analyse of
            {details,Dir} ->
                case cover:export(filename:join(Dir,"all.coverdata")) of
                    ok ->
                        fun(M) ->
                                OutFile = filename:join(Dir,
                                                        atom_to_list(M) ++
                                                        ".COVER.html"),
                                case cover:analyse_to_file(M,OutFile,[html]) of
                                    {ok,_} ->
                                        {file,OutFile};
                                    Error ->
                                        Error
                                end
                        end;
                    Error ->
                        fun(_) -> Error end
                end;
            details ->
                %% remote target: no file system shared with the host,
                %% deliver raw line data instead of analyse_to_file output
                fun(M) ->
                        case cover:analyse(M,line) of
                            {ok,Lines} ->
                                {lines,Lines};
                            Error ->
                                Error
                        end
                end;
            {overview,Dir} ->
                case cover:export(filename:join(Dir,"all.coverdata")) of
                    ok ->
                        fun(_) -> undefined end;
                    Error ->
                        fun(_) -> Error end
                end;
            overview ->
                fun(_) -> undefined end
        end,
    R = lists:map(
          fun(M) ->
                  case cover:analyse(M,module) of
                      {ok,{M,{Cov,NotCov}}} ->
                          {M,{Cov,NotCov,DetailsFun(M)}};
                      Err ->
                          io:fwrite("WARNING: Analysis failed for ~w. Reason: ~p\n",
                                    [M,Err]),
                          {M,Err}
                  end
          end, Modules),
    Sticky = unstick_all_sticky(node()),
    cover:stop(),
    stick_all_sticky(node(),Sticky),
    R.

%% Unstick every sticky cover-compiled module (so cover:stop/0 can reload
%% the original code) and return the list of modules that were unstuck.
unstick_all_sticky(Node) ->
    lists:filter(
      fun(M) ->
              case code:is_sticky(M) of
                  true ->
                      rpc:call(Node,code,unstick_mod,[M]),
                      true;
                  false ->
                      false
              end
      end,
      cover:modules()).

%% Re-stick the modules previously unstuck by unstick_all_sticky/1.
stick_all_sticky(Node,Sticky) ->
    lists:foreach(
      fun(M) ->
              rpc:call(Node,code,stick_mod,[M])
      end,
      Sticky).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% run_test_case_apply(Mod,Func,Args,Name,RunInit,MultiplyTimetrap) ->
%%    {Time,Value,Loc,Opts,Comment} | {died,Reason,unknown,Comment}
%%
%% Time = float() (seconds)
%% Value = term()
%% Loc = term()
%% Comment = string()
%% Reason = term()
%%
%% Spawns off a process (case process) that actually runs the test suite.
%% The case process will have the job process as group leader, which makes
%% it possible to capture all its output from io:format/2, etc.
%%
%% The job process then sits down and waits for news from the case process.
%% This might be io requests (which are redirected to the log files).
%% Returns a tuple with the time spent (in seconds) in the test case,
%% the return value from the test case or an {'EXIT',Reason} if the case
%% failed, Loc points out where the test case crashed (if it did). Loc
%% is either the name of the function, or {<Module>,<Line>} of the last
%% line executed that had a ?line macro. If the test case did execute
%% erase/0 or similar, it may be empty. Comment is the last comment added
%% by test_server:comment/1, the reason if test_server:fail has been
%% called or the comment given by the return value {comment,Comment} from
%% a test case.
%%
%% {died,Reason,unknown,Comment} is returned if the test case was killed
%% by some other process. Reason is the kill reason provided.
%%
%% MultiplyTimetrap indicates a possible extension of all timetraps.
%% Timetraps will be multiplied by this integer. If it is infinity, no
%% timetraps will be started at all.

%% Entry point for running one test case on this node: sets up purify/
%% valgrind bookkeeping, runs the case, and wraps the result together with
%% detected failures and the process count before/after (leak detection).
run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,MultiplyTimetrap}) ->
    purify_format("Test case #~w ~w:~w/1", [CaseNum, Mod, Func]),
    case os:getenv("TS_RUN_VALGRIND") of
        false ->
            ok;
        _ ->
            %% make valgrind log files identifiable per test case
            os:putenv("VALGRIND_LOGFILE_INFIX",atom_to_list(Mod)++"."++
                      atom_to_list(Func)++"-")
    end,
    test_server_h:testcase({Mod,Func,1}),
    ProcBef = erlang:system_info(process_count),
    Result = run_test_case_apply(Mod, Func, Args, Name, RunInit,
                                 MultiplyTimetrap),
    ProcAft = erlang:system_info(process_count),
    purify_new_leaks(),
    DetFail = get(test_server_detected_fail),
    {Result,DetFail,ProcBef,ProcAft}.
%% Rewrite data_dir/priv_dir in the config to point into the local job
%% directory when running on a remote target, then run the case.
run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
    case get(test_server_job_dir) of
	undefined ->
	    %% i'm a local target
	    do_run_test_case_apply(Mod, Func, Args, Name, RunInit,
				   MultiplyTimetrap);
	JobDir ->
	    %% i'm a remote target
	    case Args of
		[Config] when is_list(Config) ->
		    {value,{data_dir,HostDataDir}} =
			lists:keysearch(data_dir, 1, Config),
		    DataBase = filename:basename(HostDataDir),
		    TargetDataDir = filename:join(JobDir, DataBase),
		    Config1 = lists:keyreplace(data_dir, 1, Config,
					       {data_dir,TargetDataDir}),
		    TargetPrivDir = filename:join(JobDir, ?priv_dir),
		    Config2 = lists:keyreplace(priv_dir, 1, Config1,
					       {priv_dir,TargetPrivDir}),
		    do_run_test_case_apply(Mod, Func, [Config2], Name,
					   RunInit, MultiplyTimetrap);
		_other ->
		    do_run_test_case_apply(Mod, Func, Args, Name,
					   RunInit, MultiplyTimetrap)
	    end
    end.

%% Spawn the case process (with ourselves as its group leader so all of
%% its io is captured), then enter the message loop until it is done.
do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
    {ok,Cwd} = file:get_cwd(),
    Args2Print = case Args of
		     [Args1] when is_list(Args1) ->
			 %% tc_group_result can be huge - don't print it
			 lists:keydelete(tc_group_result, 1, Args1);
		     _ ->
			 Args
		 end,
    print(minor, "Test case started with:\n~s:~s(~p)\n",
	  [Mod,Func,Args2Print]),
    print(minor, "Current directory is ~p\n", [Cwd]),
    print_timestamp(minor,"Started at "),
    TCCallback = get(test_server_testcase_callback),
    Ref = make_ref(),
    OldGLeader = group_leader(),
    %% Set ourself to group leader for the spawned process
    group_leader(self(),self()),
    Pid = spawn_link(
	    fun() ->
		    run_test_case_eval(Mod, Func, Args, Name, Ref,
				       RunInit, MultiplyTimetrap, TCCallback)
	    end),
    group_leader(OldGLeader, self()),
    put(test_server_detected_fail, []),
    run_test_case_msgloop(Ref, Pid, false, false, "").

%% Ugly bug (pre R5A):
%% If this process (group leader of the test case) terminates before
%% all messages have been replied back to the io server, the io server
%% hangs.
%% Fixed by the 20 milli timeout check here, and by using monitor in
%% io.erl (livrem OCH hangslen mao:)
%%
%% A test case is known to have failed if it returns {'EXIT',_} tuple,
%% or sends a message {failed,File,Line} to its group_leader
%%
%% Message loop for the group leader of the case process. Redirects io
%% to the log files, handles capture, comments, sync applies, detected
%% failures and the termination of the case process. Once a result is
%% known (Terminate = {true,RetVal}), a short 20 ms drain period lets
%% outstanding io requests get their replies before RetVal is returned.
run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) ->
    %% NOTE: Keep job_proxy_msgloop/0 up to date when changes
    %%       are made in this function!
    {Timeout,ReturnValue} =
	case Terminate of
	    {true, ReturnVal} -> {20, ReturnVal};
	    false -> {infinity, should_never_appear}
	end,
    receive
	{abort_current_testcase,Reason,From} ->
	    Line = get_loc(Pid),
	    Mon = erlang:monitor(process, Pid),
	    exit(Pid,{testcase_aborted,Reason,Line}),
	    erlang:yield(),
	    From ! {self(),abort_current_testcase,ok},
	    NewComment =
		receive
		    {'DOWN', Mon, process, Pid, _} ->
			Comment
		after 10000 ->
			%% Pid is probably trapping exits, hit it harder...
			exit(Pid, kill),
			%% here's the only place we know Reason, so we save
			%% it as a comment, potentially replacing user data
			Error = lists:flatten(io_lib:format("Aborted: ~p",[Reason])),
			Error1 = lists:flatten([string:strip(S,left) ||
						S <- string:tokens(Error,[$\n])]),
			if length(Error1) > 63 ->
				string:substr(Error1,1,60) ++ "...";
			   true ->
				Error1
			end
		end,
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,NewComment);
	{io_request,From,ReplyAs,{put_chars,io_lib,Func,[Format,Args]}}
	when is_list(Format) ->
	    Msg = (catch io_lib:Func(Format,Args)),
	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,io_lib,Func,[Format,Args]}}
	when is_atom(Format) ->
	    Msg = (catch io_lib:Func(Format,Args)),
	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,Bytes}} ->
	    run_test_case_msgloop_io(
	      ReplyAs,CaptureStdout,Bytes,From,put_chars),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,unicode,io_lib,Func,[Format,Args]}}
	when is_list(Format) ->
	    Msg = unicode_to_latin1(catch io_lib:Func(Format,Args)),
	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,latin1,io_lib,Func,[Format,Args]}}
	when is_list(Format) ->
	    Msg = (catch io_lib:Func(Format,Args)),
	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,unicode,io_lib,Func,[Format,Args]}}
	when is_atom(Format) ->
	    Msg = unicode_to_latin1(catch io_lib:Func(Format,Args)),
	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,latin1,io_lib,Func,[Format,Args]}}
	when is_atom(Format) ->
	    Msg = (catch io_lib:Func(Format,Args)),
	    run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,unicode,Bytes}} ->
	    run_test_case_msgloop_io(
	      ReplyAs,CaptureStdout,unicode_to_latin1(Bytes),From,put_chars),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{io_request,From,ReplyAs,{put_chars,latin1,Bytes}} ->
	    run_test_case_msgloop_io(
	      ReplyAs,CaptureStdout,Bytes,From,put_chars),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	IoReq when element(1, IoReq) == io_request ->
	    %% something else, just pass it on
	    group_leader() ! IoReq,
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{structured_io,ClientPid,Msg} ->
	    output(Msg, ClientPid),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{capture,NewCapture} ->
	    run_test_case_msgloop(Ref,Pid,NewCapture,Terminate,Comment);
	{sync_apply,From,MFA} ->
	    sync_local_or_remote_apply(false,From,MFA),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{sync_apply_proxy,Proxy,From,MFA} ->
	    sync_local_or_remote_apply(Proxy,From,MFA),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{printout,Detail,Format,Args} ->
	    print(Detail,Format,Args),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	{comment,NewComment} ->
	    Terminate1 =
		case Terminate of
		    {true,{Time,Value,Loc,Opts,_OldComment}} ->
			{true,{Time,Value,mod_loc(Loc),Opts,NewComment}};
		    Other ->
			Other
		end,
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate1,NewComment);
	{'EXIT',Pid,{Ref,Time,Value,Loc,Opts}} ->
	    %% normal end of the test case (Time is in microseconds)
	    RetVal = {Time/1000000,Value,mod_loc(Loc),Opts,Comment},
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
	{'EXIT',Pid,Reason} ->
	    case Reason of
		{timetrap_timeout,TVal,Loc} ->
		    %% convert Loc to form that can be formatted
		    Loc1 = mod_loc(Loc),
		    {Mod,Func} = get_mf(Loc1),
		    %% The framework functions mustn't execute on this
		    %% group leader process or io will cause deadlock,
		    %% so we spawn a dedicated process for the operation
		    %% and let the group leader go back to handle io.
		    spawn_fw_call(Mod,Func,Pid,{timetrap_timeout,TVal},
				  Loc1,self(),Comment),
		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
		{timetrap_timeout,TVal,Loc,InitOrEnd} ->
		    Loc1 = mod_loc(Loc),
		    {Mod,_Func} = get_mf(Loc1),
		    spawn_fw_call(Mod,InitOrEnd,Pid,{timetrap_timeout,TVal},
				  Loc1,self(),Comment),
		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
		{testcase_aborted,AbortReason,AbortLoc} ->
		    %% NOTE(review): renamed from {testcase_aborted,Reason,Loc} -
		    %% Reason was already bound to the whole exit reason here, so
		    %% the pattern could never match and aborted cases fell
		    %% through to the generic '_' branch below.
		    Loc1 = mod_loc(AbortLoc),
		    {Mod,Func} = get_mf(Loc1),
		    spawn_fw_call(Mod,Func,Pid,{testcase_aborted,AbortReason},
				  Loc1,self(),Comment),
		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
		killed ->
		    %% result of an exit(TestCase,kill) call, which is the
		    %% only way to abort a testcase process that traps exits
		    %% (see abort_current_testcase)
		    spawn_fw_call(undefined,undefined,Pid,testcase_aborted_or_killed,
				  unknown,self(),Comment),
		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
		_ ->
		    %% the testcase has terminated because of Reason (e.g. an
		    %% exit because a linked process failed)
		    spawn_fw_call(undefined,undefined,Pid,Reason,
				  unknown,self(),Comment),
		    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment)
	    end;
	{_FwCallPid,fw_notify_done,RetVal} ->
	    %% the framework has been notified, we're finished
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
	{'EXIT',_FwCallPid,{fw_notify_done,Func,Error}} ->
	    %% a framework function failed
	    CB = os:getenv("TEST_SERVER_FRAMEWORK"),
	    Loc = case CB of
		      false -> {test_server,Func};
		      _ -> {list_to_atom(CB),Func}
		  end,
	    RetVal = {died,{framework_error,Loc,Error},Loc,"Framework error"},
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment);
	{failed,File,Line} ->
	    put(test_server_detected_fail,
		[{File, Line}| get(test_server_detected_fail)]),
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	_Other when not is_tuple(_Other) ->
	    %% ignore anything not generated by test server
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment);
	_Other when element(1, _Other) /= 'EXIT',
		    element(1, _Other) /= started,
		    element(1, _Other) /= finished,
		    element(1, _Other) /= print ->
	    %% ignore anything not generated by test server
	    run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment)
    after Timeout ->
	    ReturnValue
    end.

%% Reply to an io request, forward the text to the capture process (if
%% capturing is on) and log it in the minor log file.
run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func) ->
    case Msg of
	{'EXIT',_} ->
	    From ! {io_reply,ReplyAs,{error,Func}};
	_ ->
	    From ! {io_reply,ReplyAs,ok}
    end,
    if CaptureStdout /= false ->
	    CaptureStdout ! {captured,Msg};
       true ->
	    ok
    end,
    output({minor,Msg},From).

%% Forward log output to test_server_ctrl on the local node or the host.
output(Msg,Sender) ->
    local_or_remote_apply({test_server_ctrl,output,[Msg,Sender]}).

%% Run the framework end_tc notification in a dedicated process (never on
%% the group leader - see comment in run_test_case_msgloop/5) and report
%% the final result back to SendTo with a fw_notify_done message.
spawn_fw_call(Mod,{init_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why,
	      Loc,SendTo,Comment) ->
    FwCall =
	fun() ->
		Skip = {skip,{failed,{Mod,init_per_testcase,Why}}},
		%% if init_per_testcase fails, the test case
		%% should be skipped
		case catch test_server_sup:framework_call(
			     end_tc,[?pl2a(Mod),Func,{Pid,Skip,[[]]}]) of
		    {'EXIT',FwEndTCErr} ->
			exit({fw_notify_done,end_tc,FwEndTCErr});
		    _ ->
			ok
		end,
		%% finished, report back
		SendTo ! {self(),fw_notify_done,
			  {TVal/1000,Skip,Loc,[],Comment}}
	end,
    spawn_link(FwCall);
spawn_fw_call(Mod,{end_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why,
	      Loc,SendTo,_Comment) ->
    FwCall =
	fun() ->
		Conf = [{tc_status,ok}],
		%% if end_per_testcase fails, the test case should be
		%% reported successful with a warning printed as comment
		case catch test_server_sup:framework_call(end_tc,
							  [?pl2a(Mod),Func,
							   {Pid,
							    {failed,{Mod,end_per_testcase,Why}},
							    [Conf]}]) of
		    {'EXIT',FwEndTCErr} ->
			exit({fw_notify_done,end_tc,FwEndTCErr});
		    _ ->
			ok
		end,
		%% finished, report back
		SendTo ! {self(),fw_notify_done,
			  {TVal/1000,{error,{Mod,end_per_testcase,Why}},Loc,[],
			   ["<font color=\"red\">"
			    "WARNING: end_per_testcase timed out!"
			    "</font>"]}}
	end,
    spawn_link(FwCall);
spawn_fw_call(Mod,Func,Pid,Error,Loc,SendTo,Comment) ->
    FwCall =
	fun() ->
		case catch fw_error_notify(Mod,Func,[],
					   Error,Loc) of
		    {'EXIT',FwErrorNotifyErr} ->
			exit({fw_notify_done,error_notification,
			      FwErrorNotifyErr});
		    _ ->
			ok
		end,
		Conf = [{tc_status,{failed,timetrap_timeout}}],
		case catch test_server_sup:framework_call(end_tc,
							  [?pl2a(Mod),Func,
							   {Pid,Error,[Conf]}]) of
		    {'EXIT',FwEndTCErr} ->
			exit({fw_notify_done,end_tc,FwEndTCErr});
		    _ ->
			ok
		end,
		%% finished, report back
		SendTo ! {self(),fw_notify_done,{died,Error,Loc,Comment}}
	end,
    spawn_link(FwCall).

%% The job proxy process forwards messages between the test case
%% process on a shielded node (and its descendants) and the job process.
%%
%% The job proxy process have to be started by the test-case process
%% on the shielded node!
start_job_proxy() ->
    group_leader(spawn(fun () -> job_proxy_msgloop() end), self()), ok.

%% The io_reply_proxy is not the most satisfying solution but it works...
io_reply_proxy(ReplyTo) ->
    receive
	IoReply when is_tuple(IoReply),
		     element(1, IoReply) == io_reply ->
	    ReplyTo ! IoReply;
	_ ->
	    io_reply_proxy(ReplyTo)
    end.

job_proxy_msgloop() ->
    receive
	%%
	%% Messages that need intervention by proxy...
	%%
	%% io stuff ...
	IoReq when tuple_size(IoReq) >= 2,
		   element(1, IoReq) == io_request ->
	    ReplyProxy = spawn(fun () -> io_reply_proxy(element(2, IoReq)) end),
	    group_leader() ! setelement(2, IoReq, ReplyProxy);
	%% test_server stuff...
	{sync_apply, From, MFA} ->
	    group_leader() ! {sync_apply_proxy, self(), From, MFA};
	{sync_result_proxy, To, Result} ->
	    To ! {sync_result, Result};
	%%
	%% Messages that need no intervention by proxy...
	%%
	Msg ->
	    group_leader() ! Msg
    end,
    job_proxy_msgloop().
%% A test case is known to have failed if it returns {'EXIT',_} tuple,
%% or sends a message {failed,File,Line} to its group_leader
%%
%% Body of the spawned case process: asks the framework's init_tc whether
%% and how to run the case, runs it via run_test_case_eval1/6, and finally
%% exits with {Ref,Time,Value,Loc,Opts} which the group leader's message
%% loop picks up as the result.
run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
		   MultiplyTimetrap, TCCallback) ->
    put(test_server_multiply_timetraps,MultiplyTimetrap),
    {{Time,Value},Loc,Opts} =
	case test_server_sup:framework_call(init_tc,[?pl2a(Mod),Func,Args0],
					    {ok,Args0}) of
	    {ok,Args} ->
		run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback);
	    Error = {error,_Reason} ->
		test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Error,Args0}]),
		{{0,{skip,{failed,Error}}},{Mod,Func},[]};
	    {fail,Reason} ->
		[Conf] = Args0,
		Conf1 = [{tc_status,{failed,Reason}} | Conf],
		fw_error_notify(Mod, Func, Conf, Reason),
		test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,
						       {{error,Reason},[Conf1]}]),
		{{0,{failed,Reason}},{Mod,Func},[]};
	    Skip = {skip,_Reason} ->
		test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Skip,Args0}]),
		{{0,Skip},{Mod,Func},[]};
	    {auto_skip,Reason} ->
		test_server_sup:framework_call(end_tc,[?pl2a(Mod),
						       Func,
						       {{skip,Reason},Args0}]),
		{{0,{skip,{fw_auto_skip,Reason}}},{Mod,Func},[]}
	end,
    exit({Ref,Time,Value,Loc,Opts}).
%% Run one test case (run_init) or one conf case (skip_init), including
%% init_per_testcase/end_per_testcase and the user testcase callback.
%% Returns {{Time,Result},Loc,Opts}.
run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
    case RunInit of
	run_init ->
	    put(test_server_init_or_end_conf,{init_per_testcase,Func}),
	    put(test_server_loc, {Mod,{init_per_testcase,Func}}),
	    ensure_timetrap(Args),
	    case init_per_testcase(Mod, Func, Args) of
		Skip = {skip,Reason} ->
		    Line = get_loc(),
		    Conf = [{tc_status,{skipped,Reason}}],
		    test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Skip,[Conf]}]),
		    {{0,{skip,Reason}},Line,[]};
		{skip_and_save,Reason,SaveCfg} ->
		    Line = get_loc(),
		    Conf = [{tc_status,{skipped,Reason}},{save_config,SaveCfg}],
		    test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,
							   {{skip,Reason},[Conf]}]),
		    {{0,{skip,Reason}},Line,[]};
		{ok,NewConf} ->
		    put(test_server_init_or_end_conf,undefined),
		    %% call user callback function if defined
		    NewConf1 = user_callback(TCCallback, Mod, Func, init, NewConf),
		    put(test_server_loc, {Mod,Func}),
		    %% execute the test case
		    {{T,Return},Loc} = {ts_tc(Mod, Func, [NewConf1]),get_loc()},
		    {EndConf,TSReturn,FWReturn} =
			case Return of
			    {E,TCError} when E=='EXIT' ; E==failed ->
				fw_error_notify(Mod, Func, NewConf1,
						TCError, mod_loc(Loc)),
				{[{tc_status,{failed,TCError}}|NewConf1],
				 Return,{error,TCError}};
			    SaveCfg={save_config,_} ->
				{[{tc_status,ok},SaveCfg|NewConf1],Return,ok};
			    {skip_and_save,Why,SaveCfg} ->
				Skip = {skip,Why},
				{[{tc_status,{skipped,Why}},{save_config,SaveCfg}|NewConf1],
				 Skip,Skip};
			    {skip,Why} ->
				{[{tc_status,{skipped,Why}}|NewConf1],Return,Return};
			    _ ->
				{[{tc_status,ok}|NewConf1],Return,ok}
			end,
		    %% call user callback function if defined
		    EndConf1 = user_callback(TCCallback, Mod, Func, 'end', EndConf),
		    {FWReturn1,TSReturn1,EndConf2} =
			case end_per_testcase(Mod, Func, EndConf1) of
			    SaveCfg1={save_config,_} ->
				{FWReturn,TSReturn,
				 [SaveCfg1|lists:keydelete(save_config, 1, EndConf1)]};
			    {fail,ReasonToFail} -> % user has failed the testcase
				fw_error_notify(Mod, Func, EndConf1, ReasonToFail),
				{{error,ReasonToFail},{failed,ReasonToFail},EndConf1};
			    {failed,{_,end_per_testcase,_}} = Failure -> % unexpected termination
				{Failure,TSReturn,EndConf1};
			    _ ->
				{FWReturn,TSReturn,EndConf1}
			end,
		    case test_server_sup:framework_call(end_tc, [?pl2a(Mod), Func,
								 {FWReturn1,[EndConf2]}]) of
			{fail,Reason} ->
			    fw_error_notify(Mod, Func, EndConf2, Reason),
			    {{T,{failed,Reason}},{Mod,Func},[]};
			_ ->
			    {{T,TSReturn1},Loc,[]}
		    end
	    end;
	skip_init ->
	    %% call user callback function if defined
	    Args1 = user_callback(TCCallback, Mod, Func, init, Args),
	    ensure_timetrap(Args1),
	    %% ts_tc does a catch
	    put(test_server_loc, {Mod,Func}),
	    %% if this is a named conf group, the test case (init or end conf)
	    %% should be called with the name as the first argument
	    Args2 = if Name == undefined -> Args1;
		       true -> [Name | Args1]
		    end,
	    %% execute the conf test case
	    {{T,Return},Loc} = {ts_tc(Mod, Func, Args2),get_loc()},
	    %% call user callback function if defined
	    Return1 = user_callback(TCCallback, Mod, Func, 'end', Return),
	    {Return2,Opts} = process_return_val([Return1], Mod,Func,Args1,
						Loc, Return1),
	    {{T,Return2},Loc,Opts}
    end.

%% the return value is a list and we have to check if it contains
%% the result of an end conf case or if it's a Config list
process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) ->
    ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result],
    %% check if all elements in the list are valid end conf return value tuples
    case lists:all(fun(Val) when is_tuple(Val) ->
			   lists:any(fun(T) -> T == element(1, Val) end,
				     ReturnTags);
		      (ok) ->
			   true;
		      (_) ->
			   false
		   end, Return) of
	true ->	     % must be return value from end conf case
	    process_return_val1(Return, M,F,A, Loc, Final, []);
	false -> % must be Config value from init conf case
	    test_server_sup:framework_call(end_tc, [?pl2a(M),F,{ok,A}]),
	    {Return,[]}
    end;
%% the return value is not a list, so it's the return value from an
%% end conf case or it's a dummy value that can be ignored
process_return_val(Return, M,F,A, Loc, Final) ->
    process_return_val1(Return, M,F,A, Loc, Final, []).
%% Walk the (list of) end-conf return values: report crashes/failures,
%% thread save_config entries into the config, pick up skip/comment as the
%% final result and collect return_group_result entries into SaveOpts.
process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts) when E=='EXIT';
										 E==failed ->
    fw_error_notify(M,F,A, TCError, mod_loc(Loc)),
    test_server_sup:framework_call(end_tc,
				   [?pl2a(M),F,{{error,TCError},
						[[{tc_status,{failed,TCError}}|Args]]}]),
    {Failed,SaveOpts};
process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) ->
    process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts);
process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) ->
    process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc, {skip,Why}, SaveOpts);
process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final, SaveOpts) ->
    process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]);
process_return_val1([RetVal={Tag,_}|Opts], M,F,A, Loc, _, SaveOpts) when Tag==skip;
									 Tag==comment ->
    process_return_val1(Opts, M,F,A, Loc, RetVal, SaveOpts);
process_return_val1([_|Opts], M,F,A, Loc, Final, SaveOpts) ->
    %% ignore anything else (e.g. a plain 'ok')
    process_return_val1(Opts, M,F,A, Loc, Final, SaveOpts);
process_return_val1([], M,F,A, _Loc, Final, SaveOpts) ->
    test_server_sup:framework_call(end_tc, [?pl2a(M),F,{Final,A}]),
    {Final,lists:reverse(SaveOpts)}.

%% Invoke the configured testcase callback {CBMod,CBFunc} before (init) or
%% after ('end') a test case. A list result replaces the config; any other
%% result (including a crash, caught here) leaves the arguments untouched.
user_callback(undefined, _, _, _, Args) ->
    Args;
user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, [Args]) when is_list(Args) ->
    case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
	Args1 when is_list(Args1) ->
	    [Args1];
	_ ->
	    [Args]
    end;
user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, Args) ->
    case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
	Args1 when is_list(Args1) ->
	    Args1;
	_ ->
	    Args
    end.
%% Call Mod:init_per_testcase/2 if it is exported. Returns {ok,Config},
%% {skip,Reason} or {skip_and_save,Reason,SaveCfg}; a crash or a bad
%% return value from the callback is turned into a skip.
init_per_testcase(Mod, Func, Args) ->
    case code:is_loaded(Mod) of
	false -> code:load_file(Mod);   % must load before function_exported/3
	_ -> ok
    end,
    %% init_per_testcase defined, returns new configuration
    case erlang:function_exported(Mod,init_per_testcase,2) of
	true ->
	    case catch my_apply(Mod, init_per_testcase, [Func|Args]) of
		{'$test_server_ok',{Skip,Reason}} when Skip==skip;
						       Skip==skipped ->
		    {skip,Reason};
		{'$test_server_ok',Res={skip_and_save,_,_}} ->
		    Res;
		{'$test_server_ok',NewConf} when is_list(NewConf) ->
		    %% a valid Config list may only contain tuples
		    case lists:filter(fun(T) when is_tuple(T) -> false;
					 (_) -> true end, NewConf) of
			[] ->
			    {ok,NewConf};
			Bad ->
			    group_leader() ! {printout,12,
					      "ERROR! init_per_testcase has returned "
					      "bad elements in Config: ~p\n",[Bad]},
			    {skip,{failed,{Mod,init_per_testcase,bad_return}}}
		    end;
		{'$test_server_ok',_Other} ->
		    group_leader() ! {printout,12,
				      "ERROR! init_per_testcase did not return "
				      "a Config list.\n",[]},
		    {skip,{failed,{Mod,init_per_testcase,bad_return}}};
		{'EXIT',Reason} ->
		    Line = get_loc(),
		    FormattedLoc = test_server_sup:format_loc(mod_loc(Line)),
		    group_leader() ! {printout,12,
				      "ERROR! init_per_testcase crashed!\n"
				      "\tLocation: ~s\n\tReason: ~p\n",
				      [FormattedLoc,Reason]},
		    {skip,{failed,{Mod,init_per_testcase,Reason}}};
		Other ->
		    %% thrown value (my_apply wraps normal returns, so
		    %% anything unwrapped must come from a throw)
		    Line = get_loc(),
		    FormattedLoc = test_server_sup:format_loc(mod_loc(Line)),
		    group_leader() ! {printout,12,
				      "ERROR! init_per_testcase thrown!\n"
				      "\tLocation: ~s\n\tReason: ~p\n",
				      [FormattedLoc, Other]},
		    {skip,{failed,{Mod,init_per_testcase,Other}}}
	    end;
	false ->
	    %% Optional init_per_testcase not defined
	    %% keep quiet.
	    [Config] = Args,
	    {ok, Config}
    end.

%% Call Mod:end_per_testcase/2 (or the old fin_per_testcase/2) if exported.
end_per_testcase(Mod, Func, Conf) ->
    case erlang:function_exported(Mod,end_per_testcase,2) of
	true ->
	    do_end_per_testcase(Mod,end_per_testcase,Func,Conf);
	false ->
	    %% Backwards compatibility!
	    case erlang:function_exported(Mod,fin_per_testcase,2) of
		true ->
		    do_end_per_testcase(Mod,fin_per_testcase,Func,Conf);
		false ->
		    ok
	    end
    end.
%% Run the end_per_testcase (or fin_per_testcase) callback. A crash or
%% throw here does not fail the case; it is logged and returned as
%% {failed,{Mod,end_per_testcase,Why}} with a warning comment.
do_end_per_testcase(Mod,EndFunc,Func,Conf) ->
    put(test_server_init_or_end_conf,{EndFunc,Func}),
    put(test_server_loc, {Mod,{EndFunc,Func}}),
    case catch my_apply(Mod, EndFunc, [Func,Conf]) of
	{'$test_server_ok',SaveCfg={save_config,_}} ->
	    SaveCfg;
	{'$test_server_ok',{fail,_}=Fail} ->
	    Fail;
	{'$test_server_ok',_} ->
	    ok;
	{'EXIT',Reason} = Why ->
	    comment(io_lib:format("<font color=\"red\">"
				  "WARNING: ~w crashed!"
				  "</font>\n",[EndFunc])),
	    group_leader() ! {printout,12,
			      "WARNING: ~w crashed!\n"
			      "Reason: ~p\n"
			      "Line: ~s\n",
			      [EndFunc, Reason,
			       test_server_sup:format_loc(
				 mod_loc(get_loc()))]},
	    {failed,{Mod,end_per_testcase,Why}};
	Other ->
	    %% thrown value (my_apply wraps normal returns)
	    comment(io_lib:format("<font color=\"red\">"
				  "WARNING: ~w thrown!"
				  "</font>\n",[EndFunc])),
	    group_leader() ! {printout,12,
			      "WARNING: ~w thrown!\n"
			      "Reason: ~p\n"
			      "Line: ~s\n",
			      [EndFunc, Other,
			       test_server_sup:format_loc(
				 mod_loc(get_loc()))]},
	    {failed,{Mod,end_per_testcase,Other}}
    end.

%% Current location: the ?line-macro trace if available, otherwise the
%% value stored in the process dictionary under test_server_loc.
get_loc() ->
    case catch test_server_line:get_lines() of
	[] ->
	    get(test_server_loc);
	{'EXIT',_} ->
	    get(test_server_loc);
	Loc ->
	    Loc
    end.

%% Location of another process. NOTE: copies Pid's whole process
%% dictionary into our own before calling get_loc/0.
get_loc(Pid) ->
    {dictionary,Dict} = process_info(Pid, dictionary),
    lists:foreach(fun({Key,Val}) -> put(Key,Val) end,Dict),
    get_loc().

%% Extract {Module,Function} from a location term, if possible.
get_mf([{M,F,_}|_]) -> {M,F};
get_mf([{M,F}|_])   -> {M,F};
get_mf(_)           -> {undefined,undefined}.

mod_loc(Loc) ->
    %% handle diff line num versions
    case Loc of
	[{{_M,_F},_L}|_] ->
	    [{?pl2a(M),F,L} || {{M,F},L} <- Loc];
	[{_M,_F}|_] ->
	    [{?pl2a(M),F} || {M,F} <- Loc];
	{{M,F},L} ->
	    [{?pl2a(M),F,L}];
	{M,ForL} ->
	    [{?pl2a(M),ForL}];
	_ ->
	    Loc
    end.

%% Notify the framework about a failed test case, without/with location.
fw_error_notify(Mod, Func, Args, Error) ->
    test_server_sup:framework_call(error_notification,
				   [?pl2a(Mod),Func,[Args],
				    {Error,unknown}]).
fw_error_notify(Mod, Func, Args, Error, Loc) ->
    test_server_sup:framework_call(error_notification,
				   [?pl2a(Mod),Func,[Args],
				    {Error,Loc}]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% print(Detail,Format,Args) -> ok
%% Detail = integer()
%% Format = string()
%% Args = [term()]
%%
%% Just like io:format, except that depending on the Detail value, the output
%% is directed to console, major and/or minor log files.

print(Detail,Format,Args) ->
    local_or_remote_apply({test_server_ctrl,print,[Detail,Format,Args]}).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% print_timestamp(Detail,Leader) -> ok
%%
%% Prints Leader followed by a time stamp (date and time). Depending on
%% the Detail value, the output is directed to console, major and/or minor
%% log files.

print_timestamp(Detail,Leader) ->
    local_or_remote_apply({test_server_ctrl,print_timestamp,[Detail,Leader]}).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% lookup_config(Key,Config) -> Value | undefined
%% Key = term()
%% Value = term()
%% Config = [{Key,Value},...]
%%
%% Looks up a specific key in the config list, and returns the value
%% of the associated key, or undefined if the key doesn't exist.

lookup_config(Key,Config) ->
    case lists:keysearch(Key,1,Config) of
	{value,{Key,Val}} ->
	    Val;
	_ ->
	    io:format("Could not find element ~p in Config.~n",[Key]),
	    undefined
    end.

%% timer:tc/3
%% Time the application of M:F(A). Returns {ElapsedMicroseconds,Result}
%% where Result is the case's return value, an {'EXIT',Reason} if it
%% crashed, or {failed,{thrown,Term}} if it threw a value (my_apply tags
%% normal returns, so an untagged value must come from a throw).
ts_tc(M, F, A) ->
    Before = erlang:now(),
    Val = (catch my_apply(M, F, A)),
    After = erlang:now(),
    Result = case Val of
		 {'$test_server_ok', R} ->
		     R; % test case ok
		 {'EXIT',_Reason} = R ->
		     R; % test case crashed
		 Other ->
		     {failed, {thrown,Other}} % test case was thrown
	     end,
    Elapsed =
	(element(1,After)*1000000000000
	 +element(2,After)*1000000+element(3,After)) -
	(element(1,Before)*1000000000000
	 +element(2,Before)*1000000+element(3,Before)),
    {Elapsed, Result}.

%% Tag a successful apply so ts_tc can tell normal returns from throws.
my_apply(M, F, A) ->
    {'$test_server_ok',apply(M, F, A)}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% TEST SUITE SUPPORT FUNCTIONS                                     %%
%%                                                                  %%
%% Note: Some of these functions have been moved to test_server_sup %%
%% in an attempt to keep this module small (yeah, right!)           %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% unicode_to_latin1(Chars) -> string() | term()
%%
%% Flattens Chars (list or binary) into a latin-1 string. Code points
%% above 255 are rendered as a "\{Octal}" escape (see the ~.8B format
%% below). Anything that is not a list or binary is passed through
%% unchanged.
unicode_to_latin1(Chars) when is_list(Chars); is_binary(Chars) ->
    lists:flatten(
      [ case X of
	    High when High > 255 ->
		io_lib:format("\\{~.8B}",[X]);
	    Low ->
		Low
	end || X <- unicode:characters_to_list(Chars,unicode) ]);
unicode_to_latin1(Garbage) ->
    Garbage.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% format(Format) -> IoLibReturn
%% format(Detail, Format) -> IoLibReturn
%% format(Format, Args) -> IoLibReturn
%% format(Detail, Format, Args) -> IoLibReturn
%%      Detail = integer() | major | minor
%%      Format = string()
%%      Args = [term(),...]
%%
%% Logs the Format string and Args, similar to io:format/1/2. If
%% Detail is not given, 'minor' is used. Which log files receive the
%% string depends on the detail thresholds; typically only the minor
%% log file is used at the default level.
format(Format) ->
    format(minor, Format, []).

format(major, Format) ->
    format(major, Format, []);
format(minor, Format) ->
    format(minor, Format, []);
format(Detail, Format) when is_integer(Detail) ->
    format(Detail, Format, []);
format(Format, Args) ->
    format(minor, Format, Args).

format(Detail, Format, Args) ->
    %% A bad format string must not kill the caller, so the formatting
    %% itself is caught and replaced by a diagnostic message.
    Str =
	case catch io_lib:format(Format,Args) of
	    {'EXIT',_} ->
		io_lib:format("illegal format; ~p with args ~p.\n",
			      [Format,Args]);
	    Valid -> Valid
	end,
    log({Detail, Str}).

%% Ships a pre-formatted {Detail,String} log entry to the group leader
%% (the job process), which routes it to the right log file.
log(Msg) ->
    group_leader() ! {structured_io, self(), Msg},
    ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% capture_start() -> ok
%% capture_stop() -> ok
%%
%% Starts/stops capturing all output from io:format and friends.
%% Capturing does not suppress the output - it only makes it
%% retrievable afterwards with capture_get/0. Starting/stopping does
%% not affect already captured output; everything is kept as messages
%% in the message queue until fetched.
capture_start() ->
    GroupLeader = group_leader(),
    GroupLeader ! {capture, self()},
    ok.

capture_stop() ->
    GroupLeader = group_leader(),
    GroupLeader ! {capture, false},
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% capture_get() -> [string()]
%%
%% Returns everything captured since the previous capture_get/0 call.
%% Output arrives asynchronously as messages, so there can be a short
%% delay between an io:format call and its availability here. It is
%% not necessary to call capture_stop/0 first.
capture_get() ->
    test_server_sup:capture_get([]).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% messages_get() -> [term()]
%%
%% Drains and returns every message currently in the message queue.
messages_get() ->
    test_server_sup:messages_get([]).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% sleep(Time) -> ok
%%      Time = integer() | float() | infinity
%%
%% Sleeps for Time milliseconds. Floats are truncated; 'infinity'
%% blocks forever.
sleep(infinity) ->
    receive
    after infinity ->
	    ok
    end;
sleep(MSecs) ->
    Millis = trunc(MSecs),
    receive
    after Millis ->
	    ok
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% fail(Reason) -> exit({suite_failed,Reason})
%%
%% Fails the current test case: records Reason as the log comment and
%% exits. Provided because suites read better than with a raw exit.
fail(Reason) ->
    comment(cast_to_list(Reason)),
    exit({suite_failed, Reason}).

%% Renders any term as a flat string: lists pass through, atoms are
%% converted, everything else is pretty-printed with ~p.
cast_to_list(Term) when is_list(Term) ->
    Term;
cast_to_list(Term) when is_atom(Term) ->
    atom_to_list(Term);
cast_to_list(Term) ->
    lists:flatten(io_lib:format("~p", [Term])).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% fail() -> exit(suite_failed)
%%
%% Fails the current test case without a reason term. Provided
%% because suites read better than with a raw exit.
fail() ->
    exit(suite_failed).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% break(Comment) -> ok
%%
%% Pauses the test case so that part of the test can be done manually:
%% cancels all active timetraps, prints instructions (with Comment) on
%% the user console, and blocks until test_server:continue() is called.
break(Comment) ->
    %% Cancel every timetrap registered in the process dictionary, so
    %% the pause itself cannot time the test case out.
    case erase(test_server_timetraps) of
	undefined -> ok;
	List -> lists:foreach(fun(Ref) -> timetrap_cancel(Ref) end,List)
    end,
    io:format(user,
	      "\n\n\n--- SEMIAUTOMATIC TESTING ---"
	      "\nThe test case executes on process ~w"
	      "\n\n\n~s"
	      "\n\n\n-----------------------------\n\n"
	      "Continue with --> test_server:continue().\n",
	      [self(),Comment]),
    %% Only one break process may be registered at a time; an old one
    %% (from a previous break) is told to cancel first.
    case whereis(test_server_break_process) of
	undefined ->
	    spawn_break_process(self());
	OldBreakProcess ->
	    OldBreakProcess ! cancel,
	    spawn_break_process(self())
    end,
    receive continue -> ok end.

%% Spawns the registered helper that waits for continue()/cancel and,
%% on continue, releases the test-case process Pid.
spawn_break_process(Pid) ->
    spawn(fun() ->
		  register(test_server_break_process,self()),
		  receive
		      continue -> continue(Pid);
		      cancel -> ok
		  end
	  end).

%% continue() -> ok
%%
%% Resumes a test case paused by break/1. A no-op if no break process
%% is registered.
continue() ->
    case whereis(test_server_break_process) of
	undefined ->
	    ok;
	BreakProcess ->
	    BreakProcess ! continue
    end.

continue(Pid) ->
    Pid ! continue.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% timetrap_scale_factor() -> integer()
%%
%% Returns the factor to scale timetrap timeouts with, compensating
%% for slower execution environments: Purify/valgrind (x5), debug
%% emulator (x6), lock checking (x2), more schedulers than logical
%% processors (x3), VxWorks (x5) and cover (x10). Factors multiply.
timetrap_scale_factor() ->
    F0 = case test_server:purify_is_running() of
	     true -> 5;
	     false -> 1
	 end,
    F1 = case {is_debug(), has_lock_checking()} of
	     {true,_} -> 6 * F0;
	     {false,true} -> 2 * F0;
	     {false,false} -> F0
	 end,
    F2 = case has_superfluous_schedulers() of
	     true -> 3*F1;
	     false -> F1
	 end,
    F = case test_server_sup:get_os_family() of
	    vxworks -> 5 * F2;
	    _ -> F2
	end,
    case test_server:is_cover() of
	true -> 10 * F;
	false -> F
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% timetrap(Timeout) -> Handle | infinity
%%      Handle = term()
%%
%% Creates a timetrap that kills the calling process unless it is
%% cancelled with timetrap_cancel/1 within Timeout milliseconds.
%% Timeout may also be {seconds,N}, {minutes,N} or {hours,N} (see
%% time_ms/1). The timeout is scaled by the multiply factor stored in
%% the process dictionary, if any; a factor of 'infinity' disables the
%% trap entirely.
timetrap(Timeout0) ->
    Timeout = time_ms(Timeout0),
    cancel_default_timetrap(),
    case get(test_server_multiply_timetraps) of
	undefined -> timetrap1(Timeout);
	infinity -> infinity;
	Int -> timetrap1(Timeout*Int)
    end.

%% Spawns the linked watchdog and records its pid in the process
%% dictionary list so break/1 and timetrap_cancel/1 can find it.
timetrap1(Timeout) ->
    Ref = spawn_link(test_server_sup,timetrap,[Timeout,self()]),
    case get(test_server_timetraps) of
	undefined -> put(test_server_timetraps,[Ref]);
	List -> put(test_server_timetraps,[Ref|List])
    end,
    Ref.

%% ensure_timetrap(Config) -> ok
%%
%% Guarantees that at least one timetrap is active: if none is, a
%% default one is installed, using {default_timeout,Secs} from Config
%% or the ?DEFAULT_TIMETRAP_SECS fallback.
ensure_timetrap(Config) ->
    %format("ensure_timetrap:~p~n",[Config]),
    case get(test_server_timetraps) of
	[_|_] ->
	    ok;
	_ ->
	    %% Clean out any stale value before installing a new
	    %% default trap.
	    case get(test_server_default_timetrap) of
		undefined -> ok;
		Garbage ->
		    erase(test_server_default_timetrap),
		    format("=== WARNING: garbage in test_server_default_timetrap: ~p~n",
			   [Garbage])
	    end,
	    DTmo = case lists:keysearch(default_timeout,1,Config) of
		       {value,{default_timeout,Tmo}} -> Tmo;
		       _ -> ?DEFAULT_TIMETRAP_SECS
		   end,
	    format("=== test_server setting default timetrap of ~p seconds~n",
		   [DTmo]),
	    put(test_server_default_timetrap, timetrap(seconds(DTmo)))
    end.

%% Removes the default timetrap (installed by ensure_timetrap/1) when
%% an explicit timetrap is set. Returns 'error' if the stored value
%% was not a pid.
cancel_default_timetrap() ->
    case get(test_server_default_timetrap) of
	undefined ->
	    ok;
	TimeTrap when is_pid(TimeTrap) ->
	    timetrap_cancel(TimeTrap),
	    erase(test_server_default_timetrap),
	    format("=== test_server canceled default timetrap since another timetrap was set~n"),
	    ok;
	Garbage ->
	    erase(test_server_default_timetrap),
	    format("=== WARNING: garbage in test_server_default_timetrap: ~p~n",
		   [Garbage]),
	    error
    end.

%% time_ms(Spec) -> Milliseconds
%%
%% Converts a time specification ({hours,N} | {minutes,N} |
%% {seconds,N} | integer milliseconds) to milliseconds; exits with
%% {invalid_time_spec,_} on anything else.
time_ms({hours,N}) -> hours(N);
time_ms({minutes,N}) -> minutes(N);
time_ms({seconds,N}) -> seconds(N);
time_ms({Other,_N}) ->
    format("=== ERROR: Invalid time specification: ~p. "
	   "Should be seconds, minutes, or hours.~n", [Other]),
    exit({invalid_time_spec,Other});
time_ms(Ms) when is_integer(Ms) -> Ms;
time_ms(Other) -> exit({invalid_time_spec,Other}).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% timetrap_cancel(Handle) -> ok
%%
%% Cancels a timetrap created by timetrap/1 and removes it from the
%% process-dictionary bookkeeping. 'infinity' (from a disabled trap)
%% is accepted and ignored.
timetrap_cancel(infinity) ->
    ok;
timetrap_cancel(Handle) ->
    case get(test_server_timetraps) of
	undefined -> ok;
	[Handle] -> erase(test_server_timetraps);
	List -> put(test_server_timetraps,lists:delete(Handle,List))
    end,
    test_server_sup:timetrap_cancel(Handle).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% hours(N) | minutes(N) | seconds(N) -> Milliseconds
%%      N = integer() | float()
%%
%% Converts the named unit to an integer number of milliseconds;
%% fractions in the input are accepted and truncated.
hours(N) -> trunc(N * 1000 * 60 * 60).
minutes(N) -> trunc(N * 1000 * 60).
seconds(N) -> trunc(N * 1000).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% timecall(M,F,A) -> {Time,Val}
%%      Time = float()
%%
%% Measures the time spent evaluating M:F(A...). Not evaluated in a
%% catch context; delegated to test_server_sup.
timecall(M, F, A) ->
    test_server_sup:timecall(M,F,A).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% do_times(N,M,F,A) -> ok
%% do_times(N,Fun) -> ok
%%
%% Evaluates M:F(A...) or Fun() N times, discarding the results.
do_times(N,M,F,A) when N>0 ->
    apply(M,F,A),
    do_times(N-1,M,F,A);
do_times(0,_,_,_) ->
    ok.

do_times(N,Fun) when N>0 ->
    Fun(),
    do_times(N-1,Fun);
do_times(0,_) ->
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% m_out_of_n(M,N,Fun) -> ok | exit({m_out_of_n_failed,{R,left_to_do}})
%%      M = N = R = integer()
%%
%% Repeatedly evaluates Fun until it has succeeded (not crashed) M
%% times, allowing at most N attempts in total. If the budget runs
%% out first, exits with {m_out_of_n_failed,{R,left_to_do}} where R is
%% the number of successes still missing.
%%
%% Examples:
%%   m_out_of_n(1,4,fun() -> tricky_test_case() end)
%%     tries tricky_test_case() up to 4 times and is happy if it
%%     succeeds once;
%%   m_out_of_n(7,8,fun() -> clock_sanity_check() end)
%%     allows one failure out of 8 runs (useful when a single run may
%%     legitimately fail, e.g. across an hour boundary).
m_out_of_n(0,_,_) ->
    ok;
m_out_of_n(M,0,_) ->
    exit({m_out_of_n_failed,{M,left_to_do}});
m_out_of_n(M,N,Fun) ->
    case catch Fun() of
	{'EXIT',_} ->
	    m_out_of_n(M,N-1,Fun);
	_Other ->
	    m_out_of_n(M-1,N-1,Fun)
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% call_crash(M,F,A)
%% call_crash(Time,M,F,A)
%% call_crash(Time,Crash,M,F,A)
%%      M = F = atom()
%%      A = [term()]
%%      Time = integer() milliseconds | infinity (default)
%%      Crash = term()
%%
%% Spawns a new process that calls M:F(A...). The call is considered
%% successful if it crashes with reason Crash, or with any reason when
%% Crash is not specified (or is 'any'). The call must terminate
%% within Time, otherwise an exit with reason 'call_crash_timeout' is
%% generated.
call_crash(M,F,A) ->
    call_crash(infinity,M,F,A).
call_crash(Time,M,F,A) ->
    call_crash(Time,any,M,F,A).
call_crash(Time,Crash,M,F,A) ->
    test_server_sup:call_crash(Time,Crash,M,F,A).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% start_node(SlaveName, Type, Options) ->
%%     {ok, Slave} | {error, Reason}
%%
%% SlaveName = string(), atom().
%% Type = slave | peer
%% Options = [{tuple(), term()}]
%%
%% Options recognized for both slave and peer nodes:
%%   {remote, true}     - start the node on a remote host (default is
%%                        the local host, with some platform
%%                        exceptions, e.g. VxWorks/OSE where all nodes
%%                        are started remotely)
%%   {args, Arguments}  - command-line arguments passed to the node
%%   {cleanup, false}   - the test server will NOT kill this node when
%%                        the test case ends; the USER must terminate
%%                        it
%%   {erl, ReleaseList} - pick the emulator for the node from
%%                        ReleaseList instead of using the test
%%                        server's own; a specifier is {release, Rel},
%%                        {prog, Prog} or 'this', and one element is
%%                        picked at random (only works on Solaris and
%%                        Linux; version-mismatch warnings are given)
%%
%% Options recognized for peer nodes only:
%%   {wait, false}          - do not wait for the node to come up
%%   {fail_on_error, false} - return {error,Reason} rather than fail
%%                            the test case (slave nodes always behave
%%                            as if fail_on_error==false)
start_node(Name, Type, Options) ->
    %% Warn if a node with the same first name part ('foo' of
    %% 'foo@host') is already known; starting a duplicate is usually a
    %% suite bug.
    lists:foreach(
      fun(N) ->
	      case firstname(N) of
		  Name ->
		      format("=== WARNING: Trying to start node \'~w\' when node"
			     " with same first name exists: ~w", [Name, N]);
		  _other -> ok
	      end
      end, nodes()),
    %% Delegate the actual start to test_server_ctrl via the group
    %% leader (which may proxy to a remote host).
    group_leader() ! {sync_apply,
		      self(),
		      {test_server_ctrl,start_node,[Name,Type,Options]}},
    Result = receive {sync_result,R} -> R end,
    case Result of
	{ok,Node} ->
	    %% Cannot run cover on a shielded node or on a node
	    %% running a different emulator version.
	    Cover = case is_cover() of
			true ->
			    not is_shielded(Name) andalso same_version(Node);
			false ->
			    false
		    end,
	    net_adm:ping(Node),
	    case Cover of
		true ->
		    Sticky = unstick_all_sticky(Node),
		    cover:start(Node),
		    stick_all_sticky(Node,Sticky);
		_ ->
		    ok
	    end,
	    {ok,Node};
	{fail,Reason} -> fail(Reason);
	Error -> Error
    end.

%% The part of a node name before the '@', as an atom.
firstname(N) ->
    list_to_atom(upto($@,atom_to_list(N))).

%% Prefix of the list up to (excluding) the first H.
%% This should!!! crash if H is not member in list.
upto(H, [H | _T]) -> [];
upto(H, [X | T]) -> [X | upto(H,T)].

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% wait_for_node(Name) -> ok | {error,timeout}
%%
%% For nodes started with {wait,false}: blocks until the node has
%% contacted the test server controller after startup.
wait_for_node(Slave) ->
    group_leader() ! {sync_apply,
		      self(),
		      {test_server_ctrl,wait_for_node,[Slave]}},
    receive {sync_result,R} -> R end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% stop_node(Name) -> true | false
%%
%% Kills a (remote) node previously started with start_node/3 and
%% informs test_server_ctrl so it can clean up. Cover data is flushed
%% from the node first, when applicable. Returns false if the node
%% did not go down within 30 seconds, or could not be reached at all.
stop_node(Slave) ->
    Nocover = is_shielded(Slave) orelse not same_version(Slave),
    case is_cover() of
	true when not Nocover ->
	    Sticky = unstick_all_sticky(Slave),
	    cover:stop(Slave),
	    stick_all_sticky(Slave,Sticky);
	_ ->
	    ok
    end,
    group_leader() ! {sync_apply,self(),{test_server_ctrl,stop_node,[Slave]}},
    Result = receive {sync_result,R} -> R end,
    case Result of
	ok ->
	    erlang:monitor_node(Slave, true),
	    slave:stop(Slave),
	    receive
		{nodedown, Slave} ->
		    format(minor, "Stopped slave node: ~p", [Slave]),
		    format(major, "=node_stop ~p", [Slave]),
		    true
	    after 30000 ->
		    format("=== WARNING: Node ~p does not seem to terminate.",
			   [Slave]),
		    false
	    end;
	{error, _Reason} ->
	    %% Either the node is already dead, it was started with
	    %% {cleanup,false}, or it was started in some other way
	    %% than test_server:start_node/3.
	    format("=== WARNING: Attempt to stop a nonexisting slavenode (~p)~n"
		   "=== Trying to kill it anyway!!!", [Slave]),
	    case net_adm:ping(Slave) of
		pong ->
		    slave:stop(Slave),
		    true;
		pang ->
		    false
	    end
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% is_release_available(Release) -> true | false
%%      Release = string()
%%
%% Tests whether a release (such as "r10b") is available for
%% start_node/3.
is_release_available(Release) ->
    group_leader() ! {sync_apply,
		      self(),
		      {test_server_ctrl,is_release_available,[Release]}},
    receive {sync_result,R} -> R end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% run_on_shielded_node(Fun, CArgs) -> term()
%%      Fun = function()
%%      CArgs = string(), extra command-line arguments
%%
%% Executes Fun in a process on a temporarily created hidden node.
%% Communication with the job process goes via a job proxy process on
%% the hidden node (the group leader of the test-case process), which
%% makes it possible to start nodes from the hidden node that are
%% unaware of the test server node. Without the proxy, all processes
%% would have a group leader residing on the test server node.
%% If Fun is successfully executed, the result is returned.
%%
run_on_shielded_node(Fun, CArgs) when is_function(Fun), is_list(CArgs) ->
    %% Unique node name derived from the current time.
    {A,B,C} = now(),
    Name = "shielded_node-" ++ integer_to_list(A) ++ "-" ++ integer_to_list(B)
	++ "-" ++ integer_to_list(C),
    Node = case start_node(Name, slave, [{args, "-hidden " ++ CArgs}]) of
	       {ok, N} -> N;
	       Err -> fail({failed_to_start_shielded_node, Err})
	   end,
    Master = self(),
    Ref = make_ref(),
    %% The remote worker starts the job proxy, waits for our go-ahead
    %% (Ref), runs Fun, reports the result and then parks forever; it
    %% dies when the node is stopped below.
    Slave = spawn(Node,
		  fun () ->
			  start_job_proxy(),
			  receive
			      Ref ->
				  Master ! {Ref, Fun()}
			  end,
			  receive after infinity -> infinity end
		  end),
    MRef = erlang:monitor(process, Slave),
    Slave ! Ref,
    receive
	{'DOWN', MRef, _, _, Info} ->
	    %% Worker died before delivering a result.
	    stop_node(Node),
	    fail(Info);
	{Ref, Res} ->
	    stop_node(Node),
	    %% Consume the DOWN that follows the node shutdown so it
	    %% does not linger in the mailbox.
	    receive
		{'DOWN', MRef, _, _, _} ->
		    Res
	    end
    end.

%% Returns true if Name or the local node is a shielded node (name
%% starts with "shielded_node").
is_shielded(Name) ->
    case {cast_to_list(Name),atom_to_list(node())} of
	{"shielded_node"++_,_} -> true;
	{_,"shielded_node"++_} -> true;
	_ -> false
    end.

%% True if Name runs the same emulator version as the local node.
same_version(Name) ->
    ThisVersion = erlang:system_info(version),
    OtherVersion = rpc:call(Name, erlang, system_info, [version]),
    ThisVersion =:= OtherVersion.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% temp_name(Stem) -> string()
%%      Stem = string()
%%
%% Creates a unique file name of the form <Stem><Number>, retrying
%% until the name does not already exist in Stem's directory.
temp_name(Stem) ->
    {A,B,C} = erlang:now(),
    RandomNum = A bxor B bxor C,
    RandomName = Stem ++ integer_to_list(RandomNum),
    {ok,Files} = file:list_dir(filename:dirname(Stem)),
    case lists:member(RandomName,Files) of
	true ->
	    %% Name taken - bad luck, try again.
	    temp_name(Stem);
	false ->
	    RandomName
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% app_test/1
%%
%% Runs the application consistency test in 'pedantic' mode.
app_test(App) ->
    app_test(App, pedantic).
%% app_test(App, Mode) -> term() | {skip,Comment}
%%
%% Verifies the consistency of application App (delegated to
%% test_server_sup:app_test/2). Skipped on OSE.
app_test(App, Mode) ->
    case os:type() of
	{ose,_} ->
	    Comment = "Skipping app_test on OSE",
	    comment(Comment), % in case user ignores the return value
	    {skip,Comment};
	_other ->
	    test_server_sup:app_test(App, Mode)
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% is_native(Mod) -> true | false
%%
%% Checks whether the module is natively compiled (i.e. has native
%% address info in its module_info).
is_native(Mod) ->
    case catch Mod:module_info(native_addresses) of
	[_|_] -> true;
	_Other -> false
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% comment(String) -> ok
%%
%% The given String will appear in the comment field of the result
%% table for the current test case. If called several times, only the
%% last comment is shown. It is also overwritten by the return value
%% {comment,Comment} or by fail/1 (which records Reason as a comment).
comment(String) ->
    group_leader() ! {comment,String},
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% os_type() -> OsType
%%
%% Returns the OS type of the target node, in the same format as
%% os:type().
os_type() ->
    test_server_ctrl:get_target_os_type().

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% is_cover() -> boolean()
%%
%% Returns true if cover is running (its server is registered),
%% else false.
is_cover() ->
    case whereis(cover_server) of
	undefined -> false;
	_ -> true
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% is_debug() -> boolean()
%%
%% Returns true if the emulator is debug-compiled, false otherwise.
%% Falls back to searching the system version string when the
%% debug_compiled system_info item is not supported.
is_debug() ->
    case catch erlang:system_info(debug_compiled) of
	{'EXIT', _} ->
	    case string:str(erlang:system_info(system_version), "debug") of
		Int when is_integer(Int), Int > 0 -> true;
		_ -> false
	    end;
	Res ->
	    Res
    end.
%% has_lock_checking() -> boolean()
%%
%% Returns true if the emulator has lock checking enabled, false
%% otherwise (including on emulators without the system_info item).
has_lock_checking() ->
    case catch erlang:system_info(lock_checking) of
	{'EXIT', _} -> false;
	Res -> Res
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% has_superfluous_schedulers() -> boolean()
%%
%% Returns true if the emulator has more scheduler threads than
%% logical processors, false otherwise.
has_superfluous_schedulers() ->
    case catch {erlang:system_info(schedulers),
		erlang:system_info(logical_processors)} of
	{S, P} when is_integer(S), is_integer(P), S > P -> true;
	_ -> false
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% is_commercial() -> boolean()
%%
%% Returns true if the current emulator is a commercially supported
%% build (its start-up message does not contain "source"). More tests
%% may be desirable on such platforms, e.g. that all applications have
%% documentation.
is_commercial() ->
    case string:str(erlang:system_info(system_version), "source") of
	Int when is_integer(Int), Int > 0 -> false;
	_ -> true
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% DEBUGGER INTERFACE                                          %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% purify_is_running() -> true | false
%%
%% Tests if Purify (the error checker) is currently running; false on
%% emulators without the error_checker system_info item.
purify_is_running() ->
    case catch erlang:system_info({error_checker, running}) of
	{'EXIT', _} -> false;
	Res -> Res
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% purify_new_leaks() -> false | BytesLeaked
%%      BytesLeaked = integer()
%%
%% Checks for new memory leaks if Purify is active. Returns the
%% number of bytes leaked, or false if Purify is not running.
purify_new_leaks() ->
    case catch erlang:system_info({error_checker, memory}) of
	{'EXIT', _} -> false;
	Leaked when is_integer(Leaked) -> Leaked
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% purify_new_fds_inuse() -> false | FdsInuse
%%      FdsInuse = integer()
%%
%% Checks for new file descriptors in use.
%% Returns the number of new file descriptors in use, or false if
%% Purify is not running.
purify_new_fds_inuse() ->
    case catch erlang:system_info({error_checker, fd}) of
	{'EXIT', _} -> false;
	Inuse when is_integer(Inuse) -> Inuse
    end.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% purify_format(Format, Args) -> ok
%%      Format = string()
%%      Args = list()
%%
%% Writes the formatted string to Purify's logfile if Purify is
%% active; silently a no-op otherwise (the catch swallows the badarg).
purify_format(Format, Args) ->
    (catch erlang:system_info({error_checker, io_lib:format(Format, Args)})),
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generic apply functions for communication with the host.
%%
%% If test_server_job_sock is absent from the process dictionary we
%% are a local target and apply directly; otherwise we are a remote
%% target and forward the request over the job socket.

%% Applies {M,F,A} and delivers {sync_result,Result} to From (or
%% {sync_result_proxy,From,Result} via the Proxy pid, when one is
%% given).
sync_local_or_remote_apply(Proxy,From,{M,F,A} = MFA) ->
    case get(test_server_job_sock) of
	undefined ->
	    %% i'm a local target
	    Result = apply(M,F,A),
	    if is_pid(Proxy) -> Proxy ! {sync_result_proxy,From,Result};
	       true -> From ! {sync_result,Result}
	    end;
	JobSock ->
	    %% i'm a remote target
	    request(JobSock,{sync_apply,MFA}),
	    {sync_result,Result} = recv(JobSock),
	    if is_pid(Proxy) -> Proxy ! {sync_result_proxy,From,Result};
	       true -> From ! {sync_result,Result}
	    end
    end.

%% Fire-and-forget variant: applies {M,F,A} locally, or sends an
%% asynchronous {apply,MFA} request over the job socket.
local_or_remote_apply({M,F,A} = MFA) ->
    case get(test_server_job_sock) of
	undefined ->
	    %% i'm a local target
	    apply(M,F,A),
	    ok;
	JobSock ->
	    %% i'm a remote target
	    request(JobSock,{apply,MFA}),
	    ok
    end.

%% Sends Request over the socket; the wire format is a 1-byte tag
%% (1 = term follows) and the term_to_binary encoding of Request.
request(Sock,Request) ->
    gen_tcp:send(Sock,<<1,(term_to_binary(Request))/binary>>).

%%
%% Generic receive function for communication with host: tag 1 means
%% an encoded term, tag 0 a raw binary. A closed socket terminates
%% the process with reason connection_lost.
%%
recv(Sock) ->
    case gen_tcp:recv(Sock,0) of
	{error,closed} ->
	    gen_tcp:close(Sock),
	    exit(connection_lost);
	{ok,<<1,Request/binary>>} ->
	    binary_to_term(Request);
	{ok,<<0,B/binary>>} ->
	    B
    end.
null
https://raw.githubusercontent.com/simplegeo/erlang/15eda8de27ba73d176c7eeb3a70a64167f50e2c4/lib/test_server/src/test_server.erl
erlang
%CopyrightBegin% compliance with the License. You should have received a copy of the Erlang Public License along with this software. If not, it can be retrieved online at /. basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License. %CopyrightEnd% % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % TEST_SERVER_CTRL INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% TEST_SERVER_SUP INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% TEST SUITE INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % PRIVATE EXPORTED %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% **** START *** CODE FOR REMOTE TARGET ONLY *** This process is started only if the test is to be run on a remote target The process is then started on target on host, and information about target is sent to host. Decode request on main socket Temporary job process on target This process will live while all test cases in the job are executed. A socket connection is established with the job process on host. This might be a symlink - let's try to delete it! Receive and decode request on job socket Cooked is temporary removed/broken ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir},cooked]), **** STOP *** CODE FOR REMOTE TARGET ONLY *** cover_compile({App,Include,Exclude,Cross}) -> {ok,AnalyseModules} | {error,Reason} App = atom() , name of application to be compiled Exclude = [atom()], list of modules to exclude Include = [atom()], list of modules outside of App that should be included in the cover compilation Cross = [atoms()], list of modules outside of App shat should be included in the cover compilation, but that shall not be part of the cover analysis for this application. Cover compile the given application. 
Return {ok,AnalyseMods} if application is found, else {error,application_not_found}. start cover server anyway start cover server anyway start cover server anyway remove duplicates Modules = [atom()], the modules to analyse If this is a local target, the test directory is given If Analyse==overview | {overview,Dir} analyse_to_file is not used, only an overview containing the number of covered/not covered lines in each module. Also, if a Dir exists, cover data will be exported to a file called all.coverdata in that directory. Value = term() Loc = term() Comment = string() Reason = term() Spawns off a process (case process) that actually runs the test suite. The case process will have the job process as group leader, which makes The job process then sits down and waits for news from the case process. This might be io requests (which are redirected to the log files). is either the name of the function, or {<Module>,<Line>} of the last line executed that had a ?line macro. If the test case did execute erase/0 or similar, it may be empty. Comment is the last comment added called or the comment given by the return value {comment,Comment} from a test case. by some other process. Reason is the kill reason provided. i'm a local target i'm a remote target Set ourself to group leader for the spawned process Ugly bug (pre R5A): If this process (group leader of the test case) terminates before all messages have been replied back to the io server, the io server NOTE: Keep job_proxy_msgloop/0 up to date when changes are made in this function! it as a comment, potentially replacing user data something else, just pass it on The framework functions mustn't execute on this group leader process or io will cause deadlock, so we spawn a dedicated process for the operation and let the group leader go back to handle io. 
result of an exit(TestCase,kill) call, which is the only way to abort a testcase process that traps exits (see abort_current_testcase) because a linked process failed) the framework has been notified, we're finished a framework function failed ignore anything not generated by test server ignore anything not generated by test server if init_per_testcase fails, the test case should be skipped finished, report back if end_per_testcase fails, the test case should be reported successful with a warning printed as comment finished, report back finished, report back The job proxy process forwards messages between the test case process on a shielded node (and its descendants) and the job process. The job proxy process have to be started by the test-case process on the shielded node! The io_reply_proxy is not the most satisfying solution but it works... Messages that need intervention by proxy... io stuff ... Messages that need no intervention by proxy... call user callback function if defined execute the test case call user callback function if defined user has failed the testcase unexpected termination call user callback function if defined if this is a named conf group, the test case (init or end conf) execute the conf test case call user callback function if defined the return value is a list and we have to check if it contains the result of an end conf case or if it's a Config list check if all elements in the list are valid end conf return value tuples must be return value from end conf case must be Config value from init conf case the return value is not a list, so it's the return value from an end conf case or it's a dummy value that can be ignored init_per_testcase defined, returns new configuration Optional init_per_testcase not defined keep quiet. Backwards compatibility! handle diff line num versions Detail = integer() Format = string() Just like io:format, except that depending on the Detail value, the output is directed to console, major and/or minor log files. 
print_timsteamp(Detail,Leader) -> ok Prints Leader followed by a time stamp (date and time). Depending on the Detail value, the output is directed to console, major and/or minor log files. lookup_config(Key,Config) -> {value,{Key,Value}} | undefined Key = term() Value = term() Config = [{Key,Value},...] Looks up a specific key in the config list, and returns the value of the associated key, or undefined if the key doesn't exist. timer:tc/3 test case ok test case crashed test case was thrown TEST SUITE SUPPORT FUNCTIONS %% %% Note: Some of these functions have been moved to test_server_sup %% in an attempt to keep this modules small (yeah, right!) %% format(Format) -> IoLibReturn format(Detail,Format) -> IoLibReturn Detail = integer() Format = string() Which log files the string will be logged in depends on the thresholds set with set_levels/3. Typically with default detail level, only the minor log file is used. capture_start() -> ok capture_stop() -> ok Starts/stops capturing all output from io:format, and similar. Capturing output doesn't stop output from happening. It just makes it possible to retrieve the output using capture_get/0. Starting and stopping capture doesn't affect already captured output. All output is stored as messages in the message queue until retrieved capture_get() -> Output Output = [string(),...] Retrieves all the captured output since last call to capture_get/0. Note that since output arrive as messages to the process, it takes a short while from the call to io:format until all output is available retreiving the output. messages_get() -> Messages Messages = [term(),...] Returns all messages in the message queue. sleep(Time) -> ok Time = integer() | float() | infinity Sleeps the specified number of milliseconds. This sleep also accepts floating point numbers (which are truncated) and the atom 'infinity'. fail(Reason) -> exit({suite_failed,Reason}) Immediately calls exit. 
Included because test suites are easier to read when using this function, rather than exit directly. fail() -> exit(suite_failed) Immediately calls exit. Included because test suites are easier to read when using this function, rather than exit directly. break(Comment) -> ok Break a test case so part of the test can be done manually. Use continue/0 to continue. timetrap_scale_factor() -> Factor timetrap(Timeout) -> Handle Handle = term() Creates a time trap, that will kill the calling process if the format("ensure_timetrap:~p~n",[Config]), timetrap_cancel(Handle) -> ok Handle = term() Cancels a time trap. hours(N) -> Milliseconds minutes(N) -> Milliseconds seconds(N) -> Milliseconds N = integer() | float() Milliseconds = integer() Transforms the named units to milliseconds. Fractions in the input are accepted. The output is an integer. timecall(M,F,A) -> {Time,Val} Time = float() erlang:now/0, and should have pretty good accuracy on most platforms. The function is not evaluated in a catch context. do_times(N,M,F,A) -> ok do_times(N,Fun) -> N = integer() Fun = fun() -> void() m_out_of_n(M,N,Fun) -> ok | exit({m_out_of_n_failed,{R,left_to_do}}) M = integer() N = integer() Fun = fun() -> void() R = integer() Repeats evaluating the given function until it succeeded (didn't crash) M times. If, after N times, M successful attempts have not been accomplished, the process crashes with reason {m_out_of_n_failed {R,left_to_do}}, where R indicates how many cases that remained to be successfully completed. For example: m_out_of_n(1,4,fun() -> tricky_test_case() end) and is happy if it succeeds once. m_out_of_n(7,8,fun() -> clock_sanity_check() end) times and allows the function to fail once. This might be useful if clock_sanity_check/0 call_crash(M,F,A) call_crash(Time,M,F,A) call_crash(Time,Crash,M,F,A) M - atom() F - atom() A - [term()] Crash - term() successful if the call crashes with the given reason (Crash), or any other reason if Crash is not specified. 
to infinity), or it is considered a failure (exit with reason 'call_crash_timeout' is generated). {ok, Slave} | {error, Reason} SlaveName = string(), atom(). Type = slave | peer Options = [{tuple(), term()}] or more of these members: Slave and Peer: {remote, true} - Start the node on a remote host. If not specified, the node will be started on the local host (with where all nodes are started on a remote host). {args, Arguments} - Arguments passed directly to the node. {cleanup, false} - Nodes started with this option will not be killed by the test server after completion of the test case Therefore it is IMPORTANT that the USER terminates the node!! when starting nodes, instead of the same emulator of specifiers, where a specifier is either either the name of a release, e.g., "r7a" or 'latest'. 'this' means using the same emulator as the test server. Prog is the name of an emulator one of them is picked randomly. (Only server gives warnings when it notices that nodes are not of the same version as itself.) Peer only: {wait, false} - Don't wait for the node to be started. {fail_on_error, false} - Returns {error, Reason} rather than failing the test case. This option can only be used with peer nodes. Note that slave nodes always act as if they had fail_on_error==false. Cannot run cover on shielded node or on a node started by a shielded node. This should!!! crash if H is not member in list. wait_for_node(Name) -> ok | {error,timeout} If a node is started with the options {wait,false}, this function can be used to wait for the node to come up from the test server point of view (i.e. wait until it has contacted the test server controller after startup) stop_node(Name) -> true|false Kills a (remote) node. 
Either the node is already dead, or it was started with the {cleanup,false} option, or it was started in some other way than test_server:start_node/3. is_release_available(Release) -> true | false Release -> string() Test if a release (such as "r10b") is available to be Fun -> function() Fun is executed in a process on a temporarily created hidden node. Communication with the job process goes via a job proxy process on the hidden node, i.e. the group leader of the test case process is the job proxy process. This makes it possible to start nodes from the hidden node that are unaware of the test server node. Without the job proxy process all processes would have Fun - Function to execute the shielded node. If Fun is successfully executed, the result is returned. Return true if Name or node() is a shielded node temp_name(Stem) -> string() Stem = string() Create a unique file name, based on (starting with) Stem. A filename of the form <Stem><Number> is generated, and the function checks that that file doesn't already exist. oh, already exists - bad luck. Try again. recursively try again app_test/1 in case user ignores the return value Checks whether the module is natively compiled or not. comment(String) -> ok The given String will occur in the comment field of the table on the test suite result page. If called several times, only the last comment is printed. comment/1 is also overwritten by the return value {comment,Comment} or fail/1 (which prints Reason as a comment). os_type() -> OsType the same as returned from os:type() Returns true if cover is running, else false is_debug() -> boolean() Returns true if the emulator is debug-compiled, false otherwise. has_lock_checking() -> boolean() Returns true if the emulator has lock checking enabled, false otherwise. has_superfluous_schedulers() -> boolean() Returns true if the emulator has more scheduler threads than logical processors, false otherwise. 
is_commercial_build() -> boolean() Returns true if the current emulator is commercially supported. (The emulator will not have "[source]" in its start-up message.) We might want to do more tests on a commercial platform, for instance ensuring that all applications have documentation). DEBUGGER INTERFACE %% purify_is_running() -> false|true purify_new_leaks() -> false|BytesLeaked BytesLeaked = integer() Returns the number of bytes leaked, or false if Purify is not running. purify_new_fds_inuse() -> false|FdsInuse Checks for new file descriptors in use. Returns the number of new file descriptors in use, or false Format = string() i'm a local target i'm a remote target i'm a local target i'm a remote target
%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.

-module(test_server).

-define(DEFAULT_TIMETRAP_SECS, 60).

-export([start/1,start/2]).
-export([run_test_case_apply/1,init_target_info/0,init_purify/0]).
-export([cover_compile/1,cover_analyse/2]).
-export([get_loc/1]).
-export([lookup_config/2]).
-export([fail/0,fail/1,format/1,format/2,format/3]).
-export([capture_start/0,capture_stop/0,capture_get/0]).
-export([messages_get/0]).
-export([hours/1,minutes/1,seconds/1,sleep/1,timecall/3]).
-export([timetrap_scale_factor/0,timetrap/1,timetrap_cancel/1]).
-export([m_out_of_n/3,do_times/4,do_times/2]).
-export([call_crash/3,call_crash/4,call_crash/5]).
-export([temp_name/1]).
-export([start_node/3, stop_node/1, wait_for_node/1, is_release_available/1]).
-export([app_test/1, app_test/2]).
-export([is_native/1]).
-export([comment/1]).
-export([os_type/0]).
-export([run_on_shielded_node/2]).
-export([is_cover/0,is_debug/0,is_commercial/0]).
-export([break/1,continue/0]).

%% Debugger (Purify memory checker) interface.
-export([purify_new_leaks/0, purify_format/2, purify_new_fds_inuse/0,
         purify_is_running/0]).

-export([]).

%% controller = {Host,Socket} of the test_server_ctrl connection,
%% jobs       = [{JobPid,JobName}] of currently running jobs.
-record(state,{controller,jobs=[]}).

-include("test_server_internal.hrl").
-include_lib("kernel/include/file.hrl").

-define(pl2a(M), test_server_sup:package_atom(M)).

%% start(ControllerHost[,ControllerPort]) -> {ok,Pid} | {error,Reason}
%%
%% Starts the test_server process. A socket connection is established
%% with the test_server_ctrl process on the controller host.
start([ControllerHost]) when is_atom(ControllerHost) ->
    start(atom_to_list(ControllerHost));
start(ControllerHost) when is_list(ControllerHost) ->
    start(ControllerHost,?MAIN_PORT).

start(ControllerHost,ControllerPort) ->
    S = self(),
    Pid = spawn(fun() -> init(ControllerHost,ControllerPort,S) end),
    receive
        {Pid,started} -> {ok,Pid};
        {Pid,Error} -> Error
    end.
init(Host,Port,Starter) -> global:register_name(?MODULE,self()), process_flag(trap_exit,true), test_server_sup:cleanup_crash_dumps(), case gen_tcp:connect(Host,Port, [binary, {reuseaddr,true}, {packet,2}]) of {ok,MainSock} -> Starter ! {self(),started}, request(MainSock,{target_info,init_target_info()}), loop(#state{controller={Host,MainSock}}); Error -> Starter ! {self(),{error, {could_not_contact_controller,Error}}} end. init_target_info() -> [$.|Emu] = code:objfile_extension(), {_, OTPRel} = init:script_id(), TestServerDir = filename:absname(filename:dirname(code:which(?MODULE))), #target_info{os_family=test_server_sup:get_os_family(), os_type=os:type(), version=erlang:system_info(version), system_version=erlang:system_info(system_version), root_dir=code:root_dir(), test_server_dir=TestServerDir, emulator=Emu, otp_release=OTPRel, username=test_server_sup:get_username(), cookie=atom_to_list(erlang:get_cookie())}. loop(#state{controller={_,MainSock}} = State) -> receive {tcp, MainSock, <<1,Request/binary>>} -> State1 = decode_main(binary_to_term(Request),State), loop(State1); {tcp_closed, MainSock} -> gen_tcp:close(MainSock), halt(); {'EXIT',Pid,Reason} -> case lists:keysearch(Pid,1,State#state.jobs) of {value,{Pid,Name}} -> case Reason of normal -> ignore; _other -> request(MainSock,{job_proc_killed,Name,Reason}) end, NewJobs = lists:keydelete(Pid,1,State#state.jobs), loop(State#state{jobs = NewJobs}); false -> loop(State) end end. decode_main({job,Port,Name},#state{controller={Host,_},jobs=Jobs}=State) -> S = self(), NewJob = spawn_link(fun() -> job(Host,Port,S) end), receive {NewJob,started} -> State#state{jobs=[{NewJob,Name}|Jobs]}; {NewJob,_Error} -> State end. init_purify() -> purify_new_leaks(). job(Host,Port,Starter) -> process_flag(trap_exit,true), init_purify(), case gen_tcp:connect(Host,Port, [binary, {reuseaddr,true}, {packet,4}, {active,false}]) of {ok,JobSock} -> Starter ! {self(),started}, job(JobSock); Error -> Starter ! 
{self(),{error, {could_not_contact_controller,Error}}} end. job(JobSock) -> JobDir = get_jobdir(), ok = file:make_dir(JobDir), ok = file:make_dir(filename:join(JobDir,?priv_dir)), put(test_server_job_sock,JobSock), put(test_server_job_dir,JobDir), {ok,Cwd} = file:get_cwd(), job_loop(JobSock), ok = file:set_cwd(Cwd), also recursively removes ok. get_jobdir() -> Now = now(), {{Y,M,D},{H,Mi,S}} = calendar:now_to_local_time(Now), Basename = io_lib:format("~w-~2.2.0w-~2.2.0w_~2.2.0w.~2.2.0w.~2.2.0w_~w", [Y,M,D,H,Mi,S,element(3,Now)]), if target has a file master , do n't use to look up cwd case lists:keymember(master,1,init:get_arguments()) of true -> {ok,Cwd} = file:get_cwd(), Cwd ++ "/" ++ Basename; false -> filename:absname(Basename) end. send_privdir(JobDir,JobSock) -> LocalPrivDir = filename:join(JobDir,?priv_dir), case file:list_dir(LocalPrivDir) of {ok,List} when List/=[] -> Tarfile0 = ?priv_dir ++ ".tar.gz", Tarfile = filename:join(JobDir,Tarfile0), {ok,Tar} = erl_tar:open(Tarfile,[write,compressed,cooked]), ok = erl_tar:add(Tar,LocalPrivDir,?priv_dir,[]), ok = erl_tar:close(Tar), {ok,TarBin} = file:read_file(Tarfile), file:delete(Tarfile), ok = del_dir(JobDir), request(JobSock,{{privdir,Tarfile0},TarBin}); _ -> ok = del_dir(JobDir), request(JobSock,{privdir,empty_priv_dir}) end. del_dir(Dir) -> case file:read_file_info(Dir) of {ok,#file_info{type=directory}} -> {ok,Cont} = file:list_dir(Dir), lists:foreach(fun(F) -> del_dir(filename:join(Dir,F)) end, Cont), ok = file:del_dir(Dir); {ok,#file_info{}} -> ok = file:delete(Dir); _r -> catch file:delete(Dir), ok end. job_loop(JobSock) -> Request = recv(JobSock), case decode_job(Request) of ok -> job_loop(JobSock); {stop,R} -> R end. decode_job({{beam,Mod,Which},Beam}) -> FIXME , shared directory structure on host and target required , " Library beams " are not loaded from ... /Patrik code:add_patha(filename:dirname(Which)), End of Patriks uglyness ... 
{module,Mod} = code:load_binary(Mod,Which,Beam), ok; decode_job({{datadir,Tarfile0},Archive}) -> JobDir = get(test_server_job_dir), Tarfile = filename:join(JobDir,Tarfile0), ok = file:write_file(Tarfile,Archive), ok = erl_tar:extract(Tarfile,[compressed,{cwd,JobDir}]), ok = file:delete(Tarfile), ok; decode_job({test_case,Case}) -> Result = run_test_case_apply(Case), JobSock = get(test_server_job_sock), request(JobSock,{test_case_result,Result}), case test_server_sup:tar_crash_dumps() of {error,no_crash_dumps} -> request(JobSock,{crash_dumps,no_crash_dumps}); {ok,TarFile} -> {ok,TarBin} = file:read_file(TarFile), file:delete(TarFile), request(JobSock,{{crash_dumps,filename:basename(TarFile)},TarBin}) end, ok; decode_job({sync_apply,{M,F,A}}) -> R = apply(M,F,A), request(get(test_server_job_sock),{sync_result,R}), ok; decode_job(job_done) -> {stop,stopped}. cover_compile({none,_Exclude,Include,Cross}) -> CompileMods = Include++Cross, case length(CompileMods) of 0 -> io:fwrite("WARNING: No modules to cover compile!\n\n",[]), {ok,[]}; N -> io:fwrite("Cover compiling ~w modules - " "this may take some time... ",[N]), do_cover_compile(CompileMods), io:fwrite("done\n\n",[]), {ok,Include} end; cover_compile({App,all,Include,Cross}) -> CompileMods = Include++Cross, case length(CompileMods) of 0 -> io:fwrite("WARNING: No modules to cover compile!\n\n",[]), {ok,[]}; N -> io:fwrite("Cover compiling '~w' (~w files) - " "this may take some time... 
",[App,N]), io:format("\nWARNING: All modules in \'~w\' are excluded\n" "Only cover compiling modules in include list " "and the modules\nin the cross cover file:\n" "~p\n", [App,CompileMods]), do_cover_compile(CompileMods), io:fwrite("done\n\n",[]), {ok,Include} end; cover_compile({App,Exclude,Include,Cross}) -> case code:lib_dir(App) of {error,bad_name} -> case Include++Cross of [] -> io:format("\nWARNING: Can't find lib_dir for \'~w\'\n" "Not cover compiling!\n\n",[App]), {error,application_not_found}; CompileMods -> io:fwrite("Cover compiling '~w' (~w files) - " "this may take some time... ", [App,length(CompileMods)]), io:format("\nWARNING: Can't find lib_dir for \'~w\'\n" "Only cover compiling modules in include list: " "~p\n", [App,Include]), do_cover_compile(CompileMods), io:fwrite("done\n\n",[]), {ok,Include} end; LibDir -> EbinDir = filename:join([LibDir,"ebin"]), WC = filename:join(EbinDir,"*.beam"), AllMods = module_names(filelib:wildcard(WC)), AnalyseMods = (AllMods ++ Include) -- Exclude, CompileMods = AnalyseMods ++ Cross, case length(CompileMods) of 0 -> io:fwrite("WARNING: No modules to cover compile!\n\n",[]), {ok,[]}; N -> io:fwrite("Cover compiling '~w' (~w files) - " "this may take some time... ",[App,N]), do_cover_compile(CompileMods), io:fwrite("done\n\n",[]), {ok,AnalyseMods} end end. module_names(Beams) -> [list_to_atom(filename:basename(filename:rootname(Beam))) || Beam <- Beams]. 
%% do_cover_compile(Modules) -> ok
%%
%% Cover compile the given modules, unsticking sticky modules first
%% and loading modules that are not yet loaded.
%% NOTE(review): the body of do_cover_compile/1 was lost in the
%% extracted source; restored here as a deduplicating dispatch to
%% do_cover_compile1/1 -- confirm against the original file.
do_cover_compile(Modules) ->
    do_cover_compile1(lists:usort(Modules)).

%% Never cover compile the cover tool itself, or the test server.
do_cover_compile1([Dont|Rest]) when Dont=:=cover;
                                    Dont=:=test_server;
                                    Dont=:=test_server_ctrl ->
    do_cover_compile1(Rest);
do_cover_compile1([M|Rest]) ->
    case {code:is_sticky(M),code:is_loaded(M)} of
        {true,_} ->
            %% Sticky modules must be unstuck before cover compiling,
            %% and made sticky again afterwards.
            code:unstick_mod(M),
            case cover:compile_beam(M) of
                {ok,_} ->
                    ok;
                Error ->
                    io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
                              [M,Error])
            end,
            code:stick_mod(M),
            do_cover_compile1(Rest);
        {false,false} ->
            %% Not loaded yet -- load it, then retry this module.
            case code:load_file(M) of
                {module,_} ->
                    do_cover_compile1([M|Rest]);
                Error ->
                    io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]),
                    do_cover_compile1(Rest)
            end;
        {false,_} ->
            case cover:compile_beam(M) of
                {ok,_} ->
                    ok;
                Error ->
                    io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
                              [M,Error])
            end,
            do_cover_compile1(Rest)
    end;
do_cover_compile1([]) ->
    ok.

%% cover_analyse(Analyse,Modules) -> [{M,{Cov,NotCov,Details}}]
%%   Analyse = {details,Dir} | details | {overview,void()} | overview
%%
%% Cover analysis. If this is a remote target, analyse_to_file can not
%% be used. In that case the analyse level 'line' is used instead if
%% Analyse==details. On a local target (Analyse=={details,Dir})
%% analyse_to_file can be used directly.
%% cover_analyse(Analyse,Modules) -> [{M,{Cov,NotCov,Details}}]
%%
%% Analyse every module on 'module' level, and produce per-module
%% details according to Analyse (exported coverdata and HTML files,
%% per-line data, or nothing). Sticky modules are temporarily unstuck
%% while cover is stopped.
cover_analyse(Analyse,Modules) ->
    io:fwrite("Cover analysing...\n",[]),
    DetailsFun =
        case Analyse of
            {details,Dir} ->
                case cover:export(filename:join(Dir,"all.coverdata")) of
                    ok ->
                        fun(M) ->
                                OutFile = filename:join(Dir,
                                                        atom_to_list(M) ++
                                                        ".COVER.html"),
                                case cover:analyse_to_file(M,OutFile,[html]) of
                                    {ok,_} ->
                                        {file,OutFile};
                                    Error ->
                                        Error
                                end
                        end;
                    Error ->
                        fun(_) -> Error end
                end;
            details ->
                fun(M) ->
                        case cover:analyse(M,line) of
                            {ok,Lines} ->
                                {lines,Lines};
                            Error ->
                                Error
                        end
                end;
            {overview,Dir} ->
                case cover:export(filename:join(Dir,"all.coverdata")) of
                    ok ->
                        fun(_) -> undefined end;
                    Error ->
                        fun(_) -> Error end
                end;
            overview ->
                fun(_) -> undefined end
        end,
    R = lists:map(
          fun(M) ->
                  case cover:analyse(M,module) of
                      {ok,{M,{Cov,NotCov}}} ->
                          {M,{Cov,NotCov,DetailsFun(M)}};
                      Err ->
                          io:fwrite("WARNING: Analysis failed for ~w. "
                                    "Reason: ~p\n",
                                    [M,Err]),
                          {M,Err}
                  end
          end, Modules),
    Sticky = unstick_all_sticky(node()),
    cover:stop(),
    stick_all_sticky(node(),Sticky),
    R.

%% Unstick all sticky cover-compiled modules; returns the list of
%% modules that were unstuck so they can be made sticky again.
unstick_all_sticky(Node) ->
    lists:filter(
      fun(M) ->
              case code:is_sticky(M) of
                  true ->
                      rpc:call(Node,code,unstick_mod,[M]),
                      true;
                  false ->
                      false
              end
      end,
      cover:modules()).

%% Make the given modules sticky again on Node.
stick_all_sticky(Node,Sticky) ->
    lists:foreach(
      fun(M) ->
              rpc:call(Node,code,stick_mod,[M])
      end,
      Sticky).

%% run_test_case_apply(Mod,Func,Args,Name,RunInit,MultiplyTimetrap) ->
%%    {Time,Value,Loc,Opts,Comment} | {died,Reason,unknown,Comment}
%%    Time = float() (seconds)
%%
%% Spawns the test case and makes it possible to capture all its
%% output from io:format/2, etc. Returns a tuple with the time spent
%% (in seconds) in the test case, the return value from the test case
%% or an {'EXIT',Reason} if the case failed; Loc points out where the
%% test case crashed (if it did). Comment may be set by
%% test_server:comment/1 and holds the reason if test_server:fail has
%% been called. {died,Reason,unknown,Comment} is returned if the test
%% case was killed.
%%
%% MultiplyTimetrap indicates a possible extension of all timetraps:
%% timetraps will be multiplied by this integer. If it is infinity, no
%% timetraps will be started at all.
%% Entry point called by test_server_ctrl: unpack the case tuple, set
%% up Valgrind/Purify bookkeeping, run the case and report process
%% count before/after plus any detected failures.
run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,MultiplyTimetrap}) ->
    purify_format("Test case #~w ~w:~w/1", [CaseNum, Mod, Func]),
    case os:getenv("TS_RUN_VALGRIND") of
        false ->
            ok;
        _ ->
            %% Make each test case log to its own valgrind file.
            os:putenv("VALGRIND_LOGFILE_INFIX",atom_to_list(Mod)++"."++
                      atom_to_list(Func)++"-")
    end,
    test_server_h:testcase({Mod,Func,1}),
    ProcBef = erlang:system_info(process_count),
    Result = run_test_case_apply(Mod, Func, Args, Name, RunInit,
                                 MultiplyTimetrap),
    ProcAft = erlang:system_info(process_count),
    purify_new_leaks(),
    DetFail = get(test_server_detected_fail),
    {Result,DetFail,ProcBef,ProcAft}.

run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) ->
    case get(test_server_job_dir) of
        undefined ->
            %% i'm a local target
            do_run_test_case_apply(Mod, Func, Args, Name, RunInit,
                                   MultiplyTimetrap);
        JobDir ->
            %% i'm a remote target
            %% Rewrite data_dir/priv_dir in the config to point at the
            %% local job directory on this target.
            case Args of
                [Config] when is_list(Config) ->
                    {value,{data_dir,HostDataDir}} =
                        lists:keysearch(data_dir, 1, Config),
                    DataBase = filename:basename(HostDataDir),
                    TargetDataDir = filename:join(JobDir, DataBase),
                    Config1 = lists:keyreplace(data_dir, 1, Config,
                                               {data_dir,TargetDataDir}),
                    TargetPrivDir = filename:join(JobDir, ?priv_dir),
                    Config2 = lists:keyreplace(priv_dir, 1, Config1,
                                               {priv_dir,TargetPrivDir}),
                    do_run_test_case_apply(Mod, Func, [Config2], Name,
                                           RunInit, MultiplyTimetrap);
                _other ->
                    do_run_test_case_apply(Mod, Func, Args, Name, RunInit,
                                           MultiplyTimetrap)
            end
    end.
do_run_test_case_apply(Mod, Func, Args, Name, RunInit, MultiplyTimetrap) -> {ok,Cwd} = file:get_cwd(), Args2Print = case Args of [Args1] when is_list(Args1) -> lists:keydelete(tc_group_result, 1, Args1); _ -> Args end, print(minor, "Test case started with:\n~s:~s(~p)\n", [Mod,Func,Args2Print]), print(minor, "Current directory is ~p\n", [Cwd]), print_timestamp(minor,"Started at "), TCCallback = get(test_server_testcase_callback), Ref = make_ref(), OldGLeader = group_leader(), group_leader(self(),self()), Pid = spawn_link( fun() -> run_test_case_eval(Mod, Func, Args, Name, Ref, RunInit, MultiplyTimetrap, TCCallback) end), group_leader(OldGLeader, self()), put(test_server_detected_fail, []), run_test_case_msgloop(Ref, Pid, false, false, ""). hangs . Fixed by the 20 milli timeout check here , and by using monitor in io.erl ( livrem OCH hangslen mao :) A test case is known to have failed if it returns { ' EXIT ' , _ } tuple , or sends a message { failed , File , Line } to it 's group_leader run_test_case_msgloop(Ref, Pid, CaptureStdout, Terminate, Comment) -> {Timeout,ReturnValue} = case Terminate of {true, ReturnVal} -> {20, ReturnVal}; false -> {infinity, should_never_appear} end, receive {abort_current_testcase,Reason,From} -> Line = get_loc(Pid), Mon = erlang:monitor(process, Pid), exit(Pid,{testcase_aborted,Reason,Line}), erlang:yield(), From ! {self(),abort_current_testcase,ok}, NewComment = receive {'DOWN', Mon, process, Pid, _} -> Comment after 10000 -> Pid is probably trapping exits , hit it harder ... 
exit(Pid, kill), here 's the only place we know , so we save Error = lists:flatten(io_lib:format("Aborted: ~p",[Reason])), Error1 = lists:flatten([string:strip(S,left) || S <- string:tokens(Error,[$\n])]), if length(Error1) > 63 -> string:substr(Error1,1,60) ++ "..."; true -> Error1 end end, run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,NewComment); {io_request,From,ReplyAs,{put_chars,io_lib,Func,[Format,Args]}} when is_list(Format) -> Msg = (catch io_lib:Func(Format,Args)), run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,io_lib,Func,[Format,Args]}} when is_atom(Format) -> Msg = (catch io_lib:Func(Format,Args)), run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,Bytes}} -> run_test_case_msgloop_io( ReplyAs,CaptureStdout,Bytes,From,put_chars), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,unicode,io_lib,Func,[Format,Args]}} when is_list(Format) -> Msg = unicode_to_latin1(catch io_lib:Func(Format,Args)), run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,latin1,io_lib,Func,[Format,Args]}} when is_list(Format) -> Msg = (catch io_lib:Func(Format,Args)), run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,unicode,io_lib,Func,[Format,Args]}} when is_atom(Format) -> Msg = unicode_to_latin1(catch io_lib:Func(Format,Args)), run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,latin1,io_lib,Func,[Format,Args]}} when is_atom(Format) -> Msg = (catch 
io_lib:Func(Format,Args)), run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,unicode,Bytes}} -> run_test_case_msgloop_io( ReplyAs,CaptureStdout,unicode_to_latin1(Bytes),From,put_chars), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {io_request,From,ReplyAs,{put_chars,latin1,Bytes}} -> run_test_case_msgloop_io( ReplyAs,CaptureStdout,Bytes,From,put_chars), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); IoReq when element(1, IoReq) == io_request -> group_leader() ! IoReq, run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {structured_io,ClientPid,Msg} -> output(Msg, ClientPid), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {capture,NewCapture} -> run_test_case_msgloop(Ref,Pid,NewCapture,Terminate,Comment); {sync_apply,From,MFA} -> sync_local_or_remote_apply(false,From,MFA), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {sync_apply_proxy,Proxy,From,MFA} -> sync_local_or_remote_apply(Proxy,From,MFA), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {printout,Detail,Format,Args} -> print(Detail,Format,Args), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {comment,NewComment} -> Terminate1 = case Terminate of {true,{Time,Value,Loc,Opts,_OldComment}} -> {true,{Time,Value,mod_loc(Loc),Opts,NewComment}}; Other -> Other end, run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate1,NewComment); {'EXIT',Pid,{Ref,Time,Value,Loc,Opts}} -> RetVal = {Time/1000000,Value,mod_loc(Loc),Opts,Comment}, run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment); {'EXIT',Pid,Reason} -> case Reason of {timetrap_timeout,TVal,Loc} -> convert Loc to form that can be formatted Loc1 = mod_loc(Loc), {Mod,Func} = get_mf(Loc1), spawn_fw_call(Mod,Func,Pid,{timetrap_timeout,TVal}, Loc1,self(),Comment), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); 
{timetrap_timeout,TVal,Loc,InitOrEnd} -> Loc1 = mod_loc(Loc), {Mod,_Func} = get_mf(Loc1), spawn_fw_call(Mod,InitOrEnd,Pid,{timetrap_timeout,TVal}, Loc1,self(),Comment), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); {testcase_aborted,Reason,Loc} -> Loc1 = mod_loc(Loc), {Mod,Func} = get_mf(Loc1), spawn_fw_call(Mod,Func,Pid,{testcase_aborted,Reason}, Loc1,self(),Comment), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); killed -> spawn_fw_call(undefined,undefined,Pid,testcase_aborted_or_killed, unknown,self(),Comment), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); _ -> the testcase has terminated because of ( e.g. an exit spawn_fw_call(undefined,undefined,Pid,Reason, unknown,self(),Comment), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment) end; {_FwCallPid,fw_notify_done,RetVal} -> run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment); {'EXIT',_FwCallPid,{fw_notify_done,Func,Error}} -> CB = os:getenv("TEST_SERVER_FRAMEWORK"), Loc = case CB of false -> {test_server,Func}; _ -> {list_to_atom(CB),Func} end, RetVal = {died,{framework_error,Loc,Error},Loc,"Framework error"}, run_test_case_msgloop(Ref,Pid,CaptureStdout,{true,RetVal},Comment); {failed,File,Line} -> put(test_server_detected_fail, [{File, Line}| get(test_server_detected_fail)]), run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); _Other when not is_tuple(_Other) -> run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment); _Other when element(1, _Other) /= 'EXIT', element(1, _Other) /= started, element(1, _Other) /= finished, element(1, _Other) /= print -> run_test_case_msgloop(Ref,Pid,CaptureStdout,Terminate,Comment) after Timeout -> ReturnValue end. run_test_case_msgloop_io(ReplyAs,CaptureStdout,Msg,From,Func) -> case Msg of {'EXIT',_} -> From ! {io_reply,ReplyAs,{error,Func}}; _ -> From ! {io_reply,ReplyAs,ok} end, if CaptureStdout /= false -> CaptureStdout ! 
{captured,Msg}; true -> ok end, output({minor,Msg},From). output(Msg,Sender) -> local_or_remote_apply({test_server_ctrl,output,[Msg,Sender]}). spawn_fw_call(Mod,{init_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why, Loc,SendTo,Comment) -> FwCall = fun() -> Skip = {skip,{failed,{Mod,init_per_testcase,Why}}}, case catch test_server_sup:framework_call( end_tc,[?pl2a(Mod),Func,{Pid,Skip,[[]]}]) of {'EXIT',FwEndTCErr} -> exit({fw_notify_done,end_tc,FwEndTCErr}); _ -> ok end, SendTo ! {self(),fw_notify_done, {TVal/1000,Skip,Loc,[],Comment}} end, spawn_link(FwCall); spawn_fw_call(Mod,{end_per_testcase,Func},Pid,{timetrap_timeout,TVal}=Why, Loc,SendTo,_Comment) -> FwCall = fun() -> Conf = [{tc_status,ok}], case catch test_server_sup:framework_call(end_tc, [?pl2a(Mod),Func, {Pid, {failed,{Mod,end_per_testcase,Why}}, [Conf]}]) of {'EXIT',FwEndTCErr} -> exit({fw_notify_done,end_tc,FwEndTCErr}); _ -> ok end, SendTo ! {self(),fw_notify_done, {TVal/1000,{error,{Mod,end_per_testcase,Why}},Loc,[], ["<font color=\"red\">" "WARNING: end_per_testcase timed out!" "</font>"]}} end, spawn_link(FwCall); spawn_fw_call(Mod,Func,Pid,Error,Loc,SendTo,Comment) -> FwCall = fun() -> case catch fw_error_notify(Mod,Func,[], Error,Loc) of {'EXIT',FwErrorNotifyErr} -> exit({fw_notify_done,error_notification, FwErrorNotifyErr}); _ -> ok end, Conf = [{tc_status,{failed,timetrap_timeout}}], case catch test_server_sup:framework_call(end_tc, [?pl2a(Mod),Func, {Pid,Error,[Conf]}]) of {'EXIT',FwEndTCErr} -> exit({fw_notify_done,end_tc,FwEndTCErr}); _ -> ok end, SendTo ! {self(),fw_notify_done,{died,Error,Loc,Comment}} end, spawn_link(FwCall). start_job_proxy() -> group_leader(spawn(fun () -> job_proxy_msgloop() end), self()), ok. io_reply_proxy(ReplyTo) -> receive IoReply when is_tuple(IoReply), element(1, IoReply) == io_reply -> ReplyTo ! IoReply; _ -> io_reply_proxy(ReplyTo) end. 
%% Message loop of the job proxy process (group leader on a hidden
%% node). Forwards io requests -- via a temporary reply proxy so the
%% reply finds its way back to the original sender -- and relays
%% test_server sync_apply traffic to the real group leader.
job_proxy_msgloop() ->
    receive
        %% io stuff...
        IoReq when tuple_size(IoReq) >= 2,
                   element(1, IoReq) == io_request ->
            ReplyProxy = spawn(fun () -> io_reply_proxy(element(2, IoReq)) end),
            group_leader() ! setelement(2, IoReq, ReplyProxy);
        %% test_server stuff...
        {sync_apply, From, MFA} ->
            group_leader() ! {sync_apply_proxy, self(), From, MFA};
        {sync_result_proxy, To, Result} ->
            To ! {sync_result, Result};
        Msg ->
            group_leader() ! Msg
    end,
    job_proxy_msgloop().

%% A test case is known to have failed if it returns a {'EXIT',_}
%% tuple, or sends a message {failed,File,Line} to its group_leader.
%%
%% Runs in the spawned test case process; calls the framework's
%% init_tc, then the actual case, and exits with the tagged result so
%% the message loop can pick it up.
run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
                   MultiplyTimetrap, TCCallback) ->
    put(test_server_multiply_timetraps,MultiplyTimetrap),
    {{Time,Value},Loc,Opts} =
        case test_server_sup:framework_call(init_tc,[?pl2a(Mod),Func,Args0],
                                            {ok,Args0}) of
            {ok,Args} ->
                run_test_case_eval1(Mod, Func, Args, Name, RunInit,
                                    TCCallback);
            Error = {error,_Reason} ->
                test_server_sup:framework_call(end_tc,
                                               [?pl2a(Mod),Func,
                                                {Error,Args0}]),
                {{0,{skip,{failed,Error}}},{Mod,Func},[]};
            {fail,Reason} ->
                [Conf] = Args0,
                Conf1 = [{tc_status,{failed,Reason}} | Conf],
                fw_error_notify(Mod, Func, Conf, Reason),
                test_server_sup:framework_call(end_tc,
                                               [?pl2a(Mod),Func,
                                                {{error,Reason},[Conf1]}]),
                {{0,{failed,Reason}},{Mod,Func},[]};
            Skip = {skip,_Reason} ->
                test_server_sup:framework_call(end_tc,
                                               [?pl2a(Mod),Func,
                                                {Skip,Args0}]),
                {{0,Skip},{Mod,Func},[]};
            {auto_skip,Reason} ->
                test_server_sup:framework_call(end_tc,
                                               [?pl2a(Mod), Func,
                                                {{skip,Reason},Args0}]),
                {{0,{skip,{fw_auto_skip,Reason}}},{Mod,Func},[]}
        end,
    exit({Ref,Time,Value,Loc,Opts}).
run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) -> case RunInit of run_init -> put(test_server_init_or_end_conf,{init_per_testcase,Func}), put(test_server_loc, {Mod,{init_per_testcase,Func}}), ensure_timetrap(Args), case init_per_testcase(Mod, Func, Args) of Skip = {skip,Reason} -> Line = get_loc(), Conf = [{tc_status,{skipped,Reason}}], test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func,{Skip,[Conf]}]), {{0,{skip,Reason}},Line,[]}; {skip_and_save,Reason,SaveCfg} -> Line = get_loc(), Conf = [{tc_status,{skipped,Reason}},{save_config,SaveCfg}], test_server_sup:framework_call(end_tc,[?pl2a(Mod),Func, {{skip,Reason},[Conf]}]), {{0,{skip,Reason}},Line,[]}; {ok,NewConf} -> put(test_server_init_or_end_conf,undefined), NewConf1 = user_callback(TCCallback, Mod, Func, init, NewConf), put(test_server_loc, {Mod,Func}), {{T,Return},Loc} = {ts_tc(Mod, Func, [NewConf1]),get_loc()}, {EndConf,TSReturn,FWReturn} = case Return of {E,TCError} when E=='EXIT' ; E==failed -> fw_error_notify(Mod, Func, NewConf1, TCError, mod_loc(Loc)), {[{tc_status,{failed,TCError}}|NewConf1], Return,{error,TCError}}; SaveCfg={save_config,_} -> {[{tc_status,ok},SaveCfg|NewConf1],Return,ok}; {skip_and_save,Why,SaveCfg} -> Skip = {skip,Why}, {[{tc_status,{skipped,Why}},{save_config,SaveCfg}|NewConf1], Skip,Skip}; {skip,Why} -> {[{tc_status,{skipped,Why}}|NewConf1],Return,Return}; _ -> {[{tc_status,ok}|NewConf1],Return,ok} end, EndConf1 = user_callback(TCCallback, Mod, Func, 'end', EndConf), {FWReturn1,TSReturn1,EndConf2} = case end_per_testcase(Mod, Func, EndConf1) of SaveCfg1={save_config,_} -> {FWReturn,TSReturn,[SaveCfg1|lists:keydelete(save_config, 1, EndConf1)]}; fw_error_notify(Mod, Func, EndConf1, ReasonToFail), {{error,ReasonToFail},{failed,ReasonToFail},EndConf1}; {Failure,TSReturn,EndConf1}; _ -> {FWReturn,TSReturn,EndConf1} end, case test_server_sup:framework_call(end_tc, [?pl2a(Mod), Func, {FWReturn1,[EndConf2]}]) of {fail,Reason} -> fw_error_notify(Mod, Func, EndConf2, 
Reason), {{T,{failed,Reason}},{Mod,Func},[]}; _ -> {{T,TSReturn1},Loc,[]} end end; skip_init -> Args1 = user_callback(TCCallback, Mod, Func, init, Args), ensure_timetrap(Args1), ts_tc does a catch put(test_server_loc, {Mod,Func}), should be called with the name as the first argument Args2 = if Name == undefined -> Args1; true -> [Name | Args1] end, {{T,Return},Loc} = {ts_tc(Mod, Func, Args2),get_loc()}, Return1 = user_callback(TCCallback, Mod, Func, 'end', Return), {Return2,Opts} = process_return_val([Return1], Mod,Func,Args1, Loc, Return1), {{T,Return2},Loc,Opts} end. process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) -> ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result], case lists:all(fun(Val) when is_tuple(Val) -> lists:any(fun(T) -> T == element(1, Val) end, ReturnTags); (ok) -> true; (_) -> false end, Return) of process_return_val1(Return, M,F,A, Loc, Final, []); test_server_sup:framework_call(end_tc, [?pl2a(M),F,{ok,A}]), {Return,[]} end; process_return_val(Return, M,F,A, Loc, Final) -> process_return_val1(Return, M,F,A, Loc, Final, []). 
%% Walk the list of tagged return values from a test case, folding
%% save_config/skip/comment into the final result and accumulating
%% options (e.g. return_group_result) to hand back to the caller.
process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts)
  when E=='EXIT';
       E==failed ->
    fw_error_notify(M,F,A, TCError, mod_loc(Loc)),
    test_server_sup:framework_call(end_tc,
                                   [?pl2a(M),F,
                                    {{error,TCError},
                                     [[{tc_status,{failed,TCError}}|Args]]}]),
    {Failed,SaveOpts};
process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final,
                    SaveOpts) ->
    process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts);
process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _,
                    SaveOpts) ->
    process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc,
                        {skip,Why}, SaveOpts);
process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final,
                    SaveOpts) ->
    process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]);
process_return_val1([RetVal={Tag,_}|Opts], M,F,A, Loc, _, SaveOpts)
  when Tag==skip;
       Tag==comment ->
    process_return_val1(Opts, M,F,A, Loc, RetVal, SaveOpts);
process_return_val1([_|Opts], M,F,A, Loc, Final, SaveOpts) ->
    process_return_val1(Opts, M,F,A, Loc, Final, SaveOpts);
process_return_val1([], M,F,A, _Loc, Final, SaveOpts) ->
    test_server_sup:framework_call(end_tc, [?pl2a(M),F,{Final,A}]),
    {Final,lists:reverse(SaveOpts)}.

%% user_callback(TCCallback, Mod, Func, InitOrEnd, Args) -> Args1
%%
%% Optional user callback invoked both before ('init') and after
%% ('end') the test case runs. It may modify the config list; any
%% crash or non-list result is ignored and the original arguments are
%% kept.
user_callback(undefined, _, _, _, Args) ->
    Args;
user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, [Args]) when is_list(Args) ->
    case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
        Args1 when is_list(Args1) ->
            [Args1];
        _ ->
            [Args]
    end;
user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, Args) ->
    case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
        Args1 when is_list(Args1) ->
            Args1;
        _ ->
            Args
    end.
init_per_testcase(Mod, Func, Args) -> case code:is_loaded(Mod) of false -> code:load_file(Mod); _ -> ok end, case erlang:function_exported(Mod,init_per_testcase,2) of true -> case catch my_apply(Mod, init_per_testcase, [Func|Args]) of {'$test_server_ok',{Skip,Reason}} when Skip==skip; Skip==skipped -> {skip,Reason}; {'$test_server_ok',Res={skip_and_save,_,_}} -> Res; {'$test_server_ok',NewConf} when is_list(NewConf) -> case lists:filter(fun(T) when is_tuple(T) -> false; (_) -> true end, NewConf) of [] -> {ok,NewConf}; Bad -> group_leader() ! {printout,12, "ERROR! init_per_testcase has returned " "bad elements in Config: ~p\n",[Bad]}, {skip,{failed,{Mod,init_per_testcase,bad_return}}} end; {'$test_server_ok',_Other} -> group_leader() ! {printout,12, "ERROR! init_per_testcase did not return " "a Config list.\n",[]}, {skip,{failed,{Mod,init_per_testcase,bad_return}}}; {'EXIT',Reason} -> Line = get_loc(), FormattedLoc = test_server_sup:format_loc(mod_loc(Line)), group_leader() ! {printout,12, "ERROR! init_per_testcase crashed!\n" "\tLocation: ~s\n\tReason: ~p\n", [FormattedLoc,Reason]}, {skip,{failed,{Mod,init_per_testcase,Reason}}}; Other -> Line = get_loc(), FormattedLoc = test_server_sup:format_loc(mod_loc(Line)), group_leader() ! {printout,12, "ERROR! init_per_testcase thrown!\n" "\tLocation: ~s\n\tReason: ~p\n", [FormattedLoc, Other]}, {skip,{failed,{Mod,init_per_testcase,Other}}} end; false -> [Config] = Args, {ok, Config} end. end_per_testcase(Mod, Func, Conf) -> case erlang:function_exported(Mod,end_per_testcase,2) of true -> do_end_per_testcase(Mod,end_per_testcase,Func,Conf); false -> case erlang:function_exported(Mod,fin_per_testcase,2) of true -> do_end_per_testcase(Mod,fin_per_testcase,Func,Conf); false -> ok end end. 
do_end_per_testcase(Mod,EndFunc,Func,Conf) -> put(test_server_init_or_end_conf,{EndFunc,Func}), put(test_server_loc, {Mod,{EndFunc,Func}}), case catch my_apply(Mod, EndFunc, [Func,Conf]) of {'$test_server_ok',SaveCfg={save_config,_}} -> SaveCfg; {'$test_server_ok',{fail,_}=Fail} -> Fail; {'$test_server_ok',_} -> ok; {'EXIT',Reason} = Why -> comment(io_lib:format("<font color=\"red\">" "WARNING: ~w crashed!" "</font>\n",[EndFunc])), group_leader() ! {printout,12, "WARNING: ~w crashed!\n" "Reason: ~p\n" "Line: ~s\n", [EndFunc, Reason, test_server_sup:format_loc( mod_loc(get_loc()))]}, {failed,{Mod,end_per_testcase,Why}}; Other -> comment(io_lib:format("<font color=\"red\">" "WARNING: ~w thrown!" "</font>\n",[EndFunc])), group_leader() ! {printout,12, "WARNING: ~w thrown!\n" "Reason: ~p\n" "Line: ~s\n", [EndFunc, Other, test_server_sup:format_loc( mod_loc(get_loc()))]}, {failed,{Mod,end_per_testcase,Other}} end. get_loc() -> case catch test_server_line:get_lines() of [] -> get(test_server_loc); {'EXIT',_} -> get(test_server_loc); Loc -> Loc end. get_loc(Pid) -> {dictionary,Dict} = process_info(Pid, dictionary), lists:foreach(fun({Key,Val}) -> put(Key,Val) end,Dict), get_loc(). get_mf([{M,F,_}|_]) -> {M,F}; get_mf([{M,F}|_]) -> {M,F}; get_mf(_) -> {undefined,undefined}. mod_loc(Loc) -> case Loc of [{{_M,_F},_L}|_] -> [{?pl2a(M),F,L} || {{M,F},L} <- Loc]; [{_M,_F}|_] -> [{?pl2a(M),F} || {M,F} <- Loc]; {{M,F},L} -> [{?pl2a(M),F,L}]; {M,ForL} -> [{?pl2a(M),ForL}]; _ -> Loc end. fw_error_notify(Mod, Func, Args, Error) -> test_server_sup:framework_call(error_notification, [?pl2a(Mod),Func,[Args], {Error,unknown}]). fw_error_notify(Mod, Func, Args, Error, Loc) -> test_server_sup:framework_call(error_notification, [?pl2a(Mod),Func,[Args], {Error,Loc}]). print(Detail , Format , ) - > ok = [ term ( ) ] print(Detail,Format,Args) -> local_or_remote_apply({test_server_ctrl,print,[Detail,Format,Args]}). 
print_timestamp(Detail,Leader) -> local_or_remote_apply({test_server_ctrl,print_timestamp,[Detail,Leader]}). lookup_config(Key,Config) -> case lists:keysearch(Key,1,Config) of {value,{Key,Val}} -> Val; _ -> io:format("Could not find element ~p in Config.~n",[Key]), undefined end. ts_tc(M, F, A) -> Before = erlang:now(), Val = (catch my_apply(M, F, A)), After = erlang:now(), Result = case Val of {'$test_server_ok', R} -> {'EXIT',_Reason} = R -> Other -> end, Elapsed = (element(1,After)*1000000000000 +element(2,After)*1000000+element(3,After)) - (element(1,Before)*1000000000000 +element(2,Before)*1000000+element(3,Before)), {Elapsed, Result}. my_apply(M, F, A) -> {'$test_server_ok',apply(M, F, A)}. unicode_to_latin1(Chars) when is_list(Chars); is_binary(Chars) -> lists:flatten( [ case X of High when High > 255 -> io_lib:format("\\{~.8B}",[X]); Low -> Low end || X <- unicode:characters_to_list(Chars,unicode) ]); unicode_to_latin1(Garbage) -> Garbage. format(Format , ) - > IoLibReturn format(Detail , Format , ) - > IoLibReturn = [ term ( ) , ... ] IoLibReturn = term ( ) Logs the Format string and , similar to io : format/1/2 etc . If Detail is not specified , the default detail level ( which is 50 ) is used . format(Format) -> format(minor, Format, []). format(major, Format) -> format(major, Format, []); format(minor, Format) -> format(minor, Format, []); format(Detail, Format) when is_integer(Detail) -> format(Detail, Format, []); format(Format, Args) -> format(minor, Format, Args). format(Detail, Format, Args) -> Str = case catch io_lib:format(Format,Args) of {'EXIT',_} -> io_lib:format("illegal format; ~p with args ~p.\n", [Format,Args]); Valid -> Valid end, log({Detail, Str}). log(Msg) -> group_leader() ! {structured_io, self(), Msg}, ok. capture_start() -> group_leader() ! {capture,self()}, ok. capture_stop() -> group_leader() ! {capture,false}, ok. by capture_get/0 . 
It is not necessary to call capture_stop/0 before capture_get() -> test_server_sup:capture_get([]). messages_get() -> test_server_sup:messages_get([]). sleep(infinity) -> receive after infinity -> ok end; sleep(MSecs) -> receive after trunc(MSecs) -> ok end, ok. fail(Reason) -> comment(cast_to_list(Reason)), exit({suite_failed,Reason}). cast_to_list(X) when is_list(X) -> X; cast_to_list(X) when is_atom(X) -> atom_to_list(X); cast_to_list(X) -> lists:flatten(io_lib:format("~p", [X])). fail() -> exit(suite_failed). break(Comment) -> case erase(test_server_timetraps) of undefined -> ok; List -> lists:foreach(fun(Ref) -> timetrap_cancel(Ref) end,List) end, io:format(user, "\n\n\n--- SEMIAUTOMATIC TESTING ---" "\nThe test case executes on process ~w" "\n\n\n~s" "\n\n\n-----------------------------\n\n" "Continue with --> test_server:continue().\n", [self(),Comment]), case whereis(test_server_break_process) of undefined -> spawn_break_process(self()); OldBreakProcess -> OldBreakProcess ! cancel, spawn_break_process(self()) end, receive continue -> ok end. spawn_break_process(Pid) -> spawn(fun() -> register(test_server_break_process,self()), receive continue -> continue(Pid); cancel -> ok end end). continue() -> case whereis(test_server_break_process) of undefined -> ok; BreakProcess -> BreakProcess ! continue end. continue(Pid) -> Pid ! continue. Returns the amount to scale timetraps with . timetrap_scale_factor() -> F0 = case test_server:purify_is_running() of true -> 5; false -> 1 end, F1 = case {is_debug(), has_lock_checking()} of {true,_} -> 6 * F0; {false,true} -> 2 * F0; {false,false} -> F0 end, F2 = case has_superfluous_schedulers() of true -> 3*F1; false -> F1 end, F = case test_server_sup:get_os_family() of vxworks -> 5 * F2; _ -> F2 end, case test_server:is_cover() of true -> 10 * F; false -> F end. trap is not cancelled with timetrap_cancel/1 , within Timeout milliseconds . 
timetrap(Timeout0) -> Timeout = time_ms(Timeout0), cancel_default_timetrap(), case get(test_server_multiply_timetraps) of undefined -> timetrap1(Timeout); infinity -> infinity; Int -> timetrap1(Timeout*Int) end. timetrap1(Timeout) -> Ref = spawn_link(test_server_sup,timetrap,[Timeout,self()]), case get(test_server_timetraps) of undefined -> put(test_server_timetraps,[Ref]); List -> put(test_server_timetraps,[Ref|List]) end, Ref. ensure_timetrap(Config) -> case get(test_server_timetraps) of [_|_] -> ok; _ -> case get(test_server_default_timetrap) of undefined -> ok; Garbage -> erase(test_server_default_timetrap), format("=== WARNING: garbage in test_server_default_timetrap: ~p~n", [Garbage]) end, DTmo = case lists:keysearch(default_timeout,1,Config) of {value,{default_timeout,Tmo}} -> Tmo; _ -> ?DEFAULT_TIMETRAP_SECS end, format("=== test_server setting default timetrap of ~p seconds~n", [DTmo]), put(test_server_default_timetrap, timetrap(seconds(DTmo))) end. cancel_default_timetrap() -> case get(test_server_default_timetrap) of undefined -> ok; TimeTrap when is_pid(TimeTrap) -> timetrap_cancel(TimeTrap), erase(test_server_default_timetrap), format("=== test_server canceled default timetrap since another timetrap was set~n"), ok; Garbage -> erase(test_server_default_timetrap), format("=== WARNING: garbage in test_server_default_timetrap: ~p~n", [Garbage]), error end. time_ms({hours,N}) -> hours(N); time_ms({minutes,N}) -> minutes(N); time_ms({seconds,N}) -> seconds(N); time_ms({Other,_N}) -> format("=== ERROR: Invalid time specification: ~p. " "Should be seconds, minutes, or hours.~n", [Other]), exit({invalid_time_spec,Other}); time_ms(Ms) when is_integer(Ms) -> Ms; time_ms(Other) -> exit({invalid_time_spec,Other}). 
timetrap_cancel(infinity) -> ok; timetrap_cancel(Handle) -> case get(test_server_timetraps) of undefined -> ok; [Handle] -> erase(test_server_timetraps); List -> put(test_server_timetraps,lists:delete(Handle,List)) end, test_server_sup:timetrap_cancel(Handle). hours(N) -> trunc(N * 1000 * 60 * 60). minutes(N) -> trunc(N * 1000 * 60). seconds(N) -> trunc(N * 1000). Measures the time spent evaluating MFA . The measurement is done with timecall(M, F, A) -> test_server_sup:timecall(M,F,A). Evaluates MFA or Fun N times , and returns ok . do_times(N,M,F,A) when N>0 -> apply(M,F,A), do_times(N-1,M,F,A); do_times(0,_,_,_) -> ok. do_times(N,Fun) when N>0 -> Fun(), do_times(N-1,Fun); do_times(0,_) -> ok. Tries to run tricky_test_case ( ) up to 4 times , Tries running clock_sanity_check ( ) up to 8 is known to fail if the clock crosses an hour boundary during the test ( and the up to 8 test runs could never cross 2 boundaries ) m_out_of_n(0,_,_) -> ok; m_out_of_n(M,0,_) -> exit({m_out_of_n_failed,{M,left_to_do}}); m_out_of_n(M,N,Fun) -> case catch Fun() of {'EXIT',_} -> m_out_of_n(M,N-1,Fun); _Other -> m_out_of_n(M-1,N-1,Fun) end. Time - integer ( ) in milliseconds . Spaws a new process that calls MFA . The call is considered * * The call must terminate withing the given Time ( defaults call_crash(M,F,A) -> call_crash(infinity,M,F,A). call_crash(Time,M,F,A) -> call_crash(Time,any,M,F,A). call_crash(Time,Crash,M,F,A) -> test_server_sup:call_crash(Time,Crash,M,F,A). start_node(SlaveName , Type , Options ) - > OptionList is a tuplelist wich may contain one some exceptions , as for the case of and OSE , { erl , ReleaseList } - Use an Erlang emulator determined by ReleaseList as the test server is running . ReleaseList is a list { release , , { prog , Prog } , or ' this ' . Rel is executable . 
If the list has more than one element , works on Solaris and Linux , and the test start_node(Name, Type, Options) -> lists:foreach( fun(N) -> case firstname(N) of Name -> format("=== WARNING: Trying to start node \'~w\' when node" " with same first name exists: ~w", [Name, N]); _other -> ok end end, nodes()), group_leader() ! {sync_apply, self(), {test_server_ctrl,start_node,[Name,Type,Options]}}, Result = receive {sync_result,R} -> R end, case Result of {ok,Node} -> Cover = case is_cover() of true -> not is_shielded(Name) andalso same_version(Node); false -> false end, net_adm:ping(Node), case Cover of true -> Sticky = unstick_all_sticky(Node), cover:start(Node), stick_all_sticky(Node,Sticky); _ -> ok end, {ok,Node}; {fail,Reason} -> fail(Reason); Error -> Error end. firstname(N) -> list_to_atom(upto($@,atom_to_list(N))). upto(H, [H | _T]) -> []; upto(H, [X | T]) -> [X | upto(H,T)]. wait_for_node(Slave) -> group_leader() ! {sync_apply, self(), {test_server_ctrl,wait_for_node,[Slave]}}, receive {sync_result,R} -> R end. Also inform so it can clean up ! stop_node(Slave) -> Nocover = is_shielded(Slave) orelse not same_version(Slave), case is_cover() of true when not Nocover -> Sticky = unstick_all_sticky(Slave), cover:stop(Slave), stick_all_sticky(Slave,Sticky); _ -> ok end, group_leader() ! {sync_apply,self(),{test_server_ctrl,stop_node,[Slave]}}, Result = receive {sync_result,R} -> R end, case Result of ok -> erlang:monitor_node(Slave, true), slave:stop(Slave), receive {nodedown, Slave} -> format(minor, "Stopped slave node: ~p", [Slave]), format(major, "=node_stop ~p", [Slave]), true after 30000 -> format("=== WARNING: Node ~p does not seem to terminate.", [Slave]), false end; {error, _Reason} -> format("=== WARNING: Attempt to stop a nonexisting slavenode (~p)~n" "=== Trying to kill it anyway!!!", [Slave]), case net_adm:ping(Slave)of pong -> slave:stop(Slave), true; pang -> false end end. started using start_node/3 . 
is_release_available(Release) -> group_leader() ! {sync_apply, self(), {test_server_ctrl,is_release_available,[Release]}}, receive {sync_result,R} -> R end. run_on_shielded_node(Fun , CArgs ) - > term ( ) CArg - > list ( ) a process residing on the test_server node as group_leader . CArg - Extra command line arguments to use when starting run_on_shielded_node(Fun, CArgs) when is_function(Fun), is_list(CArgs) -> {A,B,C} = now(), Name = "shielded_node-" ++ integer_to_list(A) ++ "-" ++ integer_to_list(B) ++ "-" ++ integer_to_list(C), Node = case start_node(Name, slave, [{args, "-hidden " ++ CArgs}]) of {ok, N} -> N; Err -> fail({failed_to_start_shielded_node, Err}) end, Master = self(), Ref = make_ref(), Slave = spawn(Node, fun () -> start_job_proxy(), receive Ref -> Master ! {Ref, Fun()} end, receive after infinity -> infinity end end), MRef = erlang:monitor(process, Slave), Slave ! Ref, receive {'DOWN', MRef, _, _, Info} -> stop_node(Node), fail(Info); {Ref, Res} -> stop_node(Node), receive {'DOWN', MRef, _, _, _} -> Res end end. is_shielded(Name) -> case {cast_to_list(Name),atom_to_list(node())} of {"shielded_node"++_,_} -> true; {_,"shielded_node"++_} -> true; _ -> false end. same_version(Name) -> ThisVersion = erlang:system_info(version), OtherVersion = rpc:call(Name, erlang, system_info, [version]), ThisVersion =:= OtherVersion. temp_name(Stem) -> {A,B,C} = erlang:now(), RandomNum = A bxor B bxor C, RandomName = Stem ++ integer_to_list(RandomNum), {ok,Files} = file:list_dir(filename:dirname(Stem)), case lists:member(RandomName,Files) of true -> false -> RandomName end. app_test(App) -> app_test(App, pedantic). app_test(App, Mode) -> case os:type() of {ose,_} -> Comment = "Skipping app_test on OSE", {skip,Comment}; _other -> test_server_sup:app_test(App, Mode) end. is_native(Mod ) - > true | false is_native(Mod) -> case catch Mod:module_info(native_addresses) of [_|_] -> true; _Other -> false end. comment(String) -> group_leader() ! {comment,String}, ok. 
Returns the OsType of the target node . OsType is os_type() -> test_server_ctrl:get_target_os_type(). is_cover ( ) - > boolean ( ) is_cover() -> case whereis(cover_server) of undefined -> false; _ -> true end. is_debug() -> case catch erlang:system_info(debug_compiled) of {'EXIT', _} -> case string:str(erlang:system_info(system_version), "debug") of Int when is_integer(Int), Int > 0 -> true; _ -> false end; Res -> Res end. has_lock_checking() -> case catch erlang:system_info(lock_checking) of {'EXIT', _} -> false; Res -> Res end. has_superfluous_schedulers() -> case catch {erlang:system_info(schedulers), erlang:system_info(logical_processors)} of {S, P} when is_integer(S), is_integer(P), S > P -> true; _ -> false end. is_commercial() -> case string:str(erlang:system_info(system_version), "source") of Int when is_integer(Int), Int > 0 -> false; _ -> true end. Tests if Purify is currently running . purify_is_running() -> case catch erlang:system_info({error_checker, running}) of {'EXIT', _} -> false; Res -> Res end. Checks for new memory leaks if Purify is active . purify_new_leaks() -> case catch erlang:system_info({error_checker, memory}) of {'EXIT', _} -> false; Leaked when is_integer(Leaked) -> Leaked end. FdsInuse = integer ( ) if Purify is not running . purify_new_fds_inuse() -> case catch erlang:system_info({error_checker, fd}) of {'EXIT', _} -> false; Inuse when is_integer(Inuse) -> Inuse end. purify_format(Format , ) - > ok = lists ( ) Outputs the formatted string to Purify 's logfile , if Purify is active . purify_format(Format, Args) -> (catch erlang:system_info({error_checker, io_lib:format(Format, Args)})), ok. Generic send functions for communication with host sync_local_or_remote_apply(Proxy,From,{M,F,A} = MFA) -> case get(test_server_job_sock) of undefined -> Result = apply(M,F,A), if is_pid(Proxy) -> Proxy ! {sync_result_proxy,From,Result}; true -> From ! 
{sync_result,Result} end; JobSock -> request(JobSock,{sync_apply,MFA}), {sync_result,Result} = recv(JobSock), if is_pid(Proxy) -> Proxy ! {sync_result_proxy,From,Result}; true -> From ! {sync_result,Result} end end. local_or_remote_apply({M,F,A} = MFA) -> case get(test_server_job_sock) of undefined -> apply(M,F,A), ok; JobSock -> request(JobSock,{apply,MFA}), ok end. request(Sock,Request) -> gen_tcp:send(Sock,<<1,(term_to_binary(Request))/binary>>). Generic receive function for communication with host recv(Sock) -> case gen_tcp:recv(Sock,0) of {error,closed} -> gen_tcp:close(Sock), exit(connection_lost); {ok,<<1,Request/binary>>} -> binary_to_term(Request); {ok,<<0,B/binary>>} -> B end.
46be501e5b02280b0ba468f8366bf0a97e7b8ba328e32e000dfc5fe66aefb79d
baconpaul/composition-kit
tonal_theory.clj
(ns composition-kit.music-lib.tonal-theory)

;; A small library of pitch/interval/scale concepts: note lookup tables,
;; enharmonic helpers, and scale generation.

(def ^:private notes-data
  ;; FIX: the original metadata read ^:priave (a typo), which silently made
  ;; this var public; the intent — matching notes-data-by-midinote below —
  ;; is clearly ^:private.
  ;; Map of note keyword (e.g. :cis4) -> full note map with :note :pitch
  ;; :octave :notebase :accidental :midinote.
  (let [accidental-names {-2 "ees" -1 "es" 0 "" 1 "is" 2 "iis"}
        offset-from-c    {:c 0 :d 2 :e 4 :f 5 :g 7 :a 9 :b 11}
        notelist
        (for [notebase   [:a :b :c :d :e :f :g]
              accidental [-2 -1 0 1 2]
              octave     (range 8)]
          {:note       (keyword (str (name notebase)
                                     (get accidental-names accidental)
                                     octave))
           :pitch      (keyword (str (name notebase)
                                     (get accidental-names accidental)))
           :octave     octave
           :notebase   notebase
           :accidental accidental
           ;; c4 is MIDI 60; offset by octave, letter, and accidental.
           :midinote   (+ 60
                          (* 12 (- octave 4))
                          (notebase offset-from-c)
                          accidental)})]
    (reduce (fn [m v] (assoc m (:note v) v)) {} notelist)))

(def ^:private notes-data-by-midinote
  ;; MIDI note number -> vector of enharmonically-equivalent note maps.
  (reduce (fn [res n] (update res (:midinote n) (fnil conj []) n))
          {}
          (vals notes-data)))

(defn note-by-name
  "Full note map for a note keyword; (note-by-name :cis2) works just fine!"
  [name]
  (get notes-data name))

(defn notes-by-midinote
  "Vector of enharmonically-equivalent note maps for a MIDI note number,
  e.g. (first (notes-by-midinote 60))."
  [num]
  (get notes-data-by-midinote num))

(defn name-to-midinote
  "MIDI note number for a note keyword, or nil if unknown."
  [name]
  (:midinote (get notes-data name)))

(defn transpose
  "Transpose a note map by amt semitones. May not give the best enharmonic
  match (so transposing b by 1 may give bis, not c)."
  [note amt]
  (let [nmn (+ (:midinote note) amt)
        mn  (notes-by-midinote nmn)]
    (first mn)))

(defn enharmonic-equal?
  "True when two note maps denote the same pitch (same MIDI number)."
  [n1 n2]
  (= (:midinote n1) (:midinote n2)))

(defn interval-from-c
  "Positive interval in semitones (0-11) of a pitch keyword above c.
  So (interval-from-c :cis) is 1 and (interval-from-c :b) is 11."
  [note]
  (let [[tone & accidental] (name note)
        accidental-values   {"ees" -2 "es" -1 "" 0 "is" 1 "iis" 2}
        offset-from-c       {:c 0 :d 2 :e 4 :f 5 :g 7 :a 9 :b 11}]
    (mod (+ (offset-from-c (keyword (str tone)))
            (get accidental-values (reduce str accidental)))
         12)))

(defn interval-between
  "Semitones from n1 up to the nearest n2 above it."
  [n1 n2]
  (mod (- (interval-from-c n2) (interval-from-c n1)) 12))

(defn ^:private intervals-to-degrees
  ;; Turn a seq of 'W / 'H / 'WH step symbols into cumulative scale degrees.
  [intervals]
  (reductions (fn [prior ivl]
                (cond (= ivl 'WH) (+ 3 prior)
                      (= ivl 'W)  (+ 2 prior)
                      (= ivl 'H)  (inc prior)))
              0
              intervals))

(def ^:private scales-data
  ;; mode-r rotates the major-scale step pattern to produce the church modes.
  (let [mode-r (fn [n]
                 (let [maj '(W W H W W W H)]
                   (concat (drop n maj) (take n maj))))]
    {:major          (intervals-to-degrees '(W W H W W W H))
     :natural-minor  (intervals-to-degrees '(W H W W H W W))
     :harmonic-minor (intervals-to-degrees '(W H W W H WH H))
     :ionian         (intervals-to-degrees (mode-r 0))
     :dorian         (intervals-to-degrees (mode-r 1))
     :phrygian       (intervals-to-degrees (mode-r 2))
     :lydian         (intervals-to-degrees (mode-r 3))
     :mixolydian     (intervals-to-degrees (mode-r 4))
     :aeolian        (intervals-to-degrees (mode-r 5))
     :locrian        (intervals-to-degrees (mode-r 6))}))

(defn scale
  "Cumulative semitone degree list for a named scale keyword."
  [type]
  (type scales-data))

(defn known-scales [] (keys scales-data))

(defn scale-to-notes
  "Realize a scale (degree list) starting at a base note map, choosing
  enharmonic spellings so successive notes use successive letter names
  where possible. A naive (map #(first (notes-by-midinote ...)) ...) would
  work but often picks the wrong enharmonic spelling, hence the explicit
  loop that prefers the notebase one letter up from the prior note and
  punts to the first candidate when no such spelling exists."
  [in-scale base]
  (loop [scale (rest in-scale)
         res   [base]]
    (if (empty? scale)
      res
      (let [prior           (last res)
            next-mn         (+ (:midinote base) (first scale))
            next-note-cands (notes-by-midinote next-mn)
            ;; Advance the prior letter name in char space, wrapping g->a
            ;; (inc of \g is \h, which we map back to \a).
            next-notebase   (-> (name (:notebase prior))
                                first int inc char
                                (#(if (= % \h) \a %))
                                str
                                keyword)
            filtered (filter #(= (:notebase %) next-notebase) next-note-cands)
            best     (if (empty? filtered)
                       (first next-note-cands) ;; PUNT!
                       (first filtered))]
        (recur (rest scale) (conj res best))))))

(defn scale-pitches
  "Note keywords for a scale name starting at base. So
  (scale-pitches :c4 :major) gives you a c major scale starting at c4."
  [base scale-name]
  (map :note (scale-to-notes (scale scale-name) (note-by-name base))))
null
https://raw.githubusercontent.com/baconpaul/composition-kit/fce0addb74a9c30ba06e051d3bca51c5a2b0ce6f/src/composition_kit/music_lib/tonal_theory.clj
clojure
A nascent library of concepts like notes and intervals. I used this less composing than I thought I would but it's still handy to have around. (sorted-set-by (map-comparator :octave :notebase :accidental) A naive implementation, (map #(en-choice (get notes-by-midinote (+ % (:midinote base)))) scale))) works fine but has all sorts of problems with not picking the right note. So you need to do some enharmonic optimization (I guess it woudl be called), hence the explicit loop here where we pick the one with notename one up from prior, unless we can't, in which case we just punt The magic happens right here when we pick the next notebase keyword in string math space PUNT!
(ns composition-kit.music-lib.tonal-theory)

(def ^:private notes-data
  ;; FIX: ^:priave was a typo for ^:private (it silently made the var public).
  (let [accidental-names {-2 "ees" -1 "es" 0 "" 1 "is" 2 "iis"}
        offset-from-c    {:c 0 :d 2 :e 4 :f 5 :g 7 :a 9 :b 11}
        notelist
        (for [notebase   [:a :b :c :d :e :f :g]
              accidental [-2 -1 0 1 2]
              octave     (range 8)]
          {:note       (keyword (str (name notebase)
                                     (get accidental-names accidental)
                                     octave))
           :pitch      (keyword (str (name notebase)
                                     (get accidental-names accidental)))
           :octave     octave
           :notebase   notebase
           :accidental accidental
           :midinote   (+ 60
                          (* 12 (- octave 4))
                          (notebase offset-from-c)
                          accidental)})]
    (reduce (fn [m v] (assoc m (:note v) v)) {} notelist)))

(def ^:private notes-data-by-midinote
  (loop [n (vals notes-data) res {}]
    (if (empty? n)
      res
      (let [currn (first n)
            ss    (get res (:midinote currn) [])]
        (recur (rest n) (assoc res (:midinote currn) (conj ss currn)))))))

(defn note-by-name
  "(note-by-name :cis2) works just fine!"
  [name]
  (get notes-data name))

(defn notes-by-midinote
  "(first (notes-by-midinote 60)) for instance"
  [num]
  (get notes-data-by-midinote num))

(defn name-to-midinote [name]
  (:midinote (get notes-data name)))

(defn transpose
  "Transpose a note by an amount; but this may not give the best enharmonic
  match (so transpose b 1 may give bis, not c)"
  [note amt]
  (let [nmn (+ (:midinote note) amt)
        mn  (notes-by-midinote nmn)]
    (first mn)))

(defn enharmonic-equal? [n1 n2]
  (= (:midinote n1) (:midinote n2)))

(defn interval-from-c
  "Return a positive interval from c.
  So (interval-from-c :cis) is 1 (interval-from-c :b) is 11"
  [note]
  (let [[tone & accidental] (name note)
        accidental-values   {"ees" -2 "es" -1 "" 0 "is" 1 "iis" 2}
        offset-from-c       {:c 0 :d 2 :e 4 :f 5 :g 7 :a 9 :b 11}]
    (mod (+ (offset-from-c (keyword (str tone)))
            (get accidental-values (reduce str accidental)))
         12)))

(defn interval-between
  "From n1 to the nearest n2 above it"
  [n1 n2]
  (mod (- (interval-from-c n2) (interval-from-c n1)) 12))

(defn ^:private intervals-to-degrees [intervals]
  (reductions (fn [prior ivl]
                (cond (= ivl 'WH) (+ 3 prior)
                      (= ivl 'W)  (+ 2 prior)
                      (= ivl 'H)  (inc prior)))
              0
              intervals))

(def ^:private scales-data
  (let [mode-r (fn [n]
                 (let [maj '(W W H W W W H)]
                   (concat (drop n maj) (take n maj))))]
    {:major          (intervals-to-degrees '(W W H W W W H))
     :natural-minor  (intervals-to-degrees '(W H W W H W W))
     :harmonic-minor (intervals-to-degrees '(W H W W H WH H))
     :ionian         (intervals-to-degrees (mode-r 0))
     :dorian         (intervals-to-degrees (mode-r 1))
     :phrygian       (intervals-to-degrees (mode-r 2))
     :lydian         (intervals-to-degrees (mode-r 3))
     :mixolydian     (intervals-to-degrees (mode-r 4))
     :aeolian        (intervals-to-degrees (mode-r 5))
     :locrian        (intervals-to-degrees (mode-r 6))}))

(defn scale [type] (type scales-data))

(defn known-scales [] (keys scales-data))

(defn scale-to-notes [in-scale base]
  (loop [scale (rest in-scale)
         res   [base]]
    (if (empty? scale)
      res
      (let [prior           (last res)
            next-mn         (+ (:midinote base) (first scale))
            next-note-cands (notes-by-midinote next-mn)
            next-notebase   (-> (name (:notebase prior))
                                (#(char (inc (int (first %)))))
                                (#(if (= % \h) \a %))
                                str
                                keyword)
            filtered (filter #(= (:notebase %) next-notebase) next-note-cands)
            ;; FIX: the fallback branch (first next-note-cands) had been lost,
            ;; so `best` was always nil; restore the punt to the first
            ;; candidate when no same-letter spelling exists.
            best     (if (empty? filtered)
                       (first next-note-cands)
                       (first filtered))]
        (recur (rest scale) (conj res best))))))

(defn scale-pitches
  "Return a set of pitches for a scale name. So (scale-pitches :c4 :major)
  gives you a c major scale starting at c4"
  [base scale-name]
  (map :note (scale-to-notes (scale scale-name) (note-by-name base))))
59694d832021dc293eea784aa437561b0ca6704ad37ef1f2d9eae5cdd32b1641
JadedCtrl/cl-ipfs-api2
main.lisp
;; This file is free software: you can redistribute it and/or modify it under the terms of version 3 of the GNU General Public License as published by the Free Software Foundation . ;; ;; This program is distributed in the hope that it will be useful, ;; but WITHOUT ANY WARRANTY; without even the implied warranty of ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;; GNU General Public License for more details. (in-package :cl-ipfs-api2) (defparameter *api-host* ":5001") (defparameter *api-root* "/api/v0/") correlates to the env variable $ IPFS_PATH , ;; only necessary if yours deviates from the ;; default path. only used for #'pubsub-* ;; ————————————————————————————————————— ;; BASE STRING LIST [: LIST : BOOLEAN : SYMBOL ] → STRING | ALIST | ( NIL STRING ) (defun ipfs-call (call arguments &key (parameters nil) (want-stream nil) (method :POST)) "Make an IPFS HTTP API call. Quite commonly used. Some calls return strings/raw data, and others return JSON. When strings/arbitrary data are recieved, they're returned verbatim. But, when JSON is returned, it is parsed into a hashtable. If the JSON is 'error JSON', I.E., it signals that an error has been recieved, two values are returned: NIL and the string-error-message." 
(let ((result (multiple-value-list (drakma:http-request (make-call-url call arguments) :method method :url-encoder #'ipfs::url-encode :parameters parameters :want-stream want-stream)))) (if want-stream (car result) (apply #'process-result result)))) (defun process-result (body status-code headers uri http-stream must-close status-text) (declare (ignore uri http-stream must-close status-text)) (let* ((result (cond ((stringp body) body) ((vectorp body) (flexi-streams:octets-to-string body)))) (result (if (search "application/json" (cdr (assoc :content-type headers))) (unless (empty-string-p result) (simplify (yason:parse result :object-as :alist))) result))) (if (eql 200 status-code) result (values nil (if (stringp result) result (ignore-errors (cdr (s-assoc "Message" result)))))))) ;; STRING LIST &key STRING STRING → STRING (defun make-call-url (call arguments &key (host *api-host*) (root *api-root*)) "Create the URL of an API call, as per the given arguments. Symbols are assumed to be something like 'T (so boolean), nil likewise. Arguments should look like this: (('recursive' nil)('name' 'xabbu'))" (let ((call-url (string+ host root call)) (first-arg 'T)) (mapcar (lambda (arg-pair) (when arg-pair (setq call-url (string+ call-url (if first-arg "?" "&") (first arg-pair) "=" (cond ((not (second arg-pair)) "false") ((symbolp (second arg-pair)) "true") ('T (second arg-pair))))) (setq first-arg nil))) arguments) call-url)) ;; ————————————————————————————————————— ;; ROOT CALLS ;; PATHNAME → (HASH-STRING SIZE-NUMBER) || (NIL STRING) (defun add (pathname &key (pin 't) (only-hash nil) (cid-version 0)) "Add a file to IPFS, return it's hash. /ipns/docs.ipfs.io/reference/api/http/#api-v0-add" (ipfs-call "add" `(("pin" ,pin) ("only-hash" ,only-hash) ("cid-version" ,cid-version)) :parameters `(("file" . ,pathname)))) ;; STRING :NUMBER :NUMBER → STRING || (NIL STRING) (defun cat (ipfs-path &key (offset nil) (length nil)) "Return a string of the data at the given IPFS path. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-cat" (ipfs-call "cat" `(("arg" ,ipfs-path) ,(if offset `("offset" ,offset)) ,(if length `("length" ,length))))) STRING [: BOOLEAN : BOOLEAN ] → ALIST || ( NIL STRING ) (defun ls (ipfs-path &key (resolve-type 't) (size 't)) "Returns all sub-objects (IPFS hashes) under a given IPFS/IPNS directory path. Returns as an associative list. /ipns/docs.ipfs.io/reference/api/http/#api-v0-ls" (ipfs-call "ls" `(("arg" ,ipfs-path) ("resolve-type" ,resolve-type) ("size" ,size)))) ;; STRING PATHNAME → NIL (defun dl (ipfs-path out-file) "Write an IPFS file directly to a file on the local file-system. Non-recursive, in the case of directories… for now. (Thanks to this thread ♥: ) Is a general replacement for the 'get' API call, but actually just uses the 'cat' call, due to some issues with using 'get'. Will not actually return NIL when an error is reached (like other functions) with an error-message, it'lll just write the error JSON to the file. Whoops." (with-open-file (out-stream out-file :direction :output :element-type '(unsigned-byte 8) :if-exists :overwrite :if-does-not-exist :create) (let ((in-stream (ipfs-call "cat" `(("arg" ,ipfs-path)) :want-stream 'T))) (awhile (read-byte in-stream nil nil) (write-byte it out-stream)) (close in-stream)))) ;; —————————————————— [ STRING ] → ALIST (defun id (&optional peer-id) "Return info on a node by ID. Returns as an associative list, the public key, agent version, etc. If no node ID is specified, then your own is assumed. /ipns/docs.ipfs.io/reference/api/http/#api-v0-id" (ipfs-call "id" `(,(if peer-id (list "arg" peer-id))))) ;; —————————————————— ;; STRING → STRING || (NIL STRING (defun dns (domain &key (recursive 't)) "Resolve a domain into a path (usually /ipfs/). 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-dns" (ipfs-call "dns" `(("arg" ,domain) ("recursive" ,recursive)))) STRING [: BOOLEAN : NUMBER : NUMBER ] → STRING || ( NIL STRING ) (defun resolve (ipfs-path &key (recursive 't) (dht-record-count nil) (dht-timeout 30)) "Resolve a given name to an IPFS path." (ipfs-call "resolve" `(("arg" ,ipfs-path) ("recursive" ,recursive) ,(if dht-record-count (list "dht-record-count" dht-record-count)) ("dht-timeout" ,(string+ dht-timeout "s"))))) ;; —————————————————— ;; NIL → NIL (defun shutdown () "Shut down the connected IPFS node. /ipns/docs.ipfs.io/reference/api/http/#api-v0-shutdown" (ipfs-call "shutdown" '())) ;; ————————————————————————————————————— ;; BITSWAP CALLS ;; STRING → ALIST || (NIL STRING) (defun bitswap-ledger (peer-id) "Show the current ledger for a peer. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-ledger" (ipfs-call "bitswap/ledger" `(("arg" ,peer-id)))) ;; NIL → NIL (defun bitswap-reprovide () "Trigger the reprovider. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-reprovide" (ipfs-call "bitswap/reprovide" '())) ;; NIL → ALIST || (NIL STRING) (defun bitswap-stat () "Show diagnostic info on the bitswap agent. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-stat" (ipfs-call "bitswap/stat" '())) ;; STRING → ALIST || (NIL STRING) (defun bitswap-wantlist (&optional peer-id) "Show blocks currently on the wantlist. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-wantlist" (ipfs-call "bitswap/wantlist" `(,(if peer-id (list "peer" peer-id))))) ;; ————————————————————————————————————— ;; BLOCK CALLS ;; STRING → STRING || (NIL STRING) (defun block-get (hash) "Get a raw IPFS block. /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-get" (ipfs-call "block/get" `(("arg" ,hash)))) ;; PATHNAME [:STRING :STRING :NUMBER :BOOLEAN] → ALIST || (NIL STRING) (defun block-put (pathname &key (format nil) (mhtype "sha2-256") (mhlen -1) (pin nil)) "Store input as an IPFS block. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-block-put" (ipfs-call "block/put" `(,(if format (list "format" format)) ("mhtype" ,mhtype) ("mhlen" ,mhlen) ("pin" ,pin)) :parameters `(("data" . ,pathname)))) ;; STRING → NIL (defun block-rm (hash &key (force nil)) "Delete an IPFS block(s). /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-rm" (ipfs-call "block/rm" `(("arg" ,hash) ,(if force (list "force" force)))) nil) ;; STRING → ALIST || (NIL STRING) (defun block-stat (hash) "Print info about a raw IPFS block /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-stat" (ipfs-call "block/stat" `(("arg" ,hash)))) ;; ————————————————————————————————————— BOOTSTRAP CALLS ;; NIL → LIST || (NIL STRING) (defun bootstrap () "Return a list of bootstrap peers /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap" (cdr (ipfs-call "bootstrap" '()))) ;; NIL → LIST || (NIL STRING) (defun bootstrap-list () "Return a list of bootstrap peers /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-list" (bootstrap)) ;; STRING → LIST || (NIL STRING) (defun bootstrap-add (peer) "Add a peer to the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-add" (cdr (ipfs-call "bootstrap/add" `(("arg" ,peer))))) ;; NIL → LIST || (NIL STRING) (defun bootstrap-add-default () "Add default peers to the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-add-default" (cdr (ipfs-call "bootstrap/add/default" '()))) ;; STRING → LIST || (NIL STRING) (defun bootstrap-rm (peer) "Remove a peer from the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-rm" (cdr (ipfs-call "bootstrap/rm" `(("arg" ,peer))))) ;; NIL → LIST || (NIL STRING) (defun bootstrap-rm-all () "Remove a peer from the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-rm" (cdr (ipfs-call "bootstrap/rm/all" '()))) ;; ————————————————————————————————————— ;; CID CALLS ;; STRING → STRING || (NIL STRING) (defun cid-base32 (cid) "Convert a CID into Base32 CIDv1 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-cid-base32" (let ((result (ipfs-call "cid/base32" `(("arg" ,cid))))) (if (zerop (length (cdr (s-assoc "ErrorMsg" result)))) (cdr (s-assoc "Formatted" result)) (values nil (cdr (s-assoc "ErrorMsg" result)))))) ;; NIL → ALIST || (NIL STRING) (defun cid-bases () "Return a associative list of available bases in plist format; each base's name is a assigned a given code-number. ((CODE-A . NAME-A) (CODE-B . NAME-B) … (CODE-N . NAME-N)) /ipns/docs.ipfs.io/reference/api/http/#api-v0-cid-bases" (ipfs-call "cid/bases" '())) ;; ————————————————————————————————————— ;; CONFIG CALLS STRING [: STRING : BOOLEAN : BOOLEAN ] → STRING || ( NIL STRING ) (defun config (key &key (value nil) (bool nil) (json nil)) "Get/set a config key's value. /ipns/docs.ipfs.io/reference/api/http/#api-v0-config" (cdr (s-assoc "Value" (ipfs-call "config" `(("arg" ,key) ,(if value (list "value" value)) ("bool" ,bool) ("json" ,json)))))) NIL → ALIST (defun config-show () "Return the config file's contents, in alist-format… y'know, with several sub-alists. Doesn't quite line up with #api-v0-config-show /ipns/docs.ipfs.io/reference/api/http/#api-v0-config-show" (ipfs-call "config/show" '())) ;; STRING → STRING || (NIL STRING) (defun config-get (key) "Get a config key's value. Doesn't map with any existant API call; it's just a convenience wrapper around #'config." (config key)) ;; STRING → STRING || (NIL STRING) (defun config-set (key value &key (bool nil) (json nil)) "Set a config key's value. Doesn't map with any existant API call; it's just a convenience wrapper around #'config." (config key :value value :bool bool :json json)) ;; ————————————————————————————————————— DAG CALLS ;; STRING → STRING || (NIL STRING) (defun dag-get (dag-node) "Get a dag node from IPFS. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-dag-get" (ipfs-call "dag/get" `(("arg" ,dag-node)))) ;; STRING [:STRING :STRING :BOOLEAN] → STRING || (NIL STRING (defun dag-put (dag-node &key (format "cbor") (input-enc "json") (pin 'T)) "Add a dag node to IPFS. Returns CID string. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dag-put" (ipfs-call "dag/put" `(("arg" ,dag-node) ("format" ,format) ("input-enc" ,input-enc) ("pin" ,pin)))) ( gethash " / " ( gethash " Cid " result ) ) ) ) ;; STRING → ALIST || (NIL STRING) (defun dag-resolve (path) "Resolve an IPLD block. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dag-resolve" (ipfs-call "dag/resolve" `(("arg" ,path)))) ;; ————————————————————————————————————— ;; DHT CALLS ;; STRING → LIST || (NIL STRING) (defun dht-findpeer (peer-id) "Find the multiaddresses associated with a peer ID. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-findpeer" (cdr (s-assoc "Addrs" (cadr (s-assoc "Responses" (ipfs-call "dht/findpeer" `(("arg" ,peer-id)))))))) ;; STRING [:NUMBER] → LIST || (NIL STRING) (defun dht-findprovs (key &key (provider-quantity 20)) "Find peers that can provide a specific value, given a key. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-findprovs" (ipfs-call "dht/findprovs" `(("arg" ,key)("num-providers" ,provider-quantity)))) ;; STRING → LIST || (NIL STRING) (defun dht-get (key) "Query the routing system for a key's best value. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-get" (cdr (s-assoc "Addrs" (cadr (s-assoc "Responses" (ipfs-call "dht/get" `(("arg" ,key)))))))) STRING [: BOOLEAN ] → NIL (defun dht-provide (key &key (recursive nil)) "Announce to the network that you're providing the given values. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-provide" (ipfs-call "dht/provide" `(("arg" ,key)("recursive" ,recursive)))) ;; STRING STRING → NIL || (NIL STRING) (defun dht-put (key value) "Write a key-value pair to the routing system. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-put" (ipfs-call "dht/put" `(("arg" ,key)("arg" ,value)))) ;; STRING → ALIST || (NIL STRING) (defun dht-query (peer-id) "Find the closest peer IDs to the given one by querying the DHT. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-query" (cdr (s-assoc "Responses" (ipfs-call "dht/query" `(("arg" ,peer-id)))))) ;; ————————————————————————————————————— ;; DIAG CALLS NIL → ALIST (defun diag-cmds () "List commands run on this IPFS node. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-cmds" (ipfs-call "diag/cmds" NIL)) ;; NIL → NIL || (NIL STRING) (defun diag-cmds-clear () "Clear inactive requests from the log. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-cmds-clear" (ipfs-call "diag/cmds/clear" NIL)) ;; NUMBER → NIL || (NIL STRING) (defun diag-cmds-set-time (time) "Set how long to keep inactive requests in the log. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-cmds-set-time" (ipfs-call "diag/cmds/set-time" `(("arg" ,time)))) ;; NIL → STRING || (NIL STRING) (defun diag-sys () "Print system diagnostic info. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-sys" (ipfs-call "diag/sys" NIL)) ;; ————————————————————————————————————— ;; FILE CALLS (defun file-ls (path) "List directory contents for UNIX filesystem objects. /ipns/docs.ipfs.io/reference/api/http/#api-v0-file-ls" (cdr (s-assoc "Objects" (ipfs-call "file/ls" `(("arg" ,path)))))) ;; ————————————————————————————————————— ;; FILES CALLS ;; STRING [:NUMBER :STRING] → NIL (defun files-chcid (path &key (cid-version nil) (hash nil)) "Change the cid version or hash function of the root node of a given path. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-chcid" (ipfs-call "files/chcid" `(("arg" ,path) ,(if cid-version `("cid-version" ,cid-version)) ,(if hash (list "hash" hash))))) ;; STRING STRING → NIL || (NIL STRING) (defun files-cp (source destination) "Copy files into mfs. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-files-cp"
  (ipfs-call "files/cp" `(("arg" ,source) ("arg" ,destination))))

;; STRING → STRING
(defun files-flush (&optional (path "/"))
  "Flush a given path's data to disk. Returns CID.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-flush"
  (ipfs-call "files/flush" `(("arg" ,path))))

;; [STRING] → ALIST || (NIL STRING)
(defun files-ls (&optional (path "/"))
  "List directories in local mutable namespace.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-ls"
  (ipfs-call "files/ls" `(("arg" ,path))))

;; STRING [:BOOLEAN :NUMBER :STRING] → NIL || (NIL STRING)
(defun files-mkdir (path &key (parents nil) (cid-version nil) (hash nil))
  "Make a directory.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-mkdir"
  (ipfs-call "files/mkdir"
             `(("arg" ,path)
               ,(if parents (list "parents" parents))
               ,(if cid-version `("cid-version" ,cid-version))
               ,(if hash (list "hash" hash)))))

;; STRING STRING → NIL || (NIL STRING)
(defun files-mv (source destination)
  "Move a file.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-mv"
  (ipfs-call "files/mv" `(("arg" ,source) ("arg" ,destination))))

;; STRING [:NUMBER :NUMBER] → STRING || (NIL STRING)
(defun files-read (path &key (offset nil) (max nil))
  "Read a file in given mfs.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-read"
  (ipfs-call "files/read"
             `(("arg" ,path)
               ,(if offset (list "offset" offset))
               ,(if max (list "max" max)))))

;; STRING [:BOOLEAN :BOOLEAN] → NIL || (NIL STRING)
(defun files-rm (path &key (recursive nil) (force nil))
  "Remove a given file.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-rm"
  ;; BUGFIX: this previously hit the \"files/read\" endpoint and referenced
  ;; the unbound variable SOURCE; it now calls \"files/rm\" with PATH.
  (ipfs-call "files/rm"
             `(("arg" ,path) ("recursive" ,recursive) ("force" ,force))))

;; STRING → ALIST || (NIL STRING)
(defun files-stat (path)
  "Get status information for a given file.
/ipns/docs.ipfs.io/reference/api/http/#api-v0-files-rm" (ipfs-call "files/stat" `(("arg" ,path)))) PATHNAME STRING [: NUMBER : BOOLEAN : BOOLEAN : BOOLEAN : NUMBER : BOOLEAN ;; :NUMBER :STRING] ;; → NIL || (NIL STRING) (defun files-write (path-or-string dest-path &key (offset nil) (create nil) (parents nil) (truncate nil) (count nil) (raw-leaves nil) (cid-version nil) (hash nil)) "Write to a given file. First parameter can be a string or a path to a local file. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-rm" (let ((result (multiple-value-list (drakma:http-request (make-call-url "files/write" `(("arg" ,dest-path) ("create", create) ("parents" ,parents) ("truncate" ,truncate) ("raw-leaves" ,raw-leaves) ,@(when offset (list "offset" offset)) ,@(when count (list "count" count)) ,@(when cid-version `("cid-version" ,cid-version)) ,@(when hash (list "hash" hash)))) :method :post :parameters `(("data" . ,path-or-string)) :form-data t)))) (apply #'process-result result))) (defmacro with-files-write ((stream dest-path &rest params) &body body) "A convenience macro for files-write. In the body of the macro, any writes to the stream named by STREAM will be sent to the mfs file at DEST-PATH. PARAMS will be passed directly to the files-write function." (let ((fn (gensym "FN"))) ;;FIXME: Would be nice to write the stream directly to files-write. ;; This feels a little less efficient. `(uiop:with-temporary-file (:stream ,stream :pathname ,fn) ,@body :close-stream (files-write ,fn ,dest-path ,@params)))) ;; ————————————————————————————————————— FILESTORE CALLS ;; NIL → ALIST || (NIL STRING) (defun filestore-dups () "List blocks that're both in the filestore and standard block storage. /ipns/docs.ipfs.io/reference/api/http/#api-v0-filestore-dups" (ipfs-call "filestore/dups" '())) ;; [STRING] → ALIST || (NIL STRING) (defun filestore-ls (&optional cid) "List objects in filestore. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-filestore-ls" (ipfs-call "filestore/ls" `(,(if cid (list "arg" cid))))) ;; [STRING] → ALIST || (NIL STRING) (defun filestore-verify (&optional cid) "Verify objects in filestore. /ipns/docs.ipfs.io/reference/api/http/#api-v0-filestore-verify" (ipfs-call "filestore/verify" `(,(if cid (list "arg" cid))))) ;; ————————————————————————————————————— ;; KEY CALLS ;; STRING [:STRING :NUMBER] → ALIST || (NIL STRING) (defun key-gen (name &key (type nil) (size nil)) "Create a new keypair. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-gen" (ipfs-call "key/gen" `(("name" ,name) ,(if type (list "type" type)) ,(if size (list "size" size))))) ;; NIL → ALIST || (NIL STRING) (defun key-list () "List all local keypairs. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-list" (ipfs-call "key/list" '())) STRING STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun key-rename (old-name new-name &key (force nil)) "Rename a local keypair. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-rename" (ipfs-call "key/rename" `(("arg" ,old-name) ("arg" ,new-name) ("force" ,force)))) ;; STRING → ALIST || (NIL STRING) (defun key-remove (name) "Remove a keypair, based on name. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-remove" (ipfs-call "key/remove" `(("arg" ,name)))) ;; ————————————————————————————————————— LOG CALLS ;; STRING STRING → STRING || (NIL STRING) (defun log-level (subsystem level) "Change the logging level of a subsystem. /ipns/docs.ipfs.io/reference/api/http/#api-v0-log-level" (cdr (s-assoc "Message" (ipfs-call "log/level" `(("arg" ,subsystem)("arg" ,level)))))) ;; NIL → LIST || (NIL STRING) (defun log-ls () "List the logging subsystems. /ipns/docs.ipfs.io/reference/api/http/#api-v0-log-ls" (cdr (ipfs-call "log/ls" '()))) ;; NIL → STRING || (NIL STRING) (defun log-tail () "Read the event log. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-log-tail"
  ;; BUGFIX: dropped a stray trailing RESULT symbol that referenced an
  ;; unbound variable after the ipfs-call form.
  (ipfs-call "log/tail" '()))

;; —————————————————————————————————————
;; NAME CALLS

;; STRING [:BOOLEAN :STRING :BOOLEAN :STRING] → ALIST || (NIL STRING)
(defun name-publish (ipfs-path &key (resolve 'T) (lifetime "24h")
                               (allow-offline 'T) (ttl nil))
  "Publish an IPNS name-- associate it with an IPFS path.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-publish"
  (ipfs-call "name/publish"
             `(("arg" ,ipfs-path) ("resolve" ,resolve)
               ("lifetime" ,lifetime) ("allow-offline" ,allow-offline)
               ,(if ttl (list "ttl" ttl)))))

;; STRING → STRING || (NIL STRING)
(defun name-pubsub-cancel (name)
  "Cancel subscription to a name.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-pubsub-cancel"
  (cdr (s-assoc "Cancelled"
                (ipfs-call "name/pubsub/cancel" `(("arg" ,name))))))

;; NIL → STRING || (NIL STRING)
(defun name-pubsub-state ()
  "Query the state of IPNS pubsub.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-pubsub-state"
  (cdr (s-assoc "Enabled" (ipfs-call "name/pubsub/state" '()))))

;; NIL → STRING || (NIL STRING)
(defun name-pubsub-subs ()
  "Show current name subscriptions.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-pubsub-subs"
  (cdr (s-assoc "Strings" (ipfs-call "name/pubsub/subs" '()))))

;; STRING [:BOOLEAN :BOOLEAN :NUMBER :STRING] → STRING || (NIL STRING)
(defun name-resolve (name &key (recursive 't) (nocache "")
                          (dht-record-count nil) (dht-timeout nil))
  "Resolve a given IPNS name.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-resolve"
  (ipfs-call "name/resolve"
             `(("arg" ,name) ("recursive" ,recursive)
               ,(when (not (empty-string-p nocache))
                  (list "nocache" nocache))
               ,(when dht-record-count
                  (list "dht-record-count" dht-record-count))
               ,(when dht-timeout (list "dht-timeout" dht-timeout)))))

;; —————————————————————————————————————
;; OBJECT CALLS

;; STRING → STRING || (NIL STRING)
(defun object-data (key)
  "Output the raw data of an IPFS object.
/ipns/docs.ipfs.io/reference/api/http/#api-v0-object-data" (ipfs-call "object/data" `(("arg" ,key)))) ;; STRING STRING → ALIST || (NIL STRING) (defun object-diff (object-a object-b) "Display the differences between two IPFS objects. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-diff" (ipfs-call "object/diff" `(("arg" ,object-a)("arg" ,object-b)))) ;; STRING [:STRING] → STRING || (NIL STRING) (defun object-get (key &key (data-encoding "text")) "Get and serialize the named DAG node. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-get" (ipfs-call "object/get" `(("arg" ,key)("data-encoding" ,data-encoding)))) ;; STRING → ALIST || (NIL STRING) (defun object-links (key) "Output the links pointed to by the specified object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-links" (ipfs-call "object/links" `(("arg" ,key)))) ;; [:STRING] → ALIST || (NIL STRING) (defun object-new (&key (template nil)) "Create a new object from an IPFS template. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-new" (ipfs-call "object/new"`(,(if template `("template" ,template))))) STRING STRING STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun object-patch-add-link (hash name object &key (create "")) "Add a link to a given object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-patch-add-link" (ipfs-call "object/patch/add-link" `(("arg" ,hash)("arg" ,name)("arg" ,object) ,(when (not (empty-string-p create)) `("create" ,create))))) ;; STRING STRING → ALIST || (NIL STRING) (defun object-patch-rm-link (hash name) "Remove a link from a given object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-patch-rm-link" (ipfs-call "object/patch/rm-link" `(("arg" ,hash)("arg" ,name)))) ;; STRING → ALIST || (NIL STRING) (defun object-stat (key) "Get stats for a DAG node. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-object-stat" (ipfs-call "object/stat" `(("arg" ,key)))) ;; ————————————————————————————————————— ;; P2P CALLS [: BOOLEAN : STRING : STRING : STRING : STRING ] → NUMBER || ( NIL STRING ) (defun p2p-close (&key (all "") (protocol nil) (listen-address nil) (target-address nil)) "Stop listening for new connections to forward. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-close" (ipfs-call "p2p/close" `(,(when (not (empty-string-p all)) `("all" ,all)) ,(when protocol `("protocol" ,protocol)) ,(when listen-address `("listen-address" ,listen-address)) ,(when target-address `("target-address" ,target-address))))) STRING STRING STRING [: BOOLEAN ] → STRING || ( NIL STRING ) (defun p2p-forward (protocol listening-endpoint target-endpoint &key (allow-custom-protocol "")) "Forward connections to libp2p service. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-forward" (ipfs-call "p2p/forward" `(("arg" ,protocol)("arg" ,listening-endpoint) ("arg" ,target-endpoint) ,(when (not (empty-string-p allow-custom-protocol)) `("allow-custom-protocol" ,allow-custom-protocol))))) STRING STRING [: BOOLEAN : BOOLEAN ] → STRING || ( NIL STRING ) (defun p2p-listen (protocol target-endpoint &key (allow-custom-protocol "") (report-peer-id "")) "Create libp2p service. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-listen" (ipfs-call "p2p/listen" `(("arg" ,protocol)("arg" ,target-endpoint) ,(when (not (empty-string-p allow-custom-protocol)) `("allow-custom-protocol" ,allow-custom-protocol)) ,(when (not (empty-string-p report-peer-id)) `("report-peer-id" ,report-peer-id))))) ;; NIL → ALIST || (NIL STRING) (defun p2p-ls () "List active p2p listeners. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-ls" (ipfs-call "p2p/ls" '())) ;; [:STRING :BOOLEAN] → STRING || (NIL STRING) (defun p2p-stream-close (&key (identifier nil) (all "")) "Close an active p2p stream. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-stream-close" (ipfs-call "p2p/stream/close" `(,(when identifier `("arg" ,identifier)) ,(when (not (empty-string-p all)) `("all" ,all))))) ;; NIL → ALIST || (NIL STRING) (defun p2p-stream-ls () "List active p2p streams. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-stream-ls" (ipfs-call "p2p/stream/ls" '())) ;; ————————————————————————————————————— PIN CALLS STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun pin-add (path &key (recursive 'T)) "Pin an object to local storage. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-add" (ipfs-call "pin/add" `(("arg" ,path)("recursive" ,recursive)))) ;; [:STRING :STRING] → ALIST || (NIL STRING) (defun pin-ls (&key (path nil) (type "all")) "List objects pinned to local storage. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-ls" (let ((res (ipfs-call "pin/ls" `(,(when path `("arg" ,path)) ("type" ,type))))) (if (equal res '("Keys")) nil res))) STRING [: BOOLEAN ] → ALIAS || ( NIL STRING ) (defun pin-rm (path &key (recursive 'T)) "Remove pinned objects from local storage. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-rm" (ipfs-call "pin/rm" `(("arg" ,path)("recursive" ,recursive)))) STRING STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun pin-update (old-path new-path &key (unpin 'T)) "Update a recursive pin. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-update" (ipfs-call "pin/update" `(("arg" ,old-path)("arg" ,new-path)("unpin" ,unpin)))) ;; NIL → ALIST || (NIL STRING) (defun pin-verify () "Verify that recursive pins are complete. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-verify" (ipfs-call "pin/verify" '())) ;; ————————————————————————————————————— PUBSUB CALLS ;; STRING [:STRING] → PROCESS-INFO-STREAM (defun pubsub-sub (topic &key (env "")) "Subscribe to a given pubsub topic— this function requires go-ipfs to be installed on the current machine, and that `ipfs` is in the current $PATH. 
This probably will only work on *nix systems (sorry Windows nerds). Returns a uiop/launch-program::process-info socket-- can be used in conjunction with the #'pubsub-sub-* functions, or with :uiop/launch-program's functions. A system-dependent replacement for /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-sub" (when (and *ipfs-root* (empty-string-p env)) (setq env (string+ "env IPFS_PATH=" *ipfs-root* " > /dev/null;"))) (uiop:launch-program (string+ env "ipfs pubsub sub " topic) :output :stream)) PROCESS - INFO - STREAM → FD - STREAM (defun pubsub-sub-process (pubsub-socket) "Turn a uiop process-info-stream ('pubsub stream') into a fd-stream that is #'read-char-able, etc." (uiop/launch-program:process-info-output pubsub-socket)) ;; PROCESS-INFO-STREAM → CHARACTER (defun pubsub-sub-read-char (pubsub-socket) "Process a 'pubsub stream' (process-info-stream) and #'readchar it." (read-char (pubsub-sub-process pubsub-socket))) PROCESS - INFO - STREAM → BOOLEAN (defun pubsub-sub-listen (pubsub-socket) "Process a 'pubsub stream' (process-info-stream) and #'listen it." (listen (pubsub-sub-process pubsub-socket))) ;; PROCESS-INFO-STREAM → NIL (defun pubsub-sub-close (pubsub-socket) "Close a 'pubsub stream' (process-info-stream) and related processes." (and (uiop/launch-program:terminate-process pubsub-socket :urgent 't) (uiop/launch-program:close-streams pubsub-socket))) ;; ————————————————— ;; STRING STRING [:STRING] → NIL (defun pubsub-pub (topic string &key (env "")) "Publish a string to a given pubsub topic. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-pub" (when (and *ipfs-root* (empty-string-p env)) (setq env (string+ "env IPFS_PATH=" *ipfs-root* " > /dev/null;"))) (uiop:run-program (string+ env "ipfs pubsub pub " topic " \"" string "\"")) nil) ;; ————————————————— ;; NIL → LIST || (NIL STRING) (defun pubsub-ls () "Return a list of subscribed topics. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-ls"
  ;; CONSISTENCY FIX: strip the "Strings" key so a bare list is returned,
  ;; matching the docstring and sibling calls such as NAME-PUBSUB-SUBS.
  (cdr (s-assoc "Strings" (ipfs-call "pubsub/ls" '()))))

;; [STRING] → LIST || (NIL STRING)
(defun pubsub-peers (&optional topic)
  "Return a list of peers with pubsub enabled.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-peers"
  ;; CONSISTENCY FIX: as above, return the list itself rather than the
  ;; ("Strings" . list) cons.
  (cdr (s-assoc "Strings"
                (ipfs-call "pubsub/peers"
                           `(,(if topic (list "arg" topic)))))))

;; —————————————————————————————————————
;; REFS CALLS

;; STRING [:BOOLEAN :BOOLEAN :NUMBER] → ALIST || (NIL STRING)
(defun refs (path &key (unique "") (recursive "") (max-depth -1))
  "List links (references) from an object.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-refs"
  (ipfs-call "refs"
             `(("arg" ,path) ("max-depth" ,max-depth)
               ,(if (not (empty-string-p recursive))
                    `("recursive" ,recursive))
               ,(if (not (empty-string-p unique)) `("unique" ,unique)))))

;; NIL → ALIST || (NIL STRING)
(defun refs-local ()
  "List all local references.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-refs-local"
  (ipfs-call "refs/local" '()))

;; —————————————————————————————————————
;; REPO CALLS

;; NIL → STRING || (NIL STRING)
(defun repo-fsck ()
  "Remove repo lock-files.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-fsck"
  (cdr (s-assoc "Message" (ipfs-call "repo/fsck" '()))))

;; NIL → ALIST || (NIL STRING)
(defun repo-gc ()
  "Perform garbage collection on the repo.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-gc"
  (ipfs-call "repo/gc" '()))

;; NIL → ALIST || (NIL STRING)
(defun repo-stat ()
  "Get stats for the current repo.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-stat"
  (ipfs-call "repo/stat" '()))

;; NIL → ALIST || (NIL STRING)
(defun repo-verify ()
  "Verify that all repo blocks aren't corrupted.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-verify"
  (ipfs-call "repo/verify" '()))

;; NIL → NUMBER || (NIL STRING)
(defun repo-version ()
  "Show the repo version.
/ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-version"
  (parse-integer (ipfs-call "repo/version" '())))

;; —————————————————————————————————————
;; STATS CALLS

;; NIL → ALIST || (NIL STRING)
(defun stats-bitswap ()
  "Show diagnostics on bitswap.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-stats-bitswap"
  (ipfs-call "stats/bitswap" '()))

;; [:STRING :STRING :STRING] → ALIST || (NIL STRING)
(defun stats-bw (&key (peer nil) (proto nil) (interval nil))
  "Return bandwidth information.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-stats-bw"
  ;; BUGFIX: this previously queried the \"stats/bitswap\" endpoint;
  ;; bandwidth statistics live at \"stats/bw\".
  (ipfs-call "stats/bw"
             `(,(when peer `("peer" ,peer))
               ,(when proto `("proto" ,proto))
               ,(when interval `("interval" ,interval))
               ,(when interval `("poll" 'T)))))

;; NIL → ALIST || (NIL STRING)
(defun stats-repo ()
  "Show diagnostics on current repo.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-stats-repo"
  (ipfs-call "stats/repo" '()))

;; —————————————————————————————————————
;; SWARM CALLS

;; NIL → ALIST || (NIL STRING)
(defun swarm-addrs ()
  "List known addresses.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-addrs"
  (ipfs-call "swarm/addrs" '()))

;; NIL → LIST || (NIL STRING)
(defun swarm-addrs-listen ()
  "List interface listening addresses.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-addrs-listen"
  (cdr (ipfs-call "swarm/addrs/listen" '())))

;; NIL → LIST || (NIL STRING)
(defun swarm-addrs-local ()
  "List local addresses.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-addrs-local"
  (cdr (ipfs-call "swarm/addrs/local" '())))

;; STRING → LIST || (NIL STRING)
(defun swarm-connect (address)
  "Open connection to a given address.
  /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-connect"
  (cdr (ipfs-call "swarm/connect" `(("arg" ,address)))))

;; STRING → LIST || (NIL STRING)
(defun swarm-disconnect (address)
  "Close connection to a given address.
/ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-disconnect" (cdr (ipfs-call "swarm/disconnect" `(("arg" ,address))))) ;; NIL → LIST || (NIL STRING) (defun swarm-filters () "List address filters. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-filters" (ipfs-call "swarm/filters" '())) ;; STRING → LIST || (NIL STRING) (defun swarm-filters-add (multiaddr) "Add an address filter. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-filters-add" (ipfs-call "swarm/filters/add" `(("arg" ,multiaddr)))) ;; STRING → LIST || (NIL STRING) (defun swarm-filters-rm (multiaddr) "Remove an address filter. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-filters-rm" (ipfs-call "swarm/filters/rm" `(("arg" ,multiaddr)))) ;; NIL → ALIST || (NIL STRING) (defun swarm-peers () "List peers with open connections. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-peers" (ipfs-call "swarm/peers" '())) ;; ————————————————————————————————————— URLSTORE CALLS STRING [: BOOLEAN : BOOLEAN ] → ALIST || ( NIL STRING ) (defun urlstore-add (url &key (pin 'T) (trickle "")) "Add a URL via urlstore. /ipns/docs.ipfs.io/reference/api/http/#api-v0-urlstore-add" (ipfs-call "urlstore/add"`(("arg" ,url)("pin" ,pin) ,(when (not (empty-string-p trickle)) `("trickle" ,trickle))))) ;; ————————————————————————————————————— ;; VERSION CALLS ;; NIL → LIST || (NIL STRING) (defun version () "Return the current golang, system, repo, and IPFS versions. /ipns/docs.ipfs.io/reference/api/http/#api-v0-version" (ipfs-call "version" nil)) NIL → ALIST (defun version-deps () I.E. , Go version , OS , etc . /ipns/docs.ipfs.io/reference/api/http/#api-v0-version" (ipfs-call "version/deps" '())) ;; ————————————————————————————————————— ;; UTIL ;; LIST -> LIST (defun simplify (list) "'Simplify' a list. Remove any extraneous sublisting [ ((2 3)) -> (2 3) ], and remove extraneous strings in otherwise pure alists, e.g. 
[ (``Apple'' (2 2) (3 3) (4 4)) -> ((2 2) (3 3) (4 4)) ]" (cond ((and (stringp (car list)) (stringp (cdr list))) (cdr list)) ((and (eq 1 (length list)) (consp (car list))) (simplify (car list))) ((and (consp list) (stringp (car list)) (consp (cadr list))) (simplify (cdr list))) ('T list))) ;; STRING LIST (defun s-assoc (key alist) "Get the value of an associative list using a string key." (assoc key alist :test #'string-equal)) ;; STRING-A STRING-B … STRING-N → STRING (defun string+ (&rest strings) "Combine an arbitrary amount of strings into a single string." (reduce (lambda (a b) (format nil "~A~A" a b)) strings)) STRING → BOOLEAN (defun empty-string-p (string) "Return whether or not a given item is an empty string." (and (stringp string) (zerop (length string)))) ;; STRING → STRING (defun url-encode (string &rest ignored) "Wrap around drakma's url encoder, with a slight change-- instead of using plus-signs for spaces, we want to use %20." ignored (cl-ppcre:regex-replace-all "%2520" (drakma:url-encode (cl-ppcre:regex-replace-all " " string "%20") :utf-8) "%20"))
null
https://raw.githubusercontent.com/JadedCtrl/cl-ipfs-api2/3ee52c80023bcc662f7d01276ea0a5814bd0011b/main.lisp
lisp
This file is free software: you can redistribute it and/or modify This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. only necessary if yours deviates from the default path. only used for #'pubsub-* ————————————————————————————————————— BASE STRING LIST &key STRING STRING → STRING ————————————————————————————————————— ROOT CALLS PATHNAME → (HASH-STRING SIZE-NUMBER) || (NIL STRING) STRING :NUMBER :NUMBER → STRING || (NIL STRING) STRING PATHNAME → NIL —————————————————— —————————————————— STRING → STRING || (NIL STRING —————————————————— NIL → NIL ————————————————————————————————————— BITSWAP CALLS STRING → ALIST || (NIL STRING) NIL → NIL NIL → ALIST || (NIL STRING) STRING → ALIST || (NIL STRING) ————————————————————————————————————— BLOCK CALLS STRING → STRING || (NIL STRING) PATHNAME [:STRING :STRING :NUMBER :BOOLEAN] → ALIST || (NIL STRING) STRING → NIL STRING → ALIST || (NIL STRING) ————————————————————————————————————— NIL → LIST || (NIL STRING) NIL → LIST || (NIL STRING) STRING → LIST || (NIL STRING) NIL → LIST || (NIL STRING) STRING → LIST || (NIL STRING) NIL → LIST || (NIL STRING) ————————————————————————————————————— CID CALLS STRING → STRING || (NIL STRING) NIL → ALIST || (NIL STRING) each base's ————————————————————————————————————— CONFIG CALLS STRING → STRING || (NIL STRING) it's just a convenience wrapper STRING → STRING || (NIL STRING) it's just a convenience wrapper ————————————————————————————————————— STRING → STRING || (NIL STRING) STRING [:STRING :STRING :BOOLEAN] → STRING || (NIL STRING STRING → ALIST || (NIL STRING) ————————————————————————————————————— DHT CALLS STRING → LIST || (NIL STRING) STRING [:NUMBER] → LIST || (NIL STRING) STRING → LIST || (NIL STRING) STRING STRING → NIL || (NIL STRING) STRING → ALIST || (NIL STRING) ————————————————————————————————————— 
DIAG CALLS NIL → NIL || (NIL STRING) NUMBER → NIL || (NIL STRING) NIL → STRING || (NIL STRING) ————————————————————————————————————— FILE CALLS ————————————————————————————————————— FILES CALLS STRING [:NUMBER :STRING] → NIL STRING STRING → NIL || (NIL STRING) STRING → STRING [STRING] → ALIST || (NIL STRING) STRING STRING → NIL || (NIL STRING) STRING [:NUMBER :NUMBER] → STRING || (NIL STRING) STRING → ALIST || (NIL STRING) :NUMBER :STRING] → NIL || (NIL STRING) FIXME: Would be nice to write the stream directly to files-write. This feels a little less efficient. ————————————————————————————————————— NIL → ALIST || (NIL STRING) [STRING] → ALIST || (NIL STRING) [STRING] → ALIST || (NIL STRING) ————————————————————————————————————— KEY CALLS STRING [:STRING :NUMBER] → ALIST || (NIL STRING) NIL → ALIST || (NIL STRING) STRING → ALIST || (NIL STRING) ————————————————————————————————————— STRING STRING → STRING || (NIL STRING) NIL → LIST || (NIL STRING) NIL → STRING || (NIL STRING) ————————————————————————————————————— NAME CALLS STRING → STRING || (NIL STRING) NIL → STRING || (NIL STRING) NIL → STRING || (NIL STRING) ————————————————————————————————————— OBJECT CALLS STRING → STRING || (NIL STRING) STRING STRING → ALIST || (NIL STRING) STRING [:STRING] → STRING || (NIL STRING) STRING → ALIST || (NIL STRING) [:STRING] → ALIST || (NIL STRING) STRING STRING → ALIST || (NIL STRING) STRING → ALIST || (NIL STRING) ————————————————————————————————————— P2P CALLS NIL → ALIST || (NIL STRING) [:STRING :BOOLEAN] → STRING || (NIL STRING) NIL → ALIST || (NIL STRING) ————————————————————————————————————— [:STRING :STRING] → ALIST || (NIL STRING) NIL → ALIST || (NIL STRING) ————————————————————————————————————— STRING [:STRING] → PROCESS-INFO-STREAM PROCESS-INFO-STREAM → CHARACTER PROCESS-INFO-STREAM → NIL ————————————————— STRING STRING [:STRING] → NIL ————————————————— NIL → LIST || (NIL STRING) [STRING] → LIST || (NIL STRING) ————————————————————————————————————— REFS CALLS NIL → 
ALIST || (NIL STRING) ————————————————————————————————————— REPO CALLS NIL → STRING || (NIL STRING) NIL → ALIST || (NIL STRING) NIL → ALIST || (NIL STRING) NIL → ALIST || (NIL STRING) NIL → NUMBER || (NIL STRING) ————————————————————————————————————— STATS CALLS NIL → ALIST || (NIL STRING) [:STRING :STRING :STRING] → ALIST || (NIL STRING) NIL → ALIST || (NIL STRING) ————————————————————————————————————— SWARM CALLS NIL → ALIST || (NIL STRING) NIL → LIST || (NIL STRING) NIL → LIST || (NIL STRING) STRING → LIST || (NIL STRING) STRING → LIST || (NIL STRING) NIL → LIST || (NIL STRING) STRING → LIST || (NIL STRING) STRING → LIST || (NIL STRING) NIL → ALIST || (NIL STRING) ————————————————————————————————————— ————————————————————————————————————— VERSION CALLS NIL → LIST || (NIL STRING) ————————————————————————————————————— UTIL LIST -> LIST STRING LIST STRING-A STRING-B … STRING-N → STRING STRING → STRING
it under the terms of version 3 of the GNU General Public License as published by the Free Software Foundation . (in-package :cl-ipfs-api2) (defparameter *api-host* ":5001") (defparameter *api-root* "/api/v0/") correlates to the env variable $ IPFS_PATH , STRING LIST [: LIST : BOOLEAN : SYMBOL ] → STRING | ALIST | ( NIL STRING ) (defun ipfs-call (call arguments &key (parameters nil) (want-stream nil) (method :POST)) "Make an IPFS HTTP API call. Quite commonly used. Some calls return strings/raw data, and others return JSON. When strings/arbitrary data are recieved, they're returned verbatim. But, when JSON is returned, it is parsed into a hashtable. If the JSON is 'error JSON', I.E., it signals that an error has been recieved, two values are returned: NIL and the string-error-message." (let ((result (multiple-value-list (drakma:http-request (make-call-url call arguments) :method method :url-encoder #'ipfs::url-encode :parameters parameters :want-stream want-stream)))) (if want-stream (car result) (apply #'process-result result)))) (defun process-result (body status-code headers uri http-stream must-close status-text) (declare (ignore uri http-stream must-close status-text)) (let* ((result (cond ((stringp body) body) ((vectorp body) (flexi-streams:octets-to-string body)))) (result (if (search "application/json" (cdr (assoc :content-type headers))) (unless (empty-string-p result) (simplify (yason:parse result :object-as :alist))) result))) (if (eql 200 status-code) result (values nil (if (stringp result) result (ignore-errors (cdr (s-assoc "Message" result)))))))) (defun make-call-url (call arguments &key (host *api-host*) (root *api-root*)) "Create the URL of an API call, as per the given arguments. Symbols are assumed to be something like 'T (so boolean), nil likewise. 
Arguments should look like this: (('recursive' nil)('name' 'xabbu'))" (let ((call-url (string+ host root call)) (first-arg 'T)) (mapcar (lambda (arg-pair) (when arg-pair (setq call-url (string+ call-url (if first-arg "?" "&") (first arg-pair) "=" (cond ((not (second arg-pair)) "false") ((symbolp (second arg-pair)) "true") ('T (second arg-pair))))) (setq first-arg nil))) arguments) call-url)) (defun add (pathname &key (pin 't) (only-hash nil) (cid-version 0)) "Add a file to IPFS, return it's hash. /ipns/docs.ipfs.io/reference/api/http/#api-v0-add" (ipfs-call "add" `(("pin" ,pin) ("only-hash" ,only-hash) ("cid-version" ,cid-version)) :parameters `(("file" . ,pathname)))) (defun cat (ipfs-path &key (offset nil) (length nil)) "Return a string of the data at the given IPFS path. /ipns/docs.ipfs.io/reference/api/http/#api-v0-cat" (ipfs-call "cat" `(("arg" ,ipfs-path) ,(if offset `("offset" ,offset)) ,(if length `("length" ,length))))) STRING [: BOOLEAN : BOOLEAN ] → ALIST || ( NIL STRING ) (defun ls (ipfs-path &key (resolve-type 't) (size 't)) "Returns all sub-objects (IPFS hashes) under a given IPFS/IPNS directory path. Returns as an associative list. /ipns/docs.ipfs.io/reference/api/http/#api-v0-ls" (ipfs-call "ls" `(("arg" ,ipfs-path) ("resolve-type" ,resolve-type) ("size" ,size)))) (defun dl (ipfs-path out-file) "Write an IPFS file directly to a file on the local file-system. Non-recursive, in the case of directories… for now. (Thanks to this thread ♥: ) Is a general replacement for the 'get' API call, but actually just uses the 'cat' call, due to some issues with using 'get'. Will not actually return NIL when an error is reached (like other functions) with an error-message, it'lll just write the error JSON to the file. Whoops." 
(with-open-file (out-stream out-file :direction :output :element-type '(unsigned-byte 8) :if-exists :overwrite :if-does-not-exist :create) (let ((in-stream (ipfs-call "cat" `(("arg" ,ipfs-path)) :want-stream 'T))) (awhile (read-byte in-stream nil nil) (write-byte it out-stream)) (close in-stream)))) [ STRING ] → ALIST (defun id (&optional peer-id) "Return info on a node by ID. Returns as an associative list, the public key, agent version, etc. If no node ID is specified, then your own is assumed. /ipns/docs.ipfs.io/reference/api/http/#api-v0-id" (ipfs-call "id" `(,(if peer-id (list "arg" peer-id))))) (defun dns (domain &key (recursive 't)) "Resolve a domain into a path (usually /ipfs/). /ipns/docs.ipfs.io/reference/api/http/#api-v0-dns" (ipfs-call "dns" `(("arg" ,domain) ("recursive" ,recursive)))) STRING [: BOOLEAN : NUMBER : NUMBER ] → STRING || ( NIL STRING ) (defun resolve (ipfs-path &key (recursive 't) (dht-record-count nil) (dht-timeout 30)) "Resolve a given name to an IPFS path." (ipfs-call "resolve" `(("arg" ,ipfs-path) ("recursive" ,recursive) ,(if dht-record-count (list "dht-record-count" dht-record-count)) ("dht-timeout" ,(string+ dht-timeout "s"))))) (defun shutdown () "Shut down the connected IPFS node. /ipns/docs.ipfs.io/reference/api/http/#api-v0-shutdown" (ipfs-call "shutdown" '())) (defun bitswap-ledger (peer-id) "Show the current ledger for a peer. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-ledger" (ipfs-call "bitswap/ledger" `(("arg" ,peer-id)))) (defun bitswap-reprovide () "Trigger the reprovider. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-reprovide" (ipfs-call "bitswap/reprovide" '())) (defun bitswap-stat () "Show diagnostic info on the bitswap agent. /ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-stat" (ipfs-call "bitswap/stat" '())) (defun bitswap-wantlist (&optional peer-id) "Show blocks currently on the wantlist. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-bitswap-wantlist" (ipfs-call "bitswap/wantlist" `(,(if peer-id (list "peer" peer-id))))) (defun block-get (hash) "Get a raw IPFS block. /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-get" (ipfs-call "block/get" `(("arg" ,hash)))) (defun block-put (pathname &key (format nil) (mhtype "sha2-256") (mhlen -1) (pin nil)) "Store input as an IPFS block. /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-put" (ipfs-call "block/put" `(,(if format (list "format" format)) ("mhtype" ,mhtype) ("mhlen" ,mhlen) ("pin" ,pin)) :parameters `(("data" . ,pathname)))) (defun block-rm (hash &key (force nil)) "Delete an IPFS block(s). /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-rm" (ipfs-call "block/rm" `(("arg" ,hash) ,(if force (list "force" force)))) nil) (defun block-stat (hash) "Print info about a raw IPFS block /ipns/docs.ipfs.io/reference/api/http/#api-v0-block-stat" (ipfs-call "block/stat" `(("arg" ,hash)))) BOOTSTRAP CALLS (defun bootstrap () "Return a list of bootstrap peers /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap" (cdr (ipfs-call "bootstrap" '()))) (defun bootstrap-list () "Return a list of bootstrap peers /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-list" (bootstrap)) (defun bootstrap-add (peer) "Add a peer to the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-add" (cdr (ipfs-call "bootstrap/add" `(("arg" ,peer))))) (defun bootstrap-add-default () "Add default peers to the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-add-default" (cdr (ipfs-call "bootstrap/add/default" '()))) (defun bootstrap-rm (peer) "Remove a peer from the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-rm" (cdr (ipfs-call "bootstrap/rm" `(("arg" ,peer))))) (defun bootstrap-rm-all () "Remove a peer from the bootstrap list /ipns/docs.ipfs.io/reference/api/http/#api-v0-bootstrap-rm" (cdr (ipfs-call "bootstrap/rm/all" '()))) (defun cid-base32 (cid) 
"Convert a CID into Base32 CIDv1 /ipns/docs.ipfs.io/reference/api/http/#api-v0-cid-base32" (let ((result (ipfs-call "cid/base32" `(("arg" ,cid))))) (if (zerop (length (cdr (s-assoc "ErrorMsg" result)))) (cdr (s-assoc "Formatted" result)) (values nil (cdr (s-assoc "ErrorMsg" result)))))) (defun cid-bases () name is a assigned a given code-number. ((CODE-A . NAME-A) (CODE-B . NAME-B) … (CODE-N . NAME-N)) /ipns/docs.ipfs.io/reference/api/http/#api-v0-cid-bases" (ipfs-call "cid/bases" '())) STRING [: STRING : BOOLEAN : BOOLEAN ] → STRING || ( NIL STRING ) (defun config (key &key (value nil) (bool nil) (json nil)) "Get/set a config key's value. /ipns/docs.ipfs.io/reference/api/http/#api-v0-config" (cdr (s-assoc "Value" (ipfs-call "config" `(("arg" ,key) ,(if value (list "value" value)) ("bool" ,bool) ("json" ,json)))))) NIL → ALIST (defun config-show () "Return the config file's contents, in alist-format… y'know, with several sub-alists. Doesn't quite line up with #api-v0-config-show /ipns/docs.ipfs.io/reference/api/http/#api-v0-config-show" (ipfs-call "config/show" '())) (defun config-get (key) "Get a config key's value. around #'config." (config key)) (defun config-set (key value &key (bool nil) (json nil)) "Set a config key's value. around #'config." (config key :value value :bool bool :json json)) DAG CALLS (defun dag-get (dag-node) "Get a dag node from IPFS. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dag-get" (ipfs-call "dag/get" `(("arg" ,dag-node)))) (defun dag-put (dag-node &key (format "cbor") (input-enc "json") (pin 'T)) "Add a dag node to IPFS. Returns CID string. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dag-put" (ipfs-call "dag/put" `(("arg" ,dag-node) ("format" ,format) ("input-enc" ,input-enc) ("pin" ,pin)))) ( gethash " / " ( gethash " Cid " result ) ) ) ) (defun dag-resolve (path) "Resolve an IPLD block. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-dag-resolve" (ipfs-call "dag/resolve" `(("arg" ,path)))) (defun dht-findpeer (peer-id) "Find the multiaddresses associated with a peer ID. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-findpeer" (cdr (s-assoc "Addrs" (cadr (s-assoc "Responses" (ipfs-call "dht/findpeer" `(("arg" ,peer-id)))))))) (defun dht-findprovs (key &key (provider-quantity 20)) "Find peers that can provide a specific value, given a key. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-findprovs" (ipfs-call "dht/findprovs" `(("arg" ,key)("num-providers" ,provider-quantity)))) (defun dht-get (key) "Query the routing system for a key's best value. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-get" (cdr (s-assoc "Addrs" (cadr (s-assoc "Responses" (ipfs-call "dht/get" `(("arg" ,key)))))))) STRING [: BOOLEAN ] → NIL (defun dht-provide (key &key (recursive nil)) "Announce to the network that you're providing the given values. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-provide" (ipfs-call "dht/provide" `(("arg" ,key)("recursive" ,recursive)))) (defun dht-put (key value) "Write a key-value pair to the routing system. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-put" (ipfs-call "dht/put" `(("arg" ,key)("arg" ,value)))) (defun dht-query (peer-id) "Find the closest peer IDs to the given one by querying the DHT. /ipns/docs.ipfs.io/reference/api/http/#api-v0-dht-query" (cdr (s-assoc "Responses" (ipfs-call "dht/query" `(("arg" ,peer-id)))))) NIL → ALIST (defun diag-cmds () "List commands run on this IPFS node. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-cmds" (ipfs-call "diag/cmds" NIL)) (defun diag-cmds-clear () "Clear inactive requests from the log. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-cmds-clear" (ipfs-call "diag/cmds/clear" NIL)) (defun diag-cmds-set-time (time) "Set how long to keep inactive requests in the log. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-cmds-set-time" (ipfs-call "diag/cmds/set-time" `(("arg" ,time)))) (defun diag-sys () "Print system diagnostic info. /ipns/docs.ipfs.io/reference/api/http/#api-v0-diag-sys" (ipfs-call "diag/sys" NIL)) (defun file-ls (path) "List directory contents for UNIX filesystem objects. /ipns/docs.ipfs.io/reference/api/http/#api-v0-file-ls" (cdr (s-assoc "Objects" (ipfs-call "file/ls" `(("arg" ,path)))))) (defun files-chcid (path &key (cid-version nil) (hash nil)) "Change the cid version or hash function of the root node of a given path. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-chcid" (ipfs-call "files/chcid" `(("arg" ,path) ,(if cid-version `("cid-version" ,cid-version)) ,(if hash (list "hash" hash))))) (defun files-cp (source destination) "Copy files into mfs. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-cp" (ipfs-call "files/cp" `(("arg" ,source)("arg" ,destination)))) (defun files-flush (&optional (path "/")) "Flush a given path's data to disk. Returns CID. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-flush" (ipfs-call "files/flush" `(("arg" ,path)))) (defun files-ls (&optional (path "/")) "List directories in local mutable namespace. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-ls" (ipfs-call "files/ls" `(("arg" ,path)))) STRING [: BOOLEAN : NUMBER : STRING ] → NIL || ( NIL STRING ) (defun files-mkdir (path &key (parents nil) (cid-version nil) (hash nil)) "Make a directory. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-mkdir" (ipfs-call "files/mkdir" `(("arg" ,path) ,(if parents (list "parents" parents)) ,(if cid-version `("cid-version" ,cid-version)) ,(if hash (list "hash" hash))))) (defun files-mv (source destination) "Move a file. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-mv" (ipfs-call "files/mv" `(("arg" ,source)("arg" ,destination)))) (defun files-read (path &key (offset nil) (max nil)) "Read a file in given mfs. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-files-read" (ipfs-call "files/read" `(("arg" ,path) ,(if offset (list "offset" offset)) ,(if max (list "max" max))))) STRING [: BOOLEAN : BOOLEAN ] → NIL || ( NIL STRING ) (defun files-rm (path &key (recursive nil) (force nil)) "Remove a given file. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-rm" (ipfs-call "files/read" `(("arg" ,source) ("recursive" ,recursive) ("force" ,force)))) (defun files-stat (path) "Remove a given file. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-rm" (ipfs-call "files/stat" `(("arg" ,path)))) PATHNAME STRING [: NUMBER : BOOLEAN : BOOLEAN : BOOLEAN : NUMBER : BOOLEAN (defun files-write (path-or-string dest-path &key (offset nil) (create nil) (parents nil) (truncate nil) (count nil) (raw-leaves nil) (cid-version nil) (hash nil)) "Write to a given file. First parameter can be a string or a path to a local file. /ipns/docs.ipfs.io/reference/api/http/#api-v0-files-rm" (let ((result (multiple-value-list (drakma:http-request (make-call-url "files/write" `(("arg" ,dest-path) ("create", create) ("parents" ,parents) ("truncate" ,truncate) ("raw-leaves" ,raw-leaves) ,@(when offset (list "offset" offset)) ,@(when count (list "count" count)) ,@(when cid-version `("cid-version" ,cid-version)) ,@(when hash (list "hash" hash)))) :method :post :parameters `(("data" . ,path-or-string)) :form-data t)))) (apply #'process-result result))) (defmacro with-files-write ((stream dest-path &rest params) &body body) "A convenience macro for files-write. In the body of the macro, any writes to the stream named by STREAM will be sent to the mfs file at DEST-PATH. PARAMS will be passed directly to the files-write function." (let ((fn (gensym "FN"))) `(uiop:with-temporary-file (:stream ,stream :pathname ,fn) ,@body :close-stream (files-write ,fn ,dest-path ,@params)))) FILESTORE CALLS (defun filestore-dups () "List blocks that're both in the filestore and standard block storage. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-filestore-dups" (ipfs-call "filestore/dups" '())) (defun filestore-ls (&optional cid) "List objects in filestore. /ipns/docs.ipfs.io/reference/api/http/#api-v0-filestore-ls" (ipfs-call "filestore/ls" `(,(if cid (list "arg" cid))))) (defun filestore-verify (&optional cid) "Verify objects in filestore. /ipns/docs.ipfs.io/reference/api/http/#api-v0-filestore-verify" (ipfs-call "filestore/verify" `(,(if cid (list "arg" cid))))) (defun key-gen (name &key (type nil) (size nil)) "Create a new keypair. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-gen" (ipfs-call "key/gen" `(("name" ,name) ,(if type (list "type" type)) ,(if size (list "size" size))))) (defun key-list () "List all local keypairs. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-list" (ipfs-call "key/list" '())) STRING STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun key-rename (old-name new-name &key (force nil)) "Rename a local keypair. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-rename" (ipfs-call "key/rename" `(("arg" ,old-name) ("arg" ,new-name) ("force" ,force)))) (defun key-remove (name) "Remove a keypair, based on name. /ipns/docs.ipfs.io/reference/api/http/#api-v0-key-remove" (ipfs-call "key/remove" `(("arg" ,name)))) LOG CALLS (defun log-level (subsystem level) "Change the logging level of a subsystem. /ipns/docs.ipfs.io/reference/api/http/#api-v0-log-level" (cdr (s-assoc "Message" (ipfs-call "log/level" `(("arg" ,subsystem)("arg" ,level)))))) (defun log-ls () "List the logging subsystems. /ipns/docs.ipfs.io/reference/api/http/#api-v0-log-ls" (cdr (ipfs-call "log/ls" '()))) (defun log-tail () "Read the event log. /ipns/docs.ipfs.io/reference/api/http/#api-v0-log-tail" (ipfs-call "log/tail" '()) result) STRING [: BOOLEAN : STRING : BOOLEAN : STRING ] → ALIST || ( NIL STRING ) (defun name-publish (ipfs-path &key (resolve 'T) (lifetime "24h") (allow-offline 'T) (ttl nil)) "Publish an IPNS name-- associate it with an IPFS path. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-name-publish" (ipfs-call "name/publish" `(("arg" ,ipfs-path)("resolve" ,resolve) ("lifetime" ,lifetime) ("allow-offline" ,allow-offline) ,(if ttl (list "ttl" ttl))))) (defun name-pubsub-cancel (name) "Cancel subscription to a name. /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-pubsub-cancel" (cdr (s-assoc "Cancelled" (ipfs-call "name/pubsub/cancel" `(("arg" ,name)))))) (defun name-pubsub-state () "Query the state of IPNS pubsub. /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-pubsub-state" (cdr (s-assoc "Enabled" (ipfs-call "name/pubsub/state" '())))) (defun name-pubsub-subs () "Show current name subscriptions. /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-pubsub-subs" (cdr (s-assoc "Strings" (ipfs-call "name/pubsub/subs" '())))) STRING [: BOOLEAN : BOOLEAN : NUMBER : STRING ] → STRING || ( NIL STRING ) (defun name-resolve (name &key (recursive 't) (nocache "") (dht-record-count nil) (dht-timeout nil)) "Resolve a given IPNS name. /ipns/docs.ipfs.io/reference/api/http/#api-v0-name-resolve" (ipfs-call "name/resolve" `(("arg" ,name)("recursive" ,recursive) ,(when (not (empty-string-p nocache)) (list "nocache" nocache)) ,(when dht-record-count (list "dht-record-count" dht-record-count)) ,(when dht-timeout (list "dht-timeout" dht-timeout))))) (defun object-data (key) "Output the raw data of an IPFS object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-data" (ipfs-call "object/data" `(("arg" ,key)))) (defun object-diff (object-a object-b) "Display the differences between two IPFS objects. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-diff" (ipfs-call "object/diff" `(("arg" ,object-a)("arg" ,object-b)))) (defun object-get (key &key (data-encoding "text")) "Get and serialize the named DAG node. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-object-get" (ipfs-call "object/get" `(("arg" ,key)("data-encoding" ,data-encoding)))) (defun object-links (key) "Output the links pointed to by the specified object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-links" (ipfs-call "object/links" `(("arg" ,key)))) (defun object-new (&key (template nil)) "Create a new object from an IPFS template. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-new" (ipfs-call "object/new"`(,(if template `("template" ,template))))) STRING STRING STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun object-patch-add-link (hash name object &key (create "")) "Add a link to a given object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-patch-add-link" (ipfs-call "object/patch/add-link" `(("arg" ,hash)("arg" ,name)("arg" ,object) ,(when (not (empty-string-p create)) `("create" ,create))))) (defun object-patch-rm-link (hash name) "Remove a link from a given object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-patch-rm-link" (ipfs-call "object/patch/rm-link" `(("arg" ,hash)("arg" ,name)))) (defun object-stat (key) "Get stats for a DAG node. /ipns/docs.ipfs.io/reference/api/http/#api-v0-object-stat" (ipfs-call "object/stat" `(("arg" ,key)))) [: BOOLEAN : STRING : STRING : STRING : STRING ] → NUMBER || ( NIL STRING ) (defun p2p-close (&key (all "") (protocol nil) (listen-address nil) (target-address nil)) "Stop listening for new connections to forward. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-close" (ipfs-call "p2p/close" `(,(when (not (empty-string-p all)) `("all" ,all)) ,(when protocol `("protocol" ,protocol)) ,(when listen-address `("listen-address" ,listen-address)) ,(when target-address `("target-address" ,target-address))))) STRING STRING STRING [: BOOLEAN ] → STRING || ( NIL STRING ) (defun p2p-forward (protocol listening-endpoint target-endpoint &key (allow-custom-protocol "")) "Forward connections to libp2p service. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-forward" (ipfs-call "p2p/forward" `(("arg" ,protocol)("arg" ,listening-endpoint) ("arg" ,target-endpoint) ,(when (not (empty-string-p allow-custom-protocol)) `("allow-custom-protocol" ,allow-custom-protocol))))) STRING STRING [: BOOLEAN : BOOLEAN ] → STRING || ( NIL STRING ) (defun p2p-listen (protocol target-endpoint &key (allow-custom-protocol "") (report-peer-id "")) "Create libp2p service. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-listen" (ipfs-call "p2p/listen" `(("arg" ,protocol)("arg" ,target-endpoint) ,(when (not (empty-string-p allow-custom-protocol)) `("allow-custom-protocol" ,allow-custom-protocol)) ,(when (not (empty-string-p report-peer-id)) `("report-peer-id" ,report-peer-id))))) (defun p2p-ls () "List active p2p listeners. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-ls" (ipfs-call "p2p/ls" '())) (defun p2p-stream-close (&key (identifier nil) (all "")) "Close an active p2p stream. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-stream-close" (ipfs-call "p2p/stream/close" `(,(when identifier `("arg" ,identifier)) ,(when (not (empty-string-p all)) `("all" ,all))))) (defun p2p-stream-ls () "List active p2p streams. /ipns/docs.ipfs.io/reference/api/http/#api-v0-p2p-stream-ls" (ipfs-call "p2p/stream/ls" '())) PIN CALLS STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun pin-add (path &key (recursive 'T)) "Pin an object to local storage. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-add" (ipfs-call "pin/add" `(("arg" ,path)("recursive" ,recursive)))) (defun pin-ls (&key (path nil) (type "all")) "List objects pinned to local storage. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-ls" (let ((res (ipfs-call "pin/ls" `(,(when path `("arg" ,path)) ("type" ,type))))) (if (equal res '("Keys")) nil res))) STRING [: BOOLEAN ] → ALIAS || ( NIL STRING ) (defun pin-rm (path &key (recursive 'T)) "Remove pinned objects from local storage. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-rm" (ipfs-call "pin/rm" `(("arg" ,path)("recursive" ,recursive)))) STRING STRING [: BOOLEAN ] → ALIST || ( NIL STRING ) (defun pin-update (old-path new-path &key (unpin 'T)) "Update a recursive pin. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-update" (ipfs-call "pin/update" `(("arg" ,old-path)("arg" ,new-path)("unpin" ,unpin)))) (defun pin-verify () "Verify that recursive pins are complete. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pin-verify" (ipfs-call "pin/verify" '())) PUBSUB CALLS (defun pubsub-sub (topic &key (env "")) "Subscribe to a given pubsub topic— this function requires go-ipfs to be installed on the current machine, and that `ipfs` is in the current $PATH. This probably will only work on *nix systems (sorry Windows nerds). Returns a uiop/launch-program::process-info socket-- can be used in conjunction with the #'pubsub-sub-* functions, or with :uiop/launch-program's functions. A system-dependent replacement for /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-sub" (when (and *ipfs-root* (empty-string-p env)) (setq env (string+ "env IPFS_PATH=" *ipfs-root* " > /dev/null;"))) (uiop:launch-program (string+ env "ipfs pubsub sub " topic) :output :stream)) PROCESS - INFO - STREAM → FD - STREAM (defun pubsub-sub-process (pubsub-socket) "Turn a uiop process-info-stream ('pubsub stream') into a fd-stream that is #'read-char-able, etc." (uiop/launch-program:process-info-output pubsub-socket)) (defun pubsub-sub-read-char (pubsub-socket) "Process a 'pubsub stream' (process-info-stream) and #'readchar it." (read-char (pubsub-sub-process pubsub-socket))) PROCESS - INFO - STREAM → BOOLEAN (defun pubsub-sub-listen (pubsub-socket) "Process a 'pubsub stream' (process-info-stream) and #'listen it." (listen (pubsub-sub-process pubsub-socket))) (defun pubsub-sub-close (pubsub-socket) "Close a 'pubsub stream' (process-info-stream) and related processes." 
(and (uiop/launch-program:terminate-process pubsub-socket :urgent 't) (uiop/launch-program:close-streams pubsub-socket))) (defun pubsub-pub (topic string &key (env "")) "Publish a string to a given pubsub topic. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-pub" (when (and *ipfs-root* (empty-string-p env)) (setq env (string+ "env IPFS_PATH=" *ipfs-root* " > /dev/null;"))) (uiop:run-program (string+ env "ipfs pubsub pub " topic " \"" string "\"")) nil) (defun pubsub-ls () "Return a list of subscribed topics. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-ls" (s-assoc "Strings" (ipfs-call "pubsub/ls" '()))) (defun pubsub-peers (&optional topic) "Return a list of peers with pubsub enabled. /ipns/docs.ipfs.io/reference/api/http/#api-v0-pubsub-peers" (s-assoc "Strings" (ipfs-call "pubsub/peers" `(,(if topic (list "arg" topic)))))) STRING [: BOOLEAN : BOOLEAN : NUMBER ] → ALIST || ( NIL STRING ) (defun refs (path &key (unique "") (recursive "") (max-depth -1)) "List links (references) from an object. /ipns/docs.ipfs.io/reference/api/http/#api-v0-refs" (ipfs-call "refs" `(("arg" ,path)("max-depth" ,max-depth) ,(if (not (empty-string-p recursive)) `("recursive" ,recursive)) ,(if (not (empty-string-p unique)) `("unique" ,unique))))) (defun refs-local () "List all local references. /ipns/docs.ipfs.io/reference/api/http/#api-v0-refs-local" (ipfs-call "refs/local" '())) (defun repo-fsck () "Remove repo lock-files. /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-fsck" (cdr (s-assoc "Message" (ipfs-call "repo/fsck" '())))) (defun repo-gc () "Perform garbage collection on the repo. /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-gc" (ipfs-call "repo/gc" '())) (defun repo-stat () "Get stats for the current repo. /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-stat" (ipfs-call "repo/stat" '())) (defun repo-verify () "Verify that all repo blocks aren't corrupted. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-verify" (ipfs-call "repo/verify" '())) (defun repo-version () "Show the repo version. /ipns/docs.ipfs.io/reference/api/http/#api-v0-repo-version" (parse-integer (ipfs-call "repo/version" '()))) (defun stats-bitswap () "Show diagnostics on bitswap. /ipns/docs.ipfs.io/reference/api/http/#api-v0-stats-bitswap" (ipfs-call "stats/bitswap" '())) (defun stats-bw (&key (peer nil) (proto nil) (interval nil)) "Return bandwidth information. /ipns/docs.ipfs.io/reference/api/http/#api-v0-stats-bw" (ipfs-call "stats/bitswap" `(,(when peer `("peer" ,peer)) ,(when proto `("proto" ,proto)) ,(when interval `("interval" ,interval)) ,(when interval `("poll" 'T))))) (defun stats-repo () "Show diagnostics on current repo. /ipns/docs.ipfs.io/reference/api/http/#api-v0-stats-repo" (ipfs-call "stats/repo" '())) (defun swarm-addrs () "List known addresses. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-addrs" (ipfs-call "swarm/addrs" '())) (defun swarm-addrs-listen () "List interface listening addresses. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-addrs-listen" (cdr (ipfs-call "swarm/addrs/listen" '()))) (defun swarm-addrs-local () "List local addresses. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-addrs-local" (cdr (ipfs-call "swarm/addrs/local" '()))) (defun swarm-connect (address) "Open connection to a given address. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-connect" (cdr (ipfs-call "swarm/connect" `(("arg" ,address))))) (defun swarm-disconnect (address) "Close connection to a given address. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-disconnect" (cdr (ipfs-call "swarm/disconnect" `(("arg" ,address))))) (defun swarm-filters () "List address filters. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-filters" (ipfs-call "swarm/filters" '())) (defun swarm-filters-add (multiaddr) "Add an address filter. 
/ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-filters-add" (ipfs-call "swarm/filters/add" `(("arg" ,multiaddr)))) (defun swarm-filters-rm (multiaddr) "Remove an address filter. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-filters-rm" (ipfs-call "swarm/filters/rm" `(("arg" ,multiaddr)))) (defun swarm-peers () "List peers with open connections. /ipns/docs.ipfs.io/reference/api/http/#api-v0-swarm-peers" (ipfs-call "swarm/peers" '())) URLSTORE CALLS STRING [: BOOLEAN : BOOLEAN ] → ALIST || ( NIL STRING ) (defun urlstore-add (url &key (pin 'T) (trickle "")) "Add a URL via urlstore. /ipns/docs.ipfs.io/reference/api/http/#api-v0-urlstore-add" (ipfs-call "urlstore/add"`(("arg" ,url)("pin" ,pin) ,(when (not (empty-string-p trickle)) `("trickle" ,trickle))))) (defun version () "Return the current golang, system, repo, and IPFS versions. /ipns/docs.ipfs.io/reference/api/http/#api-v0-version" (ipfs-call "version" nil)) NIL → ALIST (defun version-deps () I.E. , Go version , OS , etc . /ipns/docs.ipfs.io/reference/api/http/#api-v0-version" (ipfs-call "version/deps" '())) (defun simplify (list) "'Simplify' a list. Remove any extraneous sublisting [ ((2 3)) -> (2 3) ], and remove extraneous strings in otherwise pure alists, e.g. [ (``Apple'' (2 2) (3 3) (4 4)) -> ((2 2) (3 3) (4 4)) ]" (cond ((and (stringp (car list)) (stringp (cdr list))) (cdr list)) ((and (eq 1 (length list)) (consp (car list))) (simplify (car list))) ((and (consp list) (stringp (car list)) (consp (cadr list))) (simplify (cdr list))) ('T list))) (defun s-assoc (key alist) "Get the value of an associative list using a string key." (assoc key alist :test #'string-equal)) (defun string+ (&rest strings) "Combine an arbitrary amount of strings into a single string." (reduce (lambda (a b) (format nil "~A~A" a b)) strings)) STRING → BOOLEAN (defun empty-string-p (string) "Return whether or not a given item is an empty string." 
(and (stringp string) (zerop (length string)))) (defun url-encode (string &rest ignored) "Wrap around drakma's url encoder, with a slight change-- instead of using plus-signs for spaces, we want to use %20." ignored (cl-ppcre:regex-replace-all "%2520" (drakma:url-encode (cl-ppcre:regex-replace-all " " string "%20") :utf-8) "%20"))
bf43dd0c1f3dc372aad77aa11ee6e47066d2c8a74a66d39b43d9b8e2086d58e2
Proxymoron461/chesskell
EnPassantTests.hs
module EnPassantTests where import Data.Type.Equality ((:~:)(..)) import qualified GHC.TypeLits as TL (Nat) import Data.Proxy(Proxy(..)) import Data.Type.Nat hiding (SNat(..)) import Test.Hspec import Test.HUnit.Lang (Assertion, assertFailure) import Test.ShouldNotTypecheck (shouldNotTypecheck) import Control.DeepSeq (force, NFData) import Control.Exception (evaluate, try, TypeError(..)) import Data.Type.Equality ((:~:)(..)) import Data.Proxy(Proxy(..)) import Chesskell import Vec import FirstClassFunctions import ChessTypes import FlatBuilders import TestTypes type family IsSpaceVulnerableToEnPassant (b :: BoardDecorator) (p :: Position) :: Bool where IsSpaceVulnerableToEnPassant boardDec (At A row) = Eval (At A row `In` Eval (GetEnPassantPosition (At B row) boardDec)) IsSpaceVulnerableToEnPassant boardDec (At H row) = Eval (At H row `In` Eval (GetEnPassantPosition (At G row) boardDec)) IsSpaceVulnerableToEnPassant boardDec pos = Eval ( Eval (pos `In` (Eval (GetEnPassantPosition (OneLeft pos) boardDec))) :&&: (pos `In` (Eval (GetEnPassantPosition (OneRight pos) boardDec)))) type WhitePawnFirst = ( Eval ( Move ( At A Nat2 ) ( At A Nat4 ) StartDec ) ) -- enPassantTest1 :: True :~: IsSpaceVulnerableToEnPassant WhitePawnFirst (At A Nat4) -- enPassantTest1 = Refl enPassantTest2 : : False : ~ : IsSpaceVulnerableToEnPassant ( ( Eval ( Move ( At A Nat2 ) ( At A Nat3 ) StartDec ) ) ) ( At A Nat3 ) enPassantTest2 = type BlackPawnFirst = ( Eval ( Move ( At A Nat7 ) ( At A Nat5 ) StartDec ) ) -- enPassantTest3 :: True :~: IsSpaceVulnerableToEnPassant BlackPawnFirst (At A Nat5) -- enPassantTest3 = Refl enPassantTest4 : : False : ~ : IsSpaceVulnerableToEnPassant ( ( Eval ( Move ( At A Nat7 ) ( At A Nat6 ) StartDec ) ) ) ( At A Nat6 ) -- enPassantTest4 = Refl -- enPassantTest5 :: False :~: IsSpaceVulnerableToEnPassant ((Eval (Move (At A Nat7) (At A Nat6) WhitePawnFirst))) (At A Nat4) -- enPassantTest5 = Refl enPassantTest6 : : False : ~ : IsSpaceVulnerableToEnPassant ( ( Eval ( 
Move ( At A Nat2 ) ( At A Nat3 ) BlackPawnFirst ) ) ) ( At A Nat5 ) -- enPassantTest6 = Refl -- -- enPassantTest1 = chess -- -- pawn d2 to d4 -- -- pawn b7 to b5 -- -- pawn e2 to e3 -- -- pawn b5 to b4 -- -- pawn a2 to a4 -- -- pawn b4 to a3 -- -- end -- -- enPassantTest2 = chess -- -- pawn d2 to d4 -- -- pawn b7 to b5 -- -- pawn e2 to e3 -- -- pawn b5 to b4 -- -- pawn e3 to e4 -- -- pawn b4 to b3 -- -- pawn a2 to a3 -- -- pawn b3 to a2 -- -- end enPassantTestSuite = describe " En Passant Tests " $ do it " 1 : If a White Pawn moves forward 2 spaces as its ' first move , then it should be vulnerable to en passant " $ -- shouldTypeCheck enPassantTest1 it " 2 : If a White Pawn moves forward 1 space as its ' first move , then it should not be vulnerable to en passant " $ -- shouldNotTypecheck enPassantTest2 it " 3 : If a Black Pawn moves forward 2 spaces as its ' first move , then it should be vulnerable to en passant " $ -- shouldTypeCheck enPassantTest3 it " 4 : If a Black Pawn moves forward 1 space as its ' first move , then it should not be vulnerable to en passant " $ -- shouldTypeCheck enPassantTest4 it " 5 : If a White Pawn moves forward 2 spaces , but is not the last piece to move , then it should not be vulnerable to en passant " $ -- shouldTypeCheck enPassantTest5 it " 6 : If a Black Pawn moves forward 2 spaces , but is not the last piece to move , then it should not be vulnerable to en passant " $ -- shouldTypeCheck enPassantTest6
null
https://raw.githubusercontent.com/Proxymoron461/chesskell/fcfdb954ccc760d3689583d7198f0be5da2905a5/chess/test/EnPassantTests.hs
haskell
enPassantTest1 :: True :~: IsSpaceVulnerableToEnPassant WhitePawnFirst (At A Nat4) enPassantTest1 = Refl enPassantTest3 :: True :~: IsSpaceVulnerableToEnPassant BlackPawnFirst (At A Nat5) enPassantTest3 = Refl enPassantTest4 = Refl enPassantTest5 :: False :~: IsSpaceVulnerableToEnPassant ((Eval (Move (At A Nat7) (At A Nat6) WhitePawnFirst))) (At A Nat4) enPassantTest5 = Refl enPassantTest6 = Refl -- enPassantTest1 = chess -- pawn d2 to d4 -- pawn b7 to b5 -- pawn e2 to e3 -- pawn b5 to b4 -- pawn a2 to a4 -- pawn b4 to a3 -- end -- enPassantTest2 = chess -- pawn d2 to d4 -- pawn b7 to b5 -- pawn e2 to e3 -- pawn b5 to b4 -- pawn e3 to e4 -- pawn b4 to b3 -- pawn a2 to a3 -- pawn b3 to a2 -- end shouldTypeCheck enPassantTest1 shouldNotTypecheck enPassantTest2 shouldTypeCheck enPassantTest3 shouldTypeCheck enPassantTest4 shouldTypeCheck enPassantTest5 shouldTypeCheck enPassantTest6
module EnPassantTests where import Data.Type.Equality ((:~:)(..)) import qualified GHC.TypeLits as TL (Nat) import Data.Proxy(Proxy(..)) import Data.Type.Nat hiding (SNat(..)) import Test.Hspec import Test.HUnit.Lang (Assertion, assertFailure) import Test.ShouldNotTypecheck (shouldNotTypecheck) import Control.DeepSeq (force, NFData) import Control.Exception (evaluate, try, TypeError(..)) import Data.Type.Equality ((:~:)(..)) import Data.Proxy(Proxy(..)) import Chesskell import Vec import FirstClassFunctions import ChessTypes import FlatBuilders import TestTypes type family IsSpaceVulnerableToEnPassant (b :: BoardDecorator) (p :: Position) :: Bool where IsSpaceVulnerableToEnPassant boardDec (At A row) = Eval (At A row `In` Eval (GetEnPassantPosition (At B row) boardDec)) IsSpaceVulnerableToEnPassant boardDec (At H row) = Eval (At H row `In` Eval (GetEnPassantPosition (At G row) boardDec)) IsSpaceVulnerableToEnPassant boardDec pos = Eval ( Eval (pos `In` (Eval (GetEnPassantPosition (OneLeft pos) boardDec))) :&&: (pos `In` (Eval (GetEnPassantPosition (OneRight pos) boardDec)))) type WhitePawnFirst = ( Eval ( Move ( At A Nat2 ) ( At A Nat4 ) StartDec ) ) enPassantTest2 : : False : ~ : IsSpaceVulnerableToEnPassant ( ( Eval ( Move ( At A Nat2 ) ( At A Nat3 ) StartDec ) ) ) ( At A Nat3 ) enPassantTest2 = type BlackPawnFirst = ( Eval ( Move ( At A Nat7 ) ( At A Nat5 ) StartDec ) ) enPassantTest4 : : False : ~ : IsSpaceVulnerableToEnPassant ( ( Eval ( Move ( At A Nat7 ) ( At A Nat6 ) StartDec ) ) ) ( At A Nat6 ) enPassantTest6 : : False : ~ : IsSpaceVulnerableToEnPassant ( ( Eval ( Move ( At A Nat2 ) ( At A Nat3 ) BlackPawnFirst ) ) ) ( At A Nat5 ) enPassantTestSuite = describe " En Passant Tests " $ do it " 1 : If a White Pawn moves forward 2 spaces as its ' first move , then it should be vulnerable to en passant " $ it " 2 : If a White Pawn moves forward 1 space as its ' first move , then it should not be vulnerable to en passant " $ it " 3 : If a Black Pawn moves forward 
2 spaces as its ' first move , then it should be vulnerable to en passant " $ it " 4 : If a Black Pawn moves forward 1 space as its ' first move , then it should not be vulnerable to en passant " $ it " 5 : If a White Pawn moves forward 2 spaces , but is not the last piece to move , then it should not be vulnerable to en passant " $ it " 6 : If a Black Pawn moves forward 2 spaces , but is not the last piece to move , then it should not be vulnerable to en passant " $
06d00397fb4c6c33fb78d0b9e958ef07c16bafb78c0e90f5820bc90eb3735ce7
mcna/alexandria
tests.lisp
(in-package :cl-user) (defpackage :alexandria-tests (:use :cl :alexandria #+sbcl :sb-rt #-sbcl :rtest) (:import-from #+sbcl :sb-rt #-sbcl :rtest #:*compile-tests* #:*expected-failures*)) (in-package :alexandria-tests) (defun run-tests (&key ((:compiled *compile-tests*))) (do-tests)) ;;;; Arrays (deftest copy-array.1 (let* ((orig (vector 1 2 3)) (copy (copy-array orig))) (values (eq orig copy) (equalp orig copy))) nil t) (deftest copy-array.2 (let ((orig (make-array 1024 :fill-pointer 0))) (vector-push-extend 1 orig) (vector-push-extend 2 orig) (vector-push-extend 3 orig) (let ((copy (copy-array orig))) (values (eq orig copy) (equalp orig copy) (array-has-fill-pointer-p copy) (eql (fill-pointer orig) (fill-pointer copy))))) nil t t t) (deftest array-index.1 (typep 0 'array-index) t) ;;;; Conditions (deftest unwind-protect-case.1 (let (result) (unwind-protect-case () (random 10) (:normal (push :normal result)) (:abort (push :abort result)) (:always (push :always result))) result) (:always :normal)) (deftest unwind-protect-case.2 (let (result) (unwind-protect-case () (random 10) (:always (push :always result)) (:normal (push :normal result)) (:abort (push :abort result))) result) (:normal :always)) (deftest unwind-protect-case.3 (let (result1 result2 result3) (ignore-errors (unwind-protect-case () (error "FOOF!") (:normal (push :normal result1)) (:abort (push :abort result1)) (:always (push :always result1)))) (catch 'foof (unwind-protect-case () (throw 'foof 42) (:normal (push :normal result2)) (:abort (push :abort result2)) (:always (push :always result2)))) (block foof (unwind-protect-case () (return-from foof 42) (:normal (push :normal result3)) (:abort (push :abort result3)) (:always (push :always result3)))) (values result1 result2 result3)) (:always :abort) (:always :abort) (:always :abort)) (deftest unwind-protect-case.4 (let (result) (unwind-protect-case (aborted-p) (random 42) (:always (setq result aborted-p))) result) nil) (deftest unwind-protect-case.5 
(let (result) (block foof (unwind-protect-case (aborted-p) (return-from foof) (:always (setq result aborted-p)))) result) t) ;;;; Control flow (deftest switch.1 (switch (13 :test =) (12 :oops) (13.0 :yay)) :yay) (deftest switch.2 (switch (13) ((+ 12 2) :oops) ((- 13 1) :oops2) (t :yay)) :yay) (deftest eswitch.1 (let ((x 13)) (eswitch (x :test =) (12 :oops) (13.0 :yay))) :yay) (deftest eswitch.2 (let ((x 13)) (eswitch (x :key 1+) (11 :oops) (14 :yay))) :yay) (deftest cswitch.1 (cswitch (13 :test =) (12 :oops) (13.0 :yay)) :yay) (deftest cswitch.2 (cswitch (13 :key 1-) (12 :yay) (13.0 :oops)) :yay) (deftest multiple-value-prog2.1 (multiple-value-prog2 (values 1 1 1) (values 2 20 200) (values 3 3 3)) 2 20 200) (deftest nth-value-or.1 (multiple-value-bind (a b c) (nth-value-or 1 (values 1 nil 1) (values 2 2 2)) (= a b c 2)) t) (deftest whichever.1 (let ((x (whichever 1 2 3))) (and (member x '(1 2 3)) t)) t) (deftest whichever.2 (let* ((a 1) (b 2) (c 3) (x (whichever a b c))) (and (member x '(1 2 3)) t)) t) (deftest xor.1 (xor nil nil 1 nil) 1 t) (deftest xor.2 (xor nil nil 1 2) nil nil) (deftest xor.3 (xor nil nil nil) nil t) ;;;; Definitions (deftest define-constant.1 (let ((name (gensym))) (eval `(define-constant ,name "FOO" :test 'equal)) (eval `(define-constant ,name "FOO" :test 'equal)) (values (equal "FOO" (symbol-value name)) (constantp name))) t t) (deftest define-constant.2 (let ((name (gensym))) (eval `(define-constant ,name 13)) (eval `(define-constant ,name 13)) (values (eql 13 (symbol-value name)) (constantp name))) t t) ;;;; Errors TYPEP is specified to return a generalized boolean and , for example , ECL exploits this by returning the superclasses of ERROR ;;; in this case. 
(defun errorp (x) (not (null (typep x 'error)))) (deftest required-argument.1 (multiple-value-bind (res err) (ignore-errors (required-argument)) (errorp err)) t) ;;;; Hash tables (deftest ensure-hash-table.1 (let ((table (make-hash-table)) (x (list 1))) (multiple-value-bind (value already-there) (ensure-gethash x table 42) (and (= value 42) (not already-there) (= 42 (gethash x table)) (multiple-value-bind (value2 already-there2) (ensure-gethash x table 13) (and (= value2 42) already-there2 (= 42 (gethash x table))))))) t) #+clisp (pushnew 'copy-hash-table.1 *expected-failures*) (deftest copy-hash-table.1 (let ((orig (make-hash-table :test 'eq :size 123)) (foo "foo")) (setf (gethash orig orig) t (gethash foo orig) t) (let ((eq-copy (copy-hash-table orig)) (eql-copy (copy-hash-table orig :test 'eql)) (equal-copy (copy-hash-table orig :test 'equal)) CLISP overflows the stack with this bit . ;; See <>. #-clisp (equalp-copy (copy-hash-table orig :test 'equalp))) (list (eql (hash-table-size eq-copy) (hash-table-size orig)) (eql (hash-table-rehash-size eq-copy) (hash-table-rehash-size orig)) (hash-table-count eql-copy) (gethash orig eq-copy) (gethash (copy-seq foo) eql-copy) (gethash foo eql-copy) (gethash (copy-seq foo) equal-copy) (gethash "FOO" equal-copy) #-clisp (gethash "FOO" equalp-copy)))) (t t 2 t nil t t nil t)) (deftest copy-hash-table.2 (let ((ht (make-hash-table)) (list (list :list (vector :A :B :C)))) (setf (gethash 'list ht) list) (let* ((shallow-copy (copy-hash-table ht)) (deep1-copy (copy-hash-table ht :key 'copy-list)) (list (gethash 'list ht)) (shallow-list (gethash 'list shallow-copy)) (deep1-list (gethash 'list deep1-copy))) (list (eq ht shallow-copy) (eq ht deep1-copy) (eq list shallow-list) (eq list deep1-list) ; outer list was copied. (eq (second list) (second shallow-list)) (eq (second list) (second deep1-list)) ; inner vector wasn't copied. 
))) (nil nil t nil t t)) (deftest maphash-keys.1 (let ((keys nil) (table (make-hash-table))) (declare (notinline maphash-keys)) (dotimes (i 10) (setf (gethash i table) t)) (maphash-keys (lambda (k) (push k keys)) table) (set-equal keys '(0 1 2 3 4 5 6 7 8 9))) t) (deftest maphash-values.1 (let ((vals nil) (table (make-hash-table))) (declare (notinline maphash-values)) (dotimes (i 10) (setf (gethash i table) (- i))) (maphash-values (lambda (v) (push v vals)) table) (set-equal vals '(0 -1 -2 -3 -4 -5 -6 -7 -8 -9))) t) (deftest hash-table-keys.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash i table) t)) (set-equal (hash-table-keys table) '(0 1 2 3 4 5 6 7 8 9))) t) (deftest hash-table-values.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash (gensym) table) i)) (set-equal (hash-table-values table) '(0 1 2 3 4 5 6 7 8 9))) t) (deftest hash-table-alist.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash i table) (- i))) (let ((alist (hash-table-alist table))) (list (length alist) (assoc 0 alist) (assoc 3 alist) (assoc 9 alist) (assoc nil alist)))) (10 (0 . 0) (3 . -3) (9 . -9) nil)) (deftest hash-table-plist.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash i table) (- i))) (let ((plist (hash-table-plist table))) (list (length plist) (getf plist 0) (getf plist 2) (getf plist 7) (getf plist nil)))) (20 0 -2 -7 nil)) #+clisp (pushnew 'alist-hash-table.1 *expected-failures*) (deftest alist-hash-table.1 (let* ((alist '((0 a) (1 b) (2 c))) (table (alist-hash-table alist))) (list (hash-table-count table) (gethash 0 table) (gethash 1 table) (gethash 2 table) CLISP returns EXT : FASTHASH - EQL . 
(3 (a) (b) (c) eql)) #+clisp (pushnew 'plist-hash-table.1 *expected-failures*) (deftest plist-hash-table.1 (let* ((plist '(:a 1 :b 2 :c 3)) (table (plist-hash-table plist :test 'eq))) (list (hash-table-count table) (gethash :a table) (gethash :b table) (gethash :c table) (gethash 2 table) (gethash nil table) CLISP returns EXT : FASTHASH - EQ . (3 1 2 3 nil nil eq)) ;;;; Functions (deftest disjoin.1 (let ((disjunction (disjoin (lambda (x) (and (consp x) :cons)) (lambda (x) (and (stringp x) :string))))) (list (funcall disjunction 'zot) (funcall disjunction '(foo bar)) (funcall disjunction "test"))) (nil :cons :string)) (deftest disjoin.2 (let ((disjunction (disjoin #'zerop))) (list (funcall disjunction 0) (funcall disjunction 1))) (t nil)) (deftest conjoin.1 (let ((conjunction (conjoin #'consp (lambda (x) (stringp (car x))) (lambda (x) (char (car x) 0))))) (list (funcall conjunction 'zot) (funcall conjunction '(foo)) (funcall conjunction '("foo")))) (nil nil #\f)) (deftest conjoin.2 (let ((conjunction (conjoin #'zerop))) (list (funcall conjunction 0) (funcall conjunction 1))) (t nil)) (deftest compose.1 (let ((composite (compose '1+ (lambda (x) (* x 2)) #'read-from-string))) (funcall composite "1")) 3) (deftest compose.2 (let ((composite (locally (declare (notinline compose)) (compose '1+ (lambda (x) (* x 2)) #'read-from-string)))) (funcall composite "2")) 5) (deftest compose.3 (let ((compose-form (funcall (compiler-macro-function 'compose) '(compose '1+ (lambda (x) (* x 2)) #'read-from-string) nil))) (let ((fun (funcall (compile nil `(lambda () ,compose-form))))) (funcall fun "3"))) 7) (deftest compose.4 (let ((composite (compose #'zerop))) (list (funcall composite 0) (funcall composite 1))) (t nil)) (deftest multiple-value-compose.1 (let ((composite (multiple-value-compose #'truncate (lambda (x y) (values y x)) (lambda (x) (with-input-from-string (s x) (values (read s) (read s))))))) (multiple-value-list (funcall composite "2 7"))) (3 1)) (deftest 
multiple-value-compose.2 (let ((composite (locally (declare (notinline multiple-value-compose)) (multiple-value-compose #'truncate (lambda (x y) (values y x)) (lambda (x) (with-input-from-string (s x) (values (read s) (read s)))))))) (multiple-value-list (funcall composite "2 11"))) (5 1)) (deftest multiple-value-compose.3 (let ((compose-form (funcall (compiler-macro-function 'multiple-value-compose) '(multiple-value-compose #'truncate (lambda (x y) (values y x)) (lambda (x) (with-input-from-string (s x) (values (read s) (read s))))) nil))) (let ((fun (funcall (compile nil `(lambda () ,compose-form))))) (multiple-value-list (funcall fun "2 9")))) (4 1)) (deftest multiple-value-compose.4 (let ((composite (multiple-value-compose #'truncate))) (multiple-value-list (funcall composite 9 2))) (4 1)) (deftest curry.1 (let ((curried (curry '+ 3))) (funcall curried 1 5)) 9) (deftest curry.2 (let ((curried (locally (declare (notinline curry)) (curry '* 2 3)))) (funcall curried 7)) 42) (deftest curry.3 (let ((curried-form (funcall (compiler-macro-function 'curry) '(curry '/ 8) nil))) (let ((fun (funcall (compile nil `(lambda () ,curried-form))))) (funcall fun 2))) 4) (deftest rcurry.1 (let ((r (rcurry '/ 2))) (funcall r 8)) 4) (deftest named-lambda.1 (let ((fac (named-lambda fac (x) (if (> x 1) (* x (fac (- x 1))) x)))) (funcall fac 5)) 120) (deftest named-lambda.2 (let ((fac (named-lambda fac (&key x) (if (> x 1) (* x (fac :x (- x 1))) x)))) (funcall fac :x 5)) 120) ;;;; Lists (deftest alist-plist.1 (alist-plist '((a . 1) (b . 2) (c . 3))) (a 1 b 2 c 3)) (deftest plist-alist.1 (plist-alist '(a 1 b 2 c 3)) ((a . 1) (b . 2) (c . 
3))) (deftest unionf.1 (let* ((list (list 1 2 3)) (orig list)) (unionf list (list 1 2 4)) (values (equal orig (list 1 2 3)) (eql (length list) 4) (set-difference list (list 1 2 3 4)) (set-difference (list 1 2 3 4) list))) t t nil nil) (deftest nunionf.1 (let ((list (list 1 2 3))) (nunionf list (list 1 2 4)) (values (eql (length list) 4) (set-difference (list 1 2 3 4) list) (set-difference list (list 1 2 3 4)))) t nil nil) (deftest appendf.1 (let* ((list (list 1 2 3)) (orig list)) (appendf list '(4 5 6) '(7 8)) (list list (eq list orig))) ((1 2 3 4 5 6 7 8) nil)) (deftest nconcf.1 (let ((list1 (list 1 2 3)) (list2 (list 4 5 6))) (nconcf list1 list2 (list 7 8 9)) list1) (1 2 3 4 5 6 7 8 9)) (deftest circular-list.1 (let ((circle (circular-list 1 2 3))) (list (first circle) (second circle) (third circle) (fourth circle) (eq circle (nthcdr 3 circle)))) (1 2 3 1 t)) (deftest circular-list-p.1 (let* ((circle (circular-list 1 2 3 4)) (tree (list circle circle)) (dotted (cons circle t)) (proper (list 1 2 3 circle)) (tailcirc (list* 1 2 3 circle))) (list (circular-list-p circle) (circular-list-p tree) (circular-list-p dotted) (circular-list-p proper) (circular-list-p tailcirc))) (t nil nil nil t)) (deftest circular-list-p.2 (circular-list-p 'foo) nil) (deftest circular-tree-p.1 (let* ((circle (circular-list 1 2 3 4)) (tree1 (list circle circle)) (tree2 (let* ((level2 (list 1 nil 2)) (level1 (list level2))) (setf (second level2) level1) level1)) (dotted (cons circle t)) (proper (list 1 2 3 circle)) (tailcirc (list* 1 2 3 circle)) (quite-proper (list 1 2 3)) (quite-dotted (list 1 (cons 2 3)))) (list (circular-tree-p circle) (circular-tree-p tree1) (circular-tree-p tree2) (circular-tree-p dotted) (circular-tree-p proper) (circular-tree-p tailcirc) (circular-tree-p quite-proper) (circular-tree-p quite-dotted))) (t t t t t t nil nil)) (deftest proper-list-p.1 (let ((l1 (list 1)) (l2 (list 1 2)) (l3 (cons 1 2)) (l4 (list (cons 1 2) 3)) (l5 (circular-list 1 2))) (list 
(proper-list-p l1) (proper-list-p l2) (proper-list-p l3) (proper-list-p l4) (proper-list-p l5))) (t t nil t nil)) (deftest proper-list-p.2 (proper-list-p '(1 2 . 3)) nil) (deftest proper-list.type.1 (let ((l1 (list 1)) (l2 (list 1 2)) (l3 (cons 1 2)) (l4 (list (cons 1 2) 3)) (l5 (circular-list 1 2))) (list (typep l1 'proper-list) (typep l2 'proper-list) (typep l3 'proper-list) (typep l4 'proper-list) (typep l5 'proper-list))) (t t nil t nil)) (deftest proper-list-length.1 (values (proper-list-length nil) (proper-list-length (list 1)) (proper-list-length (list 2 2)) (proper-list-length (list 3 3 3)) (proper-list-length (list 4 4 4 4)) (proper-list-length (list 5 5 5 5 5)) (proper-list-length (list 6 6 6 6 6 6)) (proper-list-length (list 7 7 7 7 7 7 7)) (proper-list-length (list 8 8 8 8 8 8 8 8)) (proper-list-length (list 9 9 9 9 9 9 9 9 9))) 0 1 2 3 4 5 6 7 8 9) (deftest proper-list-length.2 (flet ((plength (x) (handler-case (proper-list-length x) (type-error () :ok)))) (values (plength (list* 1)) (plength (list* 2 2)) (plength (list* 3 3 3)) (plength (list* 4 4 4 4)) (plength (list* 5 5 5 5 5)) (plength (list* 6 6 6 6 6 6)) (plength (list* 7 7 7 7 7 7 7)) (plength (list* 8 8 8 8 8 8 8 8)) (plength (list* 9 9 9 9 9 9 9 9 9)))) :ok :ok :ok :ok :ok :ok :ok :ok :ok) (deftest lastcar.1 (let ((l1 (list 1)) (l2 (list 1 2))) (list (lastcar l1) (lastcar l2))) (1 2)) (deftest lastcar.error.2 (handler-case (progn (lastcar (circular-list 1 2 3)) nil) (error () t)) t) (deftest setf-lastcar.1 (let ((l (list 1 2 3 4))) (values (lastcar l) (progn (setf (lastcar l) 42) (lastcar l)))) 4 42) (deftest setf-lastcar.2 (let ((l (circular-list 1 2 3))) (multiple-value-bind (res err) (ignore-errors (setf (lastcar l) 4)) (typep err 'type-error))) t) (deftest make-circular-list.1 (let ((l (make-circular-list 3 :initial-element :x))) (setf (car l) :y) (list (eq l (nthcdr 3 l)) (first l) (second l) (third l) (fourth l))) (t :y :x :x :y)) (deftest circular-list.type.1 (let* ((l1 (list 1 2 3)) 
(l2 (circular-list 1 2 3)) (l3 (list* 1 2 3 l2))) (list (typep l1 'circular-list) (typep l2 'circular-list) (typep l3 'circular-list))) (nil t t)) (deftest ensure-list.1 (let ((x (list 1)) (y 2)) (list (ensure-list x) (ensure-list y))) ((1) (2))) (deftest ensure-cons.1 (let ((x (cons 1 2)) (y nil) (z "foo")) (values (ensure-cons x) (ensure-cons y) (ensure-cons z))) (1 . 2) (nil) ("foo")) (deftest setp.1 (setp '(1)) t) (deftest setp.2 (setp nil) t) (deftest setp.3 (setp "foo") nil) (deftest setp.4 (setp '(1 2 3 1)) nil) (deftest setp.5 (setp '(1 2 3)) t) (deftest setp.6 (setp '(a :a)) t) (deftest setp.7 (setp '(a :a) :key 'character) nil) (deftest setp.8 (setp '(a :a) :key 'character :test (constantly nil)) t) (deftest set-equal.1 (set-equal '(1 2 3) '(3 1 2)) t) (deftest set-equal.2 (set-equal '("Xa") '("Xb") :test (lambda (a b) (eql (char a 0) (char b 0)))) t) (deftest set-equal.3 (set-equal '(1 2) '(4 2)) nil) (deftest set-equal.4 (set-equal '(a b c) '(:a :b :c) :key 'string :test 'equal) t) (deftest set-equal.5 (set-equal '(a d c) '(:a :b :c) :key 'string :test 'equal) nil) (deftest set-equal.6 (set-equal '(a b c) '(a b c d)) nil) (deftest map-product.1 (map-product 'cons '(2 3) '(1 4)) ((2 . 1) (2 . 4) (3 . 1) (3 . 4))) (deftest map-product.2 (map-product #'cons '(2 3) '(1 4)) ((2 . 1) (2 . 4) (3 . 1) (3 . 
4))) (deftest flatten.1 (flatten '((1) 2 (((3 4))) ((((5)) 6)) 7)) (1 2 3 4 5 6 7)) (deftest remove-from-plist.1 (let ((orig '(a 1 b 2 c 3 d 4))) (list (remove-from-plist orig 'a 'c) (remove-from-plist orig 'b 'd) (remove-from-plist orig 'b) (remove-from-plist orig 'a) (remove-from-plist orig 'd 42 "zot") (remove-from-plist orig 'a 'b 'c 'd) (remove-from-plist orig 'a 'b 'c 'd 'x) (equal orig '(a 1 b 2 c 3 d 4)))) ((b 2 d 4) (a 1 c 3) (a 1 c 3 d 4) (b 2 c 3 d 4) (a 1 b 2 c 3) nil nil t)) (deftest mappend.1 (mappend (compose 'list '*) '(1 2 3) '(1 2 3)) (1 4 9)) (deftest assoc-value.1 (let ((key1 '(complex key)) (key2 'simple-key) (alist '()) (result '())) (push 1 (assoc-value alist key1 :test #'equal)) (push 2 (assoc-value alist key1 :test 'equal)) (push 42 (assoc-value alist key2)) (push 43 (assoc-value alist key2 :test 'eq)) (push (assoc-value alist key1 :test #'equal) result) (push (assoc-value alist key2) result) (push 'very (rassoc-value alist (list 2 1) :test #'equal)) (push (cdr (assoc '(very complex key) alist :test #'equal)) result) result) ((2 1) (43 42) (2 1))) ;;;; Numbers (deftest clamp.1 (list (clamp 1.5 1 2) (clamp 2.0 1 2) (clamp 1.0 1 2) (clamp 3 1 2) (clamp 0 1 2)) (1.5 2.0 1.0 2 1)) (deftest gaussian-random.1 (let ((min -0.2) (max +0.2)) (multiple-value-bind (g1 g2) (gaussian-random min max) (values (<= min g1 max) (<= min g2 max) (/= g1 g2) ;uh ))) t t t) (deftest iota.1 (iota 3) (0 1 2)) (deftest iota.2 (iota 3 :start 0.0d0) (0.0d0 1.0d0 2.0d0)) (deftest iota.3 (iota 3 :start 2 :step 3.0) (2.0 5.0 8.0)) (deftest map-iota.1 (let (all) (declare (notinline map-iota)) (values (map-iota (lambda (x) (push x all)) 3 :start 2 :step 1.1d0) all)) 3 (4.2d0 3.1d0 2.0d0)) (deftest lerp.1 (lerp 0.5 1 2) 1.5) (deftest lerp.2 (lerp 0.1 1 2) 1.1) (deftest mean.1 (mean '(1 2 3)) 2) (deftest mean.2 (mean '(1 2 3 4)) 5/2) (deftest mean.3 (mean '(1 2 10)) 13/3) (deftest median.1 (median '(100 0 99 1 98 2 97)) 97) (deftest median.2 (median '(100 0 99 1 98 2 97 96)) 
193/2) (deftest variance.1 (variance (list 1 2 3)) 2/3) (deftest standard-deviation.1 (< 0 (standard-deviation (list 1 2 3)) 1) t) (deftest maxf.1 (let ((x 1)) (maxf x 2) x) 2) (deftest maxf.2 (let ((x 1)) (maxf x 0) x) 1) (deftest maxf.3 (let ((x 1) (c 0)) (maxf x (incf c)) (list x c)) (1 1)) (deftest maxf.4 (let ((xv (vector 0 0 0)) (p 0)) (maxf (svref xv (incf p)) (incf p)) (list p xv)) (2 #(0 2 0))) (deftest minf.1 (let ((y 1)) (minf y 0) y) 0) (deftest minf.2 (let ((xv (vector 10 10 10)) (p 0)) (minf (svref xv (incf p)) (incf p)) (list p xv)) (2 #(10 2 10))) ;;;; Arrays #+nil (deftest array-index.type) #+nil (deftest copy-array) ;;;; Sequences (deftest rotate.1 (list (rotate (list 1 2 3) 0) (rotate (list 1 2 3) 1) (rotate (list 1 2 3) 2) (rotate (list 1 2 3) 3) (rotate (list 1 2 3) 4)) ((1 2 3) (3 1 2) (2 3 1) (1 2 3) (3 1 2))) (deftest rotate.2 (list (rotate (vector 1 2 3 4) 0) (rotate (vector 1 2 3 4)) (rotate (vector 1 2 3 4) 2) (rotate (vector 1 2 3 4) 3) (rotate (vector 1 2 3 4) 4) (rotate (vector 1 2 3 4) 5)) (#(1 2 3 4) #(4 1 2 3) #(3 4 1 2) #(2 3 4 1) #(1 2 3 4) #(4 1 2 3))) (deftest rotate.3 (list (rotate (list 1 2 3) 0) (rotate (list 1 2 3) -1) (rotate (list 1 2 3) -2) (rotate (list 1 2 3) -3) (rotate (list 1 2 3) -4)) ((1 2 3) (2 3 1) (3 1 2) (1 2 3) (2 3 1))) (deftest rotate.4 (list (rotate (vector 1 2 3 4) 0) (rotate (vector 1 2 3 4) -1) (rotate (vector 1 2 3 4) -2) (rotate (vector 1 2 3 4) -3) (rotate (vector 1 2 3 4) -4) (rotate (vector 1 2 3 4) -5)) (#(1 2 3 4) #(2 3 4 1) #(3 4 1 2) #(4 1 2 3) #(1 2 3 4) #(2 3 4 1))) (deftest rotate.5 (values (rotate (list 1) 17) (rotate (list 1) -5)) (1) (1)) (deftest shuffle.1 (let ((s (shuffle (iota 100)))) (list (equal s (iota 100)) (every (lambda (x) (member x s)) (iota 100)) (every (lambda (x) (typep x '(integer 0 99))) s))) (nil t t)) (deftest shuffle.2 (let ((s (shuffle (coerce (iota 100) 'vector)))) (list (equal s (coerce (iota 100) 'vector)) (every (lambda (x) (find x s)) (iota 100)) (every (lambda 
(x) (typep x '(integer 0 99))) s))) (nil t t)) (deftest random-elt.1 (let ((s1 #(1 2 3 4)) (s2 '(1 2 3 4))) (list (dotimes (i 1000 nil) (unless (member (random-elt s1) s2) (return nil)) (when (/= (random-elt s1) (random-elt s1)) (return t))) (dotimes (i 1000 nil) (unless (member (random-elt s2) s2) (return nil)) (when (/= (random-elt s2) (random-elt s2)) (return t))))) (t t)) (deftest removef.1 (let* ((x '(1 2 3)) (x* x) (y #(1 2 3)) (y* y)) (removef x 1) (removef y 3) (list x x* y y*)) ((2 3) (1 2 3) #(1 2) #(1 2 3))) (deftest deletef.1 (let* ((x (list 1 2 3)) (x* x) (y (vector 1 2 3))) (deletef x 2) (deletef y 1) (list x x* y)) ((1 3) (1 3) #(2 3))) (deftest map-permutations.1 (let ((seq (list 1 2 3)) (seen nil) (ok t)) (map-permutations (lambda (s) (unless (set-equal s seq) (setf ok nil)) (when (member s seen :test 'equal) (setf ok nil)) (push s seen)) seq :copy t) (values ok (length seen))) t 6) (deftest proper-sequence.type.1 (mapcar (lambda (x) (typep x 'proper-sequence)) (list (list 1 2 3) (vector 1 2 3) #2a((1 2) (3 4)) (circular-list 1 2 3 4))) (t t nil nil)) (deftest emptyp.1 (mapcar #'emptyp (list (list 1) (circular-list 1) nil (vector) (vector 1))) (nil nil t t nil)) (deftest sequence-of-length-p.1 (mapcar #'sequence-of-length-p (list nil #() (list 1) (vector 1) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2)) (list 0 0 1 1 2 2 1 1 4 4)) (t t t t t t nil nil nil nil)) (deftest length=.1 (mapcar #'length= (list nil #() (list 1) (vector 1) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2)) (list 0 0 1 1 2 2 1 1 4 4)) (t t t t t t nil nil nil nil)) (deftest length=.2 ;; test the compiler macro (macrolet ((x (&rest args) (funcall (compile nil `(lambda () (length= ,@args)))))) (list (x 2 '(1 2)) (x '(1 2) '(3 4)) (x '(1 2) 2) (x '(1 2) 2 '(3 4)) (x 1 2 3))) (t t t t nil)) (deftest copy-sequence.1 (let ((l (list 1 2 3)) (v (vector #\a #\b #\c))) (declare (notinline copy-sequence)) (let ((l.list (copy-sequence 
'list l)) (l.vector (copy-sequence 'vector l)) (l.spec-v (copy-sequence '(vector fixnum) l)) (v.vector (copy-sequence 'vector v)) (v.list (copy-sequence 'list v)) (v.string (copy-sequence 'string v))) (list (member l (list l.list l.vector l.spec-v)) (member v (list v.vector v.list v.string)) (equal l.list l) (equalp l.vector #(1 2 3)) (eql (upgraded-array-element-type 'fixnum) (array-element-type l.spec-v)) (equalp v.vector v) (equal v.list '(#\a #\b #\c)) (equal "abc" v.string)))) (nil nil t t t t t t)) (deftest first-elt.1 (mapcar #'first-elt (list (list 1 2 3) "abc" (vector :a :b :c))) (1 #\a :a)) (deftest first-elt.error.1 (mapcar (lambda (x) (handler-case (first-elt x) (type-error () :type-error))) (list nil #() 12 :zot)) (:type-error :type-error :type-error :type-error)) (deftest setf-first-elt.1 (let ((l (list 1 2 3)) (s (copy-seq "foobar")) (v (vector :a :b :c))) (setf (first-elt l) -1 (first-elt s) #\x (first-elt v) 'zot) (values l s v)) (-1 2 3) "xoobar" #(zot :b :c)) (deftest setf-first-elt.error.1 (let ((l 'foo)) (multiple-value-bind (res err) (ignore-errors (setf (first-elt l) 4)) (typep err 'type-error))) t) (deftest last-elt.1 (mapcar #'last-elt (list (list 1 2 3) (vector :a :b :c) "FOOBAR" #*001 #*010)) (3 :c #\R 1 0)) (deftest last-elt.error.1 (mapcar (lambda (x) (handler-case (last-elt x) (type-error () :type-error))) (list nil #() 12 :zot (circular-list 1 2 3) (list* 1 2 3 (circular-list 4 5)))) (:type-error :type-error :type-error :type-error :type-error :type-error)) (deftest setf-last-elt.1 (let ((l (list 1 2 3)) (s (copy-seq "foobar")) (b (copy-seq #*010101001))) (setf (last-elt l) '??? (last-elt s) #\? (last-elt b) 0) (values l s b)) (1 2 ???) "fooba?" 
#*010101000) (deftest setf-last-elt.error.1 (handler-case (setf (last-elt 'foo) 13) (type-error () :type-error)) :type-error) (deftest starts-with.1 (list (starts-with 1 '(1 2 3)) (starts-with 1 #(1 2 3)) (starts-with #\x "xyz") (starts-with 2 '(1 2 3)) (starts-with 3 #(1 2 3)) (starts-with 1 1) (starts-with nil nil)) (t t t nil nil nil nil)) (deftest starts-with.2 (values (starts-with 1 '(-1 2 3) :key '-) (starts-with "foo" '("foo" "bar") :test 'equal) (starts-with "f" '(#\f) :key 'string :test 'equal) (starts-with -1 '(0 1 2) :key #'1+) (starts-with "zot" '("ZOT") :test 'equal)) t t t nil nil) (deftest ends-with.1 (list (ends-with 3 '(1 2 3)) (ends-with 3 #(1 2 3)) (ends-with #\z "xyz") (ends-with 2 '(1 2 3)) (ends-with 1 #(1 2 3)) (ends-with 1 1) (ends-with nil nil)) (t t t nil nil nil nil)) (deftest ends-with.2 (values (ends-with 2 '(0 13 1) :key '1+) (ends-with "foo" (vector "bar" "foo") :test 'equal) (ends-with "X" (vector 1 2 #\X) :key 'string :test 'equal) (ends-with "foo" "foo" :test 'equal)) t t t nil) (deftest ends-with.error.1 (handler-case (ends-with 3 (circular-list 3 3 3 1 3 3)) (type-error () :type-error)) :type-error) (deftest sequences.passing-improper-lists (macrolet ((signals-error-p (form) `(handler-case (progn ,form nil) (type-error (e) t))) (cut (fn &rest args) (with-gensyms (arg) (print`(lambda (,arg) (apply ,fn (list ,@(substitute arg '_ args)))))))) (let ((circular-list (make-circular-list 5 :initial-element :foo)) (dotted-list (list* 'a 'b 'c 'd))) (loop for nth from 0 for fn in (list (cut #'lastcar _) (cut #'rotate _ 3) (cut #'rotate _ -3) (cut #'shuffle _) (cut #'random-elt _) (cut #'last-elt _) (cut #'ends-with :foo _)) nconcing (let ((on-circular-p (signals-error-p (funcall fn circular-list))) (on-dotted-p (signals-error-p (funcall fn dotted-list)))) (when (or (not on-circular-p) (not on-dotted-p)) (append (unless on-circular-p (let ((*print-circle* t)) (list (format nil "No appropriate error signalled when passing ~S to ~Ath entry." 
circular-list nth)))) (unless on-dotted-p (list (format nil "No appropriate error signalled when passing ~S to ~Ath entry." dotted-list nth))))))))) nil) (deftest with-unique-names.1 (let ((*gensym-counter* 0)) (let ((syms (with-unique-names (foo bar quux) (list foo bar quux)))) (list (find-if #'symbol-package syms) (equal '("FOO0" "BAR1" "QUUX2") (mapcar #'symbol-name syms))))) (nil t)) (deftest with-unique-names.2 (let ((*gensym-counter* 0)) (let ((syms (with-unique-names ((foo "_foo_") (bar -bar-) (quux #\q)) (list foo bar quux)))) (list (find-if #'symbol-package syms) (equal '("_foo_0" "-BAR-1" "q2") (mapcar #'symbol-name syms))))) (nil t)) (deftest with-unique-names.3 (let ((*gensym-counter* 0)) (multiple-value-bind (res err) (ignore-errors (eval '(let ((syms (with-unique-names ((foo "_foo_") (bar -bar-) (quux 42)) (list foo bar quux)))) (list (find-if #'symbol-package syms) (equal '("_foo_0" "-BAR-1" "q2") (mapcar #'symbol-name syms)))))) (errorp err))) t) (deftest once-only.1 (macrolet ((cons1.good (x) (once-only (x) `(cons ,x ,x))) (cons1.bad (x) `(cons ,x ,x))) (let ((y 0)) (list (cons1.good (incf y)) y (cons1.bad (incf y)) y))) ((1 . 1) 1 (2 . 3) 3)) (deftest once-only.2 (macrolet ((cons1 (x) (once-only ((y x)) `(cons ,y ,y)))) (let ((z 0)) (list (cons1 (incf z)) z (cons1 (incf z))))) ((1 . 1) 1 (2 . 
2))) (deftest parse-body.1 (parse-body '("doc" "body") :documentation t) ("body") nil "doc") (deftest parse-body.2 (parse-body '("body") :documentation t) ("body") nil nil) (deftest parse-body.3 (parse-body '("doc" "body")) ("doc" "body") nil nil) (deftest parse-body.4 (parse-body '((declare (foo)) "doc" (declare (bar)) body) :documentation t) (body) ((declare (foo)) (declare (bar))) "doc") (deftest parse-body.5 (parse-body '((declare (foo)) "doc" (declare (bar)) body)) ("doc" (declare (bar)) body) ((declare (foo))) nil) (deftest parse-body.6 (multiple-value-bind (res err) (ignore-errors (parse-body '("foo" "bar" "quux") :documentation t)) (errorp err)) t) ;;;; Symbols (deftest ensure-symbol.1 (ensure-symbol :cons :cl) cons :external) (deftest ensure-symbol.2 (ensure-symbol "CONS" :alexandria) cons :inherited) (deftest ensure-symbol.3 (ensure-symbol 'foo :keyword) :foo :external) (deftest ensure-symbol.4 (ensure-symbol #\* :alexandria) * :inherited) (deftest format-symbol.1 (let ((s (format-symbol nil "X-~D" 13))) (list (symbol-package s) (symbol-name s))) (nil "X-13")) (deftest format-symbol.2 (format-symbol :keyword "SYM-~A" :bolic) :sym-bolic) (deftest format-symbol.3 (let ((*package* (find-package :cl))) (format-symbol t "FIND-~A" 'package)) find-package) (deftest make-keyword.1 (list (make-keyword 'zot) (make-keyword "FOO") (make-keyword #\Q)) (:zot :foo :q)) (deftest make-gensym-list.1 (let ((*gensym-counter* 0)) (let ((syms (make-gensym-list 3 "FOO"))) (list (find-if 'symbol-package syms) (equal '("FOO0" "FOO1" "FOO2") (mapcar 'symbol-name syms))))) (nil t)) (deftest make-gensym-list.2 (let ((*gensym-counter* 0)) (let ((syms (make-gensym-list 3))) (list (find-if 'symbol-package syms) (equal '("G0" "G1" "G2") (mapcar 'symbol-name syms))))) (nil t)) ;;;; Type-system (deftest of-type.1 (locally (declare (notinline of-type)) (let ((f (of-type 'string))) (list (funcall f "foo") (funcall f 'bar)))) (t nil)) (deftest type=.1 (type= 'string 'string) t t) (deftest 
type=.2 (type= 'list '(or null cons)) t t) (deftest type=.3 (type= 'null '(and symbol list)) t t) (deftest type=.4 (type= 'string '(satisfies emptyp)) nil nil) (deftest type=.5 (type= 'string 'list) nil t) (macrolet ((test (type numbers) `(deftest ,(format-symbol t "CDR5.~A" type) (let ((numbers ,numbers)) (values (mapcar (of-type ',(format-symbol t "NEGATIVE-~A" type)) numbers) (mapcar (of-type ',(format-symbol t "NON-POSITIVE-~A" type)) numbers) (mapcar (of-type ',(format-symbol t "NON-NEGATIVE-~A" type)) numbers) (mapcar (of-type ',(format-symbol t "POSITIVE-~A" type)) numbers))) (t t t nil nil nil nil) (t t t t nil nil nil) (nil nil nil t t t t) (nil nil nil nil t t t)))) (test fixnum (list most-negative-fixnum -42 -1 0 1 42 most-positive-fixnum)) (test integer (list (1- most-negative-fixnum) -42 -1 0 1 42 (1+ most-positive-fixnum))) (test rational (list (1- most-negative-fixnum) -42/13 -1 0 1 42/13 (1+ most-positive-fixnum))) (test real (list most-negative-long-float -42/13 -1 0 1 42/13 most-positive-long-float)) (test float (list most-negative-short-float -42.02 -1.0 0.0 1.0 42.02 most-positive-short-float)) (test short-float (list most-negative-short-float -42.02s0 -1.0s0 0.0s0 1.0s0 42.02s0 most-positive-short-float)) (test single-float (list most-negative-single-float -42.02f0 -1.0f0 0.0f0 1.0f0 42.02f0 most-positive-single-float)) (test double-float (list most-negative-double-float -42.02d0 -1.0d0 0.0d0 1.0d0 42.02d0 most-positive-double-float)) (test long-float (list most-negative-long-float -42.02l0 -1.0l0 0.0l0 1.0l0 42.02l0 most-positive-long-float))) ;;;; Bindings (declaim (notinline opaque)) (defun opaque (x) x) (deftest if-let.1 (if-let (x (opaque :ok)) x :bad) :ok) (deftest if-let.2 (if-let (x (opaque nil)) :bad (and (not x) :ok)) :ok) (deftest if-let.3 (let ((x 1)) (if-let ((x 2) (y x)) (+ x y) :oops)) 3) (deftest if-let.4 (if-let ((x 1) (y nil)) :oops (and (not y) x)) 1) (deftest if-let.5 (if-let (x) :oops (not x)) t) (deftest if-let.error.1 
(handler-case (eval '(if-let x :oops :oops)) (type-error () :type-error)) :type-error) (deftest when-let.1 (when-let (x (opaque :ok)) (setf x (cons x x)) x) (:ok . :ok)) (deftest when-let.2 (when-let ((x 1) (y nil) (z 3)) :oops) nil) (deftest when-let.3 (let ((x 1)) (when-let ((x 2) (y x)) (+ x y))) 3) (deftest when-let.error.1 (handler-case (eval '(when-let x :oops)) (type-error () :type-error)) :type-error) (deftest when-let*.1 (let ((x 1)) (when-let* ((x 2) (y x)) (+ x y))) 4) (deftest when-let*.2 (let ((y 1)) (when-let* (x y) (1+ x))) 2) (deftest when-let*.3 (when-let* ((x t) (y (consp x)) (z (error "OOPS"))) t) nil) (deftest when-let*.error.1 (handler-case (eval '(when-let* x :oops)) (type-error () :type-error)) :type-error) (deftest doplist.1 (let (keys values) (doplist (k v '(a 1 b 2 c 3) (values t (reverse keys) (reverse values) k v)) (push k keys) (push v values))) t (a b c) (1 2 3) nil nil) (deftest count-permutations.1 (values (count-permutations 31 7) (count-permutations 1 1) (count-permutations 2 1) (count-permutations 2 2) (count-permutations 3 2) (count-permutations 3 1)) 13253058000 1 2 2 6 3) (deftest binomial-coefficient.1 (alexandria:binomial-coefficient 1239 139) 28794902202288970200771694600561826718847179309929858835480006683522184441358211423695124921058123706380656375919763349913245306834194782172712255592710204598527867804110129489943080460154)
null
https://raw.githubusercontent.com/mcna/alexandria/3eacfac87b27654f7ca9eeaf1ce40344b8136b03/tests.lisp
lisp
Arrays Conditions Control flow Definitions Errors in this case. Hash tables See <>. outer list was copied. inner vector wasn't copied. Functions Lists Numbers uh Arrays Sequences test the compiler macro Symbols Type-system Bindings
(in-package :cl-user) (defpackage :alexandria-tests (:use :cl :alexandria #+sbcl :sb-rt #-sbcl :rtest) (:import-from #+sbcl :sb-rt #-sbcl :rtest #:*compile-tests* #:*expected-failures*)) (in-package :alexandria-tests) (defun run-tests (&key ((:compiled *compile-tests*))) (do-tests)) (deftest copy-array.1 (let* ((orig (vector 1 2 3)) (copy (copy-array orig))) (values (eq orig copy) (equalp orig copy))) nil t) (deftest copy-array.2 (let ((orig (make-array 1024 :fill-pointer 0))) (vector-push-extend 1 orig) (vector-push-extend 2 orig) (vector-push-extend 3 orig) (let ((copy (copy-array orig))) (values (eq orig copy) (equalp orig copy) (array-has-fill-pointer-p copy) (eql (fill-pointer orig) (fill-pointer copy))))) nil t t t) (deftest array-index.1 (typep 0 'array-index) t) (deftest unwind-protect-case.1 (let (result) (unwind-protect-case () (random 10) (:normal (push :normal result)) (:abort (push :abort result)) (:always (push :always result))) result) (:always :normal)) (deftest unwind-protect-case.2 (let (result) (unwind-protect-case () (random 10) (:always (push :always result)) (:normal (push :normal result)) (:abort (push :abort result))) result) (:normal :always)) (deftest unwind-protect-case.3 (let (result1 result2 result3) (ignore-errors (unwind-protect-case () (error "FOOF!") (:normal (push :normal result1)) (:abort (push :abort result1)) (:always (push :always result1)))) (catch 'foof (unwind-protect-case () (throw 'foof 42) (:normal (push :normal result2)) (:abort (push :abort result2)) (:always (push :always result2)))) (block foof (unwind-protect-case () (return-from foof 42) (:normal (push :normal result3)) (:abort (push :abort result3)) (:always (push :always result3)))) (values result1 result2 result3)) (:always :abort) (:always :abort) (:always :abort)) (deftest unwind-protect-case.4 (let (result) (unwind-protect-case (aborted-p) (random 42) (:always (setq result aborted-p))) result) nil) (deftest unwind-protect-case.5 (let (result) (block foof 
(unwind-protect-case (aborted-p) (return-from foof) (:always (setq result aborted-p)))) result) t) (deftest switch.1 (switch (13 :test =) (12 :oops) (13.0 :yay)) :yay) (deftest switch.2 (switch (13) ((+ 12 2) :oops) ((- 13 1) :oops2) (t :yay)) :yay) (deftest eswitch.1 (let ((x 13)) (eswitch (x :test =) (12 :oops) (13.0 :yay))) :yay) (deftest eswitch.2 (let ((x 13)) (eswitch (x :key 1+) (11 :oops) (14 :yay))) :yay) (deftest cswitch.1 (cswitch (13 :test =) (12 :oops) (13.0 :yay)) :yay) (deftest cswitch.2 (cswitch (13 :key 1-) (12 :yay) (13.0 :oops)) :yay) (deftest multiple-value-prog2.1 (multiple-value-prog2 (values 1 1 1) (values 2 20 200) (values 3 3 3)) 2 20 200) (deftest nth-value-or.1 (multiple-value-bind (a b c) (nth-value-or 1 (values 1 nil 1) (values 2 2 2)) (= a b c 2)) t) (deftest whichever.1 (let ((x (whichever 1 2 3))) (and (member x '(1 2 3)) t)) t) (deftest whichever.2 (let* ((a 1) (b 2) (c 3) (x (whichever a b c))) (and (member x '(1 2 3)) t)) t) (deftest xor.1 (xor nil nil 1 nil) 1 t) (deftest xor.2 (xor nil nil 1 2) nil nil) (deftest xor.3 (xor nil nil nil) nil t) (deftest define-constant.1 (let ((name (gensym))) (eval `(define-constant ,name "FOO" :test 'equal)) (eval `(define-constant ,name "FOO" :test 'equal)) (values (equal "FOO" (symbol-value name)) (constantp name))) t t) (deftest define-constant.2 (let ((name (gensym))) (eval `(define-constant ,name 13)) (eval `(define-constant ,name 13)) (values (eql 13 (symbol-value name)) (constantp name))) t t) TYPEP is specified to return a generalized boolean and , for example , ECL exploits this by returning the superclasses of ERROR (defun errorp (x) (not (null (typep x 'error)))) (deftest required-argument.1 (multiple-value-bind (res err) (ignore-errors (required-argument)) (errorp err)) t) (deftest ensure-hash-table.1 (let ((table (make-hash-table)) (x (list 1))) (multiple-value-bind (value already-there) (ensure-gethash x table 42) (and (= value 42) (not already-there) (= 42 (gethash x table)) 
(multiple-value-bind (value2 already-there2) (ensure-gethash x table 13) (and (= value2 42) already-there2 (= 42 (gethash x table))))))) t) #+clisp (pushnew 'copy-hash-table.1 *expected-failures*) (deftest copy-hash-table.1 (let ((orig (make-hash-table :test 'eq :size 123)) (foo "foo")) (setf (gethash orig orig) t (gethash foo orig) t) (let ((eq-copy (copy-hash-table orig)) (eql-copy (copy-hash-table orig :test 'eql)) (equal-copy (copy-hash-table orig :test 'equal)) CLISP overflows the stack with this bit . #-clisp (equalp-copy (copy-hash-table orig :test 'equalp))) (list (eql (hash-table-size eq-copy) (hash-table-size orig)) (eql (hash-table-rehash-size eq-copy) (hash-table-rehash-size orig)) (hash-table-count eql-copy) (gethash orig eq-copy) (gethash (copy-seq foo) eql-copy) (gethash foo eql-copy) (gethash (copy-seq foo) equal-copy) (gethash "FOO" equal-copy) #-clisp (gethash "FOO" equalp-copy)))) (t t 2 t nil t t nil t)) (deftest copy-hash-table.2 (let ((ht (make-hash-table)) (list (list :list (vector :A :B :C)))) (setf (gethash 'list ht) list) (let* ((shallow-copy (copy-hash-table ht)) (deep1-copy (copy-hash-table ht :key 'copy-list)) (list (gethash 'list ht)) (shallow-list (gethash 'list shallow-copy)) (deep1-list (gethash 'list deep1-copy))) (list (eq ht shallow-copy) (eq ht deep1-copy) (eq list shallow-list) (eq (second list) (second shallow-list)) ))) (nil nil t nil t t)) (deftest maphash-keys.1 (let ((keys nil) (table (make-hash-table))) (declare (notinline maphash-keys)) (dotimes (i 10) (setf (gethash i table) t)) (maphash-keys (lambda (k) (push k keys)) table) (set-equal keys '(0 1 2 3 4 5 6 7 8 9))) t) (deftest maphash-values.1 (let ((vals nil) (table (make-hash-table))) (declare (notinline maphash-values)) (dotimes (i 10) (setf (gethash i table) (- i))) (maphash-values (lambda (v) (push v vals)) table) (set-equal vals '(0 -1 -2 -3 -4 -5 -6 -7 -8 -9))) t) (deftest hash-table-keys.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash i table) 
t)) (set-equal (hash-table-keys table) '(0 1 2 3 4 5 6 7 8 9))) t) (deftest hash-table-values.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash (gensym) table) i)) (set-equal (hash-table-values table) '(0 1 2 3 4 5 6 7 8 9))) t) (deftest hash-table-alist.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash i table) (- i))) (let ((alist (hash-table-alist table))) (list (length alist) (assoc 0 alist) (assoc 3 alist) (assoc 9 alist) (assoc nil alist)))) (10 (0 . 0) (3 . -3) (9 . -9) nil)) (deftest hash-table-plist.1 (let ((table (make-hash-table))) (dotimes (i 10) (setf (gethash i table) (- i))) (let ((plist (hash-table-plist table))) (list (length plist) (getf plist 0) (getf plist 2) (getf plist 7) (getf plist nil)))) (20 0 -2 -7 nil)) #+clisp (pushnew 'alist-hash-table.1 *expected-failures*) (deftest alist-hash-table.1 (let* ((alist '((0 a) (1 b) (2 c))) (table (alist-hash-table alist))) (list (hash-table-count table) (gethash 0 table) (gethash 1 table) (gethash 2 table) CLISP returns EXT : FASTHASH - EQL . (3 (a) (b) (c) eql)) #+clisp (pushnew 'plist-hash-table.1 *expected-failures*) (deftest plist-hash-table.1 (let* ((plist '(:a 1 :b 2 :c 3)) (table (plist-hash-table plist :test 'eq))) (list (hash-table-count table) (gethash :a table) (gethash :b table) (gethash :c table) (gethash 2 table) (gethash nil table) CLISP returns EXT : FASTHASH - EQ . 
(3 1 2 3 nil nil eq)) (deftest disjoin.1 (let ((disjunction (disjoin (lambda (x) (and (consp x) :cons)) (lambda (x) (and (stringp x) :string))))) (list (funcall disjunction 'zot) (funcall disjunction '(foo bar)) (funcall disjunction "test"))) (nil :cons :string)) (deftest disjoin.2 (let ((disjunction (disjoin #'zerop))) (list (funcall disjunction 0) (funcall disjunction 1))) (t nil)) (deftest conjoin.1 (let ((conjunction (conjoin #'consp (lambda (x) (stringp (car x))) (lambda (x) (char (car x) 0))))) (list (funcall conjunction 'zot) (funcall conjunction '(foo)) (funcall conjunction '("foo")))) (nil nil #\f)) (deftest conjoin.2 (let ((conjunction (conjoin #'zerop))) (list (funcall conjunction 0) (funcall conjunction 1))) (t nil)) (deftest compose.1 (let ((composite (compose '1+ (lambda (x) (* x 2)) #'read-from-string))) (funcall composite "1")) 3) (deftest compose.2 (let ((composite (locally (declare (notinline compose)) (compose '1+ (lambda (x) (* x 2)) #'read-from-string)))) (funcall composite "2")) 5) (deftest compose.3 (let ((compose-form (funcall (compiler-macro-function 'compose) '(compose '1+ (lambda (x) (* x 2)) #'read-from-string) nil))) (let ((fun (funcall (compile nil `(lambda () ,compose-form))))) (funcall fun "3"))) 7) (deftest compose.4 (let ((composite (compose #'zerop))) (list (funcall composite 0) (funcall composite 1))) (t nil)) (deftest multiple-value-compose.1 (let ((composite (multiple-value-compose #'truncate (lambda (x y) (values y x)) (lambda (x) (with-input-from-string (s x) (values (read s) (read s))))))) (multiple-value-list (funcall composite "2 7"))) (3 1)) (deftest multiple-value-compose.2 (let ((composite (locally (declare (notinline multiple-value-compose)) (multiple-value-compose #'truncate (lambda (x y) (values y x)) (lambda (x) (with-input-from-string (s x) (values (read s) (read s)))))))) (multiple-value-list (funcall composite "2 11"))) (5 1)) (deftest multiple-value-compose.3 (let ((compose-form (funcall (compiler-macro-function 
'multiple-value-compose) '(multiple-value-compose #'truncate (lambda (x y) (values y x)) (lambda (x) (with-input-from-string (s x) (values (read s) (read s))))) nil))) (let ((fun (funcall (compile nil `(lambda () ,compose-form))))) (multiple-value-list (funcall fun "2 9")))) (4 1)) (deftest multiple-value-compose.4 (let ((composite (multiple-value-compose #'truncate))) (multiple-value-list (funcall composite 9 2))) (4 1)) (deftest curry.1 (let ((curried (curry '+ 3))) (funcall curried 1 5)) 9) (deftest curry.2 (let ((curried (locally (declare (notinline curry)) (curry '* 2 3)))) (funcall curried 7)) 42) (deftest curry.3 (let ((curried-form (funcall (compiler-macro-function 'curry) '(curry '/ 8) nil))) (let ((fun (funcall (compile nil `(lambda () ,curried-form))))) (funcall fun 2))) 4) (deftest rcurry.1 (let ((r (rcurry '/ 2))) (funcall r 8)) 4) (deftest named-lambda.1 (let ((fac (named-lambda fac (x) (if (> x 1) (* x (fac (- x 1))) x)))) (funcall fac 5)) 120) (deftest named-lambda.2 (let ((fac (named-lambda fac (&key x) (if (> x 1) (* x (fac :x (- x 1))) x)))) (funcall fac :x 5)) 120) (deftest alist-plist.1 (alist-plist '((a . 1) (b . 2) (c . 3))) (a 1 b 2 c 3)) (deftest plist-alist.1 (plist-alist '(a 1 b 2 c 3)) ((a . 1) (b . 2) (c . 
3))) (deftest unionf.1 (let* ((list (list 1 2 3)) (orig list)) (unionf list (list 1 2 4)) (values (equal orig (list 1 2 3)) (eql (length list) 4) (set-difference list (list 1 2 3 4)) (set-difference (list 1 2 3 4) list))) t t nil nil) (deftest nunionf.1 (let ((list (list 1 2 3))) (nunionf list (list 1 2 4)) (values (eql (length list) 4) (set-difference (list 1 2 3 4) list) (set-difference list (list 1 2 3 4)))) t nil nil) (deftest appendf.1 (let* ((list (list 1 2 3)) (orig list)) (appendf list '(4 5 6) '(7 8)) (list list (eq list orig))) ((1 2 3 4 5 6 7 8) nil)) (deftest nconcf.1 (let ((list1 (list 1 2 3)) (list2 (list 4 5 6))) (nconcf list1 list2 (list 7 8 9)) list1) (1 2 3 4 5 6 7 8 9)) (deftest circular-list.1 (let ((circle (circular-list 1 2 3))) (list (first circle) (second circle) (third circle) (fourth circle) (eq circle (nthcdr 3 circle)))) (1 2 3 1 t)) (deftest circular-list-p.1 (let* ((circle (circular-list 1 2 3 4)) (tree (list circle circle)) (dotted (cons circle t)) (proper (list 1 2 3 circle)) (tailcirc (list* 1 2 3 circle))) (list (circular-list-p circle) (circular-list-p tree) (circular-list-p dotted) (circular-list-p proper) (circular-list-p tailcirc))) (t nil nil nil t)) (deftest circular-list-p.2 (circular-list-p 'foo) nil) (deftest circular-tree-p.1 (let* ((circle (circular-list 1 2 3 4)) (tree1 (list circle circle)) (tree2 (let* ((level2 (list 1 nil 2)) (level1 (list level2))) (setf (second level2) level1) level1)) (dotted (cons circle t)) (proper (list 1 2 3 circle)) (tailcirc (list* 1 2 3 circle)) (quite-proper (list 1 2 3)) (quite-dotted (list 1 (cons 2 3)))) (list (circular-tree-p circle) (circular-tree-p tree1) (circular-tree-p tree2) (circular-tree-p dotted) (circular-tree-p proper) (circular-tree-p tailcirc) (circular-tree-p quite-proper) (circular-tree-p quite-dotted))) (t t t t t t nil nil)) (deftest proper-list-p.1 (let ((l1 (list 1)) (l2 (list 1 2)) (l3 (cons 1 2)) (l4 (list (cons 1 2) 3)) (l5 (circular-list 1 2))) (list 
(proper-list-p l1) (proper-list-p l2) (proper-list-p l3) (proper-list-p l4) (proper-list-p l5))) (t t nil t nil)) (deftest proper-list-p.2 (proper-list-p '(1 2 . 3)) nil) (deftest proper-list.type.1 (let ((l1 (list 1)) (l2 (list 1 2)) (l3 (cons 1 2)) (l4 (list (cons 1 2) 3)) (l5 (circular-list 1 2))) (list (typep l1 'proper-list) (typep l2 'proper-list) (typep l3 'proper-list) (typep l4 'proper-list) (typep l5 'proper-list))) (t t nil t nil)) (deftest proper-list-length.1 (values (proper-list-length nil) (proper-list-length (list 1)) (proper-list-length (list 2 2)) (proper-list-length (list 3 3 3)) (proper-list-length (list 4 4 4 4)) (proper-list-length (list 5 5 5 5 5)) (proper-list-length (list 6 6 6 6 6 6)) (proper-list-length (list 7 7 7 7 7 7 7)) (proper-list-length (list 8 8 8 8 8 8 8 8)) (proper-list-length (list 9 9 9 9 9 9 9 9 9))) 0 1 2 3 4 5 6 7 8 9) (deftest proper-list-length.2 (flet ((plength (x) (handler-case (proper-list-length x) (type-error () :ok)))) (values (plength (list* 1)) (plength (list* 2 2)) (plength (list* 3 3 3)) (plength (list* 4 4 4 4)) (plength (list* 5 5 5 5 5)) (plength (list* 6 6 6 6 6 6)) (plength (list* 7 7 7 7 7 7 7)) (plength (list* 8 8 8 8 8 8 8 8)) (plength (list* 9 9 9 9 9 9 9 9 9)))) :ok :ok :ok :ok :ok :ok :ok :ok :ok) (deftest lastcar.1 (let ((l1 (list 1)) (l2 (list 1 2))) (list (lastcar l1) (lastcar l2))) (1 2)) (deftest lastcar.error.2 (handler-case (progn (lastcar (circular-list 1 2 3)) nil) (error () t)) t) (deftest setf-lastcar.1 (let ((l (list 1 2 3 4))) (values (lastcar l) (progn (setf (lastcar l) 42) (lastcar l)))) 4 42) (deftest setf-lastcar.2 (let ((l (circular-list 1 2 3))) (multiple-value-bind (res err) (ignore-errors (setf (lastcar l) 4)) (typep err 'type-error))) t) (deftest make-circular-list.1 (let ((l (make-circular-list 3 :initial-element :x))) (setf (car l) :y) (list (eq l (nthcdr 3 l)) (first l) (second l) (third l) (fourth l))) (t :y :x :x :y)) (deftest circular-list.type.1 (let* ((l1 (list 1 2 3)) 
(l2 (circular-list 1 2 3)) (l3 (list* 1 2 3 l2))) (list (typep l1 'circular-list) (typep l2 'circular-list) (typep l3 'circular-list))) (nil t t)) (deftest ensure-list.1 (let ((x (list 1)) (y 2)) (list (ensure-list x) (ensure-list y))) ((1) (2))) (deftest ensure-cons.1 (let ((x (cons 1 2)) (y nil) (z "foo")) (values (ensure-cons x) (ensure-cons y) (ensure-cons z))) (1 . 2) (nil) ("foo")) (deftest setp.1 (setp '(1)) t) (deftest setp.2 (setp nil) t) (deftest setp.3 (setp "foo") nil) (deftest setp.4 (setp '(1 2 3 1)) nil) (deftest setp.5 (setp '(1 2 3)) t) (deftest setp.6 (setp '(a :a)) t) (deftest setp.7 (setp '(a :a) :key 'character) nil) (deftest setp.8 (setp '(a :a) :key 'character :test (constantly nil)) t) (deftest set-equal.1 (set-equal '(1 2 3) '(3 1 2)) t) (deftest set-equal.2 (set-equal '("Xa") '("Xb") :test (lambda (a b) (eql (char a 0) (char b 0)))) t) (deftest set-equal.3 (set-equal '(1 2) '(4 2)) nil) (deftest set-equal.4 (set-equal '(a b c) '(:a :b :c) :key 'string :test 'equal) t) (deftest set-equal.5 (set-equal '(a d c) '(:a :b :c) :key 'string :test 'equal) nil) (deftest set-equal.6 (set-equal '(a b c) '(a b c d)) nil) (deftest map-product.1 (map-product 'cons '(2 3) '(1 4)) ((2 . 1) (2 . 4) (3 . 1) (3 . 4))) (deftest map-product.2 (map-product #'cons '(2 3) '(1 4)) ((2 . 1) (2 . 4) (3 . 1) (3 . 
4))) (deftest flatten.1 (flatten '((1) 2 (((3 4))) ((((5)) 6)) 7)) (1 2 3 4 5 6 7)) (deftest remove-from-plist.1 (let ((orig '(a 1 b 2 c 3 d 4))) (list (remove-from-plist orig 'a 'c) (remove-from-plist orig 'b 'd) (remove-from-plist orig 'b) (remove-from-plist orig 'a) (remove-from-plist orig 'd 42 "zot") (remove-from-plist orig 'a 'b 'c 'd) (remove-from-plist orig 'a 'b 'c 'd 'x) (equal orig '(a 1 b 2 c 3 d 4)))) ((b 2 d 4) (a 1 c 3) (a 1 c 3 d 4) (b 2 c 3 d 4) (a 1 b 2 c 3) nil nil t)) (deftest mappend.1 (mappend (compose 'list '*) '(1 2 3) '(1 2 3)) (1 4 9)) (deftest assoc-value.1 (let ((key1 '(complex key)) (key2 'simple-key) (alist '()) (result '())) (push 1 (assoc-value alist key1 :test #'equal)) (push 2 (assoc-value alist key1 :test 'equal)) (push 42 (assoc-value alist key2)) (push 43 (assoc-value alist key2 :test 'eq)) (push (assoc-value alist key1 :test #'equal) result) (push (assoc-value alist key2) result) (push 'very (rassoc-value alist (list 2 1) :test #'equal)) (push (cdr (assoc '(very complex key) alist :test #'equal)) result) result) ((2 1) (43 42) (2 1))) (deftest clamp.1 (list (clamp 1.5 1 2) (clamp 2.0 1 2) (clamp 1.0 1 2) (clamp 3 1 2) (clamp 0 1 2)) (1.5 2.0 1.0 2 1)) (deftest gaussian-random.1 (let ((min -0.2) (max +0.2)) (multiple-value-bind (g1 g2) (gaussian-random min max) (values (<= min g1 max) (<= min g2 max) ))) t t t) (deftest iota.1 (iota 3) (0 1 2)) (deftest iota.2 (iota 3 :start 0.0d0) (0.0d0 1.0d0 2.0d0)) (deftest iota.3 (iota 3 :start 2 :step 3.0) (2.0 5.0 8.0)) (deftest map-iota.1 (let (all) (declare (notinline map-iota)) (values (map-iota (lambda (x) (push x all)) 3 :start 2 :step 1.1d0) all)) 3 (4.2d0 3.1d0 2.0d0)) (deftest lerp.1 (lerp 0.5 1 2) 1.5) (deftest lerp.2 (lerp 0.1 1 2) 1.1) (deftest mean.1 (mean '(1 2 3)) 2) (deftest mean.2 (mean '(1 2 3 4)) 5/2) (deftest mean.3 (mean '(1 2 10)) 13/3) (deftest median.1 (median '(100 0 99 1 98 2 97)) 97) (deftest median.2 (median '(100 0 99 1 98 2 97 96)) 193/2) (deftest variance.1 
(variance (list 1 2 3)) 2/3) (deftest standard-deviation.1 (< 0 (standard-deviation (list 1 2 3)) 1) t) (deftest maxf.1 (let ((x 1)) (maxf x 2) x) 2) (deftest maxf.2 (let ((x 1)) (maxf x 0) x) 1) (deftest maxf.3 (let ((x 1) (c 0)) (maxf x (incf c)) (list x c)) (1 1)) (deftest maxf.4 (let ((xv (vector 0 0 0)) (p 0)) (maxf (svref xv (incf p)) (incf p)) (list p xv)) (2 #(0 2 0))) (deftest minf.1 (let ((y 1)) (minf y 0) y) 0) (deftest minf.2 (let ((xv (vector 10 10 10)) (p 0)) (minf (svref xv (incf p)) (incf p)) (list p xv)) (2 #(10 2 10))) #+nil (deftest array-index.type) #+nil (deftest copy-array) (deftest rotate.1 (list (rotate (list 1 2 3) 0) (rotate (list 1 2 3) 1) (rotate (list 1 2 3) 2) (rotate (list 1 2 3) 3) (rotate (list 1 2 3) 4)) ((1 2 3) (3 1 2) (2 3 1) (1 2 3) (3 1 2))) (deftest rotate.2 (list (rotate (vector 1 2 3 4) 0) (rotate (vector 1 2 3 4)) (rotate (vector 1 2 3 4) 2) (rotate (vector 1 2 3 4) 3) (rotate (vector 1 2 3 4) 4) (rotate (vector 1 2 3 4) 5)) (#(1 2 3 4) #(4 1 2 3) #(3 4 1 2) #(2 3 4 1) #(1 2 3 4) #(4 1 2 3))) (deftest rotate.3 (list (rotate (list 1 2 3) 0) (rotate (list 1 2 3) -1) (rotate (list 1 2 3) -2) (rotate (list 1 2 3) -3) (rotate (list 1 2 3) -4)) ((1 2 3) (2 3 1) (3 1 2) (1 2 3) (2 3 1))) (deftest rotate.4 (list (rotate (vector 1 2 3 4) 0) (rotate (vector 1 2 3 4) -1) (rotate (vector 1 2 3 4) -2) (rotate (vector 1 2 3 4) -3) (rotate (vector 1 2 3 4) -4) (rotate (vector 1 2 3 4) -5)) (#(1 2 3 4) #(2 3 4 1) #(3 4 1 2) #(4 1 2 3) #(1 2 3 4) #(2 3 4 1))) (deftest rotate.5 (values (rotate (list 1) 17) (rotate (list 1) -5)) (1) (1)) (deftest shuffle.1 (let ((s (shuffle (iota 100)))) (list (equal s (iota 100)) (every (lambda (x) (member x s)) (iota 100)) (every (lambda (x) (typep x '(integer 0 99))) s))) (nil t t)) (deftest shuffle.2 (let ((s (shuffle (coerce (iota 100) 'vector)))) (list (equal s (coerce (iota 100) 'vector)) (every (lambda (x) (find x s)) (iota 100)) (every (lambda (x) (typep x '(integer 0 99))) s))) (nil t t)) (deftest 
random-elt.1 (let ((s1 #(1 2 3 4)) (s2 '(1 2 3 4))) (list (dotimes (i 1000 nil) (unless (member (random-elt s1) s2) (return nil)) (when (/= (random-elt s1) (random-elt s1)) (return t))) (dotimes (i 1000 nil) (unless (member (random-elt s2) s2) (return nil)) (when (/= (random-elt s2) (random-elt s2)) (return t))))) (t t)) (deftest removef.1 (let* ((x '(1 2 3)) (x* x) (y #(1 2 3)) (y* y)) (removef x 1) (removef y 3) (list x x* y y*)) ((2 3) (1 2 3) #(1 2) #(1 2 3))) (deftest deletef.1 (let* ((x (list 1 2 3)) (x* x) (y (vector 1 2 3))) (deletef x 2) (deletef y 1) (list x x* y)) ((1 3) (1 3) #(2 3))) (deftest map-permutations.1 (let ((seq (list 1 2 3)) (seen nil) (ok t)) (map-permutations (lambda (s) (unless (set-equal s seq) (setf ok nil)) (when (member s seen :test 'equal) (setf ok nil)) (push s seen)) seq :copy t) (values ok (length seen))) t 6) (deftest proper-sequence.type.1 (mapcar (lambda (x) (typep x 'proper-sequence)) (list (list 1 2 3) (vector 1 2 3) #2a((1 2) (3 4)) (circular-list 1 2 3 4))) (t t nil nil)) (deftest emptyp.1 (mapcar #'emptyp (list (list 1) (circular-list 1) nil (vector) (vector 1))) (nil nil t t nil)) (deftest sequence-of-length-p.1 (mapcar #'sequence-of-length-p (list nil #() (list 1) (vector 1) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2)) (list 0 0 1 1 2 2 1 1 4 4)) (t t t t t t nil nil nil nil)) (deftest length=.1 (mapcar #'length= (list nil #() (list 1) (vector 1) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2) (list 1 2) (vector 1 2)) (list 0 0 1 1 2 2 1 1 4 4)) (t t t t t t nil nil nil nil)) (deftest length=.2 (macrolet ((x (&rest args) (funcall (compile nil `(lambda () (length= ,@args)))))) (list (x 2 '(1 2)) (x '(1 2) '(3 4)) (x '(1 2) 2) (x '(1 2) 2 '(3 4)) (x 1 2 3))) (t t t t nil)) (deftest copy-sequence.1 (let ((l (list 1 2 3)) (v (vector #\a #\b #\c))) (declare (notinline copy-sequence)) (let ((l.list (copy-sequence 'list l)) (l.vector (copy-sequence 'vector l)) (l.spec-v (copy-sequence '(vector 
fixnum) l)) (v.vector (copy-sequence 'vector v)) (v.list (copy-sequence 'list v)) (v.string (copy-sequence 'string v))) (list (member l (list l.list l.vector l.spec-v)) (member v (list v.vector v.list v.string)) (equal l.list l) (equalp l.vector #(1 2 3)) (eql (upgraded-array-element-type 'fixnum) (array-element-type l.spec-v)) (equalp v.vector v) (equal v.list '(#\a #\b #\c)) (equal "abc" v.string)))) (nil nil t t t t t t)) (deftest first-elt.1 (mapcar #'first-elt (list (list 1 2 3) "abc" (vector :a :b :c))) (1 #\a :a)) (deftest first-elt.error.1 (mapcar (lambda (x) (handler-case (first-elt x) (type-error () :type-error))) (list nil #() 12 :zot)) (:type-error :type-error :type-error :type-error)) (deftest setf-first-elt.1 (let ((l (list 1 2 3)) (s (copy-seq "foobar")) (v (vector :a :b :c))) (setf (first-elt l) -1 (first-elt s) #\x (first-elt v) 'zot) (values l s v)) (-1 2 3) "xoobar" #(zot :b :c)) (deftest setf-first-elt.error.1 (let ((l 'foo)) (multiple-value-bind (res err) (ignore-errors (setf (first-elt l) 4)) (typep err 'type-error))) t) (deftest last-elt.1 (mapcar #'last-elt (list (list 1 2 3) (vector :a :b :c) "FOOBAR" #*001 #*010)) (3 :c #\R 1 0)) (deftest last-elt.error.1 (mapcar (lambda (x) (handler-case (last-elt x) (type-error () :type-error))) (list nil #() 12 :zot (circular-list 1 2 3) (list* 1 2 3 (circular-list 4 5)))) (:type-error :type-error :type-error :type-error :type-error :type-error)) (deftest setf-last-elt.1 (let ((l (list 1 2 3)) (s (copy-seq "foobar")) (b (copy-seq #*010101001))) (setf (last-elt l) '??? (last-elt s) #\? (last-elt b) 0) (values l s b)) (1 2 ???) "fooba?" 
#*010101000) (deftest setf-last-elt.error.1 (handler-case (setf (last-elt 'foo) 13) (type-error () :type-error)) :type-error) (deftest starts-with.1 (list (starts-with 1 '(1 2 3)) (starts-with 1 #(1 2 3)) (starts-with #\x "xyz") (starts-with 2 '(1 2 3)) (starts-with 3 #(1 2 3)) (starts-with 1 1) (starts-with nil nil)) (t t t nil nil nil nil)) (deftest starts-with.2 (values (starts-with 1 '(-1 2 3) :key '-) (starts-with "foo" '("foo" "bar") :test 'equal) (starts-with "f" '(#\f) :key 'string :test 'equal) (starts-with -1 '(0 1 2) :key #'1+) (starts-with "zot" '("ZOT") :test 'equal)) t t t nil nil) (deftest ends-with.1 (list (ends-with 3 '(1 2 3)) (ends-with 3 #(1 2 3)) (ends-with #\z "xyz") (ends-with 2 '(1 2 3)) (ends-with 1 #(1 2 3)) (ends-with 1 1) (ends-with nil nil)) (t t t nil nil nil nil)) (deftest ends-with.2 (values (ends-with 2 '(0 13 1) :key '1+) (ends-with "foo" (vector "bar" "foo") :test 'equal) (ends-with "X" (vector 1 2 #\X) :key 'string :test 'equal) (ends-with "foo" "foo" :test 'equal)) t t t nil) (deftest ends-with.error.1 (handler-case (ends-with 3 (circular-list 3 3 3 1 3 3)) (type-error () :type-error)) :type-error) (deftest sequences.passing-improper-lists (macrolet ((signals-error-p (form) `(handler-case (progn ,form nil) (type-error (e) t))) (cut (fn &rest args) (with-gensyms (arg) (print`(lambda (,arg) (apply ,fn (list ,@(substitute arg '_ args)))))))) (let ((circular-list (make-circular-list 5 :initial-element :foo)) (dotted-list (list* 'a 'b 'c 'd))) (loop for nth from 0 for fn in (list (cut #'lastcar _) (cut #'rotate _ 3) (cut #'rotate _ -3) (cut #'shuffle _) (cut #'random-elt _) (cut #'last-elt _) (cut #'ends-with :foo _)) nconcing (let ((on-circular-p (signals-error-p (funcall fn circular-list))) (on-dotted-p (signals-error-p (funcall fn dotted-list)))) (when (or (not on-circular-p) (not on-dotted-p)) (append (unless on-circular-p (let ((*print-circle* t)) (list (format nil "No appropriate error signalled when passing ~S to ~Ath entry." 
circular-list nth)))) (unless on-dotted-p (list (format nil "No appropriate error signalled when passing ~S to ~Ath entry." dotted-list nth))))))))) nil) (deftest with-unique-names.1 (let ((*gensym-counter* 0)) (let ((syms (with-unique-names (foo bar quux) (list foo bar quux)))) (list (find-if #'symbol-package syms) (equal '("FOO0" "BAR1" "QUUX2") (mapcar #'symbol-name syms))))) (nil t)) (deftest with-unique-names.2 (let ((*gensym-counter* 0)) (let ((syms (with-unique-names ((foo "_foo_") (bar -bar-) (quux #\q)) (list foo bar quux)))) (list (find-if #'symbol-package syms) (equal '("_foo_0" "-BAR-1" "q2") (mapcar #'symbol-name syms))))) (nil t)) (deftest with-unique-names.3 (let ((*gensym-counter* 0)) (multiple-value-bind (res err) (ignore-errors (eval '(let ((syms (with-unique-names ((foo "_foo_") (bar -bar-) (quux 42)) (list foo bar quux)))) (list (find-if #'symbol-package syms) (equal '("_foo_0" "-BAR-1" "q2") (mapcar #'symbol-name syms)))))) (errorp err))) t) (deftest once-only.1 (macrolet ((cons1.good (x) (once-only (x) `(cons ,x ,x))) (cons1.bad (x) `(cons ,x ,x))) (let ((y 0)) (list (cons1.good (incf y)) y (cons1.bad (incf y)) y))) ((1 . 1) 1 (2 . 3) 3)) (deftest once-only.2 (macrolet ((cons1 (x) (once-only ((y x)) `(cons ,y ,y)))) (let ((z 0)) (list (cons1 (incf z)) z (cons1 (incf z))))) ((1 . 1) 1 (2 . 
2))) (deftest parse-body.1 (parse-body '("doc" "body") :documentation t) ("body") nil "doc") (deftest parse-body.2 (parse-body '("body") :documentation t) ("body") nil nil) (deftest parse-body.3 (parse-body '("doc" "body")) ("doc" "body") nil nil) (deftest parse-body.4 (parse-body '((declare (foo)) "doc" (declare (bar)) body) :documentation t) (body) ((declare (foo)) (declare (bar))) "doc") (deftest parse-body.5 (parse-body '((declare (foo)) "doc" (declare (bar)) body)) ("doc" (declare (bar)) body) ((declare (foo))) nil) (deftest parse-body.6 (multiple-value-bind (res err) (ignore-errors (parse-body '("foo" "bar" "quux") :documentation t)) (errorp err)) t) (deftest ensure-symbol.1 (ensure-symbol :cons :cl) cons :external) (deftest ensure-symbol.2 (ensure-symbol "CONS" :alexandria) cons :inherited) (deftest ensure-symbol.3 (ensure-symbol 'foo :keyword) :foo :external) (deftest ensure-symbol.4 (ensure-symbol #\* :alexandria) * :inherited) (deftest format-symbol.1 (let ((s (format-symbol nil "X-~D" 13))) (list (symbol-package s) (symbol-name s))) (nil "X-13")) (deftest format-symbol.2 (format-symbol :keyword "SYM-~A" :bolic) :sym-bolic) (deftest format-symbol.3 (let ((*package* (find-package :cl))) (format-symbol t "FIND-~A" 'package)) find-package) (deftest make-keyword.1 (list (make-keyword 'zot) (make-keyword "FOO") (make-keyword #\Q)) (:zot :foo :q)) (deftest make-gensym-list.1 (let ((*gensym-counter* 0)) (let ((syms (make-gensym-list 3 "FOO"))) (list (find-if 'symbol-package syms) (equal '("FOO0" "FOO1" "FOO2") (mapcar 'symbol-name syms))))) (nil t)) (deftest make-gensym-list.2 (let ((*gensym-counter* 0)) (let ((syms (make-gensym-list 3))) (list (find-if 'symbol-package syms) (equal '("G0" "G1" "G2") (mapcar 'symbol-name syms))))) (nil t)) (deftest of-type.1 (locally (declare (notinline of-type)) (let ((f (of-type 'string))) (list (funcall f "foo") (funcall f 'bar)))) (t nil)) (deftest type=.1 (type= 'string 'string) t t) (deftest type=.2 (type= 'list '(or null 
cons)) t t) (deftest type=.3 (type= 'null '(and symbol list)) t t) (deftest type=.4 (type= 'string '(satisfies emptyp)) nil nil) (deftest type=.5 (type= 'string 'list) nil t) (macrolet ((test (type numbers) `(deftest ,(format-symbol t "CDR5.~A" type) (let ((numbers ,numbers)) (values (mapcar (of-type ',(format-symbol t "NEGATIVE-~A" type)) numbers) (mapcar (of-type ',(format-symbol t "NON-POSITIVE-~A" type)) numbers) (mapcar (of-type ',(format-symbol t "NON-NEGATIVE-~A" type)) numbers) (mapcar (of-type ',(format-symbol t "POSITIVE-~A" type)) numbers))) (t t t nil nil nil nil) (t t t t nil nil nil) (nil nil nil t t t t) (nil nil nil nil t t t)))) (test fixnum (list most-negative-fixnum -42 -1 0 1 42 most-positive-fixnum)) (test integer (list (1- most-negative-fixnum) -42 -1 0 1 42 (1+ most-positive-fixnum))) (test rational (list (1- most-negative-fixnum) -42/13 -1 0 1 42/13 (1+ most-positive-fixnum))) (test real (list most-negative-long-float -42/13 -1 0 1 42/13 most-positive-long-float)) (test float (list most-negative-short-float -42.02 -1.0 0.0 1.0 42.02 most-positive-short-float)) (test short-float (list most-negative-short-float -42.02s0 -1.0s0 0.0s0 1.0s0 42.02s0 most-positive-short-float)) (test single-float (list most-negative-single-float -42.02f0 -1.0f0 0.0f0 1.0f0 42.02f0 most-positive-single-float)) (test double-float (list most-negative-double-float -42.02d0 -1.0d0 0.0d0 1.0d0 42.02d0 most-positive-double-float)) (test long-float (list most-negative-long-float -42.02l0 -1.0l0 0.0l0 1.0l0 42.02l0 most-positive-long-float))) (declaim (notinline opaque)) (defun opaque (x) x) (deftest if-let.1 (if-let (x (opaque :ok)) x :bad) :ok) (deftest if-let.2 (if-let (x (opaque nil)) :bad (and (not x) :ok)) :ok) (deftest if-let.3 (let ((x 1)) (if-let ((x 2) (y x)) (+ x y) :oops)) 3) (deftest if-let.4 (if-let ((x 1) (y nil)) :oops (and (not y) x)) 1) (deftest if-let.5 (if-let (x) :oops (not x)) t) (deftest if-let.error.1 (handler-case (eval '(if-let x :oops :oops)) 
(type-error () :type-error)) :type-error) (deftest when-let.1 (when-let (x (opaque :ok)) (setf x (cons x x)) x) (:ok . :ok)) (deftest when-let.2 (when-let ((x 1) (y nil) (z 3)) :oops) nil) (deftest when-let.3 (let ((x 1)) (when-let ((x 2) (y x)) (+ x y))) 3) (deftest when-let.error.1 (handler-case (eval '(when-let x :oops)) (type-error () :type-error)) :type-error) (deftest when-let*.1 (let ((x 1)) (when-let* ((x 2) (y x)) (+ x y))) 4) (deftest when-let*.2 (let ((y 1)) (when-let* (x y) (1+ x))) 2) (deftest when-let*.3 (when-let* ((x t) (y (consp x)) (z (error "OOPS"))) t) nil) (deftest when-let*.error.1 (handler-case (eval '(when-let* x :oops)) (type-error () :type-error)) :type-error) (deftest doplist.1 (let (keys values) (doplist (k v '(a 1 b 2 c 3) (values t (reverse keys) (reverse values) k v)) (push k keys) (push v values))) t (a b c) (1 2 3) nil nil) (deftest count-permutations.1 (values (count-permutations 31 7) (count-permutations 1 1) (count-permutations 2 1) (count-permutations 2 2) (count-permutations 3 2) (count-permutations 3 1)) 13253058000 1 2 2 6 3) (deftest binomial-coefficient.1 (alexandria:binomial-coefficient 1239 139) 28794902202288970200771694600561826718847179309929858835480006683522184441358211423695124921058123706380656375919763349913245306834194782172712255592710204598527867804110129489943080460154)
aef7df68cb27becf16f049c8d62d0b516120da9075ac1c0087a1d0ee6847364e
dinosaure/docteur
light.ml
* Copyright ( c ) 2011 Anil Madhavapeddy < > * Copyright ( c ) 2012 Citrix Systems Inc * Copyright ( c ) 2018 < > * Copyright ( c ) 2021 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2011 Anil Madhavapeddy <> * Copyright (c) 2012 Citrix Systems Inc * Copyright (c) 2018 Martin Lucina <> * Copyright (c) 2021 Romain Calascibetta <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*) open Lwt.Infix let src = Logs.Src.create "pack" ~doc:"PACK file" module Log = (val Logs.src_log src : Logs.LOG) module SHA1 = Digestif.SHA1 exception Unspecified of string let invalid_arg fmt = Fmt.kstr invalid_arg fmt let unspecified fmt = Fmt.kstr (fun str -> raise (Unspecified str)) fmt let failwith fmt = Fmt.kstr failwith fmt open Solo5_os.Solo5 type solo5_block_info = { capacity : int64; block_size : int64 } external solo5_block_acquire : string -> solo5_result * int64 * solo5_block_info = "mirage_solo5_block_acquire" external solo5_block_read : int64 -> int64 -> Cstruct.buffer -> int -> int -> solo5_result = "mirage_solo5_block_read_3" let disconnect _id = Lwt.return_unit type buffers = { z : Bigstringaf.t; allocate : int -> De.window; w : (int64 * solo5_block_info) Carton.Dec.W.t; } type t = { name : string; handle : int64; capacity : int64; block_size : int64; pack : (int64 * solo5_block_info, SHA1.t) Carton.Dec.t; root : SHA1.t; buffers : buffers Lwt_pool.t; } module Commit = Git.Commit.Make (Git.Hash.Make (SHA1)) module Tree = Git.Tree.Make (Git.Hash.Make (SHA1)) let map (handle, (info : solo5_block_info)) ~pos len = Log.debug (fun m -> m "map ~pos:%Ld ~len:%d (block_size: %Ld)." 
pos len info.block_size) ; assert (len <= Int64.to_int info.block_size) ; assert (Int64.logand pos (Int64.pred info.block_size) = 0L) ; let len = Int64.to_int info.block_size in let res = Bigstringaf.create len in match solo5_block_read handle pos res 0 len with | SOLO5_R_OK -> Log.debug (fun m -> m "mmap: @[<hov>%a@]" (Hxd_string.pp Hxd.default) (Bigstringaf.to_string res)) ; res | SOLO5_R_AGAIN -> assert false | SOLO5_R_EINVAL -> invalid_arg "Block: read(): Invalid argument" | SOLO5_R_EUNSPEC -> unspecified "Block: read(): Unspecified error" let load pack uid = let open Rresult in let weight = Carton.Dec.weight_of_uid ~map pack ~weight:Carton.Dec.null uid in let raw = Carton.Dec.make_raw ~weight in let v = Carton.Dec.of_uid ~map pack raw uid in match Carton.Dec.kind v with | `A -> let parser = Encore.to_angstrom Commit.format in Angstrom.parse_bigstring ~consume:All parser (Bigstringaf.sub (Carton.Dec.raw v) ~off:0 ~len:(Carton.Dec.len v)) |> R.reword_error (fun _ -> R.msgf "Invalid commit (%a)" SHA1.pp uid) >>| fun v -> `Commit v | `B -> let parser = Encore.to_angstrom Tree.format in Angstrom.parse_bigstring ~consume:All parser (Bigstringaf.sub (Carton.Dec.raw v) ~off:0 ~len:(Carton.Dec.len v)) |> R.reword_error (fun _ -> R.msgf "Invalid tree (%a)" SHA1.pp uid) >>| fun v -> `Tree v | `C -> R.ok (`Blob (Bigstringaf.sub (Carton.Dec.raw v) ~off:0 ~len:(Carton.Dec.len v))) | `D -> R.ok `Tag type key = Mirage_kv.Key.t type error = [ `Invalid_store | `Msg of string | `Dictionary_expected of key | `Not_found of key | `Value_expected of key ] let pp_error ppf = function | `Invalid_store -> Fmt.pf ppf "Invalid store" | `Msg err -> Fmt.string ppf err | `Not_found key -> Fmt.pf ppf "%a not found" Mirage_kv.Key.pp key | `Dictionary_expected key -> Fmt.pf ppf "%a is not a directory" Mirage_kv.Key.pp key | `Value_expected key -> Fmt.pf ppf "%a is not a file" Mirage_kv.Key.pp key let rec split ~block_size index off acc = if off = Bigstringaf.length index then List.rev acc else 
let block = Bigstringaf.sub index ~off ~len:(Int64.to_int block_size) in split ~block_size index (off + Int64.to_int block_size) (block :: acc) let read_one_block handle offset ~off ~len buffer = match solo5_block_read handle offset buffer off len with | SOLO5_R_OK -> () | SOLO5_R_AGAIN -> assert false | SOLO5_R_EINVAL -> invalid_arg "Block: read(%Lx:%d): Invalid argument" offset len | SOLO5_R_EUNSPEC -> unspecified "Block: read(%Lx:%d): Unspecified error" offset len let read handle offset bs = let rec go offset = function | [] -> () | x :: r -> read_one_block handle offset ~off:0 ~len:(Bigstringaf.length x) x ; go (Int64.add offset (Int64.of_int (Bigstringaf.length x))) r in go offset bs let make name handle (info : solo5_block_info) commit cursor = let index = Bigstringaf.create (Int64.to_int (Int64.sub info.capacity cursor)) in let blocks = split ~block_size:info.block_size index 0 [] in read handle cursor blocks ; let index = Carton.Dec.Idx.make index ~uid_ln:SHA1.digest_size ~uid_rw:SHA1.to_raw_string ~uid_wr:SHA1.of_raw_string in let z = Bigstringaf.create De.io_buffer_size in let zw = De.make_window ~bits:15 in let allocate _ = zw in let find uid = match Carton.Dec.Idx.find index uid with | Some (_, offset) -> Int64.add (Int64.of_int (SHA1.digest_size + 8)) offset | None -> failwith "%a does not exist" SHA1.pp uid in let pack = Carton.Dec.make ~sector:info.block_size (handle, info) ~allocate ~z ~uid_ln:SHA1.digest_size ~uid_rw:SHA1.of_raw_string find in let buffers = Lwt_pool.create 4 @@ fun () -> let z = Bigstringaf.create De.io_buffer_size in let w = De.make_window ~bits:15 in let allocate _ = w in let w = Carton.Dec.W.make ~sector:info.block_size (handle, info) in Lwt.return { z; allocate; w } in match load pack commit with | Ok (`Commit commit) -> let root = Commit.tree commit in Lwt.return_ok { name; handle; capacity = info.capacity; block_size = info.block_size; root; buffers; pack; } | Ok _ -> Lwt.return_error (Rresult.R.msgf "Unexpected Git object 
%a" SHA1.pp commit) | Error _ as err -> Lwt.return err let connect name = match solo5_block_acquire name with | SOLO5_R_AGAIN, _, _ -> assert false (* not returned by solo5_block_acquire *) | SOLO5_R_EINVAL, _, _ -> invalid_arg "Block: connect(%s): Invalid argument" name | SOLO5_R_EUNSPEC, _, _ -> unspecified "Block: connect(%s): Unspecified error" name | SOLO5_R_OK, handle, info -> ( let commit = Bigstringaf.create (Int64.to_int info.block_size) in match solo5_block_read handle 0L commit 0 (Int64.to_int info.block_size) with | SOLO5_R_OK -> let index = Bigstringaf.get_int64_le commit SHA1.digest_size in let commit = Bigstringaf.substring commit ~off:0 ~len:SHA1.digest_size in let commit = SHA1.of_raw_string commit in make name handle info commit index | SOLO5_R_AGAIN -> assert false | SOLO5_R_EINVAL -> invalid_arg "Block: connect(%s): Invalid argument" name | SOLO5_R_EUNSPEC -> unspecified "Block: connect(%s): Unspecified error" name) let with_ressources pack uid buffers = Lwt.catch (fun () -> let pack = Carton.Dec.with_z buffers.z pack in let pack = Carton.Dec.with_allocate ~allocate:buffers.allocate pack in let pack = Carton.Dec.with_w buffers.w pack in load pack uid |> Lwt.return) @@ fun exn -> raise exn let tree_find tree name = let res = ref None in List.iter (fun ({ Git.Tree.name = name'; _ } as entry) -> if name = name' then res := Some entry) (Git.Tree.to_list tree) ; !res let load t key = let rec fold lst hash value = match (lst, value) with | [], value -> Lwt.return_ok (hash, value) | _ :: _, (`Commit _ | `Tag | `Blob _) -> Lwt.return_error (`Value_expected key) | x :: r, `Tree tree -> match tree_find tree x with | None -> Lwt.return_error (`Not_found key) | Some { Git.Tree.node; _ } -> ( Lwt_pool.use t.buffers (with_ressources t.pack node) >>= function | Ok value -> fold r node value | Error _ as err -> Lwt.return err) in let lst = Fpath.v (Mirage_kv.Key.to_string key) in let lst = Fpath.segs lst in Lwt_pool.use t.buffers (with_ressources t.pack t.root) 
>>= function | Ok value -> fold (List.tl lst) t.root value | Error _ as err -> Lwt.return err let exists t key = load t key >>= function | Ok (_, `Blob _) -> Lwt.return_ok (Some `Value) | Ok (_, `Tree _) -> Lwt.return_ok (Some `Dictionary) | _ -> Lwt.return_ok None let get t key = load t key >>= function | Ok (_, `Blob value) -> Lwt.return_ok (Bigstringaf.to_string value) | Ok _ -> Lwt.return_error (`Value_expected key) | Error _ as err -> Lwt.return err let list t key = load t key >>= function | Ok (_, `Tree tree) -> let f acc { Git.Tree.name; perm; _ } = match perm with | `Everybody | `Normal -> (name, `Value) :: acc | `Dir -> (name, `Dictionary) :: acc | _ -> acc in let lst = List.fold_left f [] (Git.Tree.to_list tree) in Lwt.return_ok lst | Ok _ -> Lwt.return_error (`Dictionary_expected key) | Error _ as err -> Lwt.return err let digest t key = load t key >>= function | Ok (hash, _) -> Lwt.return_ok (SHA1.to_raw_string hash) | Error _ as err -> Lwt.return err let last_modified _t _key = Lwt.return_ok (0, 0L)
null
https://raw.githubusercontent.com/dinosaure/docteur/3205a30eafe94c00eab12e981a47264f7a0a76d6/solo5/light.ml
ocaml
not returned by solo5_block_acquire
* Copyright ( c ) 2011 Anil Madhavapeddy < > * Copyright ( c ) 2012 Citrix Systems Inc * Copyright ( c ) 2018 < > * Copyright ( c ) 2021 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2011 Anil Madhavapeddy <> * Copyright (c) 2012 Citrix Systems Inc * Copyright (c) 2018 Martin Lucina <> * Copyright (c) 2021 Romain Calascibetta <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
*) open Lwt.Infix let src = Logs.Src.create "pack" ~doc:"PACK file" module Log = (val Logs.src_log src : Logs.LOG) module SHA1 = Digestif.SHA1 exception Unspecified of string let invalid_arg fmt = Fmt.kstr invalid_arg fmt let unspecified fmt = Fmt.kstr (fun str -> raise (Unspecified str)) fmt let failwith fmt = Fmt.kstr failwith fmt open Solo5_os.Solo5 type solo5_block_info = { capacity : int64; block_size : int64 } external solo5_block_acquire : string -> solo5_result * int64 * solo5_block_info = "mirage_solo5_block_acquire" external solo5_block_read : int64 -> int64 -> Cstruct.buffer -> int -> int -> solo5_result = "mirage_solo5_block_read_3" let disconnect _id = Lwt.return_unit type buffers = { z : Bigstringaf.t; allocate : int -> De.window; w : (int64 * solo5_block_info) Carton.Dec.W.t; } type t = { name : string; handle : int64; capacity : int64; block_size : int64; pack : (int64 * solo5_block_info, SHA1.t) Carton.Dec.t; root : SHA1.t; buffers : buffers Lwt_pool.t; } module Commit = Git.Commit.Make (Git.Hash.Make (SHA1)) module Tree = Git.Tree.Make (Git.Hash.Make (SHA1)) let map (handle, (info : solo5_block_info)) ~pos len = Log.debug (fun m -> m "map ~pos:%Ld ~len:%d (block_size: %Ld)." 
pos len info.block_size) ; assert (len <= Int64.to_int info.block_size) ; assert (Int64.logand pos (Int64.pred info.block_size) = 0L) ; let len = Int64.to_int info.block_size in let res = Bigstringaf.create len in match solo5_block_read handle pos res 0 len with | SOLO5_R_OK -> Log.debug (fun m -> m "mmap: @[<hov>%a@]" (Hxd_string.pp Hxd.default) (Bigstringaf.to_string res)) ; res | SOLO5_R_AGAIN -> assert false | SOLO5_R_EINVAL -> invalid_arg "Block: read(): Invalid argument" | SOLO5_R_EUNSPEC -> unspecified "Block: read(): Unspecified error" let load pack uid = let open Rresult in let weight = Carton.Dec.weight_of_uid ~map pack ~weight:Carton.Dec.null uid in let raw = Carton.Dec.make_raw ~weight in let v = Carton.Dec.of_uid ~map pack raw uid in match Carton.Dec.kind v with | `A -> let parser = Encore.to_angstrom Commit.format in Angstrom.parse_bigstring ~consume:All parser (Bigstringaf.sub (Carton.Dec.raw v) ~off:0 ~len:(Carton.Dec.len v)) |> R.reword_error (fun _ -> R.msgf "Invalid commit (%a)" SHA1.pp uid) >>| fun v -> `Commit v | `B -> let parser = Encore.to_angstrom Tree.format in Angstrom.parse_bigstring ~consume:All parser (Bigstringaf.sub (Carton.Dec.raw v) ~off:0 ~len:(Carton.Dec.len v)) |> R.reword_error (fun _ -> R.msgf "Invalid tree (%a)" SHA1.pp uid) >>| fun v -> `Tree v | `C -> R.ok (`Blob (Bigstringaf.sub (Carton.Dec.raw v) ~off:0 ~len:(Carton.Dec.len v))) | `D -> R.ok `Tag type key = Mirage_kv.Key.t type error = [ `Invalid_store | `Msg of string | `Dictionary_expected of key | `Not_found of key | `Value_expected of key ] let pp_error ppf = function | `Invalid_store -> Fmt.pf ppf "Invalid store" | `Msg err -> Fmt.string ppf err | `Not_found key -> Fmt.pf ppf "%a not found" Mirage_kv.Key.pp key | `Dictionary_expected key -> Fmt.pf ppf "%a is not a directory" Mirage_kv.Key.pp key | `Value_expected key -> Fmt.pf ppf "%a is not a file" Mirage_kv.Key.pp key let rec split ~block_size index off acc = if off = Bigstringaf.length index then List.rev acc else 
let block = Bigstringaf.sub index ~off ~len:(Int64.to_int block_size) in split ~block_size index (off + Int64.to_int block_size) (block :: acc) let read_one_block handle offset ~off ~len buffer = match solo5_block_read handle offset buffer off len with | SOLO5_R_OK -> () | SOLO5_R_AGAIN -> assert false | SOLO5_R_EINVAL -> invalid_arg "Block: read(%Lx:%d): Invalid argument" offset len | SOLO5_R_EUNSPEC -> unspecified "Block: read(%Lx:%d): Unspecified error" offset len let read handle offset bs = let rec go offset = function | [] -> () | x :: r -> read_one_block handle offset ~off:0 ~len:(Bigstringaf.length x) x ; go (Int64.add offset (Int64.of_int (Bigstringaf.length x))) r in go offset bs let make name handle (info : solo5_block_info) commit cursor = let index = Bigstringaf.create (Int64.to_int (Int64.sub info.capacity cursor)) in let blocks = split ~block_size:info.block_size index 0 [] in read handle cursor blocks ; let index = Carton.Dec.Idx.make index ~uid_ln:SHA1.digest_size ~uid_rw:SHA1.to_raw_string ~uid_wr:SHA1.of_raw_string in let z = Bigstringaf.create De.io_buffer_size in let zw = De.make_window ~bits:15 in let allocate _ = zw in let find uid = match Carton.Dec.Idx.find index uid with | Some (_, offset) -> Int64.add (Int64.of_int (SHA1.digest_size + 8)) offset | None -> failwith "%a does not exist" SHA1.pp uid in let pack = Carton.Dec.make ~sector:info.block_size (handle, info) ~allocate ~z ~uid_ln:SHA1.digest_size ~uid_rw:SHA1.of_raw_string find in let buffers = Lwt_pool.create 4 @@ fun () -> let z = Bigstringaf.create De.io_buffer_size in let w = De.make_window ~bits:15 in let allocate _ = w in let w = Carton.Dec.W.make ~sector:info.block_size (handle, info) in Lwt.return { z; allocate; w } in match load pack commit with | Ok (`Commit commit) -> let root = Commit.tree commit in Lwt.return_ok { name; handle; capacity = info.capacity; block_size = info.block_size; root; buffers; pack; } | Ok _ -> Lwt.return_error (Rresult.R.msgf "Unexpected Git object 
%a" SHA1.pp commit) | Error _ as err -> Lwt.return err let connect name = match solo5_block_acquire name with | SOLO5_R_AGAIN, _, _ -> | SOLO5_R_EINVAL, _, _ -> invalid_arg "Block: connect(%s): Invalid argument" name | SOLO5_R_EUNSPEC, _, _ -> unspecified "Block: connect(%s): Unspecified error" name | SOLO5_R_OK, handle, info -> ( let commit = Bigstringaf.create (Int64.to_int info.block_size) in match solo5_block_read handle 0L commit 0 (Int64.to_int info.block_size) with | SOLO5_R_OK -> let index = Bigstringaf.get_int64_le commit SHA1.digest_size in let commit = Bigstringaf.substring commit ~off:0 ~len:SHA1.digest_size in let commit = SHA1.of_raw_string commit in make name handle info commit index | SOLO5_R_AGAIN -> assert false | SOLO5_R_EINVAL -> invalid_arg "Block: connect(%s): Invalid argument" name | SOLO5_R_EUNSPEC -> unspecified "Block: connect(%s): Unspecified error" name) let with_ressources pack uid buffers = Lwt.catch (fun () -> let pack = Carton.Dec.with_z buffers.z pack in let pack = Carton.Dec.with_allocate ~allocate:buffers.allocate pack in let pack = Carton.Dec.with_w buffers.w pack in load pack uid |> Lwt.return) @@ fun exn -> raise exn let tree_find tree name = let res = ref None in List.iter (fun ({ Git.Tree.name = name'; _ } as entry) -> if name = name' then res := Some entry) (Git.Tree.to_list tree) ; !res let load t key = let rec fold lst hash value = match (lst, value) with | [], value -> Lwt.return_ok (hash, value) | _ :: _, (`Commit _ | `Tag | `Blob _) -> Lwt.return_error (`Value_expected key) | x :: r, `Tree tree -> match tree_find tree x with | None -> Lwt.return_error (`Not_found key) | Some { Git.Tree.node; _ } -> ( Lwt_pool.use t.buffers (with_ressources t.pack node) >>= function | Ok value -> fold r node value | Error _ as err -> Lwt.return err) in let lst = Fpath.v (Mirage_kv.Key.to_string key) in let lst = Fpath.segs lst in Lwt_pool.use t.buffers (with_ressources t.pack t.root) >>= function | Ok value -> fold (List.tl lst) t.root 
value | Error _ as err -> Lwt.return err let exists t key = load t key >>= function | Ok (_, `Blob _) -> Lwt.return_ok (Some `Value) | Ok (_, `Tree _) -> Lwt.return_ok (Some `Dictionary) | _ -> Lwt.return_ok None let get t key = load t key >>= function | Ok (_, `Blob value) -> Lwt.return_ok (Bigstringaf.to_string value) | Ok _ -> Lwt.return_error (`Value_expected key) | Error _ as err -> Lwt.return err let list t key = load t key >>= function | Ok (_, `Tree tree) -> let f acc { Git.Tree.name; perm; _ } = match perm with | `Everybody | `Normal -> (name, `Value) :: acc | `Dir -> (name, `Dictionary) :: acc | _ -> acc in let lst = List.fold_left f [] (Git.Tree.to_list tree) in Lwt.return_ok lst | Ok _ -> Lwt.return_error (`Dictionary_expected key) | Error _ as err -> Lwt.return err let digest t key = load t key >>= function | Ok (hash, _) -> Lwt.return_ok (SHA1.to_raw_string hash) | Error _ as err -> Lwt.return err let last_modified _t _key = Lwt.return_ok (0, 0L)
0cdf3f7c9278e842559253afe320dd5db0d5eaf7d0751dddbac4aa6e81c8eb04
hoplon/demos
core.clj
Copyright ( c ) and . All rights reserved . ;; The use and distribution terms for this software are covered by the Eclipse Public License 1.0 ( -1.0.php ) ;; which can be found in the file epl-v10.html at the root of this distribution. ;; By using this software in any fashion, you are agreeing to be bound by ;; the terms of this license. ;; You must not remove this notice, or any other, from this software. (ns demo.core (:require [castra.middleware :as castra] [compojure.core :as c] [compojure.route :as route] [ring.adapter.jetty :refer [run-jetty]] [ring.middleware.defaults :as d] [ring.util.response :as response])) (def server (atom nil)) (c/defroutes app-routes (c/GET "/" req (response/content-type (response/resource-response "index.html") "text/html") ) (route/resources "/" {:root ""})) (def handler (-> app-routes (castra/wrap-castra 'demo.api.chat) (castra/wrap-castra-session "a 16-byte secret") (d/wrap-defaults d/api-defaults))) (defn app [port] (-> handler (run-jetty {:join? false :port port}))) (defn start-server "Start castra demo server (port 33333)." [port] (swap! server #(or % (app port)))) (defn run-task [port] (start-server port) (fn [continue] (fn [event] (continue event)))) (defn -main "I don't do a whole lot." [& args])
null
https://raw.githubusercontent.com/hoplon/demos/50d613892db0624a4f0326c1427d82f5b8e2390f/castra-chat/src/castra/demo/core.clj
clojure
The use and distribution terms for this software are covered by the which can be found in the file epl-v10.html at the root of this distribution. By using this software in any fashion, you are agreeing to be bound by the terms of this license. You must not remove this notice, or any other, from this software.
Copyright ( c ) and . All rights reserved . Eclipse Public License 1.0 ( -1.0.php ) (ns demo.core (:require [castra.middleware :as castra] [compojure.core :as c] [compojure.route :as route] [ring.adapter.jetty :refer [run-jetty]] [ring.middleware.defaults :as d] [ring.util.response :as response])) (def server (atom nil)) (c/defroutes app-routes (c/GET "/" req (response/content-type (response/resource-response "index.html") "text/html") ) (route/resources "/" {:root ""})) (def handler (-> app-routes (castra/wrap-castra 'demo.api.chat) (castra/wrap-castra-session "a 16-byte secret") (d/wrap-defaults d/api-defaults))) (defn app [port] (-> handler (run-jetty {:join? false :port port}))) (defn start-server "Start castra demo server (port 33333)." [port] (swap! server #(or % (app port)))) (defn run-task [port] (start-server port) (fn [continue] (fn [event] (continue event)))) (defn -main "I don't do a whole lot." [& args])
a854b998da94f5a5aa28aa6fab9a703a836a49424741bf9fa5cf7757af9f0531
evincarofautumn/kitten
CollectInstantiations.hs
| Module : Kitten . CollectInstantiations Description : Generic instantiation collection Copyright : ( c ) , 2016 License : MIT Maintainer : Stability : experimental Portability : GHC Module : Kitten.CollectInstantiations Description : Generic instantiation collection Copyright : (c) Jon Purdy, 2016 License : MIT Maintainer : Stability : experimental Portability : GHC -} {-# LANGUAGE OverloadedStrings #-} module Kitten.CollectInstantiations ( collectInstantiations ) where import Data.Foldable (foldrM) import Data.List (foldl') import Kitten.Dictionary (Dictionary) import Kitten.Infer (typeSize) import Kitten.Informer (Informer(..)) import Kitten.Instantiated (Instantiated(Instantiated)) import Kitten.Monad (K) import Kitten.Name (GeneralName(..), Qualified(..), Unqualified(..)) import Kitten.Queue (Queue) import Kitten.Term (Case(..), Else(..), Term(..), Value(..)) import Kitten.Type (Constructor(..), Type(..)) import Text.PrettyPrint.HughesPJClass (Pretty(..)) import qualified Kitten.Dictionary as Dictionary import qualified Kitten.Entry as Entry import qualified Kitten.Entry.Parent as Parent import qualified Kitten.Infer as Infer import qualified Kitten.Instantiate as Instantiate import qualified Kitten.Kind as Kind import qualified Kitten.Mangle as Mangle import qualified Kitten.Pretty as Pretty import qualified Kitten.Queue as Queue import qualified Kitten.TypeEnv as TypeEnv import qualified Text.PrettyPrint as Pretty -- | In order to support unboxed generics, for every call site of a generic -- definition in a program, we produce a specialized instantiation of the -- definition with the value-kinded type parameters set to the given type -- arguments. This is transitive: if a generic definition calls another generic definition with one of its own generic type parameters as a type argument , -- then an instantiation must also be generated of the called definition. 
-- -- For generic types, we also generate specializations; this is mainly to have -- the size and alignment requirements on hand during code generation. A generic -- type is instantiated if it's mentioned in the signature of any instantiated -- word definition. collectInstantiations :: Dictionary -> K Dictionary collectInstantiations dictionary0 = do -- We enqueue all the instantiation sites reachable from the top level of the -- program, and any non-generic definitions. (entries, q0) <- foldrM (\ original@(name, entry) (acc, q) -> case entry of Entry.Word category merge origin parent signature (Just term) -> do (term', q') <- go q term let entry' = Entry.Word category merge origin parent signature $ Just term' return ((name, entry') : acc, q') _ -> return (original : acc, q)) ([], Queue.empty) $ Dictionary.toList dictionary0 Next , we process the queue . Doing so may enqueue new instantiation sites for -- processing; however, this is guaranteed to halt because the number of actual -- instantiations is finite. processQueue q0 $ Dictionary.fromList entries where -- We process a definition in a single pass, mangling all its call sites and -- enqueueing them for instantiation. We perform the actual instantiation while -- processing the queue, so as to avoid redundant instantiations. go :: InstantiationQueue -> Term Type -> K (Term Type, InstantiationQueue) go q0 term = case term of Coercion{} -> proceed Compose type_ a b -> do (a', q1) <- go q0 a (b', q2) <- go q1 b return (Compose type_ a' b', q2) -- If the definition is generic, we simply ignore it; we won't find any -- instantiations in it, because it's not instantiated itself! 
Generic{} -> proceed Group{} -> error "group should not appear after linearization" Lambda type_ name varType body origin -> do (body', q1) <- go q0 body return (Lambda type_ name varType body' origin, q1) Match hint type_ cases else_ origin -> do (cases', q1) <- foldrM (\ (Case name body caseOrigin) (bodies, q) -> do (body', q') <- go q body return (Case name body' caseOrigin : bodies, q')) ([], q0) cases (else', q2) <- case else_ of Else body elseOrigin -> do (body', q') <- go q1 body return (Else body' elseOrigin, q') return (Match hint type_ cases' else' origin, q2) New{} -> proceed NewClosure{} -> proceed NewVector _ _size elementType _origin -> do q1 <- instantiateTypes dictionary0 elementType q0 return (term, q1) Push _ Quotation{} _ -> error "quotation should not appear after quotation desugaring" Push{} -> proceed Word type_ fixity (QualifiedName name) args origin -> do let types = case Dictionary.lookup (Instantiated name []) dictionary0 of -- If this is a constructor call, generate an instantiation of the -- type it's constructing. Just (Entry.Word _ _ _ (Just (Parent.Type typeName)) _ _) -> Just (typeName, args) _ -> Nothing q1 = foldr Queue.enqueue q0 types return ( Word type_ fixity (UnqualifiedName $ Unqualified $ Mangle.name $ Instantiated name args) [] origin , Queue.enqueue (name, args) q1 ) -- FIXME: Should calls to non-qualified names even be around at this point? Word{} -> proceed where proceed = return (term, q0) -- Processing the queue operates by dequeueing and instantiating each definition -- in turn. If the definition has already been instantiated, we simply proceed. 
processQueue :: InstantiationQueue -> Dictionary -> K Dictionary processQueue q dictionary = case Queue.dequeue q of Nothing -> return dictionary Just ((name, args), q') -> case Dictionary.lookup (Instantiated name args) dictionary of Just{} -> processQueue q' dictionary Nothing -> case Dictionary.lookup (Instantiated name []) dictionary of -- The name is not user-defined, so it doesn't need to be mangled. Nothing -> processQueue q' dictionary Just (Entry.Word category merge origin parent signature mTerm) -> case mTerm of Just term -> do term' <- while origin (Pretty.hsep ["instantiating", Pretty.quote name]) $ Instantiate.term TypeEnv.empty term args (term'', q'') <- go q' term' let entry' = Entry.Word category merge origin parent signature $ Just term'' processQueue q'' $ Dictionary.insert (Instantiated name args) entry' dictionary -- There should be no need to instantiate declarations, as they -- should only refer to intrinsics. Nothing -> processQueue q' dictionary Just (Entry.Type origin params ctors) -> do q'' <- foldrM (instantiateTypes dictionary0) q' args type_ <- Infer.dataType origin params ctors dictionary0 TypeValue _ size <- typeSize dictionary0 $ foldl' (:@) type_ args let entry' = Entry.InstantiatedType origin size -- Maybe generate instantiations for constructors? processQueue q'' $ Dictionary.insert (Instantiated name args) entry' dictionary Just entry -> error $ Pretty.render $ Pretty.hcat ["attempt to instantiate non-word ", Pretty.quote name, ":", pPrint entry] type InstantiationQueue = Queue Instantiation type Instantiation = (Qualified, [Type]) instantiateTypes :: Dictionary -> Type -> InstantiationQueue -> K InstantiationQueue instantiateTypes dictionary type0 q0 = go type0 where go type_ = do kind <- Infer.typeKind dictionary type_ case kind of Kind.Value -> case type_ of (:@){} -> do instantiation <- collect [] type_ return $ Queue.enqueue instantiation q0 -- TODO: Forall. 
_ -> return q0 _ -> return q0 collect args t = case t of a :@ b -> do -- go b collect (b : args) a TypeConstructor _ (Constructor name) -> do -- go a return (name, args) _ -> error "non-constructor in type application (requires HKTs)"
null
https://raw.githubusercontent.com/evincarofautumn/kitten/a5301fe24dbb9ea91974abee73ad544156ee4722/lib/Kitten/CollectInstantiations.hs
haskell
# LANGUAGE OverloadedStrings # | In order to support unboxed generics, for every call site of a generic definition in a program, we produce a specialized instantiation of the definition with the value-kinded type parameters set to the given type arguments. This is transitive: if a generic definition calls another generic then an instantiation must also be generated of the called definition. For generic types, we also generate specializations; this is mainly to have the size and alignment requirements on hand during code generation. A generic type is instantiated if it's mentioned in the signature of any instantiated word definition. We enqueue all the instantiation sites reachable from the top level of the program, and any non-generic definitions. processing; however, this is guaranteed to halt because the number of actual instantiations is finite. We process a definition in a single pass, mangling all its call sites and enqueueing them for instantiation. We perform the actual instantiation while processing the queue, so as to avoid redundant instantiations. If the definition is generic, we simply ignore it; we won't find any instantiations in it, because it's not instantiated itself! If this is a constructor call, generate an instantiation of the type it's constructing. FIXME: Should calls to non-qualified names even be around at this point? Processing the queue operates by dequeueing and instantiating each definition in turn. If the definition has already been instantiated, we simply proceed. The name is not user-defined, so it doesn't need to be mangled. There should be no need to instantiate declarations, as they should only refer to intrinsics. Maybe generate instantiations for constructors? TODO: Forall. go b go a
| Module : Kitten . CollectInstantiations Description : Generic instantiation collection Copyright : ( c ) , 2016 License : MIT Maintainer : Stability : experimental Portability : GHC Module : Kitten.CollectInstantiations Description : Generic instantiation collection Copyright : (c) Jon Purdy, 2016 License : MIT Maintainer : Stability : experimental Portability : GHC -} module Kitten.CollectInstantiations ( collectInstantiations ) where import Data.Foldable (foldrM) import Data.List (foldl') import Kitten.Dictionary (Dictionary) import Kitten.Infer (typeSize) import Kitten.Informer (Informer(..)) import Kitten.Instantiated (Instantiated(Instantiated)) import Kitten.Monad (K) import Kitten.Name (GeneralName(..), Qualified(..), Unqualified(..)) import Kitten.Queue (Queue) import Kitten.Term (Case(..), Else(..), Term(..), Value(..)) import Kitten.Type (Constructor(..), Type(..)) import Text.PrettyPrint.HughesPJClass (Pretty(..)) import qualified Kitten.Dictionary as Dictionary import qualified Kitten.Entry as Entry import qualified Kitten.Entry.Parent as Parent import qualified Kitten.Infer as Infer import qualified Kitten.Instantiate as Instantiate import qualified Kitten.Kind as Kind import qualified Kitten.Mangle as Mangle import qualified Kitten.Pretty as Pretty import qualified Kitten.Queue as Queue import qualified Kitten.TypeEnv as TypeEnv import qualified Text.PrettyPrint as Pretty definition with one of its own generic type parameters as a type argument , collectInstantiations :: Dictionary -> K Dictionary collectInstantiations dictionary0 = do (entries, q0) <- foldrM (\ original@(name, entry) (acc, q) -> case entry of Entry.Word category merge origin parent signature (Just term) -> do (term', q') <- go q term let entry' = Entry.Word category merge origin parent signature $ Just term' return ((name, entry') : acc, q') _ -> return (original : acc, q)) ([], Queue.empty) $ Dictionary.toList dictionary0 Next , we process the queue . 
Doing so may enqueue new instantiation sites for processQueue q0 $ Dictionary.fromList entries where go :: InstantiationQueue -> Term Type -> K (Term Type, InstantiationQueue) go q0 term = case term of Coercion{} -> proceed Compose type_ a b -> do (a', q1) <- go q0 a (b', q2) <- go q1 b return (Compose type_ a' b', q2) Generic{} -> proceed Group{} -> error "group should not appear after linearization" Lambda type_ name varType body origin -> do (body', q1) <- go q0 body return (Lambda type_ name varType body' origin, q1) Match hint type_ cases else_ origin -> do (cases', q1) <- foldrM (\ (Case name body caseOrigin) (bodies, q) -> do (body', q') <- go q body return (Case name body' caseOrigin : bodies, q')) ([], q0) cases (else', q2) <- case else_ of Else body elseOrigin -> do (body', q') <- go q1 body return (Else body' elseOrigin, q') return (Match hint type_ cases' else' origin, q2) New{} -> proceed NewClosure{} -> proceed NewVector _ _size elementType _origin -> do q1 <- instantiateTypes dictionary0 elementType q0 return (term, q1) Push _ Quotation{} _ -> error "quotation should not appear after quotation desugaring" Push{} -> proceed Word type_ fixity (QualifiedName name) args origin -> do let types = case Dictionary.lookup (Instantiated name []) dictionary0 of Just (Entry.Word _ _ _ (Just (Parent.Type typeName)) _ _) -> Just (typeName, args) _ -> Nothing q1 = foldr Queue.enqueue q0 types return ( Word type_ fixity (UnqualifiedName $ Unqualified $ Mangle.name $ Instantiated name args) [] origin , Queue.enqueue (name, args) q1 ) Word{} -> proceed where proceed = return (term, q0) processQueue :: InstantiationQueue -> Dictionary -> K Dictionary processQueue q dictionary = case Queue.dequeue q of Nothing -> return dictionary Just ((name, args), q') -> case Dictionary.lookup (Instantiated name args) dictionary of Just{} -> processQueue q' dictionary Nothing -> case Dictionary.lookup (Instantiated name []) dictionary of Nothing -> processQueue q' dictionary Just 
(Entry.Word category merge origin parent signature mTerm) -> case mTerm of Just term -> do term' <- while origin (Pretty.hsep ["instantiating", Pretty.quote name]) $ Instantiate.term TypeEnv.empty term args (term'', q'') <- go q' term' let entry' = Entry.Word category merge origin parent signature $ Just term'' processQueue q'' $ Dictionary.insert (Instantiated name args) entry' dictionary Nothing -> processQueue q' dictionary Just (Entry.Type origin params ctors) -> do q'' <- foldrM (instantiateTypes dictionary0) q' args type_ <- Infer.dataType origin params ctors dictionary0 TypeValue _ size <- typeSize dictionary0 $ foldl' (:@) type_ args let entry' = Entry.InstantiatedType origin size processQueue q'' $ Dictionary.insert (Instantiated name args) entry' dictionary Just entry -> error $ Pretty.render $ Pretty.hcat ["attempt to instantiate non-word ", Pretty.quote name, ":", pPrint entry] type InstantiationQueue = Queue Instantiation type Instantiation = (Qualified, [Type]) instantiateTypes :: Dictionary -> Type -> InstantiationQueue -> K InstantiationQueue instantiateTypes dictionary type0 q0 = go type0 where go type_ = do kind <- Infer.typeKind dictionary type_ case kind of Kind.Value -> case type_ of (:@){} -> do instantiation <- collect [] type_ return $ Queue.enqueue instantiation q0 _ -> return q0 _ -> return q0 collect args t = case t of a :@ b -> do collect (b : args) a TypeConstructor _ (Constructor name) -> do return (name, args) _ -> error "non-constructor in type application (requires HKTs)"
78bcb0e419724d52ee519a70f07671a2795d2987b51b272fc57e20b45081b4e0
aryx/xix
main.ml
(*s: yacc/main.ml *) s : copyright ocamlyacc * * Copyright ( C ) 2015 * * This library is free software ; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation , with the * special exception on linking described in file license.txt . * * This library is distributed in the hope that it will be useful , but * WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the file * license.txt for more details . * * Copyright (C) 2015 Yoann Padioleau * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation, with the * special exception on linking described in file license.txt. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file * license.txt for more details. *) e : copyright ocamlyacc open Ast (*****************************************************************************) (* Prelude *) (*****************************************************************************) An OCaml port of yacc . * * The original yacc is written in old in the OCaml * distribution is actually also written in C. * * todo : * - handle priorities , precedences * - EBNF support ! * * The original yacc is written in old C. ocamlyacc in the OCaml * distribution is actually also written in C. * * todo: * - handle priorities, precedences * - EBNF support! 
*) (*****************************************************************************) (* Main entry point *) (*****************************************************************************) s : function [ [ ) let main () = if Array.length Sys.argv != 2 then begin prerr_endline "Usage: ocamlyacc <input file>"; exit 2 end; let source_name = Sys.argv.(1) in let dest_name = if Filename.check_suffix source_name ".mly" then Filename.chop_suffix source_name ".mly" ^ ".ml" else source_name ^ ".ml" in let ic = open_in source_name in let lexbuf = Lexing.from_channel ic in (* parsing *) let def = try Parser.parser_definition Lexer.main lexbuf with exn -> Sys.remove dest_name; (match exn with Parsing.Parse_error -> prerr_string "Syntax error around char "; prerr_int (Lexing.lexeme_start lexbuf); prerr_endline "." | Lexer.Lexical_error s -> prerr_string "Lexical error around char "; prerr_int (Lexing.lexeme_start lexbuf); prerr_string ": "; prerr_string s; prerr_endline "." | _ -> raise exn ); exit 2 in let env = Lr0.mk_env_augmented_grammar (Ast.start_symbol def) def.grm in let automaton = Lr0.canonical_lr0_automaton env in Dump.dump_lr0_automaton env automaton; let (first, eps) = First_follow.compute_first def.grm in let follow = First_follow.compute_follow env (first, eps) in let tables = Slr.lr_tables env automaton follow in Dump.dump_lrtables env tables; let oc = open_out dest_name in Output.output_parser def env tables ic oc; close_out oc; () e : function [ [ ) (*s: toplevel [[Main._1]](yacc) *) let _ = Tests.test_lr0 ( ) ; Tests.test_first_follow ( ) ; Tests.test_slr ( ) ; Tests.test_lr_engine ( ) ; Tests.test_lr0 (); Tests.test_first_follow (); Tests.test_slr (); Tests.test_lr_engine (); *) Printexc.catch main (); exit 0 (*e: toplevel [[Main._1]](yacc) *) e : yacc / main.ml
null
https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/yacc/main.ml
ocaml
s: yacc/main.ml *************************************************************************** Prelude *************************************************************************** *************************************************************************** Main entry point *************************************************************************** parsing s: toplevel [[Main._1]](yacc) e: toplevel [[Main._1]](yacc)
s : copyright ocamlyacc * * Copyright ( C ) 2015 * * This library is free software ; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation , with the * special exception on linking described in file license.txt . * * This library is distributed in the hope that it will be useful , but * WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the file * license.txt for more details . * * Copyright (C) 2015 Yoann Padioleau * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation, with the * special exception on linking described in file license.txt. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file * license.txt for more details. *) e : copyright ocamlyacc open Ast An OCaml port of yacc . * * The original yacc is written in old in the OCaml * distribution is actually also written in C. * * todo : * - handle priorities , precedences * - EBNF support ! * * The original yacc is written in old C. ocamlyacc in the OCaml * distribution is actually also written in C. * * todo: * - handle priorities, precedences * - EBNF support! 
*) s : function [ [ ) let main () = if Array.length Sys.argv != 2 then begin prerr_endline "Usage: ocamlyacc <input file>"; exit 2 end; let source_name = Sys.argv.(1) in let dest_name = if Filename.check_suffix source_name ".mly" then Filename.chop_suffix source_name ".mly" ^ ".ml" else source_name ^ ".ml" in let ic = open_in source_name in let lexbuf = Lexing.from_channel ic in let def = try Parser.parser_definition Lexer.main lexbuf with exn -> Sys.remove dest_name; (match exn with Parsing.Parse_error -> prerr_string "Syntax error around char "; prerr_int (Lexing.lexeme_start lexbuf); prerr_endline "." | Lexer.Lexical_error s -> prerr_string "Lexical error around char "; prerr_int (Lexing.lexeme_start lexbuf); prerr_string ": "; prerr_string s; prerr_endline "." | _ -> raise exn ); exit 2 in let env = Lr0.mk_env_augmented_grammar (Ast.start_symbol def) def.grm in let automaton = Lr0.canonical_lr0_automaton env in Dump.dump_lr0_automaton env automaton; let (first, eps) = First_follow.compute_first def.grm in let follow = First_follow.compute_follow env (first, eps) in let tables = Slr.lr_tables env automaton follow in Dump.dump_lrtables env tables; let oc = open_out dest_name in Output.output_parser def env tables ic oc; close_out oc; () e : function [ [ ) let _ = Tests.test_lr0 ( ) ; Tests.test_first_follow ( ) ; Tests.test_slr ( ) ; Tests.test_lr_engine ( ) ; Tests.test_lr0 (); Tests.test_first_follow (); Tests.test_slr (); Tests.test_lr_engine (); *) Printexc.catch main (); exit 0 e : yacc / main.ml
6d9b9468008a0b71826c7411ec06c446c3c2867bfb9e258011e1832a820ef245
vikram/lisplibraries
text.lisp
Copyright ( c ) 2007 , All Rights Reserved ;;; ;;; Redistribution and use in source and binary forms, with or without ;;; modification, are permitted provided that the following conditions ;;; are met: ;;; ;;; * Redistributions of source code must retain the above copyright ;;; notice, this list of conditions and the following disclaimer. ;;; ;;; * Redistributions in binary form must reproduce the above ;;; copyright notice, this list of conditions and the following ;;; disclaimer in the documentation and/or other materials ;;; provided with the distribution. ;;; ;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED ;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL ;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE ;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , ;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING ;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;; $ I d : , v 1.8 2007/09/21 17:39:36 xach Exp $ (in-package #:vecto) (defclass font () ((loader :initarg :loader :accessor loader) (transform-matrix :initarg :transform-matrix :accessor transform-matrix) (size :initarg :size :accessor size))) (defun glyph-path-point (point) (paths:make-point (zpb-ttf:x point) (zpb-ttf:y point))) (defun glyph-paths (glyph) (let* ((paths '()) (path nil)) (zpb-ttf:do-contours (contour glyph (nreverse paths)) (when (plusp (length contour)) (let ((first-point (aref contour 0))) (setf path (paths:create-path :polygon)) (push path paths) (paths:path-reset path (glyph-path-point first-point)) (zpb-ttf:do-contour-segments* (control end) contour (if control (paths:path-extend path (paths:make-bezier-curve (list (glyph-path-point control))) (glyph-path-point end)) (paths:path-extend path (paths:make-straight-line) (glyph-path-point end))))))))) (defun string-glyphs (string loader) "Return STRING converted to a list of ZPB-TTF glyph objects from FONT." (map 'list (lambda (char) (zpb-ttf:find-glyph char loader)) string)) (defun string-paths (x y string font) "Return the paths of STRING, transformed by the font scale of FONT." (let ((glyphs (string-glyphs string (loader font))) (loader (loader font)) (matrix (mult (transform-matrix font) (translation-matrix x y))) (paths '())) (loop for (glyph . rest) on glyphs do (let ((glyph-paths (glyph-paths glyph)) (fun (make-transform-function matrix))) (dolist (path glyph-paths) (push (transform-path path fun) paths)) (when rest (let* ((next (first rest)) (offset (+ (zpb-ttf:advance-width glyph) (zpb-ttf:kerning-offset glyph next loader)))) (setf matrix (nmult (translation-matrix offset 0) matrix)))))) paths)) (defun nmerge-bounding-boxes (b1 b2) "Create a minimal bounding box that covers both B1 and B2 and destructively update B1 with its values. Returns the new box." 
(setf (xmin b1) (min (xmin b1) (xmin b2)) (ymin b1) (min (ymin b1) (ymin b2)) (xmax b1) (max (xmax b1) (xmax b2)) (ymax b1) (max (ymax b1) (ymax b2))) b1) (defun advance-bounding-box (bbox offset) "Return a bounding box advanced OFFSET units horizontally." (vector (+ (xmin bbox) offset) (ymin bbox) (+ (xmax bbox) offset) (ymax bbox))) (defun empty-bounding-box () (vector most-positive-fixnum most-positive-fixnum most-negative-fixnum most-negative-fixnum)) (defun ntransform-bounding-box (bbox fun) destructively modifies BBOX with the new values." (setf (values (xmin bbox) (ymin bbox)) (funcall fun (xmin bbox) (ymin bbox)) (values (xmax bbox) (ymax bbox)) (funcall fun (xmax bbox) (ymax bbox))) bbox) (defun loader-font-scale (size loader) "Return the horizontal and vertical scaling needed to draw the glyphs of LOADER at SIZE units." (float (/ size (zpb-ttf:units/em loader)))) (defun string-bounding-box (string size loader) (let* ((bbox (empty-bounding-box)) (scale (loader-font-scale size loader)) (fun (make-transform-function (scaling-matrix scale scale))) (glyphs (string-glyphs string loader)) (offset 0)) (loop for (glyph . rest) on glyphs do (let ((glyph-box (advance-bounding-box (bounding-box glyph) offset))) (setf bbox (nmerge-bounding-boxes bbox glyph-box)) (incf offset (zpb-ttf:advance-width glyph)) (when rest (let* ((next-glyph (first rest)) (kerning (zpb-ttf:kerning-offset glyph next-glyph loader))) (incf offset kerning))))) (ntransform-bounding-box bbox fun)))
null
https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/vecto-1.1/text.lisp
lisp
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Copyright ( c ) 2007 , All Rights Reserved DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , $ I d : , v 1.8 2007/09/21 17:39:36 xach Exp $ (in-package #:vecto) (defclass font () ((loader :initarg :loader :accessor loader) (transform-matrix :initarg :transform-matrix :accessor transform-matrix) (size :initarg :size :accessor size))) (defun glyph-path-point (point) (paths:make-point (zpb-ttf:x point) (zpb-ttf:y point))) (defun glyph-paths (glyph) (let* ((paths '()) (path nil)) (zpb-ttf:do-contours (contour glyph (nreverse paths)) (when (plusp (length contour)) (let ((first-point (aref contour 0))) (setf path (paths:create-path :polygon)) (push path paths) (paths:path-reset path (glyph-path-point first-point)) (zpb-ttf:do-contour-segments* (control end) contour (if control (paths:path-extend path (paths:make-bezier-curve (list (glyph-path-point control))) (glyph-path-point end)) (paths:path-extend path (paths:make-straight-line) (glyph-path-point end))))))))) (defun string-glyphs (string loader) "Return STRING converted to a list of ZPB-TTF glyph objects from FONT." (map 'list (lambda (char) (zpb-ttf:find-glyph char loader)) string)) (defun string-paths (x y string font) "Return the paths of STRING, transformed by the font scale of FONT." (let ((glyphs (string-glyphs string (loader font))) (loader (loader font)) (matrix (mult (transform-matrix font) (translation-matrix x y))) (paths '())) (loop for (glyph . 
rest) on glyphs do (let ((glyph-paths (glyph-paths glyph)) (fun (make-transform-function matrix))) (dolist (path glyph-paths) (push (transform-path path fun) paths)) (when rest (let* ((next (first rest)) (offset (+ (zpb-ttf:advance-width glyph) (zpb-ttf:kerning-offset glyph next loader)))) (setf matrix (nmult (translation-matrix offset 0) matrix)))))) paths)) (defun nmerge-bounding-boxes (b1 b2) "Create a minimal bounding box that covers both B1 and B2 and destructively update B1 with its values. Returns the new box." (setf (xmin b1) (min (xmin b1) (xmin b2)) (ymin b1) (min (ymin b1) (ymin b2)) (xmax b1) (max (xmax b1) (xmax b2)) (ymax b1) (max (ymax b1) (ymax b2))) b1) (defun advance-bounding-box (bbox offset) "Return a bounding box advanced OFFSET units horizontally." (vector (+ (xmin bbox) offset) (ymin bbox) (+ (xmax bbox) offset) (ymax bbox))) (defun empty-bounding-box () (vector most-positive-fixnum most-positive-fixnum most-negative-fixnum most-negative-fixnum)) (defun ntransform-bounding-box (bbox fun) destructively modifies BBOX with the new values." (setf (values (xmin bbox) (ymin bbox)) (funcall fun (xmin bbox) (ymin bbox)) (values (xmax bbox) (ymax bbox)) (funcall fun (xmax bbox) (ymax bbox))) bbox) (defun loader-font-scale (size loader) "Return the horizontal and vertical scaling needed to draw the glyphs of LOADER at SIZE units." (float (/ size (zpb-ttf:units/em loader)))) (defun string-bounding-box (string size loader) (let* ((bbox (empty-bounding-box)) (scale (loader-font-scale size loader)) (fun (make-transform-function (scaling-matrix scale scale))) (glyphs (string-glyphs string loader)) (offset 0)) (loop for (glyph . 
rest) on glyphs do (let ((glyph-box (advance-bounding-box (bounding-box glyph) offset))) (setf bbox (nmerge-bounding-boxes bbox glyph-box)) (incf offset (zpb-ttf:advance-width glyph)) (when rest (let* ((next-glyph (first rest)) (kerning (zpb-ttf:kerning-offset glyph next-glyph loader))) (incf offset kerning))))) (ntransform-bounding-box bbox fun)))
09ffdc893f47613e59331e185bac01c6eac2a5a733d332dce6b8d047f0e43985
input-output-hk/cardano-sl
Logic.hs
| Functions which operate on ] . Business logic of -- Update System. module Pos.DB.Update.Poll.Logic ( verifyAndApplyUSPayload , rollbackUS , normalizePoll , refreshPoll , filterProposalsByThd -- * Base , canCreateBlockBV * resolution , processGenesisBlock , recordBlockIssuance ) where import Pos.DB.Update.Poll.Logic.Apply import Pos.DB.Update.Poll.Logic.Base import Pos.DB.Update.Poll.Logic.Normalize import Pos.DB.Update.Poll.Logic.Rollback import Pos.DB.Update.Poll.Logic.Softfork
null
https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/db/src/Pos/DB/Update/Poll/Logic.hs
haskell
Update System. * Base
| Functions which operate on ] . Business logic of module Pos.DB.Update.Poll.Logic ( verifyAndApplyUSPayload , rollbackUS , normalizePoll , refreshPoll , filterProposalsByThd , canCreateBlockBV * resolution , processGenesisBlock , recordBlockIssuance ) where import Pos.DB.Update.Poll.Logic.Apply import Pos.DB.Update.Poll.Logic.Base import Pos.DB.Update.Poll.Logic.Normalize import Pos.DB.Update.Poll.Logic.Rollback import Pos.DB.Update.Poll.Logic.Softfork
c86a890ac1bd8a677d09c9e4f9ba02db06e8fff48b42ffc76dd2f84d50f6da7c
Mzk-Levi/texts
Show.hs
# LANGUAGE TemplateHaskell , FlexibleInstances , IncoherentInstances , ScopedTypeVariables , UndecidableInstances , KindSignatures # ScopedTypeVariables, UndecidableInstances, KindSignatures #-} -------------------------------------------------------------------------------- -- | Module : Data . Comp . . Multi . Derive . Show Copyright : ( c ) 2011 , -- License : BSD3 Maintainer : < > -- Stability : experimental Portability : non - portable ( GHC Extensions ) -- Automatically derive instances of @ShowHD@. -- -------------------------------------------------------------------------------- module Data.Comp.Param.Multi.Derive.Show ( ShowHD(..), makeShowHD ) where import Data.Comp.Derive.Utils import Data.Comp.Param.Multi.FreshM hiding (Name) import qualified Data.Comp.Param.Multi.FreshM as FreshM import Data.Comp.Param.Multi.HDifunctor import Control.Monad import Language.Haskell.TH hiding (Cxt, match) import qualified Data.Traversable as T {-| Signature printing. An instance @ShowHD f@ gives rise to an instance @Show (Term f i)@. -} class ShowHD f where showHD :: f FreshM.Name (K (FreshM String)) i -> FreshM String newtype Dummy = Dummy String instance Show Dummy where show (Dummy s) = s | Derive an instance of ' ShowHD ' for a type constructor of any parametric kind taking at least three arguments . kind taking at least three arguments. -} makeShowHD :: Name -> Q [Dec] makeShowHD fname = do TyConI (DataD _ name args constrs _) <- abstractNewtypeQ $ reify fname let args' = init args -- covariant argument let coArg :: Name = tyVarBndrName $ last args' -- contravariant argument let conArg :: Name = tyVarBndrName $ last $ init args' let argNames = map (VarT . 
tyVarBndrName) (init $ init args') let complType = foldl AppT (ConT name) argNames let classType = AppT (ConT ''ShowHD) complType constrs' :: [(Name,[Type])] <- mapM normalConExp constrs showHDDecl <- funD 'showHD (map (showHDClause conArg coArg) constrs') let context = map (\arg -> ClassP ''Show [arg]) argNames return [InstanceD context classType [showHDDecl]] where showHDClause :: Name -> Name -> (Name,[Type]) -> ClauseQ showHDClause conArg coArg (constr, args) = do varXs <- newNames (length args) "x" -- Pattern for the constructor let patx = ConP constr $ map VarP varXs body <- showHDBody (nameBase constr) conArg coArg (zip varXs args) return $ Clause [patx] (NormalB body) [] showHDBody :: String -> Name -> Name -> [(Name, Type)] -> ExpQ showHDBody constr conArg coArg x = [|liftM (unwords . (constr :) . map (\x -> if elem ' ' x then "(" ++ x ++ ")" else x)) (sequence $(listE $ map (showHDB conArg coArg) x))|] showHDB :: Name -> Name -> (Name, Type) -> ExpQ showHDB conArg coArg (x, tp) | not (containsType tp (VarT conArg)) && not (containsType tp (VarT coArg)) = [| return $ show $(varE x) |] | otherwise = case tp of AppT (VarT a) _ | a == coArg -> [| unK $(varE x) |] AppT (AppT ArrowT (AppT (VarT a) _)) _ | a == conArg -> [| withName (\v -> do body <- (unK . $(varE x)) v return $ "\\" ++ show v ++ " -> " ++ body) |] SigT tp' _ -> showHDB conArg coArg (x, tp') _ -> if containsType tp (VarT conArg) then [| showHD $(varE x) |] else [| liftM show $ T.mapM (liftM Dummy . unK) $(varE x) |]
null
https://raw.githubusercontent.com/Mzk-Levi/texts/34916d6531af9bc39e50b596247ac2017d8cfdc3/compdata-param-master/src/Data/Comp/Param/Multi/Derive/Show.hs
haskell
------------------------------------------------------------------------------ | License : BSD3 Stability : experimental ------------------------------------------------------------------------------ | Signature printing. An instance @ShowHD f@ gives rise to an instance @Show (Term f i)@. covariant argument contravariant argument Pattern for the constructor
# LANGUAGE TemplateHaskell , FlexibleInstances , IncoherentInstances , ScopedTypeVariables , UndecidableInstances , KindSignatures # ScopedTypeVariables, UndecidableInstances, KindSignatures #-} Module : Data . Comp . . Multi . Derive . Show Copyright : ( c ) 2011 , Maintainer : < > Portability : non - portable ( GHC Extensions ) Automatically derive instances of @ShowHD@. module Data.Comp.Param.Multi.Derive.Show ( ShowHD(..), makeShowHD ) where import Data.Comp.Derive.Utils import Data.Comp.Param.Multi.FreshM hiding (Name) import qualified Data.Comp.Param.Multi.FreshM as FreshM import Data.Comp.Param.Multi.HDifunctor import Control.Monad import Language.Haskell.TH hiding (Cxt, match) import qualified Data.Traversable as T class ShowHD f where showHD :: f FreshM.Name (K (FreshM String)) i -> FreshM String newtype Dummy = Dummy String instance Show Dummy where show (Dummy s) = s | Derive an instance of ' ShowHD ' for a type constructor of any parametric kind taking at least three arguments . kind taking at least three arguments. -} makeShowHD :: Name -> Q [Dec] makeShowHD fname = do TyConI (DataD _ name args constrs _) <- abstractNewtypeQ $ reify fname let args' = init args let coArg :: Name = tyVarBndrName $ last args' let conArg :: Name = tyVarBndrName $ last $ init args' let argNames = map (VarT . 
tyVarBndrName) (init $ init args') let complType = foldl AppT (ConT name) argNames let classType = AppT (ConT ''ShowHD) complType constrs' :: [(Name,[Type])] <- mapM normalConExp constrs showHDDecl <- funD 'showHD (map (showHDClause conArg coArg) constrs') let context = map (\arg -> ClassP ''Show [arg]) argNames return [InstanceD context classType [showHDDecl]] where showHDClause :: Name -> Name -> (Name,[Type]) -> ClauseQ showHDClause conArg coArg (constr, args) = do varXs <- newNames (length args) "x" let patx = ConP constr $ map VarP varXs body <- showHDBody (nameBase constr) conArg coArg (zip varXs args) return $ Clause [patx] (NormalB body) [] showHDBody :: String -> Name -> Name -> [(Name, Type)] -> ExpQ showHDBody constr conArg coArg x = [|liftM (unwords . (constr :) . map (\x -> if elem ' ' x then "(" ++ x ++ ")" else x)) (sequence $(listE $ map (showHDB conArg coArg) x))|] showHDB :: Name -> Name -> (Name, Type) -> ExpQ showHDB conArg coArg (x, tp) | not (containsType tp (VarT conArg)) && not (containsType tp (VarT coArg)) = [| return $ show $(varE x) |] | otherwise = case tp of AppT (VarT a) _ | a == coArg -> [| unK $(varE x) |] AppT (AppT ArrowT (AppT (VarT a) _)) _ | a == conArg -> [| withName (\v -> do body <- (unK . $(varE x)) v return $ "\\" ++ show v ++ " -> " ++ body) |] SigT tp' _ -> showHDB conArg coArg (x, tp') _ -> if containsType tp (VarT conArg) then [| showHD $(varE x) |] else [| liftM show $ T.mapM (liftM Dummy . unK) $(varE x) |]
b82bcf759fe15db71a7ea744ad82f3e781d047d73eb835118ccf59bff4b8e30f
zyrolasting/racket-koans
syntax.rkt
#lang racket/base (require rackunit (for-syntax racket/base syntax/parse) syntax/macro-testing) ;; `syntax` creates a syntax object (check-equal? (syntax? (syntax A)) "true") ;; `#'` is short for `syntax`, similar to how `'` is short for `quote` (check-equal? (syntax? #'A) "true") ;; `syntax-e` extracts the datum from a syntax object (check-equal? (syntax-e #'A) 'B) (define (first-stx stx) (define x* (syntax-e stx)) (car x*)) (check-equal? (syntax-e (first-stx #'(1 2 3))) 2) ;; `#`` is short for `quasisyntax` and `#,` is short for `unsyntax` (define four 4) (check-equal? (syntax-e #`#,four) (syntax-e #'#,four)) ;; Syntax objects are Racket's internal representation of Racket source code. ;; ;; When you run Racket on a file, for example `racket koans/syntax.rkt`, Racket _ reads _ the program into one big syntax object . ;; ;; After reading, Racket _expands_ this syntax object into a well-formed ;; program. (well-formed = no unbound identifiers, no single-armed `if` ;; statements, etc.) ;; ;; And after expanding, Racket runs the program. (The expander is a ;; program that runs to produce the program you originally wanted to ;; run.) ;; ;; `define-syntax` binds an identifier to an expansion-time function. (define-syntax (f stx) #'42) (check-equal? (f) "the left side _expands_ to 42") ;; An expansion-time function that takes a syntax object as input and computes ;; a new syntax object is called a _syntax transformer_ or a _macro_. If `f` ;; is a macro and `(f x)` appears in a program, then the expander will invoke ;; `f` with a syntax object representing the application `(f x)`. (define-syntax (count-args stx) (define fn-and-args-list (syntax-e stx)) (define args-list (cdr fn-and-args-list)) (define num-args (length args-list)) #`#,num-args) (check-equal? (count-args) "`count-args` applied to #'(count-args)") (check-equal? (count-args 1 2 3) "`count-args` applied to #'(count-args 1 2 3)") (check-equal? 
(count-args unbound-id) "`unbound-id` disappears before the program runs") ;; `syntax-parse` is a pattern-matcher for syntax objects (define-syntax (count-args2 stx) (syntax-parse stx [(?fn ?arg* ...) (define args-list (syntax-e #'(?fn ?arg* ...))) (define num-args (length args-list)) #`#,num-args])) (check-not-exn (lambda () (convert-compile-time-error (let () (check-equal? (count-args2 'A) "there is an error in the definition of `count-args2`") (check-equal? (count-args2 a b c d) 4))))) ;; `syntax-parse` is helpful for writing macros, and macros are helpful for ;; programming your programs. (define-syntax (check-all-equal? stx) (syntax-parse stx [(_) #'(void)] [(_ ?e0 ?e1) #'(check-equal? "?e0" "?e1")] [(_ [?e0* ?e1*] ...) #'(begin (check-equal? ?e0* ?e1*) ...)])) (check-all-equal?) (check-all-equal? 2 0) (check-all-equal? [2 "there is an error in the definition of `check-all-equal?`"] [3 3] ['A 'A] ["hello" "hello"])
null
https://raw.githubusercontent.com/zyrolasting/racket-koans/09acc058244ed88d7addd434677426cd8580bb3c/koans/syntax.rkt
racket
`syntax` creates a syntax object `#'` is short for `syntax`, similar to how `'` is short for `quote` `syntax-e` extracts the datum from a syntax object `#`` is short for `quasisyntax` and `#,` is short for `unsyntax` Syntax objects are Racket's internal representation of Racket source code. When you run Racket on a file, for example `racket koans/syntax.rkt`, After reading, Racket _expands_ this syntax object into a well-formed program. (well-formed = no unbound identifiers, no single-armed `if` statements, etc.) And after expanding, Racket runs the program. (The expander is a program that runs to produce the program you originally wanted to run.) `define-syntax` binds an identifier to an expansion-time function. An expansion-time function that takes a syntax object as input and computes a new syntax object is called a _syntax transformer_ or a _macro_. If `f` is a macro and `(f x)` appears in a program, then the expander will invoke `f` with a syntax object representing the application `(f x)`. `syntax-parse` is a pattern-matcher for syntax objects `syntax-parse` is helpful for writing macros, and macros are helpful for programming your programs.
#lang racket/base (require rackunit (for-syntax racket/base syntax/parse) syntax/macro-testing) (check-equal? (syntax? (syntax A)) "true") (check-equal? (syntax? #'A) "true") (check-equal? (syntax-e #'A) 'B) (define (first-stx stx) (define x* (syntax-e stx)) (car x*)) (check-equal? (syntax-e (first-stx #'(1 2 3))) 2) (define four 4) (check-equal? (syntax-e #`#,four) (syntax-e #'#,four)) Racket _ reads _ the program into one big syntax object . (define-syntax (f stx) #'42) (check-equal? (f) "the left side _expands_ to 42") (define-syntax (count-args stx) (define fn-and-args-list (syntax-e stx)) (define args-list (cdr fn-and-args-list)) (define num-args (length args-list)) #`#,num-args) (check-equal? (count-args) "`count-args` applied to #'(count-args)") (check-equal? (count-args 1 2 3) "`count-args` applied to #'(count-args 1 2 3)") (check-equal? (count-args unbound-id) "`unbound-id` disappears before the program runs") (define-syntax (count-args2 stx) (syntax-parse stx [(?fn ?arg* ...) (define args-list (syntax-e #'(?fn ?arg* ...))) (define num-args (length args-list)) #`#,num-args])) (check-not-exn (lambda () (convert-compile-time-error (let () (check-equal? (count-args2 'A) "there is an error in the definition of `count-args2`") (check-equal? (count-args2 a b c d) 4))))) (define-syntax (check-all-equal? stx) (syntax-parse stx [(_) #'(void)] [(_ ?e0 ?e1) #'(check-equal? "?e0" "?e1")] [(_ [?e0* ?e1*] ...) #'(begin (check-equal? ?e0* ?e1*) ...)])) (check-all-equal?) (check-all-equal? 2 0) (check-all-equal? [2 "there is an error in the definition of `check-all-equal?`"] [3 3] ['A 'A] ["hello" "hello"])
b29df4dc16b7d9cb9f9d8423ebda5424cc2aecbc1675e58a12bfb2adc3d08407
logseq/logseq
marker.cljs
(ns frontend.util.marker "Task (formerly todo) related util fns" (:require [clojure.string :as string] [frontend.util :as util])) (defn marker-pattern [format] (re-pattern (str "^" (if (= format :markdown) "(#+\\s+)?" "(\\*+\\s+)?") "(NOW|LATER|TODO|DOING|DONE|WAITING|WAIT|CANCELED|CANCELLED|IN-PROGRESS)?\\s?"))) (def bare-marker-pattern #"(NOW|LATER|TODO|DOING|DONE|WAITING|WAIT|CANCELED|CANCELLED|IN-PROGRESS){1}\s+") (defn add-or-update-marker [content format marker] (let [[re-pattern new-line-re-pattern] (case format :org [#"^\*+\s" #"\n\*+\s"] (:markdown :md) [#"^#+\s" #"\n#+\s"] ;; failback to markdown [#"^#+\s" #"\n#+\s"]) pos (if-let [matches (seq (util/re-pos new-line-re-pattern content))] (let [[start-pos content] (last matches)] (+ start-pos (count content))) (count (util/safe-re-find re-pattern content))) new-content (str (subs content 0 pos) (string/replace-first (subs content pos) (marker-pattern format) (str marker (if (empty? marker) "" " "))))] new-content)) (defn cycle-marker-state [marker preferred-workflow] (case marker "TODO" "DOING" "DOING" "DONE" "LATER" "NOW" "NOW" "DONE" "DONE" nil (if (= :now preferred-workflow) "LATER" "TODO"))) (defn cycle-marker "The cycle-marker will cycle markers sequentially. You can find all its order in `cycle-marker-state`. It also accepts the specified `marker` and `new-marker`. If you don't specify it, it will automatically find it based on `content`. Returns [new-content new-marker]." [content marker new-marker format preferred-workflow] (let [content (string/triml content) marker (or (not-empty marker) Returns the last matching group ( ) new-marker (or (not-empty new-marker) (cycle-marker-state marker preferred-workflow))] [(add-or-update-marker content format new-marker) new-marker]))
null
https://raw.githubusercontent.com/logseq/logseq/e05f28672a3af487a57aa4fff416fb292fd41b3d/src/main/frontend/util/marker.cljs
clojure
failback to markdown
(ns frontend.util.marker "Task (formerly todo) related util fns" (:require [clojure.string :as string] [frontend.util :as util])) (defn marker-pattern [format] (re-pattern (str "^" (if (= format :markdown) "(#+\\s+)?" "(\\*+\\s+)?") "(NOW|LATER|TODO|DOING|DONE|WAITING|WAIT|CANCELED|CANCELLED|IN-PROGRESS)?\\s?"))) (def bare-marker-pattern #"(NOW|LATER|TODO|DOING|DONE|WAITING|WAIT|CANCELED|CANCELLED|IN-PROGRESS){1}\s+") (defn add-or-update-marker [content format marker] (let [[re-pattern new-line-re-pattern] (case format :org [#"^\*+\s" #"\n\*+\s"] (:markdown :md) [#"^#+\s" #"\n#+\s"] [#"^#+\s" #"\n#+\s"]) pos (if-let [matches (seq (util/re-pos new-line-re-pattern content))] (let [[start-pos content] (last matches)] (+ start-pos (count content))) (count (util/safe-re-find re-pattern content))) new-content (str (subs content 0 pos) (string/replace-first (subs content pos) (marker-pattern format) (str marker (if (empty? marker) "" " "))))] new-content)) (defn cycle-marker-state [marker preferred-workflow] (case marker "TODO" "DOING" "DOING" "DONE" "LATER" "NOW" "NOW" "DONE" "DONE" nil (if (= :now preferred-workflow) "LATER" "TODO"))) (defn cycle-marker "The cycle-marker will cycle markers sequentially. You can find all its order in `cycle-marker-state`. It also accepts the specified `marker` and `new-marker`. If you don't specify it, it will automatically find it based on `content`. Returns [new-content new-marker]." [content marker new-marker format preferred-workflow] (let [content (string/triml content) marker (or (not-empty marker) Returns the last matching group ( ) new-marker (or (not-empty new-marker) (cycle-marker-state marker preferred-workflow))] [(add-or-update-marker content format new-marker) new-marker]))
944d6f2751b0ff811067d46c6f3c6a7a07279841e541bf6c1930d488e25173cc
icicle-lang/icicle-ambiata
Base.hs
# LANGUAGE LambdaCase # # LANGUAGE NoImplicitPrelude # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE PatternGuards # # LANGUAGE ViewPatterns # module Icicle.Sea.FromAvalanche.Base ( SeaString , takeSeaString , seaOfChar , seaOfString , seaOfEscaped , seaOfTime , seaError , seaError' , assign , suffix , tuple ) where import Data.Char (ord) import qualified Data.List as List import qualified Data.Text as Text import Icicle.Data import Icicle.Data.Time (packedOfTime) import Icicle.Internal.Pretty import Numeric (showHex) import P import Text.Printf (printf) ------------------------------------------------------------------------ -- | A value that will need be quoted in C. newtype SeaString = SeaString { getSeaString :: Text } deriving (Eq, Ord, Show) takeSeaString :: SeaString -> Text takeSeaString = getSeaString ------------------------------------------------------------------------ seaOfChar :: Char -> Doc seaOfChar c = "'" <> text (escapeChars [c]) <> "'" seaOfString :: Text -> Doc seaOfString txt = "\"" <> seaOfEscaped txt <> "\"" seaOfEscaped :: Text -> Doc seaOfEscaped = text . escapeChars . 
Text.unpack escapeChars :: [Char] -> [Char] escapeChars = \case [] -> [] ('\a':xs) -> "\\a" <> escapeChars xs ('\b':xs) -> "\\b" <> escapeChars xs ('\t':xs) -> "\\t" <> escapeChars xs ('\r':xs) -> "\\r" <> escapeChars xs ('\v':xs) -> "\\v" <> escapeChars xs ('\f':xs) -> "\\f" <> escapeChars xs ('\n':xs) -> "\\n" <> escapeChars xs ('\\':xs) -> "\\\\" <> escapeChars xs ('\"':xs) -> "\\\"" <> escapeChars xs (x:xs) | ord x < 0x20 || ord x >= 0x7f && ord x <= 0xff -> printf "\\%03o" (ord x) <> escapeChars xs | ord x < 0x7f -> x : escapeChars xs | otherwise -> printf "\\U%08x" (ord x) <> escapeChars xs ------------------------------------------------------------------------ seaOfTime :: Time -> Doc seaOfTime x = text ("0x" <> showHex (packedOfTime x) "") ------------------------------------------------------------------------ seaError :: Show a => Doc -> a -> Doc seaError subject x = seaError' subject (text (List.take 100 (show x))) seaError' :: Doc -> Doc -> Doc seaError' subject msg = line <> "#error Failed during codegen (" <> subject <> ": " <> msg <> "..)" <> line assign :: Doc -> Doc -> Doc assign x y = x <> column (\k -> indent (45-k) " =") <+> y suffix :: Doc -> Doc suffix annot = column (\k -> indent (85-k) (" /*" <+> annot <+> "*/")) tuple :: [Doc] -> Doc tuple [] = "()" tuple [x] = "(" <> x <> ")" tuple xs = "(" <> go xs where go [] = ")" -- impossible go (y:[]) = y <> ")" go (y:ys) = y <> ", " <> go ys
null
https://raw.githubusercontent.com/icicle-lang/icicle-ambiata/9b9cc45a75f66603007e4db7e5f3ba908cae2df2/icicle-compiler/src/Icicle/Sea/FromAvalanche/Base.hs
haskell
# LANGUAGE OverloadedStrings # ---------------------------------------------------------------------- | A value that will need be quoted in C. ---------------------------------------------------------------------- ---------------------------------------------------------------------- ---------------------------------------------------------------------- impossible
# LANGUAGE LambdaCase # # LANGUAGE NoImplicitPrelude # # LANGUAGE PatternGuards # # LANGUAGE ViewPatterns # module Icicle.Sea.FromAvalanche.Base ( SeaString , takeSeaString , seaOfChar , seaOfString , seaOfEscaped , seaOfTime , seaError , seaError' , assign , suffix , tuple ) where import Data.Char (ord) import qualified Data.List as List import qualified Data.Text as Text import Icicle.Data import Icicle.Data.Time (packedOfTime) import Icicle.Internal.Pretty import Numeric (showHex) import P import Text.Printf (printf) newtype SeaString = SeaString { getSeaString :: Text } deriving (Eq, Ord, Show) takeSeaString :: SeaString -> Text takeSeaString = getSeaString seaOfChar :: Char -> Doc seaOfChar c = "'" <> text (escapeChars [c]) <> "'" seaOfString :: Text -> Doc seaOfString txt = "\"" <> seaOfEscaped txt <> "\"" seaOfEscaped :: Text -> Doc seaOfEscaped = text . escapeChars . Text.unpack escapeChars :: [Char] -> [Char] escapeChars = \case [] -> [] ('\a':xs) -> "\\a" <> escapeChars xs ('\b':xs) -> "\\b" <> escapeChars xs ('\t':xs) -> "\\t" <> escapeChars xs ('\r':xs) -> "\\r" <> escapeChars xs ('\v':xs) -> "\\v" <> escapeChars xs ('\f':xs) -> "\\f" <> escapeChars xs ('\n':xs) -> "\\n" <> escapeChars xs ('\\':xs) -> "\\\\" <> escapeChars xs ('\"':xs) -> "\\\"" <> escapeChars xs (x:xs) | ord x < 0x20 || ord x >= 0x7f && ord x <= 0xff -> printf "\\%03o" (ord x) <> escapeChars xs | ord x < 0x7f -> x : escapeChars xs | otherwise -> printf "\\U%08x" (ord x) <> escapeChars xs seaOfTime :: Time -> Doc seaOfTime x = text ("0x" <> showHex (packedOfTime x) "") seaError :: Show a => Doc -> a -> Doc seaError subject x = seaError' subject (text (List.take 100 (show x))) seaError' :: Doc -> Doc -> Doc seaError' subject msg = line <> "#error Failed during codegen (" <> subject <> ": " <> msg <> "..)" <> line assign :: Doc -> Doc -> Doc assign x y = x <> column (\k -> indent (45-k) " =") <+> y suffix :: Doc -> Doc suffix annot = column (\k -> indent (85-k) (" /*" <+> annot <+> "*/")) 
tuple :: [Doc] -> Doc tuple [] = "()" tuple [x] = "(" <> x <> ")" tuple xs = "(" <> go xs where go (y:[]) = y <> ")" go (y:ys) = y <> ", " <> go ys
87a4a02b1c9ed585436ea8bb838e6300e9d2921258d8ae776910b01b39e00cf6
NoRedInk/haskell-libraries
Encoding.hs
-- | Turns `Examples` into a `Test` module Test.Encoding (examplesToTest) where import qualified Examples import qualified Expect import qualified GHC.Stack as Stack import NriPrelude import System.FilePath ((</>)) import qualified System.FilePath as FilePath import Test (Test, test) import qualified Text -- | Creates tests for some examples examplesToTest :: Stack.HasCallStack => Text -> Text -> Examples.Examples -> Test examplesToTest name fileName examples = test name <| \() -> Expect.equalToContentsOf ( "test" </> "golden-results" </> Text.toList fileName |> FilePath.makeValid |> Text.fromList ) (Examples.render examples)
null
https://raw.githubusercontent.com/NoRedInk/haskell-libraries/90e7a06dc90e2ec10351b4efb631abf5f2aca6d8/nri-test-encoding/src/Test/Encoding.hs
haskell
| Turns `Examples` into a `Test` | Creates tests for some examples
module Test.Encoding (examplesToTest) where import qualified Examples import qualified Expect import qualified GHC.Stack as Stack import NriPrelude import System.FilePath ((</>)) import qualified System.FilePath as FilePath import Test (Test, test) import qualified Text examplesToTest :: Stack.HasCallStack => Text -> Text -> Examples.Examples -> Test examplesToTest name fileName examples = test name <| \() -> Expect.equalToContentsOf ( "test" </> "golden-results" </> Text.toList fileName |> FilePath.makeValid |> Text.fromList ) (Examples.render examples)
be4b3363f1ad2b83b2648cdd5900906eae0b1388a3be771699880cc9a5e2a924
farr/mcmc-ocaml
asserts.ml
open OUnit let assert_equal_float ?(epsabs = 1e-8) ?(epsrel = 1e-8) ?(msg = "") = assert_equal ~msg:msg ~cmp:(cmp_float ~epsabs:epsabs ~epsrel:epsrel) ~printer:string_of_float let assert_equal_float_array ?(epsabs = 1e-8) ?(epsrel = 1e-8) ?(msg = "assert_equal_float_array") x y = let n = Array.length x in assert_bool (Printf.sprintf "%s: array lengths differ" msg) (Array.length y = n); for i = 0 to n - 1 do assert_equal_float ~epsabs:epsabs ~epsrel:epsrel ~msg:msg x.(i) y.(i) done let assert_equal_float_matrix ?(epsabs = 1e-8) ?(epsrel = 1e-8) ?(msg = "assert_equal_float_matrix") x y = let n = Array.length x in assert_bool (Printf.sprintf "%s: sizes differ" msg) (Array.length y = n); for i = 0 to n - 1 do assert_equal_float_array ~epsabs:epsabs ~epsrel:epsrel ~msg:(Printf.sprintf "%s: matrix row differs" msg) x.(i) y.(i) done
null
https://raw.githubusercontent.com/farr/mcmc-ocaml/56e46b8539cb8d6fada2cb09810363adf18f4c60/test/asserts.ml
ocaml
open OUnit let assert_equal_float ?(epsabs = 1e-8) ?(epsrel = 1e-8) ?(msg = "") = assert_equal ~msg:msg ~cmp:(cmp_float ~epsabs:epsabs ~epsrel:epsrel) ~printer:string_of_float let assert_equal_float_array ?(epsabs = 1e-8) ?(epsrel = 1e-8) ?(msg = "assert_equal_float_array") x y = let n = Array.length x in assert_bool (Printf.sprintf "%s: array lengths differ" msg) (Array.length y = n); for i = 0 to n - 1 do assert_equal_float ~epsabs:epsabs ~epsrel:epsrel ~msg:msg x.(i) y.(i) done let assert_equal_float_matrix ?(epsabs = 1e-8) ?(epsrel = 1e-8) ?(msg = "assert_equal_float_matrix") x y = let n = Array.length x in assert_bool (Printf.sprintf "%s: sizes differ" msg) (Array.length y = n); for i = 0 to n - 1 do assert_equal_float_array ~epsabs:epsabs ~epsrel:epsrel ~msg:(Printf.sprintf "%s: matrix row differs" msg) x.(i) y.(i) done
37ed21d52a1f50fae3d87661e712eae8883cb459c79f84eed8d0c118b242de81
music-suite/music-suite
Literal.hs
------------------------------------------------------------------------------------- ------------------------------------------------------------------------------------- -- | Copyright : ( c ) 2012 -- -- License : BSD-style -- Maintainer : -- Stability : experimental -- Portability : portable -- -- Provides overloaded pitch and interval literals. module Music.Pitch.Literal ( module Music.Pitch.Literal.Pitch, module Music.Pitch.Literal.Interval, ) where import Music.Pitch.Literal.Interval import Music.Pitch.Literal.Pitch
null
https://raw.githubusercontent.com/music-suite/music-suite/7f01fd62334c66418043b7a2d662af127f98685d/src/Music/Pitch/Literal.hs
haskell
----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- | License : BSD-style Stability : experimental Portability : portable Provides overloaded pitch and interval literals.
Copyright : ( c ) 2012 Maintainer : module Music.Pitch.Literal ( module Music.Pitch.Literal.Pitch, module Music.Pitch.Literal.Interval, ) where import Music.Pitch.Literal.Interval import Music.Pitch.Literal.Pitch
2dc7599033b09cd867f2d65e488eb45e7dd3b49731b24110f73484c9862f9a89
hyperfiddle/electric
routes_nested1.clj
; demo entrypoint {:hyperfiddle.api/route [:user.demo-entrypoint/hfql-teeshirt-orders], :user.demo-entrypoint/hfql-teeshirt-orders {:wip.teeshirt-orders/route [wip.orders-datascript/one-order 10]}} ; teeshirt-orders scope {:wip.teeshirt-orders/route [wip.orders-datascript/one-order 10]} (comment {} {::demo-entrypoint {}} {::hello-world {}} {::system-properties {}} {::system-properties {::search "java"}} ; what's this? {::hello-world {} ::system-properties {}} ; the state should be able to express branching {:hyperfiddle.api/route [:user.demo-entrypoint/router], :user.demo-entrypoint/router {:wip.demo-branched-route/right nil, :wip.demo-branched-route/left nil, [wip.orders-datascript/orders .] {:needle "bob"}}} {:user.demo-entrypoint/router {'[wip.orders-datascript/orders .] {:needle "root"} :wip.demo-branched-route/left {'[wip.orders-datascript/orders .] {:needle "left"}} :wip.demo-branched-route/right {'[wip.orders-datascript/orders .] {:needle "right"}}}} {:hyperfiddle.api/route [:user.demo-entrypoint/router], :user.demo-entrypoint/router {[wip.orders-datascript/orders .] {:needle "root"} :wip.demo-branched-route/right {[wip.orders-datascript/orders .] {:needle "right"}}, :wip.demo-branched-route/left {[wip.orders-datascript/orders .] {:needle "left"}}}} )
null
https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2023/routes_nested1.clj
clojure
demo entrypoint teeshirt-orders scope what's this? the state should be able to express branching
{:hyperfiddle.api/route [:user.demo-entrypoint/hfql-teeshirt-orders], :user.demo-entrypoint/hfql-teeshirt-orders {:wip.teeshirt-orders/route [wip.orders-datascript/one-order 10]}} {:wip.teeshirt-orders/route [wip.orders-datascript/one-order 10]} (comment {} {::demo-entrypoint {}} {::hello-world {}} {::system-properties {}} {::system-properties {::search "java"}} {::hello-world {} ::system-properties {}} {:hyperfiddle.api/route [:user.demo-entrypoint/router], :user.demo-entrypoint/router {:wip.demo-branched-route/right nil, :wip.demo-branched-route/left nil, [wip.orders-datascript/orders .] {:needle "bob"}}} {:user.demo-entrypoint/router {'[wip.orders-datascript/orders .] {:needle "root"} :wip.demo-branched-route/left {'[wip.orders-datascript/orders .] {:needle "left"}} :wip.demo-branched-route/right {'[wip.orders-datascript/orders .] {:needle "right"}}}} {:hyperfiddle.api/route [:user.demo-entrypoint/router], :user.demo-entrypoint/router {[wip.orders-datascript/orders .] {:needle "root"} :wip.demo-branched-route/right {[wip.orders-datascript/orders .] {:needle "right"}}, :wip.demo-branched-route/left {[wip.orders-datascript/orders .] {:needle "left"}}}} )
c43bda86c3f49d07831f45aaefddfc8cb57fcdc72c00dd1873187b8658b80803
lpgauth/marina
marina_frame.erl
-module(marina_frame). -include("marina_internal.hrl"). -compile(inline). -compile({inline_size, 512}). -export([ decode/1, encode/1, pending_size/1 ]). %% public -spec decode(binary()) -> {binary(), [frame()]}. decode(Bin) -> decode(Bin, []). -spec encode(frame()) -> iolist(). encode(#frame { flags = Flags, stream = Stream, opcode = Opcode, body = Body }) -> [<<0:1, ?PROTO_VERSION:7/unsigned-integer, Flags:8/unsigned-integer, Stream:16/signed-integer, Opcode:8/unsigned-integer, (iolist_size(Body)):32/unsigned-integer>>, Body]. -spec pending_size(binary()) -> pos_integer() | undefined. pending_size(<<1:1, ?PROTO_VERSION:7/unsigned-integer, _Flags:8/unsigned-integer, _Stream:16/signed-integer, _Opcode:8/unsigned-integer, Length:32/unsigned-integer, _Rest/binary>>) -> Length + ?HEADER_SIZE; pending_size(_) -> undefined. %% private decode(<<1:1, ?PROTO_VERSION:7/unsigned-integer, Flags:8/unsigned-integer, Stream:16/signed-integer, Opcode:8/unsigned-integer, Length:32/unsigned-integer, Body:Length/binary, Rest/binary>>, Acc) -> decode(Rest, [#frame { flags = Flags, stream = Stream, opcode = Opcode, body = Body } | Acc]); decode(Rest, Acc) -> {Rest, Acc}.
null
https://raw.githubusercontent.com/lpgauth/marina/2d775c003f58d125bb38e7c953c30c36aebc72c6/src/marina_frame.erl
erlang
public private
-module(marina_frame). -include("marina_internal.hrl"). -compile(inline). -compile({inline_size, 512}). -export([ decode/1, encode/1, pending_size/1 ]). -spec decode(binary()) -> {binary(), [frame()]}. decode(Bin) -> decode(Bin, []). -spec encode(frame()) -> iolist(). encode(#frame { flags = Flags, stream = Stream, opcode = Opcode, body = Body }) -> [<<0:1, ?PROTO_VERSION:7/unsigned-integer, Flags:8/unsigned-integer, Stream:16/signed-integer, Opcode:8/unsigned-integer, (iolist_size(Body)):32/unsigned-integer>>, Body]. -spec pending_size(binary()) -> pos_integer() | undefined. pending_size(<<1:1, ?PROTO_VERSION:7/unsigned-integer, _Flags:8/unsigned-integer, _Stream:16/signed-integer, _Opcode:8/unsigned-integer, Length:32/unsigned-integer, _Rest/binary>>) -> Length + ?HEADER_SIZE; pending_size(_) -> undefined. decode(<<1:1, ?PROTO_VERSION:7/unsigned-integer, Flags:8/unsigned-integer, Stream:16/signed-integer, Opcode:8/unsigned-integer, Length:32/unsigned-integer, Body:Length/binary, Rest/binary>>, Acc) -> decode(Rest, [#frame { flags = Flags, stream = Stream, opcode = Opcode, body = Body } | Acc]); decode(Rest, Acc) -> {Rest, Acc}.
8622555a1138874dada4feea372c9c41f4a8c0dfb93a5598f3d247a35d32f624
zotonic/zotonic
z_pivot_rsc_task_job.erl
@author < > 2020 - 2022 %% @doc Run a pivot task queue job. Copyright 2020 - 2022 %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(z_pivot_rsc_task_job). -export([ start_task/2, task_job/2, maybe_schedule_retry/5, task_retry_due/1 ]). -include_lib("zotonic.hrl"). number of times a task will retry on fatal exceptions -define(MAX_TASK_ERROR_COUNT, 5). @doc Start a task queue sidejob . -spec start_task( map(), z:context() ) -> {ok, pid()} | {error, overload}. start_task(Task, Context) -> sidejob_supervisor:spawn( zotonic_sidejobs, {?MODULE, task_job, [ Task, Context ]}). @doc Run the sidejob task queue task . -spec task_job( map(), z:context() ) -> ok. 
task_job( #{ task_id := TaskId, mfa := {Module, Function, Args} } = Task, Context) -> z_context:logger_md(Context), try Args1 = ensure_list(Args), ?LOG_DEBUG(#{ text => <<"Pivot task starting">>, in => zotonic_core, mfa => {Module, Function, length(Args1)+1} }), case call_function(Module, Function, Args1, Context) of {delay, Delay} -> Due = if is_integer(Delay) -> calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + Delay); is_tuple(Delay) -> Delay end, z_db:update(pivot_task_queue, TaskId, [ {due, Due} ], Context); {delay, Delay, NewArgs} -> Due = if is_integer(Delay) -> calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + Delay); is_tuple(Delay) -> Delay end, Fields = #{ <<"due">> => Due, <<"args">> => NewArgs }, z_db:update(pivot_task_queue, TaskId, Fields, Context); _OK -> z_db:delete(pivot_task_queue, TaskId, Context) end catch error:undef:Trace -> ?LOG_ERROR(#{ text => <<"Pivot task failed - undefined function, aborting">>, in => zotonic_core, task_id => TaskId, mfa => {Module, Function, Args}, result => error, reason => undef, stack => Trace }), z_db:delete(pivot_task_queue, TaskId, Context); Error:Reason:Trace -> maybe_schedule_retry(Task, Error, Reason, Trace, Context) after z_pivot_rsc:task_job_done(TaskId, Context) end, ok. -spec maybe_schedule_retry(map(), atom(), term(), list(), z:context()) -> ok. 
maybe_schedule_retry(#{ task_id := TaskId, error_count := ErrCt, mfa := MFA }, Error, Reason, Trace, Context) when ErrCt < ?MAX_TASK_ERROR_COUNT -> RetryDue = task_retry_due(ErrCt), ?LOG_ERROR(#{ text => <<"Pivot task failed - will retry">>, in => zotonic_core, task_id => TaskId, mfa => MFA, result => Error, reason => Reason, retry_on => RetryDue, stack => Trace }), RetryFields = #{ <<"due">> => RetryDue, <<"error_count">> => ErrCt+1 }, {ok, _} = z_db:update(pivot_task_queue, TaskId, RetryFields, Context), ok; maybe_schedule_retry(#{ task_id := TaskId, mfa := MFA }, Error, Reason, Trace, Context) -> ?LOG_ERROR(#{ text => <<"Pivot task failed - aborting">>, in => zotonic_core, task_id => TaskId, mfa => MFA, result => Error, reason => Reason, stack => Trace }), z_db:delete(pivot_task_queue, TaskId, Context), {error, stopped}. call_function(Module, Function, As, Context) -> code:ensure_loaded(Module), AsLen = length(As), case erlang:function_exported(Module, Function, AsLen+1) of true -> % Assume function with extra Context arg erlang:apply(Module, Function, As ++ [Context]); false -> % Function called with only the arguments list erlang:apply(Module, Function, As) end. -spec task_retry_due( integer() ) -> calendar:datetime(). task_retry_due(ErrCt) -> calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + task_retry_backoff(ErrCt)). task_retry_backoff(0) -> 10; task_retry_backoff(1) -> 1800; task_retry_backoff(2) -> 7200; task_retry_backoff(3) -> 14400; task_retry_backoff(4) -> 12 * 3600; task_retry_backoff(N) -> (N-4) * 24 * 3600. ensure_list(L) when is_list(L) -> L; ensure_list(undefined) -> []; ensure_list(X) -> [X].
null
https://raw.githubusercontent.com/zotonic/zotonic/1bb4aa8a0688d007dd8ec8ba271546f658312da8/apps/zotonic_core/src/support/z_pivot_rsc_task_job.erl
erlang
@doc Run a pivot task queue job. you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Assume function with extra Context arg Function called with only the arguments list
@author < > 2020 - 2022 Copyright 2020 - 2022 Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(z_pivot_rsc_task_job). -export([ start_task/2, task_job/2, maybe_schedule_retry/5, task_retry_due/1 ]). -include_lib("zotonic.hrl"). number of times a task will retry on fatal exceptions -define(MAX_TASK_ERROR_COUNT, 5). @doc Start a task queue sidejob . -spec start_task( map(), z:context() ) -> {ok, pid()} | {error, overload}. start_task(Task, Context) -> sidejob_supervisor:spawn( zotonic_sidejobs, {?MODULE, task_job, [ Task, Context ]}). @doc Run the sidejob task queue task . -spec task_job( map(), z:context() ) -> ok. task_job( #{ task_id := TaskId, mfa := {Module, Function, Args} } = Task, Context) -> z_context:logger_md(Context), try Args1 = ensure_list(Args), ?LOG_DEBUG(#{ text => <<"Pivot task starting">>, in => zotonic_core, mfa => {Module, Function, length(Args1)+1} }), case call_function(Module, Function, Args1, Context) of {delay, Delay} -> Due = if is_integer(Delay) -> calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + Delay); is_tuple(Delay) -> Delay end, z_db:update(pivot_task_queue, TaskId, [ {due, Due} ], Context); {delay, Delay, NewArgs} -> Due = if is_integer(Delay) -> calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + Delay); is_tuple(Delay) -> Delay end, Fields = #{ <<"due">> => Due, <<"args">> => NewArgs }, z_db:update(pivot_task_queue, TaskId, Fields, Context); _OK -> z_db:delete(pivot_task_queue, TaskId, Context) end catch error:undef:Trace -> ?LOG_ERROR(#{ text => <<"Pivot task failed - undefined function, aborting">>, in => zotonic_core, task_id => TaskId, mfa => {Module, Function, Args}, result => error, reason => undef, stack => Trace }), z_db:delete(pivot_task_queue, TaskId, Context); Error:Reason:Trace -> maybe_schedule_retry(Task, 
Error, Reason, Trace, Context) after z_pivot_rsc:task_job_done(TaskId, Context) end, ok. -spec maybe_schedule_retry(map(), atom(), term(), list(), z:context()) -> ok. maybe_schedule_retry(#{ task_id := TaskId, error_count := ErrCt, mfa := MFA }, Error, Reason, Trace, Context) when ErrCt < ?MAX_TASK_ERROR_COUNT -> RetryDue = task_retry_due(ErrCt), ?LOG_ERROR(#{ text => <<"Pivot task failed - will retry">>, in => zotonic_core, task_id => TaskId, mfa => MFA, result => Error, reason => Reason, retry_on => RetryDue, stack => Trace }), RetryFields = #{ <<"due">> => RetryDue, <<"error_count">> => ErrCt+1 }, {ok, _} = z_db:update(pivot_task_queue, TaskId, RetryFields, Context), ok; maybe_schedule_retry(#{ task_id := TaskId, mfa := MFA }, Error, Reason, Trace, Context) -> ?LOG_ERROR(#{ text => <<"Pivot task failed - aborting">>, in => zotonic_core, task_id => TaskId, mfa => MFA, result => Error, reason => Reason, stack => Trace }), z_db:delete(pivot_task_queue, TaskId, Context), {error, stopped}. call_function(Module, Function, As, Context) -> code:ensure_loaded(Module), AsLen = length(As), case erlang:function_exported(Module, Function, AsLen+1) of true -> erlang:apply(Module, Function, As ++ [Context]); false -> erlang:apply(Module, Function, As) end. -spec task_retry_due( integer() ) -> calendar:datetime(). task_retry_due(ErrCt) -> calendar:gregorian_seconds_to_datetime( calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + task_retry_backoff(ErrCt)). task_retry_backoff(0) -> 10; task_retry_backoff(1) -> 1800; task_retry_backoff(2) -> 7200; task_retry_backoff(3) -> 14400; task_retry_backoff(4) -> 12 * 3600; task_retry_backoff(N) -> (N-4) * 24 * 3600. ensure_list(L) when is_list(L) -> L; ensure_list(undefined) -> []; ensure_list(X) -> [X].
e3a18295039dd348ac29afb7330a580ec59d507bdd2cefc40c4f2dfa0bb621b9
lambdamusic/The-Musical-Code
2011-10-turbobrobot-electronica.scm
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; 2011-10-turborobot-electronica.scm -- ;; ;; -r7MX6Q ;; ;; house electronica // made with ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (setup:base) (setup:create-au bass ++zebra 2) (setup:create-au brass ++zebra 3) (setup:create-au synth ++zebra 5) (patch:load-zebrapatch bass "dl_comb_bass") (patch:load-zebrapatch brass "bigbrass.patch") (setup:create-au bat ++battery3 4) (patch:load-batterypatch bat "staticgoldmine") (define =kick1= 1) (define =kick2= 2) (define =snare1= 14) (define =snare2= 15) (define =snaremute= 17) (define =snareechoed= 18) (define =hihat1= 30) (define bassline (lambda (beat) (let ((dur (ifbeat-gt? 8 3 2 2))) (play bass c3 50 1/8) (play 1/2 bass c4 60 1/16) (play 1 bass g4 40 1/16) (play 3/2 bass g3 60 1/16) (ifbeat-btw? 16 0 4 (play bass c5 60 1/16)) (ifbeat-btw? 16 4 10 (play 1/2 bass g5 50 1/16)) (ifbeat-btw? 16 8 12 (dotimes (i 4) (play (* i 1/16) bass g3 50 1/16))) (callback (*metro* (+ beat (* 1/2 dur))) 'bassline (+ beat dur))))) (bassline (*metro* 'get-beat 4)) (define kit (lambda (beat) (let ((dur 1/8)) (onbeat? 2 1 (play bat 1 80 1)) ( onbeat ? 4 1/2 ( play bat 17 70 1 ) ) (onbeat? 4 3/2 (play bat 18 70 1)) (ifbeat? 1 0 (play bat 2 70 1) (play bat (cosr 15 5 1/4) (cosr 30 5 1/2) 1/8)) ( onbeat ? 8 ' ( 1 3 5/2 ) ( dotimes ( i ( random 1 3 ) ) ( play ( * i 1/8 ) bat 14 60 1 ) ) ) (onbeat? 1 1/2 (play bat 30 60 1/8)) ( onbeat ? 4 ( + 1/8 1/2 ) ( play bat 30 60 1/8 ) ) (onbeat? 2 1 (play bat 15 60 1/8)) (onbeat? 8 (+ 1 1/4) (play (random 0 0 1/4 3/2) bat 15 60 1/8)) (callback (*metro* (+ beat (* 1/2 dur))) 'kit (+ beat dur))))) (kit (*metro* 'get-beat 4)) (define brasses (lambda (beat chord) (let ((dur 1/2)) (for-each (lambda (c) (play brass c (cosr 25 10 1/4) dur)) chord) (ifbeat-btw? 8 0 1 (play brass c3 10 6)) (ifbeat-btw? 8 2 3 (play brass g4 10 6)) (ifbeat-btw? 16 4 5 (play brass c5 30 6)) (ifbeat-btw? 
16 8 9 (play brass c7 20 6)) (callback (*metro* (+ beat (* 1/2 dur))) 'brasses (+ beat dur) (ifbeat-btw? 16 8 14 (pc:make-chord 90 120 4 (pc:chord 5 '^)) achord))))) (define achord (pc:make-chord 90 120 2 (pc:chord 0 '^9))) ;; (brasses (*metro* 'get-beat) (pc:make-chord 90 120 2 (pc:chord 0 '^9))) (define finale (lambda (beat mel) (let ((dur 1/4) (newmelody (make-list-with-proc 8 (lambda (x) (pc:random 65 75 '(7)))))) (play synth (car mel) 10 dur) (play 1/8 synth (+ 12 (car mel)) 10 1/8) (play 2/8 synth (+ 24 (car mel)) 10 1/8) ( print ) (callback (*metro* (+ beat (* 1/2 dur))) 'finale (+ beat dur) (if-cdr-notnull mel newmelody))))) (finale (*metro* 'get-beat 4) (list c5 c4 c4 c4 c3))
null
https://raw.githubusercontent.com/lambdamusic/The-Musical-Code/4c0b578ea5fe4a45d4dad3e53d966e688af04fed/works/2011-10-turbobrobot-electronica.scm
scheme
-r7MX6Q house electronica // made with
2011-10-turborobot-electronica.scm -- (setup:base) (setup:create-au bass ++zebra 2) (setup:create-au brass ++zebra 3) (setup:create-au synth ++zebra 5) (patch:load-zebrapatch bass "dl_comb_bass") (patch:load-zebrapatch brass "bigbrass.patch") (setup:create-au bat ++battery3 4) (patch:load-batterypatch bat "staticgoldmine") (define =kick1= 1) (define =kick2= 2) (define =snare1= 14) (define =snare2= 15) (define =snaremute= 17) (define =snareechoed= 18) (define =hihat1= 30) (define bassline (lambda (beat) (let ((dur (ifbeat-gt? 8 3 2 2))) (play bass c3 50 1/8) (play 1/2 bass c4 60 1/16) (play 1 bass g4 40 1/16) (play 3/2 bass g3 60 1/16) (ifbeat-btw? 16 0 4 (play bass c5 60 1/16)) (ifbeat-btw? 16 4 10 (play 1/2 bass g5 50 1/16)) (ifbeat-btw? 16 8 12 (dotimes (i 4) (play (* i 1/16) bass g3 50 1/16))) (callback (*metro* (+ beat (* 1/2 dur))) 'bassline (+ beat dur))))) (bassline (*metro* 'get-beat 4)) (define kit (lambda (beat) (let ((dur 1/8)) (onbeat? 2 1 (play bat 1 80 1)) ( onbeat ? 4 1/2 ( play bat 17 70 1 ) ) (onbeat? 4 3/2 (play bat 18 70 1)) (ifbeat? 1 0 (play bat 2 70 1) (play bat (cosr 15 5 1/4) (cosr 30 5 1/2) 1/8)) ( onbeat ? 8 ' ( 1 3 5/2 ) ( dotimes ( i ( random 1 3 ) ) ( play ( * i 1/8 ) bat 14 60 1 ) ) ) (onbeat? 1 1/2 (play bat 30 60 1/8)) ( onbeat ? 4 ( + 1/8 1/2 ) ( play bat 30 60 1/8 ) ) (onbeat? 2 1 (play bat 15 60 1/8)) (onbeat? 8 (+ 1 1/4) (play (random 0 0 1/4 3/2) bat 15 60 1/8)) (callback (*metro* (+ beat (* 1/2 dur))) 'kit (+ beat dur))))) (kit (*metro* 'get-beat 4)) (define brasses (lambda (beat chord) (let ((dur 1/2)) (for-each (lambda (c) (play brass c (cosr 25 10 1/4) dur)) chord) (ifbeat-btw? 8 0 1 (play brass c3 10 6)) (ifbeat-btw? 8 2 3 (play brass g4 10 6)) (ifbeat-btw? 16 4 5 (play brass c5 30 6)) (ifbeat-btw? 16 8 9 (play brass c7 20 6)) (callback (*metro* (+ beat (* 1/2 dur))) 'brasses (+ beat dur) (ifbeat-btw? 
16 8 14 (pc:make-chord 90 120 4 (pc:chord 5 '^)) achord))))) (define achord (pc:make-chord 90 120 2 (pc:chord 0 '^9))) (brasses (*metro* 'get-beat) (pc:make-chord 90 120 2 (pc:chord 0 '^9))) (define finale (lambda (beat mel) (let ((dur 1/4) (newmelody (make-list-with-proc 8 (lambda (x) (pc:random 65 75 '(7)))))) (play synth (car mel) 10 dur) (play 1/8 synth (+ 12 (car mel)) 10 1/8) (play 2/8 synth (+ 24 (car mel)) 10 1/8) ( print ) (callback (*metro* (+ beat (* 1/2 dur))) 'finale (+ beat dur) (if-cdr-notnull mel newmelody))))) (finale (*metro* 'get-beat 4) (list c5 c4 c4 c4 c3))
d672d15ea07781ba3d232279d2c5bfda38f76f235ff8609ea6040a1fd5106433
vert-x/mod-lang-clojure
websocket.clj
Copyright 2013 the original author or authors . ;; Licensed under the Apache License , Version 2.0 ( the " License " ) ; ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; -2.0 ;; ;; Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns vertx.http.websocket "Provides a set of functions for using http websockets." (:require [vertx.buffer :as buf] [vertx.core :as core] [vertx.http :as http] [vertx.utils :as u]) (:import [org.vertx.java.core.http HttpClient HttpServer WebSocket WebSocketVersion])) (defn on-websocket "Sets the websocket handler for the HTTP server. handler can either be a single-arity fn or a Handler instance that will be passed the ServerWebSocket when a successful connection is made." [^HttpServer server handler] (.websocketHandler server (core/as-handler handler))) (defn remote-address "Returns the remote address for the socket as an address-map of the form {:address \"127.0.0.1\" :port 8888 :basis inet-socket-address-object}" [^WebSocket socket] (u/inet-socket-address->map (.remoteAddress socket))) (def ^:private ws-version {:RFC6455 WebSocketVersion/RFC6455 :HYBI-00 WebSocketVersion/HYBI_00 :HYBI-08 WebSocketVersion/HYBI_08}) (defn connect "Connect the HTTP client to a websocket at the specified URI. version allows you to force the protocol version to one of: :RFC6455, :HYBI-00, or :HYBI-08. handler can either be a single-arity fn or a Handler instance that will be passed the WebSocket when a successful connection is made. Returns the client." 
([client uri handler] (connect client uri nil nil handler)) ([client uri version handler] (connect client uri version nil handler)) ([^HttpClient client uri version header handler] (.connectWebsocket client uri (ws-version version) (http/encode-headers header) (core/as-handler handler)))) (defn write-binary-frame "Write data Buffer to websocket as a binary frame. Returns the websocket." [^WebSocket ws data] (.writeBinaryFrame ws (buf/as-buffer data))) (defn write-text-frame "Write data String to websocket as a text frame. Returns the websocket." [^WebSocket ws data] (.writeTextFrame ws (str data)))
null
https://raw.githubusercontent.com/vert-x/mod-lang-clojure/dcf713460b8f46c08d0db6e7bf8537f1dd91f297/api/src/main/clojure/vertx/http/websocket.clj
clojure
you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Copyright 2013 the original author or authors . distributed under the License is distributed on an " AS IS " BASIS , (ns vertx.http.websocket "Provides a set of functions for using http websockets." (:require [vertx.buffer :as buf] [vertx.core :as core] [vertx.http :as http] [vertx.utils :as u]) (:import [org.vertx.java.core.http HttpClient HttpServer WebSocket WebSocketVersion])) (defn on-websocket "Sets the websocket handler for the HTTP server. handler can either be a single-arity fn or a Handler instance that will be passed the ServerWebSocket when a successful connection is made." [^HttpServer server handler] (.websocketHandler server (core/as-handler handler))) (defn remote-address "Returns the remote address for the socket as an address-map of the form {:address \"127.0.0.1\" :port 8888 :basis inet-socket-address-object}" [^WebSocket socket] (u/inet-socket-address->map (.remoteAddress socket))) (def ^:private ws-version {:RFC6455 WebSocketVersion/RFC6455 :HYBI-00 WebSocketVersion/HYBI_00 :HYBI-08 WebSocketVersion/HYBI_08}) (defn connect "Connect the HTTP client to a websocket at the specified URI. version allows you to force the protocol version to one of: :RFC6455, :HYBI-00, or :HYBI-08. handler can either be a single-arity fn or a Handler instance that will be passed the WebSocket when a successful connection is made. Returns the client." ([client uri handler] (connect client uri nil nil handler)) ([client uri version handler] (connect client uri version nil handler)) ([^HttpClient client uri version header handler] (.connectWebsocket client uri (ws-version version) (http/encode-headers header) (core/as-handler handler)))) (defn write-binary-frame "Write data Buffer to websocket as a binary frame. Returns the websocket." [^WebSocket ws data] (.writeBinaryFrame ws (buf/as-buffer data))) (defn write-text-frame "Write data String to websocket as a text frame. Returns the websocket." [^WebSocket ws data] (.writeTextFrame ws (str data)))
c59a6401e7753c906445544fd95eb0c8b94a7f8958a6e10ae7514b1c3a2ec8da
protz/mezzo
TypeErrors.mli
(*****************************************************************************) (* Mezzo, a programming language based on permissions *) Copyright ( C ) 2011 , 2012 and (* *) (* This program is free software: you can redistribute it and/or modify *) it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or (* (at your option) any later version. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU General Public License for more details. *) (* *) You should have received a copy of the GNU General Public License (* along with this program. If not, see </>. *) (* *) (*****************************************************************************) (** Everything you ever dreamed of for reporting errors. *) open Kind open TypeCore (** Clients of this module will want to use the various errors offered. 
*) type raw_error = | OverrideAutoload of string | CyclicDependency of Module.name | NotAFunction of var | ExpectedType of typ * var * Derivations.derivation | ExpectedPermission of typ * Derivations.derivation | RecursiveOnlyForFunctions | MissingField of Field.name | ExtraField of Field.name | NoSuchField of var * Field.name | CantAssignTag of var | NoSuchFieldInPattern of ExpressionsCore.pattern * Field.name | BadPattern of ExpressionsCore.pattern * var | BadField of Datacon.name * Field.name | NoTwoConstructors of var | MatchBadDatacon of var * Datacon.name | MatchBadTuple of var | AssignNotExclusive of typ * Datacon.name | FieldCountMismatch of typ * Datacon.name | NoMultipleArguments | ResourceAllocationConflict of var | UncertainMerge of var | ConflictingTypeAnnotations of typ * typ | IllKindedTypeApplication of ExpressionsCore.tapp * kind * kind | BadTypeApplication of var | NonExclusiveAdoptee of typ | NoAdoptsClause of var | NotDynamic of var | NoSuitableTypeForAdopts of var * typ | AdoptsNoAnnotation | NotMergingClauses of env * typ * typ * env * typ * typ | NoSuchTypeInSignature of var * typ * Derivations.derivation | DataTypeMismatchInSignature of Variable.name * string | VarianceAnnotationMismatch | ExportNotDuplicable of Variable.name | LocalType | Instantiated of Variable.name * typ | PackWithExists | SeveralWorkingFunctionTypes of var | InconsistentEnv (** Set up the module to take into account the warn / error / silent settings * specified on the command-line. *) val parse_warn_error: string -> unit (** This function raises an exception that will be later on catched in * {!Driver}. *) val raise_error : env -> raw_error -> 'a (** This function may raise an exception that will be later on catched in * {!Driver}, or emit a warning, or do nothing, depending on whether the error * has been tweaked with the warn/error string. *) val may_raise_error : env -> raw_error -> unit (** A {!raw_error} is wrapped. 
*) type error (** And this is the exception that you can catch. *) exception TypeCheckerError of error (** Once an exception is catched, it can be printed with {!Log.error} and * [%a]... *) val print_error : Buffer.t -> error -> unit (** ... or displayed as an HTML error. *) val html_error: error -> unit (**/**) val internal_extracterror: error -> raw_error
null
https://raw.githubusercontent.com/protz/mezzo/4e9d917558bd96067437116341b7a6ea02ab9c39/typing/TypeErrors.mli
ocaml
*************************************************************************** Mezzo, a programming language based on permissions This program is free software: you can redistribute it and/or modify (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. along with this program. If not, see </>. *************************************************************************** * Everything you ever dreamed of for reporting errors. * Clients of this module will want to use the various errors offered. * Set up the module to take into account the warn / error / silent settings * specified on the command-line. * This function raises an exception that will be later on catched in * {!Driver}. * This function may raise an exception that will be later on catched in * {!Driver}, or emit a warning, or do nothing, depending on whether the error * has been tweaked with the warn/error string. * A {!raw_error} is wrapped. * And this is the exception that you can catch. * Once an exception is catched, it can be printed with {!Log.error} and * [%a]... * ... or displayed as an HTML error. */*
Copyright ( C ) 2011 , 2012 and it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or You should have received a copy of the GNU General Public License open Kind open TypeCore type raw_error = | OverrideAutoload of string | CyclicDependency of Module.name | NotAFunction of var | ExpectedType of typ * var * Derivations.derivation | ExpectedPermission of typ * Derivations.derivation | RecursiveOnlyForFunctions | MissingField of Field.name | ExtraField of Field.name | NoSuchField of var * Field.name | CantAssignTag of var | NoSuchFieldInPattern of ExpressionsCore.pattern * Field.name | BadPattern of ExpressionsCore.pattern * var | BadField of Datacon.name * Field.name | NoTwoConstructors of var | MatchBadDatacon of var * Datacon.name | MatchBadTuple of var | AssignNotExclusive of typ * Datacon.name | FieldCountMismatch of typ * Datacon.name | NoMultipleArguments | ResourceAllocationConflict of var | UncertainMerge of var | ConflictingTypeAnnotations of typ * typ | IllKindedTypeApplication of ExpressionsCore.tapp * kind * kind | BadTypeApplication of var | NonExclusiveAdoptee of typ | NoAdoptsClause of var | NotDynamic of var | NoSuitableTypeForAdopts of var * typ | AdoptsNoAnnotation | NotMergingClauses of env * typ * typ * env * typ * typ | NoSuchTypeInSignature of var * typ * Derivations.derivation | DataTypeMismatchInSignature of Variable.name * string | VarianceAnnotationMismatch | ExportNotDuplicable of Variable.name | LocalType | Instantiated of Variable.name * typ | PackWithExists | SeveralWorkingFunctionTypes of var | InconsistentEnv val parse_warn_error: string -> unit val raise_error : env -> raw_error -> 'a val may_raise_error : env -> raw_error -> unit type error exception TypeCheckerError of error val print_error : Buffer.t -> error -> unit val html_error: error -> unit val internal_extracterror: error -> raw_error
1838e1df0e0e9c34e29c1a118988b3f022e183eb0002ff7c7c15fe35fe18d294
devonzuegel/smallworld
db.clj
(ns smallworld.db (:require [clojure.java.io :as io] [clojure.java.jdbc :as sql] ; [clojure.pprint :as pp] [clojure.string :as str] [clojure.walk :as walk] [jdbc.pool.c3p0 :as pool] [smallworld.clj-postgresql.types] ; this enables the :json type [smallworld.util :as util])) (def debug? false) (def db-uri (java.net.URI. (util/get-env-var "DATABASE_URL"))) (def user-and-password (if (nil? (.getUserInfo db-uri)) nil (clojure.string/split (.getUserInfo db-uri) #":"))) (def pool (delay (pool/make-datasource-spec {:classname "org.postgresql.Driver" :subprotocol "postgresql" :user (get user-and-password 0) :password (get user-and-password 1) :subname (if (= -1 (.getPort db-uri)) (format "//%s%s" (.getHost db-uri) (.getPath db-uri)) (format "//%s:%s%s" (.getHost db-uri) (.getPort db-uri) (.getPath db-uri)))}))) ; table names (def twitter-profiles-table :twitter_profiles) ; store all data from Twitter sign up store Small World - specific settings memoized storage : friends of the user ( request_key ) (def coordinates-table :coordinates) ; memoized storage: map of city/country names to coordinates (def access_tokens-table :access_tokens) ; memoized storage: Twitter access tokens (def impersonation-table :impersonation) ; stores screen_name of the user who the admin is impersonating (for debug only) (def twitter-profiles-schema (slurp (io/resource "sql/schema-twitter-profiles.sql"))) (def settings-schema (slurp (io/resource "sql/schema-settings.sql"))) (def friends-schema (slurp (io/resource "sql/schema-friends.sql"))) (def coordinates-schema (slurp (io/resource "sql/schema-coordinates.sql"))) (def access-tokens-schema (slurp (io/resource "sql/schema-access-tokens.sql"))) (def impersonation-schema (slurp (io/resource "sql/schema-impersonation.sql"))) (defn escape-str [str] ; TODO: replace this this the ? 
syntax, which escapes for you (str/replace str "'" "''")) (defn where [column-name value] (str " where " (name column-name) " = '" (escape-str value) "'")) (defn table-exists? [table-name] (->> table-name name escape-str (#(sql/query @pool (str "SELECT table_name FROM information_schema.tables where table_name = '" % "'"))) count (not= 0))) (defn create-table [table-name schema] (if (table-exists? table-name) (println "table" table-name "already exists") (do (println "creating table" table-name) (if (string? schema) (sql/db-do-commands @pool (clojure.string/split schema #"--- split here ---")) (sql/db-do-commands @pool (sql/create-table-ddl (name table-name) schema)))))) (defn recreate-table [table-name schema] ; leave this commented out by default, since it's destructive (sql/db-do-commands @pool (str " drop table if exists " (name table-name))) (create-table table-name schema) (when debug? (println "done dropping table named " table-name " (if it existed)") (println "done creating table named " table-name))) (defn select-all [table] (sql/query @pool (str "select * from " (name table)))) (defn select-first [table] (first (sql/query @pool (str "select * from " (name table) " limit 1")))) (defn show-all [table-name] (println) (let [results (if (= table-name friends-table) (sql/query @pool (str "select request_key from " (name friends-table))) (sql/query @pool (str "select * from " (name table-name))))] (pp/pprint results) (when (= table-name friends-table) (println "not printing {:data {:friends}} because it's too long")) (println "\ncount: " (count results))) (println)) (defn select-by-col [table-name col-name col-value] (when debug? (println "(select-by-col" table-name col-name col-value ")")) (if (nil? col-value) [] (walk/keywordize-keys (sql/query @pool (str "select * from " (name table-name) (where col-name col-value)))))) (defn insert! [table-name data] (when debug? (println "inserting the following data into" table-name) (pp/pprint data)) (sql/insert! 
@pool table-name data)) ; TODO: this was meant to simplify the code, but it's best to just replace it ; everywhere with sql/update! probably (defn update! [table-name col-name col-value new-json] (sql/update! @pool table-name new-json [(str (name col-name) " = ?") col-value])) ; TODO: turn this into a single query to speed it up (defn memoized-insert-or-update! [table-name request_key data] (let [sql-results (select-by-col table-name :request_key request_key) exists? (not= 0 (count sql-results))] (when debug? (println "result:" sql-results) (println "exists? " exists?) (pp/pprint (select-by-col table-name :request_key request_key))) (if-not exists? (insert! table-name {:request_key request_key :data data}) (update! table-name :request_key request_key {:data data})))) (defn insert-or-update! [table-name col-name data] (let [col-name (keyword col-name) col-value (get data col-name) sql-results (select-by-col table-name col-name col-value) exists? (not= 0 (count sql-results)) new-data (dissoc (merge (first sql-results) data) :id :updated_at) new-data (if (not= table-name settings-table) new-data (assoc new-data ; TODO: this only applies to settings table, not any others! yuck :locations (or (:locations data) (vec (:locations (first sql-results))))))] (when debug? (println "--- running fn: insert-or-update! ---------") (println "col-name: " col-name) (println "col-value: " col-value) (println "sql-results:" sql-results) (println "exists? " exists?) (println "data (arg): " data) (println "new data (merged): ") (pp/pprint new-data) (println "-------------------------------------------")) (if-not exists? (insert! table-name data) (update! table-name col-name col-value new-data)))) (defn update-twitter-last-fetched! 
[screen-name] (sql/db-do-commands @pool (str "update settings " "set twitter_last_fetched = now() " "where screen_name = '" screen-name "';"))) (comment (do (println "--------------------------------") (println) (pp/pprint (first (select-by-col settings-table :screen_name "devon_dos"))) (println) (pp/pprint (:email_address (first (select-by-col settings-table :screen_name "devon_dos")))) (println) (println "--------------------------------")) (recreate-table settings-table settings-schema) (insert! settings-table {:screen_name "aaa" :main_location_corrected "bbb"}) (update! settings-table :screen_name "aaa" {:welcome_flow_complete true}) (update! settings-table :screen_name "aaa" {:screen_name "foo"}) (update! settings-table :screen_name "foo" {:screen_name "aaa"}) (insert-or-update! settings-table :screen_name {:screen_name "devonzuegel" :main_location_corrected "bbb"}) (insert-or-update! settings-table :screen_name {:screen_name "devon_dos" :welcome_flow_complete false}) (insert-or-update! settings-table :screen_name {:screen_name "devon_dos" :email_address ""}) (select-by-col settings-table :screen_name "devonzuegel") (show-all settings-table) (show-all twitter-profiles-table) (recreate-table friends-table friends-schema) (select-by-col friends-table :request_key "devonzuegel") (get-in (vec (select-by-col friends-table :request_key "devon_dos")) [0 :data :friends]) (select-by-col friends-table :request_key "meadowmaus") (show-all friends-table) (sql/delete! @pool friends-table ["request_key = ?" "devonzuegel"]) (sql/delete! @pool access_tokens-table ["request_key = ?" "devonzuegel"]) (show-all access_tokens-table) (select-by-col access_tokens-table :request_key "devonzuegel") (select-by-col access_tokens-table :request_key "meadowmaus") (recreate-table :coordinates friends-schema) (show-all :coordinates) (pp/pprint (select-by-col :coordinates :request_key "Miami Beach")) (update! 
:coordinates :request_key "Miami Beach" {:data {:lat 25.792236328125 :lng -80.13484954833984}}) (select-by-col :coordinates :request_key "spain"))
null
https://raw.githubusercontent.com/devonzuegel/smallworld/49f1bee72681f627bcdd78d067e39157153adbb1/src/smallworld/db.clj
clojure
this enables the :json type table names store all data from Twitter sign up memoized storage: map of city/country names to coordinates memoized storage: Twitter access tokens stores screen_name of the user who the admin is impersonating (for debug only) TODO: replace this this the ? syntax, which escapes for you leave this commented out by default, since it's destructive TODO: this was meant to simplify the code, but it's best to just replace it everywhere with sql/update! probably TODO: turn this into a single query to speed it up TODO: this only applies to settings table, not any others! yuck
(ns smallworld.db (:require [clojure.java.io :as io] [clojure.pprint :as pp] [clojure.string :as str] [clojure.walk :as walk] [jdbc.pool.c3p0 :as pool] [smallworld.util :as util])) (def debug? false) (def db-uri (java.net.URI. (util/get-env-var "DATABASE_URL"))) (def user-and-password (if (nil? (.getUserInfo db-uri)) nil (clojure.string/split (.getUserInfo db-uri) #":"))) (def pool (delay (pool/make-datasource-spec {:classname "org.postgresql.Driver" :subprotocol "postgresql" :user (get user-and-password 0) :password (get user-and-password 1) :subname (if (= -1 (.getPort db-uri)) (format "//%s%s" (.getHost db-uri) (.getPath db-uri)) (format "//%s:%s%s" (.getHost db-uri) (.getPort db-uri) (.getPath db-uri)))}))) store Small World - specific settings memoized storage : friends of the user ( request_key ) (def twitter-profiles-schema (slurp (io/resource "sql/schema-twitter-profiles.sql"))) (def settings-schema (slurp (io/resource "sql/schema-settings.sql"))) (def friends-schema (slurp (io/resource "sql/schema-friends.sql"))) (def coordinates-schema (slurp (io/resource "sql/schema-coordinates.sql"))) (def access-tokens-schema (slurp (io/resource "sql/schema-access-tokens.sql"))) (def impersonation-schema (slurp (io/resource "sql/schema-impersonation.sql"))) (str/replace str "'" "''")) (defn where [column-name value] (str " where " (name column-name) " = '" (escape-str value) "'")) (defn table-exists? [table-name] (->> table-name name escape-str (#(sql/query @pool (str "SELECT table_name FROM information_schema.tables where table_name = '" % "'"))) count (not= 0))) (defn create-table [table-name schema] (if (table-exists? table-name) (println "table" table-name "already exists") (do (println "creating table" table-name) (if (string? 
schema) (sql/db-do-commands @pool (clojure.string/split schema #"--- split here ---")) (sql/db-do-commands @pool (sql/create-table-ddl (name table-name) schema)))))) (sql/db-do-commands @pool (str " drop table if exists " (name table-name))) (create-table table-name schema) (when debug? (println "done dropping table named " table-name " (if it existed)") (println "done creating table named " table-name))) (defn select-all [table] (sql/query @pool (str "select * from " (name table)))) (defn select-first [table] (first (sql/query @pool (str "select * from " (name table) " limit 1")))) (defn show-all [table-name] (println) (let [results (if (= table-name friends-table) (sql/query @pool (str "select request_key from " (name friends-table))) (sql/query @pool (str "select * from " (name table-name))))] (pp/pprint results) (when (= table-name friends-table) (println "not printing {:data {:friends}} because it's too long")) (println "\ncount: " (count results))) (println)) (defn select-by-col [table-name col-name col-value] (when debug? (println "(select-by-col" table-name col-name col-value ")")) (if (nil? col-value) [] (walk/keywordize-keys (sql/query @pool (str "select * from " (name table-name) (where col-name col-value)))))) (defn insert! [table-name data] (when debug? (println "inserting the following data into" table-name) (pp/pprint data)) (sql/insert! @pool table-name data)) (defn update! [table-name col-name col-value new-json] (sql/update! @pool table-name new-json [(str (name col-name) " = ?") col-value])) (defn memoized-insert-or-update! [table-name request_key data] (let [sql-results (select-by-col table-name :request_key request_key) exists? (not= 0 (count sql-results))] (when debug? (println "result:" sql-results) (println "exists? " exists?) (pp/pprint (select-by-col table-name :request_key request_key))) (if-not exists? (insert! table-name {:request_key request_key :data data}) (update! 
table-name :request_key request_key {:data data})))) (defn insert-or-update! [table-name col-name data] (let [col-name (keyword col-name) col-value (get data col-name) sql-results (select-by-col table-name col-name col-value) exists? (not= 0 (count sql-results)) new-data (dissoc (merge (first sql-results) data) :id :updated_at) new-data (if (not= table-name settings-table) new-data :locations (or (:locations data) (vec (:locations (first sql-results))))))] (when debug? (println "--- running fn: insert-or-update! ---------") (println "col-name: " col-name) (println "col-value: " col-value) (println "sql-results:" sql-results) (println "exists? " exists?) (println "data (arg): " data) (println "new data (merged): ") (pp/pprint new-data) (println "-------------------------------------------")) (if-not exists? (insert! table-name data) (update! table-name col-name col-value new-data)))) (defn update-twitter-last-fetched! [screen-name] (sql/db-do-commands @pool (str "update settings " "set twitter_last_fetched = now() " "where screen_name = '" screen-name "';"))) (comment (do (println "--------------------------------") (println) (pp/pprint (first (select-by-col settings-table :screen_name "devon_dos"))) (println) (pp/pprint (:email_address (first (select-by-col settings-table :screen_name "devon_dos")))) (println) (println "--------------------------------")) (recreate-table settings-table settings-schema) (insert! settings-table {:screen_name "aaa" :main_location_corrected "bbb"}) (update! settings-table :screen_name "aaa" {:welcome_flow_complete true}) (update! settings-table :screen_name "aaa" {:screen_name "foo"}) (update! settings-table :screen_name "foo" {:screen_name "aaa"}) (insert-or-update! settings-table :screen_name {:screen_name "devonzuegel" :main_location_corrected "bbb"}) (insert-or-update! settings-table :screen_name {:screen_name "devon_dos" :welcome_flow_complete false}) (insert-or-update! 
settings-table :screen_name {:screen_name "devon_dos" :email_address ""}) (select-by-col settings-table :screen_name "devonzuegel") (show-all settings-table) (show-all twitter-profiles-table) (recreate-table friends-table friends-schema) (select-by-col friends-table :request_key "devonzuegel") (get-in (vec (select-by-col friends-table :request_key "devon_dos")) [0 :data :friends]) (select-by-col friends-table :request_key "meadowmaus") (show-all friends-table) (sql/delete! @pool friends-table ["request_key = ?" "devonzuegel"]) (sql/delete! @pool access_tokens-table ["request_key = ?" "devonzuegel"]) (show-all access_tokens-table) (select-by-col access_tokens-table :request_key "devonzuegel") (select-by-col access_tokens-table :request_key "meadowmaus") (recreate-table :coordinates friends-schema) (show-all :coordinates) (pp/pprint (select-by-col :coordinates :request_key "Miami Beach")) (update! :coordinates :request_key "Miami Beach" {:data {:lat 25.792236328125 :lng -80.13484954833984}}) (select-by-col :coordinates :request_key "spain"))
c0eaaaac146c1963c436c1103a86fdc91f27a9cc07bf7e92e883362f229c04a4
mzp/websocket-ocaml
getHandler.mli
val handle : (Glob.t -> string) -> Handler.t
null
https://raw.githubusercontent.com/mzp/websocket-ocaml/b584bd20dfe6d95f65bc6e1ba8838b1ecfa8ec0e/webSocket/getHandler.mli
ocaml
val handle : (Glob.t -> string) -> Handler.t
8d91753f3de5ec081aff8270893f417291d1e7c0810d246e9eda406e618ce111
ejackson/mlapp
gauss_interp.clj
(ns mlapp.gauss-interp "Try to reproduce 4.3.2.2, figs 4.10" (:use clojure.test clojure.core.matrix.protocols [incanter.charts :only [xy-plot add-points]] [incanter.core :only [view]]) (:require [clatrix.core :as c] [clojure.core.matrix :as m] [clojure.core.matrix.protocols :as p])) (defn L-matrix "Create the interpolation matrix. This is GROSS" [n] (let [m0 (m/mul (m/identity-matrix :clatrix n) -1) m1 (reduce (fn [im i] (set-2d im i (inc i) 2)) m0 (range (dec n))) m2 (reduce (fn [im i] (set-2d im i (+ i 2) -1)) m1 (range (- n 2)))] (m/reshape m2 [(- n 2) n]))) (defn setup "Return a map of the problem setup. Hidden, observed data and values" [n n-observed lambda] (let [i (shuffle (range n))] {:L (m/mul (L-matrix n) lambda) :observed (take n-observed i) :hidden (drop n-observed i) :observed-values (m/matrix :clatrix (repeatedly n-observed rand))})) (defn solve "Return the MAP for each hidden point" [{:keys [L observed hidden observed-values] :as m}] (let [nc (m/column-count L) nr (m/row-count L) L1 (c/get L (range nr) hidden) L2 (c/get L (range nr) observed) l11 (m/mul (m/transpose L1) L1) l12 (m/mul (m/transpose L1) L2)] (assoc m :hidden-values (m/scale (m/mul (m/mul (m/inverse l11) l12) observed-values) -1)))) ;; Run (let [s (solve (setup 150 10 30))] (view (add-points (xy-plot (concat (:hidden s) (:observed s)) (concat (:hidden-values s) (:observed-values s))) (:observed s) (:observed-values s))))
null
https://raw.githubusercontent.com/ejackson/mlapp/b3613bedd65921dcc4dff561fca79d97c205db9a/src/mlapp/gauss_interp.clj
clojure
Run
(ns mlapp.gauss-interp "Try to reproduce 4.3.2.2, figs 4.10" (:use clojure.test clojure.core.matrix.protocols [incanter.charts :only [xy-plot add-points]] [incanter.core :only [view]]) (:require [clatrix.core :as c] [clojure.core.matrix :as m] [clojure.core.matrix.protocols :as p])) (defn L-matrix "Create the interpolation matrix. This is GROSS" [n] (let [m0 (m/mul (m/identity-matrix :clatrix n) -1) m1 (reduce (fn [im i] (set-2d im i (inc i) 2)) m0 (range (dec n))) m2 (reduce (fn [im i] (set-2d im i (+ i 2) -1)) m1 (range (- n 2)))] (m/reshape m2 [(- n 2) n]))) (defn setup "Return a map of the problem setup. Hidden, observed data and values" [n n-observed lambda] (let [i (shuffle (range n))] {:L (m/mul (L-matrix n) lambda) :observed (take n-observed i) :hidden (drop n-observed i) :observed-values (m/matrix :clatrix (repeatedly n-observed rand))})) (defn solve "Return the MAP for each hidden point" [{:keys [L observed hidden observed-values] :as m}] (let [nc (m/column-count L) nr (m/row-count L) L1 (c/get L (range nr) hidden) L2 (c/get L (range nr) observed) l11 (m/mul (m/transpose L1) L1) l12 (m/mul (m/transpose L1) L2)] (assoc m :hidden-values (m/scale (m/mul (m/mul (m/inverse l11) l12) observed-values) -1)))) (let [s (solve (setup 150 10 30))] (view (add-points (xy-plot (concat (:hidden s) (:observed s)) (concat (:hidden-values s) (:observed-values s))) (:observed s) (:observed-values s))))
4507fe573350d9d42c452c0e511d7af2a03d75b13547874066a55aa5bd9d7e8a
OCamlPro/liquidity
liquidNamespace.mli
(****************************************************************************) (* Liquidity *) (* *) Copyright ( C ) 2017 - 2020 OCamlPro SAS (* *) (* Authors: Fabrice Le Fessant *) (* *) (* This program is free software: you can redistribute it and/or modify *) it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or (* (at your option) any later version. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU General Public License for more details. *) (* *) You should have received a copy of the GNU General Public License (* along with this program. If not, see </>. *) (****************************************************************************) open LiquidTypes exception Unknown_namespace of string list * location * namespaces in name val unqualify : string -> string list * string (** Prepend name with namespace *) val add_path_name : string list -> string -> string (** Normalize type present in environment [in_env] with fully qualified names, constructors and fields, with respect to [from_env] (or from top-level if no argument [~from_env] is given). 
*) val normalize_type : ?from_env:env -> in_env:env -> datatype -> datatype val find_env : loc:location -> string list -> env -> env (** Find a type by its qualified alias, returns a fully qualified normalized type *) val find_type : loc:location -> string -> env -> datatype list -> datatype (** Find a contract signature by its qualified alias *) val find_contract_type : loc:location -> string -> env -> contract_sig (** Find the type to which a label belongs, returns its normalized version together with the type of the field value and its position in the record *) val find_label : loc:location -> string -> env -> datatype * (string * datatype * int) (** Find the type to which a constructor belongs, returns its normalized version together with the type of the constructor argument and its position in the sum type *) val find_constr : loc:location -> string -> env -> datatype * (string * datatype * int) (** Find a qualified external primitive in the environment *) val find_extprim : loc:location -> string -> env -> extprim val is_extprim : string -> env -> bool (** Look for a qualified global value exported in a sub module or another contract. Precondition: s must be unaliased (call with {!unalias_name}). *) val lookup_global_value : loc:location -> string -> typecheck_env -> typed_exp value val find_contract : loc:location -> string -> env -> 'a contract StringMap.t -> 'a contract val find_module : loc:location -> string list -> env -> 'a contract list -> 'a contract (** For debug *) val qual_contract_name : 'a contract -> string (** Replaces aliases by their aliased values in a qualified name *) val unalias_name : string -> env -> string
null
https://raw.githubusercontent.com/OCamlPro/liquidity/3578de34cf751f54b9e4c001a95625d2041b2962/tools/liquidity/liquidNamespace.mli
ocaml
************************************************************************** Liquidity Authors: Fabrice Le Fessant This program is free software: you can redistribute it and/or modify (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. along with this program. If not, see </>. ************************************************************************** * Prepend name with namespace * Normalize type present in environment [in_env] with fully qualified names, constructors and fields, with respect to [from_env] (or from top-level if no argument [~from_env] is given). * Find a type by its qualified alias, returns a fully qualified normalized type * Find a contract signature by its qualified alias * Find the type to which a label belongs, returns its normalized version together with the type of the field value and its position in the record * Find the type to which a constructor belongs, returns its normalized version together with the type of the constructor argument and its position in the sum type * Find a qualified external primitive in the environment * Look for a qualified global value exported in a sub module or another contract. Precondition: s must be unaliased (call with {!unalias_name}). * For debug * Replaces aliases by their aliased values in a qualified name
Copyright ( C ) 2017 - 2020 OCamlPro SAS it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or You should have received a copy of the GNU General Public License open LiquidTypes exception Unknown_namespace of string list * location * namespaces in name val unqualify : string -> string list * string val add_path_name : string list -> string -> string val normalize_type : ?from_env:env -> in_env:env -> datatype -> datatype val find_env : loc:location -> string list -> env -> env val find_type : loc:location -> string -> env -> datatype list -> datatype val find_contract_type : loc:location -> string -> env -> contract_sig val find_label : loc:location -> string -> env -> datatype * (string * datatype * int) val find_constr : loc:location -> string -> env -> datatype * (string * datatype * int) val find_extprim : loc:location -> string -> env -> extprim val is_extprim : string -> env -> bool val lookup_global_value : loc:location -> string -> typecheck_env -> typed_exp value val find_contract : loc:location -> string -> env -> 'a contract StringMap.t -> 'a contract val find_module : loc:location -> string list -> env -> 'a contract list -> 'a contract val qual_contract_name : 'a contract -> string val unalias_name : string -> env -> string
d2845cab61b3fa67dc4c28dac70e8a83f426b51ab317d1bbfd2708c37b5ca4fe
ocurrent/ocaml-multicore-ci
opam_build.ml
open Obuilder_spec let host_network = [ "host" ] let opam_download_cache = [ Cache.v "opam-archives" ~target:"/home/opam/.opam/download-cache" ] let run fmt = let network = host_network in let cache = opam_download_cache in Obuilder_spec.run ~network ~cache fmt let compiler_switch_name_from_commit commit = let s_hash = Current_git.Commit_id.hash commit |> Astring.String.with_range ~len:8 in "compiler-" ^ s_hash (* If the package's directory name doesn't contain a dot then opam will default to using the last known version, which is usually wrong. In particular, if a multi-project repostory adds a new package with a constraint "{ =version }" on an existing one, this will fail because opam will pin the new package as "dev" but the old one with the version of its last release. *) let maybe_add_dev ~dir name = if Fpath.is_current_dir dir || not (String.contains (Fpath.basename dir) '.') then name ^ ".dev" else name Group opam files by directory . e.g. [ " a / a1.opam " ; " a / a2.opam " ; " b / b1.opam " ] - > [ ( " a " , [ " a / a1.opam " ; " a / a2.opam " ] , [ " a1.dev " ; " a2.dev " ] ) ; ( " b " , [ " b / b1.opam " ] , [ " b1.dev " ] ) ] e.g. ["a/a1.opam"; "a/a2.opam"; "b/b1.opam"] -> [("a", ["a/a1.opam"; "a/a2.opam"], ["a1.dev"; "a2.dev"]); ("b", ["b/b1.opam"], ["b1.dev"]) ] *) let group_opam_files = ListLabels.fold_left ~init:[] ~f:(fun acc x -> let item = Fpath.v x in let dir = Fpath.parent item in let pkg = Filename.basename x |> Filename.chop_extension |> maybe_add_dev ~dir in match acc with | (prev_dir, prev_items, pkgs) :: rest when Fpath.equal dir prev_dir -> (prev_dir, x :: prev_items, pkg :: pkgs) :: rest | _ -> (dir, [ x ], [ pkg ]) :: acc) let mkdir dirs = let dirs = dirs |> List.map (fun (dir, _, _) -> Filename.quote (Fpath.to_string dir)) |> String.concat " " in [ run "mkdir -p %s" dirs ] Generate instructions to copy all the files in [ items ] into the image , creating the necessary directories first , and then pin them all . 
image, creating the necessary directories first, and then pin them all. *) let pin_opam_files groups = if groups = [] then [] else let cmds = mkdir groups @ (groups |> List.map (fun (dir, files, _) -> copy files ~dst:(Fpath.to_string dir))) @ [ groups |> List.concat_map (fun (dir, _, pkgs) -> pkgs |> List.map (fun pkg -> Printf.sprintf "opam pin add -yn %s %s" pkg (Filename.quote (Fpath.to_string dir)))) |> String.concat " && \n" |> run "%s"; ] in comment "Pin project opam files" :: workdir "/src" :: cmds (* Get the packages directly in "." *) let rec get_root_opam_packages = function | [] -> [] | (dir, _, pkgs) :: _ when Fpath.is_current_dir dir -> pkgs | _ :: rest -> get_root_opam_packages rest let install_deps ~groups ~selection = let { Selection.packages; _ } = selection in let root_pkgs = get_root_opam_packages groups in let non_root_pkgs = packages |> List.filter (fun pkg -> not (List.mem pkg root_pkgs)) in let non_root_pkgs_str = String.concat " " non_root_pkgs in let root_pkgs_str = String.concat " " root_pkgs in let install_cmds = match non_root_pkgs with | [] -> [ run "opam depext --update -y %s" root_pkgs_str ] | _ -> [ env "DEPS" non_root_pkgs_str; run "opam depext --update -y %s $DEPS" root_pkgs_str; run "opam install $DEPS"; ] in comment "Install opam deps" :: install_cmds let install_os_deps selection = let { Selection.variant; _ } = selection in let linux32 = if Variant.arch variant |> Ocaml_version.arch_is_32bit then [ shell [ "/usr/bin/linux32"; "/bin/sh"; "-c" ] ] else [] in let distro_extras = if Astring.String.is_prefix ~affix:"fedora" (Variant.id variant) then [ run "sudo dnf install -y findutils" ] (* (we need xargs) *) else [] in let result = linux32 @ distro_extras in if result = [] then [] else comment "Preamble" :: result let update_opam_repository selection = let { Selection.commits; _ } = selection in let commits_in_order = List.rev commits in let default = "-repository.git" in let commit, _ = List.partition (fun y -> String.equal 
default (fst y)) commits in let _, commit = List.hd commit in [ comment "Update opam-repository"; workdir "/home/opam/opam-repository"; run "(git cat-file -e %s || git fetch origin master) && git reset -q --hard \ %s && git log --no-decorate -n1 --oneline " commit commit; ] @ List.map (fun (repo, commit) -> if String.equal repo default then run "opam repo priority default 1 --set-default" else run "opam repo add rep-%s %s --set-default" (String.sub commit 0 7) repo) commits_in_order @ [ run "opam update -u" ] let copy_src = [ comment "Initialize project source"; copy [ "." ] ~dst:"/src/"; workdir "/src"; ] let pin_and_install_deps ~opam_files selection = let groups = group_opam_files opam_files in pin_opam_files groups @ install_deps ~groups ~selection let install_project_deps ~opam_files ~selection = install_os_deps selection @ update_opam_repository selection @ pin_and_install_deps ~opam_files selection let install_compiler commit = let switch_name = compiler_switch_name_from_commit commit in [ comment "Create switch for compiler (%s)" switch_name; run "opam switch create %s --empty && opam repository && opam pin add -y -k \ path --inplace-build ocaml-variants.$(opam show --file \ ocaml-variants.opam -f version) . 
&& eval $(opam env) && ocamlrun \ -version" switch_name; ] let clone_target_repo repo = let repo_url, repo_gref = Repo_url_utils.url_gref_from_url repo in [ comment "Clone source repo %s to /testsrc/src" repo; user_unix ~uid:0 ~gid:0; run "mkdir /testsrc"; run "chown 1000:1000 /testsrc"; user_unix ~uid:1000 ~gid:1000; run "git clone %s /testsrc/src && git -C /testsrc/src -c \ advice.detachedHead=false checkout %s" repo_url repo_gref; ] let print_compiler_version = run "eval $(opam env) && opam switch && ocamlrun -version" let spec_helper ~body ~repo ~base ~opam_files ~compiler_commit ~selection = stage ~from:base ([ comment "Variant: %s" (Fmt.str "%a" Variant.pp selection.Selection.variant); user_unix ~uid:1000 ~gid:1000; ] @ install_os_deps selection @ update_opam_repository selection @ (match compiler_commit with | None -> (print_compiler_version :: pin_and_install_deps ~opam_files selection) @ copy_src | Some compiler_commit -> ( (match repo with Some repo -> clone_target_repo repo | None -> []) @ copy_src @ install_compiler compiler_commit @ match repo with Some _ -> [ workdir "/testsrc/src" ] | None -> [])) @ body) let run_opam_exec cmd = run "opam exec -- %s" cmd let run_all_opam_exec cmds = List.map run_opam_exec cmds let spec_script_bare ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds = let body = comment "Run build" :: cmds in spec_helper ~body ~repo ~base ~opam_files ~compiler_commit ~selection let spec_script ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds = spec_script_bare ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds:(run_all_opam_exec cmds) let spec_dune ~repo ~base ~opam_files ~compiler_commit ~selection = let cmd = match selection.Selection.command with | Some c -> c | None -> "dune build @install @runtest" in let cmds = [ "opam install dune"; cmd ^ " && rm -rf _build" ] in spec_script ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds (* Remove packages that are not in upstream opam-repository *) let 
is_package_blocklisted p = match p with "coq-doc" | "coqide-server" | "lwt_luv" -> true | _ -> false let filter_opam_packages pkgs = pkgs |> List.filter (fun p -> not (is_package_blocklisted p)) Remove " vendor " opam files from the list of things that we 're going * to install . This is required for irmin , which has vendored some * packages that are not themselves in opam . * * This seems like a hack that is not generic enough to be useful for * other packages , but I ca n't think of a better way to handle this * right now . * to install. This is required for irmin, which has vendored some * packages that are not themselves in opam. * * This seems like a hack that is not generic enough to be useful for * other packages, but I can't think of a better way to handle this * right now. *) let filter_opam_files paths = paths |> List.filter (fun p -> not (Astring.String.is_prefix ~affix:"vendor" p)) let spec_opam_install ~base ~opam_files ~compiler_commit ~selection = let opam_packages = opam_files |> filter_opam_files |> List.map Filename.basename |> List.map Filename.chop_extension |> filter_opam_packages in let pkgs_str = Fmt.(to_to_string (list ~sep:(any " ") string) opam_packages) in let cmds = [ run "opam depext --update -y %s" pkgs_str; run_opam_exec (Fmt.str "opam install -t %s" pkgs_str); ] in spec_script_bare ~repo:None ~base ~opam_files ~compiler_commit ~selection ~cmds let spec_make ~repo ~base ~opam_files ~compiler_commit ~selection ~targets = let cmds = [ Format.sprintf "make %s" (String.concat " " targets) ] in spec_script ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds
null
https://raw.githubusercontent.com/ocurrent/ocaml-multicore-ci/d46eecaf7269283a4b95ee40d2a9d6c7ec34a7bf/lib/opam_build.ml
ocaml
If the package's directory name doesn't contain a dot then opam will default to using the last known version, which is usually wrong. In particular, if a multi-project repostory adds a new package with a constraint "{ =version }" on an existing one, this will fail because opam will pin the new package as "dev" but the old one with the version of its last release. Get the packages directly in "." (we need xargs) Remove packages that are not in upstream opam-repository
open Obuilder_spec let host_network = [ "host" ] let opam_download_cache = [ Cache.v "opam-archives" ~target:"/home/opam/.opam/download-cache" ] let run fmt = let network = host_network in let cache = opam_download_cache in Obuilder_spec.run ~network ~cache fmt let compiler_switch_name_from_commit commit = let s_hash = Current_git.Commit_id.hash commit |> Astring.String.with_range ~len:8 in "compiler-" ^ s_hash let maybe_add_dev ~dir name = if Fpath.is_current_dir dir || not (String.contains (Fpath.basename dir) '.') then name ^ ".dev" else name Group opam files by directory . e.g. [ " a / a1.opam " ; " a / a2.opam " ; " b / b1.opam " ] - > [ ( " a " , [ " a / a1.opam " ; " a / a2.opam " ] , [ " a1.dev " ; " a2.dev " ] ) ; ( " b " , [ " b / b1.opam " ] , [ " b1.dev " ] ) ] e.g. ["a/a1.opam"; "a/a2.opam"; "b/b1.opam"] -> [("a", ["a/a1.opam"; "a/a2.opam"], ["a1.dev"; "a2.dev"]); ("b", ["b/b1.opam"], ["b1.dev"]) ] *) let group_opam_files = ListLabels.fold_left ~init:[] ~f:(fun acc x -> let item = Fpath.v x in let dir = Fpath.parent item in let pkg = Filename.basename x |> Filename.chop_extension |> maybe_add_dev ~dir in match acc with | (prev_dir, prev_items, pkgs) :: rest when Fpath.equal dir prev_dir -> (prev_dir, x :: prev_items, pkg :: pkgs) :: rest | _ -> (dir, [ x ], [ pkg ]) :: acc) let mkdir dirs = let dirs = dirs |> List.map (fun (dir, _, _) -> Filename.quote (Fpath.to_string dir)) |> String.concat " " in [ run "mkdir -p %s" dirs ] Generate instructions to copy all the files in [ items ] into the image , creating the necessary directories first , and then pin them all . image, creating the necessary directories first, and then pin them all. 
*) let pin_opam_files groups = if groups = [] then [] else let cmds = mkdir groups @ (groups |> List.map (fun (dir, files, _) -> copy files ~dst:(Fpath.to_string dir))) @ [ groups |> List.concat_map (fun (dir, _, pkgs) -> pkgs |> List.map (fun pkg -> Printf.sprintf "opam pin add -yn %s %s" pkg (Filename.quote (Fpath.to_string dir)))) |> String.concat " && \n" |> run "%s"; ] in comment "Pin project opam files" :: workdir "/src" :: cmds let rec get_root_opam_packages = function | [] -> [] | (dir, _, pkgs) :: _ when Fpath.is_current_dir dir -> pkgs | _ :: rest -> get_root_opam_packages rest let install_deps ~groups ~selection = let { Selection.packages; _ } = selection in let root_pkgs = get_root_opam_packages groups in let non_root_pkgs = packages |> List.filter (fun pkg -> not (List.mem pkg root_pkgs)) in let non_root_pkgs_str = String.concat " " non_root_pkgs in let root_pkgs_str = String.concat " " root_pkgs in let install_cmds = match non_root_pkgs with | [] -> [ run "opam depext --update -y %s" root_pkgs_str ] | _ -> [ env "DEPS" non_root_pkgs_str; run "opam depext --update -y %s $DEPS" root_pkgs_str; run "opam install $DEPS"; ] in comment "Install opam deps" :: install_cmds let install_os_deps selection = let { Selection.variant; _ } = selection in let linux32 = if Variant.arch variant |> Ocaml_version.arch_is_32bit then [ shell [ "/usr/bin/linux32"; "/bin/sh"; "-c" ] ] else [] in let distro_extras = if Astring.String.is_prefix ~affix:"fedora" (Variant.id variant) then else [] in let result = linux32 @ distro_extras in if result = [] then [] else comment "Preamble" :: result let update_opam_repository selection = let { Selection.commits; _ } = selection in let commits_in_order = List.rev commits in let default = "-repository.git" in let commit, _ = List.partition (fun y -> String.equal default (fst y)) commits in let _, commit = List.hd commit in [ comment "Update opam-repository"; workdir "/home/opam/opam-repository"; run "(git cat-file -e %s || git fetch 
origin master) && git reset -q --hard \ %s && git log --no-decorate -n1 --oneline " commit commit; ] @ List.map (fun (repo, commit) -> if String.equal repo default then run "opam repo priority default 1 --set-default" else run "opam repo add rep-%s %s --set-default" (String.sub commit 0 7) repo) commits_in_order @ [ run "opam update -u" ] let copy_src = [ comment "Initialize project source"; copy [ "." ] ~dst:"/src/"; workdir "/src"; ] let pin_and_install_deps ~opam_files selection = let groups = group_opam_files opam_files in pin_opam_files groups @ install_deps ~groups ~selection let install_project_deps ~opam_files ~selection = install_os_deps selection @ update_opam_repository selection @ pin_and_install_deps ~opam_files selection let install_compiler commit = let switch_name = compiler_switch_name_from_commit commit in [ comment "Create switch for compiler (%s)" switch_name; run "opam switch create %s --empty && opam repository && opam pin add -y -k \ path --inplace-build ocaml-variants.$(opam show --file \ ocaml-variants.opam -f version) . 
&& eval $(opam env) && ocamlrun \ -version" switch_name; ] let clone_target_repo repo = let repo_url, repo_gref = Repo_url_utils.url_gref_from_url repo in [ comment "Clone source repo %s to /testsrc/src" repo; user_unix ~uid:0 ~gid:0; run "mkdir /testsrc"; run "chown 1000:1000 /testsrc"; user_unix ~uid:1000 ~gid:1000; run "git clone %s /testsrc/src && git -C /testsrc/src -c \ advice.detachedHead=false checkout %s" repo_url repo_gref; ] let print_compiler_version = run "eval $(opam env) && opam switch && ocamlrun -version" let spec_helper ~body ~repo ~base ~opam_files ~compiler_commit ~selection = stage ~from:base ([ comment "Variant: %s" (Fmt.str "%a" Variant.pp selection.Selection.variant); user_unix ~uid:1000 ~gid:1000; ] @ install_os_deps selection @ update_opam_repository selection @ (match compiler_commit with | None -> (print_compiler_version :: pin_and_install_deps ~opam_files selection) @ copy_src | Some compiler_commit -> ( (match repo with Some repo -> clone_target_repo repo | None -> []) @ copy_src @ install_compiler compiler_commit @ match repo with Some _ -> [ workdir "/testsrc/src" ] | None -> [])) @ body) let run_opam_exec cmd = run "opam exec -- %s" cmd let run_all_opam_exec cmds = List.map run_opam_exec cmds let spec_script_bare ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds = let body = comment "Run build" :: cmds in spec_helper ~body ~repo ~base ~opam_files ~compiler_commit ~selection let spec_script ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds = spec_script_bare ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds:(run_all_opam_exec cmds) let spec_dune ~repo ~base ~opam_files ~compiler_commit ~selection = let cmd = match selection.Selection.command with | Some c -> c | None -> "dune build @install @runtest" in let cmds = [ "opam install dune"; cmd ^ " && rm -rf _build" ] in spec_script ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds let is_package_blocklisted p = match p with "coq-doc" | "coqide-server" | 
"lwt_luv" -> true | _ -> false let filter_opam_packages pkgs = pkgs |> List.filter (fun p -> not (is_package_blocklisted p)) Remove " vendor " opam files from the list of things that we 're going * to install . This is required for irmin , which has vendored some * packages that are not themselves in opam . * * This seems like a hack that is not generic enough to be useful for * other packages , but I ca n't think of a better way to handle this * right now . * to install. This is required for irmin, which has vendored some * packages that are not themselves in opam. * * This seems like a hack that is not generic enough to be useful for * other packages, but I can't think of a better way to handle this * right now. *) let filter_opam_files paths = paths |> List.filter (fun p -> not (Astring.String.is_prefix ~affix:"vendor" p)) let spec_opam_install ~base ~opam_files ~compiler_commit ~selection = let opam_packages = opam_files |> filter_opam_files |> List.map Filename.basename |> List.map Filename.chop_extension |> filter_opam_packages in let pkgs_str = Fmt.(to_to_string (list ~sep:(any " ") string) opam_packages) in let cmds = [ run "opam depext --update -y %s" pkgs_str; run_opam_exec (Fmt.str "opam install -t %s" pkgs_str); ] in spec_script_bare ~repo:None ~base ~opam_files ~compiler_commit ~selection ~cmds let spec_make ~repo ~base ~opam_files ~compiler_commit ~selection ~targets = let cmds = [ Format.sprintf "make %s" (String.concat " " targets) ] in spec_script ~repo ~base ~opam_files ~compiler_commit ~selection ~cmds
e0775110d86f30a6d6dc41eebdd86639a6cc7bc8e0ffdb6187e2388e6c89c5dc
mpickering/apply-refact
Naming13.hs
replicateM_ = 1
null
https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Naming13.hs
haskell
replicateM_ = 1
c26139a9340f3dc57249070ac733eb575aa0419c503c58bd51d7d5901e335b6f
aryx/xix
unix.mli
(***********************************************************************) (* *) (* Objective Caml *) (* *) , projet Cristal , INRIA Rocquencourt (* *) Copyright 1996 Institut National de Recherche en Informatique et Automatique . Distributed only by permission . (* *) (***********************************************************************) $ I d : unix.mli , v 1.27 1997/08/29 15:37:19 xleroy Exp $ (* Module [Unix]: interface to the Unix system *) (*** Error report *) type error = (* Errors defined in the POSIX standard *) E2BIG (* Argument list too long *) | EACCES (* Permission denied *) | EAGAIN (* Resource temporarily unavailable; try again *) | EBADF (* Bad file descriptor *) | EBUSY (* Resource unavailable *) | ECHILD (* No child process *) Resource deadlock would occur | EDOM (* Domain error for math functions, etc. *) | EEXIST (* File exists *) | EFAULT (* Bad address *) | EFBIG (* File too large *) | EINTR (* Function interrupted by signal *) Invalid argument | EIO (* Hardware I/O error *) | EISDIR (* Is a directory *) | EMFILE (* Too many open files by the process *) | EMLINK (* Too many links *) | ENAMETOOLONG (* Filename too long *) | ENFILE (* Too many open files in the system *) | ENODEV (* No such device *) | ENOENT (* No such file or directory *) | ENOEXEC (* Not an executable file *) | ENOLCK (* No locks available *) | ENOMEM (* Not enough memory *) | ENOSPC (* No space left on device *) | ENOSYS (* Function not supported *) | ENOTDIR (* Not a directory *) | ENOTEMPTY (* Directory not empty *) | ENOTTY (* Inappropriate I/O control operation *) | ENXIO (* No such device or address *) | EPERM (* Operation not permitted *) | EPIPE (* Broken pipe *) | ERANGE (* Result too large *) | EROFS (* Read-only file system *) Invalid seek e.g. 
on a pipe | ESRCH (* No such process *) Invalid link Additional errors , mostly BSD | EWOULDBLOCK (* Operation would block *) | EINPROGRESS (* Operation now in progress *) | EALREADY (* Operation already in progress *) | ENOTSOCK (* Socket operation on non-socket *) | EDESTADDRREQ (* Destination address required *) | EMSGSIZE (* Message too long *) | EPROTOTYPE (* Protocol wrong type for socket *) | ENOPROTOOPT (* Protocol not available *) | EPROTONOSUPPORT (* Protocol not supported *) | ESOCKTNOSUPPORT (* Socket type not supported *) | EOPNOTSUPP (* Operation not supported on socket *) Protocol family not supported | EAFNOSUPPORT (* Address family not supported by protocol family *) | EADDRINUSE (* Address already in use *) | EADDRNOTAVAIL (* Can't assign requested address *) | ENETDOWN (* Network is down *) Network is unreachable Network dropped connection on reset | ECONNABORTED (* Software caused connection abort *) | ECONNRESET (* Connection reset by peer *) | ENOBUFS (* No buffer space available *) | EISCONN (* Socket is already connected *) | ENOTCONN (* Socket is not connected *) | ESHUTDOWN (* Can't send after socket shutdown *) | ETOOMANYREFS (* Too many references: can't splice *) Connection timed out Connection refused | EHOSTDOWN (* Host is down *) | EHOSTUNREACH (* No route to host *) | ELOOP (* Too many levels of symbolic links *) All other errors are mapped to EUNKNOWNERR | EUNKNOWNERR (* Unknown error *) (* The type of error codes. *) exception Unix_error of error * string * string Raised by the system calls below when an error is encountered . The first component is the error code ; the second component is the function name ; the third component is the string parameter to the function , if it has one , or the empty string otherwise . The first component is the error code; the second component is the function name; the third component is the string parameter to the function, if it has one, or the empty string otherwise. 
*) external error_message : error -> string = "unix_error_message" (* Return a string describing the given error code. *) val handle_unix_error : ('a -> 'b) -> 'a -> 'b [ handle_unix_error f x ] applies [ f ] to [ x ] and returns the result . If the exception [ Unix_error ] is raised , it prints a message describing the error and exits with code 2 . If the exception [Unix_error] is raised, it prints a message describing the error and exits with code 2. *) (*** Access to the process environment *) external environment : unit -> string array = "unix_environment" (* Return the process environment, as an array of strings with the format ``variable=value''. *) external getenv: string -> string = "sys_getenv" (* Return the value associated to a variable in the process environment. Raise [Not_found] if the variable is unbound. (This function is identical to [Sys.getenv].) *) external putenv: string -> string -> unit = "unix_putenv" [ Unix.putenv name value ] sets the value associated to a variable in the process environment . [ name ] is the name of the environment variable , and [ value ] its new associated value . variable in the process environment. [name] is the name of the environment variable, and [value] its new associated value. *) (*** Process handling *) type process_status = WEXITED of int | WSIGNALED of int | WSTOPPED of int The termination status of a process . [ WEXITED ] means that the process terminated normally by [ exit ] ; the argument is the return code . [ WSIGNALED ] means that the process was killed by a signal ; the argument is the signal number . [ WSTOPPED ] means that the process was stopped by a signal ; the argument is the signal number . process terminated normally by [exit]; the argument is the return code. [WSIGNALED] means that the process was killed by a signal; the argument is the signal number. [WSTOPPED] means that the process was stopped by a signal; the argument is the signal number. 
*) type wait_flag = WNOHANG | WUNTRACED Flags for [ waitopt ] and [ waitpid ] . [ WNOHANG ] means do not block if no child has died yet , but immediately return with a pid equal to 0 . [ WUNTRACED ] means report also the children that receive stop signals . [WNOHANG] means do not block if no child has died yet, but immediately return with a pid equal to 0. [WUNTRACED] means report also the children that receive stop signals. *) external execv : string -> string array -> unit = "unix_execv" (* [execv prog args] execute the program in file [prog], with the arguments [args], and the current process environment. *) external execve : string -> string array -> string array -> unit = "unix_execve" Same as [ execv ] , except that the third argument provides the environment to the program executed . environment to the program executed. *) external execvp : string -> string array -> unit = "unix_execvp" external execvpe : string -> string array -> string array -> unit = "unix_execvpe" (* Same as [execv] and [execvp] respectively, except that the program is searched in the path. *) external fork : unit -> int = "unix_fork" (* Fork a new process. The returned integer is 0 for the child process, the pid of the child process for the parent process. *) external wait : unit -> int * process_status = "unix_wait" Wait until one of the children processes die , and return its pid and termination status . and termination status. *) external waitpid : wait_flag list -> int -> int * process_status = "unix_waitpid" (* Same as [wait], but waits for the process whose pid is given. A pid of [-1] means wait for any child. A pid of [0] means wait for any child in the same process group as the current process. Negative pid arguments represent process groups. The list of options indicates whether [waitpid] should return immediately without waiting, or also report stopped children. 
*) val system : string -> process_status Execute the given command , wait until it terminates , and return its termination status . The string is interpreted by the shell [ /bin / sh ] and therefore can contain redirections , quotes , variables , etc . The result [ 127 ] indicates that the shell could n't be executed . its termination status. The string is interpreted by the shell [/bin/sh] and therefore can contain redirections, quotes, variables, etc. The result [WEXITED 127] indicates that the shell couldn't be executed. *) external getpid : unit -> int = "unix_getpid" (* Return the pid of the process. *) external getppid : unit -> int = "unix_getppid" (* Return the pid of the parent process. *) external nice : int -> int = "unix_nice" (* Change the process priority. The integer argument is added to the ``nice'' value. (Higher values of the ``nice'' value mean lower priorities.) Return the new nice value. *) (*** Basic file input/output *) type file_descr (* The abstract type of file descriptors. *) val stdin : file_descr val stdout : file_descr val stderr : file_descr (* File descriptors for standard input, standard output and standard error. *) type open_flag = O_RDONLY (* Open for reading *) | O_WRONLY (* Open for writing *) | O_RDWR (* Open for reading and writing *) | O_NONBLOCK (* Open in non-blocking mode *) | O_APPEND (* Open for append *) | O_CREAT (* Create if nonexistent *) Truncate to 0 length if existing | O_EXCL (* Fail if existing *) (* The flags to [open]. *) type file_perm = int (* The type of file access rights. *) external openfile : string -> open_flag list -> file_perm -> file_descr = "unix_open" Open the named file with the given flags . Third argument is the permissions to give to the file if it is created . Return a file descriptor on the named file . the permissions to give to the file if it is created. Return a file descriptor on the named file. *) external close : file_descr -> unit = "unix_close" (* Close a file descriptor. 
*) val read : file_descr -> string -> int -> int -> int [ read fd ] reads [ len ] characters from descriptor [ fd ] , storing them in string [ buff ] , starting at position [ ofs ] in string [ buff ] . Return the number of characters actually read . [fd], storing them in string [buff], starting at position [ofs] in string [buff]. Return the number of characters actually read. *) val write : file_descr -> string -> int -> int -> int [ write fd ] writes [ len ] characters to descriptor [ fd ] , taking them from string [ buff ] , starting at position [ ofs ] in string [ buff ] . Return the number of characters actually written . [fd], taking them from string [buff], starting at position [ofs] in string [buff]. Return the number of characters actually written. *) (*** Interfacing with the standard input/output library. *) external in_channel_of_descr : file_descr -> in_channel = "caml_open_descriptor" (* Create an input channel reading from the given descriptor. *) external out_channel_of_descr : file_descr -> out_channel = "caml_open_descriptor" (* Create an output channel writing on the given descriptor. *) external descr_of_in_channel : in_channel -> file_descr = "channel_descriptor" (* Return the descriptor corresponding to an input channel. *) external descr_of_out_channel : out_channel -> file_descr = "channel_descriptor" (* Return the descriptor corresponding to an output channel. *) (*** Seeking and truncating *) type seek_command = SEEK_SET | SEEK_CUR | SEEK_END Positioning modes for [ lseek ] . [ SEEK_SET ] indicates positions relative to the beginning of the file , [ SEEK_CUR ] relative to the current position , [ SEEK_END ] relative to the end of the file . relative to the beginning of the file, [SEEK_CUR] relative to the current position, [SEEK_END] relative to the end of the file. 
*) external lseek : file_descr -> int -> seek_command -> int = "unix_lseek" (* Set the current position for a file descriptor *) external truncate : string -> int -> unit = "unix_truncate" Truncates the named file to the given size . external ftruncate : file_descr -> int -> unit = "unix_ftruncate" (* Truncates the file corresponding to the given descriptor to the given size. *) (*** File statistics *) type file_kind = S_REG (* Regular file *) | S_DIR (* Directory *) | S_CHR (* Character device *) | S_BLK (* Block device *) | S_LNK (* Symbolic link *) | S_FIFO (* Named pipe *) | S_SOCK (* Socket *) type stats = { st_dev : int; (* Device number *) Inode number st_kind : file_kind; (* Kind of the file *) st_perm : file_perm; (* Access rights *) st_nlink : int; (* Number of links *) st_uid : int; (* User id of the owner *) st_gid : int; (* Group id of the owner *) st_rdev : int; (* Device minor number *) st_size : int; (* Size in bytes *) st_atime : float; (* Last access time *) st_mtime : float; (* Last modification time *) st_ctime : float } (* Last status change time *) (* The informations returned by the [stat] calls. *) external stat : string -> stats = "unix_stat" (* Return the information for the named file. *) external lstat : string -> stats = "unix_lstat" (* Same as [stat], but in case the file is a symbolic link, return the information for the link itself. *) external fstat : file_descr -> stats = "unix_fstat" (* Return the information for the file associated with the given descriptor. *) (*** Operations on file names *) external unlink : string -> unit = "unix_unlink" (* Removes the named file *) external rename : string -> string -> unit = "unix_rename" (* [rename old new] changes the name of a file from [old] to [new]. *) external link : string -> string -> unit = "unix_link" (* [link source dest] creates a hard link named [dest] to the file named [new]. 
*) (*** File permissions and ownership *) type access_permission = R_OK (* Read permission *) | W_OK (* Write permission *) | X_OK (* Execution permission *) | F_OK (* File exists *) (* Flags for the [access] call. *) external chmod : string -> file_perm -> unit = "unix_chmod" (* Change the permissions of the named file. *) external fchmod : file_descr -> file_perm -> unit = "unix_fchmod" (* Change the permissions of an opened file. *) external chown : string -> int -> int -> unit = "unix_chown" Change the owner uid and owner gid of the named file . external fchown : file_descr -> int -> int -> unit = "unix_fchown" Change the owner uid and owner gid of an opened file . external umask : int -> int = "unix_umask" (* Set the process creation mask, and return the previous mask. *) external access : string -> access_permission list -> unit = "unix_access" (* Check that the process has the given permissions over the named file. Raise [Unix_error] otherwise. *) (*** Operations on file descriptors *) external dup : file_descr -> file_descr = "unix_dup" (* Return a new file descriptor referencing the same file as the given descriptor. *) external dup2 : file_descr -> file_descr -> unit = "unix_dup2" (* [dup2 fd1 fd2] duplicates [fd1] to [fd2], closing [fd2] if already opened. *) external set_nonblock : file_descr -> unit = "unix_set_nonblock" external clear_nonblock : file_descr -> unit = "unix_clear_nonblock" (* Set or clear the ``non-blocking'' flag on the given descriptor. When the non-blocking flag is set, reading on a descriptor on which there is temporarily no data available raises the [EAGAIN] or [EWOULDBLOCK] error instead of blocking; writing on a descriptor on which there is temporarily no room for writing also raises [EAGAIN] or [EWOULDBLOCK]. 
*) external set_close_on_exec : file_descr -> unit = "unix_set_close_on_exec" external clear_close_on_exec : file_descr -> unit = "unix_clear_close_on_exec" (* Set or clear the ``close-on-exec'' flag on the given descriptor. A descriptor with the close-on-exec flag is automatically closed when the current process starts another program with one of the [exec] functions. *) (*** Directories *) external mkdir : string -> file_perm -> unit = "unix_mkdir" (* Create a directory with the given permissions. *) external rmdir : string -> unit = "unix_rmdir" (* Remove an empty directory. *) external chdir : string -> unit = "unix_chdir" (* Change the process working directory. *) external getcwd : unit -> string = "unix_getcwd" (* Return the name of the current working directory. *) type dir_handle (* The type of descriptors over opened directories. *) external opendir : string -> dir_handle = "unix_opendir" (* Open a descriptor on a directory *) external readdir : dir_handle -> string = "unix_readdir" (* Return the next entry in a directory. Raise [End_of_file] when the end of the directory has been reached. *) external rewinddir : dir_handle -> unit = "unix_rewinddir" (* Reposition the descriptor to the beginning of the directory *) external closedir : dir_handle -> unit = "unix_closedir" (* Close a directory descriptor. *) (*** Pipes and redirections *) external pipe : unit -> file_descr * file_descr = "unix_pipe" Create a pipe . The first component of the result is opened for reading , that 's the exit to the pipe . The second component is opened for writing , that 's the entrance to the pipe . for reading, that's the exit to the pipe. The second component is opened for writing, that's the entrance to the pipe. *) external mkfifo : string -> file_perm -> unit = "unix_mkfifo" (* Create a named pipe with the given permissions. 
*) (*** High-level process and redirection management *) val create_process : string -> string array -> file_descr -> file_descr -> file_descr -> int [ create_process prog args new_stdin ] forks a new process that executes the program in file [ prog ] , with arguments [ args ] . The pid of the new process is returned immediately ; the new process executes concurrently with the current process . The standard input and outputs of the new process are connected to the descriptors [ new_stdin ] , [ new_stdout ] and [ new_stderr ] . Passing e.g. [ stdout ] for [ new_stdout ] prevents the redirection and causes the new process to have the same standard output as the current process . The executable file [ prog ] is searched in the path . The new process has the same environment as the current process . All file descriptors of the current process are closed in the new process , except those redirected to standard input and outputs . forks a new process that executes the program in file [prog], with arguments [args]. The pid of the new process is returned immediately; the new process executes concurrently with the current process. The standard input and outputs of the new process are connected to the descriptors [new_stdin], [new_stdout] and [new_stderr]. Passing e.g. [stdout] for [new_stdout] prevents the redirection and causes the new process to have the same standard output as the current process. The executable file [prog] is searched in the path. The new process has the same environment as the current process. All file descriptors of the current process are closed in the new process, except those redirected to standard input and outputs. *) val create_process_env : string -> string array -> string array -> file_descr -> file_descr -> file_descr -> int [ create_process_env prog args env new_stdin new_stdout new_stderr ] works as [ create_process ] , except that the extra argument [ env ] specifies the environment passed to the program . 
works as [create_process], except that the extra argument [env] specifies the environment passed to the program. *) val open_process_in: string -> in_channel val open_process_out: string -> out_channel val open_process: string -> in_channel * out_channel (* High-level pipe and process management. These functions run the given command in parallel with the program, and return channels connected to the standard input and/or the standard output of the command. The command is interpreted by the shell [/bin/sh] (cf. [system]). Warning: writes on channels are buffered, hence be careful to call [flush] at the right times to ensure correct synchronization. *) val close_process_in: in_channel -> process_status val close_process_out: out_channel -> process_status val close_process: in_channel * out_channel -> process_status (* Close channels opened by [open_process_in], [open_process_out] and [open_process], respectively, wait for the associated command to terminate, and return its termination status. *) (*** Symbolic links *) external symlink : string -> string -> unit = "unix_symlink" (* [symlink source dest] creates the file [dest] as a symbolic link to the file [source]. *) external readlink : string -> string = "unix_readlink" (* Read the contents of a link. *) (*** Polling *) external select : file_descr list -> file_descr list -> file_descr list -> float -> file_descr list * file_descr list * file_descr list = "unix_select" Wait until some input / output operations become possible on some channels . The three list arguments are , respectively , a set of descriptors to check for reading ( first argument ) , for writing ( second argument ) , or for exceptional conditions ( third argument ) . The fourth argument is the maximal timeout , in seconds ; a negative fourth argument means no timeout ( unbounded wait ) . 
The result is composed of three sets of descriptors : those ready for reading ( first component ) , ready for writing ( second component ) , and over which an exceptional condition is pending ( third component ) . some channels. The three list arguments are, respectively, a set of descriptors to check for reading (first argument), for writing (second argument), or for exceptional conditions (third argument). The fourth argument is the maximal timeout, in seconds; a negative fourth argument means no timeout (unbounded wait). The result is composed of three sets of descriptors: those ready for reading (first component), ready for writing (second component), and over which an exceptional condition is pending (third component). *) (*** Locking *) type lock_command = F_ULOCK (* Unlock a region *) | F_LOCK (* Lock a region, and block if already locked *) | F_TLOCK (* Lock a region, or fail if already locked *) | F_TEST (* Test a region for other process' locks *) (* Commands for [lockf]. *) external lockf : file_descr -> lock_command -> int -> unit = "unix_lockf" [ lockf fd cmd size ] puts a lock on a region of the file opened as [ fd ] . The region starts at the current read / write position for [ fd ] ( as set by [ lseek ] ) , and extends [ size ] bytes forward if [ size ] is positive , [ size ] bytes backwards if [ size ] is negative , or to the end of the file if [ size ] is zero . as [fd]. The region starts at the current read/write position for [fd] (as set by [lseek]), and extends [size] bytes forward if [size] is positive, [size] bytes backwards if [size] is negative, or to the end of the file if [size] is zero. *) (*** Signals *) external kill : int -> int -> unit = "unix_kill" (* [kill pid sig] sends signal number [sig] to the process with id [pid]. *) external pause : unit -> unit = "unix_pause" (* Wait until a non-ignored signal is delivered. 
*) (*** Time functions *) type process_times = { tms_utime : float; (* User time for the process *) tms_stime : float; (* System time for the process *) tms_cutime : float; (* User time for the children processes *) tms_cstime : float } (* System time for the children processes *) (* The execution times (CPU times) of a process. *) type tm = Seconds 0 .. 59 Minutes 0 .. 59 Hours 0 .. 23 Day of month 1 .. 31 Month of year 0 .. 11 Year - 1900 Day of week ( Sunday is 0 ) Day of year 0 .. 365 tm_isdst : bool } (* Daylight time savings in effect *) (* The type representing wallclock time and calendar date. *) external time : unit -> float = "unix_time" Return the current time since 00:00:00 GMT , Jan. 1 , 1970 , in seconds . in seconds. *) external gettimeofday : unit -> float = "unix_gettimeofday" (* Same as [time], but with resolution better than 1 second. *) external gmtime : float -> tm = "unix_gmtime" Convert a time in seconds , as returned by [ time ] , into a date and a time . Assumes Greenwich meridian time zone . a time. Assumes Greenwich meridian time zone. *) external localtime : float -> tm = "unix_localtime" Convert a time in seconds , as returned by [ time ] , into a date and a time . Assumes the local time zone . a time. Assumes the local time zone. *) external mktime : tm -> float * tm = "unix_mktime" Convert a date and time , specified by the [ tm ] argument , into a time in seconds , as returned by [ time ] . Also return a normalized copy of the given [ tm ] record , with the [ tm_wday ] and [ tm_yday ] recomputed from the other fields . a time in seconds, as returned by [time]. Also return a normalized copy of the given [tm] record, with the [tm_wday] and [tm_yday] recomputed from the other fields. *) external alarm : int -> int = "unix_alarm" Schedule a [ SIGALRM ] signals after the given number of seconds . external sleep : int -> unit = "unix_sleep" (* Stop execution for the given number of seconds. 
*) external times : unit -> process_times = "unix_times_bytecode" "unix_times_native" (* Return the execution times of the process. *) external utimes : string -> float -> float -> unit = "unix_utimes" Set the last access time ( second arg ) and last modification time ( third arg ) for a file . Times are expressed in seconds from 00:00:00 GMT , Jan. 1 , 1970 . (third arg) for a file. Times are expressed in seconds from 00:00:00 GMT, Jan. 1, 1970. *) type interval_timer = ITIMER_REAL | ITIMER_VIRTUAL | ITIMER_PROF The three kinds of interval timers . [ ITIMER_REAL ] decrements in real time , and sends the signal [ SIGALRM ] when expired . [ ITIMER_VIRTUAL ] decrements in process virtual time , and sends [ ] when expired . [ ITIMER_PROF ] ( for profiling ) decrements both when the process is running and when the system is running on behalf of the process ; it sends [ SIGPROF ] when expired . [ITIMER_REAL] decrements in real time, and sends the signal [SIGALRM] when expired. [ITIMER_VIRTUAL] decrements in process virtual time, and sends [SIGVTALRM] when expired. [ITIMER_PROF] (for profiling) decrements both when the process is running and when the system is running on behalf of the process; it sends [SIGPROF] when expired. *) type interval_timer_status = { it_interval: float; (* Period *) it_value: float } (* Current value of the timer *) (* The type describing the status of an interval timer *) external getitimer: interval_timer -> interval_timer_status = "unix_getitimer" "unix_getitimer_native" (* Return the current status of the given interval timer. *) external setitimer: interval_timer -> interval_timer_status -> interval_timer_status = "unix_setitimer" "unix_setitimer_native" [ setitimer t s ] sets the interval timer [ t ] and returns its previous status . 
The [ s ] argument is interpreted as follows : [ s.it_value ] , if nonzero , is the time to the next timer expiration ; [ s.it_interval ] , if nonzero , specifies a value to be used in reloading it_value when the timer expires . Setting [ s.it_value ] to zero disable the timer . Setting [ s.it_interval ] to zero causes the timer to be disabled after its next expiration . its previous status. The [s] argument is interpreted as follows: [s.it_value], if nonzero, is the time to the next timer expiration; [s.it_interval], if nonzero, specifies a value to be used in reloading it_value when the timer expires. Setting [s.it_value] to zero disable the timer. Setting [s.it_interval] to zero causes the timer to be disabled after its next expiration. *) (*** User id, group id *) external getuid : unit -> int = "unix_getuid" (* Return the user id of the user executing the process. *) external geteuid : unit -> int = "unix_geteuid" (* Return the effective user id under which the process runs. *) external setuid : int -> unit = "unix_setuid" (* Set the real user id and effective user id for the process. *) external getgid : unit -> int = "unix_getgid" (* Return the group id of the user executing the process. *) external getegid : unit -> int = "unix_getegid" (* Return the effective group id under which the process runs. *) external setgid : int -> unit = "unix_setgid" (* Set the real group id and effective group id for the process. *) external getgroups : unit -> int array = "unix_getgroups" (* Return the list of groups to which the user executing the process belongs. *) type passwd_entry = { pw_name : string; pw_passwd : string; pw_uid : int; pw_gid : int; pw_gecos : string; pw_dir : string; pw_shell : string } (* Structure of entries in the [passwd] database. *) type group_entry = { gr_name : string; gr_passwd : string; gr_gid : int; gr_mem : string array } (* Structure of entries in the [groups] database. 
*) external getlogin : unit -> string = "unix_getlogin" (* Return the login name of the user executing the process. *) external getpwnam : string -> passwd_entry = "unix_getpwnam" (* Find an entry in [passwd] with the given name, or raise [Not_found]. *) external getgrnam : string -> group_entry = "unix_getgrnam" (* Find an entry in [group] with the given name, or raise [Not_found]. *) (* external getpwuid : int -> passwd_entry = "unix_getpwuid" (* Find an entry in [passwd] with the given user id, or raise [Not_found]. *) external getgrgid : int -> group_entry = "unix_getgrgid" (* Find an entry in [group] with the given group id, or raise [Not_found]. *) *) (*** Internet addresses *) type inet_addr (* The abstract type of Internet addresses. *) external inet_addr_of_string : string -> inet_addr = "unix_inet_addr_of_string" external string_of_inet_addr : inet_addr -> string = "unix_string_of_inet_addr" (* Conversions between string with the format [XXX.YYY.ZZZ.TTT] and Internet addresses. [inet_addr_of_string] raises [Failure] when given a string that does not match this format. *) val inet_addr_any : inet_addr (* A special Internet address, for use only with [bind], representing all the Internet addresses that the host machine possesses. *) (*** Sockets *) type socket_domain = PF_UNIX (* Unix domain *) | PF_INET (* Internet domain *) (* The type of socket domains. *) type socket_type = SOCK_STREAM (* Stream socket *) Datagram socket | SOCK_RAW (* Raw socket *) | SOCK_SEQPACKET (* Sequenced packets socket *) (* The type of socket kinds, specifying the semantics of communications. *) type sockaddr = ADDR_UNIX of string | ADDR_INET of inet_addr * int The type of socket addresses . [ ADDR_UNIX name ] is a socket address in the Unix domain ; [ name ] is a file name in the file system . [ ADDR_INET(addr , port ) ] is a socket address in the Internet domain ; [ addr ] is the Internet address of the machine , and [ port ] is the port number . 
address in the Unix domain; [name] is a file name in the file system. [ADDR_INET(addr,port)] is a socket address in the Internet domain; [addr] is the Internet address of the machine, and [port] is the port number. *) external socket : socket_domain -> socket_type -> int -> file_descr = "unix_socket" Create a new socket in the given domain , and with the given kind . The third argument is the protocol type ; 0 selects the default protocol for that kind of sockets . given kind. The third argument is the protocol type; 0 selects the default protocol for that kind of sockets. *) external socketpair : socket_domain -> socket_type -> int -> file_descr * file_descr = "unix_socketpair" (* Create a pair of unnamed sockets, connected together. *) external accept : file_descr -> file_descr * sockaddr = "unix_accept" (* Accept connections on the given socket. The returned descriptor is a socket connected to the client; the returned address is the address of the connecting client. *) external bind : file_descr -> sockaddr -> unit = "unix_bind" (* Bind a socket to an address. *) external connect : file_descr -> sockaddr -> unit = "unix_connect" Connect a socket to an address . external listen : file_descr -> int -> unit = "unix_listen" (* Set up a socket for receiving connection requests. The integer argument is the maximal number of pending requests. *) type shutdown_command = SHUTDOWN_RECEIVE (* Close for receiving *) | SHUTDOWN_SEND (* Close for sending *) | SHUTDOWN_ALL (* Close both *) (* The type of commands for [shutdown]. *) external shutdown : file_descr -> shutdown_command -> unit = "unix_shutdown" Shutdown a socket connection . [ SHUTDOWN_SEND ] as second argument causes reads on the other end of the connection to return an end - of - file condition . [ SHUTDOWN_RECEIVE ] causes writes on the other end of the connection to return a closed pipe condition ( [ ] signal ) . causes reads on the other end of the connection to return an end-of-file condition. 
[SHUTDOWN_RECEIVE] causes writes on the other end of the connection to return a closed pipe condition ([SIGPIPE] signal). *) external getsockname : file_descr -> sockaddr = "unix_getsockname" (* Return the address of the given socket. *) external getpeername : file_descr -> sockaddr = "unix_getpeername" (* Return the address of the host connected to the given socket. *) type msg_flag = MSG_OOB | MSG_DONTROUTE | MSG_PEEK (* The flags for [recv], [recvfrom], [send] and [sendto]. *) val recv : file_descr -> string -> int -> int -> msg_flag list -> int val recvfrom : file_descr -> string -> int -> int -> msg_flag list -> int * sockaddr (* Receive data from an unconnected socket. *) val send : file_descr -> string -> int -> int -> msg_flag list -> int val sendto : file_descr -> string -> int -> int -> msg_flag list -> sockaddr -> int (* Send data over an unconnected socket. *) type socket_option = SO_DEBUG (* Record debugging information *) | SO_BROADCAST (* Permit sending of broadcast messages *) | SO_REUSEADDR (* Allow reuse of local addresses for bind *) | SO_KEEPALIVE (* Keep connection active *) | SO_DONTROUTE (* Bypass the standard routing algorithms *) | SO_OOBINLINE (* Leave out-of-band data in line *) (* The socket options settable with [setsockopt]. *) external getsockopt : file_descr -> socket_option -> bool = "unix_getsockopt" (* Return the current status of an option in the given socket. *) external setsockopt : file_descr -> socket_option -> bool -> unit = "unix_setsockopt" (* Set or clear an option in the given socket. *) (*** High-level network connection functions *) val open_connection : sockaddr -> in_channel * out_channel Connect to a server at the given address . Return a pair of buffered channels connected to the server . Remember to call [ flush ] on the output channel at the right times to ensure correct synchronization . Return a pair of buffered channels connected to the server. 
Remember to call [flush] on the output channel at the right times to ensure correct synchronization. *) val shutdown_connection : in_channel -> unit (* ``Shut down'' a connection established with [open_connection]; that is, transmit an end-of-file condition to the server reading on the other side of the connection. *) val establish_server : (in_channel -> out_channel -> 'a) -> sockaddr -> unit Establish a server on the given address . The function given as first argument is called for each connection with two buffered channels connected to the client . A new process is created for each connection . The function [ establish_server ] never returns normally . The function given as first argument is called for each connection with two buffered channels connected to the client. A new process is created for each connection. The function [establish_server] never returns normally. *) (*** Host and protocol databases *) type host_entry = { h_name : string; h_aliases : string array; h_addrtype : socket_domain; h_addr_list : inet_addr array } (* Structure of entries in the [hosts] database. *) type protocol_entry = { p_name : string; p_aliases : string array; p_proto : int } (* Structure of entries in the [protocols] database. *) type service_entry = { s_name : string; s_aliases : string array; s_port : int; s_proto : string } (* Structure of entries in the [services] database. *) external gethostname : unit -> string = "unix_gethostname" (* Return the name of the local host. *) external gethostbyname : string -> host_entry = "unix_gethostbyname" (* Find an entry in [hosts] with the given name, or raise [Not_found]. *) external gethostbyaddr : inet_addr -> host_entry = "unix_gethostbyaddr" (* Find an entry in [hosts] with the given address, or raise [Not_found]. *) external getprotobyname : string -> protocol_entry = "unix_getprotobyname" (* Find an entry in [protocols] with the given name, or raise [Not_found]. 
*) external getprotobynumber : int -> protocol_entry = "unix_getprotobynumber" (* Find an entry in [protocols] with the given protocol number, or raise [Not_found]. *) external getservbyname : string -> string -> service_entry = "unix_getservbyname" (* Find an entry in [services] with the given name, or raise [Not_found]. *) external getservbyport : int -> string -> service_entry = "unix_getservbyport" (* Find an entry in [services] with the given service number, or raise [Not_found]. *) (*** Terminal interface *) pad : I removed it , has no such terminal
null
https://raw.githubusercontent.com/aryx/xix/60ce1bd9a3f923e0e8bb2192f8938a9aa49c739c/lib_system/unix/unix.mli
ocaml
********************************************************************* Objective Caml ********************************************************************* Module [Unix]: interface to the Unix system ** Error report Errors defined in the POSIX standard Argument list too long Permission denied Resource temporarily unavailable; try again Bad file descriptor Resource unavailable No child process Domain error for math functions, etc. File exists Bad address File too large Function interrupted by signal Hardware I/O error Is a directory Too many open files by the process Too many links Filename too long Too many open files in the system No such device No such file or directory Not an executable file No locks available Not enough memory No space left on device Function not supported Not a directory Directory not empty Inappropriate I/O control operation No such device or address Operation not permitted Broken pipe Result too large Read-only file system No such process Operation would block Operation now in progress Operation already in progress Socket operation on non-socket Destination address required Message too long Protocol wrong type for socket Protocol not available Protocol not supported Socket type not supported Operation not supported on socket Address family not supported by protocol family Address already in use Can't assign requested address Network is down Software caused connection abort Connection reset by peer No buffer space available Socket is already connected Socket is not connected Can't send after socket shutdown Too many references: can't splice Host is down No route to host Too many levels of symbolic links Unknown error The type of error codes. Return a string describing the given error code. ** Access to the process environment Return the process environment, as an array of strings with the format ``variable=value''. Return the value associated to a variable in the process environment. Raise [Not_found] if the variable is unbound. 
(This function is identical to [Sys.getenv].) ** Process handling [execv prog args] execute the program in file [prog], with the arguments [args], and the current process environment. Same as [execv] and [execvp] respectively, except that the program is searched in the path. Fork a new process. The returned integer is 0 for the child process, the pid of the child process for the parent process. Same as [wait], but waits for the process whose pid is given. A pid of [-1] means wait for any child. A pid of [0] means wait for any child in the same process group as the current process. Negative pid arguments represent process groups. The list of options indicates whether [waitpid] should return immediately without waiting, or also report stopped children. Return the pid of the process. Return the pid of the parent process. Change the process priority. The integer argument is added to the ``nice'' value. (Higher values of the ``nice'' value mean lower priorities.) Return the new nice value. ** Basic file input/output The abstract type of file descriptors. File descriptors for standard input, standard output and standard error. Open for reading Open for writing Open for reading and writing Open in non-blocking mode Open for append Create if nonexistent Fail if existing The flags to [open]. The type of file access rights. Close a file descriptor. ** Interfacing with the standard input/output library. Create an input channel reading from the given descriptor. Create an output channel writing on the given descriptor. Return the descriptor corresponding to an input channel. Return the descriptor corresponding to an output channel. ** Seeking and truncating Set the current position for a file descriptor Truncates the file corresponding to the given descriptor to the given size. 
** File statistics Regular file Directory Character device Block device Symbolic link Named pipe Socket Device number Kind of the file Access rights Number of links User id of the owner Group id of the owner Device minor number Size in bytes Last access time Last modification time Last status change time The informations returned by the [stat] calls. Return the information for the named file. Same as [stat], but in case the file is a symbolic link, return the information for the link itself. Return the information for the file associated with the given descriptor. ** Operations on file names Removes the named file [rename old new] changes the name of a file from [old] to [new]. [link source dest] creates a hard link named [dest] to the file named [new]. ** File permissions and ownership Read permission Write permission Execution permission File exists Flags for the [access] call. Change the permissions of the named file. Change the permissions of an opened file. Set the process creation mask, and return the previous mask. Check that the process has the given permissions over the named file. Raise [Unix_error] otherwise. ** Operations on file descriptors Return a new file descriptor referencing the same file as the given descriptor. [dup2 fd1 fd2] duplicates [fd1] to [fd2], closing [fd2] if already opened. Set or clear the ``non-blocking'' flag on the given descriptor. When the non-blocking flag is set, reading on a descriptor on which there is temporarily no data available raises the [EAGAIN] or [EWOULDBLOCK] error instead of blocking; writing on a descriptor on which there is temporarily no room for writing also raises [EAGAIN] or [EWOULDBLOCK]. Set or clear the ``close-on-exec'' flag on the given descriptor. A descriptor with the close-on-exec flag is automatically closed when the current process starts another program with one of the [exec] functions. ** Directories Create a directory with the given permissions. Remove an empty directory. 
Change the process working directory. Return the name of the current working directory. The type of descriptors over opened directories. Open a descriptor on a directory Return the next entry in a directory. Raise [End_of_file] when the end of the directory has been reached. Reposition the descriptor to the beginning of the directory Close a directory descriptor. ** Pipes and redirections Create a named pipe with the given permissions. ** High-level process and redirection management High-level pipe and process management. These functions run the given command in parallel with the program, and return channels connected to the standard input and/or the standard output of the command. The command is interpreted by the shell [/bin/sh] (cf. [system]). Warning: writes on channels are buffered, hence be careful to call [flush] at the right times to ensure correct synchronization. Close channels opened by [open_process_in], [open_process_out] and [open_process], respectively, wait for the associated command to terminate, and return its termination status. ** Symbolic links [symlink source dest] creates the file [dest] as a symbolic link to the file [source]. Read the contents of a link. ** Polling ** Locking Unlock a region Lock a region, and block if already locked Lock a region, or fail if already locked Test a region for other process' locks Commands for [lockf]. ** Signals [kill pid sig] sends signal number [sig] to the process with id [pid]. Wait until a non-ignored signal is delivered. ** Time functions User time for the process System time for the process User time for the children processes System time for the children processes The execution times (CPU times) of a process. Daylight time savings in effect The type representing wallclock time and calendar date. Same as [time], but with resolution better than 1 second. Stop execution for the given number of seconds. Return the execution times of the process. 
Period Current value of the timer The type describing the status of an interval timer Return the current status of the given interval timer. ** User id, group id Return the user id of the user executing the process. Return the effective user id under which the process runs. Set the real user id and effective user id for the process. Return the group id of the user executing the process. Return the effective group id under which the process runs. Set the real group id and effective group id for the process. Return the list of groups to which the user executing the process belongs. Structure of entries in the [passwd] database. Structure of entries in the [groups] database. Return the login name of the user executing the process. Find an entry in [passwd] with the given name, or raise [Not_found]. Find an entry in [group] with the given name, or raise [Not_found]. external getpwuid : int -> passwd_entry = "unix_getpwuid" (* Find an entry in [passwd] with the given user id, or raise [Not_found]. Find an entry in [group] with the given group id, or raise [Not_found]. ** Internet addresses The abstract type of Internet addresses. Conversions between string with the format [XXX.YYY.ZZZ.TTT] and Internet addresses. [inet_addr_of_string] raises [Failure] when given a string that does not match this format. A special Internet address, for use only with [bind], representing all the Internet addresses that the host machine possesses. ** Sockets Unix domain Internet domain The type of socket domains. Stream socket Raw socket Sequenced packets socket The type of socket kinds, specifying the semantics of communications. Create a pair of unnamed sockets, connected together. Accept connections on the given socket. The returned descriptor is a socket connected to the client; the returned address is the address of the connecting client. Bind a socket to an address. Set up a socket for receiving connection requests. The integer argument is the maximal number of pending requests. 
Close for receiving Close for sending Close both The type of commands for [shutdown]. Return the address of the given socket. Return the address of the host connected to the given socket. The flags for [recv], [recvfrom], [send] and [sendto]. Receive data from an unconnected socket. Send data over an unconnected socket. Record debugging information Permit sending of broadcast messages Allow reuse of local addresses for bind Keep connection active Bypass the standard routing algorithms Leave out-of-band data in line The socket options settable with [setsockopt]. Return the current status of an option in the given socket. Set or clear an option in the given socket. ** High-level network connection functions ``Shut down'' a connection established with [open_connection]; that is, transmit an end-of-file condition to the server reading on the other side of the connection. ** Host and protocol databases Structure of entries in the [hosts] database. Structure of entries in the [protocols] database. Structure of entries in the [services] database. Return the name of the local host. Find an entry in [hosts] with the given name, or raise [Not_found]. Find an entry in [hosts] with the given address, or raise [Not_found]. Find an entry in [protocols] with the given name, or raise [Not_found]. Find an entry in [protocols] with the given protocol number, or raise [Not_found]. Find an entry in [services] with the given name, or raise [Not_found]. Find an entry in [services] with the given service number, or raise [Not_found]. ** Terminal interface
, projet Cristal , INRIA Rocquencourt Copyright 1996 Institut National de Recherche en Informatique et Automatique . Distributed only by permission . $ I d : unix.mli , v 1.27 1997/08/29 15:37:19 xleroy Exp $ type error = Resource deadlock would occur Invalid argument Invalid seek e.g. on a pipe Invalid link Additional errors , mostly BSD Protocol family not supported Network is unreachable Network dropped connection on reset Connection timed out Connection refused All other errors are mapped to EUNKNOWNERR exception Unix_error of error * string * string Raised by the system calls below when an error is encountered . The first component is the error code ; the second component is the function name ; the third component is the string parameter to the function , if it has one , or the empty string otherwise . The first component is the error code; the second component is the function name; the third component is the string parameter to the function, if it has one, or the empty string otherwise. *) external error_message : error -> string = "unix_error_message" val handle_unix_error : ('a -> 'b) -> 'a -> 'b [ handle_unix_error f x ] applies [ f ] to [ x ] and returns the result . If the exception [ Unix_error ] is raised , it prints a message describing the error and exits with code 2 . If the exception [Unix_error] is raised, it prints a message describing the error and exits with code 2. *) external environment : unit -> string array = "unix_environment" external getenv: string -> string = "sys_getenv" external putenv: string -> string -> unit = "unix_putenv" [ Unix.putenv name value ] sets the value associated to a variable in the process environment . [ name ] is the name of the environment variable , and [ value ] its new associated value . variable in the process environment. [name] is the name of the environment variable, and [value] its new associated value. 
*) type process_status = WEXITED of int | WSIGNALED of int | WSTOPPED of int The termination status of a process . [ WEXITED ] means that the process terminated normally by [ exit ] ; the argument is the return code . [ WSIGNALED ] means that the process was killed by a signal ; the argument is the signal number . [ WSTOPPED ] means that the process was stopped by a signal ; the argument is the signal number . process terminated normally by [exit]; the argument is the return code. [WSIGNALED] means that the process was killed by a signal; the argument is the signal number. [WSTOPPED] means that the process was stopped by a signal; the argument is the signal number. *) type wait_flag = WNOHANG | WUNTRACED Flags for [ waitopt ] and [ waitpid ] . [ WNOHANG ] means do not block if no child has died yet , but immediately return with a pid equal to 0 . [ WUNTRACED ] means report also the children that receive stop signals . [WNOHANG] means do not block if no child has died yet, but immediately return with a pid equal to 0. [WUNTRACED] means report also the children that receive stop signals. *) external execv : string -> string array -> unit = "unix_execv" external execve : string -> string array -> string array -> unit = "unix_execve" Same as [ execv ] , except that the third argument provides the environment to the program executed . environment to the program executed. *) external execvp : string -> string array -> unit = "unix_execvp" external execvpe : string -> string array -> string array -> unit = "unix_execvpe" external fork : unit -> int = "unix_fork" external wait : unit -> int * process_status = "unix_wait" Wait until one of the children processes die , and return its pid and termination status . and termination status. *) external waitpid : wait_flag list -> int -> int * process_status = "unix_waitpid" val system : string -> process_status Execute the given command , wait until it terminates , and return its termination status . 
The string is interpreted by the shell [ /bin / sh ] and therefore can contain redirections , quotes , variables , etc . The result [ 127 ] indicates that the shell could n't be executed . its termination status. The string is interpreted by the shell [/bin/sh] and therefore can contain redirections, quotes, variables, etc. The result [WEXITED 127] indicates that the shell couldn't be executed. *) external getpid : unit -> int = "unix_getpid" external getppid : unit -> int = "unix_getppid" external nice : int -> int = "unix_nice" type file_descr val stdin : file_descr val stdout : file_descr val stderr : file_descr type open_flag = Truncate to 0 length if existing type file_perm = int external openfile : string -> open_flag list -> file_perm -> file_descr = "unix_open" Open the named file with the given flags . Third argument is the permissions to give to the file if it is created . Return a file descriptor on the named file . the permissions to give to the file if it is created. Return a file descriptor on the named file. *) external close : file_descr -> unit = "unix_close" val read : file_descr -> string -> int -> int -> int [ read fd ] reads [ len ] characters from descriptor [ fd ] , storing them in string [ buff ] , starting at position [ ofs ] in string [ buff ] . Return the number of characters actually read . [fd], storing them in string [buff], starting at position [ofs] in string [buff]. Return the number of characters actually read. *) val write : file_descr -> string -> int -> int -> int [ write fd ] writes [ len ] characters to descriptor [ fd ] , taking them from string [ buff ] , starting at position [ ofs ] in string [ buff ] . Return the number of characters actually written . [fd], taking them from string [buff], starting at position [ofs] in string [buff]. Return the number of characters actually written. 
*) external in_channel_of_descr : file_descr -> in_channel = "caml_open_descriptor" external out_channel_of_descr : file_descr -> out_channel = "caml_open_descriptor" external descr_of_in_channel : in_channel -> file_descr = "channel_descriptor" external descr_of_out_channel : out_channel -> file_descr = "channel_descriptor" type seek_command = SEEK_SET | SEEK_CUR | SEEK_END Positioning modes for [ lseek ] . [ SEEK_SET ] indicates positions relative to the beginning of the file , [ SEEK_CUR ] relative to the current position , [ SEEK_END ] relative to the end of the file . relative to the beginning of the file, [SEEK_CUR] relative to the current position, [SEEK_END] relative to the end of the file. *) external lseek : file_descr -> int -> seek_command -> int = "unix_lseek" external truncate : string -> int -> unit = "unix_truncate" Truncates the named file to the given size . external ftruncate : file_descr -> int -> unit = "unix_ftruncate" type file_kind = type stats = Inode number external stat : string -> stats = "unix_stat" external lstat : string -> stats = "unix_lstat" external fstat : file_descr -> stats = "unix_fstat" external unlink : string -> unit = "unix_unlink" external rename : string -> string -> unit = "unix_rename" external link : string -> string -> unit = "unix_link" type access_permission = external chmod : string -> file_perm -> unit = "unix_chmod" external fchmod : file_descr -> file_perm -> unit = "unix_fchmod" external chown : string -> int -> int -> unit = "unix_chown" Change the owner uid and owner gid of the named file . external fchown : file_descr -> int -> int -> unit = "unix_fchown" Change the owner uid and owner gid of an opened file . 
external umask : int -> int = "unix_umask" external access : string -> access_permission list -> unit = "unix_access" external dup : file_descr -> file_descr = "unix_dup" external dup2 : file_descr -> file_descr -> unit = "unix_dup2" external set_nonblock : file_descr -> unit = "unix_set_nonblock" external clear_nonblock : file_descr -> unit = "unix_clear_nonblock" external set_close_on_exec : file_descr -> unit = "unix_set_close_on_exec" external clear_close_on_exec : file_descr -> unit = "unix_clear_close_on_exec" external mkdir : string -> file_perm -> unit = "unix_mkdir" external rmdir : string -> unit = "unix_rmdir" external chdir : string -> unit = "unix_chdir" external getcwd : unit -> string = "unix_getcwd" type dir_handle external opendir : string -> dir_handle = "unix_opendir" external readdir : dir_handle -> string = "unix_readdir" external rewinddir : dir_handle -> unit = "unix_rewinddir" external closedir : dir_handle -> unit = "unix_closedir" external pipe : unit -> file_descr * file_descr = "unix_pipe" Create a pipe . The first component of the result is opened for reading , that 's the exit to the pipe . The second component is opened for writing , that 's the entrance to the pipe . for reading, that's the exit to the pipe. The second component is opened for writing, that's the entrance to the pipe. *) external mkfifo : string -> file_perm -> unit = "unix_mkfifo" val create_process : string -> string array -> file_descr -> file_descr -> file_descr -> int [ create_process prog args new_stdin ] forks a new process that executes the program in file [ prog ] , with arguments [ args ] . The pid of the new process is returned immediately ; the new process executes concurrently with the current process . The standard input and outputs of the new process are connected to the descriptors [ new_stdin ] , [ new_stdout ] and [ new_stderr ] . Passing e.g. 
[ stdout ] for [ new_stdout ] prevents the redirection and causes the new process to have the same standard output as the current process . The executable file [ prog ] is searched in the path . The new process has the same environment as the current process . All file descriptors of the current process are closed in the new process , except those redirected to standard input and outputs . forks a new process that executes the program in file [prog], with arguments [args]. The pid of the new process is returned immediately; the new process executes concurrently with the current process. The standard input and outputs of the new process are connected to the descriptors [new_stdin], [new_stdout] and [new_stderr]. Passing e.g. [stdout] for [new_stdout] prevents the redirection and causes the new process to have the same standard output as the current process. The executable file [prog] is searched in the path. The new process has the same environment as the current process. All file descriptors of the current process are closed in the new process, except those redirected to standard input and outputs. *) val create_process_env : string -> string array -> string array -> file_descr -> file_descr -> file_descr -> int [ create_process_env prog args env new_stdin new_stdout new_stderr ] works as [ create_process ] , except that the extra argument [ env ] specifies the environment passed to the program . works as [create_process], except that the extra argument [env] specifies the environment passed to the program. 
*) val open_process_in: string -> in_channel val open_process_out: string -> out_channel val open_process: string -> in_channel * out_channel val close_process_in: in_channel -> process_status val close_process_out: out_channel -> process_status val close_process: in_channel * out_channel -> process_status external symlink : string -> string -> unit = "unix_symlink" external readlink : string -> string = "unix_readlink" external select : file_descr list -> file_descr list -> file_descr list -> float -> file_descr list * file_descr list * file_descr list = "unix_select" Wait until some input / output operations become possible on some channels . The three list arguments are , respectively , a set of descriptors to check for reading ( first argument ) , for writing ( second argument ) , or for exceptional conditions ( third argument ) . The fourth argument is the maximal timeout , in seconds ; a negative fourth argument means no timeout ( unbounded wait ) . The result is composed of three sets of descriptors : those ready for reading ( first component ) , ready for writing ( second component ) , and over which an exceptional condition is pending ( third component ) . some channels. The three list arguments are, respectively, a set of descriptors to check for reading (first argument), for writing (second argument), or for exceptional conditions (third argument). The fourth argument is the maximal timeout, in seconds; a negative fourth argument means no timeout (unbounded wait). The result is composed of three sets of descriptors: those ready for reading (first component), ready for writing (second component), and over which an exceptional condition is pending (third component). *) type lock_command = external lockf : file_descr -> lock_command -> int -> unit = "unix_lockf" [ lockf fd cmd size ] puts a lock on a region of the file opened as [ fd ] . 
The region starts at the current read / write position for [ fd ] ( as set by [ lseek ] ) , and extends [ size ] bytes forward if [ size ] is positive , [ size ] bytes backwards if [ size ] is negative , or to the end of the file if [ size ] is zero . as [fd]. The region starts at the current read/write position for [fd] (as set by [lseek]), and extends [size] bytes forward if [size] is positive, [size] bytes backwards if [size] is negative, or to the end of the file if [size] is zero. *) external kill : int -> int -> unit = "unix_kill" external pause : unit -> unit = "unix_pause" type process_times = type tm = Seconds 0 .. 59 Minutes 0 .. 59 Hours 0 .. 23 Day of month 1 .. 31 Month of year 0 .. 11 Year - 1900 Day of week ( Sunday is 0 ) Day of year 0 .. 365 external time : unit -> float = "unix_time" Return the current time since 00:00:00 GMT , Jan. 1 , 1970 , in seconds . in seconds. *) external gettimeofday : unit -> float = "unix_gettimeofday" external gmtime : float -> tm = "unix_gmtime" Convert a time in seconds , as returned by [ time ] , into a date and a time . Assumes Greenwich meridian time zone . a time. Assumes Greenwich meridian time zone. *) external localtime : float -> tm = "unix_localtime" Convert a time in seconds , as returned by [ time ] , into a date and a time . Assumes the local time zone . a time. Assumes the local time zone. *) external mktime : tm -> float * tm = "unix_mktime" Convert a date and time , specified by the [ tm ] argument , into a time in seconds , as returned by [ time ] . Also return a normalized copy of the given [ tm ] record , with the [ tm_wday ] and [ tm_yday ] recomputed from the other fields . a time in seconds, as returned by [time]. Also return a normalized copy of the given [tm] record, with the [tm_wday] and [tm_yday] recomputed from the other fields. *) external alarm : int -> int = "unix_alarm" Schedule a [ SIGALRM ] signals after the given number of seconds . 
external sleep : int -> unit = "unix_sleep" external times : unit -> process_times = "unix_times_bytecode" "unix_times_native" external utimes : string -> float -> float -> unit = "unix_utimes" Set the last access time ( second arg ) and last modification time ( third arg ) for a file . Times are expressed in seconds from 00:00:00 GMT , Jan. 1 , 1970 . (third arg) for a file. Times are expressed in seconds from 00:00:00 GMT, Jan. 1, 1970. *) type interval_timer = ITIMER_REAL | ITIMER_VIRTUAL | ITIMER_PROF The three kinds of interval timers . [ ITIMER_REAL ] decrements in real time , and sends the signal [ SIGALRM ] when expired . [ ITIMER_VIRTUAL ] decrements in process virtual time , and sends [ ] when expired . [ ITIMER_PROF ] ( for profiling ) decrements both when the process is running and when the system is running on behalf of the process ; it sends [ SIGPROF ] when expired . [ITIMER_REAL] decrements in real time, and sends the signal [SIGALRM] when expired. [ITIMER_VIRTUAL] decrements in process virtual time, and sends [SIGVTALRM] when expired. [ITIMER_PROF] (for profiling) decrements both when the process is running and when the system is running on behalf of the process; it sends [SIGPROF] when expired. *) type interval_timer_status = external getitimer: interval_timer -> interval_timer_status = "unix_getitimer" "unix_getitimer_native" external setitimer: interval_timer -> interval_timer_status -> interval_timer_status = "unix_setitimer" "unix_setitimer_native" [ setitimer t s ] sets the interval timer [ t ] and returns its previous status . The [ s ] argument is interpreted as follows : [ s.it_value ] , if nonzero , is the time to the next timer expiration ; [ s.it_interval ] , if nonzero , specifies a value to be used in reloading it_value when the timer expires . Setting [ s.it_value ] to zero disable the timer . Setting [ s.it_interval ] to zero causes the timer to be disabled after its next expiration . its previous status. 
The [s] argument is interpreted as follows: [s.it_value], if nonzero, is the time to the next timer expiration; [s.it_interval], if nonzero, specifies a value to be used in reloading it_value when the timer expires. Setting [s.it_value] to zero disable the timer. Setting [s.it_interval] to zero causes the timer to be disabled after its next expiration. *) external getuid : unit -> int = "unix_getuid" external geteuid : unit -> int = "unix_geteuid" external setuid : int -> unit = "unix_setuid" external getgid : unit -> int = "unix_getgid" external getegid : unit -> int = "unix_getegid" external setgid : int -> unit = "unix_setgid" external getgroups : unit -> int array = "unix_getgroups" type passwd_entry = { pw_name : string; pw_passwd : string; pw_uid : int; pw_gid : int; pw_gecos : string; pw_dir : string; pw_shell : string } type group_entry = { gr_name : string; gr_passwd : string; gr_gid : int; gr_mem : string array } external getlogin : unit -> string = "unix_getlogin" external getpwnam : string -> passwd_entry = "unix_getpwnam" external getgrnam : string -> group_entry = "unix_getgrnam" external getgrgid : int -> group_entry = "unix_getgrgid" *) type inet_addr external inet_addr_of_string : string -> inet_addr = "unix_inet_addr_of_string" external string_of_inet_addr : inet_addr -> string = "unix_string_of_inet_addr" val inet_addr_any : inet_addr type socket_domain = type socket_type = Datagram socket type sockaddr = ADDR_UNIX of string | ADDR_INET of inet_addr * int The type of socket addresses . [ ADDR_UNIX name ] is a socket address in the Unix domain ; [ name ] is a file name in the file system . [ ADDR_INET(addr , port ) ] is a socket address in the Internet domain ; [ addr ] is the Internet address of the machine , and [ port ] is the port number . address in the Unix domain; [name] is a file name in the file system. 
[ADDR_INET(addr,port)] is a socket address in the Internet domain; [addr] is the Internet address of the machine, and [port] is the port number. *) external socket : socket_domain -> socket_type -> int -> file_descr = "unix_socket" Create a new socket in the given domain , and with the given kind . The third argument is the protocol type ; 0 selects the default protocol for that kind of sockets . given kind. The third argument is the protocol type; 0 selects the default protocol for that kind of sockets. *) external socketpair : socket_domain -> socket_type -> int -> file_descr * file_descr = "unix_socketpair" external accept : file_descr -> file_descr * sockaddr = "unix_accept" external bind : file_descr -> sockaddr -> unit = "unix_bind" external connect : file_descr -> sockaddr -> unit = "unix_connect" Connect a socket to an address . external listen : file_descr -> int -> unit = "unix_listen" type shutdown_command = external shutdown : file_descr -> shutdown_command -> unit = "unix_shutdown" Shutdown a socket connection . [ SHUTDOWN_SEND ] as second argument causes reads on the other end of the connection to return an end - of - file condition . [ SHUTDOWN_RECEIVE ] causes writes on the other end of the connection to return a closed pipe condition ( [ ] signal ) . causes reads on the other end of the connection to return an end-of-file condition. [SHUTDOWN_RECEIVE] causes writes on the other end of the connection to return a closed pipe condition ([SIGPIPE] signal). 
*) external getsockname : file_descr -> sockaddr = "unix_getsockname" external getpeername : file_descr -> sockaddr = "unix_getpeername" type msg_flag = MSG_OOB | MSG_DONTROUTE | MSG_PEEK val recv : file_descr -> string -> int -> int -> msg_flag list -> int val recvfrom : file_descr -> string -> int -> int -> msg_flag list -> int * sockaddr val send : file_descr -> string -> int -> int -> msg_flag list -> int val sendto : file_descr -> string -> int -> int -> msg_flag list -> sockaddr -> int type socket_option = external getsockopt : file_descr -> socket_option -> bool = "unix_getsockopt" external setsockopt : file_descr -> socket_option -> bool -> unit = "unix_setsockopt" val open_connection : sockaddr -> in_channel * out_channel Connect to a server at the given address . Return a pair of buffered channels connected to the server . Remember to call [ flush ] on the output channel at the right times to ensure correct synchronization . Return a pair of buffered channels connected to the server. Remember to call [flush] on the output channel at the right times to ensure correct synchronization. *) val shutdown_connection : in_channel -> unit val establish_server : (in_channel -> out_channel -> 'a) -> sockaddr -> unit Establish a server on the given address . The function given as first argument is called for each connection with two buffered channels connected to the client . A new process is created for each connection . The function [ establish_server ] never returns normally . The function given as first argument is called for each connection with two buffered channels connected to the client. A new process is created for each connection. The function [establish_server] never returns normally. 
*) type host_entry = { h_name : string; h_aliases : string array; h_addrtype : socket_domain; h_addr_list : inet_addr array } type protocol_entry = { p_name : string; p_aliases : string array; p_proto : int } type service_entry = { s_name : string; s_aliases : string array; s_port : int; s_proto : string } external gethostname : unit -> string = "unix_gethostname" external gethostbyname : string -> host_entry = "unix_gethostbyname" external gethostbyaddr : inet_addr -> host_entry = "unix_gethostbyaddr" external getprotobyname : string -> protocol_entry = "unix_getprotobyname" external getprotobynumber : int -> protocol_entry = "unix_getprotobynumber" external getservbyname : string -> string -> service_entry = "unix_getservbyname" external getservbyport : int -> string -> service_entry = "unix_getservbyport" pad : I removed it , has no such terminal
7ab1b42b6aa18a51b67ae879d90bc88423fba42d2fdcade8a285b21b79ece09d
valmirjunior0088/curios
Construct.hs
module WebAssembly.Construct ( MonadConstruct (..) , importFunc , importTable , importMem , importGlobal , declareFunc , declareTable , declareMem , declareGlobal , exportFunc , exportTable , exportMem , exportGlobal , declareExportFunc , declareExportTable , declareExportMem , declareExportGlobal , setStart , commitFuncTable , startCode , endCode , pushLocal , pushUnreachable , pushNop , pushBlock , popBlock , pushLoop , popLoop , pushIf , popIf , pushIfElse , popIfElse , pushBr , pushBrIf , pushBrTable , pushReturn , pushCall , pushCallIndirect , pushDrop , pushLocalGet , pushLocalSet , pushLocalTee , pushGlobalGet , pushGlobalSet , pushI32Load , pushI32Store , pushI32Const , pushI32Add , pushI32Sub , pushI32Mul , pushI32DivS , pushI32And , pushI32Or , pushI32Eq , pushI32Ne , pushI32LtS , pushI32LeS , pushI32GtS , pushI32GeS , pushI64Load , pushI64Store , pushI64Const , pushI64Add , pushI64Sub , pushI64Mul , pushI64DivS , pushF32Load , pushF32Store , pushF32Const , pushF32Add , pushF32Sub , pushF32Mul , pushF32Div , pushF32Eq , pushF32Ne , pushF32Lt , pushF32Le , pushF32Gt , pushF32Ge , pushF64Load , pushF64Store , pushF64Const , pushF64Add , pushF64Sub , pushF64Mul , pushF64Div , pushI32FuncRef , ConstructT , runConstructT , Construct , runConstruct ) where import WebAssembly.Syntax.Conventions ( TypeIdx , FuncIdx , TableIdx , MemIdx , GlobalIdx , LocalIdx , LabelIdx , SymIdx , Vec (..) , Name (..) ) import WebAssembly.Syntax.Types ( ValType (..) , ResultType (..) , FuncType (..) , RefType (..) , TableType (..) , GlobalType (..) , Limits (..) , MemType (..) ) import WebAssembly.Syntax.Module ( ImportDesc (..) , Import (..) , ExportDesc (..) , Export (..) , Global (..) , Table (..) , Mem (..) , ElemKind (..) , Elem (..) , Locals (..) , Func (..) , Code (..) , Module (..) 
, emptyModule ) import WebAssembly.Syntax.Instructions (BlockType (..), MemArg (..), Instr (..), Expr (..)) import WebAssembly.Syntax.LLVM (SymType (..), SymFlags (..), SymInfo (..)) import Control.Monad (when, unless) import Control.Monad.State (StateT, execStateT, get, modify) import Control.Monad.Identity (Identity, runIdentity) import Data.Int (Int32, Int64) import Data.List (group) import Data.Maybe (isJust) import GHC.Generics (Generic) import Data.Generics.Product (the) import Control.Lens (Getting, ASetter, view, (.~), (%~), (<>~), mapped, _2, _head) data ModlState = ModlState { nextTypeIdx :: TypeIdx , nextFuncIdx :: FuncIdx , nextTableIdx :: TableIdx , nextMemIdx :: MemIdx , nextGlobalIdx :: GlobalIdx , nextSymIdx :: SymIdx , types :: [(FuncType, TypeIdx)] , funcs :: [(String, (FuncIdx, SymIdx))] , parameters :: [[String]] , tables :: [(String, TableIdx)] , mems :: [(String, MemIdx)] , globals :: [(String, (GlobalIdx, SymIdx))] , funcRefs :: [(String, (Int32, SymIdx))] , funcTableIdx :: Maybe TableIdx } deriving (Show, Generic) emptyModlState :: ModlState emptyModlState = ModlState { nextTypeIdx = 0 , nextFuncIdx = 0 , nextTableIdx = 0 , nextMemIdx = 0 , nextGlobalIdx = 0 , nextSymIdx = 0 , types = [] , funcs = [] , parameters = [] , tables = [] , mems = [] , globals = [] , funcRefs = [] , funcTableIdx = Nothing } data Frame = RootFrame | BlockFrame BlockType | LoopFrame BlockType | IfFrame BlockType | IfElseFrame BlockType [Instr] deriving (Show) data CodeState = CodeState { nextLocalIdx :: LocalIdx , locals :: [ValType] , variables :: [(String, LocalIdx)] , frames :: [(Frame, [Instr])] , labels :: [(String, LabelIdx)] } deriving (Show, Generic) emptyCodeState :: [String] -> CodeState emptyCodeState names = do let go (parameters, nextLocalIdx) name = ((name, nextLocalIdx) : parameters, succ nextLocalIdx) (variables, localIdx) = foldl go ([], 0) names CodeState { nextLocalIdx = localIdx , locals = [] , variables = variables , frames = [(RootFrame, [])] , 
labels = [("root", 0)] } data ConstructState = ConstructState { modl :: Module , modlState :: ModlState , codeState :: CodeState } deriving (Show, Generic) emptyState :: ConstructState emptyState = ConstructState { modl = emptyModule , modlState = emptyModlState , codeState = emptyCodeState [] } class Monad m => MonadConstruct m where getConstruct :: m ConstructState modifyConstruct :: (ConstructState -> ConstructState) -> m () use :: MonadConstruct m => Getting a ConstructState a -> m a use lens = view lens <$> getConstruct (.=) :: MonadConstruct m => ASetter ConstructState ConstructState a b -> b -> m () (.=) lens value = modifyConstruct (lens .~ value) (%=) :: MonadConstruct m => ASetter ConstructState ConstructState a b -> (a -> b) -> m () (%=) lens action = modifyConstruct (lens %~ action) (<>=) :: (MonadConstruct m, Semigroup a) => ASetter ConstructState ConstructState a a -> a -> m () (<>=) lens value = modifyConstruct (lens <>~ value) getType :: MonadConstruct m => FuncType -> m TypeIdx getType funcType = do types <- use (the @"modlState" . the @"types") case lookup funcType types of Nothing -> do typeIdx <- use (the @"modlState" . the @"nextTypeIdx") (the @"modlState" . the @"nextTypeIdx") .= succ typeIdx (the @"modl" . the @"typeSec") <>= [funcType] (the @"modlState" . the @"types") %= ((funcType, typeIdx) :) return typeIdx Just typeIdx -> return typeIdx importFunc :: MonadConstruct m => String -> String -> [ValType] -> [ValType] -> m () importFunc namespace name inputs outputs = do funcSec <- use (the @"modl" . the @"funcSec") unless (null funcSec) (error "cannot import func after having declared a func") typeIdx <- getType (FuncType (ResultType (Vec inputs)) (ResultType (Vec outputs))) funcIdx <- use (the @"modlState" . the @"nextFuncIdx") (the @"modlState" . the @"nextFuncIdx") .= succ funcIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . 
the @"nextSymIdx") .= succ symIdx let func = Import (Name namespace) (Name name) (ImportFunc typeIdx) flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = True , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_FUNCTION funcIdx (Just (Name name))) flags (the @"modl" . the @"importSec") <>= [func] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"funcs") %= ((name, (funcIdx, symIdx)) :) importTable :: MonadConstruct m => String -> String -> TableType -> m () importTable namespace name tableType = do tableSec <- use (the @"modl" . the @"tableSec") unless (null tableSec) (error "cannot import a table after having declared a table") tableIdx <- use (the @"modlState" . the @"nextTableIdx") (the @"modlState" . the @"nextTableIdx") .= succ tableIdx let table = Import (Name namespace) (Name name) (ImportTable tableType) (the @"modl" . the @"importSec") <>= [table] (the @"modlState" . the @"tables") %= ((name, tableIdx) :) importMem :: MonadConstruct m => String -> String -> MemType -> m () importMem namespace name memType = do memSec <- use (the @"modl" . the @"memSec") unless (null memSec) (error "cannot import a mem after having declared a mem") memIdx <- use (the @"modlState" . the @"nextMemIdx") (the @"modlState" . the @"nextMemIdx") .= succ memIdx let mem = Import (Name namespace) (Name name) (ImportMem memType) (the @"modl" . the @"importSec") <>= [mem] (the @"modlState" . the @"mems") %= ((name, memIdx) :) importGlobal :: MonadConstruct m => String -> String -> GlobalType -> m () importGlobal namespace name globalType = do globalSec <- use (the @"modl" . the @"globalSec") unless (null globalSec) (error "cannot import a global after having declared a global") globalIdx <- use (the @"modlState" . the @"nextGlobalIdx") (the @"modlState" . 
the @"nextGlobalIdx") .= succ globalIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . the @"nextSymIdx") .= succ symIdx let global = Import (Name namespace) (Name name) (ImportGlobal globalType) flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = True , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_GLOBAL globalIdx (Just (Name name))) flags (the @"modl" . the @"importSec") <>= [global] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"globals") %= ((name, (globalIdx, symIdx)) :) declareFunc :: MonadConstruct m => String -> [(String, ValType)] -> [ValType] -> m () declareFunc name inputs outputs = do typeIdx <- getType (FuncType (ResultType (Vec [valType | (_, valType) <- inputs])) (ResultType (Vec outputs))) funcIdx <- use (the @"modlState" . the @"nextFuncIdx") (the @"modlState" . the @"nextFuncIdx") .= succ funcIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . the @"nextSymIdx") .= succ symIdx let flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = False , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_FUNCTION funcIdx (Just (Name name))) flags (the @"modl" . the @"funcSec") <>= [typeIdx] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"funcs") %= ((name, (funcIdx, symIdx)) :) (the @"modlState" . the @"parameters") <>= [[parameter | (parameter, _) <- inputs]] declareTable :: MonadConstruct m => String -> TableType -> m () declareTable name tableType = do tableIdx <- use (the @"modlState" . the @"nextTableIdx") (the @"modlState" . the @"nextTableIdx") .= succ tableIdx (the @"modl" . the @"tableSec") <>= [Table tableType] (the @"modlState" . 
the @"tables") %= ((name, tableIdx) :) declareMem :: MonadConstruct m => String -> MemType -> m () declareMem name memType = do memIdx <- use (the @"modlState" . the @"nextMemIdx") (the @"modlState" . the @"nextMemIdx") .= succ memIdx (the @"modl" . the @"memSec") <>= [Mem memType] (the @"modlState" . the @"mems") %= ((name, memIdx) :) declareGlobal :: MonadConstruct m => String -> GlobalType -> Expr -> m () declareGlobal name globalType expr = do globalIdx <- use (the @"modlState" . the @"nextGlobalIdx") (the @"modlState" . the @"nextGlobalIdx") .= succ globalIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . the @"nextSymIdx") .= succ symIdx let flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = False , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_GLOBAL globalIdx (Just (Name name))) flags (the @"modl" . the @"globalSec") <>= [Global globalType expr] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"globals") %= ((name, (globalIdx, symIdx)) :) exportFunc :: MonadConstruct m => String -> m () exportFunc name = do funcs <- use (the @"modlState" . the @"funcs") case lookup name funcs of Nothing -> error ("tried to export unknown function \"" ++ name ++ "\"") Just (funcIdx, _) -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportFunc funcIdx)] exportTable :: MonadConstruct m => String -> m () exportTable name = do tables <- use (the @"modlState" . the @"tables") case lookup name tables of Nothing -> error ("tried to export unknown table \"" ++ name ++ "\"") Just tableIdx -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportTable tableIdx)] exportMem :: MonadConstruct m => String -> m () exportMem name = do mems <- use (the @"modlState" . 
the @"mems") case lookup name mems of Nothing -> error ("tried to export unknown memory \"" ++ name ++ "\"") Just memIdx -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportMem memIdx)] exportGlobal :: MonadConstruct m => String -> m () exportGlobal name = do globals <- use (the @"modlState" . the @"globals") case lookup name globals of Nothing -> error ("tried to export unknown global \"" ++ name ++ "\"") Just (globalIdx, _) -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportGlobal globalIdx)] declareExportFunc :: MonadConstruct m => String -> [(String, ValType)] -> [ValType] -> m () declareExportFunc name inputs outputs = declareFunc name inputs outputs >> exportFunc name declareExportTable :: MonadConstruct m => String -> TableType -> m () declareExportTable name tableType = declareTable name tableType >> exportTable name declareExportMem :: MonadConstruct m => String -> MemType -> m () declareExportMem name memType = declareMem name memType >> exportMem name declareExportGlobal :: MonadConstruct m => String -> GlobalType -> Expr -> m () declareExportGlobal name globalType expr = declareGlobal name globalType expr >> exportGlobal name setStart :: MonadConstruct m => String -> m () setStart name = do funcs <- use (the @"modlState" . the @"funcs") case lookup name funcs of Nothing -> error ("tried to set unknown function \"" ++ name ++ "\" as start") Just (funcIdx, _) -> (the @"modl" . the @"startSec") .= Just funcIdx commitFuncTable :: MonadConstruct m => Maybe (String, String) -> m () commitFuncTable imported = do funcTableIdx <- use (the @"modlState" . the @"funcTableIdx") when (isJust funcTableIdx) (error "cannot commit func table twice") tableIdx <- use (the @"modlState" . the @"nextTableIdx") (the @"modlState" . the @"nextTableIdx") .= succ tableIdx funcs <- use (the @"modlState" . the @"funcs") (the @"modl" . 
the @"elemSec") <>= [Elem tableIdx (Expr [I32Const 1]) ElemFuncRef (Vec [funcIdx | (_, (funcIdx, _)) <- funcs])] (the @"modlState" . the @"funcRefs") .= [(name, (funcRef, symIdx)) | (name, (_, symIdx)) <- funcs | funcRef <- [1..]] (the @"modlState" . the @"funcTableIdx") .= Just tableIdx let size = fromIntegral (1 + length funcs) limits = Bounded size size tableType = TableType FuncRef limits case imported of Nothing -> (the @"modl" . the @"tableSec") <>= [Table tableType] Just (namespace, name) -> do tableSec <- use (the @"modl" . the @"tableSec") unless (null tableSec) (error "cannot import a func table after having declared a table") (the @"modl" . the @"importSec") <>= [Import (Name namespace) (Name name) (ImportTable tableType)] startCode :: MonadConstruct m => m () startCode = do parameters <- use (the @"modlState" . the @"parameters") (the @"modlState" . the @"parameters") .= tail parameters (the @"codeState") .= emptyCodeState (head parameters) endCode :: MonadConstruct m => m () endCode = do CodeState { locals, frames } <- use (the @"codeState") case frames of [(RootFrame, instrs)] -> do let build valTypes = Locals (fromIntegral $ length valTypes) (head valTypes) built = [build valTypes | valTypes <- group locals] (the @"modl" . the @"codeSec") <>= [Code (Func (Vec built) (Expr instrs))] _ -> error "found undelimited frame while trying to emit code" pushLocal :: MonadConstruct m => String -> ValType -> m () pushLocal name valType = do localIdx <- use (the @"codeState" . the @"nextLocalIdx") (the @"codeState" . the @"nextLocalIdx") .= succ localIdx (the @"codeState" . the @"locals") <>= [valType] (the @"codeState" . the @"variables") %= ((name, localIdx) :) pushInstr :: MonadConstruct m => Instr -> m () pushInstr instr = (the @"codeState" . the @"frames" . _head . 
_2) <>= [instr] pushUnreachable :: MonadConstruct m => m () pushUnreachable = pushInstr Unreachable pushNop :: MonadConstruct m => m () pushNop = pushInstr Nop pushFrame :: MonadConstruct m => String -> Frame -> m () pushFrame name frame = do (the @"codeState" . the @"frames") %= ((frame, []) :) (the @"codeState" . the @"labels" . mapped . _2) %= succ (the @"codeState" . the @"labels") %= ((name, 0) :) popFrame :: MonadConstruct m => m (Frame, [Instr]) popFrame = do frames <- use (the @"codeState" . the @"frames") (the @"codeState" . the @"frames") .= tail frames (the @"codeState" . the @"labels") %= tail (the @"codeState" . the @"labels" . mapped . _2) %= pred return (head frames) getBlockType :: MonadConstruct m => ([ValType], [ValType]) -> m BlockType getBlockType = \case ([], []) -> return BlockEmpty ([], [valType]) -> return (BlockValType valType) (inputs, outputs) -> do typeIdx <- getType (FuncType (ResultType (Vec inputs)) (ResultType (Vec outputs))) return (BlockTypeIdx typeIdx) pushBlock :: MonadConstruct m => String -> [ValType] -> [ValType] -> m () pushBlock name inputs outputs = pushFrame name . BlockFrame =<< getBlockType (inputs, outputs) popBlock :: MonadConstruct m => m () popBlock = popFrame >>= \case (BlockFrame blockType, instrs) -> pushInstr (Block blockType instrs) _ -> error "tried to pop something that was not a block" pushLoop :: MonadConstruct m => String -> [ValType] -> [ValType] -> m () pushLoop name inputs outputs = pushFrame name . LoopFrame =<< getBlockType (inputs, outputs) popLoop :: MonadConstruct m => m () popLoop = popFrame >>= \case (LoopFrame blockType, instrs) -> pushInstr (Loop blockType instrs) _ -> error "tried to pop something that was not a loop" pushIf :: MonadConstruct m => String -> [ValType] -> [ValType] -> m () pushIf name inputs outputs = pushFrame name . 
IfFrame =<< getBlockType (inputs, outputs) popIf :: MonadConstruct m => m () popIf = popFrame >>= \case (IfFrame blockType, instrs) -> pushInstr (If blockType instrs) _ -> error "tried to pop something that was not an if" pushIfElse :: MonadConstruct m => String -> m () pushIfElse name = popFrame >>= \case (IfFrame blockType, instrs) -> pushFrame name (IfElseFrame blockType instrs) _ -> error "tried to push an else frame without an if frame" popIfElse :: MonadConstruct m => m () popIfElse = popFrame >>= \case (IfElseFrame blockType trueInstrs, falseInstrs) -> pushInstr (IfElse blockType trueInstrs falseInstrs) _ -> error "tried to pop something that was not an if-else frame" getLabel :: MonadConstruct m => String -> m LabelIdx getLabel name = do labels <- use (the @"codeState" . the @"labels") case lookup name labels of Nothing -> error ("tried to get unknown label \"" ++ name ++ "\"") Just labelIdx -> return labelIdx pushBr :: MonadConstruct m => String -> m () pushBr name = pushInstr . Br =<< getLabel name pushBrIf :: MonadConstruct m => String -> m () pushBrIf name = pushInstr . BrIf =<< getLabel name pushBrTable :: MonadConstruct m => [String] -> String -> m () pushBrTable names name = pushInstr =<< BrTable <$> (Vec <$> mapM getLabel names) <*> getLabel name pushReturn :: MonadConstruct m => m () pushReturn = pushInstr Return pushCall :: MonadConstruct m => String -> m () pushCall name = do funcs <- use (the @"modlState" . the @"funcs") case lookup name funcs of Nothing -> error ("tried to call unknown function \"" ++ name ++ "\"") Just (funcIdx, symIdx) -> pushInstr (Call funcIdx symIdx) pushCallIndirect :: MonadConstruct m => [ValType] -> [ValType] -> m () pushCallIndirect inputs outputs = do tableIdx <- use (the @"modlState" . 
the @"funcTableIdx") >>= \case Nothing -> error "tried to call_indirect without having set a func table" Just tableIdx -> return tableIdx typeIdx <- getType (FuncType (ResultType (Vec inputs)) (ResultType (Vec outputs))) pushInstr (CallIndirect typeIdx tableIdx) pushDrop :: MonadConstruct m => m () pushDrop = pushInstr Drop getVariable :: MonadConstruct m => String -> m LocalIdx getVariable name = do variables <- use (the @"codeState" . the @"variables") case lookup name variables of Nothing -> error ("tried to get unknown variable \"" ++ name ++ "\"") Just localIdx -> return localIdx pushLocalGet :: MonadConstruct m => String -> m () pushLocalGet name = pushInstr . LocalGet =<< getVariable name pushLocalSet :: MonadConstruct m => String -> m () pushLocalSet name = pushInstr . LocalSet =<< getVariable name pushLocalTee :: MonadConstruct m => String -> m () pushLocalTee name = pushInstr . LocalTee =<< getVariable name getGlobal :: MonadConstruct m => String -> m (GlobalIdx, SymIdx) getGlobal name = do globals <- use (the @"modlState" . the @"globals") case lookup name globals of Nothing -> error ("tried to get unknown global \"" ++ name ++ "\"") Just (globalIdx, symIdx) -> return (globalIdx, symIdx) pushGlobalGet :: MonadConstruct m => String -> m () pushGlobalGet name = pushInstr . uncurry GlobalGet =<< getGlobal name pushGlobalSet :: MonadConstruct m => String -> m () pushGlobalSet name = pushInstr . 
uncurry GlobalSet =<< getGlobal name pushI32Load :: MonadConstruct m => MemArg -> m () pushI32Load memArg = pushInstr (I32Load memArg) pushI32Store :: MonadConstruct m => MemArg -> m () pushI32Store memArg = pushInstr (I32Store memArg) pushI32Const :: MonadConstruct m => Int32 -> m () pushI32Const value = pushInstr (I32Const value) pushI32Add :: MonadConstruct m => m () pushI32Add = pushInstr I32Add pushI32Sub :: MonadConstruct m => m () pushI32Sub = pushInstr I32Sub pushI32Mul :: MonadConstruct m => m () pushI32Mul = pushInstr I32Mul pushI32DivS :: MonadConstruct m => m () pushI32DivS = pushInstr I32DivS pushI32And :: MonadConstruct m => m () pushI32And = pushInstr I32And pushI32Or :: MonadConstruct m => m () pushI32Or = pushInstr I32Or pushI32Eq :: MonadConstruct m => m () pushI32Eq = pushInstr I32Eq pushI32Ne :: MonadConstruct m => m () pushI32Ne = pushInstr I32Ne pushI32LtS :: MonadConstruct m => m () pushI32LtS = pushInstr I32LtS pushI32LeS :: MonadConstruct m => m () pushI32LeS = pushInstr I32LeS pushI32GtS :: MonadConstruct m => m () pushI32GtS = pushInstr I32GtS pushI32GeS :: MonadConstruct m => m () pushI32GeS = pushInstr I32GeS pushI64Load :: MonadConstruct m => MemArg -> m () pushI64Load memArg = pushInstr (I64Load memArg) pushI64Store :: MonadConstruct m => MemArg -> m () pushI64Store memArg = pushInstr (I64Store memArg) pushI64Const :: MonadConstruct m => Int64 -> m () pushI64Const value = pushInstr (I64Const value) pushI64Add :: MonadConstruct m => m () pushI64Add = pushInstr I64Add pushI64Sub :: MonadConstruct m => m () pushI64Sub = pushInstr I64Sub pushI64Mul :: MonadConstruct m => m () pushI64Mul = pushInstr I64Mul pushI64DivS :: MonadConstruct m => m () pushI64DivS = pushInstr I64DivS pushF32Load :: MonadConstruct m => MemArg -> m () pushF32Load memArg = pushInstr (F32Load memArg) pushF32Store :: MonadConstruct m => MemArg -> m () pushF32Store memArg = pushInstr (F32Store memArg) pushF32Const :: MonadConstruct m => Float -> m () pushF32Const value 
= pushInstr (F32Const value) pushF32Add :: MonadConstruct m => m () pushF32Add = pushInstr F32Add pushF32Sub :: MonadConstruct m => m () pushF32Sub = pushInstr F32Sub pushF32Mul :: MonadConstruct m => m () pushF32Mul = pushInstr F32Mul pushF32Div :: MonadConstruct m => m () pushF32Div = pushInstr F32Div pushF32Eq :: MonadConstruct m => m () pushF32Eq = pushInstr F32Eq pushF32Ne :: MonadConstruct m => m () pushF32Ne = pushInstr F32Ne pushF32Lt :: MonadConstruct m => m () pushF32Lt = pushInstr F32Lt pushF32Le :: MonadConstruct m => m () pushF32Le = pushInstr F32Le pushF32Gt :: MonadConstruct m => m () pushF32Gt = pushInstr F32Gt pushF32Ge :: MonadConstruct m => m () pushF32Ge = pushInstr F32Ge pushF64Load :: MonadConstruct m => MemArg -> m () pushF64Load memArg = pushInstr (F64Load memArg) pushF64Store :: MonadConstruct m => MemArg -> m () pushF64Store memArg = pushInstr (F64Store memArg) pushF64Const :: MonadConstruct m => Double -> m () pushF64Const value = pushInstr (F64Const value) pushF64Add :: MonadConstruct m => m () pushF64Add = pushInstr F64Add pushF64Sub :: MonadConstruct m => m () pushF64Sub = pushInstr F64Sub pushF64Mul :: MonadConstruct m => m () pushF64Mul = pushInstr F64Mul pushF64Div :: MonadConstruct m => m () pushF64Div = pushInstr F64Div pushI32FuncRef :: MonadConstruct m => String -> m () pushI32FuncRef name = do funcRefs <- use (the @"modlState" . the @"funcRefs") case lookup name funcRefs of Nothing -> error ("tried to create func ref from unknown function \"" ++ name ++ "\"") Just (funcRef, symIdx) -> pushInstr (I32FuncRef funcRef symIdx) newtype ConstructT m a = ConstructT (StateT ConstructState m a) deriving (Functor, Applicative, Monad) instance Monad m => MonadConstruct (ConstructT m) where getConstruct = ConstructT get modifyConstruct = ConstructT . 
modify runConstructT :: Monad m => ConstructT m a -> m Module runConstructT (ConstructT action) = view (the @"modl") <$> execStateT action emptyState type Construct = ConstructT Identity runConstruct :: Construct a -> Module runConstruct = runIdentity . runConstructT
null
https://raw.githubusercontent.com/valmirjunior0088/curios/2ab8cf6f00c93aabb55a3fbfe6abc9a3d31737d0/src/WebAssembly/Construct.hs
haskell
module WebAssembly.Construct ( MonadConstruct (..) , importFunc , importTable , importMem , importGlobal , declareFunc , declareTable , declareMem , declareGlobal , exportFunc , exportTable , exportMem , exportGlobal , declareExportFunc , declareExportTable , declareExportMem , declareExportGlobal , setStart , commitFuncTable , startCode , endCode , pushLocal , pushUnreachable , pushNop , pushBlock , popBlock , pushLoop , popLoop , pushIf , popIf , pushIfElse , popIfElse , pushBr , pushBrIf , pushBrTable , pushReturn , pushCall , pushCallIndirect , pushDrop , pushLocalGet , pushLocalSet , pushLocalTee , pushGlobalGet , pushGlobalSet , pushI32Load , pushI32Store , pushI32Const , pushI32Add , pushI32Sub , pushI32Mul , pushI32DivS , pushI32And , pushI32Or , pushI32Eq , pushI32Ne , pushI32LtS , pushI32LeS , pushI32GtS , pushI32GeS , pushI64Load , pushI64Store , pushI64Const , pushI64Add , pushI64Sub , pushI64Mul , pushI64DivS , pushF32Load , pushF32Store , pushF32Const , pushF32Add , pushF32Sub , pushF32Mul , pushF32Div , pushF32Eq , pushF32Ne , pushF32Lt , pushF32Le , pushF32Gt , pushF32Ge , pushF64Load , pushF64Store , pushF64Const , pushF64Add , pushF64Sub , pushF64Mul , pushF64Div , pushI32FuncRef , ConstructT , runConstructT , Construct , runConstruct ) where import WebAssembly.Syntax.Conventions ( TypeIdx , FuncIdx , TableIdx , MemIdx , GlobalIdx , LocalIdx , LabelIdx , SymIdx , Vec (..) , Name (..) ) import WebAssembly.Syntax.Types ( ValType (..) , ResultType (..) , FuncType (..) , RefType (..) , TableType (..) , GlobalType (..) , Limits (..) , MemType (..) ) import WebAssembly.Syntax.Module ( ImportDesc (..) , Import (..) , ExportDesc (..) , Export (..) , Global (..) , Table (..) , Mem (..) , ElemKind (..) , Elem (..) , Locals (..) , Func (..) , Code (..) , Module (..) 
, emptyModule ) import WebAssembly.Syntax.Instructions (BlockType (..), MemArg (..), Instr (..), Expr (..)) import WebAssembly.Syntax.LLVM (SymType (..), SymFlags (..), SymInfo (..)) import Control.Monad (when, unless) import Control.Monad.State (StateT, execStateT, get, modify) import Control.Monad.Identity (Identity, runIdentity) import Data.Int (Int32, Int64) import Data.List (group) import Data.Maybe (isJust) import GHC.Generics (Generic) import Data.Generics.Product (the) import Control.Lens (Getting, ASetter, view, (.~), (%~), (<>~), mapped, _2, _head) data ModlState = ModlState { nextTypeIdx :: TypeIdx , nextFuncIdx :: FuncIdx , nextTableIdx :: TableIdx , nextMemIdx :: MemIdx , nextGlobalIdx :: GlobalIdx , nextSymIdx :: SymIdx , types :: [(FuncType, TypeIdx)] , funcs :: [(String, (FuncIdx, SymIdx))] , parameters :: [[String]] , tables :: [(String, TableIdx)] , mems :: [(String, MemIdx)] , globals :: [(String, (GlobalIdx, SymIdx))] , funcRefs :: [(String, (Int32, SymIdx))] , funcTableIdx :: Maybe TableIdx } deriving (Show, Generic) emptyModlState :: ModlState emptyModlState = ModlState { nextTypeIdx = 0 , nextFuncIdx = 0 , nextTableIdx = 0 , nextMemIdx = 0 , nextGlobalIdx = 0 , nextSymIdx = 0 , types = [] , funcs = [] , parameters = [] , tables = [] , mems = [] , globals = [] , funcRefs = [] , funcTableIdx = Nothing } data Frame = RootFrame | BlockFrame BlockType | LoopFrame BlockType | IfFrame BlockType | IfElseFrame BlockType [Instr] deriving (Show) data CodeState = CodeState { nextLocalIdx :: LocalIdx , locals :: [ValType] , variables :: [(String, LocalIdx)] , frames :: [(Frame, [Instr])] , labels :: [(String, LabelIdx)] } deriving (Show, Generic) emptyCodeState :: [String] -> CodeState emptyCodeState names = do let go (parameters, nextLocalIdx) name = ((name, nextLocalIdx) : parameters, succ nextLocalIdx) (variables, localIdx) = foldl go ([], 0) names CodeState { nextLocalIdx = localIdx , locals = [] , variables = variables , frames = [(RootFrame, [])] , 
labels = [("root", 0)] } data ConstructState = ConstructState { modl :: Module , modlState :: ModlState , codeState :: CodeState } deriving (Show, Generic) emptyState :: ConstructState emptyState = ConstructState { modl = emptyModule , modlState = emptyModlState , codeState = emptyCodeState [] } class Monad m => MonadConstruct m where getConstruct :: m ConstructState modifyConstruct :: (ConstructState -> ConstructState) -> m () use :: MonadConstruct m => Getting a ConstructState a -> m a use lens = view lens <$> getConstruct (.=) :: MonadConstruct m => ASetter ConstructState ConstructState a b -> b -> m () (.=) lens value = modifyConstruct (lens .~ value) (%=) :: MonadConstruct m => ASetter ConstructState ConstructState a b -> (a -> b) -> m () (%=) lens action = modifyConstruct (lens %~ action) (<>=) :: (MonadConstruct m, Semigroup a) => ASetter ConstructState ConstructState a a -> a -> m () (<>=) lens value = modifyConstruct (lens <>~ value) getType :: MonadConstruct m => FuncType -> m TypeIdx getType funcType = do types <- use (the @"modlState" . the @"types") case lookup funcType types of Nothing -> do typeIdx <- use (the @"modlState" . the @"nextTypeIdx") (the @"modlState" . the @"nextTypeIdx") .= succ typeIdx (the @"modl" . the @"typeSec") <>= [funcType] (the @"modlState" . the @"types") %= ((funcType, typeIdx) :) return typeIdx Just typeIdx -> return typeIdx importFunc :: MonadConstruct m => String -> String -> [ValType] -> [ValType] -> m () importFunc namespace name inputs outputs = do funcSec <- use (the @"modl" . the @"funcSec") unless (null funcSec) (error "cannot import func after having declared a func") typeIdx <- getType (FuncType (ResultType (Vec inputs)) (ResultType (Vec outputs))) funcIdx <- use (the @"modlState" . the @"nextFuncIdx") (the @"modlState" . the @"nextFuncIdx") .= succ funcIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . 
the @"nextSymIdx") .= succ symIdx let func = Import (Name namespace) (Name name) (ImportFunc typeIdx) flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = True , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_FUNCTION funcIdx (Just (Name name))) flags (the @"modl" . the @"importSec") <>= [func] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"funcs") %= ((name, (funcIdx, symIdx)) :) importTable :: MonadConstruct m => String -> String -> TableType -> m () importTable namespace name tableType = do tableSec <- use (the @"modl" . the @"tableSec") unless (null tableSec) (error "cannot import a table after having declared a table") tableIdx <- use (the @"modlState" . the @"nextTableIdx") (the @"modlState" . the @"nextTableIdx") .= succ tableIdx let table = Import (Name namespace) (Name name) (ImportTable tableType) (the @"modl" . the @"importSec") <>= [table] (the @"modlState" . the @"tables") %= ((name, tableIdx) :) importMem :: MonadConstruct m => String -> String -> MemType -> m () importMem namespace name memType = do memSec <- use (the @"modl" . the @"memSec") unless (null memSec) (error "cannot import a mem after having declared a mem") memIdx <- use (the @"modlState" . the @"nextMemIdx") (the @"modlState" . the @"nextMemIdx") .= succ memIdx let mem = Import (Name namespace) (Name name) (ImportMem memType) (the @"modl" . the @"importSec") <>= [mem] (the @"modlState" . the @"mems") %= ((name, memIdx) :) importGlobal :: MonadConstruct m => String -> String -> GlobalType -> m () importGlobal namespace name globalType = do globalSec <- use (the @"modl" . the @"globalSec") unless (null globalSec) (error "cannot import a global after having declared a global") globalIdx <- use (the @"modlState" . the @"nextGlobalIdx") (the @"modlState" . 
the @"nextGlobalIdx") .= succ globalIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . the @"nextSymIdx") .= succ symIdx let global = Import (Name namespace) (Name name) (ImportGlobal globalType) flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = True , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_GLOBAL globalIdx (Just (Name name))) flags (the @"modl" . the @"importSec") <>= [global] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"globals") %= ((name, (globalIdx, symIdx)) :) declareFunc :: MonadConstruct m => String -> [(String, ValType)] -> [ValType] -> m () declareFunc name inputs outputs = do typeIdx <- getType (FuncType (ResultType (Vec [valType | (_, valType) <- inputs])) (ResultType (Vec outputs))) funcIdx <- use (the @"modlState" . the @"nextFuncIdx") (the @"modlState" . the @"nextFuncIdx") .= succ funcIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . the @"nextSymIdx") .= succ symIdx let flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = False , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_FUNCTION funcIdx (Just (Name name))) flags (the @"modl" . the @"funcSec") <>= [typeIdx] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"funcs") %= ((name, (funcIdx, symIdx)) :) (the @"modlState" . the @"parameters") <>= [[parameter | (parameter, _) <- inputs]] declareTable :: MonadConstruct m => String -> TableType -> m () declareTable name tableType = do tableIdx <- use (the @"modlState" . the @"nextTableIdx") (the @"modlState" . the @"nextTableIdx") .= succ tableIdx (the @"modl" . the @"tableSec") <>= [Table tableType] (the @"modlState" . 
the @"tables") %= ((name, tableIdx) :) declareMem :: MonadConstruct m => String -> MemType -> m () declareMem name memType = do memIdx <- use (the @"modlState" . the @"nextMemIdx") (the @"modlState" . the @"nextMemIdx") .= succ memIdx (the @"modl" . the @"memSec") <>= [Mem memType] (the @"modlState" . the @"mems") %= ((name, memIdx) :) declareGlobal :: MonadConstruct m => String -> GlobalType -> Expr -> m () declareGlobal name globalType expr = do globalIdx <- use (the @"modlState" . the @"nextGlobalIdx") (the @"modlState" . the @"nextGlobalIdx") .= succ globalIdx symIdx <- use (the @"modlState" . the @"nextSymIdx") (the @"modlState" . the @"nextSymIdx") .= succ symIdx let flags = SymFlags { wasm_sym_binding_weak = False , wasm_sym_binding_local = False , wasm_sym_visibility_hidden = False , wasm_sym_undefined = False , wasm_sym_exported = False , wasm_sym_explicit_name = True , wasm_sym_no_strip = False } info = SymInfo (SYMTAB_GLOBAL globalIdx (Just (Name name))) flags (the @"modl" . the @"globalSec") <>= [Global globalType expr] (the @"modl" . the @"linkingSec") <>= [info] (the @"modlState" . the @"globals") %= ((name, (globalIdx, symIdx)) :) exportFunc :: MonadConstruct m => String -> m () exportFunc name = do funcs <- use (the @"modlState" . the @"funcs") case lookup name funcs of Nothing -> error ("tried to export unknown function \"" ++ name ++ "\"") Just (funcIdx, _) -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportFunc funcIdx)] exportTable :: MonadConstruct m => String -> m () exportTable name = do tables <- use (the @"modlState" . the @"tables") case lookup name tables of Nothing -> error ("tried to export unknown table \"" ++ name ++ "\"") Just tableIdx -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportTable tableIdx)] exportMem :: MonadConstruct m => String -> m () exportMem name = do mems <- use (the @"modlState" . 
the @"mems") case lookup name mems of Nothing -> error ("tried to export unknown memory \"" ++ name ++ "\"") Just memIdx -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportMem memIdx)] exportGlobal :: MonadConstruct m => String -> m () exportGlobal name = do globals <- use (the @"modlState" . the @"globals") case lookup name globals of Nothing -> error ("tried to export unknown global \"" ++ name ++ "\"") Just (globalIdx, _) -> (the @"modl" . the @"exportSec") <>= [Export (Name name) (ExportGlobal globalIdx)] declareExportFunc :: MonadConstruct m => String -> [(String, ValType)] -> [ValType] -> m () declareExportFunc name inputs outputs = declareFunc name inputs outputs >> exportFunc name declareExportTable :: MonadConstruct m => String -> TableType -> m () declareExportTable name tableType = declareTable name tableType >> exportTable name declareExportMem :: MonadConstruct m => String -> MemType -> m () declareExportMem name memType = declareMem name memType >> exportMem name declareExportGlobal :: MonadConstruct m => String -> GlobalType -> Expr -> m () declareExportGlobal name globalType expr = declareGlobal name globalType expr >> exportGlobal name setStart :: MonadConstruct m => String -> m () setStart name = do funcs <- use (the @"modlState" . the @"funcs") case lookup name funcs of Nothing -> error ("tried to set unknown function \"" ++ name ++ "\" as start") Just (funcIdx, _) -> (the @"modl" . the @"startSec") .= Just funcIdx commitFuncTable :: MonadConstruct m => Maybe (String, String) -> m () commitFuncTable imported = do funcTableIdx <- use (the @"modlState" . the @"funcTableIdx") when (isJust funcTableIdx) (error "cannot commit func table twice") tableIdx <- use (the @"modlState" . the @"nextTableIdx") (the @"modlState" . the @"nextTableIdx") .= succ tableIdx funcs <- use (the @"modlState" . the @"funcs") (the @"modl" . 
the @"elemSec") <>= [Elem tableIdx (Expr [I32Const 1]) ElemFuncRef (Vec [funcIdx | (_, (funcIdx, _)) <- funcs])] (the @"modlState" . the @"funcRefs") .= [(name, (funcRef, symIdx)) | (name, (_, symIdx)) <- funcs | funcRef <- [1..]] (the @"modlState" . the @"funcTableIdx") .= Just tableIdx let size = fromIntegral (1 + length funcs) limits = Bounded size size tableType = TableType FuncRef limits case imported of Nothing -> (the @"modl" . the @"tableSec") <>= [Table tableType] Just (namespace, name) -> do tableSec <- use (the @"modl" . the @"tableSec") unless (null tableSec) (error "cannot import a func table after having declared a table") (the @"modl" . the @"importSec") <>= [Import (Name namespace) (Name name) (ImportTable tableType)] startCode :: MonadConstruct m => m () startCode = do parameters <- use (the @"modlState" . the @"parameters") (the @"modlState" . the @"parameters") .= tail parameters (the @"codeState") .= emptyCodeState (head parameters) endCode :: MonadConstruct m => m () endCode = do CodeState { locals, frames } <- use (the @"codeState") case frames of [(RootFrame, instrs)] -> do let build valTypes = Locals (fromIntegral $ length valTypes) (head valTypes) built = [build valTypes | valTypes <- group locals] (the @"modl" . the @"codeSec") <>= [Code (Func (Vec built) (Expr instrs))] _ -> error "found undelimited frame while trying to emit code" pushLocal :: MonadConstruct m => String -> ValType -> m () pushLocal name valType = do localIdx <- use (the @"codeState" . the @"nextLocalIdx") (the @"codeState" . the @"nextLocalIdx") .= succ localIdx (the @"codeState" . the @"locals") <>= [valType] (the @"codeState" . the @"variables") %= ((name, localIdx) :) pushInstr :: MonadConstruct m => Instr -> m () pushInstr instr = (the @"codeState" . the @"frames" . _head . 
_2) <>= [instr] pushUnreachable :: MonadConstruct m => m () pushUnreachable = pushInstr Unreachable pushNop :: MonadConstruct m => m () pushNop = pushInstr Nop pushFrame :: MonadConstruct m => String -> Frame -> m () pushFrame name frame = do (the @"codeState" . the @"frames") %= ((frame, []) :) (the @"codeState" . the @"labels" . mapped . _2) %= succ (the @"codeState" . the @"labels") %= ((name, 0) :) popFrame :: MonadConstruct m => m (Frame, [Instr]) popFrame = do frames <- use (the @"codeState" . the @"frames") (the @"codeState" . the @"frames") .= tail frames (the @"codeState" . the @"labels") %= tail (the @"codeState" . the @"labels" . mapped . _2) %= pred return (head frames) getBlockType :: MonadConstruct m => ([ValType], [ValType]) -> m BlockType getBlockType = \case ([], []) -> return BlockEmpty ([], [valType]) -> return (BlockValType valType) (inputs, outputs) -> do typeIdx <- getType (FuncType (ResultType (Vec inputs)) (ResultType (Vec outputs))) return (BlockTypeIdx typeIdx) pushBlock :: MonadConstruct m => String -> [ValType] -> [ValType] -> m () pushBlock name inputs outputs = pushFrame name . BlockFrame =<< getBlockType (inputs, outputs) popBlock :: MonadConstruct m => m () popBlock = popFrame >>= \case (BlockFrame blockType, instrs) -> pushInstr (Block blockType instrs) _ -> error "tried to pop something that was not a block" pushLoop :: MonadConstruct m => String -> [ValType] -> [ValType] -> m () pushLoop name inputs outputs = pushFrame name . LoopFrame =<< getBlockType (inputs, outputs) popLoop :: MonadConstruct m => m () popLoop = popFrame >>= \case (LoopFrame blockType, instrs) -> pushInstr (Loop blockType instrs) _ -> error "tried to pop something that was not a loop" pushIf :: MonadConstruct m => String -> [ValType] -> [ValType] -> m () pushIf name inputs outputs = pushFrame name . 
IfFrame =<< getBlockType (inputs, outputs) popIf :: MonadConstruct m => m () popIf = popFrame >>= \case (IfFrame blockType, instrs) -> pushInstr (If blockType instrs) _ -> error "tried to pop something that was not an if" pushIfElse :: MonadConstruct m => String -> m () pushIfElse name = popFrame >>= \case (IfFrame blockType, instrs) -> pushFrame name (IfElseFrame blockType instrs) _ -> error "tried to push an else frame without an if frame" popIfElse :: MonadConstruct m => m () popIfElse = popFrame >>= \case (IfElseFrame blockType trueInstrs, falseInstrs) -> pushInstr (IfElse blockType trueInstrs falseInstrs) _ -> error "tried to pop something that was not an if-else frame" getLabel :: MonadConstruct m => String -> m LabelIdx getLabel name = do labels <- use (the @"codeState" . the @"labels") case lookup name labels of Nothing -> error ("tried to get unknown label \"" ++ name ++ "\"") Just labelIdx -> return labelIdx pushBr :: MonadConstruct m => String -> m () pushBr name = pushInstr . Br =<< getLabel name pushBrIf :: MonadConstruct m => String -> m () pushBrIf name = pushInstr . BrIf =<< getLabel name pushBrTable :: MonadConstruct m => [String] -> String -> m () pushBrTable names name = pushInstr =<< BrTable <$> (Vec <$> mapM getLabel names) <*> getLabel name pushReturn :: MonadConstruct m => m () pushReturn = pushInstr Return pushCall :: MonadConstruct m => String -> m () pushCall name = do funcs <- use (the @"modlState" . the @"funcs") case lookup name funcs of Nothing -> error ("tried to call unknown function \"" ++ name ++ "\"") Just (funcIdx, symIdx) -> pushInstr (Call funcIdx symIdx) pushCallIndirect :: MonadConstruct m => [ValType] -> [ValType] -> m () pushCallIndirect inputs outputs = do tableIdx <- use (the @"modlState" . 
the @"funcTableIdx") >>= \case Nothing -> error "tried to call_indirect without having set a func table" Just tableIdx -> return tableIdx typeIdx <- getType (FuncType (ResultType (Vec inputs)) (ResultType (Vec outputs))) pushInstr (CallIndirect typeIdx tableIdx) pushDrop :: MonadConstruct m => m () pushDrop = pushInstr Drop getVariable :: MonadConstruct m => String -> m LocalIdx getVariable name = do variables <- use (the @"codeState" . the @"variables") case lookup name variables of Nothing -> error ("tried to get unknown variable \"" ++ name ++ "\"") Just localIdx -> return localIdx pushLocalGet :: MonadConstruct m => String -> m () pushLocalGet name = pushInstr . LocalGet =<< getVariable name pushLocalSet :: MonadConstruct m => String -> m () pushLocalSet name = pushInstr . LocalSet =<< getVariable name pushLocalTee :: MonadConstruct m => String -> m () pushLocalTee name = pushInstr . LocalTee =<< getVariable name getGlobal :: MonadConstruct m => String -> m (GlobalIdx, SymIdx) getGlobal name = do globals <- use (the @"modlState" . the @"globals") case lookup name globals of Nothing -> error ("tried to get unknown global \"" ++ name ++ "\"") Just (globalIdx, symIdx) -> return (globalIdx, symIdx) pushGlobalGet :: MonadConstruct m => String -> m () pushGlobalGet name = pushInstr . uncurry GlobalGet =<< getGlobal name pushGlobalSet :: MonadConstruct m => String -> m () pushGlobalSet name = pushInstr . 
uncurry GlobalSet =<< getGlobal name pushI32Load :: MonadConstruct m => MemArg -> m () pushI32Load memArg = pushInstr (I32Load memArg) pushI32Store :: MonadConstruct m => MemArg -> m () pushI32Store memArg = pushInstr (I32Store memArg) pushI32Const :: MonadConstruct m => Int32 -> m () pushI32Const value = pushInstr (I32Const value) pushI32Add :: MonadConstruct m => m () pushI32Add = pushInstr I32Add pushI32Sub :: MonadConstruct m => m () pushI32Sub = pushInstr I32Sub pushI32Mul :: MonadConstruct m => m () pushI32Mul = pushInstr I32Mul pushI32DivS :: MonadConstruct m => m () pushI32DivS = pushInstr I32DivS pushI32And :: MonadConstruct m => m () pushI32And = pushInstr I32And pushI32Or :: MonadConstruct m => m () pushI32Or = pushInstr I32Or pushI32Eq :: MonadConstruct m => m () pushI32Eq = pushInstr I32Eq pushI32Ne :: MonadConstruct m => m () pushI32Ne = pushInstr I32Ne pushI32LtS :: MonadConstruct m => m () pushI32LtS = pushInstr I32LtS pushI32LeS :: MonadConstruct m => m () pushI32LeS = pushInstr I32LeS pushI32GtS :: MonadConstruct m => m () pushI32GtS = pushInstr I32GtS pushI32GeS :: MonadConstruct m => m () pushI32GeS = pushInstr I32GeS pushI64Load :: MonadConstruct m => MemArg -> m () pushI64Load memArg = pushInstr (I64Load memArg) pushI64Store :: MonadConstruct m => MemArg -> m () pushI64Store memArg = pushInstr (I64Store memArg) pushI64Const :: MonadConstruct m => Int64 -> m () pushI64Const value = pushInstr (I64Const value) pushI64Add :: MonadConstruct m => m () pushI64Add = pushInstr I64Add pushI64Sub :: MonadConstruct m => m () pushI64Sub = pushInstr I64Sub pushI64Mul :: MonadConstruct m => m () pushI64Mul = pushInstr I64Mul pushI64DivS :: MonadConstruct m => m () pushI64DivS = pushInstr I64DivS pushF32Load :: MonadConstruct m => MemArg -> m () pushF32Load memArg = pushInstr (F32Load memArg) pushF32Store :: MonadConstruct m => MemArg -> m () pushF32Store memArg = pushInstr (F32Store memArg) pushF32Const :: MonadConstruct m => Float -> m () pushF32Const value 
= pushInstr (F32Const value) pushF32Add :: MonadConstruct m => m () pushF32Add = pushInstr F32Add pushF32Sub :: MonadConstruct m => m () pushF32Sub = pushInstr F32Sub pushF32Mul :: MonadConstruct m => m () pushF32Mul = pushInstr F32Mul pushF32Div :: MonadConstruct m => m () pushF32Div = pushInstr F32Div pushF32Eq :: MonadConstruct m => m () pushF32Eq = pushInstr F32Eq pushF32Ne :: MonadConstruct m => m () pushF32Ne = pushInstr F32Ne pushF32Lt :: MonadConstruct m => m () pushF32Lt = pushInstr F32Lt pushF32Le :: MonadConstruct m => m () pushF32Le = pushInstr F32Le pushF32Gt :: MonadConstruct m => m () pushF32Gt = pushInstr F32Gt pushF32Ge :: MonadConstruct m => m () pushF32Ge = pushInstr F32Ge pushF64Load :: MonadConstruct m => MemArg -> m () pushF64Load memArg = pushInstr (F64Load memArg) pushF64Store :: MonadConstruct m => MemArg -> m () pushF64Store memArg = pushInstr (F64Store memArg) pushF64Const :: MonadConstruct m => Double -> m () pushF64Const value = pushInstr (F64Const value) pushF64Add :: MonadConstruct m => m () pushF64Add = pushInstr F64Add pushF64Sub :: MonadConstruct m => m () pushF64Sub = pushInstr F64Sub pushF64Mul :: MonadConstruct m => m () pushF64Mul = pushInstr F64Mul pushF64Div :: MonadConstruct m => m () pushF64Div = pushInstr F64Div pushI32FuncRef :: MonadConstruct m => String -> m () pushI32FuncRef name = do funcRefs <- use (the @"modlState" . the @"funcRefs") case lookup name funcRefs of Nothing -> error ("tried to create func ref from unknown function \"" ++ name ++ "\"") Just (funcRef, symIdx) -> pushInstr (I32FuncRef funcRef symIdx) newtype ConstructT m a = ConstructT (StateT ConstructState m a) deriving (Functor, Applicative, Monad) instance Monad m => MonadConstruct (ConstructT m) where getConstruct = ConstructT get modifyConstruct = ConstructT . 
modify runConstructT :: Monad m => ConstructT m a -> m Module runConstructT (ConstructT action) = view (the @"modl") <$> execStateT action emptyState type Construct = ConstructT Identity runConstruct :: Construct a -> Module runConstruct = runIdentity . runConstructT
002118a536eb0a41e5436ca33ab50d4198b64f1ec628d74700c33b6e2c541143
webyrd/2012-scheme-workshop-quines-paper-code
intro.scm
(load "testcheck.scm") (load "quinec.scm") (test-check "intro-1" (equal? (eval quinec) quinec) #t) ;;; footnote moved to extending-interpreter.scm
null
https://raw.githubusercontent.com/webyrd/2012-scheme-workshop-quines-paper-code/3d160707f6ea6f60a08ec271abea1bfdaf63fd41/intro.scm
scheme
footnote moved to extending-interpreter.scm
(load "testcheck.scm") (load "quinec.scm") (test-check "intro-1" (equal? (eval quinec) quinec) #t)
a70c01c019ac563af5cad56db96de2de373f5b4099c2bbfd8a0bf283b0957773
hexresearch/reflex-material-bootstrap
Input.hs
| Module : Web . Reflex . Bootstrap . Upload . Input Description : Reading files with HTML5 File API Copyright : ( c ) , 2016 License : : Stability : experimental Portability : Portable TODO : pull request this to reflex - dom - contrib Module : Web.Reflex.Bootstrap.Upload.Input Description : Reading files with HTML5 File API Copyright : (c) Anton Gushcha, 2016 License : BSD3 Maintainer : Stability : experimental Portability : Portable TODO: pull request this to reflex-dom-contrib -} module Web.Reflex.Bootstrap.Upload.Input( UploadFileConfig(..) , defaultUploadFileConfig , UploadFile(..) , uploadFileInput , debugUploadFile , FullUploadFile(..) , uploadFullFileInput , uploadJsonFileInput ) where import Control.Exception (finally) import Control.Monad.IO.Class import Data.JSString (unpack) import Data.Map (Map) import Data.Monoid import Data.Text (Text) import GHC.Generics import GHCJS.Buffer import GHCJS.DOM.File (File, getName) import GHCJS.Foreign.Callback import GHCJS.Types (JSString, JSVal) import JavaScript.TypedArray.ArrayBuffer import Reflex import Reflex.Dom import qualified Data.Aeson as A import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BSL import qualified Data.Map.Strict as M import qualified Data.Text as T import Web.Reflex.Bootstrap.Utils -- | Additional configuration for upload file input widget data UploadFileConfig t = UploadFileConfig { uploadFileInputAttrs :: Dynamic t (Map Text Text) } -- | Default configuration defaultUploadFileConfig :: Reflex t => UploadFileConfig t defaultUploadFileConfig = UploadFileConfig (constDyn mempty) -- | Info about file being uploaded data UploadFile t m = UploadFile { uploadFileName :: !Text -- ^ Selected file name , uploadFileType :: !Text -- ^ Example: 'text/plain' , uploadFileSize :: !Word -- ^ Total size of file , uploadFileContent :: !(Event t (Word, Word) -> m (Event t BS.ByteString)) -- ^ Getter of file contents, takes start index and end index (not including the end) } deriving 
(Generic) -- | Info about file fully loaded in memory data FullUploadFile a = FullUploadFile { uploadFullFileName :: !Text -- ^ Selected file name , uploadFullFileType :: !Text -- ^ Example: 'text/plain' , uploadFullFileSize :: !Word -- ^ Total size of file , uploadFullFileContent :: !a -- ^ Full contents of file } deriving (Generic) | Typed wrapper around js FileReader object newtype FileReader = FileReader JSVal | Typed wrapper around js event passed into load callback of FileReader newtype OnLoadEvent = OnLoadEvent JSVal foreign import javascript unsafe "$r = new FileReader();" js_newFileReader :: IO FileReader foreign import javascript unsafe "$1.onload = $2;" js_readerOnload :: FileReader -> Callback (JSVal -> IO ()) -> IO () foreign import javascript unsafe "$r = $1.target.result;" js_onLoadEventArrayBuffer :: OnLoadEvent -> IO ArrayBuffer foreign import javascript unsafe "$1.readAsArrayBuffer($2);" js_readAsArrayBuffer :: FileReader -> File -> IO () foreign import javascript unsafe "$r = $1.type;" js_fileType :: File -> IO JSString foreign import javascript unsafe "$r = $1.size;" js_fileSize :: File -> IO Word foreign import javascript unsafe "$r = $1.slice($2, $3);" js_fileSlice :: File -> Word -> Word -> IO File -- | Input for JSON file that is deserialised to specified type uploadJsonFileInput :: forall a t m . (A.FromJSON a, MonadWidget t m) => UploadFileConfig t -- ^ Configuration of the widget -> m (Event t (Either Text (FullUploadFile a))) uploadJsonFileInput cfg = do eu <- uploadFullFileInput cfg return $ ffor eu $ \file@FullUploadFile{..} -> case A.eitherDecode' $ BSL.fromStrict uploadFullFileContent of Left er -> Left (showt er) Right a -> Right $ file { uploadFullFileContent = a } -- | Simplification of 'uploadFileInput' that loads file in memory instantly uploadFullFileInput :: forall t m . 
MonadWidget t m => UploadFileConfig t -> m (Event t (FullUploadFile BS.ByteString)) uploadFullFileInput cfg = do eu <- uploadFileInput cfg fmap switchPromptlyDyn $ widgetHold (pure never) $ ffor eu $ \UploadFile{..} -> do let makeFullFile bs = FullUploadFile { uploadFullFileName = uploadFileName , uploadFullFileType = uploadFileType , uploadFullFileSize = uploadFileSize , uploadFullFileContent = bs } buildE <- getPostBuild cntE <- uploadFileContent $ const (0, uploadFileSize) <$> buildE return $ makeFullFile <$> cntE -- | Single file input that returns lazy byte string of file content uploadFileInput :: forall t m . MonadWidget t m => UploadFileConfig t -> m (Event t (UploadFile t m)) uploadFileInput UploadFileConfig{..} = do i <- genId let inputId = "fileinput" <> showt i attrs = fmap (M.insert "id" inputId) uploadFileInputAttrs cfg = FileInputConfig attrs FileInput{..} <- fileInput cfg let filesEvent = updated _fileInput_value performEventAsync (readUploadFiles <$> filesEvent) where readUploadFiles :: [File] -> (UploadFile t m -> IO ()) -> WidgetHost m () readUploadFiles files consume = mapM_ (readUploadFile consume) files readUploadFile :: (UploadFile t m -> IO ()) -> File -> WidgetHost m () readUploadFile consume f = liftIO $ do name <- T.pack . unpack <$> getName f ftype <- T.pack . 
unpack <$> js_fileType f size <- js_fileSize f consume $ UploadFile { uploadFileName = name , uploadFileType = ftype , uploadFileSize = size , uploadFileContent = contentGetter } where contentGetter sliceE = performEventAsync $ ffor sliceE $ \(start, end) consumeChunk -> liftIO $ do f' <- js_fileSlice f start end reader <- js_newFileReader rec c <- syncCallback1 ContinueAsync (onload c consumeChunk) js_readerOnload reader c js_readAsArrayBuffer reader f' onload c consumeChunk e = finally (releaseCallback c) $ do contentsBuff <- js_onLoadEventArrayBuffer $ OnLoadEvent e consumeChunk $ toByteString 0 Nothing $ createFromArrayBuffer contentsBuff -- | Showcase for upload file input widget debugUploadFile :: forall t m . MonadWidget t m => m () debugUploadFile = do fileE <- uploadFileInput defaultUploadFileConfig _ <- widgetHold (pure ()) $ renderFile <$> fileE return () where renderFile :: UploadFile t m -> m () renderFile UploadFile{..} = el "div" $ do el "p" $ text $ "Name: " <> uploadFileName el "p" $ text $ "Type: " <> uploadFileType el "p" $ text $ "Size: " <> showt uploadFileSize initE <- getPostBuild contentE <- uploadFileContent $ const (0, 10) <$> initE _ <- widgetHold (pure ()) $ ffor contentE $ \bs -> el "p" $ text $ "First bytes: " <> showt bs return ()
null
https://raw.githubusercontent.com/hexresearch/reflex-material-bootstrap/0dbf56c743f9739b4e1ec7af8a47cdd3062f85f7/src/Web/Reflex/Bootstrap/Upload/Input.hs
haskell
| Additional configuration for upload file input widget | Default configuration | Info about file being uploaded ^ Selected file name ^ Example: 'text/plain' ^ Total size of file ^ Getter of file contents, takes start index and end index (not including the end) | Info about file fully loaded in memory ^ Selected file name ^ Example: 'text/plain' ^ Total size of file ^ Full contents of file | Input for JSON file that is deserialised to specified type ^ Configuration of the widget | Simplification of 'uploadFileInput' that loads file in memory instantly | Single file input that returns lazy byte string of file content | Showcase for upload file input widget
| Module : Web . Reflex . Bootstrap . Upload . Input Description : Reading files with HTML5 File API Copyright : ( c ) , 2016 License : : Stability : experimental Portability : Portable TODO : pull request this to reflex - dom - contrib Module : Web.Reflex.Bootstrap.Upload.Input Description : Reading files with HTML5 File API Copyright : (c) Anton Gushcha, 2016 License : BSD3 Maintainer : Stability : experimental Portability : Portable TODO: pull request this to reflex-dom-contrib -} module Web.Reflex.Bootstrap.Upload.Input( UploadFileConfig(..) , defaultUploadFileConfig , UploadFile(..) , uploadFileInput , debugUploadFile , FullUploadFile(..) , uploadFullFileInput , uploadJsonFileInput ) where import Control.Exception (finally) import Control.Monad.IO.Class import Data.JSString (unpack) import Data.Map (Map) import Data.Monoid import Data.Text (Text) import GHC.Generics import GHCJS.Buffer import GHCJS.DOM.File (File, getName) import GHCJS.Foreign.Callback import GHCJS.Types (JSString, JSVal) import JavaScript.TypedArray.ArrayBuffer import Reflex import Reflex.Dom import qualified Data.Aeson as A import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BSL import qualified Data.Map.Strict as M import qualified Data.Text as T import Web.Reflex.Bootstrap.Utils data UploadFileConfig t = UploadFileConfig { uploadFileInputAttrs :: Dynamic t (Map Text Text) } defaultUploadFileConfig :: Reflex t => UploadFileConfig t defaultUploadFileConfig = UploadFileConfig (constDyn mempty) data UploadFile t m = UploadFile { , uploadFileContent :: !(Event t (Word, Word) -> m (Event t BS.ByteString)) } deriving (Generic) data FullUploadFile a = FullUploadFile { } deriving (Generic) | Typed wrapper around js FileReader object newtype FileReader = FileReader JSVal | Typed wrapper around js event passed into load callback of FileReader newtype OnLoadEvent = OnLoadEvent JSVal foreign import javascript unsafe "$r = new FileReader();" js_newFileReader :: IO FileReader 
foreign import javascript unsafe "$1.onload = $2;" js_readerOnload :: FileReader -> Callback (JSVal -> IO ()) -> IO () foreign import javascript unsafe "$r = $1.target.result;" js_onLoadEventArrayBuffer :: OnLoadEvent -> IO ArrayBuffer foreign import javascript unsafe "$1.readAsArrayBuffer($2);" js_readAsArrayBuffer :: FileReader -> File -> IO () foreign import javascript unsafe "$r = $1.type;" js_fileType :: File -> IO JSString foreign import javascript unsafe "$r = $1.size;" js_fileSize :: File -> IO Word foreign import javascript unsafe "$r = $1.slice($2, $3);" js_fileSlice :: File -> Word -> Word -> IO File uploadJsonFileInput :: forall a t m . (A.FromJSON a, MonadWidget t m) -> m (Event t (Either Text (FullUploadFile a))) uploadJsonFileInput cfg = do eu <- uploadFullFileInput cfg return $ ffor eu $ \file@FullUploadFile{..} -> case A.eitherDecode' $ BSL.fromStrict uploadFullFileContent of Left er -> Left (showt er) Right a -> Right $ file { uploadFullFileContent = a } uploadFullFileInput :: forall t m . MonadWidget t m => UploadFileConfig t -> m (Event t (FullUploadFile BS.ByteString)) uploadFullFileInput cfg = do eu <- uploadFileInput cfg fmap switchPromptlyDyn $ widgetHold (pure never) $ ffor eu $ \UploadFile{..} -> do let makeFullFile bs = FullUploadFile { uploadFullFileName = uploadFileName , uploadFullFileType = uploadFileType , uploadFullFileSize = uploadFileSize , uploadFullFileContent = bs } buildE <- getPostBuild cntE <- uploadFileContent $ const (0, uploadFileSize) <$> buildE return $ makeFullFile <$> cntE uploadFileInput :: forall t m . 
MonadWidget t m => UploadFileConfig t -> m (Event t (UploadFile t m)) uploadFileInput UploadFileConfig{..} = do i <- genId let inputId = "fileinput" <> showt i attrs = fmap (M.insert "id" inputId) uploadFileInputAttrs cfg = FileInputConfig attrs FileInput{..} <- fileInput cfg let filesEvent = updated _fileInput_value performEventAsync (readUploadFiles <$> filesEvent) where readUploadFiles :: [File] -> (UploadFile t m -> IO ()) -> WidgetHost m () readUploadFiles files consume = mapM_ (readUploadFile consume) files readUploadFile :: (UploadFile t m -> IO ()) -> File -> WidgetHost m () readUploadFile consume f = liftIO $ do name <- T.pack . unpack <$> getName f ftype <- T.pack . unpack <$> js_fileType f size <- js_fileSize f consume $ UploadFile { uploadFileName = name , uploadFileType = ftype , uploadFileSize = size , uploadFileContent = contentGetter } where contentGetter sliceE = performEventAsync $ ffor sliceE $ \(start, end) consumeChunk -> liftIO $ do f' <- js_fileSlice f start end reader <- js_newFileReader rec c <- syncCallback1 ContinueAsync (onload c consumeChunk) js_readerOnload reader c js_readAsArrayBuffer reader f' onload c consumeChunk e = finally (releaseCallback c) $ do contentsBuff <- js_onLoadEventArrayBuffer $ OnLoadEvent e consumeChunk $ toByteString 0 Nothing $ createFromArrayBuffer contentsBuff debugUploadFile :: forall t m . MonadWidget t m => m () debugUploadFile = do fileE <- uploadFileInput defaultUploadFileConfig _ <- widgetHold (pure ()) $ renderFile <$> fileE return () where renderFile :: UploadFile t m -> m () renderFile UploadFile{..} = el "div" $ do el "p" $ text $ "Name: " <> uploadFileName el "p" $ text $ "Type: " <> uploadFileType el "p" $ text $ "Size: " <> showt uploadFileSize initE <- getPostBuild contentE <- uploadFileContent $ const (0, 10) <$> initE _ <- widgetHold (pure ()) $ ffor contentE $ \bs -> el "p" $ text $ "First bytes: " <> showt bs return ()
74159b7163e9d3ed3641da9a7532eedd482b7e5af53a8b5d72f6a525b732ca50
rntz/moxy
runtime.rkt
#lang racket (require (for-syntax syntax/parse)) (require "debug.rkt") (require "util.rkt") (require "tags.rkt") (require "values.rkt") (require "objects.rkt") (require "lex.rkt") ;for the token tags (require "env.rkt") (require "engine.rkt") (require "core-forms.rkt") ;@vars-var (require "pcomb.rkt") (require "parse.rkt") (require "parse-builtins.rkt") (require (prefix-in q- "quasi.rkt")) (provide new-engine) (define/match (envpair-join/2 a b) [((list ax ay) (list bx by)) (list (env-join ax bx) (env-join ay by))]) (define (envpair-join* l) (reduce l (list env-empty env-empty) envpair-join/2)) (define envpair-join (nary envpair-join*)) (define (vars x) (list env-empty (env-single @vars x))) (define (env-val name value) (let ((id (gensym name))) (namespace-set-variable-value! id value #t) (vars (@vars-var name id)))) (define/contract (env-tag tag ctor fields) (-> tag? any/c (listof symbol?) (list/c hash? hash?)) (let* ([name (tag-name tag)] [tag-id (mkid "tag:~a" name)] [ctor-id (mkid name)]) (namespace-set-variable-value! tag-id tag #t) (namespace-set-variable-value! ctor-id ctor #t) (vars (@vars-ctor name ctor-id tag-id (if (= 0 (tag-arity tag)) None (Just fields)))))) (define (env-nodule name envs) (match-define (list parse-env resolve-env) envs) (list (env-single @nodules (@nodules-nodule name (record [resolveExt resolve-env] [parseExt parse-env]))) env-empty)) (define-syntax-parser mkenv [(_ x:id) #'(env-val 'x x)] [(_ (name:id value)) #'(env-val 'name value)] ;; TODO: variadic #:tag [(_ (#:tag name:id)) #`(env-tag #,(tag-name-id #'name) name '#,(tag-fields #'name))] [(_ (#:nodule name:id defs ...)) #'(env-nodule 'name (mkenv defs ...))] [(_ part ...) #'(envpair-join (mkenv part) ...)]) (define (make-env) (mkenv ;; TODO: string-append, string comparison [debug toggle-debug!] 
format [say printfln] [print (lambda (x) (write x) (display "\n"))] [#:tag True] [#:tag False] [not (compose truthify falsey?)] [toBool (compose truthify truthy?)] [#:tag L] [#:tag R] [#:tag Just] [#:tag None] maybe [fromMaybe from-maybe] [maybeMap maybe-map] [maybeFilter (lambda (v ok?) (maybe-filter v (compose truthy? ok?)))] [#:tag ExtPoint] [symbol string->symbol] gensym [string (lambda (x) (format "~a" x))] [racketEval eval] [eval (lambda (e) (eval (expr-compile e env-empty)))] ;; convenience [sexp (lambda (x) ((hash-get 'sexp x)))] [add (lambda (x y) (+ x y))] (#:nodule List [nil '()] cons list null append [concat (lambda (x) (append* x))] [build build-list] map [filter (lambda (p l) (filter (compose truthy? p) l))] foldr foldl [head car] [tail cdr]) (#:nodule Set [empty (set)] [has (lambda (elem set) (truthify (set-member? set elem)))] [fromList list->set] [toList set->list] [union set-union] [intersect set-intersect]) (#:nodule Hash [empty hash-empty] [isEmpty (compose truthify hash-empty?)] [size hash-count] [single hash-single] [has (compose truthify hash-has?)] [lookup hash-lookup] [get hash-get] [put hash-put] ;; [putWith hash-put-with] [delete hash-delete] ;; [alter hash-alter] ;; [map hash-map] [union hash-union] ;; [unions hash-unions] ;; XXX lists [keys hash-keys] [values hash-values] [fromList hash-from-list] [fromKeysValues hash-from-keys-values]) (#:nodule AST [mkId mkid] [mkTemp mktemp] [exprVar expr:var] [exprLit expr:lit] [exprLambda expr:lambda] [exprRacket expr:racket] [patVar pat:var] [declBegin decl:begin] [varLocal var:local] ) (#:nodule Ext [Exprs @exprs] [InfixExprs @infix-exprs] [Pats @pats] [InfixPats @infix-pats] [Decls @decls] [Tops @tops] [Modules @nodules] [Vars @vars] [QuoteForms @quote-forms]) (#:nodule Env [empty env-empty] [join env-join] [joins env-join*] [single env-single] [get env-get]) (#:nodule Quasi [pure q-pure] [lift q-lift] [map q-fmap] [ap q-ap] [quasi q-quasi] [unquo q-unquo] [quo q-quo] [seq q-seq] [list q-seq*] 
[run q-run]) (#:nodule Parse from pcomb.rkt [pure return] lift [map <$>] [ap <*>] [bind >>=] [join (lambda (k) (>>= k identity))] fail try ask local psum choice peof option optional [optionMaybe option-maybe] ;; these all return racket lists many many1 [skipMany skip-many] [skipMany1 skip-many1] [sepBy sep-by] [sepBy1 sep-by1] [endBy end-by] [endBy1 end-by1] [sepEndBy sep-end-by] [sepEndBy1 sep-end-by1] [beginSepBy begin-sep-by] [beginSepBy1 begin-sep-by1] [beginSepEndBy begin-sep-end-by] [beginSepEndBy1 begin-sep-end-by1] between [mapByMaybe pmap-maybe] ;; [filterBy pfilter] ;; need to adapt for booleans ;; take ;; returns a racket list expect [takeOne take-one] [satisfy (lambda (p . as) (apply satisfy (compose truthy? p) as))] [tryOneMaybe try-one-maybe] [anyOf any-of] [noneOf none-of] ;; from parse.rkt [localEnv local-env] keyword keysym comma dot semi colon equals bar lparen rparen lbrace rbrace lbrack rbrack parens braces brackets [string p-str] [number p-num] [literal p-lit] [id p-id] [anyId p-any-id] [varId p-var-id] [capsId p-caps-id] ;; TODO: p-qualified, p-var return a list [expr p-expr] [exprAt p-expr-at] [atomicExpr p-atomic-expr] [prefixExpr p-prefix-expr] [infixExpr p-infix-expr] [pat p-pat] [patAt p-pat-at] [atomicPat p-atomic-pat] [prefixPat p-prefix-pat] [infixPat p-infix-pat] ;; from parse-builtins.rkt [unquoExpr p-unquo-expr] ;; ah, whatever listish [listishQ q-listish] [qIfy q-ify] ;uch TODO : etc . return lists ! [ ] [ decls p - decls ] ;; TODO: parse-eval & co ) (#:nodule Lex [#:tag TLPAREN] [#:tag TRPAREN] [#:tag TLBRACK] [#:tag TRBRACK] [#:tag TLBRACE] [#:tag TRBRACE] [#:tag TID] [#:tag TSYM] [#:tag TNUM] [#:tag TSTR]) )) ;; This is a crude hack but it works, so whatever. Ideally we'd expose only the ;; set of language primitives we actually need, but racket's baroque module and ;; namespace system makes that frustratingly complicated. 
(define-namespace-anchor anchor) (define anchor-ns (namespace-anchor->empty-namespace anchor)) (define (new-engine) (define ns (make-base-namespace)) (match-define (list parse-env resolve-env) (parameterize ([current-namespace ns]) ;; Attach existing modules to the namespace so can reuse them. ;; This prevents it re-creating all the tags we've defined, ;; which leads to weird bugs like: ;; - if False then 0 else 1 ; ;; 0 ;; - # wtf? ;; ;; FIXME: fails if our current directory isn't where values.rkt ;; etc. are! :( :( :( (namespace-attach-module anchor-ns "tags.rkt") (namespace-attach-module anchor-ns "values.rkt") (namespace-attach-module anchor-ns "env.rkt") (namespace-require "tags.rkt") (namespace-require "values.rkt") (namespace-require "env.rkt") (make-env))) (make-engine ns (env-join builtin-parse-env parse-env) resolve-env))
null
https://raw.githubusercontent.com/rntz/moxy/18015aed1596ae55658be4ac9eb5cbb3debb1644/runtime.rkt
racket
for the token tags @vars-var TODO: variadic #:tag TODO: string-append, string comparison convenience [putWith hash-put-with] [alter hash-alter] [map hash-map] [unions hash-unions] XXX lists these all return racket lists [filterBy pfilter] ;; need to adapt for booleans take ;; returns a racket list from parse.rkt TODO: p-qualified, p-var return a list from parse-builtins.rkt ah, whatever uch TODO: parse-eval & co This is a crude hack but it works, so whatever. Ideally we'd expose only the set of language primitives we actually need, but racket's baroque module and namespace system makes that frustratingly complicated. Attach existing modules to the namespace so can reuse them. This prevents it re-creating all the tags we've defined, which leads to weird bugs like: 0 - # wtf? FIXME: fails if our current directory isn't where values.rkt etc. are! :( :( :(
#lang racket (require (for-syntax syntax/parse)) (require "debug.rkt") (require "util.rkt") (require "tags.rkt") (require "values.rkt") (require "objects.rkt") (require "env.rkt") (require "engine.rkt") (require "pcomb.rkt") (require "parse.rkt") (require "parse-builtins.rkt") (require (prefix-in q- "quasi.rkt")) (provide new-engine) (define/match (envpair-join/2 a b) [((list ax ay) (list bx by)) (list (env-join ax bx) (env-join ay by))]) (define (envpair-join* l) (reduce l (list env-empty env-empty) envpair-join/2)) (define envpair-join (nary envpair-join*)) (define (vars x) (list env-empty (env-single @vars x))) (define (env-val name value) (let ((id (gensym name))) (namespace-set-variable-value! id value #t) (vars (@vars-var name id)))) (define/contract (env-tag tag ctor fields) (-> tag? any/c (listof symbol?) (list/c hash? hash?)) (let* ([name (tag-name tag)] [tag-id (mkid "tag:~a" name)] [ctor-id (mkid name)]) (namespace-set-variable-value! tag-id tag #t) (namespace-set-variable-value! ctor-id ctor #t) (vars (@vars-ctor name ctor-id tag-id (if (= 0 (tag-arity tag)) None (Just fields)))))) (define (env-nodule name envs) (match-define (list parse-env resolve-env) envs) (list (env-single @nodules (@nodules-nodule name (record [resolveExt resolve-env] [parseExt parse-env]))) env-empty)) (define-syntax-parser mkenv [(_ x:id) #'(env-val 'x x)] [(_ (name:id value)) #'(env-val 'name value)] [(_ (#:tag name:id)) #`(env-tag #,(tag-name-id #'name) name '#,(tag-fields #'name))] [(_ (#:nodule name:id defs ...)) #'(env-nodule 'name (mkenv defs ...))] [(_ part ...) #'(envpair-join (mkenv part) ...)]) (define (make-env) (mkenv [debug toggle-debug!] format [say printfln] [print (lambda (x) (write x) (display "\n"))] [#:tag True] [#:tag False] [not (compose truthify falsey?)] [toBool (compose truthify truthy?)] [#:tag L] [#:tag R] [#:tag Just] [#:tag None] maybe [fromMaybe from-maybe] [maybeMap maybe-map] [maybeFilter (lambda (v ok?) (maybe-filter v (compose truthy? 
ok?)))] [#:tag ExtPoint] [symbol string->symbol] gensym [string (lambda (x) (format "~a" x))] [racketEval eval] [eval (lambda (e) (eval (expr-compile e env-empty)))] [sexp (lambda (x) ((hash-get 'sexp x)))] [add (lambda (x y) (+ x y))] (#:nodule List [nil '()] cons list null append [concat (lambda (x) (append* x))] [build build-list] map [filter (lambda (p l) (filter (compose truthy? p) l))] foldr foldl [head car] [tail cdr]) (#:nodule Set [empty (set)] [has (lambda (elem set) (truthify (set-member? set elem)))] [fromList list->set] [toList set->list] [union set-union] [intersect set-intersect]) (#:nodule Hash [empty hash-empty] [isEmpty (compose truthify hash-empty?)] [size hash-count] [single hash-single] [has (compose truthify hash-has?)] [lookup hash-lookup] [get hash-get] [put hash-put] [delete hash-delete] [union hash-union] [keys hash-keys] [values hash-values] [fromList hash-from-list] [fromKeysValues hash-from-keys-values]) (#:nodule AST [mkId mkid] [mkTemp mktemp] [exprVar expr:var] [exprLit expr:lit] [exprLambda expr:lambda] [exprRacket expr:racket] [patVar pat:var] [declBegin decl:begin] [varLocal var:local] ) (#:nodule Ext [Exprs @exprs] [InfixExprs @infix-exprs] [Pats @pats] [InfixPats @infix-pats] [Decls @decls] [Tops @tops] [Modules @nodules] [Vars @vars] [QuoteForms @quote-forms]) (#:nodule Env [empty env-empty] [join env-join] [joins env-join*] [single env-single] [get env-get]) (#:nodule Quasi [pure q-pure] [lift q-lift] [map q-fmap] [ap q-ap] [quasi q-quasi] [unquo q-unquo] [quo q-quo] [seq q-seq] [list q-seq*] [run q-run]) (#:nodule Parse from pcomb.rkt [pure return] lift [map <$>] [ap <*>] [bind >>=] [join (lambda (k) (>>= k identity))] fail try ask local psum choice peof option optional [optionMaybe option-maybe] many many1 [skipMany skip-many] [skipMany1 skip-many1] [sepBy sep-by] [sepBy1 sep-by1] [endBy end-by] [endBy1 end-by1] [sepEndBy sep-end-by] [sepEndBy1 sep-end-by1] [beginSepBy begin-sep-by] [beginSepBy1 begin-sep-by1] [beginSepEndBy 
begin-sep-end-by] [beginSepEndBy1 begin-sep-end-by1] between [mapByMaybe pmap-maybe] expect [takeOne take-one] [satisfy (lambda (p . as) (apply satisfy (compose truthy? p) as))] [tryOneMaybe try-one-maybe] [anyOf any-of] [noneOf none-of] [localEnv local-env] keyword keysym comma dot semi colon equals bar lparen rparen lbrace rbrace lbrack rbrack parens braces brackets [string p-str] [number p-num] [literal p-lit] [id p-id] [anyId p-any-id] [varId p-var-id] [capsId p-caps-id] [expr p-expr] [exprAt p-expr-at] [atomicExpr p-atomic-expr] [prefixExpr p-prefix-expr] [infixExpr p-infix-expr] [pat p-pat] [patAt p-pat-at] [atomicPat p-atomic-pat] [prefixPat p-prefix-pat] [infixPat p-infix-pat] [unquoExpr p-unquo-expr] listish [listishQ q-listish] TODO : etc . return lists ! [ ] [ decls p - decls ] ) (#:nodule Lex [#:tag TLPAREN] [#:tag TRPAREN] [#:tag TLBRACK] [#:tag TRBRACK] [#:tag TLBRACE] [#:tag TRBRACE] [#:tag TID] [#:tag TSYM] [#:tag TNUM] [#:tag TSTR]) )) (define-namespace-anchor anchor) (define anchor-ns (namespace-anchor->empty-namespace anchor)) (define (new-engine) (define ns (make-base-namespace)) (match-define (list parse-env resolve-env) (parameterize ([current-namespace ns]) (namespace-attach-module anchor-ns "tags.rkt") (namespace-attach-module anchor-ns "values.rkt") (namespace-attach-module anchor-ns "env.rkt") (namespace-require "tags.rkt") (namespace-require "values.rkt") (namespace-require "env.rkt") (make-env))) (make-engine ns (env-join builtin-parse-env parse-env) resolve-env))
4f9ec6de11284eeda38bb3b0033ee197053353331386deab8f1808370dedeaae
dtgoitia/civil-autolisp
0 - Explode polyline.lsp
(defun c:ep ( / ent_name ptList width ) ; Draws individually every single segment of selected polylines (foreach a (ssnamex (ssget '((-4 . "<OR") (0 . "LWPOLYLINE") (-4 . "OR>")))) (if (= 'ENAME (type (cadr a))) (progn (ExplodeSinglePolyline (cadr a)) (vla-delete (vlax-ename->vla-object (cadr a))) );END progn );END if1 );END foreach ; End without double messages (princ) v0.5 - 2016.11.14 - No " Global Width " case considered v0.4 - 2016.11.14 - Linetype Scale management added ; v0.3 - 2016.11.14 - All code rewritten v0.2 - 2016.03.21 - Code optimized and comments translated into English . v0.1 - 2016.03.02 - Command - line name changed from BRP to EP . ; Variables added as local variables not to overlap with other routines. v0.0 - 2016.02.16 - First issue ; NOTE: It supports heavy and light polylines. NOTE : Can be used in polylines with 2 vertexes in the same position . ; NOTE: No problem with closed polylines. Author : Last revision : 2017.04.05 ) (defun ExplodeSinglePolyline (ent_name / i layerName ptList width color lineType lineTypeScale) (setq i 0 layerName (GetPolylineLayer ent_name) ptList (GetPolylinePointList ent_name) width (if (setq width (GetPolylineWidth ent_name)) width 0 ) color (GetPolylineColor ent_name) lineType (GetPolylineStyle ent_name) lineTypeScale (GetPolylineStyleScale ent_name) ) (while (< i (- (length ptList) 1) ) (setq p1 (nth i ptList) p2 (nth (+ i 1) ptList) ) (if (DrawPolylineSegment p1 p2 width color layerName lineType lineTypeScale) (setq i (+ i 1) ) (progn (princ "\nError drawing polyline segment from ") (princ p1) (princ " to ") (princ p2) (princ ".") );END progn );END if );END while ) (defun GetPolylinePointList (ent_name / param endParam pt ptList) (setq param 0 endParam (vlax-curve-getEndParam (vlax-ename->vla-object ent_name)) ) (while (<= param endParam) (setq pt (vlax-curve-getPointAtParam (vlax-ename->vla-object ent_name) param) ptList (append ptList (list pt) ) param (+ param 1) ) );END while ptList ) (defun 
GetPolylineWidth (ent_name) (cdr (assoc 43 (entget ent_name))) ) (defun GetPolylineColor (ent_name) (if (not (assoc 62 (entget ent_name))) 256 (cdr (assoc 62 (entget ent_name))) );END if ) (defun GetPolylineLayer (ent_name) (cdr (assoc 8 (entget ent_name))) ) (defun GetPolylineStyle (ent_name) (if (not (assoc 6 (entget ent_name))) "ByLayer" (cdr (assoc 6 (entget ent_name))) );END if ) (defun GetPolylineStyleScale (ent_name) (if (not (assoc 48 (entget ent_name))) nil (cdr (assoc 48 (entget ent_name))) );END if ) (defun DrawPolylineSegment (p1 p2 width color layerName lineType lineTypeScale) (entmakex (append (list (cons 0 "LWPOLYLINE") (cons 100 "AcDbEntity") (cons 100 "AcDbPolyline") (cons 8 layerName) (cons 90 2) ; Number of vertex (cons 70 128) ; Not closed polyline (cons 43 width) (cons 62 color) (cons 10 (list (nth 0 p1) (nth 1 p1) )) (cons 10 (list (nth 0 p2) (nth 1 p2) )) );END list (if lineType (list (cons 6 lineType)) ) (if lineTypeScale (list (cons 48 lineTypeScale)) ) );END append );END entmakex )
null
https://raw.githubusercontent.com/dtgoitia/civil-autolisp/72d68139d372c84014d160f8e4918f062356349f/0%20-%20Explode%20polyline.lsp
lisp
Draws individually every single segment of selected polylines END progn END if1 END foreach End without double messages v0.3 - 2016.11.14 - All code rewritten Variables added as local variables not to overlap with other routines. NOTE: It supports heavy and light polylines. NOTE: No problem with closed polylines. END progn END if END while END while END if END if END if Number of vertex Not closed polyline END list END append END entmakex
(defun c:ep ( / ent_name ptList width ) (foreach a (ssnamex (ssget '((-4 . "<OR") (0 . "LWPOLYLINE") (-4 . "OR>")))) (if (= 'ENAME (type (cadr a))) (progn (ExplodeSinglePolyline (cadr a)) (vla-delete (vlax-ename->vla-object (cadr a))) (princ) v0.5 - 2016.11.14 - No " Global Width " case considered v0.4 - 2016.11.14 - Linetype Scale management added v0.2 - 2016.03.21 - Code optimized and comments translated into English . v0.1 - 2016.03.02 - Command - line name changed from BRP to EP . v0.0 - 2016.02.16 - First issue NOTE : Can be used in polylines with 2 vertexes in the same position . Author : Last revision : 2017.04.05 ) (defun ExplodeSinglePolyline (ent_name / i layerName ptList width color lineType lineTypeScale) (setq i 0 layerName (GetPolylineLayer ent_name) ptList (GetPolylinePointList ent_name) width (if (setq width (GetPolylineWidth ent_name)) width 0 ) color (GetPolylineColor ent_name) lineType (GetPolylineStyle ent_name) lineTypeScale (GetPolylineStyleScale ent_name) ) (while (< i (- (length ptList) 1) ) (setq p1 (nth i ptList) p2 (nth (+ i 1) ptList) ) (if (DrawPolylineSegment p1 p2 width color layerName lineType lineTypeScale) (setq i (+ i 1) ) (progn (princ "\nError drawing polyline segment from ") (princ p1) (princ " to ") (princ p2) (princ ".") ) (defun GetPolylinePointList (ent_name / param endParam pt ptList) (setq param 0 endParam (vlax-curve-getEndParam (vlax-ename->vla-object ent_name)) ) (while (<= param endParam) (setq pt (vlax-curve-getPointAtParam (vlax-ename->vla-object ent_name) param) ptList (append ptList (list pt) ) param (+ param 1) ) ptList ) (defun GetPolylineWidth (ent_name) (cdr (assoc 43 (entget ent_name))) ) (defun GetPolylineColor (ent_name) (if (not (assoc 62 (entget ent_name))) 256 (cdr (assoc 62 (entget ent_name))) ) (defun GetPolylineLayer (ent_name) (cdr (assoc 8 (entget ent_name))) ) (defun GetPolylineStyle (ent_name) (if (not (assoc 6 (entget ent_name))) "ByLayer" (cdr (assoc 6 (entget ent_name))) ) (defun 
GetPolylineStyleScale (ent_name) (if (not (assoc 48 (entget ent_name))) nil (cdr (assoc 48 (entget ent_name))) ) (defun DrawPolylineSegment (p1 p2 width color layerName lineType lineTypeScale) (entmakex (append (list (cons 0 "LWPOLYLINE") (cons 100 "AcDbEntity") (cons 100 "AcDbPolyline") (cons 8 layerName) (cons 43 width) (cons 62 color) (cons 10 (list (nth 0 p1) (nth 1 p1) )) (cons 10 (list (nth 0 p2) (nth 1 p2) )) (if lineType (list (cons 6 lineType)) ) (if lineTypeScale (list (cons 48 lineTypeScale)) ) )
c811e627066ac82068349ef7ff0f73d84c317d27a73fdce2f8ea4d0cf1fd63b8
awslabs/s2n-bignum
bignum_amontmul.ml
* Copyright Amazon.com , Inc. or its affiliates . All Rights Reserved . * SPDX - License - Identifier : Apache-2.0 OR ISC * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 OR ISC *) (* ========================================================================= *) (* Almost-Montgomery multiplication of arbitrary bignums. *) (* ========================================================================= *) * * * print_literal_from_elf " arm / generic / bignum_amontmul.o " ; ; * * * ****) let bignum_amontmul_mc = define_assert_from_elf "bignum_amontmul_mc" "arm/generic/bignum_amontmul.o" [ arm_CBZ X0 ( word 300 ) arm_LDR X14 X4 ( Immediate_Offset ( word 0 ) ) arm_LSL X5 X14 ( rvalue ( word 2 ) ) arm_SUB X5 X14 X5 arm_EOR X5 X5 ( rvalue ( word 2 ) ) arm_MOV X6 ( rvalue ( word 1 ) ) arm_MADD X6 X14 X5 X6 arm_MUL X7 X6 X6 arm_MADD X5 X6 X5 X5 arm_MUL X6 X7 X7 arm_MADD X5 X7 X5 X5 arm_MUL X7 X6 X6 arm_MADD X5 X6 X5 X5 arm_MADD X5 X7 X5 X5 arm_MOV X8 XZR arm_STR XZR X1 ( Shiftreg_Offset X8 3 ) arm_ADD X8 X8 ( rvalue ( word 1 ) ) arm_CMP X8 X0 arm_BCC ( word 2097140 ) arm_MOV X6 XZR arm_MOV X8 XZR arm_LDR X9 X2 ( Shiftreg_Offset X8 3 ) arm_MOV X10 XZR arm_ADDS X11 XZR XZR arm_LDR X14 X3 ( Shiftreg_Offset X10 3 ) arm_LDR X12 X1 ( Shiftreg_Offset X10 3 ) 0x9b0e7d2d; (* arm_MUL X13 X9 X14 *) arm_ADCS X12 X12 X11 arm_UMULH X11 arm_ADC X11 X11 XZR 0xab0d018c; (* arm_ADDS X12 X12 X13 *) arm_STR X12 X1 ( Shiftreg_Offset X10 3 ) arm_ADD X10 X10 ( rvalue ( word 1 ) ) arm_SUB X14 X10 X0 arm_CBNZ X14 ( word 2097112 ) arm_ADCS X6 X6 X11 arm_ADC X7 XZR XZR arm_LDR X12 X1 ( Immediate_Offset ( word 0 ) ) arm_MUL X9 X12 X5 arm_LDR X14 X4 ( Immediate_Offset ( word 0 ) ) 0x9b0e7d2d; (* arm_MUL X13 X9 X14 *) arm_UMULH X11 0xab0d018c; (* arm_ADDS X12 X12 X13 *) arm_MOV X10 ( rvalue ( word 1 ) ) arm_SUB X14 X0 ( rvalue ( word 1 ) ) arm_CBZ X14 ( word 52 ) arm_LDR X14 X4 ( Shiftreg_Offset X10 3 ) arm_LDR X12 X1 ( Shiftreg_Offset X10 3 ) 
0x9b0e7d2d; (* arm_MUL X13 X9 X14 *) arm_ADCS X12 X12 X11 arm_UMULH X11 arm_ADC X11 X11 XZR 0xab0d018c; (* arm_ADDS X12 X12 X13 *) arm_SUB X13 X10 ( rvalue ( word 1 ) ) arm_STR X12 X1 ( Shiftreg_Offset X13 3 ) arm_ADD X10 X10 ( rvalue ( word 1 ) ) arm_SUB X14 X10 X0 arm_CBNZ X14 ( word 2097108 ) arm_ADCS X11 X6 X11 arm_ADC X6 X7 XZR arm_SUB X13 X10 ( rvalue ( word 1 ) ) arm_STR X11 X1 ( Shiftreg_Offset X13 3 ) arm_ADD X8 X8 ( rvalue ( word 1 ) ) arm_CMP X8 X0 arm_BCC ( word 2096980 ) arm_NEG X6 X6 arm_NEGS X10 XZR arm_LDR X14 X1 ( Shiftreg_Offset X10 3 ) arm_LDR X12 X4 ( Shiftreg_Offset X10 3 ) arm_AND X12 X12 X6 0xfa0c01ce; (* arm_SBCS X14 X14 X12 *) arm_STR X14 X1 ( Shiftreg_Offset X10 3 ) arm_ADD X10 X10 ( rvalue ( word 1 ) ) arm_SUB X14 X10 X0 arm_CBNZ X14 ( word 2097124 ) arm_RET X30 ];; let BIGNUM_AMONTMUL_EXEC = ARM_MK_EXEC_RULE bignum_amontmul_mc;; (* ------------------------------------------------------------------------- *) (* Proof. *) (* ------------------------------------------------------------------------- *) * * This actually works mod 32 but if anything this is more convenient * * let WORD_NEGMODINV_SEED_LEMMA_16 = prove (`!a x:int64. 
ODD a /\ word_xor (word_sub (word a) (word_shl (word a) 2)) (word 2) = x ==> (a * val x + 1 == 0) (mod 16)`, REPEAT STRIP_TAC THEN REWRITE_TAC[CONG; MOD_0] THEN TRANS_TAC EQ_TRANS `(val(word a:int64) MOD 16 * val(x:int64) MOD 16 + 1) MOD 16` THEN REWRITE_TAC[ARITH_RULE `16 = 2 EXP 4`] THEN CONJ_TAC THENL [REWRITE_TAC[VAL_WORD; DIMINDEX_64; MOD_MOD_EXP_MIN] THEN CONV_TAC NUM_REDUCE_CONV THEN CONV_TAC MOD_DOWN_CONV THEN REFL_TAC; REWRITE_TAC[VAL_MOD; NUMSEG_LT; ARITH_EQ; ARITH]] THEN SUBGOAL_THEN `bit 0 (word a:int64)` MP_TAC THENL [ASM_REWRITE_TAC[BIT_LSB_WORD]; EXPAND_TAC "x" THEN POP_ASSUM_LIST(K ALL_TAC) THEN DISCH_TAC] THEN CONV_TAC(ONCE_DEPTH_CONV EXPAND_NSUM_CONV) THEN CONV_TAC(TOP_DEPTH_CONV BIT_WORD_CONV) THEN MAP_EVERY ASM_CASES_TAC [`bit 1 (word a:int64)`;`bit 2 (word a:int64)`;`bit 3 (word a:int64)`] THEN ASM_REWRITE_TAC[BITVAL_CLAUSES] THEN CONV_TAC NUM_REDUCE_CONV);; let BIGNUM_AMONTMUL_CORRECT = time prove (`!k z x y m a b n pc. ALL (nonoverlapping (z,8 * val k)) [(word pc,0x130); (x,8 * val k); (y,8 * val k); (m,8 * val k)] ==> ensures arm (\s. aligned_bytes_loaded s (word pc) bignum_amontmul_mc /\ read PC s = word pc /\ C_ARGUMENTS [k; z; x; y; m] s /\ bignum_from_memory (x,val k) s = a /\ bignum_from_memory (y,val k) s = b /\ bignum_from_memory (m,val k) s = n) (\s. 
read PC s = word(pc + 0x12c) /\ (ODD n ==> (bignum_from_memory (z,val k) s == inverse_mod n (2 EXP (64 * val k)) * a * b) (mod n))) (MAYCHANGE [PC; X5; X6; X7; X8; X9; X10; X11; X12; X13; X14] ,, MAYCHANGE [memory :> bytes(z,8 * val k)] ,, MAYCHANGE SOME_FLAGS)`, W64_GEN_TAC `k:num` THEN MAP_EVERY X_GEN_TAC [`z:int64`; `x:int64`; `y:int64`; `m:int64`] THEN MAP_EVERY X_GEN_TAC [`a:num`; `b:num`; `n:num`; `pc:num`] THEN REWRITE_TAC[ALL; NONOVERLAPPING_CLAUSES] THEN REWRITE_TAC[C_ARGUMENTS; C_RETURN; SOME_FLAGS] THEN STRIP_TAC THEN MAP_EVERY (BIGNUM_TERMRANGE_TAC `k:num`) [`a:num`; `b:num`; `n:num`] THEN (*** Degenerate k = 0 case ***) ASM_CASES_TAC `k = 0` THENL [UNDISCH_THEN `k = 0` SUBST_ALL_TAC THEN REPEAT(FIRST_X_ASSUM(SUBST_ALL_TAC o MATCH_MP (ARITH_RULE `a < 2 EXP (64 * 0) ==> a = 0`))) THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[ODD]; ALL_TAC] THEN (*** Initial word-level modular inverse ***) ENSURES_SEQUENCE_TAC `pc + 0x38` `\s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (x,k) s = a /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ (ODD n ==> (n * val(read X5 s) + 1 == 0) (mod (2 EXP 64)))` THEN CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN SUBGOAL_THEN `bignum_from_memory(m,k) s0 = highdigits n 0` MP_TAC THENL [ASM_REWRITE_TAC[HIGHDIGITS_0; BIGNUM_FROM_MEMORY_BYTES]; GEN_REWRITE_TAC LAND_CONV[BIGNUM_FROM_MEMORY_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; ADD_CLAUSES] THEN REWRITE_TAC[GSYM LOWDIGITS_1; lowdigits; MULT_CLAUSES] THEN REWRITE_TAC[GSYM DIMINDEX_64; WORD_MOD_SIZE] THEN REWRITE_TAC[DIMINDEX_64] THEN STRIP_TAC] THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--5) THEN SUBGOAL_THEN `ODD n ==> (n * val (read X5 s5) + 1 == 0) (mod 16)` MP_TAC THENL [ASM_SIMP_TAC[WORD_NEGMODINV_SEED_LEMMA_16]; ALL_TAC] THEN REABBREV_TAC `x0 = read X5 s5` THEN DISCH_TAC THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (6--14) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[VAL_WORD_MUL; VAL_WORD_ADD; VAL_WORD; DIMINDEX_64; CONG] THEN CONV_TAC MOD_DOWN_CONV THEN REWRITE_TAC[GSYM CONG] THEN SUBST1_TAC(ARITH_RULE `2 EXP 64 = 16 EXP (2 EXP 4)`) THEN DISCH_THEN(fun th -> FIRST_X_ASSUM(MP_TAC o C MATCH_MP th)) THEN SPEC_TAC(`16`,`e:num`) THEN CONV_TAC NUM_REDUCE_CONV THEN CONV_TAC NUMBER_RULE; GHOST_INTRO_TAC `w:num` `\s. val(read X5 s)` THEN REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64]] THEN GLOBALIZE_PRECONDITION_TAC THEN VAL_INT64_TAC `w:num` THEN (*** Get a basic bound on k from the nonoverlapping assumptions ***) FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ] NONOVERLAPPING_IMP_SMALL_1)) THEN ANTS_TAC THENL [CONV_TAC NUM_REDUCE_CONV; DISCH_TAC] THEN (*** Setup of the main loop with zeroing and corrective start/end ***) ENSURES_WHILE_UP_TAC `k:num` `pc + 0x54` `pc + 0xfc` `\i s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory(word_add x (word(8 * i)),k - i) s = highdigits a i /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ ?q r. q < 2 EXP (64 * i) /\ r < 2 EXP (64 * i) /\ 2 EXP (64 * i) * (2 EXP (64 * k) * val(read X6 s) + bignum_from_memory(z,k) s) + r = q * n + lowdigits a i * b /\ (ODD n ==> r = 0)` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [ENSURES_WHILE_UP_TAC `k:num` `pc + 0x3c` `pc + 0x44` `\i s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory (x,k) s = a /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ bignum_from_memory (z,i) s = 0` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[]; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN REWRITE_TAC[VAL_WORD_0; MULT_CLAUSES; ADD_CLAUSES]; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[]; ARM_SIM_TAC BIGNUM_AMONTMUL_EXEC (1--4) THEN ASM_REWRITE_TAC[VAL_WORD_0; MULT_CLAUSES; LOWDIGITS_0; ADD_CLAUSES; SUB_0; WORD_ADD_0; HIGHDIGITS_0] THEN REPEAT(EXISTS_TAC `0`) THEN ARITH_TAC]; ALL_TAC; (*** This is the main loop invariant: save for later ***) X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN 
REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[]; (*** This is the corrective subtraction part.... ***) GHOST_INTRO_TAC `cout:num` `\s. val(read X6 s)` THEN GHOST_INTRO_TAC `mm:num` `bignum_from_memory(z,k)` THEN BIGNUM_TERMRANGE_TAC `k:num` `mm:num` THEN ASM_SIMP_TAC[LOWDIGITS_SELF] THEN GLOBALIZE_PRECONDITION_TAC THEN ASM_REWRITE_TAC[] THEN FIRST_X_ASSUM(X_CHOOSE_THEN `q:num` (X_CHOOSE_THEN `r:num` STRIP_ASSUME_TAC)) THEN SUBGOAL_THEN `cout < 2` MP_TAC THENL [SUBGOAL_THEN `q * n + a * b < 2 EXP (64 * k) EXP 2 * 2` MP_TAC THENL [MATCH_MP_TAC(ARITH_RULE `x < e * e /\ y < e * e ==> x + y < e EXP 2 * 2`) THEN ASM_SIMP_TAC[LT_MULT2]; FIRST_X_ASSUM(fun th -> GEN_REWRITE_TAC (LAND_CONV o LAND_CONV) [SYM th]) THEN DISCH_THEN(MP_TAC o MATCH_MP (ARITH_RULE `e * (e * c + m) + r < e EXP 2 * 2 ==> e EXP 2 * c < e EXP 2 * 2`)) THEN REWRITE_TAC[LT_MULT_LCANCEL; EXP_EQ_0; ARITH_EQ]]; GEN_REWRITE_TAC LAND_CONV [NUM_AS_BITVAL_ALT]] THEN DISCH_THEN(X_CHOOSE_THEN `c:bool` SUBST_ALL_TAC) THEN REWRITE_TAC[VAL_EQ_BITVAL] THEN ENSURES_WHILE_UP_TAC `k:num` `pc + 0x10c` `pc + 0x124` `\i s. 
read X0 s = word k /\ read X1 s = z /\ read X4 s = m /\ read X6 s = word_neg(word(bitval c)) /\ read X10 s = word i /\ bignum_from_memory (word_add z (word(8 * i)),k - i) s = highdigits mm i /\ bignum_from_memory (word_add m (word(8 * i)),k - i) s = highdigits n i /\ &(bignum_from_memory(z,i) s):real = &2 pow (64 * i) * &(bitval(~read CF s)) + &(lowdigits mm i) - &(bitval c) * &(lowdigits n i)` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--4) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[WORD_SUB_LZERO; SUB_0; GSYM BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[WORD_ADD_0; MULT_CLAUSES; BITVAL_CLAUSES] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL; LOWDIGITS_0] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; HIGHDIGITS_0] THEN REAL_ARITH_TAC; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN GHOST_INTRO_TAC `cin:bool` `read CF` THEN GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV) [BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[SUB_EQ_0; GSYM NOT_LT] THEN REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [4] (1--6) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ) THEN SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REWRITE_TAC[WORD_ADD; WORD_NEG_NEG; VAL_WORD_BITVAL; WORD_BITVAL_EQ_0; LOWDIGITS_CLAUSES; WORD_NEG_EQ_0; BITVAL_BOUND; NOT_LT] THEN REWRITE_TAC[WORD_AND_MASK] THEN COND_CASES_TAC THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ASM_REWRITE_TAC[NOT_LT] THEN SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND; VAL_WORD_0; BITVAL_CLAUSES; ARITH_RULE `64 * (i + 1) = 64 * i + 64`] THEN REWRITE_TAC[REAL_POW_ADD] THEN CONV_TAC REAL_RING; X_GEN_TAC `i:num` THEN 
STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]; REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0] THEN DISCH_TAC THEN UNDISCH_TAC `ODD n ==> r = 0` THEN ASM_REWRITE_TAC[] THEN DISCH_THEN SUBST_ALL_TAC THEN RULE_ASSUM_TAC(REWRITE_RULE[ADD_CLAUSES])] THEN FIRST_ASSUM(MATCH_MP_TAC o MATCH_MP (NUMBER_RULE `e * x:num = q * n + ab ==> (i * e == 1) (mod n) /\ (y == x) (mod n) ==> (y == i * ab) (mod n)`)) THEN ASM_REWRITE_TAC[INVERSE_MOD_LMUL_EQ; COPRIME_REXP; COPRIME_2] THEN REWRITE_TAC[num_congruent; GSYM INT_OF_NUM_CLAUSES] THEN MATCH_MP_TAC(INTEGER_RULE `x = (e - n) * c + m ==> (x:int == e * c + m) (mod n)`) THEN MATCH_MP_TAC INT_CONG_IMP_EQ THEN EXISTS_TAC `(&2:int) pow (64 * k)` THEN CONJ_TAC THENL [MATCH_MP_TAC(INT_ARITH `&0 <= x /\ x < e /\ &0 <= y /\ y < e ==> abs(x - y:int) < e`) THEN ASM_SIMP_TAC[INT_OF_NUM_CLAUSES; INT_OF_NUM_SUB; LT_IMP_LE; LE_0] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; BIGNUM_FROM_MEMORY_BOUND] THEN UNDISCH_TAC `2 EXP (64 * k) * (2 EXP (64 * k) * bitval c + mm) = q * n + a * b` THEN ASM_CASES_TAC `c:bool` THEN ASM_REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES; BITVAL_CLAUSES] THEN ASM_SIMP_TAC[ARITH_RULE `n:num < e ==> ((e - n) + m < e <=> m < n)`] THEN DISCH_THEN(MP_TAC o MATCH_MP (ARITH_RULE `e * (e + m):num = qn + ab ==> ab < e * e ==> e * m < qn`)) THEN ASM_SIMP_TAC[LT_MULT2] THEN REWRITE_TAC[GSYM NOT_LE; CONTRAPOS_THM] THEN ASM_SIMP_TAC[LE_MULT2; LT_IMP_LE]; REWRITE_TAC[INTEGER_RULE `(z:int == (e - n) * c + m) (mod e) <=> (z + n * c == m) (mod e)`] THEN REWRITE_TAC[INT_OF_NUM_CLAUSES; GSYM num_congruent] THEN REWRITE_TAC[REAL_CONGRUENCE; EXP_EQ_0; ARITH_EQ] THEN ASM_SIMP_TAC[GSYM REAL_OF_NUM_CLAUSES; LOWDIGITS_SELF] THEN 
REWRITE_TAC[real_sub; GSYM REAL_ADD_ASSOC] THEN REWRITE_TAC[REAL_FIELD `(&2 pow n * x + y) / &2 pow n = x + y / &2 pow n`] THEN REAL_INTEGER_TAC]] THEN X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN GHOST_INTRO_TAC `cout:num` `\s. val(read X6 s)` THEN GHOST_INTRO_TAC `z1:num` `bignum_from_memory(z,k)` THEN BIGNUM_TERMRANGE_TAC `k:num` `z1:num` THEN GLOBALIZE_PRECONDITION_TAC THEN ASM_REWRITE_TAC[] THEN FIRST_X_ASSUM(X_CHOOSE_THEN `q:num` (X_CHOOSE_THEN `r:num` STRIP_ASSUME_TAC)) THEN REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64] THEN GLOBALIZE_PRECONDITION_TAC THEN GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV) [BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[ARITH_RULE `k - i = 0 <=> ~(i < k)`] THEN REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN (*** The multiply-add loop ***) ENSURES_SEQUENCE_TAC `pc + 0x94` `\s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ ?c1 c0. c0 < 2 EXP 64 /\ read X7 s = word(bitval c1) /\ read X6 s = word c0 /\ 2 EXP (64 * k) * (2 EXP 64 * bitval c1 + c0) + bignum_from_memory (z,k) s = (2 EXP (64 * k) * cout + z1) + bigdigit a i * b` THEN CONJ_TAC THENL [ENSURES_WHILE_UP_TAC `k:num` `pc + 0x60` `pc + 0x84` `\j s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X6 s = word cout /\ read X8 s = word i /\ read X9 s = word(bigdigit a i) /\ read X10 s = word j /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ bignum_from_memory(word_add z (word (8 * j)),k - j) s = highdigits z1 j /\ bignum_from_memory(word_add y (word (8 * j)),k - j) s = highdigits b j /\ 2 EXP (64 * j) * (bitval(read CF s) + val(read X11 s)) + bignum_from_memory(z,j) s = lowdigits z1 j + bigdigit a i * lowdigits b j` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--3) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; VAL_WORD_0] THEN REWRITE_TAC[LOWDIGITS_0; BIGNUM_FROM_MEMORY_TRIVIAL] THEN REWRITE_TAC[BITVAL_CLAUSES; ADD_CLAUSES; MULT_CLAUSES; WORD_ADD_0] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; HIGHDIGITS_0; SUB_0]; X_GEN_TAC `j:num` THEN STRIP_TAC THEN VAL_INT64_TAC `j:num` THEN GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hin:int64` `read X11` THEN MP_TAC(GENL [`x:int64`; `a:num`] (ISPECL [`x:int64`; `k - j:num`; `a:num`; `j:num`] BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS)) THEN ASM_REWRITE_TAC[ARITH_RULE `k - j = 0 <=> ~(j < k)`] THEN DISCH_THEN(fun th -> ONCE_REWRITE_TAC[th]) THEN REWRITE_TAC[ARITH_RULE `k - j - 1 = k - (j + 1)`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [3;4;6;7] (1--9) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN REWRITE_TAC[LOWDIGITS_CLAUSES] THEN GEN_REWRITE_TAC RAND_CONV [ARITH_RULE `(e * d1 + d0) + c * (e * a1 + a0):num = e * (c * a1 + d1) + d0 + c * a0`] THEN FIRST_X_ASSUM(fun th -> 
GEN_REWRITE_TAC (RAND_CONV o RAND_CONV) [SYM th]) THEN REWRITE_TAC[EXP_ADD; ARITH_RULE `64 * (j + 1) = 64 * j + 64`] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ) THEN GEN_REWRITE_TAC LAND_CONV [TAUT `p /\ q /\ r /\ s <=> p /\ r /\ q /\ s`] THEN DISCH_THEN(MP_TAC o end_itlist CONJ o DECARRY_RULE o CONJUNCTS) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN CONV_TAC REAL_RING; X_GEN_TAC `j:num` THEN STRIP_TAC THEN VAL_INT64_TAC `j:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]; GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hout:int64` `read X11` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN RULE_ASSUM_TAC(REWRITE_RULE[VAL_WORD_SUB_EQ_0]) THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [3] (3--4) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN MAP_EVERY EXISTS_TAC [`carry_s3:bool`; `val(sum_s3:int64)`] THEN ASM_REWRITE_TAC[ADD_CLAUSES; VAL_BOUND_64; WORD_VAL] THEN ASM_REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64] THEN ONCE_REWRITE_TAC[REAL_ARITH `e * (c + h + b) + y:real = e * c + e * (b + h) + y`] THEN ASM_REWRITE_TAC[REAL_OF_NUM_CLAUSES] THEN ASM_SIMP_TAC[LOWDIGITS_SELF] THEN ARITH_TAC]; ALL_TAC] THEN (*** Tidying up a bit ***) GHOST_INTRO_TAC `z2:num` `bignum_from_memory(z,k)` THEN BIGNUM_TERMRANGE_TAC `k:num` `z2:num` THEN GHOST_INTRO_TAC `g6:int64` `read X6` THEN GHOST_INTRO_TAC `g7:int64` `read X7` THEN GLOBALIZE_PRECONDITION_TAC THEN ASM_REWRITE_TAC[] THEN FIRST_X_ASSUM(X_CHOOSE_THEN `c1:bool` (X_CHOOSE_THEN `c0:num` STRIP_ASSUME_TAC)) THEN UNDISCH_THEN `g6:int64 = word c0` SUBST_ALL_TAC THEN UNDISCH_THEN `g7:int64 = word(bitval c1)` 
SUBST_ALL_TAC THEN * * The initial prelude of the reduction * * ABBREV_TAC `q0 = (w * z2) MOD 2 EXP 64` THEN SUBGOAL_THEN `q0 < 2 EXP 64 /\ val(word q0:int64) = q0` STRIP_ASSUME_TAC THENL [EXPAND_TAC "q0" THEN CONJ_TAC THENL [ARITH_TAC; ALL_TAC] THEN REWRITE_TAC[VAL_WORD; DIMINDEX_64; MOD_MOD_REFL]; ALL_TAC] THEN ENSURES_SEQUENCE_TAC `pc + 0xb4` `\s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ bignum_from_memory (z,k) s = z2 /\ read X6 s = word c0 /\ read X7 s = word (bitval c1) /\ read X9 s = word q0 /\ read X10 s = word 1 /\ read X14 s = word(k - 1) /\ 2 EXP 64 * (bitval(read CF s) + val(read X11 s)) + val(read X12 s) = q0 * bigdigit n 0 + bigdigit z2 0` THEN CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN SUBGOAL_THEN `bignum_from_memory(m,k) s0 = highdigits n 0 /\ bignum_from_memory(z,k) s0 = highdigits z2 0` MP_TAC THENL [ASM_REWRITE_TAC[HIGHDIGITS_0; BIGNUM_FROM_MEMORY_BYTES]; GEN_REWRITE_TAC (LAND_CONV o BINOP_CONV) [BIGNUM_FROM_MEMORY_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; ADD_CLAUSES] THEN STRIP_TAC] THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [4;6] (1--8) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN MATCH_MP_TAC(TAUT `p /\ (p ==> q) ==> p /\ q`) THEN CONJ_TAC THENL [UNDISCH_THEN `(w * z2) MOD 2 EXP 64 = q0` (SUBST1_TAC o SYM) THEN ONCE_REWRITE_TAC[GSYM WORD_MOD_SIZE] THEN REWRITE_TAC[GSYM LOWDIGITS_1; lowdigits; MULT_CLAUSES] THEN REWRITE_TAC[ADD_CLAUSES; DIMINDEX_64; VAL_WORD] THEN CONV_TAC MOD_DOWN_CONV THEN REWRITE_TAC[MULT_SYM]; DISCH_THEN SUBST_ALL_TAC] THEN ASM_REWRITE_TAC[WORD_SUB; ARITH_RULE `1 <= k <=> ~(k = 0)`] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ o DECARRY_RULE) 
THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REAL_ARITH_TAC; ALL_TAC] THEN (*** Break at "montend" to share the end reasoning ***) GHOST_INTRO_TAC `r0:num` `\s. val(read X12 s)` THEN REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64] THEN GLOBALIZE_PRECONDITION_TAC THEN ENSURES_SEQUENCE_TAC `pc + 0xe8` `\s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X6 s = word c0 /\ read X7 s = word (bitval c1) /\ read X8 s = word i /\ read X9 s = word q0 /\ read X10 s = word k /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ 2 EXP (64 * k) * (bitval(read CF s) + val(read X11 s)) + 2 EXP 64 * bignum_from_memory (z,k - 1) s + r0 = lowdigits z2 k + q0 * lowdigits n k` THEN CONJ_TAC THENL [ASM_CASES_TAC `k = 1` THENL [UNDISCH_THEN `k = 1` SUBST_ALL_TAC THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL] THEN ASM_REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES] THEN REWRITE_TAC[LOWDIGITS_1] THEN ARITH_TAC; ALL_TAC] THEN * * The reduction loop * * VAL_INT64_TAC `k - 1` THEN ENSURES_WHILE_AUP_TAC `1` `k:num` `pc + 0xb8` `pc + 0xe0` `\j s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X6 s = word c0 /\ read X7 s = word (bitval c1) /\ read X8 s = word i /\ read X9 s = word q0 /\ read X10 s = word j /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ bignum_from_memory(word_add z (word (8 * j)),k - j) s = highdigits z2 j /\ bignum_from_memory(word_add m (word (8 * j)),k - j) s = highdigits n j /\ 2 EXP (64 * j) * (bitval(read CF s) + val(read X11 s)) + 2 EXP 64 * bignum_from_memory(z,j-1) s + r0 = lowdigits z2 j + q0 * lowdigits n j` THEN REPEAT CONJ_TAC THENL [ASM_REWRITE_TAC[ARITH_RULE `1 < k <=> ~(k = 0 \/ k = 1)`]; REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0] THEN ASM_REWRITE_TAC[ARITH_RULE `k <= 1 <=> k = 0 \/ k = 1`] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_DIV; BIGNUM_FROM_MEMORY_TRIVIAL] THEN ASM_REWRITE_TAC[GSYM highdigits; BIGNUM_FROM_MEMORY_BYTES] THEN ASM_REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES; LOWDIGITS_1] THEN ARITH_TAC; X_GEN_TAC `j:num` THEN STRIP_TAC THEN MAP_EVERY VAL_INT64_TAC [`j:num`; `j - 1`] THEN SUBGOAL_THEN `word_sub (word j) (word 1):int64 = word(j - 1)` ASSUME_TAC THENL [ASM_REWRITE_TAC[WORD_SUB]; ALL_TAC] THEN GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hin:int64` `read X11` THEN MP_TAC(GENL [`x:int64`; `a:num`] (ISPECL [`x:int64`; `k - j:num`; `a:num`; `j:num`] BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS)) THEN ASM_REWRITE_TAC[ARITH_RULE `k - j = 0 <=> ~(j < k)`] THEN DISCH_THEN(fun th -> ONCE_REWRITE_TAC[th]) THEN REWRITE_TAC[ARITH_RULE `k - j - 1 = k - (j + 1)`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN UNDISCH_THEN `val(word 
q0:int64) = q0` (K ALL_TAC) THEN ABBREV_TAC `j' = j - 1` THEN SUBGOAL_THEN `j = j' + 1` SUBST_ALL_TAC THENL [EXPAND_TAC "j'" THEN UNDISCH_TAC `1 <= j` THEN ARITH_TAC; ALL_TAC] THEN RULE_ASSUM_TAC(REWRITE_RULE[ARITH_RULE `(j' + 1) + 1 = j' + 2`]) THEN REWRITE_TAC[ARITH_RULE `(j' + 1) + 1 = j' + 2`] THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [3;4;6;7] (1--10) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN CONJ_TAC THENL [CONV_TAC WORD_RULE; ALL_TAC] THEN REWRITE_TAC[ARITH_RULE `(n + 2) - 1 = n + 1`] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN SUBGOAL_THEN `j' + 2 = (j' + 1) + 1` MP_TAC THENL [ARITH_TAC; DISCH_THEN SUBST_ALL_TAC] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ONCE_REWRITE_TAC[LOWDIGITS_CLAUSES] THEN GEN_REWRITE_TAC RAND_CONV [ARITH_RULE `(e * d1 + d0) + c * (e * a1 + a0):num = e * (c * a1 + d1) + d0 + c * a0`] THEN FIRST_X_ASSUM(fun th -> GEN_REWRITE_TAC (RAND_CONV o RAND_CONV) [SYM th]) THEN REWRITE_TAC[EXP_ADD; ARITH_RULE `64 * (j + 1) = 64 * j + 64`] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ) THEN GEN_REWRITE_TAC LAND_CONV [TAUT `p /\ q /\ r /\ s <=> p /\ r /\ q /\ s`] THEN DISCH_THEN(MP_TAC o end_itlist CONJ o DECARRY_RULE o CONJUNCTS) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN CONV_TAC REAL_RING; X_GEN_TAC `j:num` THEN STRIP_TAC THEN MAP_EVERY VAL_INT64_TAC [`j:num`; `j - 1`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]; REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]]; ALL_TAC] THEN (*** The final digit write ****) ASM_SIMP_TAC[LOWDIGITS_SELF] THEN 
GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hin:int64` `read X11` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN VAL_INT64_TAC `k - 1` THEN SUBGOAL_THEN `word_sub (word k) (word 1):int64 = word(k - 1)` ASSUME_TAC THENL [ASM_REWRITE_TAC[WORD_SUB; ARITH_RULE `1 <= k <=> ~(k = 0)`]; ALL_TAC] THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [1;2] (1--5) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN CONJ_TAC THENL [CONV_TAC WORD_RULE; ALL_TAC] THEN (*** The final mathematics of the outer loop invariant ***) MAP_EVERY EXISTS_TAC [`2 EXP (64 * i) * q0 + q`; `2 EXP (64 * i) * r0 + r`] THEN GEN_REWRITE_TAC I [CONJ_ASSOC] THEN CONJ_TAC THENL [REWRITE_TAC[EXP_ADD; ARITH_RULE `64 * (i + 1) = 64 * i + 64`] THEN CONJ_TAC THEN MATCH_MP_TAC(ARITH_RULE `q1 < e /\ q0 < ee /\ (q1 < e ==> (q1 + 1) * ee <= e * ee) ==> ee * q1 + q0 < ee * e`) THEN ASM_REWRITE_TAC[LE_MULT_RCANCEL; EXP_EQ_0; ARITH_EQ] THEN ASM_REWRITE_TAC[ARITH_RULE `n + 1 <= m <=> n < m`]; ALL_TAC] THEN CONJ_TAC THENL [SUBGOAL_THEN `8 * k = 8 * ((k - 1) + 1)` SUBST1_TAC THENL [UNDISCH_TAC `~(k = 0)` THEN ARITH_TAC; REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES]] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC; GSYM EXP_ADD] THEN REWRITE_TAC[GSYM LEFT_ADD_DISTRIB] THEN SUBGOAL_THEN `(i + 1) + (k - 1) = i + k` SUBST1_TAC THENL [UNDISCH_TAC `i:num < k` THEN ARITH_TAC; ALL_TAC] THEN REWRITE_TAC[LEFT_ADD_DISTRIB; EXP_ADD; MULT_CLAUSES] THEN REWRITE_TAC[LOWDIGITS_CLAUSES] THEN REPEAT(FIRST_X_ASSUM(MP_TAC o check (can (term_match [] `2 EXP (64 * k) * x + y = z`) o concl))) THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ o DECARRY_RULE) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64] THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN CONV_TAC REAL_RING; DISCH_THEN(fun th -> REPEAT(FIRST_X_ASSUM(STRIP_ASSUME_TAC o C MATCH_MP th))) THEN 
  (* Final stretch of the outer-loop invariant proof: show the low reduction
     digit r0 vanishes mod 2^64, using the word-level negated modular inverse
     property (n * w + 1 == 0) (mod 2^64) established earlier and the
     definition q0 = (w * z2) MOD 2^64. *)
  ASM_REWRITE_TAC[ADD_EQ_0; MULT_EQ_0; EXP_EQ_0; ARITH_EQ] THEN
  MATCH_MP_TAC CONG_IMP_EQ THEN EXISTS_TAC `2 EXP 64` THEN
  ASM_REWRITE_TAC[EXP_LT_0; ARITH_EQ] THEN
  FIRST_X_ASSUM(MATCH_MP_TAC o MATCH_MP (NUMBER_RULE
   `ee * x + e * y + r = z
    ==> e divides ee /\ (z == 0) (mod e) ==> (r == 0) (mod e)`)) THEN
  CONJ_TAC THENL
   [(* 2^64 divides 2^(64*k) since k is nonzero *)
    MATCH_MP_TAC DIVIDES_EXP_LE_IMP THEN UNDISCH_TAC `~(k = 0)` THEN
    ARITH_TAC;
    UNDISCH_THEN `(w * z2) MOD 2 EXP 64 = q0` (SUBST1_TAC o SYM)] THEN
  REWRITE_TAC[CONG] THEN CONV_TAC MOD_DOWN_CONV THEN
  REWRITE_TAC[GSYM CONG] THEN MATCH_MP_TAC(NUMBER_RULE
   `(n * w + 1 == 0) (mod e) ==> (z + (w * z) * n == 0) (mod e)`) THEN
  ASM_REWRITE_TAC[]]);;

(* ------------------------------------------------------------------------- *)
(* Subroutine-level correctness: same statement as BIGNUM_AMONTMUL_CORRECT   *)
(* but phrased for a full call, adding the link-register hypothesis          *)
(* `read X30 s = returnaddress` to the precondition and concluding with      *)
(* `read PC s = returnaddress` instead of a fixed offset from pc.  Under the *)
(* ODD n hypothesis, the output buffer z holds a residue congruent to        *)
(* inverse_mod n (2 EXP (64 * k)) * a * b modulo n (almost-Montgomery:       *)
(* congruence mod n, not necessarily fully reduced below n).  Derived        *)
(* mechanically from the core theorem, since the code uses no stack.         *)
(* ------------------------------------------------------------------------- *)

let BIGNUM_AMONTMUL_SUBROUTINE_CORRECT = time prove
 (`!k z x y m a b n pc returnaddress.
        ALL (nonoverlapping (z,8 * val k))
            [(word pc,0x130); (x,8 * val k); (y,8 * val k); (m,8 * val k)]
        ==> ensures arm
             (\s. aligned_bytes_loaded s (word pc) bignum_amontmul_mc /\
                  read PC s = word pc /\
                  read X30 s = returnaddress /\
                  C_ARGUMENTS [k; z; x; y; m] s /\
                  bignum_from_memory (x,val k) s = a /\
                  bignum_from_memory (y,val k) s = b /\
                  bignum_from_memory (m,val k) s = n)
             (\s. read PC s = returnaddress /\
                  (ODD n
                   ==> (bignum_from_memory (z,val k) s ==
                        inverse_mod n (2 EXP (64 * val k)) * a * b) (mod n)))
             (MAYCHANGE [PC; X5; X6; X7; X8; X9; X10; X11; X12; X13; X14] ,,
              MAYCHANGE [memory :> bytes(z,8 * val k)] ,,
              MAYCHANGE SOME_FLAGS)`,
  (* Lift the core correctness result to the returning subroutine form;
     valid because the code never touches the stack pointer. *)
  ARM_ADD_RETURN_NOSTACK_TAC BIGNUM_AMONTMUL_EXEC BIGNUM_AMONTMUL_CORRECT);;
null
https://raw.githubusercontent.com/awslabs/s2n-bignum/824c15f908d7a343af1b2f378cfedd36e880bdde/arm/proofs/bignum_amontmul.ml
ocaml
========================================================================= Almost-Montgomery multiplication of arbitrary bignums. ========================================================================= arm_MUL X13 X9 X14 arm_ADDS X12 X12 X13 arm_MUL X13 X9 X14 arm_ADDS X12 X12 X13 arm_MUL X13 X9 X14 arm_ADDS X12 X12 X13 arm_SBCS X14 X14 X12 ------------------------------------------------------------------------- Proof. ------------------------------------------------------------------------- ** Degenerate k = 0 case ** ** Initial word-level modular inverse ** ** Get a basic bound on k from the nonoverlapping assumptions ** ** Setup of the main loop with zeroing and corrective start/end ** ** This is the main loop invariant: save for later ** ** This is the corrective subtraction part.... ** ** The multiply-add loop ** ** Tidying up a bit ** ** Break at "montend" to share the end reasoning ** ** The final digit write *** ** The final mathematics of the outer loop invariant **
* Copyright Amazon.com , Inc. or its affiliates . All Rights Reserved . * SPDX - License - Identifier : Apache-2.0 OR ISC * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 OR ISC *) * * * print_literal_from_elf " arm / generic / bignum_amontmul.o " ; ; * * * ****) let bignum_amontmul_mc = define_assert_from_elf "bignum_amontmul_mc" "arm/generic/bignum_amontmul.o" [ arm_CBZ X0 ( word 300 ) arm_LDR X14 X4 ( Immediate_Offset ( word 0 ) ) arm_LSL X5 X14 ( rvalue ( word 2 ) ) arm_SUB X5 X14 X5 arm_EOR X5 X5 ( rvalue ( word 2 ) ) arm_MOV X6 ( rvalue ( word 1 ) ) arm_MADD X6 X14 X5 X6 arm_MUL X7 X6 X6 arm_MADD X5 X6 X5 X5 arm_MUL X6 X7 X7 arm_MADD X5 X7 X5 X5 arm_MUL X7 X6 X6 arm_MADD X5 X6 X5 X5 arm_MADD X5 X7 X5 X5 arm_MOV X8 XZR arm_STR XZR X1 ( Shiftreg_Offset X8 3 ) arm_ADD X8 X8 ( rvalue ( word 1 ) ) arm_CMP X8 X0 arm_BCC ( word 2097140 ) arm_MOV X6 XZR arm_MOV X8 XZR arm_LDR X9 X2 ( Shiftreg_Offset X8 3 ) arm_MOV X10 XZR arm_ADDS X11 XZR XZR arm_LDR X14 X3 ( Shiftreg_Offset X10 3 ) arm_LDR X12 X1 ( Shiftreg_Offset X10 3 ) arm_ADCS X12 X12 X11 arm_UMULH X11 arm_ADC X11 X11 XZR arm_STR X12 X1 ( Shiftreg_Offset X10 3 ) arm_ADD X10 X10 ( rvalue ( word 1 ) ) arm_SUB X14 X10 X0 arm_CBNZ X14 ( word 2097112 ) arm_ADCS X6 X6 X11 arm_ADC X7 XZR XZR arm_LDR X12 X1 ( Immediate_Offset ( word 0 ) ) arm_MUL X9 X12 X5 arm_LDR X14 X4 ( Immediate_Offset ( word 0 ) ) arm_UMULH X11 arm_MOV X10 ( rvalue ( word 1 ) ) arm_SUB X14 X0 ( rvalue ( word 1 ) ) arm_CBZ X14 ( word 52 ) arm_LDR X14 X4 ( Shiftreg_Offset X10 3 ) arm_LDR X12 X1 ( Shiftreg_Offset X10 3 ) arm_ADCS X12 X12 X11 arm_UMULH X11 arm_ADC X11 X11 XZR arm_SUB X13 X10 ( rvalue ( word 1 ) ) arm_STR X12 X1 ( Shiftreg_Offset X13 3 ) arm_ADD X10 X10 ( rvalue ( word 1 ) ) arm_SUB X14 X10 X0 arm_CBNZ X14 ( word 2097108 ) arm_ADCS X11 X6 X11 arm_ADC X6 X7 XZR arm_SUB X13 X10 ( rvalue ( word 1 ) ) arm_STR X11 X1 ( Shiftreg_Offset X13 3 ) arm_ADD X8 X8 ( rvalue ( word 1 ) ) 
arm_CMP X8 X0 arm_BCC ( word 2096980 ) arm_NEG X6 X6 arm_NEGS X10 XZR arm_LDR X14 X1 ( Shiftreg_Offset X10 3 ) arm_LDR X12 X4 ( Shiftreg_Offset X10 3 ) arm_AND X12 X12 X6 arm_STR X14 X1 ( Shiftreg_Offset X10 3 ) arm_ADD X10 X10 ( rvalue ( word 1 ) ) arm_SUB X14 X10 X0 arm_CBNZ X14 ( word 2097124 ) arm_RET X30 ];; let BIGNUM_AMONTMUL_EXEC = ARM_MK_EXEC_RULE bignum_amontmul_mc;; * * This actually works mod 32 but if anything this is more convenient * * let WORD_NEGMODINV_SEED_LEMMA_16 = prove (`!a x:int64. ODD a /\ word_xor (word_sub (word a) (word_shl (word a) 2)) (word 2) = x ==> (a * val x + 1 == 0) (mod 16)`, REPEAT STRIP_TAC THEN REWRITE_TAC[CONG; MOD_0] THEN TRANS_TAC EQ_TRANS `(val(word a:int64) MOD 16 * val(x:int64) MOD 16 + 1) MOD 16` THEN REWRITE_TAC[ARITH_RULE `16 = 2 EXP 4`] THEN CONJ_TAC THENL [REWRITE_TAC[VAL_WORD; DIMINDEX_64; MOD_MOD_EXP_MIN] THEN CONV_TAC NUM_REDUCE_CONV THEN CONV_TAC MOD_DOWN_CONV THEN REFL_TAC; REWRITE_TAC[VAL_MOD; NUMSEG_LT; ARITH_EQ; ARITH]] THEN SUBGOAL_THEN `bit 0 (word a:int64)` MP_TAC THENL [ASM_REWRITE_TAC[BIT_LSB_WORD]; EXPAND_TAC "x" THEN POP_ASSUM_LIST(K ALL_TAC) THEN DISCH_TAC] THEN CONV_TAC(ONCE_DEPTH_CONV EXPAND_NSUM_CONV) THEN CONV_TAC(TOP_DEPTH_CONV BIT_WORD_CONV) THEN MAP_EVERY ASM_CASES_TAC [`bit 1 (word a:int64)`;`bit 2 (word a:int64)`;`bit 3 (word a:int64)`] THEN ASM_REWRITE_TAC[BITVAL_CLAUSES] THEN CONV_TAC NUM_REDUCE_CONV);; let BIGNUM_AMONTMUL_CORRECT = time prove (`!k z x y m a b n pc. ALL (nonoverlapping (z,8 * val k)) [(word pc,0x130); (x,8 * val k); (y,8 * val k); (m,8 * val k)] ==> ensures arm (\s. aligned_bytes_loaded s (word pc) bignum_amontmul_mc /\ read PC s = word pc /\ C_ARGUMENTS [k; z; x; y; m] s /\ bignum_from_memory (x,val k) s = a /\ bignum_from_memory (y,val k) s = b /\ bignum_from_memory (m,val k) s = n) (\s. 
read PC s = word(pc + 0x12c) /\ (ODD n ==> (bignum_from_memory (z,val k) s == inverse_mod n (2 EXP (64 * val k)) * a * b) (mod n))) (MAYCHANGE [PC; X5; X6; X7; X8; X9; X10; X11; X12; X13; X14] ,, MAYCHANGE [memory :> bytes(z,8 * val k)] ,, MAYCHANGE SOME_FLAGS)`, W64_GEN_TAC `k:num` THEN MAP_EVERY X_GEN_TAC [`z:int64`; `x:int64`; `y:int64`; `m:int64`] THEN MAP_EVERY X_GEN_TAC [`a:num`; `b:num`; `n:num`; `pc:num`] THEN REWRITE_TAC[ALL; NONOVERLAPPING_CLAUSES] THEN REWRITE_TAC[C_ARGUMENTS; C_RETURN; SOME_FLAGS] THEN STRIP_TAC THEN MAP_EVERY (BIGNUM_TERMRANGE_TAC `k:num`) [`a:num`; `b:num`; `n:num`] THEN ASM_CASES_TAC `k = 0` THENL [UNDISCH_THEN `k = 0` SUBST_ALL_TAC THEN REPEAT(FIRST_X_ASSUM(SUBST_ALL_TAC o MATCH_MP (ARITH_RULE `a < 2 EXP (64 * 0) ==> a = 0`))) THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[ODD]; ALL_TAC] THEN ENSURES_SEQUENCE_TAC `pc + 0x38` `\s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (x,k) s = a /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ (ODD n ==> (n * val(read X5 s) + 1 == 0) (mod (2 EXP 64)))` THEN CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN SUBGOAL_THEN `bignum_from_memory(m,k) s0 = highdigits n 0` MP_TAC THENL [ASM_REWRITE_TAC[HIGHDIGITS_0; BIGNUM_FROM_MEMORY_BYTES]; GEN_REWRITE_TAC LAND_CONV[BIGNUM_FROM_MEMORY_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; ADD_CLAUSES] THEN REWRITE_TAC[GSYM LOWDIGITS_1; lowdigits; MULT_CLAUSES] THEN REWRITE_TAC[GSYM DIMINDEX_64; WORD_MOD_SIZE] THEN REWRITE_TAC[DIMINDEX_64] THEN STRIP_TAC] THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--5) THEN SUBGOAL_THEN `ODD n ==> (n * val (read X5 s5) + 1 == 0) (mod 16)` MP_TAC THENL [ASM_SIMP_TAC[WORD_NEGMODINV_SEED_LEMMA_16]; ALL_TAC] THEN REABBREV_TAC `x0 = read X5 s5` THEN DISCH_TAC THEN ARM_STEPS_TAC 
BIGNUM_AMONTMUL_EXEC (6--14) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[VAL_WORD_MUL; VAL_WORD_ADD; VAL_WORD; DIMINDEX_64; CONG] THEN CONV_TAC MOD_DOWN_CONV THEN REWRITE_TAC[GSYM CONG] THEN SUBST1_TAC(ARITH_RULE `2 EXP 64 = 16 EXP (2 EXP 4)`) THEN DISCH_THEN(fun th -> FIRST_X_ASSUM(MP_TAC o C MATCH_MP th)) THEN SPEC_TAC(`16`,`e:num`) THEN CONV_TAC NUM_REDUCE_CONV THEN CONV_TAC NUMBER_RULE; GHOST_INTRO_TAC `w:num` `\s. val(read X5 s)` THEN REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64]] THEN GLOBALIZE_PRECONDITION_TAC THEN VAL_INT64_TAC `w:num` THEN FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ] NONOVERLAPPING_IMP_SMALL_1)) THEN ANTS_TAC THENL [CONV_TAC NUM_REDUCE_CONV; DISCH_TAC] THEN ENSURES_WHILE_UP_TAC `k:num` `pc + 0x54` `pc + 0xfc` `\i s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory(word_add x (word(8 * i)),k - i) s = highdigits a i /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ ?q r. q < 2 EXP (64 * i) /\ r < 2 EXP (64 * i) /\ 2 EXP (64 * i) * (2 EXP (64 * k) * val(read X6 s) + bignum_from_memory(z,k) s) + r = q * n + lowdigits a i * b /\ (ODD n ==> r = 0)` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [ENSURES_WHILE_UP_TAC `k:num` `pc + 0x3c` `pc + 0x44` `\i s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory (x,k) s = a /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ bignum_from_memory (z,i) s = 0` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[]; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN REWRITE_TAC[VAL_WORD_0; MULT_CLAUSES; ADD_CLAUSES]; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[]; ARM_SIM_TAC BIGNUM_AMONTMUL_EXEC (1--4) THEN ASM_REWRITE_TAC[VAL_WORD_0; MULT_CLAUSES; LOWDIGITS_0; ADD_CLAUSES; SUB_0; WORD_ADD_0; HIGHDIGITS_0] THEN REPEAT(EXISTS_TAC `0`) THEN ARITH_TAC]; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[]; GHOST_INTRO_TAC `cout:num` `\s. 
val(read X6 s)` THEN GHOST_INTRO_TAC `mm:num` `bignum_from_memory(z,k)` THEN BIGNUM_TERMRANGE_TAC `k:num` `mm:num` THEN ASM_SIMP_TAC[LOWDIGITS_SELF] THEN GLOBALIZE_PRECONDITION_TAC THEN ASM_REWRITE_TAC[] THEN FIRST_X_ASSUM(X_CHOOSE_THEN `q:num` (X_CHOOSE_THEN `r:num` STRIP_ASSUME_TAC)) THEN SUBGOAL_THEN `cout < 2` MP_TAC THENL [SUBGOAL_THEN `q * n + a * b < 2 EXP (64 * k) EXP 2 * 2` MP_TAC THENL [MATCH_MP_TAC(ARITH_RULE `x < e * e /\ y < e * e ==> x + y < e EXP 2 * 2`) THEN ASM_SIMP_TAC[LT_MULT2]; FIRST_X_ASSUM(fun th -> GEN_REWRITE_TAC (LAND_CONV o LAND_CONV) [SYM th]) THEN DISCH_THEN(MP_TAC o MATCH_MP (ARITH_RULE `e * (e * c + m) + r < e EXP 2 * 2 ==> e EXP 2 * c < e EXP 2 * 2`)) THEN REWRITE_TAC[LT_MULT_LCANCEL; EXP_EQ_0; ARITH_EQ]]; GEN_REWRITE_TAC LAND_CONV [NUM_AS_BITVAL_ALT]] THEN DISCH_THEN(X_CHOOSE_THEN `c:bool` SUBST_ALL_TAC) THEN REWRITE_TAC[VAL_EQ_BITVAL] THEN ENSURES_WHILE_UP_TAC `k:num` `pc + 0x10c` `pc + 0x124` `\i s. read X0 s = word k /\ read X1 s = z /\ read X4 s = m /\ read X6 s = word_neg(word(bitval c)) /\ read X10 s = word i /\ bignum_from_memory (word_add z (word(8 * i)),k - i) s = highdigits mm i /\ bignum_from_memory (word_add m (word(8 * i)),k - i) s = highdigits n i /\ &(bignum_from_memory(z,i) s):real = &2 pow (64 * i) * &(bitval(~read CF s)) + &(lowdigits mm i) - &(bitval c) * &(lowdigits n i)` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--4) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[WORD_SUB_LZERO; SUB_0; GSYM BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[WORD_ADD_0; MULT_CLAUSES; BITVAL_CLAUSES] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL; LOWDIGITS_0] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; HIGHDIGITS_0] THEN REAL_ARITH_TAC; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN GHOST_INTRO_TAC `cin:bool` `read CF` THEN GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV) 
[BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[SUB_EQ_0; GSYM NOT_LT] THEN REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [4] (1--6) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ) THEN SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REWRITE_TAC[WORD_ADD; WORD_NEG_NEG; VAL_WORD_BITVAL; WORD_BITVAL_EQ_0; LOWDIGITS_CLAUSES; WORD_NEG_EQ_0; BITVAL_BOUND; NOT_LT] THEN REWRITE_TAC[WORD_AND_MASK] THEN COND_CASES_TAC THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ASM_REWRITE_TAC[NOT_LT] THEN SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND; VAL_WORD_0; BITVAL_CLAUSES; ARITH_RULE `64 * (i + 1) = 64 * i + 64`] THEN REWRITE_TAC[REAL_POW_ADD] THEN CONV_TAC REAL_RING; X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]; REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0] THEN DISCH_TAC THEN UNDISCH_TAC `ODD n ==> r = 0` THEN ASM_REWRITE_TAC[] THEN DISCH_THEN SUBST_ALL_TAC THEN RULE_ASSUM_TAC(REWRITE_RULE[ADD_CLAUSES])] THEN FIRST_ASSUM(MATCH_MP_TAC o MATCH_MP (NUMBER_RULE `e * x:num = q * n + ab ==> (i * e == 1) (mod n) /\ (y == x) (mod n) ==> (y == i * ab) (mod n)`)) THEN ASM_REWRITE_TAC[INVERSE_MOD_LMUL_EQ; COPRIME_REXP; COPRIME_2] THEN REWRITE_TAC[num_congruent; GSYM INT_OF_NUM_CLAUSES] THEN MATCH_MP_TAC(INTEGER_RULE `x = (e - n) * c + m ==> (x:int == e * c + m) (mod n)`) THEN MATCH_MP_TAC INT_CONG_IMP_EQ THEN EXISTS_TAC `(&2:int) pow (64 * k)` THEN CONJ_TAC THENL [MATCH_MP_TAC(INT_ARITH `&0 <= x /\ x < 
e /\ &0 <= y /\ y < e ==> abs(x - y:int) < e`) THEN ASM_SIMP_TAC[INT_OF_NUM_CLAUSES; INT_OF_NUM_SUB; LT_IMP_LE; LE_0] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; BIGNUM_FROM_MEMORY_BOUND] THEN UNDISCH_TAC `2 EXP (64 * k) * (2 EXP (64 * k) * bitval c + mm) = q * n + a * b` THEN ASM_CASES_TAC `c:bool` THEN ASM_REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES; BITVAL_CLAUSES] THEN ASM_SIMP_TAC[ARITH_RULE `n:num < e ==> ((e - n) + m < e <=> m < n)`] THEN DISCH_THEN(MP_TAC o MATCH_MP (ARITH_RULE `e * (e + m):num = qn + ab ==> ab < e * e ==> e * m < qn`)) THEN ASM_SIMP_TAC[LT_MULT2] THEN REWRITE_TAC[GSYM NOT_LE; CONTRAPOS_THM] THEN ASM_SIMP_TAC[LE_MULT2; LT_IMP_LE]; REWRITE_TAC[INTEGER_RULE `(z:int == (e - n) * c + m) (mod e) <=> (z + n * c == m) (mod e)`] THEN REWRITE_TAC[INT_OF_NUM_CLAUSES; GSYM num_congruent] THEN REWRITE_TAC[REAL_CONGRUENCE; EXP_EQ_0; ARITH_EQ] THEN ASM_SIMP_TAC[GSYM REAL_OF_NUM_CLAUSES; LOWDIGITS_SELF] THEN REWRITE_TAC[real_sub; GSYM REAL_ADD_ASSOC] THEN REWRITE_TAC[REAL_FIELD `(&2 pow n * x + y) / &2 pow n = x + y / &2 pow n`] THEN REAL_INTEGER_TAC]] THEN X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN GHOST_INTRO_TAC `cout:num` `\s. val(read X6 s)` THEN GHOST_INTRO_TAC `z1:num` `bignum_from_memory(z,k)` THEN BIGNUM_TERMRANGE_TAC `k:num` `z1:num` THEN GLOBALIZE_PRECONDITION_TAC THEN ASM_REWRITE_TAC[] THEN FIRST_X_ASSUM(X_CHOOSE_THEN `q:num` (X_CHOOSE_THEN `r:num` STRIP_ASSUME_TAC)) THEN REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64] THEN GLOBALIZE_PRECONDITION_TAC THEN GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV) [BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[ARITH_RULE `k - i = 0 <=> ~(i < k)`] THEN REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN ENSURES_SEQUENCE_TAC `pc + 0x94` `\s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ ?c1 c0. c0 < 2 EXP 64 /\ read X7 s = word(bitval c1) /\ read X6 s = word c0 /\ 2 EXP (64 * k) * (2 EXP 64 * bitval c1 + c0) + bignum_from_memory (z,k) s = (2 EXP (64 * k) * cout + z1) + bigdigit a i * b` THEN CONJ_TAC THENL [ENSURES_WHILE_UP_TAC `k:num` `pc + 0x60` `pc + 0x84` `\j s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X6 s = word cout /\ read X8 s = word i /\ read X9 s = word(bigdigit a i) /\ read X10 s = word j /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ bignum_from_memory(word_add z (word (8 * j)),k - j) s = highdigits z1 j /\ bignum_from_memory(word_add y (word (8 * j)),k - j) s = highdigits b j /\ 2 EXP (64 * j) * (bitval(read CF s) + val(read X11 s)) + bignum_from_memory(z,j) s = lowdigits z1 j + bigdigit a i * lowdigits b j` THEN ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--3) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES; VAL_WORD_0] THEN REWRITE_TAC[LOWDIGITS_0; BIGNUM_FROM_MEMORY_TRIVIAL] THEN REWRITE_TAC[BITVAL_CLAUSES; ADD_CLAUSES; MULT_CLAUSES; WORD_ADD_0] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; HIGHDIGITS_0; SUB_0]; X_GEN_TAC `j:num` THEN STRIP_TAC THEN VAL_INT64_TAC `j:num` THEN GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hin:int64` `read X11` THEN MP_TAC(GENL [`x:int64`; `a:num`] (ISPECL [`x:int64`; `k - j:num`; `a:num`; `j:num`] BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS)) THEN 
ASM_REWRITE_TAC[ARITH_RULE `k - j = 0 <=> ~(j < k)`] THEN DISCH_THEN(fun th -> ONCE_REWRITE_TAC[th]) THEN REWRITE_TAC[ARITH_RULE `k - j - 1 = k - (j + 1)`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [3;4;6;7] (1--9) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[GSYM WORD_ADD] THEN REWRITE_TAC[LOWDIGITS_CLAUSES] THEN GEN_REWRITE_TAC RAND_CONV [ARITH_RULE `(e * d1 + d0) + c * (e * a1 + a0):num = e * (c * a1 + d1) + d0 + c * a0`] THEN FIRST_X_ASSUM(fun th -> GEN_REWRITE_TAC (RAND_CONV o RAND_CONV) [SYM th]) THEN REWRITE_TAC[EXP_ADD; ARITH_RULE `64 * (j + 1) = 64 * j + 64`] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ) THEN GEN_REWRITE_TAC LAND_CONV [TAUT `p /\ q /\ r /\ s <=> p /\ r /\ q /\ s`] THEN DISCH_THEN(MP_TAC o end_itlist CONJ o DECARRY_RULE o CONJUNCTS) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN CONV_TAC REAL_RING; X_GEN_TAC `j:num` THEN STRIP_TAC THEN VAL_INT64_TAC `j:num` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]; GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hout:int64` `read X11` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN RULE_ASSUM_TAC(REWRITE_RULE[VAL_WORD_SUB_EQ_0]) THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [3] (3--4) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN MAP_EVERY EXISTS_TAC [`carry_s3:bool`; `val(sum_s3:int64)`] THEN ASM_REWRITE_TAC[ADD_CLAUSES; VAL_BOUND_64; WORD_VAL] THEN ASM_REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64] THEN ONCE_REWRITE_TAC[REAL_ARITH `e * (c + h + b) + y:real = e * c + 
e * (b + h) + y`] THEN ASM_REWRITE_TAC[REAL_OF_NUM_CLAUSES] THEN ASM_SIMP_TAC[LOWDIGITS_SELF] THEN ARITH_TAC]; ALL_TAC] THEN GHOST_INTRO_TAC `z2:num` `bignum_from_memory(z,k)` THEN BIGNUM_TERMRANGE_TAC `k:num` `z2:num` THEN GHOST_INTRO_TAC `g6:int64` `read X6` THEN GHOST_INTRO_TAC `g7:int64` `read X7` THEN GLOBALIZE_PRECONDITION_TAC THEN ASM_REWRITE_TAC[] THEN FIRST_X_ASSUM(X_CHOOSE_THEN `c1:bool` (X_CHOOSE_THEN `c0:num` STRIP_ASSUME_TAC)) THEN UNDISCH_THEN `g6:int64 = word c0` SUBST_ALL_TAC THEN UNDISCH_THEN `g7:int64 = word(bitval c1)` SUBST_ALL_TAC THEN * * The initial prelude of the reduction * * ABBREV_TAC `q0 = (w * z2) MOD 2 EXP 64` THEN SUBGOAL_THEN `q0 < 2 EXP 64 /\ val(word q0:int64) = q0` STRIP_ASSUME_TAC THENL [EXPAND_TAC "q0" THEN CONJ_TAC THENL [ARITH_TAC; ALL_TAC] THEN REWRITE_TAC[VAL_WORD; DIMINDEX_64; MOD_MOD_REFL]; ALL_TAC] THEN ENSURES_SEQUENCE_TAC `pc + 0xb4` `\s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X8 s = word i /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ bignum_from_memory (z,k) s = z2 /\ read X6 s = word c0 /\ read X7 s = word (bitval c1) /\ read X9 s = word q0 /\ read X10 s = word 1 /\ read X14 s = word(k - 1) /\ 2 EXP 64 * (bitval(read CF s) + val(read X11 s)) + val(read X12 s) = q0 * bigdigit n 0 + bigdigit z2 0` THEN CONJ_TAC THENL [REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN SUBGOAL_THEN `bignum_from_memory(m,k) s0 = highdigits n 0 /\ bignum_from_memory(z,k) s0 = highdigits z2 0` MP_TAC THENL [ASM_REWRITE_TAC[HIGHDIGITS_0; BIGNUM_FROM_MEMORY_BYTES]; GEN_REWRITE_TAC (LAND_CONV o BINOP_CONV) [BIGNUM_FROM_MEMORY_EQ_HIGHDIGITS] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; ADD_CLAUSES] THEN STRIP_TAC] THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [4;6] (1--8) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN 
MATCH_MP_TAC(TAUT `p /\ (p ==> q) ==> p /\ q`) THEN CONJ_TAC THENL [UNDISCH_THEN `(w * z2) MOD 2 EXP 64 = q0` (SUBST1_TAC o SYM) THEN ONCE_REWRITE_TAC[GSYM WORD_MOD_SIZE] THEN REWRITE_TAC[GSYM LOWDIGITS_1; lowdigits; MULT_CLAUSES] THEN REWRITE_TAC[ADD_CLAUSES; DIMINDEX_64; VAL_WORD] THEN CONV_TAC MOD_DOWN_CONV THEN REWRITE_TAC[MULT_SYM]; DISCH_THEN SUBST_ALL_TAC] THEN ASM_REWRITE_TAC[WORD_SUB; ARITH_RULE `1 <= k <=> ~(k = 0)`] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ o DECARRY_RULE) THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REAL_ARITH_TAC; ALL_TAC] THEN GHOST_INTRO_TAC `r0:num` `\s. val(read X12 s)` THEN REWRITE_TAC[VAL_WORD_GALOIS; DIMINDEX_64] THEN GLOBALIZE_PRECONDITION_TAC THEN ENSURES_SEQUENCE_TAC `pc + 0xe8` `\s. read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X6 s = word c0 /\ read X7 s = word (bitval c1) /\ read X8 s = word i /\ read X9 s = word q0 /\ read X10 s = word k /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ 2 EXP (64 * k) * (bitval(read CF s) + val(read X11 s)) + 2 EXP 64 * bignum_from_memory (z,k - 1) s + r0 = lowdigits z2 k + q0 * lowdigits n k` THEN CONJ_TAC THENL [ASM_CASES_TAC `k = 1` THENL [UNDISCH_THEN `k = 1` SUBST_ALL_TAC THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL] THEN ASM_REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES] THEN REWRITE_TAC[LOWDIGITS_1] THEN ARITH_TAC; ALL_TAC] THEN * * The reduction loop * * VAL_INT64_TAC `k - 1` THEN ENSURES_WHILE_AUP_TAC `1` `k:num` `pc + 0xb8` `pc + 0xe0` `\j s. 
read X0 s = word k /\ read X1 s = z /\ read X2 s = x /\ read X3 s = y /\ read X4 s = m /\ bignum_from_memory (y,k) s = b /\ bignum_from_memory (m,k) s = n /\ read X5 s = word w /\ read X6 s = word c0 /\ read X7 s = word (bitval c1) /\ read X8 s = word i /\ read X9 s = word q0 /\ read X10 s = word j /\ bignum_from_memory (word_add x (word (8 * (i + 1))),k - (i + 1)) s = highdigits a (i + 1) /\ bignum_from_memory(word_add z (word (8 * j)),k - j) s = highdigits z2 j /\ bignum_from_memory(word_add m (word (8 * j)),k - j) s = highdigits n j /\ 2 EXP (64 * j) * (bitval(read CF s) + val(read X11 s)) + 2 EXP 64 * bignum_from_memory(z,j-1) s + r0 = lowdigits z2 j + q0 * lowdigits n j` THEN REPEAT CONJ_TAC THENL [ASM_REWRITE_TAC[ARITH_RULE `1 < k <=> ~(k = 0 \/ k = 1)`]; REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC [1] THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0] THEN ASM_REWRITE_TAC[ARITH_RULE `k <= 1 <=> k = 0 \/ k = 1`] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_DIV; BIGNUM_FROM_MEMORY_TRIVIAL] THEN ASM_REWRITE_TAC[GSYM highdigits; BIGNUM_FROM_MEMORY_BYTES] THEN ASM_REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES; LOWDIGITS_1] THEN ARITH_TAC; X_GEN_TAC `j:num` THEN STRIP_TAC THEN MAP_EVERY VAL_INT64_TAC [`j:num`; `j - 1`] THEN SUBGOAL_THEN `word_sub (word j) (word 1):int64 = word(j - 1)` ASSUME_TAC THENL [ASM_REWRITE_TAC[WORD_SUB]; ALL_TAC] THEN GHOST_INTRO_TAC `cin:bool` `read CF` THEN GHOST_INTRO_TAC `hin:int64` `read X11` THEN MP_TAC(GENL [`x:int64`; `a:num`] (ISPECL [`x:int64`; `k - j:num`; `a:num`; `j:num`] BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS)) THEN ASM_REWRITE_TAC[ARITH_RULE `k - j = 0 <=> ~(j < k)`] THEN DISCH_THEN(fun th -> ONCE_REWRITE_TAC[th]) THEN REWRITE_TAC[ARITH_RULE `k - j - 1 = k - (j + 1)`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN UNDISCH_THEN `val(word 
q0:int64) = q0` (K ALL_TAC) THEN ABBREV_TAC `j' = j - 1` THEN SUBGOAL_THEN `j = j' + 1` SUBST_ALL_TAC THENL [EXPAND_TAC "j'" THEN UNDISCH_TAC `1 <= j` THEN ARITH_TAC; ALL_TAC] THEN RULE_ASSUM_TAC(REWRITE_RULE[ARITH_RULE `(j' + 1) + 1 = j' + 2`]) THEN REWRITE_TAC[ARITH_RULE `(j' + 1) + 1 = j' + 2`] THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [3;4;6;7] (1--10) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN CONJ_TAC THENL [CONV_TAC WORD_RULE; ALL_TAC] THEN REWRITE_TAC[ARITH_RULE `(n + 2) - 1 = n + 1`] THEN REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN SUBGOAL_THEN `j' + 2 = (j' + 1) + 1` MP_TAC THENL [ARITH_TAC; DISCH_THEN SUBST_ALL_TAC] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ONCE_REWRITE_TAC[LOWDIGITS_CLAUSES] THEN GEN_REWRITE_TAC RAND_CONV [ARITH_RULE `(e * d1 + d0) + c * (e * a1 + a0):num = e * (c * a1 + d1) + d0 + c * a0`] THEN FIRST_X_ASSUM(fun th -> GEN_REWRITE_TAC (RAND_CONV o RAND_CONV) [SYM th]) THEN REWRITE_TAC[EXP_ADD; ARITH_RULE `64 * (j + 1) = 64 * j + 64`] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ) THEN GEN_REWRITE_TAC LAND_CONV [TAUT `p /\ q /\ r /\ s <=> p /\ r /\ q /\ s`] THEN DISCH_THEN(MP_TAC o end_itlist CONJ o DECARRY_RULE o CONJUNCTS) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64; BIGDIGIT_BOUND] THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN CONV_TAC REAL_RING; X_GEN_TAC `j:num` THEN STRIP_TAC THEN MAP_EVERY VAL_INT64_TAC [`j:num`; `j - 1`] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]; REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN ARM_STEPS_TAC BIGNUM_AMONTMUL_EXEC (1--2) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[VAL_WORD_SUB_EQ_0]]; ALL_TAC] THEN ASM_SIMP_TAC[LOWDIGITS_SELF] THEN GHOST_INTRO_TAC `cin:bool` `read 
CF` THEN GHOST_INTRO_TAC `hin:int64` `read X11` THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN ENSURES_INIT_TAC "s0" THEN VAL_INT64_TAC `k - 1` THEN SUBGOAL_THEN `word_sub (word k) (word 1):int64 = word(k - 1)` ASSUME_TAC THENL [ASM_REWRITE_TAC[WORD_SUB; ARITH_RULE `1 <= k <=> ~(k = 0)`]; ALL_TAC] THEN ARM_ACCSTEPS_TAC BIGNUM_AMONTMUL_EXEC [1;2] (1--5) THEN ENSURES_FINAL_STATE_TAC THEN ASM_REWRITE_TAC[] THEN CONJ_TAC THENL [CONV_TAC WORD_RULE; ALL_TAC] THEN MAP_EVERY EXISTS_TAC [`2 EXP (64 * i) * q0 + q`; `2 EXP (64 * i) * r0 + r`] THEN GEN_REWRITE_TAC I [CONJ_ASSOC] THEN CONJ_TAC THENL [REWRITE_TAC[EXP_ADD; ARITH_RULE `64 * (i + 1) = 64 * i + 64`] THEN CONJ_TAC THEN MATCH_MP_TAC(ARITH_RULE `q1 < e /\ q0 < ee /\ (q1 < e ==> (q1 + 1) * ee <= e * ee) ==> ee * q1 + q0 < ee * e`) THEN ASM_REWRITE_TAC[LE_MULT_RCANCEL; EXP_EQ_0; ARITH_EQ] THEN ASM_REWRITE_TAC[ARITH_RULE `n + 1 <= m <=> n < m`]; ALL_TAC] THEN CONJ_TAC THENL [SUBGOAL_THEN `8 * k = 8 * ((k - 1) + 1)` SUBST1_TAC THENL [UNDISCH_TAC `~(k = 0)` THEN ARITH_TAC; REWRITE_TAC[GSYM BIGNUM_FROM_MEMORY_BYTES]] THEN REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES] THEN REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC; GSYM EXP_ADD] THEN REWRITE_TAC[GSYM LEFT_ADD_DISTRIB] THEN SUBGOAL_THEN `(i + 1) + (k - 1) = i + k` SUBST1_TAC THENL [UNDISCH_TAC `i:num < k` THEN ARITH_TAC; ALL_TAC] THEN REWRITE_TAC[LEFT_ADD_DISTRIB; EXP_ADD; MULT_CLAUSES] THEN REWRITE_TAC[LOWDIGITS_CLAUSES] THEN REPEAT(FIRST_X_ASSUM(MP_TAC o check (can (term_match [] `2 EXP (64 * k) * x + y = z`) o concl))) THEN REWRITE_TAC[GSYM REAL_OF_NUM_CLAUSES] THEN ACCUMULATOR_POP_ASSUM_LIST(MP_TAC o end_itlist CONJ o DECARRY_RULE) THEN ASM_SIMP_TAC[VAL_WORD_EQ; DIMINDEX_64] THEN DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN CONV_TAC REAL_RING; DISCH_THEN(fun th -> REPEAT(FIRST_X_ASSUM(STRIP_ASSUME_TAC o C MATCH_MP th))) THEN ASM_REWRITE_TAC[ADD_EQ_0; MULT_EQ_0; EXP_EQ_0; ARITH_EQ] THEN MATCH_MP_TAC CONG_IMP_EQ THEN EXISTS_TAC `2 EXP 64` 
THEN ASM_REWRITE_TAC[EXP_LT_0; ARITH_EQ] THEN FIRST_X_ASSUM(MATCH_MP_TAC o MATCH_MP (NUMBER_RULE `ee * x + e * y + r = z ==> e divides ee /\ (z == 0) (mod e) ==> (r == 0) (mod e)`)) THEN CONJ_TAC THENL [MATCH_MP_TAC DIVIDES_EXP_LE_IMP THEN UNDISCH_TAC `~(k = 0)` THEN ARITH_TAC; UNDISCH_THEN `(w * z2) MOD 2 EXP 64 = q0` (SUBST1_TAC o SYM)] THEN REWRITE_TAC[CONG] THEN CONV_TAC MOD_DOWN_CONV THEN REWRITE_TAC[GSYM CONG] THEN MATCH_MP_TAC(NUMBER_RULE `(n * w + 1 == 0) (mod e) ==> (z + (w * z) * n == 0) (mod e)`) THEN ASM_REWRITE_TAC[]]);; let BIGNUM_AMONTMUL_SUBROUTINE_CORRECT = time prove (`!k z x y m a b n pc returnaddress. ALL (nonoverlapping (z,8 * val k)) [(word pc,0x130); (x,8 * val k); (y,8 * val k); (m,8 * val k)] ==> ensures arm (\s. aligned_bytes_loaded s (word pc) bignum_amontmul_mc /\ read PC s = word pc /\ read X30 s = returnaddress /\ C_ARGUMENTS [k; z; x; y; m] s /\ bignum_from_memory (x,val k) s = a /\ bignum_from_memory (y,val k) s = b /\ bignum_from_memory (m,val k) s = n) (\s. read PC s = returnaddress /\ (ODD n ==> (bignum_from_memory (z,val k) s == inverse_mod n (2 EXP (64 * val k)) * a * b) (mod n))) (MAYCHANGE [PC; X5; X6; X7; X8; X9; X10; X11; X12; X13; X14] ,, MAYCHANGE [memory :> bytes(z,8 * val k)] ,, MAYCHANGE SOME_FLAGS)`, ARM_ADD_RETURN_NOSTACK_TAC BIGNUM_AMONTMUL_EXEC BIGNUM_AMONTMUL_CORRECT);;
dba84fddf55e8dca400ed3b607c21cd89df213714323d88ddde7eca18e4cf1c5
kovtun1/DependenciesGraph
utils.ml
let rec sort list = match list with | [] -> [] | element :: tl -> insert element (sort tl) and insert element_to_insert list = match list with | [] -> [element_to_insert] | element :: tl -> if element_to_insert < element then element_to_insert :: element :: tl else element :: insert element_to_insert tl let count element list = let rec count_aux list n = match list with | hd :: tl -> if hd = element then count_aux tl (n + 1) else count_aux tl n | [] -> n in count_aux list 0 let remove_duplicates list = let rec remove_duplicates_aux list new_list = match list with | hd :: tl -> if count hd new_list = 0 then remove_duplicates_aux tl (hd :: new_list) else remove_duplicates_aux tl new_list | [] -> List.rev new_list in remove_duplicates_aux list [] let find_unique src_list dst_list = let rec find_unique_aux src_list unique_src_sublist = match src_list with | hd :: tl -> if List.mem hd dst_list then find_unique_aux tl unique_src_sublist else find_unique_aux tl (hd :: unique_src_sublist) | [] -> List.rev unique_src_sublist in find_unique_aux src_list [] let unwrap_optionals in_list = let rec unwrap_optionals_aux in_list out_list = match in_list with | hd :: tl -> begin match hd with | Some x -> unwrap_optionals_aux tl (x :: out_list) | None -> unwrap_optionals_aux tl out_list end | [] -> List.rev out_list in unwrap_optionals_aux in_list [] let remove_duplicates list = let rec remove_duplicates_aux in_list out_list = match in_list with | element :: tl -> if List.mem element out_list then remove_duplicates_aux tl out_list else remove_duplicates_aux tl (element :: out_list) | [] -> List.rev out_list in remove_duplicates_aux list []
null
https://raw.githubusercontent.com/kovtun1/DependenciesGraph/cec4d2b7a29746ad7b61d76b3662afd5c39f26ff/utils.ml
ocaml
let rec sort list = match list with | [] -> [] | element :: tl -> insert element (sort tl) and insert element_to_insert list = match list with | [] -> [element_to_insert] | element :: tl -> if element_to_insert < element then element_to_insert :: element :: tl else element :: insert element_to_insert tl let count element list = let rec count_aux list n = match list with | hd :: tl -> if hd = element then count_aux tl (n + 1) else count_aux tl n | [] -> n in count_aux list 0 let remove_duplicates list = let rec remove_duplicates_aux list new_list = match list with | hd :: tl -> if count hd new_list = 0 then remove_duplicates_aux tl (hd :: new_list) else remove_duplicates_aux tl new_list | [] -> List.rev new_list in remove_duplicates_aux list [] let find_unique src_list dst_list = let rec find_unique_aux src_list unique_src_sublist = match src_list with | hd :: tl -> if List.mem hd dst_list then find_unique_aux tl unique_src_sublist else find_unique_aux tl (hd :: unique_src_sublist) | [] -> List.rev unique_src_sublist in find_unique_aux src_list [] let unwrap_optionals in_list = let rec unwrap_optionals_aux in_list out_list = match in_list with | hd :: tl -> begin match hd with | Some x -> unwrap_optionals_aux tl (x :: out_list) | None -> unwrap_optionals_aux tl out_list end | [] -> List.rev out_list in unwrap_optionals_aux in_list [] let remove_duplicates list = let rec remove_duplicates_aux in_list out_list = match in_list with | element :: tl -> if List.mem element out_list then remove_duplicates_aux tl out_list else remove_duplicates_aux tl (element :: out_list) | [] -> List.rev out_list in remove_duplicates_aux list []
936cbb6cc2d071b76eda588ef5c22d68bacaca7b28d0a3cc56ca047d45b1e5f5
nubank/workspaces
core.cljs
(ns nubank.workspaces.ui.core (:require [fulcro.client.localized-dom :as dom] [fulcro.client.primitives :as fp] [goog.object :as gobj] [nubank.workspaces.lib.local-storage :as local-storage] [nubank.workspaces.model :as wsm])) (def color-white "#fff") (def color-light-grey "#b1b1b1") (def color-dark-grey "#404040") (def color-red-dark "#ca2c29") (def color-red-light "#f37976") (def color-green-dark "#187d11") (def color-mint-green "#8efd86") (def color-green-light "#61d658") (def color-yellow "#dea54e") (def color-mystic "#d9e2e9") (def color-limed-spruce "#323c47") (def color-geyser "#cdd7e0") (def color-fiord "#4b5b6d") (def color-iron "#e7e8e9") (def classical-colors {::color-scheme "light" ::bg color-white ::primary-text-color "#000" ::error-text-color "#ef0000" ::button-bg color-fiord ::button-active-bg color-fiord ::button-color color-white ::button-disabled-bg "#8c95a0" ::button-disabled-color "#ccc" ::menu-bg color-white ::menu-header-bg color-dark-grey ::menu-header-color color-white ::menu-arrow-bg color-dark-grey ::menu-text "#000" ::tab-active-bg color-white ::tab-bg color-iron ::tab-text color-limed-spruce ::tab-border color-geyser ::tab-text-field-bg "transparent" ::tab-text-field-focus-bg color-white ::workspace-bg "#9fa2ab" ::workspace-tools-bg color-white ::workspace-tools-color color-limed-spruce ::card-default-color-scheme "light" ::card-bg color-white ::card-default-bg color-white ::card-default-text "#000" ::card-toolbar-bg color-geyser ::card-toolbar-default-text color-limed-spruce ::card-toolbar-more-actions "#000" ::card-header-bg color-mystic ::card-header-text color-limed-spruce ::card-ellipsis-menu-bg color-mystic ::spotlight-bg "#e2e2e2" ::spotlight-search-field-bg "#cccbcd" ::spotlight-search-text "#000" ::spotlight-option-text "#1d1d1d" ::spotlight-option-highlight-bg "#e2d610" ::spotlight-option-highlight-text "#000" ::spotlight-option-selected-bg "#582074" ::spotlight-option-selected-text color-white ::welcome-msg-bg color-white 
::welcome-msg-text "#000" ::welcome-container-bg color-dark-grey ::help-dialog-bg "rgba(0, 0, 0, 0.8)" ::test-header-waiting-bg color-yellow ::test-header-running-bg color-yellow ::test-header-success-bg color-mint-green ::test-header-error-bg color-red-dark ::test-header-disabled-bg color-light-grey}) (def dark-colors {::color-scheme "dark" ::bg "#202124" ::primary-text-color "#fafafa" ::error-text-color "#CF6679" ::button-bg "#546E7A" ::button-active-bg "#455A64" ::button-color "#fafafa" ::button-disabled-bg "#8c95a0" ::button-disabled-color "#ccc" ::menu-bg "#202124" ::menu-header-bg "#3f4043" ::menu-header-color "#fafafa" ::menu-arrow-bg "#3f4043" ::menu-text "#fafafa" ::tab-active-bg "#3f4043" ::tab-bg "#202124" ::tab-text "#fafafa" ::tab-border "#3f4043" ::tab-text-field-bg "transparent" ::tab-text-field-focus-bg "#616161" ::workspace-bg "#202124" ::workspace-tools-bg "#3f4043" ::workspace-tools-color "#fafafa" ::card-default-color-scheme "light" ::card-bg "#202124" ::card-default-bg color-white ::card-default-text "#000" ::card-toolbar-bg color-geyser ::card-toolbar-default-text "#000" ::card-toolbar-more-actions "#fafafa" ::card-header-bg "#3f4043" ::card-header-text "#fafafa" ::card-ellipsis-menu-bg "#3f4043" ::spotlight-bg "#202124" ::spotlight-search-field-bg "#3f4043" ::spotlight-search-text "#fafafa" ::spotlight-option-text "#fafafa" ::spotlight-option-highlight-bg "#FFF59D" ::spotlight-option-highlight-text "#000" ::spotlight-option-selected-bg "#546E7A" ::spotlight-option-selected-text "#fafafa" ::welcome-msg-bg "#3f4043" ::welcome-msg-text "#fafafa" ::welcome-container-bg "#202124" ::help-dialog-bg "#3f4043" ::test-header-waiting-bg "#3f4043" ::test-header-running-bg "#3f4043" ::test-header-success-bg "#388E3C" ::test-header-error-bg "#BF360C" ::test-header-disabled-bg "#333333"}) (def default-theme :theme/light) (def theme-name->colors-map {:theme/light classical-colors :theme/dark dark-colors}) (def user-defined-theme (local-storage/get ::theme 
default-theme)) (defn color [color-name] (let [theme (if (= user-defined-theme :theme/auto) (if (gobj/get (js/matchMedia "(prefers-color-scheme: dark)") "matches") :theme/dark :theme/light) user-defined-theme) colors-map (get theme-name->colors-map theme)] (get colors-map color-name))) (def card-border-radius "4px") (def font-helvetica "Helvetica Neue,Arial,Helvetica,sans-serif") (def font-open-sans "'Open Sans', sans-serif") (def font-monospace "monospace") (def font-os12sb {:font-size "12px" :font-family font-open-sans :font-weight "600"}) (defn header-color [card bg] ((::wsm/set-card-header-style card) {:background bg}) nil) (def arrow-right "▶") (def arrow-down "▼") (def box-shadow "0 6px 6px rgba(0, 0, 0, 0.26), 0 10px 20px rgba(0, 0, 0, 0.19), 0 0 2px rgba(0,0,0,0.3)") (def box-shadow-2 "rgba(0, 0, 0, 0.15) 0px 1px 4px, rgba(0, 0, 0, 0.15) 0px 1px 1px") (def close-icon-css {:cursor "pointer" :font-size "23px" :line-height "1em"}) (defn more-icon [props] (dom/svg (merge {:width 20 :height 19 :viewBox "0 0 40 40"} props) (dom/g {:fill (color ::card-toolbar-more-actions)} (dom/path {:d "m20 26.6c1.8 0 3.4 1.6 3.4 3.4s-1.6 3.4-3.4 3.4-3.4-1.6-3.4-3.4 1.6-3.4 3.4-3.4z m0-10c1.8 0 3.4 1.6 3.4 3.4s-1.6 3.4-3.4 3.4-3.4-1.6-3.4-3.4 1.6-3.4 3.4-3.4z m0-3.2c-1.8 0-3.4-1.6-3.4-3.4s1.6-3.4 3.4-3.4 3.4 1.6 3.4 3.4-1.6 3.4-3.4 3.4z"})))) (fp/defsc Button [this props] {:css [[:.button font-os12sb {:background-color (color ::button-bg) :border "none" :border-radius "2px" :color (color ::button-color) :cursor "pointer" :display "inline-block" :padding "2px 8px" :line-height "1.5" :margin-bottom "0" :text-align "center" :white-space "nowrap" :vertical-align "middle" :user-select "none" :outline "none"} [:&:active {:background (color ::button-active-bg)}] [:&:disabled {:background (color ::button-disabled-bg) :color (color ::button-disabled-color) :cursor "not-allowed"}]]]} (apply dom/button :.button props (fp/children this))) (def button (fp/factory Button)) (fp/defsc CSS [this 
_] {:css-include [Button]})
null
https://raw.githubusercontent.com/nubank/workspaces/c93802388e1d6e1028dda6868d4fece8a2df8ea1/src/nubank/workspaces/ui/core.cljs
clojure
(ns nubank.workspaces.ui.core (:require [fulcro.client.localized-dom :as dom] [fulcro.client.primitives :as fp] [goog.object :as gobj] [nubank.workspaces.lib.local-storage :as local-storage] [nubank.workspaces.model :as wsm])) (def color-white "#fff") (def color-light-grey "#b1b1b1") (def color-dark-grey "#404040") (def color-red-dark "#ca2c29") (def color-red-light "#f37976") (def color-green-dark "#187d11") (def color-mint-green "#8efd86") (def color-green-light "#61d658") (def color-yellow "#dea54e") (def color-mystic "#d9e2e9") (def color-limed-spruce "#323c47") (def color-geyser "#cdd7e0") (def color-fiord "#4b5b6d") (def color-iron "#e7e8e9") (def classical-colors {::color-scheme "light" ::bg color-white ::primary-text-color "#000" ::error-text-color "#ef0000" ::button-bg color-fiord ::button-active-bg color-fiord ::button-color color-white ::button-disabled-bg "#8c95a0" ::button-disabled-color "#ccc" ::menu-bg color-white ::menu-header-bg color-dark-grey ::menu-header-color color-white ::menu-arrow-bg color-dark-grey ::menu-text "#000" ::tab-active-bg color-white ::tab-bg color-iron ::tab-text color-limed-spruce ::tab-border color-geyser ::tab-text-field-bg "transparent" ::tab-text-field-focus-bg color-white ::workspace-bg "#9fa2ab" ::workspace-tools-bg color-white ::workspace-tools-color color-limed-spruce ::card-default-color-scheme "light" ::card-bg color-white ::card-default-bg color-white ::card-default-text "#000" ::card-toolbar-bg color-geyser ::card-toolbar-default-text color-limed-spruce ::card-toolbar-more-actions "#000" ::card-header-bg color-mystic ::card-header-text color-limed-spruce ::card-ellipsis-menu-bg color-mystic ::spotlight-bg "#e2e2e2" ::spotlight-search-field-bg "#cccbcd" ::spotlight-search-text "#000" ::spotlight-option-text "#1d1d1d" ::spotlight-option-highlight-bg "#e2d610" ::spotlight-option-highlight-text "#000" ::spotlight-option-selected-bg "#582074" ::spotlight-option-selected-text color-white ::welcome-msg-bg color-white 
::welcome-msg-text "#000" ::welcome-container-bg color-dark-grey ::help-dialog-bg "rgba(0, 0, 0, 0.8)" ::test-header-waiting-bg color-yellow ::test-header-running-bg color-yellow ::test-header-success-bg color-mint-green ::test-header-error-bg color-red-dark ::test-header-disabled-bg color-light-grey}) (def dark-colors {::color-scheme "dark" ::bg "#202124" ::primary-text-color "#fafafa" ::error-text-color "#CF6679" ::button-bg "#546E7A" ::button-active-bg "#455A64" ::button-color "#fafafa" ::button-disabled-bg "#8c95a0" ::button-disabled-color "#ccc" ::menu-bg "#202124" ::menu-header-bg "#3f4043" ::menu-header-color "#fafafa" ::menu-arrow-bg "#3f4043" ::menu-text "#fafafa" ::tab-active-bg "#3f4043" ::tab-bg "#202124" ::tab-text "#fafafa" ::tab-border "#3f4043" ::tab-text-field-bg "transparent" ::tab-text-field-focus-bg "#616161" ::workspace-bg "#202124" ::workspace-tools-bg "#3f4043" ::workspace-tools-color "#fafafa" ::card-default-color-scheme "light" ::card-bg "#202124" ::card-default-bg color-white ::card-default-text "#000" ::card-toolbar-bg color-geyser ::card-toolbar-default-text "#000" ::card-toolbar-more-actions "#fafafa" ::card-header-bg "#3f4043" ::card-header-text "#fafafa" ::card-ellipsis-menu-bg "#3f4043" ::spotlight-bg "#202124" ::spotlight-search-field-bg "#3f4043" ::spotlight-search-text "#fafafa" ::spotlight-option-text "#fafafa" ::spotlight-option-highlight-bg "#FFF59D" ::spotlight-option-highlight-text "#000" ::spotlight-option-selected-bg "#546E7A" ::spotlight-option-selected-text "#fafafa" ::welcome-msg-bg "#3f4043" ::welcome-msg-text "#fafafa" ::welcome-container-bg "#202124" ::help-dialog-bg "#3f4043" ::test-header-waiting-bg "#3f4043" ::test-header-running-bg "#3f4043" ::test-header-success-bg "#388E3C" ::test-header-error-bg "#BF360C" ::test-header-disabled-bg "#333333"}) (def default-theme :theme/light) (def theme-name->colors-map {:theme/light classical-colors :theme/dark dark-colors}) (def user-defined-theme (local-storage/get ::theme 
default-theme)) (defn color [color-name] (let [theme (if (= user-defined-theme :theme/auto) (if (gobj/get (js/matchMedia "(prefers-color-scheme: dark)") "matches") :theme/dark :theme/light) user-defined-theme) colors-map (get theme-name->colors-map theme)] (get colors-map color-name))) (def card-border-radius "4px") (def font-helvetica "Helvetica Neue,Arial,Helvetica,sans-serif") (def font-open-sans "'Open Sans', sans-serif") (def font-monospace "monospace") (def font-os12sb {:font-size "12px" :font-family font-open-sans :font-weight "600"}) (defn header-color [card bg] ((::wsm/set-card-header-style card) {:background bg}) nil) (def arrow-right "▶") (def arrow-down "▼") (def box-shadow "0 6px 6px rgba(0, 0, 0, 0.26), 0 10px 20px rgba(0, 0, 0, 0.19), 0 0 2px rgba(0,0,0,0.3)") (def box-shadow-2 "rgba(0, 0, 0, 0.15) 0px 1px 4px, rgba(0, 0, 0, 0.15) 0px 1px 1px") (def close-icon-css {:cursor "pointer" :font-size "23px" :line-height "1em"}) (defn more-icon [props] (dom/svg (merge {:width 20 :height 19 :viewBox "0 0 40 40"} props) (dom/g {:fill (color ::card-toolbar-more-actions)} (dom/path {:d "m20 26.6c1.8 0 3.4 1.6 3.4 3.4s-1.6 3.4-3.4 3.4-3.4-1.6-3.4-3.4 1.6-3.4 3.4-3.4z m0-10c1.8 0 3.4 1.6 3.4 3.4s-1.6 3.4-3.4 3.4-3.4-1.6-3.4-3.4 1.6-3.4 3.4-3.4z m0-3.2c-1.8 0-3.4-1.6-3.4-3.4s1.6-3.4 3.4-3.4 3.4 1.6 3.4 3.4-1.6 3.4-3.4 3.4z"})))) (fp/defsc Button [this props] {:css [[:.button font-os12sb {:background-color (color ::button-bg) :border "none" :border-radius "2px" :color (color ::button-color) :cursor "pointer" :display "inline-block" :padding "2px 8px" :line-height "1.5" :margin-bottom "0" :text-align "center" :white-space "nowrap" :vertical-align "middle" :user-select "none" :outline "none"} [:&:active {:background (color ::button-active-bg)}] [:&:disabled {:background (color ::button-disabled-bg) :color (color ::button-disabled-color) :cursor "not-allowed"}]]]} (apply dom/button :.button props (fp/children this))) (def button (fp/factory Button)) (fp/defsc CSS [this 
_] {:css-include [Button]})
6dcdaeea32cb80986a00cff6ec607fd0b1e20d762b1396c07afca023d33cf360
ghc/testsuite
T7697.hs
module T7697 where f :: Int => Int f x = x
null
https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/T7697.hs
haskell
module T7697 where f :: Int => Int f x = x
8785f560d7dd07d2827016e93dd3f83e54640a8b3eb8d4dc1a56c7545ebf27af
puppetlabs/clj-http-client
gzip_request_test.clj
(ns puppetlabs.http.client.gzip-request-test (:import (com.puppetlabs.http.client Sync SimpleRequestOptions ResponseBodyType CompressType) (java.io ByteArrayInputStream FilterInputStream) (java.net URI) (java.util.zip GZIPInputStream)) (:require [clojure.test :refer :all] [cheshire.core :as cheshire] [schema.test :as schema-test] [puppetlabs.http.client.sync :as http-client] [puppetlabs.http.client.test-common :refer :all] [puppetlabs.trapperkeeper.testutils.webserver :as testwebserver])) (use-fixtures :once schema-test/validate-schemas) (defn req-body-app [req] (let [response {:request-content-encoding (get-in req [:headers "content-encoding"]) :request-body-decompressed (slurp (GZIPInputStream. (:body req)) :encoding "utf-8")}] {:status 200 :headers {"Content-Type" "application/json; charset=utf-8"} :body (cheshire/generate-string response)})) (def short-request-body "gzip me�") (def big-request-body (apply str (repeat 4000 "and�i�said�hey�yeah�yeah�whats�going�on"))) (defn string->byte-array-input-stream [source is-closed-atom] (let [bis (-> source (.getBytes) (ByteArrayInputStream.))] (proxy [FilterInputStream] [bis] (close [] (reset! is-closed-atom true) (proxy-super close))))) (defn post-gzip-clj-request [port body] (-> (http-client/post (format ":%d" port) {:body body :headers {"Content-Type" "text/plain; charset=utf-8"} :compress-request-body :gzip :as :text}) :body (cheshire/parse-string true))) (defn post-gzip-java-request [port body] (-> (SimpleRequestOptions. (URI. 
(format ":%d/hello/" port))) (.setBody body) (.setHeaders {"Content-Type" "text/plain; charset=utf-8"}) (.setRequestBodyCompression CompressType/GZIP) (.setAs ResponseBodyType/TEXT) (Sync/post) (.getBody) (cheshire/parse-string true))) (deftest clj-sync-client-gzip-requests (testing "for clojure sync client" (testwebserver/with-test-webserver req-body-app port (testing "short string body is gzipped in request" (let [response (post-gzip-clj-request port short-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))))) (testing "big string body is gzipped in request" (let [response (post-gzip-clj-request port big-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))))) (testing "short inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-clj-request port (string->byte-array-input-stream short-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request"))) (testing "big inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-clj-request port (string->byte-array-input-stream big-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request")))))) (deftest java-sync-client-gzip-requests (testing "for java sync client" (testwebserver/with-test-webserver req-body-app port (testing "short string body is gzipped in request" (let [response (post-gzip-java-request port short-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))))) (testing "big string body is gzipped in request" (let [response 
(post-gzip-java-request port big-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))))) (testing "short inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-java-request port (string->byte-array-input-stream short-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request"))) (testing "big inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-java-request port (string->byte-array-input-stream big-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request")))))) (deftest connect-exception-during-gzip-request-returns-failure (testing "connection exception during gzip request returns failure" (let [is-closed (atom false)] (is (connect-exception-thrown? (http-client/post ":65535" {:body (string->byte-array-input-stream short-request-body is-closed) :compress-request-body :gzip :as :text}))) (is @is-closed "input stream was not closed after request"))))
null
https://raw.githubusercontent.com/puppetlabs/clj-http-client/ec78805007db944b0ef7b4e4212d4969d7d8d0a4/test/puppetlabs/http/client/gzip_request_test.clj
clojure
(ns puppetlabs.http.client.gzip-request-test (:import (com.puppetlabs.http.client Sync SimpleRequestOptions ResponseBodyType CompressType) (java.io ByteArrayInputStream FilterInputStream) (java.net URI) (java.util.zip GZIPInputStream)) (:require [clojure.test :refer :all] [cheshire.core :as cheshire] [schema.test :as schema-test] [puppetlabs.http.client.sync :as http-client] [puppetlabs.http.client.test-common :refer :all] [puppetlabs.trapperkeeper.testutils.webserver :as testwebserver])) (use-fixtures :once schema-test/validate-schemas) (defn req-body-app [req] (let [response {:request-content-encoding (get-in req [:headers "content-encoding"]) :request-body-decompressed (slurp (GZIPInputStream. (:body req)) :encoding "utf-8")}] {:status 200 :headers {"Content-Type" "application/json; charset=utf-8"} :body (cheshire/generate-string response)})) (def short-request-body "gzip me�") (def big-request-body (apply str (repeat 4000 "and�i�said�hey�yeah�yeah�whats�going�on"))) (defn string->byte-array-input-stream [source is-closed-atom] (let [bis (-> source (.getBytes) (ByteArrayInputStream.))] (proxy [FilterInputStream] [bis] (close [] (reset! is-closed-atom true) (proxy-super close))))) (defn post-gzip-clj-request [port body] (-> (http-client/post (format ":%d" port) {:body body :headers {"Content-Type" "text/plain; charset=utf-8"} :compress-request-body :gzip :as :text}) :body (cheshire/parse-string true))) (defn post-gzip-java-request [port body] (-> (SimpleRequestOptions. (URI. 
(format ":%d/hello/" port))) (.setBody body) (.setHeaders {"Content-Type" "text/plain; charset=utf-8"}) (.setRequestBodyCompression CompressType/GZIP) (.setAs ResponseBodyType/TEXT) (Sync/post) (.getBody) (cheshire/parse-string true))) (deftest clj-sync-client-gzip-requests (testing "for clojure sync client" (testwebserver/with-test-webserver req-body-app port (testing "short string body is gzipped in request" (let [response (post-gzip-clj-request port short-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))))) (testing "big string body is gzipped in request" (let [response (post-gzip-clj-request port big-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))))) (testing "short inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-clj-request port (string->byte-array-input-stream short-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request"))) (testing "big inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-clj-request port (string->byte-array-input-stream big-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request")))))) (deftest java-sync-client-gzip-requests (testing "for java sync client" (testwebserver/with-test-webserver req-body-app port (testing "short string body is gzipped in request" (let [response (post-gzip-java-request port short-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))))) (testing "big string body is gzipped in request" (let [response 
(post-gzip-java-request port big-request-body)] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))))) (testing "short inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-java-request port (string->byte-array-input-stream short-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= short-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request"))) (testing "big inputstream body is gzipped in request" (let [is-closed (atom false) response (post-gzip-java-request port (string->byte-array-input-stream big-request-body is-closed))] (is (= "gzip" (:request-content-encoding response))) (is (= big-request-body (:request-body-decompressed response))) (is @is-closed "input stream was not closed after request")))))) (deftest connect-exception-during-gzip-request-returns-failure (testing "connection exception during gzip request returns failure" (let [is-closed (atom false)] (is (connect-exception-thrown? (http-client/post ":65535" {:body (string->byte-array-input-stream short-request-body is-closed) :compress-request-body :gzip :as :text}))) (is @is-closed "input stream was not closed after request"))))
d0a66120afae124ade585cc64421a375e062df3c3ba59d234cfe797c67cc65d3
geophf/1HaskellADay
Solution.hs
# LANGUAGE OverloadedStrings , QuasiQuotes # module Y2017.M09.D26.Solution where import qualified Codec.Compression.GZip as GZ import Data.Aeson import Data.Aeson.Encode.Pretty import Data.ByteString.Lazy.Char8 (ByteString) import qualified Data.ByteString.Lazy.Char8 as BL import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe (mapMaybe) import Database.PostgreSQL.Simple import Database.PostgreSQL.Simple.SqlQQ import Database.PostgreSQL.Simple.ToRow import Database.PostgreSQL.Simple.ToField import Network.HTTP.Conduit below imports available via 1HaskellADay git repository import Store.SQL.Connection (connectInfo) import Store.SQL.Util.Inserts (inserter) import Y2017.M09.D22.Solution (scanArticles, dir, arts, rawText, articleTextById) import Y2017.M09.D25.Solution (parseArticle, metadata, Article, srcId) - So , I wrote a whole exercise for today , which you will see as tomorrow 's exercise , instead , because then I realized that it was too much for one day without some introduction . So , today 's problem : a name , by any other name , would smell as sweet . A datum from Friday 's exercise is " Person " : > > > articles < - scanArticles . GZ.decompress < $ > BL.readFile ( dir + + arts ) > > > Just art3 = parseArticle 3 ( rawText $ head articles ) > > > Map.lookup " People " ( metadata art3 ) Just " Cuomo , " This is an example where the article is about just one person . As you scan the articles you will see some that are about more than one person and the names will be in various formats . Today we will not worry about formats of name(s ) in the Person field . Because today we 're simply going to store the names . Say you have a staging table in PostgreSQL called name_stg with the following structure : - So, I wrote a whole exercise for today, which you will see as tomorrow's exercise, instead, because then I realized that it was too much for one day without some introduction. 
So, today's Haskell problem: a name, by any other name, would smell as sweet. A datum from Friday's exercise is "Person": >>> articles <- scanArticles . GZ.decompress <$> BL.readFile (dir ++ arts) >>> Just art3 = parseArticle 3 (rawText $ head articles) >>> Map.lookup "People" (metadata art3) Just "Cuomo, Mario M" This is an example where the article is about just one person. As you scan the articles you will see some that are about more than one person and the names will be in various formats. Today we will not worry about formats of name(s) in the Person field. Because today we're simply going to store the names. Say you have a staging table in PostgreSQL called name_stg with the following structure: --} data RawNames = Raw { fromArticle :: Integer, text :: String } deriving (Eq, Ord, Show) -- with our handy insert statement: insertRawNamesStmt :: Query insertRawNamesStmt = [sql|INSERT INTO name_stg (article_id,names) VALUES (?,?)|] -- from the above, derive the below: instance ToRow RawNames where toRow rn = [toField (fromArticle rn),toField (text rn)] -- this looks like an applicative functor definition, but it's weird because the toField ' function ' is actually two different function value types . - Okay , great , and you can use the inserter from before to construct the procedure that inserts RawNames values into PostgreSQL database . Before we do that , we have to convert articles scanned to a list of raw names values . And before we do that , let 's create a function that pipelines the whole process of extracting articles from the archive and reifying those articles to the Y2017.M09.D25.Article type . - Okay, great, and you can use the inserter from before to construct the procedure that inserts RawNames values into PostgreSQL database. Before we do that, we have to convert articles scanned to a list of raw names values. 
And before we do that, let's create a function that pipelines the whole process of extracting articles from the archive and reifying those articles to the Y2017.M09.D25.Article type. --} type Compressed = ByteString -- reminder to me that this is a compressed archive extractArticles :: Compressed -> [Article] extractArticles gz = Map.toList articles >>= uncurry parseArticle where articles = articleTextById . scanArticles $ GZ.decompress gz {-- >>> articles <- extractArticles <$> BL.readFile (dir ++ arts) --} -- then let's grab the line that has the raw names listed from each article art2RawNames :: Article -> Maybe RawNames art2RawNames = fmap . Raw . srcId <*> Map.lookup "People" . metadata -- really? mapping the functor OVER the Applicative functor? REALLY? - > > > names = mapMaybe art2RawNames articles > > > names [ Raw { fromArticle = 1 , text = " , " } , Raw { fromArticle = 2 , text = " , " } , Raw { fromArticle = 3 , text = " , , " } , Raw { fromArticle = 4 , text = " , " } , Raw { fromArticle = 5 , text = " , " } , Raw { fromArticle = 7 , text = " , " } , Raw { fromArticle = 8 , text = " Francis ( Pope ) " } , Raw { fromArticle = 10 , text = " " } , Raw { fromArticle = 11 , text = " , " } ] n.b . : articles 6 and 9 have no people associated with them . Previous attempts with self - critiques : * uncurry fmap . ( Raw . artId & & & Map.lookup " Person " . metadata ) whenever I see the pattern uncurry f . ( this & & & that ) I know I 'm unnecessarily complicating things I mean : why compose a tuple simply to uncurry it ? * fmap ( Raw ( artId art ) ) . Map.lookup " Person " $ metadata art ugh : repeated names ( ' art ' ) . That 's gross . 
- >>> names = mapMaybe art2RawNames articles >>> names [Raw {fromArticle = 1, text = "Cuomo, Mario M"}, Raw {fromArticle = 2, text = "Reagan, Ronald Wilson"}, Raw {fromArticle = 3, text = "Obama, Barack Cameron, David"}, Raw {fromArticle = 4, text = "Armstrong, Karen"}, Raw {fromArticle = 5, text = "Cuomo, Mario M"}, Raw {fromArticle = 7, text = "Rivlin, Reuven"}, Raw {fromArticle = 8, text = "Francis (Pope)"}, Raw {fromArticle = 10, text = "Yingluck Shinawatra"}, Raw {fromArticle = 11, text = "Baraka, Amiri"}] n.b.: articles 6 and 9 have no people associated with them. Previous attempts with self-critiques: * uncurry fmap . (Raw . artId &&& Map.lookup "Person" . metadata) whenever I see the pattern uncurry f . (this &&& that) I know I'm unnecessarily complicating things I mean: why compose a tuple simply to uncurry it? * fmap (Raw (artId art)) . Map.lookup "Person" $ metadata art ugh: repeated names ('art'). That's gross. --} -- and with that transformation function, we can insert raw names from articles insertAllRawNames :: Connection -> [RawNames] -> IO () insertAllRawNames conn = inserter conn insertRawNamesStmt - For all the articles compressed in the archive , ( dir + + arts ) , insert the names from the Person metadata into the names_stg table at the index artId . > > > connectInfo ConnectInfo { connectHost = " ... " ... } > > > conn < - connect it > > > insertAllRawNames conn names > > > close conn How many rows did you insert ? [ low key : your answer should be ' 11 ' ] $ select count(1 ) from name_stg ; 9 [ actually , it was 9 rows , as two articles did n't have associated people ] $ select * from name_stg ; i d article_id names ---------------------------------------------------- 1 1 Cuomo , 2 2 , 3 3 , , 4 Armstrong , 5 Cuomo , 6 7 , 7 8 Francis ( Pope ) 8 10 9 11 , Now : how many names did you insert ? We will address that question tomorrow when we get into some simple name parsers . 
- For all the articles compressed in the archive, (dir ++ arts), insert the names from the Person metadata into the names_stg table at the index artId. >>> connectInfo ConnectInfo {connectHost = "..." ...} >>> conn <- connect it >>> insertAllRawNames conn names >>> close conn How many rows did you insert? [low key: your answer should be '11'] $ select count(1) from name_stg; 9 [actually, it was 9 rows, as two articles didn't have associated people] $ select * from name_stg; id article_id names ---------------------------------------------------- 1 1 Cuomo, Mario M 2 2 Reagan, Ronald Wilson 3 3 Obama, Barack Cameron, David 4 4 Armstrong, Karen 5 5 Cuomo, Mario M 6 7 Rivlin, Reuven 7 8 Francis (Pope) 8 10 Yingluck Shinawatra 9 11 Baraka, Amiri Now: how many names did you insert? We will address that question tomorrow when we get into some simple name parsers. --} - BONUS ----------------------------------------------------------------- Output your RawNames values as JSON . - Output your RawNames values as JSON. --} instance ToJSON RawNames where toJSON rn = object ["fromArticle" .= fromArticle rn, "names" .= text rn] -- BONUS-BONUS ------------------------------------------------------------ -- prettily. - > > > mapM _ ( BL.putStrLn . encodePretty ) names { " names " : " , " , " fromArticle " : 1 } { " names " : " , " , " fromArticle " : 2 } { " names " : " , , " , " fromArticle " : 3 } { " names " : " , " , " fromArticle " : 4 } { " names " : " , " , " fromArticle " : 5 } { " names " : " , " , " fromArticle " : 7 } { " names " : " Francis ( Pope ) " , " fromArticle " : 8 } { " names " : " " , " fromArticle " : 10 } { " names " : " , " , " fromArticle " : 11 } - >>> mapM_ (BL.putStrLn . 
encodePretty) names { "names": "Cuomo, Mario M", "fromArticle": 1 } { "names": "Reagan, Ronald Wilson", "fromArticle": 2 } { "names": "Obama, Barack Cameron, David", "fromArticle": 3 } { "names": "Armstrong, Karen", "fromArticle": 4 } { "names": "Cuomo, Mario M", "fromArticle": 5 } { "names": "Rivlin, Reuven", "fromArticle": 7 } { "names": "Francis (Pope)", "fromArticle": 8 } { "names": "Yingluck Shinawatra", "fromArticle": 10 } { "names": "Baraka, Amiri", "fromArticle": 11 } --}
null
https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2017/M09/D26/Solution.hs
haskell
} with our handy insert statement: from the above, derive the below: this looks like an applicative functor definition, but it's weird because } reminder to me that this is a compressed archive - >>> articles <- extractArticles <$> BL.readFile (dir ++ arts) - then let's grab the line that has the raw names listed from each article really? mapping the functor OVER the Applicative functor? REALLY? } and with that transformation function, we can insert raw names from articles -------------------------------------------------- -------------------------------------------------- } --------------------------------------------------------------- } BONUS-BONUS ------------------------------------------------------------ prettily. }
# LANGUAGE OverloadedStrings , QuasiQuotes # module Y2017.M09.D26.Solution where import qualified Codec.Compression.GZip as GZ import Data.Aeson import Data.Aeson.Encode.Pretty import Data.ByteString.Lazy.Char8 (ByteString) import qualified Data.ByteString.Lazy.Char8 as BL import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe (mapMaybe) import Database.PostgreSQL.Simple import Database.PostgreSQL.Simple.SqlQQ import Database.PostgreSQL.Simple.ToRow import Database.PostgreSQL.Simple.ToField import Network.HTTP.Conduit below imports available via 1HaskellADay git repository import Store.SQL.Connection (connectInfo) import Store.SQL.Util.Inserts (inserter) import Y2017.M09.D22.Solution (scanArticles, dir, arts, rawText, articleTextById) import Y2017.M09.D25.Solution (parseArticle, metadata, Article, srcId) - So , I wrote a whole exercise for today , which you will see as tomorrow 's exercise , instead , because then I realized that it was too much for one day without some introduction . So , today 's problem : a name , by any other name , would smell as sweet . A datum from Friday 's exercise is " Person " : > > > articles < - scanArticles . GZ.decompress < $ > BL.readFile ( dir + + arts ) > > > Just art3 = parseArticle 3 ( rawText $ head articles ) > > > Map.lookup " People " ( metadata art3 ) Just " Cuomo , " This is an example where the article is about just one person . As you scan the articles you will see some that are about more than one person and the names will be in various formats . Today we will not worry about formats of name(s ) in the Person field . Because today we 're simply going to store the names . Say you have a staging table in PostgreSQL called name_stg with the following structure : - So, I wrote a whole exercise for today, which you will see as tomorrow's exercise, instead, because then I realized that it was too much for one day without some introduction. 
So, today's Haskell problem: a name, by any other name, would smell as sweet. A datum from Friday's exercise is "Person": >>> articles <- scanArticles . GZ.decompress <$> BL.readFile (dir ++ arts) >>> Just art3 = parseArticle 3 (rawText $ head articles) >>> Map.lookup "People" (metadata art3) Just "Cuomo, Mario M" This is an example where the article is about just one person. As you scan the articles you will see some that are about more than one person and the names will be in various formats. Today we will not worry about formats of name(s) in the Person field. Because today we're simply going to store the names. Say you have a staging table in PostgreSQL called name_stg with the following structure: data RawNames = Raw { fromArticle :: Integer, text :: String } deriving (Eq, Ord, Show) insertRawNamesStmt :: Query insertRawNamesStmt = [sql|INSERT INTO name_stg (article_id,names) VALUES (?,?)|] instance ToRow RawNames where toRow rn = [toField (fromArticle rn),toField (text rn)] the toField ' function ' is actually two different function value types . - Okay , great , and you can use the inserter from before to construct the procedure that inserts RawNames values into PostgreSQL database . Before we do that , we have to convert articles scanned to a list of raw names values . And before we do that , let 's create a function that pipelines the whole process of extracting articles from the archive and reifying those articles to the Y2017.M09.D25.Article type . - Okay, great, and you can use the inserter from before to construct the procedure that inserts RawNames values into PostgreSQL database. Before we do that, we have to convert articles scanned to a list of raw names values. And before we do that, let's create a function that pipelines the whole process of extracting articles from the archive and reifying those articles to the Y2017.M09.D25.Article type. 
type Compressed = ByteString extractArticles :: Compressed -> [Article] extractArticles gz = Map.toList articles >>= uncurry parseArticle where articles = articleTextById . scanArticles $ GZ.decompress gz art2RawNames :: Article -> Maybe RawNames art2RawNames = fmap . Raw . srcId <*> Map.lookup "People" . metadata - > > > names = mapMaybe art2RawNames articles > > > names [ Raw { fromArticle = 1 , text = " , " } , Raw { fromArticle = 2 , text = " , " } , Raw { fromArticle = 3 , text = " , , " } , Raw { fromArticle = 4 , text = " , " } , Raw { fromArticle = 5 , text = " , " } , Raw { fromArticle = 7 , text = " , " } , Raw { fromArticle = 8 , text = " Francis ( Pope ) " } , Raw { fromArticle = 10 , text = " " } , Raw { fromArticle = 11 , text = " , " } ] n.b . : articles 6 and 9 have no people associated with them . Previous attempts with self - critiques : * uncurry fmap . ( Raw . artId & & & Map.lookup " Person " . metadata ) whenever I see the pattern uncurry f . ( this & & & that ) I know I 'm unnecessarily complicating things I mean : why compose a tuple simply to uncurry it ? * fmap ( Raw ( artId art ) ) . Map.lookup " Person " $ metadata art ugh : repeated names ( ' art ' ) . That 's gross . - >>> names = mapMaybe art2RawNames articles >>> names [Raw {fromArticle = 1, text = "Cuomo, Mario M"}, Raw {fromArticle = 2, text = "Reagan, Ronald Wilson"}, Raw {fromArticle = 3, text = "Obama, Barack Cameron, David"}, Raw {fromArticle = 4, text = "Armstrong, Karen"}, Raw {fromArticle = 5, text = "Cuomo, Mario M"}, Raw {fromArticle = 7, text = "Rivlin, Reuven"}, Raw {fromArticle = 8, text = "Francis (Pope)"}, Raw {fromArticle = 10, text = "Yingluck Shinawatra"}, Raw {fromArticle = 11, text = "Baraka, Amiri"}] n.b.: articles 6 and 9 have no people associated with them. Previous attempts with self-critiques: * uncurry fmap . (Raw . artId &&& Map.lookup "Person" . metadata) whenever I see the pattern uncurry f . 
(this &&& that) I know I'm unnecessarily complicating things I mean: why compose a tuple simply to uncurry it? * fmap (Raw (artId art)) . Map.lookup "Person" $ metadata art ugh: repeated names ('art'). That's gross. insertAllRawNames :: Connection -> [RawNames] -> IO () insertAllRawNames conn = inserter conn insertRawNamesStmt - For all the articles compressed in the archive , ( dir + + arts ) , insert the names from the Person metadata into the names_stg table at the index artId . > > > connectInfo ConnectInfo { connectHost = " ... " ... } > > > conn < - connect it > > > insertAllRawNames conn names > > > close conn How many rows did you insert ? [ low key : your answer should be ' 11 ' ] $ select count(1 ) from name_stg ; 9 [ actually , it was 9 rows , as two articles did n't have associated people ] $ select * from name_stg ; i d article_id names 1 1 Cuomo , 2 2 , 3 3 , , 4 Armstrong , 5 Cuomo , 6 7 , 7 8 Francis ( Pope ) 8 10 9 11 , Now : how many names did you insert ? We will address that question tomorrow when we get into some simple name parsers . - For all the articles compressed in the archive, (dir ++ arts), insert the names from the Person metadata into the names_stg table at the index artId. >>> connectInfo ConnectInfo {connectHost = "..." ...} >>> conn <- connect it >>> insertAllRawNames conn names >>> close conn How many rows did you insert? [low key: your answer should be '11'] $ select count(1) from name_stg; 9 [actually, it was 9 rows, as two articles didn't have associated people] $ select * from name_stg; id article_id names 1 1 Cuomo, Mario M 2 2 Reagan, Ronald Wilson 3 3 Obama, Barack Cameron, David 4 4 Armstrong, Karen 5 5 Cuomo, Mario M 6 7 Rivlin, Reuven 7 8 Francis (Pope) 8 10 Yingluck Shinawatra 9 11 Baraka, Amiri Now: how many names did you insert? We will address that question tomorrow when we get into some simple name parsers. Output your RawNames values as JSON . - Output your RawNames values as JSON. 
instance ToJSON RawNames where toJSON rn = object ["fromArticle" .= fromArticle rn, "names" .= text rn] - > > > mapM _ ( BL.putStrLn . encodePretty ) names { " names " : " , " , " fromArticle " : 1 } { " names " : " , " , " fromArticle " : 2 } { " names " : " , , " , " fromArticle " : 3 } { " names " : " , " , " fromArticle " : 4 } { " names " : " , " , " fromArticle " : 5 } { " names " : " , " , " fromArticle " : 7 } { " names " : " Francis ( Pope ) " , " fromArticle " : 8 } { " names " : " " , " fromArticle " : 10 } { " names " : " , " , " fromArticle " : 11 } - >>> mapM_ (BL.putStrLn . encodePretty) names { "names": "Cuomo, Mario M", "fromArticle": 1 } { "names": "Reagan, Ronald Wilson", "fromArticle": 2 } { "names": "Obama, Barack Cameron, David", "fromArticle": 3 } { "names": "Armstrong, Karen", "fromArticle": 4 } { "names": "Cuomo, Mario M", "fromArticle": 5 } { "names": "Rivlin, Reuven", "fromArticle": 7 } { "names": "Francis (Pope)", "fromArticle": 8 } { "names": "Yingluck Shinawatra", "fromArticle": 10 } { "names": "Baraka, Amiri", "fromArticle": 11 }
2a28b94e7c3243fc7dc457f38a72d5c1067aed600d078769765b915e0fe3f60d
mitsuji/mssql-simple
Template.hs
{-# OPTIONS_HADDOCK hide #-} # LANGUAGE CPP # # LANGUAGE TemplateHaskell # module Database.MSSQLServer.Query.Template ( rowTupleQ , resultSetTupleQ , rpcResponseSetTupleQ , rpcOutputSetTupleQ , rpcResultSetTupleQ , rpcQuerySetTupleQ , rpcParamSetTupleQ ) where import Data.Monoid((<>)) import Database.Tds.Message import Language.Haskell.TH import Data.List (foldl') rowTupleQ :: Int -> Q Dec rowTupleQ n = return $ rowTuple n rowTuple :: Int -> Dec rowTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i ->AppT (ConT ''Data) (VarT (mkName $ "a" <> show i))) [1..n]) #else (map (\i ->ClassP ''Data [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "Row")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n])) [FunD (mkName "fromListOfRawBytes") [ Clause [ ListP (map (\i ->VarP (mkName $ "m" <> show i)) [1..n]) , ListP (map (\i ->VarP (mkName $ "b" <> show i)) [1..n]) ] #if MIN_VERSION_template_haskell(2,16,0) (NormalB (TupE (map (\i ->Just $ VarE (mkName $ "d" <> show i)) [1..n]) )) #else (NormalB (TupE (map (\i ->VarE (mkName $ "d" <> show i)) [1..n]) )) #endif (map d [1..n]) , Clause [WildP,WildP] (NormalB (AppE (VarE 'error) (LitE (StringL ("fromListOfRawBytes: List length must be " <> show n))) ) ) [] ] ] where d :: Int -> Dec d i = ValD (BangP (VarP (mkName $ "d" <> show i))) (NormalB (AppE (AppE (VarE 'fromRawBytes) (AppE (VarE (mkName "mcdTypeInfo")) (VarE (mkName $ "m" <> show i))) ) (VarE (mkName $ "b" <> show i)) ) ) [] resultSetTupleQ :: Int -> Q Dec resultSetTupleQ n = return $ resultSetTuple n resultSetTuple :: Int -> Dec resultSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i->AppT (ConT (mkName "Result")) (VarT (mkName $ "a" <> (show i)))) [1..n]) #else (map (\i ->ClassP (mkName "Result") [(VarT (mkName $ "a" <> show i))]) [1..n]) 
#endif (AppT (ConT (mkName "ResultSet")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n])) [ValD (VarP (mkName "resultSetParser")) (NormalB (DoE #if MIN_VERSION_template_haskell(2,17,0) Nothing #endif ( (flip map [1..n] $ \i -> BindS (BangP (VarP (mkName $ "r" <> show i ))) (SigE (AppE (VarE (mkName "resultParser")) (if i==n then (ConE 'True) else (ConE 'False)) ) (ForallT #if MIN_VERSION_template_haskell(2,17,0) [PlainTV (mkName $ "a" <> show i) SpecifiedSpec] #else [PlainTV (mkName $ "a" <> show i)] #endif #if MIN_VERSION_template_haskell(2,10,0) [AppT (ConT (mkName "Result")) (VarT (mkName $ "a" <> show i))] #else [ClassP (mkName "Result") [VarT (mkName $ "a" <> show i)]] #endif (AppT (ConT (mkName "Parser'")) (VarT (mkName $ "a" <> show i))) ) ) ) <> #if MIN_VERSION_template_haskell(2,16,0) [(NoBindS (AppE (VarE 'return) (TupE (map (\i->Just $ VarE (mkName $ "r" <> show i)) [1..n]) )) )] #else [(NoBindS (AppE (VarE 'return) (TupE (map (\i->VarE (mkName $ "r" <> show i)) [1..n]) )) )] #endif ) ) ) [] ] rpcResponseSetTupleQ :: Int -> Q Dec rpcResponseSetTupleQ n = return $ rpcResponseSetTuple n rpcResponseSetTuple :: Int -> Dec rpcResponseSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (concatMap (\i->[AppT (ConT (mkName "RpcOutputSet")) (VarT (mkName $ "a" <> show i)) ,AppT (ConT (mkName "RpcResultSet")) (VarT (mkName $ "b" <> show i)) ]) [1..n]) #else (concatMap (\i->[ClassP (mkName "RpcOutputSet") [(VarT (mkName $ "a" <> show i))] ,ClassP (mkName "RpcResultSet") [(VarT (mkName $ "b" <> show i))] ]) [1..n]) #endif (AppT (ConT (mkName "RpcResponseSet")) (foldl' (\x i -> AppT x (AppT (AppT (ConT (mkName "RpcResponse")) (VarT (mkName ("a" <> show i)))) (VarT (mkName ("b" <> show i)) )) ) (TupleT n) [1..n])) [ValD (VarP (mkName "rpcResponseSetParser")) (NormalB (DoE #if MIN_VERSION_template_haskell(2,17,0) Nothing #endif ( (flip map [1..n] $ \i -> 
BindS (BangP (VarP (mkName $ "r" <> show i ))) (AppE (VarE (mkName "rpcResponseParser")) (if i==n then (ConE 'True) else (ConE 'False))) ) <> #if MIN_VERSION_template_haskell(2,16,0) [(NoBindS (AppE (VarE 'return) (TupE (map (\i->Just $ VarE (mkName $ "r" <> show i)) [1..n]) )) )] #else [(NoBindS (AppE (VarE 'return) (TupE (map (\i->VarE (mkName $ "r" <> show i)) [1..n]) )) )] #endif ) ) ) [] ] rpcOutputSetTupleQ :: Int -> Q Dec rpcOutputSetTupleQ n = return $ rpcOutputSetTuple n rpcOutputSetTuple :: Int -> Dec rpcOutputSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i ->AppT (ConT ''Data) (VarT (mkName $ "a" <> show i))) [1..n]) #else (map (\i ->ClassP ''Data [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "RpcOutputSet")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n]) ) [FunD (mkName "fromReturnValues") [Clause [ListP (map (\i ->VarP (mkName $ "r" <> show i)) [1..n])] #if MIN_VERSION_template_haskell(2,16,0) (NormalB (TupE (map (\i ->Just $ VarE (mkName $ "d" <> show i)) [1..n]))) #else (NormalB (TupE (map (\i ->VarE (mkName $ "d" <> show i)) [1..n]))) #endif (map (\i->ValD (BangP (VarP (mkName $ "d" <> show i))) (NormalB (AppE (AppE (VarE (mkName "fromRawBytes")) (AppE (VarE (mkName "rvTypeInfo")) (VarE (mkName $ "r" <> show i))) ) (AppE (VarE (mkName "rvRawBytes")) (VarE (mkName $ "r" <> show i)))) ) [] ) [1..n] ) ,Clause [WildP] (NormalB (AppE (VarE 'error) (LitE (StringL $ "fromReturnValues: List length must be " <> show n)))) [] ] ] rpcResultSetTupleQ :: Int -> Q Dec rpcResultSetTupleQ n = return $ rpcResultSetTuple n rpcResultSetTuple :: Int -> Dec rpcResultSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i->AppT (ConT (mkName "RpcResult")) (VarT (mkName $ "a" <> (show i)))) [1..n]) #else (map (\i ->ClassP (mkName 
"RpcResult") [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "RpcResultSet")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n])) [ValD (VarP (mkName "rpcResultSetParser")) (NormalB (DoE #if MIN_VERSION_template_haskell(2,17,0) Nothing #endif ( (flip map [1..n] $ \i -> BindS (BangP (VarP (mkName $ "r" <> show i ))) (SigE (VarE (mkName "rpcResultParser")) (ForallT #if MIN_VERSION_template_haskell(2,17,0) [PlainTV (mkName $ "a" <> show i) SpecifiedSpec] #else [PlainTV (mkName $ "a" <> show i)] #endif #if MIN_VERSION_template_haskell(2,10,0) [AppT (ConT (mkName "RpcResult")) (VarT (mkName $ "a" <> show i))] #else [ClassP (mkName "RpcResult") [VarT (mkName $ "a" <> show i)]] #endif (AppT (ConT (mkName "Parser'")) (VarT (mkName $ "a" <> show i))) ) ) ) <> #if MIN_VERSION_template_haskell(2,16,0) [(NoBindS (AppE (VarE 'return) (TupE (map (\i->Just $ VarE (mkName $ "r" <> show i)) [1..n]) )) )] #else [(NoBindS (AppE (VarE 'return) (TupE (map (\i->VarE (mkName $ "r" <> show i)) [1..n]) )) )] #endif ) ) ) [] ] rpcQuerySetTupleQ :: Int -> Q Dec rpcQuerySetTupleQ n = return $ rpcQuerySetTuple n rpcQuerySetTuple :: Int -> Dec rpcQuerySetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (concatMap (\i->[AppT (ConT (mkName "RpcQueryId")) (VarT (mkName $ "a" <> show i)) ,AppT (ConT (mkName "RpcParamSet")) (VarT (mkName $ "b" <> show i)) ]) [1..n]) #else (concatMap (\i->[ClassP (mkName "RpcQueryId") [(VarT (mkName $ "a" <> show i))] ,ClassP (mkName "RpcParamSet") [(VarT (mkName $ "b" <> show i))] ]) [1..n]) #endif (AppT (ConT (mkName "RpcQuerySet")) (foldl' (\x i -> AppT x (AppT (AppT (ConT (mkName "RpcQuery")) (VarT (mkName ("a" <> show i)))) (VarT (mkName ("b" <> show i)) )) ) (TupleT n) [1..n])) [FunD (mkName "toRpcRequest") [Clause [TupP (map (\i->ConP (mkName "RpcQuery") [VarP (mkName $ "a" <> show i),VarP (mkName $ "b" <> show i)]) [1..n]) ] 
(NormalB (AppE (ConE (mkName "RpcRequest")) (ListE (map (\i->VarE (mkName $ "r" <> show i)) [1..n])))) (map (\i->ValD (BangP (VarP (mkName $ "r" <> show i))) (NormalB (AppE (AppE (VarE (mkName "toRpcReqBatch")) (VarE (mkName $ "a" <> show i))) (VarE (mkName $ "b" <> show i)))) []) [1..n]) ] ] rpcParamSetTupleQ :: Int -> Q Dec rpcParamSetTupleQ n = return $ rpcParamSetTuple n rpcParamSetTuple :: Int -> Dec rpcParamSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i ->AppT (ConT ''Data) (VarT (mkName $ "a" <> show i))) [1..n]) #else (map (\i ->ClassP ''Data [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "RpcParamSet")) (foldl' (\x i -> AppT x (AppT (ConT (mkName "RpcParam")) (VarT (mkName ("a" <> show i))) ) ) (TupleT n) [1..n])) [FunD (mkName "toRpcReqBatchParams") [Clause [TupP (map (\i->VarP (mkName $ "d" <> show i)) [1..n])] (NormalB (ListE (map (\i ->VarE (mkName $ "p" <> show i)) [1..n]) )) (map (\i->ValD (BangP (VarP (mkName $ "p" <> show i))) (NormalB (AppE (VarE (mkName "rpcReqBatchParam")) (VarE (mkName $ "d" <> show i)))) []) [1..n]) ] ]
null
https://raw.githubusercontent.com/mitsuji/mssql-simple/0c51facf4b86e9340f81485d493acf33dccf26b1/src/Database/MSSQLServer/Query/Template.hs
haskell
# OPTIONS_HADDOCK hide #
# LANGUAGE CPP # # LANGUAGE TemplateHaskell # module Database.MSSQLServer.Query.Template ( rowTupleQ , resultSetTupleQ , rpcResponseSetTupleQ , rpcOutputSetTupleQ , rpcResultSetTupleQ , rpcQuerySetTupleQ , rpcParamSetTupleQ ) where import Data.Monoid((<>)) import Database.Tds.Message import Language.Haskell.TH import Data.List (foldl') rowTupleQ :: Int -> Q Dec rowTupleQ n = return $ rowTuple n rowTuple :: Int -> Dec rowTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i ->AppT (ConT ''Data) (VarT (mkName $ "a" <> show i))) [1..n]) #else (map (\i ->ClassP ''Data [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "Row")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n])) [FunD (mkName "fromListOfRawBytes") [ Clause [ ListP (map (\i ->VarP (mkName $ "m" <> show i)) [1..n]) , ListP (map (\i ->VarP (mkName $ "b" <> show i)) [1..n]) ] #if MIN_VERSION_template_haskell(2,16,0) (NormalB (TupE (map (\i ->Just $ VarE (mkName $ "d" <> show i)) [1..n]) )) #else (NormalB (TupE (map (\i ->VarE (mkName $ "d" <> show i)) [1..n]) )) #endif (map d [1..n]) , Clause [WildP,WildP] (NormalB (AppE (VarE 'error) (LitE (StringL ("fromListOfRawBytes: List length must be " <> show n))) ) ) [] ] ] where d :: Int -> Dec d i = ValD (BangP (VarP (mkName $ "d" <> show i))) (NormalB (AppE (AppE (VarE 'fromRawBytes) (AppE (VarE (mkName "mcdTypeInfo")) (VarE (mkName $ "m" <> show i))) ) (VarE (mkName $ "b" <> show i)) ) ) [] resultSetTupleQ :: Int -> Q Dec resultSetTupleQ n = return $ resultSetTuple n resultSetTuple :: Int -> Dec resultSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i->AppT (ConT (mkName "Result")) (VarT (mkName $ "a" <> (show i)))) [1..n]) #else (map (\i ->ClassP (mkName "Result") [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName 
"ResultSet")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n])) [ValD (VarP (mkName "resultSetParser")) (NormalB (DoE #if MIN_VERSION_template_haskell(2,17,0) Nothing #endif ( (flip map [1..n] $ \i -> BindS (BangP (VarP (mkName $ "r" <> show i ))) (SigE (AppE (VarE (mkName "resultParser")) (if i==n then (ConE 'True) else (ConE 'False)) ) (ForallT #if MIN_VERSION_template_haskell(2,17,0) [PlainTV (mkName $ "a" <> show i) SpecifiedSpec] #else [PlainTV (mkName $ "a" <> show i)] #endif #if MIN_VERSION_template_haskell(2,10,0) [AppT (ConT (mkName "Result")) (VarT (mkName $ "a" <> show i))] #else [ClassP (mkName "Result") [VarT (mkName $ "a" <> show i)]] #endif (AppT (ConT (mkName "Parser'")) (VarT (mkName $ "a" <> show i))) ) ) ) <> #if MIN_VERSION_template_haskell(2,16,0) [(NoBindS (AppE (VarE 'return) (TupE (map (\i->Just $ VarE (mkName $ "r" <> show i)) [1..n]) )) )] #else [(NoBindS (AppE (VarE 'return) (TupE (map (\i->VarE (mkName $ "r" <> show i)) [1..n]) )) )] #endif ) ) ) [] ] rpcResponseSetTupleQ :: Int -> Q Dec rpcResponseSetTupleQ n = return $ rpcResponseSetTuple n rpcResponseSetTuple :: Int -> Dec rpcResponseSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (concatMap (\i->[AppT (ConT (mkName "RpcOutputSet")) (VarT (mkName $ "a" <> show i)) ,AppT (ConT (mkName "RpcResultSet")) (VarT (mkName $ "b" <> show i)) ]) [1..n]) #else (concatMap (\i->[ClassP (mkName "RpcOutputSet") [(VarT (mkName $ "a" <> show i))] ,ClassP (mkName "RpcResultSet") [(VarT (mkName $ "b" <> show i))] ]) [1..n]) #endif (AppT (ConT (mkName "RpcResponseSet")) (foldl' (\x i -> AppT x (AppT (AppT (ConT (mkName "RpcResponse")) (VarT (mkName ("a" <> show i)))) (VarT (mkName ("b" <> show i)) )) ) (TupleT n) [1..n])) [ValD (VarP (mkName "rpcResponseSetParser")) (NormalB (DoE #if MIN_VERSION_template_haskell(2,17,0) Nothing #endif ( (flip map [1..n] $ \i -> BindS (BangP (VarP (mkName $ 
"r" <> show i ))) (AppE (VarE (mkName "rpcResponseParser")) (if i==n then (ConE 'True) else (ConE 'False))) ) <> #if MIN_VERSION_template_haskell(2,16,0) [(NoBindS (AppE (VarE 'return) (TupE (map (\i->Just $ VarE (mkName $ "r" <> show i)) [1..n]) )) )] #else [(NoBindS (AppE (VarE 'return) (TupE (map (\i->VarE (mkName $ "r" <> show i)) [1..n]) )) )] #endif ) ) ) [] ] rpcOutputSetTupleQ :: Int -> Q Dec rpcOutputSetTupleQ n = return $ rpcOutputSetTuple n rpcOutputSetTuple :: Int -> Dec rpcOutputSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i ->AppT (ConT ''Data) (VarT (mkName $ "a" <> show i))) [1..n]) #else (map (\i ->ClassP ''Data [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "RpcOutputSet")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n]) ) [FunD (mkName "fromReturnValues") [Clause [ListP (map (\i ->VarP (mkName $ "r" <> show i)) [1..n])] #if MIN_VERSION_template_haskell(2,16,0) (NormalB (TupE (map (\i ->Just $ VarE (mkName $ "d" <> show i)) [1..n]))) #else (NormalB (TupE (map (\i ->VarE (mkName $ "d" <> show i)) [1..n]))) #endif (map (\i->ValD (BangP (VarP (mkName $ "d" <> show i))) (NormalB (AppE (AppE (VarE (mkName "fromRawBytes")) (AppE (VarE (mkName "rvTypeInfo")) (VarE (mkName $ "r" <> show i))) ) (AppE (VarE (mkName "rvRawBytes")) (VarE (mkName $ "r" <> show i)))) ) [] ) [1..n] ) ,Clause [WildP] (NormalB (AppE (VarE 'error) (LitE (StringL $ "fromReturnValues: List length must be " <> show n)))) [] ] ] rpcResultSetTupleQ :: Int -> Q Dec rpcResultSetTupleQ n = return $ rpcResultSetTuple n rpcResultSetTuple :: Int -> Dec rpcResultSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i->AppT (ConT (mkName "RpcResult")) (VarT (mkName $ "a" <> (show i)))) [1..n]) #else (map (\i ->ClassP (mkName "RpcResult") [(VarT (mkName $ "a" <> 
show i))]) [1..n]) #endif (AppT (ConT (mkName "RpcResultSet")) (foldl' (\x i -> AppT x (VarT (mkName ("a" <> show i)))) (TupleT n) [1..n])) [ValD (VarP (mkName "rpcResultSetParser")) (NormalB (DoE #if MIN_VERSION_template_haskell(2,17,0) Nothing #endif ( (flip map [1..n] $ \i -> BindS (BangP (VarP (mkName $ "r" <> show i ))) (SigE (VarE (mkName "rpcResultParser")) (ForallT #if MIN_VERSION_template_haskell(2,17,0) [PlainTV (mkName $ "a" <> show i) SpecifiedSpec] #else [PlainTV (mkName $ "a" <> show i)] #endif #if MIN_VERSION_template_haskell(2,10,0) [AppT (ConT (mkName "RpcResult")) (VarT (mkName $ "a" <> show i))] #else [ClassP (mkName "RpcResult") [VarT (mkName $ "a" <> show i)]] #endif (AppT (ConT (mkName "Parser'")) (VarT (mkName $ "a" <> show i))) ) ) ) <> #if MIN_VERSION_template_haskell(2,16,0) [(NoBindS (AppE (VarE 'return) (TupE (map (\i->Just $ VarE (mkName $ "r" <> show i)) [1..n]) )) )] #else [(NoBindS (AppE (VarE 'return) (TupE (map (\i->VarE (mkName $ "r" <> show i)) [1..n]) )) )] #endif ) ) ) [] ] rpcQuerySetTupleQ :: Int -> Q Dec rpcQuerySetTupleQ n = return $ rpcQuerySetTuple n rpcQuerySetTuple :: Int -> Dec rpcQuerySetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (concatMap (\i->[AppT (ConT (mkName "RpcQueryId")) (VarT (mkName $ "a" <> show i)) ,AppT (ConT (mkName "RpcParamSet")) (VarT (mkName $ "b" <> show i)) ]) [1..n]) #else (concatMap (\i->[ClassP (mkName "RpcQueryId") [(VarT (mkName $ "a" <> show i))] ,ClassP (mkName "RpcParamSet") [(VarT (mkName $ "b" <> show i))] ]) [1..n]) #endif (AppT (ConT (mkName "RpcQuerySet")) (foldl' (\x i -> AppT x (AppT (AppT (ConT (mkName "RpcQuery")) (VarT (mkName ("a" <> show i)))) (VarT (mkName ("b" <> show i)) )) ) (TupleT n) [1..n])) [FunD (mkName "toRpcRequest") [Clause [TupP (map (\i->ConP (mkName "RpcQuery") [VarP (mkName $ "a" <> show i),VarP (mkName $ "b" <> show i)]) [1..n]) ] (NormalB (AppE (ConE (mkName 
"RpcRequest")) (ListE (map (\i->VarE (mkName $ "r" <> show i)) [1..n])))) (map (\i->ValD (BangP (VarP (mkName $ "r" <> show i))) (NormalB (AppE (AppE (VarE (mkName "toRpcReqBatch")) (VarE (mkName $ "a" <> show i))) (VarE (mkName $ "b" <> show i)))) []) [1..n]) ] ] rpcParamSetTupleQ :: Int -> Q Dec rpcParamSetTupleQ n = return $ rpcParamSetTuple n rpcParamSetTuple :: Int -> Dec rpcParamSetTuple n = #if MIN_VERSION_template_haskell(2,11,0) InstanceD Nothing #else InstanceD #endif #if MIN_VERSION_template_haskell(2,10,0) (map (\i ->AppT (ConT ''Data) (VarT (mkName $ "a" <> show i))) [1..n]) #else (map (\i ->ClassP ''Data [(VarT (mkName $ "a" <> show i))]) [1..n]) #endif (AppT (ConT (mkName "RpcParamSet")) (foldl' (\x i -> AppT x (AppT (ConT (mkName "RpcParam")) (VarT (mkName ("a" <> show i))) ) ) (TupleT n) [1..n])) [FunD (mkName "toRpcReqBatchParams") [Clause [TupP (map (\i->VarP (mkName $ "d" <> show i)) [1..n])] (NormalB (ListE (map (\i ->VarE (mkName $ "p" <> show i)) [1..n]) )) (map (\i->ValD (BangP (VarP (mkName $ "p" <> show i))) (NormalB (AppE (VarE (mkName "rpcReqBatchParam")) (VarE (mkName $ "d" <> show i)))) []) [1..n]) ] ]
dce77829cf43ac8dccb65ce1584f2cb011efaf5b5f297a18c7752fe1b86debf0
ocamllabs/ocaml-scry
time.ml
* Copyright ( c ) 2013 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * * Copyright (c) 2013 David Sheets <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. * *) open Sexplib.Std type t = float [@@deriving sexp] type duration = float [@@deriving sexp] let min = 0. 
let now () = Unix.gettimeofday () let date_to_string_ tm = Unix.( Printf.sprintf "%d-%02d-%02d" (tm.tm_year + 1900) (tm.tm_mon + 1) tm.tm_mday ) let to_string_ tm = Unix.( Printf.sprintf "%sT%02d:%02d:%02dZ" (date_to_string_ tm) tm.tm_hour tm.tm_min tm.tm_sec ) let date_to_string t = date_to_string_ (Unix.gmtime t) let duration_to_string t = Printf.sprintf "%.2fs" t let to_string t = to_string_ (Unix.gmtime t) let elapsed t_0 t_1 = t_1 -. t_0 let is_later dur = dur > 0.
null
https://raw.githubusercontent.com/ocamllabs/ocaml-scry/3ba35317975fe78dab06cd28822219a0eab7c318/lib/time.ml
ocaml
* Copyright ( c ) 2013 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * * Copyright (c) 2013 David Sheets <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. * *) open Sexplib.Std type t = float [@@deriving sexp] type duration = float [@@deriving sexp] let min = 0. 
let now () = Unix.gettimeofday () let date_to_string_ tm = Unix.( Printf.sprintf "%d-%02d-%02d" (tm.tm_year + 1900) (tm.tm_mon + 1) tm.tm_mday ) let to_string_ tm = Unix.( Printf.sprintf "%sT%02d:%02d:%02dZ" (date_to_string_ tm) tm.tm_hour tm.tm_min tm.tm_sec ) let date_to_string t = date_to_string_ (Unix.gmtime t) let duration_to_string t = Printf.sprintf "%.2fs" t let to_string t = to_string_ (Unix.gmtime t) let elapsed t_0 t_1 = t_1 -. t_0 let is_later dur = dur > 0.
7b3cd170e5465b4a0f96c596f9e733268a855145fe255ae715c8c7384c822b56
NorfairKing/template-optparse
OptParseSpec.hs
{-# LANGUAGE OverloadedStrings #-} -- | = Optparse Tests Template -- -- This is a template implementation of commands, flags, options, environment variable and configuration file parsing according to best practices. -- To use this template, follow the instructions below and delete anything you do not need. -- -- == License -- -- This template is __not__ free to use. -- See -syd.eu/template/NorfairKing/template-optparse for more information . -- Copyright ( c ) 2020 . -- All Rights Reserved . -- -- == Instructions -- -- This module contains tests for the 'OptParse' module. -- It is usually not necessary to write tests for your option parsing, but when -- a bug occurs, you definitely want to be able to write regression tests, so in this module we show one test for each of the arguments , environment -- variables and configuration file. module OptParseSpec ( spec, ) where import Data.Yaml as Yaml import qualified Env import OptParse import Options.Applicative import Test.Hspec spec :: Spec spec = do describe "Arguments" $ it "parses 'greet --greeting hello --polite' correctly" $ do let args = ["greet", "--greeting", "hello", "--polite"] case execParserPure prefs_ argParser args of CompletionInvoked _ -> expectationFailure "Completion invoked" Failure err -> expectationFailure $ unlines ["Failed to parse arguments: ", show err] Success a -> a `shouldBe` ( Arguments ( CommandGreet ( GreetArgs { greetArgGreeting = Just "hello" } ) ) ( Flags { flagConfigFile = Nothing, flagPolite = Just True } ) ) describe "Environment" $ it "parses FOO_BAR_GREETING and FOO_BAR_POLITE correctly" $ do let env = [("FOO_BAR_GREETING", "hello"), ("FOO_BAR_POLITE", "True")] case Env.parsePure environmentParser env of Left err -> expectationFailure $ unlines ["Failed to parse environment variables: ", show err] Right e -> e `shouldBe` ( Environment { envConfigFile = Nothing, envGreeting = Just "hello", envPolite = Just True } ) describe "Configuration" $ it "parses 'greeting' and 'polite' 
correctly" $ do let config = object [("greeting", "hello"), ("polite", toJSON True)] case parseEither parseJSON config of Left err -> expectationFailure $ unlines ["Failed to parse configuration: ", show err] Right c -> c `shouldBe` ( Configuration { configPolite = Just True, configGreeting = Just "hello" } )
null
https://raw.githubusercontent.com/NorfairKing/template-optparse/e30850c9506fe5bc4de8264d0da0e0b06b4b5f31/test/OptParseSpec.hs
haskell
# LANGUAGE OverloadedStrings # | = Optparse Tests Template This is a template implementation of commands, flags, options, environment variable and configuration file parsing according to best practices. To use this template, follow the instructions below and delete anything you do not need. == License This template is __not__ free to use. == Instructions This module contains tests for the 'OptParse' module. It is usually not necessary to write tests for your option parsing, but when a bug occurs, you definitely want to be able to write regression tests, so variables and configuration file.
See -syd.eu/template/NorfairKing/template-optparse for more information . Copyright ( c ) 2020 . All Rights Reserved . in this module we show one test for each of the arguments , environment module OptParseSpec ( spec, ) where import Data.Yaml as Yaml import qualified Env import OptParse import Options.Applicative import Test.Hspec spec :: Spec spec = do describe "Arguments" $ it "parses 'greet --greeting hello --polite' correctly" $ do let args = ["greet", "--greeting", "hello", "--polite"] case execParserPure prefs_ argParser args of CompletionInvoked _ -> expectationFailure "Completion invoked" Failure err -> expectationFailure $ unlines ["Failed to parse arguments: ", show err] Success a -> a `shouldBe` ( Arguments ( CommandGreet ( GreetArgs { greetArgGreeting = Just "hello" } ) ) ( Flags { flagConfigFile = Nothing, flagPolite = Just True } ) ) describe "Environment" $ it "parses FOO_BAR_GREETING and FOO_BAR_POLITE correctly" $ do let env = [("FOO_BAR_GREETING", "hello"), ("FOO_BAR_POLITE", "True")] case Env.parsePure environmentParser env of Left err -> expectationFailure $ unlines ["Failed to parse environment variables: ", show err] Right e -> e `shouldBe` ( Environment { envConfigFile = Nothing, envGreeting = Just "hello", envPolite = Just True } ) describe "Configuration" $ it "parses 'greeting' and 'polite' correctly" $ do let config = object [("greeting", "hello"), ("polite", toJSON True)] case parseEither parseJSON config of Left err -> expectationFailure $ unlines ["Failed to parse configuration: ", show err] Right c -> c `shouldBe` ( Configuration { configPolite = Just True, configGreeting = Just "hello" } )
9112862872c9990a62efc68abb152a218c7260918cd55df99129c224a226793f
markcox80/lisp-executable
creation-sbcl.lisp
;; Copyright (c) 2011. All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are
;; met:
;;
;;   - Redistributions of source code must retain the above copyright
;;     notice, this list of conditions and the following disclaimer.
;;
;;   - Redistributions in binary form must reproduce the above copyright
;;     notice, this list of conditions and the following disclaimer in the
;;     documentation and/or other materials provided with the distribution.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
;; "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;; HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
;; LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
;; THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

;; SBCL-specific backend of the lisp-executable creation protocol.
;; This file must only ever be loaded on SBCL.
#-:sbcl (error "Should not be read. 
SBCL only.")

(in-package "LISP-EXECUTABLE.CREATION")

;; Launch a fresh copy of the currently running SBCL image as a child
;; process.  DYNAMIC-SPACE-SIZE and CONTROL-STACK-SIZE (when supplied)
;; are forwarded as SBCL runtime options; :ENVIRONMENT/:ENV keywords
;; are forwarded to SB-EXT:RUN-PROGRAM.  Returns the process object,
;; signalling an error if the child did not reach the :RUNNING state.
(defmethod start-new-lisp-machine (&rest args &key &allow-other-keys)
  (destructuring-bind (&key dynamic-space-size control-stack-size &allow-other-keys) args
    (let* ((run-program-args (remove-from-plist-unless-keys-are args '(:environment :env)))
           (process (apply #'sb-ext:run-program
                           (first sb-ext:*posix-argv*)
                           (append
                            ;; Runtime options
                            (when dynamic-space-size
                              (list "--dynamic-space-size" (format nil "~A" dynamic-space-size)))
                            (when control-stack-size
                              (list "--control-stack-size" (format nil "~A" control-stack-size)))
                            (list "--end-runtime-options")
                            ;; Toplevel options
                            (list "--end-toplevel-options"))
                           :wait nil
                           :input :stream
                           :output *lisp-machine-output-stream*
                           :error *lisp-machine-output-stream*
                           :search t
                           run-program-args)))
      (unless (eql (sb-ext:process-status process) :running)
        (error "Unable to start external SBCL process."))
      (force-output *lisp-machine-output-stream*)
      process)))

;; Return the stream connected to the child's standard input; errors if
;; the child process has already terminated.
(defmethod lisp-machine-input (lisp-machine)
  (unless (eql (sb-ext:process-status lisp-machine) :running)
    (error "Cannot obtain stream to lisp machine as it is no longer running."))
  (sb-ext:process-input lisp-machine))

;; Flush any buffered output, then block until the child terminates.
(defmethod wait-for-lisp-machine (lisp-machine)
  (force-output *lisp-machine-output-stream*)
  (sb-ext:process-wait lisp-machine))

;; Terminate the child process with signal 15 (SIGTERM).
(defmethod kill-lisp-machine (lisp-machine)
  (force-output *lisp-machine-output-stream*)
  (sb-ext:process-kill lisp-machine 15))

;; Build the executable: CODE is evaluated inside a zero-argument lambda
;; that becomes the toplevel function, then the image is dumped to
;; OUTPUT-FILE via SB-EXT:SAVE-LISP-AND-DIE.  This call never returns.
(defmethod save-executable-using-code-and-die (code output-file &rest args &key &allow-other-keys)
  (let* ((save-lisp-and-die-args (remove-from-plist-unless-keys-are args '(:purify :root-structures :environment-name :compression)))
         (function (eval `(lambda () ,code))))
    (apply #'sb-ext:save-lisp-and-die output-file
           :toplevel function
           :executable t
           :save-runtime-options t ;; Needed to inhibit normal runtime option processing.
           save-lisp-and-die-args)))

;; Command-line arguments of the running program, excluding the program
;; name itself.
(defmethod command-line-arguments ()
  (rest sb-ext:*posix-argv*))

;; Exit the current image with the given numeric exit status.
(defmethod lisp-machine-exit (exit-status)
  (sb-ext:exit :code exit-status))

;; On SBCL, saving an executable produces exactly one file.
(defmethod executable-files (output-file)
  (list output-file))

;; Run FUNCTION, turning an interactive interrupt (Ctrl-C) into a clean
;; exit with status 1 instead of dropping into the debugger.
(defmethod do-with-control-c-handled (function)
  (handler-case (funcall function)
    (sb-sys:interactive-interrupt ()
      (lisp-machine-exit 1))))
null
https://raw.githubusercontent.com/markcox80/lisp-executable/989b68ed946e1d99e6e65b7383a64ff035e833c7/src/creation-sbcl.lisp
lisp
All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT LOSS OF USE , DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Runtime options Needed to inhibit normal runtime option processing.
Copyright ( c ) 2011 , " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT #-:sbcl (error "Should not be read. SBCL only.") (in-package "LISP-EXECUTABLE.CREATION") (defmethod start-new-lisp-machine (&rest args &key &allow-other-keys) (destructuring-bind (&key dynamic-space-size control-stack-size &allow-other-keys) args (let* ((run-program-args (remove-from-plist-unless-keys-are args '(:environment :env))) (when dynamic-space-size (list "--dynamic-space-size" (format nil "~A" dynamic-space-size))) (when control-stack-size (list "--control-stack-size" (format nil "~A" control-stack-size))) (list "--end-runtime-options") Toplevel options (list "--end-toplevel-options")) :wait nil :input :stream :output *lisp-machine-output-stream* :error *lisp-machine-output-stream* :search t run-program-args))) (unless (eql (sb-ext:process-status process) :running) (error "Unable to start external SBCL process.")) (force-output *lisp-machine-output-stream*) process))) (defmethod lisp-machine-input (lisp-machine) (unless (eql (sb-ext:process-status lisp-machine) :running) (error "Cannot obtain stream to lisp machine as it is no longer running.")) (sb-ext:process-input lisp-machine)) (defmethod wait-for-lisp-machine (lisp-machine) (force-output *lisp-machine-output-stream*) (sb-ext:process-wait lisp-machine)) (defmethod kill-lisp-machine (lisp-machine) (force-output *lisp-machine-output-stream*) (sb-ext:process-kill lisp-machine 15)) (defmethod save-executable-using-code-and-die (code output-file &rest args &key &allow-other-keys) (let* ((save-lisp-and-die-args (remove-from-plist-unless-keys-are args '(:purify :root-structures :environment-name :compression))) (function (eval `(lambda () ,code)))) (apply #'sb-ext:save-lisp-and-die output-file :toplevel function :executable t 
save-lisp-and-die-args))) (defmethod command-line-arguments () (rest sb-ext:*posix-argv*)) (defmethod lisp-machine-exit (exit-status) (sb-ext:exit :code exit-status)) (defmethod executable-files (output-file) (list output-file)) (defmethod do-with-control-c-handled (function) (handler-case (funcall function) (sb-sys:interactive-interrupt () (lisp-machine-exit 1))))
1e4bb53f4062fbfad937f022dceef38cc9fe987a2d926420f1088de8ed10d3c5
emqx/emqtt
emqtt_cli.erl
%%------------------------------------------------------------------------- Copyright ( c ) 2020 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved . %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%------------------------------------------------------------------------- -module(emqtt_cli). -include("emqtt.hrl"). -export([ main/1 ]). -import(proplists, [get_value/2]). -define(CMD_NAME, "emqtt"). -define(HELP_OPT, [{help, undefined, "help", boolean, "Help information"} ]). -define(CONN_SHORT_OPTS, [{host, $h, "host", {string, "localhost"}, "mqtt server hostname or IP address"}, {port, $p, "port", integer, "mqtt server port number"}, {iface, $I, "iface", string, "specify the network interface or ip address to use"}, {protocol_version, $V, "protocol-version", {atom, 'v5'}, "mqtt protocol version: v3.1 | v3.1.1 | v5"}, {username, $u, "username", string, "username for connecting to server"}, {password, $P, "password", string, "password for connecting to server"}, {clientid, $C, "clientid", string, "client identifier"}, {keepalive, $k, "keepalive", {integer, 300}, "keep alive in seconds"} ]). 
-define(CONN_LONG_OPTS, [{will_topic, undefined, "will-topic", string, "Topic for will message"}, {will_payload, undefined, "will-payload", string, "Payload in will message"}, {will_qos, undefined, "will-qos", {integer, 0}, "QoS for will message"}, {will_retain, undefined, "will-retain", {boolean, false}, "Retain in will message"}, {enable_websocket, undefined, "enable-websocket", {boolean, false}, "Enable websocket transport or not"}, {enable_quic, undefined, "enable-quic", {boolean, false}, "Enable quic transport or not"}, {enable_ssl, undefined, "enable-ssl", {boolean, false}, "Enable ssl/tls or not"}, {tls_version, undefined, "tls-version", {atom, 'tlsv1.2'}, "TLS protocol version used when the client connects to the broker"}, {cafile, undefined, "CAfile", string, "Path to a file containing pem-encoded ca certificates"}, {cert, undefined, "cert", string, "Path to a file containing the user certificate on pem format"}, {key, undefined, "key", string, "Path to the file containing the user's private pem-encoded key"}, {sni, undefined, "sni", string, "Applicable when '--enable_ssl' is in use. " "Use '--sni true' to apply the host name from '-h|--host' option " "as SNI, therwise use the host name to which the server's SSL " "certificate is issued"}, {verify, undefined, "verify", {boolean, false}, "TLS verify option, default: false " } ]). 
-define(PUB_OPTS, ?CONN_SHORT_OPTS ++ [{topic, $t, "topic", string, "mqtt topic on which to publish the message"}, {qos, $q, "qos", {integer, 0}, "qos level of assurance for delivery of an application message"}, {retain, $r, "retain", {boolean, false}, "retain message or not"} ] ++ ?HELP_OPT ++ ?CONN_LONG_OPTS ++ [{payload, undefined, "payload", string, "application message that is being published"}, {file, undefined, "file", string, "file content to publish"}, {repeat, undefined, "repeat", {integer, 1}, "the number of times the message will be repeatedly published"}, {repeat_delay, undefined, "repeat-delay", {integer, 0}, "the number of seconds to wait after the previous message was delivered before publishing the next"} ]). -define(SUB_OPTS, ?CONN_SHORT_OPTS ++ [{topic, $t, "topic", string, "mqtt topic to subscribe to"}, {qos, $q, "qos", {integer, 0}, "maximum qos level at which the server can send application messages to the client"} ] ++ ?HELP_OPT ++ ?CONN_LONG_OPTS ++ [{retain_as_publish, undefined, "retain-as-publish", {boolean, false}, "retain as publih option in subscription options"}, {retain_handling, undefined, "retain-handling", {integer, 0}, "retain handling option in subscription options"}, {print, undefined, "print", string, "'size' to print payload size, 'as-string' to print payload as string"} ]). main(["sub" | Argv]) -> {ok, {Opts, _Args}} = getopt:parse(?SUB_OPTS, Argv), ok = maybe_help(sub, Opts), ok = check_required_args(sub, [topic], Opts), main(sub, Opts); main(["pub" | Argv]) -> {ok, {Opts, _Args}} = getopt:parse(?PUB_OPTS, Argv), ok = maybe_help(pub, Opts), ok = check_required_args(pub, [topic], Opts), Payload = get_value(payload, Opts), File = get_value(file, Opts), case {Payload, File} of {undefined, undefined} -> io:format("Error: missing --payload or --file~n"), halt(1); _ -> ok end, main(pub, Opts); main(_Argv) -> io:format("Usage: ~s pub | sub [--help]~n", [?CMD_NAME]). 
main(PubSub, Opts0) -> application:ensure_all_started(quicer), application:ensure_all_started(emqtt), Print = proplists:get_value(print, Opts0), Opts = proplists:delete(print, Opts0), NOpts = enrich_opts(parse_cmd_opts(Opts)), {ok, Client} = emqtt:start_link(NOpts), ConnRet = case {proplists:get_bool(enable_websocket, NOpts), proplists:get_bool(enable_quic, NOpts)} of {false, false} -> emqtt:connect(Client); {true, false} -> emqtt:ws_connect(Client); {false, true} -> emqtt:quic_connect(Client) end, case ConnRet of {ok, Properties} -> io:format("Client ~s sent CONNECT~n", [get_value(clientid, NOpts)]), case PubSub of pub -> publish(Client, NOpts, proplists:get_value(repeat, Opts)), disconnect(Client, NOpts); sub -> subscribe(Client, NOpts), KeepAlive = maps:get('Server-Keep-Alive', Properties, get_value(keepalive, NOpts)) * 1000, timer:send_interval(KeepAlive, ping), receive_loop(Client, Print) end; {error, Reason} -> io:format("Client ~s failed to sent CONNECT due to ~p~n", [get_value(clientid, NOpts), Reason]) end. publish(Client, Opts, 1) -> do_publish(Client, Opts); publish(Client, Opts, Repeat) -> do_publish(Client, Opts), case proplists:get_value(repeat_delay, Opts) of 0 -> ok; RepeatDelay -> timer:sleep(RepeatDelay * 1000) end, publish(Client, Opts, Repeat - 1). do_publish(Client, Opts) -> case get_value(payload, Opts) of undefined -> File = get_value(file, Opts), case file:read_file(File) of {ok, Bin} -> do_publish(Client, Opts, Bin); {error, Reason} -> io:format("Error: failed_to_read ~s:~nreason=~p", [File, Reason]), halt(1) end; Bin -> do_publish(Client, Opts, Bin) end. 
do_publish(Client, Opts, Payload) -> case emqtt:publish(Client, get_value(topic, Opts), Payload, Opts) of {error, Reason} -> io:format("Client ~s failed to sent PUBLISH due to ~p~n", [get_value(clientid, Opts), Reason]); _ -> io:format("Client ~s sent PUBLISH (Q~p, R~p, D0, Topic=~s, Payload=...(~p bytes))~n", [get_value(clientid, Opts), get_value(qos, Opts), i(get_value(retain, Opts)), get_value(topic, Opts), iolist_size(Payload)]) end. subscribe(Client, Opts) -> case emqtt:subscribe(Client, get_value(topic, Opts), Opts) of {ok, _, [ReasonCode]} when 0 =< ReasonCode andalso ReasonCode =< 2 -> io:format("Client ~s subscribed to ~s~n", [get_value(clientid, Opts), get_value(topic, Opts)]); {ok, _, [ReasonCode]} -> io:format("Client ~s failed to subscribe to ~s due to ~s~n", [get_value(clientid, Opts), get_value(topic, Opts), emqtt:reason_code_name(ReasonCode)]); {error, Reason} -> io:format("Client ~s failed to send SUBSCRIBE due to ~p~n", [get_value(clientid, Opts), Reason]) end. disconnect(Client, Opts) -> case emqtt:disconnect(Client) of ok -> io:format("Client ~s sent DISCONNECT~n", [get_value(clientid, Opts)]); {error, Reason} -> io:format("Client ~s failed to send DISCONNECT due to ~p~n", [get_value(clientid, Opts), Reason]) end. maybe_help(PubSub, Opts) -> case proplists:get_value(help, Opts) of true -> usage(PubSub), halt(0); _ -> ok end. usage(PubSub) -> Opts = case PubSub of pub -> ?PUB_OPTS; sub -> ?SUB_OPTS end, getopt:usage(Opts, ?CMD_NAME ++ " " ++ atom_to_list(PubSub)). check_required_args(PubSub, Keys, Opts) -> lists:foreach(fun(Key) -> case lists:keyfind(Key, 1, Opts) of false -> io:format("Error: '~s' required~n", [Key]), usage(PubSub), halt(1); _ -> ok end end, Keys). parse_cmd_opts(Opts) -> parse_cmd_opts(Opts, []). 
%% Map each getopt result onto the corresponding emqtt client option.
%% TLS-related options are accumulated under ssl_opts and socket options
%% under tcp_opts via maybe_append/3; unknown options are dropped.
parse_cmd_opts([], Acc) ->
    Acc;
parse_cmd_opts([{host, Host} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{host, Host} | Acc]);
parse_cmd_opts([{port, Port} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{port, Port} | Acc]);
%% --iface accepts either a literal IP address or an interface name; for
%% a name, the first IPv4 address of that interface is used.  If the
%% interface cannot be resolved the option is silently ignored.
parse_cmd_opts([{iface, Interface} | Opts], Acc) ->
    NAcc = case inet:parse_address(Interface) of
               {ok, IPAddress0} ->
                   maybe_append(tcp_opts, {ifaddr, IPAddress0}, Acc);
               _ ->
                   case inet:getifaddrs() of
                       {ok, IfAddrs} ->
                           case lists:filter(fun({addr, {_, _, _, _}}) -> true;
                                                (_) -> false
                                             end, proplists:get_value(Interface, IfAddrs, [])) of
                               [{addr, IPAddress0}] ->
                                   maybe_append(tcp_opts, {ifaddr, IPAddress0}, Acc);
                               _ -> Acc
                           end;
                       _ -> Acc
                   end
           end,
    parse_cmd_opts(Opts, NAcc);
parse_cmd_opts([{protocol_version, 'v3.1'} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{proto_ver, v3} | Acc]);
parse_cmd_opts([{protocol_version, 'v3.1.1'} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{proto_ver, v4} | Acc]);
parse_cmd_opts([{protocol_version, 'v5'} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{proto_ver, v5} | Acc]);
parse_cmd_opts([{username, Username} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{username, list_to_binary(Username)} | Acc]);
parse_cmd_opts([{password, Password} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{password, list_to_binary(Password)} | Acc]);
parse_cmd_opts([{clientid, Clientid} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{clientid, list_to_binary(Clientid)} | Acc]);
parse_cmd_opts([{will_topic, Topic} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{will_topic, list_to_binary(Topic)} | Acc]);
parse_cmd_opts([{will_payload, Payload} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{will_payload, list_to_binary(Payload)} | Acc]);
parse_cmd_opts([{will_qos, Qos} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{will_qos, Qos} | Acc]);
parse_cmd_opts([{will_retain, Retain} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{will_retain, Retain} | Acc]);
parse_cmd_opts([{keepalive, I} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{keepalive, I} | Acc]);
parse_cmd_opts([{enable_websocket, Enable} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{enable_websocket, Enable} | Acc]);
parse_cmd_opts([{enable_quic, Enable} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{enable_quic, Enable} | Acc]);
parse_cmd_opts([{enable_ssl, Enable} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{ssl, Enable} | Acc]);
%% Guard fixed: missing whitespace in "'tlsv1.1'orelse".
parse_cmd_opts([{tls_version, Version} | Opts], Acc)
  when Version =:= 'tlsv1' orelse Version =:= 'tlsv1.1' orelse
       Version =:= 'tlsv1.2' orelse Version =:= 'tlsv1.3' ->
    parse_cmd_opts(Opts, maybe_append(ssl_opts, {versions, [Version]}, Acc));
parse_cmd_opts([{cafile, CAFile} | Opts], Acc) ->
    parse_cmd_opts(Opts, maybe_append(ssl_opts, {cacertfile, CAFile}, Acc));
parse_cmd_opts([{cert, Cert} | Opts], Acc) ->
    parse_cmd_opts(Opts, maybe_append(ssl_opts, {certfile, Cert}, Acc));
parse_cmd_opts([{key, Key} | Opts], Acc) ->
    parse_cmd_opts(Opts, maybe_append(ssl_opts, {keyfile, Key}, Acc));
%% NOTE(review): the SNI string is forwarded verbatim; the help text
%% suggests '--sni true' substitutes the host name -- confirm that emqtt
%% treats the literal string "true" specially.
parse_cmd_opts([{sni, SNI} | Opts], Acc) ->
    parse_cmd_opts(Opts, maybe_append(ssl_opts, {server_name_indication, SNI}, Acc));
parse_cmd_opts([{qos, QoS} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{qos, QoS} | Acc]);
parse_cmd_opts([{retain_as_publish, RetainAsPublish} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{rap, RetainAsPublish} | Acc]);
parse_cmd_opts([{retain_handling, RetainHandling} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{rh, RetainHandling} | Acc]);
parse_cmd_opts([{retain, Retain} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{retain, Retain} | Acc]);
parse_cmd_opts([{topic, Topic} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{topic, list_to_binary(Topic)} | Acc]);
parse_cmd_opts([{payload, Payload} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{payload, list_to_binary(Payload)} | Acc]);
parse_cmd_opts([{file, File} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{file, File} | Acc]);
parse_cmd_opts([{repeat, Repeat} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{repeat, Repeat} | Acc]);
parse_cmd_opts([{repeat_delay, RepeatDelay} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{repeat_delay, RepeatDelay} | Acc]);
parse_cmd_opts([{print, WhatToPrint} | Opts], Acc) ->
    parse_cmd_opts(Opts, [{print, WhatToPrint} | Acc]);
parse_cmd_opts([{verify, IsVerify} | Opts], Acc) ->
    V = case IsVerify of
            true -> verify_peer;
            false -> verify_none
        end,
    parse_cmd_opts(Opts, maybe_append(ssl_opts, {verify, V}, Acc));
parse_cmd_opts([_ | Opts], Acc) ->
    parse_cmd_opts(Opts, Acc).

%% Prepend Value to the list stored under Key, creating the entry on
%% first use.  Used to collect the ssl_opts / tcp_opts sublists.
maybe_append(Key, Value, TupleList) ->
    case lists:keytake(Key, 1, TupleList) of
        {value, {Key, OldValue}, NewTupleList} ->
            [{Key, [Value | OldValue]} | NewTupleList];
        false ->
            [{Key, [Value]} | TupleList]
    end.

%% Fill in defaults that depend on other options.
enrich_opts(Opts) ->
    pipeline([fun enrich_clientid_opt/1,
              fun enrich_port_opt/1], Opts).

%% Generate a random client id unless one was supplied.
enrich_clientid_opt(Opts) ->
    case lists:keyfind(clientid, 1, Opts) of
        false -> [{clientid, emqtt:random_client_id()} | Opts];
        _ -> Opts
    end.

%% Default the port from the ssl flag: 8883 for TLS, 1883 otherwise.
%% Relies on getopt always emitting the enable-ssl boolean (it has a
%% default), so the ssl key is guaranteed to be present here.
enrich_port_opt(Opts) ->
    case proplists:get_value(port, Opts) of
        undefined ->
            Port = case proplists:get_value(ssl, Opts) of
                       true -> 8883;
                       false -> 1883
                   end,
            [{port, Port} | Opts];
        _ -> Opts
    end.

%% Left-to-right function pipeline.
pipeline([], Input) ->
    Input;
pipeline([Fun | More], Input) ->
    pipeline(More, erlang:apply(Fun, [Input])).

%% Print incoming PUBLISH payloads and answer the keep-alive timer.
receive_loop(Client, Print) ->
    receive
        {publish, #{payload := Payload}} ->
            case Print of
                "size" -> io:format("received ~p bytes~n", [size(Payload)]);
                _ -> io:format("~s~n", [Payload])
            end,
            receive_loop(Client, Print);
        ping ->
            emqtt:ping(Client),
            receive_loop(Client, Print);
        _Other ->
            receive_loop(Client, Print)
    end.

%% Boolean -> 0/1, used for the Retain column in PUBLISH log lines.
i(true) -> 1;
i(false) -> 0.
null
https://raw.githubusercontent.com/emqx/emqtt/662fdbb4994cee5fa1d9ab6397e0a83cd9c3fb70/src/emqtt_cli.erl
erlang
------------------------------------------------------------------------- you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -------------------------------------------------------------------------
Copyright ( c ) 2020 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved . Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(emqtt_cli). -include("emqtt.hrl"). -export([ main/1 ]). -import(proplists, [get_value/2]). -define(CMD_NAME, "emqtt"). -define(HELP_OPT, [{help, undefined, "help", boolean, "Help information"} ]). -define(CONN_SHORT_OPTS, [{host, $h, "host", {string, "localhost"}, "mqtt server hostname or IP address"}, {port, $p, "port", integer, "mqtt server port number"}, {iface, $I, "iface", string, "specify the network interface or ip address to use"}, {protocol_version, $V, "protocol-version", {atom, 'v5'}, "mqtt protocol version: v3.1 | v3.1.1 | v5"}, {username, $u, "username", string, "username for connecting to server"}, {password, $P, "password", string, "password for connecting to server"}, {clientid, $C, "clientid", string, "client identifier"}, {keepalive, $k, "keepalive", {integer, 300}, "keep alive in seconds"} ]). 
-define(CONN_LONG_OPTS, [{will_topic, undefined, "will-topic", string, "Topic for will message"}, {will_payload, undefined, "will-payload", string, "Payload in will message"}, {will_qos, undefined, "will-qos", {integer, 0}, "QoS for will message"}, {will_retain, undefined, "will-retain", {boolean, false}, "Retain in will message"}, {enable_websocket, undefined, "enable-websocket", {boolean, false}, "Enable websocket transport or not"}, {enable_quic, undefined, "enable-quic", {boolean, false}, "Enable quic transport or not"}, {enable_ssl, undefined, "enable-ssl", {boolean, false}, "Enable ssl/tls or not"}, {tls_version, undefined, "tls-version", {atom, 'tlsv1.2'}, "TLS protocol version used when the client connects to the broker"}, {cafile, undefined, "CAfile", string, "Path to a file containing pem-encoded ca certificates"}, {cert, undefined, "cert", string, "Path to a file containing the user certificate on pem format"}, {key, undefined, "key", string, "Path to the file containing the user's private pem-encoded key"}, {sni, undefined, "sni", string, "Applicable when '--enable_ssl' is in use. " "Use '--sni true' to apply the host name from '-h|--host' option " "as SNI, therwise use the host name to which the server's SSL " "certificate is issued"}, {verify, undefined, "verify", {boolean, false}, "TLS verify option, default: false " } ]). 
-define(PUB_OPTS, ?CONN_SHORT_OPTS ++ [{topic, $t, "topic", string, "mqtt topic on which to publish the message"}, {qos, $q, "qos", {integer, 0}, "qos level of assurance for delivery of an application message"}, {retain, $r, "retain", {boolean, false}, "retain message or not"} ] ++ ?HELP_OPT ++ ?CONN_LONG_OPTS ++ [{payload, undefined, "payload", string, "application message that is being published"}, {file, undefined, "file", string, "file content to publish"}, {repeat, undefined, "repeat", {integer, 1}, "the number of times the message will be repeatedly published"}, {repeat_delay, undefined, "repeat-delay", {integer, 0}, "the number of seconds to wait after the previous message was delivered before publishing the next"} ]). -define(SUB_OPTS, ?CONN_SHORT_OPTS ++ [{topic, $t, "topic", string, "mqtt topic to subscribe to"}, {qos, $q, "qos", {integer, 0}, "maximum qos level at which the server can send application messages to the client"} ] ++ ?HELP_OPT ++ ?CONN_LONG_OPTS ++ [{retain_as_publish, undefined, "retain-as-publish", {boolean, false}, "retain as publih option in subscription options"}, {retain_handling, undefined, "retain-handling", {integer, 0}, "retain handling option in subscription options"}, {print, undefined, "print", string, "'size' to print payload size, 'as-string' to print payload as string"} ]). main(["sub" | Argv]) -> {ok, {Opts, _Args}} = getopt:parse(?SUB_OPTS, Argv), ok = maybe_help(sub, Opts), ok = check_required_args(sub, [topic], Opts), main(sub, Opts); main(["pub" | Argv]) -> {ok, {Opts, _Args}} = getopt:parse(?PUB_OPTS, Argv), ok = maybe_help(pub, Opts), ok = check_required_args(pub, [topic], Opts), Payload = get_value(payload, Opts), File = get_value(file, Opts), case {Payload, File} of {undefined, undefined} -> io:format("Error: missing --payload or --file~n"), halt(1); _ -> ok end, main(pub, Opts); main(_Argv) -> io:format("Usage: ~s pub | sub [--help]~n", [?CMD_NAME]). 
main(PubSub, Opts0) -> application:ensure_all_started(quicer), application:ensure_all_started(emqtt), Print = proplists:get_value(print, Opts0), Opts = proplists:delete(print, Opts0), NOpts = enrich_opts(parse_cmd_opts(Opts)), {ok, Client} = emqtt:start_link(NOpts), ConnRet = case {proplists:get_bool(enable_websocket, NOpts), proplists:get_bool(enable_quic, NOpts)} of {false, false} -> emqtt:connect(Client); {true, false} -> emqtt:ws_connect(Client); {false, true} -> emqtt:quic_connect(Client) end, case ConnRet of {ok, Properties} -> io:format("Client ~s sent CONNECT~n", [get_value(clientid, NOpts)]), case PubSub of pub -> publish(Client, NOpts, proplists:get_value(repeat, Opts)), disconnect(Client, NOpts); sub -> subscribe(Client, NOpts), KeepAlive = maps:get('Server-Keep-Alive', Properties, get_value(keepalive, NOpts)) * 1000, timer:send_interval(KeepAlive, ping), receive_loop(Client, Print) end; {error, Reason} -> io:format("Client ~s failed to sent CONNECT due to ~p~n", [get_value(clientid, NOpts), Reason]) end. publish(Client, Opts, 1) -> do_publish(Client, Opts); publish(Client, Opts, Repeat) -> do_publish(Client, Opts), case proplists:get_value(repeat_delay, Opts) of 0 -> ok; RepeatDelay -> timer:sleep(RepeatDelay * 1000) end, publish(Client, Opts, Repeat - 1). do_publish(Client, Opts) -> case get_value(payload, Opts) of undefined -> File = get_value(file, Opts), case file:read_file(File) of {ok, Bin} -> do_publish(Client, Opts, Bin); {error, Reason} -> io:format("Error: failed_to_read ~s:~nreason=~p", [File, Reason]), halt(1) end; Bin -> do_publish(Client, Opts, Bin) end. 
do_publish(Client, Opts, Payload) -> case emqtt:publish(Client, get_value(topic, Opts), Payload, Opts) of {error, Reason} -> io:format("Client ~s failed to sent PUBLISH due to ~p~n", [get_value(clientid, Opts), Reason]); _ -> io:format("Client ~s sent PUBLISH (Q~p, R~p, D0, Topic=~s, Payload=...(~p bytes))~n", [get_value(clientid, Opts), get_value(qos, Opts), i(get_value(retain, Opts)), get_value(topic, Opts), iolist_size(Payload)]) end. subscribe(Client, Opts) -> case emqtt:subscribe(Client, get_value(topic, Opts), Opts) of {ok, _, [ReasonCode]} when 0 =< ReasonCode andalso ReasonCode =< 2 -> io:format("Client ~s subscribed to ~s~n", [get_value(clientid, Opts), get_value(topic, Opts)]); {ok, _, [ReasonCode]} -> io:format("Client ~s failed to subscribe to ~s due to ~s~n", [get_value(clientid, Opts), get_value(topic, Opts), emqtt:reason_code_name(ReasonCode)]); {error, Reason} -> io:format("Client ~s failed to send SUBSCRIBE due to ~p~n", [get_value(clientid, Opts), Reason]) end. disconnect(Client, Opts) -> case emqtt:disconnect(Client) of ok -> io:format("Client ~s sent DISCONNECT~n", [get_value(clientid, Opts)]); {error, Reason} -> io:format("Client ~s failed to send DISCONNECT due to ~p~n", [get_value(clientid, Opts), Reason]) end. maybe_help(PubSub, Opts) -> case proplists:get_value(help, Opts) of true -> usage(PubSub), halt(0); _ -> ok end. usage(PubSub) -> Opts = case PubSub of pub -> ?PUB_OPTS; sub -> ?SUB_OPTS end, getopt:usage(Opts, ?CMD_NAME ++ " " ++ atom_to_list(PubSub)). check_required_args(PubSub, Keys, Opts) -> lists:foreach(fun(Key) -> case lists:keyfind(Key, 1, Opts) of false -> io:format("Error: '~s' required~n", [Key]), usage(PubSub), halt(1); _ -> ok end end, Keys). parse_cmd_opts(Opts) -> parse_cmd_opts(Opts, []). 
%% Fold a getopt-style command-line proplist into emqtt client options.
%% String-valued options become binaries, TLS-related options accumulate
%% under ssl_opts and interface addresses under tcp_opts (via
%% maybe_append/3); unrecognised options are silently dropped.
parse_cmd_opts([], Acc) ->
    Acc;
parse_cmd_opts([{host, Host} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{host, Host} | Acc]);
parse_cmd_opts([{port, Port} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{port, Port} | Acc]);
parse_cmd_opts([{iface, Interface} | Rest], Acc) ->
    %% Accept either a literal IP address or a system interface name
    %% (e.g. "eth0"); values that resolve to nothing are ignored.
    Acc1 =
        case inet:parse_address(Interface) of
            {ok, IPAddress0} ->
                maybe_append(tcp_opts, {ifaddr, IPAddress0}, Acc);
            _ ->
                case inet:getifaddrs() of
                    {ok, IfAddrs} ->
                        IsV4Addr = fun({addr, {_, _, _, _}}) -> true;
                                      (_) -> false
                                   end,
                        Props = proplists:get_value(Interface, IfAddrs, []),
                        %% Only a single IPv4 address is accepted; anything
                        %% else (none, several) leaves Acc unchanged.
                        case lists:filter(IsV4Addr, Props) of
                            [{addr, IPAddress0}] ->
                                maybe_append(tcp_opts, {ifaddr, IPAddress0}, Acc);
                            _ ->
                                Acc
                        end;
                    _ ->
                        Acc
                end
        end,
    parse_cmd_opts(Rest, Acc1);
parse_cmd_opts([{protocol_version, 'v3.1'} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{proto_ver, v3} | Acc]);
parse_cmd_opts([{protocol_version, 'v3.1.1'} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{proto_ver, v4} | Acc]);
parse_cmd_opts([{protocol_version, 'v5'} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{proto_ver, v5} | Acc]);
parse_cmd_opts([{username, Username} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{username, list_to_binary(Username)} | Acc]);
parse_cmd_opts([{password, Password} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{password, list_to_binary(Password)} | Acc]);
parse_cmd_opts([{clientid, ClientId} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{clientid, list_to_binary(ClientId)} | Acc]);
parse_cmd_opts([{will_topic, Topic} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{will_topic, list_to_binary(Topic)} | Acc]);
parse_cmd_opts([{will_payload, Payload} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{will_payload, list_to_binary(Payload)} | Acc]);
parse_cmd_opts([{will_qos, Qos} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{will_qos, Qos} | Acc]);
parse_cmd_opts([{will_retain, Retain} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{will_retain, Retain} | Acc]);
parse_cmd_opts([{keepalive, Interval} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{keepalive, Interval} | Acc]);
parse_cmd_opts([{enable_websocket, Enable} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{enable_websocket, Enable} | Acc]);
parse_cmd_opts([{enable_quic, Enable} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{enable_quic, Enable} | Acc]);
parse_cmd_opts([{enable_ssl, Enable} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{ssl, Enable} | Acc]);
parse_cmd_opts([{tls_version, Version} | Rest], Acc)
  when Version =:= 'tlsv1';
       Version =:= 'tlsv1.1';
       Version =:= 'tlsv1.2';
       Version =:= 'tlsv1.3' ->
    parse_cmd_opts(Rest, maybe_append(ssl_opts, {versions, [Version]}, Acc));
parse_cmd_opts([{cafile, CAFile} | Rest], Acc) ->
    parse_cmd_opts(Rest, maybe_append(ssl_opts, {cacertfile, CAFile}, Acc));
parse_cmd_opts([{cert, Cert} | Rest], Acc) ->
    parse_cmd_opts(Rest, maybe_append(ssl_opts, {certfile, Cert}, Acc));
parse_cmd_opts([{key, Key} | Rest], Acc) ->
    parse_cmd_opts(Rest, maybe_append(ssl_opts, {keyfile, Key}, Acc));
parse_cmd_opts([{sni, SNI} | Rest], Acc) ->
    parse_cmd_opts(Rest, maybe_append(ssl_opts, {server_name_indication, SNI}, Acc));
parse_cmd_opts([{qos, QoS} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{qos, QoS} | Acc]);
parse_cmd_opts([{retain_as_publish, Rap} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{rap, Rap} | Acc]);
parse_cmd_opts([{retain_handling, Rh} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{rh, Rh} | Acc]);
parse_cmd_opts([{retain, Retain} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{retain, Retain} | Acc]);
parse_cmd_opts([{topic, Topic} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{topic, list_to_binary(Topic)} | Acc]);
parse_cmd_opts([{payload, Payload} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{payload, list_to_binary(Payload)} | Acc]);
parse_cmd_opts([{file, File} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{file, File} | Acc]);
parse_cmd_opts([{repeat, Repeat} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{repeat, Repeat} | Acc]);
parse_cmd_opts([{repeat_delay, Delay} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{repeat_delay, Delay} | Acc]);
parse_cmd_opts([{print, WhatToPrint} | Rest], Acc) ->
    parse_cmd_opts(Rest, [{print, WhatToPrint} | Acc]);
parse_cmd_opts([{verify, IsVerify} | Rest], Acc) ->
    Verify = case IsVerify of
                 true  -> verify_peer;
                 false -> verify_none
             end,
    parse_cmd_opts(Rest, maybe_append(ssl_opts, {verify, Verify}, Acc));
parse_cmd_opts([_Unknown | Rest], Acc) ->
    %% Anything not matched above is dropped on purpose.
    parse_cmd_opts(Rest, Acc).

%% Prepend Value onto the list stored under Key in TupleList, creating
%% the {Key, [Value]} entry when absent.  Used to accumulate the
%% ssl_opts / tcp_opts sub-lists.
maybe_append(Key, Value, TupleList) ->
    case lists:keytake(Key, 1, TupleList) of
        {value, {Key, Existing}, Remaining} ->
            [{Key, [Value | Existing]} | Remaining];
        false ->
            [{Key, [Value]} | TupleList]
    end.

%% Post-process parsed options: fill in a client id and a port when the
%% user supplied neither.
enrich_opts(Opts) ->
    pipeline([fun enrich_clientid_opt/1, fun enrich_port_opt/1], Opts).

%% Generate a random client id unless one was given on the command line.
enrich_clientid_opt(Opts) ->
    case lists:keyfind(clientid, 1, Opts) of
        false -> [{clientid, emqtt:random_client_id()} | Opts];
        _     -> Opts
    end.

%% Default the port from the ssl flag: 8883 for TLS, 1883 for plain TCP.
%% NOTE(review): assumes an {ssl, boolean()} entry is always present when
%% port is absent (presumably guaranteed by the getopt defaults) — an
%% unset ssl key would crash the case expression; confirm against callers.
enrich_port_opt(Opts) ->
    case proplists:get_value(port, Opts) of
        undefined ->
            Port = case proplists:get_value(ssl, Opts) of
                       true  -> 8883;
                       false -> 1883
                   end,
            [{port, Port} | Opts];
        _ ->
            Opts
    end.

%% Thread Input through each fun in turn, left to right.
pipeline([], Input) ->
    Input;
pipeline([Fun | More], Input) ->
    pipeline(More, Fun(Input)).

%% Subscriber loop: print incoming PUBLISH payloads (or just their byte
%% size when Print =:= "size") and issue a PINGREQ on 'ping' messages.
receive_loop(Client, Print) ->
    receive
        {publish, #{payload := Payload}} ->
            case Print of
                "size" -> io:format("received ~p bytes~n", [size(Payload)]);
                _      -> io:format("~s~n", [Payload])
            end,
            receive_loop(Client, Print);
        ping ->
            emqtt:ping(Client),
            receive_loop(Client, Print);
        _Other ->
            receive_loop(Client, Print)
    end.

%% Boolean flag to integer.
i(true)  -> 1;
i(false) -> 0.
4745f6e4efab88b221c796582c616a9ccc9c20ed5743a13f5565a5503926da07
Palmik/wai-sockjs
Handler.hs
# LANGUAGE FlexibleContexts # {-# LANGUAGE OverloadedStrings #-} module Network.Sock.Types.Handler ( Handler(..) ) where ------------------------------------------------------------------------------ import qualified Data.ByteString.Lazy as BL (ByteString) import Data.Proxy ------------------------------------------------------------------------------ import qualified Network.HTTP.Types as H (ResponseHeaders) import qualified Network.HTTP.Types.Response as H (IsResponse(..)) import qualified Network.HTTP.Types.Request as H (IsRequest(..)) ------------------------------------------------------------------------------ import Network.Sock.Types.Frame import Network.Sock.Types.Server import Network.Sock.Types.Request ------------------------------------------------------------------------------ -- | Handler class Handler tag where handleReuqest :: H.IsResponse res => Proxy tag -> Request -> Server res -- | Formats the Frame (different protocols may format frames differently). format :: H.IsRequest req => Proxy tag -> req -> Frame -> BL.ByteString -- | Used to create a response (headers might be transport & request dependent). headers :: Proxy tag -> Request -> H.ResponseHeaders
null
https://raw.githubusercontent.com/Palmik/wai-sockjs/d1037cb00450a362b7e593a76d6257d06ecb2405/src/Network/Sock/Types/Handler.hs
haskell
# LANGUAGE OverloadedStrings # ---------------------------------------------------------------------------- ---------------------------------------------------------------------------- ---------------------------------------------------------------------------- ---------------------------------------------------------------------------- | Handler | Formats the Frame (different protocols may format frames differently). | Used to create a response (headers might be transport & request dependent).
# LANGUAGE FlexibleContexts # module Network.Sock.Types.Handler ( Handler(..) ) where import qualified Data.ByteString.Lazy as BL (ByteString) import Data.Proxy import qualified Network.HTTP.Types as H (ResponseHeaders) import qualified Network.HTTP.Types.Response as H (IsResponse(..)) import qualified Network.HTTP.Types.Request as H (IsRequest(..)) import Network.Sock.Types.Frame import Network.Sock.Types.Server import Network.Sock.Types.Request class Handler tag where handleReuqest :: H.IsResponse res => Proxy tag -> Request -> Server res format :: H.IsRequest req => Proxy tag -> req -> Frame -> BL.ByteString headers :: Proxy tag -> Request -> H.ResponseHeaders
96336401adfa1a862a2ecd0ee50d40d342b2fbf7f0394d59aaa6097af023357f
hengchu/tiger-haskell
tigertemp.hs
module TigerTemp ( Label , RetLabel , Temp(..) ) where import TigerRegisters import TigerSymbol type Label = Symbol type RetLabel = Label data Temp = TEMP Int | SRC Int | DST Int | Named Register deriving (Ord, Eq) instance Show Temp where show (TEMP d) = "T"++show d show (Named r) = show r show (SRC d) = "`S"++show d show (DST d) = "`D"++show d
null
https://raw.githubusercontent.com/hengchu/tiger-haskell/ec6809500c245ea99d2329e8255a67bdb57cf579/src/tigertemp.hs
haskell
module TigerTemp ( Label , RetLabel , Temp(..) ) where import TigerRegisters import TigerSymbol type Label = Symbol type RetLabel = Label data Temp = TEMP Int | SRC Int | DST Int | Named Register deriving (Ord, Eq) instance Show Temp where show (TEMP d) = "T"++show d show (Named r) = show r show (SRC d) = "`S"++show d show (DST d) = "`D"++show d
bd6a7987527348aed13e51242eb454cd96ea61a3fac21b61756a3c617ee5ba61
tonsky/Heroes
render.cljs
(ns heroes.render (:require [goog.object :as go] [heroes.model :as model] [clojure.string :as str] [datascript.core :as ds] [heroes.core :as core :refer [dim pos]])) (defonce *images (atom {})) (defonce *window-dim (atom nil)) (defonce *frame-time (core/clock-window 10)) (declare render!) (defn image [url] (or (@*images url) (let [img (js/Image.)] (go/set img "onload" (fn [_] (render!))) (go/set img "src" url) (swap! *images assoc url img) img))) (defn window-dim [] (let [w js/window.innerWidth h js/window.innerHeight rotate? (< w h) screen-dim (if rotate? (dim h w) (dim w h)) TODO scale (loop [try-scale (+ 1 step)] (if (or (> (* (:w core/screen-dim) try-scale) (:w screen-dim)) (> (* (:h core/screen-dim) try-scale) (:h screen-dim))) (- try-scale step) (recur (+ try-scale step)))) window-dim (dim (-> (:w screen-dim) (quot scale)) (-> (:h screen-dim) (quot scale)))] (assoc window-dim :window (dim w h) :rotate? rotate? :scale scale :screen-pos (pos (-> (- (:w window-dim) (:w core/screen-dim)) (quot 2)) (-> (- (:h window-dim) (:h core/screen-dim)) (quot 2)))))) (defn render-bg! [ctx db] (let [bg-dim (dim 460 270)] (.drawImage ctx (image "static/bg.png") (-> (- (:w core/screen-dim) (:w bg-dim)) (quot 2)) (-> (- (:h core/screen-dim) (:h bg-dim)) (quot 2)))) (set! (.-strokeStyle ctx) "#fff") (.strokeRect ctx 0.5 0.5 (dec (:w core/screen-dim)) (dec (:h core/screen-dim)))) (defn render-tiles! [ctx db] (set! (.-fillStyle ctx) "rgba(255,255,255,0.3)") (doseq [tile (model/entities db :aevt :tile/pos) :let [{:tile/keys [pos coord]} tile]] (.fillText ctx (str coord) (- (:x pos) 4) (+ (:y pos) 1)))) (defn render-hovers! [ctx db] (set! (.-fillStyle ctx) "rgba(0,0,0,0.2)") (doseq [stack (model/entities db :aevt :stack/tile) :let [{:tile/keys [pos]} (:stack/tile stack)]] (.fillRect ctx (- (:x pos) (quot (:w core/hover-dim) 2)) (- (:y pos) 22) (:w core/hover-dim) (:h core/hover-dim)))) (defn render-sprites! 
[ctx db] (doseq [sprite (->> (model/entities db :aevt :sprite/pos) (sort-by :sprite/pos)) :let [{:sprite/keys [pos anim mirror? layers] :sprite.anim/keys [frame]} sprite {:anim/keys [sheet]} anim {:sheet/keys [sprite-dim url]} sheet img (image url)]] (.save ctx) (.translate ctx (:x pos) (:y pos)) (when mirror? (.scale ctx -1 1)) (doseq [layer (->> (or layers #{0}) sort)] (.drawImage ctx img (* frame (:w sprite-dim)) (* layer (:h sprite-dim)) (:w sprite-dim) (:h sprite-dim) (- (quot (:w sprite-dim) 2)) (- (:h sprite-dim)) (:w sprite-dim) (:h sprite-dim))) (.restore ctx))) (defn render-labels! [ctx db] (doseq [label (model/entities db :aevt :label/text) :let [{:label/keys [text pos align]} label rect-w (+ 1 (.-width (.measureText ctx text))) rect-h 7 halign (namespace align) valign (name align) x (case halign "left" (:x pos) "center" (- (:x pos) (quot rect-w 2)) "right" (- (:x pos) rect-w)) y (case valign "top" (:y pos) "middle" (- (:y pos) (quot rect-h 2)) "bottom" (- (:y pos) rect-h))]] (set! (.-fillStyle ctx) "#FCEF56") (.fillRect ctx x y rect-w rect-h) (set! (.-fillStyle ctx) "#000") (.fillText ctx text (+ x 1) (+ y (dec rect-h))))) (defn render-stats! [ctx db] (set! (.-fillStyle ctx) "#fff") (let [{:keys [w h scale rotate? window] :as window-dim} @*window-dim frame-time (core/clock-time *frame-time) tx-time (core/clock-time model/*tx-time)] (.fillText ctx (str window " (" window-dim " at " scale "×)" " " (count db) " datoms" " frame " (.toFixed frame-time 1) " ms" " tx " (.toFixed tx-time 1) " ms") 2 (- (:h core/screen-dim) 2)))) (defn render! ([] (render! @model/*db)) ([db] (let [canvas (core/el "#canvas") ctx (.getContext canvas "2d")] (core/clock-measure *frame-time (set! (.-font ctx) "5px Heroes Sans") (render-bg! ctx db) (when @core/*debug? (render-tiles! ctx db)) (when @core/*debug? (render-hovers! ctx db)) (render-sprites! ctx db) (render-labels! ctx db) (render-stats! 
ctx db))))) (defn on-resize ([] (on-resize nil)) ([_] (when (not= (:window @*window-dim) (dim js/window.innerWidth js/window.innerHeight)) (let [dim (reset! *window-dim (window-dim)) {:keys [scale w h rotate? screen-pos]} dim canvas (core/el "#canvas") ctx (.getContext canvas "2d") style (.-style canvas)] (set! (.-width canvas) w) (set! (.-height canvas) h) (set! (.-width style) (str (* scale w) "px")) (set! (.-height style) (str (* scale h) "px")) (set! (.-transformOrigin style) (str (* scale h 0.5) "px " (* scale h 0.5) "px")) (set! (.-transform style) (if rotate? "rotate(90deg)" "")) (.translate ctx (:x screen-pos) (:y screen-pos)) (render!))))) (defn reload! [] (set! js/window.onresize (fn [e] (on-resize e) iOS in Home Screen mode fires too early (on-resize)) (defn window->screen [wpos] (let [{:keys [rotate? scale screen-pos]} @*window-dim wpos' (if rotate? (pos (:y wpos) (- js/window.innerWidth (:x wpos))) wpos)] (pos (-> (:x wpos') (quot scale) (- (:x screen-pos))) (-> (:y wpos') (quot scale) (- (:y screen-pos))))))
null
https://raw.githubusercontent.com/tonsky/Heroes/341ca22d6d340bedb31c8e7421937a1f07575d26/src/heroes/render.cljs
clojure
(ns heroes.render (:require [goog.object :as go] [heroes.model :as model] [clojure.string :as str] [datascript.core :as ds] [heroes.core :as core :refer [dim pos]])) (defonce *images (atom {})) (defonce *window-dim (atom nil)) (defonce *frame-time (core/clock-window 10)) (declare render!) (defn image [url] (or (@*images url) (let [img (js/Image.)] (go/set img "onload" (fn [_] (render!))) (go/set img "src" url) (swap! *images assoc url img) img))) (defn window-dim [] (let [w js/window.innerWidth h js/window.innerHeight rotate? (< w h) screen-dim (if rotate? (dim h w) (dim w h)) TODO scale (loop [try-scale (+ 1 step)] (if (or (> (* (:w core/screen-dim) try-scale) (:w screen-dim)) (> (* (:h core/screen-dim) try-scale) (:h screen-dim))) (- try-scale step) (recur (+ try-scale step)))) window-dim (dim (-> (:w screen-dim) (quot scale)) (-> (:h screen-dim) (quot scale)))] (assoc window-dim :window (dim w h) :rotate? rotate? :scale scale :screen-pos (pos (-> (- (:w window-dim) (:w core/screen-dim)) (quot 2)) (-> (- (:h window-dim) (:h core/screen-dim)) (quot 2)))))) (defn render-bg! [ctx db] (let [bg-dim (dim 460 270)] (.drawImage ctx (image "static/bg.png") (-> (- (:w core/screen-dim) (:w bg-dim)) (quot 2)) (-> (- (:h core/screen-dim) (:h bg-dim)) (quot 2)))) (set! (.-strokeStyle ctx) "#fff") (.strokeRect ctx 0.5 0.5 (dec (:w core/screen-dim)) (dec (:h core/screen-dim)))) (defn render-tiles! [ctx db] (set! (.-fillStyle ctx) "rgba(255,255,255,0.3)") (doseq [tile (model/entities db :aevt :tile/pos) :let [{:tile/keys [pos coord]} tile]] (.fillText ctx (str coord) (- (:x pos) 4) (+ (:y pos) 1)))) (defn render-hovers! [ctx db] (set! (.-fillStyle ctx) "rgba(0,0,0,0.2)") (doseq [stack (model/entities db :aevt :stack/tile) :let [{:tile/keys [pos]} (:stack/tile stack)]] (.fillRect ctx (- (:x pos) (quot (:w core/hover-dim) 2)) (- (:y pos) 22) (:w core/hover-dim) (:h core/hover-dim)))) (defn render-sprites! 
[ctx db] (doseq [sprite (->> (model/entities db :aevt :sprite/pos) (sort-by :sprite/pos)) :let [{:sprite/keys [pos anim mirror? layers] :sprite.anim/keys [frame]} sprite {:anim/keys [sheet]} anim {:sheet/keys [sprite-dim url]} sheet img (image url)]] (.save ctx) (.translate ctx (:x pos) (:y pos)) (when mirror? (.scale ctx -1 1)) (doseq [layer (->> (or layers #{0}) sort)] (.drawImage ctx img (* frame (:w sprite-dim)) (* layer (:h sprite-dim)) (:w sprite-dim) (:h sprite-dim) (- (quot (:w sprite-dim) 2)) (- (:h sprite-dim)) (:w sprite-dim) (:h sprite-dim))) (.restore ctx))) (defn render-labels! [ctx db] (doseq [label (model/entities db :aevt :label/text) :let [{:label/keys [text pos align]} label rect-w (+ 1 (.-width (.measureText ctx text))) rect-h 7 halign (namespace align) valign (name align) x (case halign "left" (:x pos) "center" (- (:x pos) (quot rect-w 2)) "right" (- (:x pos) rect-w)) y (case valign "top" (:y pos) "middle" (- (:y pos) (quot rect-h 2)) "bottom" (- (:y pos) rect-h))]] (set! (.-fillStyle ctx) "#FCEF56") (.fillRect ctx x y rect-w rect-h) (set! (.-fillStyle ctx) "#000") (.fillText ctx text (+ x 1) (+ y (dec rect-h))))) (defn render-stats! [ctx db] (set! (.-fillStyle ctx) "#fff") (let [{:keys [w h scale rotate? window] :as window-dim} @*window-dim frame-time (core/clock-time *frame-time) tx-time (core/clock-time model/*tx-time)] (.fillText ctx (str window " (" window-dim " at " scale "×)" " " (count db) " datoms" " frame " (.toFixed frame-time 1) " ms" " tx " (.toFixed tx-time 1) " ms") 2 (- (:h core/screen-dim) 2)))) (defn render! ([] (render! @model/*db)) ([db] (let [canvas (core/el "#canvas") ctx (.getContext canvas "2d")] (core/clock-measure *frame-time (set! (.-font ctx) "5px Heroes Sans") (render-bg! ctx db) (when @core/*debug? (render-tiles! ctx db)) (when @core/*debug? (render-hovers! ctx db)) (render-sprites! ctx db) (render-labels! ctx db) (render-stats! 
ctx db))))) (defn on-resize ([] (on-resize nil)) ([_] (when (not= (:window @*window-dim) (dim js/window.innerWidth js/window.innerHeight)) (let [dim (reset! *window-dim (window-dim)) {:keys [scale w h rotate? screen-pos]} dim canvas (core/el "#canvas") ctx (.getContext canvas "2d") style (.-style canvas)] (set! (.-width canvas) w) (set! (.-height canvas) h) (set! (.-width style) (str (* scale w) "px")) (set! (.-height style) (str (* scale h) "px")) (set! (.-transformOrigin style) (str (* scale h 0.5) "px " (* scale h 0.5) "px")) (set! (.-transform style) (if rotate? "rotate(90deg)" "")) (.translate ctx (:x screen-pos) (:y screen-pos)) (render!))))) (defn reload! [] (set! js/window.onresize (fn [e] (on-resize e) iOS in Home Screen mode fires too early (on-resize)) (defn window->screen [wpos] (let [{:keys [rotate? scale screen-pos]} @*window-dim wpos' (if rotate? (pos (:y wpos) (- js/window.innerWidth (:x wpos))) wpos)] (pos (-> (:x wpos') (quot scale) (- (:x screen-pos))) (-> (:y wpos') (quot scale) (- (:y screen-pos))))))
b1cbfbab7c767c1998e60ddf68b1ca1e6bd0df13dce8f068986486c35e4ef667
overtone/overtone
flute.clj
(ns overtone.samples.flute
  (:use [overtone.core]))

;; Lookups into the local asset registry for the two flute sample packs.
(defn- registered-vibrato-samples []
  (registered-assets ::TransverseFluteTenutoVibrato))

(defn- registered-non-vibrato-samples []
  (registered-assets ::TransverseFluteTenutoNonVibrato))

;; Freesound sample id -> note keyword, tenuto flute WITH vibrato.
(def FREESOUND-VIBRATO-FLUTE-SAMPLES
  {154274 :C7  154273 :B6  154272 :A#6 154271 :A6  154270 :G#6
   154269 :G6  154268 :F#6 154267 :F6  154266 :E6  154265 :D#6
   154264 :D6  154263 :C#6 154262 :C6  154261 :B5  154260 :A#5
   154259 :A5  154258 :G#5 154257 :G5  154256 :F#5 154255 :F5
   154254 :E5  154253 :D#5 154252 :D5  154251 :C#5 154250 :C5
   154249 :B4  154248 :A#4 154247 :A4  154246 :G#4 154245 :G4
   154244 :F#4 154243 :E4  154242 :F4  154241 :D#4 154240 :D4
   154239 :C#4 154238 :C4})

;; Freesound sample id -> note keyword, tenuto flute WITHOUT vibrato.
(def FREESOUND-NON-VIBRATO-FLUTE-SAMPLES
  {154237 :C4  154236 :C#4 154235 :D4  154234 :D#4 154233 :E4
   154232 :F4  154231 :F#4 154230 :G4  154229 :G#4 154228 :A4
   154227 :G#6 154226 :G6  154225 :F#6 154224 :F6  154223 :E6
   154222 :D#6 154221 :D6  154220 :C#6 154219 :C6  154218 :B5
   154217 :A#5 154216 :A5  154215 :G#5 154214 :G5  154213 :F#5
   154212 :F5  154211 :E5  154210 :D#5 154209 :D5  154208 :C#5
   154207 :C5  154206 :B4  154205 :A#4 154204 :A4})

(def VIBRATO-FLUTE-SAMPLE-IDS (keys FREESOUND-VIBRATO-FLUTE-SAMPLES))
(def NON-VIBRATO-FLUTE-SAMPLE-IDS (keys FREESOUND-NON-VIBRATO-FLUTE-SAMPLES))

;; Eagerly fetch the sample buffers for every freesound id above.
(def vibrato-flute-samples
  (apply freesound-samples VIBRATO-FLUTE-SAMPLE-IDS))

;; NOTE(review): "vibato" is an upstream typo, kept because the var name
;; is part of this namespace's public interface.
(def non-vibato-flute-samples
  (apply freesound-samples NON-VIBRATO-FLUTE-SAMPLE-IDS))

(defn- buffer->midi-note
  "MIDI note number for a sample buffer, resolved via its freesound id."
  [buf note-map]
  (-> buf :freesound-id note-map name note))

(defn- note-index
  "Map of midi-note -> buffer id for the given sample buffers."
  [buffers note-map]
  (reduce (fn [idx buf]
            (assoc idx (buffer->midi-note buf note-map) (:id buf)))
          {}
          buffers))

(defonce ^:private silent-buffer (buffer 0))

;; 128-slot server buffers indexed by MIDI note.  Every slot defaults to
;; the silent buffer's id, so synths can index by any note without a
;; bounds/presence check.
(defonce vibrato-index-buffer
  (let [tab (note-index vibrato-flute-samples FREESOUND-VIBRATO-FLUTE-SAMPLES)
        buf (buffer 128)]
    (buffer-fill! buf (:id silent-buffer))
    (doseq [[idx val] tab]
      (buffer-set! buf idx val))
    buf))

(defonce non-vibrato-index-buffer
  (let [tab (note-index non-vibato-flute-samples FREESOUND-NON-VIBRATO-FLUTE-SAMPLES)
        buf (buffer 128)]
    (buffer-fill! buf (:id silent-buffer))
    (doseq [[idx val] tab]
      (buffer-set! buf idx val))
    buf))
null
https://raw.githubusercontent.com/overtone/overtone/02f8cdd2817bf810ff390b6f91d3e84d61afcc85/src/overtone/samples/flute.clj
clojure
(ns overtone.samples.flute (:use [overtone.core])) (defn- registered-vibrato-samples [] (registered-assets ::TransverseFluteTenutoVibrato)) (defn- registered-non-vibrato-samples [] (registered-assets ::TransverseFluteTenutoNonVibrato)) (def FREESOUND-VIBRATO-FLUTE-SAMPLES {154274 :C7 154273 :B6 154272 :A#6 154271 :A6 154270 :G#6 154269 :G6 154268 :F#6 154267 :F6 154266 :E6 154265 :D#6 154264 :D6 154263 :C#6 154262 :C6 154261 :B5 154260 :A#5 154259 :A5 154258 :G#5 154257 :G5 154256 :F#5 154255 :F5 154254 :E5 154253 :D#5 154252 :D5 154251 :C#5 154250 :C5 154249 :B4 154248 :A#4 154247 :A4 154246 :G#4 154245 :G4 154244 :F#4 154243 :E4 154242 :F4 154241 :D#4 154240 :D4 154239 :C#4 154238 :C4}) (def FREESOUND-NON-VIBRATO-FLUTE-SAMPLES {154237 :C4 154236 :C#4 154235 :D4 154234 :D#4 154233 :E4 154232 :F4 154231 :F#4 154230 :G4 154229 :G#4 154228 :A4 154227 :G#6 154226 :G6 154225 :F#6 154224 :F6 154223 :E6 154222 :D#6 154221 :D6 154220 :C#6 154219 :C6 154218 :B5 154217 :A#5 154216 :A5 154215 :G#5 154214 :G5 154213 :F#5 154212 :F5 154211 :E5 154210 :D#5 154209 :D5 154208 :C#5 154207 :C5 154206 :B4 154205 :A#4 154204 :A4}) (def VIBRATO-FLUTE-SAMPLE-IDS (keys FREESOUND-VIBRATO-FLUTE-SAMPLES)) (def NON-VIBRATO-FLUTE-SAMPLE-IDS (keys FREESOUND-NON-VIBRATO-FLUTE-SAMPLES)) (def vibrato-flute-samples (apply freesound-samples VIBRATO-FLUTE-SAMPLE-IDS)) (def non-vibato-flute-samples (apply freesound-samples NON-VIBRATO-FLUTE-SAMPLE-IDS)) (defn- buffer->midi-note [buf note-map] (-> buf :freesound-id note-map name note)) (defn- note-index [buffers note-map] (reduce (fn [index buf] (let [id (:id buf) note (buffer->midi-note buf note-map)] (assoc index note id))) {} buffers)) (defonce ^:private silent-buffer (buffer 0)) (defonce vibrato-index-buffer (let [tab (note-index vibrato-flute-samples FREESOUND-VIBRATO-FLUTE-SAMPLES) buf (buffer 128)] (buffer-fill! buf (:id silent-buffer)) (doseq [[idx val] tab] (buffer-set! 
buf idx val)) buf)) (defonce non-vibrato-index-buffer (let [tab (note-index non-vibato-flute-samples FREESOUND-NON-VIBRATO-FLUTE-SAMPLES) buf (buffer 128)] (buffer-fill! buf (:id silent-buffer)) (doseq [[idx val] tab] (buffer-set! buf idx val)) buf))
098a20346c89db355fe172be5782a063c2a8b62b671a08defd17bbf893713cad
helium/blockchain-etl
blockchain_etl_SUITE.erl
-module('blockchain_etl_SUITE'). -compile([export_all]). all() -> [].
null
https://raw.githubusercontent.com/helium/blockchain-etl/2e5f931cb75252d531d4d844035cd57c6e75f38e/test/ct/blockchain_etl_SUITE.erl
erlang
-module('blockchain_etl_SUITE'). -compile([export_all]). all() -> [].
d8d92310a797ec1595a1a7a59abb19bb1d351edb6201e78d9bc0faf376a9e63a
mejgun/haskell-tdlib
SearchPublicChat.hs
{-# LANGUAGE OverloadedStrings #-} -- | module TD.Query.SearchPublicChat where import qualified Data.Aeson as A import qualified Data.Aeson.Types as T import qualified Utils as U -- | -- Searches a public chat by its username. Currently, only private chats, supergroups and channels can be public. Returns the chat if found; otherwise, an error is returned @username Username to be resolved data SearchPublicChat = SearchPublicChat { -- | username :: Maybe String } deriving (Eq) instance Show SearchPublicChat where show SearchPublicChat { username = username_ } = "SearchPublicChat" ++ U.cc [ U.p "username" username_ ] instance T.ToJSON SearchPublicChat where toJSON SearchPublicChat { username = username_ } = A.object [ "@type" A..= T.String "searchPublicChat", "username" A..= username_ ]
null
https://raw.githubusercontent.com/mejgun/haskell-tdlib/6e10efc37e32dbb9d19bef0241aa1553cf2cdda3/src/TD/Query/SearchPublicChat.hs
haskell
# LANGUAGE OverloadedStrings # | | Searches a public chat by its username. Currently, only private chats, supergroups and channels can be public. Returns the chat if found; otherwise, an error is returned @username Username to be resolved |
module TD.Query.SearchPublicChat where import qualified Data.Aeson as A import qualified Data.Aeson.Types as T import qualified Utils as U data SearchPublicChat = SearchPublicChat username :: Maybe String } deriving (Eq) instance Show SearchPublicChat where show SearchPublicChat { username = username_ } = "SearchPublicChat" ++ U.cc [ U.p "username" username_ ] instance T.ToJSON SearchPublicChat where toJSON SearchPublicChat { username = username_ } = A.object [ "@type" A..= T.String "searchPublicChat", "username" A..= username_ ]
9670d82a1e0bdc6ae91a41817e17d85b6aa095dd2474650f9ad02da6f07f0ec5
sulami/spielwiese
018-maximum-path-sum-I.hs
-- Maximum path sum I Problem 18 -- -- By starting at the top of the triangle below and moving to adjacent numbers on the row below , the maximum total from top to bottom is 23 . -- 3 -- 7 4 -- 2 4 6 -- 8 5 9 3 -- That is , 3 + 7 + 4 + 9 = 23 . -- -- Find the maximum total from top to bottom of the triangle below: -- NOTE : As there are only 16384 routes , it is possible to solve this problem by trying every route . However , Problem 67 , is the same challenge with a triangle containing one - hundred rows ; it can not be solved by brute force , -- and requires a clever method! ;o) {-# OPTIONS_GHC -O2 #-} triangle :: [[Int]] triangle = [ [75], [95,64], [17,47,82], [18,35,87,10], [20,04,82,47,65], [19,01,23,75,03,34], [88,02,77,73,07,63,67], [99,65,04,28,06,16,70,92], [41,41,26,56,83,40,80,70,33], [41,48,72,33,47,32,37,16,94,29], [53,71,44,65,25,43,91,52,97,51,14], [70,11,33,28,77,73,17,78,39,68,17,57], [91,71,52,38,17,14,91,43,58,50,27,29,48], [63,66,04,68,89,53,67,30,73,16,69,87,40,31], [04,62,98,27,23,09,70,98,73,93,38,53,60,04,23] ] data BTree a = Empty | Node a (BTree a) (BTree a) deriving (Show) constructTree :: [[a]] -> BTree a constructTree [] = Empty constructTree (x:xs) = Node (head x) (constructTree xs) $ constructTree $ map (drop 1) xs pSum :: Num a => [a] -> BTree a -> [a] pSum rv Empty = rv pSum rv (Node x l r) = pSum (rv ++ [last rv + x]) l ++ (pSum (rv ++ [last rv + x]) r) main = print $ maximum $ pSum [0] $ constructTree triangle
null
https://raw.githubusercontent.com/sulami/spielwiese/da354aa112d43d7ec5f258f4b5afafd7a88c8aa8/hEuler/018-maximum-path-sum-I.hs
haskell
Maximum path sum I By starting at the top of the triangle below and moving to adjacent numbers 7 4 2 4 6 8 5 9 3 Find the maximum total from top to bottom of the triangle below: and requires a clever method! ;o) # OPTIONS_GHC -O2 #
Problem 18 on the row below , the maximum total from top to bottom is 23 . 3 That is , 3 + 7 + 4 + 9 = 23 . NOTE : As there are only 16384 routes , it is possible to solve this problem by trying every route . However , Problem 67 , is the same challenge with a triangle containing one - hundred rows ; it can not be solved by brute force , triangle :: [[Int]] triangle = [ [75], [95,64], [17,47,82], [18,35,87,10], [20,04,82,47,65], [19,01,23,75,03,34], [88,02,77,73,07,63,67], [99,65,04,28,06,16,70,92], [41,41,26,56,83,40,80,70,33], [41,48,72,33,47,32,37,16,94,29], [53,71,44,65,25,43,91,52,97,51,14], [70,11,33,28,77,73,17,78,39,68,17,57], [91,71,52,38,17,14,91,43,58,50,27,29,48], [63,66,04,68,89,53,67,30,73,16,69,87,40,31], [04,62,98,27,23,09,70,98,73,93,38,53,60,04,23] ] data BTree a = Empty | Node a (BTree a) (BTree a) deriving (Show) constructTree :: [[a]] -> BTree a constructTree [] = Empty constructTree (x:xs) = Node (head x) (constructTree xs) $ constructTree $ map (drop 1) xs pSum :: Num a => [a] -> BTree a -> [a] pSum rv Empty = rv pSum rv (Node x l r) = pSum (rv ++ [last rv + x]) l ++ (pSum (rv ++ [last rv + x]) r) main = print $ maximum $ pSum [0] $ constructTree triangle
216c62f9e4c864aaedccbf998d575bb724a53971f2d1e4d6f7b8d0edad66e5fe
input-output-hk/plutus-apps
Types.hs
{-# LANGUAGE DataKinds         #-}
{-# LANGUAGE DeriveAnyClass    #-}
{-# LANGUAGE DeriveGeneric     #-}
{-# LANGUAGE DerivingVia       #-}
{-# LANGUAGE LambdaCase        #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StrictData        #-}
{-# LANGUAGE TemplateHaskell   #-}
{-# LANGUAGE TypeApplications  #-}

module Cardano.ChainIndex.Types where

import Control.Lens (makeLenses)
import Control.Monad.Freer.State
import Data.Aeson (FromJSON, ToJSON)
import Data.Default (Default, def)
import GHC.Generics (Generic)
import Prettyprinter (Pretty (..), parens, (<+>))
import Servant.Client (BaseUrl (..), Scheme (..))

import Cardano.BM.Data.Trace (Trace)
import Cardano.BM.Data.Tracer (ToObject (..))
import Cardano.BM.Data.Tracer.Extras (Tagged (..), mkObjectStr)
import Control.Monad.Freer.Error (Error)
import Control.Monad.Freer.Extras (LogMsg)
import Plutus.ChainIndex.Emulator (ChainIndexControlEffect, ChainIndexEmulatorState, ChainIndexError,
                                   ChainIndexLog, ChainIndexQueryEffect)

-- | The effect row a chain-index handler runs in, on top of some base
-- monad @m@.
type ChainIndexEffects m =
    '[ ChainIndexControlEffect
     , ChainIndexQueryEffect
     , State ChainIndexEmulatorState
     , LogMsg ChainIndexLog
     , Error ChainIndexError
     , m
     ]

-- | Base URL of the chain-index service; JSON instances come from
-- 'BaseUrl' via DerivingVia.
newtype ChainIndexUrl = ChainIndexUrl BaseUrl
    deriving (Eq, Show, FromJSON, ToJSON) via BaseUrl

newtype ChainIndexConfig = ChainIndexConfig
    { ciBaseUrl :: ChainIndexUrl
    }
    deriving stock (Show, Eq, Generic)
    deriving anyclass (FromJSON, ToJSON)

-- See Note [pab-ports] in "test/full/Plutus/PAB/CliSpec.hs".
defaultChainIndexConfig :: ChainIndexConfig
defaultChainIndexConfig =
    ChainIndexConfig { ciBaseUrl = ChainIndexUrl (BaseUrl Http "localhost" 9083 "") }

instance Default ChainIndexConfig where
    def = defaultChainIndexConfig

makeLenses ''ChainIndexConfig

-- | Messages from the ChainIndex Server
data ChainIndexServerMsg =
      -- | Starting a node client thread
      StartingNodeClientThread
      -- | Starting ChainIndex service
    | StartingChainIndex
        Int -- ^ Port number
      -- | Received transaction
    | ReceivedBlocksTxns
        Int -- ^ Blocks
        Int -- ^ Transactions
    | ChainEvent ChainIndexLog
    deriving stock (Show, Generic)
    deriving anyclass (ToJSON, FromJSON)

type ChainIndexTrace = Trace IO ChainIndexServerMsg

instance Pretty ChainIndexServerMsg where
    pretty = \case
        ReceivedBlocksTxns blocks txns ->
            "Received" <+> pretty blocks <+> "blocks" <+> parens (pretty txns <+> "transactions")
        StartingNodeClientThread -> "Starting node client thread"
        StartingChainIndex port  -> "Starting chain index on port" <+> pretty port
        ChainEvent e             -> "Processing chain index event:" <+> pretty e

instance ToObject ChainIndexServerMsg where
    toObject _ = \case
        ReceivedBlocksTxns x y ->
            mkObjectStr "received block transactions"
                        (Tagged @"blocks" x, Tagged @"transactions" y)
        StartingNodeClientThread ->
            mkObjectStr "starting node client thread" ()
        StartingChainIndex p ->
            mkObjectStr "starting chain index" (Tagged @"port" p)
        ChainEvent e ->
            mkObjectStr "processing chain event" (Tagged @"event" e)
null
https://raw.githubusercontent.com/input-output-hk/plutus-apps/d637b1916522e4ec20b719487a8a2e066937aceb/plutus-pab/src/Cardano/ChainIndex/Types.hs
haskell
# LANGUAGE DeriveAnyClass # # LANGUAGE DerivingVia # # LANGUAGE OverloadedStrings # | Starting a node client thread ^ Port number | Received transaction ^ Blocks ^ Transactions
# LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE LambdaCase # # LANGUAGE StrictData # # LANGUAGE TemplateHaskell # # LANGUAGE TypeApplications # module Cardano.ChainIndex.Types where import Control.Lens (makeLenses) import Control.Monad.Freer.State import Data.Aeson (FromJSON, ToJSON) import Data.Default (Default, def) import GHC.Generics (Generic) import Prettyprinter (Pretty (..), parens, (<+>)) import Servant.Client (BaseUrl (..), Scheme (..)) import Cardano.BM.Data.Trace (Trace) import Cardano.BM.Data.Tracer (ToObject (..)) import Cardano.BM.Data.Tracer.Extras (Tagged (..), mkObjectStr) import Control.Monad.Freer.Error (Error) import Control.Monad.Freer.Extras (LogMsg) import Plutus.ChainIndex.Emulator (ChainIndexControlEffect, ChainIndexEmulatorState, ChainIndexError, ChainIndexLog, ChainIndexQueryEffect) type ChainIndexEffects m = '[ ChainIndexControlEffect , ChainIndexQueryEffect , State ChainIndexEmulatorState , LogMsg ChainIndexLog , Error ChainIndexError , m ] newtype ChainIndexUrl = ChainIndexUrl BaseUrl deriving (Eq, Show, FromJSON, ToJSON) via BaseUrl newtype ChainIndexConfig = ChainIndexConfig { ciBaseUrl :: ChainIndexUrl } deriving stock (Show, Eq, Generic) deriving anyclass (FromJSON, ToJSON) defaultChainIndexConfig :: ChainIndexConfig defaultChainIndexConfig = ChainIndexConfig See Note [ pab - ports ] in " test / full / Plutus / PAB / CliSpec.hs " . 
{ ciBaseUrl = ChainIndexUrl $ BaseUrl Http "localhost" 9083 "" } instance Default ChainIndexConfig where def = defaultChainIndexConfig makeLenses ''ChainIndexConfig | Messages from the ChainIndex Server data ChainIndexServerMsg = StartingNodeClientThread | Starting ChainIndex service | StartingChainIndex | ReceivedBlocksTxns | ChainEvent ChainIndexLog deriving stock (Show, Generic) deriving anyclass (ToJSON, FromJSON) type ChainIndexTrace = Trace IO ChainIndexServerMsg instance Pretty ChainIndexServerMsg where pretty = \case ReceivedBlocksTxns blocks txns -> "Received" <+> pretty blocks <+> "blocks" <+> parens (pretty txns <+> "transactions") StartingNodeClientThread -> "Starting node client thread" StartingChainIndex port -> "Starting chain index on port" <+> pretty port ChainEvent e -> "Processing chain index event:" <+> pretty e instance ToObject ChainIndexServerMsg where toObject _ = \case ReceivedBlocksTxns x y -> mkObjectStr "received block transactions" (Tagged @"blocks" x, Tagged @"transactions" y) StartingNodeClientThread -> mkObjectStr "starting node client thread" () StartingChainIndex p -> mkObjectStr "starting chain index" (Tagged @"port" p) ChainEvent e -> mkObjectStr "processing chain event" (Tagged @"event" e)