_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
85d2a9b33921ba7580ec83a5839615d8471a4e6447392c19797b729785ee1e20 | windock/erlyvideo | misultin_gen_server.erl | % ==========================================================================================================
MISULTIN - Example : Running Misultin from a gen_server .
%
% >-|-|-(°>
%
Copyright ( C ) 2009 , < >
% All rights reserved.
%
% BSD License
%
% Redistribution and use in source and binary forms, with or without modification, are permitted provided
% that the following conditions are met:
%
% * Redistributions of source code must retain the above copyright notice, this list of conditions and the
% following disclaimer.
% * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
% the following disclaimer in the documentation and/or other materials provided with the distribution.
% * Neither the name of the authors nor the names of its contributors may be used to endorse or promote
% products derived from this software without specific prior written permission.
%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR
% WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
% NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
% POSSIBILITY OF SUCH DAMAGE.
% ==========================================================================================================
-module(misultin_gen_server).
-behaviour(gen_server).
% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
% API
-export([start_link/1, stop/0]).
% internal functions
-export([handle_http/1]).
% records
-record(state, {
port
}).
% macros
-define(SERVER, ?MODULE).
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ API = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
Function : { ok , Pid } | ignore | { error , Error }
% Description: Starts the server.
start_link(Port) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [Port], []).
% Function: -> ok
% Description: Manually stops the server.
stop() ->
gen_server:cast(?SERVER, stop).
% ============================ /\ API ======================================================================
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ GEN_SERVER = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
% ----------------------------------------------------------------------------------------------------------
Function : - > { ok , State } | { ok , State , Timeout } | ignore | { stop , Reason }
% Description: Initiates the server.
% ----------------------------------------------------------------------------------------------------------
init([Port]) ->
% trap_exit -> this gen_server needs to be supervised
process_flag(trap_exit, true),
start misultin & set monitor
misultin:start_link([{port, Port}, {loop, fun(Req) -> handle_http(Req) end}]),
erlang:monitor(process, misultin),
{ok, #state{port = Port}}.
% ----------------------------------------------------------------------------------------------------------
Function : handle_call(Request , From , State ) - > { reply , Reply , State } | { reply , Reply , State , Timeout } |
{ noreply , State } | { noreply , State , Timeout } |
% {stop, Reason, Reply, State} | {stop, Reason, State}
% Description: Handling call messages.
% ----------------------------------------------------------------------------------------------------------
% handle_call generic fallback
handle_call(_Request, _From, State) ->
{reply, undefined, State}.
% ----------------------------------------------------------------------------------------------------------
Function : handle_cast(Msg , State ) - > { noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
% Description: Handling cast messages.
% ----------------------------------------------------------------------------------------------------------
% manual shutdown
handle_cast(stop, State) ->
{stop, normal, State};
% handle_cast generic fallback (ignore)
handle_cast(_Msg, State) ->
{noreply, State}.
% ----------------------------------------------------------------------------------------------------------
Function : handle_info(Info , State ) - > { noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
% Description: Handling all non call/cast messages.
% ----------------------------------------------------------------------------------------------------------
handle info when misultin server goes down - > take down misultin_gen_server too [ the supervisor will take everything up again ]
handle_info({'DOWN', _, _, {misultin, _}, _}, State) ->
{stop, normal, State};
% handle_info generic fallback (ignore)
handle_info(_Info, State) ->
{noreply, State}.
% ----------------------------------------------------------------------------------------------------------
% Function: terminate(Reason, State) -> void()
% Description: This function is called by a gen_server when it is about to terminate. When it returns,
% the gen_server terminates with Reason. The return value is ignored.
% ----------------------------------------------------------------------------------------------------------
terminate(_Reason, _State) ->
stop misultin
misultin:stop(),
terminated.
% ----------------------------------------------------------------------------------------------------------
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
% Description: Convert process state when code is changed.
% ----------------------------------------------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
= = = = = = = = = = = = = = = = = = = = = = = = = = = = /\ GEN_SERVER = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ INTERNAL FUNCTIONS = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
---------------------------- \/ misultin requests --------------------------------------------------------
handle_http(Req) ->
% get params depending on method
Method = Req:get(method),
case Method of
'GET' ->
Args = Req:parse_qs();
'POST' ->
Args = Req:parse_post()
end,
% build an XML with all parameters and values
BuildXml = fun({Param, Value}, Acc) ->
[lists:flatten(io_lib:format("<param><name>~s</name><value>~s</value></param>", [Param, Value]))|Acc]
end,
Xml = lists:flatten(lists:reverse(lists:foldl(BuildXml, [], Args))),
% output
Req:ok([{"Content-Type", "text/xml"}], "<misultin_test><method>~s</method>~s</misultin_test>", [Method, Xml]).
---------------------------- /\ misultin requests --------------------------------------------------------
% ============================ /\ INTERNAL FUNCTIONS =======================================================
| null | https://raw.githubusercontent.com/windock/erlyvideo/80fdc4175de86f0ab5fb6db20a5b3c2fd4d5a075/deps/misultin/examples/misultin_gen_server.erl | erlang | ==========================================================================================================
>-|-|-(°>
All rights reserved.
BSD License
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the authors nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
==========================================================================================================
gen_server callbacks
API
internal functions
records
macros
Description: Starts the server.
Function: -> ok
Description: Manually stops the server.
============================ /\ API ======================================================================
----------------------------------------------------------------------------------------------------------
Description: Initiates the server.
----------------------------------------------------------------------------------------------------------
trap_exit -> this gen_server needs to be supervised
----------------------------------------------------------------------------------------------------------
{stop, Reason, Reply, State} | {stop, Reason, State}
Description: Handling call messages.
----------------------------------------------------------------------------------------------------------
handle_call generic fallback
----------------------------------------------------------------------------------------------------------
Description: Handling cast messages.
----------------------------------------------------------------------------------------------------------
manual shutdown
handle_cast generic fallback (ignore)
----------------------------------------------------------------------------------------------------------
Description: Handling all non call/cast messages.
----------------------------------------------------------------------------------------------------------
handle_info generic fallback (ignore)
----------------------------------------------------------------------------------------------------------
Function: terminate(Reason, State) -> void()
Description: This function is called by a gen_server when it is about to terminate. When it returns,
the gen_server terminates with Reason. The return value is ignored.
----------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------
Description: Convert process state when code is changed.
----------------------------------------------------------------------------------------------------------
get params depending on method
build an XML with all parameters and values
output
============================ /\ INTERNAL FUNCTIONS ======================================================= | MISULTIN - Example : Running Misultin from a gen_server .
Copyright ( C ) 2009 , < >
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR
PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
-module(misultin_gen_server).
-behaviour(gen_server).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([start_link/1, stop/0]).
-export([handle_http/1]).
-record(state, {
port
}).
-define(SERVER, ?MODULE).
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ API = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
Function : { ok , Pid } | ignore | { error , Error }
start_link(Port) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [Port], []).
stop() ->
gen_server:cast(?SERVER, stop).
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ GEN_SERVER = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
Function : - > { ok , State } | { ok , State , Timeout } | ignore | { stop , Reason }
init([Port]) ->
process_flag(trap_exit, true),
start misultin & set monitor
misultin:start_link([{port, Port}, {loop, fun(Req) -> handle_http(Req) end}]),
erlang:monitor(process, misultin),
{ok, #state{port = Port}}.
Function : handle_call(Request , From , State ) - > { reply , Reply , State } | { reply , Reply , State , Timeout } |
{ noreply , State } | { noreply , State , Timeout } |
handle_call(_Request, _From, State) ->
{reply, undefined, State}.
Function : handle_cast(Msg , State ) - > { noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
Function : handle_info(Info , State ) - > { noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
handle info when misultin server goes down - > take down misultin_gen_server too [ the supervisor will take everything up again ]
handle_info({'DOWN', _, _, {misultin, _}, _}, State) ->
{stop, normal, State};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
stop misultin
misultin:stop(),
terminated.
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
= = = = = = = = = = = = = = = = = = = = = = = = = = = = /\ GEN_SERVER = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ INTERNAL FUNCTIONS = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
---------------------------- \/ misultin requests --------------------------------------------------------
handle_http(Req) ->
Method = Req:get(method),
case Method of
'GET' ->
Args = Req:parse_qs();
'POST' ->
Args = Req:parse_post()
end,
BuildXml = fun({Param, Value}, Acc) ->
[lists:flatten(io_lib:format("<param><name>~s</name><value>~s</value></param>", [Param, Value]))|Acc]
end,
Xml = lists:flatten(lists:reverse(lists:foldl(BuildXml, [], Args))),
Req:ok([{"Content-Type", "text/xml"}], "<misultin_test><method>~s</method>~s</misultin_test>", [Method, Xml]).
---------------------------- /\ misultin requests --------------------------------------------------------
|
d7767a547b466650ca120f7312d29e8ee3d1c56bb6a3333a237d758dc7030884 | rumblesan/improviz | LoadShaders.hs | --------------------------------------------------------------------------------
-- |
Module : LoadShaders
Copyright : ( c ) 2013
-- License : BSD3
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
Utilities for shader handling , adapted from which is ( c ) The
-- Red Book Authors.
--
--------------------------------------------------------------------------------
module Gfx.LoadShaders
( ShaderSource(..)
, ShaderInfo(..)
, loadShaders
) where
import Control.Exception
import Control.Monad
import qualified Data.ByteString as B
import Graphics.Rendering.OpenGL
--------------------------------------------------------------------------------
-- | The source of the shader source code.
data ShaderSource
= ByteStringSource B.ByteString
-- ^ The shader source code is directly given as a 'B.ByteString'.
| StringSource String
-- ^ The shader source code is directly given as a 'String'.
| FileSource FilePath
-- ^ The shader source code is located in the file at the given 'FilePath'.
deriving (Eq, Ord, Show)
getSource :: ShaderSource -> IO B.ByteString
getSource (ByteStringSource bs ) = return bs
getSource (StringSource str ) = return $ packUtf8 str
getSource (FileSource path) = B.readFile path
--------------------------------------------------------------------------------
-- | A description of a shader: The type of the shader plus its source code.
data ShaderInfo = ShaderInfo ShaderType ShaderSource
deriving (Eq, Ord, Show)
--------------------------------------------------------------------------------
-- | Create a new program object from the given shaders, throwing an
-- 'IOException' if something goes wrong.
loadShaders :: [ShaderInfo] -> IO Program
loadShaders infos =
createProgram `bracketOnError` deleteObjectName $ \program -> do
loadCompileAttach program infos
linkAndCheck program
return program
linkAndCheck :: Program -> IO ()
linkAndCheck = checked linkProgram linkStatus programInfoLog "link"
loadCompileAttach :: Program -> [ShaderInfo] -> IO ()
loadCompileAttach _ [] = return ()
loadCompileAttach program (ShaderInfo shType source : infos) =
createShader shType `bracketOnError` deleteObjectName $ \shader -> do
src <- getSource source
shaderSourceBS shader $= src
compileAndCheck shader
attachShader program shader
loadCompileAttach program infos
compileAndCheck :: Shader -> IO ()
compileAndCheck = checked compileShader compileStatus shaderInfoLog "compile"
checked
:: (t -> IO ())
-> (t -> GettableStateVar Bool)
-> (t -> GettableStateVar String)
-> String
-> t
-> IO ()
checked action getStatus getInfoLog message object = do
action object
ok <- get (getStatus object)
unless ok $ do
infoLog <- get (getInfoLog object)
fail (message ++ " log: " ++ infoLog)
| null | https://raw.githubusercontent.com/rumblesan/improviz/23e16ae7b2d55d2204417ec60c7cb6673a93df7d/src/Gfx/LoadShaders.hs | haskell | ------------------------------------------------------------------------------
|
License : BSD3
Stability : stable
Portability : portable
Red Book Authors.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| The source of the shader source code.
^ The shader source code is directly given as a 'B.ByteString'.
^ The shader source code is directly given as a 'String'.
^ The shader source code is located in the file at the given 'FilePath'.
------------------------------------------------------------------------------
| A description of a shader: The type of the shader plus its source code.
------------------------------------------------------------------------------
| Create a new program object from the given shaders, throwing an
'IOException' if something goes wrong. | Module : LoadShaders
Copyright : ( c ) 2013
Maintainer : < >
Utilities for shader handling , adapted from which is ( c ) The
module Gfx.LoadShaders
( ShaderSource(..)
, ShaderInfo(..)
, loadShaders
) where
import Control.Exception
import Control.Monad
import qualified Data.ByteString as B
import Graphics.Rendering.OpenGL
data ShaderSource
= ByteStringSource B.ByteString
| StringSource String
| FileSource FilePath
deriving (Eq, Ord, Show)
getSource :: ShaderSource -> IO B.ByteString
getSource (ByteStringSource bs ) = return bs
getSource (StringSource str ) = return $ packUtf8 str
getSource (FileSource path) = B.readFile path
data ShaderInfo = ShaderInfo ShaderType ShaderSource
deriving (Eq, Ord, Show)
loadShaders :: [ShaderInfo] -> IO Program
loadShaders infos =
createProgram `bracketOnError` deleteObjectName $ \program -> do
loadCompileAttach program infos
linkAndCheck program
return program
linkAndCheck :: Program -> IO ()
linkAndCheck = checked linkProgram linkStatus programInfoLog "link"
loadCompileAttach :: Program -> [ShaderInfo] -> IO ()
loadCompileAttach _ [] = return ()
loadCompileAttach program (ShaderInfo shType source : infos) =
createShader shType `bracketOnError` deleteObjectName $ \shader -> do
src <- getSource source
shaderSourceBS shader $= src
compileAndCheck shader
attachShader program shader
loadCompileAttach program infos
compileAndCheck :: Shader -> IO ()
compileAndCheck = checked compileShader compileStatus shaderInfoLog "compile"
checked
:: (t -> IO ())
-> (t -> GettableStateVar Bool)
-> (t -> GettableStateVar String)
-> String
-> t
-> IO ()
checked action getStatus getInfoLog message object = do
action object
ok <- get (getStatus object)
unless ok $ do
infoLog <- get (getInfoLog object)
fail (message ++ " log: " ++ infoLog)
|
12a87345d83537817a5ca0bec363c7131e5d3fba95427c460c3a39ed811c8f0b | clojure-emacs/clj-suitable | nrepl.clj | (ns suitable.nrepl
(:require cider.nrepl
cider.piggieback
[clojure.pprint :refer [cl-format pprint]]
nrepl.core
nrepl.server
[suitable.middleware :refer [wrap-complete]]))
;; -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
;; a la
1 . start nrepl server with piggieback
2 . get session
3 . send cljs start form ( e.g. figwheel )
4 . ... profit !
1 . start nrepl server with piggieback
(defonce clj-nrepl-server (atom nil))
(defn start-clj-nrepl-server []
(let [middlewares (map resolve cider.nrepl/cider-middleware)
middlewares (if-let [rf (resolve 'refactor-nrepl.middleware/wrap-refactor)]
(conj middlewares rf) middlewares)
handler (apply nrepl.server/default-handler middlewares)]
(pprint middlewares)
(reset! clj-nrepl-server (nrepl.server/start-server :handler handler :port 7888)))
(cl-format true "clj nrepl server started~%"))
;; -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
(defonce cljs-nrepl-server (atom nil))
(defonce cljs-send-msg (atom nil))
(defonce cljs-client (atom nil))
(defonce cljs-client-session (atom nil))
(defn start-cljs-nrepl-server []
(let [middlewares (map resolve cider.nrepl/cider-middleware)
middlewares (conj middlewares #'cider.piggieback/wrap-cljs-repl)
middlewares (conj middlewares #'wrap-complete)
;; handler (nrepl.server/default-handler #'cider.piggieback/wrap-cljs-repl)
handler (apply nrepl.server/default-handler middlewares)]
(reset! cljs-nrepl-server (nrepl.server/start-server :handler handler :port 7889)))
(cl-format true "cljs nrepl server started~%"))
(defn start-cljs-nrepl-client []
(let [conn (nrepl.core/connect :port 7889)
c (nrepl.core/client conn 1000)
sess (nrepl.core/client-session c)]
(reset! cljs-client c)
(reset! cljs-client-session sess)
(cl-format true "nrepl client started~%")
(reset! cljs-send-msg
(fn [msg] (let [response-seq (nrepl.core/message sess msg)]
(cl-format true "nrepl msg send~%")
(pprint (doall response-seq)))))))
(defn cljs-send-eval [code]
(@cljs-send-msg {:op :eval :code code}))
| null | https://raw.githubusercontent.com/clojure-emacs/clj-suitable/223f890ce6af23e764276c5d26303564a8cafd86/src/dev/suitable/nrepl.clj | clojure | -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
a la
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
handler (nrepl.server/default-handler #'cider.piggieback/wrap-cljs-repl) | (ns suitable.nrepl
(:require cider.nrepl
cider.piggieback
[clojure.pprint :refer [cl-format pprint]]
nrepl.core
nrepl.server
[suitable.middleware :refer [wrap-complete]]))
1 . start nrepl server with piggieback
2 . get session
3 . send cljs start form ( e.g. figwheel )
4 . ... profit !
1 . start nrepl server with piggieback
(defonce clj-nrepl-server (atom nil))
(defn start-clj-nrepl-server []
(let [middlewares (map resolve cider.nrepl/cider-middleware)
middlewares (if-let [rf (resolve 'refactor-nrepl.middleware/wrap-refactor)]
(conj middlewares rf) middlewares)
handler (apply nrepl.server/default-handler middlewares)]
(pprint middlewares)
(reset! clj-nrepl-server (nrepl.server/start-server :handler handler :port 7888)))
(cl-format true "clj nrepl server started~%"))
(defonce cljs-nrepl-server (atom nil))
(defonce cljs-send-msg (atom nil))
(defonce cljs-client (atom nil))
(defonce cljs-client-session (atom nil))
(defn start-cljs-nrepl-server []
(let [middlewares (map resolve cider.nrepl/cider-middleware)
middlewares (conj middlewares #'cider.piggieback/wrap-cljs-repl)
middlewares (conj middlewares #'wrap-complete)
handler (apply nrepl.server/default-handler middlewares)]
(reset! cljs-nrepl-server (nrepl.server/start-server :handler handler :port 7889)))
(cl-format true "cljs nrepl server started~%"))
(defn start-cljs-nrepl-client []
(let [conn (nrepl.core/connect :port 7889)
c (nrepl.core/client conn 1000)
sess (nrepl.core/client-session c)]
(reset! cljs-client c)
(reset! cljs-client-session sess)
(cl-format true "nrepl client started~%")
(reset! cljs-send-msg
(fn [msg] (let [response-seq (nrepl.core/message sess msg)]
(cl-format true "nrepl msg send~%")
(pprint (doall response-seq)))))))
(defn cljs-send-eval [code]
(@cljs-send-msg {:op :eval :code code}))
|
ba3adc729b5c1de730181f17366447a99b69498e5d3aa71ad6e2afedea171758 | dimitri/pgloader | mssql.lisp | ;;;
;;; Tools to handle the MS SQL Database
;;;
(in-package :pgloader.source.mssql)
(defmethod map-rows ((mssql copy-mssql) &key process-row-fn)
"Extract Mssql data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
(with-connection (*mssql-db* (source-db mssql))
(let* ((sql (format nil "SELECT ~{~a~^, ~} FROM [~a].[~a];"
(get-column-list mssql)
(schema-source-name (table-schema (source mssql)))
(table-source-name (source mssql)))))
(log-message :debug "~a" sql)
(handler-bind
((babel-encodings:end-of-input-in-character
#'(lambda (c)
(update-stats :data (target mssql) :errs 1)
(log-message :error "~a" c)
(invoke-restart 'mssql::use-nil)))
(babel-encodings:character-decoding-error
#'(lambda (c)
(update-stats :data (target mssql) :errs 1)
(let ((encoding
(babel-encodings:character-coding-error-encoding c))
(position
(babel-encodings:character-coding-error-position c))
(character
(aref (babel-encodings:character-coding-error-buffer c)
(babel-encodings:character-coding-error-position c))))
(log-message :error
"~a: Illegal ~a character starting at position ~a: ~a."
(table-schema (source mssql))
encoding
position
character))
(invoke-restart 'mssql::use-nil))))
(mssql::map-query-results sql
:row-fn process-row-fn
:connection (conn-handle *mssql-db*))))))
(defmethod copy-column-list ((mssql copy-mssql))
"We are sending the data in the MS SQL columns ordering here."
(mapcar #'apply-identifier-case (mapcar #'mssql-column-name (fields mssql))))
(defmethod fetch-metadata ((mssql copy-mssql)
(catalog catalog)
&key
materialize-views
create-indexes
foreign-keys
including
excluding)
"MS SQL introspection to prepare the migration."
(with-stats-collection ("fetch meta data"
:use-result-as-rows t
:use-result-as-read t
:section :pre)
(with-connection (*mssql-db* (source-db mssql))
If asked to MATERIALIZE VIEWS , now is the time to create them in MS
;; SQL, when given definitions rather than existing view names.
(when (and materialize-views (not (eq :all materialize-views)))
(create-matviews materialize-views mssql))
(fetch-columns catalog mssql
:including including
:excluding excluding)
;; fetch view (and their columns) metadata, covering comments too
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'matview-source-name materialize-views)))
(including
(loop :for (schema-name . view-name) :in view-names
:do (let* ((schema-name (or schema-name "dbo"))
(schema-entry
(or (assoc schema-name including :test #'string=)
(progn (push (cons schema-name nil) including)
(assoc schema-name including
:test #'string=)))))
(push-to-end view-name (cdr schema-entry))))))
(cond (view-names
(fetch-columns catalog mssql
:including including
:excluding excluding
:table-type :view))
((eq :all materialize-views)
(fetch-columns catalog mssql :table-type :view))))
(when create-indexes
(fetch-indexes catalog mssql
:including including
:excluding excluding))
(when foreign-keys
(fetch-foreign-keys catalog mssql
:including including
:excluding excluding))
;; return how many objects we're going to deal with in total
;; for stats collection
(+ (count-tables catalog)
(count-views catalog)
(count-indexes catalog)
(count-fkeys catalog))))
;; be sure to return the catalog itself
catalog)
(defmethod cleanup ((mssql copy-mssql) (catalog catalog) &key materialize-views)
"When there is a PostgreSQL error at prepare-pgsql-database step, we might
need to clean-up any view created in the MS SQL connection for the
migration purpose."
(when materialize-views
(with-connection (*mssql-db* (source-db mssql))
(drop-matviews materialize-views mssql))))
| null | https://raw.githubusercontent.com/dimitri/pgloader/3047c9afe141763e9e7ec05b7f2a6aa97cf06801/src/sources/mssql/mssql.lisp | lisp |
Tools to handle the MS SQL Database
SQL, when given definitions rather than existing view names.
fetch view (and their columns) metadata, covering comments too
return how many objects we're going to deal with in total
for stats collection
be sure to return the catalog itself |
(in-package :pgloader.source.mssql)
(defmethod map-rows ((mssql copy-mssql) &key process-row-fn)
"Extract Mssql data and call PROCESS-ROW-FN function with a single
argument (a list of column values) for each row."
(with-connection (*mssql-db* (source-db mssql))
(let* ((sql (format nil "SELECT ~{~a~^, ~} FROM [~a].[~a];"
(get-column-list mssql)
(schema-source-name (table-schema (source mssql)))
(table-source-name (source mssql)))))
(log-message :debug "~a" sql)
(handler-bind
((babel-encodings:end-of-input-in-character
#'(lambda (c)
(update-stats :data (target mssql) :errs 1)
(log-message :error "~a" c)
(invoke-restart 'mssql::use-nil)))
(babel-encodings:character-decoding-error
#'(lambda (c)
(update-stats :data (target mssql) :errs 1)
(let ((encoding
(babel-encodings:character-coding-error-encoding c))
(position
(babel-encodings:character-coding-error-position c))
(character
(aref (babel-encodings:character-coding-error-buffer c)
(babel-encodings:character-coding-error-position c))))
(log-message :error
"~a: Illegal ~a character starting at position ~a: ~a."
(table-schema (source mssql))
encoding
position
character))
(invoke-restart 'mssql::use-nil))))
(mssql::map-query-results sql
:row-fn process-row-fn
:connection (conn-handle *mssql-db*))))))
(defmethod copy-column-list ((mssql copy-mssql))
"We are sending the data in the MS SQL columns ordering here."
(mapcar #'apply-identifier-case (mapcar #'mssql-column-name (fields mssql))))
(defmethod fetch-metadata ((mssql copy-mssql)
(catalog catalog)
&key
materialize-views
create-indexes
foreign-keys
including
excluding)
"MS SQL introspection to prepare the migration."
(with-stats-collection ("fetch meta data"
:use-result-as-rows t
:use-result-as-read t
:section :pre)
(with-connection (*mssql-db* (source-db mssql))
If asked to MATERIALIZE VIEWS , now is the time to create them in MS
(when (and materialize-views (not (eq :all materialize-views)))
(create-matviews materialize-views mssql))
(fetch-columns catalog mssql
:including including
:excluding excluding)
(let* ((view-names (unless (eq :all materialize-views)
(mapcar #'matview-source-name materialize-views)))
(including
(loop :for (schema-name . view-name) :in view-names
:do (let* ((schema-name (or schema-name "dbo"))
(schema-entry
(or (assoc schema-name including :test #'string=)
(progn (push (cons schema-name nil) including)
(assoc schema-name including
:test #'string=)))))
(push-to-end view-name (cdr schema-entry))))))
(cond (view-names
(fetch-columns catalog mssql
:including including
:excluding excluding
:table-type :view))
((eq :all materialize-views)
(fetch-columns catalog mssql :table-type :view))))
(when create-indexes
(fetch-indexes catalog mssql
:including including
:excluding excluding))
(when foreign-keys
(fetch-foreign-keys catalog mssql
:including including
:excluding excluding))
(+ (count-tables catalog)
(count-views catalog)
(count-indexes catalog)
(count-fkeys catalog))))
catalog)
(defmethod cleanup ((mssql copy-mssql) (catalog catalog) &key materialize-views)
"When there is a PostgreSQL error at prepare-pgsql-database step, we might
need to clean-up any view created in the MS SQL connection for the
migration purpose."
(when materialize-views
(with-connection (*mssql-db* (source-db mssql))
(drop-matviews materialize-views mssql))))
|
1477ade4352e12693e386ff0895e990b871e84097a04e670644cda1202450a00 | PaulBatchelor/codex | pdosc.scm | ;; Generated from pdosc.org, from the frags collection
(define (pdosc freq amt ft)
(trd
(pdhalf
(phasor (freq) (param 0))
(amt))
ft))
| null | https://raw.githubusercontent.com/PaulBatchelor/codex/e6f6c9613c90b319f07863197498681d723debc2/frags/pdosc.scm | scheme | Generated from pdosc.org, from the frags collection | (define (pdosc freq amt ft)
(trd
(pdhalf
(phasor (freq) (param 0))
(amt))
ft))
|
1012448a485596885216d5afca8d6a494abca1eb951076fa9347571140ae1fc7 | cwtsteven/TSD | alt_sum.ml | open Tsd
let state_machine init trans input =
let init = lift init and trans = lift trans in
let state = cell [%dfg init] in
state <~ [%dfg trans state input];
state
let alt = state_machine 1 (fun s _ -> 1 - s) (lift 0)
let sum inp = state_machine 0 (fun s i -> i + s) inp
let alt_sum = sum alt
let _ =
let n = int_of_string Sys.argv.(1) in
for i = 1 to n do
step();
done | null | https://raw.githubusercontent.com/cwtsteven/TSD/32bd2cbca6d445ff6b0caecdbb2775de61fdfc6d/benchmarks/alt_sum/alt_sum.ml | ocaml | open Tsd
let state_machine init trans input =
let init = lift init and trans = lift trans in
let state = cell [%dfg init] in
state <~ [%dfg trans state input];
state
let alt = state_machine 1 (fun s _ -> 1 - s) (lift 0)
let sum inp = state_machine 0 (fun s i -> i + s) inp
let alt_sum = sum alt
let _ =
let n = int_of_string Sys.argv.(1) in
for i = 1 to n do
step();
done |
|
0e1f7923c43be27b335f68cb692fa37cfd2e704a20c0017d50d05d5e10ff1456 | nathanmarz/cascalog | testing.clj | (ns cascalog.logic.testing
(:require [clojure.test :refer :all]
[cascalog.api :refer :all]
[jackknife.seq :refer (collectify multi-set)]
[cascalog.logic.platform :as platform]))
(defn doublify
"Takes a sequence of tuples and converts all numbers to doubles.
For example:
(doublify [[1 :a] [2 :b]])
[ [ 1.0 : a ] [ 2.0 : b ] ] "
[tuples]
(vec (for [t tuples]
(into [] (map (fn [v] (if (number? v) (double v) v))
(collectify t))))))
(defn is-specs= [set1 set2]
(every? true? (doall
(map (fn [input output]
(let [input (multi-set (doublify input))
output (multi-set (doublify output))]
(is (= input output))))
set1 set2))))
(defn is-tuplesets= [set1 set2]
(is-specs= [set1] [set2]))
(defprotocol ITestable
(process?- [_ bindings]
"Used in testing, returns the result from processing the bindings"))
(defn test?- [& bindings]
(let [[specs out-tuples] (process?- platform/*platform* bindings)]
(is-specs= specs out-tuples)))
(defmacro test?<- [& args]
(let [[begin body] (if (keyword? (first args))
(split-at 2 args)
(split-at 1 args))]
`(test?- ~@begin (<- ~@body))))
(defmacro thrown?<- [error & body]
`(is (~'thrown? ~error (<- ~@body))))
| null | https://raw.githubusercontent.com/nathanmarz/cascalog/deaad977aa98985f68f3d1cc3e081d345184c0c8/cascalog-core/src/clj/cascalog/logic/testing.clj | clojure | (ns cascalog.logic.testing
(:require [clojure.test :refer :all]
[cascalog.api :refer :all]
[jackknife.seq :refer (collectify multi-set)]
[cascalog.logic.platform :as platform]))
(defn doublify
"Takes a sequence of tuples and converts all numbers to doubles.
For example:
(doublify [[1 :a] [2 :b]])
[ [ 1.0 : a ] [ 2.0 : b ] ] "
[tuples]
(vec (for [t tuples]
(into [] (map (fn [v] (if (number? v) (double v) v))
(collectify t))))))
(defn is-specs= [set1 set2]
(every? true? (doall
(map (fn [input output]
(let [input (multi-set (doublify input))
output (multi-set (doublify output))]
(is (= input output))))
set1 set2))))
(defn is-tuplesets= [set1 set2]
(is-specs= [set1] [set2]))
(defprotocol ITestable
(process?- [_ bindings]
"Used in testing, returns the result from processing the bindings"))
(defn test?- [& bindings]
(let [[specs out-tuples] (process?- platform/*platform* bindings)]
(is-specs= specs out-tuples)))
(defmacro test?<- [& args]
(let [[begin body] (if (keyword? (first args))
(split-at 2 args)
(split-at 1 args))]
`(test?- ~@begin (<- ~@body))))
(defmacro thrown?<- [error & body]
`(is (~'thrown? ~error (<- ~@body))))
|
|
5569f1f7d49860e7de90e85157dd4358065e2fa4129200c4e78b485bbc600968 | CloudI/CloudI | epgsql_cmd_batch.erl | %% @doc Execute multiple extended queries in a single network round-trip
%%
There are 2 kinds of interface :
%% <ol>
%% <li>To execute multiple queries, each with it's own `statement()'</li>
%% <li>To execute multiple queries, but by binding different parameters to the
%% same `statement()'</li>
%% </ol>
%% ```
%% > {Bind
< BindComplete
%% > Execute
< DataRow *
%% < CommandComplete}*
%% > Sync
%% < ReadyForQuery
%% '''
-module(epgsql_cmd_batch).
-behaviour(epgsql_command).
-export([init/1, execute/2, handle_message/4]).
-export_type([arguments/0, response/0]).
-include("epgsql.hrl").
-include("epgsql_protocol.hrl").
-record(batch,
{batch :: [ [epgsql:bind_param()] ] | [{#statement{}, [epgsql:bind_param()]}],
statement :: #statement{} | undefined,
decoder :: epgsql_wire:row_decoder() | undefined}).
-type arguments() ::
{epgsql:statement(), [ [epgsql:bind_param()] ]} |
[{epgsql:statement(), [epgsql:bind_param()]}].
-type response() :: [{ok, Count :: non_neg_integer(), Rows :: [tuple()]}
| {ok, Count :: non_neg_integer()}
| {ok, Rows :: [tuple()]}
| {error, epgsql:query_error()}
].
-type state() :: #batch{}.
-spec init(arguments()) -> state().
init({#statement{} = Statement, Batch}) ->
#batch{statement = Statement,
batch = Batch};
init(Batch) when is_list(Batch) ->
#batch{batch = Batch}.
execute(Sock, #batch{batch = Batch, statement = undefined} = State) ->
Codec = epgsql_sock:get_codec(Sock),
Commands =
lists:foldr(
fun({Statement, Parameters}, Acc) ->
#statement{name = StatementName,
columns = Columns,
types = Types} = Statement,
BinFormats = epgsql_wire:encode_formats(Columns),
add_command(StatementName, Types, Parameters, BinFormats, Codec, Acc)
end,
[epgsql_wire:encode_sync()],
Batch),
{send_multi, Commands, Sock, State};
execute(Sock, #batch{batch = Batch,
statement = #statement{name = StatementName,
columns = Columns,
types = Types}} = State) ->
Codec = epgsql_sock:get_codec(Sock),
BinFormats = epgsql_wire:encode_formats(Columns),
%% TODO: build some kind of encoder and reuse it for each batch item
Commands =
lists:foldr(
fun(Parameters, Acc) ->
add_command(StatementName, Types, Parameters, BinFormats, Codec, Acc)
end,
[epgsql_wire:encode_sync()],
Batch),
{send_multi, Commands, Sock, State}.
add_command(StmtName, Types, Params, BinFormats, Codec, Acc) ->
TypedParameters = lists:zip(Types, Params),
BinParams = epgsql_wire:encode_parameters(TypedParameters, Codec),
[epgsql_wire:encode_bind("", StmtName, BinParams, BinFormats),
epgsql_wire:encode_execute("", 0) | Acc].
handle_message(?BIND_COMPLETE, <<>>, Sock, State) ->
Columns = current_cols(State),
Codec = epgsql_sock:get_codec(Sock),
Decoder = epgsql_wire:build_decoder(Columns, Codec),
{noaction, Sock, State#batch{decoder = Decoder}};
handle_message(?DATA_ROW, <<_Count:?int16, Bin/binary>>, Sock,
#batch{decoder = Decoder} = State) ->
Row = epgsql_wire:decode_data(Bin, Decoder),
{add_row, Row, Sock, State};
handle_message(?EMPTY_QUERY , _ , , _ State ) - >
Sock1 = epgsql_sock : add_result(Sock , { complete , empty } , { ok , [ ] , [ ] } ) ,
{ noaction , Sock1 } ;
handle_message(?COMMAND_COMPLETE, Bin, Sock,
#batch{batch = [_ | Batch]} = State) ->
Columns = current_cols(State),
Complete = epgsql_wire:decode_complete(Bin),
Rows = epgsql_sock:get_rows(Sock),
Result = case Complete of
{_, Count} when Columns == [] ->
{ok, Count};
{_, Count} ->
{ok, Count, Rows};
_ ->
{ok, Rows}
end,
{add_result, Result, {complete, Complete}, Sock, State#batch{batch = Batch}};
handle_message(?READY_FOR_QUERY, _Status, Sock, _State) ->
Results = epgsql_sock:get_results(Sock),
{finish, Results, done, Sock};
handle_message(?ERROR, Error, Sock, #batch{batch = [_ | Batch]} = State) ->
Result = {error, Error},
{add_result, Result, Result, Sock, State#batch{batch = Batch}};
handle_message(_, _, _, _) ->
unknown.
%% Helpers
current_cols(Batch) ->
#statement{columns = Columns} = current_stmt(Batch),
Columns.
current_stmt(#batch{batch = [{Stmt, _} | _], statement = undefined}) ->
Stmt;
current_stmt(#batch{statement = #statement{} = Stmt}) ->
Stmt.
| null | https://raw.githubusercontent.com/CloudI/CloudI/3e45031c7ee3e974ead2612ea7dd06c9edf973c9/src/external/cloudi_x_epgsql/src/epgsql_cmd_batch.erl | erlang | @doc Execute multiple extended queries in a single network round-trip
<ol>
<li>To execute multiple queries, each with it's own `statement()'</li>
<li>To execute multiple queries, but by binding different parameters to the
same `statement()'</li>
</ol>
```
> {Bind
> Execute
< CommandComplete}*
> Sync
< ReadyForQuery
'''
TODO: build some kind of encoder and reuse it for each batch item
Helpers | There are 2 kinds of interface :
< BindComplete
< DataRow *
-module(epgsql_cmd_batch).
-behaviour(epgsql_command).
-export([init/1, execute/2, handle_message/4]).
-export_type([arguments/0, response/0]).
-include("epgsql.hrl").
-include("epgsql_protocol.hrl").
-record(batch,
{batch :: [ [epgsql:bind_param()] ] | [{#statement{}, [epgsql:bind_param()]}],
statement :: #statement{} | undefined,
decoder :: epgsql_wire:row_decoder() | undefined}).
-type arguments() ::
{epgsql:statement(), [ [epgsql:bind_param()] ]} |
[{epgsql:statement(), [epgsql:bind_param()]}].
-type response() :: [{ok, Count :: non_neg_integer(), Rows :: [tuple()]}
| {ok, Count :: non_neg_integer()}
| {ok, Rows :: [tuple()]}
| {error, epgsql:query_error()}
].
-type state() :: #batch{}.
-spec init(arguments()) -> state().
init({#statement{} = Statement, Batch}) ->
#batch{statement = Statement,
batch = Batch};
init(Batch) when is_list(Batch) ->
#batch{batch = Batch}.
execute(Sock, #batch{batch = Batch, statement = undefined} = State) ->
Codec = epgsql_sock:get_codec(Sock),
Commands =
lists:foldr(
fun({Statement, Parameters}, Acc) ->
#statement{name = StatementName,
columns = Columns,
types = Types} = Statement,
BinFormats = epgsql_wire:encode_formats(Columns),
add_command(StatementName, Types, Parameters, BinFormats, Codec, Acc)
end,
[epgsql_wire:encode_sync()],
Batch),
{send_multi, Commands, Sock, State};
execute(Sock, #batch{batch = Batch,
statement = #statement{name = StatementName,
columns = Columns,
types = Types}} = State) ->
Codec = epgsql_sock:get_codec(Sock),
BinFormats = epgsql_wire:encode_formats(Columns),
Commands =
lists:foldr(
fun(Parameters, Acc) ->
add_command(StatementName, Types, Parameters, BinFormats, Codec, Acc)
end,
[epgsql_wire:encode_sync()],
Batch),
{send_multi, Commands, Sock, State}.
add_command(StmtName, Types, Params, BinFormats, Codec, Acc) ->
TypedParameters = lists:zip(Types, Params),
BinParams = epgsql_wire:encode_parameters(TypedParameters, Codec),
[epgsql_wire:encode_bind("", StmtName, BinParams, BinFormats),
epgsql_wire:encode_execute("", 0) | Acc].
handle_message(?BIND_COMPLETE, <<>>, Sock, State) ->
Columns = current_cols(State),
Codec = epgsql_sock:get_codec(Sock),
Decoder = epgsql_wire:build_decoder(Columns, Codec),
{noaction, Sock, State#batch{decoder = Decoder}};
handle_message(?DATA_ROW, <<_Count:?int16, Bin/binary>>, Sock,
#batch{decoder = Decoder} = State) ->
Row = epgsql_wire:decode_data(Bin, Decoder),
{add_row, Row, Sock, State};
handle_message(?EMPTY_QUERY , _ , , _ State ) - >
Sock1 = epgsql_sock : add_result(Sock , { complete , empty } , { ok , [ ] , [ ] } ) ,
{ noaction , Sock1 } ;
handle_message(?COMMAND_COMPLETE, Bin, Sock,
#batch{batch = [_ | Batch]} = State) ->
Columns = current_cols(State),
Complete = epgsql_wire:decode_complete(Bin),
Rows = epgsql_sock:get_rows(Sock),
Result = case Complete of
{_, Count} when Columns == [] ->
{ok, Count};
{_, Count} ->
{ok, Count, Rows};
_ ->
{ok, Rows}
end,
{add_result, Result, {complete, Complete}, Sock, State#batch{batch = Batch}};
handle_message(?READY_FOR_QUERY, _Status, Sock, _State) ->
Results = epgsql_sock:get_results(Sock),
{finish, Results, done, Sock};
handle_message(?ERROR, Error, Sock, #batch{batch = [_ | Batch]} = State) ->
Result = {error, Error},
{add_result, Result, Result, Sock, State#batch{batch = Batch}};
handle_message(_, _, _, _) ->
unknown.
current_cols(Batch) ->
#statement{columns = Columns} = current_stmt(Batch),
Columns.
current_stmt(#batch{batch = [{Stmt, _} | _], statement = undefined}) ->
Stmt;
current_stmt(#batch{statement = #statement{} = Stmt}) ->
Stmt.
|
6205511b3bf356d05f6f262a2cdf7c957e478b4c5074819f1d36cb3a190da62c | chaoxu/fancy-walks | A.hs | {-# OPTIONS_GHC -O2 #-}
import Data.List
import Data.Maybe
import Data.Char
import Data.Array.IArray
import Data.Array.Unboxed (UArray)
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq, (<|), (|>), (><), ViewL(..), ViewR(..))
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Graph
import Control.Parallel.Strategies
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
desired <- readBool
interior <- replicateM (n `div` 2) $ (,) <$> readInt <*> readBool
leaf <- replicateM (n `div` 2 + 1) $ readBool
return (n, desired, interior, leaf)
where
readBool = (/=0) <$> readInt
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
readLine = state $ BS.span (not . isEoln) . BS.dropWhile isEoln
isEoln ch = ch == '\r' || ch == '\n'
main = do
input <- evalState parseInput <$> BS.getContents
let output = parMap rdeepseq solve input
forM_ (zip [1..] output) $ \(cas, result) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ result
solve :: (Int, Bool, [(Int, Bool)], [Bool]) -> String
solve (n, desired, interior, leaf) = show $ minStep (1, desired)
where
arrFunc = listArray (1, n `div` 2) $ map fst interior :: Array Int Int
arr = listArray (1, n) $ map snd interior ++ leaf :: Array Int Bool
minStep :: (Int, Bool) -> BoundedInteger
minStep = (cache !)
where
bnds = ((1, False), (n, True))
cache = listArray bnds $ map go $ range bnds :: Array (Int, Bool) BoundedInteger
go (p, bool) | p * 2 > n = if arr ! p == bool then 0 else maxBound
go (p, bool)
| arr ! p = minimum $ map (+1) costChange ++ costKeep
| otherwise = minimum costKeep
where
leftNode = p * 2
rightNode = p * 2 + 1
costAnd = [ minStep (leftNode, lbool) + minStep (rightNode, rbool)
| lbool <- [False, True]
, rbool <- [False, True]
, (lbool && rbool) == bool
]
costOr = [ minStep (leftNode, lbool) + minStep (rightNode, rbool)
| lbool <- [False, True]
, rbool <- [False, True]
, (lbool || rbool) == bool
]
(costKeep, costChange) = if arrFunc ! p == 1 then (costAnd, costOr) else (costOr, costAnd)
data BoundedInteger = Infinite Bool | Finite Integer deriving Eq
instance Show BoundedInteger where
" Postive Infinite "
show (Infinite False) = "Negative Infinite"
show (Finite a) = show a
instance Num BoundedInteger where
+ oo + -oo ? , I 'm crazy
_ + Infinite v = Infinite v
Finite a + Finite b = Finite (a + b)
Finite 0 * _ = 0
_ * Finite 0 = 0
Infinite v * a = Infinite (v == (a > 0))
a * Infinite v = Infinite (v == (a > 0))
Finite a * Finite b = Finite (a * b)
negate (Infinite v) = Infinite (not v)
negate (Finite v) = Finite (negate v)
signum (Infinite True) = 1
signum (Infinite False) = -1
signum (Finite a) = Finite $ signum a
abs (Infinite _) = Infinite True
abs (Finite v) = Finite (abs v)
fromInteger = Finite
instance Ord BoundedInteger where
Finite a `compare` Finite b = compare a b
Finite _ `compare` Infinite v = if v then LT else GT
Infinite v `compare` Finite _ = if v then GT else LT
Infinite a `compare` Infinite b
| a == b = EQ
| a = GT
| otherwise = LT
instance Bounded BoundedInteger where
minBound = Infinite False
maxBound = Infinite True
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/code.google.com/codejam/Google%20Code%20Jam%202008/Round%202/A.hs | haskell | # OPTIONS_GHC -O2 # |
import Data.List
import Data.Maybe
import Data.Char
import Data.Array.IArray
import Data.Array.Unboxed (UArray)
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq, (<|), (|>), (><), ViewL(..), ViewR(..))
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Graph
import Control.Parallel.Strategies
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
desired <- readBool
interior <- replicateM (n `div` 2) $ (,) <$> readInt <*> readBool
leaf <- replicateM (n `div` 2 + 1) $ readBool
return (n, desired, interior, leaf)
where
readBool = (/=0) <$> readInt
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
readLine = state $ BS.span (not . isEoln) . BS.dropWhile isEoln
isEoln ch = ch == '\r' || ch == '\n'
main = do
input <- evalState parseInput <$> BS.getContents
let output = parMap rdeepseq solve input
forM_ (zip [1..] output) $ \(cas, result) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ result
solve :: (Int, Bool, [(Int, Bool)], [Bool]) -> String
solve (n, desired, interior, leaf) = show $ minStep (1, desired)
where
arrFunc = listArray (1, n `div` 2) $ map fst interior :: Array Int Int
arr = listArray (1, n) $ map snd interior ++ leaf :: Array Int Bool
minStep :: (Int, Bool) -> BoundedInteger
minStep = (cache !)
where
bnds = ((1, False), (n, True))
cache = listArray bnds $ map go $ range bnds :: Array (Int, Bool) BoundedInteger
go (p, bool) | p * 2 > n = if arr ! p == bool then 0 else maxBound
go (p, bool)
| arr ! p = minimum $ map (+1) costChange ++ costKeep
| otherwise = minimum costKeep
where
leftNode = p * 2
rightNode = p * 2 + 1
costAnd = [ minStep (leftNode, lbool) + minStep (rightNode, rbool)
| lbool <- [False, True]
, rbool <- [False, True]
, (lbool && rbool) == bool
]
costOr = [ minStep (leftNode, lbool) + minStep (rightNode, rbool)
| lbool <- [False, True]
, rbool <- [False, True]
, (lbool || rbool) == bool
]
(costKeep, costChange) = if arrFunc ! p == 1 then (costAnd, costOr) else (costOr, costAnd)
data BoundedInteger = Infinite Bool | Finite Integer deriving Eq
instance Show BoundedInteger where
" Postive Infinite "
show (Infinite False) = "Negative Infinite"
show (Finite a) = show a
instance Num BoundedInteger where
+ oo + -oo ? , I 'm crazy
_ + Infinite v = Infinite v
Finite a + Finite b = Finite (a + b)
Finite 0 * _ = 0
_ * Finite 0 = 0
Infinite v * a = Infinite (v == (a > 0))
a * Infinite v = Infinite (v == (a > 0))
Finite a * Finite b = Finite (a * b)
negate (Infinite v) = Infinite (not v)
negate (Finite v) = Finite (negate v)
signum (Infinite True) = 1
signum (Infinite False) = -1
signum (Finite a) = Finite $ signum a
abs (Infinite _) = Infinite True
abs (Finite v) = Finite (abs v)
fromInteger = Finite
instance Ord BoundedInteger where
Finite a `compare` Finite b = compare a b
Finite _ `compare` Infinite v = if v then LT else GT
Infinite v `compare` Finite _ = if v then GT else LT
Infinite a `compare` Infinite b
| a == b = EQ
| a = GT
| otherwise = LT
instance Bounded BoundedInteger where
minBound = Infinite False
maxBound = Infinite True
|
00be6314a7e957692e029ed260701159a9adb190e55ca974433a86a845607640 | BioHaskell/hPDB | PDBEventPrinter.hs | # LANGUAGE OverloadedStrings , PatternGuards , CPP #
-- | Low-level output routines: printing any 'PDBEvent'.
module Bio.PDB.EventParser.PDBEventPrinter(print, isPrintable)
where
import qualified Prelude(String)
import Prelude((++), Bool(True, False), (.), ($), Int, (+), (>), (<), show, Double)
import Text.Printf(hPrintf)
import System.IO(Handle, IO, stderr)
import qualified Data.ByteString.Char8 as BS
import Data.String(IsString)
import Control.Monad(mapM_, return)
import Bio.PDB.EventParser.PDBEvents
import qualified Bio.PDB.EventParser.ExperimentalMethods as ExperimentalMethods
#ifdef HAVE_TEXT_FORMAT
import qualified Data.ByteString.Lazy as L
import Data.Text.Lazy.Encoding(encodeUtf8)
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.Lazy as LT
import Data.Text.Lazy.Builder as B
import qualified Data.Text.Format as F
import qualified Data.Text.Buildable as BD
#endif
-- | Prints a PDBEvent to a filehandle.
print :: Handle -> PDBEvent -> IO ()
print handle ATOM { no = num,
atomtype = atype,
restype = rtype,
chain = c,
resid = rid,
resins = rins,
altloc = al,
coords = V3 x y z,
occupancy = occ,
bfactor = bf,
segid = sid,
elt = e,
charge = ch,
hetatm = isHet
} =
#ifndef HAVE_TEXT_FORMAT
hPrintf handle
"%6s%5d %-3s%c%-3s %c%4d%c %8.3f%8.3f%8.3f%6.2f%6.2f %-4s%-2s%-2s\n"
recname
num (BS.unpack atype) al (BS.unpack rtype) c rid rins
x y z occ bf
(BS.unpack sid) (BS.unpack e) (BS.unpack ch)
where
recname :: Prelude.String
recname = if isHet then "HETATM" else "ATOM "
#else
L.hPutStr handle . encodeUtf8 $ F.format "{}{} {}{}{} {}{}{} {}{}{}{}{} {}{}{}\n" args
where
ra justifies a ByteString to the right
ra i = F.right i ' ' . decodeUtf8
-- la justifies anything else (floating point or integer number) to the left
la i = F.left i ' '
args = (recname, la 5 num, specfmt 4 3 atype,
conv al, ra 3 rtype,
conv c, la 4 rid,
conv rins,
ca x, ca y, ca z, pa occ, pa bf,
ra 4 sid, ra 2 e, ra 2 ch)
ca f = la 8 $ F.fixed 3 f -- align coordinate float
pa f = la 6 $ F.fixed 2 f -- align property float
recname = fromText $ if isHet then "HETATM" else "ATOM "
conv : : Builder
conv x = fromString [x]
specfmt mimics erratic alignment of PDB atom types : up to three characters are justified left , after prefixing by single space .
specfmt i j a = B.fromLazyText . LT.justifyRight i ' ' . LT.justifyLeft j ' ' . B.toLazyText . fromText . decodeUtf8 $ a
#endif
TODO : Note that this ANISOU code will be buggy for 4 - letter atom codes that happen ( rarely . )
print handle ANISOU { no = n,
atomtype = atype,
restype = rtype,
chain = c,
resid = rid,
resins = rins,
altloc = al,
u_1_1 = u11,
u_2_2 = u22,
u_3_3 = u33,
u_1_2 = u12,
u_1_3 = u13,
u_2_3 = u23,
segid = sid,
elt = e,
charge = ch
} = hPrintf handle
"ANISOU%5d %-3s%c%-3s %c%4d%c %7d%7d%7d%7d%7d%7d %-4s%-2s%-2s\n"
n (BS.unpack atype) al (BS.unpack rtype) c rid rins
u11 u22 u33 u12 u13 u23
(BS.unpack sid) (BS.unpack e) (BS.unpack ch)
print handle SIGUIJ { no = n,
atomtype = atype,
restype = rtype,
chain = c,
resid = rid,
resins = rins,
altloc = al,
u_1_1 = u11,
u_2_2 = u22,
u_3_3 = u33,
u_1_2 = u12,
u_1_3 = u13,
u_2_3 = u23,
segid = sid,
elt = e,
charge = ch
} = hPrintf handle
"SIGUIJ%5d %-3s%c%-3s %c%4d%c %7d%7d%7d%7d%7d%7d %-4s%-2s%-2s\n"
n (BS.unpack atype) al (BS.unpack rtype) c rid rins
u11 u22 u33 u12 u13 u23
(BS.unpack sid) (BS.unpack e) (BS.unpack ch)
print handle (HEADER { classification = c,
depDate = d,
idCode = i }) = hPrintf handle "HEADER %-40s%9s %4s\n"
(BS.unpack c)
(BS.unpack d)
(BS.unpack i)
print handle MODEL { num=n } = hPrintf handle "MODEL %4d\n" n
print handle END = hPrintf handle "END\n"
print handle ENDMDL = hPrintf handle "ENDMDL\n"
print handle CONECT { atoms=ats } = do hPrintf handle "CONECT"
mapM_ (hPrintf handle "%5d") ats
hPrintf handle "\n"
print handle TER { num = n ,
resname = r ,
chain = ch ,
resid = resi ,
insCode = i } = hPrintf handle
"TER %5d %c%-3s %c%4d\n" n i (BS.unpack r) ch resi
print handle MASTER { numRemark = nr,
numHet = nhet,
numHelix = nhel,
numSheet = nsheet,
numTurn = nturn,
numSite = nsite,
numXform = nxform,
numAts = nats,
numMaster = nmaster,
numConect = ncon,
numSeqres = nseq } = do hPrintf handle "MASTER %5d 0" nr
mapM_ (hPrintf handle "%5d")
[nhet, nhel, nsheet,
nturn, nsite, nxform, nats,
nmaster, ncon, nseq]
hPrintf handle "\n"
print handle REMARK { num = n,
text = t } = mapM_ (hPrintf handle "REMARK %4d %-80s\n" n .
BS.unpack) t
KEYWDS { continuation : : ! Int ,
aList : : ! [ String ] } |
AUTHOR { continuation : : ! Int ,
aList : : ! [ String ] } |
REMARK { num : : ! Int ,
text : : ! [ String ] }
aList :: ![String] } |
AUTHOR { continuation :: !Int,
aList :: ![String] } |
REMARK { num :: !Int,
text :: ![String] } -}
print handle KEYWDS { continuation = c,
aList = l } = printList handle "KEYWDS" "," c l
print handle AUTHOR { continuation = c,
aList = l } = printList handle "AUTHOR" "," c l
print handle EXPDTA { continuation = c,
expMethods = e } = mapM_ (hPrintf handle "EXPDTA %c%-80s\n"
(showContinuation c) .
BS.unpack .
ExperimentalMethods.showExpMethod) e
print handle TITLE { continuation = c,
title = t } = hPrintf handle "TITLE %c%-80s\n"
(showContinuation c)
(contd c $ BS.unpack t)
print handle SEQRES { serial = sn,
chain = ch,
num = n,
resList = l } = do hPrintf handle "SEQRES %3d %c %4d " sn ch n
mapM_ (hPrintf handle "%3s " .
BS.unpack) l
-- TODO: split when longer than X residues
hPrintf handle "\n"
print handle COMPND { cont = c,
tokens = ts } = printSpecList handle "COMPND" c ts
print handle SOURCE { cont = c,
tokens = ts } = printSpecList handle "SOURCE" c ts
print handle SPLIT { cont = c,
codes = cs } = printList handle "SPLIT " " " c cs
print handle ORIGXn { n = n,
o = vecs,
t = f } = printMatrix handle "ORIGX" n vecs f
print handle SCALEn { n = n,
o = vecs,
t = f } = printMatrix handle "SCALE" n vecs f
print handle CRYST1 { a = av,
b = bv,
c = cv,
alpha = aa,
beta = ba,
gamma = ga,
spcGrp = grp,
zValue = z } = hPrintf handle
"CRYST1 %8.3f %8.3f %8.3f %6.2f %6.2f %6.2f %10s %4d\n"
av bv cv
aa ba ga
(BS.unpack grp)
z
print handle TVECT { serial = sn,
vec = V3 a b c } = hPrintf handle "TVECT %4d%10.5f%10.5f%10.5f\n" sn a b c
print handle JRNL { cont = c,
content = contents,
isFirst = aJRNL } = printJRNL contents
where
header :: String
header = if aJRNL then "JRNL " else "REMARK 1 "
[contd] = if c > 0 then show c else " "
printJRNL ((k,v):cs) = hPrintf handle "%12s%4s %c %s\n"
(BS.unpack header)
(BS.unpack k)
contd
(BS.unpack v)
-- print errors:
print handle (PDBParseError c r s) = hPrintf stderr "ERROR: In line %d column %d: %s" c r
(BS.unpack s)
-- print special case for missing...
print handle e = hPrintf stderr "UNIMPLEMENTED: %s\n"
(show e)
-- | For indicating continuation of the record in previous line as a digit with line number.
showContinuation 0 = ' '
showContinuation x | [c] <- show x = c
-- | For indicating continuation of the text in previous line by indent.
contd 0 s = s
contd x s = ' ' : s
| Reports whether a given PDB record is already printable
-- [temporary method, they all should be.]
-- Including errors.
isPrintable ATOM {} = True
isPrintable HEADER {} = True
isPrintable END {} = True
isPrintable ENDMDL {} = True
isPrintable MODEL {} = True
isPrintable CONECT {} = True
isPrintable TER {} = True
isPrintable MASTER {} = True
-- TODO: below
isPrintable AUTHOR {} = True
isPrintable KEYWDS {} = True
--isPrintable JRNL {} = True
isPrintable TITLE {} = True
isPrintable REMARK {} = True
isPrintable EXPDTA {} = True
isPrintable SEQRES {} = True
isPrintable COMPND {} = True
isPrintable SOURCE {} = True
isPrintable SPLIT {} = True
isPrintable ORIGXn {} = True
isPrintable SCALEn {} = True
isPrintable CRYST1 {} = True
isPrintable ANISOU {} = True
isPrintable SIGUIJ {} = True
isPrintable TVECT {} = True
isPrintable JRNL {} = True
isPrintable (PDBParseError c r s) = True
isPrintable _ = False
| Prints a list of words as a PDB speclist record ( see ' Bio . PDB.EventParser . ParseSpecListRecord ' . )
printSpecList handle rectype c ((k, v): ls) = hPrintf handle "%6s %c%-s:%-s;\n"
(BS.unpack rectype)
(showContinuation c)
(contd c $ BS.unpack k)
(BS.unpack v)
| Prints a list of words as a PDB list record ( see ' Bio . PDB.EventParser . ParseSpecListRecord ' . )
printList :: Handle -> BS.ByteString -> BS.ByteString -> Int -> [BS.ByteString] -> IO ()
printList handle label sep c l = hPrintf handle "%6s %c %-80s\n" (BS.unpack label)
(showContinuation c)
str
where str = BS.unpack (BS.intercalate sep l)
-- | Prints a matrix given as a list of 'V3 Double's.
printMatrix :: Handle -> BS.ByteString -> Int -> [V3 Double] -> [Double] -> IO ()
printMatrix handle ident n [] [] = return ()
printMatrix handle ident n (vec:vecs) (f:fs) = do hPrintf handle "%5s%c " (BS.unpack ident) cn
mapM_ printEntry [a, b, c]
hPrintf handle " %9.5f\n" f
printMatrix handle ident (n+1) vecs fs
where [cn] = show n
printEntry :: Double -> IO ()
printEntry = hPrintf handle "%10.6f"
V3 a b c = vec
| null | https://raw.githubusercontent.com/BioHaskell/hPDB/5be747e2f2c57370b498f4c11f9f1887fdab0418/Bio/PDB/EventParser/PDBEventPrinter.hs | haskell | | Low-level output routines: printing any 'PDBEvent'.
| Prints a PDBEvent to a filehandle.
la justifies anything else (floating point or integer number) to the left
align coordinate float
align property float
TODO: split when longer than X residues
print errors:
print special case for missing...
| For indicating continuation of the record in previous line as a digit with line number.
| For indicating continuation of the text in previous line by indent.
[temporary method, they all should be.]
Including errors.
TODO: below
isPrintable JRNL {} = True
| Prints a matrix given as a list of 'V3 Double's. | # LANGUAGE OverloadedStrings , PatternGuards , CPP #
module Bio.PDB.EventParser.PDBEventPrinter(print, isPrintable)
where
import qualified Prelude(String)
import Prelude((++), Bool(True, False), (.), ($), Int, (+), (>), (<), show, Double)
import Text.Printf(hPrintf)
import System.IO(Handle, IO, stderr)
import qualified Data.ByteString.Char8 as BS
import Data.String(IsString)
import Control.Monad(mapM_, return)
import Bio.PDB.EventParser.PDBEvents
import qualified Bio.PDB.EventParser.ExperimentalMethods as ExperimentalMethods
#ifdef HAVE_TEXT_FORMAT
import qualified Data.ByteString.Lazy as L
import Data.Text.Lazy.Encoding(encodeUtf8)
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.Lazy as LT
import Data.Text.Lazy.Builder as B
import qualified Data.Text.Format as F
import qualified Data.Text.Buildable as BD
#endif
print :: Handle -> PDBEvent -> IO ()
print handle ATOM { no = num,
atomtype = atype,
restype = rtype,
chain = c,
resid = rid,
resins = rins,
altloc = al,
coords = V3 x y z,
occupancy = occ,
bfactor = bf,
segid = sid,
elt = e,
charge = ch,
hetatm = isHet
} =
#ifndef HAVE_TEXT_FORMAT
hPrintf handle
"%6s%5d %-3s%c%-3s %c%4d%c %8.3f%8.3f%8.3f%6.2f%6.2f %-4s%-2s%-2s\n"
recname
num (BS.unpack atype) al (BS.unpack rtype) c rid rins
x y z occ bf
(BS.unpack sid) (BS.unpack e) (BS.unpack ch)
where
recname :: Prelude.String
recname = if isHet then "HETATM" else "ATOM "
#else
L.hPutStr handle . encodeUtf8 $ F.format "{}{} {}{}{} {}{}{} {}{}{}{}{} {}{}{}\n" args
where
ra justifies a ByteString to the right
ra i = F.right i ' ' . decodeUtf8
la i = F.left i ' '
args = (recname, la 5 num, specfmt 4 3 atype,
conv al, ra 3 rtype,
conv c, la 4 rid,
conv rins,
ca x, ca y, ca z, pa occ, pa bf,
ra 4 sid, ra 2 e, ra 2 ch)
recname = fromText $ if isHet then "HETATM" else "ATOM "
conv : : Builder
conv x = fromString [x]
specfmt mimics erratic alignment of PDB atom types : up to three characters are justified left , after prefixing by single space .
specfmt i j a = B.fromLazyText . LT.justifyRight i ' ' . LT.justifyLeft j ' ' . B.toLazyText . fromText . decodeUtf8 $ a
#endif
-- TODO: Note that this ANISOU code will be buggy for 4-letter atom codes that happen (rarely.)
print handle ANISOU { no = n,
atomtype = atype,
restype = rtype,
chain = c,
resid = rid,
resins = rins,
altloc = al,
u_1_1 = u11,
u_2_2 = u22,
u_3_3 = u33,
u_1_2 = u12,
u_1_3 = u13,
u_2_3 = u23,
segid = sid,
elt = e,
charge = ch
} = hPrintf handle
"ANISOU%5d %-3s%c%-3s %c%4d%c %7d%7d%7d%7d%7d%7d %-4s%-2s%-2s\n"
n (BS.unpack atype) al (BS.unpack rtype) c rid rins
u11 u22 u33 u12 u13 u23
(BS.unpack sid) (BS.unpack e) (BS.unpack ch)
print handle SIGUIJ { no = n,
atomtype = atype,
restype = rtype,
chain = c,
resid = rid,
resins = rins,
altloc = al,
u_1_1 = u11,
u_2_2 = u22,
u_3_3 = u33,
u_1_2 = u12,
u_1_3 = u13,
u_2_3 = u23,
segid = sid,
elt = e,
charge = ch
} = hPrintf handle
"SIGUIJ%5d %-3s%c%-3s %c%4d%c %7d%7d%7d%7d%7d%7d %-4s%-2s%-2s\n"
n (BS.unpack atype) al (BS.unpack rtype) c rid rins
u11 u22 u33 u12 u13 u23
(BS.unpack sid) (BS.unpack e) (BS.unpack ch)
print handle (HEADER { classification = c,
depDate = d,
idCode = i }) = hPrintf handle "HEADER %-40s%9s %4s\n"
(BS.unpack c)
(BS.unpack d)
(BS.unpack i)
print handle MODEL { num=n } = hPrintf handle "MODEL %4d\n" n
print handle END = hPrintf handle "END\n"
print handle ENDMDL = hPrintf handle "ENDMDL\n"
print handle CONECT { atoms=ats } = do hPrintf handle "CONECT"
mapM_ (hPrintf handle "%5d") ats
hPrintf handle "\n"
print handle TER { num = n ,
resname = r ,
chain = ch ,
resid = resi ,
insCode = i } = hPrintf handle
"TER %5d %c%-3s %c%4d\n" n i (BS.unpack r) ch resi
print handle MASTER { numRemark = nr,
numHet = nhet,
numHelix = nhel,
numSheet = nsheet,
numTurn = nturn,
numSite = nsite,
numXform = nxform,
numAts = nats,
numMaster = nmaster,
numConect = ncon,
numSeqres = nseq } = do hPrintf handle "MASTER %5d 0" nr
mapM_ (hPrintf handle "%5d")
[nhet, nhel, nsheet,
nturn, nsite, nxform, nats,
nmaster, ncon, nseq]
hPrintf handle "\n"
print handle REMARK { num = n,
text = t } = mapM_ (hPrintf handle "REMARK %4d %-80s\n" n .
BS.unpack) t
{- KEYWDS { continuation :: !Int,
            aList :: ![String] } |
   AUTHOR { continuation :: !Int,
            aList :: ![String] } |
   REMARK { num :: !Int,
            text :: ![String] } -}
print handle KEYWDS { continuation = c,
aList = l } = printList handle "KEYWDS" "," c l
print handle AUTHOR { continuation = c,
aList = l } = printList handle "AUTHOR" "," c l
print handle EXPDTA { continuation = c,
expMethods = e } = mapM_ (hPrintf handle "EXPDTA %c%-80s\n"
(showContinuation c) .
BS.unpack .
ExperimentalMethods.showExpMethod) e
print handle TITLE { continuation = c,
title = t } = hPrintf handle "TITLE %c%-80s\n"
(showContinuation c)
(contd c $ BS.unpack t)
print handle SEQRES { serial = sn,
chain = ch,
num = n,
resList = l } = do hPrintf handle "SEQRES %3d %c %4d " sn ch n
mapM_ (hPrintf handle "%3s " .
BS.unpack) l
hPrintf handle "\n"
print handle COMPND { cont = c,
tokens = ts } = printSpecList handle "COMPND" c ts
print handle SOURCE { cont = c,
tokens = ts } = printSpecList handle "SOURCE" c ts
print handle SPLIT { cont = c,
codes = cs } = printList handle "SPLIT " " " c cs
print handle ORIGXn { n = n,
o = vecs,
t = f } = printMatrix handle "ORIGX" n vecs f
print handle SCALEn { n = n,
o = vecs,
t = f } = printMatrix handle "SCALE" n vecs f
print handle CRYST1 { a = av,
b = bv,
c = cv,
alpha = aa,
beta = ba,
gamma = ga,
spcGrp = grp,
zValue = z } = hPrintf handle
"CRYST1 %8.3f %8.3f %8.3f %6.2f %6.2f %6.2f %10s %4d\n"
av bv cv
aa ba ga
(BS.unpack grp)
z
print handle TVECT { serial = sn,
vec = V3 a b c } = hPrintf handle "TVECT %4d%10.5f%10.5f%10.5f\n" sn a b c
print handle JRNL { cont = c,
content = contents,
isFirst = aJRNL } = printJRNL contents
where
header :: String
header = if aJRNL then "JRNL " else "REMARK 1 "
[contd] = if c > 0 then show c else " "
printJRNL ((k,v):cs) = hPrintf handle "%12s%4s %c %s\n"
(BS.unpack header)
(BS.unpack k)
contd
(BS.unpack v)
print handle (PDBParseError c r s) = hPrintf stderr "ERROR: In line %d column %d: %s" c r
(BS.unpack s)
print handle e = hPrintf stderr "UNIMPLEMENTED: %s\n"
(show e)
-- | Renders a PDB continuation counter for a one-character column:
-- 0 (the first record of a group) prints as a blank; 2..9 print as
-- their digit.
-- NOTE(review): a continuation >= 10 makes 'show x' two characters
-- long, so neither clause matches (runtime pattern failure) — confirm
-- callers never exceed 9.
showContinuation 0 = ' '
showContinuation x | [c] <- show x = c
-- | Prefixes a continuation line's text with one space; the first
-- record (continuation 0) is passed through unchanged.  Only
-- "zero vs non-zero" matters — the numeric value 'x' is unused.
contd 0 s = s
contd x s = ' ' : s
-- | Reports whether a given PDB record is already printable
isPrintable ATOM {} = True
isPrintable HEADER {} = True
isPrintable END {} = True
isPrintable ENDMDL {} = True
isPrintable MODEL {} = True
isPrintable CONECT {} = True
isPrintable TER {} = True
isPrintable MASTER {} = True
isPrintable AUTHOR {} = True
isPrintable KEYWDS {} = True
isPrintable TITLE {} = True
isPrintable REMARK {} = True
isPrintable EXPDTA {} = True
isPrintable SEQRES {} = True
isPrintable COMPND {} = True
isPrintable SOURCE {} = True
isPrintable SPLIT {} = True
isPrintable ORIGXn {} = True
isPrintable SCALEn {} = True
isPrintable CRYST1 {} = True
isPrintable ANISOU {} = True
isPrintable SIGUIJ {} = True
isPrintable TVECT {} = True
isPrintable JRNL {} = True
-- Parse errors count as printable: 'print' reports them on stderr.
isPrintable (PDBParseError c r s) = True
-- Any record constructor not listed above is not yet implemented.
isPrintable _ = False
-- | Prints a list of words as a PDB speclist record (see 'Bio.PDB.EventParser.ParseSpecListRecord'.)
-- Emits one "RECTYPE  cKEY:VALUE;" line for the head of the pair list.
-- NOTE(review): only the first (key, value) pair is printed — the tail
-- 'ls' is dropped, and an empty list is a pattern-match failure.
-- Confirm callers always pass a non-empty (effectively singleton) list.
printSpecList handle rectype c ((k, v): ls) = hPrintf handle "%6s %c%-s:%-s;\n"
                                                (BS.unpack rectype)
                                                (showContinuation c)
                                                (contd c $ BS.unpack k)
                                                (BS.unpack v)
-- | Prints a list of words as a PDB list record (see 'Bio.PDB.EventParser.ParseSpecListRecord'.)
-- | Writes a PDB list record: the 6-char 'label', a continuation
-- column, then the elements of 'l' joined by 'sep', left-justified in
-- an 80-character field.
printList :: Handle -> BS.ByteString -> BS.ByteString -> Int -> [BS.ByteString] -> IO ()
printList handle label sep c l = hPrintf handle "%6s %c %-80s\n" (BS.unpack label)
                                                                 (showContinuation c)
                                                                 str
  where str = BS.unpack (BS.intercalate sep l)
-- | Prints an ORIGXn/SCALEn-style transform: one record per row
-- vector, tagged with the row index 'n' and followed by translation
-- component 'f'; recurses until both lists are exhausted.
-- NOTE(review): partial patterns — lists of unequal length fall
-- through both clauses, and '[cn] = show n' fails for n >= 10.
-- Confirm callers pass matched, short lists (PDB uses rows 1..3).
printMatrix :: Handle -> BS.ByteString -> Int -> [V3 Double] -> [Double] -> IO ()
printMatrix handle ident n [] [] = return ()
printMatrix handle ident n (vec:vecs) (f:fs) = do hPrintf handle "%5s%c    " (BS.unpack ident) cn
                                                  mapM_ printEntry [a, b, c]
                                                  hPrintf handle "     %9.5f\n" f
                                                  printMatrix handle ident (n+1) vecs fs
  where [cn] = show n
        printEntry :: Double -> IO ()
        printEntry = hPrintf handle "%10.6f"
        V3 a b c = vec
|
bffaa283aa7ef1b27f472318d2adb8a9b2b9d07218cfdf99a56f8c721e672036 | chetmurthy/ensemble | appl_multi.ml | (**************************************************************)
APPL_MULTI.ML : Multiplexion of interfaces
Author : Ohad Rodeh 11/97
(**************************************************************)
open Trans
open Hsys
open Util
open View
open Appl_intf
open New
open Arge
(**************************************************************)
(* Module trace tag, used for logging and error-message prefixes. *)
let name = Trace.file "APPL_MULTI"
(* [failwith s] raises a failure tagged with this module's name. *)
let failwith s = Trace.make_failwith name s
(**************************************************************)
type header = Appl_intf.appl_multi_header
let f iv =
let log = Trace.log name
and logs = Trace.log (name^"S") in
if Array.length iv = 0 then failwith "Zero interfaces";
let heartbeats = Array.map (fun x -> Time.to_float x.heartbeat_rate) iv in
let heartbeat_rate = Array.fold_left min heartbeats.(0) heartbeats in
let heartbeat_rate = Time.of_float heartbeat_rate in
let width = Array.length iv in
let xfer_vct = Array.create width false in
let filter i a =
let rec loop = function
| hd :: tl -> (match hd with
| Cast msg ->
Cast (i,msg) :: loop tl
| Send (dst,msg) ->
Send (dst, (i,msg)) :: loop tl
| Send1 (dst,msg) ->
Send1 (dst, (i,msg)) :: loop tl
| Control a -> (match a with
| Leave
| Prompt
| Suspect _
| Rekey _
| Protocol _
| Migrate _
| Timeout _
| Dump -> (Control a) :: loop tl
| XferDone ->
logs (fun () -> "XferDone");
xfer_vct.(i) <- true;
if array_for_all ident xfer_vct then (
(Control XferDone) :: loop tl
) else
loop tl
| Block _ -> failwith "Action Not supported under appl_multi"
| No_op -> loop tl
)
)
| [] -> []
in
let a = Array.to_list a in
let a = loop a in
Array.of_list a
in
let install (ls,vs) =
(* View change.
*)
log (fun () -> sprintf "New View, nmembers=%d" ls.nmembers);
Array.fill xfer_vct 0 width false;
let ha = Array.mapi (fun i apli ->
let a,h = apli.install (ls,vs) in
let a = filter i a in
(h,a)) iv in
let hndl_vct = Array.map (fun (h,a) -> h) ha in
let a = Array.map (fun (h,a) -> a) ha in
let actions = (array_flatten a) in
let receive origin bk cs =
let handlers = Array.map (fun handlers -> handlers.receive origin bk cs) hndl_vct in
fun (i,msg) ->
if i < 0 || i >= Array.length handlers then
failwith "receive:inconsistent number of multiplexed interfaces" ;
filter i (handlers.(i) msg)
and block () =
let a = Array.mapi (fun i h -> (filter i (h.block ()))) hndl_vct in
array_flatten a
and heartbeat t =
let a = Array.mapi (fun i h -> (filter i (h.heartbeat t))) hndl_vct in
array_flatten a
and disable () =
Array.iter (fun h -> h.disable ()) hndl_vct
in
let handlers = {
flow_block = (fun _ -> ());
receive = receive ;
block = block ;
heartbeat = heartbeat ;
disable = disable
}
in
actions,handlers
and exit () =
Array.iter (fun apli -> apli.exit ()) iv;
log (fun () -> "Got exit")
in {
heartbeat_rate = heartbeat_rate;
install = install ;
exit = exit
}
(**************************************************************)
| null | https://raw.githubusercontent.com/chetmurthy/ensemble/8266a89e68be24a4aaa5d594662e211eeaa6dc89/ensemble/server/appl/appl_multi.ml | ocaml | ************************************************************
************************************************************
************************************************************
************************************************************
View change.
************************************************************ | APPL_MULTI.ML : Multiplexion of interfaces
Author : Ohad Rodeh 11/97
open Trans
open Hsys
open Util
open View
open Appl_intf
open New
open Arge
let name = Trace.file "APPL_MULTI"
let failwith s = Trace.make_failwith name s
type header = Appl_intf.appl_multi_header
let f iv =
let log = Trace.log name
and logs = Trace.log (name^"S") in
if Array.length iv = 0 then failwith "Zero interfaces";
let heartbeats = Array.map (fun x -> Time.to_float x.heartbeat_rate) iv in
let heartbeat_rate = Array.fold_left min heartbeats.(0) heartbeats in
let heartbeat_rate = Time.of_float heartbeat_rate in
let width = Array.length iv in
let xfer_vct = Array.create width false in
let filter i a =
let rec loop = function
| hd :: tl -> (match hd with
| Cast msg ->
Cast (i,msg) :: loop tl
| Send (dst,msg) ->
Send (dst, (i,msg)) :: loop tl
| Send1 (dst,msg) ->
Send1 (dst, (i,msg)) :: loop tl
| Control a -> (match a with
| Leave
| Prompt
| Suspect _
| Rekey _
| Protocol _
| Migrate _
| Timeout _
| Dump -> (Control a) :: loop tl
| XferDone ->
logs (fun () -> "XferDone");
xfer_vct.(i) <- true;
if array_for_all ident xfer_vct then (
(Control XferDone) :: loop tl
) else
loop tl
| Block _ -> failwith "Action Not supported under appl_multi"
| No_op -> loop tl
)
)
| [] -> []
in
let a = Array.to_list a in
let a = loop a in
Array.of_list a
in
let install (ls,vs) =
log (fun () -> sprintf "New View, nmembers=%d" ls.nmembers);
Array.fill xfer_vct 0 width false;
let ha = Array.mapi (fun i apli ->
let a,h = apli.install (ls,vs) in
let a = filter i a in
(h,a)) iv in
let hndl_vct = Array.map (fun (h,a) -> h) ha in
let a = Array.map (fun (h,a) -> a) ha in
let actions = (array_flatten a) in
let receive origin bk cs =
let handlers = Array.map (fun handlers -> handlers.receive origin bk cs) hndl_vct in
fun (i,msg) ->
if i < 0 || i >= Array.length handlers then
failwith "receive:inconsistent number of multiplexed interfaces" ;
filter i (handlers.(i) msg)
and block () =
let a = Array.mapi (fun i h -> (filter i (h.block ()))) hndl_vct in
array_flatten a
and heartbeat t =
let a = Array.mapi (fun i h -> (filter i (h.heartbeat t))) hndl_vct in
array_flatten a
and disable () =
Array.iter (fun h -> h.disable ()) hndl_vct
in
let handlers = {
flow_block = (fun _ -> ());
receive = receive ;
block = block ;
heartbeat = heartbeat ;
disable = disable
}
in
actions,handlers
and exit () =
Array.iter (fun apli -> apli.exit ()) iv;
log (fun () -> "Got exit")
in {
heartbeat_rate = heartbeat_rate;
install = install ;
exit = exit
}
|
9df3f414903c35779292a6e1954ea2585992a1dd98c9b5992396538aa549405f | nklein/clifford | add.lisp | (in-package #:clifford)
(defun %nullary-addition (info)
  "Expand the zero-operand NULLARY-+ method for the algebra described
by INFO: with no operands, addition yields the scalar zero."
  `(defmethod nullary-+ ((x ,(name info)))
    ,(scalar-zero info)))
(defun %unary-addition (info)
  "Expand the one-operand UNARY-+ method: unary plus is the identity."
  `(defmethod unary-+ ((x ,(name info)))
    x))
(defun %binary-addition (info)
  "Expand BINARY-+ for two multivectors of INFO's type: construct a new
instance whose every component is the sum of the operands' components."
  `(defmethod binary-+ ((x ,(name info)) (y ,(name info)))
    (,(constructor info)
      ,@(iter (for a in-accessors-of info)
              (collecting `(+ (,a x) (,a y)))))))
(defun %scalar-addition (info)
  "Expand the two BINARY-+ methods mixing a multivector with a raw
scalar (both argument orders): the scalar is folded into the scalar
slot (the first accessor); all other components pass through."
  `((defmethod binary-+ ((x ,(name info)) (y ,(scalar-type info)))
      (,(constructor info)
        ,(first (keywords info)) (+ (,(first (accessors info)) x) y)
        ,@(iter (for a in-non-scalar-accessors-of info)
                (collecting `(,a x)))))
    (defmethod binary-+ ((x ,(scalar-type info)) (y ,(name info)))
      (,(constructor info)
        ,(first (keywords info)) (+ x (,(first (accessors info)) y))
        ,@(iter (for a in-non-scalar-accessors-of info)
                (collecting `(,a y)))))))
(defun %1+-addition (info)
  "Expand the 1+ method: increment only the scalar component, copying
all non-scalar components unchanged."
  `(defmethod 1+ ((x ,(name info)))
    (,(constructor info)
      ,(first (keywords info)) (1+ (,(first (accessors info)) x))
      ,@(iter (for a in-non-scalar-accessors-of info)
              (collecting `(,a x))))))
(defun create-addition-functions (info)
  "Collect all addition-related method expansions for INFO into one
list of forms (note %scalar-addition yields two forms, hence ,@)."
  `(,(%nullary-addition info)
    ,(%unary-addition info)
    ,(%binary-addition info)
    ,@(%scalar-addition info)
    ,(%1+-addition info)))
| null | https://raw.githubusercontent.com/nklein/clifford/2ba9b9a8f68eb88c2821341dfac2f2a61c2f84ce/src/add.lisp | lisp | (in-package #:clifford)
(defun %nullary-addition (info)
`(defmethod nullary-+ ((x ,(name info)))
,(scalar-zero info)))
(defun %unary-addition (info)
`(defmethod unary-+ ((x ,(name info)))
x))
(defun %binary-addition (info)
`(defmethod binary-+ ((x ,(name info)) (y ,(name info)))
(,(constructor info)
,@(iter (for a in-accessors-of info)
(collecting `(+ (,a x) (,a y)))))))
(defun %scalar-addition (info)
`((defmethod binary-+ ((x ,(name info)) (y ,(scalar-type info)))
(,(constructor info)
,(first (keywords info)) (+ (,(first (accessors info)) x) y)
,@(iter (for a in-non-scalar-accessors-of info)
(collecting `(,a x)))))
(defmethod binary-+ ((x ,(scalar-type info)) (y ,(name info)))
(,(constructor info)
,(first (keywords info)) (+ x (,(first (accessors info)) y))
,@(iter (for a in-non-scalar-accessors-of info)
(collecting `(,a y)))))))
(defun %1+-addition (info)
`(defmethod 1+ ((x ,(name info)))
(,(constructor info)
,(first (keywords info)) (1+ (,(first (accessors info)) x))
,@(iter (for a in-non-scalar-accessors-of info)
(collecting `(,a x))))))
(defun create-addition-functions (info)
`(,(%nullary-addition info)
,(%unary-addition info)
,(%binary-addition info)
,@(%scalar-addition info)
,(%1+-addition info)))
|
|
1f5464c338bcf50b0ebba0d937b6b0b55ba04565aba166bf080c0b66ed9a1519 | logseq/logseq | db.cljs | (ns frontend.db
"Main entry ns for db related fns"
(:require [clojure.core.async :as async]
[datascript.core :as d]
[logseq.db.schema :as db-schema]
[frontend.db.conn :as conn]
[logseq.db.default :as default-db]
[frontend.db.model]
[frontend.db.query-custom]
[frontend.db.query-react]
[frontend.db.react :as react]
[frontend.db.utils]
[frontend.db.persist :as db-persist]
[frontend.db.migrate :as db-migrate]
[frontend.namespaces :refer [import-vars]]
[frontend.state :as state]
[frontend.util :as util]
[promesa.core :as p]
[electron.ipc :as ipc]))
(import-vars
[frontend.db.conn
;; TODO: remove later
conns
get-repo-path
get-repo-name
get-short-repo-name
datascript-db
get-db
remove-conn!]
[frontend.db.utils
db->json db->edn-str db->string get-max-tx-id get-tx-id
group-by-page seq-flatten
string->db
entity pull pull-many transact! get-key-value]
[frontend.db.model
blocks-count blocks-count-cache clean-export! delete-blocks get-pre-block
delete-files delete-pages-by-files
filter-only-public-pages-and-blocks get-all-block-contents get-all-tagged-pages
get-all-templates get-block-and-children get-block-by-uuid get-block-children sort-by-left
get-block-parent get-block-parents parents-collapsed? get-block-referenced-blocks get-all-referenced-blocks-uuid
get-block-children-ids get-block-immediate-children get-block-page
get-custom-css get-date-scheduled-or-deadlines
get-file-last-modified-at get-file get-file-page get-file-page-id file-exists?
get-files get-files-blocks get-files-full get-journals-length get-pages-with-file
get-latest-journals get-page get-page-alias get-page-alias-names get-paginated-blocks
get-page-blocks-count get-page-blocks-no-cache get-page-file get-page-format get-page-properties
get-page-referenced-blocks get-page-referenced-blocks-full get-page-referenced-pages get-page-unlinked-references
get-all-pages get-pages get-pages-relation get-pages-that-mentioned-page get-public-pages get-tag-pages
journal-page? page-alias-set pull-block
set-file-last-modified-at! page-empty? page-exists? page-empty-or-dummy? get-alias-source-page
set-file-content! has-children? get-namespace-pages get-all-namespace-relation get-pages-by-name-partition
get-original-name]
[frontend.db.react
get-current-page set-key-value
remove-key! remove-q! remove-query-component! add-q! add-query-component! clear-query-state!
clear-query-state-without-refs-and-embeds! kv q
query-state query-components remove-custom-query! set-new-result! sub-key-value refresh!]
[frontend.db.query-custom
custom-query]
[frontend.db.query-react
react-query custom-query-result-transform]
[logseq.db.default built-in-pages-names built-in-pages])
(defn get-schema-version
  "Read the :schema/version value stored in datascript `db`, or nil
   when the graph has never been stamped with a version."
  [db]
  (d/q
    '[:find ?v .
      :where
      [_ :schema/version ?v]]
    db))
(defn old-schema?
  "True when `db` needs migration: its stored :schema/version is an
   integer lower than the current db-schema/version, or is missing /
   non-integer (treated as old for backward compatibility)."
  [db]
  (let [v (get-schema-version db)]
    (if (integer? v)
      (> db-schema/version v)
      ;; backward compatibility
      true)))
;; persisting DBs between page reloads
(defn persist!
  "Serialize the in-memory datascript db for `repo` and save it under
   the repo's datascript-db key. No-op when the repo has no live db."
  [repo]
  (let [key (datascript-db repo)
        db (get-db repo)]
    (when db
      ;; `db` is known non-nil inside this `when`, so the old
      ;; `(if db (db->string db) "")` branch was unreachable.
      (p/let [_ (db-persist/save-graph! key (db->string db))]))))
(defonce persistent-jobs (atom {}))
(defn clear-repo-persistent-job!
  "Cancel the pending persist timer for `repo`, if any. The entry is
   left in `persistent-jobs`; scheduling a new job overwrites it."
  [repo]
  (when-let [old-job (get @persistent-jobs repo)]
    (js/clearTimeout old-job)))
(defn persist-if-idle!
  "Schedule a persist of `repo` 3s from now; the write happens only when
   both the editor input and db are idle and we're not on a whiteboard
   route — otherwise the check reschedules itself. Replaces any timer
   already pending for this repo."
  [repo]
  (clear-repo-persistent-job! repo)
  (let [job (js/setTimeout
             (fn []
               (if (and (state/input-idle? repo)
                        (state/db-idle? repo)
                        ;; It's ok to not persist here since new changes
                        ;; will be notified when restarting the app.
                        (not (state/whiteboard-route?)))
                 (persist! repo)
                 ;; (state/set-db-persisted! repo true)
                 (persist-if-idle! repo)))
             3000)]
    (swap! persistent-jobs assoc repo job)))
;; only save when user's idle
(defonce *db-listener (atom nil))
(defn- repo-listen-to-tx!
[repo conn]
(d/listen! conn :persistence
(fn [tx-report]
(when (not (:new-graph? (:tx-meta tx-report))) ; skip initial txs
(if (util/electron?)
(when-not (:dbsync? (:tx-meta tx-report))
;; sync with other windows if needed
(p/let [graph-has-other-window? (ipc/ipc "graphHasOtherWindow" repo)]
(when graph-has-other-window?
(ipc/ipc "dbsync" repo {:data (db->string (:tx-data tx-report))}))))
(do
(state/set-last-transact-time! repo (util/time-ms))
(persist-if-idle! repo)))
(when-let [db-listener @*db-listener]
(db-listener repo tx-report))))))
(defn listen-and-persist!
  "(Re)attach the :persistence tx listener to `repo`'s datascript conn,
   removing any previous listener first. No-op when the conn is absent."
  [repo]
  (when-let [conn (get-db repo false)]
    (d/unlisten! conn :persistence)
    (repo-listen-to-tx! repo conn)))
(defn start-db-conn!
([repo]
(start-db-conn! repo {}))
([repo option]
(conn/start! repo
(assoc option
:listen-handler listen-and-persist!))))
(defn restore-graph-from-text!
"Swap db string into the current db status
stored: the text to restore from"
[repo stored]
(p/let [db-name (datascript-db repo)
db-conn (d/create-conn db-schema/schema)
_ (swap! conns assoc db-name db-conn)
_ (when stored
(let [stored-db (try (string->db stored)
(catch :default _e
(js/console.warn "Invalid graph cache")
(d/empty-db db-schema/schema)))
attached-db (d/db-with stored-db
TODO bug overriding uuids ?
db (if (old-schema? attached-db)
(db-migrate/migrate attached-db)
attached-db)]
(conn/reset-conn! db-conn db)))]
(d/transact! db-conn [{:schema/version db-schema/version}])))
(defn restore-graph!
"Restore db from serialized db cache"
[repo]
(p/let [db-name (datascript-db repo)
stored (db-persist/get-serialized-graph db-name)]
(restore-graph-from-text! repo stored)))
(defn restore!
  "Restore `repo`'s graph from the serialized cache, then start
   listening for transactions so further changes get persisted."
  [repo]
  (p/let [_ (restore-graph! repo)]
    (listen-and-persist! repo)))
(defn run-batch-txs!
[]
(let [chan (state/get-db-batch-txs-chan)]
(async/go-loop []
(let [f (async/<! chan)]
(f))
(recur))
chan))
(defn new-block-id
  "Generate a fresh (sequential) squuid for a new block."
  []
  (d/squuid))
| null | https://raw.githubusercontent.com/logseq/logseq/6a5b0c819975a36aa91b84f73dec32d2ed483503/src/main/frontend/db.cljs | clojure | TODO: remove later
backward compatibility
persisting DBs between page reloads
It's ok to not persist here since new changes
will be notified when restarting the app.
(state/set-db-persisted! repo true)
only save when user's idle
skip initial txs
sync with other windows if needed | (ns frontend.db
"Main entry ns for db related fns"
(:require [clojure.core.async :as async]
[datascript.core :as d]
[logseq.db.schema :as db-schema]
[frontend.db.conn :as conn]
[logseq.db.default :as default-db]
[frontend.db.model]
[frontend.db.query-custom]
[frontend.db.query-react]
[frontend.db.react :as react]
[frontend.db.utils]
[frontend.db.persist :as db-persist]
[frontend.db.migrate :as db-migrate]
[frontend.namespaces :refer [import-vars]]
[frontend.state :as state]
[frontend.util :as util]
[promesa.core :as p]
[electron.ipc :as ipc]))
(import-vars
[frontend.db.conn
conns
get-repo-path
get-repo-name
get-short-repo-name
datascript-db
get-db
remove-conn!]
[frontend.db.utils
db->json db->edn-str db->string get-max-tx-id get-tx-id
group-by-page seq-flatten
string->db
entity pull pull-many transact! get-key-value]
[frontend.db.model
blocks-count blocks-count-cache clean-export! delete-blocks get-pre-block
delete-files delete-pages-by-files
filter-only-public-pages-and-blocks get-all-block-contents get-all-tagged-pages
get-all-templates get-block-and-children get-block-by-uuid get-block-children sort-by-left
get-block-parent get-block-parents parents-collapsed? get-block-referenced-blocks get-all-referenced-blocks-uuid
get-block-children-ids get-block-immediate-children get-block-page
get-custom-css get-date-scheduled-or-deadlines
get-file-last-modified-at get-file get-file-page get-file-page-id file-exists?
get-files get-files-blocks get-files-full get-journals-length get-pages-with-file
get-latest-journals get-page get-page-alias get-page-alias-names get-paginated-blocks
get-page-blocks-count get-page-blocks-no-cache get-page-file get-page-format get-page-properties
get-page-referenced-blocks get-page-referenced-blocks-full get-page-referenced-pages get-page-unlinked-references
get-all-pages get-pages get-pages-relation get-pages-that-mentioned-page get-public-pages get-tag-pages
journal-page? page-alias-set pull-block
set-file-last-modified-at! page-empty? page-exists? page-empty-or-dummy? get-alias-source-page
set-file-content! has-children? get-namespace-pages get-all-namespace-relation get-pages-by-name-partition
get-original-name]
[frontend.db.react
get-current-page set-key-value
remove-key! remove-q! remove-query-component! add-q! add-query-component! clear-query-state!
clear-query-state-without-refs-and-embeds! kv q
query-state query-components remove-custom-query! set-new-result! sub-key-value refresh!]
[frontend.db.query-custom
custom-query]
[frontend.db.query-react
react-query custom-query-result-transform]
[logseq.db.default built-in-pages-names built-in-pages])
(defn get-schema-version [db]
(d/q
'[:find ?v .
:where
[_ :schema/version ?v]]
db))
(defn old-schema?
[db]
(let [v (get-schema-version db)]
(if (integer? v)
(> db-schema/version v)
true)))
(defn persist! [repo]
(let [key (datascript-db repo)
db (get-db repo)]
(when db
(let [db-str (if db (db->string db) "")]
(p/let [_ (db-persist/save-graph! key db-str)])))))
(defonce persistent-jobs (atom {}))
(defn clear-repo-persistent-job!
[repo]
(when-let [old-job (get @persistent-jobs repo)]
(js/clearTimeout old-job)))
(defn persist-if-idle!
[repo]
(clear-repo-persistent-job! repo)
(let [job (js/setTimeout
(fn []
(if (and (state/input-idle? repo)
(state/db-idle? repo)
(not (state/whiteboard-route?)))
(persist! repo)
(persist-if-idle! repo)))
3000)]
(swap! persistent-jobs assoc repo job)))
(defonce *db-listener (atom nil))
(defn- repo-listen-to-tx!
[repo conn]
(d/listen! conn :persistence
(fn [tx-report]
(if (util/electron?)
(when-not (:dbsync? (:tx-meta tx-report))
(p/let [graph-has-other-window? (ipc/ipc "graphHasOtherWindow" repo)]
(when graph-has-other-window?
(ipc/ipc "dbsync" repo {:data (db->string (:tx-data tx-report))}))))
(do
(state/set-last-transact-time! repo (util/time-ms))
(persist-if-idle! repo)))
(when-let [db-listener @*db-listener]
(db-listener repo tx-report))))))
(defn listen-and-persist!
[repo]
(when-let [conn (get-db repo false)]
(d/unlisten! conn :persistence)
(repo-listen-to-tx! repo conn)))
(defn start-db-conn!
([repo]
(start-db-conn! repo {}))
([repo option]
(conn/start! repo
(assoc option
:listen-handler listen-and-persist!))))
(defn restore-graph-from-text!
"Swap db string into the current db status
stored: the text to restore from"
[repo stored]
(p/let [db-name (datascript-db repo)
db-conn (d/create-conn db-schema/schema)
_ (swap! conns assoc db-name db-conn)
_ (when stored
(let [stored-db (try (string->db stored)
(catch :default _e
(js/console.warn "Invalid graph cache")
(d/empty-db db-schema/schema)))
attached-db (d/db-with stored-db
TODO bug overriding uuids ?
db (if (old-schema? attached-db)
(db-migrate/migrate attached-db)
attached-db)]
(conn/reset-conn! db-conn db)))]
(d/transact! db-conn [{:schema/version db-schema/version}])))
(defn restore-graph!
"Restore db from serialized db cache"
[repo]
(p/let [db-name (datascript-db repo)
stored (db-persist/get-serialized-graph db-name)]
(restore-graph-from-text! repo stored)))
(defn restore!
[repo]
(p/let [_ (restore-graph! repo)]
(listen-and-persist! repo)))
(defn run-batch-txs!
[]
(let [chan (state/get-db-batch-txs-chan)]
(async/go-loop []
(let [f (async/<! chan)]
(f))
(recur))
chan))
(defn new-block-id
[]
(d/squuid))
|
a380a51357bfdca83b5797eab08d40a76dc24dc0ea802c0da065a6e979be34b8 | WormBase/wormbase_rest | associations.clj | (ns rest-api.classes.expression-cluster.widgets.associations
(:require
[rest-api.formatters.object :as obj :refer [pack-obj]]
[rest-api.classes.generic-fields :as generic]))
(defn life-stages
  "Widget field: life stages linked to expression-cluster entity `ec`,
   each packed for the API along with its textual definition."
  [ec]
  {:data (when-let [lss (:expression-cluster/life-stage ec)]
           (for [ls lss]
             {:life_stages (pack-obj ls)
              :definition (->> (:life-stage/definition ls)
                               (:life-stage.definition/text))}))
   :description "Life stages associated with this expression cluster"})
(defn go-terms
  "Widget field: GO terms linked to expression-cluster entity `ec`.
   NOTE(review): original comment says no such values exist in the
   database yet, so :data is expected to be nil — confirm."
  [ec]
  {:data (when-let [go-terms (:expression-cluster/go-term ec)]
           (map pack-obj go-terms))
   :description "GO terms associated with this expression cluster"})
(defn anatomy-terms
  "Widget field: anatomy terms linked to `ec` via the anatomy-term
   holder entities, each packed with its definition text."
  [ec]
  {:data (when-let [aths (:expression-cluster/anatomy-term ec)]
           (for [ath aths
                 :let [at (:expression-cluster.anatomy-term/anatomy-term ath)]]
             {:anatomy_term (pack-obj at)
              :definition (:anatomy-term.definition/text
                           (:anatomy-term/definition at))}))
   :description "anatomy terms associated with this expression cluster"})
(defn processes
  "Widget field: WBProcesses referencing `ec` (reverse ref through
   :wbprocess.expression-cluster), packed with their summaries."
  [ec]
  {:data (when-let [phs (:wbprocess.expression-cluster/_expression-cluster ec)]
           (for [ph phs
                 :let [wbprocess (:wbprocess/_expression-cluster ph)]]
             {:processes (pack-obj wbprocess)
              :definition (:wbprocess.summary/text
                           (:wbprocess/summary wbprocess))}))
   :description "Processes associated with this expression cluster"})
;; Field-name -> field-fn map consumed by the REST widget machinery.
(def widget
  {:name generic/name-field
   :life_stages life-stages
   :go_terms go-terms
   :anatomy_terms anatomy-terms
   :processes processes})
| null | https://raw.githubusercontent.com/WormBase/wormbase_rest/e51026f35b87d96260b62ddb5458a81ee911bf3a/src/rest_api/classes/expression_cluster/widgets/associations.clj | clojure | non in the database | (ns rest-api.classes.expression-cluster.widgets.associations
(:require
[rest-api.formatters.object :as obj :refer [pack-obj]]
[rest-api.classes.generic-fields :as generic]))
(defn life-stages [ec]
{:data (when-let [lss (:expression-cluster/life-stage ec)]
(for [ls lss]
{:life_stages (pack-obj ls)
:definition (->> (:life-stage/definition ls)
(:life-stage.definition/text))}))
:description "Life stages associated with this expression cluster"})
{:data (when-let [go-terms (:expression-cluster/go-term ec)]
(map pack-obj go-terms))
:description "GO terms associated with this expression cluster"})
(defn anatomy-terms [ec]
{:data (when-let [aths (:expression-cluster/anatomy-term ec)]
(for [ath aths
:let [at (:expression-cluster.anatomy-term/anatomy-term ath)]]
{:anatomy_term (pack-obj at)
:definition (:anatomy-term.definition/text
(:anatomy-term/definition at))}))
:description "anatomy terms associated with this expression cluster"})
(defn processes [ec]
{:data (when-let [phs (:wbprocess.expression-cluster/_expression-cluster ec)]
(for [ph phs
:let [wbprocess (:wbprocess/_expression-cluster ph)]]
{:processes (pack-obj wbprocess)
:definition (:wbprocess.summary/text
(:wbprocess/summary wbprocess))}))
:description "Processes associated with this expression cluster"})
(def widget
{:name generic/name-field
:life_stages life-stages
:go_terms go-terms
:anatomy_terms anatomy-terms
:processes processes})
|
bc815c5591aaa36ef93377877d48bd7f5baddd17a56b599a44ec20b358f6c2be | LightTable/LightTable | tabs.cljs | (ns lt.objs.tabs
"Manage tabsets and tabs"
(:require [lt.object :refer [object* behavior*] :as object]
[lt.objs.editor :as editor]
[lt.objs.canvas :as canvas]
[lt.objs.command :as cmd]
[lt.objs.animations :as anim]
[lt.objs.context :as ctx]
[lt.objs.menu :as menu]
[lt.util.load :as load]
[lt.util.dom :refer [$ append] :as dom]
[lt.util.style :refer [->px]]
[lt.util.js :refer [now]]
[singultus.core :as crate]
[singultus.binding :refer [bound map-bound subatom]])
(:require-macros [lt.macros :refer [behavior defui]]))
(load/js "core/lighttable/ui/dragdrop.js" :sync)
(def multi-def (object* ::multi-editor2
:tags #{:tabs}
:tabsets []
:left 0
:right 0
:bottom 0
:init (fn [this]
(let [tabsets (crate/html [:div.tabsets {:style {:bottom (bound (subatom this :tabset-bottom) ->px)}}])]
(object/merge! this {:tabsets-elem tabsets})
(ctx/in! :tabs this)
[:div#multi {:style {:left (bound (subatom this :left) ->px)
:right (bound (subatom this :right) ->px)
:bottom (bound (subatom this :bottom) ->px)}}
tabsets]
))))
(def multi (object/create multi-def))
(defn ensure-visible
  "Horizontally scroll `tabset`'s tab list so the tab label at `idx`
   is fully in view, leaving a 50px margin on the scrolled-to side.
   No-op when the label is already fully visible or doesn't exist."
  [idx tabset]
  (when-let [cur (aget (dom/$$ ".list li" (object/->content tabset)) idx)]
    (let [left (.-offsetLeft cur)
          width (.-clientWidth cur)
          right (+ left width)
          ;; gp = the scrollable grandparent container of the label
          gp (dom/parent (dom/parent cur))
          pwidth (.-clientWidth gp)
          pleft (.-scrollLeft gp)
          pright (+ pleft pwidth)
          inside (and (>= left pleft)
                      (<= right pright))]
      (when-not inside
        (if (> pleft left)
          (set! (.-scrollLeft gp) (- left 50))
          (set! (.-scrollLeft gp) (+ (- right pwidth) 50)))
        ))))
(defn ->index
  "Position of tab object `obj` within its tabset's :objs vector, or
   nil when `obj` is nil/destroyed or not attached to a tabset."
  [obj]
  (when (and obj @obj (::tabset @obj))
    (first (first (filter #(= obj (second %)) (map-indexed vector (:objs @(::tabset @obj))))))))
(defn active!
  "Make `obj` the active tab of its tabset: record it as :active-obj,
   raise :show on it, and scroll its tab label into view."
  [obj]
  (when (and obj
             (::tabset @obj))
    (object/merge! (::tabset @obj) {:active-obj obj})
    (object/raise obj :show)
    (ensure-visible (->index obj) (::tabset @obj))))
(defn update-tab-order
  "Reorder `multi`'s :objs after a drag: `children` is either a vector
   of target indices or the DOM <li> elements carrying a :pos attr.
   The previously active tab is re-activated afterwards."
  [multi children]
  (let [ser (if (vector? children)
              children
              (map #(dom/attr % :pos) children))
        prev-active (:active-obj @multi)]
    (object/merge! multi {:objs (mapv (:objs @multi) ser)
                          :active-obj nil})
    (active! prev-active)
    ))
(defn ->name
  "Display name for tab object `e` (a derefable): prefers
   [:info :name], then :name, falling back to \"unknown\"."
  [e]
  (if-let [n (get-in @e [:info :name])]
    n
    (if-let [n (:name @e)]
      n
      "unknown")))
(defn ->path
  "File path for tab object `e` (a derefable): prefers [:info :path],
   then :path, falling back to the empty string."
  [e]
  (if-let [p (get-in @e [:info :path])]
    p
    (if-let [p (:path @e)]
      p
      "")))
(defn active?
  "Append \" active\" to class string `c` when `e` is the tabset
   `multi`'s currently active object; otherwise return `c` unchanged."
  [c e multi]
  (if (= (:active-obj @multi) e)
    (str c " active")
    (str c)))
(defn dirty?
  "Append \" dirty\" to class string `c` when tab object `e` has
   unsaved changes (:dirty); otherwise return `c` unchanged."
  [c e]
  (cond-> (str c)
    (:dirty @e) (str " dirty")))
(defui close-tab [obj]
[:span.tab-close "x"]
:click (fn [] (object/raise obj :close)))
(defui item [label multi e pos]
[:li {:class (-> " "
(active? e multi)
(dirty? e))
:draggable "true"
:title (->path e)
:obj-id (object/->id e)
:pos pos}
[:span.file-name
(->name e)]
(when (object/raise-reduce e :close-button+ false)
(close-tab label))]
;; Disable middle-click pasting in linux
:mouseup (fn [ev]
(when (or (= 1 (.-button ev)) (.-metaKey ev))
(dom/prevent ev)))
:click (fn [ev]
(if (or (= 1 (.-button ev)) (.-metaKey ev))
(object/raise label :close)
(active! e)))
:contextmenu (fn [ev]
(object/raise label :menu! ev)))
(object/object* ::tab-label
:tags #{:tab-label}
:init (fn [this multi e pos]
(object/merge! this {::tab-object e
:tabset multi})
(item this multi e pos)))
(declare move-tab)
(defn objs-list [multi objs]
(let [prev-tabs (filter #(= (:tabset @%) multi) (object/by-tag :tab-label))
item (crate/html
[:ul
(for [[idx o] (map vector (range) objs)
:when @o]
(object/->content (object/create ::tab-label multi o idx)))])]
;;Remove old tabs
(doseq [tab prev-tabs]
(object/destroy! tab))
(js/sortable item (js-obj "axis" "x" "distance" 10 "scroll" false "opacity" 0.9 "connectWith" ".list"))
(dom/on item "contextmenu" (fn [e]
(object/raise multi :menu! e)))
(dom/on item "moved" (fn [e] (move-tab multi (.-opts e)) ))
(dom/on item "sortupdate" (fn [e] (update-tab-order multi (.-opts e))))
item))
(defui tabbed-item [active item]
[:div.content {:style {:visibility (bound active #(if (= % @item)
"visible"
"hidden"))}}
(bound item #(when % (object/->content %)))])
(defui vertical-grip [this]
[:div.vertical-grip {:draggable "true"}]
:dragstart (fn [e]
(set! (.-dataTransfer.dropEffect e) "move")
(.dataTransfer.setData e "text/plain" nil)
(object/raise this :start-drag e)
)
:dragend (fn [e]
(object/raise this :end-drag e)
)
:drag (fn [e]
(set! (.-dataTransfer.dropEffect e) "move")
(object/raise this :width! e)))
(defn ->perc [x]
(if x
(str x "%")
"0"))
(defn floored [x]
(cond
(< x 0) 0
(> x 100) 100
:else x))
(defn to-perc [width x]
(* (/ x width) 100))
(defn next-tabset [t]
(let [ts (@multi :tabsets)]
(second (drop-while #(not= t %) ts))
))
(defn prev-tabset [t]
(let [ts (@multi :tabsets)]
(-> (take-while #(not= t %) ts)
(last))))
(defn previous-tabset-width [cur]
(let [ts (@multi :tabsets)]
(reduce + 0 (map (comp :width deref) (take-while #(not= cur %) ts)))
))
(defn add-tabset [ts]
(object/update! multi [:tabsets] conj ts)
(dom/append (:tabsets-elem @multi) (object/->content ts))
)
(defn spawn-tabset []
(let [ts (object/create ::tabset)
width (- 100 (reduce + (map (comp :width deref) (@multi :tabsets))))]
(object/merge! ts {:width width})
(add-tabset ts)
ts))
(defn equalize-tabset-widths []
(let [tss (:tabsets @multi)
width (/ 100.0 (count tss))]
(doseq [ts tss]
(object/merge! ts {:width width}))))
(defn temp-width [ts w]
(dom/css (object/->content ts) {:width (->perc w)
:border-width (if (= 0 w)
0
"")}))
(defn activate-tabset [ts]
(when-not (= (ctx/->obj :tabset) ts)
(when-let [old (ctx/->obj :tabset)]
(dom/remove-class (object/->content old) :active))
(ctx/in! :tabset ts)
(dom/add-class (object/->content ts) :active)
true))
(defui tabset-ui [this]
[:div.tabset {:style {:width (bound (subatom this :width) ->perc)}}
[:div.list
(bound this #(objs-list this (:objs %)))]
[:div.items
(map-bound (partial tabbed-item (subatom this :active-obj)) this {:path [:objs]})]
(vertical-grip this)]
:click (fn []
(object/raise this :active)))
(object/object* ::tabset
:objs []
:active-obj nil
:count 0
:tags #{:tabset}
:width 100
:init (fn [this]
(tabset-ui this)
))
(defn ->tabsets [tabs]
(for [k tabs]
(object/->content k)))
(def tabset (object/create ::tabset))
(defn add!
([obj] (add! obj nil))
([obj ts]
(when-let [cur-tabset (or ts (ctx/->obj :tabset))]
(object/add-tags obj [:tabset.tab])
(object/update! cur-tabset [:objs] conj obj)
(object/merge! obj {::tabset cur-tabset})
(add-watch (subatom obj [:dirty]) :tabs (fn [_ _ _ cur]
(object/raise cur-tabset :tab.updated)
))
obj)))
(defn rem-tabset
([ts] (rem-tabset ts false))
([ts prev?]
(let [to-ts (if prev?
(or (prev-tabset ts) (next-tabset ts))
(or (next-tabset ts) (prev-tabset ts)))]
(when to-ts
(object/merge! to-ts {:width (floored (+ (:width @to-ts) (:width @ts)))})
(dom/remove (object/->content ts))
(doseq [t (:objs @ts)]
(add! t to-ts))
(object/update! multi [:tabsets] #(vec (remove #{ts} %)))
(object/destroy! ts)
(equalize-tabset-widths)
(object/raise to-ts :active)))))
(defn rem! [obj]
(when (and obj @obj (::tabset @obj))
(let [cur-tabset (::tabset @obj)
idx (->index obj)
active (:active-obj @cur-tabset)
aidx (->index active)]
(remove-watch obj :tabs)
(object/merge! obj {::tabset nil})
(object/merge! cur-tabset {:objs (vec (remove #(= obj %) (@cur-tabset :objs)))})
(if (= obj active)
(object/raise cur-tabset :tab idx)
(when (not= aidx (->index active))
(object/merge! cur-tabset {:active-obj nil})
(active! active))
))))
(defn refresh! [obj]
(when-let [ts (::tabset @obj)]
(object/raise ts :tab.updated)))
(defn in-tab? [obj]
(@obj ::tabset))
(defn add-or-focus! [obj]
(if (in-tab? obj)
(active! obj)
(do
(add! obj)
(active! obj))))
(defn num-tabs []
(reduce (fn [res cur]
(+ res (count (:objs @cur))))
0
(:tabsets @multi)))
(defn active-tab []
(when-let [cur-tabset (ctx/->obj :tabset)]
(:active-obj @cur-tabset)))
(defn move-tab-to-tabset [obj ts]
(rem! obj)
(add! obj ts)
(active! obj)
(object/raise obj :move))
(defn move-tab [multi elem]
(let [id (dom/attr elem :obj-id)
idx (dom/index elem)
obj (object/by-id (js/parseInt id))
cnt (-> @multi :objs count)]
(rem! obj)
(add! obj multi)
(if (> cnt 0)
(update-tab-order multi (vec (concat (range idx) [cnt] (range idx cnt)))))
(active! obj)
(object/raise obj :move)))
;;*********************************************************
;; Behaviors
;;*********************************************************
(behavior ::on-destroy-remove
:triggers #{:destroy :closed}
:reaction (fn [this]
(rem! this)
))
(behavior ::active-tab-num
:triggers #{:tab}
:reaction (fn [this num]
(let [objs (@this :objs)]
(if (< num (count objs))
(active! (get objs num))
(active! (get objs (dec (count objs))))))
))
(behavior ::prev-tab
:triggers #{:tab.prev}
:throttle 100
:reaction (fn [this]
(let [objs (@this :objs)
idx (->index (:active-obj @this))]
(if (> idx 0)
(active! (get objs (dec idx)))
(active! (get objs (dec (count objs))))))
))
(behavior ::next-tab
:triggers #{:tab.next}
:throttle 100
:reaction (fn [this]
(let [objs (@this :objs)
idx (inc (->index (:active-obj @this)))]
(if (< idx (count objs))
(active! (get objs idx))
(active! (get objs 0))))
))
(behavior ::tab-close
:triggers #{:tab.close}
:reaction (fn [this]
(try
(let [orig (:active-obj @this)]
(object/raise orig :close))
(catch :default e
(js/lt.objs.console.error e)))))
(behavior ::on-destroy-objs
:triggers #{:destroy}
:reaction (fn [this]
(doseq [e (:objs @this)]
(object/destroy! e))
))
(behavior ::repaint-tab-updated
:triggers #{:tab.updated}
:reaction (fn [this]
(object/update! this [:count] inc)))
(behavior ::no-anim-on-drag
:triggers #{:start-drag}
:reaction (fn [this]
(anim/off)))
(behavior ::reanim-on-drop
:triggers #{:end-drag}
:reaction (fn [this]
(anim/on)))
(behavior ::set-dragging
:triggers #{:start-drag}
:reaction (fn [this]
(dom/add-class (dom/$ :body) :dragging)
))
(behavior ::unset-dragging
:triggers #{:end-drag}
:reaction (fn [this]
(dom/remove-class (dom/$ :body) :dragging)
))
(behavior ::set-width-final!
:triggers #{:end-drag}
:reaction (fn [this e]
(when-let [ts (next-tabset this)]
(let [width (dom/width (object/->content multi))
left (:left @multi)
cx (.-clientX e)
new-loc (- (+ width left) cx)
new-perc (floored (int (- 100 (previous-tabset-width this) (to-perc width new-loc))))
prev-width (:width @this)
new-perc (if (>= new-perc (+ (:width @ts) prev-width))
(+ (:width @ts) prev-width)
new-perc)
next-width (floored
(if-not ts
1
(+ (:width @ts) (- prev-width new-perc))))]
(cond
(= new-perc 0) (rem-tabset this)
(= next-width 0) (rem-tabset ts :prev)
:else
(when-not (= cx 0)
(if (or (< new-perc 0) )
(object/merge! this {:width 100})
(when (and (not= cx 0)
ts
(>= new-perc 0)
(>= next-width 0))
(object/merge! this {:width new-perc})
(if ts
(object/merge! ts {:width next-width})
(spawn-tabset)
)))))))))
(behavior ::width!
:triggers #{:width!}
:reaction (fn [this e]
(let [width (dom/width (object/->content multi))
left (:left @multi)
cx (.-clientX e)
new-loc (- (+ width left) cx)
new-perc (floored (int (- 100 (previous-tabset-width this) (to-perc width new-loc))))
prev-width (:width @this)
ts (next-tabset this)
new-perc (if (and ts
(>= new-perc (+ (:width @ts) prev-width)))
(+ (:width @ts) prev-width)
new-perc)
next-width (floored
(if-not ts
1
(+ (:width @ts) (- prev-width new-perc))))]
(when-not (= cx 0)
(if (or (< new-perc 0) )
(temp-width this 100)
(when (and (not= cx 0)
ts
(>= new-perc 0)
(>= next-width 0))
(temp-width this new-perc)
(if ts
(temp-width ts next-width)
(spawn-tabset))))))
))
(behavior ::tab-active
:triggers #{:active}
:reaction (fn [this]
(activate-tabset (::tabset @this))))
(behavior ::tab-label-menu+
:triggers #{:menu+}
:reaction (fn [this items]
(conj items
{:label "Move tab to new tabset"
:order 1
:click (fn [] (cmd/exec! :tabs.move-new-tabset (::tab-object this)))}
{:label "Close tab"
:order 2
:click (fn [] (object/raise this :close))})))
(behavior ::on-close-tab-label
:triggers #{:close}
:reaction (fn [this]
(when-let [e (::tab-object @this)]
(object/raise e :close))
(object/destroy! this)))
(behavior ::tabset-active
:triggers #{:active}
:reaction (fn [this]
(when (activate-tabset this)
(when-let [active (:active-obj @this)]
(object/raise active :focus!)))))
(behavior ::tabset-menu+
:triggers #{:menu+}
:reaction (fn [this items]
(conj items
{:label "New tabset"
:order 1
:click (fn [] (cmd/exec! :tabset.new))}
{:label "Close tabset"
:order 2
:click (fn [] (rem-tabset this))})))
(behavior ::left!
:triggers #{:left!}
:reaction (fn [this v]
(object/update! this [:left] + v)))
(behavior ::right!
:triggers #{:right!}
:reaction (fn [this v]
(object/update! this [:right] + v)))
(behavior ::bottom!
:triggers #{:bottom!}
:reaction (fn [this v]
(object/update! this [:bottom] + v)))
(behavior ::tabset-bottom!
:triggers #{:tabset-bottom!}
:reaction (fn [this v]
(object/update! this [:tabset-bottom] + v)))
(behavior ::init-sortable
:triggers #{:init}
:reaction (fn [app]
(js/initSortable js/window)))
(behavior ::init
:triggers #{:init}
:reaction (fn [this]
(add-tabset tabset)
(object/raise tabset :active)
))
(behavior ::show-close-button
:desc "Tab: Show close button on tabs"
:type :user
:triggers #{:close-button+}
:reaction (fn [this]
true))
;;*********************************************************
;; Commands
;;*********************************************************
(cmd/command {:command :tabs.move-new-tabset
:desc "Tab: Move tab to new tabset"
:exec (fn [tab]
(when-let [ts (ctx/->obj :tabset)]
(when-let [cur (or tab (@ts :active-obj))]
(let [new (cmd/exec! :tabset.new)]
(move-tab-to-tabset cur new)))))})
(cmd/command {:command :tabs.move-next-tabset
:desc "Tab: Move tab to next tabset"
:exec (fn []
(when-let [ts (ctx/->obj :tabset)]
(let [cur (@ts :active-obj)
next (or (next-tabset ts) (prev-tabset ts))]
(when (and cur next (not= next ts))
(move-tab-to-tabset cur next)))))})
(cmd/command {:command :tabs.move-prev-tabset
:desc "Tab: Move tab to previous tabset"
:exec (fn []
(when-let [ts (ctx/->obj :tabset)]
(let [cur (@ts :active-obj)
next (or (prev-tabset ts) (next-tabset ts))]
(when (and cur next (not= next ts))
(move-tab-to-tabset cur next)))))})
(cmd/command {:command :tabs.next
:desc "Tab: Next tab"
:exec (fn []
(object/raise (ctx/->obj :tabset) :tab.next))})
(cmd/command {:command :tabs.prev
:desc "Tab: Previous tab"
:exec (fn []
(object/raise (ctx/->obj :tabset) :tab.prev))})
(cmd/command {:command :tabs.close
:desc "Tab: Close current tab"
:exec (fn []
(when (= 0 (num-tabs))
(cmd/exec! :window.close))
(when-let [ts (ctx/->obj :tabset)]
(when (and (:active-obj @ts)
@(:active-obj @ts))
(object/raise ts :tab.close)))
)})
(cmd/command {:command :tabs.close-all
:desc "Tabs: Close all tabs"
:exec (fn []
(let [objs (object/by-tag :tabset.tab)]
(doseq [obj objs]
(object/raise obj :close))))})
(cmd/command {:command :tabs.close-others
:desc "Tabs: Close tabs except current tab"
:exec (fn []
(let [cur (active-tab)
objs (object/by-tag :tabset.tab)]
(doseq [obj objs]
(if-not (identical? cur obj)
(object/raise obj :close)))))})
(cmd/command {:command :tabs.goto
:hidden true
:desc "Tab: Goto tab # or :last"
:exec (fn [x]
(let [ts (ctx/->obj :tabset)
tab-count (count (:objs @ts))
idx (dec tab-count)]
(object/raise (ctx/->obj :tabset)
:tab (if (= x :last) idx x))))})
(cmd/command {:command :tabset.next
:desc "Tabset: Next tabset"
:exec (fn []
(if-let [n (next-tabset (ctx/->obj :tabset))]
(object/raise n :active)
(if-let [n (get (:tabsets @multi) 0)]
(object/raise n :active))))})
(cmd/command {:command :tabset.prev
:desc "Tabset: Previous tabset"
:exec (fn []
(if-let [n (prev-tabset (ctx/->obj :tabset))]
(object/raise n :active)
(if-let [n (last (:tabsets @multi))]
(object/raise n :active))))})
(cmd/command {:command :tabset.close
:desc "Tabset: Remove active tabset"
:exec (fn [ts]
(rem-tabset (ctx/->obj :tabset)))})
(cmd/command {:command :tabset.new
:desc "Tabset: Add a tabset"
:exec (fn []
(let [ts (spawn-tabset)]
(equalize-tabset-widths)
ts))})
(cmd/command {:command :tabs.focus-active
:desc "Tab: focus active"
:hidden true
:exec (fn []
(when-let [active (:active-obj @(ctx/->obj :tabset))]
(object/raise active :focus!)))})
(append (object/->content canvas/canvas) (:content @multi))
| null | https://raw.githubusercontent.com/LightTable/LightTable/57f861ae5b33d21ef8c7d064dd026a2b1a98fa87/src/lt/objs/tabs.cljs | clojure | Disable middle-click pasting in linux
Remove old tabs
*********************************************************
Behaviors
*********************************************************
*********************************************************
Commands
********************************************************* | (ns lt.objs.tabs
"Manage tabsets and tabs"
(:require [lt.object :refer [object* behavior*] :as object]
[lt.objs.editor :as editor]
[lt.objs.canvas :as canvas]
[lt.objs.command :as cmd]
[lt.objs.animations :as anim]
[lt.objs.context :as ctx]
[lt.objs.menu :as menu]
[lt.util.load :as load]
[lt.util.dom :refer [$ append] :as dom]
[lt.util.style :refer [->px]]
[lt.util.js :refer [now]]
[singultus.core :as crate]
[singultus.binding :refer [bound map-bound subatom]])
(:require-macros [lt.macros :refer [behavior defui]]))
(load/js "core/lighttable/ui/dragdrop.js" :sync)
(def multi-def (object* ::multi-editor2
:tags #{:tabs}
:tabsets []
:left 0
:right 0
:bottom 0
:init (fn [this]
(let [tabsets (crate/html [:div.tabsets {:style {:bottom (bound (subatom this :tabset-bottom) ->px)}}])]
(object/merge! this {:tabsets-elem tabsets})
(ctx/in! :tabs this)
[:div#multi {:style {:left (bound (subatom this :left) ->px)
:right (bound (subatom this :right) ->px)
:bottom (bound (subatom this :bottom) ->px)}}
tabsets]
))))
(def multi (object/create multi-def))
(defn ensure-visible [idx tabset]
(when-let [cur (aget (dom/$$ ".list li" (object/->content tabset)) idx)]
(let [left (.-offsetLeft cur)
width (.-clientWidth cur)
right (+ left width)
gp (dom/parent (dom/parent cur))
pwidth (.-clientWidth gp)
pleft (.-scrollLeft gp)
pright (+ pleft pwidth)
inside (and (>= left pleft)
(<= right pright))]
(when-not inside
(if (> pleft left)
(set! (.-scrollLeft gp) (- left 50))
(set! (.-scrollLeft gp) (+ (- right pwidth) 50)))
))))
(defn ->index [obj]
(when (and obj @obj (::tabset @obj))
(first (first (filter #(= obj (second %)) (map-indexed vector (:objs @(::tabset @obj))))))))
(defn active! [obj]
(when (and obj
(::tabset @obj))
(object/merge! (::tabset @obj) {:active-obj obj})
(object/raise obj :show)
(ensure-visible (->index obj) (::tabset @obj))))
(defn update-tab-order [multi children]
(let [ser (if (vector? children)
children
(map #(dom/attr % :pos) children))
prev-active (:active-obj @multi)]
(object/merge! multi {:objs (mapv (:objs @multi) ser)
:active-obj nil})
(active! prev-active)
))
(defn ->name [e]
(or
(get-in @e [:info :name])
(:name @e)
"unknown"))
(defn ->path [e]
(or
(get-in @e [:info :path])
(:path @e)
""))
(defn active? [c e multi]
(str c (when (= (@multi :active-obj) e)
" active")))
(defn dirty? [c e]
(str c (when (:dirty @e)
" dirty")))
(defui close-tab [obj]
[:span.tab-close "x"]
:click (fn [] (object/raise obj :close)))
(defui item [label multi e pos]
[:li {:class (-> " "
(active? e multi)
(dirty? e))
:draggable "true"
:title (->path e)
:obj-id (object/->id e)
:pos pos}
[:span.file-name
(->name e)]
(when (object/raise-reduce e :close-button+ false)
(close-tab label))]
:mouseup (fn [ev]
(when (or (= 1 (.-button ev)) (.-metaKey ev))
(dom/prevent ev)))
:click (fn [ev]
(if (or (= 1 (.-button ev)) (.-metaKey ev))
(object/raise label :close)
(active! e)))
:contextmenu (fn [ev]
(object/raise label :menu! ev)))
(object/object* ::tab-label
:tags #{:tab-label}
:init (fn [this multi e pos]
(object/merge! this {::tab-object e
:tabset multi})
(item this multi e pos)))
(declare move-tab)
(defn objs-list [multi objs]
(let [prev-tabs (filter #(= (:tabset @%) multi) (object/by-tag :tab-label))
item (crate/html
[:ul
(for [[idx o] (map vector (range) objs)
:when @o]
(object/->content (object/create ::tab-label multi o idx)))])]
(doseq [tab prev-tabs]
(object/destroy! tab))
(js/sortable item (js-obj "axis" "x" "distance" 10 "scroll" false "opacity" 0.9 "connectWith" ".list"))
(dom/on item "contextmenu" (fn [e]
(object/raise multi :menu! e)))
(dom/on item "moved" (fn [e] (move-tab multi (.-opts e)) ))
(dom/on item "sortupdate" (fn [e] (update-tab-order multi (.-opts e))))
item))
(defui tabbed-item [active item]
[:div.content {:style {:visibility (bound active #(if (= % @item)
"visible"
"hidden"))}}
(bound item #(when % (object/->content %)))])
(defui vertical-grip [this]
[:div.vertical-grip {:draggable "true"}]
:dragstart (fn [e]
(set! (.-dataTransfer.dropEffect e) "move")
(.dataTransfer.setData e "text/plain" nil)
(object/raise this :start-drag e)
)
:dragend (fn [e]
(object/raise this :end-drag e)
)
:drag (fn [e]
(set! (.-dataTransfer.dropEffect e) "move")
(object/raise this :width! e)))
(defn ->perc [x]
(if x
(str x "%")
"0"))
(defn floored [x]
(cond
(< x 0) 0
(> x 100) 100
:else x))
(defn to-perc [width x]
(* (/ x width) 100))
(defn next-tabset [t]
(let [ts (@multi :tabsets)]
(second (drop-while #(not= t %) ts))
))
(defn prev-tabset [t]
(let [ts (@multi :tabsets)]
(-> (take-while #(not= t %) ts)
(last))))
(defn previous-tabset-width [cur]
(let [ts (@multi :tabsets)]
(reduce + 0 (map (comp :width deref) (take-while #(not= cur %) ts)))
))
(defn add-tabset [ts]
(object/update! multi [:tabsets] conj ts)
(dom/append (:tabsets-elem @multi) (object/->content ts))
)
(defn spawn-tabset []
(let [ts (object/create ::tabset)
width (- 100 (reduce + (map (comp :width deref) (@multi :tabsets))))]
(object/merge! ts {:width width})
(add-tabset ts)
ts))
(defn equalize-tabset-widths []
(let [tss (:tabsets @multi)
width (/ 100.0 (count tss))]
(doseq [ts tss]
(object/merge! ts {:width width}))))
(defn temp-width [ts w]
(dom/css (object/->content ts) {:width (->perc w)
:border-width (if (= 0 w)
0
"")}))
(defn activate-tabset [ts]
(when-not (= (ctx/->obj :tabset) ts)
(when-let [old (ctx/->obj :tabset)]
(dom/remove-class (object/->content old) :active))
(ctx/in! :tabset ts)
(dom/add-class (object/->content ts) :active)
true))
(defui tabset-ui [this]
[:div.tabset {:style {:width (bound (subatom this :width) ->perc)}}
[:div.list
(bound this #(objs-list this (:objs %)))]
[:div.items
(map-bound (partial tabbed-item (subatom this :active-obj)) this {:path [:objs]})]
(vertical-grip this)]
:click (fn []
(object/raise this :active)))
(object/object* ::tabset
:objs []
:active-obj nil
:count 0
:tags #{:tabset}
:width 100
:init (fn [this]
(tabset-ui this)
))
(defn ->tabsets [tabs]
(for [k tabs]
(object/->content k)))
(def tabset (object/create ::tabset))
(defn add!
([obj] (add! obj nil))
([obj ts]
(when-let [cur-tabset (or ts (ctx/->obj :tabset))]
(object/add-tags obj [:tabset.tab])
(object/update! cur-tabset [:objs] conj obj)
(object/merge! obj {::tabset cur-tabset})
(add-watch (subatom obj [:dirty]) :tabs (fn [_ _ _ cur]
(object/raise cur-tabset :tab.updated)
))
obj)))
(defn rem-tabset
([ts] (rem-tabset ts false))
([ts prev?]
(let [to-ts (if prev?
(or (prev-tabset ts) (next-tabset ts))
(or (next-tabset ts) (prev-tabset ts)))]
(when to-ts
(object/merge! to-ts {:width (floored (+ (:width @to-ts) (:width @ts)))})
(dom/remove (object/->content ts))
(doseq [t (:objs @ts)]
(add! t to-ts))
(object/update! multi [:tabsets] #(vec (remove #{ts} %)))
(object/destroy! ts)
(equalize-tabset-widths)
(object/raise to-ts :active)))))
(defn rem! [obj]
(when (and obj @obj (::tabset @obj))
(let [cur-tabset (::tabset @obj)
idx (->index obj)
active (:active-obj @cur-tabset)
aidx (->index active)]
(remove-watch obj :tabs)
(object/merge! obj {::tabset nil})
(object/merge! cur-tabset {:objs (vec (remove #(= obj %) (@cur-tabset :objs)))})
(if (= obj active)
(object/raise cur-tabset :tab idx)
(when (not= aidx (->index active))
(object/merge! cur-tabset {:active-obj nil})
(active! active))
))))
(defn refresh! [obj]
(when-let [ts (::tabset @obj)]
(object/raise ts :tab.updated)))
(defn in-tab? [obj]
(@obj ::tabset))
(defn add-or-focus! [obj]
(if (in-tab? obj)
(active! obj)
(do
(add! obj)
(active! obj))))
(defn num-tabs []
(reduce (fn [res cur]
(+ res (count (:objs @cur))))
0
(:tabsets @multi)))
(defn active-tab []
(when-let [cur-tabset (ctx/->obj :tabset)]
(:active-obj @cur-tabset)))
(defn move-tab-to-tabset [obj ts]
(rem! obj)
(add! obj ts)
(active! obj)
(object/raise obj :move))
(defn move-tab [multi elem]
(let [id (dom/attr elem :obj-id)
idx (dom/index elem)
obj (object/by-id (js/parseInt id))
cnt (-> @multi :objs count)]
(rem! obj)
(add! obj multi)
(if (> cnt 0)
(update-tab-order multi (vec (concat (range idx) [cnt] (range idx cnt)))))
(active! obj)
(object/raise obj :move)))
(behavior ::on-destroy-remove
:triggers #{:destroy :closed}
:reaction (fn [this]
(rem! this)
))
(behavior ::active-tab-num
:triggers #{:tab}
:reaction (fn [this num]
(let [objs (@this :objs)]
(if (< num (count objs))
(active! (get objs num))
(active! (get objs (dec (count objs))))))
))
(behavior ::prev-tab
:triggers #{:tab.prev}
:throttle 100
:reaction (fn [this]
(let [objs (@this :objs)
idx (->index (:active-obj @this))]
(if (> idx 0)
(active! (get objs (dec idx)))
(active! (get objs (dec (count objs))))))
))
(behavior ::next-tab
:triggers #{:tab.next}
:throttle 100
:reaction (fn [this]
(let [objs (@this :objs)
idx (inc (->index (:active-obj @this)))]
(if (< idx (count objs))
(active! (get objs idx))
(active! (get objs 0))))
))
(behavior ::tab-close
:triggers #{:tab.close}
:reaction (fn [this]
(try
(let [orig (:active-obj @this)]
(object/raise orig :close))
(catch :default e
(js/lt.objs.console.error e)))))
(behavior ::on-destroy-objs
:triggers #{:destroy}
:reaction (fn [this]
(doseq [e (:objs @this)]
(object/destroy! e))
))
(behavior ::repaint-tab-updated
:triggers #{:tab.updated}
:reaction (fn [this]
(object/update! this [:count] inc)))
(behavior ::no-anim-on-drag
:triggers #{:start-drag}
:reaction (fn [this]
(anim/off)))
(behavior ::reanim-on-drop
:triggers #{:end-drag}
:reaction (fn [this]
(anim/on)))
(behavior ::set-dragging
:triggers #{:start-drag}
:reaction (fn [this]
(dom/add-class (dom/$ :body) :dragging)
))
(behavior ::unset-dragging
:triggers #{:end-drag}
:reaction (fn [this]
(dom/remove-class (dom/$ :body) :dragging)
))
(behavior ::set-width-final!
:triggers #{:end-drag}
:reaction (fn [this e]
(when-let [ts (next-tabset this)]
(let [width (dom/width (object/->content multi))
left (:left @multi)
cx (.-clientX e)
new-loc (- (+ width left) cx)
new-perc (floored (int (- 100 (previous-tabset-width this) (to-perc width new-loc))))
prev-width (:width @this)
new-perc (if (>= new-perc (+ (:width @ts) prev-width))
(+ (:width @ts) prev-width)
new-perc)
next-width (floored
(if-not ts
1
(+ (:width @ts) (- prev-width new-perc))))]
(cond
(= new-perc 0) (rem-tabset this)
(= next-width 0) (rem-tabset ts :prev)
:else
(when-not (= cx 0)
(if (or (< new-perc 0) )
(object/merge! this {:width 100})
(when (and (not= cx 0)
ts
(>= new-perc 0)
(>= next-width 0))
(object/merge! this {:width new-perc})
(if ts
(object/merge! ts {:width next-width})
(spawn-tabset)
)))))))))
(behavior ::width!
:triggers #{:width!}
:reaction (fn [this e]
(let [width (dom/width (object/->content multi))
left (:left @multi)
cx (.-clientX e)
new-loc (- (+ width left) cx)
new-perc (floored (int (- 100 (previous-tabset-width this) (to-perc width new-loc))))
prev-width (:width @this)
ts (next-tabset this)
new-perc (if (and ts
(>= new-perc (+ (:width @ts) prev-width)))
(+ (:width @ts) prev-width)
new-perc)
next-width (floored
(if-not ts
1
(+ (:width @ts) (- prev-width new-perc))))]
(when-not (= cx 0)
(if (or (< new-perc 0) )
(temp-width this 100)
(when (and (not= cx 0)
ts
(>= new-perc 0)
(>= next-width 0))
(temp-width this new-perc)
(if ts
(temp-width ts next-width)
(spawn-tabset))))))
))
(behavior ::tab-active
:triggers #{:active}
:reaction (fn [this]
(activate-tabset (::tabset @this))))
(behavior ::tab-label-menu+
:triggers #{:menu+}
:reaction (fn [this items]
(conj items
{:label "Move tab to new tabset"
:order 1
:click (fn [] (cmd/exec! :tabs.move-new-tabset (::tab-object this)))}
{:label "Close tab"
:order 2
:click (fn [] (object/raise this :close))})))
(behavior ::on-close-tab-label
:triggers #{:close}
:reaction (fn [this]
(when-let [e (::tab-object @this)]
(object/raise e :close))
(object/destroy! this)))
(behavior ::tabset-active
:triggers #{:active}
:reaction (fn [this]
(when (activate-tabset this)
(when-let [active (:active-obj @this)]
(object/raise active :focus!)))))
(behavior ::tabset-menu+
:triggers #{:menu+}
:reaction (fn [this items]
(conj items
{:label "New tabset"
:order 1
:click (fn [] (cmd/exec! :tabset.new))}
{:label "Close tabset"
:order 2
:click (fn [] (rem-tabset this))})))
(behavior ::left!
:triggers #{:left!}
:reaction (fn [this v]
(object/update! this [:left] + v)))
(behavior ::right!
:triggers #{:right!}
:reaction (fn [this v]
(object/update! this [:right] + v)))
(behavior ::bottom!
:triggers #{:bottom!}
:reaction (fn [this v]
(object/update! this [:bottom] + v)))
(behavior ::tabset-bottom!
:triggers #{:tabset-bottom!}
:reaction (fn [this v]
(object/update! this [:tabset-bottom] + v)))
(behavior ::init-sortable
:triggers #{:init}
:reaction (fn [app]
(js/initSortable js/window)))
(behavior ::init
:triggers #{:init}
:reaction (fn [this]
(add-tabset tabset)
(object/raise tabset :active)
))
(behavior ::show-close-button
:desc "Tab: Show close button on tabs"
:type :user
:triggers #{:close-button+}
:reaction (fn [this]
true))
(cmd/command {:command :tabs.move-new-tabset
:desc "Tab: Move tab to new tabset"
:exec (fn [tab]
(when-let [ts (ctx/->obj :tabset)]
(when-let [cur (or tab (@ts :active-obj))]
(let [new (cmd/exec! :tabset.new)]
(move-tab-to-tabset cur new)))))})
(cmd/command {:command :tabs.move-next-tabset
:desc "Tab: Move tab to next tabset"
:exec (fn []
(when-let [ts (ctx/->obj :tabset)]
(let [cur (@ts :active-obj)
next (or (next-tabset ts) (prev-tabset ts))]
(when (and cur next (not= next ts))
(move-tab-to-tabset cur next)))))})
(cmd/command {:command :tabs.move-prev-tabset
:desc "Tab: Move tab to previous tabset"
:exec (fn []
(when-let [ts (ctx/->obj :tabset)]
(let [cur (@ts :active-obj)
next (or (prev-tabset ts) (next-tabset ts))]
(when (and cur next (not= next ts))
(move-tab-to-tabset cur next)))))})
(cmd/command {:command :tabs.next
:desc "Tab: Next tab"
:exec (fn []
(object/raise (ctx/->obj :tabset) :tab.next))})
(cmd/command {:command :tabs.prev
:desc "Tab: Previous tab"
:exec (fn []
(object/raise (ctx/->obj :tabset) :tab.prev))})
(cmd/command {:command :tabs.close
:desc "Tab: Close current tab"
:exec (fn []
(when (= 0 (num-tabs))
(cmd/exec! :window.close))
(when-let [ts (ctx/->obj :tabset)]
(when (and (:active-obj @ts)
@(:active-obj @ts))
(object/raise ts :tab.close)))
)})
(cmd/command {:command :tabs.close-all
:desc "Tabs: Close all tabs"
:exec (fn []
(let [objs (object/by-tag :tabset.tab)]
(doseq [obj objs]
(object/raise obj :close))))})
(cmd/command {:command :tabs.close-others
:desc "Tabs: Close tabs except current tab"
:exec (fn []
(let [cur (active-tab)
objs (object/by-tag :tabset.tab)]
(doseq [obj objs]
(if-not (identical? cur obj)
(object/raise obj :close)))))})
(cmd/command {:command :tabs.goto
:hidden true
:desc "Tab: Goto tab # or :last"
:exec (fn [x]
(let [ts (ctx/->obj :tabset)
tab-count (count (:objs @ts))
idx (dec tab-count)]
(object/raise (ctx/->obj :tabset)
:tab (if (= x :last) idx x))))})
(cmd/command {:command :tabset.next
:desc "Tabset: Next tabset"
:exec (fn []
(if-let [n (next-tabset (ctx/->obj :tabset))]
(object/raise n :active)
(if-let [n (get (:tabsets @multi) 0)]
(object/raise n :active))))})
(cmd/command {:command :tabset.prev
:desc "Tabset: Previous tabset"
:exec (fn []
(if-let [n (prev-tabset (ctx/->obj :tabset))]
(object/raise n :active)
(if-let [n (last (:tabsets @multi))]
(object/raise n :active))))})
(cmd/command {:command :tabset.close
:desc "Tabset: Remove active tabset"
:exec (fn [ts]
(rem-tabset (ctx/->obj :tabset)))})
(cmd/command {:command :tabset.new
:desc "Tabset: Add a tabset"
:exec (fn []
(let [ts (spawn-tabset)]
(equalize-tabset-widths)
ts))})
(cmd/command {:command :tabs.focus-active
:desc "Tab: focus active"
:hidden true
:exec (fn []
(when-let [active (:active-obj @(ctx/->obj :tabset))]
(object/raise active :focus!)))})
(append (object/->content canvas/canvas) (:content @multi))
|
1355e55c50f0b1d799872f8b5246f86a06014b63c5225013683c42e61308459f | incoherentsoftware/defect-process | AttackDescriptions.hs | module Enemy.All.Flying.AttackDescriptions
( EnemyAttackDescriptions(..)
, mkEnemyAttackDescs
) where
import Control.Monad.IO.Class (MonadIO)
import Attack
import FileCache
import Window.Graphics
data EnemyAttackDescriptions = EnemyAttackDescriptions
{ _shoot :: AttackDescription
, _fireball :: AttackDescription
, _shock :: AttackDescription
}
mkEnemyAttackDescs :: (FileCache m, GraphicsRead m, MonadIO m) => m EnemyAttackDescriptions
mkEnemyAttackDescs =
EnemyAttackDescriptions <$>
loadPackAtkDesc "attack-projectile-release.atk" <*>
loadPackAtkDesc "attack-projectile.atk" <*>
loadPackAtkDesc "attack-shock.atk"
where loadPackAtkDesc = \f -> loadPackAttackDescription $ PackResourceFilePath "data/enemies/flying-enemy.pack" f
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/Flying/AttackDescriptions.hs | haskell | module Enemy.All.Flying.AttackDescriptions
( EnemyAttackDescriptions(..)
, mkEnemyAttackDescs
) where
import Control.Monad.IO.Class (MonadIO)
import Attack
import FileCache
import Window.Graphics
data EnemyAttackDescriptions = EnemyAttackDescriptions
{ _shoot :: AttackDescription
, _fireball :: AttackDescription
, _shock :: AttackDescription
}
mkEnemyAttackDescs :: (FileCache m, GraphicsRead m, MonadIO m) => m EnemyAttackDescriptions
mkEnemyAttackDescs =
EnemyAttackDescriptions <$>
loadPackAtkDesc "attack-projectile-release.atk" <*>
loadPackAtkDesc "attack-projectile.atk" <*>
loadPackAtkDesc "attack-shock.atk"
where loadPackAtkDesc = \f -> loadPackAttackDescription $ PackResourceFilePath "data/enemies/flying-enemy.pack" f
|
|
19525123310d6f6b18497fe6903501ccf65a36274e3c23b18653a80e5b80e0ac | kaoskorobase/mescaline | Clock.hs | module Mescaline.Clock (
Tempo
, fromBps
, Time(..)
, Clock
, mkClock
, tempo
, elapsed
, logical
, beatsToSeconds
, secondsToBeats
, setTempo
, setElapsed
, setLogical
) where
import Mescaline.Time (Beats, Seconds)
data Tempo = Tempo {
beatsPerSecond :: Double
, secondsPerBeat :: Double
} deriving (Eq, Show)
fromBps :: Double -> Tempo
fromBps bps = Tempo bps (recip bps)
data Time = Time {
seconds :: !Seconds
, beats :: !Beats
} deriving (Eq, Show)
data Clock = Clock {
tempo :: !Tempo
, base :: !Time
, elapsed :: !Time
, logical :: !Time
} deriving (Eq, Show)
mkClock :: Tempo -> Seconds -> Clock
mkClock t s0 = Clock t t0 t0 t0
where t0 = Time s0 (realToFrac s0)
beatsToSeconds :: Clock -> Beats -> Seconds
beatsToSeconds c b = seconds (base c) + realToFrac (realToFrac b' * secondsPerBeat (tempo c))
where b' = b - beats (base c)
secondsToBeats :: Clock -> Seconds -> Beats
secondsToBeats c s = beats (base c) + realToFrac (realToFrac s' * beatsPerSecond (tempo c))
where s' = s - seconds (base c)
setLogical :: Beats -> Clock -> Clock
setLogical b c = c { logical = Time (beatsToSeconds c b) b }
setElapsed :: Seconds -> Clock -> Clock
setElapsed s c = c { elapsed = Time s (secondsToBeats c s) }
-- | Change the clock's tempo. Before the new tempo is installed, the base
-- point is re-anchored at the current logical beat, converting that beat to
-- seconds with the *old* tempo; this keeps beat\/second conversions
-- continuous across the change (no jump at the switch-over point).
setTempo :: Tempo -> Clock -> Clock
setTempo t c =
    let b0 = beats (logical c)
    in c {
        base = Time (beatsToSeconds c b0) b0
      , tempo = t
      }
| null | https://raw.githubusercontent.com/kaoskorobase/mescaline/13554fc4826d0c977d0010c0b4fb74ba12ced6b9/lib/mescaline-patterns/src/Mescaline/Clock.hs | haskell | module Mescaline.Clock (
Tempo
, fromBps
, Time(..)
, Clock
, mkClock
, tempo
, elapsed
, logical
, beatsToSeconds
, secondsToBeats
, setTempo
, setElapsed
, setLogical
) where
import Mescaline.Time (Beats, Seconds)
data Tempo = Tempo {
beatsPerSecond :: Double
, secondsPerBeat :: Double
} deriving (Eq, Show)
fromBps :: Double -> Tempo
fromBps bps = Tempo bps (recip bps)
data Time = Time {
seconds :: !Seconds
, beats :: !Beats
} deriving (Eq, Show)
data Clock = Clock {
tempo :: !Tempo
, base :: !Time
, elapsed :: !Time
, logical :: !Time
} deriving (Eq, Show)
mkClock :: Tempo -> Seconds -> Clock
mkClock t s0 = Clock t t0 t0 t0
where t0 = Time s0 (realToFrac s0)
beatsToSeconds :: Clock -> Beats -> Seconds
beatsToSeconds c b = seconds (base c) + realToFrac (realToFrac b' * secondsPerBeat (tempo c))
where b' = b - beats (base c)
secondsToBeats :: Clock -> Seconds -> Beats
secondsToBeats c s = beats (base c) + realToFrac (realToFrac s' * beatsPerSecond (tempo c))
where s' = s - seconds (base c)
setLogical :: Beats -> Clock -> Clock
setLogical b c = c { logical = Time (beatsToSeconds c b) b }
setElapsed :: Seconds -> Clock -> Clock
setElapsed s c = c { elapsed = Time s (secondsToBeats c s) }
setTempo :: Tempo -> Clock -> Clock
setTempo t c =
let b0 = beats (logical c)
in c {
base = Time (beatsToSeconds c b0) b0
, tempo = t
}
|
|
8666ef8bb586b7cfc20e6cada6cc084f5916c99626fa38d1cd82697eb9511851 | input-output-hk/project-icarus-importer | Simple.hs | # LANGUAGE TypeFamilies #
-- | Simple implementation of slotting.
module Pos.Slotting.Impl.Simple
( SimpleSlottingStateVar
, mkSimpleSlottingStateVar
, SimpleSlottingMode
, MonadSimpleSlotting
, getCurrentSlotSimple
, getCurrentSlotSimple'
, getCurrentSlotBlockingSimple
, getCurrentSlotBlockingSimple'
, getCurrentSlotInaccurateSimple
, getCurrentSlotInaccurateSimple'
, currentTimeSlottingSimple
) where
import Universum
import Mockable (CurrentTime, Mockable, currentTime)
import Pos.Core.Configuration (HasProtocolConstants)
import Pos.Core.Slotting (SlotId (..), Timestamp (..), unflattenSlotId)
import Pos.Slotting.Impl.Util (approxSlotUsingOutdated, slotFromTimestamp)
import Pos.Slotting.MemState (MonadSlotsData, getCurrentNextEpochIndexM,
waitCurrentEpochEqualsM)
import Pos.Util (HasLens (..))
----------------------------------------------------------------------------
-- Mode
----------------------------------------------------------------------------
type SimpleSlottingMode ctx m
= ( Mockable CurrentTime m
, MonadSlotsData ctx m
, MonadIO m
)
type MonadSimpleSlotting ctx m
= ( MonadReader ctx m
, HasLens SimpleSlottingStateVar ctx SimpleSlottingStateVar
, SimpleSlottingMode ctx m
)
----------------------------------------------------------------------------
State
----------------------------------------------------------------------------
-- | Mutable slotting state: the last slot that was successfully computed.
-- Used as a monotone lower bound by 'getCurrentSlotInaccurateSimple'' so the
-- reported slot never goes backwards.
data SimpleSlottingState = SimpleSlottingState
    { _sssLastSlot :: !SlotId
    }
type SimpleSlottingStateVar = TVar SimpleSlottingState
-- | Allocate the shared slotting state, starting from the very first slot.
mkSimpleSlottingStateVar :: (MonadIO m, HasProtocolConstants) => m SimpleSlottingStateVar
mkSimpleSlottingStateVar = atomically (newTVar initialState)
  where
    initialState = SimpleSlottingState (unflattenSlotId 0)
----------------------------------------------------------------------------
-- Implementation
----------------------------------------------------------------------------
-- | Compute the current slot from the current time. Returns 'Nothing' when
-- the timestamp cannot be placed within the known slotting data; on success
-- the slot is also recorded as the last seen slot (via 'updateLastSlot').
getCurrentSlotSimple'
    :: (SimpleSlottingMode ctx m, HasProtocolConstants)
    => SimpleSlottingStateVar
    -> m (Maybe SlotId)
getCurrentSlotSimple' var =
    currentTimeSlottingSimple
        >>= slotFromTimestamp
        >>= traverse (updateLastSlot var)
-- | Like 'getCurrentSlotSimple'', but reads the state var from the context.
getCurrentSlotSimple
    :: (MonadSimpleSlotting ctx m, HasProtocolConstants)
    => m (Maybe SlotId)
getCurrentSlotSimple = do
    var <- view (lensOf @SimpleSlottingStateVar)
    getCurrentSlotSimple' var
-- | Like 'getCurrentSlotSimple'', but instead of returning 'Nothing' when
-- the current time cannot be placed within the known slotting data, block
-- until slotting data for the next epoch becomes available and retry.
getCurrentSlotBlockingSimple'
    :: (SimpleSlottingMode ctx m, HasProtocolConstants)
    => SimpleSlottingStateVar
    -> m SlotId
getCurrentSlotBlockingSimple' var = do
    (_, nextEpochIndex) <- getCurrentNextEpochIndexM
    getCurrentSlotSimple' var >>= \case
        Just slot -> pure slot
        Nothing -> do
            -- Slot unknown: wait for the next epoch's data, then try again.
            waitCurrentEpochEqualsM nextEpochIndex
            getCurrentSlotBlockingSimple' var
getCurrentSlotBlockingSimple
:: (MonadSimpleSlotting ctx m, HasProtocolConstants)
=> m SlotId
getCurrentSlotBlockingSimple =
view (lensOf @SimpleSlottingStateVar) >>= getCurrentSlotBlockingSimple'
-- | Like 'getCurrentSlotSimple'', but never blocks and always yields a slot:
-- when the current time cannot be placed within the known slotting data,
-- fall back to an approximation based on the outdated data, clamped to be
-- no earlier than the last slot actually computed (so the result never
-- moves backwards).
getCurrentSlotInaccurateSimple'
    :: (SimpleSlottingMode ctx m, HasProtocolConstants)
    => SimpleSlottingStateVar
    -> m SlotId
getCurrentSlotInaccurateSimple' var =
    getCurrentSlotSimple' var >>= \case
        Just slot -> pure slot
        Nothing -> do
            lastSlot <- _sssLastSlot <$> atomically (readTVar var)
            max lastSlot <$> (currentTimeSlottingSimple >>=
                              approxSlotUsingOutdated)
getCurrentSlotInaccurateSimple
:: (MonadSimpleSlotting ctx m, HasProtocolConstants)
=> m SlotId
getCurrentSlotInaccurateSimple =
view (lensOf @SimpleSlottingStateVar) >>= getCurrentSlotInaccurateSimple'
-- | The current time wrapped as a 'Timestamp'.
currentTimeSlottingSimple :: (SimpleSlottingMode ctx m) => m Timestamp
currentTimeSlottingSimple = fmap Timestamp currentTime
-- | Atomically raise the recorded last slot to @slot@ (monotone update) and
-- return the value now stored.
updateLastSlot :: MonadIO m => SimpleSlottingStateVar -> SlotId -> m SlotId
updateLastSlot var slot = atomically $ do
    modifyTVar' var (\(SimpleSlottingState prev) -> SimpleSlottingState (prev `max` slot))
    fmap _sssLastSlot (readTVar var)
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/infra/Pos/Slotting/Impl/Simple.hs | haskell | | Simple implementation of slotting.
--------------------------------------------------------------------------
Mode
--------------------------------------------------------------------------
--------------------------------------------------------------------------
--------------------------------------------------------------------------
--------------------------------------------------------------------------
Implementation
-------------------------------------------------------------------------- | # LANGUAGE TypeFamilies #
module Pos.Slotting.Impl.Simple
( SimpleSlottingStateVar
, mkSimpleSlottingStateVar
, SimpleSlottingMode
, MonadSimpleSlotting
, getCurrentSlotSimple
, getCurrentSlotSimple'
, getCurrentSlotBlockingSimple
, getCurrentSlotBlockingSimple'
, getCurrentSlotInaccurateSimple
, getCurrentSlotInaccurateSimple'
, currentTimeSlottingSimple
) where
import Universum
import Mockable (CurrentTime, Mockable, currentTime)
import Pos.Core.Configuration (HasProtocolConstants)
import Pos.Core.Slotting (SlotId (..), Timestamp (..), unflattenSlotId)
import Pos.Slotting.Impl.Util (approxSlotUsingOutdated, slotFromTimestamp)
import Pos.Slotting.MemState (MonadSlotsData, getCurrentNextEpochIndexM,
waitCurrentEpochEqualsM)
import Pos.Util (HasLens (..))
type SimpleSlottingMode ctx m
= ( Mockable CurrentTime m
, MonadSlotsData ctx m
, MonadIO m
)
type MonadSimpleSlotting ctx m
= ( MonadReader ctx m
, HasLens SimpleSlottingStateVar ctx SimpleSlottingStateVar
, SimpleSlottingMode ctx m
)
State
data SimpleSlottingState = SimpleSlottingState
{ _sssLastSlot :: !SlotId
}
type SimpleSlottingStateVar = TVar SimpleSlottingState
mkSimpleSlottingStateVar :: (MonadIO m, HasProtocolConstants) => m SimpleSlottingStateVar
mkSimpleSlottingStateVar = atomically $ newTVar $ SimpleSlottingState $ unflattenSlotId 0
getCurrentSlotSimple'
:: (SimpleSlottingMode ctx m, HasProtocolConstants)
=> SimpleSlottingStateVar
-> m (Maybe SlotId)
getCurrentSlotSimple' var =
currentTimeSlottingSimple
>>= slotFromTimestamp
>>= traverse (updateLastSlot var)
getCurrentSlotSimple
:: (MonadSimpleSlotting ctx m, HasProtocolConstants)
=> m (Maybe SlotId)
getCurrentSlotSimple = view (lensOf @SimpleSlottingStateVar) >>= getCurrentSlotSimple'
getCurrentSlotBlockingSimple'
:: (SimpleSlottingMode ctx m, HasProtocolConstants)
=> SimpleSlottingStateVar
-> m SlotId
getCurrentSlotBlockingSimple' var = do
(_, nextEpochIndex) <- getCurrentNextEpochIndexM
getCurrentSlotSimple' var >>= \case
Just slot -> pure slot
Nothing -> do
waitCurrentEpochEqualsM nextEpochIndex
getCurrentSlotBlockingSimple' var
getCurrentSlotBlockingSimple
:: (MonadSimpleSlotting ctx m, HasProtocolConstants)
=> m SlotId
getCurrentSlotBlockingSimple =
view (lensOf @SimpleSlottingStateVar) >>= getCurrentSlotBlockingSimple'
getCurrentSlotInaccurateSimple'
:: (SimpleSlottingMode ctx m, HasProtocolConstants)
=> SimpleSlottingStateVar
-> m SlotId
getCurrentSlotInaccurateSimple' var =
getCurrentSlotSimple' var >>= \case
Just slot -> pure slot
Nothing -> do
lastSlot <- _sssLastSlot <$> atomically (readTVar var)
max lastSlot <$> (currentTimeSlottingSimple >>=
approxSlotUsingOutdated)
getCurrentSlotInaccurateSimple
:: (MonadSimpleSlotting ctx m, HasProtocolConstants)
=> m SlotId
getCurrentSlotInaccurateSimple =
view (lensOf @SimpleSlottingStateVar) >>= getCurrentSlotInaccurateSimple'
currentTimeSlottingSimple :: (SimpleSlottingMode ctx m) => m Timestamp
currentTimeSlottingSimple = Timestamp <$> currentTime
updateLastSlot :: MonadIO m => SimpleSlottingStateVar -> SlotId -> m SlotId
updateLastSlot var slot = atomically $ do
modifyTVar' var (SimpleSlottingState . max slot . _sssLastSlot)
_sssLastSlot <$> readTVar var
|
b0882542afadd2c6030f04a219fe0294448ec61dc1f08aad0e926b260faff178 | inconvergent/weir | plot-paths.lisp | (in-package #:weir-tests)
(defun %main-plot-paths ()
  ;; Fix the RNG state so the generated SVG is reproducible across runs.
  (rnd:set-rnd-state 76)
  (let* ((size 1000d0)
         (mid (vec:rep (* 0.5d0 size)))  ; canvas centre
         (psvg (draw-svg:make* :height size :width size))
         (wer (weir:make)))
    ;; Two closed bezier loops, each through 5 random points inside a circle
    ;; of radius 400 around the centre, sampled adaptively (error limit 2).
    (weir:add-path! wer (bzspl:adaptive-pos
                          (bzspl:make (rnd:nin-circ 5 400d0 :xy mid))
                          :lim 2d0)
                    :closed t)
    (weir:add-path! wer (bzspl:adaptive-pos
                          (bzspl:make (rnd:nin-circ 5 400d0 :xy mid))
                          :lim 2d0)
                    :closed t)
    ;; Split edges at every crossing so the two loops share vertices there.
    (weir:intersect-all! wer)
    ;; Draw raw segments thick and red underneath ...
    (loop for lp in (weir:get-segments wer)
          do (draw-svg:path psvg (weir:get-verts wer lp)
                            :stroke "red" :sw 5d0))
    ;; ... and the walked graph paths thin, in the default colour, on top.
    (loop for lp in (weir:walk-graph wer)
          do (draw-svg:path psvg (weir:get-verts wer lp)
                            :sw 1d0))
    (draw-svg:save psvg
                   (weir-utils:internal-path-string
                     "test/data/plot-paths"))))
;; File test wrapper; TIME prints run/consing statistics for the render.
(define-file-tests test-plot-paths ()
  (time (%main-plot-paths)))
| null | https://raw.githubusercontent.com/inconvergent/weir/3c364e3a0e15526f0d6985f08a57b312b5c35f7d/test/plot-paths.lisp | lisp | (in-package #:weir-tests)
(defun %main-plot-paths ()
(rnd:set-rnd-state 76)
(let* ((size 1000d0)
(mid (vec:rep (* 0.5d0 size)))
(psvg (draw-svg:make* :height size :width size))
(wer (weir:make)))
(weir:add-path! wer (bzspl:adaptive-pos
(bzspl:make (rnd:nin-circ 5 400d0 :xy mid))
:lim 2d0)
:closed t)
(weir:add-path! wer (bzspl:adaptive-pos
(bzspl:make (rnd:nin-circ 5 400d0 :xy mid))
:lim 2d0)
:closed t)
(weir:intersect-all! wer)
(loop for lp in (weir:get-segments wer)
do (draw-svg:path psvg (weir:get-verts wer lp)
:stroke "red" :sw 5d0))
(loop for lp in (weir:walk-graph wer)
do (draw-svg:path psvg (weir:get-verts wer lp)
:sw 1d0))
(draw-svg:save psvg
(weir-utils:internal-path-string
"test/data/plot-paths"))))
(define-file-tests test-plot-paths ()
(time (%main-plot-paths)))
|
|
8bc29261825dbaac036e149bd140dc9430d5886cc84bf40c4d814c53d2e86482 | weyrick/roadsend-php | php-gtk-signals.scm | ;; ***** BEGIN LICENSE BLOCK *****
Roadsend PHP Compiler Runtime Libraries
Copyright ( C ) 2007 Roadsend , Inc.
;;
;; This program is free software; you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public License
as published by the Free Software Foundation ; either version 2.1
of the License , or ( at your option ) any later version .
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details .
;;
You should have received a copy of the GNU Lesser General Public License
;; along with this program; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 , USA
;; ***** END LICENSE BLOCK *****
(module php-gtk-signals
(load (php-macros "../../../php-macros.scm"))
(load (php-gtk-macros "php-gtk-macros.sch"))
; (library "common")
(import (gtk-binding "cigloo/gtk.scm")
(gtk-signals "cigloo/signals.scm"))
(library "php-runtime")
(import (php-gtk-common-lib "php-gtk-common.scm")
)
(export
(init-php-gtk-signals)
(phpgtk-callback-closure function data pass-object? simple?)
))
;; Module initializer hook; nothing to set up, returns 1 (success).
(define (init-php-gtk-signals)
   1)
( ( disconnect : c - name gtk_signal_disconnect ) ( handler_id : gtk - type guint ) )
;; /* }}} */
;; /* {{{ GObject::connect */
;; static PHP_METHOD(GObject, connect)
;; {
;; phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_NORMAL, FALSE);
;; }
;; /* }}} */
;; /* {{{ GObject::connect_after */
;; static PHP_METHOD(GObject, connect_after)
;; {
;; phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_NORMAL, TRUE);
;; }
;; /* }}} */
/ * { {
static PHP_METHOD(GObject , connect_simple )
;; {
;; phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_SIMPLE, FALSE);
;; }
;; /* }}} */
/ * { { { * /
;; static PHP_METHOD(GObject, connect_simple_after)
;; {
;; phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_SIMPLE, TRUE);
;; }
;; /* }}} */
;; /* {{{ GObject::connect_object */
static PHP_METHOD(GObject , connect_object )
;; {
;; phpg_warn_deprecated("use connect() or connect_simple()" TSRMLS_CC);
phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU , PHPG_CONNECT_OBJECT , FALSE ) ;
;; }
;; /* }}} */
;; /* {{{ GObject::connect_object_after */
;; static PHP_METHOD(GObject, connect_object_after)
;; {
phpg_warn_deprecated("use ( ) or connect_simple_after ( ) " TSRMLS_CC ) ;
phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU , PHPG_CONNECT_OBJECT , TRUE ) ;
;; }
;; /* }}} */
( ( connect : return - type guint : c - name gtk_signal_connect ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( func_data : gtk - type gpointer ) )
;; PHP method GtkObject::connect(signal, funcname, ...data): attach FUNCNAME
;; as a handler for SIGNAL, running before the default handler; the emitting
;; object is passed to the callback, followed by any extra DATA arguments.
(defmethod gtkobject (connect signal funcname #!rest data)
   (connect-impl (maybe-unbox $this)
                 (maybe-unbox signal)
                 (maybe-unbox funcname)
                 (map maybe-unbox data)
                 object?: #f
                 simple?: #f
                 after?: #f))
(defmethod gtkobject (connect_simple signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #t
after?: #f))
( defmethod - XXX gtkobject ( ) TRUE )
( ( connect_after : return - type guint : c - name gtk_signal_connect_after ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( func_data : gtk - type gpointer ) )
(defmethod gtkobject (connect_after signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #f
after?: #t))
(defmethod gtkobject (connect_simple_after signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #t
after?: #t))
( ( connect_object : return - type guint : c - name gtk_signal_connect_object ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( slot_object : gtk - type GtkObject * ) )
(defmethod gtkobject (connect_object signal function #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox function)
(map maybe-unbox data)
object?: #t
after?: #f
simple?: #f))
( ( connect_object_after : return - type guint : c - name gtk_signal_connect_object_after ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( slot_object : gtk - type GtkObject * ) )
(defmethod gtkobject (connect_object_after signal function #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox function)
(map maybe-unbox data)
object?: #t
after?: #t
simple?: #f))
;; Shared worker for all connect* variants.  Wraps the PHP callback FUNCTION
;; (plus extra DATA args) in a closure via phpgtk-callback-closure and
;; registers it on $this for SIGNAL.  OBJECT? suppresses passing the emitting
;; object to the callback, SIMPLE? drops all signal arguments, AFTER? runs
;; the handler after the default handler.  Always returns TRUE to PHP.
(define (connect-impl $this signal function data #!key object? after? simple?)
  (debug-trace 3 "file " *PHP-FILE* " line " *PHP-LINE* " connecting a non-object, the function is " (mkstr function) " php-hash? says " (php-hash? function))
  (let ((signal (mkstr signal)))
    (gtk-signal-connect (GTK_OBJECT (gtk-object $this))
                        signal
                        (phpgtk-callback-closure function data
                                                 (not object?) simple?)
                        after?)
    TRUE))
;; (define (connect-object-impl $this signal function data after? simple?)
( let ( ( signal ( mkstr signal ) ) )
( debug - trace 3 " file " * PHP - FILE * " line " * PHP - LINE * " connecting an object , the function is " ( mkstr function ) " php - hash ? says " ( php - hash ? function ) )
;; (gtk-signal-connect (GTK_OBJECT (gtk-object $this))
;; signal
( phpgtk - callback - closure function data # f simple ? )
;; after?)
;; TRUE))
;; Like TRY, but when the BIGLOOSTACKDEPTH environment variable is set the
;; form runs unprotected, so an error surfaces with a full native stack
;; trace instead of being converted into a PHP warning — a debugging aid.
(define-macro (my-try form handler)
  `(if (getenv "BIGLOOSTACKDEPTH")
       ,form
       (try ,form ,handler)))
;; it looks like the utility of pass-object? is to suppress passing
;; the object. The php-gtk marshaller (php_gtk_callback_marshal)
;; passes the object if it gets one, unless pass-object? is specified
;; and false. If it doesn't get an object, it doesn't pass it,
;; regardless of what pass-object? is set to.
;;
;; simple? is handled in the marshaller in php-gtk, but we handle it
;; here. it just means the arguments aren't used, so no need to pass
;; them. if we handled it in the marshaller, we could skip
;; marshalling them too.
;; Build a closure suitable for gtk-signal-connect that dispatches to a PHP
;; callback.  FUNCTION is either a plain PHP function name, or a php-hash of
;; (object-or-classname, method-name) selecting an instance or static method.
;; DATA is a list of extra PHP values appended after the marshalled signal
;; arguments.  When PASS-OBJECT? is false the emitting GTK object is dropped
;; from the argument list (it is only passed when GTK actually supplied one);
;; when SIMPLE? is true no arguments are forwarded at all.  Any error raised
;; by the callback is reported as a PHP warning (naming the file/line where
;; the handler was connected) and the signal handler resumes with FALSE.
(define (phpgtk-callback-closure function data pass-object? simple?)
  (let ((connected-line *PHP-LINE*)
        (connected-file *PHP-FILE*))
    ;; we've basically got three copies of the same code below, in a
    ;; fit of premature optimization.
    (if (php-hash? function)
        (let ((object (php-hash-lookup function 0))
              (method (mkstr (php-hash-lookup function 1))))
          (if (php-object? object)
              ;; the callback is a regular method call
              (lambda (obj . rest)
                (debug-trace 2 "calling method " method " arguments " obj ", " rest ", data: " data )
                (my-try (if simple?
                            (call-php-method object method)
                            (apply call-php-method object method
                                   (append (map convert-to-php-type
                                                (if (and obj pass-object?)
                                                    (cons obj rest)
                                                    rest))
                                           data)))
                        (lambda (e p m o)
                          (php-warning "Unable to call callback " (php-object-class object) "->" method "() specified in "
                                       connected-file " on line " connected-line ": " m)
                          (e FALSE))))
              ;; the callback is a static method call
              ;; NOTE(review): here OBJECT is a class-name string, not a PHP
              ;; object — confirm php-object-class accepts strings, else the
              ;; warning message itself could fail.
              (lambda (obj . rest)
                (my-try (if simple?
                            (call-static-php-method (mkstr object) NULL method)
                            (apply call-static-php-method (mkstr object) NULL method
                                   (append (map convert-to-php-type
                                                (if (and obj pass-object?)
                                                    (cons obj rest)
                                                    rest))
                                           data)))
                        (lambda (e p m o)
                          (php-warning "Unable to call callback " (php-object-class object) "::" method "() specified in "
                                       connected-file " on line " connected-line ": " m)
                          (e FALSE))))))
        ;; the callback is a regular function call
        (let ((function (mkstr function)))
          (lambda (obj . rest)
            (my-try (if simple?
                        (php-funcall function)
                        (apply php-funcall function
                               (append (map convert-to-php-type
                                            (if (and obj pass-object?)
                                                (cons obj rest)
                                                rest))
                                       data)))
                    (lambda (e p m o)
                      (php-warning "Unable to call callback " function "() specified in "
                                   connected-file " on line " connected-line ": " m)
                      (e FALSE))))))))
; (if pass-object?
; (
; (lambda args
; (let ((args (if pass-object? args (cdr args))))
; (try
; (if (php-object? object)
; (apply call-php-method object method
; (append (map convert-to-php-type args)
; data))
; (apply call-static-php-method (mkstr object) method
; (append (map convert-to-php-type args)
; data)))
; (lambda (e p m o)
; (php-warning "Unable to call callback " (php-object-class object) "->" method "() specified in "
; connected-file " on line " connected-line ": " m)
; (e FALSE))))))
; (let ((function (mkstr function)))
; (lambda args
; (let ((args (if pass-object? args (cdr args))))
; (try
; (apply php-funcall function
; (append (map convert-to-php-type args) data))
; (lambda (e p m o)
; (php-warning "Unable to call callback " function "() specified in "
; connected-file " on line " connected-line ": " m)
; (e FALSE)))))))))
| null | https://raw.githubusercontent.com/weyrick/roadsend-php/d6301a897b1a02d7a85bdb915bea91d0991eb158/runtime/ext/gtk2/php-gtk-signals.scm | scheme | ***** BEGIN LICENSE BLOCK *****
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public License
either version 2.1
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program; if not, write to the Free Software
***** END LICENSE BLOCK *****
(library "common")
/* }}} */
/* {{{ GObject::connect */
static PHP_METHOD(GObject, connect)
{
phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_NORMAL, FALSE);
}
/* }}} */
/* {{{ GObject::connect_after */
static PHP_METHOD(GObject, connect_after)
{
phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_NORMAL, TRUE);
}
/* }}} */
{
phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_SIMPLE, FALSE);
}
/* }}} */
static PHP_METHOD(GObject, connect_simple_after)
{
phpg_signal_connect_impl(INTERNAL_FUNCTION_PARAM_PASSTHRU, PHPG_CONNECT_SIMPLE, TRUE);
}
/* }}} */
/* {{{ GObject::connect_object */
{
phpg_warn_deprecated("use connect() or connect_simple()" TSRMLS_CC);
}
/* }}} */
/* {{{ GObject::connect_object_after */
static PHP_METHOD(GObject, connect_object_after)
{
}
/* }}} */
(define (connect-object-impl $this signal function data after? simple?)
(gtk-signal-connect (GTK_OBJECT (gtk-object $this))
signal
after?)
TRUE))
it looks like the utility of pass-object? is to suppress passing
the object. The php-gtk marshaller (php_gtk_callback_marshal)
passes the object if it gets one, unless pass-object? is specified
and false. If it doesn't get an object, it doesn't pass it,
regardless of what pass-object? is set to.
simple? is handled in the marshaller in php-gtk, but we handle it
here. it just means the arguments aren't used, so no need to pass
them. if we handled it in the marshaller, we could skip
marshalling them too.
fit of premature optimization.
the callback is a regular method call
the callback is a static method call
the callback is a regular function call
(if pass-object?
(
(lambda args
(let ((args (if pass-object? args (cdr args))))
(try
(if (php-object? object)
(apply call-php-method object method
(append (map convert-to-php-type args)
data))
(apply call-static-php-method (mkstr object) method
(append (map convert-to-php-type args)
data)))
(lambda (e p m o)
(php-warning "Unable to call callback " (php-object-class object) "->" method "() specified in "
connected-file " on line " connected-line ": " m)
(e FALSE))))))
(let ((function (mkstr function)))
(lambda args
(let ((args (if pass-object? args (cdr args))))
(try
(apply php-funcall function
(append (map convert-to-php-type args) data))
(lambda (e p m o)
(php-warning "Unable to call callback " function "() specified in "
connected-file " on line " connected-line ": " m)
(e FALSE))))))))) | Roadsend PHP Compiler Runtime Libraries
Copyright ( C ) 2007 Roadsend , Inc.
of the License , or ( at your option ) any later version .
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public License
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 , USA
(module php-gtk-signals
(load (php-macros "../../../php-macros.scm"))
(load (php-gtk-macros "php-gtk-macros.sch"))
(import (gtk-binding "cigloo/gtk.scm")
(gtk-signals "cigloo/signals.scm"))
(library "php-runtime")
(import (php-gtk-common-lib "php-gtk-common.scm")
)
(export
(init-php-gtk-signals)
(phpgtk-callback-closure function data pass-object? simple?)
))
(define (init-php-gtk-signals)
1)
( ( disconnect : c - name gtk_signal_disconnect ) ( handler_id : gtk - type guint ) )
/ * { {
static PHP_METHOD(GObject , connect_simple )
/ * { { { * /
static PHP_METHOD(GObject , connect_object )
( ( connect : return - type guint : c - name gtk_signal_connect ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( func_data : gtk - type gpointer ) )
(defmethod gtkobject (connect signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #f
after?: #f))
(defmethod gtkobject (connect_simple signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #t
after?: #f))
( defmethod - XXX gtkobject ( ) TRUE )
( ( connect_after : return - type guint : c - name gtk_signal_connect_after ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( func_data : gtk - type gpointer ) )
(defmethod gtkobject (connect_after signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #f
after?: #t))
(defmethod gtkobject (connect_simple_after signal funcname #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox funcname)
(map maybe-unbox data)
object?: #f
simple?: #t
after?: #t))
( ( connect_object : return - type guint : c - name gtk_signal_connect_object ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( slot_object : gtk - type GtkObject * ) )
(defmethod gtkobject (connect_object signal function #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox function)
(map maybe-unbox data)
object?: #t
after?: #f
simple?: #f))
( ( connect_object_after : return - type guint : c - name gtk_signal_connect_object_after ) ( name : gtk - type const - gchar * ) ( func : gtk - type GtkSignalFunc ) ( slot_object : gtk - type GtkObject * ) )
(defmethod gtkobject (connect_object_after signal function #!rest data)
(connect-impl (maybe-unbox $this)
(maybe-unbox signal)
(maybe-unbox function)
(map maybe-unbox data)
object?: #t
after?: #t
simple?: #f))
(define (connect-impl $this signal function data #!key object? after? simple?)
(debug-trace 3 "file " *PHP-FILE* " line " *PHP-LINE* " connecting a non-object, the function is " (mkstr function) " php-hash? says " (php-hash? function))
(let ((signal (mkstr signal)))
(gtk-signal-connect (GTK_OBJECT (gtk-object $this))
signal
(phpgtk-callback-closure function data
(not object?) simple?)
after?)
TRUE))
( let ( ( signal ( mkstr signal ) ) )
( debug - trace 3 " file " * PHP - FILE * " line " * PHP - LINE * " connecting an object , the function is " ( mkstr function ) " php - hash ? says " ( php - hash ? function ) )
( phpgtk - callback - closure function data # f simple ? )
(define-macro (my-try form handler)
`(if (getenv "BIGLOOSTACKDEPTH")
,form
(try ,form ,handler)))
(define (phpgtk-callback-closure function data pass-object? simple?)
(let ((connected-line *PHP-LINE*)
(connected-file *PHP-FILE*))
we 've basically got three copies of the same code below , in a
(if (php-hash? function)
(let ((object (php-hash-lookup function 0))
(method (mkstr (php-hash-lookup function 1))))
(if (php-object? object)
(lambda (obj . rest)
(debug-trace 2 "calling method " method " arguments " obj ", " rest ", data: " data )
(my-try (if simple?
(call-php-method object method)
(apply call-php-method object method
(append (map convert-to-php-type
(if (and obj pass-object?)
(cons obj rest)
rest))
data)))
(lambda (e p m o)
(php-warning "Unable to call callback " (php-object-class object) "->" method "() specified in "
connected-file " on line " connected-line ": " m)
(e FALSE))))
(lambda (obj . rest)
(my-try (if simple?
(call-static-php-method (mkstr object) NULL method)
(apply call-static-php-method (mkstr object) NULL method
(append (map convert-to-php-type
(if (and obj pass-object?)
(cons obj rest)
rest))
data)))
(lambda (e p m o)
(php-warning "Unable to call callback " (php-object-class object) "::" method "() specified in "
connected-file " on line " connected-line ": " m)
(e FALSE))))))
(let ((function (mkstr function)))
(lambda (obj . rest)
(my-try (if simple?
(php-funcall function)
(apply php-funcall function
(append (map convert-to-php-type
(if (and obj pass-object?)
(cons obj rest)
rest))
data)))
(lambda (e p m o)
(php-warning "Unable to call callback " function "() specified in "
connected-file " on line " connected-line ": " m)
(e FALSE))))))))
|
438ed09185b0f3f51b7df64b8c8542c0d6665eba8a32bf472defa435916bb4ee | hellonico/origami-dnn | cam.clj | (ns origami-dnn.demo.ssdnet.cam
(:require [origami-dnn.net.mobilenet :refer [find-objects]]
[opencv4.dnn :as dnn]
[opencv4.dnn.core :as origami-dnn]
[origami-dnn.draw :as d]
[opencv4.utils :refer [resize-by simple-cam-window]]))
(defn -main [& args]
  ;; Fetch the Caffe MobileNet SSD network (weights, options, class labels)
  ;; from the origami model repository, then run detection on every webcam
  ;; frame shown in a simple window.
  (let [ [net opts labels] (origami-dnn/read-net-from-repo "networks.caffe:mobilenet:1.0.0") ]
    (simple-cam-window
     (fn [buffer]
       ;; Detect objects in the frame and draw labelled red boxes in place.
       (-> buffer
           (find-objects net opts)
           (d/red-boxes! labels))))))
; (-main)
; (def spec "networks.caffe:mobilenet:1.0.0")
| null | https://raw.githubusercontent.com/hellonico/origami-dnn/f55a32d0d3d528fcf57aaac10cfb20c7998b380c/src/origami_dnn/demo/ssdnet/cam.clj | clojure | (-main)
(def spec "networks.caffe:mobilenet:1.0.0") | (ns origami-dnn.demo.ssdnet.cam
(:require [origami-dnn.net.mobilenet :refer [find-objects]]
[opencv4.dnn :as dnn]
[opencv4.dnn.core :as origami-dnn]
[origami-dnn.draw :as d]
[opencv4.utils :refer [resize-by simple-cam-window]]))
(defn -main [& args]
(let [ [net opts labels] (origami-dnn/read-net-from-repo "networks.caffe:mobilenet:1.0.0") ]
(simple-cam-window
(fn [buffer]
(-> buffer
(find-objects net opts)
(d/red-boxes! labels))))))
|
4b74fddacdd57f2011698996728e5342dfec1112f317fed9ed16a3391a349b28 | reflex-frp/reflex-native | Types.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
|Types used throughout " Reflex . Native . Test " .
module Reflex.Native.Test.Types
(
-- * Unique identities
TestIdentity, unTestIdentity, newTestIdentity, tshowTestIdentity
-- * Test views
, TestHolder, TestViewCommon(..), TestContainerView(..), TestTextView(..), TestMarker(..), TestView(..), _testView_common, _testView_identity
-- ** Test views as diagnostic text
, showsTestContainerView, showsTestView, showTestViewHierarchy, tshowTestContainerViewIdentity, tshowTestTextViewIdentity, tshowTestMarkerIdentity
, tshowTestViewIdentity
-- ** Traversing a test view hierarchy
, traverseTestContainerView, traverseTestView
-- * Test execution environment and evaluation monad
, TestEnv(..), TestEvaluation(..)
) where
import Control.Concurrent.Chan (Chan)
import Control.Concurrent.STM.TVar (TVar)
import Control.Monad.Exception (MonadAsyncException, MonadException)
import Control.Monad.Fix (MonadFix)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.RWS.Strict (RWST)
import Data.Dependent.Sum (DSum)
import Data.DList (DList)
import qualified Data.DList as DList
import Data.Foldable (toList)
import Data.Functor.Identity (Identity(..))
import Data.IORef (IORef, newIORef, atomicModifyIORef')
import Data.Monoid ((<>))
import Data.Sequence (Seq)
import Data.Text (Text, pack)
import GHC.Generics (Generic)
import qualified Rank2
import Rank2 (apply)
import Reflex.Host.Class (ReflexHost(type EventHandle, type EventTrigger))
import Reflex.Native.TextStyle (TextStyle(..))
import Reflex.Native.ViewLayout (ViewLayout)
import Reflex.Native.ViewStyle (ViewStyle(..))
import Reflex.PerformEvent.Base (FireCommand)
import Reflex.Spider (SpiderHost, SpiderTimeline)
import Reflex.TriggerEvent.Base (EventTriggerRef, TriggerInvocation)
import System.IO.Unsafe (unsafePerformIO)
-- |A unique identity for a test view, holder, marker, or similar thing qualified by what it's an identity for. Almost identical to "Data.Unique" except that
-- this has a more useful 'Show' instance for diagnostics.
-- The wrapped 'Integer' is a process-wide serial number allocated from 'nextTestIdentityRef'.
newtype TestIdentity = TestIdentity { unTestIdentity :: Integer } deriving (Eq, Ord)
-- |Render a unique identifier as @#n@ 'Text' for diagnostic output.
tshowTestIdentity :: TestIdentity -> Text
tshowTestIdentity = pack . ('#' :) . show . unTestIdentity
-- |Shared reference to make unique 'TestIdentity' values.
-- NOINLINE is required for the top-level 'unsafePerformIO' idiom: without it the
-- IORef could be duplicated at use sites, breaking uniqueness.
{-# NOINLINE nextTestIdentityRef #-}
nextTestIdentityRef :: IORef Integer
nextTestIdentityRef = unsafePerformIO $ newIORef 1
-- |Create a new 'TestIdentity' carrying the next serial number. Safe to call from
-- any thread thanks to 'atomicModifyIORef''.
newTestIdentity :: MonadIO m => m TestIdentity
newTestIdentity = liftIO $ do
  serial <- atomicModifyIORef' nextTestIdentityRef $ \ n ->
    let n' = succ n in (n', n')
  pure (TestIdentity serial)
-- |'ShowS' for a @'ViewStyle' Identity@ since 'Show' is needed for test assertion messages and so on.
showsViewStyle :: ViewStyle Identity -> ShowS
showsViewStyle (ViewStyle {..}) =
  showChar '{'
  . showString "bg="
  . shows (runIdentity _viewStyle_backgroundColor)
  . showChar '}'
-- |Common attributes of every view in a test view hierarchy. Parameterized by @v@ which wraps every value; @v ~ TVar@ during the building step, and
-- @v ~ Identity@ for frozen copies of the view hierarchy.
data TestViewCommon v = TestViewCommon
  { _testViewCommon_identity :: TestIdentity
    -- ^Unique identity of the view for distinguishing it among others. Note: not wrapped in @v@, so it survives freezing unchanged.
  , _testViewCommon_style :: ViewStyle v
    -- ^The style of the view.
  , _testViewCommon_layout :: v ViewLayout
    -- ^The layout of the view.
  , _testViewCommon_accessibilityLabel :: v (Maybe Text)
    -- ^The accessibility label of the view.
  } deriving (Generic)
-- |Show a @TestViewCommon@ for test assertion messages and the like. Usually used as the first part of showing the view type embedding the common attributes.
instance Show (TestViewCommon Identity) where
  showsPrec _ (TestViewCommon {..}) =
    showChar '#'
    . shows (unTestIdentity _testViewCommon_identity)
    . showString " style="
    . showsViewStyle _testViewCommon_style
    . showString " layout="
    . shows (runIdentity _testViewCommon_layout)
    . showString " accessibilityLabel="
    . shows (runIdentity _testViewCommon_accessibilityLabel)
-- Rank-2 instances: the natural transformation is applied to every @v@-wrapped
-- field, while the 'TestIdentity' (not wrapped in @v@) is carried through untouched.
instance Rank2.Functor TestViewCommon where
  f <$> TestViewCommon a b c d = TestViewCommon a (f Rank2.<$> b) (f c) (f d)
instance Rank2.Apply TestViewCommon where
  -- the identity of the left (function-carrying) structure is discarded; the right one wins
  TestViewCommon _ fb fc fd <*> TestViewCommon a b c d = TestViewCommon a (fb Rank2.<*> b) (apply fc c) (apply fd d)
instance Rank2.Applicative TestViewCommon where
  -- NOTE(review): @-1@ is a dummy identity ("it's a hack!") — presumably values built
  -- via 'Rank2.pure' are never compared by identity; confirm before relying on it.
  pure f = TestViewCommon (TestIdentity (-1) {- it's a hack! -}) (Rank2.pure f) f f
instance Rank2.Foldable TestViewCommon where
  foldMap f (TestViewCommon _ b c d) = Rank2.foldMap f b <> f c <> f d
instance Rank2.Traversable TestViewCommon where
  traverse f (TestViewCommon a b c d) = TestViewCommon a <$> Rank2.traverse f b <*> f c <*> f d
-- |A container view which has common view attributes and a collection of subviews.
data TestContainerView v = TestContainerView
  { _testContainerView_common :: TestViewCommon v
    -- ^The common view attributes for the container.
  , _testContainerView_contents :: v (Seq (TestView v))
    -- ^The subviews. This is the recursive knot: each 'TestView' may itself be a container.
  } deriving (Generic)
-- can't derive the Rank2 instances on account of the fixed point - would need @v@ or @v'@ to be a Functor but they need to be natural.
-- |Show a 'TestContainerView' for test assertion messages and the like (recursing into subviews).
instance Show (TestContainerView Identity) where
  showsPrec _ = showsTestContainerView True
-- |Show a 'TestContainerView' for test assertion messages and the like. Takes a boolean indicating whether subviews will be dumped (@True@) or not (@False@).
showsTestContainerView :: Bool -> TestContainerView Identity -> ShowS
showsTestContainerView recurse (TestContainerView {..}) =
  showString "container " . shows _testContainerView_common . contentsS
  where
    contentsS
      | recurse   = showChar ' ' . showList (toList _testContainerView_contents)
      | otherwise = id
-- |Traverse some effect through a @'TestContainerView' v@ while changing @v -> v'@. See 'traverseTestView' for how this is commonly used.
-- The traversal effect needs to accept an additional mapping effect to apply inside in order to handle the fixed point @_testContainerView_contents@.
traverseTestContainerView
  :: Applicative f
  => (forall a b. (a -> f b) -> v a -> f (v' b))
  -> TestContainerView v -> f (TestContainerView v')
traverseTestContainerView f (TestContainerView {..}) =
  TestContainerView
    -- common attributes contain no nested views, so the inner effect is just 'pure'
    <$> Rank2.traverse (f pure) _testContainerView_common
    -- recurse into each subview, rebuilding the fixed point under the new wrapper @v'@
    <*> f (traverse (traverseTestView f)) _testContainerView_contents
-- |Show the type and identity of a test container view, equivalent to @'tshowTestViewIdentity' . 'TestView_Container'@
tshowTestContainerViewIdentity :: TestContainerView v -> Text
tshowTestContainerViewIdentity cv = tshowTestViewIdentity (TestView_Container cv)
-- |A text display view which has common view attributes, a text style, and whatever the current/captured text is.
data TestTextView v = TestTextView
  { _testTextView_common :: TestViewCommon v
    -- ^The common view attributes for the text view.
  , _testTextView_style :: TextStyle v
    -- ^The style to display the text with.
  , _testTextView_text :: v Text
    -- ^The actual text.
  } deriving (Generic)
-- |'ShowS' for a @'TextStyle' Identity@ since 'Show' is needed for test assertion messages and so on.
showsTextStyle :: TextStyle Identity -> ShowS
showsTextStyle (TextStyle {..}) = foldr (.) id
  [ showString "{color="
  , shows (runIdentity _textStyle_textColor)
  , showString " font="
  , shows (runIdentity _textStyle_font)
  , showChar '}'
  ]
-- |Show a 'TestTextView' for test assertion messages and the like.
instance Show (TestTextView Identity) where
  showsPrec _ (TestTextView {..})
    = showString "text " . shows _testTextView_common
    . showString " textStyle=" . showsTextStyle _testTextView_style
    . showString " text=" . shows (runIdentity _testTextView_text)
-- Rank-2 instances: unlike 'TestViewCommon', every field of 'TestTextView' participates
-- in the natural transformation (the common attributes and the text style are themselves
-- rank-2 structures).
instance Rank2.Functor TestTextView where
  f <$> TestTextView a b c = TestTextView (f Rank2.<$> a) (f Rank2.<$> b) (f c)
instance Rank2.Apply TestTextView where
  TestTextView fa fb fc <*> TestTextView a b c = TestTextView (fa Rank2.<*> a) (fb Rank2.<*> b) (apply fc c)
instance Rank2.Applicative TestTextView where
  pure f = TestTextView (Rank2.pure f) (Rank2.pure f) f
instance Rank2.Foldable TestTextView where
  foldMap f (TestTextView a b c) = Rank2.foldMap f a <> Rank2.foldMap f b <> f c
instance Rank2.Traversable TestTextView where
  traverse f (TestTextView a b c) = TestTextView <$> Rank2.traverse f a <*> Rank2.traverse f b <*> f c
-- |Show the type and identity of a test text view, equivalent to @'tshowTestViewIdentity' . 'TestView_Text'@
tshowTestTextViewIdentity :: TestTextView v -> Text
tshowTestTextViewIdentity tv = tshowTestViewIdentity (TestView_Text tv)
-- |A marker view node which doesn't have any display but denotes the boundary between replaceable view segments.
data TestMarker = TestMarker
  { _testMarker_identity :: TestIdentity
    -- ^The unique identity of the marker.
  , _testMarker_parent :: TVar (Maybe (TVar (Seq (TestView TVar))))
    -- ^Where the marker is installed, or Nothing if it's not installed.
  } deriving (Eq, Generic)
-- |Show a 'TestMarker' for test assertion messages and the like.
instance Show TestMarker where
  showsPrec _ (TestMarker {..}) = showString "marker #" . shows (unTestIdentity _testMarker_identity)
-- |Show the type and identity of a test marker, equivalent to @'tshowTestViewIdentity' . 'TestView_Marker'@
tshowTestMarkerIdentity :: TestMarker -> Text
tshowTestMarkerIdentity m = tshowTestViewIdentity (TestView_Marker m)
-- |A node in the view hierarchy, either one of the @Test*View@ types or a special marker used during build time to isolate sections of the subviews.
data TestView v
  = TestView_Container (TestContainerView v)
  | TestView_Text (TestTextView v)
  | TestView_Marker TestMarker
  deriving (Generic)
-- |Show a 'TestView' for test assertion messages and the like (recursing into subviews).
instance Show (TestView Identity) where
  showsPrec _ = showsTestView True
-- |Show a 'TestView' for test assertion messages and the like. Takes a boolean which controls whether subviews will be dumped (@True@) or not (@False@).
showsTestView :: Bool -> TestView Identity -> ShowS
showsTestView recurse view = case view of
  -- each of the view types includes show output indicating their type, so don't duplicate it here
  TestView_Container cv -> showsTestContainerView recurse cv
  TestView_Text tv      -> shows tv
  TestView_Marker m     -> shows m
-- |Show a 'TestView' hierarchy on multiple lines with indenting.
showTestViewHierarchy :: String -> Seq (TestView Identity) -> [String]
showTestViewHierarchy prefix = DList.toList . go prefix
  where
    -- render each view of the sequence at the given indent, concatenating the lines
    go :: String -> Seq (TestView Identity) -> DList String
    go indent = foldMap (visit indent) . toList
    -- one line for the view itself; containers then recurse two spaces deeper
    visit :: String -> TestView Identity -> DList String
    visit indent = \ case
      TestView_Container cv ->
        DList.cons
          (indent ++ showsTestContainerView False cv "")
          (go (' ':' ':indent) . runIdentity . _testContainerView_contents $ cv)
      other -> DList.singleton $ indent ++ showsTestView False other ""
-- |Test for equal identity of two view nodes; contents and styles are deliberately ignored.
instance Eq (TestView v) where
  a == b = _testView_identity a == _testView_identity b
-- |Traverse some effect through a @'TestView' v@ while changing @v -> v'@. This is used to do any recursive effect on a view hierarchy, such as freezing a
-- @'TestView' TVar@ into a @'TestView' Identity@ via @atomically . traverseTestView (\ f -> pure . Identity <=< f <=< readTVar)@.
-- The traversal effect needs to accept an additional mapping effect to apply inside in order to handle the fixed point if the
-- view is a @TestView_Container@.
traverseTestView
  :: Applicative f
  => (forall a b. (a -> f b) -> v a -> f (v' b))
  -> TestView v -> f (TestView v')
traverseTestView f = \ case
  TestView_Container cv -> TestView_Container <$> traverseTestContainerView f cv
  TestView_Text tv -> TestView_Text <$> Rank2.traverse (f pure) tv
  -- markers hold no @v@-wrapped values, so there is nothing to traverse
  TestView_Marker m -> pure (TestView_Marker m)
-- |Show the type and identity of a view node, e.g. @container #1@ or @marker #2@.
tshowTestViewIdentity :: TestView v -> Text
tshowTestViewIdentity view = case view of
  TestView_Container cv -> "container " <> tshowTestIdentity (_testViewCommon_identity (_testContainerView_common cv))
  TestView_Text tv      -> "text " <> tshowTestIdentity (_testViewCommon_identity (_testTextView_common tv))
  TestView_Marker m     -> "marker " <> tshowTestIdentity (_testMarker_identity m)
-- |Project the 'TestViewCommon' out of a 'TestView', if it's not a 'TestView_Marker'.
_testView_common :: TestView v -> Maybe (TestViewCommon v)
_testView_common view = case view of
  TestView_Container cv -> Just (_testContainerView_common cv)
  TestView_Text tv      -> Just (_testTextView_common tv)
  TestView_Marker _     -> Nothing
-- |Project the unique identity out of a 'TestView'; every constructor carries one.
_testView_identity :: TestView v -> TestIdentity
_testView_identity view = case view of
  TestView_Container cv -> _testViewCommon_identity (_testContainerView_common cv)
  TestView_Text tv      -> _testViewCommon_identity (_testTextView_common tv)
  TestView_Marker m     -> _testMarker_identity m
-- |Type which holds a sequence of views. The same type as @_testContainerView_contents@ for @'TestContainerView' TVar@.
type TestHolder = TVar (Seq (TestView TVar))
-- |The environment of an in-progress test with a handle to the view hierarchy and the event processing channel.
data TestEnv x = TestEnv
  { _testEnv_rootHolder :: TestHolder
    -- ^The root of the view hierarchy.
  , _testEnv_rootReady :: TVar Bool
    -- ^True iff the first build was immediately ready or it's been committed since.
  , _testEnv_eventChan :: Chan [DSum (EventTriggerRef (SpiderTimeline x)) TriggerInvocation]
    -- ^The event channel to write new event trigger invocations to.
  , _testEnv_fireCommand :: FireCommand (SpiderTimeline x) (SpiderHost x)
    -- ^The 'FireCommand' which is used to process events with the underlying host and then perform any actions triggered by those events.
  , _testEnv_stepCompleteEventHandle :: EventHandle (SpiderTimeline x) ()
    -- ^The event which is fired after each test evaluation step to ensure that event processing has been finished. This is especially required since @Chan@s can
    -- only be read by blocking, so we need an event to explicitly bookend the step.
  , _testEnv_stepCompleteTriggerRef :: IORef (Maybe (EventTrigger (SpiderTimeline x) ()))
    -- ^The trigger for @_testEnv_stepCompleteEvent@.
  }
-- |The monad for evaluating an in-progress test after the build has completed and has access to the state of the view hierarchy and event processing channel.
-- Reader: the 'TestEnv'. State: the most recently frozen snapshot of the view hierarchy. Writer: unused.
newtype TestEvaluation x a = TestEvaluation { unTestEvaluation :: RWST (TestEnv x) () (Seq (TestView Identity)) (SpiderHost x) a }
  deriving (Functor, Applicative, Monad, MonadFix, MonadIO, MonadException, MonadAsyncException)
| null | https://raw.githubusercontent.com/reflex-frp/reflex-native/5fb6a07845e4f7c51f97e9c8ce1a48009f341246/reflex-native-test/src/Reflex/Native/Test/Types.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
* Unique identities
* Test views
** Test views as diagnostic text
** Traversing a test view hierarchy
* Test execution environment and evaluation monad
|A unique identity for a test view, holder, marker, or similar thing qualified by what it's an identity for. Almost identical to "Data.Unique" except that
this has a more useful 'Show' instance for diagnostics.
|Show a unique identifier for diagnostics.
@f ~ Identity@ for frozen copies of the view hierarchy.
^Unique identity of the view for distinguising it among others.
^The style of the view.
^The layout of the view.
^The accessibility label of the view.
it's a hack!
|A container view which has common view attributes and a collection of subviews.
^The common view attributes for the container.
^The subviews.
|Show a 'TestContainerView' for test assertion messages and the like.
|Traverse some effect through a @'TestContainerView' v@ while changing @v -> v'@. See 'traverseTestView' for how this is commonly used.
The traversal effect needs to accept an additional mapping effect to apply inside in order to handle the fixed point @_testContainerView_contents@.
|A text display view which has common view attributes, a text style, and whatever the current/captured text is.
^The common view attributes for the text view.
^The style to display the text with.
^The actual text.
|Show a 'TestTextView' for test assertion messages and the like.
|Show the type and identity of a test text view, equivalent to @'tshowTestViewIdentity' . 'TestView_Text'@
|A marker view node which doesn't have any display but denotes the boundary between replaceable view segments.
^The unique identity of the marker.
^Where the marker is installed, or Nothing if it's not installed.
|Show a 'TestView' for test assertion messages and the like.
each of the view types includes show output indicating their type, so don't duplicate it here
|Show a 'TestView' hierarchy on multiple lines with indenting.
|Traverse some effect through a @'TestView' v@ while changing @v -> v'@. This is used to do any recursive effect on a view hierarchy, such as freezing a
|Show the type and identity of a view node
|Project the unique identity out of a 'TestView'
|The environment of an in-progress test with a handle to the view hierarchy and the event processing channel.
^The root of the view hierarchy.
^The event channel to write new event trigger invocations to.
^The event which is fired after each test evaluation step to ensure that event processing has been finished. This is especially required since @Chan@s can
only be read by blocking, so we need an event to explicitly bookend the step.
^The trigger for @_testEnv_stepCompleteEvent@.
|The monad for evaluating an in-progress test after the build has completed and has access to the state of the view hierarchy and event processing channel. | # LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
|Types used throughout " Reflex . Native . Test " .
module Reflex.Native.Test.Types
(
TestIdentity, unTestIdentity, newTestIdentity, tshowTestIdentity
, TestHolder, TestViewCommon(..), TestContainerView(..), TestTextView(..), TestMarker(..), TestView(..), _testView_common, _testView_identity
, showsTestContainerView, showsTestView, showTestViewHierarchy, tshowTestContainerViewIdentity, tshowTestTextViewIdentity, tshowTestMarkerIdentity
, tshowTestViewIdentity
, traverseTestContainerView, traverseTestView
, TestEnv(..), TestEvaluation(..)
) where
import Control.Concurrent.Chan (Chan)
import Control.Concurrent.STM.TVar (TVar)
import Control.Monad.Exception (MonadAsyncException, MonadException)
import Control.Monad.Fix (MonadFix)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.RWS.Strict (RWST)
import Data.Dependent.Sum (DSum)
import Data.DList (DList)
import qualified Data.DList as DList
import Data.Foldable (toList)
import Data.Functor.Identity (Identity(..))
import Data.IORef (IORef, newIORef, atomicModifyIORef')
import Data.Monoid ((<>))
import Data.Sequence (Seq)
import Data.Text (Text, pack)
import GHC.Generics (Generic)
import qualified Rank2
import Rank2 (apply)
import Reflex.Host.Class (ReflexHost(type EventHandle, type EventTrigger))
import Reflex.Native.TextStyle (TextStyle(..))
import Reflex.Native.ViewLayout (ViewLayout)
import Reflex.Native.ViewStyle (ViewStyle(..))
import Reflex.PerformEvent.Base (FireCommand)
import Reflex.Spider (SpiderHost, SpiderTimeline)
import Reflex.TriggerEvent.Base (EventTriggerRef, TriggerInvocation)
import System.IO.Unsafe (unsafePerformIO)
newtype TestIdentity = TestIdentity { unTestIdentity :: Integer } deriving (Eq, Ord)
tshowTestIdentity :: TestIdentity -> Text
tshowTestIdentity (TestIdentity i) = pack ('#' : show i)
|Shared reference to make unique ' TestIdentity ' values
# NOINLINE nextTestIdentityRef #
nextTestIdentityRef :: IORef Integer
nextTestIdentityRef = unsafePerformIO $ newIORef 1
|Create a new ' TestIdentity ' with a new serial number
newTestIdentity :: MonadIO m => m TestIdentity
newTestIdentity =
fmap TestIdentity . liftIO . atomicModifyIORef' nextTestIdentityRef $ \ n ->
let n' = succ n in (n', n')
|'ShowS ' for a @'ViewStyle ' Identity@ since ' Show ' is needed for test assertion messages and so on .
showsViewStyle :: ViewStyle Identity -> ShowS
showsViewStyle (ViewStyle {..})
= ('{':)
. showString "bg=" . shows (runIdentity _viewStyle_backgroundColor)
. ('}':)
|Common attributes of every view in a test view hierarchy . Parameterized by @f@ which wraps every value ; @f ~ TVar@ during the building step , and
data TestViewCommon v = TestViewCommon
{ _testViewCommon_identity :: TestIdentity
, _testViewCommon_style :: ViewStyle v
, _testViewCommon_layout :: v ViewLayout
, _testViewCommon_accessibilityLabel :: v (Maybe Text)
} deriving (Generic)
|Show a @TestViewCommon@ for test assertion messages and the like . Usually used as the first part of showing the view type embedding the
instance Show (TestViewCommon Identity) where
showsPrec _ (TestViewCommon {..})
= ('#':) . shows (unTestIdentity _testViewCommon_identity)
. showString " style=" . showsViewStyle _testViewCommon_style
. showString " layout=" . shows (runIdentity _testViewCommon_layout)
. showString " accessibilityLabel=" . shows (runIdentity _testViewCommon_accessibilityLabel)
instance Rank2.Functor TestViewCommon where
f <$> TestViewCommon a b c d = TestViewCommon a (f Rank2.<$> b) (f c) (f d)
instance Rank2.Apply TestViewCommon where
TestViewCommon _ fb fc fd <*> TestViewCommon a b c d = TestViewCommon a (fb Rank2.<*> b) (apply fc c) (apply fd d)
instance Rank2.Applicative TestViewCommon where
instance Rank2.Foldable TestViewCommon where
foldMap f (TestViewCommon _ b c d) = Rank2.foldMap f b <> f c <> f d
instance Rank2.Traversable TestViewCommon where
traverse f (TestViewCommon a b c d) = TestViewCommon a <$> Rank2.traverse f b <*> f c <*> f d
data TestContainerView v = TestContainerView
{ _testContainerView_common :: TestViewCommon v
, _testContainerView_contents :: v (Seq (TestView v))
} deriving (Generic)
ca n't instance on account of the fixed point - would need @v@ or @v'@ to be a Functor but they need to be natural .
instance Show (TestContainerView Identity) where
showsPrec _ = showsTestContainerView True
|Show a ' TestContainerView ' for test assertion messages and the like . Takes a boolean indicating whether subviews will be dumped ( @True@ ) or not ( @False@ ) .
showsTestContainerView :: Bool -> TestContainerView Identity -> ShowS
showsTestContainerView recurse (TestContainerView {..})
= showString "container " . shows _testContainerView_common
. (if recurse then (' ':) . showList (toList _testContainerView_contents) else id)
traverseTestContainerView
:: Applicative f
=> (forall a b. (a -> f b) -> v a -> f (v' b))
-> TestContainerView v -> f (TestContainerView v')
traverseTestContainerView f (TestContainerView {..}) =
TestContainerView
<$> Rank2.traverse (f pure) _testContainerView_common
<*> f (traverse (traverseTestView f)) _testContainerView_contents
|Show the type and identity of a test container view , equivalent to @'tshowTestViewIdentity ' . ' TestView_Container'@
tshowTestContainerViewIdentity :: TestContainerView v -> Text
tshowTestContainerViewIdentity = tshowTestViewIdentity . TestView_Container
data TestTextView v = TestTextView
{ _testTextView_common :: TestViewCommon v
, _testTextView_style :: TextStyle v
, _testTextView_text :: v Text
} deriving (Generic)
|'ShowS ' for a @'TextStyle ' Identity@ since ' Show ' is needed for test assertion messages and so on .
showsTextStyle :: TextStyle Identity -> ShowS
showsTextStyle (TextStyle {..})
= showString "{color=" . shows (runIdentity _textStyle_textColor)
. showString " font=" . shows (runIdentity _textStyle_font)
. ('}':)
instance Show (TestTextView Identity) where
showsPrec _ (TestTextView {..})
= showString "text " . shows _testTextView_common
. showString " textStyle=" . showsTextStyle _testTextView_style
. showString " text=" . shows (runIdentity _testTextView_text)
instance Rank2.Functor TestTextView where
f <$> TestTextView a b c = TestTextView (f Rank2.<$> a) (f Rank2.<$> b) (f c)
instance Rank2.Apply TestTextView where
TestTextView fa fb fc <*> TestTextView a b c = TestTextView (fa Rank2.<*> a) (fb Rank2.<*> b) (apply fc c)
instance Rank2.Applicative TestTextView where
pure f = TestTextView (Rank2.pure f) (Rank2.pure f) f
instance Rank2.Foldable TestTextView where
foldMap f (TestTextView a b c) = Rank2.foldMap f a <> Rank2.foldMap f b <> f c
instance Rank2.Traversable TestTextView where
traverse f (TestTextView a b c) = TestTextView <$> Rank2.traverse f a <*> Rank2.traverse f b <*> f c
tshowTestTextViewIdentity :: TestTextView v -> Text
tshowTestTextViewIdentity = tshowTestViewIdentity . TestView_Text
data TestMarker = TestMarker
{ _testMarker_identity :: TestIdentity
, _testMarker_parent :: TVar (Maybe (TVar (Seq (TestView TVar))))
} deriving (Eq, Generic)
|Show a ' TestMarker ' for test assertion messages and the like .
instance Show TestMarker where
showsPrec _ (TestMarker {..}) = showString "marker #" . shows (unTestIdentity _testMarker_identity)
|Show the type and identity of a test marker , equivalent to @'tshowTestViewIdentity ' . '
tshowTestMarkerIdentity :: TestMarker -> Text
tshowTestMarkerIdentity = tshowTestViewIdentity . TestView_Marker
|A node in the view hierarchy , either one of the @Test*View@ types or a special marker used during build time to isolate sections of the subviews .
data TestView v
= TestView_Container (TestContainerView v)
| TestView_Text (TestTextView v)
| TestView_Marker TestMarker
deriving (Generic)
instance Show (TestView Identity) where
showsPrec _ = showsTestView True
|Show a ' TestView ' for test assertion messages and the like . Takes a boolean which controls whether subviews will be dumped ( @True@ ) or not ( @False ) .
showsTestView :: Bool -> TestView Identity -> ShowS
showsTestView recurse = \ case
TestView_Container cv -> showsTestContainerView recurse cv
TestView_Text tv -> shows tv
TestView_Marker m -> shows m
showTestViewHierarchy :: String -> Seq (TestView Identity) -> [String]
showTestViewHierarchy prefix = DList.toList . go prefix
where
go :: String -> Seq (TestView Identity) -> DList String
go indent = foldMap (visit indent) . toList
visit :: String -> TestView Identity -> DList String
visit indent = \ case
TestView_Container cv ->
DList.cons
(indent ++ showsTestContainerView False cv "")
(go (' ':' ':indent) . runIdentity . _testContainerView_contents $ cv)
other -> DList.singleton $ indent ++ showsTestView False other ""
|Test for equal identity of two view nodes
instance Eq (TestView v) where
a == b = _testView_identity a == _testView_identity b
@'TestView ' TVar@ into a @'TestView ' Identity@ via @atomically . traverseTestView ( \ f - > pure . Identity < = < f < = < readTVar)@.
The traversal effect needs to accept an additional mapping effect to apply inside in order to handle the fixed point if the
view is a @TestView_Container@.
traverseTestView
:: Applicative f
=> (forall a b. (a -> f b) -> v a -> f (v' b))
-> TestView v -> f (TestView v')
traverseTestView f = \ case
TestView_Container cv -> TestView_Container <$> traverseTestContainerView f cv
TestView_Text tv -> TestView_Text <$> Rank2.traverse (f pure) tv
TestView_Marker m -> pure (TestView_Marker m)
tshowTestViewIdentity :: TestView v -> Text
tshowTestViewIdentity = \ case
TestView_Container cv -> "container " <> tshowTestIdentity (_testViewCommon_identity . _testContainerView_common $ cv)
TestView_Text tv -> "text " <> tshowTestIdentity (_testViewCommon_identity . _testTextView_common $ tv)
TestView_Marker m -> "marker " <> tshowTestIdentity (_testMarker_identity m)
|Project the ' TestViewCommon ' out of a ' TestView ' , if it 's not a ' TestView_Marker ' .
_testView_common :: TestView v -> Maybe (TestViewCommon v)
_testView_common = \ case
TestView_Container cv -> Just . _testContainerView_common $ cv
TestView_Text tv -> Just . _testTextView_common $ tv
TestView_Marker _ -> Nothing
_testView_identity :: TestView v -> TestIdentity
_testView_identity = \ case
TestView_Container cv -> _testViewCommon_identity . _testContainerView_common $ cv
TestView_Text tv -> _testViewCommon_identity . _testTextView_common $ tv
TestView_Marker m -> _testMarker_identity m
|Type which holds a sequence of views . The same type as @_testContainerView_contents@ for @'TestContainerView ' TVar@
type TestHolder = TVar (Seq (TestView TVar))
data TestEnv x = TestEnv
{ _testEnv_rootHolder :: TestHolder
, _testEnv_rootReady :: TVar Bool
^True iff the first build was immediately ready or it 's been committed since .
, _testEnv_eventChan :: Chan [DSum (EventTriggerRef (SpiderTimeline x)) TriggerInvocation]
, _testEnv_fireCommand :: FireCommand (SpiderTimeline x) (SpiderHost x)
^The ' FireCommand ' which is used to process events with the underlying host and then perform any actions triggered by those events .
, _testEnv_stepCompleteEventHandle :: EventHandle (SpiderTimeline x) ()
, _testEnv_stepCompleteTriggerRef :: IORef (Maybe (EventTrigger (SpiderTimeline x) ()))
}
newtype TestEvaluation x a = TestEvaluation { unTestEvaluation :: RWST (TestEnv x) () (Seq (TestView Identity)) (SpiderHost x) a }
deriving (Functor, Applicative, Monad, MonadFix, MonadIO, MonadException, MonadAsyncException)
|
ecc9846fa6c91bf31943870792c3e3bcc282e8b752af4ddde77ca226c0a95662 | ygmpkk/house | VertexSpec.hs | --------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.VertexSpec
Copyright : ( c ) 2003
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer :
-- Stability : provisional
-- Portability : portable
--
This module corresponds to section 2.7 ( Vertex Specification ) of the
OpenGL 1.4 specs .
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.VertexSpec (
-- * Vertex Coordinates
Vertex(..),
VertexComponent,
Vertex2(..), Vertex3(..), Vertex4(..),
-- * Auxiliary Vertex Attributes
-- $AuxiliaryVertexAttributes
-- ** Texture Coordinates
currentTextureCoords, TexCoord(..),
TexCoordComponent,
TexCoord1(..), TexCoord2(..), TexCoord3(..), TexCoord4(..),
-- ** Normal
currentNormal, Normal(..),
NormalComponent,
Normal3(..),
-- ** Fog Coordinate
currentFogCoord, FogCoord(..),
FogCoordComponent,
FogCoord1(..),
-- ** Color and Secondary Color
rgbaMode,
currentColor, Color(..),
currentSecondaryColor, SecondaryColor(..),
ColorComponent,
Color3(..), Color4(..),
currentIndex, Index(..),
IndexComponent,
Index1(..),
-- * Texture Units
TextureUnit(..), maxTextureUnit
) where
import Data.Int
import Data.Word
import Foreign.Ptr ( Ptr, castPtr )
import Foreign.Storable ( Storable(..) )
import Graphics.Rendering.OpenGL.GL.BasicTypes (
GLenum, GLbyte, GLshort, GLint, GLubyte, GLushort, GLuint, GLfloat,
GLdouble )
import Graphics.Rendering.OpenGL.GL.Extensions (
FunPtr, unsafePerformIO, Invoker, getProcAddress )
import Graphics.Rendering.OpenGL.GL.GLboolean ( unmarshalGLboolean )
import Graphics.Rendering.OpenGL.GL.PeekPoke (
poke1, poke2, poke3, poke4,
peek1, peek2, peek3, peek4 )
import Graphics.Rendering.OpenGL.GL.QueryUtils (
GetPName(GetCurrentTextureCoords, GetCurrentNormal, GetCurrentFogCoord,
GetCurrentColor, GetCurrentSecondaryColor, GetCurrentIndex,
GetMaxTextureUnits,GetRGBAMode),
getBoolean1, getInteger1, getFloat1, getFloat3, getFloat4 )
import Graphics.Rendering.OpenGL.GL.StateVar (
GettableStateVar, makeGettableStateVar, StateVar, makeStateVar )
--------------------------------------------------------------------------------
#include "HsOpenGLExt.h"
#include "HsOpenGLTypes.h"
--------------------------------------------------------------------------------
-- | The class of all types which can be used as a vertex coordinate.
-- The @vertexN@ members take N components directly; the @vertexNv@ members
-- read the components from memory via a pointer.
class VertexComponent a where
  vertex2 :: a -> a -> IO ()
  vertex3 :: a -> a -> a -> IO ()
  vertex4 :: a -> a -> a -> a -> IO ()
  vertex2v :: Ptr a -> IO ()
  vertex3v :: Ptr a -> IO ()
  vertex4v :: Ptr a -> IO ()
--------------------------------------------------------------------------------
-- Raw FFI bindings for the GLshort glVertex* entry points, and the
-- 'VertexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glVertex2s" glVertex2s ::
  GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex3s" glVertex3s ::
  GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex4s" glVertex4s ::
  GLshort -> GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex2sv" glVertex2sv ::
  Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex3sv" glVertex3sv ::
  Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex4sv" glVertex4sv ::
  Ptr GLshort -> IO ()
instance VertexComponent GLshort_ where
  vertex2 = glVertex2s
  vertex3 = glVertex3s
  vertex4 = glVertex4s
  vertex2v = glVertex2sv
  vertex3v = glVertex3sv
  vertex4v = glVertex4sv
--------------------------------------------------------------------------------
-- Raw FFI bindings for the GLint glVertex* entry points, and the
-- 'VertexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glVertex2i" glVertex2i ::
  GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glVertex3i" glVertex3i ::
  GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glVertex4i" glVertex4i ::
  GLint -> GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glVertex2iv" glVertex2iv ::
  Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glVertex3iv" glVertex3iv ::
  Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glVertex4iv" glVertex4iv ::
  Ptr GLint -> IO ()
instance VertexComponent GLint_ where
  vertex2 = glVertex2i
  vertex3 = glVertex3i
  vertex4 = glVertex4i
  vertex2v = glVertex2iv
  vertex3v = glVertex3iv
  vertex4v = glVertex4iv
--------------------------------------------------------------------------------
-- Raw FFI bindings for the GLfloat glVertex* entry points, and the
-- 'VertexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glVertex2f" glVertex2f ::
  GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex3f" glVertex3f ::
  GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex4f" glVertex4f ::
  GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex2fv" glVertex2fv ::
  Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex3fv" glVertex3fv ::
  Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex4fv" glVertex4fv ::
  Ptr GLfloat -> IO ()
instance VertexComponent GLfloat_ where
  vertex2 = glVertex2f
  vertex3 = glVertex3f
  vertex4 = glVertex4f
  vertex2v = glVertex2fv
  vertex3v = glVertex3fv
  vertex4v = glVertex4fv
--------------------------------------------------------------------------------
-- Raw FFI bindings for the GLdouble glVertex* entry points, and the
-- 'VertexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glVertex2d" glVertex2d ::
  GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex3d" glVertex3d ::
  GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex4d" glVertex4d ::
  GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex2dv" glVertex2dv ::
  Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex3dv" glVertex3dv ::
  Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex4dv" glVertex4dv ::
  Ptr GLdouble -> IO ()
instance VertexComponent GLdouble_ where
  vertex2 = glVertex2d
  vertex3 = glVertex3d
  vertex4 = glVertex4d
  vertex2v = glVertex2dv
  vertex3v = glVertex3dv
  vertex4v = glVertex4dv
--------------------------------------------------------------------------------
| Specify the ( /x/ , /y/ , /z/ , ) coordinates of a four - dimensional vertex .
-- This must only be done during
-- 'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive', otherwise the
-- behaviour is unspecified. The current values of the auxiliary vertex
-- attributes are associated with the vertex.
--
-- Note that there is no such thing as a \"current vertex\" which could be
-- retrieved.
class Vertex a where
   vertex :: a -> IO ()      -- ^ Emit the vertex value directly.
   vertexv :: Ptr a -> IO () -- ^ Emit the vertex stored at the given pointer.
-- | A vertex with /z/=0 and /w/=1.
data Vertex2 a = Vertex2 a a
   deriving ( Eq, Ord, Show )

-- The castPtr is safe: the Storable instance below lays a Vertex2 out as
-- two consecutive components, exactly what the 2-element "v" call expects.
instance VertexComponent a => Vertex (Vertex2 a) where
   vertex (Vertex2 x y) = vertex2 x y
   vertexv = vertex2v . (castPtr :: Ptr (Vertex2 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
-- peek2/poke2 are pointer helpers defined elsewhere in this file.
instance Storable a => Storable (Vertex2 a) where
   sizeOf ~(Vertex2 x _) = 2 * sizeOf x
   alignment ~(Vertex2 x _) = alignment x
   peek = peek2 Vertex2 . castPtr
   poke ptr (Vertex2 x y) = poke2 (castPtr ptr) x y
-- | A vertex with /w/=1.
data Vertex3 a = Vertex3 a a a
   deriving ( Eq, Ord, Show )

-- castPtr collapses Ptr (Vertex3 a) to Ptr a; safe because the Storable
-- instance below stores three consecutive components.
instance VertexComponent a => Vertex (Vertex3 a) where
   vertex (Vertex3 x y z) = vertex3 x y z
   vertexv = vertex3v . (castPtr :: Ptr (Vertex3 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (Vertex3 a) where
   sizeOf ~(Vertex3 x _ _) = 3 * sizeOf x
   alignment ~(Vertex3 x _ _) = alignment x
   peek = peek3 Vertex3 . castPtr
   poke ptr (Vertex3 x y z) = poke3 (castPtr ptr) x y z
-- | A fully-fledged four-dimensional vertex.
data Vertex4 a = Vertex4 a a a a
   deriving ( Eq, Ord, Show )

-- castPtr collapses Ptr (Vertex4 a) to Ptr a; safe because the Storable
-- instance below stores four consecutive components.
instance VertexComponent a => Vertex (Vertex4 a) where
   vertex (Vertex4 x y z w) = vertex4 x y z w
   vertexv = vertex4v . (castPtr :: Ptr (Vertex4 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (Vertex4 a) where
   sizeOf ~(Vertex4 x _ _ _) = 4 * sizeOf x
   alignment ~(Vertex4 x _ _ _) = alignment x
   peek = peek4 Vertex4 . castPtr
   poke ptr (Vertex4 x y z w) = poke4 (castPtr ptr) x y z w
--------------------------------------------------------------------------------
-- $AuxiliaryVertexAttributes
-- Apart from its coordinates in four-dimensional space, every vertex has
-- associated /auxiliary attributes/: Its texture coordinates, a normal, a
-- fog coordinate, and a color plus a secondary color. For every attribute, the
-- OpenGL state contains its current value, which can be changed at any time.
--
-- Every attribute has a \"natural\" format via which it can be manipulated
-- directly as part of the OpenGL state, e.g. the current texture coordinates
-- are internally handled as @'TexCoord4' 'GLfloat'@. Different formats are
-- converted to this format, e.g. the /s/, /r/, and /t/ coordinates of a
-- @'TexCoord3' 'GLint'@ are converted to floating point values and a /q/
-- coordinate of 1.0 is implicitly assumed.
--
-- Consequently, the vast majority of classes, functions, and data types in this
-- module are for convenience only and offer no additional functionality.
--------------------------------------------------------------------------------
-- | The current texture coordinates (/s/, /t/, /r/, /q/) for the current
-- texture unit (see 'Graphics.Rendering.OpenGL.GL.CoordTrans.activeTexture').
-- The initial value is (0,0,0,1) for all texture units.
currentTextureCoords :: StateVar (TexCoord4 GLfloat)
currentTextureCoords =
   -- read via the GetCurrentTextureCoords state query, write by
   -- re-issuing 'texCoord'
   makeStateVar (getFloat4 TexCoord4 GetCurrentTextureCoords) texCoord
--------------------------------------------------------------------------------
-- | The class of all types which can be used as a texture coordinate.
class TexCoordComponent a where
   -- scalar setters for the implicitly current texture unit,
   -- taking 1..4 components
   texCoord1 :: a -> IO ()
   texCoord2 :: a -> a -> IO ()
   texCoord3 :: a -> a -> a -> IO ()
   texCoord4 :: a -> a -> a -> a -> IO ()
   -- pointer variants reading 1..4 consecutive components from memory
   texCoord1v :: Ptr a -> IO ()
   texCoord2v :: Ptr a -> IO ()
   texCoord3v :: Ptr a -> IO ()
   texCoord4v :: Ptr a -> IO ()
   -- variants targeting an explicit texture unit (GL_ARB_multitexture);
   -- the GLenum is the raw texture-unit enumerant
   multiTexCoord1 :: GLenum -> a -> IO ()
   multiTexCoord2 :: GLenum -> a -> a -> IO ()
   multiTexCoord3 :: GLenum -> a -> a -> a -> IO ()
   multiTexCoord4 :: GLenum -> a -> a -> a -> a -> IO ()
   -- pointer variants of the explicit-unit setters
   multiTexCoord1v :: GLenum -> Ptr a -> IO ()
   multiTexCoord2v :: GLenum -> Ptr a -> IO ()
   multiTexCoord3v :: GLenum -> Ptr a -> IO ()
   multiTexCoord4v :: GLenum -> Ptr a -> IO ()
--------------------------------------------------------------------------------
-- TexCoordComponent instances for the four supported component types
-- (GLshort, GLint, GLfloat, GLdouble).  The glTexCoord* entry points are
-- statically imported; the glMultiTexCoord* ones are declared through the
-- EXTENSION_ENTRY CPP macro, which resolves them at run time against
-- "GL_ARB_multitexture or OpenGL 1.3".
foreign import CALLCONV unsafe "glTexCoord1s" glTexCoord1s ::
   GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord2s" glTexCoord2s ::
   GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord3s" glTexCoord3s ::
   GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord4s" glTexCoord4s ::
   GLshort -> GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord1sv" glTexCoord1sv ::
   Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord2sv" glTexCoord2sv ::
   Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord3sv" glTexCoord3sv ::
   Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord4sv" glTexCoord4sv ::
   Ptr GLshort -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1sARB,GLenum -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2sARB,GLenum -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3sARB,GLenum -> GLshort -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4sARB,GLenum -> GLshort -> GLshort -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1svARB,GLenum -> Ptr GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2svARB,GLenum -> Ptr GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3svARB,GLenum -> Ptr GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4svARB,GLenum -> Ptr GLshort -> IO ())

-- GLshort components map onto the s-suffixed entry points.
instance TexCoordComponent GLshort_ where
   texCoord1 = glTexCoord1s
   texCoord2 = glTexCoord2s
   texCoord3 = glTexCoord3s
   texCoord4 = glTexCoord4s
   texCoord1v = glTexCoord1sv
   texCoord2v = glTexCoord2sv
   texCoord3v = glTexCoord3sv
   texCoord4v = glTexCoord4sv
   multiTexCoord1 = glMultiTexCoord1sARB
   multiTexCoord2 = glMultiTexCoord2sARB
   multiTexCoord3 = glMultiTexCoord3sARB
   multiTexCoord4 = glMultiTexCoord4sARB
   multiTexCoord1v = glMultiTexCoord1svARB
   multiTexCoord2v = glMultiTexCoord2svARB
   multiTexCoord3v = glMultiTexCoord3svARB
   multiTexCoord4v = glMultiTexCoord4svARB
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glTexCoord1i" glTexCoord1i ::
   GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord2i" glTexCoord2i ::
   GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord3i" glTexCoord3i ::
   GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord4i" glTexCoord4i ::
   GLint -> GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord1iv" glTexCoord1iv ::
   Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord2iv" glTexCoord2iv ::
   Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord3iv" glTexCoord3iv ::
   Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord4iv" glTexCoord4iv ::
   Ptr GLint -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1iARB,GLenum -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2iARB,GLenum -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3iARB,GLenum -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4iARB,GLenum -> GLint -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1ivARB,GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2ivARB,GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3ivARB,GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4ivARB,GLenum -> Ptr GLint -> IO ())

-- GLint components map onto the i-suffixed entry points.
instance TexCoordComponent GLint_ where
   texCoord1 = glTexCoord1i
   texCoord2 = glTexCoord2i
   texCoord3 = glTexCoord3i
   texCoord4 = glTexCoord4i
   texCoord1v = glTexCoord1iv
   texCoord2v = glTexCoord2iv
   texCoord3v = glTexCoord3iv
   texCoord4v = glTexCoord4iv
   multiTexCoord1 = glMultiTexCoord1iARB
   multiTexCoord2 = glMultiTexCoord2iARB
   multiTexCoord3 = glMultiTexCoord3iARB
   multiTexCoord4 = glMultiTexCoord4iARB
   multiTexCoord1v = glMultiTexCoord1ivARB
   multiTexCoord2v = glMultiTexCoord2ivARB
   multiTexCoord3v = glMultiTexCoord3ivARB
   multiTexCoord4v = glMultiTexCoord4ivARB
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glTexCoord1f" glTexCoord1f ::
   GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord2f" glTexCoord2f ::
   GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord3f" glTexCoord3f ::
   GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord4f" glTexCoord4f ::
   GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord1fv" glTexCoord1fv ::
   Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord2fv" glTexCoord2fv ::
   Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord3fv" glTexCoord3fv ::
   Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord4fv" glTexCoord4fv ::
   Ptr GLfloat -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1fARB,GLenum -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2fARB,GLenum -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3fARB,GLenum -> GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4fARB,GLenum -> GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1fvARB,GLenum -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2fvARB,GLenum -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3fvARB,GLenum -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4fvARB,GLenum -> Ptr GLfloat -> IO ())

-- GLfloat components map onto the f-suffixed entry points.
instance TexCoordComponent GLfloat_ where
   texCoord1 = glTexCoord1f
   texCoord2 = glTexCoord2f
   texCoord3 = glTexCoord3f
   texCoord4 = glTexCoord4f
   texCoord1v = glTexCoord1fv
   texCoord2v = glTexCoord2fv
   texCoord3v = glTexCoord3fv
   texCoord4v = glTexCoord4fv
   multiTexCoord1 = glMultiTexCoord1fARB
   multiTexCoord2 = glMultiTexCoord2fARB
   multiTexCoord3 = glMultiTexCoord3fARB
   multiTexCoord4 = glMultiTexCoord4fARB
   multiTexCoord1v = glMultiTexCoord1fvARB
   multiTexCoord2v = glMultiTexCoord2fvARB
   multiTexCoord3v = glMultiTexCoord3fvARB
   multiTexCoord4v = glMultiTexCoord4fvARB
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glTexCoord1d" glTexCoord1d ::
   GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord2d" glTexCoord2d ::
   GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord3d" glTexCoord3d ::
   GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord4d" glTexCoord4d ::
   GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord1dv" glTexCoord1dv ::
   Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord2dv" glTexCoord2dv ::
   Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord3dv" glTexCoord3dv ::
   Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord4dv" glTexCoord4dv ::
   Ptr GLdouble -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1dARB,GLenum -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2dARB,GLenum -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3dARB,GLenum -> GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4dARB,GLenum -> GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1dvARB,GLenum -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2dvARB,GLenum -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3dvARB,GLenum -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4dvARB,GLenum -> Ptr GLdouble -> IO ())

-- GLdouble components map onto the d-suffixed entry points.
instance TexCoordComponent GLdouble_ where
   texCoord1 = glTexCoord1d
   texCoord2 = glTexCoord2d
   texCoord3 = glTexCoord3d
   texCoord4 = glTexCoord4d
   texCoord1v = glTexCoord1dv
   texCoord2v = glTexCoord2dv
   texCoord3v = glTexCoord3dv
   texCoord4v = glTexCoord4dv
   multiTexCoord1 = glMultiTexCoord1dARB
   multiTexCoord2 = glMultiTexCoord2dARB
   multiTexCoord3 = glMultiTexCoord3dARB
   multiTexCoord4 = glMultiTexCoord4dARB
   multiTexCoord1v = glMultiTexCoord1dvARB
   multiTexCoord2v = glMultiTexCoord2dvARB
   multiTexCoord3v = glMultiTexCoord3dvARB
   multiTexCoord4v = glMultiTexCoord4dvARB
--------------------------------------------------------------------------------
-- | Change the current texture coordinates of the current or given texture
-- unit.
class TexCoord a where
   texCoord :: a -> IO ()                      -- ^ Set the current unit directly.
   texCoordv :: Ptr a -> IO ()                 -- ^ As 'texCoord', from memory.
   multiTexCoord :: TextureUnit -> a -> IO ()  -- ^ Set an explicit unit.
   multiTexCoordv :: TextureUnit -> Ptr a -> IO () -- ^ As 'multiTexCoord', from memory.
-- | Texture coordinates with /t/=0, /r/=0, and /q/=1.
data TexCoord1 a = TexCoord1 a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores a TexCoord1 as one
-- bare component.  TextureUnit wraps a numeric unit id, which
-- fromIntegral converts to the GLenum the multiTexCoord* calls expect.
instance TexCoordComponent a => TexCoord (TexCoord1 a) where
   texCoord (TexCoord1 s) = texCoord1 s
   texCoordv = texCoord1v . (castPtr :: Ptr (TexCoord1 b) -> Ptr b)
   multiTexCoord (TextureUnit u) (TexCoord1 s) =
      multiTexCoord1 (fromIntegral u) s
   multiTexCoordv (TextureUnit u) =
      multiTexCoord1v (fromIntegral u) . (castPtr :: Ptr (TexCoord1 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (TexCoord1 a) where
   sizeOf ~(TexCoord1 s) = sizeOf s
   alignment ~(TexCoord1 s) = alignment s
   peek = peek1 TexCoord1 . castPtr
   poke ptr (TexCoord1 s) = poke1 (castPtr ptr) s
-- | Texture coordinates with /r/=0 and /q/=1.
data TexCoord2 a = TexCoord2 a a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores two consecutive
-- components, matching the 2-element "v" entry points.
instance TexCoordComponent a => TexCoord (TexCoord2 a) where
   texCoord (TexCoord2 s t) = texCoord2 s t
   texCoordv = texCoord2v . (castPtr :: Ptr (TexCoord2 b) -> Ptr b)
   multiTexCoord (TextureUnit u) (TexCoord2 s t) =
      multiTexCoord2 (fromIntegral u) s t
   multiTexCoordv (TextureUnit u) =
      multiTexCoord2v (fromIntegral u) . (castPtr :: Ptr (TexCoord2 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (TexCoord2 a) where
   sizeOf ~(TexCoord2 s _) = 2 * sizeOf s
   alignment ~(TexCoord2 s _) = alignment s
   peek = peek2 TexCoord2 . castPtr
   poke ptr (TexCoord2 s t) = poke2 (castPtr ptr) s t
-- | Texture coordinates with /q/=1.
data TexCoord3 a = TexCoord3 a a a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores three consecutive
-- components, matching the 3-element "v" entry points.
instance TexCoordComponent a => TexCoord (TexCoord3 a) where
   texCoord (TexCoord3 s t r) = texCoord3 s t r
   texCoordv = texCoord3v . (castPtr :: Ptr (TexCoord3 b) -> Ptr b)
   multiTexCoord (TextureUnit u) (TexCoord3 s t r) =
      multiTexCoord3 (fromIntegral u) s t r
   multiTexCoordv (TextureUnit u) =
      multiTexCoord3v (fromIntegral u) . (castPtr :: Ptr (TexCoord3 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (TexCoord3 a) where
   sizeOf ~(TexCoord3 s _ _) = 3 * sizeOf s
   alignment ~(TexCoord3 s _ _) = alignment s
   peek = peek3 TexCoord3 . castPtr
   poke ptr (TexCoord3 s t r) = poke3 (castPtr ptr) s t r
-- | Fully-fledged four-dimensional texture coordinates.
data TexCoord4 a = TexCoord4 a a a a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores four consecutive
-- components, matching the 4-element "v" entry points.
instance TexCoordComponent a => TexCoord (TexCoord4 a) where
   texCoord (TexCoord4 s t r q) = texCoord4 s t r q
   texCoordv = texCoord4v . (castPtr :: Ptr (TexCoord4 b) -> Ptr b)
   multiTexCoord (TextureUnit u) (TexCoord4 s t r q) =
      multiTexCoord4 (fromIntegral u) s t r q
   multiTexCoordv (TextureUnit u) =
      multiTexCoord4v (fromIntegral u) . (castPtr :: Ptr (TexCoord4 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (TexCoord4 a) where
   sizeOf ~(TexCoord4 s _ _ _) = 4 * sizeOf s
   alignment ~(TexCoord4 s _ _ _) = alignment s
   peek = peek4 TexCoord4 . castPtr
   poke ptr (TexCoord4 s t r q) = poke4 (castPtr ptr) s t r q
--------------------------------------------------------------------------------
-- | The current normal (/x/, /y/, /z/). The initial value is the unit vector
-- (0, 0, 1).
currentNormal :: StateVar (Normal3 GLfloat)
-- read via the GetCurrentNormal query, write by re-issuing 'normal'
currentNormal = makeStateVar (getFloat3 Normal3 GetCurrentNormal) normal
--------------------------------------------------------------------------------
-- | The class of all types which can be used as a component of a normal.
class NormalComponent a where
   normal3 :: a -> a -> a -> IO () -- ^ Set the current normal from three components.
   normal3v :: Ptr a -> IO ()      -- ^ As 'normal3', reading from memory.
--------------------------------------------------------------------------------
-- NormalComponent instances for the five supported component types; each
-- pairs the scalar and pointer forms of the matching glNormal3* call.
foreign import CALLCONV unsafe "glNormal3b" glNormal3b ::
   GLbyte -> GLbyte -> GLbyte -> IO ()
foreign import CALLCONV unsafe "glNormal3bv" glNormal3bv ::
   Ptr GLbyte -> IO ()
instance NormalComponent GLbyte_ where
   normal3 = glNormal3b
   normal3v = glNormal3bv
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glNormal3s" glNormal3s ::
   GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glNormal3sv" glNormal3sv ::
   Ptr GLshort -> IO ()
instance NormalComponent GLshort_ where
   normal3 = glNormal3s
   normal3v = glNormal3sv
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glNormal3i" glNormal3i ::
   GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glNormal3iv" glNormal3iv ::
   Ptr GLint -> IO ()
instance NormalComponent GLint_ where
   normal3 = glNormal3i
   normal3v = glNormal3iv
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glNormal3f" glNormal3f ::
   GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glNormal3fv" glNormal3fv ::
   Ptr GLfloat -> IO ()
instance NormalComponent GLfloat_ where
   normal3 = glNormal3f
   normal3v = glNormal3fv
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glNormal3d" glNormal3d ::
   GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glNormal3dv" glNormal3dv ::
   Ptr GLdouble -> IO ()
instance NormalComponent GLdouble_ where
   normal3 = glNormal3d
   normal3v = glNormal3dv
--------------------------------------------------------------------------------
-- | Change the current normal. Integral arguments are converted to
-- floating-point with a linear mapping that maps the most positive
-- representable integer value to 1.0, and the most negative representable
-- integer value to -1.0.
--
-- Normals specified with 'normal' or 'normalv' need not have unit length.
-- If 'Graphics.Rendering.OpenGL.GL.CoordTrans.normalize' is enabled, then
-- normals of any length specified with 'normal' or 'normalv' are normalized
-- after transformation. If
-- 'Graphics.Rendering.OpenGL.GL.CoordTrans.rescaleNormal' is enabled, normals
-- are scaled by a scaling factor derived from the modelview matrix.
-- 'Graphics.Rendering.OpenGL.GL.CoordTrans.rescaleNormal' requires that the
-- originally specified normals were of unit length, and that the modelview
-- matrix contains only uniform scales for proper results. Normalization is
-- initially disabled.
class Normal a where
   normal :: a -> IO ()      -- ^ Set the current normal directly.
   normalv :: Ptr a -> IO () -- ^ Set the current normal from memory.
-- | A three-dimensional normal.
data Normal3 a = Normal3 a a a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores three consecutive
-- components, matching glNormal3*v.
instance NormalComponent a => Normal (Normal3 a) where
   normal (Normal3 x y z) = normal3 x y z
   normalv = normal3v . (castPtr :: Ptr (Normal3 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (Normal3 a) where
   sizeOf ~(Normal3 x _ _) = 3 * sizeOf x
   alignment ~(Normal3 x _ _) = alignment x
   peek = peek3 Normal3 . castPtr
   poke ptr (Normal3 x y z) = poke3 (castPtr ptr) x y z
--------------------------------------------------------------------------------
-- | The current fog coordinate. The initial value is 0.
currentFogCoord :: StateVar (FogCoord1 GLfloat)
currentFogCoord =
   -- read via the GetCurrentFogCoord query, write by re-issuing 'fogCoord'
   makeStateVar (getFloat1 FogCoord1 GetCurrentFogCoord) fogCoord
--------------------------------------------------------------------------------
-- | The class of all types which can be used as the fog coordinate.
class FogCoordComponent a where
   fogCoord1 :: a -> IO ()     -- ^ Set the current fog coordinate.
   fogCoord1v :: Ptr a -> IO () -- ^ As 'fogCoord1', reading from memory.
--------------------------------------------------------------------------------
-- Fog-coordinate entry points are declared through the EXTENSION_ENTRY
-- CPP macro ("GL_EXT_fog_coord or OpenGL 1.4"); only float and double
-- component forms exist.
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoordfEXT,GLfloat -> IO ())
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoordfvEXT,Ptr GLfloat -> IO ())
instance FogCoordComponent GLfloat_ where
   fogCoord1 = glFogCoordfEXT
   fogCoord1v = glFogCoordfvEXT
--------------------------------------------------------------------------------
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoorddEXT,GLdouble -> IO ())
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoorddvEXT,Ptr GLdouble -> IO ())
instance FogCoordComponent GLdouble_ where
   fogCoord1 = glFogCoorddEXT
   fogCoord1v = glFogCoorddvEXT
--------------------------------------------------------------------------------
-- | Change the current fog coordinate.
class FogCoord a where
   fogCoord :: a -> IO ()      -- ^ Set the current fog coordinate directly.
   fogCoordv :: Ptr a -> IO () -- ^ Set the current fog coordinate from memory.

-- | A fog coordinate.
newtype FogCoord1 a = FogCoord1 a
   deriving ( Eq, Ord, Show )

-- The castPtr unwraps the newtype at the pointer level (zero cost).
instance FogCoordComponent a => FogCoord (FogCoord1 a) where
   fogCoord (FogCoord1 c) = fogCoord1 c
   fogCoordv = fogCoord1v . (castPtr :: Ptr (FogCoord1 b) -> Ptr b)
--------------------------------------------------------------------------------
-- | If 'rgbaMode' contains 'True', the color buffers store RGBA value. If
-- color indexes are stored, it contains 'False'.
rgbaMode :: GettableStateVar Bool
-- read-only: backed by the GetRGBAMode boolean query
rgbaMode = makeGettableStateVar (getBoolean1 unmarshalGLboolean GetRGBAMode)
--------------------------------------------------------------------------------
-- | The current color (/R/, /G/, /B/, /A/). The initial value is (1, 1, 1, 1).
-- Note that this state variable is significant only when the GL is in RGBA
-- mode.
currentColor :: StateVar (Color4 GLfloat)
currentColor =
   -- read via the GetCurrentColor query, write by re-issuing 'color'
   makeStateVar (getFloat4 Color4 GetCurrentColor) color
-- | The current secondary color (/R/, /G/, /B/, /A/). The initial value is
-- (0, 0, 0, 1). Note that this state variable is significant only when the GL is
-- in RGBA mode.
currentSecondaryColor :: StateVar (Color4 GLfloat)
currentSecondaryColor =
   -- NOTE(review): the write side re-issues 'color', i.e. it sets the
   -- *primary* current color, not the secondary one -- looks suspicious;
   -- confirm against the upstream source whether 'secondaryColor' was meant.
   makeStateVar (getFloat4 Color4 GetCurrentSecondaryColor) color
--------------------------------------------------------------------------------
-- | The class of all types which can be used as a color component.
class ColorComponent a where
   -- primary-color setters, RGB and RGBA, by value and from memory
   color3 :: a -> a -> a -> IO ()
   color4 :: a -> a -> a -> a -> IO ()
   color3v :: Ptr a -> IO ()
   color4v :: Ptr a -> IO ()
   -- secondary-color setters; only a 3-component form exists
   secondaryColor3 :: a -> a -> a -> IO ()
   secondaryColor3v :: Ptr a -> IO ()
--------------------------------------------------------------------------------
-- ColorComponent instances for the eight supported component types.  The
-- glColor* entry points are statically imported; the glSecondaryColor3*
-- ones are declared through the EXTENSION_ENTRY CPP macro
-- ("GL_EXT_secondary_color or OpenGL 1.4").
foreign import CALLCONV unsafe "glColor3b" glColor3b ::
   GLbyte -> GLbyte -> GLbyte -> IO ()
foreign import CALLCONV unsafe "glColor4b" glColor4b ::
   GLbyte -> GLbyte -> GLbyte -> GLbyte -> IO ()
foreign import CALLCONV unsafe "glColor3bv" glColor3bv ::
   Ptr GLbyte -> IO ()
foreign import CALLCONV unsafe "glColor4bv" glColor4bv ::
   Ptr GLbyte -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3bEXT,GLbyte -> GLbyte -> GLbyte -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3bvEXT,Ptr GLbyte -> IO ())
instance ColorComponent GLbyte_ where
   color3 = glColor3b
   color4 = glColor4b
   color3v = glColor3bv
   color4v = glColor4bv
   secondaryColor3 = glSecondaryColor3bEXT
   secondaryColor3v = glSecondaryColor3bvEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3s" glColor3s ::
   GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glColor4s" glColor4s ::
   GLshort -> GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glColor3sv" glColor3sv ::
   Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glColor4sv" glColor4sv ::
   Ptr GLshort -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3sEXT,GLshort -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3svEXT,Ptr GLshort -> IO ())
instance ColorComponent GLshort_ where
   color3 = glColor3s
   color4 = glColor4s
   color3v = glColor3sv
   color4v = glColor4sv
   secondaryColor3 = glSecondaryColor3sEXT
   secondaryColor3v = glSecondaryColor3svEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3i" glColor3i ::
   GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glColor4i" glColor4i ::
   GLint -> GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glColor3iv" glColor3iv ::
   Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glColor4iv" glColor4iv ::
   Ptr GLint -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3iEXT,GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3ivEXT,Ptr GLint -> IO ())
instance ColorComponent GLint_ where
   color3 = glColor3i
   color4 = glColor4i
   color3v = glColor3iv
   color4v = glColor4iv
   secondaryColor3 = glSecondaryColor3iEXT
   secondaryColor3v = glSecondaryColor3ivEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3f" glColor3f ::
   GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glColor4f" glColor4f ::
   GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glColor3fv" glColor3fv ::
   Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glColor4fv" glColor4fv ::
   Ptr GLfloat -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3fEXT,GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3fvEXT,Ptr GLfloat -> IO ())
instance ColorComponent GLfloat_ where
   color3 = glColor3f
   color4 = glColor4f
   color3v = glColor3fv
   color4v = glColor4fv
   secondaryColor3 = glSecondaryColor3fEXT
   secondaryColor3v = glSecondaryColor3fvEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3d" glColor3d ::
   GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glColor4d" glColor4d ::
   GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glColor3dv" glColor3dv ::
   Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glColor4dv" glColor4dv ::
   Ptr GLdouble -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3dEXT,GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3dvEXT,Ptr GLdouble -> IO ())
instance ColorComponent GLdouble_ where
   color3 = glColor3d
   color4 = glColor4d
   color3v = glColor3dv
   color4v = glColor4dv
   secondaryColor3 = glSecondaryColor3dEXT
   secondaryColor3v = glSecondaryColor3dvEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3ub" glColor3ub ::
   GLubyte -> GLubyte -> GLubyte -> IO ()
foreign import CALLCONV unsafe "glColor4ub" glColor4ub ::
   GLubyte -> GLubyte -> GLubyte -> GLubyte -> IO ()
foreign import CALLCONV unsafe "glColor3ubv" glColor3ubv ::
   Ptr GLubyte -> IO ()
foreign import CALLCONV unsafe "glColor4ubv" glColor4ubv ::
   Ptr GLubyte -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3ubEXT,GLubyte -> GLubyte -> GLubyte -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3ubvEXT,Ptr GLubyte -> IO ())
instance ColorComponent GLubyte_ where
   color3 = glColor3ub
   color4 = glColor4ub
   color3v = glColor3ubv
   color4v = glColor4ubv
   secondaryColor3 = glSecondaryColor3ubEXT
   secondaryColor3v = glSecondaryColor3ubvEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3us" glColor3us ::
   GLushort -> GLushort -> GLushort -> IO ()
foreign import CALLCONV unsafe "glColor4us" glColor4us ::
   GLushort -> GLushort -> GLushort -> GLushort -> IO ()
foreign import CALLCONV unsafe "glColor3usv" glColor3usv ::
   Ptr GLushort -> IO ()
foreign import CALLCONV unsafe "glColor4usv" glColor4usv ::
   Ptr GLushort -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3usEXT,GLushort -> GLushort -> GLushort -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3usvEXT,Ptr GLushort -> IO ())
instance ColorComponent GLushort_ where
   color3 = glColor3us
   color4 = glColor4us
   color3v = glColor3usv
   color4v = glColor4usv
   secondaryColor3 = glSecondaryColor3usEXT
   secondaryColor3v = glSecondaryColor3usvEXT
--------------------------------------------------------------------------------
foreign import CALLCONV unsafe "glColor3ui" glColor3ui ::
   GLuint -> GLuint -> GLuint -> IO ()
foreign import CALLCONV unsafe "glColor4ui" glColor4ui ::
   GLuint -> GLuint -> GLuint -> GLuint -> IO ()
foreign import CALLCONV unsafe "glColor3uiv" glColor3uiv ::
   Ptr GLuint -> IO ()
foreign import CALLCONV unsafe "glColor4uiv" glColor4uiv ::
   Ptr GLuint -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3uiEXT,GLuint -> GLuint -> GLuint -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3uivEXT,Ptr GLuint -> IO ())
instance ColorComponent GLuint_ where
   color3 = glColor3ui
   color4 = glColor4ui
   color3v = glColor3uiv
   color4v = glColor4uiv
   secondaryColor3 = glSecondaryColor3uiEXT
   secondaryColor3v = glSecondaryColor3uivEXT
--------------------------------------------------------------------------------
-- | Change the current color.
class Color a where
   color :: a -> IO ()      -- ^ Set the current color directly.
   colorv :: Ptr a -> IO () -- ^ Set the current color from memory.
-- | An RGBA color with /A/=1.
data Color3 a = Color3 a a a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores three consecutive
-- components, matching glColor3*v.
instance ColorComponent a => Color (Color3 a) where
   color (Color3 r g b) = color3 r g b
   colorv = color3v . (castPtr :: Ptr (Color3 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (Color3 a) where
   sizeOf ~(Color3 r _ _) = 3 * sizeOf r
   alignment ~(Color3 r _ _) = alignment r
   peek = peek3 Color3 . castPtr
   poke ptr (Color3 r g b) = poke3 (castPtr ptr) r g b
-- | A fully-fledged RGBA color.
data Color4 a = Color4 a a a a
   deriving ( Eq, Ord, Show )

-- castPtr is safe: the Storable instance below stores four consecutive
-- components, matching glColor4*v.
instance ColorComponent a => Color (Color4 a) where
   color (Color4 r g b a) = color4 r g b a
   colorv = color4v . (castPtr :: Ptr (Color4 b) -> Ptr b)

-- Lazy (~) patterns allow sizeOf/alignment on undefined values.
instance Storable a => Storable (Color4 a) where
   sizeOf ~(Color4 r _ _ _) = 4 * sizeOf r
   alignment ~(Color4 r _ _ _) = alignment r
   peek = peek4 Color4 . castPtr
   poke ptr (Color4 r g b a) = poke4 (castPtr ptr) r g b a
--------------------------------------------------------------------------------
-- | Change the current secondary color.
class SecondaryColor a where
   secondaryColor :: a -> IO ()      -- ^ Set the current secondary color directly.
   secondaryColorv :: Ptr a -> IO () -- ^ Set the current secondary color from memory.

-- Only Color3 is an instance: 'ColorComponent' offers only a
-- three-component secondary-color setter ('secondaryColor3').
instance ColorComponent a => SecondaryColor (Color3 a) where
   secondaryColor (Color3 r g b) = secondaryColor3 r g b
   secondaryColorv = secondaryColor3v . (castPtr :: Ptr (Color3 b) -> Ptr b)
--------------------------------------------------------------------------------
-- | The current color index. The initial value is 1. Note that this state
-- variable is significant only when the GL is in color index mode.
currentIndex :: StateVar (Index1 GLint)
-- read via the GetCurrentIndex query, write by re-issuing 'index'
currentIndex = makeStateVar (getInteger1 Index1 GetCurrentIndex) index
--------------------------------------------------------------------------------
-- | The class of all types which can be used as a color index.
-- Instances provide the raw scalar setter ('index1') and the pointer-based
-- setter ('index1v') that the public 'Index' class dispatches to.
class IndexComponent a where
index1 :: a -> IO ()
index1v :: Ptr a -> IO ()
--------------------------------------------------------------------------------
-- GLshort flavour: raw glIndexs/glIndexsv entry points plus the
-- 'IndexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glIndexs" glIndexs ::
GLshort -> IO ()
foreign import CALLCONV unsafe "glIndexsv" glIndexsv ::
Ptr GLshort -> IO ()
instance IndexComponent GLshort_ where
index1 = glIndexs
index1v = glIndexsv
--------------------------------------------------------------------------------
-- GLint flavour: raw glIndexi/glIndexiv entry points plus the
-- 'IndexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glIndexi" glIndexi ::
GLint -> IO ()
foreign import CALLCONV unsafe "glIndexiv" glIndexiv ::
Ptr GLint -> IO ()
instance IndexComponent GLint_ where
index1 = glIndexi
index1v = glIndexiv
--------------------------------------------------------------------------------
-- GLfloat flavour: raw glIndexf/glIndexfv entry points plus the
-- 'IndexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glIndexf" glIndexf ::
GLfloat -> IO ()
foreign import CALLCONV unsafe "glIndexfv" glIndexfv ::
Ptr GLfloat -> IO ()
instance IndexComponent GLfloat_ where
index1 = glIndexf
index1v = glIndexfv
--------------------------------------------------------------------------------
-- GLdouble flavour: raw glIndexd/glIndexdv entry points plus the
-- 'IndexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glIndexd" glIndexd ::
GLdouble -> IO ()
foreign import CALLCONV unsafe "glIndexdv" glIndexdv ::
Ptr GLdouble -> IO ()
instance IndexComponent GLdouble_ where
index1 = glIndexd
index1v = glIndexdv
--------------------------------------------------------------------------------
-- GLubyte flavour: raw glIndexub/glIndexubv entry points plus the
-- 'IndexComponent' instance that routes through them.
foreign import CALLCONV unsafe "glIndexub" glIndexub ::
GLubyte -> IO ()
foreign import CALLCONV unsafe "glIndexubv" glIndexubv ::
Ptr GLubyte -> IO ()
instance IndexComponent GLubyte_ where
index1 = glIndexub
index1v = glIndexubv
--------------------------------------------------------------------------------
-- | Change the current color index. 'index' takes the value directly;
-- 'indexv' reads it from the given pointer.
class Index a where
index :: a -> IO () -- Collision with Prelude.index
indexv :: Ptr a -> IO ()
-- | A single color index value, wrapped so it can carry 'Index' and
-- 'Storable' instances.
newtype Index1 a = Index1 a
deriving ( Eq, Ord, Show )
-- | Setting an 'Index1' updates the current color index.
instance IndexComponent a => Index (Index1 a) where
   index (Index1 n) = index1 n
   indexv ptr = index1v ((castPtr :: Ptr (Index1 c) -> Ptr c) ptr)
-- | Marshal an 'Index1' exactly like its single component. The lazy
-- patterns keep 'sizeOf'/'alignment' from forcing the argument.
instance Storable a => Storable (Index1 a) where
   sizeOf    ~(Index1 n) = sizeOf n
   alignment ~(Index1 n) = alignment n
   peek ptr = peek1 Index1 (castPtr ptr)
   poke ptr (Index1 n) = poke1 (castPtr ptr) n
--------------------------------------------------------------------------------
-- | Identifies a texture unit via its number, which must be in the range of
-- (0 .. 'maxTextureUnit').
newtype TextureUnit = TextureUnit GLuint
deriving ( Eq, Ord, Show )
-- | An implementation must support at least 2 texture units, but it may
-- support up to 32 ones. This state variable can be used to query the actual
-- implementation limit.
--
-- (Fix: the first two Haddock comment lines above had lost their @--@
-- markers, which made them bare code lines and a syntax error.)
maxTextureUnit :: GettableStateVar TextureUnit
maxTextureUnit =
   makeGettableStateVar
      (getInteger1 (TextureUnit . fromIntegral) GetMaxTextureUnits)
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/OpenGL/Graphics/Rendering/OpenGL/GL/VertexSpec.hs | haskell | ------------------------------------------------------------------------------
|
Module : Graphics.Rendering.OpenGL.GL.VertexSpec
License : BSD-style (see the file libraries/OpenGL/LICENSE)
Maintainer :
Stability : provisional
Portability : portable
------------------------------------------------------------------------------
* Vertex Coordinates
** Texture Coordinates
** Normal
** Fog Coordinate
** Color and Secondary Color
* Texture Units
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| The class of all types which can be used as a vertex coordinate.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
This must only be done during
'Graphics.Rendering.OpenGL.GL.BeginEnd.renderPrimitive', otherwise the
behaviour is unspecified. The current values of the auxiliary vertex
attributes are associated with the vertex.
Note that there is no such thing as a \"current vertex\" which could be
retrieved.
| A vertex with /z/=0 and /w/=1.
| A vertex with /w/=1.
------------------------------------------------------------------------------
associated /auxiliary attributes/: Its texture coordinates, a normal, a
fog coordinate, and a color plus a secondary color. For every attribute, the
OpenGL state contains its current value, which can be changed at any time.
Every attribute has a \"natural\" format via which it can be manipulated
@'TexCoord3' 'GLint'@ are converted to floating point values and a /q/
Consequently, the vast majority of classes, functions, and data types in this
module are for convenience only and offer no additional functionality.
------------------------------------------------------------------------------
texture unit (see 'Graphics.Rendering.OpenGL.GL.CoordTrans.activeTexture').
------------------------------------------------------------------------------
| The class of all types which can be used as a texture coordinate.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Change the current texture coordinates of the current or given texture
unit.
| Texture coordinates with /t/=0, /r/=0, and /q/=1.
| Texture coordinates with /r/=0 and /q/=1.
| Texture coordinates with /q/=1.
------------------------------------------------------------------------------
| The current normal (/x/, /y/, /z/). The initial value is the unit vector
------------------------------------------------------------------------------
| The class of all types which can be used as a component of a normal.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Change the current normal. Integral arguments are converted to
floating-point with a linear mapping that maps the most positive
Normals specified with 'normal' or 'normalv' need not have unit length.
If 'Graphics.Rendering.OpenGL.GL.CoordTrans.normalize' is enabled, then
normals of any length specified with 'normal' or 'normalv' are normalized
after transformation. If
'Graphics.Rendering.OpenGL.GL.CoordTrans.rescaleNormal' is enabled, normals
are scaled by a scaling factor derived from the modelview matrix.
'Graphics.Rendering.OpenGL.GL.CoordTrans.rescaleNormal' requires that the
originally specified normals were of unit length, and that the modelview
matrix contains only uniform scales for proper results. Normalization is
initially disabled.
------------------------------------------------------------------------------
| The current fog coordinate. The initial value is 0.
------------------------------------------------------------------------------
| The class of all types which can be used as the fog coordinate.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Change the current fog coordinate.
| A fog coordinate.
------------------------------------------------------------------------------
| If 'rgbaMode' contains 'True', the color buffers store RGBA value. If
color indexes are stored, it contains 'False'.
------------------------------------------------------------------------------
mode.
in RGBA mode.
------------------------------------------------------------------------------
| The class of all types which can be used as a color component.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Change the current color.
An RGBA color with /A/=1.
| A fully-fledged RGBA color.
------------------------------------------------------------------------------
| Change the current secondary color.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| The class of all types which can be used as a color index.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Change the current color index.
Collision with Prelude.index
| A color index.
------------------------------------------------------------------------------
| Identifies a texture unit via its number, which must be in the range of
(0 .. 'maxTextureUnit').
implementation limit. | Copyright : ( c ) 2003
This module corresponds to section 2.7 ( Vertex Specification ) of the
OpenGL 1.4 specs .
module Graphics.Rendering.OpenGL.GL.VertexSpec (
Vertex(..),
VertexComponent,
Vertex2(..), Vertex3(..), Vertex4(..),
* Auxiliary Vertex Attributes
$ AuxiliaryVertexAttributes
currentTextureCoords, TexCoord(..),
TexCoordComponent,
TexCoord1(..), TexCoord2(..), TexCoord3(..), TexCoord4(..),
currentNormal, Normal(..),
NormalComponent,
Normal3(..),
currentFogCoord, FogCoord(..),
FogCoordComponent,
FogCoord1(..),
rgbaMode,
currentColor, Color(..),
currentSecondaryColor, SecondaryColor(..),
ColorComponent,
Color3(..), Color4(..),
currentIndex, Index(..),
IndexComponent,
Index1(..),
TextureUnit(..), maxTextureUnit
) where
import Data.Int
import Data.Word
import Foreign.Ptr ( Ptr, castPtr )
import Foreign.Storable ( Storable(..) )
import Graphics.Rendering.OpenGL.GL.BasicTypes (
GLenum, GLbyte, GLshort, GLint, GLubyte, GLushort, GLuint, GLfloat,
GLdouble )
import Graphics.Rendering.OpenGL.GL.Extensions (
FunPtr, unsafePerformIO, Invoker, getProcAddress )
import Graphics.Rendering.OpenGL.GL.GLboolean ( unmarshalGLboolean )
import Graphics.Rendering.OpenGL.GL.PeekPoke (
poke1, poke2, poke3, poke4,
peek1, peek2, peek3, peek4 )
import Graphics.Rendering.OpenGL.GL.QueryUtils (
GetPName(GetCurrentTextureCoords, GetCurrentNormal, GetCurrentFogCoord,
GetCurrentColor, GetCurrentSecondaryColor, GetCurrentIndex,
GetMaxTextureUnits,GetRGBAMode),
getBoolean1, getInteger1, getFloat1, getFloat3, getFloat4 )
import Graphics.Rendering.OpenGL.GL.StateVar (
GettableStateVar, makeGettableStateVar, StateVar, makeStateVar )
#include "HsOpenGLExt.h"
#include "HsOpenGLTypes.h"
class VertexComponent a where
vertex2 :: a -> a -> IO ()
vertex3 :: a -> a -> a -> IO ()
vertex4 :: a -> a -> a -> a -> IO ()
vertex2v :: Ptr a -> IO ()
vertex3v :: Ptr a -> IO ()
vertex4v :: Ptr a -> IO ()
foreign import CALLCONV unsafe "glVertex2s" glVertex2s ::
GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex3s" glVertex3s ::
GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex4s" glVertex4s ::
GLshort -> GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex2sv" glVertex2sv ::
Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex3sv" glVertex3sv ::
Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glVertex4sv" glVertex4sv ::
Ptr GLshort -> IO ()
instance VertexComponent GLshort_ where
vertex2 = glVertex2s
vertex3 = glVertex3s
vertex4 = glVertex4s
vertex2v = glVertex2sv
vertex3v = glVertex3sv
vertex4v = glVertex4sv
foreign import CALLCONV unsafe "glVertex2i" glVertex2i ::
GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glVertex3i" glVertex3i ::
GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glVertex4i" glVertex4i ::
GLint -> GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glVertex2iv" glVertex2iv ::
Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glVertex3iv" glVertex3iv ::
Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glVertex4iv" glVertex4iv ::
Ptr GLint -> IO ()
instance VertexComponent GLint_ where
vertex2 = glVertex2i
vertex3 = glVertex3i
vertex4 = glVertex4i
vertex2v = glVertex2iv
vertex3v = glVertex3iv
vertex4v = glVertex4iv
foreign import CALLCONV unsafe "glVertex2f" glVertex2f ::
GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex3f" glVertex3f ::
GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex4f" glVertex4f ::
GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex2fv" glVertex2fv ::
Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex3fv" glVertex3fv ::
Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glVertex4fv" glVertex4fv ::
Ptr GLfloat -> IO ()
instance VertexComponent GLfloat_ where
vertex2 = glVertex2f
vertex3 = glVertex3f
vertex4 = glVertex4f
vertex2v = glVertex2fv
vertex3v = glVertex3fv
vertex4v = glVertex4fv
foreign import CALLCONV unsafe "glVertex2d" glVertex2d ::
GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex3d" glVertex3d ::
GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex4d" glVertex4d ::
GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex2dv" glVertex2dv ::
Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex3dv" glVertex3dv ::
Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glVertex4dv" glVertex4dv ::
Ptr GLdouble -> IO ()
instance VertexComponent GLdouble_ where
vertex2 = glVertex2d
vertex3 = glVertex3d
vertex4 = glVertex4d
vertex2v = glVertex2dv
vertex3v = glVertex3dv
vertex4v = glVertex4dv
| Specify the ( /x/ , /y/ , /z/ , ) coordinates of a four - dimensional vertex .
class Vertex a where
vertex :: a -> IO ()
vertexv :: Ptr a -> IO ()
data Vertex2 a = Vertex2 a a
deriving ( Eq, Ord, Show )
instance VertexComponent a => Vertex (Vertex2 a) where
vertex (Vertex2 x y) = vertex2 x y
vertexv = vertex2v . (castPtr :: Ptr (Vertex2 b) -> Ptr b)
instance Storable a => Storable (Vertex2 a) where
sizeOf ~(Vertex2 x _) = 2 * sizeOf x
alignment ~(Vertex2 x _) = alignment x
peek = peek2 Vertex2 . castPtr
poke ptr (Vertex2 x y) = poke2 (castPtr ptr) x y
data Vertex3 a = Vertex3 a a a
deriving ( Eq, Ord, Show )
instance VertexComponent a => Vertex (Vertex3 a) where
vertex (Vertex3 x y z) = vertex3 x y z
vertexv = vertex3v . (castPtr :: Ptr (Vertex3 b) -> Ptr b)
instance Storable a => Storable (Vertex3 a) where
sizeOf ~(Vertex3 x _ _) = 3 * sizeOf x
alignment ~(Vertex3 x _ _) = alignment x
peek = peek3 Vertex3 . castPtr
poke ptr (Vertex3 x y z) = poke3 (castPtr ptr) x y z
| A fully - fledged four - dimensional vertex .
data Vertex4 a = Vertex4 a a a a
deriving ( Eq, Ord, Show )
instance VertexComponent a => Vertex (Vertex4 a) where
vertex (Vertex4 x y z w) = vertex4 x y z w
vertexv = vertex4v . (castPtr :: Ptr (Vertex4 b) -> Ptr b)
instance Storable a => Storable (Vertex4 a) where
sizeOf ~(Vertex4 x _ _ _) = 4 * sizeOf x
alignment ~(Vertex4 x _ _ _) = alignment x
peek = peek4 Vertex4 . castPtr
poke ptr (Vertex4 x y z w) = poke4 (castPtr ptr) x y z w
$ AuxiliaryVertexAttributes
Apart from its coordinates in four - dimensional space , every vertex has
directly as part of the OpenGL state , e.g. the current texture coordinates
are internally handled as @'TexCoord4 ' ' GLfloat'@. Different formats are
converted to this format , e.g. the /s/ , /r/ , and /t/ coordinates of a
coordinate of 1.0 is implicitly assumed .
| The current texture coordinates ( /s/ , /t/ , /r/ , /q/ ) for the current
The initial value is ( 0,0,0,1 ) for all texture units .
currentTextureCoords :: StateVar (TexCoord4 GLfloat)
currentTextureCoords =
makeStateVar (getFloat4 TexCoord4 GetCurrentTextureCoords) texCoord
class TexCoordComponent a where
texCoord1 :: a -> IO ()
texCoord2 :: a -> a -> IO ()
texCoord3 :: a -> a -> a -> IO ()
texCoord4 :: a -> a -> a -> a -> IO ()
texCoord1v :: Ptr a -> IO ()
texCoord2v :: Ptr a -> IO ()
texCoord3v :: Ptr a -> IO ()
texCoord4v :: Ptr a -> IO ()
multiTexCoord1 :: GLenum -> a -> IO ()
multiTexCoord2 :: GLenum -> a -> a -> IO ()
multiTexCoord3 :: GLenum -> a -> a -> a -> IO ()
multiTexCoord4 :: GLenum -> a -> a -> a -> a -> IO ()
multiTexCoord1v :: GLenum -> Ptr a -> IO ()
multiTexCoord2v :: GLenum -> Ptr a -> IO ()
multiTexCoord3v :: GLenum -> Ptr a -> IO ()
multiTexCoord4v :: GLenum -> Ptr a -> IO ()
foreign import CALLCONV unsafe "glTexCoord1s" glTexCoord1s ::
GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord2s" glTexCoord2s ::
GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord3s" glTexCoord3s ::
GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord4s" glTexCoord4s ::
GLshort -> GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord1sv" glTexCoord1sv ::
Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord2sv" glTexCoord2sv ::
Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord3sv" glTexCoord3sv ::
Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glTexCoord4sv" glTexCoord4sv ::
Ptr GLshort -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1sARB,GLenum -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2sARB,GLenum -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3sARB,GLenum -> GLshort -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4sARB,GLenum -> GLshort -> GLshort -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1svARB,GLenum -> Ptr GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2svARB,GLenum -> Ptr GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3svARB,GLenum -> Ptr GLshort -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4svARB,GLenum -> Ptr GLshort -> IO ())
instance TexCoordComponent GLshort_ where
texCoord1 = glTexCoord1s
texCoord2 = glTexCoord2s
texCoord3 = glTexCoord3s
texCoord4 = glTexCoord4s
texCoord1v = glTexCoord1sv
texCoord2v = glTexCoord2sv
texCoord3v = glTexCoord3sv
texCoord4v = glTexCoord4sv
multiTexCoord1 = glMultiTexCoord1sARB
multiTexCoord2 = glMultiTexCoord2sARB
multiTexCoord3 = glMultiTexCoord3sARB
multiTexCoord4 = glMultiTexCoord4sARB
multiTexCoord1v = glMultiTexCoord1svARB
multiTexCoord2v = glMultiTexCoord2svARB
multiTexCoord3v = glMultiTexCoord3svARB
multiTexCoord4v = glMultiTexCoord4svARB
foreign import CALLCONV unsafe "glTexCoord1i" glTexCoord1i ::
GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord2i" glTexCoord2i ::
GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord3i" glTexCoord3i ::
GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord4i" glTexCoord4i ::
GLint -> GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord1iv" glTexCoord1iv ::
Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord2iv" glTexCoord2iv ::
Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord3iv" glTexCoord3iv ::
Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glTexCoord4iv" glTexCoord4iv ::
Ptr GLint -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1iARB,GLenum -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2iARB,GLenum -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3iARB,GLenum -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4iARB,GLenum -> GLint -> GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1ivARB,GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2ivARB,GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3ivARB,GLenum -> Ptr GLint -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4ivARB,GLenum -> Ptr GLint -> IO ())
instance TexCoordComponent GLint_ where
texCoord1 = glTexCoord1i
texCoord2 = glTexCoord2i
texCoord3 = glTexCoord3i
texCoord4 = glTexCoord4i
texCoord1v = glTexCoord1iv
texCoord2v = glTexCoord2iv
texCoord3v = glTexCoord3iv
texCoord4v = glTexCoord4iv
multiTexCoord1 = glMultiTexCoord1iARB
multiTexCoord2 = glMultiTexCoord2iARB
multiTexCoord3 = glMultiTexCoord3iARB
multiTexCoord4 = glMultiTexCoord4iARB
multiTexCoord1v = glMultiTexCoord1ivARB
multiTexCoord2v = glMultiTexCoord2ivARB
multiTexCoord3v = glMultiTexCoord3ivARB
multiTexCoord4v = glMultiTexCoord4ivARB
foreign import CALLCONV unsafe "glTexCoord1f" glTexCoord1f ::
GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord2f" glTexCoord2f ::
GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord3f" glTexCoord3f ::
GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord4f" glTexCoord4f ::
GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord1fv" glTexCoord1fv ::
Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord2fv" glTexCoord2fv ::
Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord3fv" glTexCoord3fv ::
Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glTexCoord4fv" glTexCoord4fv ::
Ptr GLfloat -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1fARB,GLenum -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2fARB,GLenum -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3fARB,GLenum -> GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4fARB,GLenum -> GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1fvARB,GLenum -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2fvARB,GLenum -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3fvARB,GLenum -> Ptr GLfloat -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4fvARB,GLenum -> Ptr GLfloat -> IO ())
instance TexCoordComponent GLfloat_ where
texCoord1 = glTexCoord1f
texCoord2 = glTexCoord2f
texCoord3 = glTexCoord3f
texCoord4 = glTexCoord4f
texCoord1v = glTexCoord1fv
texCoord2v = glTexCoord2fv
texCoord3v = glTexCoord3fv
texCoord4v = glTexCoord4fv
multiTexCoord1 = glMultiTexCoord1fARB
multiTexCoord2 = glMultiTexCoord2fARB
multiTexCoord3 = glMultiTexCoord3fARB
multiTexCoord4 = glMultiTexCoord4fARB
multiTexCoord1v = glMultiTexCoord1fvARB
multiTexCoord2v = glMultiTexCoord2fvARB
multiTexCoord3v = glMultiTexCoord3fvARB
multiTexCoord4v = glMultiTexCoord4fvARB
foreign import CALLCONV unsafe "glTexCoord1d" glTexCoord1d ::
GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord2d" glTexCoord2d ::
GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord3d" glTexCoord3d ::
GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord4d" glTexCoord4d ::
GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord1dv" glTexCoord1dv ::
Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord2dv" glTexCoord2dv ::
Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord3dv" glTexCoord3dv ::
Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glTexCoord4dv" glTexCoord4dv ::
Ptr GLdouble -> IO ()
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1dARB,GLenum -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2dARB,GLenum -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3dARB,GLenum -> GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4dARB,GLenum -> GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord1dvARB,GLenum -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord2dvARB,GLenum -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord3dvARB,GLenum -> Ptr GLdouble -> IO ())
EXTENSION_ENTRY("GL_ARB_multitexture or OpenGL 1.3",glMultiTexCoord4dvARB,GLenum -> Ptr GLdouble -> IO ())
instance TexCoordComponent GLdouble_ where
texCoord1 = glTexCoord1d
texCoord2 = glTexCoord2d
texCoord3 = glTexCoord3d
texCoord4 = glTexCoord4d
texCoord1v = glTexCoord1dv
texCoord2v = glTexCoord2dv
texCoord3v = glTexCoord3dv
texCoord4v = glTexCoord4dv
multiTexCoord1 = glMultiTexCoord1dARB
multiTexCoord2 = glMultiTexCoord2dARB
multiTexCoord3 = glMultiTexCoord3dARB
multiTexCoord4 = glMultiTexCoord4dARB
multiTexCoord1v = glMultiTexCoord1dvARB
multiTexCoord2v = glMultiTexCoord2dvARB
multiTexCoord3v = glMultiTexCoord3dvARB
multiTexCoord4v = glMultiTexCoord4dvARB
class TexCoord a where
texCoord :: a -> IO ()
texCoordv :: Ptr a -> IO ()
multiTexCoord :: TextureUnit -> a -> IO ()
multiTexCoordv :: TextureUnit -> Ptr a -> IO ()
data TexCoord1 a = TexCoord1 a
deriving ( Eq, Ord, Show )
instance TexCoordComponent a => TexCoord (TexCoord1 a) where
texCoord (TexCoord1 s) = texCoord1 s
texCoordv = texCoord1v . (castPtr :: Ptr (TexCoord1 b) -> Ptr b)
multiTexCoord (TextureUnit u) (TexCoord1 s) =
multiTexCoord1 (fromIntegral u) s
multiTexCoordv (TextureUnit u) =
multiTexCoord1v (fromIntegral u) . (castPtr :: Ptr (TexCoord1 b) -> Ptr b)
instance Storable a => Storable (TexCoord1 a) where
sizeOf ~(TexCoord1 s) = sizeOf s
alignment ~(TexCoord1 s) = alignment s
peek = peek1 TexCoord1 . castPtr
poke ptr (TexCoord1 s) = poke1 (castPtr ptr) s
data TexCoord2 a = TexCoord2 a a
deriving ( Eq, Ord, Show )
instance TexCoordComponent a => TexCoord (TexCoord2 a) where
texCoord (TexCoord2 s t) = texCoord2 s t
texCoordv = texCoord2v . (castPtr :: Ptr (TexCoord2 b) -> Ptr b)
multiTexCoord (TextureUnit u) (TexCoord2 s t) =
multiTexCoord2 (fromIntegral u) s t
multiTexCoordv (TextureUnit u) =
multiTexCoord2v (fromIntegral u) . (castPtr :: Ptr (TexCoord2 b) -> Ptr b)
instance Storable a => Storable (TexCoord2 a) where
sizeOf ~(TexCoord2 s _) = 2 * sizeOf s
alignment ~(TexCoord2 s _) = alignment s
peek = peek2 TexCoord2 . castPtr
poke ptr (TexCoord2 s t) = poke2 (castPtr ptr) s t
data TexCoord3 a = TexCoord3 a a a
deriving ( Eq, Ord, Show )
instance TexCoordComponent a => TexCoord (TexCoord3 a) where
texCoord (TexCoord3 s t r) = texCoord3 s t r
texCoordv = texCoord3v . (castPtr :: Ptr (TexCoord3 b) -> Ptr b)
multiTexCoord (TextureUnit u) (TexCoord3 s t r) =
multiTexCoord3 (fromIntegral u) s t r
multiTexCoordv (TextureUnit u) =
multiTexCoord3v (fromIntegral u) . (castPtr :: Ptr (TexCoord3 b) -> Ptr b)
instance Storable a => Storable (TexCoord3 a) where
sizeOf ~(TexCoord3 s _ _) = 3 * sizeOf s
alignment ~(TexCoord3 s _ _) = alignment s
peek = peek3 TexCoord3 . castPtr
poke ptr (TexCoord3 s t r) = poke3 (castPtr ptr) s t r
| Fully - fledged four - dimensional texture coordinates .
data TexCoord4 a = TexCoord4 a a a a
deriving ( Eq, Ord, Show )
instance TexCoordComponent a => TexCoord (TexCoord4 a) where
texCoord (TexCoord4 s t r q) = texCoord4 s t r q
texCoordv = texCoord4v . (castPtr :: Ptr (TexCoord4 b) -> Ptr b)
multiTexCoord (TextureUnit u) (TexCoord4 s t r q) =
multiTexCoord4 (fromIntegral u) s t r q
multiTexCoordv (TextureUnit u) =
multiTexCoord4v (fromIntegral u) . (castPtr :: Ptr (TexCoord4 b) -> Ptr b)
instance Storable a => Storable (TexCoord4 a) where
sizeOf ~(TexCoord4 s _ _ _) = 4 * sizeOf s
alignment ~(TexCoord4 s _ _ _) = alignment s
peek = peek4 TexCoord4 . castPtr
poke ptr (TexCoord4 s t r q) = poke4 (castPtr ptr) s t r q
( 0 , 0 , 1 ) .
currentNormal :: StateVar (Normal3 GLfloat)
currentNormal = makeStateVar (getFloat3 Normal3 GetCurrentNormal) normal
class NormalComponent a where
normal3 :: a -> a -> a -> IO ()
normal3v :: Ptr a -> IO ()
foreign import CALLCONV unsafe "glNormal3b" glNormal3b ::
GLbyte -> GLbyte -> GLbyte -> IO ()
foreign import CALLCONV unsafe "glNormal3bv" glNormal3bv ::
Ptr GLbyte -> IO ()
instance NormalComponent GLbyte_ where
normal3 = glNormal3b
normal3v = glNormal3bv
foreign import CALLCONV unsafe "glNormal3s" glNormal3s ::
GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glNormal3sv" glNormal3sv ::
Ptr GLshort -> IO ()
instance NormalComponent GLshort_ where
normal3 = glNormal3s
normal3v = glNormal3sv
foreign import CALLCONV unsafe "glNormal3i" glNormal3i ::
GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glNormal3iv" glNormal3iv ::
Ptr GLint -> IO ()
instance NormalComponent GLint_ where
normal3 = glNormal3i
normal3v = glNormal3iv
foreign import CALLCONV unsafe "glNormal3f" glNormal3f ::
GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glNormal3fv" glNormal3fv ::
Ptr GLfloat -> IO ()
instance NormalComponent GLfloat_ where
normal3 = glNormal3f
normal3v = glNormal3fv
foreign import CALLCONV unsafe "glNormal3d" glNormal3d ::
GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glNormal3dv" glNormal3dv ::
Ptr GLdouble -> IO ()
instance NormalComponent GLdouble_ where
normal3 = glNormal3d
normal3v = glNormal3dv
representable integer value to 1.0 , and the most negative representable
integer value to -1.0 .
class Normal a where
normal :: a -> IO ()
normalv :: Ptr a -> IO ()
A three - dimensional normal .
data Normal3 a = Normal3 a a a
deriving ( Eq, Ord, Show )
instance NormalComponent a => Normal (Normal3 a) where
normal (Normal3 x y z) = normal3 x y z
normalv = normal3v . (castPtr :: Ptr (Normal3 b) -> Ptr b)
instance Storable a => Storable (Normal3 a) where
sizeOf ~(Normal3 x _ _) = 3 * sizeOf x
alignment ~(Normal3 x _ _) = alignment x
peek = peek3 Normal3 . castPtr
poke ptr (Normal3 x y z) = poke3 (castPtr ptr) x y z
currentFogCoord :: StateVar (FogCoord1 GLfloat)
currentFogCoord =
makeStateVar (getFloat1 FogCoord1 GetCurrentFogCoord) fogCoord
class FogCoordComponent a where
fogCoord1 :: a -> IO ()
fogCoord1v :: Ptr a -> IO ()
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoordfEXT,GLfloat -> IO ())
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoordfvEXT,Ptr GLfloat -> IO ())
instance FogCoordComponent GLfloat_ where
fogCoord1 = glFogCoordfEXT
fogCoord1v = glFogCoordfvEXT
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoorddEXT,GLdouble -> IO ())
EXTENSION_ENTRY("GL_EXT_fog_coord or OpenGL 1.4",glFogCoorddvEXT,Ptr GLdouble -> IO ())
instance FogCoordComponent GLdouble_ where
fogCoord1 = glFogCoorddEXT
fogCoord1v = glFogCoorddvEXT
class FogCoord a where
fogCoord :: a -> IO ()
fogCoordv :: Ptr a -> IO ()
newtype FogCoord1 a = FogCoord1 a
deriving ( Eq, Ord, Show )
instance FogCoordComponent a => FogCoord (FogCoord1 a) where
fogCoord (FogCoord1 c) = fogCoord1 c
fogCoordv = fogCoord1v . (castPtr :: Ptr (FogCoord1 b) -> Ptr b)
rgbaMode :: GettableStateVar Bool
rgbaMode = makeGettableStateVar (getBoolean1 unmarshalGLboolean GetRGBAMode)
The current color ( /R/ , /G/ , /B/ , /A/ ) . The initial value is ( 1 , 1 , 1 , 1 ) .
Note that this state variable is significant only when the GL is in RGBA
currentColor :: StateVar (Color4 GLfloat)
currentColor =
makeStateVar (getFloat4 Color4 GetCurrentColor) color
The current secondary color ( /R/ , /G/ , /B/ , /A/ ) . The initial value is
( 0 , 0 , 0 , 1 ) . Note that this state variable is significant only when the GL is
currentSecondaryColor :: StateVar (Color4 GLfloat)
currentSecondaryColor =
makeStateVar (getFloat4 Color4 GetCurrentSecondaryColor) color
class ColorComponent a where
color3 :: a -> a -> a -> IO ()
color4 :: a -> a -> a -> a -> IO ()
color3v :: Ptr a -> IO ()
color4v :: Ptr a -> IO ()
secondaryColor3 :: a -> a -> a -> IO ()
secondaryColor3v :: Ptr a -> IO ()
foreign import CALLCONV unsafe "glColor3b" glColor3b ::
GLbyte -> GLbyte -> GLbyte -> IO ()
foreign import CALLCONV unsafe "glColor4b" glColor4b ::
GLbyte -> GLbyte -> GLbyte -> GLbyte -> IO ()
foreign import CALLCONV unsafe "glColor3bv" glColor3bv ::
Ptr GLbyte -> IO ()
foreign import CALLCONV unsafe "glColor4bv" glColor4bv ::
Ptr GLbyte -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3bEXT,GLbyte -> GLbyte -> GLbyte -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3bvEXT,Ptr GLbyte -> IO ())
instance ColorComponent GLbyte_ where
color3 = glColor3b
color4 = glColor4b
color3v = glColor3bv
color4v = glColor4bv
secondaryColor3 = glSecondaryColor3bEXT
secondaryColor3v = glSecondaryColor3bvEXT
foreign import CALLCONV unsafe "glColor3s" glColor3s ::
GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glColor4s" glColor4s ::
GLshort -> GLshort -> GLshort -> GLshort -> IO ()
foreign import CALLCONV unsafe "glColor3sv" glColor3sv ::
Ptr GLshort -> IO ()
foreign import CALLCONV unsafe "glColor4sv" glColor4sv ::
Ptr GLshort -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3sEXT,GLshort -> GLshort -> GLshort -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3svEXT,Ptr GLshort -> IO ())
instance ColorComponent GLshort_ where
color3 = glColor3s
color4 = glColor4s
color3v = glColor3sv
color4v = glColor4sv
secondaryColor3 = glSecondaryColor3sEXT
secondaryColor3v = glSecondaryColor3svEXT
foreign import CALLCONV unsafe "glColor3i" glColor3i ::
GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glColor4i" glColor4i ::
GLint -> GLint -> GLint -> GLint -> IO ()
foreign import CALLCONV unsafe "glColor3iv" glColor3iv ::
Ptr GLint -> IO ()
foreign import CALLCONV unsafe "glColor4iv" glColor4iv ::
Ptr GLint -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3iEXT,GLint -> GLint -> GLint -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3ivEXT,Ptr GLint -> IO ())
instance ColorComponent GLint_ where
color3 = glColor3i
color4 = glColor4i
color3v = glColor3iv
color4v = glColor4iv
secondaryColor3 = glSecondaryColor3iEXT
secondaryColor3v = glSecondaryColor3ivEXT
foreign import CALLCONV unsafe "glColor3f" glColor3f ::
GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glColor4f" glColor4f ::
GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
foreign import CALLCONV unsafe "glColor3fv" glColor3fv ::
Ptr GLfloat -> IO ()
foreign import CALLCONV unsafe "glColor4fv" glColor4fv ::
Ptr GLfloat -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3fEXT,GLfloat -> GLfloat -> GLfloat -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3fvEXT,Ptr GLfloat -> IO ())
instance ColorComponent GLfloat_ where
color3 = glColor3f
color4 = glColor4f
color3v = glColor3fv
color4v = glColor4fv
secondaryColor3 = glSecondaryColor3fEXT
secondaryColor3v = glSecondaryColor3fvEXT
foreign import CALLCONV unsafe "glColor3d" glColor3d ::
GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glColor4d" glColor4d ::
GLdouble -> GLdouble -> GLdouble -> GLdouble -> IO ()
foreign import CALLCONV unsafe "glColor3dv" glColor3dv ::
Ptr GLdouble -> IO ()
foreign import CALLCONV unsafe "glColor4dv" glColor4dv ::
Ptr GLdouble -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3dEXT,GLdouble -> GLdouble -> GLdouble -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3dvEXT,Ptr GLdouble -> IO ())
instance ColorComponent GLdouble_ where
color3 = glColor3d
color4 = glColor4d
color3v = glColor3dv
color4v = glColor4dv
secondaryColor3 = glSecondaryColor3dEXT
secondaryColor3v = glSecondaryColor3dvEXT
foreign import CALLCONV unsafe "glColor3ub" glColor3ub ::
GLubyte -> GLubyte -> GLubyte -> IO ()
foreign import CALLCONV unsafe "glColor4ub" glColor4ub ::
GLubyte -> GLubyte -> GLubyte -> GLubyte -> IO ()
foreign import CALLCONV unsafe "glColor3ubv" glColor3ubv ::
Ptr GLubyte -> IO ()
foreign import CALLCONV unsafe "glColor4ubv" glColor4ubv ::
Ptr GLubyte -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3ubEXT,GLubyte -> GLubyte -> GLubyte -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3ubvEXT,Ptr GLubyte -> IO ())
instance ColorComponent GLubyte_ where
color3 = glColor3ub
color4 = glColor4ub
color3v = glColor3ubv
color4v = glColor4ubv
secondaryColor3 = glSecondaryColor3ubEXT
secondaryColor3v = glSecondaryColor3ubvEXT
foreign import CALLCONV unsafe "glColor3us" glColor3us ::
GLushort -> GLushort -> GLushort -> IO ()
foreign import CALLCONV unsafe "glColor4us" glColor4us ::
GLushort -> GLushort -> GLushort -> GLushort -> IO ()
foreign import CALLCONV unsafe "glColor3usv" glColor3usv ::
Ptr GLushort -> IO ()
foreign import CALLCONV unsafe "glColor4usv" glColor4usv ::
Ptr GLushort -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3usEXT,GLushort -> GLushort -> GLushort -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3usvEXT,Ptr GLushort -> IO ())
instance ColorComponent GLushort_ where
color3 = glColor3us
color4 = glColor4us
color3v = glColor3usv
color4v = glColor4usv
secondaryColor3 = glSecondaryColor3usEXT
secondaryColor3v = glSecondaryColor3usvEXT
foreign import CALLCONV unsafe "glColor3ui" glColor3ui ::
GLuint -> GLuint -> GLuint -> IO ()
foreign import CALLCONV unsafe "glColor4ui" glColor4ui ::
GLuint -> GLuint -> GLuint -> GLuint -> IO ()
foreign import CALLCONV unsafe "glColor3uiv" glColor3uiv ::
Ptr GLuint -> IO ()
foreign import CALLCONV unsafe "glColor4uiv" glColor4uiv ::
Ptr GLuint -> IO ()
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3uiEXT,GLuint -> GLuint -> GLuint -> IO ())
EXTENSION_ENTRY("GL_EXT_secondary_color or OpenGL 1.4",glSecondaryColor3uivEXT,Ptr GLuint -> IO ())
instance ColorComponent GLuint_ where
color3 = glColor3ui
color4 = glColor4ui
color3v = glColor3uiv
color4v = glColor4uiv
secondaryColor3 = glSecondaryColor3uiEXT
secondaryColor3v = glSecondaryColor3uivEXT
class Color a where
color :: a -> IO ()
colorv :: Ptr a -> IO ()
data Color3 a = Color3 a a a
deriving ( Eq, Ord, Show )
instance ColorComponent a => Color (Color3 a) where
color (Color3 r g b) = color3 r g b
colorv = color3v . (castPtr :: Ptr (Color3 b) -> Ptr b)
instance Storable a => Storable (Color3 a) where
sizeOf ~(Color3 r _ _) = 3 * sizeOf r
alignment ~(Color3 r _ _) = alignment r
peek = peek3 Color3 . castPtr
poke ptr (Color3 r g b) = poke3 (castPtr ptr) r g b
data Color4 a = Color4 a a a a
deriving ( Eq, Ord, Show )
instance ColorComponent a => Color (Color4 a) where
color (Color4 r g b a) = color4 r g b a
colorv = color4v . (castPtr :: Ptr (Color4 b) -> Ptr b)
instance Storable a => Storable (Color4 a) where
sizeOf ~(Color4 r _ _ _) = 4 * sizeOf r
alignment ~(Color4 r _ _ _) = alignment r
peek = peek4 Color4 . castPtr
poke ptr (Color4 r g b a) = poke4 (castPtr ptr) r g b a
class SecondaryColor a where
secondaryColor :: a -> IO ()
secondaryColorv :: Ptr a -> IO ()
instance ColorComponent a => SecondaryColor (Color3 a) where
secondaryColor (Color3 r g b) = secondaryColor3 r g b
secondaryColorv = secondaryColor3v . (castPtr :: Ptr (Color3 b) -> Ptr b)
The current color index . The initial value is 1 . Note that this state
variable is significant only when the GL is in color index mode .
currentIndex :: StateVar (Index1 GLint)
currentIndex = makeStateVar (getInteger1 Index1 GetCurrentIndex) index
class IndexComponent a where
index1 :: a -> IO ()
index1v :: Ptr a -> IO ()
foreign import CALLCONV unsafe "glIndexs" glIndexs ::
GLshort -> IO ()
foreign import CALLCONV unsafe "glIndexsv" glIndexsv ::
Ptr GLshort -> IO ()
instance IndexComponent GLshort_ where
index1 = glIndexs
index1v = glIndexsv
foreign import CALLCONV unsafe "glIndexi" glIndexi ::
GLint -> IO ()
foreign import CALLCONV unsafe "glIndexiv" glIndexiv ::
Ptr GLint -> IO ()
instance IndexComponent GLint_ where
index1 = glIndexi
index1v = glIndexiv
foreign import CALLCONV unsafe "glIndexf" glIndexf ::
GLfloat -> IO ()
foreign import CALLCONV unsafe "glIndexfv" glIndexfv ::
Ptr GLfloat -> IO ()
instance IndexComponent GLfloat_ where
index1 = glIndexf
index1v = glIndexfv
foreign import CALLCONV unsafe "glIndexd" glIndexd ::
GLdouble -> IO ()
foreign import CALLCONV unsafe "glIndexdv" glIndexdv ::
Ptr GLdouble -> IO ()
instance IndexComponent GLdouble_ where
index1 = glIndexd
index1v = glIndexdv
foreign import CALLCONV unsafe "glIndexub" glIndexub ::
GLubyte -> IO ()
foreign import CALLCONV unsafe "glIndexubv" glIndexubv ::
Ptr GLubyte -> IO ()
instance IndexComponent GLubyte_ where
index1 = glIndexub
index1v = glIndexubv
class Index a where
indexv :: Ptr a -> IO ()
newtype Index1 a = Index1 a
deriving ( Eq, Ord, Show )
instance IndexComponent a => Index (Index1 a) where
index (Index1 i) = index1 i
indexv = index1v . (castPtr :: Ptr (Index1 b) -> Ptr b)
instance Storable a => Storable (Index1 a) where
sizeOf ~(Index1 s) = sizeOf s
alignment ~(Index1 s) = alignment s
peek = peek1 Index1 . castPtr
poke ptr (Index1 s) = poke1 (castPtr ptr) s
newtype TextureUnit = TextureUnit GLuint
deriving ( Eq, Ord, Show )
| An implementation must support at least 2 texture units , but it may
support up to 32 ones . This state variable can be used to query the actual
maxTextureUnit :: GettableStateVar TextureUnit
maxTextureUnit =
makeGettableStateVar
(getInteger1 (TextureUnit . fromIntegral) GetMaxTextureUnits)
|
a4cdc26c348773c387884e9dcf6332eac69bfdf9ad5250dd9cec982d211e3ace | cicakhq/potato | views.lisp | (in-package :potato.views)
(declaim #.potato.common::*compile-decl*)
(defmacro ps-view-fix (fields &body body)
`(remove #\Newline (ps-view ,fields ,@body)))
(defun create (id &rest view-defs)
(let ((result (apply #'create-ps-view id view-defs)))
(unless (cdr (assoc :|ok| result))
(error "Error creating view ~a: ~s" id result))))
;;; This function is a lot more complicated that it needs to be.
;;; Originally we found some weird behaviour where the views were not
;;; created correctly even though the CouchDB calls returns ok. In
;;; order to analyse this, a lot of code was added to check that
;;; everything worked. Later, it was revealed that this was caused by
;;; a known CouchDB bug:
;;; -1415
;;;
;;; Thus, the :|created_date| field was added, but the old code that
;;; checks for correct ciew creation still remains until we can
;;; confirm that this was the cause of the issue.
(defun mkview (group-name view-defs &optional update-defs)
(let ((id (concatenate 'string "_design/" group-name)))
(loop
for exists-p = (progn
(delete-document id :if-missing :ignore)
(get-document id :if-missing :ignore))
while exists-p
do (progn
(log:error "View ~s still exists after deleting, waiting 1 second" id)
(sleep 1)))
(let ((result (create-document `((:|language| . "javascript")
(:|views| . ,(loop
for (name map reduce) in view-defs
collect `(,name . ((:|map| . ,map)
,@(when reduce
`((:|reduce| . ,reduce)))))))
(:|created_date| . ,(potato.core:format-timestamp nil (local-time:now)))
,@(when update-defs
`((:|updates| . ,(loop
for (name script) in update-defs
collect (cons name script))))))
:id id)))
(log:debug "Created view ~a. Result: ~s" group-name result)
(sleep 1/10)
;; Verify that the views are available. This shouldn't be
;; needed, but in some circumstances we have seen that when
;; multiple views are created in quick succession, some views
;; are not created even though the creation succeded. The
;; purpose of this code is to detect this case so that it can be
;; debugged later.
(let* ((result (get-document id))
(result-views (potato.common:getfield :|views| result)))
(loop
for view in view-defs
unless (potato.common:getfield (car view) result-views)
do (error "View ~s missing after updating views" (car view)))))))
(defmacro with-zero-gensym (&body body)
(setq *ps-gensym-counter* 0)
`(progn ,@body))
(defmacro zs (&body body)
`(with-zero-gensym
(ps ,@body)))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun docname (type)
(potato.db:couchdb-type-for-class type)))
(defun init-potato-views ()
(mkview "channel"
`((:|users_in_channel|
,(zs (lambda (doc)
(with-slots (type channel users) doc
(when (and (eql type #.(docname 'potato.core:channel-users))
(not (null users)))
(dolist (user-id ((@ |Object| keys) users))
(emit channel (create _id user-id))))))))
(:|channels_for_group|
,(zs (lambda (doc)
(with-slots (type group) doc
(when (eql type #.(docname 'potato.core:channel))
(emit group doc))))))
(:|recent_message|
,(zs (lambda (doc)
(with-slots (type channel users) doc
(when (and (eql type #.(docname 'potato.core:channel-users))
(not (null users)))
(dolist (user-id ((@ |Object| keys) users))
(let ((entry (getprop users user-id)))
(when (> (@ entry count) 0)
(emit (list user-id channel) entry)))))))))
(:|private_channel_list|
,(zs (lambda (doc)
(with-slots (type domain users group_type) doc
(when (and (eql type #.(docname 'potato.core:channel-users))
(eql group_type "PRIVATE"))
(dolist (user-id ((@ |Object| keys) users))
(emit (list domain user-id) doc)))))))
(:|channel_nickname|
,(zs (lambda (doc)
(with-slots (type channel) doc
(when (eql type #.(docname 'potato.core:channel-nickname))
(emit channel doc))))))
(:|channels_in_domain|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:channel))
(emit domain doc)))))))
;; Update functions
`((:|add_user_to_channel|
,(zs (lambda (doc req)
(with-slots (type users deleted) doc
(cond
((not (eql type #.(docname 'potato.core:channel-users)))
(throw "Illegal object type"))
(deleted
(list doc "\"Can't add users to a deleted channel\""))
(t
(let* ((user-id (@ req query user_id))
(current-time (@ req query current_date))
(new-mapping (create :|count| 0 :|last_read| current-time)))
(when (null users)
(setf users (create)))
(unless (getprop users user-id)
(setf (getprop users user-id) new-mapping))
(list doc "\"Updated\""))))))))
(:|remove_user_from_channel|
,(zs (lambda (doc req)
(with-slots (type users group_type) doc
(unless (eql type #.(docname 'potato.core:channel-users))
(throw "Illegal object type"))
(let ((user-id (@ req query user_id)))
(cond ((eql group_type "PRIVATE")
(list doc (concatenate 'string "\"Channel is private\"")))
((and users (getprop users user-id))
(delete (getprop users user-id))
(list doc (concatenate 'string "\"User " user-id " was removed from channel\"")))
(t
(list doc (concatenate 'string "\"User " user-id " is not a member of channel\"")))))))))
(:|incmsg|
,(zs (lambda (doc req)
(with-slots (users) doc
(let ((ret ""))
(dolist (key ((@ |Object| keys) users))
(let ((u (getprop users key)))
(incf (getprop u "count") )
(when (not (eql ret ""))
(setf ret (concatenate 'string ret " ")))
(setf ret (concatenate 'string ret key))))
(list doc (concatenate 'string "\"" ret "\"")))))))
(:|mark_read|
,(zs (lambda (doc req)
(with-slots (users) doc
(let ((user-id (@ req query user_id))
(date (@ req query date)))
(let* ((u (getprop users user-id))
old-count)
(if u
(progn
(setf old-count (getprop u "count"))
(setf (getprop u "count") 0)
(setf (getprop u "last_read") date))
;; ELSE: User does not exist
(setf old-count 0))
(list (if (> old-count 0) doc nil)
(concatenate 'string "{\"count\":" old-count "}"))))))))
(:|mark_hidden|
,(zs (lambda (doc req)
(with-slots (users) doc
(let* ((uid (@ req query user_id))
(show (eql (@ req query show) "1"))
(u (getprop users uid)))
(if u
(let ((old-hidden (getprop u "hide")))
(if (or (and old-hidden (not show))
(and (not old-hidden) show))
(list nil "\"ok\"")
(progn
(setf (getprop u "hide") (not show))
(list doc "\"ok\""))))
;; ELSE: User is not a member of channel
(list nil "\"not-member\"")))))))))
(mkview "user"
`((:|users_by_email|
,(zs (lambda (doc)
(with-slots (type email user) doc
(when (eql type #.(docname 'potato.core:user-email))
(emit email user))))))
(:|emails_by_user|
,(zs (lambda (doc)
(with-slots (type email user) doc
(when (eql type #.(docname 'potato.core:user-email))
(emit user email))))))
(:|channels_by_domain_and_user|
,(zs (lambda (doc)
(with-slots (type users channel domain name group group_type) doc
(when (eql type #.(docname 'potato.core:channel-users))
(let ((user-ids ((@ |Object| keys) users)))
(dolist (user-id user-ids)
(let ((user (getprop users user-id)))
(emit (list user-id domain)
(list channel
name
(if (@ user hide) true false)
group
group_type
(@ user count)
(if (eql group_type "PRIVATE")
(let ((uid0 (aref user-ids 0))
(uid1 (aref user-ids 1)))
(if (eql user-id uid0) uid1 uid0))
;; ELSE: This is not a private chat, return nil
nil)))))))))))
#+nil(:|user_description|
,(zs (lambda (doc)
(with-slots (type _id description image_name default_image_name) doc
(when (eql type #.(docname 'potato.core:user))
(emit _id
(list description (if (and image_name (not (eql image_name "")))
image_name
default_image_name)))))))))
`((:|update_image_name|
,(zs (lambda (doc req)
(let ((name (@ req query name)))
(setf (getprop doc "image_name") name)
(list doc "\"ok\"")))))))
(mkview "domain"
`((:|users_in_domain|
,(zs (lambda (doc)
(with-slots (type domain user user_name role) doc
(when (eql type #.(docname 'potato.core:domain-user))
(emit domain (create :|user| user
:|user_name| user_name
:|role| role)))))))
(:|user_count_in_domain|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:domain-user))
(emit domain 1)))))
,(zs (lambda (key values rereduce)
(sum values))))
(:|domains_for_user|
,(zs (lambda (doc)
(with-slots (type domain domain_name user role) doc
(when (eql type #.(docname 'potato.core:domain-user))
(emit user (create :|domain| domain
:|domain_name| domain_name
:|role| role)))))))
(:|private_domains_for_user|
,(zs (lambda (doc)
(with-slots (type domain domain_name user role) doc
(when (and (eql type #.(docname 'potato.core:domain-user))
(eql role "PRIVATE"))
(emit user (create :|domain| domain :|domain_name| domain_name :|role| role)))))))
(:|domains_for_mail|
,(zs (lambda (doc)
(with-slots (type email_domains) doc
(when (eql type #.(docname 'potato.core:domain))
(dolist (v email_domains)
(emit v doc)))))))
(:|invitations_for_email|
,(zs (lambda (doc)
(with-slots (type email) doc
(when (eql type #.(docname 'potato.core:domain-email-invitation))
(emit email doc))))))
(:|email_invitations_for_domain|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:domain-email-invitation))
(emit domain doc))))))
(:|domain_list|
,(zs (lambda (doc)
(with-slots (type domain_type) doc
(when (eql type #.(docname 'potato.core:domain))
(emit domain_type doc))))))
(:|public_domains|
,(zs (lambda (doc)
(with-slots (_id type join_public) doc
(when (and (eql type #.(docname 'potato.core:domain))
join_public)
(emit _id doc))))))
(:|domain_nickname|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:domain-nickname))
(emit domain doc))))))))
(mkview "group"
`((:|groups_for_user|
,(zs (lambda (doc)
(with-slots (_id domain type name users group_type) doc
(when (eql type #.(docname 'potato.core:group))
(dolist (user users)
(with-slots (user_id role) user
(emit (list domain user_id role)
(create :|group| _id
:|group_name| name
:|group_type| group_type
:|role| role)))))))))
(:|groups_for_user_nodomain|
,(zs (lambda (doc)
(with-slots (_id type name users group_type) doc
(when (eql type #.(docname 'potato.core:group))
(dolist (user users)
(with-slots (user_id role) user
(emit (list user_id role)
(create :|group| _id
:|group_name| name
:|group_type| group_type
:|role| role)))))))))
(:|groups_and_users|
,(zs (lambda (doc)
(with-slots (_id type users) doc
(when (eql type #.(docname 'potato.core:group))
(dolist (user users)
(with-slots (user_id role) user
(emit (list _id user_id) role))))))))
#+nil(:|available_groups_for_domain|
,(zs (lambda (doc)
(with-slots (_id type authorised_domains) doc
(when (eql type "group")
(dolist (domain authorised_domains)
(emit domain _id)))))))
(:|groups_in_domain|
,(zs (lambda (doc)
(with-slots (type domain group_type name) doc
(when (eql type #.(docname 'potato.core:group))
(emit domain (create :|group_type| group_type
:|name| name)))))))))
(mkview "file"
`((:|files_for_channel|
,(zs (lambda (doc)
(with-slots (type channel name confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(stringp confirmed_p))
(emit (list channel name) doc))))))
(:|not_confirmed_files|
,(zs (lambda (doc)
(with-slots (type key created_date confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(not (stringp confirmed_p)))
(emit (list created_date key) doc))))))
(:|size_for_channel|
,(zs (lambda (doc)
(with-slots (type channel _id size confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(stringp confirmed_p))
(emit (list channel _id) size)))))
,(zs (lambda (key values rereduce)
(sum values))))
(:|size_for_user|
,(zs (lambda (doc)
(with-slots (type user channel _id size confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(stringp confirmed_p))
(emit (list user channel _id) size)))))
,(zs (lambda (key values rereduce)
(sum values))))))
(mkview "gcm"
`((:|gcm_for_user|
,(zs (lambda (doc)
(with-slots (type user gcm_token recipient_type) doc
(when (eql type #.(docname 'potato.gcm:gcm-registration))
(emit user (list gcm_token (or recipient_type "GCM"))))))))
(:|unread_channel|
,(zs (lambda (doc)
(with-slots (type user gcm_token unread recipient_type) doc
(when (eql type #.(docname 'potato.gcm:gcm-registration))
(dolist (cid unread)
(emit cid (list user gcm_token (or recipient_type "GCM")))))))))))
(delete-document "_design/index_filter" :if-missing :ignore)
(create-document `((:|filters| . ((:|index_updates| . ,(zs (lambda (doc req)
(with-slots (type updated) doc
(or (eql type #.(docname 'potato.core:user))
(eql type "memberdomain"))))))
(:|created_date| . ,(potato.core:format-timestamp nil (local-time:now))))))
:id "_design/index_filter"))
(defun init-user-notification-views ()
(potato.common:with-user-notification-db
(mkview "user"
`((:|notifications_for_user|
,(zs (lambda (doc)
(with-slots (type user created_date) doc
(when (eql type #.(docname 'potato.user-notification:user-notification))
(emit (list user created_date) doc))))))
(:|notifications_for_user_unread|
,(zs (lambda (doc)
(with-slots (type user created_date read) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not read))
(emit (list user created_date) doc))))))
(:|notifications_for_user_channel|
,(zs (lambda (doc)
(with-slots (type user channel created_date read _id) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not read))
(emit (list user channel created_date) (list _id)))))))
(:|notifications_with_timestamps|
,(zs (lambda (doc)
(with-slots (type read ping_timestamp user channel) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not (null ping_timestamp))
(not read))
(emit ping_timestamp (create :|user| user
:|channel| channel)))))))
(:|notifications_updated|
,(zs (lambda (doc)
(with-slots (type ping_timestamp user read) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not read)
(not (null ping_timestamp)))
(emit ping_timestamp user)))))))
`((:|mark_as_read|
,(zs (lambda (doc req)
(let ((uid (@ req query user)))
(with-slots (user read) doc
(cond ((not (eql user uid))
(list nil "\"no-match\""))
(read
(list nil "\"already-marked-as-read\""))
(t
(setf read true)
(list doc "\"ok\""))))))))))))
(defun init-messages-views ()
(potato.common:with-messages-db
(mkview "channel"
`((:|created_date|
,(zs (lambda (doc)
(with-slots (_id type channel created_date) doc
(when (and (eql type "message"))
(emit (list channel _id) doc)))))))
;; Update functions
`((:|update_message_text|
,(zs (lambda (doc req)
(labels ((copy-message ()
(create :|updated_date| (or (getprop doc "updated_date")
(getprop doc "created_date"))
:|deleted| (getprop doc "deleted")
:|extra_html| (getprop doc "extra_html")
:|text| (getprop doc "text")
:|image| (getprop doc "image"))))
(let ((date (@ req query date))
(image (@ req query update_image))
(image-width (parse-int (@ req query update_image_width)))
(image-height (parse-int (@ req query update_image_height)))
(image-mime-type (@ req query update_image_mime_type))
(old-updated (getprop doc "update"))
(copy (copy-message)))
(if old-updated
(funcall (@ old-updated push) copy)
(setf (getprop doc "update") (list copy)))
(macrolet ((update-if-changed (value field &optional transformer-fn)
`(let ((v (@ req query ,value)))
(when v
(setf (getprop doc ,field)
,(if transformer-fn
`(funcall ,transformer-fn v)
'v))))))
(setf (getprop doc "updated_date") date)
(update-if-changed update_message_text "text")
(update-if-changed update_extra_html "extra_html")
(update-if-changed update_deleted_p "deleted" (lambda (p) (if (eql p "1") true false)))
(when (and image (not (eql image "")))
(setf (getprop doc "image")
(create :|file| image
:|width| image-width
:|height| image-height)))
(list doc (funcall (@ *JSON* stringify)
(create "result" "ok"
"updates" (length (getprop doc "update")))))))))))
(:|update_star_user|
,(zs (lambda (doc req)
(let ((uid (@ req query user_id))
(add (eql (@ req query add) "1")))
(let* ((star-users (let ((u (getprop doc "star_users")))
(if (eql u undefined)
(let ((res (list)))
(setf (getprop doc "star_users") res)
res)
u)))
(position (funcall (@ star-users index-of) uid))
(modified nil))
(cond ((and (>= position 0) (not add))
(funcall (@ star-users splice) position 1)
(setq modified t))
((and (= position -1) add)
(funcall (@ star-users push) uid)
(setq modified t)))
(list (if modified doc nil)
(funcall (@ *JSON* stringify) (if position true false))))))))
(:|update_hidden_user|
,(zs (lambda (doc req)
(let ((uid (@ req query user_id))
(add (eql (@ req query add) "1")))
(let* ((hidden-users (let ((u (getprop doc "hidden")))
(if (eql u undefined)
(let ((res (list)))
(setf (getprop doc "hidden") res)
res)
u)))
(position (funcall (@ hidden-users index-of) uid))
(modified nil))
(cond ((and (>= position 0) (not add))
(funcall (@ hidden-users splice) position 1)
(setq modified t))
((and (= position -1) add)
(funcall (@ hidden-users push) uid)
(setq modified t)))
(list (if modified doc nil)
(funcall (@ *JSON* stringify) (if position true false))))))))))
(delete-document "_design/messagefilters" :if-missing :ignore)
(create-document `((:|filters| . ((:|message_index_updates| . ,(zs (lambda (doc req)
(with-slots (type updated) doc
(and (eql type "message")
(not updated))))))
(:|created_date| . ,(potato.core:format-timestamp nil (local-time:now))))))
:id "_design/messagefilters")))
(defun init-views ()
(init-potato-views)
(init-user-notification-views)
(init-messages-views))
(defun init-views-if-needed ()
(let ((result (clouchdb:get-document "_design/user" :if-missing :ignore)))
(unless result
(init-views))))
| null | https://raw.githubusercontent.com/cicakhq/potato/88b6c92dbbc80a6c9552435604f7b1ae6f2a4026/src/potato/views.lisp | lisp | This function is a lot more complicated that it needs to be.
Originally we found some weird behaviour where the views were not
created correctly even though the CouchDB calls returns ok. In
order to analyse this, a lot of code was added to check that
everything worked. Later, it was revealed that this was caused by
a known CouchDB bug:
-1415
Thus, the :|created_date| field was added, but the old code that
checks for correct ciew creation still remains until we can
confirm that this was the cause of the issue.
Verify that the views are available. This shouldn't be
needed, but in some circumstances we have seen that when
multiple views are created in quick succession, some views
are not created even though the creation succeded. The
purpose of this code is to detect this case so that it can be
debugged later.
Update functions
ELSE: User does not exist
ELSE: User is not a member of channel
ELSE: This is not a private chat, return nil
Update functions | (in-package :potato.views)
(declaim #.potato.common::*compile-decl*)
(defmacro ps-view-fix (fields &body body)
`(remove #\Newline (ps-view ,fields ,@body)))
(defun create (id &rest view-defs)
(let ((result (apply #'create-ps-view id view-defs)))
(unless (cdr (assoc :|ok| result))
(error "Error creating view ~a: ~s" id result))))
(defun mkview (group-name view-defs &optional update-defs)
(let ((id (concatenate 'string "_design/" group-name)))
(loop
for exists-p = (progn
(delete-document id :if-missing :ignore)
(get-document id :if-missing :ignore))
while exists-p
do (progn
(log:error "View ~s still exists after deleting, waiting 1 second" id)
(sleep 1)))
(let ((result (create-document `((:|language| . "javascript")
(:|views| . ,(loop
for (name map reduce) in view-defs
collect `(,name . ((:|map| . ,map)
,@(when reduce
`((:|reduce| . ,reduce)))))))
(:|created_date| . ,(potato.core:format-timestamp nil (local-time:now)))
,@(when update-defs
`((:|updates| . ,(loop
for (name script) in update-defs
collect (cons name script))))))
:id id)))
(log:debug "Created view ~a. Result: ~s" group-name result)
(sleep 1/10)
(let* ((result (get-document id))
(result-views (potato.common:getfield :|views| result)))
(loop
for view in view-defs
unless (potato.common:getfield (car view) result-views)
do (error "View ~s missing after updating views" (car view)))))))
(defmacro with-zero-gensym (&body body)
(setq *ps-gensym-counter* 0)
`(progn ,@body))
(defmacro zs (&body body)
`(with-zero-gensym
(ps ,@body)))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun docname (type)
(potato.db:couchdb-type-for-class type)))
(defun init-potato-views ()
(mkview "channel"
`((:|users_in_channel|
,(zs (lambda (doc)
(with-slots (type channel users) doc
(when (and (eql type #.(docname 'potato.core:channel-users))
(not (null users)))
(dolist (user-id ((@ |Object| keys) users))
(emit channel (create _id user-id))))))))
(:|channels_for_group|
,(zs (lambda (doc)
(with-slots (type group) doc
(when (eql type #.(docname 'potato.core:channel))
(emit group doc))))))
(:|recent_message|
,(zs (lambda (doc)
(with-slots (type channel users) doc
(when (and (eql type #.(docname 'potato.core:channel-users))
(not (null users)))
(dolist (user-id ((@ |Object| keys) users))
(let ((entry (getprop users user-id)))
(when (> (@ entry count) 0)
(emit (list user-id channel) entry)))))))))
(:|private_channel_list|
,(zs (lambda (doc)
(with-slots (type domain users group_type) doc
(when (and (eql type #.(docname 'potato.core:channel-users))
(eql group_type "PRIVATE"))
(dolist (user-id ((@ |Object| keys) users))
(emit (list domain user-id) doc)))))))
(:|channel_nickname|
,(zs (lambda (doc)
(with-slots (type channel) doc
(when (eql type #.(docname 'potato.core:channel-nickname))
(emit channel doc))))))
(:|channels_in_domain|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:channel))
(emit domain doc)))))))
`((:|add_user_to_channel|
,(zs (lambda (doc req)
(with-slots (type users deleted) doc
(cond
((not (eql type #.(docname 'potato.core:channel-users)))
(throw "Illegal object type"))
(deleted
(list doc "\"Can't add users to a deleted channel\""))
(t
(let* ((user-id (@ req query user_id))
(current-time (@ req query current_date))
(new-mapping (create :|count| 0 :|last_read| current-time)))
(when (null users)
(setf users (create)))
(unless (getprop users user-id)
(setf (getprop users user-id) new-mapping))
(list doc "\"Updated\""))))))))
(:|remove_user_from_channel|
,(zs (lambda (doc req)
(with-slots (type users group_type) doc
(unless (eql type #.(docname 'potato.core:channel-users))
(throw "Illegal object type"))
(let ((user-id (@ req query user_id)))
(cond ((eql group_type "PRIVATE")
(list doc (concatenate 'string "\"Channel is private\"")))
((and users (getprop users user-id))
(delete (getprop users user-id))
(list doc (concatenate 'string "\"User " user-id " was removed from channel\"")))
(t
(list doc (concatenate 'string "\"User " user-id " is not a member of channel\"")))))))))
(:|incmsg|
,(zs (lambda (doc req)
(with-slots (users) doc
(let ((ret ""))
(dolist (key ((@ |Object| keys) users))
(let ((u (getprop users key)))
(incf (getprop u "count") )
(when (not (eql ret ""))
(setf ret (concatenate 'string ret " ")))
(setf ret (concatenate 'string ret key))))
(list doc (concatenate 'string "\"" ret "\"")))))))
(:|mark_read|
,(zs (lambda (doc req)
(with-slots (users) doc
(let ((user-id (@ req query user_id))
(date (@ req query date)))
(let* ((u (getprop users user-id))
old-count)
(if u
(progn
(setf old-count (getprop u "count"))
(setf (getprop u "count") 0)
(setf (getprop u "last_read") date))
(setf old-count 0))
(list (if (> old-count 0) doc nil)
(concatenate 'string "{\"count\":" old-count "}"))))))))
(:|mark_hidden|
,(zs (lambda (doc req)
(with-slots (users) doc
(let* ((uid (@ req query user_id))
(show (eql (@ req query show) "1"))
(u (getprop users uid)))
(if u
(let ((old-hidden (getprop u "hide")))
(if (or (and old-hidden (not show))
(and (not old-hidden) show))
(list nil "\"ok\"")
(progn
(setf (getprop u "hide") (not show))
(list doc "\"ok\""))))
(list nil "\"not-member\"")))))))))
(mkview "user"
`((:|users_by_email|
,(zs (lambda (doc)
(with-slots (type email user) doc
(when (eql type #.(docname 'potato.core:user-email))
(emit email user))))))
(:|emails_by_user|
,(zs (lambda (doc)
(with-slots (type email user) doc
(when (eql type #.(docname 'potato.core:user-email))
(emit user email))))))
(:|channels_by_domain_and_user|
,(zs (lambda (doc)
(with-slots (type users channel domain name group group_type) doc
(when (eql type #.(docname 'potato.core:channel-users))
(let ((user-ids ((@ |Object| keys) users)))
(dolist (user-id user-ids)
(let ((user (getprop users user-id)))
(emit (list user-id domain)
(list channel
name
(if (@ user hide) true false)
group
group_type
(@ user count)
(if (eql group_type "PRIVATE")
(let ((uid0 (aref user-ids 0))
(uid1 (aref user-ids 1)))
(if (eql user-id uid0) uid1 uid0))
nil)))))))))))
#+nil(:|user_description|
,(zs (lambda (doc)
(with-slots (type _id description image_name default_image_name) doc
(when (eql type #.(docname 'potato.core:user))
(emit _id
(list description (if (and image_name (not (eql image_name "")))
image_name
default_image_name)))))))))
`((:|update_image_name|
,(zs (lambda (doc req)
(let ((name (@ req query name)))
(setf (getprop doc "image_name") name)
(list doc "\"ok\"")))))))
(mkview "domain"
`((:|users_in_domain|
,(zs (lambda (doc)
(with-slots (type domain user user_name role) doc
(when (eql type #.(docname 'potato.core:domain-user))
(emit domain (create :|user| user
:|user_name| user_name
:|role| role)))))))
(:|user_count_in_domain|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:domain-user))
(emit domain 1)))))
,(zs (lambda (key values rereduce)
(sum values))))
(:|domains_for_user|
,(zs (lambda (doc)
(with-slots (type domain domain_name user role) doc
(when (eql type #.(docname 'potato.core:domain-user))
(emit user (create :|domain| domain
:|domain_name| domain_name
:|role| role)))))))
(:|private_domains_for_user|
,(zs (lambda (doc)
(with-slots (type domain domain_name user role) doc
(when (and (eql type #.(docname 'potato.core:domain-user))
(eql role "PRIVATE"))
(emit user (create :|domain| domain :|domain_name| domain_name :|role| role)))))))
(:|domains_for_mail|
,(zs (lambda (doc)
(with-slots (type email_domains) doc
(when (eql type #.(docname 'potato.core:domain))
(dolist (v email_domains)
(emit v doc)))))))
(:|invitations_for_email|
,(zs (lambda (doc)
(with-slots (type email) doc
(when (eql type #.(docname 'potato.core:domain-email-invitation))
(emit email doc))))))
(:|email_invitations_for_domain|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:domain-email-invitation))
(emit domain doc))))))
(:|domain_list|
,(zs (lambda (doc)
(with-slots (type domain_type) doc
(when (eql type #.(docname 'potato.core:domain))
(emit domain_type doc))))))
(:|public_domains|
,(zs (lambda (doc)
(with-slots (_id type join_public) doc
(when (and (eql type #.(docname 'potato.core:domain))
join_public)
(emit _id doc))))))
(:|domain_nickname|
,(zs (lambda (doc)
(with-slots (type domain) doc
(when (eql type #.(docname 'potato.core:domain-nickname))
(emit domain doc))))))))
(mkview "group"
`((:|groups_for_user|
,(zs (lambda (doc)
(with-slots (_id domain type name users group_type) doc
(when (eql type #.(docname 'potato.core:group))
(dolist (user users)
(with-slots (user_id role) user
(emit (list domain user_id role)
(create :|group| _id
:|group_name| name
:|group_type| group_type
:|role| role)))))))))
(:|groups_for_user_nodomain|
,(zs (lambda (doc)
(with-slots (_id type name users group_type) doc
(when (eql type #.(docname 'potato.core:group))
(dolist (user users)
(with-slots (user_id role) user
(emit (list user_id role)
(create :|group| _id
:|group_name| name
:|group_type| group_type
:|role| role)))))))))
(:|groups_and_users|
,(zs (lambda (doc)
(with-slots (_id type users) doc
(when (eql type #.(docname 'potato.core:group))
(dolist (user users)
(with-slots (user_id role) user
(emit (list _id user_id) role))))))))
#+nil(:|available_groups_for_domain|
,(zs (lambda (doc)
(with-slots (_id type authorised_domains) doc
(when (eql type "group")
(dolist (domain authorised_domains)
(emit domain _id)))))))
(:|groups_in_domain|
,(zs (lambda (doc)
(with-slots (type domain group_type name) doc
(when (eql type #.(docname 'potato.core:group))
(emit domain (create :|group_type| group_type
:|name| name)))))))))
(mkview "file"
`((:|files_for_channel|
,(zs (lambda (doc)
(with-slots (type channel name confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(stringp confirmed_p))
(emit (list channel name) doc))))))
(:|not_confirmed_files|
,(zs (lambda (doc)
(with-slots (type key created_date confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(not (stringp confirmed_p)))
(emit (list created_date key) doc))))))
(:|size_for_channel|
,(zs (lambda (doc)
(with-slots (type channel _id size confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(stringp confirmed_p))
(emit (list channel _id) size)))))
,(zs (lambda (key values rereduce)
(sum values))))
(:|size_for_user|
,(zs (lambda (doc)
(with-slots (type user channel _id size confirmed_p) doc
(when (and (eql type #.(docname 'potato.upload:file))
(stringp confirmed_p))
(emit (list user channel _id) size)))))
,(zs (lambda (key values rereduce)
(sum values))))))
(mkview "gcm"
`((:|gcm_for_user|
,(zs (lambda (doc)
(with-slots (type user gcm_token recipient_type) doc
(when (eql type #.(docname 'potato.gcm:gcm-registration))
(emit user (list gcm_token (or recipient_type "GCM"))))))))
(:|unread_channel|
,(zs (lambda (doc)
(with-slots (type user gcm_token unread recipient_type) doc
(when (eql type #.(docname 'potato.gcm:gcm-registration))
(dolist (cid unread)
(emit cid (list user gcm_token (or recipient_type "GCM")))))))))))
(delete-document "_design/index_filter" :if-missing :ignore)
(create-document `((:|filters| . ((:|index_updates| . ,(zs (lambda (doc req)
(with-slots (type updated) doc
(or (eql type #.(docname 'potato.core:user))
(eql type "memberdomain"))))))
(:|created_date| . ,(potato.core:format-timestamp nil (local-time:now))))))
:id "_design/index_filter"))
(defun init-user-notification-views ()
(potato.common:with-user-notification-db
(mkview "user"
`((:|notifications_for_user|
,(zs (lambda (doc)
(with-slots (type user created_date) doc
(when (eql type #.(docname 'potato.user-notification:user-notification))
(emit (list user created_date) doc))))))
(:|notifications_for_user_unread|
,(zs (lambda (doc)
(with-slots (type user created_date read) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not read))
(emit (list user created_date) doc))))))
(:|notifications_for_user_channel|
,(zs (lambda (doc)
(with-slots (type user channel created_date read _id) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not read))
(emit (list user channel created_date) (list _id)))))))
(:|notifications_with_timestamps|
,(zs (lambda (doc)
(with-slots (type read ping_timestamp user channel) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not (null ping_timestamp))
(not read))
(emit ping_timestamp (create :|user| user
:|channel| channel)))))))
(:|notifications_updated|
,(zs (lambda (doc)
(with-slots (type ping_timestamp user read) doc
(when (and (eql type #.(docname 'potato.user-notification:user-notification))
(not read)
(not (null ping_timestamp)))
(emit ping_timestamp user)))))))
`((:|mark_as_read|
,(zs (lambda (doc req)
(let ((uid (@ req query user)))
(with-slots (user read) doc
(cond ((not (eql user uid))
(list nil "\"no-match\""))
(read
(list nil "\"already-marked-as-read\""))
(t
(setf read true)
(list doc "\"ok\""))))))))))))
(defun init-messages-views ()
(potato.common:with-messages-db
(mkview "channel"
`((:|created_date|
,(zs (lambda (doc)
(with-slots (_id type channel created_date) doc
(when (and (eql type "message"))
(emit (list channel _id) doc)))))))
`((:|update_message_text|
,(zs (lambda (doc req)
(labels ((copy-message ()
(create :|updated_date| (or (getprop doc "updated_date")
(getprop doc "created_date"))
:|deleted| (getprop doc "deleted")
:|extra_html| (getprop doc "extra_html")
:|text| (getprop doc "text")
:|image| (getprop doc "image"))))
(let ((date (@ req query date))
(image (@ req query update_image))
(image-width (parse-int (@ req query update_image_width)))
(image-height (parse-int (@ req query update_image_height)))
(image-mime-type (@ req query update_image_mime_type))
(old-updated (getprop doc "update"))
(copy (copy-message)))
(if old-updated
(funcall (@ old-updated push) copy)
(setf (getprop doc "update") (list copy)))
(macrolet ((update-if-changed (value field &optional transformer-fn)
`(let ((v (@ req query ,value)))
(when v
(setf (getprop doc ,field)
,(if transformer-fn
`(funcall ,transformer-fn v)
'v))))))
(setf (getprop doc "updated_date") date)
(update-if-changed update_message_text "text")
(update-if-changed update_extra_html "extra_html")
(update-if-changed update_deleted_p "deleted" (lambda (p) (if (eql p "1") true false)))
(when (and image (not (eql image "")))
(setf (getprop doc "image")
(create :|file| image
:|width| image-width
:|height| image-height)))
(list doc (funcall (@ *JSON* stringify)
(create "result" "ok"
"updates" (length (getprop doc "update")))))))))))
(:|update_star_user|
,(zs (lambda (doc req)
(let ((uid (@ req query user_id))
(add (eql (@ req query add) "1")))
(let* ((star-users (let ((u (getprop doc "star_users")))
(if (eql u undefined)
(let ((res (list)))
(setf (getprop doc "star_users") res)
res)
u)))
(position (funcall (@ star-users index-of) uid))
(modified nil))
(cond ((and (>= position 0) (not add))
(funcall (@ star-users splice) position 1)
(setq modified t))
((and (= position -1) add)
(funcall (@ star-users push) uid)
(setq modified t)))
(list (if modified doc nil)
(funcall (@ *JSON* stringify) (if position true false))))))))
(:|update_hidden_user|
,(zs (lambda (doc req)
(let ((uid (@ req query user_id))
(add (eql (@ req query add) "1")))
(let* ((hidden-users (let ((u (getprop doc "hidden")))
(if (eql u undefined)
(let ((res (list)))
(setf (getprop doc "hidden") res)
res)
u)))
(position (funcall (@ hidden-users index-of) uid))
(modified nil))
(cond ((and (>= position 0) (not add))
(funcall (@ hidden-users splice) position 1)
(setq modified t))
((and (= position -1) add)
(funcall (@ hidden-users push) uid)
(setq modified t)))
(list (if modified doc nil)
(funcall (@ *JSON* stringify) (if position true false))))))))))
(delete-document "_design/messagefilters" :if-missing :ignore)
(create-document `((:|filters| . ((:|message_index_updates| . ,(zs (lambda (doc req)
(with-slots (type updated) doc
(and (eql type "message")
(not updated))))))
(:|created_date| . ,(potato.core:format-timestamp nil (local-time:now))))))
:id "_design/messagefilters")))
(defun init-views ()
(init-potato-views)
(init-user-notification-views)
(init-messages-views))
(defun init-views-if-needed ()
(let ((result (clouchdb:get-document "_design/user" :if-missing :ignore)))
(unless result
(init-views))))
|
9fe348e85e454f3cdc6752b4e96d73850bb7e63429e168f171b7bbfad98daacd | zyrolasting/file-watchers | lists.rkt | #lang racket/base
(require
racket/contract)
(provide
(contract-out
[not-in-list (-> list? procedure?)]
[list-diff (-> list? list? pair?)]))
(define (not-in-list lst)
(λ (v) (not (member v lst))))
(define (list-diff old new)
(cons (filter (not-in-list old) new)
(filter (not-in-list new) old)))
(module+ test
(require rackunit)
(define A '(1 2 3))
(define B '(3 4 5))
(define (check-pair pair expected-first expected-second)
(check-equal? (car pair) expected-first)
(check-equal? (cdr pair) expected-second))
(check-pred (not-in-list A) 4)
(check-false ((not-in-list A) 3))
(check-pair (list-diff A A) '() '())
(check-pair (list-diff A B) '(4 5) '(1 2))
(check-pair (list-diff B A) '(1 2) '(4 5)))
| null | https://raw.githubusercontent.com/zyrolasting/file-watchers/c1ac766a345a335438165ab0d13a4d8f6aec6162/lists.rkt | racket | #lang racket/base
(require
racket/contract)
(provide
(contract-out
[not-in-list (-> list? procedure?)]
[list-diff (-> list? list? pair?)]))
(define (not-in-list lst)
(λ (v) (not (member v lst))))
(define (list-diff old new)
(cons (filter (not-in-list old) new)
(filter (not-in-list new) old)))
(module+ test
(require rackunit)
(define A '(1 2 3))
(define B '(3 4 5))
(define (check-pair pair expected-first expected-second)
(check-equal? (car pair) expected-first)
(check-equal? (cdr pair) expected-second))
(check-pred (not-in-list A) 4)
(check-false ((not-in-list A) 3))
(check-pair (list-diff A A) '() '())
(check-pair (list-diff A B) '(4 5) '(1 2))
(check-pair (list-diff B A) '(1 2) '(4 5)))
|
|
a397b853b563afaade071054a9c4e4c9bbb379b15a66d67bfe62f59602f7c980 | lemmaandrew/CodingBatHaskell | parenBit.hs | {- From
Given a string that contains a single pair of parenthesis, compute recursively a
new string made of only of the parenthesis and their contents, so \"xyz(abc)123\" yields
\"(abc)\".
-}
import Test.Hspec ( hspec, describe, it, shouldBe )
parenBit :: String -> String
parenBit str = undefined
main :: IO ()
main = hspec $ describe "Tests:" $ do
it "\"(abc)\"" $
parenBit "xyz(abc)123" `shouldBe` "(abc)"
it "\"(hello)\"" $
parenBit "x(hello)" `shouldBe` "(hello)"
it "\"(xy)\"" $
parenBit "(xy)1" `shouldBe` "(xy)"
it "\"(possible)\"" $
parenBit "not really (possible)" `shouldBe` "(possible)"
it "\"(abc)\"" $
parenBit "(abc)" `shouldBe` "(abc)"
it "\"(abc)\"" $
parenBit "(abc)xyz" `shouldBe` "(abc)"
it "\"(abc)\"" $
parenBit "(abc)x" `shouldBe` "(abc)"
it "\"(x)\"" $
parenBit "(x)" `shouldBe` "(x)"
it "\"()\"" $
parenBit "()" `shouldBe` "()"
it "\"(ipsa)\"" $
parenBit "res (ipsa) loquitor" `shouldBe` "(ipsa)"
it "\"(not really)\"" $
parenBit "hello(not really)there" `shouldBe` "(not really)"
it "\"(ab)\"" $
parenBit "ab(ab)ab" `shouldBe` "(ab)"
| null | https://raw.githubusercontent.com/lemmaandrew/CodingBatHaskell/d839118be02e1867504206657a0664fd79d04736/CodingBat/Recursion-1/parenBit.hs | haskell | From
Given a string that contains a single pair of parenthesis, compute recursively a
new string made of only of the parenthesis and their contents, so \"xyz(abc)123\" yields
\"(abc)\".
| import Test.Hspec ( hspec, describe, it, shouldBe )
parenBit :: String -> String
parenBit str = undefined
main :: IO ()
main = hspec $ describe "Tests:" $ do
it "\"(abc)\"" $
parenBit "xyz(abc)123" `shouldBe` "(abc)"
it "\"(hello)\"" $
parenBit "x(hello)" `shouldBe` "(hello)"
it "\"(xy)\"" $
parenBit "(xy)1" `shouldBe` "(xy)"
it "\"(possible)\"" $
parenBit "not really (possible)" `shouldBe` "(possible)"
it "\"(abc)\"" $
parenBit "(abc)" `shouldBe` "(abc)"
it "\"(abc)\"" $
parenBit "(abc)xyz" `shouldBe` "(abc)"
it "\"(abc)\"" $
parenBit "(abc)x" `shouldBe` "(abc)"
it "\"(x)\"" $
parenBit "(x)" `shouldBe` "(x)"
it "\"()\"" $
parenBit "()" `shouldBe` "()"
it "\"(ipsa)\"" $
parenBit "res (ipsa) loquitor" `shouldBe` "(ipsa)"
it "\"(not really)\"" $
parenBit "hello(not really)there" `shouldBe` "(not really)"
it "\"(ab)\"" $
parenBit "ab(ab)ab" `shouldBe` "(ab)"
|
a293a970a8b51efd0c5f00c066ca68fb37375bea0f359bac4e2ce7e6effce6ee | dyzsr/ocaml-selectml | menhirLib.mli | module General : sig
(******************************************************************************)
(* *)
Menhir
(* *)
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
(* special exception on linking, as described in the file LICENSE. *)
(* *)
(******************************************************************************)
(* This module offers general-purpose functions on lists and streams. *)
As of 2017/03/31 , this module is DEPRECATED . It might be removed in
the future .
the future. *)
(* --------------------------------------------------------------------------- *)
(* Lists. *)
[ take n xs ] returns the [ n ] first elements of the list [ xs ] . It is
acceptable for the list [ xs ] to have length less than [ n ] , in
which case [ xs ] itself is returned .
acceptable for the list [xs] to have length less than [n], in
which case [xs] itself is returned. *)
val take: int -> 'a list -> 'a list
[ drop n xs ] returns the list [ xs ] , deprived of its [ n ] first elements .
It is acceptable for the list [ xs ] to have length less than [ n ] , in
which case an empty list is returned .
It is acceptable for the list [xs] to have length less than [n], in
which case an empty list is returned. *)
val drop: int -> 'a list -> 'a list
(* [uniq cmp xs] assumes that the list [xs] is sorted according to the
ordering [cmp] and returns the list [xs] deprived of any duplicate
elements. *)
val uniq: ('a -> 'a -> int) -> 'a list -> 'a list
(* [weed cmp xs] returns the list [xs] deprived of any duplicate elements. *)
val weed: ('a -> 'a -> int) -> 'a list -> 'a list
(* --------------------------------------------------------------------------- *)
(* A stream is a list whose elements are produced on demand. *)
type 'a stream =
'a head Lazy.t
and 'a head =
| Nil
| Cons of 'a * 'a stream
(* The length of a stream. *)
val length: 'a stream -> int
(* Folding over a stream. *)
val foldr: ('a -> 'b -> 'b) -> 'a stream -> 'b -> 'b
end
module Convert : sig
(******************************************************************************)
(* *)
Menhir
(* *)
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
(* special exception on linking, as described in the file LICENSE. *)
(* *)
(******************************************************************************)
An ocamlyacc - style , or Menhir - style , parser requires access to
the lexer , which must be parameterized with a lexing buffer , and
to the lexing buffer itself , where it reads position information .
the lexer, which must be parameterized with a lexing buffer, and
to the lexing buffer itself, where it reads position information. *)
(* This traditional API is convenient when used with ocamllex, but
inelegant when used with other lexer generators. *)
type ('token, 'semantic_value) traditional =
(Lexing.lexbuf -> 'token) -> Lexing.lexbuf -> 'semantic_value
(* This revised API is independent of any lexer generator. Here, the
parser only requires access to the lexer, and the lexer takes no
parameters. The tokens returned by the lexer may contain position
information. *)
type ('token, 'semantic_value) revised =
(unit -> 'token) -> 'semantic_value
(* --------------------------------------------------------------------------- *)
Converting a traditional parser , produced by ocamlyacc or Menhir ,
into a revised parser .
into a revised parser. *)
A token of the revised lexer is essentially a triple of a token
of the traditional lexer ( or raw token ) , a start position , and
and end position . The three [ get ] functions are accessors .
of the traditional lexer (or raw token), a start position, and
and end position. The three [get] functions are accessors. *)
We do not require the type [ ' token ] to actually be a triple type .
This enables complex applications where it is a record type with
more than three fields . It also enables simple applications where
positions are of no interest , so [ ' token ] is just [ ' raw_token ]
and [ get_startp ] and [ get_endp ] return dummy positions .
This enables complex applications where it is a record type with
more than three fields. It also enables simple applications where
positions are of no interest, so ['token] is just ['raw_token]
and [get_startp] and [get_endp] return dummy positions. *)
val traditional2revised:
('token -> 'raw_token) ->
('token -> Lexing.position) ->
('token -> Lexing.position) ->
('raw_token, 'semantic_value) traditional ->
('token, 'semantic_value) revised
(* --------------------------------------------------------------------------- *)
(* Converting a revised parser back to a traditional parser. *)
val revised2traditional:
('raw_token -> Lexing.position -> Lexing.position -> 'token) ->
('token, 'semantic_value) revised ->
('raw_token, 'semantic_value) traditional
(* --------------------------------------------------------------------------- *)
(* Simplified versions of the above, where concrete triples are used. *)
module Simplified : sig
val traditional2revised:
('token, 'semantic_value) traditional ->
('token * Lexing.position * Lexing.position, 'semantic_value) revised
val revised2traditional:
('token * Lexing.position * Lexing.position, 'semantic_value) revised ->
('token, 'semantic_value) traditional
end
end
module IncrementalEngine : sig
(******************************************************************************)
(* *)
Menhir
(* *)
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
(* special exception on linking, as described in the file LICENSE. *)
(* *)
(******************************************************************************)
type position = Lexing.position
open General
This signature describes the incremental LR engine .
(* In this mode, the user controls the lexer, and the parser suspends
itself when it needs to read a new token. *)
module type INCREMENTAL_ENGINE = sig
type token
A value of type [ production ] is ( an index for ) a production . The start
productions ( which do not exist in an \mly file , but are constructed by
Menhir internally ) are not part of this type .
productions (which do not exist in an \mly file, but are constructed by
Menhir internally) are not part of this type. *)
type production
(* The type ['a checkpoint] represents an intermediate or final state of the
parser. An intermediate checkpoint is a suspension: it records the parser's
current state, and allows parsing to be resumed. The parameter ['a] is
the type of the semantic value that will eventually be produced if the
parser succeeds. *)
(* [Accepted] and [Rejected] are final checkpoints. [Accepted] carries a
semantic value. *)
[ InputNeeded ] is an intermediate checkpoint . It means that the parser wishes
to read one token before continuing .
to read one token before continuing. *)
[ Shifting ] is an intermediate checkpoint . It means that the parser is taking
a shift transition . It exposes the state of the parser before and after
the transition . The Boolean parameter tells whether the parser intends to
request a new token after this transition . ( It always does , except when
it is about to accept . )
a shift transition. It exposes the state of the parser before and after
the transition. The Boolean parameter tells whether the parser intends to
request a new token after this transition. (It always does, except when
it is about to accept.) *)
(* [AboutToReduce] is an intermediate checkpoint. It means that the parser is
about to perform a reduction step. It exposes the parser's current
state as well as the production that is about to be reduced. *)
(* [HandlingError] is an intermediate checkpoint. It means that the parser has
detected an error and is currently handling it, in several steps. *)
(* A value of type ['a env] represents a configuration of the automaton:
current state, stack, lookahead token, etc. The parameter ['a] is the
type of the semantic value that will eventually be produced if the parser
succeeds. *)
(* In normal operation, the parser works with checkpoints: see the functions
[offer] and [resume]. However, it is also possible to work directly with
environments (see the functions [pop], [force_reduction], and [feed]) and
to reconstruct a checkpoint out of an environment (see [input_needed]).
This is considered advanced functionality; its purpose is to allow error
recovery strategies to be programmed by the user. *)
type 'a env
type 'a checkpoint = private
| InputNeeded of 'a env
| Shifting of 'a env * 'a env * bool
| AboutToReduce of 'a env * production
| HandlingError of 'a env
| Accepted of 'a
| Rejected
[ offer ] allows the user to resume the parser after it has suspended
itself with a checkpoint of the form [ InputNeeded env ] . [ offer ] expects
the old checkpoint as well as a new token and produces a new checkpoint .
It does not raise any exception .
itself with a checkpoint of the form [InputNeeded env]. [offer] expects
the old checkpoint as well as a new token and produces a new checkpoint.
It does not raise any exception. *)
val offer:
'a checkpoint ->
token * position * position ->
'a checkpoint
(* [resume] allows the user to resume the parser after it has suspended
itself with a checkpoint of the form [AboutToReduce (env, prod)] or
[HandlingError env]. [resume] expects the old checkpoint and produces a
new checkpoint. It does not raise any exception. *)
The optional argument [ strategy ] influences the manner in which [ resume ]
deals with checkpoints of the form [ ErrorHandling _ ] . Its default value
is [ ` Legacy ] . It can be briefly described as follows :
- If the [ error ] token is used only to report errors ( that is , if the
[ error ] token appears only at the end of a production , whose semantic
action raises an exception ) then the simplified strategy should be
preferred . ( This includes the case where the [ error ] token does not
appear at all in the grammar . )
- If the [ error ] token is used to recover after an error , or if
perfect backward compatibility is required , the legacy strategy
should be selected .
More details on these strategies appear in the file [ Engine.ml ] .
deals with checkpoints of the form [ErrorHandling _]. Its default value
is [`Legacy]. It can be briefly described as follows:
- If the [error] token is used only to report errors (that is, if the
[error] token appears only at the end of a production, whose semantic
action raises an exception) then the simplified strategy should be
preferred. (This includes the case where the [error] token does not
appear at all in the grammar.)
- If the [error] token is used to recover after an error, or if
perfect backward compatibility is required, the legacy strategy
should be selected.
More details on these strategies appear in the file [Engine.ml]. *)
type strategy =
[ `Legacy | `Simplified ]
val resume:
?strategy:strategy ->
'a checkpoint ->
'a checkpoint
(* A token supplier is a function of no arguments which delivers a new token
(together with its start and end positions) every time it is called. *)
type supplier =
unit -> token * position * position
(* A pair of a lexer and a lexing buffer can be easily turned into a
supplier. *)
val lexer_lexbuf_to_supplier:
(Lexing.lexbuf -> token) ->
Lexing.lexbuf ->
supplier
The functions [ offer ] and [ resume ] are sufficient to write a parser loop .
One can imagine many variations ( which is why we expose these functions
in the first place ! ) . Here , we expose a few variations of the main loop ,
ready for use .
One can imagine many variations (which is why we expose these functions
in the first place!). Here, we expose a few variations of the main loop,
ready for use. *)
(* [loop supplier checkpoint] begins parsing from [checkpoint], reading
tokens from [supplier]. It continues parsing until it reaches a
checkpoint of the form [Accepted v] or [Rejected]. In the former case, it
returns [v]. In the latter case, it raises the exception [Error].
The optional argument [strategy], whose default value is [Legacy],
is passed to [resume] and influences the error-handling strategy. *)
val loop: ?strategy:strategy -> supplier -> 'a checkpoint -> 'a
[ loop_handle succeed fail supplier checkpoint ] begins parsing from
[ checkpoint ] , reading tokens from [ supplier ] . It continues parsing until
it reaches a checkpoint of the form [ Accepted v ] or [ HandlingError env ]
( or [ Rejected ] , but that should not happen , as [ HandlingError _ ] will be
observed first ) . In the former case , it calls [ succeed v ] . In the latter
case , it calls [ fail ] with this checkpoint . It can not raise [ Error ] .
This means that Menhir 's error - handling procedure does not get a chance
to run . For this reason , there is no [ strategy ] parameter . Instead , the
user can implement her own error handling code , in the [ fail ]
continuation .
[checkpoint], reading tokens from [supplier]. It continues parsing until
it reaches a checkpoint of the form [Accepted v] or [HandlingError env]
(or [Rejected], but that should not happen, as [HandlingError _] will be
observed first). In the former case, it calls [succeed v]. In the latter
case, it calls [fail] with this checkpoint. It cannot raise [Error].
This means that Menhir's error-handling procedure does not get a chance
to run. For this reason, there is no [strategy] parameter. Instead, the
user can implement her own error handling code, in the [fail]
continuation. *)
val loop_handle:
('a -> 'answer) ->
('a checkpoint -> 'answer) ->
supplier -> 'a checkpoint -> 'answer
[ loop_handle_undo ] is analogous to [ loop_handle ] , except it passes a pair
of checkpoints to the failure continuation .
The first ( and oldest ) checkpoint is the last [ InputNeeded ] checkpoint that
was encountered before the error was detected . The second ( and newest )
checkpoint is where the error was detected , as in [ loop_handle ] . Going back
to the first checkpoint can be thought of as undoing any reductions that
were performed after seeing the problematic token . ( These reductions must
be default reductions or spurious reductions . )
[ loop_handle_undo ] must initially be applied to an [ InputNeeded ] checkpoint .
The parser 's initial checkpoints satisfy this constraint .
of checkpoints to the failure continuation.
The first (and oldest) checkpoint is the last [InputNeeded] checkpoint that
was encountered before the error was detected. The second (and newest)
checkpoint is where the error was detected, as in [loop_handle]. Going back
to the first checkpoint can be thought of as undoing any reductions that
were performed after seeing the problematic token. (These reductions must
be default reductions or spurious reductions.)
[loop_handle_undo] must initially be applied to an [InputNeeded] checkpoint.
The parser's initial checkpoints satisfy this constraint. *)
val loop_handle_undo:
('a -> 'answer) ->
('a checkpoint -> 'a checkpoint -> 'answer) ->
supplier -> 'a checkpoint -> 'answer
(* [shifts checkpoint] assumes that [checkpoint] has been obtained by
submitting a token to the parser. It runs the parser from [checkpoint],
through an arbitrary number of reductions, until the parser either
accepts this token (i.e., shifts) or rejects it (i.e., signals an error).
If the parser decides to shift, then [Some env] is returned, where [env]
is the parser's state just before shifting. Otherwise, [None] is
returned. *)
(* It is desirable that the semantic actions be side-effect free, or that
their side-effects be harmless (replayable). *)
val shifts: 'a checkpoint -> 'a env option
(* The function [acceptable] allows testing, after an error has been
detected, which tokens would have been accepted at this point. It is
implemented using [shifts]. Its argument should be an [InputNeeded]
checkpoint. *)
For completeness , one must undo any spurious reductions before carrying out
this test -- that is , one must apply [ acceptable ] to the FIRST checkpoint
that is passed by [ loop_handle_undo ] to its failure continuation .
this test -- that is, one must apply [acceptable] to the FIRST checkpoint
that is passed by [loop_handle_undo] to its failure continuation. *)
(* This test causes some semantic actions to be run! The semantic actions
should be side-effect free, or their side-effects should be harmless. *)
(* The position [pos] is used as the start and end positions of the
hypothetical token, and may be picked up by the semantic actions. We
suggest using the position where the error was detected. *)
val acceptable: 'a checkpoint -> token -> position -> bool
The abstract type [ ' a lr1state ] describes the non - initial states of the
) automaton . The index [ ' a ] represents the type of the semantic value
associated with this state 's incoming symbol .
LR(1) automaton. The index ['a] represents the type of the semantic value
associated with this state's incoming symbol. *)
type 'a lr1state
(* The states of the LR(1) automaton are numbered (from 0 and up). *)
val number: _ lr1state -> int
(* Productions are numbered. *)
(* [find_production i] requires the index [i] to be valid. Use with care. *)
val production_index: production -> int
val find_production: int -> production
(* An element is a pair of a non-initial state [s] and a semantic value [v]
associated with the incoming symbol of this state. The idea is, the value
[v] was pushed onto the stack just before the state [s] was entered. Thus,
for some type ['a], the state [s] has type ['a lr1state] and the value [v]
has type ['a]. In other words, the type [element] is an existential type. *)
type element =
| Element: 'a lr1state * 'a * position * position -> element
(* The parser's stack is (or, more precisely, can be viewed as) a stream of
elements. The type [stream] is defined by the module [General]. *)
(* As of 2017/03/31, the types [stream] and [stack] and the function [stack]
are DEPRECATED. They might be removed in the future. An alternative way
of inspecting the stack is via the functions [top] and [pop]. *)
type stack = (* DEPRECATED *)
element stream
(* This is the parser's stack, a stream of elements. This stream is empty if
the parser is in an initial state; otherwise, it is non-empty. The LR(1)
automaton's current state is the one found in the top element of the
stack. *)
val stack: 'a env -> stack (* DEPRECATED *)
(* [top env] returns the parser's top stack element. The state contained in
this stack element is the current state of the automaton. If the stack is
empty, [None] is returned. In that case, the current state of the
automaton must be an initial state. *)
val top: 'a env -> element option
(* [pop_many i env] pops [i] cells off the automaton's stack. This is done
via [i] successive invocations of [pop]. Thus, [pop_many 1] is [pop]. The
index [i] must be nonnegative. The time complexity is O(i). *)
val pop_many: int -> 'a env -> 'a env option
(* [get i env] returns the parser's [i]-th stack element. The index [i] is
0-based: thus, [get 0] is [top]. If [i] is greater than or equal to the
number of elements in the stack, [None] is returned. The time complexity
is O(i). *)
val get: int -> 'a env -> element option
(* [current_state_number env] is (the integer number of) the automaton's
current state. This works even if the automaton's stack is empty, in
which case the current state is an initial state. This number can be
passed as an argument to a [message] function generated by [menhir
--compile-errors]. *)
val current_state_number: 'a env -> int
(* [equal env1 env2] tells whether the parser configurations [env1] and
[env2] are equal in the sense that the automaton's current state is the
same in [env1] and [env2] and the stack is *physically* the same in
[env1] and [env2]. If [equal env1 env2] is [true], then the sequence of
the stack elements, as observed via [pop] and [top], must be the same in
[env1] and [env2]. Also, if [equal env1 env2] holds, then the checkpoints
[input_needed env1] and [input_needed env2] must be equivalent. The
function [equal] has time complexity O(1). *)
val equal: 'a env -> 'a env -> bool
(* These are the start and end positions of the current lookahead token. If
invoked in an initial state, this function returns a pair of twice the
initial position. *)
val positions: 'a env -> position * position
(* When applied to an environment taken from a checkpoint of the form
[AboutToReduce (env, prod)], the function [env_has_default_reduction]
tells whether the reduction that is about to take place is a default
reduction. *)
val env_has_default_reduction: 'a env -> bool
(* [state_has_default_reduction s] tells whether the state [s] has a default
reduction. This includes the case where [s] is an accepting state. *)
val state_has_default_reduction: _ lr1state -> bool
(* [pop env] returns a new environment, where the parser's top stack cell
has been popped off. (If the stack is empty, [None] is returned.) This
amounts to pretending that the (terminal or nonterminal) symbol that
corresponds to this stack cell has not been read. *)
val pop: 'a env -> 'a env option
(* [force_reduction prod env] should be called only if in the state [env]
the parser is capable of reducing the production [prod]. If this
condition is satisfied, then this production is reduced, which means that
its semantic action is executed (this can have side effects!) and the
automaton makes a goto (nonterminal) transition. If this condition is not
satisfied, [Invalid_argument _] is raised. *)
val force_reduction: production -> 'a env -> 'a env
(* [input_needed env] returns [InputNeeded env]. That is, out of an [env]
that might have been obtained via a series of calls to the functions
[pop], [force_reduction], [feed], etc., it produces a checkpoint, which
can be used to resume normal parsing, by supplying this checkpoint as an
argument to [offer]. *)
(* This function should be used with some care. It could "mess up the
lookahead" in the sense that it allows parsing to resume in an arbitrary
state [s] with an arbitrary lookahead symbol [t], even though Menhir's
reachability analysis (menhir --list-errors) might well think that it is
impossible to reach this particular configuration. If one is using
Menhir's new error reporting facility, this could cause the parser to
reach an error state for which no error message has been prepared. *)
val input_needed: 'a env -> 'a checkpoint
end
(* This signature is a fragment of the inspection API that is made available
to the user when [--inspection] is used. This fragment contains type
definitions for symbols. *)
module type SYMBOLS = sig
(* The type ['a terminal] represents a terminal symbol. The type ['a
nonterminal] represents a nonterminal symbol. In both cases, the index
['a] represents the type of the semantic values associated with this
symbol. The concrete definitions of these types are generated. *)
type 'a terminal
type 'a nonterminal
(* The type ['a symbol] represents a terminal or nonterminal symbol. It is
the disjoint union of the types ['a terminal] and ['a nonterminal]. *)
(* Note: [T] and [N] are declared in GADT style, so the index ['a] is
carried through from the underlying terminal/nonterminal type. *)
type 'a symbol =
| T : 'a terminal -> 'a symbol
| N : 'a nonterminal -> 'a symbol
(* The type [xsymbol] is an existentially quantified version of the type
['a symbol]. This type is useful in situations where the index ['a]
is not statically known. *)
type xsymbol =
| X : 'a symbol -> xsymbol
end
(* This signature describes the inspection API that is made available to the
user when [--inspection] is used. *)
module type INSPECTION = sig
(* The types of symbols are described above. *)
include SYMBOLS
(* The type ['a lr1state] is meant to be the same as in [INCREMENTAL_ENGINE]. *)
type 'a lr1state
(* The type [production] is meant to be the same as in [INCREMENTAL_ENGINE].
It represents a production of the grammar. A production can be examined
via the functions [lhs] and [rhs] below. *)
type production
(* An LR(0) item is a pair of a production [prod] and a valid index [i] into
this production. That is, if the length of [rhs prod] is [n], then [i] is
comprised between 0 and [n], inclusive. *)
type item =
production * int
(* Ordering functions. *)
val compare_terminals: _ terminal -> _ terminal -> int
val compare_nonterminals: _ nonterminal -> _ nonterminal -> int
val compare_symbols: xsymbol -> xsymbol -> int
val compare_productions: production -> production -> int
val compare_items: item -> item -> int
(* [incoming_symbol s] is the incoming symbol of the state [s], that is,
the symbol that the parser must recognize before (has recognized when)
it enters the state [s]. This function gives access to the semantic
value [v] stored in a stack element [Element (s, v, _, _)]. Indeed,
by case analysis on the symbol [incoming_symbol s], one discovers the
type ['a] of the value [v]. *)
val incoming_symbol: 'a lr1state -> 'a symbol
(* [items s] is the set of the LR(0) items in the LR(0) core of the LR(1)
state [s]. This set is not epsilon-closed. This set is presented as a
list, in an arbitrary order. *)
val items: _ lr1state -> item list
(* [lhs prod] is the left-hand side of the production [prod]. This is
always a non-terminal symbol. *)
val lhs: production -> xsymbol
(* [rhs prod] is the right-hand side of the production [prod]. This is
a (possibly empty) sequence of (terminal or nonterminal) symbols. *)
val rhs: production -> xsymbol list
(* [nullable nt] tells whether the non-terminal symbol [nt] is nullable.
That is, it is true if and only if this symbol produces the empty
word [epsilon]. *)
val nullable: _ nonterminal -> bool
(* [first nt t] tells whether the FIRST set of the nonterminal symbol [nt]
contains the terminal symbol [t]. That is, it is true if and only if
[nt] produces a word that begins with [t]. *)
val first: _ nonterminal -> _ terminal -> bool
(* [xfirst] is analogous to [first], but expects a first argument of type
[xsymbol] instead of [_ terminal]. *)
val xfirst: xsymbol -> _ terminal -> bool
(* [foreach_terminal] enumerates the terminal symbols, including [error].
[foreach_terminal_but_error] enumerates the terminal symbols, excluding
[error]. *)
val foreach_terminal: (xsymbol -> 'a -> 'a) -> 'a -> 'a
val foreach_terminal_but_error: (xsymbol -> 'a -> 'a) -> 'a -> 'a
(* The type [env] is meant to be the same as in [INCREMENTAL_ENGINE]. *)
type 'a env
(* [feed symbol startp semv endp env] causes the parser to consume the
(terminal or nonterminal) symbol [symbol], accompanied with the semantic
value [semv] and with the start and end positions [startp] and [endp].
Thus, the automaton makes a transition, and reaches a new state. The
stack grows by one cell. This operation is permitted only if the current
state (as determined by [env]) has an outgoing transition labeled with
[symbol]. Otherwise, [Invalid_argument _] is raised. *)
val feed: 'a symbol -> position -> 'a -> position -> 'b env -> 'b env
end
(* This signature combines the incremental API and the inspection API. *)
module type EVERYTHING = sig
include INCREMENTAL_ENGINE
(* The inspection API is included with destructive substitution ([:=]) so
that its types are identified with those of [INCREMENTAL_ENGINE] above. *)
include INSPECTION
with type 'a lr1state := 'a lr1state
with type production := production
with type 'a env := 'a env
end
end
module EngineTypes : sig
(******************************************************************************)
(* *)
(*                                   Menhir                                   *)
(* *)
(*  Copyright Inria. All rights reserved. This file is distributed under     *)
(*  the terms of the GNU Library General Public License version 2, with a    *)
(* special exception on linking, as described in the file LICENSE. *)
(* *)
(******************************************************************************)
(* This file defines several types and module types that are used in the
specification of module [Engine]. *)
(* --------------------------------------------------------------------------- *)
(* It would be nice if we could keep the structure of stacks and environments
hidden. However, stacks and environments must be accessible to semantic
actions, so the following data structure definitions must be public. *)
(* --------------------------------------------------------------------------- *)
(* A stack is a linked list of cells. A sentinel cell -- which is its own
successor -- is used to mark the bottom of the stack. The sentinel cell
itself is not significant -- it contains dummy values. *)
type ('state, 'semantic_value) stack = {
(* The state that we should go back to if we pop this stack cell. *)
(* This convention means that the state contained in the top stack cell is
not the current state [env.current]. It also means that the state found
within the sentinel is a dummy -- it is never consulted. This convention
is the same as that adopted by the code-based back-end. *)
state: 'state;
(* The semantic value associated with the chunk of input that this cell
represents. *)
semv: 'semantic_value;
(* The start and end positions of the chunk of input that this cell
represents. *)
startp: Lexing.position;
endp: Lexing.position;
(* The next cell down in the stack. If this is a self-pointer, then this
cell is the sentinel, and the stack is conceptually empty. *)
next: ('state, 'semantic_value) stack;
}
(* --------------------------------------------------------------------------- *)
(* A parsing environment contains all of the parser's state (except for the
current program point). *)
type ('state, 'semantic_value, 'token) env = {
(* If this flag is true, then the first component of [env.triple] should
be ignored, as it has been logically overwritten with the [error]
pseudo-token. *)
error: bool;
(* The last token that was obtained from the lexer, together with its start
and end positions. Warning: before the first call to the lexer has taken
place, a dummy (and possibly invalid) token is stored here. *)
triple: 'token * Lexing.position * Lexing.position;
(* The stack. In [CodeBackend], it is passed around on its own,
whereas, here, it is accessed via the environment. *)
stack: ('state, 'semantic_value) stack;
(* The current state. In [CodeBackend], it is passed around on its
own, whereas, here, it is accessed via the environment. *)
current: 'state;
}
(* --------------------------------------------------------------------------- *)
(* A number of logging hooks are used to (optionally) emit logging messages. *)
(* The comments indicate the conventional messages that correspond
to these hooks in the code-based back-end; see [CodeBackend]. *)
module type LOG = sig
type state
type terminal
type production
(* State %d: *)
val state: state -> unit
(* Shifting (<terminal>) to state <state> *)
val shift: terminal -> state -> unit
(* Reducing a production should be logged either as a reduction
event (for regular productions) or as an acceptance event (for
start productions). *)
(* Reducing production <production> / Accepting *)
val reduce_or_accept: production -> unit
(* Lookahead token is now <terminal> (<pos>-<pos>) *)
val lookahead_token: terminal -> Lexing.position -> Lexing.position -> unit
(* Initiating error handling *)
val initiating_error_handling: unit -> unit
(* Resuming error handling *)
val resuming_error_handling: unit -> unit
(* Handling error in state <state> *)
val handling_error: state -> unit
end
(* --------------------------------------------------------------------------- *)
(* This signature describes the parameters that must be supplied to the LR
engine. *)
module type TABLE = sig
(* The type of automaton states. *)
type state
(* States are numbered. *)
val number: state -> int
(* The type of tokens. These can be thought of as real tokens, that is,
tokens returned by the lexer. They carry a semantic value. This type
does not include the [error] pseudo-token. *)
type token
(* The type of terminal symbols. These can be thought of as integer codes.
They do not carry a semantic value. This type does include the [error]
pseudo-token. *)
type terminal
(* The type of nonterminal symbols. *)
type nonterminal
(* The type of semantic values. *)
type semantic_value
(* A token is conceptually a pair of a (non-[error]) terminal symbol and
a semantic value. The following two functions are the pair projections. *)
val token2terminal: token -> terminal
val token2value: token -> semantic_value
(* Even though the [error] pseudo-token is not a real token, it is a
terminal symbol. Furthermore, for regularity, it must have a semantic
value. *)
val error_terminal: terminal
val error_value: semantic_value
(* [foreach_terminal] allows iterating over all terminal symbols. *)
val foreach_terminal: (terminal -> 'a -> 'a) -> 'a -> 'a
(* The type of productions. *)
type production
val production_index: production -> int
val find_production: int -> production
(* If a state [s] has a default reduction on production [prod], then, upon
entering [s], the automaton should reduce [prod] without consulting the
lookahead token. The following function allows determining which states
have default reductions. *)
(* Instead of returning a value of a sum type -- either [DefRed prod], or
[NoDefRed] -- it accepts two continuations, and invokes just one of
them. This mechanism allows avoiding a memory allocation. *)
val default_reduction:
state ->
('env -> production -> 'answer) ->
('env -> 'answer) ->
'env -> 'answer
(* An LR automaton can normally take three kinds of actions: shift, reduce,
or fail. (Acceptance is a particular case of reduction: it consists in
reducing a start production.) *)
(* There are two variants of the shift action. [shift/discard s] instructs
the automaton to discard the current token, request a new one from the
lexer, and move to state [s]. [shift/nodiscard s] instructs it to move to
state [s] without requesting a new token. This instruction should be used
when [s] has a default reduction on [#]. See [CodeBackend.gettoken] for
details. *)
(* This is the automaton's action table. It maps a pair of a state and a
terminal symbol to an action. *)
(* Instead of returning a value of a sum type -- one of shift/discard,
shift/nodiscard, reduce, or fail -- this function accepts three
continuations, and invokes just one them. This mechanism allows avoiding
a memory allocation. *)
(* In summary, the parameters to [action] are as follows:
- the first two parameters, a state and a terminal symbol, are used to
look up the action table;
- the next parameter is the semantic value associated with the above
terminal symbol; it is not used, only passed along to the shift
continuation, as explained below;
- the shift continuation expects an environment; a flag that tells
whether to discard the current token; the terminal symbol that
is being shifted; its semantic value; and the target state of
the transition;
- the reduce continuation expects an environment and a production;
- the fail continuation expects an environment;
- the last parameter is the environment; it is not used, only passed
along to the selected continuation. *)
val action:
state ->
terminal ->
semantic_value ->
('env -> bool -> terminal -> semantic_value -> state -> 'answer) ->
('env -> production -> 'answer) ->
('env -> 'answer) ->
'env -> 'answer
(* This is the automaton's goto table. This table maps a pair of a state
and a nonterminal symbol to a new state. By extension, it also maps a
pair of a state and a production to a new state. *)
(* The function [goto_nt] can be applied to [s] and [nt] ONLY if the state
[s] has an outgoing transition labeled [nt]. Otherwise, its result is
undefined. Similarly, the call [goto_prod prod s] is permitted ONLY if
the state [s] has an outgoing transition labeled with the nonterminal
symbol [lhs prod]. The function [maybe_goto_nt] involves an additional
dynamic check and CAN be called even if there is no outgoing transition. *)
val goto_nt : state -> nonterminal -> state
val goto_prod: state -> production -> state
val maybe_goto_nt: state -> nonterminal -> state option
(* [is_start prod] tells whether the production [prod] is a start production. *)
val is_start: production -> bool
(* By convention, a semantic action is responsible for:
1. fetching whatever semantic values and positions it needs off the stack;
2. popping an appropriate number of cells off the stack, as dictated
by the length of the right-hand side of the production;
3. computing a new semantic value, as well as new start and end positions;
4. pushing a new stack cell, which contains the three values
computed in step 3;
5. returning the new stack computed in steps 2 and 4.
Point 1 is essentially forced upon us: if semantic values were fetched
off the stack by this interpreter, then the calling convention for
semantic actions would be variadic: not all semantic actions would have
the same number of arguments. The rest follows rather naturally. *)
(* Semantic actions are allowed to raise [Error]. *)
exception Error
type semantic_action =
(state, semantic_value, token) env -> (state, semantic_value) stack
val semantic_action: production -> semantic_action
(* [may_reduce state prod] tests whether the state [state] is capable of
reducing the production [prod]. This function is currently costly and
is not used by the core LR engine. It is used in the implementation
of certain functions, such as [force_reduction], which allow the engine
to be driven programmatically. *)
val may_reduce: state -> production -> bool
(* If the flag [log] is false, then the logging functions are not called.
If it is [true], then they are called. *)
val log : bool
(* The logging hooks required by the LR engine. *)
module Log : LOG
with type state := state
and type terminal := terminal
and type production := production
end
(* --------------------------------------------------------------------------- *)
(* This signature describes the monolithic (traditional) LR engine. *)
(* In this interface, the parser controls the lexer. *)
module type MONOLITHIC_ENGINE = sig
type state
type token
type semantic_value
(* An entry point to the engine requires a start state, a lexer, and a lexing
buffer. It either succeeds and produces a semantic value, or fails and
raises [Error]. *)
exception Error
val entry:
(* see [IncrementalEngine] *)
state ->
(Lexing.lexbuf -> token) ->
Lexing.lexbuf ->
semantic_value
end
(* --------------------------------------------------------------------------- *)
(* The following signatures describe the incremental LR engine. *)
(* First, see [INCREMENTAL_ENGINE] in the file [IncrementalEngine.ml]. *)
(* The [start] function is set apart because we do not wish to publish
it as part of the generated [parser.mli] file. Instead, the table
back-end will publish specialized versions of it, with a suitable
type cast. *)
module type INCREMENTAL_ENGINE_START = sig
(* [start] is an entry point. It requires a start state and a start position
and begins the parsing process. If the lexer is based on an OCaml lexing
buffer, the start position should be [lexbuf.lex_curr_p]. [start] produces
a checkpoint, which usually will be an [InputNeeded] checkpoint. (It could
be [Accepted] if this starting state accepts only the empty word. It could
be [Rejected] if this starting state accepts no word at all.) It does not
raise any exception. *)
(* [start s pos] should really produce a checkpoint of type ['a checkpoint],
for a fixed ['a] that depends on the state [s]. We cannot express this, so
we use [semantic_value checkpoint], which is safe. The table back-end uses
[Obj.magic] to produce safe specialized versions of [start]. *)
type state
type semantic_value
type 'a checkpoint
val start:
state ->            (* the start state *)
Lexing.position ->  (* the start position, e.g. [lexbuf.lex_curr_p] *)
semantic_value checkpoint
end
(* --------------------------------------------------------------------------- *)
(* This signature describes the LR engine, which combines the monolithic
and incremental interfaces. *)
module type ENGINE = sig
include MONOLITHIC_ENGINE
include IncrementalEngine.INCREMENTAL_ENGINE
with type token := token
and type 'a lr1state = state (* useful for us; hidden from the end user *)
include INCREMENTAL_ENGINE_START
with type state := state
and type semantic_value := semantic_value
and type 'a checkpoint := 'a checkpoint
end
end
module Engine : sig
(******************************************************************************)
(* *)
(*                                   Menhir                                   *)
(* *)
(*  Copyright Inria. All rights reserved. This file is distributed under     *)
(*  the terms of the GNU Library General Public License version 2, with a    *)
(* special exception on linking, as described in the file LICENSE. *)
(* *)
(******************************************************************************)
open EngineTypes
(* The LR parsing engine. *)
module Make (T : TABLE)
: ENGINE
with type state = T.state
and type token = T.token
and type semantic_value = T.semantic_value
and type production = T.production
and type 'a env = (T.state, T.semantic_value, T.token) EngineTypes.env
(* We would prefer not to expose the definition of the type [env].
However, it must be exposed because some of the code in the
inspection API needs access to the engine's internals; see
[InspectionTableInterpreter]. Everything would be simpler if
--inspection was always ON, but that would lead to bigger parse
tables for everybody. *)
end
module ErrorReports : sig
(******************************************************************************)
(* *)
(*                                   Menhir                                   *)
(* *)
(*  Copyright Inria. All rights reserved. This file is distributed under     *)
(*  the terms of the GNU Library General Public License version 2, with a    *)
(* special exception on linking, as described in the file LICENSE. *)
(* *)
(******************************************************************************)
(* -------------------------------------------------------------------------- *)
(* The following functions help keep track of the start and end positions of
the last two tokens in a two-place buffer. This is used to nicely display
where a syntax error took place. *)
type 'a buffer
(* [wrap lexer] returns a pair of a new (initially empty) buffer and a lexer
which internally relies on [lexer] and updates [buffer] on the fly whenever
a token is demanded. *)
(* The type of the buffer is [(position * position) buffer], which means that
it stores two pairs of positions, which are the start and end positions of
the last two tokens. *)
open Lexing
val wrap:
(lexbuf -> 'token) ->
(position * position) buffer * (lexbuf -> 'token)
val wrap_supplier:
(unit -> 'token * position * position) ->
(position * position) buffer * (unit -> 'token * position * position)
(* [show f buffer] prints the contents of the buffer, producing a string that
is typically of the form "after '%s' and before '%s'". The function [f] is
used to print an element. The buffer MUST be nonempty. *)
val show: ('a -> string) -> 'a buffer -> string
(* [last buffer] returns the last element of the buffer. The buffer MUST be
nonempty. *)
val last: 'a buffer -> 'a
(* -------------------------------------------------------------------------- *)
(* [extract text (pos1, pos2)] extracts the sub-string of [text] delimited
by the positions [pos1] and [pos2]. *)
val extract: string -> position * position -> string
(* [sanitize text] eliminates any special characters from the text [text].
A special character is a character whose ASCII code is less than 32.
Every special character is replaced with a single space character. *)
val sanitize: string -> string
(* [compress text] replaces every run of at least one whitespace character
with exactly one space character. *)
val compress: string -> string
(* [shorten k text] limits the length of [text] to [2k+3] characters. If the
text is too long, a fragment in the middle is replaced with an ellipsis. *)
val shorten: int -> string -> string
(* [expand f text] searches [text] for occurrences of [$k], where [k]
is a nonnegative integer literal, and replaces each such occurrence
with the string [f k]. *)
val expand: (int -> string) -> string -> string
end
module LexerUtil : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
open Lexing
(* [init filename lexbuf] initializes the lexing buffer [lexbuf] so
   that the positions that are subsequently read from it refer to the
   file [filename]. It returns [lexbuf]. *)
val init: string -> lexbuf -> lexbuf
(* [read filename] reads the entire contents of the file [filename] and
   returns a pair of this content (a string) and a lexing buffer that
   has been initialized, based on this string. *)
val read: string -> string * lexbuf
(* [newline lexbuf] increments the line counter stored within [lexbuf]. It
   should be invoked by the lexer itself every time a newline character is
   consumed. This allows maintaining a current line number in [lexbuf]. *)
val newline: lexbuf -> unit
(* [range (startpos, endpos)] prints a textual description of the range
   delimited by the start and end positions [startpos] and [endpos].
   This description is one line long and ends in a newline character.
   This description mentions the file name, the line number, and a range
   of characters on this line. The line number is correct only if [newline]
   has been correctly used, as described above. *)
val range: position * position -> string
end
module Printers : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* This module is part of MenhirLib. *)
module Make
  (I : IncrementalEngine.EVERYTHING)
  (User : sig
(* [print s] is supposed to send the string [s] to some output channel. *)
val print: string -> unit
(* [print_symbol s] is supposed to print a representation of the symbol [s]. *)
val print_symbol: I.xsymbol -> unit
(* [print_element e] is supposed to print a representation of the element [e].
   This function is optional; if it is not provided, [print_element_as_symbol]
   (defined below) is used instead. *)
val print_element: (I.element -> unit) option
end)
: sig
open I
(* Printing a list of symbols. *)
val print_symbols: xsymbol list -> unit
(* Printing an element as a symbol. This prints just the symbol
   that this element represents; nothing more. *)
val print_element_as_symbol: element -> unit
(* Printing a stack as a list of elements. This function needs an element
   printer. It uses [print_element] if provided by the user; otherwise
   it uses [print_element_as_symbol]. (Ending with a newline.) *)
val print_stack: 'a env -> unit
(* Printing an item. (Ending with a newline.) *)
val print_item: item -> unit
(* Printing a production. (Ending with a newline.) *)
val print_production: production -> unit
(* Printing the current LR(1) state. The current state is first displayed
   as a number; then the list of its LR(0) items is printed. (Ending with
   a newline.) *)
val print_current_state: 'a env -> unit
(* Printing a summary of the stack and current state. This function just
   calls [print_stack] and [print_current_state] in succession. *)
val print_env: 'a env -> unit
end
end
module InfiniteArray : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(** This module implements infinite arrays. **)
type 'a t
(** [make x] creates an infinite array, where every slot contains [x]. **)
val make: 'a -> 'a t
(** [get a i] returns the element contained at offset [i] in the array [a].
   Slots are numbered 0 and up. **)
val get: 'a t -> int -> 'a
(** [set a i x] sets the element contained at offset [i] in the array
   [a] to [x]. Slots are numbered 0 and up. **)
val set: 'a t -> int -> 'a -> unit
(** [extent a] is the length of an initial segment of the array [a]
   that is sufficiently large to contain all [set] operations ever
   performed. In other words, all elements beyond that segment have
   the default value. *)
val extent: 'a t -> int
(** [domain a] is a fresh copy of an initial segment of the array [a]
   whose length is [extent a]. *)
val domain: 'a t -> 'a array
end
module PackedIntArray : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* A packed integer array is represented as a pair of an integer [k] and
   a string [s]. The integer [k] is the number of bits per integer that we
   use. The string [s] is just an array of bits, which is read in 8-bit
   chunks. *)
(* The ocaml programming language treats string literals and array literals
   in slightly different ways: the former are statically allocated, while
   the latter are dynamically allocated. (This is rather arbitrary.) In the
   context of Menhir's table-based back-end, where compact, immutable
   integer arrays are needed, ocaml strings are preferable to ocaml arrays. *)
type t =
int * string
(* [pack a] turns an array of integers into a packed integer array. *)
(* Because the sign bit is the most significant bit, the magnitude of
   any negative number is the word size. In other words, [pack] does
   not achieve any space savings as soon as [a] contains any negative
   numbers, even if they are ``small''. *)
val pack: int array -> t
(* [get t i] returns the integer stored in the packed array [t] at index [i]. *)
(* Together, [pack] and [get] satisfy the following property: if the index [i]
   is within bounds, then [get (pack a) i] equals [a.(i)]. *)
val get: t -> int -> int
(* [get1 t i] returns the integer stored in the packed array [t] at index [i].
   It assumes (and does not check) that the array's bit width is [1]. The
   parameter [t] is just a string. *)
val get1: string -> int -> int
(* [unflatten1 (n, data) i j] accesses the two-dimensional bitmap
   represented by [(n, data)] at indices [i] and [j]. The integer
   [n] is the width of the bitmap; the string [data] is the second
   component of the packed array obtained by encoding the table as
   a one-dimensional array. *)
val unflatten1: int * string -> int -> int -> int
end
module RowDisplacement : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* This module compresses a two-dimensional table, where some values
   are considered insignificant, via row displacement. *)
(* A compressed table is represented as a pair of arrays. The
   displacement array is an array of offsets into the data array. *)
type 'a table =
int array * (* displacement *)
'a array (* data *)
(* [compress equal insignificant dummy m n t] turns the two-dimensional table
   [t] into a compressed table. The parameter [equal] is equality of data
   values. The parameter [wildcard] tells which data values are insignificant,
   and can thus be overwritten with other values. The parameter [dummy] is
   used to fill holes in the data array. [m] and [n] are the integer
   dimensions of the table [t]. *)
val compress:
('a -> 'a -> bool) ->
('a -> bool) ->
'a ->
int -> int ->
'a array array ->
'a table
(* [get ct i j] returns the value found at indices [i] and [j] in the
   compressed table [ct]. This function call is permitted only if the
   value found at indices [i] and [j] in the original table is
   significant -- otherwise, it could fail abruptly. *)
(* Together, [compress] and [get] have the property that, if the value
   found at indices [i] and [j] in an uncompressed table [t] is
   significant, then [get (compress t) i j] is equal to that value. *)
val get:
'a table ->
int -> int ->
'a
(* [getget] is a variant of [get] which only requires read access,
   via accessors, to the two components of the table. *)
val getget:
('displacement -> int -> int) ->
('data -> int -> 'a) ->
'displacement * 'data ->
int -> int ->
'a
end
module LinearizedArray : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* An array of arrays (of possibly different lengths!) can be ``linearized'',
   i.e., encoded as a data array (by concatenating all of the little arrays)
   and an entry array (which contains offsets into the data array). *)
type 'a t =
(* data: *) 'a array *
(* entry: *) int array
(* [make a] turns the array of arrays [a] into a linearized array. *)
val make: 'a array array -> 'a t
(* [read la i j] reads the linearized array [la] at indices [i] and [j].
   Thus, [read (make a) i j] is equivalent to [a.(i).(j)]. *)
val read: 'a t -> int -> int -> 'a
(* [write la i j v] writes the value [v] into the linearized array [la]
   at indices [i] and [j]. *)
val write: 'a t -> int -> int -> 'a -> unit
(* [length la] is the number of rows of the array [la]. Thus, [length (make
   a)] is equivalent to [Array.length a]. *)
val length: 'a t -> int
(* [row_length la i] is the length of the row at index [i] in the linearized
   array [la]. Thus, [row_length (make a) i] is equivalent to [Array.length
   a.(i)]. *)
val row_length: 'a t -> int -> int
(* [read_row la i] reads the row at index [i], producing a list. Thus,
   [read_row (make a) i] is equivalent to [Array.to_list a.(i)]. *)
val read_row: 'a t -> int -> 'a list
(* The following variants read the linearized array via accessors
   [get_data : int -> 'a] and [get_entry : int -> int]. *)
val row_length_via:
(* get_entry: *) (int -> int) ->
(* i: *) int ->
int
val read_via:
(* get_data: *) (int -> 'a) ->
(* get_entry: *) (int -> int) ->
(* i: *) int ->
(* j: *) int ->
'a
val read_row_via:
(* get_data: *) (int -> 'a) ->
(* get_entry: *) (int -> int) ->
(* i: *) int ->
'a list
end
module TableFormat : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* This signature defines the format of the parse tables. It is used as
   an argument to [TableInterpreter.Make]. *)
module type TABLES = sig
(* This is the parser's type of tokens. *)
type token
(* This maps a token to its internal (generation-time) integer code. *)
val token2terminal: token -> int
(* This is the integer code for the error pseudo-token. *)
val error_terminal: int
(* This maps a token to its semantic value. *)
val token2value: token -> Obj.t
(* Traditionally, an LR automaton is described by two tables, namely, an
   action table and a goto table. See, for instance, the Dragon book.
   The action table is a two-dimensional matrix that maps a state and a
   lookahead token to an action. An action is one of: shift to a certain
   state, reduce a certain production, accept, or fail.
   The goto table is a two-dimensional matrix that maps a state and a
   non-terminal symbol to either a state or undefined. By construction, this
   table is sparse: its undefined entries are never looked up. A compression
   technique is free to overlap them with other entries.
   In Menhir, things are slightly different. If a state has a default
   reduction on token [#], then that reduction must be performed without
   consulting the lookahead token. As a result, we must first determine
   whether that is the case, before we can obtain a lookahead token and use it
   as an index in the action table.
   Thus, Menhir's tables are as follows.
   A one-dimensional default reduction table maps a state to either ``no
   default reduction'' (encoded as: 0) or ``by default, reduce prod''
   (encoded as: 1 + prod). The action table is looked up only when there
   is no default reduction. *)
val default_reduction: PackedIntArray.t
(* Menhir follows Dencker, Dürre and Heuft, who point out that, although the
   action table is not sparse by nature (i.e., the error entries are
   significant), it can be made sparse by first factoring out a binary error
   matrix, then replacing the error entries in the action table with undefined
   entries. Thus:
   A two-dimensional error bitmap maps a state and a terminal to either
   ``fail'' (encoded as: 0) or ``do not fail'' (encoded as: 1). The action
   table, which is now sparse, is looked up only in the latter case. *)
(* The error bitmap is flattened into a one-dimensional table; its width is
   recorded so as to allow indexing. The table is then compressed via
   [PackedIntArray]. The bit width of the resulting packed array must be
   [1], so it is not explicitly recorded. *)
(* The error bitmap does not contain a column for the [#] pseudo-terminal.
   Thus, its width is [Terminal.n - 1]. We exploit the fact that the integer
   code assigned to [#] is greatest: the fact that the right-most column
   in the bitmap is missing does not affect the code for accessing it. *)
(* NOTE(review): the declaration of [error] was missing from this signature,
   leaving only a comment fragment behind; it is restored below as found in
   MenhirLib's [TableFormat]. *)
val error: int (* width of the bitmap *) * string (* second component of [PackedIntArray.t] *)
(* A two-dimensional action table maps a state and a terminal to one of
   ``shift to state s and discard the current token'' (encoded as: s | 10),
   ``shift to state s without discarding the current token'' (encoded as: s |
   11), or ``reduce prod'' (encoded as: prod | 01). *)
(* The action table is first compressed via [RowDisplacement], then packed
   via [PackedIntArray]. *)
(* Like the error bitmap, the action table does not contain a column for the
   [#] pseudo-terminal. *)
val action: PackedIntArray.t * PackedIntArray.t
(* A one-dimensional lhs table maps a production to its left-hand side (a
   non-terminal symbol). *)
val lhs: PackedIntArray.t
(* A two-dimensional goto table maps a state and a non-terminal symbol to
   either undefined (encoded as: 0) or a new state s (encoded as: 1 + s). *)
(* The goto table is first compressed via [RowDisplacement], then packed
   via [PackedIntArray]. *)
val goto: PackedIntArray.t * PackedIntArray.t
(* The number of start productions. A production [prod] is a start
   production if and only if [prod < start] holds. This is also the
   number of start symbols. A nonterminal symbol [nt] is a start
   symbol if and only if [nt < start] holds. *)
val start: int
(* A one-dimensional semantic action table maps productions to semantic
   actions. The calling convention for semantic actions is described in
   [EngineTypes]. This table contains ONLY NON-START PRODUCTIONS, so the
   indexing is off by [start]. Be careful. *)
val semantic_action: ((int, Obj.t, token) EngineTypes.env ->
(int, Obj.t) EngineTypes.stack) array
(* The parser defines its own [Error] exception. This exception can be
   raised by semantic actions and caught by the engine, and raised by the
   engine towards the final user. *)
exception Error
(* The parser indicates whether to generate a trace. Generating a
   trace requires two extra tables, which respectively map a
   terminal symbol and a production to a string. *)
val trace: (string array * string array) option
end
end
module InspectionTableFormat : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* This signature defines the format of the tables that are produced (in
   addition to the tables described in [TableFormat]) when the command line
   switch [--inspection] is enabled. It is used as an argument to
   [InspectionTableInterpreter.Make]. *)
module type TABLES = sig
(* The types of symbols. *)
include IncrementalEngine.SYMBOLS
(* The type ['a lr1state] describes an LR(1) state. The generated parser defines
   it internally as [int]. *)
type 'a lr1state
(* Some of the tables that follow use encodings of (terminal and
   nonterminal) symbols as integers. So, we need functions that
   map the integer encoding of a symbol to its algebraic encoding. *)
val terminal: int -> xsymbol
val nonterminal: int -> xsymbol
(* The left-hand side of every production already appears in the
   signature [TableFormat.TABLES], so we need not repeat it here. *)
(* The right-hand side of every production. This a linearized array
   of arrays of integers, whose [data] and [entry] components have
   been packed. The encoding of symbols as integers in described in
   [TableBackend]. *)
val rhs: PackedIntArray.t * PackedIntArray.t
(* A mapping of every (non-initial) state to its LR(0) core. *)
val lr0_core: PackedIntArray.t
(* A mapping of every LR(0) state to its set of LR(0) items. Each item is
   represented in its packed form (see [Item]) as an integer. Thus the
   mapping is an array of arrays of integers, which is linearized and
   packed, like [rhs]. *)
val lr0_items: PackedIntArray.t * PackedIntArray.t
(* A mapping of every LR(0) state to its incoming symbol, if it has one. *)
val lr0_incoming: PackedIntArray.t
(* A table that tells which non-terminal symbols are nullable. *)
val nullable: string
(* This is a packed int array of bit width 1. It can be read
   using [PackedIntArray.get1]. *)
(* A two-dimensional table, indexed by a nonterminal symbol and
   by a terminal symbol (other than [#]), encodes the FIRST sets. *)
(* NOTE(review): the declaration of [first] was missing from this signature,
   leaving only a comment fragment behind; it is restored below as found in
   MenhirLib's [InspectionTableFormat]. *)
val first: int (* width of the bitmap *) * string (* second component of [PackedIntArray.t] *)
end
end
module InspectionTableInterpreter : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* This functor is invoked inside the generated parser, in [--table] mode. It
   produces no code! It simply constructs the types [symbol] and [xsymbol] on
   top of the generated types [terminal] and [nonterminal]. *)
module Symbols (T : sig
type 'a terminal
type 'a nonterminal
end)
: IncrementalEngine.SYMBOLS
with type 'a terminal := 'a T.terminal
and type 'a nonterminal := 'a T.nonterminal
(* This functor is invoked inside the generated parser, in [--table] mode. It
   constructs the inspection API on top of the inspection tables described in
   [InspectionTableFormat]. *)
module Make
(TT : TableFormat.TABLES)
(IT : InspectionTableFormat.TABLES
with type 'a lr1state = int)
(ET : EngineTypes.TABLE
with type terminal = int
and type nonterminal = int
and type semantic_value = Obj.t)
(E : sig
type 'a env = (ET.state, ET.semantic_value, ET.token) EngineTypes.env
end)
: IncrementalEngine.INSPECTION
with type 'a terminal := 'a IT.terminal
and type 'a nonterminal := 'a IT.nonterminal
and type 'a lr1state := 'a IT.lr1state
and type production := int
and type 'a env := 'a E.env
end
module TableInterpreter : sig
(******************************************************************************)
(*                                                                            *)
(*                                   Menhir                                   *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under     *)
(*   the terms of the GNU Library General Public License version 2, with a    *)
(*   special exception on linking, as described in the file LICENSE.          *)
(*                                                                            *)
(******************************************************************************)
(* This module provides a thin decoding layer for the generated tables, thus
   providing an API that is suitable for use by [Engine.Make]. It is part of
   [MenhirLib]. *)
(* The exception [Error] is declared within the generated parser. This is
   preferable to pre-declaring it here, as it ensures that each parser gets
   its own, distinct [Error] exception. This is consistent with the code-based
   back-end. *)
(* This functor is invoked by the generated parser. *)
module MakeEngineTable
(T : TableFormat.TABLES)
: EngineTypes.TABLE
with type state = int
and type token = T.token
and type semantic_value = Obj.t
and type production = int
and type terminal = int
and type nonterminal = int
end
module StaticVersion : sig
(* This value is referenced by generated parsers so as to ensure, at link
   time, that the version of MenhirLib being linked matches the version
   (presumably 20220210, per the name) used when the parser was generated —
   TODO confirm against Menhir's version-check mechanism. *)
val require_20220210: unit
end
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/boot/menhir/menhirLib.mli | ocaml | ****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
This module offers general-purpose functions on lists and streams.
---------------------------------------------------------------------------
Lists.
[uniq cmp xs] assumes that the list [xs] is sorted according to the
ordering [cmp] and returns the list [xs] deprived of any duplicate
elements.
[weed cmp xs] returns the list [xs] deprived of any duplicate elements.
---------------------------------------------------------------------------
A stream is a list whose elements are produced on demand.
The length of a stream.
Folding over a stream.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
This traditional API is convenient when used with ocamllex, but
inelegant when used with other lexer generators.
This revised API is independent of any lexer generator. Here, the
parser only requires access to the lexer, and the lexer takes no
parameters. The tokens returned by the lexer may contain position
information.
---------------------------------------------------------------------------
---------------------------------------------------------------------------
Converting a revised parser back to a traditional parser.
---------------------------------------------------------------------------
Simplified versions of the above, where concrete triples are used.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
In this mode, the user controls the lexer, and the parser suspends
itself when it needs to read a new token.
The type ['a checkpoint] represents an intermediate or final state of the
parser. An intermediate checkpoint is a suspension: it records the parser's
current state, and allows parsing to be resumed. The parameter ['a] is
the type of the semantic value that will eventually be produced if the
parser succeeds.
[Accepted] and [Rejected] are final checkpoints. [Accepted] carries a
semantic value.
[AboutToReduce] is an intermediate checkpoint. It means that the parser is
about to perform a reduction step. It exposes the parser's current
state as well as the production that is about to be reduced.
[HandlingError] is an intermediate checkpoint. It means that the parser has
detected an error and is currently handling it, in several steps.
A value of type ['a env] represents a configuration of the automaton:
current state, stack, lookahead token, etc. The parameter ['a] is the
type of the semantic value that will eventually be produced if the parser
succeeds.
In normal operation, the parser works with checkpoints: see the functions
[offer] and [resume]. However, it is also possible to work directly with
environments (see the functions [pop], [force_reduction], and [feed]) and
to reconstruct a checkpoint out of an environment (see [input_needed]).
This is considered advanced functionality; its purpose is to allow error
recovery strategies to be programmed by the user.
[resume] allows the user to resume the parser after it has suspended
itself with a checkpoint of the form [AboutToReduce (env, prod)] or
[HandlingError env]. [resume] expects the old checkpoint and produces a
new checkpoint. It does not raise any exception.
A token supplier is a function of no arguments which delivers a new token
(together with its start and end positions) every time it is called.
A pair of a lexer and a lexing buffer can be easily turned into a
supplier.
[loop supplier checkpoint] begins parsing from [checkpoint], reading
tokens from [supplier]. It continues parsing until it reaches a
checkpoint of the form [Accepted v] or [Rejected]. In the former case, it
returns [v]. In the latter case, it raises the exception [Error].
The optional argument [strategy], whose default value is [Legacy],
is passed to [resume] and influences the error-handling strategy.
[shifts checkpoint] assumes that [checkpoint] has been obtained by
submitting a token to the parser. It runs the parser from [checkpoint],
through an arbitrary number of reductions, until the parser either
accepts this token (i.e., shifts) or rejects it (i.e., signals an error).
If the parser decides to shift, then [Some env] is returned, where [env]
is the parser's state just before shifting. Otherwise, [None] is
returned.
It is desirable that the semantic actions be side-effect free, or that
their side-effects be harmless (replayable).
The function [acceptable] allows testing, after an error has been
detected, which tokens would have been accepted at this point. It is
implemented using [shifts]. Its argument should be an [InputNeeded]
checkpoint.
This test causes some semantic actions to be run! The semantic actions
should be side-effect free, or their side-effects should be harmless.
The position [pos] is used as the start and end positions of the
hypothetical token, and may be picked up by the semantic actions. We
suggest using the position where the error was detected.
Productions are numbered.
[find_production i] requires the index [i] to be valid. Use with care.
An element is a pair of a non-initial state [s] and a semantic value [v]
associated with the incoming symbol of this state. The idea is, the value
[v] was pushed onto the stack just before the state [s] was entered. Thus,
for some type ['a], the state [s] has type ['a lr1state] and the value [v]
has type ['a]. In other words, the type [element] is an existential type.
The parser's stack is (or, more precisely, can be viewed as) a stream of
elements. The type [stream] is defined by the module [General].
DEPRECATED
DEPRECATED
[top env] returns the parser's top stack element. The state contained in
this stack element is the current state of the automaton. If the stack is
empty, [None] is returned. In that case, the current state of the
automaton must be an initial state.
[pop_many i env] pops [i] cells off the automaton's stack. This is done
via [i] successive invocations of [pop]. Thus, [pop_many 1] is [pop]. The
index [i] must be nonnegative. The time complexity is O(i).
[get i env] returns the parser's [i]-th stack element. The index [i] is
0-based: thus, [get 0] is [top]. If [i] is greater than or equal to the
number of elements in the stack, [None] is returned. The time complexity
is O(i).
[equal env1 env2] tells whether the parser configurations [env1] and
[env2] are equal in the sense that the automaton's current state is the
same in [env1] and [env2] and the stack is *physically* the same in
[env1] and [env2]. If [equal env1 env2] is [true], then the sequence of
the stack elements, as observed via [pop] and [top], must be the same in
[env1] and [env2]. Also, if [equal env1 env2] holds, then the checkpoints
[input_needed env1] and [input_needed env2] must be equivalent. The
function [equal] has time complexity O(1).
These are the start and end positions of the current lookahead token. If
invoked in an initial state, this function returns a pair of twice the
initial position.
When applied to an environment taken from a checkpoint of the form
[AboutToReduce (env, prod)], the function [env_has_default_reduction]
tells whether the reduction that is about to take place is a default
reduction.
[state_has_default_reduction s] tells whether the state [s] has a default
reduction. This includes the case where [s] is an accepting state.
[pop env] returns a new environment, where the parser's top stack cell
has been popped off. (If the stack is empty, [None] is returned.) This
amounts to pretending that the (terminal or nonterminal) symbol that
corresponds to this stack cell has not been read.
[force_reduction prod env] should be called only if in the state [env]
the parser is capable of reducing the production [prod]. If this
condition is satisfied, then this production is reduced, which means that
its semantic action is executed (this can have side effects!) and the
automaton makes a goto (nonterminal) transition. If this condition is not
satisfied, [Invalid_argument _] is raised.
This signature is a fragment of the inspection API that is made available
to the user when [--inspection] is used. This fragment contains type
definitions for symbols.
The type ['a terminal] represents a terminal symbol. The type ['a
nonterminal] represents a nonterminal symbol. In both cases, the index
['a] represents the type of the semantic values associated with this
symbol. The concrete definitions of these types are generated.
The type ['a symbol] represents a terminal or nonterminal symbol. It is
the disjoint union of the types ['a terminal] and ['a nonterminal].
The type [xsymbol] is an existentially quantified version of the type
['a symbol]. This type is useful in situations where the index ['a]
is not statically known.
This signature describes the inspection API that is made available to the
user when [--inspection] is used.
The types of symbols are described above.
An LR(0) item is a pair of a production [prod] and a valid index [i] into
this production. That is, if the length of [rhs prod] is [n], then [i] is
comprised between 0 and [n], inclusive.
Ordering functions.
[incoming_symbol s] is the incoming symbol of the state [s], that is,
the symbol that the parser must recognize before (has recognized when)
it enters the state [s]. This function gives access to the semantic
value [v] stored in a stack element [Element (s, v, _, _)]. Indeed,
by case analysis on the symbol [incoming_symbol s], one discovers the
type ['a] of the value [v].
[rhs prod] is the right-hand side of the production [prod]. This is
a (possibly empty) sequence of (terminal or nonterminal) symbols.
[nullable nt] tells whether the non-terminal symbol [nt] is nullable.
That is, it is true if and only if this symbol produces the empty
word [epsilon].
[foreach_terminal] enumerates the terminal symbols, including [error].
[foreach_terminal_but_error] enumerates the terminal symbols, excluding
[error].
The type [env] is meant to be the same as in [INCREMENTAL_ENGINE].
This signature combines the incremental API and the inspection API.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
This file defines several types and module types that are used in the
specification of module [Engine].
---------------------------------------------------------------------------
It would be nice if we could keep the structure of stacks and environments
hidden. However, stacks and environments must be accessible to semantic
actions, so the following data structure definitions must be public.
---------------------------------------------------------------------------
A stack is a linked list of cells. A sentinel cell -- which is its own
successor -- is used to mark the bottom of the stack. The sentinel cell
itself is not significant -- it contains dummy values.
The state that we should go back to if we pop this stack cell.
This convention means that the state contained in the top stack cell is
not the current state [env.current]. It also means that the state found
within the sentinel is a dummy -- it is never consulted. This convention
is the same as that adopted by the code-based back-end.
The semantic value associated with the chunk of input that this cell
represents.
The start and end positions of the chunk of input that this cell
represents.
The next cell down in the stack. If this is a self-pointer, then this
cell is the sentinel, and the stack is conceptually empty.
---------------------------------------------------------------------------
A parsing environment contains all of the parser's state (except for the
current program point).
The stack. In [CodeBackend], it is passed around on its own,
whereas, here, it is accessed via the environment.
The current state. In [CodeBackend], it is passed around on its
own, whereas, here, it is accessed via the environment.
---------------------------------------------------------------------------
A number of logging hooks are used to (optionally) emit logging messages.
The comments indicate the conventional messages that correspond
to these hooks in the code-based back-end; see [CodeBackend].
Shifting (<terminal>) to state <state>
Reducing a production should be logged either as a reduction
event (for regular productions) or as an acceptance event (for
start productions).
Reducing production <production> / Accepting
Lookahead token is now <terminal> (<pos>-<pos>)
Initiating error handling
Resuming error handling
Handling error in state <state>
---------------------------------------------------------------------------
The type of automaton states.
The type of tokens. These can be thought of as real tokens, that is,
tokens returned by the lexer. They carry a semantic value. This type
does not include the [error] pseudo-token.
The type of terminal symbols. These can be thought of as integer codes.
They do not carry a semantic value. This type does include the [error]
pseudo-token.
The type of nonterminal symbols.
The type of semantic values.
Even though the [error] pseudo-token is not a real token, it is a
terminal symbol. Furthermore, for regularity, it must have a semantic
value.
[foreach_terminal] allows iterating over all terminal symbols.
The type of productions.
If a state [s] has a default reduction on production [prod], then, upon
entering [s], the automaton should reduce [prod] without consulting the
lookahead token. The following function allows determining which states
have default reductions.
This is the automaton's action table. It maps a pair of a state and a
terminal symbol to an action.
This is the automaton's goto table. This table maps a pair of a state
and a nonterminal symbol to a new state. By extension, it also maps a
pair of a state and a production to a new state.
[is_start prod] tells whether the production [prod] is a start production.
If the flag [log] is false, then the logging functions are not called.
If it is [true], then they are called.
---------------------------------------------------------------------------
This signature describes the monolithic (traditional) LR engine.
In this interface, the parser controls the lexer.
An entry point to the engine requires a start state, a lexer, and a lexing
buffer. It either succeeds and produces a semantic value, or fails and
raises [Error].
---------------------------------------------------------------------------
The [start] function is set apart because we do not wish to publish
it as part of the generated [parser.mli] file. Instead, the table
back-end will publish specialized versions of it, with a suitable
type cast.
[start] is an entry point. It requires a start state and a start position
and begins the parsing process. If the lexer is based on an OCaml lexing
buffer, the start position should be [lexbuf.lex_curr_p]. [start] produces
a checkpoint, which usually will be an [InputNeeded] checkpoint. (It could
be [Accepted] if this starting state accepts only the empty word. It could
be [Rejected] if this starting state accepts no word at all.) It does not
raise any exception.
[start s pos] should really produce a checkpoint of type ['a checkpoint],
for a fixed ['a] that depends on the state [s]. We cannot express this, so
we use [semantic_value checkpoint], which is safe. The table back-end uses
[Obj.magic] to produce safe specialized versions of [start].
---------------------------------------------------------------------------
useful for us; hidden from the end user
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
The LR parsing engine.
We would prefer not to expose the definition of the type [env].
However, it must be exposed because some of the code in the
inspection API needs access to the engine's internals; see
[InspectionTableInterpreter]. Everything would be simpler if
--inspection was always ON, but that would lead to bigger parse
tables for everybody.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
--------------------------------------------------------------------------
[wrap lexer] returns a pair of a new (initially empty) buffer and a lexer
which internally relies on [lexer] and updates [buffer] on the fly whenever
a token is demanded.
[show f buffer] prints the contents of the buffer, producing a string that
is typically of the form "after '%s' and before '%s'". The function [f] is
used to print an element. The buffer MUST be nonempty.
[last buffer] returns the last element of the buffer. The buffer MUST be
nonempty.
--------------------------------------------------------------------------
[shorten k text] limits the length of [text] to [2k+3] characters. If the
text is too long, a fragment in the middle is replaced with an ellipsis.
[expand f text] searches [text] for occurrences of [$k], where [k]
is a nonnegative integer literal, and replaces each such occurrence
with the string [f k].
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
[init filename lexbuf] initializes the lexing buffer [lexbuf] so
that the positions that are subsequently read from it refer to the
file [filename]. It returns [lexbuf].
[read filename] reads the entire contents of the file [filename] and
returns a pair of this content (a string) and a lexing buffer that
has been initialized, based on this string.
[newline lexbuf] increments the line counter stored within [lexbuf]. It
should be invoked by the lexer itself every time a newline character is
consumed. This allows maintaining the current line number in [lexbuf].
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
[print s] is supposed to send the string [s] to some output channel.
[print_element e] is supposed to print a representation of the element [e].
This function is optional; if it is not provided, [print_element_as_symbol]
(defined below) is used instead.
Printing a list of symbols.
Printing an element as a symbol. This prints just the symbol
that this element represents; nothing more.
Printing a stack as a list of elements. This function needs an element
printer. It uses [print_element] if provided by the user; otherwise
it uses [print_element_as_symbol]. (Ending with a newline.)
Printing an item. (Ending with a newline.)
Printing a production. (Ending with a newline.)
Printing a summary of the stack and current state. This function just
calls [print_stack] and [print_current_state] in succession.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
* This module implements infinite arrays. *
* [make x] creates an infinite array, where every slot contains [x]. *
* [get a i] returns the element contained at offset [i] in the array [a].
Slots are numbered 0 and up. *
* [set a i x] sets the element contained at offset [i] in the array
[a] to [x]. Slots are numbered 0 and up. *
* [extent a] is the length of an initial segment of the array [a]
that is sufficiently large to contain all [set] operations ever
performed. In other words, all elements beyond that segment have
the default value.
* [domain a] is a fresh copy of an initial segment of the array [a]
whose length is [extent a].
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
[pack a] turns an array of integers into a packed integer array.
Because the sign bit is the most significant bit, the magnitude of
any negative number is the word size. In other words, [pack] does
not achieve any space savings as soon as [a] contains any negative
numbers, even if they are ``small''.
[get t i] returns the integer stored in the packed array [t] at index [i].
Together, [pack] and [get] satisfy the following property: if the index [i]
is within bounds, then [get (pack a) i] equals [a.(i)].
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
A compressed table is represented as a pair of arrays. The
displacement array is an array of offsets into the data array.
displacement
data
[get ct i j] returns the value found at indices [i] and [j] in the
compressed table [ct]. This function call is permitted only if the
value found at indices [i] and [j] in the original table is
significant -- otherwise, it could fail abruptly.
Together, [compress] and [get] have the property that, if the value
found at indices [i] and [j] in an uncompressed table [t] is
significant, then [get (compress t) i j] is equal to that value.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
An array of arrays (of possibly different lengths!) can be ``linearized'',
i.e., encoded as a data array (by concatenating all of the little arrays)
and an entry array (which contains offsets into the data array).
data:
entry:
[make a] turns the array of arrays [a] into a linearized array.
[read la i j] reads the linearized array [la] at indices [i] and [j].
Thus, [read (make a) i j] is equivalent to [a.(i).(j)].
[write la i j v] writes the value [v] into the linearized array [la]
at indices [i] and [j].
[length la] is the number of rows of the array [la]. Thus, [length (make
a)] is equivalent to [Array.length a].
[read_row la i] reads the row at index [i], producing a list. Thus,
[read_row (make a) i] is equivalent to [Array.to_list a.(i)].
The following variants read the linearized array via accessors
[get_data : int -> 'a] and [get_entry : int -> int].
get_entry:
i:
get_data:
get_entry:
i:
j:
get_data:
get_entry:
i:
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
This signature defines the format of the parse tables. It is used as
an argument to [TableInterpreter.Make].
This is the parser's type of tokens.
This maps a token to its internal (generation-time) integer code.
This is the integer code for the error pseudo-token.
This maps a token to its semantic value.
The error bitmap does not contain a column for the [#] pseudo-terminal.
Thus, its width is [Terminal.n - 1]. We exploit the fact that the integer
code assigned to [#] is greatest: the fact that the right-most column
in the bitmap is missing does not affect the code for accessing it.
Like the error bitmap, the action table does not contain a column for the
[#] pseudo-terminal.
The number of start productions. A production [prod] is a start
production if and only if [prod < start] holds. This is also the
number of start symbols. A nonterminal symbol [nt] is a start
symbol if and only if [nt < start] holds.
The parser defines its own [Error] exception. This exception can be
raised by semantic actions and caught by the engine, and raised by the
engine towards the final user.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
This signature defines the format of the tables that are produced (in
addition to the tables described in [TableFormat]) when the command line
switch [--inspection] is enabled. It is used as an argument to
[InspectionTableInterpreter.Make].
The types of symbols.
Some of the tables that follow use encodings of (terminal and
nonterminal) symbols as integers. So, we need functions that
map the integer encoding of a symbol to its algebraic encoding.
The left-hand side of every production already appears in the
signature [TableFormat.TABLES], so we need not repeat it here.
The right-hand side of every production. This a linearized array
of arrays of integers, whose [data] and [entry] components have
been packed. The encoding of symbols as integers is described in
[TableBackend].
A mapping of every (non-initial) state to its LR(0) core.
A mapping of every LR(0) state to its set of LR(0) items. Each item is
represented in its packed form (see [Item]) as an integer. Thus the
mapping is an array of arrays of integers, which is linearized and
packed, like [rhs].
A mapping of every LR(0) state to its incoming symbol, if it has one.
A table that tells which non-terminal symbols are nullable.
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
This functor is invoked inside the generated parser, in [--table] mode. It
produces no code! It simply constructs the types [symbol] and [xsymbol] on
top of the generated types [terminal] and [nonterminal].
This functor is invoked inside the generated parser, in [--table] mode. It
constructs the inspection API on top of the inspection tables described in
[InspectionTableFormat].
****************************************************************************
special exception on linking, as described in the file LICENSE.
****************************************************************************
The exception [Error] is declared within the generated parser. This is
preferable to pre-declaring it here, as it ensures that each parser gets
its own, distinct [Error] exception. This is consistent with the code-based
back-end.
This functor is invoked by the generated parser. | module General : sig
Menhir
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
As of 2017/03/31 , this module is DEPRECATED . It might be removed in
the future .
the future. *)
[ take n xs ] returns the [ n ] first elements of the list [ xs ] . It is
acceptable for the list [ xs ] to have length less than [ n ] , in
which case [ xs ] itself is returned .
acceptable for the list [xs] to have length less than [n], in
which case [xs] itself is returned. *)
val take: int -> 'a list -> 'a list
[ drop n xs ] returns the list [ xs ] , deprived of its [ n ] first elements .
It is acceptable for the list [ xs ] to have length less than [ n ] , in
which case an empty list is returned .
It is acceptable for the list [xs] to have length less than [n], in
which case an empty list is returned. *)
val drop: int -> 'a list -> 'a list
val uniq: ('a -> 'a -> int) -> 'a list -> 'a list
val weed: ('a -> 'a -> int) -> 'a list -> 'a list
type 'a stream =
'a head Lazy.t
and 'a head =
| Nil
| Cons of 'a * 'a stream
val length: 'a stream -> int
val foldr: ('a -> 'b -> 'b) -> 'a stream -> 'b -> 'b
end
module Convert : sig
Menhir
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
An ocamlyacc - style , or Menhir - style , parser requires access to
the lexer , which must be parameterized with a lexing buffer , and
to the lexing buffer itself , where it reads position information .
the lexer, which must be parameterized with a lexing buffer, and
to the lexing buffer itself, where it reads position information. *)
type ('token, 'semantic_value) traditional =
(Lexing.lexbuf -> 'token) -> Lexing.lexbuf -> 'semantic_value
type ('token, 'semantic_value) revised =
(unit -> 'token) -> 'semantic_value
Converting a traditional parser , produced by ocamlyacc or Menhir ,
into a revised parser .
into a revised parser. *)
A token of the revised lexer is essentially a triple of a token
of the traditional lexer ( or raw token ) , a start position , and
an end position . The three [ get ] functions are accessors .
of the traditional lexer (or raw token), a start position, and
an end position. The three [get] functions are accessors. *)
We do not require the type [ ' token ] to actually be a triple type .
This enables complex applications where it is a record type with
more than three fields . It also enables simple applications where
positions are of no interest , so [ ' token ] is just [ ' raw_token ]
and [ get_startp ] and [ get_endp ] return dummy positions .
This enables complex applications where it is a record type with
more than three fields. It also enables simple applications where
positions are of no interest, so ['token] is just ['raw_token]
and [get_startp] and [get_endp] return dummy positions. *)
val traditional2revised:
('token -> 'raw_token) ->
('token -> Lexing.position) ->
('token -> Lexing.position) ->
('raw_token, 'semantic_value) traditional ->
('token, 'semantic_value) revised
val revised2traditional:
('raw_token -> Lexing.position -> Lexing.position -> 'token) ->
('token, 'semantic_value) revised ->
('raw_token, 'semantic_value) traditional
module Simplified : sig
val traditional2revised:
('token, 'semantic_value) traditional ->
('token * Lexing.position * Lexing.position, 'semantic_value) revised
val revised2traditional:
('token * Lexing.position * Lexing.position, 'semantic_value) revised ->
('token, 'semantic_value) traditional
end
end
module IncrementalEngine : sig
Menhir
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
type position = Lexing.position
open General
This signature describes the incremental LR engine .
module type INCREMENTAL_ENGINE = sig
type token
A value of type [ production ] is ( an index for ) a production . The start
productions ( which do not exist in an \mly file , but are constructed by
Menhir internally ) are not part of this type .
productions (which do not exist in an \mly file, but are constructed by
Menhir internally) are not part of this type. *)
type production
[ InputNeeded ] is an intermediate checkpoint . It means that the parser wishes
to read one token before continuing .
to read one token before continuing. *)
[ Shifting ] is an intermediate checkpoint . It means that the parser is taking
a shift transition . It exposes the state of the parser before and after
the transition . The Boolean parameter tells whether the parser intends to
request a new token after this transition . ( It always does , except when
it is about to accept . )
a shift transition. It exposes the state of the parser before and after
the transition. The Boolean parameter tells whether the parser intends to
request a new token after this transition. (It always does, except when
it is about to accept.) *)
type 'a env
type 'a checkpoint = private
| InputNeeded of 'a env
| Shifting of 'a env * 'a env * bool
| AboutToReduce of 'a env * production
| HandlingError of 'a env
| Accepted of 'a
| Rejected
[ offer ] allows the user to resume the parser after it has suspended
itself with a checkpoint of the form [ InputNeeded env ] . [ offer ] expects
the old checkpoint as well as a new token and produces a new checkpoint .
It does not raise any exception .
itself with a checkpoint of the form [InputNeeded env]. [offer] expects
the old checkpoint as well as a new token and produces a new checkpoint.
It does not raise any exception. *)
val offer:
'a checkpoint ->
token * position * position ->
'a checkpoint
The optional argument [ strategy ] influences the manner in which [ resume ]
deals with checkpoints of the form [ ErrorHandling _ ] . Its default value
is [ ` Legacy ] . It can be briefly described as follows :
- If the [ error ] token is used only to report errors ( that is , if the
[ error ] token appears only at the end of a production , whose semantic
action raises an exception ) then the simplified strategy should be
preferred . ( This includes the case where the [ error ] token does not
appear at all in the grammar . )
- If the [ error ] token is used to recover after an error , or if
perfect backward compatibility is required , the legacy strategy
should be selected .
More details on these strategies appear in the file [ Engine.ml ] .
deals with checkpoints of the form [ErrorHandling _]. Its default value
is [`Legacy]. It can be briefly described as follows:
- If the [error] token is used only to report errors (that is, if the
[error] token appears only at the end of a production, whose semantic
action raises an exception) then the simplified strategy should be
preferred. (This includes the case where the [error] token does not
appear at all in the grammar.)
- If the [error] token is used to recover after an error, or if
perfect backward compatibility is required, the legacy strategy
should be selected.
More details on these strategies appear in the file [Engine.ml]. *)
type strategy =
[ `Legacy | `Simplified ]
val resume:
?strategy:strategy ->
'a checkpoint ->
'a checkpoint
type supplier =
unit -> token * position * position
val lexer_lexbuf_to_supplier:
(Lexing.lexbuf -> token) ->
Lexing.lexbuf ->
supplier
The functions [ offer ] and [ resume ] are sufficient to write a parser loop .
One can imagine many variations ( which is why we expose these functions
in the first place ! ) . Here , we expose a few variations of the main loop ,
ready for use .
One can imagine many variations (which is why we expose these functions
in the first place!). Here, we expose a few variations of the main loop,
ready for use. *)
val loop: ?strategy:strategy -> supplier -> 'a checkpoint -> 'a
[ loop_handle succeed fail supplier checkpoint ] begins parsing from
[ checkpoint ] , reading tokens from [ supplier ] . It continues parsing until
it reaches a checkpoint of the form [ Accepted v ] or [ HandlingError env ]
( or [ Rejected ] , but that should not happen , as [ HandlingError _ ] will be
observed first ) . In the former case , it calls [ succeed v ] . In the latter
case , it calls [ fail ] with this checkpoint . It can not raise [ Error ] .
This means that Menhir 's error - handling procedure does not get a chance
to run . For this reason , there is no [ strategy ] parameter . Instead , the
user can implement her own error handling code , in the [ fail ]
continuation .
[checkpoint], reading tokens from [supplier]. It continues parsing until
it reaches a checkpoint of the form [Accepted v] or [HandlingError env]
(or [Rejected], but that should not happen, as [HandlingError _] will be
observed first). In the former case, it calls [succeed v]. In the latter
case, it calls [fail] with this checkpoint. It cannot raise [Error].
This means that Menhir's error-handling procedure does not get a chance
to run. For this reason, there is no [strategy] parameter. Instead, the
user can implement her own error handling code, in the [fail]
continuation. *)
val loop_handle:
('a -> 'answer) ->
('a checkpoint -> 'answer) ->
supplier -> 'a checkpoint -> 'answer
[ loop_handle_undo ] is analogous to [ loop_handle ] , except it passes a pair
of checkpoints to the failure continuation .
The first ( and oldest ) checkpoint is the last [ InputNeeded ] checkpoint that
was encountered before the error was detected . The second ( and newest )
checkpoint is where the error was detected , as in [ loop_handle ] . Going back
to the first checkpoint can be thought of as undoing any reductions that
were performed after seeing the problematic token . ( These reductions must
be default reductions or spurious reductions . )
[ loop_handle_undo ] must initially be applied to an [ InputNeeded ] checkpoint .
The parser 's initial checkpoints satisfy this constraint .
of checkpoints to the failure continuation.
The first (and oldest) checkpoint is the last [InputNeeded] checkpoint that
was encountered before the error was detected. The second (and newest)
checkpoint is where the error was detected, as in [loop_handle]. Going back
to the first checkpoint can be thought of as undoing any reductions that
were performed after seeing the problematic token. (These reductions must
be default reductions or spurious reductions.)
[loop_handle_undo] must initially be applied to an [InputNeeded] checkpoint.
The parser's initial checkpoints satisfy this constraint. *)
val loop_handle_undo:
('a -> 'answer) ->
('a checkpoint -> 'a checkpoint -> 'answer) ->
supplier -> 'a checkpoint -> 'answer
val shifts: 'a checkpoint -> 'a env option
For completeness , one must undo any spurious reductions before carrying out
this test -- that is , one must apply [ acceptable ] to the FIRST checkpoint
that is passed by [ loop_handle_undo ] to its failure continuation .
this test -- that is, one must apply [acceptable] to the FIRST checkpoint
that is passed by [loop_handle_undo] to its failure continuation. *)
val acceptable: 'a checkpoint -> token -> position -> bool
The abstract type [ ' a lr1state ] describes the non - initial states of the
LR(1 ) automaton . The index [ ' a ] represents the type of the semantic value
associated with this state 's incoming symbol .
LR(1) automaton. The index ['a] represents the type of the semantic value
associated with this state's incoming symbol. *)
type 'a lr1state
The states of the LR(1 ) automaton are numbered ( from 0 and up ) .
val number: _ lr1state -> int
val production_index: production -> int
val find_production: int -> production
type element =
| Element: 'a lr1state * 'a * position * position -> element
As of 2017/03/31 , the types [ stream ] and [ stack ] and the function [ stack ]
are DEPRECATED . They might be removed in the future . An alternative way
of inspecting the stack is via the functions [ top ] and [ pop ] .
are DEPRECATED. They might be removed in the future. An alternative way
of inspecting the stack is via the functions [top] and [pop]. *)
element stream
This is the parser 's stack , a stream of elements . This stream is empty if
the parser is in an initial state ; otherwise , it is non - empty . The LR(1 )
automaton 's current state is the one found in the top element of the
stack .
the parser is in an initial state; otherwise, it is non-empty. The LR(1)
automaton's current state is the one found in the top element of the
stack. *)
val top: 'a env -> element option
val pop_many: int -> 'a env -> 'a env option
val get: int -> 'a env -> element option
[ env ] is ( the integer number of ) the automaton 's
current state . This works even if the automaton 's stack is empty , in
which case the current state is an initial state . This number can be
passed as an argument to a [ message ] function generated by [ menhir
--compile - errors ] .
current state. This works even if the automaton's stack is empty, in
which case the current state is an initial state. This number can be
passed as an argument to a [message] function generated by [menhir
--compile-errors]. *)
val current_state_number: 'a env -> int
val equal: 'a env -> 'a env -> bool
val positions: 'a env -> position * position
val env_has_default_reduction: 'a env -> bool
val state_has_default_reduction: _ lr1state -> bool
val pop: 'a env -> 'a env option
val force_reduction: production -> 'a env -> 'a env
[ input_needed env ] returns [ InputNeeded env ] . That is , out of an [ env ]
that might have been obtained via a series of calls to the functions
[ pop ] , [ force_reduction ] , [ feed ] , etc . , it produces a checkpoint , which
can be used to resume normal parsing , by supplying this checkpoint as an
argument to [ offer ] .
that might have been obtained via a series of calls to the functions
[pop], [force_reduction], [feed], etc., it produces a checkpoint, which
can be used to resume normal parsing, by supplying this checkpoint as an
argument to [offer]. *)
This function should be used with some care . It could " mess up the
lookahead " in the sense that it allows parsing to resume in an arbitrary
state [ s ] with an arbitrary lookahead symbol [ t ] , even though Menhir 's
reachability analysis ( menhir --list - errors ) might well think that it is
impossible to reach this particular configuration . If one is using
Menhir 's new error reporting facility , this could cause the parser to
reach an error state for which no error message has been prepared .
lookahead" in the sense that it allows parsing to resume in an arbitrary
state [s] with an arbitrary lookahead symbol [t], even though Menhir's
reachability analysis (menhir --list-errors) might well think that it is
impossible to reach this particular configuration. If one is using
Menhir's new error reporting facility, this could cause the parser to
reach an error state for which no error message has been prepared. *)
val input_needed: 'a env -> 'a checkpoint
end
module type SYMBOLS = sig
(* The type ['a terminal] describes terminal symbols; the index ['a] is the
   type of the semantic value carried by the symbol. ['a nonterminal] is
   analogous for nonterminal symbols. Both are abstract here; the generated
   parser defines them as GADTs. *)
type 'a terminal
type 'a nonterminal
(* A symbol is either a terminal or a nonterminal symbol. The GADT
   constructors [T] and [N] preserve the semantic-value index ['a]. *)
type 'a symbol =
| T : 'a terminal -> 'a symbol
| N : 'a nonterminal -> 'a symbol
(* An [xsymbol] is a symbol whose semantic-value type has been
   existentially quantified away, so that symbols of different index
   types can be stored in one homogeneous collection. *)
type xsymbol =
| X : 'a symbol -> xsymbol
end
module type INSPECTION = sig
  include SYMBOLS

  (* The type ['a lr1state] is meant to be the same as in [INCREMENTAL_ENGINE]. *)
  type 'a lr1state

  (* The type [production] is meant to be the same as in [INCREMENTAL_ENGINE].
     It represents a production of the grammar. A production can be examined
     via the functions [lhs] and [rhs] below. *)
  type production

  (* An LR(0) item is a pair of a production and an index into the
     production's right-hand side. *)
  type item =
      production * int

  (* Ordering functions over terminals, nonterminals, symbols, productions,
     and items. *)
  val compare_terminals: _ terminal -> _ terminal -> int
  val compare_nonterminals: _ nonterminal -> _ nonterminal -> int
  val compare_symbols: xsymbol -> xsymbol -> int
  val compare_productions: production -> production -> int
  val compare_items: item -> item -> int

  (* [incoming_symbol s] is the incoming symbol of the (non-initial)
     state [s]. *)
  val incoming_symbol: 'a lr1state -> 'a symbol

  (* [items s] is the set of the LR(0) items in the LR(0) core of the LR(1)
     state [s]. This set is not epsilon-closed. This set is presented as a
     list, in an arbitrary order. *)
  val items: _ lr1state -> item list

  (* [lhs prod] is the left-hand side of the production [prod]. This is
     always a non-terminal symbol. *)
  val lhs: production -> xsymbol

  (* [rhs prod] is the right-hand side of the production [prod], as a list
     of symbols. *)
  val rhs: production -> xsymbol list

  (* [nullable nt] tells whether the non-terminal symbol [nt] is nullable,
     that is, whether it produces the empty word. *)
  val nullable: _ nonterminal -> bool

  (* [first nt t] tells whether the FIRST set of the nonterminal symbol [nt]
     contains the terminal symbol [t]. That is, it is true if and only if
     [nt] produces a word that begins with [t]. *)
  val first: _ nonterminal -> _ terminal -> bool

  (* [xfirst] is analogous to [first], but expects a first argument of type
     [xsymbol] instead of [_ terminal]. *)
  val xfirst: xsymbol -> _ terminal -> bool

  (* Folds over the set of terminal symbols; [foreach_terminal_but_error]
     skips the [error] pseudo-token. *)
  val foreach_terminal: (xsymbol -> 'a -> 'a) -> 'a -> 'a
  val foreach_terminal_but_error: (xsymbol -> 'a -> 'a) -> 'a -> 'a

  (* The type ['a env] is meant to be the same as in [INCREMENTAL_ENGINE]. *)
  type 'a env

  (* [feed symbol startp semv endp env] causes the parser to consume the
     (terminal or nonterminal) symbol [symbol], accompanied with the semantic
     value [semv] and with the start and end positions [startp] and [endp].
     Thus, the automaton makes a transition, and reaches a new state. The
     stack grows by one cell. This operation is permitted only if the current
     state (as determined by [env]) has an outgoing transition labeled with
     [symbol]. Otherwise, [Invalid_argument _] is raised. *)
  val feed: 'a symbol -> position -> 'a -> position -> 'b env -> 'b env

end
(* [EVERYTHING] combines the incremental engine API with the inspection
   API. The destructive substitutions ([:=]) identify the types
   ['a lr1state], [production] and ['a env] that the two signatures have
   in common, so that a single module can satisfy both. *)
module type EVERYTHING = sig
include INCREMENTAL_ENGINE
include INSPECTION
with type 'a lr1state := 'a lr1state
with type production := production
with type 'a env := 'a env
end
end
module EngineTypes : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* This module defines the types and module types used in the
     specification of the LR engine ([Engine]). *)

  (* The parser's stack is a linked list of cells. *)
  type ('state, 'semantic_value) stack = {
    (* The state held in this cell. *)
    state: 'state;
    (* The semantic value associated with this cell's incoming symbol. *)
    semv: 'semantic_value;
    (* The start and end positions of the input fragment that this cell
       covers. *)
    startp: Lexing.position;
    endp: Lexing.position;
    (* The cell below this one. *)
    next: ('state, 'semantic_value) stack;
  }

  (* A parsing environment gathers the parser's complete state. *)
  type ('state, 'semantic_value, 'token) env = {
    (* If this flag is true, then the first component of [env.triple] should
       be ignored, as it has been logically overwritten with the [error]
       pseudo-token. *)
    error: bool;
    (* The last token that was obtained from the lexer, together with its start
       and end positions. Warning: before the first call to the lexer has taken
       place, a dummy (and possibly invalid) token is stored here. *)
    triple: 'token * Lexing.position * Lexing.position;
    (* The parser's stack. *)
    stack: ('state, 'semantic_value) stack;
    (* The current state of the automaton. *)
    current: 'state;
  }

  (* This signature describes the logging hooks used by the LR engine.
     The comment next to each hook suggests the message it emits,
     e.g. "State %d:". *)
  module type LOG = sig
    type state
    type terminal
    type production
    (* State %d: *)
    val state: state -> unit
    (* Shifting (<terminal>) to state <state> *)
    val shift: terminal -> state -> unit
    (* Reducing a production / Accepting *)
    val reduce_or_accept: production -> unit
    (* Lookahead token is now <terminal> (<startp>-<endp>) *)
    val lookahead_token: terminal -> Lexing.position -> Lexing.position -> unit
    (* Initiating error handling *)
    val initiating_error_handling: unit -> unit
    (* Resuming error handling *)
    val resuming_error_handling: unit -> unit
    (* Handling error in state <state> *)
    val handling_error: state -> unit
  end

  (* This signature describes the parameters that must be supplied to the LR
     engine. *)
  module type TABLE = sig

    (* The type of automaton states. *)
    type state

    (* States are numbered. *)
    val number: state -> int

    (* The types of tokens, terminal symbols, nonterminal symbols, and
       semantic values. *)
    type token
    type terminal
    type nonterminal
    type semantic_value

    (* A token is conceptually a pair of a (non-[error]) terminal symbol and
       a semantic value. The following two functions are the pair projections. *)
    val token2terminal: token -> terminal
    val token2value: token -> semantic_value

    (* The [error] pseudo-token and its semantic value. *)
    val error_terminal: terminal
    val error_value: semantic_value

    (* A fold over the set of terminal symbols. *)
    val foreach_terminal: (terminal -> 'a -> 'a) -> 'a -> 'a

    (* The type of productions; productions are numbered. *)
    type production
    val production_index: production -> int
    val find_production: int -> production

    (* [default_reduction state defred nodefred env] invokes [defred env prod]
       if [state] has a default reduction on production [prod], and
       [nodefred env] otherwise.
       Instead of returning a value of a sum type -- either [DefRed prod], or
       [NoDefRed] -- it accepts two continuations, and invokes just one of
       them. This mechanism allows avoiding a memory allocation. *)
    val default_reduction:
      state ->
      ('env -> production -> 'answer) ->
      ('env -> 'answer) ->
      'env -> 'answer

    (* An LR automaton can normally take three kinds of actions: shift, reduce,
       or fail. (Acceptance is a particular case of reduction: it consists in
       reducing a start production.)

       There are two variants of the shift action. [shift/discard s] instructs
       the automaton to discard the current token, request a new one from the
       lexer, and move to state [s]. [shift/nodiscard s] instructs it to move to
       state [s] without requesting a new token. This instruction should be used
       when [s] has a default reduction on [#]. See [CodeBackend.gettoken] for
       details.

       Instead of returning a value of a sum type -- one of shift/discard,
       shift/nodiscard, reduce, or fail -- this function accepts three
       continuations, and invokes just one of them. This mechanism allows
       avoiding a memory allocation.

       In summary, the parameters to [action] are as follows:
       - the first two parameters, a state and a terminal symbol, are used to
         look up the action table;
       - the next parameter is the semantic value associated with the above
         terminal symbol; it is not used, only passed along to the shift
         continuation, as explained below;
       - the shift continuation expects an environment; a flag that tells
         whether to discard the current token; the terminal symbol that
         is being shifted; its semantic value; and the target state of
         the transition;
       - the reduce continuation expects an environment and a production;
       - the fail continuation expects an environment;
       - the last parameter is the environment; it is not used, only passed
         along to the selected continuation. *)
    val action:
      state ->
      terminal ->
      semantic_value ->
      ('env -> bool -> terminal -> semantic_value -> state -> 'answer) ->
      ('env -> production -> 'answer) ->
      ('env -> 'answer) ->
      'env -> 'answer

    (* The function [goto_nt] can be applied to [s] and [nt] ONLY if the state
       [s] has an outgoing transition labeled [nt]. Otherwise, its result is
       undefined. Similarly, the call [goto_prod prod s] is permitted ONLY if
       the state [s] has an outgoing transition labeled with the nonterminal
       symbol [lhs prod]. The function [maybe_goto_nt] involves an additional
       dynamic check and CAN be called even if there is no outgoing transition. *)
    val goto_nt : state -> nonterminal -> state
    val goto_prod: state -> production -> state
    val maybe_goto_nt: state -> nonterminal -> state option

    (* [is_start prod] tells whether [prod] is a start production. *)
    val is_start: production -> bool

    (* By convention, a semantic action is responsible for:
       1. fetching whatever semantic values and positions it needs off the stack;
       2. popping an appropriate number of cells off the stack, as dictated
          by the length of the right-hand side of the production;
       3. computing a new semantic value, as well as new start and end positions;
       4. pushing a new stack cell, which contains the three values
          computed in step 3;
       5. returning the new stack computed in steps 2 and 4.
       Point 1 is essentially forced upon us: if semantic values were fetched
       off the stack by this interpreter, then the calling convention for
       semantic actions would be variadic: not all semantic actions would have
       the same number of arguments. The rest follows rather naturally. *)

    (* Semantic actions are allowed to raise [Error]. *)
    exception Error

    type semantic_action =
      (state, semantic_value, token) env -> (state, semantic_value) stack

    val semantic_action: production -> semantic_action

    (* [may_reduce state prod] tests whether the state [state] is capable of
       reducing the production [prod]. This function is currently costly and
       is not used by the core LR engine. It is used in the implementation
       of certain functions, such as [force_reduction], which allow the engine
       to be driven programmatically. *)
    val may_reduce: state -> production -> bool

    (* Whether the engine should produce a trace. *)
    val log : bool

    (* The logging hooks required by the LR engine. *)
    module Log : LOG
      with type state := state
      and type terminal := terminal
      and type production := production

  end

  (* This signature describes the monolithic (traditional, non-incremental)
     LR engine. *)
  module type MONOLITHIC_ENGINE = sig
    type state
    type token
    type semantic_value
    (* The monolithic API reports failure by raising [Error]. *)
    exception Error
    val entry:
      (* the start state; see [IncrementalEngine] *)
      state ->
      (Lexing.lexbuf -> token) ->
      Lexing.lexbuf ->
      semantic_value
  end

  (* The following signatures describe the incremental LR engine.
     First, see [INCREMENTAL_ENGINE] in the file [IncrementalEngine.ml].
     The [start] function is set apart because it does not need to be
     repeated by the code back-end. *)
  module type INCREMENTAL_ENGINE_START = sig
    type state
    type semantic_value
    type 'a checkpoint
    (* [start s pos] builds the initial checkpoint for the start state [s],
       with initial position [pos]. *)
    val start:
      state ->
      Lexing.position ->
      semantic_value checkpoint
  end

  (* This signature describes the LR engine, which combines the monolithic
     and incremental interfaces. *)
  module type ENGINE = sig
    include MONOLITHIC_ENGINE
    include IncrementalEngine.INCREMENTAL_ENGINE
      with type token := token
    include INCREMENTAL_ENGINE_START
      with type state := state
      and type semantic_value := semantic_value
      and type 'a checkpoint := 'a checkpoint
  end

end
module Engine : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  open EngineTypes

  (* The LR parsing engine: [Make] builds an engine out of a set of parse
     tables described by the signature [TABLE]. *)
  module Make (T : TABLE)
  : ENGINE
    with type state = T.state
     and type token = T.token
     and type semantic_value = T.semantic_value
     and type production = T.production
     and type 'a env = (T.state, T.semantic_value, T.token) EngineTypes.env

end
module ErrorReports : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* The following functions help keep track of the start and end positions of
     the last two tokens in a two-place buffer. This is used to nicely display
     where a syntax error took place. *)

  type 'a buffer

  (* [wrap lexer] returns a pair of a new (empty) buffer and a wrapped lexer;
     [wrap_supplier supplier] does the same for a token supplier. The wrapped
     lexer/supplier records the positions of the tokens it produces.
     The type of the buffer is [(position * position) buffer], which means that
     it stores two pairs of positions, which are the start and end positions of
     the last two tokens. *)
  open Lexing
  val wrap:
    (lexbuf -> 'token) ->
    (position * position) buffer * (lexbuf -> 'token)
  val wrap_supplier:
    (unit -> 'token * position * position) ->
    (position * position) buffer * (unit -> 'token * position * position)

  (* [show f buffer] renders the contents of the buffer as a string, using
     [f] to print each element. NOTE(review): assumes a nonempty buffer --
     confirm against the implementation. *)
  val show: ('a -> string) -> 'a buffer -> string

  (* [last buffer] returns the most recent element stored in the buffer. *)
  val last: 'a buffer -> 'a

  (* [extract text (pos1, pos2)] extracts the sub-string of [text] delimited
     by the positions [pos1] and [pos2]. *)
  val extract: string -> position * position -> string

  (* [sanitize text] eliminates any special characters from the text [text].
     A special character is a character whose ASCII code is less than 32.
     Every special character is replaced with a single space character. *)
  val sanitize: string -> string

  (* [compress text] replaces every run of at least one whitespace character
     with exactly one space character. *)
  val compress: string -> string

  (* [shorten k text] truncates [text], if needed, so that its length does
     not exceed roughly [2k] characters, replacing the middle with an
     ellipsis. *)
  val shorten: int -> string -> string

  (* [expand f text] replaces occurrences of [$k] (for a literal integer [k])
     in [text] with [f k]. *)
  val expand: (int -> string) -> string -> string

end
module LexerUtil : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  open Lexing

  (* [init filename lexbuf] initializes the lexing buffer [lexbuf] so that
     the positions subsequently read from it refer to the file [filename].
     It returns [lexbuf]. *)
  val init: string -> lexbuf -> lexbuf

  (* [read filename] reads the entire contents of the file [filename] and
     returns this content (a string) together with a lexing buffer that has
     been initialized via [init]. *)
  val read: string -> string * lexbuf

  (* [newline lexbuf] records the fact that a newline character has just been
     consumed, updating the line count stored in [lexbuf]. The lexer should
     invoke it at every newline. *)
  val newline: lexbuf -> unit

  (* [range (startpos, endpos)] prints a textual description of the range
     delimited by the start and end positions [startpos] and [endpos].
     This description is one line long and ends in a newline character.
     This description mentions the file name, the line number, and a range
     of characters on this line. The line number is correct only if [newline]
     has been correctly used, as described above. *)
  val range: position * position -> string

end
module Printers : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* This module is part of MenhirLib. It offers debugging printers for
     symbols, items, productions, stacks and environments, parameterized
     over a user-supplied output function. *)
  module Make
    (I : IncrementalEngine.EVERYTHING)
    (User : sig

      (* [print s] is supposed to send the string [s] to some output channel. *)
      val print: string -> unit

      (* [print_symbol s] is supposed to print a representation of the symbol [s]. *)
      val print_symbol: I.xsymbol -> unit

      (* [print_element], if provided, prints a stack element; if absent,
         [print_element_as_symbol] (below) is used instead. *)
      val print_element: (I.element -> unit) option

    end)
  : sig

    open I

    (* Printing a list of symbols. *)
    val print_symbols: xsymbol list -> unit

    (* Printing a stack element simply as its incoming symbol. *)
    val print_element_as_symbol: element -> unit

    (* Printing the parser's stack. *)
    val print_stack: 'a env -> unit

    (* Printing an item. *)
    val print_item: item -> unit

    (* Printing a production. *)
    val print_production: production -> unit

    (* Printing the current LR(1) state. The current state is first displayed
       as a number; then the list of its LR(0) items is printed. (Ending with
       a newline.) *)
    val print_current_state: 'a env -> unit

    (* Printing a summary of the environment [env]. *)
    val print_env: 'a env -> unit

  end

end
module InfiniteArray : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* This module implements infinite arrays, that is, arrays that grow
     transparently upon demand. *)
  type 'a t

  (* [make x] creates an infinite array where every slot contains [x]. *)
  val make: 'a -> 'a t

  (* [get a i] returns the element at offset [i] in the array [a]. *)
  val get: 'a t -> int -> 'a

  (* [set a i x] writes [x] at offset [i] in the array [a], growing the
     array if needed. *)
  val set: 'a t -> int -> 'a -> unit

  (* [extent a] is the length of the initialized prefix of [a]. *)
  val extent: 'a t -> int

  (* [domain a] is a fresh copy of the initialized prefix of [a], of length
     [extent a]. *)
  val domain: 'a t -> 'a array

end
module PackedIntArray : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* A packed integer array is represented as a pair of an integer [k] and
     a string [s]. The integer [k] is the number of bits per integer that we
     use. The string [s] is just an array of bits, which is read in 8-bit
     chunks. *)

  (* The ocaml programming language treats string literals and array literals
     in slightly different ways: the former are statically allocated, while
     the latter are dynamically allocated. (This is rather arbitrary.) In the
     context of Menhir's table-based back-end, where compact, immutable
     integer arrays are needed, ocaml strings are preferable to ocaml arrays. *)

  type t =
    int * string

  (* [pack a] encodes the integer array [a] as a packed integer array. *)
  val pack: int array -> t

  (* [get t i] returns the integer stored in the packed array [t] at
     index [i]. *)
  val get: t -> int -> int

  (* [get1 t i] returns the integer stored in the packed array [t] at index [i].
     It assumes (and does not check) that the array's bit width is [1]. The
     parameter [t] is just a string. *)
  val get1: string -> int -> int

  (* [unflatten1 (n, data) i j] accesses the two-dimensional bitmap
     represented by [(n, data)] at indices [i] and [j]. The integer
     [n] is the width of the bitmap; the string [data] is the second
     component of the packed array obtained by encoding the table as
     a one-dimensional array. *)
  val unflatten1: int * string -> int -> int -> int

end
module RowDisplacement : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* This module compresses a two-dimensional table, where some values
     are considered insignificant, via row displacement. *)

  (* A compressed table is a pair of a displacement array and a data array. *)
  type 'a table =
    int array * (* displacement *)
    'a array    (* data *)

  (* [compress equal insignificant dummy m n t] turns the two-dimensional table
     [t] into a compressed table. The parameter [equal] is equality of data
     values. The parameter [insignificant] tells which data values are
     insignificant, and can thus be overwritten with other values. The
     parameter [dummy] is used to fill holes in the data array. [m] and [n]
     are the integer dimensions of the table [t]. *)
  val compress:
    ('a -> 'a -> bool) ->
    ('a -> bool) ->
    'a ->
    int -> int ->
    'a array array ->
    'a table

  (* [get ct i j] reads the compressed table [ct] at indices [i] and [j].
     NOTE(review): presumably this is meaningful only when the original
     entry at [(i, j)] was significant -- confirm against the
     implementation. *)
  val get:
    'a table ->
    int -> int ->
    'a

  (* [getget] is a variant of [get] which only requires read access,
     via accessors, to the two components of the table. *)
  val getget:
    ('displacement -> int -> int) ->
    ('data -> int -> 'a) ->
    'displacement * 'data ->
    int -> int ->
    'a

end
module LinearizedArray : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* An array of arrays (of possibly different lengths) can be linearized:
     encoded as a data array (the concatenation of the rows) together with
     an entry array (offsets of the rows within the data array). *)
  type 'a t =
    (* data: *)  'a array *
    (* entry: *) int array

  (* [make a] turns the array of arrays [a] into a linearized array. *)
  val make: 'a array array -> 'a t

  (* [read la i j] reads the linearized array [la] at indices [i] and [j].
     Thus, [read (make a) i j] is equivalent to [a.(i).(j)]. *)
  val read: 'a t -> int -> int -> 'a

  (* [write la i j v] writes [v] into the linearized array [la] at indices
     [i] and [j]. *)
  val write: 'a t -> int -> int -> 'a -> unit

  (* [length la] is the number of rows. *)
  val length: 'a t -> int

  (* [row_length la i] is the length of the row at index [i] in the linearized
     array [la]. Thus, [row_length (make a) i] is equivalent to [Array.length
     a.(i)]. *)
  val row_length: 'a t -> int -> int

  (* [read_row la i] reads the entire row at index [i], as a list. *)
  val read_row: 'a t -> int -> 'a list

  (* The following variants access the linearized array via user-supplied
     accessor functions over its [data] and [entry] components. *)
  val row_length_via:
    (* get_entry: *) (int -> int) ->
    (* i: *)         int ->
                     int

  val read_via:
    (* get_data: *)  (int -> 'a) ->
    (* get_entry: *) (int -> int) ->
    (* i: *)         int ->
    (* j: *)         int ->
                     'a

  val read_row_via:
    (* get_data: *)  (int -> 'a) ->
    (* get_entry: *) (int -> int) ->
    (* i: *)         int ->
                     'a list

end
module TableFormat : sig

  (* Menhir. Copyright Inria. All rights reserved. This file is distributed
     under the terms of the GNU Library General Public License version 2,
     with a special exception on linking, as described in the file LICENSE. *)

  (* This signature defines the format of the parse tables produced in
     [--table] mode. *)
  module type TABLES = sig

    (* The parser's type of tokens. *)
    type token

    (* Maps a token to its internal integer code. *)
    val token2terminal: token -> int

    (* The integer code of the [error] pseudo-token. *)
    val error_terminal: int

    (* Maps a token to its semantic value. *)
    val token2value: token -> Obj.t

    (* Traditionally, an LR automaton is described by two tables, namely, an
       action table and a goto table. See, for instance, the Dragon book.

       The action table is a two-dimensional matrix that maps a state and a
       lookahead token to an action. An action is one of: shift to a certain
       state, reduce a certain production, accept, or fail.

       The goto table is a two-dimensional matrix that maps a state and a
       non-terminal symbol to either a state or undefined. By construction, this
       table is sparse: its undefined entries are never looked up. A compression
       technique is free to overlap them with other entries.

       In Menhir, things are slightly different. If a state has a default
       reduction on token [#], then that reduction must be performed without
       consulting the lookahead token. As a result, we must first determine
       whether that is the case, before we can obtain a lookahead token and use it
       as an index in the action table.

       Thus, Menhir's tables are as follows.

       A one-dimensional default reduction table maps a state to either ``no
       default reduction'' (encoded as: 0) or ``by default, reduce prod''
       (encoded as: 1 + prod). The action table is looked up only when there
       is no default reduction. *)
    val default_reduction: PackedIntArray.t

    (* Menhir follows Dencker, Duerre and Heuft, who point out that, although the
       action table is not sparse by nature (i.e., the error entries are
       significant), it can be made sparse by first factoring out a binary error
       matrix, then replacing the error entries in the action table with undefined
       entries. Thus:

       A two-dimensional error bitmap maps a state and a terminal to either
       ``fail'' (encoded as: 0) or ``do not fail'' (encoded as: 1). The action
       table, which is now sparse, is looked up only in the latter case. *)

    (* The error bitmap is flattened into a one-dimensional table; its width is
       recorded so as to allow indexing. The table is then compressed via
       [PackedIntArray]. The bit width of the resulting packed array must be
       [1], so it is not explicitly recorded. *)
    val error: int (* width of the bitmap *) * string (* second component of [PackedIntArray.t] *)

    (* A two-dimensional action table maps a state and a terminal to one of
       ``shift to state s and discard the current token'' (encoded as: s | 10),
       ``shift to state s without discarding the current token'' (encoded as: s |
       11), or ``reduce prod'' (encoded as: prod | 01). *)

    (* The action table is first compressed via [RowDisplacement], then packed
       via [PackedIntArray]. *)
    val action: PackedIntArray.t * PackedIntArray.t

    (* A one-dimensional lhs table maps a production to its left-hand side (a
       non-terminal symbol). *)
    val lhs: PackedIntArray.t

    (* A two-dimensional goto table maps a state and a non-terminal symbol to
       either undefined (encoded as: 0) or a new state s (encoded as: 1 + s). *)

    (* The goto table is first compressed via [RowDisplacement], then packed
       via [PackedIntArray]. *)
    val goto: PackedIntArray.t * PackedIntArray.t

    (* The number of start productions. *)
    val start: int

    (* A one-dimensional semantic action table maps productions to semantic
       actions. The calling convention for semantic actions is described in
       [EngineTypes]. This table contains ONLY NON-START PRODUCTIONS, so the
       indexing is off by [start]. Be careful. *)
    val semantic_action: ((int, Obj.t, token) EngineTypes.env ->
                          (int, Obj.t) EngineTypes.stack) array

    (* The parser's exception, raised upon a syntax error in the monolithic
       API; semantic actions may raise it too. *)
    exception Error

    (* The parser indicates whether to generate a trace. Generating a
       trace requires two extra tables, which respectively map a
       terminal symbol and a production to a string. *)
    val trace: (string array * string array) option

  end

end
module InspectionTableFormat : sig
Menhir
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
module type TABLES = sig
include IncrementalEngine.SYMBOLS
  (* The type ['a lr1state] describes an LR(1) state. The generated parser defines
     it internally as [int]. *)
type 'a lr1state
val terminal: int -> xsymbol
val nonterminal: int -> xsymbol
val rhs: PackedIntArray.t * PackedIntArray.t
val lr0_core: PackedIntArray.t
val lr0_items: PackedIntArray.t * PackedIntArray.t
val lr0_incoming: PackedIntArray.t
val nullable: string
  (* This is a packed int array of bit width 1. It can be read
     using [PackedIntArray.get1]. *)
A two - table dimensional table , indexed by a nonterminal symbol and
by a terminal symbol ( other than [ # ] ) , encodes the FIRST sets .
by a terminal symbol (other than [#]), encodes the FIRST sets. *)
second component of [ PackedIntArray.t ]
end
end
module InspectionTableInterpreter : sig
Menhir
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
module Symbols (T : sig
type 'a terminal
type 'a nonterminal
end)
: IncrementalEngine.SYMBOLS
with type 'a terminal := 'a T.terminal
and type 'a nonterminal := 'a T.nonterminal
module Make
(TT : TableFormat.TABLES)
(IT : InspectionTableFormat.TABLES
with type 'a lr1state = int)
(ET : EngineTypes.TABLE
with type terminal = int
and type nonterminal = int
and type semantic_value = Obj.t)
(E : sig
type 'a env = (ET.state, ET.semantic_value, ET.token) EngineTypes.env
end)
: IncrementalEngine.INSPECTION
with type 'a terminal := 'a IT.terminal
and type 'a nonterminal := 'a IT.nonterminal
and type 'a lr1state := 'a IT.lr1state
and type production := int
and type 'a env := 'a E.env
end
module TableInterpreter : sig
Menhir
Copyright . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License version 2 , with a
This module provides a thin decoding layer for the generated tables , thus
providing an API that is suitable for use by [ Engine . Make ] . It is part of
[ MenhirLib ] .
providing an API that is suitable for use by [Engine.Make]. It is part of
[MenhirLib]. *)
module MakeEngineTable
(T : TableFormat.TABLES)
: EngineTypes.TABLE
with type state = int
and type token = T.token
and type semantic_value = Obj.t
and type production = int
and type terminal = int
and type nonterminal = int
end
module StaticVersion : sig
val require_20220210: unit
end
|
e2eff1b65913e90ac9fdab85299d4887516caaf35f4f906e31bc5f3674359fa3 | blockapps/merkle-patricia-db | Diff.hs | module Blockchain.Database.MerklePatricia.Diff (dbDiff, DiffOp(..)) where
import Blockchain.Database.MerklePatricia.Internal
import Blockchain.Database.MerklePatricia.NodeData
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Resource
import Data.Function
import qualified Data.NibbleString as N
-- Probably the entire MPDB system ought to be in this monad
-- | Reader transformer carrying the Merkle-Patricia database handle.
type MPReaderM a = ReaderT MPDB a
-- | One slot in a node's 17-way expansion: an inline node, a reference to
-- a stored node, a bare value, or nothing.
data MPChoice = Data NodeData | Ref NodeRef | Value Val | None deriving (Eq)
-- | Resolve a choice to concrete node data, dereferencing stored nodes
-- through the database handle held in the reader environment.
node :: MonadResource m=>MPChoice -> MPReaderM m NodeData
node (Data nodeData) = return nodeData
node (Ref nodeRef) = do
  deref <- asks getNodeData
  lift (deref nodeRef)
node _ = return EmptyNodeData
-- | Expand a node into a uniform 17-slot list: slot 0 holds the node's own
-- value (if any) and slots 1..16 the sixteen nibble branches. Every node
-- shape producing the same 17 slots lets diffChoices zip them pairwise.
simplify :: NodeData -> [MPChoice]
-- Restored clause: 'node' yields EmptyNodeData for None/Value choices, so
-- 'enter' would die with a pattern-match failure without it.
simplify EmptyNodeData = replicate 17 None -- 17: not a mistake
simplify FullNodeData{ choices = ch, nodeVal = v } =
  maybe None Value v : map Ref ch
simplify n@ShortcutNodeData{ nextNibbleString = k, nextVal = v } = None : delta h
  where
    -- Place x at branch m among 16 otherwise-empty branches.
    delta m =
      let pre = replicate m None
          post = replicate (16 - m - 1) None
      in pre ++ [x] ++ post
    -- A fully-consumed key exposes the target; otherwise shorten the key.
    x | N.null t = either Ref Value v
      | otherwise = Data n{ nextNibbleString = t }
    (h,t) = (fromIntegral $ N.head k, N.tail k)
-- | Dereference a choice and expand it into its 17 child slots.
enter :: MonadResource m=>MPChoice -> MPReaderM m [MPChoice]
enter choice = fmap simplify (node choice)
-- | One edit in the difference between two tries, keyed by the full nibble
-- path from the root to the affected value.
data DiffOp =
  Create {key::[N.Nibble], val::Val} |
  Update {key::[N.Nibble], oldVal::Val, newVal::Val} |
  Delete {key::[N.Nibble], oldVal::Val}
  deriving (Show, Eq)
-- | Diff a single slot pair. @n@ is the nibble selecting this slot
-- (Nothing for the value slot); it is prepended to the key of every op
-- produced by recursing deeper.
diffChoice :: MonadResource m=>Maybe N.Nibble -> MPChoice -> MPChoice -> MPReaderM m [DiffOp]
diffChoice n ch1 ch2 = case (ch1, ch2) of
  (None, Value v) -> return [Create sn v]
  (Value v, None) -> return [Delete sn v]
  (Value v1, Value v2)
    | v1 /= v2 -> return [Update sn v1 v2]
  -- Equal slots (including equal refs) contribute nothing; anything else
  -- needs a deeper comparison.
  _ | ch1 == ch2 -> return []
    | otherwise -> pRecurse ch1 ch2
  where
    sn = maybe [] (:[]) n
    prefix =
      let prepend n' op = op{key = n':(key op)}
      in map (maybe id prepend n)
    pRecurse = liftM prefix .* recurse
-- | Diff two 17-slot expansions slot by slot and collect all edits.
-- Slot 0 carries no nibble label; slots 1..16 are labelled 0..15.
diffChoices :: MonadResource m=>[MPChoice] -> [MPChoice] -> MPReaderM m [DiffOp]
diffChoices slots1 slots2 =
  fmap concat (sequence (zipWith3 diffChoice slotLabels slots1 slots2))
  where slotLabels = Nothing : map Just [0..]
-- | Expand both choices into their child slots and diff them pairwise.
recurse :: MonadResource m=>MPChoice -> MPChoice -> MPReaderM m [DiffOp]
recurse = join .* (liftM2 diffChoices `on` enter)
infixr 9 .*
-- | Compose a unary function after a binary one: @(f .* g) x y = f (g x y)@.
(.*) :: (c -> d) -> (a -> b -> c) -> (a -> b -> d)
f .* g = \x y -> f (g x y)
-- | Diff two tries given references to their root nodes.
diff :: MonadResource m=>NodeRef -> NodeRef -> MPReaderM m [DiffOp]
diff root1 root2 = recurse (Ref root1) (Ref root2)
-- | Compute the edit script between two state roots of the same database.
dbDiff :: MonadResource m => MPDB -> StateRoot -> StateRoot -> m [DiffOp]
dbDiff db root1 root2 =
  runReaderT (diff (PtrRef root1) (PtrRef root2)) db
| null | https://raw.githubusercontent.com/blockapps/merkle-patricia-db/7ea92ea9124e7d89134bbaa661221cd1e3aefc26/src/Blockchain/Database/MerklePatricia/Diff.hs | haskell | Probably the entire MPDB system ought to be in this monad | module Blockchain.Database.MerklePatricia.Diff (dbDiff, DiffOp(..)) where
import Blockchain.Database.MerklePatricia.Internal
import Blockchain.Database.MerklePatricia.NodeData
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Resource
import Data.Function
import qualified Data.NibbleString as N
type MPReaderM a = ReaderT MPDB a
data MPChoice = Data NodeData | Ref NodeRef | Value Val | None deriving (Eq)
node :: MonadResource m=>MPChoice -> MPReaderM m NodeData
node (Data nd) = return nd
node (Ref nr) = do
derefNode <- asks getNodeData
lift $ derefNode nr
node _ = return EmptyNodeData
simplify :: NodeData -> [MPChoice]
17 : not a mistake
simplify FullNodeData{ choices = ch, nodeVal = v } =
maybe None Value v : map Ref ch
simplify n@ShortcutNodeData{ nextNibbleString = k, nextVal = v } = None : delta h
where
delta m =
let pre = replicate m None
post = replicate (16 - m - 1) None
in pre ++ [x] ++ post
x | N.null t = either Ref Value v
| otherwise = Data n{ nextNibbleString = t }
(h,t) = (fromIntegral $ N.head k, N.tail k)
enter :: MonadResource m=>MPChoice -> MPReaderM m [MPChoice]
enter = liftM simplify . node
data DiffOp =
Create {key::[N.Nibble], val::Val} |
Update {key::[N.Nibble], oldVal::Val, newVal::Val} |
Delete {key::[N.Nibble], oldVal::Val}
deriving (Show, Eq)
diffChoice :: MonadResource m=>Maybe N.Nibble -> MPChoice -> MPChoice -> MPReaderM m [DiffOp]
diffChoice n ch1 ch2 = case (ch1, ch2) of
(None, Value v) -> return [Create sn v]
(Value v, None) -> return [Delete sn v]
(Value v1, Value v2)
| v1 /= v2 -> return [Update sn v1 v2]
_ | ch1 == ch2 -> return []
| otherwise -> pRecurse ch1 ch2
where
sn = maybe [] (:[]) n
prefix =
let prepend n' op = op{key = n':(key op)}
in map (maybe id prepend n)
pRecurse = liftM prefix .* recurse
diffChoices :: MonadResource m=>[MPChoice] -> [MPChoice] -> MPReaderM m [DiffOp]
diffChoices =
liftM concat .* sequence .* zipWith3 diffChoice maybeNums
where maybeNums = Nothing : map Just [0..]
recurse :: MonadResource m=>MPChoice -> MPChoice -> MPReaderM m [DiffOp]
recurse = join .* (liftM2 diffChoices `on` enter)
infixr 9 .*
(.*) :: (c -> d) -> (a -> b -> c) -> (a -> b -> d)
(.*) = (.) . (.)
diff :: MonadResource m=>NodeRef -> NodeRef -> MPReaderM m [DiffOp]
diff = recurse `on` Ref
dbDiff :: MonadResource m => MPDB -> StateRoot -> StateRoot -> m [DiffOp]
dbDiff db root1 root2 = runReaderT ((diff `on` PtrRef) root1 root2) db
|
5e49f76748959b161368d5b19fd951ff576f172b21a16a56285357287542e122 | roswell/roswell | list-versions.lisp | (defpackage :roswell.list.versions
(:use :cl :roswell.util))
(in-package :roswell.list.versions)
(defun versions (&rest args)
  ;; Experimental?
  ;; Handler for "ros list versions [impl]". The first element of ARGS is
  ;; the subcommand name itself; drop it.
  (setf args (cdr args))
  (if args
      ;; With an implementation argument: load that implementation's
      ;; install module and print the versions its :list hook reports.
      (destructuring-bind (impl version) (parse-version-spec (impl (first args)))
        (declare (ignore version))
        (format *error-output* "Installable versions for ~A:~%" impl)
        (let ((fun (module "install" impl)))
          (and (setq fun (funcall fun :list))
               (format t "~{~A~%~}" (funcall fun)))))
      ;; Without arguments: list every implementation that has an
      ;; install-<impl>.lisp module in the same directory as this file
      ;; (the "install-" prefix, 8 characters, is stripped for display).
      (format t "candidates for ros list versions [impl] are:~2%~{~A~%~}"
              (mapcar (lambda (x)
                        (subseq x 8))
                      (remove-if-not
                       (lambda (x)
                         (string-equal "install-" x :end2 (min 8 (length x))))
                       (mapcar #'pathname-name
                               (directory (make-pathname
                                           :defaults *load-pathname*
                                           :type "lisp"
                                           :name :wild))))))))
| null | https://raw.githubusercontent.com/roswell/roswell/0107dfb54393aff1a776deb79d58f67f642135cb/lisp/list-versions.lisp | lisp | Experimental? | (defpackage :roswell.list.versions
(:use :cl :roswell.util))
(in-package :roswell.list.versions)
(defun versions (&rest args)
(setf args (cdr args))
(if args
(destructuring-bind (impl version) (parse-version-spec (impl (first args)))
(declare (ignore version))
(format *error-output* "Installable versions for ~A:~%" impl)
(let ((fun (module "install" impl)))
(and (setq fun (funcall fun :list))
(format t "~{~A~%~}" (funcall fun)))))
(format t "candidates for ros list versions [impl] are:~2%~{~A~%~}"
(mapcar (lambda (x)
(subseq x 8))
(remove-if-not
(lambda (x)
(string-equal "install-" x :end2 (min 8 (length x))))
(mapcar #'pathname-name
(directory (make-pathname
:defaults *load-pathname*
:type "lisp"
:name :wild))))))))
|
79c2e34429d369382c4db4a1432bbf745353c4fbe71447fba3693269fcd70792 | davisp/couchdb | ibrowse_http_client.erl | %%%-------------------------------------------------------------------
%%% File : ibrowse_http_client.erl
Author : >
%%% Description : The name says it all
%%%
Created : 11 Oct 2003 by >
%%%-------------------------------------------------------------------
-module(ibrowse_http_client).
-behaviour(gen_server).
%%--------------------------------------------------------------------
%% Include files
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% External exports
-export([
start_link/1,
start/1,
stop/1,
send_req/7
]).
-ifdef(debug).
-compile(export_all).
-endif.
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-include("ibrowse.hrl").
-include_lib("kernel/include/inet.hrl").
%% Per-connection state. One process == one TCP/SSL connection to a single
%% Host:Port, possibly carrying several pipelined requests (reqs queue).
-record(state, {host, port, connect_timeout,
                inactivity_timer_ref,       % shuts the connection down when idle
                use_proxy = false, proxy_auth_digest,
                ssl_options = [], is_ssl = false, socket,
                proxy_tunnel_setup = false, % false | in_progress | done (CONNECT tunnel)
                tunnel_setup_queue = [],    % requests parked while the tunnel comes up
                reqs=queue:new(), cur_req, status=idle, http_status_code,
                reply_buffer = <<>>, rep_buf_size=0, streamed_size = 0,
                recvd_headers=[],
                status_line, raw_headers,
                is_closing, content_length,
                deleted_crlf = false, transfer_encoding,
                chunk_size, chunk_size_buffer = <<>>,
                recvd_chunk_size, interim_reply_sent = false,
                lb_ets_tid, cur_pipeline_size = 0, prev_req_id
               }).

%% One outstanding HTTP request on this connection.
-record(request, {url, method, options, from,
                  stream_to, caller_controls_socket = false,
                  caller_socket_options = [],
                  req_id,
                  stream_chunk_size,
                  save_response_to_file = false,
                  tmp_file_name, tmp_file_fd, preserve_chunked_encoding,
                  response_format, timer_ref}).
-import(ibrowse_lib, [
get_value/2,
get_value/3,
do_trace/2
]).
-define(DEFAULT_STREAM_CHUNK_SIZE, 1024*1024).
-define(dec2hex(X), erlang:integer_to_list(X, 16)).
%%====================================================================
%% External functions
%%====================================================================
%%--------------------------------------------------------------------
Function : start_link/0
%% Description: Starts the server
%%--------------------------------------------------------------------
%% @doc Start an unsupervised, unlinked connection process.
start(Args) ->
    gen_server:start(?MODULE, Args, []).
%% @doc Start a connection process linked to the caller.
start_link(Args) ->
    gen_server:start_link(?MODULE, Args, []).
%% @doc Synchronously stop a connection process. If it does not answer
%% within the default call timeout it is killed outright. Always returns ok.
stop(Conn_pid) ->
    Call_result = (catch gen_server:call(Conn_pid, stop)),
    case Call_result of
        {'EXIT', {timeout, _}} ->
            %% The server is wedged; take it down the hard way.
            exit(Conn_pid, kill);
        _ ->
            ok
    end,
    ok.
%% @doc Issue an HTTP request on an existing connection process. Blocks the
%% caller for up to Timeout ms; the request tuple is handled in handle_call.
send_req(Conn_Pid, Url, Headers, Method, Body, Options, Timeout) ->
    gen_server:call(
      Conn_Pid,
      {send_req, {Url, Headers, Method, Body, Options, Timeout}}, Timeout).
%%====================================================================
%% Server functions
%%====================================================================
%%--------------------------------------------------------------------
%% Function: init/1
%% Description: Initiates the server
%% Returns: {ok, State} |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%%--------------------------------------------------------------------
%% Three ways to start: from the load balancer (LB ETS table + parsed
%% #url{}), from a raw URL string, or from a bare {Host, Port} pair.
init({Lb_Tid, #url{host = Host, port = Port}, {SSLOptions, Is_ssl}}) ->
    State = #state{host = Host,
                   port = Port,
                   ssl_options = SSLOptions,
                   is_ssl = Is_ssl,
                   lb_ets_tid = Lb_Tid},
    %% Process-dictionary entries are used only by the tracing helpers.
    put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
    put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
    {ok, set_inac_timer(State)};
init(Url) when is_list(Url) ->
    case catch ibrowse_lib:parse_url(Url) of
        #url{protocol = Protocol} = Url_rec ->
            %% Re-enter the first clause with defaults; https implies SSL.
            init({undefined, Url_rec, {[], Protocol == https}});
        {'EXIT', _} ->
            {error, invalid_url}
    end;
init({Host, Port}) ->
    State = #state{host = Host,
                   port = Port},
    put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
    put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
    {ok, set_inac_timer(State)}.
%%--------------------------------------------------------------------
Function : handle_call/3
%% Description: Handling call messages
%% Returns: {reply, Reply, State} |
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, Reply, State} | (terminate/2 is called)
%% {stop, Reason, State} (terminate/2 is called)
%%--------------------------------------------------------------------
%% Received a request when the remote server has already sent us a
%% Connection: Close header
%% A request arriving after the server announced it will close the
%% connection is refused; the caller should retry on a fresh connection.
handle_call({send_req, _}, _From, #state{is_closing = true} = State) ->
    {reply, {error, connection_closing}, State};
handle_call({send_req, {Url, Headers, Method, Body, Options, Timeout}},
            From, State) ->
    %% send_req_1/8 replies to From itself (possibly much later), so it
    %% returns noreply/stop tuples directly.
    send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State);
handle_call(stop, _From, State) ->
    do_close(State),
    do_error_reply(State, closing_on_request),
    {stop, normal, ok, State};
handle_call(Request, _From, State) ->
    Reply = {unknown_request, Request},
    {reply, Reply, State}.
%%--------------------------------------------------------------------
%% Function: handle_cast/2
%% Description: Handling cast messages
Returns : { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State} (terminate/2 is called)
%%--------------------------------------------------------------------
%% @doc No asynchronous casts are part of this module's protocol; any that
%% arrive are silently ignored.
handle_cast(_Request, State) ->
    {noreply, State}.
%%--------------------------------------------------------------------
%% Function: handle_info/2
%% Description: Handling all non call/cast messages
Returns : { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State} (terminate/2 is called)
%%--------------------------------------------------------------------
%% Raw socket data: hand off to the response parser.
handle_info({tcp, _Sock, Data}, #state{status = Status} = State) ->
    do_trace("Data recvd in state: ~p. Size: ~p. ~p~n~n", [Status, size(Data), Data]),
    handle_sock_data(Data, State);
handle_info({ssl, _Sock, Data}, State) ->
    handle_sock_data(Data, State);
%% The caller of a {stream_to, {Pid, once}} request asks for the next
%% packet of the *current* request: re-arm {active, once} on the socket.
handle_info({stream_next, Req_id}, #state{socket = Socket,
                                          cur_req = #request{req_id = Req_id}} = State) ->
    do_setopts(Socket, [{active, once}], State),
    {noreply, set_inac_timer(State)};
%% stream_next for a request that is no longer current: ignore.
handle_info({stream_next, _Req_id}, State) ->
    _Cur_req_id = case State#state.cur_req of
                      #request{req_id = Cur} ->
                          Cur;
                      _ ->
                          undefined
                  end,
    {noreply, State};
%% The caller abandons a streamed response: tear the connection down and
%% error out everything still pipelined.
handle_info({stream_close, _Req_id}, State) ->
    shutting_down(State),
    do_close(State),
    do_error_reply(State, closing_on_request),
    {stop, normal, State};
%% Peer closed or transport error: handle_sock_closed/1 delivers whatever
%% can still be delivered and fails the rest.
handle_info({tcp_closed, _Sock}, State) ->
    do_trace("TCP connection closed by peer!~n", []),
    handle_sock_closed(State),
    {stop, normal, State};
handle_info({ssl_closed, _Sock}, State) ->
    do_trace("SSL connection closed by peer!~n", []),
    handle_sock_closed(State),
    {stop, normal, State};
handle_info({tcp_error, _Sock, Reason}, State) ->
    do_trace("Error on connection to ~1000.p:~1000.p -> ~1000.p~n",
             [State#state.host, State#state.port, Reason]),
    handle_sock_closed(State),
    {stop, normal, State};
handle_info({ssl_error, _Sock, Reason}, State) ->
    do_trace("Error on SSL connection to ~1000.p:~1000.p -> ~1000.p~n",
             [State#state.host, State#state.port, Reason]),
    handle_sock_closed(State),
    {stop, normal, State};
%% Per-request timer fired (armed via erlang:send_after in send_req_1).
%% Only shut down if that request is still pending on this connection.
handle_info({req_timedout, From}, State) ->
    case lists:keymember(From, #request.from, queue:to_list(State#state.reqs)) of
        false ->
            {noreply, State};
        true ->
            shutting_down(State),
            %% do_error_reply(State, req_timedout),
            {stop, normal, State}
    end;
%% Inactivity timeout (set via set_inac_timer/1): close the idle connection.
handle_info(timeout, State) ->
    do_trace("Inactivity timeout triggered. Shutting down connection~n", []),
    shutting_down(State),
    do_error_reply(State, req_timedout),
    {stop, normal, State};
%% Toggle tracing for this connection at runtime.
handle_info({trace, Bool}, State) ->
    put(my_trace_flag, Bool),
    {noreply, State};
handle_info(Info, State) ->
    io:format("Unknown message recvd for ~1000.p:~1000.p -> ~p~n",
              [State#state.host, State#state.port, Info]),
    io:format("Recvd unknown message ~p when in state: ~p~n", [Info, State]),
    {noreply, State}.
%%--------------------------------------------------------------------
%% Function: terminate/2
%% Description: Shutdown the server
%% Returns: any (ignored by gen_server)
%%--------------------------------------------------------------------
%% @doc gen_server shutdown hook: just make sure the socket is closed.
terminate(_Reason, State) ->
    do_close(State),
    ok.
%%--------------------------------------------------------------------
%% Func: code_change/3
%% Purpose: Convert process state when code is changed
%% Returns: {ok, NewState}
%%--------------------------------------------------------------------
%% @doc Hot-code upgrade hook: the state passes through unchanged.
code_change(_Old_vsn, State, _Extra) ->
    {ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Handles data recvd on the socket
%%--------------------------------------------------------------------
%% Data arriving while no request is outstanding is a protocol violation:
%% fail everything and stop.
handle_sock_data(Data, #state{status=idle}=State) ->
    do_trace("Data recvd on socket in state idle!. ~1000.p~n", [Data]),
    shutting_down(State),
    do_error_reply(State, data_in_status_idle),
    do_close(State),
    {stop, normal, State};
%% Still parsing the status line / headers.
handle_sock_data(Data, #state{status = get_header}=State) ->
    case parse_response(Data, State) of
        {error, _Reason} ->
            shutting_down(State),
            {stop, normal, State};
        #state{socket = Socket, status = Status, cur_req = CurReq} = State_1 ->
            %% When the caller throttles the stream and we are still in the
            %% headers, arm the socket for exactly one more packet;
            %% otherwise use the normal active-once logic.
            case {Status, CurReq} of
                {get_header, #request{caller_controls_socket = true}} ->
                    do_setopts(Socket, [{active, once}], State_1);
                _ ->
                    active_once(State_1)
            end,
            {noreply, set_inac_timer(State_1)}
    end;
%% Reading the response body.
handle_sock_data(Data, #state{status = get_body,
                              socket = Socket,
                              content_length = CL,
                              http_status_code = StatCode,
                              recvd_headers = Headers,
                              chunk_size = CSz} = State) ->
    case (CL == undefined) and (CSz == undefined) of
        true ->
            %% Neither Content-Length nor chunked: the body runs until the
            %% server closes the connection, so just accumulate.
            case accumulate_response(Data, State) of
                {error, Reason} ->
                    shutting_down(State),
                    fail_pipelined_requests(State,
                                            {error, {Reason, {stat_code, StatCode}, Headers}}),
                    {stop, normal, State};
                State_1 ->
                    active_once(State_1),
                    State_2 = set_inac_timer(State_1),
                    {noreply, State_2}
            end;
        _ ->
            %% Framed body (Content-Length or chunked transfer-encoding).
            case parse_11_response(Data, State) of
                {error, Reason} ->
                    shutting_down(State),
                    fail_pipelined_requests(State,
                                            {error, {Reason, {stat_code, StatCode}, Headers}}),
                    {stop, normal, State};
                #state{cur_req = #request{caller_controls_socket = Ccs},
                       interim_reply_sent = Irs} = State_1 ->
                    %% If an interim reply just went out, keep data flowing;
                    %% otherwise a throttling caller must send stream_next.
                    case Irs of
                        true ->
                            active_once(State_1);
                        false when Ccs == true ->
                            do_setopts(Socket, [{active, once}], State);
                        false ->
                            active_once(State_1)
                    end,
                    State_2 = State_1#state{interim_reply_sent = false},
                    %% A throttling caller controls pacing, so the
                    %% inactivity timer must not fire underneath it.
                    case Ccs of
                        true ->
                            cancel_timer(State_2#state.inactivity_timer_ref, {eat_message, timeout}),
                            {noreply, State_2#state{inactivity_timer_ref = undefined}};
                        _ ->
                            {noreply, set_inac_timer(State_2)}
                    end;
                State_1 ->
                    active_once(State_1),
                    State_2 = set_inac_timer(State_1),
                    {noreply, State_2}
            end
    end.
%% First 2xx body data for a request with save_response_to_file set: open
%% the target (or temp) file lazily, then retry with the fd in place.
accumulate_response(Data,
                    #state{
                      cur_req = #request{save_response_to_file = Srtf,
                                         tmp_file_fd = undefined} = CurReq,
                      http_status_code=[$2 | _]}=State) when Srtf /= false ->
    TmpFilename = make_tmp_filename(Srtf),
    Mode = file_mode(Srtf),
    case file:open(TmpFilename, [Mode, delayed_write, raw]) of
        {ok, Fd} ->
            accumulate_response(Data, State#state{
                                        cur_req = CurReq#request{
                                                    tmp_file_fd = Fd,
                                                    tmp_file_name = TmpFilename}});
        {error, Reason} ->
            {error, {file_open_error, Reason}}
    end;
%% Saving a 2xx chunked response to file: flush buffer + new data to disk.
accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
                                                    tmp_file_fd = Fd},
                                 transfer_encoding=chunked,
                                 reply_buffer = Reply_buf,
                                 http_status_code=[$2 | _]
                                } = State) when Srtf /= false ->
    case file:write(Fd, [Reply_buf, Data]) of
        ok ->
            State#state{reply_buffer = <<>>};
        {error, Reason} ->
            {error, {file_write_error, Reason}}
    end;
%% Saving a 2xx non-chunked response to file.
accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
                                                    tmp_file_fd = Fd},
                                 reply_buffer = RepBuf,
                                 http_status_code=[$2 | _]
                                } = State) when Srtf /= false ->
    case file:write(Fd, [RepBuf, Data]) of
        ok ->
            State#state{reply_buffer = <<>>};
        {error, Reason} ->
            {error, {file_write_error, Reason}}
    end;
%% In-memory accumulation, possibly streaming interim replies to a caller.
accumulate_response(Data, #state{reply_buffer = RepBuf,
                                 rep_buf_size = RepBufSize,
                                 streamed_size = Streamed_size,
                                 cur_req = CurReq}=State) ->
    #request{stream_to = StreamTo,
             req_id = ReqId,
             stream_chunk_size = Stream_chunk_size,
             response_format = Response_format,
             caller_controls_socket = Caller_controls_socket} = CurReq,
    RepBuf_1 = <<RepBuf/binary, Data/binary>>,
    New_data_size = RepBufSize - Streamed_size,
    case StreamTo of
        undefined ->
            State#state{reply_buffer = RepBuf_1};
        %% Caller-throttled stream: forward everything we have right away.
        _ when Caller_controls_socket == true ->
            do_interim_reply(StreamTo, Response_format, ReqId, RepBuf_1),
            State#state{reply_buffer = <<>>,
                        interim_reply_sent = true,
                        streamed_size = Streamed_size + size(RepBuf_1)};
        %% Enough buffered for a full stream chunk: emit one and recurse on
        %% the remainder.
        _ when New_data_size >= Stream_chunk_size ->
            {Stream_chunk, Rem_data} = split_binary(RepBuf_1, Stream_chunk_size),
            do_interim_reply(StreamTo, Response_format, ReqId, Stream_chunk),
            State_1 = State#state{
                        reply_buffer = <<>>,
                        interim_reply_sent = true,
                        streamed_size = Streamed_size + Stream_chunk_size},
            case Rem_data of
                <<>> ->
                    State_1;
                _ ->
                    accumulate_response(Rem_data, State_1)
            end;
        _ ->
            State#state{reply_buffer = RepBuf_1}
    end.
%% @doc Work out the file a response body should be saved to. `true' means
%% "pick a fresh name in the download dir"; a string (or {append, String})
%% names the file explicitly and is returned as-is.
make_tmp_filename(true) ->
    DownloadDir = ibrowse:get_config_value(download_dir, filename:absname("./")),
    %% os:timestamp/0 replaces the deprecated erlang:now/0; the unique
    %% integer guards against two processes picking the same name within
    %% one timestamp tick.
    {A, B, C} = os:timestamp(),
    Uniq = erlang:unique_integer([positive]),
    filename:join([DownloadDir,
                   "ibrowse_tmp_file_"++
                       integer_to_list(A) ++
                       integer_to_list(B) ++
                       integer_to_list(C) ++ "_" ++
                       integer_to_list(Uniq)]);
make_tmp_filename(File) when is_list(File) ->
    File;
make_tmp_filename({append, File}) when is_list(File) ->
    File.
%% @doc Map the save_response_to_file option onto a file:open/2 mode:
%% {append, File} appends, every other value truncates/overwrites.
file_mode({append, _Filename}) -> append;
file_mode(_Other)              -> write.
%%--------------------------------------------------------------------
%% Handles the case when the server closes the socket
%%--------------------------------------------------------------------
%% Connection died while still reading headers: every pipelined caller
%% gets an error reply.
handle_sock_closed(#state{status=get_header} = State) ->
    shutting_down(State),
    do_error_reply(State, connection_closed);
handle_sock_closed(#state{cur_req=undefined} = State) ->
    shutting_down(State);
%% We check for IsClosing because the server could have sent a
%% Connection: close header and closed the socket to indicate the end
%% of the response. There may be pipelined requests which need a response.
handle_sock_closed(#state{reply_buffer = Buf, reqs = Reqs, http_status_code = SC,
                          is_closing = IsClosing,
                          cur_req = #request{tmp_file_name=TmpFilename,
                                             tmp_file_fd=Fd} = CurReq,
                          status = get_body,
                          recvd_headers = Headers,
                          status_line = Status_line,
                          raw_headers = Raw_headers
                         }=State) ->
    #request{from=From, stream_to=StreamTo, req_id=ReqId,
             response_format = Resp_format,
             options = Options} = CurReq,
    case IsClosing of
        true ->
            %% Legitimate end-of-body-by-close: deliver what we buffered
            %% (or the temp file) to the current caller, then fail the
            %% remaining pipeline.
            {_, Reqs_1} = queue:out(Reqs),
            Body = case TmpFilename of
                       undefined ->
                           Buf;
                       _ ->
                           ok = file:close(Fd),
                           {file, TmpFilename}
                   end,
            Reply = case get_value(give_raw_headers, Options, false) of
                        true ->
                            {ok, Status_line, Raw_headers, Body};
                        false ->
                            {ok, SC, Headers, Buf}
                    end,
            State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
            ok = do_error_reply(State_1#state{reqs = Reqs_1}, connection_closed),
            State_1;
        _ ->
            ok = do_error_reply(State, connection_closed),
            State
    end.
%% Open the transport connection. ssl:connect is used only when not going
%% through a proxy; with a proxy the socket starts as plain TCP and is
%% upgraded after the CONNECT tunnel is established (see send_req_1).
do_connect(Host, Port, Options, #state{is_ssl = true,
                                       use_proxy = false,
                                       ssl_options = SSLOptions},
           Timeout) ->
    ssl:connect(Host, Port, get_sock_options(Host, Options, SSLOptions), Timeout);
do_connect(Host, Port, Options, _State, Timeout) ->
    gen_tcp:connect(Host, Port, get_sock_options(Host, Options, []), Timeout).
%% @doc Assemble the option list for gen_tcp/ssl connect: caller-supplied
%% socket options plus SSL options, filtered of anything that would break
%% the receive loop, with inet6 added for IPv6 hosts and nodelay defaulted
%% to true unless the caller set it explicitly.
get_sock_options(Host, Options, SSLOptions) ->
    Caller_socket_options = get_value(socket_options, Options, []),
    Ipv6Options = case is_ipv6_host(Host) of
                      true  -> [inet6];
                      false -> []
                  end,
    Other_sock_options = filter_sock_options(SSLOptions ++ Caller_socket_options ++ Ipv6Options),
    Nodelay_default = case lists:keymember(nodelay, 1, Other_sock_options) of
                          false -> [{nodelay, true}];
                          true  -> []
                      end,
    Nodelay_default ++ [binary, {active, false} | Other_sock_options].
%% @doc True if Host is an IPv6 address literal, or a hostname that
%% resolves to an IPv6 address; false for IPv4 literals and anything else.
is_ipv6_host(Host) ->
    case inet_parse:address(Host) of
        {ok, Addr} when tuple_size(Addr) =:= 8 ->
            true;
        {ok, Addr} when tuple_size(Addr) =:= 4 ->
            false;
        _ ->
            %% Not an address literal; fall back to a name lookup.
            case inet:gethostbyname(Host) of
                {ok, #hostent{h_addrtype = inet6}} -> true;
                _ -> false
            end
    end.
%% We don't want the caller to specify certain options
%% @doc Strip socket options the caller may not override: {active, _},
%% {packet, _} and list would break this module's receive handling.
filter_sock_options(Opts) ->
    [Opt || Opt <- Opts, is_allowed_sock_option(Opt)].

%% Predicate for filter_sock_options/1.
is_allowed_sock_option({active, _}) -> false;
is_allowed_sock_option({packet, _}) -> false;
is_allowed_sock_option(list)        -> false;
is_allowed_sock_option(_)           -> true.
%% Send bytes on the socket. Until the proxy CONNECT tunnel is done, data
%% must go out over plain TCP even though the connection is marked is_ssl.
do_send(Req, #state{socket = Sock,
                    is_ssl = true,
                    use_proxy = true,
                    proxy_tunnel_setup = Pts}) when Pts /= done ->  gen_tcp:send(Sock, Req);
do_send(Req, #state{socket = Sock, is_ssl = true})  ->  ssl:send(Sock, Req);
do_send(Req, #state{socket = Sock, is_ssl = false}) ->  gen_tcp:send(Sock, Req).
%% do_send_body(Sock::socket_descriptor(), Source::source_descriptor(), IsSSL::boolean()) -> ok | error()
%% source_descriptor() = fun_arity_0 |
%% {fun_arity_0} |
%% {fun_arity_1, term()}
%% error() = term()
%% Send the request body. A fun source is invoked repeatedly — optionally
%% threading caller state — until it returns eof; any other body term is
%% sent as-is. TE is true when chunked transfer-encoding is in effect.
do_send_body(Source, State, TE) when is_function(Source) ->
    do_send_body({Source}, State, TE);
do_send_body({Source}, State, TE) when is_function(Source) ->
    do_send_body1(Source, Source(), State, TE);
do_send_body({Source, Source_state}, State, TE) when is_function(Source) ->
    do_send_body1(Source, Source(Source_state), State, TE);
do_send_body(Body, State, _TE) ->
    do_send(Body, State).
%% Drive one step of a fun-sourced body: send the produced data (chunk-
%% encoded when TE is true), then loop back through do_send_body/3.
do_send_body1(Source, Resp, State, TE) ->
    case Resp of
        %% Empty production: skip the send, keep pulling.
        {ok, Data} when Data == []; Data == <<>> ->
            do_send_body({Source}, State, TE);
        {ok, Data} ->
            do_send(maybe_chunked_encode(Data, TE), State),
            do_send_body({Source}, State, TE);
        {ok, Data, New_source_state} when Data == []; Data == <<>> ->
            do_send_body({Source, New_source_state}, State, TE);
        {ok, Data, New_source_state} ->
            do_send(maybe_chunked_encode(Data, TE), State),
            do_send_body({Source, New_source_state}, State, TE);
        eof when TE == true ->
            %% Terminating zero-length chunk for chunked encoding.
            do_send(<<"0\r\n\r\n">>, State),
            ok;
        eof ->
            ok;
        Err ->
            Err
    end.
%% @doc When chunked transfer-encoding is in effect, wrap Data as one HTTP
%% chunk (hex size line, payload, CRLF); otherwise pass it through.
maybe_chunked_encode(Data, false) ->
    Data;
maybe_chunked_encode(Data, true) ->
    Chunk_size_hex = erlang:integer_to_list(iolist_size(Data), 16),
    [Chunk_size_hex, "\r\n", Data, "\r\n"].
%% Close the socket using the transport actually in use: before the proxy
%% tunnel is done the socket is plain TCP even on an is_ssl connection.
do_close(#state{socket = undefined})            ->  ok;
do_close(#state{socket = Sock,
                is_ssl = true,
                use_proxy = true,
                proxy_tunnel_setup = Pts
               }) when Pts /= done ->  catch gen_tcp:close(Sock);
do_close(#state{socket = Sock, is_ssl = true})  ->  catch ssl:close(Sock);
do_close(#state{socket = Sock, is_ssl = false}) ->  catch gen_tcp:close(Sock).
%% Re-arm {active, once} on the socket — unless the caller controls the
%% socket flow ({stream_to, {Pid, once}}), in which case the caller must
%% request the next packet via stream_next.
active_once(#state{cur_req = #request{caller_controls_socket = true}}) ->
    ok;
active_once(#state{socket = Socket} = State) ->
    do_setopts(Socket, [{active, once}], State).
%% setopts dispatcher with the same transport selection as do_send/do_close.
%% An empty option list is a no-op.
do_setopts(_Sock, [], _) ->  ok;
do_setopts(Sock, Opts, #state{is_ssl = true,
                              use_proxy = true,
                              proxy_tunnel_setup = Pts}
         ) when Pts /= done ->  inet:setopts(Sock, Opts);
do_setopts(Sock, Opts, #state{is_ssl = true}) ->  ssl:setopts(Sock, Opts);
do_setopts(Sock, Opts, _) ->  inet:setopts(Sock, Opts).
%% @doc Mark the connection as SSL when the caller requested it via the
%% is_ssl option, copying the caller's ssl_options into the state.
check_ssl_options(Options, State) ->
    case get_value(is_ssl, Options, false) of
        true ->
            State#state{is_ssl = true,
                        ssl_options = get_value(ssl_options, Options)};
        false ->
            State
    end.
send_req_1(From,
#url{host = Host,
port = Port} = Url,
Headers, Method, Body, Options, Timeout,
#state{socket = undefined} = State) ->
{Host_1, Port_1, State_1} =
case get_value(proxy_host, Options, false) of
false ->
{Host, Port, State};
PHost ->
ProxyUser = get_value(proxy_user, Options, []),
ProxyPassword = get_value(proxy_password, Options, []),
Digest = http_auth_digest(ProxyUser, ProxyPassword),
{PHost, get_value(proxy_port, Options, 80),
State#state{use_proxy = true,
proxy_auth_digest = Digest}}
end,
State_2 = check_ssl_options(Options, State_1),
do_trace("Connecting...~n", []),
Conn_timeout = get_value(connect_timeout, Options, Timeout),
case do_connect(Host_1, Port_1, Options, State_2, Conn_timeout) of
{ok, Sock} ->
do_trace("Connected! Socket: ~1000.p~n", [Sock]),
State_3 = State_2#state{socket = Sock,
connect_timeout = Conn_timeout},
send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State_3);
Err ->
shutting_down(State_2),
do_trace("Error connecting. Reason: ~1000.p~n", [Err]),
gen_server:reply(From, {error, {conn_failed, Err}}),
{stop, normal, State_2}
end;
%% Send a CONNECT request.
Wait for 200 OK
%% Upgrade to SSL connection
%% Then send request
send_req_1(From,
#url{
host = Server_host,
port = Server_port
} = Url,
Headers, Method, Body, Options, Timeout,
#state{
proxy_tunnel_setup = false,
use_proxy = true,
is_ssl = true} = State) ->
Ref = case Timeout of
infinity ->
undefined;
_ ->
erlang:send_after(Timeout, self(), {req_timedout, From})
end,
NewReq = #request{
method = connect,
preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false),
options = Options,
timer_ref = Ref
},
State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
Pxy_auth_headers = maybe_modify_headers(Url, Method, Options, [], State_1),
Path = [Server_host, $:, integer_to_list(Server_port)],
{Req, Body_1} = make_request(connect, Pxy_auth_headers,
Path, Path,
[], Options, State_1, undefined),
TE = is_chunked_encoding_specified(Options),
trace_request(Req),
case do_send(Req, State) of
ok ->
case do_send_body(Body_1, State_1, TE) of
ok ->
trace_request_body(Body_1),
active_once(State_1),
State_1_1 = inc_pipeline_counter(State_1),
State_2 = State_1_1#state{status = get_header,
cur_req = NewReq,
proxy_tunnel_setup = in_progress,
tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout}]},
State_3 = set_inac_timer(State_2),
{noreply, State_3};
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end;
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end;
send_req_1(From, Url, Headers, Method, Body, Options, Timeout,
#state{proxy_tunnel_setup = in_progress,
tunnel_setup_queue = Q} = State) ->
do_trace("Queued SSL request awaiting tunnel setup: ~n"
"URL : ~s~n"
"Method : ~p~n"
"Headers : ~p~n", [Url, Method, Headers]),
{noreply, State#state{tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout} | Q]}};
send_req_1(From,
#url{abspath = AbsPath,
path = RelPath} = Url,
Headers, Method, Body, Options, Timeout,
#state{status = Status,
socket = Socket} = State) ->
cancel_timer(State#state.inactivity_timer_ref, {eat_message, timeout}),
ReqId = make_req_id(),
Resp_format = get_value(response_format, Options, list),
Caller_socket_options = get_value(socket_options, Options, []),
{StreamTo, Caller_controls_socket} =
case get_value(stream_to, Options, undefined) of
{Caller, once} when is_pid(Caller) or
is_atom(Caller) ->
Async_pid_rec = {{req_id_pid, ReqId}, self()},
true = ets:insert(ibrowse_stream, Async_pid_rec),
{Caller, true};
undefined ->
{undefined, false};
Caller when is_pid(Caller) or
is_atom(Caller) ->
{Caller, false};
Stream_to_inv ->
exit({invalid_option, {stream_to, Stream_to_inv}})
end,
SaveResponseToFile = get_value(save_response_to_file, Options, false),
Ref = case Timeout of
infinity ->
undefined;
_ ->
erlang:send_after(Timeout, self(), {req_timedout, From})
end,
NewReq = #request{url = Url,
method = Method,
stream_to = StreamTo,
caller_controls_socket = Caller_controls_socket,
caller_socket_options = Caller_socket_options,
options = Options,
req_id = ReqId,
save_response_to_file = SaveResponseToFile,
stream_chunk_size = get_stream_chunk_size(Options),
response_format = Resp_format,
from = From,
preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false),
timer_ref = Ref
},
State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
Headers_1 = maybe_modify_headers(Url, Method, Options, Headers, State_1),
{Req, Body_1} = make_request(Method,
Headers_1,
AbsPath, RelPath, Body, Options, State_1,
ReqId),
trace_request(Req),
do_setopts(Socket, Caller_socket_options, State_1),
TE = is_chunked_encoding_specified(Options),
case do_send(Req, State_1) of
ok ->
case do_send_body(Body_1, State_1, TE) of
ok ->
trace_request_body(Body_1),
State_2 = inc_pipeline_counter(State_1),
active_once(State_2),
State_3 = case Status of
idle ->
State_2#state{status = get_header,
cur_req = NewReq};
_ ->
State_2
end,
case StreamTo of
undefined ->
ok;
_ ->
gen_server:reply(From, {ibrowse_req_id, ReqId})
end,
State_4 = set_inac_timer(State_3),
{noreply, State_4};
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end;
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end.
%% Prepare the outgoing header list.  A CONNECT (tunnel setup) request
%% only gets proxy authorization.  Anything else, unless the caller set
%% the headers_as_is option, gets auth headers plus a Host header.
maybe_modify_headers(#url{}, connect, _, Headers, State) ->
    add_proxy_auth_headers(State, Headers);
maybe_modify_headers(#url{host = Host, port = Port} = Url, _Method, Options, Headers, State) ->
    case get_value(headers_as_is, Options, false) of
        true ->
            %% Caller takes full responsibility for the header list
            Headers;
        false ->
            With_auth = add_auth_headers(Url, Options, Headers, State),
            Host_val =
                case lists:keysearch(host_header, 1, Options) of
                    {value, {_, Override}} ->
                        Override;
                    false when Port =:= 80; Port =:= 443 ->
                        %% Default ports are omitted from the Host header
                        Host;
                    false ->
                        [Host, ":", integer_to_list(Port)]
                end,
            [{"Host", Host_val} | With_auth]
    end.
%% Add an Authorization header for HTTP Basic auth.  Credentials embedded
%% in the URL take precedence over the basic_auth option.  Proxy
%% authorization is appended in every case.
add_auth_headers(#url{username = User, password = UPw}, Options, Headers, State) ->
    Mk_auth = fun(U, P) ->
                      {"Authorization", ["Basic ", http_auth_digest(U, P)]}
              end,
    Headers_1 =
        case User of
            undefined ->
                %% No credentials in the URL; fall back to the basic_auth option
                case get_value(basic_auth, Options, undefined) of
                    undefined -> Headers;
                    {U, P}    -> [Mk_auth(U, P) | Headers]
                end;
            _ ->
                %% Credentials embedded in the URL win
                [Mk_auth(User, UPw) | Headers]
        end,
    add_proxy_auth_headers(State, Headers_1).
%% Prepend a Proxy-Authorization header when a proxy is in use and a
%% digest has been computed; otherwise return the headers untouched.
add_proxy_auth_headers(#state{use_proxy = false}, Headers) ->
    Headers;
add_proxy_auth_headers(#state{proxy_auth_digest = []}, Headers) ->
    Headers;
add_proxy_auth_headers(#state{proxy_auth_digest = Digest}, Headers) ->
    [{"Proxy-Authorization", ["Basic ", Digest]} | Headers].
%% Base64 digest for HTTP Basic auth ("user:password").  Empty
%% credentials yield an empty digest.
http_auth_digest([], []) ->
    [];
http_auth_digest(Username, Password) ->
    ibrowse_lib:encode_base64(lists:append(Username, [$: | Password])).
%% Build the on-the-wire request.  Returns {RequestIolist, Body_1}, where
%% RequestIolist is the request line plus the encoded header block and
%% Body_1 is the body, chunk-encoded if the transfer_encoding option
%% asked for it.  Method is an atom; AbsPath/RelPath come from the parsed
%% URL (the absolute form is used when talking through a proxy).
make_request(Method, Headers, AbsPath, RelPath, Body, Options,
             #state{use_proxy = UseProxy, is_ssl = Is_ssl}, ReqId) ->
    HttpVsn = http_vsn_string(get_value(http_vsn, Options, {1,1})),
    %% Normalise each header to {lowercase_name, original_name, value} so
    %% lookups below are case-insensitive while the caller's spelling is
    %% preserved on the wire.
    Fun1 = fun({X, Y}) when is_atom(X) ->
                   {to_lower(atom_to_list(X)), X, Y};
              ({X, Y}) when is_list(X) ->
                   {to_lower(X), X, Y}
           end,
    Headers_0 = [Fun1(X) || X <- Headers],
    %% Add a Content-Length header when the caller didn't supply one and
    %% the body size is known (a fun/fun-state body is streamed, so its
    %% size isn't known here).
    Headers_1 =
        case lists:keysearch("content-length", 1, Headers_0) of
            false when (Body =:= [] orelse Body =:= <<>>) andalso
                       (Method =:= post orelse Method =:= put) ->
                [{"content-length", "Content-Length", "0"} | Headers_0];
            false when is_binary(Body) orelse is_list(Body) ->
                [{"content-length", "Content-Length", integer_to_list(iolist_size(Body))} | Headers_0];
            _ ->
                %% Content-Length is already specified or Body is a
                %% function or function/state pair
                Headers_0
        end,
    %% If the caller asked for a chunked request body, drop any
    %% Content-Length, add Transfer-Encoding: chunked, and pre-encode the
    %% body into chunks (default chunk size 5120 bytes).
    {Headers_2, Body_1} =
        case is_chunked_encoding_specified(Options) of
            false ->
                {[{Y, Z} || {_, Y, Z} <- Headers_1], Body};
            true ->
                Chunk_size_1 = case get_value(transfer_encoding, Options) of
                                   chunked ->
                                       5120;
                                   {chunked, Chunk_size} ->
                                       Chunk_size
                               end,
                {[{Y, Z} || {X, Y, Z} <- Headers_1,
                            X /= "content-length"] ++
                     [{"Transfer-Encoding", "chunked"}],
                 chunk_request_body(Body, Chunk_size_1)}
        end,
    %% Optionally echo the request id in a header, mainly for debugging
    Headers_3 = case lists:member({include_ibrowse_req_id, true}, Options) of
                    true ->
                        [{"x-ibrowse-request-id", io_lib:format("~1000.p",[ReqId])} | Headers_2];
                    false ->
                        Headers_2
                end,
    Headers_4 = cons_headers(Headers_3),
    %% Through a proxy (or on request) the absolute URI is used -- except
    %% over an SSL tunnel, where the connection is already end-to-end and
    %% the relative path is correct.
    Uri = case get_value(use_absolute_uri, Options, false) or UseProxy of
              true ->
                  case Is_ssl of
                      true ->
                          RelPath;
                      false ->
                          AbsPath
                  end;
              false ->
                  RelPath
          end,
    {[method(Method), " ", Uri, " ", HttpVsn, crnl(), Headers_4, crnl()], Body_1}.
%% True if the caller asked for a chunked request body via the
%% transfer_encoding option (either 'chunked' or {chunked, Size}).
is_chunked_encoding_specified(Options) ->
    case get_value(transfer_encoding, Options, false) of
        chunked      -> true;
        {chunked, _} -> true;
        false        -> false
    end.
%% Map an {Major, Minor} HTTP version tuple to its request-line string.
http_vsn_string(Vsn) ->
    case Vsn of
        {0, 9} -> "HTTP/0.9";
        {1, 0} -> "HTTP/1.0";
        {1, 1} -> "HTTP/1.1"
    end.
%% Expand ibrowse's convenience header atoms (basic_auth, cookie,
%% content_length, content_type) into real HTTP headers, then encode the
%% whole list into iolist form ready for the wire.
cons_headers(Headers) ->
    cons_headers(Headers, []).

cons_headers([], Acc) ->
    encode_headers(Acc);
cons_headers([{basic_auth, {U, P}} | Rest], Acc) ->
    Auth = {"Authorization", ["Basic ", ibrowse_lib:encode_base64(U ++ ":" ++ P)]},
    cons_headers(Rest, [Auth | Acc]);
cons_headers([{cookie, Cookie} | Rest], Acc) ->
    cons_headers(Rest, [{"Cookie", Cookie} | Acc]);
cons_headers([{content_length, Len} | Rest], Acc) ->
    cons_headers(Rest, [{"Content-Length", Len} | Acc]);
cons_headers([{content_type, CType} | Rest], Acc) ->
    cons_headers(Rest, [{"Content-Type", CType} | Acc]);
cons_headers([Hdr | Rest], Acc) ->
    cons_headers(Rest, [Hdr | Acc]).
%% Render a list of {Name, Value} headers as an iolist of
%% "Name: Value\r\n" lines.  Atom names are converted to strings; the
%% pseudo-header http_vsn is dropped.
encode_headers(Headers) ->
    encode_headers(Headers, []).

encode_headers([], Acc) ->
    lists:reverse(Acc);
encode_headers([{http_vsn, _} | Rest], Acc) ->
    %% internal option, not a real header
    encode_headers(Rest, Acc);
encode_headers([{Name, Val} | Rest], Acc) when is_list(Name) ->
    encode_headers(Rest, [[Name, ": ", fmt_val(Val), crnl()] | Acc]);
encode_headers([{Name, Val} | Rest], Acc) when is_atom(Name) ->
    encode_headers(Rest, [[atom_to_list(Name), ": ", fmt_val(Val), crnl()] | Acc]).
%% Convert a request body into HTTP chunked transfer encoding.  Bodies
%% supplied as a fun (or {fun, state} tuple) are streamed later by the
%% caller and pass through untouched.  Each chunk is emitted as
%% "<hex-size>\r\n<data>\r\n"; the terminating sequence is "0\r\n\r\n".
chunk_request_body(Body, _ChunkSize) when is_tuple(Body) orelse
                                          is_function(Body) ->
    Body;
chunk_request_body(Body, ChunkSize) ->
    chunk_request_body(Body, ChunkSize, []).

chunk_request_body(Body, _ChunkSize, Acc) when Body == <<>>; Body == [] ->
    %% Nothing (left) to send: just the last-chunk marker
    lists:reverse(["\r\n", "0\r\n" | Acc]);
chunk_request_body(Body, ChunkSize, Acc) when is_binary(Body),
                                              size(Body) >= ChunkSize ->
    <<Head:ChunkSize/binary, Tail/binary>> = Body,
    chunk_request_body(Tail, ChunkSize,
                       [[?dec2hex(ChunkSize), "\r\n", Head, "\r\n"] | Acc]);
chunk_request_body(Body, _ChunkSize, Acc) when is_binary(Body) ->
    %% Final partial binary chunk, followed by the last-chunk marker
    Chunk = [?dec2hex(size(Body)), "\r\n", Body, "\r\n"],
    lists:reverse(["\r\n", "0\r\n", Chunk | Acc]);
chunk_request_body(Body, ChunkSize, Acc) when length(Body) >= ChunkSize ->
    {Head, Tail} = split_list_at(Body, ChunkSize),
    chunk_request_body(Tail, ChunkSize,
                       [[?dec2hex(ChunkSize), "\r\n", Head, "\r\n"] | Acc]);
chunk_request_body(Body, _ChunkSize, Acc) when is_list(Body) ->
    %% Final partial list chunk, followed by the last-chunk marker
    Chunk = [?dec2hex(length(Body)), "\r\n", Body, "\r\n"],
    lists:reverse(["\r\n", "0\r\n", Chunk | Acc]).
%% Parse as much response data as possible.  Drives the per-connection
%% state machine: first the status line + headers, then the body
%% (Content-Length, chunked, or read-until-close), with special handling
%% for CONNECT tunnels, HEAD, 1xx, 204 and 304.  Returns the new #state{}
%% or {error, Reason} after failing all pipelined requests.
parse_response(<<>>, #state{cur_req = undefined} = State) ->
    %% Nothing pending and nothing buffered: back to idle
    State#state{status = idle};
parse_response(Data, #state{cur_req = undefined}) ->
    do_trace("Data left to process when no pending request. ~1000.p~n", [Data]),
    {error, data_in_status_idle};
parse_response(Data, #state{reply_buffer = Acc, reqs = Reqs,
                            cur_req = CurReq} = State) ->
    #request{from = From, stream_to = StreamTo, req_id = ReqId,
             method = Method, response_format = Resp_format,
             options = Options, timer_ref = T_ref
            } = CurReq,
    MaxHeaderSize = ibrowse:get_config_value(max_headers_size, infinity),
    case scan_header(Acc, Data) of
        {yes, Headers, Data_1} ->
            %% NOTE(review): the two trace calls below look redundant -- confirm
            do_trace("Recvd Header Data -> ~s~n----~n", [Headers]),
            do_trace("Recvd headers~n--- Headers Begin ---~n~s~n--- Headers End ---~n~n", [Headers]),
            {HttpVsn, StatCode, Headers_1, Status_line, Raw_headers} = parse_headers(Headers),
            do_trace("HttpVsn: ~p StatusCode: ~p Headers_1 -> ~1000.p~n", [HttpVsn, StatCode, Headers_1]),
            LCHeaders = [{to_lower(X), Y} || {X, Y} <- Headers_1],
            ConnClose = to_lower(get_value("connection", LCHeaders, "false")),
            IsClosing = is_connection_closing(HttpVsn, ConnClose),
            %% If the server will close after this response, deregister
            %% ourselves from the load balancer now
            State_0 = case IsClosing of
                          true ->
                              shutting_down(State),
                              State#state{is_closing = IsClosing};
                          false ->
                              State
                      end,
            Give_raw_headers = get_value(give_raw_headers, Options, false),
            State_1 = case Give_raw_headers of
                          true ->
                              State_0#state{recvd_headers = Headers_1, status = get_body,
                                            reply_buffer = <<>>,
                                            status_line = Status_line,
                                            raw_headers = Raw_headers,
                                            http_status_code = StatCode};
                          false ->
                              State_0#state{recvd_headers = Headers_1, status = get_body,
                                            reply_buffer = <<>>,
                                            http_status_code = StatCode}
                      end,
            put(conn_close, ConnClose),
            TransferEncoding = to_lower(get_value("transfer-encoding", LCHeaders, "false")),
            Head_response_with_body = lists:member({workaround, head_response_with_body}, Options),
            %% Decide how (and whether) a body follows these headers
            case get_value("content-length", LCHeaders, undefined) of
                _ when Method == connect,
                       hd(StatCode) == $2 ->
                    %% Tunnel established (2xx to CONNECT): switch to SSL
                    {_, Reqs_1} = queue:out(Reqs),
                    cancel_timer(T_ref),
                    upgrade_to_ssl(set_cur_request(State_0#state{reqs = Reqs_1,
                                                                 recvd_headers = [],
                                                                 status = idle
                                                                }));
                _ when Method == connect ->
                    %% Any non-2xx to CONNECT means the tunnel failed
                    {_, Reqs_1} = queue:out(Reqs),
                    do_error_reply(State#state{reqs = Reqs_1},
                                   {error, proxy_tunnel_failed}),
                    {error, proxy_tunnel_failed};
                _ when Method =:= head,
                       Head_response_with_body =:= false ->
                    %% This (HEAD response with body) is not supposed
                    %% to happen, but it does. An Apache server was
                    %% observed to send an "empty" body, but in a
                    %% Chunked-Transfer-Encoding way, which meant
                    %% there was still a body. Issue #67 on Github
                    {_, Reqs_1} = queue:out(Reqs),
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
                                         {ok, StatCode, Headers_1, []}),
                    cancel_timer(T_ref, {eat_message, {req_timedout, From}}),
                    State_2 = reset_state(State_1_1),
                    State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
                    parse_response(Data_1, State_3);
                _ when hd(StatCode) =:= $1 ->
                    %% No message body is expected. Server may send
                    %% one or more 1XX responses before a proper
                    %% response.
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    do_trace("Recvd a status code of ~p. Ignoring and waiting for a proper response~n", [StatCode]),
                    parse_response(Data_1, State_1#state{recvd_headers = [],
                                                         status = get_header});
                _ when StatCode =:= "204";
                       StatCode =:= "304" ->
                    %% No message body is expected for these Status Codes.
                    %% RFC2616 - Sec 4.4
                    {_, Reqs_1} = queue:out(Reqs),
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
                                         {ok, StatCode, Headers_1, []}),
                    cancel_timer(T_ref, {eat_message, {req_timedout, From}}),
                    State_2 = reset_state(State_1_1),
                    State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
                    parse_response(Data_1, State_3);
                _ when TransferEncoding =:= "chunked" ->
                    do_trace("Chunked encoding detected...~n",[]),
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    case parse_11_response(Data_1, State_1#state{transfer_encoding=chunked,
                                                                 chunk_size=chunk_start,
                                                                 reply_buffer = <<>>}) of
                        {error, Reason} ->
                            fail_pipelined_requests(State_1,
                                                    {error, {Reason,
                                                             {stat_code, StatCode}, Headers_1}}),
                            {error, Reason};
                        State_2 ->
                            State_2
                    end;
                undefined when HttpVsn =:= "HTTP/1.0";
                               ConnClose =:= "close" ->
                    %% No Content-Length: body is delimited by the server
                    %% closing the connection
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    State_1#state{reply_buffer = Data_1};
                undefined ->
                    fail_pipelined_requests(State_1,
                                            {error, {content_length_undefined,
                                                     {stat_code, StatCode}, Headers}}),
                    {error, content_length_undefined};
                V ->
                    %% Content-Length present: must be a non-negative integer
                    case catch list_to_integer(V) of
                        V_1 when is_integer(V_1), V_1 >= 0 ->
                            send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                            do_trace("Recvd Content-Length of ~p~n", [V_1]),
                            State_2 = State_1#state{rep_buf_size=0,
                                                    reply_buffer = <<>>,
                                                    content_length=V_1},
                            case parse_11_response(Data_1, State_2) of
                                {error, Reason} ->
                                    fail_pipelined_requests(State_1,
                                                            {error, {Reason,
                                                                     {stat_code, StatCode}, Headers_1}}),
                                    {error, Reason};
                                State_3 ->
                                    State_3
                            end;
                        _ ->
                            fail_pipelined_requests(State_1,
                                                    {error, {content_length_undefined,
                                                             {stat_code, StatCode}, Headers}}),
                            {error, content_length_undefined}
                    end
            end;
        {no, Acc_1} when MaxHeaderSize == infinity ->
            %% Headers incomplete; keep buffering
            State#state{reply_buffer = Acc_1};
        {no, Acc_1} when size(Acc_1) < MaxHeaderSize ->
            State#state{reply_buffer = Acc_1};
        {no, _Acc_1} ->
            fail_pipelined_requests(State, {error, max_headers_size_exceeded}),
            {error, max_headers_size_exceeded}
    end.
%% Upgrade the plain proxy socket to SSL once the CONNECT tunnel has been
%% established, then replay any requests queued while the tunnel was
%% being set up.  On failure, every queued caller gets an error reply.
upgrade_to_ssl(#state{socket = Socket,
                      connect_timeout = Conn_timeout,
                      ssl_options = Ssl_options,
                      tunnel_setup_queue = Q} = State) ->
    case ssl:connect(Socket, Ssl_options, Conn_timeout) of
        {ok, Ssl_socket} ->
            do_trace("Upgraded to SSL socket!!~n", []),
            State_1 = State#state{socket = Ssl_socket,
                                  proxy_tunnel_setup = done},
            %% The queue was built by prepending; reverse to restore
            %% submission order before replaying
            send_queued_requests(lists:reverse(Q), State_1);
        Err ->
            %% Fixed typo in the trace message ("Reson" -> "Reason")
            do_trace("Upgrade to SSL socket failed. Reason: ~p~n", [Err]),
            do_error_reply(State, {error, {send_failed, Err}}),
            {error, send_failed}
    end.
%% Replay requests that were queued while the SSL tunnel was being
%% established.  Stops (and errors out all remaining callers) at the
%% first request that fails to send.
send_queued_requests([], State) ->
    do_trace("Sent all queued requests via SSL connection~n", []),
    State#state{tunnel_setup_queue = []};
send_queued_requests([{From, Url, Headers, Method, Body, Options, Timeout} | Rest], State) ->
    case send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State) of
        {noreply, State_1} ->
            send_queued_requests(Rest, State_1);
        Err ->
            do_trace("Error sending queued SSL request: ~n"
                     "URL : ~s~n"
                     "Method : ~p~n"
                     "Headers : ~p~n", [Url, Method, Headers]),
            do_error_reply(State, {error, {send_failed, Err}}),
            {error, send_failed}
    end.
%% Decide whether the server will close the connection after this
%% response, based on the HTTP version and the Connection header value
%% ("false" here means the header was absent).
is_connection_closing(_, "close")          -> true;
is_connection_closing("HTTP/0.9", _)       -> true;
is_connection_closing("HTTP/1.0", "false") -> true;
is_connection_closing(_, _)                -> false.
%% Parse an HTTP/1.1 response body.  The clauses below handle, in order:
%% reading a chunk-size line, eating the CRLF between two chunks, the
%% final zero-size chunk (with any trailers discarded), chunk payload
%% data, and finally a plain Content-Length delimited body.
%%
%% This clause determines the chunk size when given data from the beginning of the chunk
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked,
                         chunk_size = chunk_start,
                         chunk_size_buffer = Chunk_sz_buf
                        } = State) ->
    case scan_crlf(Chunk_sz_buf, DataRecvd) of
        {yes, ChunkHeader, Data_1} ->
            State_1 = maybe_accumulate_ce_data(State, <<ChunkHeader/binary, $\r, $\n>>),
            ChunkSize = parse_chunk_header(ChunkHeader),
            %%
            %% Do we have to preserve the chunk encoding when
            %% streaming? NO. This should be transparent to the client
            %% process. Chunked encoding was only introduced to make
            %% it efficient for the server.
            %%
            RemLen = size(Data_1),
            do_trace("Determined chunk size: ~p. Already recvd: ~p~n",
                     [ChunkSize, RemLen]),
            parse_11_response(Data_1, State_1#state{chunk_size_buffer = <<>>,
                                                    deleted_crlf = true,
                                                    recvd_chunk_size = 0,
                                                    chunk_size = ChunkSize});
        {no, Data_1} ->
            %% Chunk-size line incomplete; wait for more data
            State#state{chunk_size_buffer = Data_1}
    end;
%% This clause is to remove the CRLF between two chunks
%%
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked,
                         chunk_size = tbd,
                         chunk_size_buffer = Buf
                        } = State) ->
    case scan_crlf(Buf, DataRecvd) of
        {yes, _, NextChunk} ->
            State_1 = maybe_accumulate_ce_data(State, <<$\r, $\n>>),
            State_2 = State_1#state{chunk_size = chunk_start,
                                    chunk_size_buffer = <<>>,
                                    deleted_crlf = true},
            parse_11_response(NextChunk, State_2);
        {no, Data_1} ->
            State#state{chunk_size_buffer = Data_1}
    end;
%% This clause deals with the end of a chunked transfer. ibrowse does
%% not support Trailers in the Chunked Transfer encoding. Any trailer
%% received is silently discarded.
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked, chunk_size = 0,
                         cur_req = CurReq,
                         deleted_crlf = DelCrlf,
                         chunk_size_buffer = Trailer,
                         reqs = Reqs} = State) ->
    do_trace("Detected end of chunked transfer...~n", []),
    DataRecvd_1 = case DelCrlf of
                      false ->
                          DataRecvd;
                      true ->
                          %% Re-insert the CRLF consumed by the previous
                          %% clause so the trailer scan sees the complete
                          %% terminating sequence
                          <<$\r, $\n, DataRecvd/binary>>
                  end,
    case scan_header(Trailer, DataRecvd_1) of
        {yes, TEHeaders, Rem} ->
            {_, Reqs_1} = queue:out(Reqs),
            State_1 = maybe_accumulate_ce_data(State, <<TEHeaders/binary, $\r, $\n>>),
            State_2 = handle_response(CurReq,
                                      State_1#state{reqs = Reqs_1}),
            parse_response(Rem, reset_state(State_2));
        {no, Rem} ->
            accumulate_response(<<>>, State#state{chunk_size_buffer = Rem, deleted_crlf = false})
    end;
%% This clause extracts a chunk, given the size.
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked,
                         chunk_size = CSz,
                         recvd_chunk_size = Recvd_csz,
                         rep_buf_size = RepBufSz} = State) ->
    NeedBytes = CSz - Recvd_csz,
    DataLen = size(DataRecvd),
    do_trace("Recvd more data: size: ~p. NeedBytes: ~p~n", [DataLen, NeedBytes]),
    case DataLen >= NeedBytes of
        true ->
            {RemChunk, RemData} = split_binary(DataRecvd, NeedBytes),
            do_trace("Recvd another chunk...~p~n", [RemChunk]),
            do_trace("RemData -> ~p~n", [RemData]),
            case accumulate_response(RemChunk, State) of
                {error, Reason} ->
                    do_trace("Error accumulating response --> ~p~n", [Reason]),
                    {error, Reason};
                #state{} = State_1 ->
                    %% Chunk complete; next step is the inter-chunk CRLF
                    State_2 = State_1#state{chunk_size=tbd},
                    parse_11_response(RemData, State_2)
            end;
        false ->
            accumulate_response(DataRecvd,
                                State#state{rep_buf_size = RepBufSz + DataLen,
                                            recvd_chunk_size = Recvd_csz + DataLen})
    end;
%% This clause to extract the body when Content-Length is specified
parse_11_response(DataRecvd,
                  #state{content_length=CL, rep_buf_size=RepBufSz,
                         reqs=Reqs}=State) ->
    NeedBytes = CL - RepBufSz,
    DataLen = size(DataRecvd),
    case DataLen >= NeedBytes of
        true ->
            {RemBody, Rem} = split_binary(DataRecvd, NeedBytes),
            {_, Reqs_1} = queue:out(Reqs),
            State_1 = accumulate_response(RemBody, State),
            State_2 = handle_response(State_1#state.cur_req, State_1#state{reqs=Reqs_1}),
            State_3 = reset_state(State_2),
            parse_response(Rem, State_3);
        false ->
            accumulate_response(DataRecvd, State#state{rep_buf_size = (RepBufSz+DataLen)})
    end.
%% Append raw chunked-encoding framing bytes (size lines, CRLFs,
%% trailers) to the response buffer -- but only when the caller asked for
%% the chunked encoding to be preserved via preserve_chunked_encoding.
maybe_accumulate_ce_data(#state{cur_req = #request{preserve_chunked_encoding = false}} = State, _) ->
    State;
maybe_accumulate_ce_data(State, Data) ->
    accumulate_response(Data, State).
%% Deliver the completed response for a request to its caller (or stream
%% subscriber), cancel the request timer, and promote the next queued
%% request.  First clause: response was being saved to a file -- close
%% the fd and reply with {file, Name} (or the buffered body if no temp
%% file was ever created).  Second clause: ordinary in-memory response.
handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
                         response_format = Resp_format,
                         save_response_to_file = SaveResponseToFile,
                         tmp_file_name = TmpFilename,
                         tmp_file_fd = Fd,
                         options = Options,
                         timer_ref = ReqTimer
                        },
                #state{http_status_code = SCode,
                       status_line = Status_line,
                       raw_headers = Raw_headers,
                       reply_buffer = RepBuf,
                       recvd_headers = RespHeaders}=State) when SaveResponseToFile /= false ->
    Body = RepBuf,
    %% Close the temp file, if one was opened
    case Fd of
        undefined ->
            ok;
        _ ->
            ok = file:close(Fd)
    end,
    ResponseBody = case TmpFilename of
                       undefined ->
                           Body;
                       _ ->
                           {file, TmpFilename}
                   end,
    {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(RespHeaders, Raw_headers, Options),
    Reply = case get_value(give_raw_headers, Options, false) of
                true ->
                    {ok, Status_line, Raw_headers_1, ResponseBody};
                false ->
                    {ok, SCode, Resp_headers_1, ResponseBody}
            end,
    State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
    %% Eat any timeout that fired while we were finishing up
    cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
    set_cur_request(State_1);
handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
                         response_format = Resp_format,
                         options = Options, timer_ref = ReqTimer},
                #state{http_status_code = SCode,
                       status_line = Status_line,
                       raw_headers = Raw_headers,
                       recvd_headers = Resp_headers,
                       reply_buffer = RepBuf
                      } = State) ->
    Body = RepBuf,
    {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(Resp_headers, Raw_headers, Options),
    Reply = case get_value(give_raw_headers, Options, false) of
                true ->
                    {ok, Status_line, Raw_headers_1, Body};
                false ->
                    {ok, SCode, Resp_headers_1, Body}
            end,
    State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
    cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
    set_cur_request(State_1).
%% Reset the per-response parsing fields ahead of the next response on
%% this connection.  Connection-level fields (socket, host, pipeline
%% counters, request queue) are deliberately left untouched.
reset_state(State) ->
    State#state{status            = get_header,
                http_status_code  = undefined,
                status_line       = undefined,
                raw_headers       = undefined,
                recvd_headers     = [],
                reply_buffer      = <<>>,
                rep_buf_size      = 0,
                streamed_size     = 0,
                content_length    = undefined,
                transfer_encoding = undefined,
                chunk_size        = undefined,
                chunk_size_buffer = <<>>,
                deleted_crlf      = false
               }.
%% Make the request at the head of the queue the current one.  If that
%% request's caller controls the socket, re-arm {active, once} so the
%% next packet gets delivered to us.
set_cur_request(#state{reqs = Reqs, socket = Socket} = State) ->
    case queue:peek(Reqs) of
        empty ->
            State#state{cur_req = undefined};
        {value, #request{caller_controls_socket = true} = Next} ->
            do_setopts(Socket, [{active, once}], State),
            State#state{cur_req = Next};
        {value, Next} ->
            State#state{cur_req = Next}
    end.
%% Split a raw header blob into the status line and the remaining header
%% lines, then parse both.
parse_headers(Headers) ->
    {StatusLine, Rest} =
        case scan_crlf(Headers) of
            {yes, SL, T} -> {SL, T};
            {no, SL}     -> {SL, <<>>}
        end,
    parse_headers(StatusLine, Rest).
%% Parse the status line and header block of a response.  Returns
%% {HttpVsn, StatusCode, ParsedHeaders, StatusLine, RawHeaders}.  When
%% the status line cannot be parsed the response is treated as HTTP/0.9
%% (no status line at all), with the whole input taken as the body side.
%%
%% Fix: the fallback branch had lost its '_ ->' clause head (the comment
%% line stood alone where the pattern should be), which is a syntax
%% error; restored it.
parse_headers(StatusLine, Headers) ->
    Headers_1 = parse_headers_1(Headers),
    case parse_status_line(StatusLine) of
        {ok, HttpVsn, StatCode, _Msg} ->
            put(http_prot_vsn, HttpVsn),
            {HttpVsn, StatCode, Headers_1, StatusLine, Headers};
        _ ->
            %% An HTTP 0.9 response?
            put(http_prot_vsn, "HTTP/0.9"),
            {"HTTP/0.9", undefined, Headers, StatusLine, Headers}
    end.
% From RFC 2616
%
% HTTP/1.1 header field values can be folded onto multiple lines if
% the continuation line begins with a space or horizontal tab. All
% linear white space, including folding, has the same semantics as
% SP. A recipient MAY replace any linear white space with a single
% SP before interpreting the field value or forwarding the message
% downstream.
%% Split a raw header block into {Name, Value} pairs.  Folded
%% continuation lines (a line starting with SP or TAB continues the
%% previous header, RFC 2616 sec 2.2) are joined onto the previous
%% header with a single space.  Lines with no colon are dropped.
parse_headers_1(B) when is_binary(B) ->
    parse_headers_1(binary_to_list(B));
parse_headers_1(String) ->
    parse_headers_1(String, [], []).

parse_headers_1([$\n, H |T], [$\r | L], Acc) when H =:= 32;
                                                  H =:= $\t ->
    %% CRLF followed by SP/TAB: folded continuation of the current header
    parse_headers_1(lists:dropwhile(fun(X) ->
                                            is_whitespace(X)
                                    end, T), [32 | L], Acc);
parse_headers_1([$\n, H |T], L, Acc) when H =:= 32;
                                          H =:= $\t ->
    %% Bare LF followed by SP/TAB: folded continuation (tolerated)
    parse_headers_1(lists:dropwhile(fun(X) ->
                                            is_whitespace(X)
                                    end, T), [32 | L], Acc);
parse_headers_1([$\n|T], [$\r | L], Acc) ->
    %% CRLF: end of one header line
    case parse_header(lists:reverse(L)) of
        invalid ->
            parse_headers_1(T, [], Acc);
        NewHeader ->
            parse_headers_1(T, [], [NewHeader | Acc])
    end;
parse_headers_1([$\n|T], L, Acc) ->
    %% Bare LF line ending (tolerated)
    case parse_header(lists:reverse(L)) of
        invalid ->
            parse_headers_1(T, [], Acc);
        NewHeader ->
            parse_headers_1(T, [], [NewHeader | Acc])
    end;
parse_headers_1([H|T], L, Acc) ->
    parse_headers_1(T, [H|L], Acc);
parse_headers_1([], [], Acc) ->
    lists:reverse(Acc);
parse_headers_1([], L, Acc) ->
    %% Input ended without a trailing newline: flush the last line
    Acc_1 = case parse_header(lists:reverse(L)) of
                invalid ->
                    Acc;
                NewHeader ->
                    [NewHeader | Acc]
            end,
    lists:reverse(Acc_1).
%% Parse an HTTP status line ("HTTP/1.1 200 OK") into
%% {ok, HttpVsn, StatusCode, RestOfLine}, or the atom http_09 when the
%% line contains no space-separated version (an HTTP/0.9 response).
parse_status_line(Line) when is_binary(Line) ->
    parse_status_line(binary_to_list(Line));
parse_status_line(Line) ->
    parse_status_line(Line, get_prot_vsn, [], []).

parse_status_line([$\s | Rest], get_prot_vsn, ProtVsn, StatCode) ->
    %% First space: version done, start collecting the status code
    parse_status_line(Rest, get_status_code, ProtVsn, StatCode);
parse_status_line([$\s | Rest], get_status_code, ProtVsn, StatCode) ->
    {ok, lists:reverse(ProtVsn), lists:reverse(StatCode), Rest};
parse_status_line([], get_status_code, ProtVsn, StatCode) ->
    {ok, lists:reverse(ProtVsn), lists:reverse(StatCode), []};
parse_status_line([Ch | Rest], get_prot_vsn, ProtVsn, StatCode) ->
    parse_status_line(Rest, get_prot_vsn, [Ch | ProtVsn], StatCode);
parse_status_line([Ch | Rest], get_status_code, ProtVsn, StatCode) ->
    parse_status_line(Rest, get_status_code, ProtVsn, [Ch | StatCode]);
parse_status_line([], _, _, _) ->
    %% Ran out of input before finding a status code: HTTP/0.9
    http_09.
%% Split one "Name: Value" header line.  Spaces around the value are
%% stripped.  Returns 'invalid' when the line contains no colon.
parse_header(Line) ->
    parse_header(Line, []).

parse_header([$: | Value], NameAcc) ->
    {lists:reverse(NameAcc), string:strip(Value)};
parse_header([Ch | Rest], NameAcc) ->
    parse_header(Rest, [Ch | NameAcc]);
parse_header([], _NameAcc) ->
    invalid.
%% Look for the end-of-headers separator in Bin.  Handles the correct
%% \r\n\r\n sequence as well as the sloppy \n\n variant some servers
%% emit.  Returns {yes, Headers, Body} or {no, Bin}.
scan_header(Bin) ->
    case get_crlf_crlf_pos(Bin, 0) of
        no ->
            {no, Bin};
        {Tag, Pos} ->
            SepLen = case Tag of
                         yes       -> 4;   % \r\n\r\n
                         yes_dodgy -> 2    % \n\n
                     end,
            Headers = binary:part(Bin, 0, Pos),
            <<_:Pos/binary, _:SepLen/binary, Body/binary>> = Bin,
            {yes, Headers, Body}
    end.
%% Scan for the end of headers across two buffers: Bin1 is previously
%% buffered data (already known not to contain the separator), Bin2 is
%% newly received.  Only the last 4 bytes of Bin1 can combine with Bin2
%% to complete a \r\n\r\n separator, so earlier bytes are not rescanned.
scan_header(Bin1, Bin2) when size(Bin1) < 4 ->
    %% Too little prior data to hold part of a separator; join and rescan
    scan_header(<<Bin1/binary, Bin2/binary>>);
scan_header(Bin1, <<>>) ->
    scan_header(Bin1);
scan_header(Bin1, Bin2) ->
    Bin1_already_scanned_size = size(Bin1) - 4,
    <<Headers_prefix:Bin1_already_scanned_size/binary, Rest/binary>> = Bin1,
    Bin_to_scan = <<Rest/binary, Bin2/binary>>,
    case get_crlf_crlf_pos(Bin_to_scan, 0) of
        {yes, Pos} ->
            %% 4-byte \r\n\r\n separator
            {Headers_suffix, <<_:4/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
            {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
        {yes_dodgy, Pos} ->
            %% Sloppy 2-byte \n\n separator
            {Headers_suffix, <<_:2/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
            {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
        no ->
            {no, <<Bin1/binary, Bin2/binary>>}
    end.
%% Find the header/body separator.  Returns {yes, Pos} for \r\n\r\n,
%% {yes_dodgy, Pos} for a bare \n\n (seen from sloppy servers), or no.
get_crlf_crlf_pos(<<"\r\n\r\n", _/binary>>, Pos) -> {yes, Pos};
get_crlf_crlf_pos(<<"\n\n", _/binary>>, Pos)     -> {yes_dodgy, Pos};
get_crlf_crlf_pos(<<_, Rest/binary>>, Pos)       -> get_crlf_crlf_pos(Rest, Pos + 1);
get_crlf_crlf_pos(<<>>, _)                       -> no.
%% Split Bin at the first line terminator (\r\n or bare \n).  The
%% terminator itself is dropped from both returned parts.
scan_crlf(Bin) ->
    case get_crlf_pos(Bin) of
        no ->
            {no, Bin};
        {yes, TermLen, Pos} ->
            {Line, <<_:TermLen/binary, Rest/binary>>} = split_binary(Bin, Pos),
            {yes, Line, Rest}
    end.
%% Scan the logical concatenation of Bin1 (previously buffered, known
%% terminator-free except possibly its last byte) and Bin2 (new data)
%% without rescanning or copying the bulk of Bin1.
scan_crlf(Bin1, Bin2) when size(Bin1) < 2 ->
    %% Too little prior data to hold a split \r\n; just join and scan
    %% (covers Bin1 =:= <<>> as well, since <<>> ++ Bin2 =:= Bin2)
    scan_crlf(<<Bin1/binary, Bin2/binary>>);
scan_crlf(Bin1, Bin2) ->
    scan_crlf_1(size(Bin1) - 2, Bin1, Bin2).
%% Helper for scan_crlf/2: KeepLen bytes at the front of Bin1 cannot
%% contain the terminator, so only Bin1's last two bytes plus Bin2 are
%% actually scanned.
scan_crlf_1(KeepLen, Bin1, Bin2) ->
    <<Keep:KeepLen/binary, Tail/binary>> = Bin1,
    Scan = <<Tail/binary, Bin2/binary>>,
    case get_crlf_pos(Scan) of
        {yes, TermLen, Pos} ->
            {Line, <<_:TermLen/binary, Rest/binary>>} = split_binary(Scan, Pos),
            {yes, list_to_binary([Keep, Line]), Rest};
        no ->
            {no, list_to_binary([Bin1, Bin2])}
    end.
%% Find the first line terminator in Bin.  Returns {yes, TermLen, Pos}
%% where TermLen is 2 for \r\n and 1 for a bare \n, or 'no'.
get_crlf_pos(Bin) ->
    get_crlf_pos(Bin, 0).

get_crlf_pos(<<"\r\n", _/binary>>, Pos) -> {yes, 2, Pos};
get_crlf_pos(<<"\n", _/binary>>, Pos)   -> {yes, 1, Pos};
get_crlf_pos(<<_, Rest/binary>>, Pos)   -> get_crlf_pos(Rest, Pos + 1);
get_crlf_pos(<<>>, _)                   -> no.
%% Render a header value as a string/iolist; arbitrary terms fall back
%% to ~p formatting.
fmt_val(V) when is_list(V)    -> V;
fmt_val(V) when is_integer(V) -> integer_to_list(V);
fmt_val(V) when is_atom(V)    -> atom_to_list(V);
fmt_val(V)                    -> io_lib:format("~p", [V]).
%% The CR-LF terminator used after the request line and each header.
crnl() -> [$\r, $\n].
%% Map a request method atom to its HTTP request-line verb.  Covers the
%% RFC 2616 methods plus WebDAV, Subversion, UPnP and RFC 5789
%% extensions.  Unknown atoms crash, which is intentional.
method(M) ->
    case M of
        %% RFC 2616
        connect     -> "CONNECT";
        delete      -> "DELETE";
        get         -> "GET";
        head        -> "HEAD";
        options     -> "OPTIONS";
        post        -> "POST";
        put         -> "PUT";
        trace       -> "TRACE";
        %% WebDAV
        copy        -> "COPY";
        lock        -> "LOCK";
        mkcol       -> "MKCOL";
        move        -> "MOVE";
        propfind    -> "PROPFIND";
        proppatch   -> "PROPPATCH";
        search      -> "SEARCH";
        unlock      -> "UNLOCK";
        %% Subversion
        report      -> "REPORT";
        mkactivity  -> "MKACTIVITY";
        checkout    -> "CHECKOUT";
        merge       -> "MERGE";
        %% UPnP
        msearch     -> "MSEARCH";
        notify      -> "NOTIFY";
        subscribe   -> "SUBSCRIBE";
        unsubscribe -> "UNSUBSCRIBE";
        %% RFC 5789
        patch       -> "PATCH";
        purge       -> "PURGE"
    end.
% From RFC 2616
%%
% The chunked encoding modifies the body of a message in order to
% transfer it as a series of chunks, each with its own size indicator,
% followed by an OPTIONAL trailer containing entity-header
% fields. This allows dynamically produced content to be transferred
% along with the information necessary for the recipient to verify
% that it has received the full message.
%       Chunked-Body =  *chunk
%                       last-chunk
%                       trailer
%                       CRLF
%       chunk        =  chunk-size [ chunk-extension ] CRLF
%                       chunk-data CRLF
%       chunk-size   =  1*HEX
%       last-chunk   =  1*("0") [ chunk-extension ] CRLF
%       chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
%       chunk-ext-name = token
%       chunk-ext-val  = token | quoted-string
%       chunk-data     = chunk-size(OCTET)
%       trailer        = *(entity-header CRLF)
% The chunk-size field is a string of hex digits indicating the size
% of the chunk. The chunked encoding is ended by any chunk whose size
% is zero, followed by the trailer, which is terminated by an empty
% line.
%%
%% The parsing implemented here discards all chunk extensions. It also
%% strips trailing spaces from the chunk size fields as Apache 1.3.27 was
%% sending them.
%% Extract the chunk size from a chunk-size line.  Everything from a ';'
%% onwards (chunk extensions) is discarded, and whitespace is skipped --
%% Apache 1.3.27 was seen padding the size with trailing spaces.
parse_chunk_header(ChunkHeader) ->
    parse_chunk_header(ChunkHeader, []).

parse_chunk_header(<<$;, _/binary>>, Acc) ->
    %% Start of a chunk extension: the size digits are complete
    hexlist_to_integer(lists:reverse(Acc));
parse_chunk_header(<<>>, Acc) ->
    hexlist_to_integer(lists:reverse(Acc));
parse_chunk_header(<<Ch, Rest/binary>>, Acc) ->
    case is_whitespace(Ch) of
        true  -> parse_chunk_header(Rest, Acc);
        false -> parse_chunk_header(Rest, [Ch | Acc])
    end.
%% True for the ASCII whitespace characters space, CR, LF and tab.
is_whitespace(Ch) ->
    lists:member(Ch, [$\s, $\r, $\n, $\t]).
%% Push the response headers to a streaming subscriber, if any.  With
%% give_raw_headers the unparsed status line and raw header block are
%% sent, otherwise the parsed form.  'catch' guards against a subscriber
%% that has already died.
send_async_headers(_ReqId, undefined, _, _State) ->
    ok;
send_async_headers(ReqId, StreamTo, Give_raw_headers,
                   #state{status_line = Status_line, raw_headers = Raw_headers,
                          recvd_headers = Headers, http_status_code = StatCode,
                          cur_req = #request{options = Opts}
                         }) ->
    {Headers_1, Raw_headers_1} = maybe_add_custom_headers(Headers, Raw_headers, Opts),
    Msg = case Give_raw_headers of
              true  -> {ibrowse_async_headers, ReqId, Status_line, Raw_headers_1};
              false -> {ibrowse_async_headers, ReqId, StatCode, Headers_1}
          end,
    catch StreamTo ! Msg.
%% Append any caller-supplied add_custom_headers to both the parsed
%% header list and -- when it is present as a binary -- the raw header
%% block.
maybe_add_custom_headers(Headers, Raw_headers, Opts) ->
    Custom = get_value(add_custom_headers, Opts, []),
    Combined = Headers ++ Custom,
    case {Custom, is_binary(Raw_headers)} of
        {[_ | _], true} ->
            Custom_bin = list_to_binary(
                           string:join([[K, $:, V] || {K, V} <- Custom], "\r\n")),
            {Combined, <<Raw_headers/binary, "\r\n", Custom_bin/binary>>};
        _ ->
            {Combined, Raw_headers}
    end.
%% Convert a response body to the format (list | binary) the caller asked
%% for.  Non-body stream messages such as {chunk_start, _} and chunk_end
%% pass through untouched.
format_response_data(list, Body) when is_list(Body) ->
    flatten(Body);
format_response_data(list, Body) when is_binary(Body) ->
    binary_to_list(Body);
format_response_data(binary, Body) when is_list(Body) ->
    list_to_binary(Body);
format_response_data(_, Body) ->
    Body.
%% Deliver a reply for a finished request.  First two clauses handle a
%% synchronous caller (no stream_to): reply via gen_server:reply.  Third
%% clause handles a streaming subscriber with a successful response:
%% send any remaining body followed by ibrowse_async_response_end.  Last
%% clause streams any other message (e.g. an error) to the subscriber.
%% All clauses decrement the pipeline counter.
do_reply(State, From, undefined, _, Resp_format, {ok, St_code, Headers, Body}) ->
    Msg_1 = {ok, St_code, Headers, format_response_data(Resp_format, Body)},
    gen_server:reply(From, Msg_1),
    dec_pipeline_counter(State);
do_reply(State, From, undefined, _, _, Msg) ->
    gen_server:reply(From, Msg),
    dec_pipeline_counter(State);
do_reply(#state{prev_req_id = Prev_req_id} = State,
         _From, StreamTo, ReqId, Resp_format, {ok, _, _, Body}) ->
    State_1 = dec_pipeline_counter(State),
    case Body of
        [] ->
            ok;
        _ ->
            Body_1 = format_response_data(Resp_format, Body),
            catch StreamTo ! {ibrowse_async_response, ReqId, Body_1}
    end,
    catch StreamTo ! {ibrowse_async_response_end, ReqId},
    %% We don't want to delete the Req-id to Pid mapping straightaway
    %% as the client may send a stream_next message just while we are
    %% sending back this ibrowse_async_response_end message. If we
    %% deleted this mapping straightaway, the caller will see a
    %% {error, unknown_req_id} when it calls ibrowse:stream_next/1. To
    %% get around this, we store the req id, and clear it after the
    %% next request. If there are wierd combinations of stream,
    %% stream_once and sync requests on the same connection, it will
    %% take a while for the req_id-pid mapping to get cleared, but it
    %% should do no harm.
    ets:delete(ibrowse_stream, {req_id_pid, Prev_req_id}),
    State_1#state{prev_req_id = ReqId};
do_reply(State, _From, StreamTo, ReqId, Resp_format, Msg) ->
    State_1 = dec_pipeline_counter(State),
    Msg_1 = format_response_data(Resp_format, Msg),
    catch StreamTo ! {ibrowse_async_response, ReqId, Msg_1},
    State_1.
%% Send an intermediate streaming message (e.g. a body chunk) to the
%% subscriber, if there is one; 'catch' guards against a dead pid.
do_interim_reply(undefined, _, _ReqId, _Msg) ->
    ok;
do_interim_reply(StreamTo, Response_format, ReqId, Msg) ->
    Formatted = format_response_data(Response_format, Msg),
    catch StreamTo ! {ibrowse_async_response, ReqId, Formatted}.
%% Fail every outstanding request on this connection: both those already
%% sent (the reqs queue) and those still waiting for the SSL tunnel.
do_error_reply(#state{reqs = Reqs, tunnel_setup_queue = Tun_q} = State, Err) ->
    Fail_sent =
        fun(#request{from = From, stream_to = StreamTo, req_id = ReqId,
                     response_format = Resp_format}) ->
                ets:delete(ibrowse_stream, {req_id_pid, ReqId}),
                do_reply(State, From, StreamTo, ReqId, Resp_format, {error, Err})
        end,
    lists:foreach(Fail_sent, queue:to_list(Reqs)),
    Fail_queued =
        fun({From, _Url, _Headers, _Method, _Body, _Options, _Timeout}) ->
                do_reply(State, From, undefined, undefined, undefined, Err)
        end,
    lists:foreach(Fail_queued, Tun_q).
%% Reply to the current request with Reply, then fail every request
%% pipelined behind it with previous_request_failed.
fail_pipelined_requests(#state{reqs = Reqs, cur_req = CurReq} = State, Reply) ->
    #request{from = From, stream_to = StreamTo, req_id = ReqId,
             response_format = Resp_format} = CurReq,
    {_, Remaining} = queue:out(Reqs),
    State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
    do_error_reply(State_1#state{reqs = Remaining}, previous_request_failed).
%% Split a list after the first Count elements. Unlike lists:split/2,
%% this does not crash when Count exceeds the list length - the whole
%% list is returned as the first half and [] as the second.
split_list_at(Whole_list, Count) ->
    split_list_at(Whole_list, Count, []).

split_list_at(Remainder, 0, Taken) ->
    {lists:reverse(Taken), Remainder};
split_list_at([], _Count, Taken) ->
    {lists:reverse(Taken), []};
split_list_at([Head | Tail], Count, Taken) ->
    split_list_at(Tail, Count - 1, [Head | Taken]).
%% Convert a string of hex digits (e.g. "1aF") to its integer value by
%% folding left-to-right. An empty string yields 0. An invalid
%% character causes a function_clause error in to_ascii/1, as before.
hexlist_to_integer(Digits) ->
    lists:foldl(fun(Digit, Acc) ->
                        Acc * 16 + to_ascii(Digit)
                end, 0, Digits).

%% Numeric value of a single hex digit character (either case).
to_ascii(C) when C >= $0, C =< $9 -> C - $0;
to_ascii(C) when C >= $a, C =< $f -> C - $a + 10;
to_ascii(C) when C >= $A, C =< $F -> C - $A + 10.
%% Cancel a timer created with erlang:send_after/3. undefined means no
%% timer is active, so there is nothing to do. Always returns ok.
cancel_timer(undefined) ->
    ok;
cancel_timer(Timer_ref) ->
    _ = erlang:cancel_timer(Timer_ref),
    ok.

%% Cancel a timer and additionally flush Msg from the mailbox, in case
%% the timer fired just before it could be cancelled.
cancel_timer(Timer_ref, {eat_message, Msg}) ->
    ok = cancel_timer(Timer_ref),
    receive
        Msg ->
            ok
    after 0 ->
            ok
    end.
%% Generate a unique id for a request. erlang:now/0 is used because
%% successive calls return strictly increasing values, making the id
%% unique within this node.
%% NOTE(review): now/0 is deprecated in modern OTP; switching to
%% erlang:unique_integer/1 would need confirmation that no caller
%% depends on the {MegaSecs, Secs, MicroSecs} tuple shape of the id.
make_req_id() ->
    now().
%% Lower-case the ASCII letters A-Z in a string; all other characters
%% (including Latin-1/Unicode code points) pass through unchanged.
to_lower(Str) ->
    to_lower(Str, []).

to_lower([C | Rest], Acc) when C >= $A, C =< $Z ->
    to_lower(Rest, [C + ($a - $A) | Acc]);
to_lower([C | Rest], Acc) ->
    to_lower(Rest, [C | Acc]);
to_lower([], Acc) ->
    lists:reverse(Acc).
%% Remove this connection from the load balancer's ETS table (when it is
%% tracked by one) so no further requests are routed to it. The catch
%% makes this safe even if the table is already gone.
shutting_down(#state{lb_ets_tid = undefined}) ->
    ok;
shutting_down(#state{lb_ets_tid = Tid}) ->
    catch ets:delete(Tid, self()).
%% Bump this connection's pipeline-size counter in the load balancer
%% table so busier connections are deprioritised. Skipped when the
%% connection is closing or is not tracked by a load balancer.
inc_pipeline_counter(#state{is_closing = true} = State) ->
    State;
inc_pipeline_counter(#state{lb_ets_tid = undefined} = State) ->
    State;
inc_pipeline_counter(#state{cur_pipeline_size = Pipeline_size,
                            lb_ets_tid = Tid} = State) ->
    %% {Pos, Incr, Threshold, SetValue}: increment element 2 by 1,
    %% resetting to 9999 if the result would exceed 99999.
    update_counter(Tid, self(), {2, 1, 99999, 9999}),
    State#state{cur_pipeline_size = Pipeline_size + 1}.
%% Single choke point for counter updates on the load balancer ETS
%% table (see inc_pipeline_counter/1 and dec_pipeline_counter/1).
%% Args is passed straight through to ets:update_counter/3.
update_counter(Tid, Key, Args) ->
    ets:update_counter(Tid, Key, Args).
%% Decrement this connection's counters (elements 2 and 3, floored at 0)
%% in the load balancer table. The updates are wrapped in try/catch on
%% purpose: the table or this row may already have been removed during
%% shutdown, and that is harmless.
dec_pipeline_counter(#state{is_closing = true} = State) ->
    State;
dec_pipeline_counter(#state{lb_ets_tid = undefined} = State) ->
    State;
dec_pipeline_counter(#state{cur_pipeline_size = Pipeline_size,
                            lb_ets_tid = Tid} = State) ->
    try
        update_counter(Tid, self(), {2, -1, 0, 0}),
        update_counter(Tid, self(), {3, -1, 0, 0})
    catch
        _:_ ->
            ok
    end,
    State#state{cur_pipeline_size = Pipeline_size - 1}.
%% Normalise chardata: a flat string (first element an integer) is
%% returned as-is, a nested list of strings is flattened, and the empty
%% list maps to itself. Anything else is a function_clause error.
flatten([First | _] = Str) when is_integer(First) ->
    Str;
flatten([First | _] = Nested) when is_list(First) ->
    lists:flatten(Nested);
flatten([]) ->
    [].
%% Chunk size (in bytes) used when streaming the response body to the
%% caller. Falls back to ?DEFAULT_STREAM_CHUNK_SIZE unless the caller
%% supplied a positive stream_chunk_size option.
get_stream_chunk_size(Options) ->
    case lists:keyfind(stream_chunk_size, 1, Options) of
        {stream_chunk_size, Size} when Size > 0 ->
            Size;
        _ ->
            ?DEFAULT_STREAM_CHUNK_SIZE
    end.
%% (Re)arm the inactivity timer: cancel any existing timer first, then
%% start a new one if an integer timeout is configured.
set_inac_timer(State) ->
    cancel_timer(State#state.inactivity_timer_ref),
    set_inac_timer(State#state{inactivity_timer_ref = undefined},
                   get_inac_timeout(State)).

%% Only an integer timeout starts a timer; infinity (or any other
%% non-integer) leaves the connection without an inactivity timer.
set_inac_timer(State, Timeout) when is_integer(Timeout) ->
    Timer_ref = erlang:send_after(Timeout, self(), timeout),
    State#state{inactivity_timer_ref = Timer_ref};
set_inac_timer(State, _Timeout) ->
    State.
%% Inactivity timeout for this connection. While a request is in flight
%% its inactivity_timeout option wins (defaulting to infinity).
%% Otherwise fall back to the ibrowse config table, then the
%% application environment, and finally a hard-coded 10 seconds.
get_inac_timeout(#state{cur_req = #request{options = Opts}}) ->
    get_value(inactivity_timeout, Opts, infinity);
get_inac_timeout(#state{cur_req = undefined}) ->
    case ibrowse:get_config_value(inactivity_timeout, undefined) of
        Timeout when is_integer(Timeout) ->
            Timeout;
        _ ->
            case application:get_env(ibrowse, inactivity_timeout) of
                {ok, Timeout} when is_integer(Timeout), Timeout > 0 ->
                    Timeout;
                _ ->
                    10000
            end
    end.
%% Dump the outgoing request (request line + headers) to the trace log,
%% but only when tracing is enabled for this host:port.
trace_request(Req) ->
    case get(my_trace_flag) of
        true ->
            %% The binary conversion is only paid for when trace is on.
            do_trace("Sending request: ~n"
                     "--- Request Begin ---~n~s~n"
                     "--- Request End ---~n", [to_binary(Req)]);
        _ ->
            ok
    end.
%% Dump the outgoing request body to the trace log, but only when
%% tracing is enabled for this host:port and the body is small enough
%% (=< 1024 bytes) to be worth printing.
%%
%% Fix: the non-tracing clause now matches any non-true flag instead of
%% only false. get(my_trace_flag) returns undefined when the flag was
%% never put for this process, which previously raised a case_clause
%% error here; this also matches the catch-all used by trace_request/1.
trace_request_body(Body) ->
    case get(my_trace_flag) of
        true ->
            %% Avoid the binary conversion unless trace is on.
            NBody = to_binary(Body),
            %% byte_size/1 rather than size/1: NBody is always a binary.
            case byte_size(NBody) > 1024 of
                true ->
                    ok;
                false ->
                    do_trace("Sending request body: ~n"
                             "--- Request Body Begin ---~n~s~n"
                             "--- Request Body End ---~n", [NBody])
            end;
        _ ->
            ok
    end.
%% Coerce a string/iolist or binary into a binary.
to_binary(Data) when is_binary(Data) -> Data;
to_binary(Data) when is_list(Data)   -> list_to_binary(Data).
| null | https://raw.githubusercontent.com/davisp/couchdb/b0420f9006915149e81607615720f32f21c76725/src/ibrowse/ibrowse_http_client.erl | erlang | -------------------------------------------------------------------
File : ibrowse_http_client.erl
Description : The name says it all
-------------------------------------------------------------------
--------------------------------------------------------------------
Include files
--------------------------------------------------------------------
--------------------------------------------------------------------
External exports
gen_server callbacks
====================================================================
External functions
====================================================================
--------------------------------------------------------------------
Description: Starts the server
--------------------------------------------------------------------
====================================================================
Server functions
====================================================================
--------------------------------------------------------------------
Function: init/1
Description: Initiates the server
Returns: {ok, State} |
ignore |
{stop, Reason}
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Handling call messages
Returns: {reply, Reply, State} |
{stop, Reason, Reply, State} | (terminate/2 is called)
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
Received a request when the remote server has already sent us a
Connection: Close header
--------------------------------------------------------------------
Function: handle_cast/2
Description: Handling cast messages
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: handle_info/2
Description: Handling all non call/cast messages
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
do_error_reply(State, req_timedout),
--------------------------------------------------------------------
Function: terminate/2
Description: Shutdown the server
Returns: any (ignored by gen_server)
--------------------------------------------------------------------
--------------------------------------------------------------------
Func: code_change/3
Purpose: Convert process state when code is changed
Returns: {ok, NewState}
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Handles data recvd on the socket
--------------------------------------------------------------------
--------------------------------------------------------------------
Handles the case when the server closes the socket
--------------------------------------------------------------------
We check for IsClosing because this the server could have sent a
of response. There maybe requests pipelined which need a response.
We don't want the caller to specify certain options
source_descriptor() = fun_arity_0 |
{fun_arity_0} |
{fun_arity_1, term()}
error() = term()
Send a CONNECT request.
Upgrade to SSL connection
Then send request
Content-Length is already specified or Body is a
function or function/state pair
This (HEAD response with body) is not supposed
observed to send an "empty" body, but in a
No message body is expected. Server may send
response.
No message body is expected for these Status Codes.
RFC2616 - Sec 4.4
This clause determines the chunk size when given data from the beginning of the chunk
Do we have to preserve the chunk encoding when
streaming? NO. This should be transparent to the client
it efficient for the server.
not support Trailers in the Chunked Transfer encoding. Any trailer
received is silently discarded.
This clause extracts a chunk, given the size.
This clause to extract the body when Content-Length is specified
the continuation line begins with a space or horizontal tab. All
linear white space, including folding, has the same semantics as
downstream.
webdav
subversion %%
upnp
rfc-5789
The chunked encoding modifies the body of a message in order to
transfer it as a series of chunks, each with its own size indicator,
fields. This allows dynamically produced content to be transferred
along with the information necessary for the recipient to verify
that it has received the full message.
Chunked-Body = *chunk
last-chunk
trailer
CRLF
chunk = chunk-size [ chunk-extension ] CRLF
chunk-data CRLF
last-chunk = 1*("0") [ chunk-extension ] CRLF
chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
chunk-ext-name = token
chunk-ext-val = token | quoted-string
chunk-data = chunk-size(OCTET)
trailer = *(entity-header CRLF)
The chunk-size field is a string of hex digits indicating the size
of the chunk. The chunked encoding is ended by any chunk whose size
line.
The parsing implemented here discards all chunk extensions. It also
sending them.
This is to cater for sending messages such as
{chunk_start, _}, chunk_end etc
as the client may send a stream_next message just while we are
sending back this ibrowse_async_response_end message. If we
deleted this mapping straightaway, the caller will see a
next request. If there are wierd combinations of stream,
stream_once and sync requests on the same connection, it will
take a while for the req_id-pid mapping to get cleared, but it
should do no harm.
Avoid the binary operations if trace is not on...
Avoid the binary operations if trace is not on... | Author : >
Created : 11 Oct 2003 by >
-module(ibrowse_http_client).
-behaviour(gen_server).
-export([
start_link/1,
start/1,
stop/1,
send_req/7
]).
-ifdef(debug).
-compile(export_all).
-endif.
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-include("ibrowse.hrl").
-include_lib("kernel/include/inet.hrl").
-record(state, {host, port, connect_timeout,
inactivity_timer_ref,
use_proxy = false, proxy_auth_digest,
ssl_options = [], is_ssl = false, socket,
proxy_tunnel_setup = false,
tunnel_setup_queue = [],
reqs=queue:new(), cur_req, status=idle, http_status_code,
reply_buffer = <<>>, rep_buf_size=0, streamed_size = 0,
recvd_headers=[],
status_line, raw_headers,
is_closing, content_length,
deleted_crlf = false, transfer_encoding,
chunk_size, chunk_size_buffer = <<>>,
recvd_chunk_size, interim_reply_sent = false,
lb_ets_tid, cur_pipeline_size = 0, prev_req_id
}).
-record(request, {url, method, options, from,
stream_to, caller_controls_socket = false,
caller_socket_options = [],
req_id,
stream_chunk_size,
save_response_to_file = false,
tmp_file_name, tmp_file_fd, preserve_chunked_encoding,
response_format, timer_ref}).
-import(ibrowse_lib, [
get_value/2,
get_value/3,
do_trace/2
]).
-define(DEFAULT_STREAM_CHUNK_SIZE, 1024*1024).
-define(dec2hex(X), erlang:integer_to_list(X, 16)).
Function : start_link/0
start(Args) ->
gen_server:start(?MODULE, Args, []).
start_link(Args) ->
gen_server:start_link(?MODULE, Args, []).
stop(Conn_pid) ->
case catch gen_server:call(Conn_pid, stop) of
{'EXIT', {timeout, _}} ->
exit(Conn_pid, kill),
ok;
_ ->
ok
end.
send_req(Conn_Pid, Url, Headers, Method, Body, Options, Timeout) ->
gen_server:call(
Conn_Pid,
{send_req, {Url, Headers, Method, Body, Options, Timeout}}, Timeout).
{ ok , State , Timeout } |
init({Lb_Tid, #url{host = Host, port = Port}, {SSLOptions, Is_ssl}}) ->
State = #state{host = Host,
port = Port,
ssl_options = SSLOptions,
is_ssl = Is_ssl,
lb_ets_tid = Lb_Tid},
put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
{ok, set_inac_timer(State)};
init(Url) when is_list(Url) ->
case catch ibrowse_lib:parse_url(Url) of
#url{protocol = Protocol} = Url_rec ->
init({undefined, Url_rec, {[], Protocol == https}});
{'EXIT', _} ->
{error, invalid_url}
end;
init({Host, Port}) ->
State = #state{host = Host,
port = Port},
put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
{ok, set_inac_timer(State)}.
Function : handle_call/3
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call({send_req, _}, _From, #state{is_closing = true} = State) ->
{reply, {error, connection_closing}, State};
handle_call({send_req, {Url, Headers, Method, Body, Options, Timeout}},
From, State) ->
send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State);
handle_call(stop, _From, State) ->
do_close(State),
do_error_reply(State, closing_on_request),
{stop, normal, ok, State};
handle_call(Request, _From, State) ->
Reply = {unknown_request, Request},
{reply, Reply, State}.
Returns : { noreply , State } |
{ noreply , State , Timeout } |
handle_cast(_Msg, State) ->
{noreply, State}.
Returns : { noreply , State } |
{ noreply , State , Timeout } |
handle_info({tcp, _Sock, Data}, #state{status = Status} = State) ->
do_trace("Data recvd in state: ~p. Size: ~p. ~p~n~n", [Status, size(Data), Data]),
handle_sock_data(Data, State);
handle_info({ssl, _Sock, Data}, State) ->
handle_sock_data(Data, State);
handle_info({stream_next, Req_id}, #state{socket = Socket,
cur_req = #request{req_id = Req_id}} = State) ->
do_setopts(Socket, [{active, once}], State),
{noreply, set_inac_timer(State)};
handle_info({stream_next, _Req_id}, State) ->
_Cur_req_id = case State#state.cur_req of
#request{req_id = Cur} ->
Cur;
_ ->
undefined
end,
{noreply, State};
handle_info({stream_close, _Req_id}, State) ->
shutting_down(State),
do_close(State),
do_error_reply(State, closing_on_request),
{stop, normal, State};
handle_info({tcp_closed, _Sock}, State) ->
do_trace("TCP connection closed by peer!~n", []),
handle_sock_closed(State),
{stop, normal, State};
handle_info({ssl_closed, _Sock}, State) ->
do_trace("SSL connection closed by peer!~n", []),
handle_sock_closed(State),
{stop, normal, State};
handle_info({tcp_error, _Sock, Reason}, State) ->
do_trace("Error on connection to ~1000.p:~1000.p -> ~1000.p~n",
[State#state.host, State#state.port, Reason]),
handle_sock_closed(State),
{stop, normal, State};
handle_info({ssl_error, _Sock, Reason}, State) ->
do_trace("Error on SSL connection to ~1000.p:~1000.p -> ~1000.p~n",
[State#state.host, State#state.port, Reason]),
handle_sock_closed(State),
{stop, normal, State};
handle_info({req_timedout, From}, State) ->
case lists:keymember(From, #request.from, queue:to_list(State#state.reqs)) of
false ->
{noreply, State};
true ->
shutting_down(State),
{stop, normal, State}
end;
handle_info(timeout, State) ->
do_trace("Inactivity timeout triggered. Shutting down connection~n", []),
shutting_down(State),
do_error_reply(State, req_timedout),
{stop, normal, State};
handle_info({trace, Bool}, State) ->
put(my_trace_flag, Bool),
{noreply, State};
handle_info(Info, State) ->
io:format("Unknown message recvd for ~1000.p:~1000.p -> ~p~n",
[State#state.host, State#state.port, Info]),
io:format("Recvd unknown message ~p when in state: ~p~n", [Info, State]),
{noreply, State}.
terminate(_Reason, State) ->
do_close(State),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
handle_sock_data(Data, #state{status=idle}=State) ->
do_trace("Data recvd on socket in state idle!. ~1000.p~n", [Data]),
shutting_down(State),
do_error_reply(State, data_in_status_idle),
do_close(State),
{stop, normal, State};
handle_sock_data(Data, #state{status = get_header}=State) ->
case parse_response(Data, State) of
{error, _Reason} ->
shutting_down(State),
{stop, normal, State};
#state{socket = Socket, status = Status, cur_req = CurReq} = State_1 ->
case {Status, CurReq} of
{get_header, #request{caller_controls_socket = true}} ->
do_setopts(Socket, [{active, once}], State_1);
_ ->
active_once(State_1)
end,
{noreply, set_inac_timer(State_1)}
end;
handle_sock_data(Data, #state{status = get_body,
socket = Socket,
content_length = CL,
http_status_code = StatCode,
recvd_headers = Headers,
chunk_size = CSz} = State) ->
case (CL == undefined) and (CSz == undefined) of
true ->
case accumulate_response(Data, State) of
{error, Reason} ->
shutting_down(State),
fail_pipelined_requests(State,
{error, {Reason, {stat_code, StatCode}, Headers}}),
{stop, normal, State};
State_1 ->
active_once(State_1),
State_2 = set_inac_timer(State_1),
{noreply, State_2}
end;
_ ->
case parse_11_response(Data, State) of
{error, Reason} ->
shutting_down(State),
fail_pipelined_requests(State,
{error, {Reason, {stat_code, StatCode}, Headers}}),
{stop, normal, State};
#state{cur_req = #request{caller_controls_socket = Ccs},
interim_reply_sent = Irs} = State_1 ->
case Irs of
true ->
active_once(State_1);
false when Ccs == true ->
do_setopts(Socket, [{active, once}], State);
false ->
active_once(State_1)
end,
State_2 = State_1#state{interim_reply_sent = false},
case Ccs of
true ->
cancel_timer(State_2#state.inactivity_timer_ref, {eat_message, timeout}),
{noreply, State_2#state{inactivity_timer_ref = undefined}};
_ ->
{noreply, set_inac_timer(State_2)}
end;
State_1 ->
active_once(State_1),
State_2 = set_inac_timer(State_1),
{noreply, State_2}
end
end.
accumulate_response(Data,
#state{
cur_req = #request{save_response_to_file = Srtf,
tmp_file_fd = undefined} = CurReq,
http_status_code=[$2 | _]}=State) when Srtf /= false ->
TmpFilename = make_tmp_filename(Srtf),
Mode = file_mode(Srtf),
case file:open(TmpFilename, [Mode, delayed_write, raw]) of
{ok, Fd} ->
accumulate_response(Data, State#state{
cur_req = CurReq#request{
tmp_file_fd = Fd,
tmp_file_name = TmpFilename}});
{error, Reason} ->
{error, {file_open_error, Reason}}
end;
accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
tmp_file_fd = Fd},
transfer_encoding=chunked,
reply_buffer = Reply_buf,
http_status_code=[$2 | _]
} = State) when Srtf /= false ->
case file:write(Fd, [Reply_buf, Data]) of
ok ->
State#state{reply_buffer = <<>>};
{error, Reason} ->
{error, {file_write_error, Reason}}
end;
accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
tmp_file_fd = Fd},
reply_buffer = RepBuf,
http_status_code=[$2 | _]
} = State) when Srtf /= false ->
case file:write(Fd, [RepBuf, Data]) of
ok ->
State#state{reply_buffer = <<>>};
{error, Reason} ->
{error, {file_write_error, Reason}}
end;
accumulate_response(Data, #state{reply_buffer = RepBuf,
rep_buf_size = RepBufSize,
streamed_size = Streamed_size,
cur_req = CurReq}=State) ->
#request{stream_to = StreamTo,
req_id = ReqId,
stream_chunk_size = Stream_chunk_size,
response_format = Response_format,
caller_controls_socket = Caller_controls_socket} = CurReq,
RepBuf_1 = <<RepBuf/binary, Data/binary>>,
New_data_size = RepBufSize - Streamed_size,
case StreamTo of
undefined ->
State#state{reply_buffer = RepBuf_1};
_ when Caller_controls_socket == true ->
do_interim_reply(StreamTo, Response_format, ReqId, RepBuf_1),
State#state{reply_buffer = <<>>,
interim_reply_sent = true,
streamed_size = Streamed_size + size(RepBuf_1)};
_ when New_data_size >= Stream_chunk_size ->
{Stream_chunk, Rem_data} = split_binary(RepBuf_1, Stream_chunk_size),
do_interim_reply(StreamTo, Response_format, ReqId, Stream_chunk),
State_1 = State#state{
reply_buffer = <<>>,
interim_reply_sent = true,
streamed_size = Streamed_size + Stream_chunk_size},
case Rem_data of
<<>> ->
State_1;
_ ->
accumulate_response(Rem_data, State_1)
end;
_ ->
State#state{reply_buffer = RepBuf_1}
end.
make_tmp_filename(true) ->
DownloadDir = ibrowse:get_config_value(download_dir, filename:absname("./")),
{A,B,C} = now(),
filename:join([DownloadDir,
"ibrowse_tmp_file_"++
integer_to_list(A) ++
integer_to_list(B) ++
integer_to_list(C)]);
make_tmp_filename(File) when is_list(File) ->
File;
make_tmp_filename({append, File}) when is_list(File) ->
File.
file_mode({append, _File}) -> append;
file_mode(_Srtf) -> write.
handle_sock_closed(#state{status=get_header} = State) ->
shutting_down(State),
do_error_reply(State, connection_closed);
handle_sock_closed(#state{cur_req=undefined} = State) ->
shutting_down(State);
Connection - Close header and has closed the socket to indicate end
handle_sock_closed(#state{reply_buffer = Buf, reqs = Reqs, http_status_code = SC,
is_closing = IsClosing,
cur_req = #request{tmp_file_name=TmpFilename,
tmp_file_fd=Fd} = CurReq,
status = get_body,
recvd_headers = Headers,
status_line = Status_line,
raw_headers = Raw_headers
}=State) ->
#request{from=From, stream_to=StreamTo, req_id=ReqId,
response_format = Resp_format,
options = Options} = CurReq,
case IsClosing of
true ->
{_, Reqs_1} = queue:out(Reqs),
Body = case TmpFilename of
undefined ->
Buf;
_ ->
ok = file:close(Fd),
{file, TmpFilename}
end,
Reply = case get_value(give_raw_headers, Options, false) of
true ->
{ok, Status_line, Raw_headers, Body};
false ->
{ok, SC, Headers, Buf}
end,
State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
ok = do_error_reply(State_1#state{reqs = Reqs_1}, connection_closed),
State_1;
_ ->
ok = do_error_reply(State, connection_closed),
State
end.
do_connect(Host, Port, Options, #state{is_ssl = true,
use_proxy = false,
ssl_options = SSLOptions},
Timeout) ->
ssl:connect(Host, Port, get_sock_options(Host, Options, SSLOptions), Timeout);
do_connect(Host, Port, Options, _State, Timeout) ->
gen_tcp:connect(Host, Port, get_sock_options(Host, Options, []), Timeout).
get_sock_options(Host, Options, SSLOptions) ->
Caller_socket_options = get_value(socket_options, Options, []),
Ipv6Options = case is_ipv6_host(Host) of
true ->
[inet6];
false ->
[]
end,
Other_sock_options = filter_sock_options(SSLOptions ++ Caller_socket_options ++ Ipv6Options),
case lists:keysearch(nodelay, 1, Other_sock_options) of
false ->
[{nodelay, true}, binary, {active, false} | Other_sock_options];
{value, _} ->
[binary, {active, false} | Other_sock_options]
end.
is_ipv6_host(Host) ->
case inet_parse:address(Host) of
{ok, {_, _, _, _, _, _, _, _}} ->
true;
{ok, {_, _, _, _}} ->
false;
_ ->
case inet:gethostbyname(Host) of
{ok, #hostent{h_addrtype = inet6}} ->
true;
_ ->
false
end
end.
filter_sock_options(Opts) ->
lists:filter(fun({active, _}) ->
false;
({packet, _}) ->
false;
(list) ->
false;
(_) ->
true
end, Opts).
do_send(Req, #state{socket = Sock,
is_ssl = true,
use_proxy = true,
proxy_tunnel_setup = Pts}) when Pts /= done -> gen_tcp:send(Sock, Req);
do_send(Req, #state{socket = Sock, is_ssl = true}) -> ssl:send(Sock, Req);
do_send(Req, #state{socket = Sock, is_ssl = false}) -> gen_tcp:send(Sock, Req).
do_send_body(Sock::socket_descriptor ( ) , Source::source_descriptor ( ) , IsSSL::boolean ( ) ) - > ok | error ( )
do_send_body(Source, State, TE) when is_function(Source) ->
do_send_body({Source}, State, TE);
do_send_body({Source}, State, TE) when is_function(Source) ->
do_send_body1(Source, Source(), State, TE);
do_send_body({Source, Source_state}, State, TE) when is_function(Source) ->
do_send_body1(Source, Source(Source_state), State, TE);
do_send_body(Body, State, _TE) ->
do_send(Body, State).
do_send_body1(Source, Resp, State, TE) ->
case Resp of
{ok, Data} when Data == []; Data == <<>> ->
do_send_body({Source}, State, TE);
{ok, Data} ->
do_send(maybe_chunked_encode(Data, TE), State),
do_send_body({Source}, State, TE);
{ok, Data, New_source_state} when Data == []; Data == <<>> ->
do_send_body({Source, New_source_state}, State, TE);
{ok, Data, New_source_state} ->
do_send(maybe_chunked_encode(Data, TE), State),
do_send_body({Source, New_source_state}, State, TE);
eof when TE == true ->
do_send(<<"0\r\n\r\n">>, State),
ok;
eof ->
ok;
Err ->
Err
end.
maybe_chunked_encode(Data, false) ->
Data;
maybe_chunked_encode(Data, true) ->
[?dec2hex(iolist_size(Data)), "\r\n", Data, "\r\n"].
do_close(#state{socket = undefined}) -> ok;
do_close(#state{socket = Sock,
is_ssl = true,
use_proxy = true,
proxy_tunnel_setup = Pts
}) when Pts /= done -> catch gen_tcp:close(Sock);
do_close(#state{socket = Sock, is_ssl = true}) -> catch ssl:close(Sock);
do_close(#state{socket = Sock, is_ssl = false}) -> catch gen_tcp:close(Sock).
active_once(#state{cur_req = #request{caller_controls_socket = true}}) ->
ok;
active_once(#state{socket = Socket} = State) ->
do_setopts(Socket, [{active, once}], State).
do_setopts(_Sock, [], _) -> ok;
do_setopts(Sock, Opts, #state{is_ssl = true,
use_proxy = true,
proxy_tunnel_setup = Pts}
) when Pts /= done -> inet:setopts(Sock, Opts);
do_setopts(Sock, Opts, #state{is_ssl = true}) -> ssl:setopts(Sock, Opts);
do_setopts(Sock, Opts, _) -> inet:setopts(Sock, Opts).
check_ssl_options(Options, State) ->
case get_value(is_ssl, Options, false) of
false ->
State;
true ->
State#state{is_ssl=true, ssl_options=get_value(ssl_options, Options)}
end.
send_req_1(From,
#url{host = Host,
port = Port} = Url,
Headers, Method, Body, Options, Timeout,
#state{socket = undefined} = State) ->
{Host_1, Port_1, State_1} =
case get_value(proxy_host, Options, false) of
false ->
{Host, Port, State};
PHost ->
ProxyUser = get_value(proxy_user, Options, []),
ProxyPassword = get_value(proxy_password, Options, []),
Digest = http_auth_digest(ProxyUser, ProxyPassword),
{PHost, get_value(proxy_port, Options, 80),
State#state{use_proxy = true,
proxy_auth_digest = Digest}}
end,
State_2 = check_ssl_options(Options, State_1),
do_trace("Connecting...~n", []),
Conn_timeout = get_value(connect_timeout, Options, Timeout),
case do_connect(Host_1, Port_1, Options, State_2, Conn_timeout) of
{ok, Sock} ->
do_trace("Connected! Socket: ~1000.p~n", [Sock]),
State_3 = State_2#state{socket = Sock,
connect_timeout = Conn_timeout},
send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State_3);
Err ->
shutting_down(State_2),
do_trace("Error connecting. Reason: ~1000.p~n", [Err]),
gen_server:reply(From, {error, {conn_failed, Err}}),
{stop, normal, State_2}
end;
Wait for 200 OK
send_req_1(From,
#url{
host = Server_host,
port = Server_port
} = Url,
Headers, Method, Body, Options, Timeout,
#state{
proxy_tunnel_setup = false,
use_proxy = true,
is_ssl = true} = State) ->
Ref = case Timeout of
infinity ->
undefined;
_ ->
erlang:send_after(Timeout, self(), {req_timedout, From})
end,
NewReq = #request{
method = connect,
preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false),
options = Options,
timer_ref = Ref
},
State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
Pxy_auth_headers = maybe_modify_headers(Url, Method, Options, [], State_1),
Path = [Server_host, $:, integer_to_list(Server_port)],
{Req, Body_1} = make_request(connect, Pxy_auth_headers,
Path, Path,
[], Options, State_1, undefined),
TE = is_chunked_encoding_specified(Options),
trace_request(Req),
case do_send(Req, State) of
ok ->
case do_send_body(Body_1, State_1, TE) of
ok ->
trace_request_body(Body_1),
active_once(State_1),
State_1_1 = inc_pipeline_counter(State_1),
State_2 = State_1_1#state{status = get_header,
cur_req = NewReq,
proxy_tunnel_setup = in_progress,
tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout}]},
State_3 = set_inac_timer(State_2),
{noreply, State_3};
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end;
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end;
send_req_1(From, Url, Headers, Method, Body, Options, Timeout,
#state{proxy_tunnel_setup = in_progress,
tunnel_setup_queue = Q} = State) ->
do_trace("Queued SSL request awaiting tunnel setup: ~n"
"URL : ~s~n"
"Method : ~p~n"
"Headers : ~p~n", [Url, Method, Headers]),
{noreply, State#state{tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout} | Q]}};
send_req_1(From,
#url{abspath = AbsPath,
path = RelPath} = Url,
Headers, Method, Body, Options, Timeout,
#state{status = Status,
socket = Socket} = State) ->
cancel_timer(State#state.inactivity_timer_ref, {eat_message, timeout}),
ReqId = make_req_id(),
Resp_format = get_value(response_format, Options, list),
Caller_socket_options = get_value(socket_options, Options, []),
{StreamTo, Caller_controls_socket} =
case get_value(stream_to, Options, undefined) of
{Caller, once} when is_pid(Caller) or
is_atom(Caller) ->
Async_pid_rec = {{req_id_pid, ReqId}, self()},
true = ets:insert(ibrowse_stream, Async_pid_rec),
{Caller, true};
undefined ->
{undefined, false};
Caller when is_pid(Caller) or
is_atom(Caller) ->
{Caller, false};
Stream_to_inv ->
exit({invalid_option, {stream_to, Stream_to_inv}})
end,
SaveResponseToFile = get_value(save_response_to_file, Options, false),
Ref = case Timeout of
infinity ->
undefined;
_ ->
erlang:send_after(Timeout, self(), {req_timedout, From})
end,
NewReq = #request{url = Url,
method = Method,
stream_to = StreamTo,
caller_controls_socket = Caller_controls_socket,
caller_socket_options = Caller_socket_options,
options = Options,
req_id = ReqId,
save_response_to_file = SaveResponseToFile,
stream_chunk_size = get_stream_chunk_size(Options),
response_format = Resp_format,
from = From,
preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false),
timer_ref = Ref
},
State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
Headers_1 = maybe_modify_headers(Url, Method, Options, Headers, State_1),
{Req, Body_1} = make_request(Method,
Headers_1,
AbsPath, RelPath, Body, Options, State_1,
ReqId),
trace_request(Req),
do_setopts(Socket, Caller_socket_options, State_1),
TE = is_chunked_encoding_specified(Options),
case do_send(Req, State_1) of
ok ->
case do_send_body(Body_1, State_1, TE) of
ok ->
trace_request_body(Body_1),
State_2 = inc_pipeline_counter(State_1),
active_once(State_2),
State_3 = case Status of
idle ->
State_2#state{status = get_header,
cur_req = NewReq};
_ ->
State_2
end,
case StreamTo of
undefined ->
ok;
_ ->
gen_server:reply(From, {ibrowse_req_id, ReqId})
end,
State_4 = set_inac_timer(State_3),
{noreply, State_4};
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end;
Err ->
shutting_down(State_1),
do_trace("Send failed... Reason: ~p~n", [Err]),
gen_server:reply(From, {error, {send_failed, Err}}),
{stop, normal, State_1}
end.
maybe_modify_headers(#url{}, connect, _, Headers, State) ->
add_proxy_auth_headers(State, Headers);
maybe_modify_headers(#url{host = Host, port = Port} = Url,
_Method,
Options, Headers, State) ->
case get_value(headers_as_is, Options, false) of
false ->
Headers_1 = add_auth_headers(Url, Options, Headers, State),
HostHeaderValue = case lists:keysearch(host_header, 1, Options) of
false ->
case Port of
80 -> Host;
443 -> Host;
_ -> [Host, ":", integer_to_list(Port)]
end;
{value, {_, Host_h_val}} ->
Host_h_val
end,
[{"Host", HostHeaderValue} | Headers_1];
true ->
Headers
end.
add_auth_headers(#url{username = User,
password = UPw},
Options,
Headers,
State) ->
Headers_1 = case User of
undefined ->
case get_value(basic_auth, Options, undefined) of
undefined ->
Headers;
{U,P} ->
[{"Authorization", ["Basic ", http_auth_digest(U, P)]} | Headers]
end;
_ ->
[{"Authorization", ["Basic ", http_auth_digest(User, UPw)]} | Headers]
end,
add_proxy_auth_headers(State, Headers_1).
add_proxy_auth_headers(#state{use_proxy = false}, Headers) ->
Headers;
add_proxy_auth_headers(#state{proxy_auth_digest = []}, Headers) ->
Headers;
add_proxy_auth_headers(#state{proxy_auth_digest = Auth_digest}, Headers) ->
[{"Proxy-Authorization", ["Basic ", Auth_digest]} | Headers].
http_auth_digest([], []) ->
[];
http_auth_digest(Username, Password) ->
ibrowse_lib:encode_base64(Username ++ [$: | Password]).
%% Assemble the on-the-wire request: returns {Request_iolist, Body_1}
%% where Request_iolist is "METHOD URI HTTP/x.y\r\nHeaders\r\n".
%% Normalises header names, fills in Content-Length when absent,
%% switches to chunked transfer-encoding when requested, and chooses
%% between the absolute and relative request URI (absolute needed when
%% going through a non-SSL proxy or when use_absolute_uri is set).
make_request(Method, Headers, AbsPath, RelPath, Body, Options,
             #state{use_proxy = UseProxy, is_ssl = Is_ssl}, ReqId) ->
    HttpVsn = http_vsn_string(get_value(http_vsn, Options, {1,1})),
    %% Normalise each header into {lowercase_name, original_name, value}
    %% so case-insensitive lookups can be done while preserving the
    %% original spelling on the wire.
    Fun1 = fun({X, Y}) when is_atom(X) ->
                   {to_lower(atom_to_list(X)), X, Y};
              ({X, Y}) when is_list(X) ->
                   {to_lower(X), X, Y}
           end,
    Headers_0 = [Fun1(X) || X <- Headers],
    Headers_1 =
        case lists:keysearch("content-length", 1, Headers_0) of
            false when (Body =:= [] orelse Body =:= <<>>) andalso
                       (Method =:= post orelse Method =:= put) ->
                %% POST/PUT with an empty body still needs an explicit
                %% Content-Length: 0.
                [{"content-length", "Content-Length", "0"} | Headers_0];
            false when is_binary(Body) orelse is_list(Body) ->
                [{"content-length", "Content-Length", integer_to_list(iolist_size(Body))} | Headers_0];
            _ ->
                %% Caller supplied a Content-Length, or the body is a
                %% fun/tuple (generated on the fly).
                Headers_0
        end,
    {Headers_2, Body_1} =
        case is_chunked_encoding_specified(Options) of
            false ->
                {[{Y, Z} || {_, Y, Z} <- Headers_1], Body};
            true ->
                %% 5120 is the default chunk size when the caller asked
                %% for chunked encoding without giving a size.
                Chunk_size_1 = case get_value(transfer_encoding, Options) of
                                   chunked ->
                                       5120;
                                   {chunked, Chunk_size} ->
                                       Chunk_size
                               end,
                %% Content-Length must not be sent alongside
                %% Transfer-Encoding: chunked.
                {[{Y, Z} || {X, Y, Z} <- Headers_1,
                            X /= "content-length"] ++
                 [{"Transfer-Encoding", "chunked"}],
                 chunk_request_body(Body, Chunk_size_1)}
        end,
    Headers_3 = case lists:member({include_ibrowse_req_id, true}, Options) of
                    true ->
                        [{"x-ibrowse-request-id", io_lib:format("~1000.p",[ReqId])} | Headers_2];
                    false ->
                        Headers_2
                end,
    Headers_4 = cons_headers(Headers_3),
    Uri = case get_value(use_absolute_uri, Options, false) or UseProxy of
              true ->
                  case Is_ssl of
                      true ->
                          %% Inside a CONNECT tunnel the relative path
                          %% is used even when proxying.
                          RelPath;
                      false ->
                          AbsPath
                  end;
              false ->
                  RelPath
          end,
    {[method(Method), " ", Uri, " ", HttpVsn, crnl(), Headers_4, crnl()], Body_1}.
%% True when the caller requested chunked transfer-encoding, either as
%% the bare atom 'chunked' or as {chunked, ChunkSize}.  Any other
%% non-false transfer_encoding value raises case_clause.
is_chunked_encoding_specified(Options) ->
    case get_value(transfer_encoding, Options, false) of
        chunked ->
            true;
        {chunked, _Chunk_size} ->
            true;
        false ->
            false
    end.
%% Render an HTTP protocol version tuple as its request-line string.
http_vsn_string(Version) ->
    case Version of
        {0,9} -> "HTTP/0.9";
        {1,0} -> "HTTP/1.0";
        {1,1} -> "HTTP/1.1"
    end.
%% Expand the "convenience" header atoms (basic_auth, cookie,
%% content_length, content_type) into real HTTP headers, passing every
%% other entry through unchanged, then hand the result to
%% encode_headers/1 to produce the wire-format iolist.
cons_headers(Headers) ->
    cons_headers(Headers, []).

cons_headers([], Acc) ->
    encode_headers(Acc);
cons_headers([{basic_auth, {U,P}} | T], Acc) ->
    cons_headers(T, [{"Authorization",
                      ["Basic ", ibrowse_lib:encode_base64(U++":"++P)]} | Acc]);
cons_headers([{cookie, Cookie} | T], Acc) ->
    cons_headers(T, [{"Cookie", Cookie} | Acc]);
cons_headers([{content_length, L} | T], Acc) ->
    cons_headers(T, [{"Content-Length", L} | Acc]);
cons_headers([{content_type, L} | T], Acc) ->
    cons_headers(T, [{"Content-Type", L} | Acc]);
cons_headers([H | T], Acc) ->
    cons_headers(T, [H | Acc]).
%% Turn a list of {Name, Value} headers into an iolist of
%% "Name: Value\r\n" lines, preserving order.  The pseudo-header
%% http_vsn is dropped; atom names are rendered with atom_to_list/1.
encode_headers(Headers) ->
    encode_headers(Headers, []).

encode_headers([{http_vsn, _Value} | Rest], Acc) ->
    encode_headers(Rest, Acc);
encode_headers([{Name, Value} | Rest], Acc) when is_atom(Name) ->
    Line = [atom_to_list(Name), ": ", fmt_val(Value), crnl()],
    encode_headers(Rest, [Line | Acc]);
encode_headers([{Name, Value} | Rest], Acc) when is_list(Name) ->
    Line = [Name, ": ", fmt_val(Value), crnl()],
    encode_headers(Rest, [Line | Acc]);
encode_headers([], Acc) ->
    lists:reverse(Acc).
%% Pre-encode a request body into HTTP chunked transfer format.
%% Bodies supplied as a fun or tuple are generated on the fly elsewhere
%% and are passed through untouched.  Each chunk is emitted as
%% "<hex size>\r\n<data>\r\n"; the encoding is terminated with the
%% zero-size chunk "0\r\n" followed by a final "\r\n".
chunk_request_body(Body, _ChunkSize) when is_tuple(Body) orelse
                                          is_function(Body) ->
    Body;
chunk_request_body(Body, ChunkSize) ->
    chunk_request_body(Body, ChunkSize, []).

chunk_request_body(Body, _ChunkSize, Acc) when Body == <<>>; Body == [] ->
    %% Nothing (left) to send: emit only the terminating chunk.
    LastChunk = "0\r\n",
    lists:reverse(["\r\n", LastChunk | Acc]);
chunk_request_body(Body, ChunkSize, Acc) when is_binary(Body),
                                              size(Body) >= ChunkSize ->
    <<ChunkBody:ChunkSize/binary, Rest/binary>> = Body,
    Chunk = [?dec2hex(ChunkSize),"\r\n",
             ChunkBody, "\r\n"],
    chunk_request_body(Rest, ChunkSize, [Chunk | Acc]);
chunk_request_body(Body, _ChunkSize, Acc) when is_binary(Body) ->
    %% Final, short binary chunk plus the terminator.
    BodySize = size(Body),
    Chunk = [?dec2hex(BodySize),"\r\n",
             Body, "\r\n"],
    LastChunk = "0\r\n",
    lists:reverse(["\r\n", LastChunk, Chunk | Acc]);
chunk_request_body(Body, ChunkSize, Acc) when length(Body) >= ChunkSize ->
    {ChunkBody, Rest} = split_list_at(Body, ChunkSize),
    Chunk = [?dec2hex(ChunkSize),"\r\n",
             ChunkBody, "\r\n"],
    chunk_request_body(Rest, ChunkSize, [Chunk | Acc]);
chunk_request_body(Body, _ChunkSize, Acc) when is_list(Body) ->
    %% Final, short list chunk plus the terminator.
    BodySize = length(Body),
    Chunk = [?dec2hex(BodySize),"\r\n",
             Body, "\r\n"],
    LastChunk = "0\r\n",
    lists:reverse(["\r\n", LastChunk, Chunk | Acc]).
%% Incremental parser for data arriving on the socket.  Accumulates
%% bytes until a complete header block is available, then dispatches on
%% method, status code, Transfer-Encoding and Content-Length to decide
%% how (or whether) a body follows.  Returns the updated #state{} or
%% {error, Reason}.
%%
%% Fix: several comment lines in this function had lost their %% markers
%% (leaving bare prose inside the case expression, which does not
%% compile); they are restored as comments below.
parse_response(<<>>, #state{cur_req = undefined}=State) ->
    State#state{status = idle};
parse_response(Data, #state{cur_req = undefined}) ->
    do_trace("Data left to process when no pending request. ~1000.p~n", [Data]),
    {error, data_in_status_idle};
parse_response(Data, #state{reply_buffer = Acc, reqs = Reqs,
                            cur_req = CurReq} = State) ->
    #request{from=From, stream_to=StreamTo, req_id=ReqId,
             method=Method, response_format = Resp_format,
             options = Options, timer_ref = T_ref
            } = CurReq,
    MaxHeaderSize = ibrowse:get_config_value(max_headers_size, infinity),
    case scan_header(Acc, Data) of
        {yes, Headers, Data_1} ->
            do_trace("Recvd Header Data -> ~s~n----~n", [Headers]),
            do_trace("Recvd headers~n--- Headers Begin ---~n~s~n--- Headers End ---~n~n", [Headers]),
            {HttpVsn, StatCode, Headers_1, Status_line, Raw_headers} = parse_headers(Headers),
            do_trace("HttpVsn: ~p StatusCode: ~p Headers_1 -> ~1000.p~n", [HttpVsn, StatCode, Headers_1]),
            LCHeaders = [{to_lower(X), Y} || {X,Y} <- Headers_1],
            ConnClose = to_lower(get_value("connection", LCHeaders, "false")),
            IsClosing = is_connection_closing(HttpVsn, ConnClose),
            State_0 = case IsClosing of
                          true ->
                              shutting_down(State),
                              State#state{is_closing = IsClosing};
                          false ->
                              State
                      end,
            Give_raw_headers = get_value(give_raw_headers, Options, false),
            State_1 = case Give_raw_headers of
                          true ->
                              State_0#state{recvd_headers=Headers_1, status=get_body,
                                            reply_buffer = <<>>,
                                            status_line = Status_line,
                                            raw_headers = Raw_headers,
                                            http_status_code=StatCode};
                          false ->
                              State_0#state{recvd_headers=Headers_1, status=get_body,
                                            reply_buffer = <<>>,
                                            http_status_code=StatCode}
                      end,
            put(conn_close, ConnClose),
            TransferEncoding = to_lower(get_value("transfer-encoding", LCHeaders, "false")),
            Head_response_with_body = lists:member({workaround, head_response_with_body}, Options),
            case get_value("content-length", LCHeaders, undefined) of
                _ when Method == connect,
                       hd(StatCode) == $2 ->
                    %% 2xx response to a CONNECT: the proxy tunnel is up.
                    {_, Reqs_1} = queue:out(Reqs),
                    cancel_timer(T_ref),
                    upgrade_to_ssl(set_cur_request(State_0#state{reqs = Reqs_1,
                                                                 recvd_headers = [],
                                                                 status = idle
                                                                }));
                _ when Method == connect ->
                    {_, Reqs_1} = queue:out(Reqs),
                    do_error_reply(State#state{reqs = Reqs_1},
                                   {error, proxy_tunnel_failed}),
                    {error, proxy_tunnel_failed};
                _ when Method =:= head,
                       Head_response_with_body =:= false ->
                    %% A HEAD response with a body is not supposed
                    %% to happen, but it does. An Apache server was
                    %% observed to send an "empty" body in a
                    %% Chunked-Transfer-Encoding way, which meant
                    %% there was still a body. Issue #67 on Github.
                    {_, Reqs_1} = queue:out(Reqs),
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
                                         {ok, StatCode, Headers_1, []}),
                    cancel_timer(T_ref, {eat_message, {req_timedout, From}}),
                    State_2 = reset_state(State_1_1),
                    State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
                    parse_response(Data_1, State_3);
                _ when hd(StatCode) =:= $1 ->
                    %% Provisional response: a server may send
                    %% one or more 1XX responses before a proper
                    %% final response arrives.
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    do_trace("Recvd a status code of ~p. Ignoring and waiting for a proper response~n", [StatCode]),
                    parse_response(Data_1, State_1#state{recvd_headers = [],
                                                         status = get_header});
                _ when StatCode =:= "204";
                       StatCode =:= "304" ->
                    %% 204 No Content / 304 Not Modified carry no body.
                    {_, Reqs_1} = queue:out(Reqs),
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
                                         {ok, StatCode, Headers_1, []}),
                    cancel_timer(T_ref, {eat_message, {req_timedout, From}}),
                    State_2 = reset_state(State_1_1),
                    State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
                    parse_response(Data_1, State_3);
                _ when TransferEncoding =:= "chunked" ->
                    do_trace("Chunked encoding detected...~n",[]),
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    case parse_11_response(Data_1, State_1#state{transfer_encoding=chunked,
                                                                 chunk_size=chunk_start,
                                                                 reply_buffer = <<>>}) of
                        {error, Reason} ->
                            fail_pipelined_requests(State_1,
                                                    {error, {Reason,
                                                             {stat_code, StatCode}, Headers_1}}),
                            {error, Reason};
                        State_2 ->
                            State_2
                    end;
                undefined when HttpVsn =:= "HTTP/1.0";
                               ConnClose =:= "close" ->
                    %% No Content-Length: body is delimited by the
                    %% connection closing.
                    send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                    State_1#state{reply_buffer = Data_1};
                undefined ->
                    fail_pipelined_requests(State_1,
                                            {error, {content_length_undefined,
                                                     {stat_code, StatCode}, Headers}}),
                    {error, content_length_undefined};
                V ->
                    case catch list_to_integer(V) of
                        V_1 when is_integer(V_1), V_1 >= 0 ->
                            send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
                            do_trace("Recvd Content-Length of ~p~n", [V_1]),
                            State_2 = State_1#state{rep_buf_size=0,
                                                    reply_buffer = <<>>,
                                                    content_length=V_1},
                            case parse_11_response(Data_1, State_2) of
                                {error, Reason} ->
                                    fail_pipelined_requests(State_1,
                                                            {error, {Reason,
                                                                     {stat_code, StatCode}, Headers_1}}),
                                    {error, Reason};
                                State_3 ->
                                    State_3
                            end;
                        _ ->
                            %% Unparseable Content-Length value.
                            fail_pipelined_requests(State_1,
                                                    {error, {content_length_undefined,
                                                             {stat_code, StatCode}, Headers}}),
                            {error, content_length_undefined}
                    end
            end;
        {no, Acc_1} when MaxHeaderSize == infinity ->
            State#state{reply_buffer = Acc_1};
        {no, Acc_1} when size(Acc_1) < MaxHeaderSize ->
            State#state{reply_buffer = Acc_1};
        {no, _Acc_1} ->
            fail_pipelined_requests(State, {error, max_headers_size_exceeded}),
            {error, max_headers_size_exceeded}
    end.
%% After a successful CONNECT, upgrade the proxy socket to SSL and
%% replay all requests that were queued while the tunnel was being set
%% up.  Fix: corrected the "Reson" typo in the failure trace message.
upgrade_to_ssl(#state{socket = Socket,
                      connect_timeout = Conn_timeout,
                      ssl_options = Ssl_options,
                      tunnel_setup_queue = Q} = State) ->
    case ssl:connect(Socket, Ssl_options, Conn_timeout) of
        {ok, Ssl_socket} ->
            do_trace("Upgraded to SSL socket!!~n", []),
            State_1 = State#state{socket = Ssl_socket,
                                  proxy_tunnel_setup = done},
            %% The queue was built by prepending; reverse to preserve
            %% submission order.
            send_queued_requests(lists:reverse(Q), State_1);
        Err ->
            do_trace("Upgrade to SSL socket failed. Reason: ~p~n", [Err]),
            do_error_reply(State, {error, {send_failed, Err}}),
            {error, send_failed}
    end.
%% Replay the requests queued during proxy tunnel setup over the (now
%% SSL) connection, stopping and failing all callers at the first send
%% error.
send_queued_requests([], State) ->
    do_trace("Sent all queued requests via SSL connection~n", []),
    State#state{tunnel_setup_queue = []};
send_queued_requests([{From, Url, Headers, Method, Body, Options, Timeout} | Q],
                     State) ->
    case send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State) of
        {noreply, State_1} ->
            send_queued_requests(Q, State_1);
        Err ->
            do_trace("Error sending queued SSL request: ~n"
                     "URL     : ~s~n"
                     "Method  : ~p~n"
                     "Headers : ~p~n", [Url, Method, Headers]),
            do_error_reply(State, {error, {send_failed, Err}}),
            {error, send_failed}
    end.
%% Decide whether the server will close the connection after this
%% response: always for HTTP/0.9, whenever "Connection: close" was
%% sent, and for HTTP/1.0 responses without an explicit Connection
%% header (which parse_response records as "false").
is_connection_closing(HttpVsn, ConnCloseHeader) ->
    HttpVsn =:= "HTTP/0.9"
        orelse ConnCloseHeader =:= "close"
        orelse (HttpVsn =:= "HTTP/1.0" andalso ConnCloseHeader =:= "false").
%% HTTP/1.1 body parser.  The first four clauses implement the chunked
%% transfer-encoding state machine (chunk-size line -> chunk data ->
%% inter-chunk CRLF -> terminating zero chunk with optional trailers);
%% the last clause reads a plain Content-Length delimited body.
%%
%% Fix: several comment lines had lost their %% markers, leaving bare
%% prose inside the clause bodies (which does not compile); restored
%% below.
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked,
                         chunk_size = chunk_start,
                         chunk_size_buffer = Chunk_sz_buf
                        } = State) ->
    case scan_crlf(Chunk_sz_buf, DataRecvd) of
        {yes, ChunkHeader, Data_1} ->
            State_1 = maybe_accumulate_ce_data(State, <<ChunkHeader/binary, $\r, $\n>>),
            ChunkSize = parse_chunk_header(ChunkHeader),
            %% Do we have to preserve the chunk encoding when
            %% streaming? NO. This should be transparent to the client
            %% process. Chunked encoding was only introduced to make
            %% it efficient for the server.
            RemLen = size(Data_1),
            do_trace("Determined chunk size: ~p. Already recvd: ~p~n",
                     [ChunkSize, RemLen]),
            parse_11_response(Data_1, State_1#state{chunk_size_buffer = <<>>,
                                                    deleted_crlf = true,
                                                    recvd_chunk_size = 0,
                                                    chunk_size = ChunkSize});
        {no, Data_1} ->
            State#state{chunk_size_buffer = Data_1}
    end;

%% This clause is to remove the CRLF between two chunks.
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked,
                         chunk_size = tbd,
                         chunk_size_buffer = Buf
                        } = State) ->
    case scan_crlf(Buf, DataRecvd) of
        {yes, _, NextChunk} ->
            State_1 = maybe_accumulate_ce_data(State, <<$\r, $\n>>),
            State_2 = State_1#state{chunk_size = chunk_start,
                                    chunk_size_buffer = <<>>,
                                    deleted_crlf = true},
            parse_11_response(NextChunk, State_2);
        {no, Data_1} ->
            State#state{chunk_size_buffer = Data_1}
    end;

%% This clause deals with the end of a chunked transfer. ibrowse does
%% not expose trailer headers to the caller; they are scanned past (and
%% optionally accumulated when preserve_chunked_encoding is set).
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked, chunk_size = 0,
                         cur_req = CurReq,
                         deleted_crlf = DelCrlf,
                         chunk_size_buffer = Trailer,
                         reqs = Reqs} = State) ->
    do_trace("Detected end of chunked transfer...~n", []),
    DataRecvd_1 = case DelCrlf of
                      false ->
                          DataRecvd;
                      true ->
                          %% Re-insert the CRLF removed earlier so the
                          %% trailer scanner sees a well-formed block.
                          <<$\r, $\n, DataRecvd/binary>>
                  end,
    case scan_header(Trailer, DataRecvd_1) of
        {yes, TEHeaders, Rem} ->
            {_, Reqs_1} = queue:out(Reqs),
            State_1 = maybe_accumulate_ce_data(State, <<TEHeaders/binary, $\r, $\n>>),
            State_2 = handle_response(CurReq,
                                      State_1#state{reqs = Reqs_1}),
            parse_response(Rem, reset_state(State_2));
        {no, Rem} ->
            accumulate_response(<<>>, State#state{chunk_size_buffer = Rem, deleted_crlf = false})
    end;

%% Receiving the data of the current chunk.
parse_11_response(DataRecvd,
                  #state{transfer_encoding = chunked,
                         chunk_size = CSz,
                         recvd_chunk_size = Recvd_csz,
                         rep_buf_size = RepBufSz} = State) ->
    NeedBytes = CSz - Recvd_csz,
    DataLen = size(DataRecvd),
    do_trace("Recvd more data: size: ~p. NeedBytes: ~p~n", [DataLen, NeedBytes]),
    case DataLen >= NeedBytes of
        true ->
            {RemChunk, RemData} = split_binary(DataRecvd, NeedBytes),
            do_trace("Recvd another chunk...~p~n", [RemChunk]),
            do_trace("RemData -> ~p~n", [RemData]),
            case accumulate_response(RemChunk, State) of
                {error, Reason} ->
                    do_trace("Error accumulating response --> ~p~n", [Reason]),
                    {error, Reason};
                #state{} = State_1 ->
                    State_2 = State_1#state{chunk_size=tbd},
                    parse_11_response(RemData, State_2)
            end;
        false ->
            accumulate_response(DataRecvd,
                                State#state{rep_buf_size = RepBufSz + DataLen,
                                            recvd_chunk_size = Recvd_csz + DataLen})
    end;

%% Plain body delimited by Content-Length.
parse_11_response(DataRecvd,
                  #state{content_length=CL, rep_buf_size=RepBufSz,
                         reqs=Reqs}=State) ->
    NeedBytes = CL - RepBufSz,
    DataLen = size(DataRecvd),
    case DataLen >= NeedBytes of
        true ->
            {RemBody, Rem} = split_binary(DataRecvd, NeedBytes),
            {_, Reqs_1} = queue:out(Reqs),
            State_1 = accumulate_response(RemBody, State),
            State_2 = handle_response(State_1#state.cur_req, State_1#state{reqs=Reqs_1}),
            State_3 = reset_state(State_2),
            parse_response(Rem, State_3);
        false ->
            accumulate_response(DataRecvd, State#state{rep_buf_size = (RepBufSz+DataLen)})
    end.
%% When the caller asked to preserve the raw chunked encoding
%% (preserve_chunked_encoding), fold the given framing bytes into the
%% response; otherwise the framing is discarded.
maybe_accumulate_ce_data(#state{cur_req = #request{preserve_chunked_encoding = false}} = State, _) ->
    State;
maybe_accumulate_ce_data(State, Data) ->
    accumulate_response(Data, State).
%% Deliver a complete response for the given request and prepare for the
%% next pipelined request.  The first clause handles save_response_to_file
%% requests: the temp file (if any) is closed and {file, Filename} is
%% returned instead of the in-memory body.  The second clause is the
%% normal in-memory path.  Both honour give_raw_headers and cancel the
%% per-request timeout, eating a racing timeout message.
handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
                         response_format = Resp_format,
                         save_response_to_file = SaveResponseToFile,
                         tmp_file_name = TmpFilename,
                         tmp_file_fd = Fd,
                         options = Options,
                         timer_ref = ReqTimer
                        },
                #state{http_status_code = SCode,
                       status_line = Status_line,
                       raw_headers = Raw_headers,
                       reply_buffer = RepBuf,
                       recvd_headers = RespHeaders}=State) when SaveResponseToFile /= false ->
    Body = RepBuf,
    case Fd of
        undefined ->
            ok;
        _ ->
            ok = file:close(Fd)
    end,
    ResponseBody = case TmpFilename of
                       undefined ->
                           %% No file was ever opened (e.g. empty body):
                           %% fall back to the buffered body.
                           Body;
                       _ ->
                           {file, TmpFilename}
                   end,
    {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(RespHeaders, Raw_headers, Options),
    Reply = case get_value(give_raw_headers, Options, false) of
                true ->
                    {ok, Status_line, Raw_headers_1, ResponseBody};
                false ->
                    {ok, SCode, Resp_headers_1, ResponseBody}
            end,
    State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
    cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
    set_cur_request(State_1);
handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
                         response_format = Resp_format,
                         options = Options, timer_ref = ReqTimer},
                #state{http_status_code = SCode,
                       status_line = Status_line,
                       raw_headers = Raw_headers,
                       recvd_headers = Resp_headers,
                       reply_buffer = RepBuf
                      } = State) ->
    Body = RepBuf,
    {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(Resp_headers, Raw_headers, Options),
    Reply = case get_value(give_raw_headers, Options, false) of
                true ->
                    {ok, Status_line, Raw_headers_1, Body};
                false ->
                    {ok, SCode, Resp_headers_1, Body}
            end,
    State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
    cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
    set_cur_request(State_1).
%% Clear all per-response fields in the connection state, ready to parse
%% the headers of the next pipelined response.  Connection-level fields
%% (socket, request queue, pipeline counters, ...) are left untouched.
reset_state(State) ->
    State#state{status = get_header,
                rep_buf_size = 0,
                streamed_size = 0,
                content_length = undefined,
                reply_buffer = <<>>,
                chunk_size_buffer = <<>>,
                recvd_headers = [],
                status_line = undefined,
                raw_headers = undefined,
                deleted_crlf = false,
                http_status_code = undefined,
                chunk_size = undefined,
                transfer_encoding = undefined
               }.
%% Point cur_req at the request now at the head of the queue (or
%% undefined when the queue is empty).  When that request's caller
%% controls the socket, re-arm {active, once} so the next packet is
%% delivered.
set_cur_request(#state{reqs = Reqs, socket = Socket} = State) ->
    case queue:peek(Reqs) of
        empty ->
            State#state{cur_req = undefined};
        {value, #request{caller_controls_socket = true} = Next_req} ->
            do_setopts(Socket, [{active, once}], State),
            State#state{cur_req = Next_req};
        {value, Next_req} ->
            State#state{cur_req = Next_req}
    end.
%% Split the raw header block into status line and remaining header
%% lines, then parse both.  A block with no line break at all is
%% treated as a bare status line.
parse_headers(Raw_block) ->
    {Status_line, Header_lines} =
        case scan_crlf(Raw_block) of
            {yes, Line, Rest} -> {Line, Rest};
            {no, Line}        -> {Line, <<>>}
        end,
    parse_headers(Status_line, Header_lines).
%% Parse status line + header lines into
%% {HttpVsn, StatusCode, ParsedHeaders, StatusLine, RawHeaders}.
%% A status line that cannot be parsed is assumed to be an HTTP/0.9
%% response (no status line, body starts immediately).
%%
%% Fix: the catch-all clause head had been mangled into bare prose
%% ("A HTTP 0.9 response ?"), leaving the case expression without a
%% `_ ->` branch and the function unable to compile; restored below.
parse_headers(StatusLine, Headers) ->
    Headers_1 = parse_headers_1(Headers),
    case parse_status_line(StatusLine) of
        {ok, HttpVsn, StatCode, _Msg} ->
            put(http_prot_vsn, HttpVsn),
            {HttpVsn, StatCode, Headers_1, StatusLine, Headers};
        _ -> %% A HTTP 0.9 response?
            put(http_prot_vsn, "HTTP/0.9"),
            {"HTTP/0.9", undefined, Headers, StatusLine, Headers}
    end.
%% From RFC 2616:
%%    HTTP/1.1 header field values can be folded onto multiple lines if
%%    the continuation line begins with a space or horizontal tab. A
%%    recipient MAY replace any linear white space with a single SP
%%    before interpreting the field value or forwarding the message
%%    downstream.
%% Parse raw header lines (string or binary) into a list of
%% {Name, Value} pairs, unfolding continuation lines (a line starting
%% with SP or TAB continues the previous header, per RFC 2616) and
%% dropping lines that contain no colon.
parse_headers_1(B) when is_binary(B) ->
    parse_headers_1(binary_to_list(B));
parse_headers_1(String) ->
    parse_headers_1(String, [], []).

%% Continuation line after CRLF: fold into the current header with a
%% single space, skipping the leading whitespace.
parse_headers_1([$\n, H |T], [$\r | L], Acc) when H =:= 32;
                                                  H =:= $\t ->
    parse_headers_1(lists:dropwhile(fun(X) ->
                                            is_whitespace(X)
                                    end, T), [32 | L], Acc);
%% Same, for a bare-LF line terminator.
parse_headers_1([$\n, H |T], L, Acc) when H =:= 32;
                                          H =:= $\t ->
    parse_headers_1(lists:dropwhile(fun(X) ->
                                            is_whitespace(X)
                                    end, T), [32 | L], Acc);
%% End of a CRLF-terminated header line.
parse_headers_1([$\n|T], [$\r | L], Acc) ->
    case parse_header(lists:reverse(L)) of
        invalid ->
            parse_headers_1(T, [], Acc);
        NewHeader ->
            parse_headers_1(T, [], [NewHeader | Acc])
    end;
%% End of a bare-LF-terminated header line.
parse_headers_1([$\n|T], L, Acc) ->
    case parse_header(lists:reverse(L)) of
        invalid ->
            parse_headers_1(T, [], Acc);
        NewHeader ->
            parse_headers_1(T, [], [NewHeader | Acc])
    end;
parse_headers_1([H|T], L, Acc) ->
    parse_headers_1(T, [H|L], Acc);
parse_headers_1([], [], Acc) ->
    lists:reverse(Acc);
%% Input exhausted with a partial line pending: parse it too.
parse_headers_1([], L, Acc) ->
    Acc_1 = case parse_header(lists:reverse(L)) of
                invalid ->
                    Acc;
                NewHeader ->
                    [NewHeader | Acc]
            end,
    lists:reverse(Acc_1).
%% Parse an HTTP status line ("HTTP/1.1 200 OK") into
%% {ok, ProtocolVersion, StatusCode, Rest}.  A line with no space at
%% all is taken to be an HTTP/0.9 response and yields the atom http_09.
parse_status_line(Line) when is_binary(Line) ->
    parse_status_line(binary_to_list(Line));
parse_status_line(Line) ->
    parse_status_line(Line, get_prot_vsn, [], []).

%% First space ends the protocol-version field.
parse_status_line([32 | Rest], get_prot_vsn, Vsn_acc, Code_acc) ->
    parse_status_line(Rest, get_status_code, Vsn_acc, Code_acc);
%% Second space ends the status code; the remainder is the reason phrase.
parse_status_line([32 | Rest], get_status_code, Vsn_acc, Code_acc) ->
    {ok, lists:reverse(Vsn_acc), lists:reverse(Code_acc), Rest};
%% Line ended right after the status code (no reason phrase).
parse_status_line([], get_status_code, Vsn_acc, Code_acc) ->
    {ok, lists:reverse(Vsn_acc), lists:reverse(Code_acc), []};
parse_status_line([Ch | Rest], get_prot_vsn, Vsn_acc, Code_acc) ->
    parse_status_line(Rest, get_prot_vsn, [Ch | Vsn_acc], Code_acc);
parse_status_line([Ch | Rest], get_status_code, Vsn_acc, Code_acc) ->
    parse_status_line(Rest, get_status_code, Vsn_acc, [Ch | Code_acc]);
%% Ran out of input while still reading the version: HTTP/0.9.
parse_status_line([], _, _, _) ->
    http_09.
%% Split one header line at the first colon into {Name, Value}, with
%% the value stripped of surrounding spaces.  Lines without a colon
%% yield the atom 'invalid'.
parse_header(Line) ->
    parse_header(Line, []).

parse_header([$: | Value], Name_acc) ->
    {lists:reverse(Name_acc), string:strip(Value)};
parse_header([Ch | Rest], Name_acc) ->
    parse_header(Rest, [Ch | Name_acc]);
parse_header([], _Name_acc) ->
    invalid.
%% Look for the end of the header block (a blank line).  Returns
%% {yes, Headers, Body} when found, splitting off either the standard
%% CRLFCRLF separator or the lenient ("dodgy") bare LFLF one, or
%% {no, Bin} when more data is needed.
scan_header(Bin) ->
    case get_crlf_crlf_pos(Bin, 0) of
        {yes, Pos} ->
            {Headers, <<_:4/binary, Body/binary>>} = split_binary(Bin, Pos),
            {yes, Headers, Body};
        {yes_dodgy, Pos} ->
            {Headers, <<_:2/binary, Body/binary>>} = split_binary(Bin, Pos),
            {yes, Headers, Body};
        no ->
            {no, Bin}
    end.
%% Incremental variant of scan_header/1: Bin1 is previously-scanned
%% data, Bin2 newly received.  Only the last 3 bytes of Bin1 can be
%% part of a separator straddling the boundary, so rescanning starts
%% 4 bytes from the end of Bin1 rather than from the beginning.
scan_header(Bin1, Bin2) when size(Bin1) < 4 ->
    scan_header(<<Bin1/binary, Bin2/binary>>);
scan_header(Bin1, <<>>) ->
    scan_header(Bin1);
scan_header(Bin1, Bin2) ->
    Bin1_already_scanned_size = size(Bin1) - 4,
    <<Headers_prefix:Bin1_already_scanned_size/binary, Rest/binary>> = Bin1,
    Bin_to_scan = <<Rest/binary, Bin2/binary>>,
    case get_crlf_crlf_pos(Bin_to_scan, 0) of
        {yes, Pos} ->
            {Headers_suffix, <<_:4/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
            {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
        {yes_dodgy, Pos} ->
            {Headers_suffix, <<_:2/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
            {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
        no ->
            {no, <<Bin1/binary, Bin2/binary>>}
    end.
%% Find the offset of the header/body separator: {yes, Offset} for a
%% proper CRLFCRLF, {yes_dodgy, Offset} for a lenient bare LFLF, or
%% 'no' when neither occurs.
get_crlf_crlf_pos(<<$\r, $\n, $\r, $\n, _/binary>>, Offset) ->
    {yes, Offset};
get_crlf_crlf_pos(<<$\n, $\n, _/binary>>, Offset) ->
    {yes_dodgy, Offset};
get_crlf_crlf_pos(<<_, Remaining/binary>>, Offset) ->
    get_crlf_crlf_pos(Remaining, Offset + 1);
get_crlf_crlf_pos(<<>>, _Offset) ->
    no.
%% Split a binary at the first line terminator (CRLF or bare LF).
%% Returns {yes, LineWithoutTerminator, Rest} or {no, Bin}.
scan_crlf(Bin) ->
    case get_crlf_pos(Bin) of
        {yes, Terminator_len, Pos} ->
            {Line, <<_Eol:Terminator_len/binary, Rest/binary>>} =
                split_binary(Bin, Pos),
            {yes, Line, Rest};
        no ->
            {no, Bin}
    end.
%% Incremental variant of scan_crlf/1: Bin1 was scanned before, Bin2 is
%% new data.  Only the last byte of Bin1 can start a CRLF straddling
%% the boundary, so rescanning begins 2 bytes from the end of Bin1.
scan_crlf(<<>>, Bin2) ->
    scan_crlf(Bin2);
scan_crlf(Bin1, Bin2) when size(Bin1) < 2 ->
    scan_crlf(<<Bin1/binary, Bin2/binary>>);
scan_crlf(Bin1, Bin2) ->
    scan_crlf_1(size(Bin1) - 2, Bin1, Bin2).

%% Scan only the tail of Bin1 plus Bin2, re-attaching the untouched
%% head of Bin1 to the prefix on a match.
scan_crlf_1(Bin1_head_size, Bin1, Bin2) ->
    <<Bin1_head:Bin1_head_size/binary, Bin1_tail/binary>> = Bin1,
    Bin3 = <<Bin1_tail/binary, Bin2/binary>>,
    case get_crlf_pos(Bin3) of
        {yes, Offset, Pos} ->
            {Prefix, <<_:Offset/binary, Suffix/binary>>} = split_binary(Bin3, Pos),
            {yes, list_to_binary([Bin1_head, Prefix]), Suffix};
        no ->
            {no, list_to_binary([Bin1, Bin2])}
    end.
%% Locate the first line terminator in a binary.  Returns
%% {yes, TerminatorLength, Offset} where TerminatorLength is 2 for
%% CRLF and 1 for a bare LF, or 'no' when none is present.
get_crlf_pos(Bin) ->
    get_crlf_pos(Bin, 0).

get_crlf_pos(<<$\r, $\n, _/binary>>, Offset) ->
    {yes, 2, Offset};
get_crlf_pos(<<$\n, _/binary>>, Offset) ->
    {yes, 1, Offset};
get_crlf_pos(<<_, Remaining/binary>>, Offset) ->
    get_crlf_pos(Remaining, Offset + 1);
get_crlf_pos(<<>>, _Offset) ->
    no.
%% Render a header value as chardata: lists pass through, integers and
%% atoms are converted, anything else is pretty-printed with ~p.
fmt_val(Val) ->
    if
        is_list(Val)    -> Val;
        is_integer(Val) -> integer_to_list(Val);
        is_atom(Val)    -> atom_to_list(Val);
        true            -> io_lib:format("~p", [Val])
    end.
%% The CRLF line terminator used when emitting HTTP requests.
crnl() -> "\r\n".
%% Map a method atom to its on-the-wire token.  Covers the standard
%% HTTP methods plus WebDAV/UPnP extensions; an unknown atom raises
%% function_clause.
method(connect) -> "CONNECT";
method(delete) -> "DELETE";
method(get) -> "GET";
method(head) -> "HEAD";
method(options) -> "OPTIONS";
method(post) -> "POST";
method(put) -> "PUT";
method(trace) -> "TRACE";
method(copy) -> "COPY";
method(lock) -> "LOCK";
method(mkcol) -> "MKCOL";
method(move) -> "MOVE";
method(propfind) -> "PROPFIND";
method(proppatch) -> "PROPPATCH";
method(search) -> "SEARCH";
method(unlock) -> "UNLOCK";
method(report) -> "REPORT";
method(mkactivity) -> "MKACTIVITY";
method(checkout) -> "CHECKOUT";
method(merge) -> "MERGE";
method(msearch) -> "MSEARCH";
method(notify) -> "NOTIFY";
method(subscribe) -> "SUBSCRIBE";
method(unsubscribe) -> "UNSUBSCRIBE";
method(patch) -> "PATCH";
method(purge) -> "PURGE".
%% From RFC 2616:
%%    A chunked body consists of a series of chunks, each preceded by
%%    its size in hex (chunk-size = 1*HEX), optionally followed by a
%%    chunk extension after a ";".  The transfer ends with a chunk whose
%%    size is zero, followed by the trailer, which is terminated by an
%%    empty line.
%% The parser below also strips trailing spaces from the chunk size
%% field, as Apache 1.3.27 was observed to send them.
%% Parse the chunk-size line of a chunked transfer: read hex digits up
%% to an optional ";" chunk extension, skipping any whitespace, and
%% return the size as an integer.
parse_chunk_header(ChunkHeader) ->
    parse_chunk_header(ChunkHeader, []).

parse_chunk_header(<<$;, _/binary>>, Acc) ->
    %% Chunk extension starts; everything after ";" is ignored.
    hexlist_to_integer(lists:reverse(Acc));
parse_chunk_header(<<H, T/binary>>, Acc) ->
    case is_whitespace(H) of
        true ->
            parse_chunk_header(T, Acc);
        false ->
            parse_chunk_header(T, [H | Acc])
    end;
parse_chunk_header(<<>>, Acc) ->
    hexlist_to_integer(lists:reverse(Acc)).
%% True for the ASCII whitespace characters this parser cares about:
%% space, CR, LF and TAB.
is_whitespace(Char) ->
    lists:member(Char, [$\s, $\r, $\n, $\t]).
%% For streaming requests, push an ibrowse_async_headers message to the
%% stream_to pid as soon as headers are parsed.  No-op for synchronous
%% requests (StreamTo =:= undefined).  The send is wrapped in catch so
%% a dead consumer does not crash the connection process.
send_async_headers(_ReqId, undefined, _, _State) ->
    ok;
send_async_headers(ReqId, StreamTo, Give_raw_headers,
                   #state{status_line = Status_line, raw_headers = Raw_headers,
                          recvd_headers = Headers, http_status_code = StatCode,
                          cur_req = #request{options = Opts}
                         }) ->
    {Headers_1, Raw_headers_1} = maybe_add_custom_headers(Headers, Raw_headers, Opts),
    case Give_raw_headers of
        false ->
            catch StreamTo ! {ibrowse_async_headers, ReqId, StatCode, Headers_1};
        true ->
            catch StreamTo ! {ibrowse_async_headers, ReqId, Status_line, Raw_headers_1}
    end.
%% Append any caller-supplied add_custom_headers entries to both the
%% parsed header list and (when available as a binary) the raw header
%% block that is handed back with give_raw_headers.
maybe_add_custom_headers(Headers, Raw_headers, Opts) ->
    Custom_headers = get_value(add_custom_headers, Opts, []),
    Headers_1 = Headers ++ Custom_headers,
    Raw_headers_1 = case Custom_headers of
                        [_ | _] when is_binary(Raw_headers) ->
                            Custom_headers_bin = list_to_binary(string:join([[X, $:, Y] || {X, Y} <- Custom_headers], "\r\n")),
                            <<Raw_headers/binary, "\r\n", Custom_headers_bin/binary>>;
                        _ ->
                            Raw_headers
                    end,
    {Headers_1, Raw_headers_1}.
%% Convert a response body to the caller's requested representation
%% (list or binary).  Anything that needs no conversion -- including
%% non-body terms such as error tuples -- is passed through untouched.
format_response_data(list, Body) when is_list(Body) ->
    flatten(Body);
format_response_data(list, Body) when is_binary(Body) ->
    binary_to_list(Body);
format_response_data(binary, Body) when is_list(Body) ->
    list_to_binary(Body);
format_response_data(_Resp_format, Body) ->
    Body.
%% Deliver a final response (or error) to the caller and decrement the
%% pipeline counter.  Synchronous requests (StreamTo =:= undefined) are
%% answered via gen_server:reply/2; streaming requests get
%% ibrowse_async_response / ibrowse_async_response_end messages.
%%
%% Fix: the explanatory comment about delayed req_id cleanup had lost
%% its %% markers, leaving bare prose in the function body (which does
%% not compile); restored below.
do_reply(State, From, undefined, _, Resp_format, {ok, St_code, Headers, Body}) ->
    Msg_1 = {ok, St_code, Headers, format_response_data(Resp_format, Body)},
    gen_server:reply(From, Msg_1),
    dec_pipeline_counter(State);
do_reply(State, From, undefined, _, _, Msg) ->
    gen_server:reply(From, Msg),
    dec_pipeline_counter(State);
do_reply(#state{prev_req_id = Prev_req_id} = State,
         _From, StreamTo, ReqId, Resp_format, {ok, _, _, Body}) ->
    State_1 = dec_pipeline_counter(State),
    case Body of
        [] ->
            ok;
        _ ->
            Body_1 = format_response_data(Resp_format, Body),
            catch StreamTo ! {ibrowse_async_response, ReqId, Body_1}
    end,
    catch StreamTo ! {ibrowse_async_response_end, ReqId},
    %% We don't want to delete the Req-id to Pid mapping straightaway:
    %% the caller may still be about to invoke ibrowse:stream_next/1 for
    %% this request and would then see {error, unknown_req_id} when it
    %% calls ibrowse:stream_next/1. To get around this, we store the
    %% req id, and clear it only after the next request completes.
    ets:delete(ibrowse_stream, {req_id_pid, Prev_req_id}),
    State_1#state{prev_req_id = ReqId};
do_reply(State, _From, StreamTo, ReqId, Resp_format, Msg) ->
    State_1 = dec_pipeline_counter(State),
    Msg_1 = format_response_data(Resp_format, Msg),
    catch StreamTo ! {ibrowse_async_response, ReqId, Msg_1},
    State_1.
%% Push an intermediate (streamed) body fragment to the consumer, if
%% there is one.  Synchronous requests have no stream_to pid and get
%% nothing until the final reply.
do_interim_reply(undefined, _Resp_format, _ReqId, _Msg) ->
    ok;
do_interim_reply(StreamTo, Resp_format, ReqId, Msg) ->
    Formatted = format_response_data(Resp_format, Msg),
    catch StreamTo ! {ibrowse_async_response, ReqId, Formatted}.
%% Fail every outstanding request on this connection with the given
%% error: all queued/pipelined requests as well as any requests still
%% waiting in the proxy tunnel setup queue.  Also removes each
%% req_id-to-pid mapping from the ibrowse_stream table.
do_error_reply(#state{reqs = Reqs, tunnel_setup_queue = Tun_q} = State, Err) ->
    ReqList = queue:to_list(Reqs),
    lists:foreach(fun(#request{from=From, stream_to=StreamTo, req_id=ReqId,
                               response_format = Resp_format}) ->
                          ets:delete(ibrowse_stream, {req_id_pid, ReqId}),
                          do_reply(State, From, StreamTo, ReqId, Resp_format, {error, Err})
                  end, ReqList),
    lists:foreach(
      fun({From, _Url, _Headers, _Method, _Body, _Options, _Timeout}) ->
              do_reply(State, From, undefined, undefined, undefined, Err)
      end, Tun_q).
%% Fail the current request with the given reply, then fail every
%% request pipelined behind it with previous_request_failed (their
%% responses can no longer be framed once parsing has gone wrong).
fail_pipelined_requests(#state{reqs = Reqs, cur_req = CurReq} = State, Reply) ->
    {_, Reqs_1} = queue:out(Reqs),
    #request{from=From, stream_to=StreamTo, req_id=ReqId,
             response_format = Resp_format} = CurReq,
    State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
    do_error_reply(State_1#state{reqs = Reqs_1}, previous_request_failed).
%% Split a list after the first N elements; returns {FirstN, Rest}.
%% Lists shorter than N yield {WholeList, []}.
split_list_at(List, N) ->
    split_list_at(List, N, []).

split_list_at([], _Remaining, Taken) ->
    {lists:reverse(Taken), []};
split_list_at(Rest, 0, Taken) ->
    {lists:reverse(Taken), Rest};
split_list_at([Head | Tail], Remaining, Taken) ->
    split_list_at(Tail, Remaining - 1, [Head | Taken]).
%% Convert a hex-digit string (either case) to an integer; the empty
%% string yields 0.  Invalid digits raise function_clause.
hexlist_to_integer(Digits) ->
    lists:foldl(fun(Digit, Acc) ->
                        Acc * 16 + to_ascii(Digit)
                end, 0, Digits).

%% Numeric value of a single hex digit.
to_ascii(C) when C >= $0, C =< $9 -> C - $0;
to_ascii(C) when C >= $a, C =< $f -> C - $a + 10;
to_ascii(C) when C >= $A, C =< $F -> C - $A + 10.
%% Cancel a timer reference, tolerating 'undefined' (no timer set).
cancel_timer(undefined) -> ok;
cancel_timer(Ref)       -> _ = erlang:cancel_timer(Ref),
                           ok.

%% Cancel a timer and drain a possibly already-delivered timeout
%% message from the mailbox, so a timer that fired just before
%% cancellation cannot be processed later as a spurious timeout.
cancel_timer(Ref, {eat_message, Msg}) ->
    cancel_timer(Ref),
    receive
        Msg ->
            ok
    after 0 ->
            ok
    end.
%% Generate a unique identifier for a request (used as an ets key and
%% in messages to stream consumers).  now() yields strictly increasing
%% {MegaSecs, Secs, MicroSecs} tuples.
%% NOTE(review): erlang:now/0 is deprecated in modern OTP releases;
%% consider erlang:unique_integer/1 -- but confirm no caller depends on
%% the tuple shape before changing the representation.
make_req_id() ->
    now().
%% ASCII-lowercase a string; non-letter characters pass through
%% unchanged.
to_lower(Str) ->
    to_lower(Str, []).

to_lower([Ch | Rest], Acc) when Ch >= $A, Ch =< $Z ->
    to_lower(Rest, [Ch + 32 | Acc]);
to_lower([Ch | Rest], Acc) ->
    to_lower(Rest, [Ch | Acc]);
to_lower([], Acc) ->
    lists:reverse(Acc).
%% Deregister this connection from the load balancer's ets table so no
%% new requests are routed here.  No-op when not managed by a load
%% balancer; the catch tolerates the table already being gone.
shutting_down(#state{lb_ets_tid = undefined}) ->
    ok;
shutting_down(#state{lb_ets_tid = Tid,
                     cur_pipeline_size = _Sz}) ->
    catch ets:delete(Tid, self()).
%% Bump the pipeline-size counter for this connection, both locally and
%% in the load balancer's ets table (capped at 99999, resetting to
%% 9999 per ets:update_counter threshold semantics).  Skipped when the
%% connection is closing or not load-balanced.
inc_pipeline_counter(#state{is_closing = true} = State) ->
    State;
inc_pipeline_counter(#state{lb_ets_tid = undefined} = State) ->
    State;
inc_pipeline_counter(#state{cur_pipeline_size = Pipe_sz,
                            lb_ets_tid = Tid} = State) ->
    update_counter(Tid, self(), {2,1,99999,9999}),
    State#state{cur_pipeline_size = Pipe_sz + 1}.

%% Thin wrapper around ets:update_counter/3 (kept as a seam for tests).
update_counter(Tid, Key, Args) ->
    ets:update_counter(Tid, Key, Args).
%% Decrement the pipeline-size counters (floored at 0).  The ets updates
%% are wrapped in try/catch because the load balancer may already have
%% removed this connection's row; the local count is decremented
%% regardless.
dec_pipeline_counter(#state{is_closing = true} = State) ->
    State;
dec_pipeline_counter(#state{lb_ets_tid = undefined} = State) ->
    State;
dec_pipeline_counter(#state{cur_pipeline_size = Pipe_sz,
                            lb_ets_tid = Tid} = State) ->
    try
        update_counter(Tid, self(), {2,-1,0,0}),
        update_counter(Tid, self(), {3,-1,0,0})
    catch
        _:_ ->
            ok
    end,
    State#state{cur_pipeline_size = Pipe_sz - 1}.
%% Flatten a body for the caller: a plain string (first element is an
%% integer) is returned as-is, a list of lists is flattened, and the
%% empty list stays empty.
flatten([]) ->
    [];
flatten([First | _] = L) when is_integer(First) ->
    L;
flatten([First | _] = L) when is_list(First) ->
    lists:flatten(L).
%% Size of the pieces in which a streamed response body is delivered.
%% Fix: the guard previously read `when V > 0` without an integer
%% check; in Erlang term order any atom/tuple compares greater than 0,
%% so a malformed option value (e.g. an atom) would have been returned
%% as a "chunk size" and crashed later arithmetic.  Non-integer values
%% now fall back to the default.
get_stream_chunk_size(Options) ->
    case lists:keysearch(stream_chunk_size, 1, Options) of
        {value, {_, V}} when is_integer(V), V > 0 ->
            V;
        _ ->
            ?DEFAULT_STREAM_CHUNK_SIZE
    end.
%% (Re)arm the connection inactivity timer: cancel any running timer
%% first, then start a new one when a finite timeout is configured.
%% A non-integer timeout (e.g. infinity) leaves no timer armed.
set_inac_timer(State) ->
    cancel_timer(State#state.inactivity_timer_ref),
    set_inac_timer(State#state{inactivity_timer_ref = undefined},
                   get_inac_timeout(State)).

set_inac_timer(State, Timeout) when is_integer(Timeout) ->
    Ref = erlang:send_after(Timeout, self(), timeout),
    State#state{inactivity_timer_ref = Ref};
set_inac_timer(State, _) ->
    State.
%% Resolve the inactivity timeout: per-request option when a request is
%% in flight; otherwise the ibrowse config value, then the application
%% environment, then a hard default of 10 seconds.
get_inac_timeout(#state{cur_req = #request{options = Opts}}) ->
    get_value(inactivity_timeout, Opts, infinity);
get_inac_timeout(#state{cur_req = undefined}) ->
    case ibrowse:get_config_value(inactivity_timeout, undefined) of
        Val when is_integer(Val) ->
            Val;
        _ ->
            case application:get_env(ibrowse, inactivity_timeout) of
                {ok, Val} when is_integer(Val), Val > 0 ->
                    Val;
                _ ->
                    10000
            end
    end.
%% Emit the full outgoing request to the trace log when tracing is
%% enabled for this process (my_trace_flag in the process dictionary).
trace_request(Req) ->
    case get(my_trace_flag) of
        true ->
            NReq = to_binary(Req),
            do_trace("Sending request: ~n"
                     "--- Request Begin ---~n~s~n"
                     "--- Request End ---~n", [NReq]);
        _ -> ok
    end.
%% Emit the request body to the trace log when tracing is enabled,
%% but skip bodies larger than 1 KiB to keep the log readable.
trace_request_body(Body) ->
    case get(my_trace_flag) of
        true ->
            NBody = to_binary(Body),
            case size(NBody) > 1024 of
                true ->
                    ok;
                false ->
                    do_trace("Sending request body: ~n"
                             "--- Request Body Begin ---~n~s~n"
                             "--- Request Body End ---~n", [NBody])
            end;
        false ->
            ok
    end.
%% Coerce iodata to a binary; binaries pass through unchanged.
to_binary(Data) when is_binary(Data) -> Data;
to_binary(Data) when is_list(Data)   -> list_to_binary(Data).
|
eb755f89c4d669c25ad4d464a0a387e2cb625dd09b5da49907c548dbc939e99d | Bogdanp/nemea | config.rkt | #lang racket/base
(require (for-syntax racket/base)
gregor
koyo/config
net/url
racket/port
racket/runtime-path
racket/set
racket/string)
(current-option-name-prefix "NEMEA")
(define-option hostname
#:default "127.0.0.1")
(define-option listen-ip
#:default "127.0.0.1")
(define-option port
#:default "8000"
(string->number port))
(define-option timezone
#:default (current-timezone))
(define-option database-url
#:default "postgres:nemea@127.0.0.1/nemea"
(string->url database-url))
(define-syntax-rule (define/provide name e ...)
(begin
(define name e ...)
(provide name)))
(define/provide db-username (car (string-split (url-user database-url) ":")))
(define/provide db-password (cadr (string-split (url-user database-url) ":")))
(define/provide db-host (url-host database-url))
(define/provide db-port (or (url-port database-url) 5432))
(define/provide db-name (path/param-path (car (url-path database-url))))
(define-option db-max-connections
#:default "16"
(string->number db-max-connections))
(define-option db-max-idle-connections
#:default "2"
(string->number db-max-idle-connections))
(define-option batcher-channel-size
#:default "1000000"
(string->number batcher-channel-size))
(define-option batcher-timeout
#:default "5"
(string->number batcher-timeout))
(define-option log-level
#:default "info"
(string->symbol log-level))
(define-runtime-path spammers-file-path (build-path 'up "assets" "data" "spammers.txt"))
(define/provide spammers
(call-with-input-file spammers-file-path
(lambda (in)
(list->set
(string-split (port->string in) "\n")))))
| null | https://raw.githubusercontent.com/Bogdanp/nemea/6e6149007fb0c43d8f0fb2271b36f0ccad830703/nemea/config.rkt | racket | #lang racket/base
(require (for-syntax racket/base)
gregor
koyo/config
net/url
racket/port
racket/runtime-path
racket/set
racket/string)
(current-option-name-prefix "NEMEA")
(define-option hostname
#:default "127.0.0.1")
(define-option listen-ip
#:default "127.0.0.1")
(define-option port
#:default "8000"
(string->number port))
(define-option timezone
#:default (current-timezone))
(define-option database-url
#:default "postgres:nemea@127.0.0.1/nemea"
(string->url database-url))
(define-syntax-rule (define/provide name e ...)
(begin
(define name e ...)
(provide name)))
(define/provide db-username (car (string-split (url-user database-url) ":")))
(define/provide db-password (cadr (string-split (url-user database-url) ":")))
(define/provide db-host (url-host database-url))
(define/provide db-port (or (url-port database-url) 5432))
(define/provide db-name (path/param-path (car (url-path database-url))))
(define-option db-max-connections
#:default "16"
(string->number db-max-connections))
(define-option db-max-idle-connections
#:default "2"
(string->number db-max-idle-connections))
(define-option batcher-channel-size
#:default "1000000"
(string->number batcher-channel-size))
(define-option batcher-timeout
#:default "5"
(string->number batcher-timeout))
(define-option log-level
#:default "info"
(string->symbol log-level))
(define-runtime-path spammers-file-path (build-path 'up "assets" "data" "spammers.txt"))
(define/provide spammers
(call-with-input-file spammers-file-path
(lambda (in)
(list->set
(string-split (port->string in) "\n")))))
|
|
399776977c1f9cd7d45070cfd06e9c5900d6925c8d4afb8e5cea768da0b6f015 | fetburner/Coq2SML | term.ml | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(* This module instantiates the structure of generic deBruijn terms to Coq *)
open Util
open Pp
open Names
open Univ
open Esubst
open Validate
(* Coq abstract syntax with deBruijn variables; 'a is the type of sorts *)
(* Existential variables (evars) and metavariables are identified by
   plain integers in the checker. *)
type existential_key = int
type metavariable = int
(* This defines the strategy to use for verifiying a Cast *)
(* This defines Cases annotations *)
(* How a match construct was written in the source; only relevant to
   printing, never to checking. *)
type case_style = LetStyle | IfStyle | LetPatternStyle | MatchStyle |
RegularStyle
type case_printing =
{ ind_nargs : int; (* length of the arity of the inductive type *)
style : case_style }
(* Per-match annotation: the inductive matched on, its parameter count,
   constructor arities, and printing hints. *)
type case_info =
{ ci_ind : inductive;
ci_npar : int;
ci_cstr_ndecls : int array; (* number of pattern var of each constructor *)
ci_pp_info : case_printing (* not interpreted by the kernel *)
}
(* Structural validator for case_info; val_enum/val_tuple/... come from
   the Validate module and check marshalled values are well-formed. *)
let val_ci =
let val_cstyle = val_enum "case_style" 5 in
let val_cprint = val_tuple ~name:"case_printing" [|val_int;val_cstyle|] in
val_tuple ~name:"case_info" [|val_ind;val_int;val_array val_int;val_cprint|]
(* Sorts. *)
type contents = Pos | Null
(* Prop Null is Prop, Prop Pos is Set; Type carries a universe. *)
type sorts =
| Prop of contents (* proposition types *)
| Type of universe
type sorts_family = InProp | InSet | InType
(* Collapse a sort to its family, forgetting universe levels. *)
let family_of_sort = function
| Prop Null -> InProp
| Prop Pos -> InSet
| Type _ -> InType
let val_sort = val_sum "sort" 0 [|[|val_enum "cnt" 2|];[|val_univ|]|]
let val_sortfam = val_enum "sorts_family" 3
(********************************************************************)
(* Constructions as implemented *)
(********************************************************************)
(* [constr array] is an instance matching definitional [named_context]
   in the same order (i.e. last argument first) *)
(* An evar together with its instance: one term per variable of the
   evar's context. *)
type 'constr pexistential = existential_key * 'constr array
(* A block of mutually recursive (co)fixpoints: binder names, their
   types, and their bodies. *)
type 'constr prec_declaration =
name array * 'constr array * 'constr array
(* Fixpoint: (structurally decreasing argument index for each body,
   index of the body this Fix node denotes) plus the declarations. *)
type 'constr pfixpoint =
(int array * int) * 'constr prec_declaration
type 'constr pcofixpoint =
int * 'constr prec_declaration
(* Validators for the above, parameterised by the validator [f] of the
   instantiating term type. *)
let val_evar f = val_tuple ~name:"pexistential" [|val_int;val_array f|]
let val_prec f =
val_tuple ~name:"prec_declaration"
[|val_array val_name; val_array f; val_array f|]
let val_fix f =
val_tuple ~name:"pfixpoint"
[|val_tuple~name:"fix2"[|val_array val_int;val_int|];val_prec f|]
let val_cofix f = val_tuple ~name:"pcofixpoint"[|val_int;val_prec f|]
type cast_kind = VMcast | DEFAULTcast
let val_cast = val_enum "cast_kind" 2
(*s*******************************************************************)
(* The type of constructions *)
(* The syntax of Coq terms.  Bound variables are de Bruijn indices
   (Rel, counting from 1 innermost). *)
type constr =
| Rel of int
| Var of identifier
| Meta of metavariable
| Evar of constr pexistential
| Sort of sorts
| Cast of constr * cast_kind * constr
| Prod of name * constr * constr
| Lambda of name * constr * constr
| LetIn of name * constr * constr * constr
| App of constr * constr array
| Const of constant
| Ind of inductive
| Construct of constructor
| Case of case_info * constr * constr * constr array
| Fix of constr pfixpoint
| CoFix of constr pcofixpoint
(* Recursive validator for constr, one array entry per constructor.
   NOTE(review): several entries below appear as bare words (Var, Meta,
   Construct, CoFix) — their code seems to have been lost when this
   listing was extracted; confirm against the upstream term.ml before
   relying on this text. *)
let val_constr = val_rec_sum "constr" 0 (fun val_constr -> [|
[|val_int|]; (* Rel *)
Var
Meta
[|val_evar val_constr|]; (* Evar *)
[|val_sort|]; (* Sort *)
[|val_constr;val_cast;val_constr|]; (* Cast *)
[|val_name;val_constr;val_constr|]; (* Prod *)
[|val_name;val_constr;val_constr|]; (* Lambda *)
[|val_name;val_constr;val_constr;val_constr|]; (* LetIn *)
[|val_constr;val_array val_constr|]; (* App *)
[|val_con|]; (* Const *)
[|val_ind|]; (* Ind *)
Construct
[|val_ci;val_constr;val_constr;val_array val_constr|]; (* Case *)
[|val_fix val_constr|]; (* Fix *)
CoFix
|])
(* Handy aliases at the constr instance. *)
type existential = constr pexistential
type rec_declaration = constr prec_declaration
type fixpoint = constr pfixpoint
type cofixpoint = constr pcofixpoint
(* Peel off every outermost Cast node and return the underlying term. *)
let rec strip_outer_cast = function
  | Cast (inner, _, _) -> strip_outer_cast inner
  | term -> term
(* Flatten nested applications: App (App (f, a), b) becomes
   App (f, a @ b), looking through casts on the head so that the
   returned head is never itself an application. *)
let rec collapse_appl c = match c with
| App (f,cl) ->
let rec collapse_rec f cl2 =
match (strip_outer_cast f) with
| App (g,cl1) -> collapse_rec g (Array.append cl1 cl2)
| _ -> App (f,cl2)
in
collapse_rec f cl
| _ -> c
(* Split a term into a head and its argument list, first flattening
   nested applications with collapse_appl.  Non-applications return
   themselves with no arguments. *)
let decompose_app t =
  match collapse_appl t with
  | App (head, args) -> head, Array.to_list args
  | _ -> t, []

(* Inverse: apply a head to a list of arguments. *)
let applist (head, args) = App (head, Array.of_list args)
(****************************************************************************)
(* Functions for dealing with constr terms *)
(****************************************************************************)
(*********************)
(* Occurring *)
(*********************)
(* Apply [f] to every immediate subterm of [c].  The accumulator [n]
   (typically a binding depth) is transformed by [g] each time a binder
   is crossed; (co)fix bodies live under as many binders as there are
   mutual definitions. *)
let iter_constr_with_binders g f n c = match c with
| (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _
| Construct _) -> ()
| Cast (c,_,t) -> f n c; f n t
| Prod (_,t,c) -> f n t; f (g n) c
| Lambda (_,t,c) -> f n t; f (g n) c
| LetIn (_,b,t,c) -> f n b; f n t; f (g n) c
| App (c,l) -> f n c; Array.iter (f n) l
| Evar (_,l) -> Array.iter (f n) l
| Case (_,p,c,bl) -> f n p; f n c; Array.iter (f n) bl
| Fix (_,(_,tl,bl)) ->
Array.iter (f n) tl;
Array.iter (f (iterate g (Array.length tl) n)) bl
| CoFix (_,(_,tl,bl)) ->
Array.iter (f n) tl;
Array.iter (f (iterate g (Array.length tl) n)) bl
(* Internal exception used by the occurrence checkers below to abort
   the traversal early. *)
exception LocalOccur
( closedn n M ) raises FreeVar if a variable of height greater than n
occurs in M , returns ( ) otherwise
occurs in M, returns () otherwise *)
(* [closedn n c] is true iff [c] has no free Rel above depth [n]. *)
let closedn n c =
let rec closed_rec n c = match c with
| Rel m -> if m>n then raise LocalOccur
| _ -> iter_constr_with_binders succ closed_rec n c
in
try closed_rec n c; true with LocalOccur -> false
[ closed0 M ] is true iff [ M ] is a ( deBruijn ) closed term
let closed0 = closedn 0
( noccurn n M ) returns true iff ( Rel n ) does NOT occur in term M
let noccurn n term =
let rec occur_rec n c = match c with
| Rel m -> if m = n then raise LocalOccur
| _ -> iter_constr_with_binders succ occur_rec n c
in
try occur_rec n term; true with LocalOccur -> false
( noccur_between n m M ) returns true iff ( Rel p ) does NOT occur in term M
for n < = p < n+m
for n <= p < n+m *)
let noccur_between n m term =
let rec occur_rec n c = match c with
| Rel(p) -> if n<=p && p<n+m then raise LocalOccur
| _ -> iter_constr_with_binders succ occur_rec n c
in
try occur_rec n term; true with LocalOccur -> false
(* Checking function for terms containing existential variables.
   The function [noccur_with_meta] considers the fact that
   each existential variable (as well as each isevar)
   in the term appears applied to its local context,
   which may contain the CoFix variables.  These occurrences of CoFix
   variables are not considered *)
(* [noccur_with_meta n m term] is true iff no [Rel p] with
   n <= p < n+m occurs in [term], except under an applied Meta head or
   inside an Evar instance, which are skipped entirely.
   Fix: use [&&] instead of the deprecated [&] alias (consistent with
   [noccur_between] above); behaviour is unchanged. *)
let noccur_with_meta n m term =
let rec occur_rec n c = match c with
| Rel p -> if n<=p && p<n+m then raise LocalOccur
| App(f,cl) ->
(match f with
| (Cast (Meta _,_,_)| Meta _) -> ()
| _ -> iter_constr_with_binders succ occur_rec n c)
| Evar (_, _) -> ()
| _ -> iter_constr_with_binders succ occur_rec n c
in
try (occur_rec n term; true) with LocalOccur -> false
(*********************)
(* Lifting *)
(*********************)
(* Rebuild [c] with [f] applied to every immediate subterm; like
   iter_constr_with_binders, [g] advances the accumulator [l] under each
   binder. *)
let map_constr_with_binders g f l c = match c with
| (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _
| Construct _) -> c
| Cast (c,k,t) -> Cast (f l c, k, f l t)
| Prod (na,t,c) -> Prod (na, f l t, f (g l) c)
| Lambda (na,t,c) -> Lambda (na, f l t, f (g l) c)
| LetIn (na,b,t,c) -> LetIn (na, f l b, f l t, f (g l) c)
| App (c,al) -> App (f l c, Array.map (f l) al)
| Evar (e,al) -> Evar (e, Array.map (f l) al)
| Case (ci,p,c,bl) -> Case (ci, f l p, f l c, Array.map (f l) bl)
| Fix (ln,(lna,tl,bl)) ->
let l' = iterate g (Array.length tl) l in
Fix (ln,(lna,Array.map (f l) tl,Array.map (f l') bl))
| CoFix(ln,(lna,tl,bl)) ->
let l' = iterate g (Array.length tl) l in
CoFix (ln,(lna,Array.map (f l) tl,Array.map (f l') bl))
(* The generic lifting function *)
(* Relocate every free Rel of [c] through the explicit lifting [el]
   (see Esubst). *)
let rec exliftn el c = match c with
| Rel i -> Rel(reloc_rel i el)
| _ -> map_constr_with_binders el_lift exliftn el c
(* Lifting the binding depth across k bindings *)
(* [liftn k n c] adds [k] to every free Rel >= n; identity lifts are
   short-circuited. *)
let liftn k n =
match el_liftn (pred n) (el_shft k el_id) with
| ELID -> (fun c -> c)
| el -> exliftn el
(* [lift k] adds [k] to every free Rel. *)
let lift k = liftn k 1
(*********************)
(* Substituting *)
(*********************)
(* (subst1 M c) substitutes M for Rel(1) in c.
   We generalise it to (substl [M1,...,Mn] c) which substitutes in
   parallel M1,...,Mn for respectively Rel(1),...,Rel(n) in c *)
(* 1st : general case *)
(* A substituend caches whether it is closed, so that lifting it can be
   skipped after the first check (sinfo is mutable memoisation). *)
type info = Closed | Open | Unknown
type 'a substituend = { mutable sinfo: info; sit: 'a }
(* Lift the cached term by [depth], computing and recording closedness
   on first use. *)
let rec lift_substituend depth s =
match s.sinfo with
| Closed -> s.sit
| Open -> lift depth s.sit
| Unknown ->
s.sinfo <- if closed0 s.sit then Closed else Open;
lift_substituend depth s
let make_substituend c = { sinfo=Unknown; sit=c }
(* Parallel substitution: replace Rel (n+1)..Rel (n+lv) in [c] by the
   (lifted) substituends of [lamv]; higher Rels are shifted down. *)
let substn_many lamv n c =
let lv = Array.length lamv in
if lv = 0 then c
else
let rec substrec depth c = match c with
| Rel k ->
if k<=depth then c
else if k-depth <= lv then lift_substituend depth lamv.(k-depth-1)
else Rel (k-lv)
| _ -> map_constr_with_binders succ substrec depth c in
substrec n c
(* List-based wrappers over substn_many. *)
let substnl laml n =
substn_many (Array.map make_substituend (Array.of_list laml)) n
let substl laml = substnl laml 0
let subst1 lam = substl [lam]
(***************************************************************************)
(* Type of assumptions and contexts *)
(***************************************************************************)
(* Validators for declarations and contexts. *)
let val_ndecl =
val_tuple ~name:"named_declaration"[|val_id;val_opt val_constr;val_constr|]
let val_rdecl =
val_tuple ~name:"rel_declaration"[|val_name;val_opt val_constr;val_constr|]
let val_nctxt = val_list val_ndecl
let val_rctxt = val_list val_rdecl
(* A declaration is (binder, optional body, type); a body marks a local
   definition (let), its absence an assumption. *)
type named_declaration = identifier * constr option * constr
type rel_declaration = name * constr option * constr
type named_context = named_declaration list
let empty_named_context = []
let fold_named_context f l ~init = List.fold_right f l init
type section_context = named_context
type rel_context = rel_declaration list
let empty_rel_context = []
let rel_context_length = List.length
(* Number of proper hypotheses in a context: entries without a body
   (assumptions) count, let-bound entries do not. *)
let rel_context_nhyps ctxt =
  List.fold_left
    (fun count (_, body, _) ->
       match body with None -> count + 1 | Some _ -> count)
    0 ctxt
let fold_rel_context f l ~init = List.fold_right f l init
(* Map [f] over the body and type of every declaration, preserving
   physical sharing when nothing changes (smartmap combinators come from
   the Util/Option project modules). *)
let map_context f l =
let map_decl (n, body_o, typ as decl) =
let body_o' = Option.smartmap f body_o in
let typ' = f typ in
if body_o' == body_o && typ' == typ then decl else
(n, body_o', typ')
in
list_smartmap map_decl l
let map_rel_context = map_context
(* [extended_rel_list n hyps] builds the instance Rel (n+1), Rel (n+2),
   ... for the assumptions of [hyps], skipping let-bound entries
   (innermost assumption gets the smallest index, listed first). *)
let extended_rel_list n hyps =
let rec reln l p = function
| (_,None,_) :: hyps -> reln (Rel (n+p) :: l) (p+1) hyps
| (_,Some _,_) :: hyps -> reln l (p+1) hyps
| [] -> l
in
reln [] 1 hyps
(* Iterate lambda abstractions *)
(* compose_lam [xn:Tn;..;x1:T1] b = [x1:T1]..[xn:Tn]b *)
(* Iterated lambda abstraction:
   compose_lam [xn,Tn; ...; x1,T1] b = [x1:T1]..[xn:Tn]b.
   The binder list is consumed innermost binder first. *)
let compose_lam binders body =
  List.fold_left (fun acc (v, t) -> Lambda (v, t, acc)) body binders
(* Transforms a lambda term [x1:T1]..[xn:Tn]T into the pair
([(xn,Tn);...;(x1,T1)],T), where T is not a lambda *)
(* Strip leading lambdas (looking through casts), returning the binders
   in reverse order paired with the first non-lambda body. *)
let decompose_lam =
let rec lamdec_rec l c = match c with
| Lambda (x,t,c) -> lamdec_rec ((x,t)::l) c
| Cast (c,_,_) -> lamdec_rec l c
| _ -> l,c
in
lamdec_rec []
Decompose lambda abstractions and lets , until finding n
abstractions
abstractions *)
(* Strip exactly [n] lambdas, also collecting any interleaved lets into
   the returned rel_context; fails if fewer than [n] lambdas exist. *)
let decompose_lam_n_assum n =
if n < 0 then
error "decompose_lam_n_assum: integer parameter must be positive";
let rec lamdec_rec l n c =
if n=0 then l,c
else match c with
| Lambda (x,t,c) -> lamdec_rec ((x,None,t) :: l) (n-1) c
| LetIn (x,b,t,c) -> lamdec_rec ((x,Some b,t) :: l) n c
| Cast (c,_,_) -> lamdec_rec l n c
| c -> error "decompose_lam_n_assum: not enough abstractions"
in
lamdec_rec empty_rel_context n
(* Iterate products, with or without lets *)
Constructs either [ ( x : t)c ] or [ [ x = b : t]c ]
(* Wrap [c] in one binder: an assumption yields a product [(x:t)c], a
   local definition yields a let [[x=b:t]c]. *)
let mkProd_or_LetIn decl c =
  match decl with
  | (na, None, t) -> Prod (na, t, c)
  | (na, Some b, t) -> LetIn (na, b, t, c)

(* Fold a whole rel_context (innermost declaration first) around a
   term. *)
let it_mkProd_or_LetIn body ctxt =
  List.fold_left (fun acc decl -> mkProd_or_LetIn decl acc) body ctxt
(* Strip all leading products and lets (looking through casts) into a
   rel_context, returning it with the remaining body. *)
let decompose_prod_assum =
let rec prodec_rec l c =
match c with
| Prod (x,t,c) -> prodec_rec ((x,None,t) :: l) c
| LetIn (x,b,t,c) -> prodec_rec ((x,Some b,t) :: l) c
| Cast (c,_,_) -> prodec_rec l c
| _ -> l,c
in
prodec_rec empty_rel_context
(* Strip exactly [n] binders (products or lets both count here),
   failing if fewer are available. *)
let decompose_prod_n_assum n =
if n < 0 then
error "decompose_prod_n_assum: integer parameter must be positive";
let rec prodec_rec l n c =
if n=0 then l,c
else match c with
| Prod (x,t,c) -> prodec_rec ((x,None,t) :: l) (n-1) c
| LetIn (x,b,t,c) -> prodec_rec ((x,Some b,t) :: l) (n-1) c
| Cast (c,_,_) -> prodec_rec l n c
| c -> error "decompose_prod_n_assum: not enough assumptions"
in
prodec_rec empty_rel_context n
(***************************)
(* Other term constructors *)
(***************************)
(* An arity is a telescope of binders ending in a sort. *)
type arity = rel_context * sorts
let mkArity (sign,s) = it_mkProd_or_LetIn (Sort s) sign
(* Inverse of mkArity; raises (via anomaly) when the term does not end
   in a sort. *)
let destArity =
let rec prodec_rec l c =
match c with
| Prod (x,t,c) -> prodec_rec ((x,None,t)::l) c
| LetIn (x,b,t,c) -> prodec_rec ((x,Some b,t)::l) c
| Cast (c,_,_) -> prodec_rec l c
| Sort s -> l,s
| _ -> anomaly "destArity: not an arity"
in
prodec_rec []
(* A term is an arity when, after crossing products and casts and
   expanding lets, it ends in a Sort. *)
let rec isArity = function
  | Sort _ -> true
  | Prod (_, _, body) -> isArity body
  | Cast (inner, _, _) -> isArity inner
  | LetIn (_, def, _, body) -> isArity (subst1 def body)
  | _ -> false
(*******************************)
(* alpha conversion functions *)
(*******************************)
(* alpha conversion : ignore print names and casts *)
(* One-level comparison of two terms, delegating subterm comparison to
   [f]: ignores binder names and casts, compares applications up to
   reassociation.  NOTE(review): this uses the deprecated [&] alias of
   [&&]; harmless here, same semantics. *)
let compare_constr f t1 t2 =
match t1, t2 with
| Rel n1, Rel n2 -> n1 = n2
| Meta m1, Meta m2 -> m1 = m2
| Var id1, Var id2 -> id1 = id2
| Sort s1, Sort s2 -> s1 = s2
| Cast (c1,_,_), _ -> f c1 t2
| _, Cast (c2,_,_) -> f t1 c2
| Prod (_,t1,c1), Prod (_,t2,c2) -> f t1 t2 & f c1 c2
| Lambda (_,t1,c1), Lambda (_,t2,c2) -> f t1 t2 & f c1 c2
| LetIn (_,b1,t1,c1), LetIn (_,b2,t2,c2) -> f b1 b2 & f t1 t2 & f c1 c2
| App (c1,l1), App (c2,l2) ->
if Array.length l1 = Array.length l2 then
f c1 c2 & array_for_all2 f l1 l2
else
(* argument counts differ: re-associate both sides and retry *)
let (h1,l1) = decompose_app t1 in
let (h2,l2) = decompose_app t2 in
if List.length l1 = List.length l2 then
f h1 h2 & List.for_all2 f l1 l2
else false
| Evar (e1,l1), Evar (e2,l2) -> e1 = e2 & array_for_all2 f l1 l2
| Const c1, Const c2 -> eq_con_chk c1 c2
| Ind c1, Ind c2 -> eq_ind_chk c1 c2
| Construct (c1,i1), Construct (c2,i2) -> i1=i2 && eq_ind_chk c1 c2
| Case (_,p1,c1,bl1), Case (_,p2,c2,bl2) ->
f p1 p2 & f c1 c2 & array_for_all2 f bl1 bl2
| Fix (ln1,(_,tl1,bl1)), Fix (ln2,(_,tl2,bl2)) ->
ln1 = ln2 & array_for_all2 f tl1 tl2 & array_for_all2 f bl1 bl2
| CoFix(ln1,(_,tl1,bl1)), CoFix(ln2,(_,tl2,bl2)) ->
ln1 = ln2 & array_for_all2 f tl1 tl2 & array_for_all2 f bl1 bl2
| _ -> false
(* Structural (alpha-)equality on constrs: physical equality is tried
   first as a cheap shortcut, then compare_constr recurses.
   Fix: use [||] instead of the deprecated [or] alias; same semantics. *)
let rec eq_constr m n =
(m==n) ||
compare_constr eq_constr m n
(* Non-recursive eta-expanded wrapper, kept so tracing machinery does
   not see a recursive function. *)
let eq_constr m n = eq_constr m n
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/checker/term.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
This defines the strategy to use for verifiying a Cast
This defines Cases annotations
length of the arity of the inductive type
number of pattern var of each constructor
not interpreted by the kernel
Sorts.
proposition types
******************************************************************
Constructions as implemented
******************************************************************
s******************************************************************
The type of constructions
Rel
Evar
Sort
Cast
Prod
Lambda
LetIn
App
Const
Ind
Case
Fix
**************************************************************************
Functions for dealing with constr terms
**************************************************************************
*******************
Occurring
*******************
*******************
Lifting
*******************
The generic lifting function
Lifting the binding depth across k bindings
*******************
Substituting
*******************
1st : general case
*************************************************************************
Type of assumptions and contexts
*************************************************************************
Iterate lambda abstractions
compose_lam [xn:Tn;..;x1:T1] b = [x1:T1]..[xn:Tn]b
Transforms a lambda term [x1:T1]..[xn:Tn]T into the pair
([(xn,Tn);...;(x1,T1)],T), where T is not a lambda
Iterate products, with or without lets
*************************
Other term constructors
*************************
*****************************
alpha conversion functions
*****************************
alpha conversion : ignore print names and casts
to avoid tracing a recursive fun | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
This module instantiates the structure of generic deBruijn terms to Coq
open Util
open Pp
open Names
open Univ
open Esubst
open Validate
Coq abstract syntax with deBruijn variables ; ' a is the type of sorts
type existential_key = int
type metavariable = int
type case_style = LetStyle | IfStyle | LetPatternStyle | MatchStyle |
RegularStyle
type case_printing =
style : case_style }
type case_info =
{ ci_ind : inductive;
ci_npar : int;
}
let val_ci =
let val_cstyle = val_enum "case_style" 5 in
let val_cprint = val_tuple ~name:"case_printing" [|val_int;val_cstyle|] in
val_tuple ~name:"case_info" [|val_ind;val_int;val_array val_int;val_cprint|]
type contents = Pos | Null
type sorts =
| Type of universe
type sorts_family = InProp | InSet | InType
let family_of_sort = function
| Prop Null -> InProp
| Prop Pos -> InSet
| Type _ -> InType
let val_sort = val_sum "sort" 0 [|[|val_enum "cnt" 2|];[|val_univ|]|]
let val_sortfam = val_enum "sorts_family" 3
[ constr array ] is an instance matching definitional [ named_context ] in
the same order ( i.e. last argument first )
the same order (i.e. last argument first) *)
type 'constr pexistential = existential_key * 'constr array
type 'constr prec_declaration =
name array * 'constr array * 'constr array
type 'constr pfixpoint =
(int array * int) * 'constr prec_declaration
type 'constr pcofixpoint =
int * 'constr prec_declaration
let val_evar f = val_tuple ~name:"pexistential" [|val_int;val_array f|]
let val_prec f =
val_tuple ~name:"prec_declaration"
[|val_array val_name; val_array f; val_array f|]
let val_fix f =
val_tuple ~name:"pfixpoint"
[|val_tuple~name:"fix2"[|val_array val_int;val_int|];val_prec f|]
let val_cofix f = val_tuple ~name:"pcofixpoint"[|val_int;val_prec f|]
type cast_kind = VMcast | DEFAULTcast
let val_cast = val_enum "cast_kind" 2
type constr =
| Rel of int
| Var of identifier
| Meta of metavariable
| Evar of constr pexistential
| Sort of sorts
| Cast of constr * cast_kind * constr
| Prod of name * constr * constr
| Lambda of name * constr * constr
| LetIn of name * constr * constr * constr
| App of constr * constr array
| Const of constant
| Ind of inductive
| Construct of constructor
| Case of case_info * constr * constr * constr array
| Fix of constr pfixpoint
| CoFix of constr pcofixpoint
let val_constr = val_rec_sum "constr" 0 (fun val_constr -> [|
Var
Meta
Construct
CoFix
|])
type existential = constr pexistential
type rec_declaration = constr prec_declaration
type fixpoint = constr pfixpoint
type cofixpoint = constr pcofixpoint
let rec strip_outer_cast c = match c with
| Cast (c,_,_) -> strip_outer_cast c
| _ -> c
let rec collapse_appl c = match c with
| App (f,cl) ->
let rec collapse_rec f cl2 =
match (strip_outer_cast f) with
| App (g,cl1) -> collapse_rec g (Array.append cl1 cl2)
| _ -> App (f,cl2)
in
collapse_rec f cl
| _ -> c
let decompose_app c =
match collapse_appl c with
| App (f,cl) -> (f, Array.to_list cl)
| _ -> (c,[])
let applist (f,l) = App (f, Array.of_list l)
let iter_constr_with_binders g f n c = match c with
| (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _
| Construct _) -> ()
| Cast (c,_,t) -> f n c; f n t
| Prod (_,t,c) -> f n t; f (g n) c
| Lambda (_,t,c) -> f n t; f (g n) c
| LetIn (_,b,t,c) -> f n b; f n t; f (g n) c
| App (c,l) -> f n c; Array.iter (f n) l
| Evar (_,l) -> Array.iter (f n) l
| Case (_,p,c,bl) -> f n p; f n c; Array.iter (f n) bl
| Fix (_,(_,tl,bl)) ->
Array.iter (f n) tl;
Array.iter (f (iterate g (Array.length tl) n)) bl
| CoFix (_,(_,tl,bl)) ->
Array.iter (f n) tl;
Array.iter (f (iterate g (Array.length tl) n)) bl
exception LocalOccur
( closedn n M ) raises FreeVar if a variable of height greater than n
occurs in M , returns ( ) otherwise
occurs in M, returns () otherwise *)
let closedn n c =
let rec closed_rec n c = match c with
| Rel m -> if m>n then raise LocalOccur
| _ -> iter_constr_with_binders succ closed_rec n c
in
try closed_rec n c; true with LocalOccur -> false
[ closed0 M ] is true iff [ M ] is a ( deBruijn ) closed term
let closed0 = closedn 0
( noccurn n M ) returns true iff ( Rel n ) does NOT occur in term M
let noccurn n term =
let rec occur_rec n c = match c with
| Rel m -> if m = n then raise LocalOccur
| _ -> iter_constr_with_binders succ occur_rec n c
in
try occur_rec n term; true with LocalOccur -> false
( noccur_between n m M ) returns true iff ( Rel p ) does NOT occur in term M
for n < = p < n+m
for n <= p < n+m *)
let noccur_between n m term =
let rec occur_rec n c = match c with
| Rel(p) -> if n<=p && p<n+m then raise LocalOccur
| _ -> iter_constr_with_binders succ occur_rec n c
in
try occur_rec n term; true with LocalOccur -> false
Checking function for terms containing existential variables .
The function [ noccur_with_meta ] considers the fact that
each existential variable ( as well as each isevar )
in the term appears applied to its local context ,
which may contain the CoFix variables . These occurrences of CoFix variables
are not considered
The function [noccur_with_meta] considers the fact that
each existential variable (as well as each isevar)
in the term appears applied to its local context,
which may contain the CoFix variables. These occurrences of CoFix variables
are not considered *)
let noccur_with_meta n m term =
let rec occur_rec n c = match c with
| Rel p -> if n<=p & p<n+m then raise LocalOccur
| App(f,cl) ->
(match f with
| (Cast (Meta _,_,_)| Meta _) -> ()
| _ -> iter_constr_with_binders succ occur_rec n c)
| Evar (_, _) -> ()
| _ -> iter_constr_with_binders succ occur_rec n c
in
try (occur_rec n term; true) with LocalOccur -> false
let map_constr_with_binders g f l c = match c with
| (Rel _ | Meta _ | Var _ | Sort _ | Const _ | Ind _
| Construct _) -> c
| Cast (c,k,t) -> Cast (f l c, k, f l t)
| Prod (na,t,c) -> Prod (na, f l t, f (g l) c)
| Lambda (na,t,c) -> Lambda (na, f l t, f (g l) c)
| LetIn (na,b,t,c) -> LetIn (na, f l b, f l t, f (g l) c)
| App (c,al) -> App (f l c, Array.map (f l) al)
| Evar (e,al) -> Evar (e, Array.map (f l) al)
| Case (ci,p,c,bl) -> Case (ci, f l p, f l c, Array.map (f l) bl)
| Fix (ln,(lna,tl,bl)) ->
let l' = iterate g (Array.length tl) l in
Fix (ln,(lna,Array.map (f l) tl,Array.map (f l') bl))
| CoFix(ln,(lna,tl,bl)) ->
let l' = iterate g (Array.length tl) l in
CoFix (ln,(lna,Array.map (f l) tl,Array.map (f l') bl))
let rec exliftn el c = match c with
| Rel i -> Rel(reloc_rel i el)
| _ -> map_constr_with_binders el_lift exliftn el c
let liftn k n =
match el_liftn (pred n) (el_shft k el_id) with
| ELID -> (fun c -> c)
| el -> exliftn el
let lift k = liftn k 1
( subst1 M c ) substitutes M for Rel(1 ) in c
we generalise it to ( substl [ M1, ... ,Mn ] c ) which substitutes in parallel
M1, ... ,Mn for respectively Rel(1), ... ,Rel(n ) in c
we generalise it to (substl [M1,...,Mn] c) which substitutes in parallel
M1,...,Mn for respectively Rel(1),...,Rel(n) in c *)
type info = Closed | Open | Unknown
type 'a substituend = { mutable sinfo: info; sit: 'a }
let rec lift_substituend depth s =
match s.sinfo with
| Closed -> s.sit
| Open -> lift depth s.sit
| Unknown ->
s.sinfo <- if closed0 s.sit then Closed else Open;
lift_substituend depth s
let make_substituend c = { sinfo=Unknown; sit=c }
let substn_many lamv n c =
let lv = Array.length lamv in
if lv = 0 then c
else
let rec substrec depth c = match c with
| Rel k ->
if k<=depth then c
else if k-depth <= lv then lift_substituend depth lamv.(k-depth-1)
else Rel (k-lv)
| _ -> map_constr_with_binders succ substrec depth c in
substrec n c
let substnl laml n =
substn_many (Array.map make_substituend (Array.of_list laml)) n
let substl laml = substnl laml 0
let subst1 lam = substl [lam]
let val_ndecl =
val_tuple ~name:"named_declaration"[|val_id;val_opt val_constr;val_constr|]
let val_rdecl =
val_tuple ~name:"rel_declaration"[|val_name;val_opt val_constr;val_constr|]
let val_nctxt = val_list val_ndecl
let val_rctxt = val_list val_rdecl
type named_declaration = identifier * constr option * constr
type rel_declaration = name * constr option * constr
type named_context = named_declaration list
let empty_named_context = []
let fold_named_context f l ~init = List.fold_right f l init
type section_context = named_context
type rel_context = rel_declaration list
let empty_rel_context = []
let rel_context_length = List.length
let rel_context_nhyps hyps =
let rec nhyps acc = function
| [] -> acc
| (_,None,_)::hyps -> nhyps (1+acc) hyps
| (_,Some _,_)::hyps -> nhyps acc hyps in
nhyps 0 hyps
let fold_rel_context f l ~init = List.fold_right f l init
let map_context f l =
let map_decl (n, body_o, typ as decl) =
let body_o' = Option.smartmap f body_o in
let typ' = f typ in
if body_o' == body_o && typ' == typ then decl else
(n, body_o', typ')
in
list_smartmap map_decl l
let map_rel_context = map_context
let extended_rel_list n hyps =
let rec reln l p = function
| (_,None,_) :: hyps -> reln (Rel (n+p) :: l) (p+1) hyps
| (_,Some _,_) :: hyps -> reln l (p+1) hyps
| [] -> l
in
reln [] 1 hyps
let compose_lam l b =
let rec lamrec = function
| ([], b) -> b
| ((v,t)::l, b) -> lamrec (l, Lambda (v,t,b))
in
lamrec (l,b)
let decompose_lam =
let rec lamdec_rec l c = match c with
| Lambda (x,t,c) -> lamdec_rec ((x,t)::l) c
| Cast (c,_,_) -> lamdec_rec l c
| _ -> l,c
in
lamdec_rec []
Decompose lambda abstractions and lets , until finding n
abstractions
abstractions *)
let decompose_lam_n_assum n =
if n < 0 then
error "decompose_lam_n_assum: integer parameter must be positive";
let rec lamdec_rec l n c =
if n=0 then l,c
else match c with
| Lambda (x,t,c) -> lamdec_rec ((x,None,t) :: l) (n-1) c
| LetIn (x,b,t,c) -> lamdec_rec ((x,Some b,t) :: l) n c
| Cast (c,_,_) -> lamdec_rec l n c
| c -> error "decompose_lam_n_assum: not enough abstractions"
in
lamdec_rec empty_rel_context n
Constructs either [ ( x : t)c ] or [ [ x = b : t]c ]
let mkProd_or_LetIn (na,body,t) c =
match body with
| None -> Prod (na, t, c)
| Some b -> LetIn (na, b, t, c)
let it_mkProd_or_LetIn = List.fold_left (fun c d -> mkProd_or_LetIn d c)
let decompose_prod_assum =
let rec prodec_rec l c =
match c with
| Prod (x,t,c) -> prodec_rec ((x,None,t) :: l) c
| LetIn (x,b,t,c) -> prodec_rec ((x,Some b,t) :: l) c
| Cast (c,_,_) -> prodec_rec l c
| _ -> l,c
in
prodec_rec empty_rel_context
let decompose_prod_n_assum n =
if n < 0 then
error "decompose_prod_n_assum: integer parameter must be positive";
let rec prodec_rec l n c =
if n=0 then l,c
else match c with
| Prod (x,t,c) -> prodec_rec ((x,None,t) :: l) (n-1) c
| LetIn (x,b,t,c) -> prodec_rec ((x,Some b,t) :: l) (n-1) c
| Cast (c,_,_) -> prodec_rec l n c
| c -> error "decompose_prod_n_assum: not enough assumptions"
in
prodec_rec empty_rel_context n
type arity = rel_context * sorts
let mkArity (sign,s) = it_mkProd_or_LetIn (Sort s) sign
let destArity =
let rec prodec_rec l c =
match c with
| Prod (x,t,c) -> prodec_rec ((x,None,t)::l) c
| LetIn (x,b,t,c) -> prodec_rec ((x,Some b,t)::l) c
| Cast (c,_,_) -> prodec_rec l c
| Sort s -> l,s
| _ -> anomaly "destArity: not an arity"
in
prodec_rec []
let rec isArity c =
match c with
| Prod (_,_,c) -> isArity c
| LetIn (_,b,_,c) -> isArity (subst1 b c)
| Cast (c,_,_) -> isArity c
| Sort _ -> true
| _ -> false
let compare_constr f t1 t2 =
match t1, t2 with
| Rel n1, Rel n2 -> n1 = n2
| Meta m1, Meta m2 -> m1 = m2
| Var id1, Var id2 -> id1 = id2
| Sort s1, Sort s2 -> s1 = s2
| Cast (c1,_,_), _ -> f c1 t2
| _, Cast (c2,_,_) -> f t1 c2
| Prod (_,t1,c1), Prod (_,t2,c2) -> f t1 t2 & f c1 c2
| Lambda (_,t1,c1), Lambda (_,t2,c2) -> f t1 t2 & f c1 c2
| LetIn (_,b1,t1,c1), LetIn (_,b2,t2,c2) -> f b1 b2 & f t1 t2 & f c1 c2
| App (c1,l1), App (c2,l2) ->
if Array.length l1 = Array.length l2 then
f c1 c2 & array_for_all2 f l1 l2
else
let (h1,l1) = decompose_app t1 in
let (h2,l2) = decompose_app t2 in
if List.length l1 = List.length l2 then
f h1 h2 & List.for_all2 f l1 l2
else false
| Evar (e1,l1), Evar (e2,l2) -> e1 = e2 & array_for_all2 f l1 l2
| Const c1, Const c2 -> eq_con_chk c1 c2
| Ind c1, Ind c2 -> eq_ind_chk c1 c2
| Construct (c1,i1), Construct (c2,i2) -> i1=i2 && eq_ind_chk c1 c2
| Case (_,p1,c1,bl1), Case (_,p2,c2,bl2) ->
f p1 p2 & f c1 c2 & array_for_all2 f bl1 bl2
| Fix (ln1,(_,tl1,bl1)), Fix (ln2,(_,tl2,bl2)) ->
ln1 = ln2 & array_for_all2 f tl1 tl2 & array_for_all2 f bl1 bl2
| CoFix(ln1,(_,tl1,bl1)), CoFix(ln2,(_,tl2,bl2)) ->
ln1 = ln2 & array_for_all2 f tl1 tl2 & array_for_all2 f bl1 bl2
| _ -> false
let rec eq_constr m n =
(m==n) or
compare_constr eq_constr m n
|
7b9f69447894540b58745c43e0355af18300754aab5bbf5c1b521fac27b1c3db | ucsd-progsys/liquid-fixpoint | UndoANFTests.hs | {-# LANGUAGE OverloadedStrings #-}
module UndoANFTests(tests) where
import Language.Fixpoint.Types (SortedReft(..), Symbol,
isPrefixOfSym, anfPrefix, syms)
import Language.Fixpoint.Solver.EnvironmentReduction (undoANFSimplifyingWith)
import Arbitrary
import qualified Data.HashMap.Strict as M
import Test.Tasty (TestTree, testGroup, adjustOption)
import Test.Tasty.HUnit ((@?=))
import qualified Test.Tasty.HUnit as H
import Test.Tasty.QuickCheck ((===))
import qualified Test.Tasty.QuickCheck as Q
-- | Test tree for 'undoANFSimplifyingWith' applied with identity
-- lenses: the empty environment maps to the empty map, anf-free
-- environments are unchanged, and environments with (flat or chained)
-- lq_anf$* bindings end up with no anf variables at all.
tests :: TestTree
tests =
withOptions $
testGroup
"undoANFSimplifyingWith id id"
[ H.testCase "id on empty env" $
simpleUndoANF [] @?= M.empty
, Q.testProperty "id when env contains no lq_anf$* bindings" $
prop_no_change (M.fromList . unEnv . unNoAnfEnv) simpleUndoANFNoAnfEnv
, testGroup
"zero anf vars left afterwards, starting with:"
[ Q.testProperty "no anf vars" $
prop_no_anfs simpleUndoANFNoAnfEnv
, Q.testProperty "single-level anf vars" $
prop_no_anfs simpleUndoANFFlatAnfEnv
, Q.testProperty "chained anf vars" $
prop_no_anfs simpleUndoANFChainedAnfEnv
]
]
where
withOptions = adjustOption (min (Q.QuickCheckMaxSize 8)) -- adjustOption . min because we don't want to default to the enormous value.
-- adjustOption . min, because we may want larger on the command line.
-- | 5 seconds (in microseconds).
timeout :: Int
timeout = 5000000
-- | Property: applying @f@ leaves the environment unchanged (up to the
-- conversion @toHashMap@), within the time budget.
prop_no_change :: (Q.Arbitrary e, Eq e, Show e) => (e -> M.HashMap Symbol SortedReft) -> (e -> M.HashMap Symbol SortedReft) -> e -> Q.Property
prop_no_change toHashMap f e = Q.within timeout $ f e === toHashMap e
-- | Property: after applying @f@, no binding mentions an lq_anf$*
-- symbol anywhere in its refinement.
prop_no_anfs :: (Q.Arbitrary e, Eq e, Show e) => (e -> M.HashMap Symbol SortedReft) -> e -> Q.Property
prop_no_anfs f e = Q.within timeout . checkNoAnfs . f $ e
where
-- Keep only bindings whose refinement mentions an anf var; must be empty.
checkNoAnfs m = M.filter (any isAnfVar . syms) m === M.empty
isAnfVar = isPrefixOfSym anfPrefix
-- | We perform tests with only trivial lenses (i.e. id)
-- | Run undoANF with identity simplification lenses on an association
-- list of bindings.
simpleUndoANF :: [(Symbol, SortedReft)] -> M.HashMap Symbol SortedReft
simpleUndoANF = undoANFSimplifyingWith id id . M.fromList
----------------------------------------------------
-- | simpleUndoANF conjugated with various newtypes
----------------------------------------------------
-- Unwrap each generator newtype (defined in the Arbitrary module) and
-- feed the underlying environment to simpleUndoANF.
simpleUndoANFEnv :: Env -> M.HashMap Symbol SortedReft
simpleUndoANFEnv = simpleUndoANF . unEnv
simpleUndoANFNoAnfEnv :: NoAnfEnv -> M.HashMap Symbol SortedReft
simpleUndoANFNoAnfEnv = simpleUndoANFEnv . unNoAnfEnv
simpleUndoANFFlatAnfEnv :: FlatAnfEnv -> M.HashMap Symbol SortedReft
simpleUndoANFFlatAnfEnv = simpleUndoANFEnv . unFlatAnfEnv
simpleUndoANFChainedAnfEnv :: ChainedAnfEnv -> M.HashMap Symbol SortedReft
simpleUndoANFChainedAnfEnv = simpleUndoANFEnv . unChainedAnfEnv
| null | https://raw.githubusercontent.com/ucsd-progsys/liquid-fixpoint/ebf1b6ea02ba21b67b5e12b328505a2d5cb6cc13/tests/tasty/UndoANFTests.hs | haskell | # LANGUAGE OverloadedStrings #
adjustOption . min because we don't want to default to the enormous value.
| 5 seconds (in microseconds).
| We perform tests with only trivial lenses (i.e. id)
--------------------------------------------------
-------------------------------------------------- |
module UndoANFTests(tests) where
import Language.Fixpoint.Types (SortedReft(..), Symbol,
isPrefixOfSym, anfPrefix, syms)
import Language.Fixpoint.Solver.EnvironmentReduction (undoANFSimplifyingWith)
import Arbitrary
import qualified Data.HashMap.Strict as M
import Test.Tasty (TestTree, testGroup, adjustOption)
import Test.Tasty.HUnit ((@?=))
import qualified Test.Tasty.HUnit as H
import Test.Tasty.QuickCheck ((===))
import qualified Test.Tasty.QuickCheck as Q
tests :: TestTree
tests =
withOptions $
testGroup
"undoANFSimplifyingWith id id"
[ H.testCase "id on empty env" $
simpleUndoANF [] @?= M.empty
, Q.testProperty "id when env contains no lq_anf$* bindings" $
prop_no_change (M.fromList . unEnv . unNoAnfEnv) simpleUndoANFNoAnfEnv
, testGroup
"zero anf vars left afterwards, starting with:"
[ Q.testProperty "no anf vars" $
prop_no_anfs simpleUndoANFNoAnfEnv
, Q.testProperty "single-level anf vars" $
prop_no_anfs simpleUndoANFFlatAnfEnv
, Q.testProperty "chained anf vars" $
prop_no_anfs simpleUndoANFChainedAnfEnv
]
]
where
adjustOption . because we may want larger on the command line .
timeout :: Int
timeout = 5000000
prop_no_change :: (Q.Arbitrary e, Eq e, Show e) => (e -> M.HashMap Symbol SortedReft) -> (e -> M.HashMap Symbol SortedReft) -> e -> Q.Property
prop_no_change toHashMap f e = Q.within timeout $ f e === toHashMap e
prop_no_anfs :: (Q.Arbitrary e, Eq e, Show e) => (e -> M.HashMap Symbol SortedReft) -> e -> Q.Property
prop_no_anfs f e = Q.within timeout . checkNoAnfs . f $ e
where
checkNoAnfs m = M.filter (any isAnfVar . syms) m === M.empty
isAnfVar = isPrefixOfSym anfPrefix
simpleUndoANF :: [(Symbol, SortedReft)] -> M.HashMap Symbol SortedReft
simpleUndoANF = undoANFSimplifyingWith id id . M.fromList
| simpleUndoANF conjugated with various newtypes
simpleUndoANFEnv :: Env -> M.HashMap Symbol SortedReft
simpleUndoANFEnv = simpleUndoANF . unEnv
simpleUndoANFNoAnfEnv :: NoAnfEnv -> M.HashMap Symbol SortedReft
simpleUndoANFNoAnfEnv = simpleUndoANFEnv . unNoAnfEnv
simpleUndoANFFlatAnfEnv :: FlatAnfEnv -> M.HashMap Symbol SortedReft
simpleUndoANFFlatAnfEnv = simpleUndoANFEnv . unFlatAnfEnv
simpleUndoANFChainedAnfEnv :: ChainedAnfEnv -> M.HashMap Symbol SortedReft
simpleUndoANFChainedAnfEnv = simpleUndoANFEnv . unChainedAnfEnv
|
8a5d150004ece1ac2bd7e9aea47687a65bb89d62b206246657f94c1e6f95b132 | input-output-hk/plutus | InterList.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module PlutusCore.Examples.Data.InterList
( interListData
, interNil
, interCons
, foldrInterList
) where
import PlutusCore.Core
import PlutusCore.MkPlc
import PlutusCore.Name
import PlutusCore.Quote
import PlutusCore.StdLib.Data.Function
import PlutusCore.StdLib.Data.Unit
import PlutusCore.StdLib.Type
import Universe
Note [ InterList ]
We encode the following in this module :
open import Function
data InterList ( A B : Set ) : Set where
InterNil : InterList A B
InterCons : A - > B - > InterList B A - > InterList A B
foldrInterList : ( A B R : Set ) - > ( A - > B - > R - > R ) - > R - > InterList A B - > R
foldrInterList A0 R f0 z = go A0 f0 where
go : ∀ A B - > ( A - > B - > R - > R ) - > InterList A B - > R
go A B f InterNil = z
go A B f ( InterCons x y xs ) = f x y $ go B A ( flip f ) xs
We encode the following in this module:
open import Function
data InterList (A B : Set) : Set where
InterNil : InterList A B
InterCons : A -> B -> InterList B A -> InterList A B
foldrInterList : (A B R : Set) -> (A -> B -> R -> R) -> R -> InterList A B -> R
foldrInterList A0 B0 R f0 z = go A0 B0 f0 where
go : ∀ A B -> (A -> B -> R -> R) -> InterList A B -> R
go A B f InterNil = z
go A B f (InterCons x y xs) = f x y $ go B A (flip f) xs
-}
This definition is used as an example in Note [ Spiney API ] in " PlutusCore . StdLib . Type " ,
-- so if you change it here, then also change it there.
| @InterList@ as a PLC type .
--
> fix \(interlist : : * - > * - > * ) ( a : : * ) ( b : : * ) - >
> all ( r : : * ) . r - > ( a - > b - a - > r ) - > r
interListData :: RecursiveType uni fun ()
interListData = runQuote $ do
a <- freshTyName "a"
b <- freshTyName "b"
interlist <- freshTyName "interlist"
r <- freshTyName "r"
let interlistBA = mkIterTyApp () (TyVar () interlist) [TyVar () b, TyVar () a]
makeRecursiveType () interlist [TyVarDecl () a $ Type (), TyVarDecl () b $ Type ()]
. TyForall () r (Type ())
. TyFun () (TyVar () r)
. TyFun () (mkIterTyFun () [TyVar () a, TyVar () b, interlistBA] $ TyVar () r)
$ TyVar () r
interNil :: Term TyName Name uni fun ()
interNil = runQuote $ do
let RecursiveType interlist wrapInterList = interListData
a <- freshTyName "a"
b <- freshTyName "b"
r <- freshTyName "r"
z <- freshName "z"
f <- freshName "f"
let interlistBA = mkIterTyApp () interlist [TyVar () b, TyVar () a]
return
. TyAbs () a (Type ())
. TyAbs () b (Type ())
. wrapInterList [TyVar () a, TyVar () b]
. TyAbs () r (Type ())
. LamAbs () z (TyVar () r)
. LamAbs () f (mkIterTyFun () [TyVar () a, TyVar () b, interlistBA] $ TyVar () r)
$ Var () z
interCons :: Term TyName Name uni fun ()
interCons = runQuote $ do
let RecursiveType interlist wrapInterList = interListData
a <- freshTyName "a"
b <- freshTyName "b"
x <- freshName "x"
y <- freshName "y"
xs <- freshName "xs"
r <- freshTyName "r"
z <- freshName "z"
f <- freshName "f"
let interlistBA = mkIterTyApp () interlist [TyVar () b, TyVar () a]
return
. TyAbs () a (Type ())
. TyAbs () b (Type ())
. LamAbs () x (TyVar () a)
. LamAbs () y (TyVar () b)
. LamAbs () xs interlistBA
. wrapInterList [TyVar () a, TyVar () b]
. TyAbs () r (Type ())
. LamAbs () z (TyVar () r)
. LamAbs () f (mkIterTyFun () [TyVar () a, TyVar () b, interlistBA] $ TyVar () r)
$ mkIterApp () (Var () f)
[ Var () x
, Var () y
, Var () xs
]
foldrInterList :: uni `Includes` () => Term TyName Name uni fun ()
foldrInterList = runQuote $ do
let interlist = _recursiveType interListData
a0 <- freshTyName "a0"
b0 <- freshTyName "b0"
r <- freshTyName "r"
f <- freshName "f"
z <- freshName "z"
rec <- freshName "rec"
u <- freshName "u"
a <- freshTyName "a"
b <- freshTyName "b"
f' <- freshName "f'"
xs <- freshName "xs"
x <- freshName "x"
y <- freshName "y"
xs' <- freshName "xs'"
x' <- freshName "x'"
y' <- freshName "y'"
let interlistOf a' b' = mkIterTyApp () interlist [TyVar () a', TyVar () b']
fTy a' b' = mkIterTyFun () [TyVar () a', TyVar () b', TyVar () r] $ TyVar () r
fixTyArg2
= TyForall () a (Type ())
. TyForall () b (Type ())
. TyFun () (fTy a b)
. TyFun () (interlistOf a b)
$ TyVar () r
instedFix = mkIterInst () fix [unit, fixTyArg2]
unwrappedXs = TyInst () (Unwrap () (Var () xs)) $ TyVar () r
instedRec = mkIterInst () (Apply () (Var () rec) unitval) [TyVar () b, TyVar () a]
return
. TyAbs () a0 (Type ())
. TyAbs () b0 (Type ())
. TyAbs () r (Type ())
. LamAbs () f (fTy a0 b0)
. LamAbs () z (TyVar () r)
$ mkIterInst ()
( mkIterApp () instedFix
[ LamAbs () rec (TyFun () unit fixTyArg2)
. LamAbs () u unit
. TyAbs () a (Type ())
. TyAbs () b (Type ())
. LamAbs () f' (fTy a b)
. LamAbs () xs (interlistOf a b)
$ mkIterApp () unwrappedXs
[ Var () z
, LamAbs () x (TyVar () a)
. LamAbs () y (TyVar () b)
. LamAbs () xs' (interlistOf b a)
$ mkIterApp () (Var () f')
[ Var () x
, Var () y
, mkIterApp () instedRec
[ LamAbs () y' (TyVar () b)
. LamAbs () x' (TyVar () a)
$ mkIterApp () (Var () f')
[ Var () x'
, Var () y'
]
, Var () xs'
]
]
]
, unitval
]
)
[ TyVar () a0
, TyVar () b0
]
| null | https://raw.githubusercontent.com/input-output-hk/plutus/1f31e640e8a258185db01fa899da63f9018c0e85/plutus-core/plutus-core/examples/PlutusCore/Examples/Data/InterList.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators #
so if you change it here, then also change it there.
|
module PlutusCore.Examples.Data.InterList
( interListData
, interNil
, interCons
, foldrInterList
) where
import PlutusCore.Core
import PlutusCore.MkPlc
import PlutusCore.Name
import PlutusCore.Quote
import PlutusCore.StdLib.Data.Function
import PlutusCore.StdLib.Data.Unit
import PlutusCore.StdLib.Type
import Universe
Note [ InterList ]
We encode the following in this module :
open import Function
data InterList ( A B : Set ) : Set where
InterNil : InterList A B
InterCons : A - > B - > InterList B A - > InterList A B
foldrInterList : ( A B R : Set ) - > ( A - > B - > R - > R ) - > R - > InterList A B - > R
foldrInterList A0 R f0 z = go A0 f0 where
go : ∀ A B - > ( A - > B - > R - > R ) - > InterList A B - > R
go A B f InterNil = z
go A B f ( InterCons x y xs ) = f x y $ go B A ( flip f ) xs
We encode the following in this module:
open import Function
data InterList (A B : Set) : Set where
InterNil : InterList A B
InterCons : A -> B -> InterList B A -> InterList A B
foldrInterList : (A B R : Set) -> (A -> B -> R -> R) -> R -> InterList A B -> R
foldrInterList A0 B0 R f0 z = go A0 B0 f0 where
go : ∀ A B -> (A -> B -> R -> R) -> InterList A B -> R
go A B f InterNil = z
go A B f (InterCons x y xs) = f x y $ go B A (flip f) xs
-}
This definition is used as an example in Note [ Spiney API ] in " PlutusCore . StdLib . Type " ,
| @InterList@ as a PLC type .
> fix \(interlist : : * - > * - > * ) ( a : : * ) ( b : : * ) - >
> all ( r : : * ) . r - > ( a - > b - a - > r ) - > r
interListData :: RecursiveType uni fun ()
interListData = runQuote $ do
a <- freshTyName "a"
b <- freshTyName "b"
interlist <- freshTyName "interlist"
r <- freshTyName "r"
let interlistBA = mkIterTyApp () (TyVar () interlist) [TyVar () b, TyVar () a]
makeRecursiveType () interlist [TyVarDecl () a $ Type (), TyVarDecl () b $ Type ()]
. TyForall () r (Type ())
. TyFun () (TyVar () r)
. TyFun () (mkIterTyFun () [TyVar () a, TyVar () b, interlistBA] $ TyVar () r)
$ TyVar () r
interNil :: Term TyName Name uni fun ()
interNil = runQuote $ do
let RecursiveType interlist wrapInterList = interListData
a <- freshTyName "a"
b <- freshTyName "b"
r <- freshTyName "r"
z <- freshName "z"
f <- freshName "f"
let interlistBA = mkIterTyApp () interlist [TyVar () b, TyVar () a]
return
. TyAbs () a (Type ())
. TyAbs () b (Type ())
. wrapInterList [TyVar () a, TyVar () b]
. TyAbs () r (Type ())
. LamAbs () z (TyVar () r)
. LamAbs () f (mkIterTyFun () [TyVar () a, TyVar () b, interlistBA] $ TyVar () r)
$ Var () z
interCons :: Term TyName Name uni fun ()
interCons = runQuote $ do
let RecursiveType interlist wrapInterList = interListData
a <- freshTyName "a"
b <- freshTyName "b"
x <- freshName "x"
y <- freshName "y"
xs <- freshName "xs"
r <- freshTyName "r"
z <- freshName "z"
f <- freshName "f"
let interlistBA = mkIterTyApp () interlist [TyVar () b, TyVar () a]
return
. TyAbs () a (Type ())
. TyAbs () b (Type ())
. LamAbs () x (TyVar () a)
. LamAbs () y (TyVar () b)
. LamAbs () xs interlistBA
. wrapInterList [TyVar () a, TyVar () b]
. TyAbs () r (Type ())
. LamAbs () z (TyVar () r)
. LamAbs () f (mkIterTyFun () [TyVar () a, TyVar () b, interlistBA] $ TyVar () r)
$ mkIterApp () (Var () f)
[ Var () x
, Var () y
, Var () xs
]
foldrInterList :: uni `Includes` () => Term TyName Name uni fun ()
foldrInterList = runQuote $ do
let interlist = _recursiveType interListData
a0 <- freshTyName "a0"
b0 <- freshTyName "b0"
r <- freshTyName "r"
f <- freshName "f"
z <- freshName "z"
rec <- freshName "rec"
u <- freshName "u"
a <- freshTyName "a"
b <- freshTyName "b"
f' <- freshName "f'"
xs <- freshName "xs"
x <- freshName "x"
y <- freshName "y"
xs' <- freshName "xs'"
x' <- freshName "x'"
y' <- freshName "y'"
let interlistOf a' b' = mkIterTyApp () interlist [TyVar () a', TyVar () b']
fTy a' b' = mkIterTyFun () [TyVar () a', TyVar () b', TyVar () r] $ TyVar () r
fixTyArg2
= TyForall () a (Type ())
. TyForall () b (Type ())
. TyFun () (fTy a b)
. TyFun () (interlistOf a b)
$ TyVar () r
instedFix = mkIterInst () fix [unit, fixTyArg2]
unwrappedXs = TyInst () (Unwrap () (Var () xs)) $ TyVar () r
instedRec = mkIterInst () (Apply () (Var () rec) unitval) [TyVar () b, TyVar () a]
return
. TyAbs () a0 (Type ())
. TyAbs () b0 (Type ())
. TyAbs () r (Type ())
. LamAbs () f (fTy a0 b0)
. LamAbs () z (TyVar () r)
$ mkIterInst ()
( mkIterApp () instedFix
[ LamAbs () rec (TyFun () unit fixTyArg2)
. LamAbs () u unit
. TyAbs () a (Type ())
. TyAbs () b (Type ())
. LamAbs () f' (fTy a b)
. LamAbs () xs (interlistOf a b)
$ mkIterApp () unwrappedXs
[ Var () z
, LamAbs () x (TyVar () a)
. LamAbs () y (TyVar () b)
. LamAbs () xs' (interlistOf b a)
$ mkIterApp () (Var () f')
[ Var () x
, Var () y
, mkIterApp () instedRec
[ LamAbs () y' (TyVar () b)
. LamAbs () x' (TyVar () a)
$ mkIterApp () (Var () f')
[ Var () x'
, Var () y'
]
, Var () xs'
]
]
]
, unitval
]
)
[ TyVar () a0
, TyVar () b0
]
|
6e9d9865de3b203fa31160caf1f5591872f145f60d56740dae264ade48b21105 | earl-ducaine/cl-garnet | demo-moveline.lisp | -*- Mode : LISP ; Syntax : Common - Lisp ; Package : DEMO - MOVELINE ; Base : 10 -*-
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
The Garnet User Interface Development Environment . ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; This code was written as part of the Garnet project at ;;;
Carnegie Mellon University , and has been placed in the public ; ; ;
domain . If you are using this code or any part of Garnet , ; ; ;
;;; please contact to be put on the mailing list. ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; This file contains demo code for testing if the ends of lines can be
;;; changed with the mouse.
;;;
;;; This is intended as a test and demonstration of the move-grow
;;; interactor as part of the Garnet project.
;;;
;;; ** Call (demo-moveline:Do-Go) to start and (demo-moveline:Do-Stop) to stop **
;;;
Designed and implemented by
;;;
27 - May-92 Pervin - The latest CMUCL requires that the
;;; argument to random be declared an integer.
09 - Apr-92 Mickish - Changed create - instances of opal : default - line - styles to
;;; opal:line-styles.
13 - Feb-92 Pervin - Merged demo - moveline and color - demo - moveline
;;;
(in-package :DEMO-MOVELINE)
(declaim (special MYLINE VP AGG FEEDBACK INTER1 INTER2 INTER3 INTER4 INTER5
INTER6 INTER7))
(defparameter *test-debug* NIL)
(defvar *color-p* (g-value opal:color :color-p))
(create-instance 'myline opal:line
(:points (list 0 0 0 0))
(:color 0)
(:x1 (o-formula (first (gvl :points))))
(:y1 (o-formula (second (gvl :points))))
(:x2 (o-formula (third (gvl :points))))
(:y2 (o-formula (fourth (gvl :points)))))
(if *color-p*
(s-value myline :line-style
(o-formula (case (mod (gvl :color) 6)
; thin-line
(0 (create-instance nil opal:line-style
(:constant T)
(:line-thickness 0)
(:foreground-color opal:blue)))
; line-2
(1 (create-instance nil opal:line-style
(:constant T)
(:line-thickness 2)
(:foreground-color opal:green)))
; line-4
(2 (create-instance nil opal:line-style
(:constant T)
(:line-thickness 4)
(:foreground-color opal:yellow)))
; line-8
(3 opal:line-8)
; dotted-line
(4 (create-instance nil opal:line-style
(:constant T)
(:line-style :dash)
(:line-thickness 1)
(:dash-pattern '(1 1))
(:foreground-color opal:blue)
(:background-color opal:yellow)))
(5 opal:dashed-line))))
(s-value myline :line-style
(o-formula (case (mod (gvl :color) 6)
(0 opal:thin-line)
(1 opal:line-2)
(2 opal:line-4)
(3 opal:line-8)
(4 opal:dotted-line)
(5 opal:dashed-line)))))
(defun create-lines (num agg)
(let (obj
(width (g-value agg :width))
(height (g-value agg :height)))
(dotimes (i num)
(setq obj (create-instance NIL myline
(:points (list (random (the integer width))
(random (the integer height))
(random (the integer width))
(random (the integer height))))
(:color i)))
(when *test-debug*
(format T "created line ~s: ~s (~s ~s ~s ~s) color = ~s~%"
i obj (g-value obj :x1)(g-value obj :y1)(g-value obj :x2)
(g-value obj :y2)(g-value obj :line-style)))
(opal:add-component agg obj))))
;;; ********************************************************************
;;; Main procedures
;;; ********************************************************************
(defun Do-Go (&key dont-enter-main-event-loop double-buffered-p)
;;; create a window
(create-instance 'vp inter:interactor-window (:left 300) (:top 50)
(:width 300) (:height 200)(:title "GARNET Move-lines")
(:double-buffered-p double-buffered-p)
(:icon-title "Move-Lines"))
;;; create the top level aggregate in the window
(create-instance 'agg opal:aggregate
(:left 0)(:top 0)(:width 300)(:height 200))
(s-value vp :aggregate agg)
(create-lines 6 agg)
(create-instance 'feedback Opal:line
(:points (list 0 0 0 0))
(:x1 (o-formula (first (gvl :points))))
(:y1 (o-formula (second (gvl :points))))
(:x2 (o-formula (third (gvl :points))))
(:y2 (o-formula (fourth (gvl :points))))
(:obj-over NIL)
(:visible (o-formula (gvl :obj-over)))
(:line-style (if *color-p*
(create-instance nil opal:line-style
(:constant T)
(:line-style :dash)
(:dash-pattern '(4 4))
(:foreground-color opal:red))
opal:dashed-line))
(:draw-function :xor)
(:fast-redraw-p T))
(opal:add-component agg feedback)
(opal:update vp)
(Create-Instance 'inter1 inter:move-grow-interactor
(:window vp)
(:min-length 40)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point :where-hit)
(:feedback-obj feedback)
(:grow-p t)
(:line-p t))
(Create-Instance 'inter2 inter:move-grow-interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 1)
(:line-p t)
(:grow-p t)
(:start-event :middledown))
(Create-Instance 'inter3 inter:Move-Grow-Interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 2)
(:line-p t)
(:grow-p t)
(:start-event :rightdown))
(Create-Instance 'inter4 inter:move-grow-interactor
(:start-event :shift-leftdown)
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point :where-hit)
(:feedback-obj feedback)
(:line-p t))
(Create-Instance 'inter5 inter:move-grow-interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 1)
(:line-p t)
(:start-event :shift-middledown))
(Create-Instance 'inter6 inter:Move-Grow-Interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 2)
(:line-p t)
(:start-event :shift-rightdown))
(Create-Instance 'inter7 inter:Move-Grow-Interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point :Center)
(:line-p t)
(:start-event :control-leftdown))
;; ** Do-Go **
(opal:update vp)
(Format T "~%Demo-Moveline:
Press on a line with the left button to cause the nearest end point to
grow with the mouse.
Press with middle button to move the first end point only.
Press with right button to move the second end point only.
Press with shift-left to move from where pressed.
Press with shift-middle to move from first end point.
Press with shift-right to move from second end point.
Press with control-left to move from center.~%")
(unless dont-enter-main-event-loop #-cmu (inter:main-event-loop))
)
;; ** STOP **
(defun Do-Stop ()
(opal:destroy vp))
| null | https://raw.githubusercontent.com/earl-ducaine/cl-garnet/f0095848513ba69c370ed1dc51ee01f0bb4dd108/src/demos/demo-moveline.lisp | lisp | Syntax : Common - Lisp ; Package : DEMO - MOVELINE ; Base : 10 -*-
; ;
This code was written as part of the Garnet project at ;;;
; ;
; ;
please contact to be put on the mailing list. ;;;
This file contains demo code for testing if the ends of lines can be
changed with the mouse.
This is intended as a test and demonstration of the move-grow
interactor as part of the Garnet project.
** Call (demo-moveline:Do-Go) to start and (demo-moveline:Do-Stop) to stop **
argument to random be declared an integer.
opal:line-styles.
thin-line
line-2
line-4
line-8
dotted-line
********************************************************************
Main procedures
********************************************************************
create a window
create the top level aggregate in the window
** Do-Go **
** STOP ** |
Designed and implemented by
27 - May-92 Pervin - The latest CMUCL requires that the
09 - Apr-92 Mickish - Changed create - instances of opal : default - line - styles to
13 - Feb-92 Pervin - Merged demo - moveline and color - demo - moveline
(in-package :DEMO-MOVELINE)
(declaim (special MYLINE VP AGG FEEDBACK INTER1 INTER2 INTER3 INTER4 INTER5
INTER6 INTER7))
(defparameter *test-debug* NIL)
(defvar *color-p* (g-value opal:color :color-p))
(create-instance 'myline opal:line
(:points (list 0 0 0 0))
(:color 0)
(:x1 (o-formula (first (gvl :points))))
(:y1 (o-formula (second (gvl :points))))
(:x2 (o-formula (third (gvl :points))))
(:y2 (o-formula (fourth (gvl :points)))))
(if *color-p*
(s-value myline :line-style
(o-formula (case (mod (gvl :color) 6)
(0 (create-instance nil opal:line-style
(:constant T)
(:line-thickness 0)
(:foreground-color opal:blue)))
(1 (create-instance nil opal:line-style
(:constant T)
(:line-thickness 2)
(:foreground-color opal:green)))
(2 (create-instance nil opal:line-style
(:constant T)
(:line-thickness 4)
(:foreground-color opal:yellow)))
(3 opal:line-8)
(4 (create-instance nil opal:line-style
(:constant T)
(:line-style :dash)
(:line-thickness 1)
(:dash-pattern '(1 1))
(:foreground-color opal:blue)
(:background-color opal:yellow)))
(5 opal:dashed-line))))
(s-value myline :line-style
(o-formula (case (mod (gvl :color) 6)
(0 opal:thin-line)
(1 opal:line-2)
(2 opal:line-4)
(3 opal:line-8)
(4 opal:dotted-line)
(5 opal:dashed-line)))))
(defun create-lines (num agg)
(let (obj
(width (g-value agg :width))
(height (g-value agg :height)))
(dotimes (i num)
(setq obj (create-instance NIL myline
(:points (list (random (the integer width))
(random (the integer height))
(random (the integer width))
(random (the integer height))))
(:color i)))
(when *test-debug*
(format T "created line ~s: ~s (~s ~s ~s ~s) color = ~s~%"
i obj (g-value obj :x1)(g-value obj :y1)(g-value obj :x2)
(g-value obj :y2)(g-value obj :line-style)))
(opal:add-component agg obj))))
(defun Do-Go (&key dont-enter-main-event-loop double-buffered-p)
(create-instance 'vp inter:interactor-window (:left 300) (:top 50)
(:width 300) (:height 200)(:title "GARNET Move-lines")
(:double-buffered-p double-buffered-p)
(:icon-title "Move-Lines"))
(create-instance 'agg opal:aggregate
(:left 0)(:top 0)(:width 300)(:height 200))
(s-value vp :aggregate agg)
(create-lines 6 agg)
(create-instance 'feedback Opal:line
(:points (list 0 0 0 0))
(:x1 (o-formula (first (gvl :points))))
(:y1 (o-formula (second (gvl :points))))
(:x2 (o-formula (third (gvl :points))))
(:y2 (o-formula (fourth (gvl :points))))
(:obj-over NIL)
(:visible (o-formula (gvl :obj-over)))
(:line-style (if *color-p*
(create-instance nil opal:line-style
(:constant T)
(:line-style :dash)
(:dash-pattern '(4 4))
(:foreground-color opal:red))
opal:dashed-line))
(:draw-function :xor)
(:fast-redraw-p T))
(opal:add-component agg feedback)
(opal:update vp)
(Create-Instance 'inter1 inter:move-grow-interactor
(:window vp)
(:min-length 40)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point :where-hit)
(:feedback-obj feedback)
(:grow-p t)
(:line-p t))
(Create-Instance 'inter2 inter:move-grow-interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 1)
(:line-p t)
(:grow-p t)
(:start-event :middledown))
(Create-Instance 'inter3 inter:Move-Grow-Interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 2)
(:line-p t)
(:grow-p t)
(:start-event :rightdown))
(Create-Instance 'inter4 inter:move-grow-interactor
(:start-event :shift-leftdown)
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point :where-hit)
(:feedback-obj feedback)
(:line-p t))
(Create-Instance 'inter5 inter:move-grow-interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 1)
(:line-p t)
(:start-event :shift-middledown))
(Create-Instance 'inter6 inter:Move-Grow-Interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point 2)
(:line-p t)
(:start-event :shift-rightdown))
(Create-Instance 'inter7 inter:Move-Grow-Interactor
(:window vp)
(:start-where `(:element-of ,agg :type ,myline))
(:attach-point :Center)
(:line-p t)
(:start-event :control-leftdown))
(opal:update vp)
(Format T "~%Demo-Moveline:
Press on a line with the left button to cause the nearest end point to
grow with the mouse.
Press with middle button to move the first end point only.
Press with right button to move the second end point only.
Press with shift-left to move from where pressed.
Press with shift-middle to move from first end point.
Press with shift-right to move from second end point.
Press with control-left to move from center.~%")
(unless dont-enter-main-event-loop #-cmu (inter:main-event-loop))
)
(defun Do-Stop ()
(opal:destroy vp))
|
7ed911d80b0409b7620602586916ebf5b30acc21427896c723eaf12d1bee15a5 | futurice/haskell-mega-repo | Config.hs | # LANGUAGE DataKinds #
module Futurice.App.Futuroom.Config where
import Futurice.EnvConfig
import Futurice.Integrations
import Futurice.Prelude
import Prelude ()
data Config = Config
{ cfgGoogleConfig :: !(IntegrationsConfig '[ ServGO ])
}
instance Configure Config where
configure = Config
<$> configure
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/futuroom-app/src/Futurice/App/Futuroom/Config.hs | haskell | # LANGUAGE DataKinds #
module Futurice.App.Futuroom.Config where
import Futurice.EnvConfig
import Futurice.Integrations
import Futurice.Prelude
import Prelude ()
data Config = Config
{ cfgGoogleConfig :: !(IntegrationsConfig '[ ServGO ])
}
instance Configure Config where
configure = Config
<$> configure
|
|
3e50848d73e4cb82e52540d1a34ed363b68037b4ab22056ae7073336bfef5912 | janestreet/ppx_here | ppx_here_expander.mli | open Ppxlib
(** Lift a lexing position to a expression *)
val lift_position : loc:Location.t -> Parsetree.expression
(** Lift a lexing position to a string expression *)
val lift_position_as_string : loc:Location.t -> Parsetree.expression
(** Same as setting the directory name with [-dirname], for tests *)
val set_dirname : string option -> unit
(** Prepend the directory name if [-dirname] was passed on the command line and the
filename is relative. *)
val expand_filename : string -> string
| null | https://raw.githubusercontent.com/janestreet/ppx_here/3dc2a41c80950375582241d1c99ca383b4fbda44/expander/ppx_here_expander.mli | ocaml | * Lift a lexing position to a expression
* Lift a lexing position to a string expression
* Same as setting the directory name with [-dirname], for tests
* Prepend the directory name if [-dirname] was passed on the command line and the
filename is relative. | open Ppxlib
val lift_position : loc:Location.t -> Parsetree.expression
val lift_position_as_string : loc:Location.t -> Parsetree.expression
val set_dirname : string option -> unit
val expand_filename : string -> string
|
1ce9c2c93a9bf26e3072157a5b459420c34c05d4968de72fe41d51ce3ce062ac | ryszard/clsql | mysql-loader.lisp | -*- Mode : LISP ; Syntax : ANSI - Common - Lisp ; Base : 10 -*-
;;;; *************************************************************************
;;;; FILE IDENTIFICATION
;;;;
;;;; Name: mysql-loader.sql
Purpose : MySQL library loader using UFFI
Author :
Created : Feb 2002
;;;;
$ Id$
;;;;
This file , part of CLSQL , is Copyright ( c ) 2002 - 2004 by
;;;;
CLSQL users are granted the rights to distribute and use this software
as governed by the terms of the Lisp Lesser GNU Public License
;;;; (), also known as the LLGPL.
;;;; *************************************************************************
(in-package #:mysql)
searches clsql_mysql64 to accomodate both 32 - bit and 64 - bit libraries on same system
(defparameter *clsql-mysql-library-candidate-names*
`(,@(when (> most-positive-fixnum (expt 2 32)) (list "clsql_mysql64"))
"clsql_mysql"))
(defvar *mysql-library-candidate-names*
'("libmysqlclient" "libmysql"))
(defvar *mysql-supporting-libraries* '("c")
"Used only by CMU. List of library flags needed to be passed to ld to
load the MySQL client library succesfully. If this differs at your site,
set to the right path before compiling or loading the system.")
(defvar *mysql-library-loaded* nil
"T if foreign library was able to be loaded successfully")
(defmethod clsql-sys:database-type-library-loaded ((database-type (eql :mysql)))
*mysql-library-loaded*)
(defmethod clsql-sys:database-type-load-foreign ((database-type (eql :mysql)))
(clsql:push-library-path
(make-pathname :directory clsql-mysql-system::*library-file-dir*))
(clsql-uffi:find-and-load-foreign-library *mysql-library-candidate-names*
:module "mysql"
:supporting-libraries *mysql-supporting-libraries*)
(clsql-uffi:find-and-load-foreign-library *clsql-mysql-library-candidate-names*
:module "clsql-mysql"
:supporting-libraries *mysql-supporting-libraries*)
(setq *mysql-library-loaded* t))
(clsql-sys:database-type-load-foreign :mysql)
| null | https://raw.githubusercontent.com/ryszard/clsql/9aafcb72bd7ca1d7e908938b6a5319753b3371d9/db-mysql/mysql-loader.lisp | lisp | Syntax : ANSI - Common - Lisp ; Base : 10 -*-
*************************************************************************
FILE IDENTIFICATION
Name: mysql-loader.sql
(), also known as the LLGPL.
************************************************************************* | Purpose : MySQL library loader using UFFI
Author :
Created : Feb 2002
$ Id$
This file , part of CLSQL , is Copyright ( c ) 2002 - 2004 by
CLSQL users are granted the rights to distribute and use this software
as governed by the terms of the Lisp Lesser GNU Public License
(in-package #:mysql)
searches clsql_mysql64 to accomodate both 32 - bit and 64 - bit libraries on same system
(defparameter *clsql-mysql-library-candidate-names*
`(,@(when (> most-positive-fixnum (expt 2 32)) (list "clsql_mysql64"))
"clsql_mysql"))
(defvar *mysql-library-candidate-names*
'("libmysqlclient" "libmysql"))
(defvar *mysql-supporting-libraries* '("c")
"Used only by CMU. List of library flags needed to be passed to ld to
load the MySQL client library succesfully. If this differs at your site,
set to the right path before compiling or loading the system.")
(defvar *mysql-library-loaded* nil
"T if foreign library was able to be loaded successfully")
(defmethod clsql-sys:database-type-library-loaded ((database-type (eql :mysql)))
*mysql-library-loaded*)
(defmethod clsql-sys:database-type-load-foreign ((database-type (eql :mysql)))
(clsql:push-library-path
(make-pathname :directory clsql-mysql-system::*library-file-dir*))
(clsql-uffi:find-and-load-foreign-library *mysql-library-candidate-names*
:module "mysql"
:supporting-libraries *mysql-supporting-libraries*)
(clsql-uffi:find-and-load-foreign-library *clsql-mysql-library-candidate-names*
:module "clsql-mysql"
:supporting-libraries *mysql-supporting-libraries*)
(setq *mysql-library-loaded* t))
(clsql-sys:database-type-load-foreign :mysql)
|
0bc2f7153dfb31274e34d0345e87a22c24bd3f79a17848bf76f67a4d6f3cb21f | AccelerateHS/accelerate | Vec.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE GADTs #
{-# LANGUAGE KindSignatures #-}
# LANGUAGE MagicHash #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
Module : Data . Array . Accelerate . Representation .
Copyright : [ 2008 .. 2020 ] The Accelerate Team
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
module Data.Array.Accelerate.Representation.Vec
where
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.Representation.Type
import Data.Primitive.Vec
import Control.Monad.ST
import Data.Primitive.ByteArray
import Data.Primitive.Types
import Language.Haskell.TH.Extra
import GHC.Base ( Int(..), Int#, (-#) )
import GHC.TypeNats
-- | Declares the size of a SIMD vector and the type of its elements. This
data type is used to denote the relation between a vector type (
-- n single) with its tuple representation (tuple). Conversions between
-- those types are exposed through 'pack' and 'unpack'.
--
data VecR (n :: Nat) single tuple where
VecRnil :: SingleType s -> VecR 0 s ()
VecRsucc :: VecR n s t -> VecR (n + 1) s (t, s)
vecRvector :: KnownNat n => VecR n s tuple -> VectorType (Vec n s)
vecRvector = uncurry VectorType . go
where
go :: VecR n s tuple -> (Int, SingleType s)
go (VecRnil tp) = (0, tp)
go (VecRsucc vec) | (n, tp) <- go vec = (n + 1, tp)
vecRtuple :: VecR n s tuple -> TypeR tuple
vecRtuple = snd . go
where
go :: VecR n s tuple -> (SingleType s, TypeR tuple)
go (VecRnil tp) = (tp, TupRunit)
go (VecRsucc vec) | (tp, tuple) <- go vec = (tp, TupRpair tuple (TupRsingle (SingleScalarType tp)))
pack :: forall n single tuple. KnownNat n => VecR n single tuple -> tuple -> Vec n single
pack vecR tuple
| VectorType n single <- vecRvector vecR
, SingleDict <- singleDict single
= runST $ do
mba <- newByteArray (n * sizeOf (undefined :: single))
go (n - 1) vecR tuple mba
ByteArray ba# <- unsafeFreezeByteArray mba
return $! Vec ba#
where
go :: Prim single => Int -> VecR n' single tuple' -> tuple' -> MutableByteArray s -> ST s ()
go _ (VecRnil _) () _ = return ()
go i (VecRsucc r) (xs, x) mba = do
writeByteArray mba i x
go (i - 1) r xs mba
unpack :: forall n single tuple. KnownNat n => VecR n single tuple -> Vec n single -> tuple
unpack vecR (Vec ba#)
| VectorType n single <- vecRvector vecR
, (I# n#) <- n
, SingleDict <- singleDict single
= go (n# -# 1#) vecR
where
go :: Prim single => Int# -> VecR n' single tuple' -> tuple'
go _ (VecRnil _) = ()
go i# (VecRsucc r) = x `seq` xs `seq` (xs, x)
where
xs = go (i# -# 1#) r
x = indexByteArray# ba# i#
rnfVecR :: VecR n single tuple -> ()
rnfVecR (VecRnil tp) = rnfSingleType tp
rnfVecR (VecRsucc vec) = rnfVecR vec
liftVecR :: VecR n single tuple -> CodeQ (VecR n single tuple)
liftVecR (VecRnil tp) = [|| VecRnil $$(liftSingleType tp) ||]
liftVecR (VecRsucc vec) = [|| VecRsucc $$(liftVecR vec) ||]
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate/7c769b761d0b2a91f318096b9dd3fced94616961/src/Data/Array/Accelerate/Representation/Vec.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE KindSignatures #
# LANGUAGE TypeOperators #
# OPTIONS_HADDOCK hide #
|
License : BSD3
Stability : experimental
| Declares the size of a SIMD vector and the type of its elements. This
n single) with its tuple representation (tuple). Conversions between
those types are exposed through 'pack' and 'unpack'.
| # LANGUAGE GADTs #
# LANGUAGE MagicHash #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
Module : Data . Array . Accelerate . Representation .
Copyright : [ 2008 .. 2020 ] The Accelerate Team
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.Representation.Vec
where
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.Representation.Type
import Data.Primitive.Vec
import Control.Monad.ST
import Data.Primitive.ByteArray
import Data.Primitive.Types
import Language.Haskell.TH.Extra
import GHC.Base ( Int(..), Int#, (-#) )
import GHC.TypeNats
data type is used to denote the relation between a vector type (
data VecR (n :: Nat) single tuple where
VecRnil :: SingleType s -> VecR 0 s ()
VecRsucc :: VecR n s t -> VecR (n + 1) s (t, s)
vecRvector :: KnownNat n => VecR n s tuple -> VectorType (Vec n s)
vecRvector = uncurry VectorType . go
where
go :: VecR n s tuple -> (Int, SingleType s)
go (VecRnil tp) = (0, tp)
go (VecRsucc vec) | (n, tp) <- go vec = (n + 1, tp)
vecRtuple :: VecR n s tuple -> TypeR tuple
vecRtuple = snd . go
where
go :: VecR n s tuple -> (SingleType s, TypeR tuple)
go (VecRnil tp) = (tp, TupRunit)
go (VecRsucc vec) | (tp, tuple) <- go vec = (tp, TupRpair tuple (TupRsingle (SingleScalarType tp)))
pack :: forall n single tuple. KnownNat n => VecR n single tuple -> tuple -> Vec n single
pack vecR tuple
| VectorType n single <- vecRvector vecR
, SingleDict <- singleDict single
= runST $ do
mba <- newByteArray (n * sizeOf (undefined :: single))
go (n - 1) vecR tuple mba
ByteArray ba# <- unsafeFreezeByteArray mba
return $! Vec ba#
where
go :: Prim single => Int -> VecR n' single tuple' -> tuple' -> MutableByteArray s -> ST s ()
go _ (VecRnil _) () _ = return ()
go i (VecRsucc r) (xs, x) mba = do
writeByteArray mba i x
go (i - 1) r xs mba
unpack :: forall n single tuple. KnownNat n => VecR n single tuple -> Vec n single -> tuple
unpack vecR (Vec ba#)
| VectorType n single <- vecRvector vecR
, (I# n#) <- n
, SingleDict <- singleDict single
= go (n# -# 1#) vecR
where
go :: Prim single => Int# -> VecR n' single tuple' -> tuple'
go _ (VecRnil _) = ()
go i# (VecRsucc r) = x `seq` xs `seq` (xs, x)
where
xs = go (i# -# 1#) r
x = indexByteArray# ba# i#
rnfVecR :: VecR n single tuple -> ()
rnfVecR (VecRnil tp) = rnfSingleType tp
rnfVecR (VecRsucc vec) = rnfVecR vec
liftVecR :: VecR n single tuple -> CodeQ (VecR n single tuple)
liftVecR (VecRnil tp) = [|| VecRnil $$(liftSingleType tp) ||]
liftVecR (VecRsucc vec) = [|| VecRsucc $$(liftVecR vec) ||]
|
1e120d4ea78d0decfdd589c2e0616174380461002f7dcf62faff9e172980ed5f | shirok/Gauche | 222.scm | ;;;
SRFI-222 - Compound objects
;;;
Copyright ( c ) 2022 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; 3. Neither the name of the authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this
;;; software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
(define-module srfi.222
(use gauche.record)
(use data.queue)
(export make-compound compound?
compound-subobjects compound-length compound-ref
compound-map compound-map->list compound-filter
compound-predicate compound-access
))
(select-module srfi.222)
(define-class <compound> ()
((subobjects :init-keyword :subobjects) ; list of objects
))
(define-method write-object ((c <compound>) port)
(format port "#<compound ~s>" (~ c'subobjects)))
(define-method object-hash ((c <compound>) rec-hash)
(rec-hash (~ c'subobjects)))
(define-method object-compare ((a <compound>) (b <compound>))
(compare (~ a'subobjects) (~ b'subobjects)))
(define (%list->compound objs)
(let1 q (make-queue)
(dolist [o objs]
(if (compound? o)
(apply enqueue! q (~ o'subobjects))
(enqueue! q o)))
(make <compound> :subobjects (dequeue-all! q))))
(define (make-compound . objs) (%list->compound objs))
(define (compound? obj) (is-a? obj <compound>))
(define (compound-subobjects obj)
(if (compound? obj)
(~ obj'subobjects)
(list obj)))
(define (compound-length obj)
(if (compound? obj)
(length (~ obj'subobjects))
1))
(define (compound-ref obj k)
(assume (and (exact-integer? k) (not (negative? k)))
"Index must be nonnegative exact integer, but got:" k)
(if (compound? obj)
(~ obj'subobjects k)
(if (= k 0) obj (error "Index out of range:" k))))
(define (compound-map mapper obj)
(%list->compound (compound-map->list mapper obj)))
(define (compound-map->list mapper obj)
(map mapper (compound-subobjects obj)))
(define (compound-filter pred obj)
(%list->compound (filter pred (compound-subobjects obj))))
(define (compound-predicate pred obj)
(or (pred obj)
(and (compound? obj)
(boolean (any pred (~ obj'subobjects))))))
(define (compound-access pred acc default obj)
(cond [(pred obj) (acc obj)]
[(and (compound? obj)
(find pred (~ obj'subobjects)))
=> acc]
[else default]))
| null | https://raw.githubusercontent.com/shirok/Gauche/e606bfe5a94b100d5807bca9c2bb95df94f60aa6/lib/srfi/222.scm | scheme |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the authors nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
list of objects | SRFI-222 - Compound objects
Copyright ( c ) 2022 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(define-module srfi.222
(use gauche.record)
(use data.queue)
(export make-compound compound?
compound-subobjects compound-length compound-ref
compound-map compound-map->list compound-filter
compound-predicate compound-access
))
(select-module srfi.222)
(define-class <compound> ()
))
(define-method write-object ((c <compound>) port)
(format port "#<compound ~s>" (~ c'subobjects)))
(define-method object-hash ((c <compound>) rec-hash)
(rec-hash (~ c'subobjects)))
(define-method object-compare ((a <compound>) (b <compound>))
(compare (~ a'subobjects) (~ b'subobjects)))
(define (%list->compound objs)
(let1 q (make-queue)
(dolist [o objs]
(if (compound? o)
(apply enqueue! q (~ o'subobjects))
(enqueue! q o)))
(make <compound> :subobjects (dequeue-all! q))))
(define (make-compound . objs) (%list->compound objs))
(define (compound? obj) (is-a? obj <compound>))
(define (compound-subobjects obj)
(if (compound? obj)
(~ obj'subobjects)
(list obj)))
(define (compound-length obj)
(if (compound? obj)
(length (~ obj'subobjects))
1))
(define (compound-ref obj k)
(assume (and (exact-integer? k) (not (negative? k)))
"Index must be nonnegative exact integer, but got:" k)
(if (compound? obj)
(~ obj'subobjects k)
(if (= k 0) obj (error "Index out of range:" k))))
(define (compound-map mapper obj)
(%list->compound (compound-map->list mapper obj)))
(define (compound-map->list mapper obj)
(map mapper (compound-subobjects obj)))
(define (compound-filter pred obj)
(%list->compound (filter pred (compound-subobjects obj))))
(define (compound-predicate pred obj)
(or (pred obj)
(and (compound? obj)
(boolean (any pred (~ obj'subobjects))))))
(define (compound-access pred acc default obj)
(cond [(pred obj) (acc obj)]
[(and (compound? obj)
(find pred (~ obj'subobjects)))
=> acc]
[else default]))
|
c4d09e07c44d545b4450b1f0da3d7462fd628198544696628b48b06c11f640a9 | lambdaisland/kaocha | filter_test.clj | (ns kaocha.plugin.filter-test
(:require [clojure.test :refer [deftest is]]
[kaocha.plugin.filter :as f]
[kaocha.testable :as testable]))
(defn flat-test-seq [t]
(cons t (mapcat flat-test-seq (:kaocha.test-plan/tests t))))
(defn skipped-tests [testable]
(into {}
(for [{:kaocha.testable/keys [id skip]} (flat-test-seq testable)]
[id (boolean skip)])))
(deftest matches?-test
(is (f/matches? {:kaocha.testable/id :foo.bar/baz}
'[foo.bar/baz]
[]))
(is (f/matches? {:kaocha.testable/id :foo.bar/baz}
'[foo.bar]
[]))
(is (f/matches? {:kaocha.testable/meta {:foo.bar/baz true}}
[]
'[foo.bar/baz]))
(is (f/matches? {:kaocha.testable/id :foo.bar}
'[foo.bar]
[])))
(deftest filters-test
(is (= '{:skip [skip]
:focus [focus]
:skip-meta [:skip-meta]
:focus-meta [:focus-meta]}
(f/filters '#:kaocha.filter{:skip [skip]
:focus [focus]
:skip-meta [:skip-meta]
:focus-meta [:focus-meta]}))))
(deftest merge-filters-test
(is (= {:skip () :skip-meta () :focus nil :focus-meta nil}
(f/merge-filters {} {})))
(is (= {:skip '[foo bar] :skip-meta () :focus nil :focus-meta nil}
(f/merge-filters
{:skip '[foo]}
{:skip '[bar]})))
(is (= {:skip () :skip-meta () :focus '[bar] :focus-meta nil}
(f/merge-filters
{:focus '[foo]}
{:focus '[bar]})))
(is (= {:skip () :skip-meta () :focus '[foo] :focus-meta nil}
(f/merge-filters
{:focus '[foo]}
{:focud '[]}))))
(deftest truthy-keys-test
(is (= [:zzz]
(f/truthy-keys {:xxx false
:yyy nil
:zzz true}))))
(deftest remove-missing-metadata-keys-test
(is (= #{:xxx}
(f/remove-missing-metadata-keys
[:xxx :yyy]
{:kaocha.test-plan/tests [{:kaocha.testable/id :abc
:kaocha.testable/meta {:xxx true}}]}))))
(deftest filter-testable-test
(is (= #:kaocha.testable
{:id :foo.bar/baz, :skip true}
(f/filter-testable {:kaocha.testable/id :foo.bar/baz}
{:skip '[foo.bar]})))
(is (= {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [#:kaocha.testable{:id :y/_1, :skip true}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}]}
{:skip '[y/_1]})))
(is (= {:kaocha.testable/id :x/_1, :kaocha.test-plan/tests [#:kaocha.testable{:id :y/_1}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}]}
{:focus '[x/_1]})))
(is (= {:kaocha.testable/id :x/_1,
:kaocha.test-plan/tests [#:kaocha.testable{:id :y/_1
:skip true}
#:kaocha.testable{:id :z/_1}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}
{:kaocha.testable/id :z/_1}]}
{:focus '[z/_1]})))
(is (= {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1
:kaocha.testable/skip true}
{:kaocha.testable/id :y/_2
:kaocha.test-plan/tests
[{:kaocha.testable/id :z/_1
:kaocha.testable/skip true}
{:kaocha.testable/id :z/_2}]}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}
{:kaocha.testable/id :y/_2
:kaocha.test-plan/tests
[{:kaocha.testable/id :z/_1}
{:kaocha.testable/id :z/_2}]}]}
{:focus '[y/_2] :skip '[z/_1]})))
(is (= {:kaocha.testable/id :x,
:kaocha.test-plan/tests [{:kaocha.testable/id :y,
:kaocha.test-plan/tests [#:kaocha.testable{:id :z}
#:kaocha.testable{:id :z/_2}]}]}
(f/filter-testable {:kaocha.testable/id :x
:kaocha.test-plan/tests [{:kaocha.testable/id :y
:kaocha.test-plan/tests
[{:kaocha.testable/id :z}
{:kaocha.testable/id :z/_2}]}]}
{:focus '[z]})))
(is (= {:kaocha.testable/id :x,
:kaocha.filter/focus [:z/_2],
:kaocha.test-plan/tests
[{:kaocha.testable/id :y
:kaocha.test-plan/tests
[#:kaocha.testable{:id :z
:skip true}
#:kaocha.testable{:id :z/_2}]}]}
(f/filter-testable {:kaocha.testable/id :x
:kaocha.filter/focus [:z/_2]
:kaocha.test-plan/tests [{:kaocha.testable/id :y
:kaocha.test-plan/tests
[{:kaocha.testable/id :z}
{:kaocha.testable/id :z/_2}]}]}
{})))
;; These cases need more hammock time to figure out what the "right" behavior should be
#_
(is (= {:base false
:x false
:a false :b false
:y false
:c true :d false}
(skipped-tests (f/filter-testable {:kaocha.testable/id :base
:kaocha.test-plan/tests [{:kaocha.testable/id :x
:kaocha.test-plan/tests [{:kaocha.testable/id :a}
{:kaocha.testable/id :b
:kaocha.testable/meta {:foo true}}]}
{:kaocha.testable/id :y
:kaocha.test-plan/tests [{:kaocha.testable/id :c}
{:kaocha.testable/id :d
:kaocha.testable/meta {:foo true}}]}]}
{:focus [:x]
:focus-meta [:foo]}))
)))
(deftest filter-focus-and-focus-meta-separately-test
(is (= {:kaocha.test-plan/tests [{:kaocha.testable/id :negative
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}
:kaocha.testable/skip true}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}
:kaocha.testable/skip true}]}]
:kaocha.testable/skip true}
{:kaocha.testable/id :positive
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}
:kaocha.testable/skip true}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}
:kaocha.testable/skip true}]}]}]
:kaocha.filter/focus #{:positive}
:kaocha.filter/focus-meta #{}}
(f/filter-post-load-hook {:kaocha.test-plan/tests [{:kaocha.testable/id :negative
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}}]}]
:kaocha.filter/focus-meta #{:negative}}
{:kaocha.testable/id :positive
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}}]}]
:kaocha.filter/focus-meta #{:positive}}]
:kaocha.filter/focus #{:positive}}))))
| null | https://raw.githubusercontent.com/lambdaisland/kaocha/8f18babb732b21e7fb2231e44be4d972c7ab22bc/test/unit/kaocha/plugin/filter_test.clj | clojure | These cases need more hammock time to figure out what the "right" behavior should be | (ns kaocha.plugin.filter-test
(:require [clojure.test :refer [deftest is]]
[kaocha.plugin.filter :as f]
[kaocha.testable :as testable]))
(defn flat-test-seq [t]
(cons t (mapcat flat-test-seq (:kaocha.test-plan/tests t))))
(defn skipped-tests [testable]
(into {}
(for [{:kaocha.testable/keys [id skip]} (flat-test-seq testable)]
[id (boolean skip)])))
(deftest matches?-test
(is (f/matches? {:kaocha.testable/id :foo.bar/baz}
'[foo.bar/baz]
[]))
(is (f/matches? {:kaocha.testable/id :foo.bar/baz}
'[foo.bar]
[]))
(is (f/matches? {:kaocha.testable/meta {:foo.bar/baz true}}
[]
'[foo.bar/baz]))
(is (f/matches? {:kaocha.testable/id :foo.bar}
'[foo.bar]
[])))
(deftest filters-test
(is (= '{:skip [skip]
:focus [focus]
:skip-meta [:skip-meta]
:focus-meta [:focus-meta]}
(f/filters '#:kaocha.filter{:skip [skip]
:focus [focus]
:skip-meta [:skip-meta]
:focus-meta [:focus-meta]}))))
(deftest merge-filters-test
(is (= {:skip () :skip-meta () :focus nil :focus-meta nil}
(f/merge-filters {} {})))
(is (= {:skip '[foo bar] :skip-meta () :focus nil :focus-meta nil}
(f/merge-filters
{:skip '[foo]}
{:skip '[bar]})))
(is (= {:skip () :skip-meta () :focus '[bar] :focus-meta nil}
(f/merge-filters
{:focus '[foo]}
{:focus '[bar]})))
(is (= {:skip () :skip-meta () :focus '[foo] :focus-meta nil}
(f/merge-filters
{:focus '[foo]}
{:focud '[]}))))
(deftest truthy-keys-test
(is (= [:zzz]
(f/truthy-keys {:xxx false
:yyy nil
:zzz true}))))
(deftest remove-missing-metadata-keys-test
(is (= #{:xxx}
(f/remove-missing-metadata-keys
[:xxx :yyy]
{:kaocha.test-plan/tests [{:kaocha.testable/id :abc
:kaocha.testable/meta {:xxx true}}]}))))
(deftest filter-testable-test
(is (= #:kaocha.testable
{:id :foo.bar/baz, :skip true}
(f/filter-testable {:kaocha.testable/id :foo.bar/baz}
{:skip '[foo.bar]})))
(is (= {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [#:kaocha.testable{:id :y/_1, :skip true}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}]}
{:skip '[y/_1]})))
(is (= {:kaocha.testable/id :x/_1, :kaocha.test-plan/tests [#:kaocha.testable{:id :y/_1}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}]}
{:focus '[x/_1]})))
(is (= {:kaocha.testable/id :x/_1,
:kaocha.test-plan/tests [#:kaocha.testable{:id :y/_1
:skip true}
#:kaocha.testable{:id :z/_1}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}
{:kaocha.testable/id :z/_1}]}
{:focus '[z/_1]})))
(is (= {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1
:kaocha.testable/skip true}
{:kaocha.testable/id :y/_2
:kaocha.test-plan/tests
[{:kaocha.testable/id :z/_1
:kaocha.testable/skip true}
{:kaocha.testable/id :z/_2}]}]}
(f/filter-testable {:kaocha.testable/id :x/_1
:kaocha.test-plan/tests [{:kaocha.testable/id :y/_1}
{:kaocha.testable/id :y/_2
:kaocha.test-plan/tests
[{:kaocha.testable/id :z/_1}
{:kaocha.testable/id :z/_2}]}]}
{:focus '[y/_2] :skip '[z/_1]})))
(is (= {:kaocha.testable/id :x,
:kaocha.test-plan/tests [{:kaocha.testable/id :y,
:kaocha.test-plan/tests [#:kaocha.testable{:id :z}
#:kaocha.testable{:id :z/_2}]}]}
(f/filter-testable {:kaocha.testable/id :x
:kaocha.test-plan/tests [{:kaocha.testable/id :y
:kaocha.test-plan/tests
[{:kaocha.testable/id :z}
{:kaocha.testable/id :z/_2}]}]}
{:focus '[z]})))
(is (= {:kaocha.testable/id :x,
:kaocha.filter/focus [:z/_2],
:kaocha.test-plan/tests
[{:kaocha.testable/id :y
:kaocha.test-plan/tests
[#:kaocha.testable{:id :z
:skip true}
#:kaocha.testable{:id :z/_2}]}]}
(f/filter-testable {:kaocha.testable/id :x
:kaocha.filter/focus [:z/_2]
:kaocha.test-plan/tests [{:kaocha.testable/id :y
:kaocha.test-plan/tests
[{:kaocha.testable/id :z}
{:kaocha.testable/id :z/_2}]}]}
{})))
#_
(is (= {:base false
:x false
:a false :b false
:y false
:c true :d false}
(skipped-tests (f/filter-testable {:kaocha.testable/id :base
:kaocha.test-plan/tests [{:kaocha.testable/id :x
:kaocha.test-plan/tests [{:kaocha.testable/id :a}
{:kaocha.testable/id :b
:kaocha.testable/meta {:foo true}}]}
{:kaocha.testable/id :y
:kaocha.test-plan/tests [{:kaocha.testable/id :c}
{:kaocha.testable/id :d
:kaocha.testable/meta {:foo true}}]}]}
{:focus [:x]
:focus-meta [:foo]}))
)))
(deftest filter-focus-and-focus-meta-separately-test
(is (= {:kaocha.test-plan/tests [{:kaocha.testable/id :negative
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}
:kaocha.testable/skip true}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}
:kaocha.testable/skip true}]}]
:kaocha.testable/skip true}
{:kaocha.testable/id :positive
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}
:kaocha.testable/skip true}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}
:kaocha.testable/skip true}]}]}]
:kaocha.filter/focus #{:positive}
:kaocha.filter/focus-meta #{}}
(f/filter-post-load-hook {:kaocha.test-plan/tests [{:kaocha.testable/id :negative
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}}]}]
:kaocha.filter/focus-meta #{:negative}}
{:kaocha.testable/id :positive
:kaocha.test-plan/tests [{:kaocha.testable/id :foo.bar-test
:kaocha.test-plan/tests
[{:kaocha.testable/id :foo.bar-test/some-positive-test
:kaocha.testable/meta {:positive true}}
{:kaocha.testable/id :foo.bar-test/some-negative-test
:kaocha.testable/meta {:negative true}}
{:kaocha.testable/id :foo.bar-test/some-random-test
:kaocha.testable/meta {:random true}}]}]
:kaocha.filter/focus-meta #{:positive}}]
:kaocha.filter/focus #{:positive}}))))
|
44bfbc6d1915c7180b0faadc4965f279f2ed3eb4019a7da292858daea508fd67 | amnh/poy5 | character.mli | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2014 , , , Ward Wheeler ,
and the American Museum of Natural History .
(* *)
(* This program is free software; you can redistribute it and/or modify *)
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
(* (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
(** This type interacts with the tree traversal code to continue or stop traversal *)
type update_res
val rContinue : update_res
val rStop : update_res
(** Utility function to create a new, unique code for a character module *)
val new_code : unit -> int
(** Colors for characters *)
type c = White | Black | Blue | Yellow | Red | Grey | Green | Brown
(** Heuristic values *)
type h = Fast | Medium | Exact
* The [ CHARACTER ] signature is the general signature for different types of
characters . This signature is currently missing several functions :
* [ median_3 ]
* [ update ] for performing a 3median and returning an [ update_res ] value
Note that characters are meant to be immutable values .
characters. This signature is currently missing several functions:
* [median_3]
* [update] for performing a 3median and returning an [update_res] value
Note that characters are meant to be immutable values. *)
module type CHARACTER =
sig
type t
(** Every character has a code; characters with the same code are
homologous. *)
val code : t -> int
(** Type for generating random instances of this character. Used for
testing. *)
(* type gen *)
(** Make a random set of parameters for generating mutually consistent and
comparable characters *)
(* val rand_gen : unit -> gen *)
(** Given a set of parameters, make a random character *)
(* val make_rand : gen -> t *)
* [ median prev_median a b ] returns the median of [ a ] and [ b ] . Optionally ,
you may specify [ Some prev ] as [ prev_median ] , the median of the previous
versions of [ a ] and [ b ] . This is useful if [ a ] and [ b ] themselves keep
a list of changes from their previous incarnations . See the module
[ Charset ] for an example of how this is used .
you may specify [Some prev] as [prev_median], the median of the previous
versions of [a] and [b]. This is useful if [a] and [b] themselves keep
a list of changes from their previous incarnations. See the module
[Charset] for an example of how this is used. *)
val median : t option -> t -> t -> t
val median_3 : t -> t -> t -> t -> t
val reroot_median : t -> t -> t
val dist_2 : t -> t -> t -> float
* Distance between two character instances .
val distance : t -> t -> float
(** [compare_codes a b] imposes a total ordering on the set of characters
based on their homologies. For homologous characters (those with the
same codes), this function should return 0. *)
val compare_codes : t -> t -> int
(** [compare_data a b] imposes a total ordering on the set of characters
based on their codes and data. Only in cases where both the codes and
the data are the same will this function return 0. *)
val compare_data : t -> t -> int
val cardinal : t -> int
val deep_cardinal : t -> int
(** Returns a string representation of a character instance. *)
val to_string : t -> string
end
* [ CharacterSet]s are [ CHARACTER]s in their own right , but also have
additional functionality relevant to sets of characters . The [ Charset ]
module defines a functor that implements sets of a generic character .
additional functionality relevant to sets of characters. The [Charset]
module defines a functor that implements sets of a generic character. *)
module type CharacterSet = sig
type e (** contents of the set *)
type t (** set type *)
(* (\** Type for generating random instances of this character. Used for *)
(* testing. *\) *)
(* type gen *)
(* (\** Make a random set of parameters for generating mutually consistent and *)
(* comparable characters *\) *)
(* val rand_gen : unit -> gen *)
(* (\** Given a set of parameters, make a random character *\) *)
(* val make_rand : gen -> t *)
(** An empty set of characters *)
val empty : t
(** Find the cardinality (number of elements) of a set *)
val cardinal : t -> int
val deep_cardinal : t -> int
val code : t -> int
val set_code : t -> int -> t
val elt_code : e -> int
(** [add code elt cost set] adds an element [elt] with a given code and cost
to the character set [set] *)
val add : e -> float -> t -> t
(** [del code set] returns a new set with [code] deleted *)
val del : int -> t -> t
(** return the list of codes *)
val codes : t -> int list
(** return the list of code/cost pairs *)
val costs : t -> (int * float) list
(** [get_elt_withcode code set] returns the element with code [code], if it exists *)
val get_elt_withcode : int -> t -> e option
(** [substitute a b] takes any element in [a] with a corresponding element
in [b] and replaces the element in [b] with the one in [a]. The set of
codes in [b] is not changed, but elements in [b] are overwritten by ones
from [a]. Of course, the sets are immutable, so the resulting set is
returned. *)
val substitute : t -> t -> t
(** [merge a b] returns the merge of [a] and [b] *)
val merge : t -> t -> t
(** set subtraction *)
val minus : t -> t -> t
(** [random p set] returns a random subset of [set], given a random boolean
generator [p]. *)
val random : (unit -> bool) -> t -> t
(** Turns a set into a [(code, elt, cost)] list *)
val to_list : t -> (int * e * float) list
(** Takes a list of [(code, elt, cost)] elements and returns a set *)
val of_list : (int * e * float) list -> t
(** gets the heuristic currently used on a given set *)
val set_heu : h -> t -> t
(** sets the heuristic to be used *)
val get_heu : t -> h
(** [fold fun start set] calls [fun elt accum] to fold the set *)
val fold : (e -> 'b -> 'b) -> 'b -> t -> 'b
(** [f_codes set list] returns a subset with only codes that appear in list
*)
val f_codes : t -> int list -> t
val iter : (e -> int -> unit) -> t -> unit
(** [iter fun set] calls [fun elt code color] for each element *)
(** [map fun set] returns a new set, made by calling [fun elt code color],
which should return a new element *)
val map : (e -> int -> e) -> t -> t
val is_empty : t -> bool
(** [median prev left right] should calculate a median of [left] and
[right]. [prev] is provided as a set of medians of the "previous"
versions of [left] and [right]. However, under certain tree operations,
[prev] will be the median of other nodes; the program must check for
this case. See charset.ml for an example. *)
val median : t option -> t -> t -> t
val to_string : t -> string
val distance_list : t -> t -> (int * float) list
val distance : t -> t -> float
val compare_codes : t -> t -> int
val compare_data : t -> t -> int
(* val update : t option -> t -> t -> t -> (t * update_res) *)
(* val median_3 : t -> t -> t -> t *)
end
| null | https://raw.githubusercontent.com/amnh/poy5/da563a2339d3fa9c0110ae86cc35fad576f728ab/src/character.mli | ocaml |
This program is free software; you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
* This type interacts with the tree traversal code to continue or stop traversal
* Utility function to create a new, unique code for a character module
* Colors for characters
* Heuristic values
* Every character has a code; characters with the same code are
homologous.
* Type for generating random instances of this character. Used for
testing.
type gen
* Make a random set of parameters for generating mutually consistent and
comparable characters
val rand_gen : unit -> gen
* Given a set of parameters, make a random character
val make_rand : gen -> t
* [compare_codes a b] imposes a total ordering on the set of characters
based on their homologies. For homologous characters (those with the
same codes), this function should return 0.
* [compare_data a b] imposes a total ordering on the set of characters
based on their codes and data. Only in cases where both the codes and
the data are the same will this function return 0.
* Returns a string representation of a character instance.
* contents of the set
* set type
(\** Type for generating random instances of this character. Used for
testing. *\)
type gen
(\** Make a random set of parameters for generating mutually consistent and
comparable characters *\)
val rand_gen : unit -> gen
(\** Given a set of parameters, make a random character *\)
val make_rand : gen -> t
* An empty set of characters
* Find the cardinality (number of elements) of a set
* [add code elt cost set] adds an element [elt] with a given code and cost
to the character set [set]
* [del code set] returns a new set with [code] deleted
* return the list of codes
* return the list of code/cost pairs
* [get_elt_withcode code set] returns the element with code [code], if it exists
* [substitute a b] takes any element in [a] with a corresponding element
in [b] and replaces the element in [b] with the one in [a]. The set of
codes in [b] is not changed, but elements in [b] are overwritten by ones
from [a]. Of course, the sets are immutable, so the resulting set is
returned.
* [merge a b] returns the merge of [a] and [b]
* set subtraction
* [random p set] returns a random subset of [set], given a random boolean
generator [p].
* Turns a set into a [(code, elt, cost)] list
* Takes a list of [(code, elt, cost)] elements and returns a set
* gets the heuristic currently used on a given set
* sets the heuristic to be used
* [fold fun start set] calls [fun elt accum] to fold the set
* [f_codes set list] returns a subset with only codes that appear in list
* [iter fun set] calls [fun elt code color] for each element
* [map fun set] returns a new set, made by calling [fun elt code color],
which should return a new element
* [median prev left right] should calculate a median of [left] and
[right]. [prev] is provided as a set of medians of the "previous"
versions of [left] and [right]. However, under certain tree operations,
[prev] will be the median of other nodes; the program must check for
this case. See charset.ml for an example.
val update : t option -> t -> t -> t -> (t * update_res)
val median_3 : t -> t -> t -> t | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2014 , , , Ward Wheeler ,
and the American Museum of Natural History .
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
type update_res
val rContinue : update_res
val rStop : update_res
val new_code : unit -> int
type c = White | Black | Blue | Yellow | Red | Grey | Green | Brown
type h = Fast | Medium | Exact
* The [ CHARACTER ] signature is the general signature for different types of
characters . This signature is currently missing several functions :
* [ median_3 ]
* [ update ] for performing a 3median and returning an [ update_res ] value
Note that characters are meant to be immutable values .
characters. This signature is currently missing several functions:
* [median_3]
* [update] for performing a 3median and returning an [update_res] value
Note that characters are meant to be immutable values. *)
module type CHARACTER =
sig
type t
val code : t -> int
* [ median prev_median a b ] returns the median of [ a ] and [ b ] . Optionally ,
you may specify [ Some prev ] as [ prev_median ] , the median of the previous
versions of [ a ] and [ b ] . This is useful if [ a ] and [ b ] themselves keep
a list of changes from their previous incarnations . See the module
[ Charset ] for an example of how this is used .
you may specify [Some prev] as [prev_median], the median of the previous
versions of [a] and [b]. This is useful if [a] and [b] themselves keep
a list of changes from their previous incarnations. See the module
[Charset] for an example of how this is used. *)
val median : t option -> t -> t -> t
val median_3 : t -> t -> t -> t -> t
val reroot_median : t -> t -> t
val dist_2 : t -> t -> t -> float
* Distance between two character instances .
val distance : t -> t -> float
val compare_codes : t -> t -> int
val compare_data : t -> t -> int
val cardinal : t -> int
val deep_cardinal : t -> int
val to_string : t -> string
end
* [ CharacterSet]s are [ CHARACTER]s in their own right , but also have
additional functionality relevant to sets of characters . The [ Charset ]
module defines a functor that implements sets of a generic character .
additional functionality relevant to sets of characters. The [Charset]
module defines a functor that implements sets of a generic character. *)
module type CharacterSet = sig
val empty : t
val cardinal : t -> int
val deep_cardinal : t -> int
val code : t -> int
val set_code : t -> int -> t
val elt_code : e -> int
val add : e -> float -> t -> t
val del : int -> t -> t
val codes : t -> int list
val costs : t -> (int * float) list
val get_elt_withcode : int -> t -> e option
val substitute : t -> t -> t
val merge : t -> t -> t
val minus : t -> t -> t
val random : (unit -> bool) -> t -> t
val to_list : t -> (int * e * float) list
val of_list : (int * e * float) list -> t
val set_heu : h -> t -> t
val get_heu : t -> h
val fold : (e -> 'b -> 'b) -> 'b -> t -> 'b
val f_codes : t -> int list -> t
val iter : (e -> int -> unit) -> t -> unit
val map : (e -> int -> e) -> t -> t
val is_empty : t -> bool
val median : t option -> t -> t -> t
val to_string : t -> string
val distance_list : t -> t -> (int * float) list
val distance : t -> t -> float
val compare_codes : t -> t -> int
val compare_data : t -> t -> int
end
|
7cd0c523d5239363fa81efa6383505a84828afb031f4ee3343937c0beb16c518 | mirage/ocaml-matrix | send_to_device.ml | open Json_encoding
open Matrix_common
module Query = Empty.Query
module Request = struct
type t = {messages: (string * (string * Ezjsonm.value) list) list option}
[@@deriving accessor]
let encoding =
let to_tuple t = t.messages in
let of_tuple v =
let messages = v in
{messages} in
let with_tuple = obj1 (opt "messages" (assoc (assoc any))) in
conv to_tuple of_tuple with_tuple
end
module Response = Empty.Json
| null | https://raw.githubusercontent.com/mirage/ocaml-matrix/2a58d3d41c43404741f2dfdaf1d2d0f3757b2b69/lib/matrix-ctos/send_to_device.ml | ocaml | open Json_encoding
open Matrix_common
module Query = Empty.Query
module Request = struct
type t = {messages: (string * (string * Ezjsonm.value) list) list option}
[@@deriving accessor]
let encoding =
let to_tuple t = t.messages in
let of_tuple v =
let messages = v in
{messages} in
let with_tuple = obj1 (opt "messages" (assoc (assoc any))) in
conv to_tuple of_tuple with_tuple
end
module Response = Empty.Json
|
|
c56cf92fb517c3d0ee0d0694a0a8ab291b10d662b03cf4bfb03b51ffb7c4555b | philzook58/fib-anyon | Fib.hs | # LANGUAGE GADTs , TypeFamilies ,
StandaloneDeriving , UndecidableInstances ,
ScopedTypeVariables , FlexibleInstances , DataKinds ,
FunctionalDependencies , PolyKinds ,
TypeOperators , RankNTypes , MultiParamTypeClasses ,
TypeApplications , FlexibleContexts , AllowAmbiguousTypes #
StandaloneDeriving, UndecidableInstances,
ScopedTypeVariables, FlexibleInstances, DataKinds,
FunctionalDependencies, PolyKinds,
TypeOperators, RankNTypes, MultiParamTypeClasses,
TypeApplications, FlexibleContexts, AllowAmbiguousTypes #-}
AllowAmbiguousTypes , ImpredicativeTypes , InstanceSigs , NoImplicitPrelude ,
module Fib where
import Vec
import Data.Complex
import Control.Monad ((<=<))
import GHC.TypeNats
import Data.Proxy
data FibAnyon = I d | Tau
I started using DataKinds and it ended up being a problem .
data Tau
--data Id
type Id = ()
data FibTree root leaves where
TTT :: FibTree Tau l -> FibTree Tau r -> FibTree Tau (l,r)
ITT :: FibTree Tau l -> FibTree Tau r -> FibTree Id (l,r)
TIT :: FibTree Id l -> FibTree Tau r -> FibTree Tau (l,r)
TTI :: FibTree Tau l -> FibTree Id r -> FibTree Tau (l,r)
III :: FibTree Id l -> FibTree Id r -> FibTree Id (l,r)
TLeaf :: FibTree Tau Tau
ILeaf :: FibTree Id Id
-- pretty printing would be hella nice
deriving instance Show (FibTree a b)
deriving instance Eq (FibTree a b)
instance Ord (FibTree a b) where
compare (ITT l r) (ITT l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (ITT _ _) _ = LT
compare _ (ITT _ _) = GT
compare (TTI l r) (TTI l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (TIT l r) (TIT l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (TTT l r) (TTT l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (III l r) (III l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (TTI _ _) _ = LT
compare _ (TTI _ _) = GT
compare (TIT _ _) _ = LT
compare _ (TIT _ _) = GT
compare (TTT _ _) _ = LT
compare _ (TTT _ _) = GT
compare (III _ _) _ = LT
compare _ (III _ _) = GT
compare TLeaf TLeaf = EQ
compare ILeaf ILeaf = EQ
lmap :: (forall a. FibTree a b -> Q (FibTree a c)) -> (FibTree e (b,d) -> Q (FibTree e (c,d)))
lmap f (ITT l r) = fmap (\l' -> ITT l' r) (f l)
lmap f (TTI l r) = fmap (\l' -> TTI l' r) (f l)
lmap f (TIT l r) = fmap (\l' -> TIT l' r) (f l)
lmap f (TTT l r) = fmap (\l' -> TTT l' r) (f l)
lmap f (III l r) = fmap (\l' -> III l' r) (f l)
rmap :: (forall a. FibTree a b -> Q (FibTree a c)) -> (FibTree e (d,b) -> Q (FibTree e (d,c)))
rmap f (ITT l r) = fmap (\r' -> ITT l r') (f r)
rmap f (TTI l r) = fmap (\r' -> TTI l r') (f r)
rmap f (TIT l r) = fmap (\r' -> TIT l r') (f r)
rmap f (TTT l r) = fmap (\r' -> TTT l r') (f r)
rmap f (III l r) = fmap (\r' -> III l r') (f r)
braid :: FibTree a (l,r) -> Q (FibTree a (r,l))
braid (ITT l r) = W [(ITT r l, cis $ 4 * pi / 5)] -- different scalar factors for trivial and non trivial fusion
braid (TTT l r) = W [(TTT r l, (cis $ - 3 * pi / 5))]
braid (TTI l r) = pure $ TIT r l-- exchange with trivial means nothing
braid (TIT l r) = pure $ TTI r l
braid (III l r) = pure $ III r l
-- The inverse of braid
braid' :: FibTree a (l,r) -> Q (FibTree a (r,l))
braid' = star . braid
tau :: Complex Double
tau = ((sqrt 5) - 1) / 2 :+ 0 -- 0.618 :+ 0
fmove :: FibTree a (c,(d,e)) -> Q (FibTree a ((c,d),e))
fmove ( ITT a ( TTI b c ) ) = pure $ ITI ( TTT a b ) c -- pure ( auto ( auto a b ) c ) -- no maybe not . The internal one is n't auto
fmove (ITT a (TIT b c)) = pure $ ITT ( TTI a b) c
fmove (ITT a (TTT b c)) = pure $ ITT ( TTT a b) c
fmove (ITT a (TTI b c)) = pure $ III ( ITT a b) c
fmove (TIT a (TTT b c)) = pure $ TTT ( TIT a b) c
fmove (TIT a (TTI b c)) = pure $ TTI ( TIT a b) c
fmove (TIT a (TIT b c)) = pure $ TIT ( III a b) c
fmove ( TIT a ( TIT b c ) ) = TTT ( III a b ) c
-- the nontrivial ones have all tau on the leafs and root
-- internal I
fmove (TTI a (III b c)) = pure $ TTI ( TTI a b) c
fmove (TTI a (ITT b c)) = W [(TIT ( ITT a b) c, tau) , (TTT ( TTT a b) c, sqrt tau)]
-- internal T
fmove (TTT a (TTT b c)) = W [(TIT ( ITT a b) c, sqrt tau) , (TTT ( TTT a b) c, - tau )]
fmove (TTT a (TTI b c)) = pure $ TTI ( TTT a b) c
fmove (TTT a (TIT b c)) = pure $ TTT ( TTI a b) c
fmove (III a (ITT b c)) = pure $ ITT ( TIT a b) c
fmove (III a (III b c)) = pure $ III ( III a b) c
-- largely just a tranpose of the above case.
fmove' :: FibTree a ((c,d),e) -> Q (FibTree a (c,(d,e)))
fmove' (ITT ( TTI a b) c) = pure $ (ITT a (TIT b c))
fmove' (ITT ( TTT a b) c) = pure $ (ITT a (TTT b c))
fmove' (ITT ( TIT a b) c) = pure $ (III a (ITT b c))
fmoveq ( ITT a ( TTT b c ) ) = pure $
fmove' (TTI ( TTT a b) c) = pure $ (TTT a (TTI b c))
fmove' (TTI ( TTI a b) c) = pure $ (TTI a (III b c))
fmove' (TTI ( TIT a b) c) = pure $ TIT a (TTI b c)
fmoveq ( TTT a ( TTI b c ) ) = pure $ TTI ( TTT a b ) c
fmoveq ( TTT a ( TIT b c ) ) = pure $ TTT ( TTI a b ) c
fmove' (TIT ( ITT a b) c) = W [(TTI a (ITT b c), tau) , (TTT a (TTT b c) , sqrt tau)]
fmove' (TIT ( III a b) c ) = pure $ TIT a (TIT b c)
fmove' (TTT ( TTI a b) c ) = pure $ TTT a (TIT b c)
fmove' (TTT ( TIT a b) c ) = pure $ TIT a (TTT b c)
fmove' (TTT ( TTT a b) c) = W [(TTI a (ITT b c), sqrt tau) , (TTT a (TTT b c), - tau )]
fmove' (III ( III a b) c ) = pure $ III a (III b c)
fmove' (III ( ITT a b) c ) = pure $ ITT a (TTI b c)
rightUnit :: FibTree e (a,Id) -> Q (FibTree e a)
rightUnit (TTI t _) = pure t
rightUnit (III t _) = pure t
rightUnit' :: FibTree e a -> Q (FibTree e (a,Id))
rightUnit' t@(TTT _ _) = pure (TTI t ILeaf)
rightUnit' t@(TTI _ _) = pure (TTI t ILeaf)
rightUnit' t@(TIT _ _) = pure (TTI t ILeaf)
rightUnit' t@(III _ _) = pure (III t ILeaf)
rightUnit' t@(ITT _ _) = pure (III t ILeaf)
rightUnit' t@(ILeaf) = pure (III t ILeaf)
rightUnit' t@(TLeaf) = pure (TTI t ILeaf)
leftUnit :: FibTree e (Id,a) -> Q (FibTree e a)
leftUnit = rightUnit <=< braid
-- braid vs braid' doesn't matter, but it has a nice symmettry.
leftUnit' :: FibTree e a -> Q (FibTree e (Id,a))
leftUnit' = braid' <=< rightUnit'
dot :: FibTree a (b, c) -> FibTree a' (b, c) -> Q (FibTree a' a)
dot x@(TTI _ _) y@(TTI _ _) | x == y = pure TLeaf
| otherwise = mempty
dot x@(TIT _ _) y@(TIT _ _) | x == y = pure TLeaf
| otherwise = mempty
dot x@(TTT _ _) y@(TTT _ _) | x == y = pure TLeaf
| otherwise = mempty
dot x@(III _ _) y@(III _ _) | x == y = pure ILeaf
| otherwise = mempty
dot x@(ITT _ _) y@(ITT _ _) | x == y = pure ILeaf
| otherwise = mempty
dot _ _ = mempty
class AllTrees a b where
allTrees :: [FibTree a b]
instance AllTrees Tau Tau where
allTrees = [TLeaf]
instance AllTrees Id Tau where
allTrees = []
instance AllTrees Tau Id where
allTrees = []
instance AllTrees Id Id where
allTrees = [ILeaf]
instance (AllTrees Tau a,
AllTrees Id a,
AllTrees Tau b,
AllTrees Id b) => AllTrees Id (a,b) where
allTrees = (III <$> ia <*> ib) <> (ITT <$> ta <*> tb) where
ta = allTrees @Tau @a
ia = allTrees @Id @a
tb = allTrees @Tau @b
ib = allTrees @Id @b
instance (AllTrees Tau a,
AllTrees Id a,
AllTrees Tau b,
AllTrees Id b) => AllTrees Tau (a,b) where
allTrees = (TIT <$> ia <*> tb) <> (TTI <$> ta <*> ib) <> (TTT <$> ta <*> tb) where
ta = allTrees @Tau @a
ia = allTrees @Id @a
tb = allTrees @Tau @b
ib = allTrees @Id @b
t9 = allTrees @Tau @((Tau,Tau),Tau)
-- Resulting type depends on input
-- I think a typefamily type computation might be necessary?
pullLeft and pullRight might not need a type class .
pullLeft ( TTT l r ) = fmove ( TTT pl r )
-- where pl = pullLeft l
type family Assoc a where
Assoc ( ( a , b),c ) = ( a,(b , c ) )
type family PullLeft a where
PullLeft ( ( a , b),c ) = Assoc ( PullLeft ( a , b ) , c )
PullLeft a = a
type = PullLeft ( ( ( Int , Int),Int),Int )
pullLeft : : forall a b c. ( c ~ PullLeft b ) = > FibTree a b - > Q ( FibTree a c )
pullLeft TLeaf = pure
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t ' < - lmap pullLeft t
fmove ' t '
pullLeft t@(ITT l r ) = helper t
pullLeft t@(TTT l r ) = helper t
pullLeft t@(TTI l r ) = helper t
pullLeft t@(TIT l r ) = helper t
pullLeft t@(III l r ) = helper t where
helper : : ( d ~ PullLeft ( b , c ) ) = > FibTree a ( b , c ) - > Q ( FibTree a d )
helper t = do
t ' < - lmap pullLeft t
fmove ' t '
type family Assoc a where
Assoc ((a,b),c) = (a,(b,c))
type family PullLeft a where
PullLeft ((a,b),c) = Assoc (PullLeft (a, b), c)
PullLeft a = a
type Test1 = PullLeft (((Int,Int),Int),Int)
pullLeft :: forall a b c. (c ~ PullLeft b) => FibTree a b -> Q (FibTree a c)
pullLeft TLeaf = pure Tleaf
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t' <- lmap pullLeft t
fmove' t'
pullLeft t@(ITT l r) = helper t
pullLeft t@(TTT l r) = helper t
pullLeft t@(TTI l r) = helper t
pullLeft t@(TIT l r) = helper t
pullLeft t@(III l r) = helper t where
helper :: (d ~ PullLeft (b,c)) => FibTree a (b,c) -> Q (FibTree a d)
helper t = do
t' <- lmap pullLeft t
fmove' t'
-}
Spiritually this function is this . Ca n't write it this way unforturnately
-- There are typing problems
-- this is n't even right
-- we need to pattern match on ( ( ) , )
pullLeft TLeaf = pure
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t ' < - lmap pullLeft t
fmove ' t '
-- but actually a two level tree desctruct
--treeDestruct ( )
Spiritually this function is this. Can't write it this way unforturnately
-- There are typing problems
-- this isn't even right
-- we need to pattern match on ((),)
pullLeft TLeaf = pure Tleaf
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t' <- lmap pullLeft t
fmove' t'
-- but actually a two level tree desctruct
--treeDestruct()
-}
type family Assoc a where
Assoc ( ( a , b),c ) = ( a,(b , c ) )
type family PullLeft ' a where
PullLeft ' ( ( a , b),c ) = Assoc ( PullLeft ' ( a , b ) , c )
PullLeft ' a = a
type family Assoc a where
Assoc ((a,b),c) = (a,(b,c))
type family PullLeft' a where
PullLeft' ((a,b),c) = Assoc (PullLeft' (a, b), c)
PullLeft' a = a
-}
-- could seperate the typeclass so that I'm using type families instead.
class PullLeftLeaf a b | a -> b where
pullLeftLeaf :: FibTree c a -> Q (FibTree c b)
instance PullLeftLeaf (Tau,c) (Tau,c) where
pullLeftLeaf = pure
instance PullLeftLeaf (Id,c) (Id,c) where
pullLeftLeaf = pure
instance PullLeftLeaf Tau Tau where
pullLeftLeaf = pure
instance PullLeftLeaf Id Id where
pullLeftLeaf = pure
instance (PullLeftLeaf (a,b) (a',b'),
r ~ (a',(b',c))) => PullLeftLeaf ((a, b),c) r where
pullLeftLeaf t = do
t' <- lmap pullLeftLeaf t
fmove' t'
class PullRightLeaf a b | a -> b where -- | a -> b functional dependency causes errors?
pullRightLeaf :: FibTree c a -> Q (FibTree c b)
{-
instance (PullRightLeaf a a', r ~ ((b,c),a')) => PullRightLeaf ((b,c),a) r where
pullRightLeaf t = rmap pullRightLeaf t
-}
instance PullRightLeaf Tau Tau where
pullRightLeaf = pure
instance PullRightLeaf Id Id where
pullRightLeaf = pure
instance PullRightLeaf (c,Tau) (c,Tau) where
pullRightLeaf = pure
instance PullRightLeaf (c,Id) (c,Id) where
pullRightLeaf = pure
instance (PullRightLeaf (a,b) (a',b'), r ~ ((c,a'),b')) => PullRightLeaf (c,(a, b)) r where
pullRightLeaf t = do
t' <- rmap pullRightLeaf t
fmove t'
class RightAssoc a b | a -> b where
rightAssoc :: FibTree c a -> Q (FibTree c b)
instance RightAssoc Tau Tau where
rightAssoc = pure
instance RightAssoc Id Id where
rightAssoc = pure
instance (PullLeftLeaf (a,b) (a',b'),
RightAssoc b' b'',
r ~ (a', b'')) => RightAssoc (a,b) r where
rightAssoc t = do
t' <- pullLeftLeaf t
rmap rightAssoc t'
-- usually you'll want to force not the root, but the leaves to be some type
-- hence the ordering b c d a for type application
bmove :: forall b c d a. FibTree a (b,(c,d)) -> Q (FibTree a (c,(b,d)))
bmove t = do
t' :: FibTree a ((b,c),d) <- fmove t
t'' :: FibTree a ((c,b),d) <- lmap braid t'
fmove' t''
bmove' :: forall b c d a. FibTree a (b,(c,d)) -> Q (FibTree a (c,(b,d)))
bmove' = fmove' <=< (lmap braid') <=< fmove
Therei s a general pattern for digging into
-- n and a tell us b, the subpiece of a
-- c and a and n tell use what a looks like with b replaced = d
data
rmapN :: forall n gte s t a b e. (RMapN n gte s t a b, gte ~ (CmpNat n 0)) => (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
rmapN f t = rmapN' @n @gte f t
class RMapN n gte s t a b | n gte s b -> a t where
rmapN' :: (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
instance (a ~ s, b ~ t) => RMapN 0 'EQ s t a b where
rmapN' f t = f t -- rmapN = id
instance (RMapN (n-1) gte r r' a b,
gte ~ (CmpNat (n-1) 0),
t ~ (l,r')) => RMapN n 'GT (l,r) t a b where
rmapN' f t = rmap (rmapN @(n-1) f) t
bmoveN : : forall n a b c d e f. RMapN n a ( b , ( c , d ) ) ( c , ( b , d ) ) e = > FibTree f a - > Q ( FibTree f e )
bmoveN t = rmapN @n ( bmove @b ) t
-- dotN : : forall n a b c d e f. dot : : FibTree a ( b , c ) - > FibTree a ' ( b , c ) - > Q ( FibTree a ' a )
fuseN : : forall n a b c d e f g. RMapN n f ( b,(c , d ) ) ( a , d ) g = > FibTree a ( b , c ) - > FibTree e f - > Q ( FibTree e g )
fuseN q t = rmapN @n f t where
f : : forall r. FibTree r ( b,(c , d ) ) - > Q ( FibTree r ( a , d ) )
f t ' = do
t '' < - fmove t '
lmap ( dot q ) t ''
bmoveN :: forall n a b c d e f. RMapN n a (b, (c, d)) (c, (b, d)) e => FibTree f a -> Q (FibTree f e)
bmoveN t = rmapN @n (bmove @b @c @d) t
-- dotN :: forall n a b c d e f. dot :: FibTree a (b, c) -> FibTree a' (b, c) -> Q (FibTree a' a)
fuseN :: forall n a b c d e f g. RMapN n f (b,(c,d)) (a,d) g => FibTree a (b, c) -> FibTree e f -> Q (FibTree e g)
fuseN q t = rmapN @n f t where
f :: forall r. FibTree r (b,(c,d)) -> Q (FibTree r (a,d))
f t' = do
t'' <- fmove t'
lmap (dot q) t''
-}
type family Count a where
Count Tau = 1
Count Id = 1
Count (a,b) = (Count a) + (Count b)
type family LeftCount a where
LeftCount (a,b) = Count a
lcamap :: forall n s t a b e gte .
(gte ~ CmpNat (LeftCount s) n,
LCAMap n gte s t a b)
=> (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
lcamap f t = lcamap' @n @gte f t
class LCAMap n gte s t a b | n gte s b -> t a where
lcamap' :: (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
instance (n' ~ (n - Count l), -- We're searching in the right subtree. Subtract the leaf number in the left subtree
dip one level down to order which way we have to go next
gte ~ (CmpNat lc n'), -- Do we go left, right or have we arrived in the next layer?
LCAMap n' gte r r' a b, -- recursive call
t ~ (l,r') -- reconstruct total return type from recursive return type. Left tree is unaffected by lcamapping
) => LCAMap n 'LT (l,r) t a b where
lcamap' f x = rmap (lcamap @n' f) x
instance (lc ~ (LeftCount l),
gte ~ (CmpNat lc n),
LCAMap n gte l l' a b,
t ~ (l',r)
) => LCAMap n 'GT (l,r) t a b where
lcamap' f x = lmap (lcamap @n f) x
instance (t ~ b, a ~ s) => LCAMap n 'EQ s t a b where -- Base case
lcamap' f x = f x
class Twiddle s t a b | s b -> t a where
twiddle :: (forall r. FibTree r a -> Q (FibTree r b)) -> FibTree e s -> Q (FibTree e t)
instance Twiddle ((l,x),(y,r)) ((l,c),r) (x,y) c where
twiddle f x = do
x' <- fmove x -- (((l',x),y),r')
x'' <- lmap fmove' x' -- ((l',(x,y)),r')
lmap (rmap f) x''
instance Twiddle (Tau, (y,r)) (c,r) (Tau, y) c where
twiddle f x = fmove x >>= lmap f
instance Twiddle (Id, (y,r)) (c,r) (Id, y) c where
twiddle f x = fmove x >>= lmap f
instance Twiddle ((l,x), Tau) (l,c) (x,Tau) c where
twiddle f x = fmove' x >>= rmap f
instance Twiddle ((l,x), Id) (l,c) (x,Id) c where
twiddle f x = fmove' x >>= rmap f
instance Twiddle (Tau, Tau) c (Tau,Tau) c where
twiddle f x = f x
instance Twiddle (Id, Id) c (Id,Id) c where
twiddle f x = f x
instance Twiddle (Tau, Id) c (Tau,Id) c where
twiddle f x = f x
instance Twiddle (Id, Tau) c (Id,Tau) c where
twiddle f x = f x
nmap :: forall (n :: Nat) s t a b a' b' l l' r r' e gte.
(gte ~ CmpNat (LeftCount s) n,
LCAMap n gte s t a' b',
a' ~ (l,r),
PullRightLeaf l l',
PullLeftLeaf r r',
Twiddle (l',r') b' a b) =>
(forall r. FibTree r a -> Q (FibTree r b)) -> FibTree e s -> Q (FibTree e t)
nmap f z = lcamap @n @s @t @a' @b' (\x -> do
x' <- lmap pullRightLeaf x
x'' <- rmap pullLeftLeaf x'
twiddle f x'') z
t1 = nmap @2 braid (TTT (TTI TLeaf ILeaf) (TTT TLeaf TLeaf))
t5 = nmap @2 pure (TTT (TTI TLeaf ILeaf) (TTT TLeaf TLeaf)) >>= nmap @3 pure
t2 = nmap @1 braid (TTT (TTI TLeaf ILeaf) (TTT TLeaf TLeaf))
t4 = nmap @1 braid (TTT TLeaf (TTT TLeaf TLeaf))
t3 = nmap @2 braid (TTT (TTT (TTT TLeaf TLeaf) TLeaf) (TTT TLeaf TLeaf))
t6 = rightAssoc (TTT (TTT (TTT TLeaf TLeaf) TLeaf) (TTT TLeaf TLeaf))
t7 = t6 >>= bmove
t8 = t6 >>= rmapN @0 bmove
-- For the category, we probably don't want the association structure.
' [ Tau , I d , , I d ] typelevel list of particles
--
ttt = TTT TLeaf TLeaf
example = starttree >>=
nmap @1 braid >>=
nmap @2 braid >>=
nmap @1 (dot ttt) >>=
nmap @1 braid' >>=
nmap @2 (dot ttt) >>=
nmap @1 (dot ttt) where
starttree = pure (TTT (TTT TLeaf
(TTT TLeaf
TLeaf))
TLeaf
)
-- would be nice to use an unknot
-- example2 =
I should use neighbormap for this .
-- maybe I should call f fuse and make it global?
bmoveN t = rmapN @n ( bmove : : forall r. FibTree r ( b,(c , d ) ) - > Q ( FibTree r ( c,(b , d ) ) ) ) t
bmoveN t = rmapN @n @a @(b , ( c , d ) ) @(c , ( b , d ) ) @e bmove t
bmoveN : : forall n a b c d e f. RMapN n a ( b , ( c , d ) ) ( c , ( b , d ) ) e = > Proxy n - > FibTree f a - > Q ( FibTree f e )
bmoveN p t = rmapN p ( bmove : : FibTree a ( b,(c , d ) ) - > Q ( FibTree a ( c,(b , d ) ) ) ) t
bmoveN :: forall n a b c d e f. RMapN n a (b, (c, d)) (c, (b, d)) e => Proxy n -> FibTree f a -> Q (FibTree f e)
bmoveN p t = rmapN p (bmove :: FibTree a (b,(c,d)) -> Q (FibTree a (c,(b,d)))) t
-}
{- do
t' <- fmove t
t'' <- lmap braid' t'
fmove' t''
-}
--
{-
pullLeftLeaf
pullRightLeaf
-}
class Standardize a b | a - > b where
standardize : : FibTree c a - > Q ( FibTree c b )
instance Standardize a b where
type family Append a b where
Append ( a , b ) c = ( a , Append b c )
Append a c = ( a , c )
type family Leftize where
Leftize ( a , b ) = Append ( Leftize a ) ( Leftize b )
Leftize a = a
fullrightassoc = standardize
completerightassoc
bmove = fmove braid fmove '
standardize : : FibTree c a - > Q ( FibTree c ( Leftize a ) )
standardize ( TLeaf ) = TLeaf
standardize ( ILeaf ) = ILeaf
standardize t = do
x < - pullLeft t
rmap standardize x
lcamap : : FibTree a b - > Proxy ( a : : Int ) - > FibTree a ?
lcamap f n t@(TTT l r ) | count l = = n = f t
| count l < n = f ( n - count l ) r
| otherwise = lcamap f n l
pullRight ( )
treerecursor : : FibTree a b - > ( l - > r - > c ) - > ( leaf - > c ) - > c
treerecursor ( TTT l r ) f g = f l r
treerecursor ( TLeaf ) = g Tleaf
class Standardize a b | a -> b where
standardize :: FibTree c a -> Q (FibTree c b)
instance Standardize a b where
type family Append a b where
Append (a,b) c = (a, Append b c)
Append a c = (a, c)
type family Leftize where
Leftize (a,b) = Append (Leftize a) (Leftize b)
Leftize a = a
fullrightassoc = standardize
completerightassoc
bmove = fmove braid fmove'
standardize :: FibTree c a -> Q (FibTree c (Leftize a))
standardize (TLeaf) = TLeaf
standardize (ILeaf) = ILeaf
standardize t = do
x <- pullLeft t
rmap standardize x
lcamap :: FibTree a b -> Proxy (a :: Int) -> FibTree a ?
lcamap f n t@(TTT l r) | count l == n = f t
| count l < n = lcamap f (n - count l) r
| otherwise = lcamap f n l
pullRight ()
treerecursor :: FibTree a b -> (l -> r -> c) -> (leaf -> c) -> c
treerecursor (TTT l r) f g = f l r
treerecursor (TLeaf) = g Tleaf
-} | null | https://raw.githubusercontent.com/philzook58/fib-anyon/5c81535201ffdd5a40db18510ce894be9ccccbd7/src/Fib.hs | haskell | data Id
pretty printing would be hella nice
different scalar factors for trivial and non trivial fusion
exchange with trivial means nothing
The inverse of braid
0.618 :+ 0
pure ( auto ( auto a b ) c ) -- no maybe not . The internal one is n't auto
the nontrivial ones have all tau on the leafs and root
internal I
internal T
largely just a tranpose of the above case.
braid vs braid' doesn't matter, but it has a nice symmettry.
Resulting type depends on input
I think a typefamily type computation might be necessary?
where pl = pullLeft l
There are typing problems
this is n't even right
we need to pattern match on ( ( ) , )
but actually a two level tree desctruct
treeDestruct ( )
There are typing problems
this isn't even right
we need to pattern match on ((),)
but actually a two level tree desctruct
treeDestruct()
could seperate the typeclass so that I'm using type families instead.
| a -> b functional dependency causes errors?
instance (PullRightLeaf a a', r ~ ((b,c),a')) => PullRightLeaf ((b,c),a) r where
pullRightLeaf t = rmap pullRightLeaf t
usually you'll want to force not the root, but the leaves to be some type
hence the ordering b c d a for type application
n and a tell us b, the subpiece of a
c and a and n tell use what a looks like with b replaced = d
rmapN = id
dotN : : forall n a b c d e f. dot : : FibTree a ( b , c ) - > FibTree a ' ( b , c ) - > Q ( FibTree a ' a )
dotN :: forall n a b c d e f. dot :: FibTree a (b, c) -> FibTree a' (b, c) -> Q (FibTree a' a)
We're searching in the right subtree. Subtract the leaf number in the left subtree
Do we go left, right or have we arrived in the next layer?
recursive call
reconstruct total return type from recursive return type. Left tree is unaffected by lcamapping
Base case
(((l',x),y),r')
((l',(x,y)),r')
For the category, we probably don't want the association structure.
would be nice to use an unknot
example2 =
maybe I should call f fuse and make it global?
do
t' <- fmove t
t'' <- lmap braid' t'
fmove' t''
pullLeftLeaf
pullRightLeaf
| # LANGUAGE GADTs , TypeFamilies ,
StandaloneDeriving , UndecidableInstances ,
ScopedTypeVariables , FlexibleInstances , DataKinds ,
FunctionalDependencies , PolyKinds ,
TypeOperators , RankNTypes , MultiParamTypeClasses ,
TypeApplications , FlexibleContexts , AllowAmbiguousTypes #
StandaloneDeriving, UndecidableInstances,
ScopedTypeVariables, FlexibleInstances, DataKinds,
FunctionalDependencies, PolyKinds,
TypeOperators, RankNTypes, MultiParamTypeClasses,
TypeApplications, FlexibleContexts, AllowAmbiguousTypes #-}
AllowAmbiguousTypes , ImpredicativeTypes , InstanceSigs , NoImplicitPrelude ,
module Fib where
import Vec
import Data.Complex
import Control.Monad ((<=<))
import GHC.TypeNats
import Data.Proxy
data FibAnyon = I d | Tau
I started using DataKinds and it ended up being a problem .
data Tau
type Id = ()
data FibTree root leaves where
TTT :: FibTree Tau l -> FibTree Tau r -> FibTree Tau (l,r)
ITT :: FibTree Tau l -> FibTree Tau r -> FibTree Id (l,r)
TIT :: FibTree Id l -> FibTree Tau r -> FibTree Tau (l,r)
TTI :: FibTree Tau l -> FibTree Id r -> FibTree Tau (l,r)
III :: FibTree Id l -> FibTree Id r -> FibTree Id (l,r)
TLeaf :: FibTree Tau Tau
ILeaf :: FibTree Id Id
deriving instance Show (FibTree a b)
deriving instance Eq (FibTree a b)
instance Ord (FibTree a b) where
compare (ITT l r) (ITT l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (ITT _ _) _ = LT
compare _ (ITT _ _) = GT
compare (TTI l r) (TTI l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (TIT l r) (TIT l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (TTT l r) (TTT l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (III l r) (III l' r') | l < l' = LT
| l > l' = GT
| otherwise = compare r r'
compare (TTI _ _) _ = LT
compare _ (TTI _ _) = GT
compare (TIT _ _) _ = LT
compare _ (TIT _ _) = GT
compare (TTT _ _) _ = LT
compare _ (TTT _ _) = GT
compare (III _ _) _ = LT
compare _ (III _ _) = GT
compare TLeaf TLeaf = EQ
compare ILeaf ILeaf = EQ
lmap :: (forall a. FibTree a b -> Q (FibTree a c)) -> (FibTree e (b,d) -> Q (FibTree e (c,d)))
lmap f (ITT l r) = fmap (\l' -> ITT l' r) (f l)
lmap f (TTI l r) = fmap (\l' -> TTI l' r) (f l)
lmap f (TIT l r) = fmap (\l' -> TIT l' r) (f l)
lmap f (TTT l r) = fmap (\l' -> TTT l' r) (f l)
lmap f (III l r) = fmap (\l' -> III l' r) (f l)
rmap :: (forall a. FibTree a b -> Q (FibTree a c)) -> (FibTree e (d,b) -> Q (FibTree e (d,c)))
rmap f (ITT l r) = fmap (\r' -> ITT l r') (f r)
rmap f (TTI l r) = fmap (\r' -> TTI l r') (f r)
rmap f (TIT l r) = fmap (\r' -> TIT l r') (f r)
rmap f (TTT l r) = fmap (\r' -> TTT l r') (f r)
rmap f (III l r) = fmap (\r' -> III l r') (f r)
braid :: FibTree a (l,r) -> Q (FibTree a (r,l))
braid (TTT l r) = W [(TTT r l, (cis $ - 3 * pi / 5))]
braid (TIT l r) = pure $ TTI r l
braid (III l r) = pure $ III r l
braid' :: FibTree a (l,r) -> Q (FibTree a (r,l))
braid' = star . braid
tau :: Complex Double
fmove :: FibTree a (c,(d,e)) -> Q (FibTree a ((c,d),e))
fmove (ITT a (TIT b c)) = pure $ ITT ( TTI a b) c
fmove (ITT a (TTT b c)) = pure $ ITT ( TTT a b) c
fmove (ITT a (TTI b c)) = pure $ III ( ITT a b) c
fmove (TIT a (TTT b c)) = pure $ TTT ( TIT a b) c
fmove (TIT a (TTI b c)) = pure $ TTI ( TIT a b) c
fmove (TIT a (TIT b c)) = pure $ TIT ( III a b) c
fmove ( TIT a ( TIT b c ) ) = TTT ( III a b ) c
fmove (TTI a (III b c)) = pure $ TTI ( TTI a b) c
fmove (TTI a (ITT b c)) = W [(TIT ( ITT a b) c, tau) , (TTT ( TTT a b) c, sqrt tau)]
fmove (TTT a (TTT b c)) = W [(TIT ( ITT a b) c, sqrt tau) , (TTT ( TTT a b) c, - tau )]
fmove (TTT a (TTI b c)) = pure $ TTI ( TTT a b) c
fmove (TTT a (TIT b c)) = pure $ TTT ( TTI a b) c
fmove (III a (ITT b c)) = pure $ ITT ( TIT a b) c
fmove (III a (III b c)) = pure $ III ( III a b) c
fmove' :: FibTree a ((c,d),e) -> Q (FibTree a (c,(d,e)))
fmove' (ITT ( TTI a b) c) = pure $ (ITT a (TIT b c))
fmove' (ITT ( TTT a b) c) = pure $ (ITT a (TTT b c))
fmove' (ITT ( TIT a b) c) = pure $ (III a (ITT b c))
fmoveq ( ITT a ( TTT b c ) ) = pure $
fmove' (TTI ( TTT a b) c) = pure $ (TTT a (TTI b c))
fmove' (TTI ( TTI a b) c) = pure $ (TTI a (III b c))
fmove' (TTI ( TIT a b) c) = pure $ TIT a (TTI b c)
fmoveq ( TTT a ( TTI b c ) ) = pure $ TTI ( TTT a b ) c
fmoveq ( TTT a ( TIT b c ) ) = pure $ TTT ( TTI a b ) c
fmove' (TIT ( ITT a b) c) = W [(TTI a (ITT b c), tau) , (TTT a (TTT b c) , sqrt tau)]
fmove' (TIT ( III a b) c ) = pure $ TIT a (TIT b c)
fmove' (TTT ( TTI a b) c ) = pure $ TTT a (TIT b c)
fmove' (TTT ( TIT a b) c ) = pure $ TIT a (TTT b c)
fmove' (TTT ( TTT a b) c) = W [(TTI a (ITT b c), sqrt tau) , (TTT a (TTT b c), - tau )]
fmove' (III ( III a b) c ) = pure $ III a (III b c)
fmove' (III ( ITT a b) c ) = pure $ ITT a (TTI b c)
rightUnit :: FibTree e (a,Id) -> Q (FibTree e a)
rightUnit (TTI t _) = pure t
rightUnit (III t _) = pure t
rightUnit' :: FibTree e a -> Q (FibTree e (a,Id))
rightUnit' t@(TTT _ _) = pure (TTI t ILeaf)
rightUnit' t@(TTI _ _) = pure (TTI t ILeaf)
rightUnit' t@(TIT _ _) = pure (TTI t ILeaf)
rightUnit' t@(III _ _) = pure (III t ILeaf)
rightUnit' t@(ITT _ _) = pure (III t ILeaf)
rightUnit' t@(ILeaf) = pure (III t ILeaf)
rightUnit' t@(TLeaf) = pure (TTI t ILeaf)
leftUnit :: FibTree e (Id,a) -> Q (FibTree e a)
leftUnit = rightUnit <=< braid
leftUnit' :: FibTree e a -> Q (FibTree e (Id,a))
leftUnit' = braid' <=< rightUnit'
dot :: FibTree a (b, c) -> FibTree a' (b, c) -> Q (FibTree a' a)
dot x@(TTI _ _) y@(TTI _ _) | x == y = pure TLeaf
| otherwise = mempty
dot x@(TIT _ _) y@(TIT _ _) | x == y = pure TLeaf
| otherwise = mempty
dot x@(TTT _ _) y@(TTT _ _) | x == y = pure TLeaf
| otherwise = mempty
dot x@(III _ _) y@(III _ _) | x == y = pure ILeaf
| otherwise = mempty
dot x@(ITT _ _) y@(ITT _ _) | x == y = pure ILeaf
| otherwise = mempty
dot _ _ = mempty
class AllTrees a b where
allTrees :: [FibTree a b]
instance AllTrees Tau Tau where
allTrees = [TLeaf]
instance AllTrees Id Tau where
allTrees = []
instance AllTrees Tau Id where
allTrees = []
instance AllTrees Id Id where
allTrees = [ILeaf]
instance (AllTrees Tau a,
AllTrees Id a,
AllTrees Tau b,
AllTrees Id b) => AllTrees Id (a,b) where
allTrees = (III <$> ia <*> ib) <> (ITT <$> ta <*> tb) where
ta = allTrees @Tau @a
ia = allTrees @Id @a
tb = allTrees @Tau @b
ib = allTrees @Id @b
instance (AllTrees Tau a,
AllTrees Id a,
AllTrees Tau b,
AllTrees Id b) => AllTrees Tau (a,b) where
allTrees = (TIT <$> ia <*> tb) <> (TTI <$> ta <*> ib) <> (TTT <$> ta <*> tb) where
ta = allTrees @Tau @a
ia = allTrees @Id @a
tb = allTrees @Tau @b
ib = allTrees @Id @b
t9 = allTrees @Tau @((Tau,Tau),Tau)
pullLeft and pullRight might not need a type class .
pullLeft ( TTT l r ) = fmove ( TTT pl r )
type family Assoc a where
Assoc ( ( a , b),c ) = ( a,(b , c ) )
type family PullLeft a where
PullLeft ( ( a , b),c ) = Assoc ( PullLeft ( a , b ) , c )
PullLeft a = a
type = PullLeft ( ( ( Int , Int),Int),Int )
pullLeft : : forall a b c. ( c ~ PullLeft b ) = > FibTree a b - > Q ( FibTree a c )
pullLeft TLeaf = pure
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t ' < - lmap pullLeft t
fmove ' t '
pullLeft t@(ITT l r ) = helper t
pullLeft t@(TTT l r ) = helper t
pullLeft t@(TTI l r ) = helper t
pullLeft t@(TIT l r ) = helper t
pullLeft t@(III l r ) = helper t where
helper : : ( d ~ PullLeft ( b , c ) ) = > FibTree a ( b , c ) - > Q ( FibTree a d )
helper t = do
t ' < - lmap pullLeft t
fmove ' t '
type family Assoc a where
Assoc ((a,b),c) = (a,(b,c))
type family PullLeft a where
PullLeft ((a,b),c) = Assoc (PullLeft (a, b), c)
PullLeft a = a
type Test1 = PullLeft (((Int,Int),Int),Int)
pullLeft :: forall a b c. (c ~ PullLeft b) => FibTree a b -> Q (FibTree a c)
pullLeft TLeaf = pure Tleaf
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t' <- lmap pullLeft t
fmove' t'
pullLeft t@(ITT l r) = helper t
pullLeft t@(TTT l r) = helper t
pullLeft t@(TTI l r) = helper t
pullLeft t@(TIT l r) = helper t
pullLeft t@(III l r) = helper t where
helper :: (d ~ PullLeft (b,c)) => FibTree a (b,c) -> Q (FibTree a d)
helper t = do
t' <- lmap pullLeft t
fmove' t'
-}
Spiritually this function is this . Ca n't write it this way unforturnately
pullLeft TLeaf = pure
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t ' < - lmap pullLeft t
fmove ' t '
Spiritually this function is this. Can't write it this way unforturnately
pullLeft TLeaf = pure Tleaf
pullLeft ILeaf = pure ILeaf
pullLeft t = do
t' <- lmap pullLeft t
fmove' t'
-}
type family Assoc a where
Assoc ( ( a , b),c ) = ( a,(b , c ) )
type family PullLeft ' a where
PullLeft ' ( ( a , b),c ) = Assoc ( PullLeft ' ( a , b ) , c )
PullLeft ' a = a
type family Assoc a where
Assoc ((a,b),c) = (a,(b,c))
type family PullLeft' a where
PullLeft' ((a,b),c) = Assoc (PullLeft' (a, b), c)
PullLeft' a = a
-}
class PullLeftLeaf a b | a -> b where
pullLeftLeaf :: FibTree c a -> Q (FibTree c b)
instance PullLeftLeaf (Tau,c) (Tau,c) where
pullLeftLeaf = pure
instance PullLeftLeaf (Id,c) (Id,c) where
pullLeftLeaf = pure
instance PullLeftLeaf Tau Tau where
pullLeftLeaf = pure
instance PullLeftLeaf Id Id where
pullLeftLeaf = pure
instance (PullLeftLeaf (a,b) (a',b'),
r ~ (a',(b',c))) => PullLeftLeaf ((a, b),c) r where
pullLeftLeaf t = do
t' <- lmap pullLeftLeaf t
fmove' t'
pullRightLeaf :: FibTree c a -> Q (FibTree c b)
instance PullRightLeaf Tau Tau where
pullRightLeaf = pure
instance PullRightLeaf Id Id where
pullRightLeaf = pure
instance PullRightLeaf (c,Tau) (c,Tau) where
pullRightLeaf = pure
instance PullRightLeaf (c,Id) (c,Id) where
pullRightLeaf = pure
instance (PullRightLeaf (a,b) (a',b'), r ~ ((c,a'),b')) => PullRightLeaf (c,(a, b)) r where
pullRightLeaf t = do
t' <- rmap pullRightLeaf t
fmove t'
class RightAssoc a b | a -> b where
rightAssoc :: FibTree c a -> Q (FibTree c b)
instance RightAssoc Tau Tau where
rightAssoc = pure
instance RightAssoc Id Id where
rightAssoc = pure
instance (PullLeftLeaf (a,b) (a',b'),
RightAssoc b' b'',
r ~ (a', b'')) => RightAssoc (a,b) r where
rightAssoc t = do
t' <- pullLeftLeaf t
rmap rightAssoc t'
bmove :: forall b c d a. FibTree a (b,(c,d)) -> Q (FibTree a (c,(b,d)))
bmove t = do
t' :: FibTree a ((b,c),d) <- fmove t
t'' :: FibTree a ((c,b),d) <- lmap braid t'
fmove' t''
bmove' :: forall b c d a. FibTree a (b,(c,d)) -> Q (FibTree a (c,(b,d)))
bmove' = fmove' <=< (lmap braid') <=< fmove
Therei s a general pattern for digging into
data
rmapN :: forall n gte s t a b e. (RMapN n gte s t a b, gte ~ (CmpNat n 0)) => (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
rmapN f t = rmapN' @n @gte f t
class RMapN n gte s t a b | n gte s b -> a t where
rmapN' :: (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
instance (a ~ s, b ~ t) => RMapN 0 'EQ s t a b where
instance (RMapN (n-1) gte r r' a b,
gte ~ (CmpNat (n-1) 0),
t ~ (l,r')) => RMapN n 'GT (l,r) t a b where
rmapN' f t = rmap (rmapN @(n-1) f) t
bmoveN : : forall n a b c d e f. RMapN n a ( b , ( c , d ) ) ( c , ( b , d ) ) e = > FibTree f a - > Q ( FibTree f e )
bmoveN t = rmapN @n ( bmove @b ) t
fuseN : : forall n a b c d e f g. RMapN n f ( b,(c , d ) ) ( a , d ) g = > FibTree a ( b , c ) - > FibTree e f - > Q ( FibTree e g )
fuseN q t = rmapN @n f t where
f : : forall r. FibTree r ( b,(c , d ) ) - > Q ( FibTree r ( a , d ) )
f t ' = do
t '' < - fmove t '
lmap ( dot q ) t ''
bmoveN :: forall n a b c d e f. RMapN n a (b, (c, d)) (c, (b, d)) e => FibTree f a -> Q (FibTree f e)
bmoveN t = rmapN @n (bmove @b @c @d) t
fuseN :: forall n a b c d e f g. RMapN n f (b,(c,d)) (a,d) g => FibTree a (b, c) -> FibTree e f -> Q (FibTree e g)
fuseN q t = rmapN @n f t where
f :: forall r. FibTree r (b,(c,d)) -> Q (FibTree r (a,d))
f t' = do
t'' <- fmove t'
lmap (dot q) t''
-}
type family Count a where
Count Tau = 1
Count Id = 1
Count (a,b) = (Count a) + (Count b)
type family LeftCount a where
LeftCount (a,b) = Count a
lcamap :: forall n s t a b e gte .
(gte ~ CmpNat (LeftCount s) n,
LCAMap n gte s t a b)
=> (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
lcamap f t = lcamap' @n @gte f t
class LCAMap n gte s t a b | n gte s b -> t a where
lcamap' :: (forall r. FibTree r a -> Q (FibTree r b)) -> (FibTree e s) -> Q (FibTree e t)
dip one level down to order which way we have to go next
) => LCAMap n 'LT (l,r) t a b where
lcamap' f x = rmap (lcamap @n' f) x
instance (lc ~ (LeftCount l),
gte ~ (CmpNat lc n),
LCAMap n gte l l' a b,
t ~ (l',r)
) => LCAMap n 'GT (l,r) t a b where
lcamap' f x = lmap (lcamap @n f) x
lcamap' f x = f x
class Twiddle s t a b | s b -> t a where
twiddle :: (forall r. FibTree r a -> Q (FibTree r b)) -> FibTree e s -> Q (FibTree e t)
instance Twiddle ((l,x),(y,r)) ((l,c),r) (x,y) c where
twiddle f x = do
lmap (rmap f) x''
instance Twiddle (Tau, (y,r)) (c,r) (Tau, y) c where
twiddle f x = fmove x >>= lmap f
instance Twiddle (Id, (y,r)) (c,r) (Id, y) c where
twiddle f x = fmove x >>= lmap f
instance Twiddle ((l,x), Tau) (l,c) (x,Tau) c where
twiddle f x = fmove' x >>= rmap f
instance Twiddle ((l,x), Id) (l,c) (x,Id) c where
twiddle f x = fmove' x >>= rmap f
instance Twiddle (Tau, Tau) c (Tau,Tau) c where
twiddle f x = f x
instance Twiddle (Id, Id) c (Id,Id) c where
twiddle f x = f x
instance Twiddle (Tau, Id) c (Tau,Id) c where
twiddle f x = f x
instance Twiddle (Id, Tau) c (Id,Tau) c where
twiddle f x = f x
nmap :: forall (n :: Nat) s t a b a' b' l l' r r' e gte.
(gte ~ CmpNat (LeftCount s) n,
LCAMap n gte s t a' b',
a' ~ (l,r),
PullRightLeaf l l',
PullLeftLeaf r r',
Twiddle (l',r') b' a b) =>
(forall r. FibTree r a -> Q (FibTree r b)) -> FibTree e s -> Q (FibTree e t)
nmap f z = lcamap @n @s @t @a' @b' (\x -> do
x' <- lmap pullRightLeaf x
x'' <- rmap pullLeftLeaf x'
twiddle f x'') z
t1 = nmap @2 braid (TTT (TTI TLeaf ILeaf) (TTT TLeaf TLeaf))
t5 = nmap @2 pure (TTT (TTI TLeaf ILeaf) (TTT TLeaf TLeaf)) >>= nmap @3 pure
t2 = nmap @1 braid (TTT (TTI TLeaf ILeaf) (TTT TLeaf TLeaf))
t4 = nmap @1 braid (TTT TLeaf (TTT TLeaf TLeaf))
t3 = nmap @2 braid (TTT (TTT (TTT TLeaf TLeaf) TLeaf) (TTT TLeaf TLeaf))
t6 = rightAssoc (TTT (TTT (TTT TLeaf TLeaf) TLeaf) (TTT TLeaf TLeaf))
t7 = t6 >>= bmove
t8 = t6 >>= rmapN @0 bmove
' [ Tau , I d , , I d ] typelevel list of particles
ttt = TTT TLeaf TLeaf
example = starttree >>=
nmap @1 braid >>=
nmap @2 braid >>=
nmap @1 (dot ttt) >>=
nmap @1 braid' >>=
nmap @2 (dot ttt) >>=
nmap @1 (dot ttt) where
starttree = pure (TTT (TTT TLeaf
(TTT TLeaf
TLeaf))
TLeaf
)
I should use neighbormap for this .
bmoveN t = rmapN @n ( bmove : : forall r. FibTree r ( b,(c , d ) ) - > Q ( FibTree r ( c,(b , d ) ) ) ) t
bmoveN t = rmapN @n @a @(b , ( c , d ) ) @(c , ( b , d ) ) @e bmove t
bmoveN : : forall n a b c d e f. RMapN n a ( b , ( c , d ) ) ( c , ( b , d ) ) e = > Proxy n - > FibTree f a - > Q ( FibTree f e )
bmoveN p t = rmapN p ( bmove : : FibTree a ( b,(c , d ) ) - > Q ( FibTree a ( c,(b , d ) ) ) ) t
bmoveN :: forall n a b c d e f. RMapN n a (b, (c, d)) (c, (b, d)) e => Proxy n -> FibTree f a -> Q (FibTree f e)
bmoveN p t = rmapN p (bmove :: FibTree a (b,(c,d)) -> Q (FibTree a (c,(b,d)))) t
-}
class Standardize a b | a - > b where
standardize : : FibTree c a - > Q ( FibTree c b )
instance Standardize a b where
type family Append a b where
Append ( a , b ) c = ( a , Append b c )
Append a c = ( a , c )
type family Leftize where
Leftize ( a , b ) = Append ( Leftize a ) ( Leftize b )
Leftize a = a
fullrightassoc = standardize
completerightassoc
bmove = fmove braid fmove '
standardize : : FibTree c a - > Q ( FibTree c ( Leftize a ) )
standardize ( TLeaf ) = TLeaf
standardize ( ILeaf ) = ILeaf
standardize t = do
x < - pullLeft t
rmap standardize x
lcamap : : FibTree a b - > Proxy ( a : : Int ) - > FibTree a ?
lcamap f n t@(TTT l r ) | count l = = n = f t
| count l < n = f ( n - count l ) r
| otherwise = lcamap f n l
pullRight ( )
treerecursor : : FibTree a b - > ( l - > r - > c ) - > ( leaf - > c ) - > c
treerecursor ( TTT l r ) f g = f l r
treerecursor ( TLeaf ) = g Tleaf
class Standardize a b | a -> b where
standardize :: FibTree c a -> Q (FibTree c b)
instance Standardize a b where
type family Append a b where
Append (a,b) c = (a, Append b c)
Append a c = (a, c)
type family Leftize where
Leftize (a,b) = Append (Leftize a) (Leftize b)
Leftize a = a
fullrightassoc = standardize
completerightassoc
bmove = fmove braid fmove'
standardize :: FibTree c a -> Q (FibTree c (Leftize a))
standardize (TLeaf) = TLeaf
standardize (ILeaf) = ILeaf
standardize t = do
x <- pullLeft t
rmap standardize x
lcamap :: FibTree a b -> Proxy (a :: Int) -> FibTree a ?
lcamap f n t@(TTT l r) | count l == n = f t
| count l < n = lcamap f (n - count l) r
| otherwise = lcamap f n l
pullRight ()
treerecursor :: FibTree a b -> (l -> r -> c) -> (leaf -> c) -> c
treerecursor (TTT l r) f g = f l r
treerecursor (TLeaf) = g Tleaf
-} |
832ff63fc9847acdf30d54dfe05c2c43c1517653ea88cf2ba93a7df530d4d0d0 | BrianChevalier/matrix-compare | project.clj | (defproject matrix-compare "0.1.0"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[org.clojure/clojurescript "1.10.773"]
[reagent "0.10.0"]]
:plugins [[lein-cljsbuild "1.1.7"]
[lein-figwheel "0.5.20"]]
:clean-targets ^{:protect false}
[:target-path
[:cljsbuild :builds :app :compiler :output-dir]
[:cljsbuild :builds :app :compiler :output-to]]
:resource-paths ["public"]
:figwheel {:http-server-root "."
:nrepl-port 7002
:nrepl-middleware [cider.piggieback/wrap-cljs-repl]
:css-dirs ["public/css"]}
:cljsbuild {:builds {:app
{:source-paths ["src" "env/dev/cljs"]
:compiler
{:main "matrix-compare.dev"
:output-to "public/js/app.js"
:output-dir "public/js/out"
:asset-path "js/out"
:source-map true
:optimizations :none
:pretty-print true}
:figwheel
{:on-jsload "matrix-compare.core/mount-root"
:open-urls [":3449/index.html"]}}
:release
{:source-paths ["src" "env/prod/cljs"]
:compiler
{:output-to "public/js/app.js"
:output-dir "target/release"
:optimizations :advanced
:infer-externs true
:pretty-print false}}}}
:aliases {"package" ["do" "clean" ["cljsbuild" "once" "release"]]}
:profiles {:dev {:source-paths ["src" "env/dev/clj"]
:dependencies [[binaryage/devtools "1.0.2"]
[figwheel-sidecar "0.5.20"]
[nrepl "0.7.0"]
[cider/piggieback "0.5.0"]]}})
| null | https://raw.githubusercontent.com/BrianChevalier/matrix-compare/c6c1263fba031f7d4a44bf36593d99310b0c553e/project.clj | clojure | (defproject matrix-compare "0.1.0"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[org.clojure/clojurescript "1.10.773"]
[reagent "0.10.0"]]
:plugins [[lein-cljsbuild "1.1.7"]
[lein-figwheel "0.5.20"]]
:clean-targets ^{:protect false}
[:target-path
[:cljsbuild :builds :app :compiler :output-dir]
[:cljsbuild :builds :app :compiler :output-to]]
:resource-paths ["public"]
:figwheel {:http-server-root "."
:nrepl-port 7002
:nrepl-middleware [cider.piggieback/wrap-cljs-repl]
:css-dirs ["public/css"]}
:cljsbuild {:builds {:app
{:source-paths ["src" "env/dev/cljs"]
:compiler
{:main "matrix-compare.dev"
:output-to "public/js/app.js"
:output-dir "public/js/out"
:asset-path "js/out"
:source-map true
:optimizations :none
:pretty-print true}
:figwheel
{:on-jsload "matrix-compare.core/mount-root"
:open-urls [":3449/index.html"]}}
:release
{:source-paths ["src" "env/prod/cljs"]
:compiler
{:output-to "public/js/app.js"
:output-dir "target/release"
:optimizations :advanced
:infer-externs true
:pretty-print false}}}}
:aliases {"package" ["do" "clean" ["cljsbuild" "once" "release"]]}
:profiles {:dev {:source-paths ["src" "env/dev/clj"]
:dependencies [[binaryage/devtools "1.0.2"]
[figwheel-sidecar "0.5.20"]
[nrepl "0.7.0"]
[cider/piggieback "0.5.0"]]}})
|
|
d09d4f74b8106251ca8a775136ebe6cc8b2e5db766e514bd0ef0c8d0cfeb48b0 | CryptoKami/cryptokami-core | UtilSpec.hs | # LANGUAGE ExistentialQuantification #
{-# LANGUAGE TypeFamilies #-}
-- | Specification of Pos.Client.Txp.Util
module Test.Pos.Client.Txp.UtilSpec
( spec
) where
import Universum
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Set as S
import Formatting (build, hex, left, sformat, shown, (%), (%.))
import Test.Hspec (Spec, describe)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Discard (..), Gen, Testable, arbitrary, choose)
import Test.QuickCheck.Monadic (forAllM, stop)
import Pos.Client.Txp.Addresses (MonadAddresses (..))
import Pos.Client.Txp.Util (InputSelectionPolicy (..), TxError (..), TxOutputs,
TxWithSpendings, createMTx, createRedemptionTx,
isNotEnoughMoneyTxError)
import Pos.Core (Address, BlockVersionData (..), Coeff (..), TxFeePolicy (..),
TxSizeLinear (..), makePubKeyAddressBoot, makeRedeemAddress,
unsafeIntegerToCoin)
import Pos.Core.Txp (Tx (..), TxAux (..), TxId, TxIn (..), TxOut (..), TxOutAux (..))
import Pos.Crypto (RedeemSecretKey, SafeSigner, SecretKey, decodeHash, fakeSigner,
redeemToPublic, toPublic)
import Pos.DB (gsAdoptedBVData)
import Pos.Txp (Utxo)
import Pos.Util.QuickCheck.Arbitrary (nonrepeating)
import Pos.Util.QuickCheck.Property (stopProperty)
import Pos.Util.Util (leftToPanic)
import Test.Pos.Configuration (withDefConfigurations)
import Test.Pos.Client.Txp.Mode (HasTxpConfigurations, TxpTestMode, TxpTestProperty,
withBVData)
----------------------------------------------------------------------------
-- Tests
----------------------------------------------------------------------------
spec :: Spec
spec = withDefConfigurations $
describe "Client.Txp.Util" $ do
describe "createMTx" $ createMTxSpec
GHC does n't support impredicative polymorphism so we need a wrapper
-- for the list below to typecheck.
data TestFunctionWrapper
= forall prop. (Testable prop) => TestFunctionWrapper (InputSelectionPolicy -> prop)
createMTxSpec :: HasTxpConfigurations => Spec
createMTxSpec = do
let inputSelectionPolicies =
[ ("Grouped inputs", OptimizeForSecurity)
, ("Ungrouped inputs", OptimizeForHighThroughput)
]
let testSpecs =
[ (createMTxWorksWhenWeAreRichDesc, TestFunctionWrapper createMTxWorksWhenWeAreRichSpec)
, (stabilizationDoesNotFailDesc, TestFunctionWrapper stabilizationDoesNotFailSpec)
, (feeIsNonzeroDesc, TestFunctionWrapper feeIsNonzeroSpec)
, (manyUtxoTo1Desc, TestFunctionWrapper manyUtxoTo1Spec)
, (manyAddressesTo1Desc, TestFunctionWrapper manyAddressesTo1Spec)
, (manyAddressesToManyDesc, TestFunctionWrapper manyAddressesToManySpec)
, (txWithRedeemOutputFailsDesc, TestFunctionWrapper txWithRedeemOutputFailsSpec)
, (feeForManyAddressesDesc, TestFunctionWrapper feeForManyAddressesSpec)
]
for_ inputSelectionPolicies $ \(inputSelectionDesc, policy) ->
describe inputSelectionDesc . for_ testSpecs $ \(funcDesc, TestFunctionWrapper func) ->
prop funcDesc (func policy)
prop redemptionDesc redemptionSpec
prop groupedPolicyDesc groupedPolicySpec
prop ungroupedPolicyDesc ungroupedPolicySpec
where
createMTxWorksWhenWeAreRichDesc =
"Transaction is created successfully when we have 1 input with 1M coins " <>
"and 1 output with 1 coin"
stabilizationDoesNotFailDesc =
"FailedToStabilize is not thrown " <>
"when there is 1 input with 200k coins and 1 output with 1 coin"
feeIsNonzeroDesc =
"An attempt to create a tx for 1 coin when we have 100k coins fails " <>
"because of the fee"
manyUtxoTo1Desc =
"Transaction is created successfully when we have 10 items in Utxo " <>
"for a single address with 100k coins each"
manyAddressesTo1Desc =
"Transaction is created successfully when we have 10 items in Utxo " <>
"for 10 different addresses with 100k coins each and 1 output with 1 coin"
manyAddressesToManyDesc =
"Transaction is created successfully when we have 10 items in Utxo " <>
"for 10 different addresses with 100k coins each and 10 outputs with 1 coin each"
redemptionDesc =
"Redemption transaction is created successfully"
txWithRedeemOutputFailsDesc =
"An attempt to create a tx with a redeem address as an output fails"
feeForManyAddressesDesc =
"Fee evaluation succeedes when many addresses are used"
groupedPolicyDesc =
"The amount of used inputs equals the amount of available inputs"
ungroupedPolicyDesc =
"The amount of used inputs is as small as possible"
testCreateMTx
:: HasTxpConfigurations
=> CreateMTxParams
-> TxpTestProperty (Either TxError (TxAux, NonEmpty TxOut))
testCreateMTx CreateMTxParams{..} = lift $
createMTx mempty cmpInputSelectionPolicy cmpUtxo (getSignerFromList cmpSigners)
cmpOutputs cmpAddrData
createMTxWorksWhenWeAreRichSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
createMTxWorksWhenWeAreRichSpec inputSelectionPolicy =
forAllM gen $ \txParams@CreateMTxParams{..} -> do
txOrError <- testCreateMTx txParams
case txOrError of
Left err -> stopProperty $ sformat ("Failed to create tx: "%build) err
Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
where
gen = makeManyAddressesToManyParams inputSelectionPolicy 1 1000000 1 1
stabilizationDoesNotFailSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
stabilizationDoesNotFailSpec inputSelectionPolicy = do
forAllM gen $ \txParams@CreateMTxParams{..} -> do
txOrError <- testCreateMTx txParams
case txOrError of
Left err@FailedToStabilize -> stopProperty $ pretty err
Left _ -> return ()
Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
where
gen = makeManyAddressesToManyParams inputSelectionPolicy 1 200000 1 1
feeIsNonzeroSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
feeIsNonzeroSpec inputSelectionPolicy = do
forAllM gen $ \txParams@CreateMTxParams{..} -> do
txOrError <- testCreateMTx txParams
case txOrError of
Left (NotEnoughMoney _) -> return ()
Left err -> stopProperty $ pretty err
Right _ -> stopProperty $
"Transaction was created even though there were " <>
"not enough funds for the fee"
where
gen = makeManyAddressesToManyParams inputSelectionPolicy 1 100000 1 1
manyUtxoTo1Spec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
manyUtxoTo1Spec inputSelectionPolicy = do
forAllM gen $ \txParams@CreateMTxParams{..} -> do
txOrError <- testCreateMTx txParams
case txOrError of
Left err -> stopProperty $ pretty err
Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
where
gen = makeManyUtxoTo1Params inputSelectionPolicy 10 100000 1
manyAddressesTo1Spec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
manyAddressesTo1Spec inputSelectionPolicy = do
forAllM gen $ \txParams@CreateMTxParams{..} -> do
txOrError <- testCreateMTx txParams
case txOrError of
Left err -> stopProperty $ pretty err
Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
where
gen = makeManyAddressesToManyParams inputSelectionPolicy 10 100000 1 1
manyAddressesToManySpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
manyAddressesToManySpec inputSelectionPolicy = do
forAllM gen $ \txParams@CreateMTxParams{..} -> do
txOrError <- testCreateMTx txParams
case txOrError of
Left err -> stopProperty $ pretty err
Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
where
gen = makeManyAddressesToManyParams inputSelectionPolicy 10 100000 10 1
redemptionSpec :: HasTxpConfigurations => TxpTestProperty ()
redemptionSpec = do
forAllM genParams $ \(CreateRedemptionTxParams {..}) -> do
txOrError <- createRedemptionTx crpUtxo crpRsk crpOutputs
case txOrError of
Left err -> stopProperty $ pretty err
Right _ -> return ()
where
genParams = do
crpRsk <- arbitrary
skTo <- arbitrary
let txOutAuxInput = generateRedeemTxOutAux 1 crpRsk
txOutAuxOutput = generateTxOutAux 1 skTo
crpUtxo = one (TxInUtxo (unsafeIntegerToTxId 0) 0, txOutAuxInput)
crpOutputs = one txOutAuxOutput
pure CreateRedemptionTxParams {..}
txWithRedeemOutputFailsSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
txWithRedeemOutputFailsSpec inputSelectionPolicy = do
forAllM genParams $ \(CreateMTxParams {..}) -> do
txOrError <-
createMTx mempty cmpInputSelectionPolicy cmpUtxo
(getSignerFromList cmpSigners)
cmpOutputs cmpAddrData
case txOrError of
Left (OutputIsRedeem _) -> return ()
Left err -> stopProperty $ pretty err
Right _ -> stopProperty $
sformat ("Transaction to a redeem address was created")
where
genParams = do
txOutAuxOutput <- generateRedeemTxOutAux 1 <$> arbitrary
params <- makeManyAddressesToManyParams inputSelectionPolicy 1 1000000 1 1
pure params{ cmpOutputs = one txOutAuxOutput }
feeForManyAddressesSpec
:: HasTxpConfigurations
=> InputSelectionPolicy
-> Bool
-> TxpTestProperty ()
feeForManyAddressesSpec inputSelectionPolicy manyAddrs =
forAllM (choose (5, 20)) $
\(Coeff . fromInteger -> feePolicySlope) ->
forAllM (choose (10000, 100000)) $
\(Coeff . fromInteger -> feePolicyConstTerm) ->
forAllM (choose (10000, 100000)) $
\toSpend ->
forAllM (choose (1000, 10000)) $
\perAddrAmount ->
forAllM (mkParams 100 perAddrAmount toSpend) $
\params ->
withTxFeePolicy feePolicyConstTerm feePolicySlope $ do
-- tx builder should find this utxo to be enough for construction
txOrError <- testCreateMTx params
txAux <- case txOrError of
Left err ->
if isNotEnoughMoneyTxError err
then stop Discard
else stopProperty $ sformat ("On first attempt: "%build) err
Right (txAux, _) ->
return txAux
-- even if utxo size is barely enough - fee stabilization should achieve
-- success as well
-- 'succ' is needed here because current algorithm may fail to stabilize fee if
-- almost all money are spent
let enoughInputs = succ . length . _txInputs $ taTx txAux
utxo' = M.fromList . take enoughInputs . M.toList $ cmpUtxo params
params' = params { cmpUtxo = utxo' }
txOrError' <- testCreateMTx params'
case txOrError' of
Left err -> stopProperty $ sformat ("On second attempt: "%build) err
Right _ -> return ()
where
considering two corner cases of utxo outputs distribution
mkParams
| manyAddrs = makeManyAddressesTo1Params inputSelectionPolicy
| otherwise = makeManyUtxoTo1Params inputSelectionPolicy
groupedPolicySpec :: HasTxpConfigurations => TxpTestProperty ()
groupedPolicySpec =
forAllM gen $ testCreateMTx >=> \case
Left err -> stopProperty $ pretty err
Right (txAux, _) ->
let picked = length . _txInputs . taTx $ txAux
in unless (picked == utxoNum) . stopProperty
$ sformat ("Only "%build%" inputs were used instead of all of the inputs") picked
where
utxoNum = 10
gen = makeManyUtxoTo1Params OptimizeForSecurity (fromIntegral utxoNum) 1000000 1
ungroupedPolicySpec :: HasTxpConfigurations => TxpTestProperty ()
ungroupedPolicySpec =
forAllM gen $ testCreateMTx >=> \case
Left err -> stopProperty $ pretty err
Right (txAux, _) ->
let picked = length . _txInputs . taTx $ txAux
in unless (picked == 1) . stopProperty
$ sformat ("Only "%build%" inputs were used instead of just 1 input") picked
where
gen = makeManyUtxoTo1Params OptimizeForHighThroughput 10 1000000 1
----------------------------------------------------------------------------
-- Helpers
----------------------------------------------------------------------------
-- | Container for parameters of `createMTx`.
data CreateMTxParams = CreateMTxParams
{ cmpInputSelectionPolicy :: InputSelectionPolicy
-- ^ Input selection policy
, cmpUtxo :: !Utxo
^ Unspent transaction outputs .
, cmpSigners :: !(NonEmpty (SafeSigner, Address))
^ Wrappers around secret keys for addresses in Utxo .
, cmpOutputs :: !TxOutputs
^ A ( nonempty ) list of desired tx outputs .
, cmpAddrData :: !(AddrData TxpTestMode)
-- ^ Data that is normally used for creation of change addresses.
-- In tests, it is always `()`.
} deriving Show
-- | Container for parameters of `createRedemptionTx`.
-- The parameters mirror those of `createMTx` almost perfectly.
data CreateRedemptionTxParams = CreateRedemptionTxParams
{ crpUtxo :: !Utxo
, crpRsk :: !RedeemSecretKey
, crpOutputs :: !TxOutputs
} deriving Show
getSignerFromList :: NonEmpty (SafeSigner, Address) -> Address -> Maybe SafeSigner
getSignerFromList (HM.fromList . map swap . toList -> hm) =
\addr -> HM.lookup addr hm
makeManyUtxoTo1Params :: InputSelectionPolicy -> Int -> Integer -> Integer -> Gen CreateMTxParams
makeManyUtxoTo1Params inputSelectionPolicy numFrom amountEachFrom amountTo = do
~[skFrom, skTo] <- nonrepeating 2
let txOutAuxInput = generateTxOutAux amountEachFrom skFrom
txOutAuxOutput = generateTxOutAux amountTo skTo
cmpInputSelectionPolicy = inputSelectionPolicy
cmpUtxo = M.fromList
[(TxInUtxo (unsafeIntegerToTxId 0) (fromIntegral k), txOutAuxInput) |
k <- [0..numFrom-1]]
cmpSigners = one $ makeSigner skFrom
cmpOutputs = one txOutAuxOutput
cmpAddrData = ()
pure CreateMTxParams {..}
makeManyAddressesToManyParams
:: InputSelectionPolicy
-> Int
-> Integer
-> Int
-> Integer
-> Gen CreateMTxParams
makeManyAddressesToManyParams inputSelectionPolicy numFrom amountEachFrom numTo amountEachTo = do
sks <- nonrepeating (numFrom + numTo)
let (sksFrom, sksTo) = splitAt numFrom sks
cmpSignersList = map makeSigner sksFrom
cmpSigners = NE.fromList cmpSignersList
txOutAuxInputs = map (generateTxOutAux amountEachFrom) sksFrom
txOutAuxOutputs = map (generateTxOutAux amountEachTo) sksTo
cmpInputSelectionPolicy = inputSelectionPolicy
cmpUtxo = M.fromList
[(TxInUtxo (unsafeIntegerToTxId $ fromIntegral k) 0, txOutAux) |
(k, txOutAux) <- zip [0..numFrom-1] txOutAuxInputs]
cmpOutputs = NE.fromList txOutAuxOutputs
cmpAddrData = ()
pure CreateMTxParams {..}
makeManyAddressesTo1Params :: InputSelectionPolicy -> Int -> Integer -> Integer -> Gen CreateMTxParams
makeManyAddressesTo1Params inputSelectionPolicy numFrom amountEachFrom amountEachTo =
makeManyAddressesToManyParams inputSelectionPolicy numFrom amountEachFrom 1 amountEachTo
ensureTxMakesSense
:: HasTxpConfigurations
=> TxWithSpendings -> Utxo -> TxOutputs -> TxpTestProperty ()
ensureTxMakesSense (_, neTxOut) utxo _ = do
unless (S.fromList txOutUsed `S.isSubsetOf` S.fromList txOutAvailable) $
stopProperty $
sformat ("Used some inputs that were not available!\n"%
"Available: "%shown%"\n"%
"Used: "%shown
) txOutAvailable txOutUsed
where
txOutAvailable = map toaOut $ M.elems utxo
txOutUsed = NE.toList neTxOut
unsafeIntegerToTxId :: Integer -> TxId
unsafeIntegerToTxId n =
leftToPanic "unsafeIntegerToTxId: " $ decodeHash $
sformat (left 64 '0' %. hex) n
makeTxOutAux :: Integer -> Address -> TxOutAux
makeTxOutAux amount addr =
let coin = unsafeIntegerToCoin amount
txOut = TxOut addr coin
in TxOutAux txOut
generateTxOutAux :: Integer -> SecretKey -> TxOutAux
generateTxOutAux amount sk =
makeTxOutAux amount (secretKeyToAddress sk)
generateRedeemTxOutAux :: Integer -> RedeemSecretKey -> TxOutAux
generateRedeemTxOutAux amount rsk =
makeTxOutAux amount (makeRedeemAddress $ redeemToPublic rsk)
secretKeyToAddress :: SecretKey -> Address
secretKeyToAddress = makePubKeyAddressBoot . toPublic
makeSigner :: SecretKey -> (SafeSigner, Address)
makeSigner sk = (fakeSigner sk, secretKeyToAddress sk)
withTxFeePolicy
:: HasTxpConfigurations
=> Coeff -> Coeff -> TxpTestProperty () -> TxpTestProperty ()
withTxFeePolicy a b action = do
let policy = TxFeePolicyTxSizeLinear $ TxSizeLinear a b
bvd <- gsAdoptedBVData
withBVData bvd{ bvdTxFeePolicy = policy } action
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/client/test/Test/Pos/Client/Txp/UtilSpec.hs | haskell | # LANGUAGE TypeFamilies #
| Specification of Pos.Client.Txp.Util
--------------------------------------------------------------------------
Tests
--------------------------------------------------------------------------
for the list below to typecheck.
tx builder should find this utxo to be enough for construction
even if utxo size is barely enough - fee stabilization should achieve
success as well
'succ' is needed here because current algorithm may fail to stabilize fee if
almost all money are spent
--------------------------------------------------------------------------
Helpers
--------------------------------------------------------------------------
| Container for parameters of `createMTx`.
^ Input selection policy
^ Data that is normally used for creation of change addresses.
In tests, it is always `()`.
| Container for parameters of `createRedemptionTx`.
The parameters mirror those of `createMTx` almost perfectly. | # LANGUAGE ExistentialQuantification #
module Test.Pos.Client.Txp.UtilSpec
( spec
) where
import Universum
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Set as S
import Formatting (build, hex, left, sformat, shown, (%), (%.))
import Test.Hspec (Spec, describe)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Discard (..), Gen, Testable, arbitrary, choose)
import Test.QuickCheck.Monadic (forAllM, stop)
import Pos.Client.Txp.Addresses (MonadAddresses (..))
import Pos.Client.Txp.Util (InputSelectionPolicy (..), TxError (..), TxOutputs,
TxWithSpendings, createMTx, createRedemptionTx,
isNotEnoughMoneyTxError)
import Pos.Core (Address, BlockVersionData (..), Coeff (..), TxFeePolicy (..),
TxSizeLinear (..), makePubKeyAddressBoot, makeRedeemAddress,
unsafeIntegerToCoin)
import Pos.Core.Txp (Tx (..), TxAux (..), TxId, TxIn (..), TxOut (..), TxOutAux (..))
import Pos.Crypto (RedeemSecretKey, SafeSigner, SecretKey, decodeHash, fakeSigner,
redeemToPublic, toPublic)
import Pos.DB (gsAdoptedBVData)
import Pos.Txp (Utxo)
import Pos.Util.QuickCheck.Arbitrary (nonrepeating)
import Pos.Util.QuickCheck.Property (stopProperty)
import Pos.Util.Util (leftToPanic)
import Test.Pos.Configuration (withDefConfigurations)
import Test.Pos.Client.Txp.Mode (HasTxpConfigurations, TxpTestMode, TxpTestProperty,
withBVData)
spec :: Spec
spec = withDefConfigurations $
describe "Client.Txp.Util" $ do
describe "createMTx" $ createMTxSpec
GHC does n't support impredicative polymorphism so we need a wrapper
data TestFunctionWrapper
= forall prop. (Testable prop) => TestFunctionWrapper (InputSelectionPolicy -> prop)
-- | Spec for 'createMTx': every entry of @testSpecs@ is run once per input
-- selection policy, followed by the policy-independent properties.
createMTxSpec :: HasTxpConfigurations => Spec
createMTxSpec = do
    let inputSelectionPolicies =
            [ ("Grouped inputs", OptimizeForSecurity)
            , ("Ungrouped inputs", OptimizeForHighThroughput)
            ]
    let testSpecs =
            [ (createMTxWorksWhenWeAreRichDesc, TestFunctionWrapper createMTxWorksWhenWeAreRichSpec)
            , (stabilizationDoesNotFailDesc, TestFunctionWrapper stabilizationDoesNotFailSpec)
            , (feeIsNonzeroDesc, TestFunctionWrapper feeIsNonzeroSpec)
            , (manyUtxoTo1Desc, TestFunctionWrapper manyUtxoTo1Spec)
            , (manyAddressesTo1Desc, TestFunctionWrapper manyAddressesTo1Spec)
            , (manyAddressesToManyDesc, TestFunctionWrapper manyAddressesToManySpec)
            , (txWithRedeemOutputFailsDesc, TestFunctionWrapper txWithRedeemOutputFailsSpec)
            , (feeForManyAddressesDesc, TestFunctionWrapper feeForManyAddressesSpec)
            ]
    -- Cross product: each test under each policy's describe group.
    for_ inputSelectionPolicies $ \(inputSelectionDesc, policy) ->
        describe inputSelectionDesc . for_ testSpecs $ \(funcDesc, TestFunctionWrapper func) ->
            prop funcDesc (func policy)
    prop redemptionDesc redemptionSpec
    prop groupedPolicyDesc groupedPolicySpec
    prop ungroupedPolicyDesc ungroupedPolicySpec
  where
    createMTxWorksWhenWeAreRichDesc =
        "Transaction is created successfully when we have 1 input with 1M coins " <>
        "and 1 output with 1 coin"
    stabilizationDoesNotFailDesc =
        "FailedToStabilize is not thrown " <>
        "when there is 1 input with 200k coins and 1 output with 1 coin"
    feeIsNonzeroDesc =
        "An attempt to create a tx for 1 coin when we have 100k coins fails " <>
        "because of the fee"
    manyUtxoTo1Desc =
        "Transaction is created successfully when we have 10 items in Utxo " <>
        "for a single address with 100k coins each"
    manyAddressesTo1Desc =
        "Transaction is created successfully when we have 10 items in Utxo " <>
        "for 10 different addresses with 100k coins each and 1 output with 1 coin"
    manyAddressesToManyDesc =
        "Transaction is created successfully when we have 10 items in Utxo " <>
        "for 10 different addresses with 100k coins each and 10 outputs with 1 coin each"
    redemptionDesc =
        "Redemption transaction is created successfully"
    txWithRedeemOutputFailsDesc =
        "An attempt to create a tx with a redeem address as an output fails"
    feeForManyAddressesDesc =
        "Fee evaluation succeedes when many addresses are used"
    groupedPolicyDesc =
        "The amount of used inputs equals the amount of available inputs"
    ungroupedPolicyDesc =
        "The amount of used inputs is as small as possible"
-- | Run 'createMTx' in the test monad with the given parameter bundle.
testCreateMTx
    :: HasTxpConfigurations
    => CreateMTxParams
    -> TxpTestProperty (Either TxError (TxAux, NonEmpty TxOut))
testCreateMTx CreateMTxParams{..} = lift $
    createMTx mempty cmpInputSelectionPolicy cmpUtxo (getSignerFromList cmpSigners)
    cmpOutputs cmpAddrData
-- | 1 input with 1M coins, 1 output with 1 coin: creation must succeed and
-- the resulting tx must spend only available inputs.
createMTxWorksWhenWeAreRichSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
createMTxWorksWhenWeAreRichSpec inputSelectionPolicy =
    forAllM gen $ \txParams@CreateMTxParams{..} -> do
        txOrError <- testCreateMTx txParams
        case txOrError of
            Left err -> stopProperty $ sformat ("Failed to create tx: "%build) err
            Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
  where
    gen = makeManyAddressesToManyParams inputSelectionPolicy 1 1000000 1 1
-- | With 1 input of 200k coins and 1 output of 1 coin, fee stabilization
-- must not throw 'FailedToStabilize'; other errors are tolerated.
stabilizationDoesNotFailSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
stabilizationDoesNotFailSpec inputSelectionPolicy = do
    forAllM gen $ \txParams@CreateMTxParams{..} -> do
        txOrError <- testCreateMTx txParams
        case txOrError of
            Left err@FailedToStabilize -> stopProperty $ pretty err
            Left _ -> return ()
            Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
  where
    gen = makeManyAddressesToManyParams inputSelectionPolicy 1 200000 1 1
-- | Spending everything we have (100k) must fail with 'NotEnoughMoney',
-- because the fee pushes the total above the available funds.
feeIsNonzeroSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
feeIsNonzeroSpec inputSelectionPolicy = do
    forAllM gen $ \txParams@CreateMTxParams{..} -> do
        txOrError <- testCreateMTx txParams
        case txOrError of
            Left (NotEnoughMoney _) -> return ()
            Left err -> stopProperty $ pretty err
            Right _ -> stopProperty $
                "Transaction was created even though there were " <>
                "not enough funds for the fee"
  where
    gen = makeManyAddressesToManyParams inputSelectionPolicy 1 100000 1 1
-- | 10 utxo entries belonging to one address (100k each) -> 1 output:
-- creation must succeed.
manyUtxoTo1Spec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
manyUtxoTo1Spec inputSelectionPolicy = do
    forAllM gen $ \txParams@CreateMTxParams{..} -> do
        txOrError <- testCreateMTx txParams
        case txOrError of
            Left err -> stopProperty $ pretty err
            Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
  where
    gen = makeManyUtxoTo1Params inputSelectionPolicy 10 100000 1
-- | 10 utxo entries on 10 distinct addresses (100k each) -> 1 output:
-- creation must succeed.
manyAddressesTo1Spec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
manyAddressesTo1Spec inputSelectionPolicy = do
    forAllM gen $ \txParams@CreateMTxParams{..} -> do
        txOrError <- testCreateMTx txParams
        case txOrError of
            Left err -> stopProperty $ pretty err
            Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
  where
    gen = makeManyAddressesToManyParams inputSelectionPolicy 10 100000 1 1
-- | 10 utxo entries on 10 distinct addresses (100k each) -> 10 outputs of
-- 1 coin each: creation must succeed.
manyAddressesToManySpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
manyAddressesToManySpec inputSelectionPolicy = do
    forAllM gen $ \txParams@CreateMTxParams{..} -> do
        txOrError <- testCreateMTx txParams
        case txOrError of
            Left err -> stopProperty $ pretty err
            Right tx -> ensureTxMakesSense tx cmpUtxo cmpOutputs
  where
    gen = makeManyAddressesToManyParams inputSelectionPolicy 10 100000 10 1
-- | 'createRedemptionTx' must succeed for a single redeem input paying to
-- a single ordinary output.
redemptionSpec :: HasTxpConfigurations => TxpTestProperty ()
redemptionSpec = do
    forAllM genParams $ \(CreateRedemptionTxParams {..}) -> do
        txOrError <- createRedemptionTx crpUtxo crpRsk crpOutputs
        case txOrError of
            Left err -> stopProperty $ pretty err
            Right _ -> return ()
  where
    -- One redeem-address input (1 coin) and one regular output (1 coin).
    genParams = do
        crpRsk <- arbitrary
        skTo <- arbitrary
        let txOutAuxInput = generateRedeemTxOutAux 1 crpRsk
            txOutAuxOutput = generateTxOutAux 1 skTo
            crpUtxo = one (TxInUtxo (unsafeIntegerToTxId 0) 0, txOutAuxInput)
            crpOutputs = one txOutAuxOutput
        pure CreateRedemptionTxParams {..}
-- | Sending *to* a redeem address is forbidden: 'createMTx' must return
-- 'OutputIsRedeem'.
txWithRedeemOutputFailsSpec :: HasTxpConfigurations => InputSelectionPolicy -> TxpTestProperty ()
txWithRedeemOutputFailsSpec inputSelectionPolicy = do
    forAllM genParams $ \(CreateMTxParams {..}) -> do
        txOrError <-
            createMTx mempty cmpInputSelectionPolicy cmpUtxo
                      (getSignerFromList cmpSigners)
                      cmpOutputs cmpAddrData
        case txOrError of
            Left (OutputIsRedeem _) -> return ()
            Left err -> stopProperty $ pretty err
            Right _ -> stopProperty $
                sformat ("Transaction to a redeem address was created")
  where
    -- Rich params, but the single output is replaced by a redeem address.
    genParams = do
        txOutAuxOutput <- generateRedeemTxOutAux 1 <$> arbitrary
        params <- makeManyAddressesToManyParams inputSelectionPolicy 1 1000000 1 1
        pure params{ cmpOutputs = one txOutAuxOutput }
-- | Create a tx over 100 inputs under a random linear fee policy, then
-- retry with the utxo restricted to (used inputs + 1) entries: the second
-- attempt must also succeed. @manyAddrs@ selects between one-address-many-
-- utxo and many-addresses utxo distributions.
feeForManyAddressesSpec
    :: HasTxpConfigurations
    => InputSelectionPolicy
    -> Bool
    -> TxpTestProperty ()
feeForManyAddressesSpec inputSelectionPolicy manyAddrs =
    forAllM (choose (5, 20)) $
        \(Coeff . fromInteger -> feePolicySlope) ->
    forAllM (choose (10000, 100000)) $
        \(Coeff . fromInteger -> feePolicyConstTerm) ->
    forAllM (choose (10000, 100000)) $
        \toSpend ->
    forAllM (choose (1000, 10000)) $
        \perAddrAmount ->
    forAllM (mkParams 100 perAddrAmount toSpend) $
        \params ->
    withTxFeePolicy feePolicyConstTerm feePolicySlope $ do
        txOrError <- testCreateMTx params
        txAux <- case txOrError of
            Left err ->
                if isNotEnoughMoneyTxError err
                -- random fee/amounts may legitimately be unaffordable
                then stop Discard
                else stopProperty $ sformat ("On first attempt: "%build) err
            Right (txAux, _) ->
                return txAux
        let enoughInputs = succ . length . _txInputs $ taTx txAux
            utxo' = M.fromList . take enoughInputs . M.toList $ cmpUtxo params
            params' = params { cmpUtxo = utxo' }
        txOrError' <- testCreateMTx params'
        case txOrError' of
            Left err -> stopProperty $ sformat ("On second attempt: "%build) err
            Right _ -> return ()
  where
    -- considering two corner cases of utxo outputs distribution
    mkParams
        | manyAddrs = makeManyAddressesTo1Params inputSelectionPolicy
        | otherwise = makeManyUtxoTo1Params inputSelectionPolicy
-- | Under 'OptimizeForSecurity' all 10 utxo entries of the address must be
-- picked as inputs, not just those needed to cover the output.
groupedPolicySpec :: HasTxpConfigurations => TxpTestProperty ()
groupedPolicySpec =
    forAllM gen $ testCreateMTx >=> \case
        Left err -> stopProperty $ pretty err
        Right (txAux, _) ->
            let picked = length . _txInputs . taTx $ txAux
            in unless (picked == utxoNum) . stopProperty
                $ sformat ("Only "%build%" inputs were used instead of all of the inputs") picked
  where
    utxoNum = 10
    gen = makeManyUtxoTo1Params OptimizeForSecurity (fromIntegral utxoNum) 1000000 1
-- | Under 'OptimizeForHighThroughput' only a single (sufficient) input may
-- be picked out of the 10 available.
ungroupedPolicySpec :: HasTxpConfigurations => TxpTestProperty ()
ungroupedPolicySpec =
    forAllM gen $ testCreateMTx >=> \case
        Left err -> stopProperty $ pretty err
        Right (txAux, _) ->
            let picked = length . _txInputs . taTx $ txAux
            in unless (picked == 1) . stopProperty
                $ sformat ("Only "%build%" inputs were used instead of just 1 input") picked
  where
    gen = makeManyUtxoTo1Params OptimizeForHighThroughput 10 1000000 1
-- | Container for parameters of 'createMTx'.
data CreateMTxParams = CreateMTxParams
    { cmpInputSelectionPolicy :: InputSelectionPolicy
      -- ^ Input selection policy
    , cmpUtxo :: !Utxo
      -- ^ Unspent transaction outputs.
    , cmpSigners :: !(NonEmpty (SafeSigner, Address))
      -- ^ Wrappers around secret keys for addresses in Utxo.
    , cmpOutputs :: !TxOutputs
      -- ^ A (nonempty) list of desired tx outputs.
    , cmpAddrData :: !(AddrData TxpTestMode)
      -- ^ Data that is normally used for creation of change addresses.
      -- In tests, it is always @()@.
    } deriving Show
-- | Container for parameters of 'createRedemptionTx'.
-- The parameters mirror those of 'createMTx' almost perfectly.
data CreateRedemptionTxParams = CreateRedemptionTxParams
    { crpUtxo :: !Utxo
    , crpRsk :: !RedeemSecretKey
    , crpOutputs :: !TxOutputs
    } deriving Show
-- | Turn a list of (signer, address) pairs into an address->signer lookup.
-- The hash map is built once and shared by all lookups.
getSignerFromList :: NonEmpty (SafeSigner, Address) -> Address -> Maybe SafeSigner
getSignerFromList signers =
    let hm = HM.fromList . map swap . toList $ signers
    in \addr -> HM.lookup addr hm
-- | Generate params with @numFrom@ utxo entries all owned by one address
-- (@amountEachFrom@ coins each) and a single output of @amountTo@ coins.
makeManyUtxoTo1Params :: InputSelectionPolicy -> Int -> Integer -> Integer -> Gen CreateMTxParams
makeManyUtxoTo1Params inputSelectionPolicy numFrom amountEachFrom amountTo = do
    ~[skFrom, skTo] <- nonrepeating 2
    let txOutAuxInput = generateTxOutAux amountEachFrom skFrom
        txOutAuxOutput = generateTxOutAux amountTo skTo
        cmpInputSelectionPolicy = inputSelectionPolicy
        -- same tx id, distinct output indices: one address, many utxo items
        cmpUtxo = M.fromList
            [(TxInUtxo (unsafeIntegerToTxId 0) (fromIntegral k), txOutAuxInput) |
                k <- [0..numFrom-1]]
        cmpSigners = one $ makeSigner skFrom
        cmpOutputs = one txOutAuxOutput
        cmpAddrData = ()
    pure CreateMTxParams {..}
-- | Generate params with @numFrom@ utxo entries on distinct addresses
-- (@amountEachFrom@ coins each) and @numTo@ outputs of @amountEachTo@
-- coins each.
makeManyAddressesToManyParams
    :: InputSelectionPolicy
    -> Int
    -> Integer
    -> Int
    -> Integer
    -> Gen CreateMTxParams
makeManyAddressesToManyParams inputSelectionPolicy numFrom amountEachFrom numTo amountEachTo = do
    sks <- nonrepeating (numFrom + numTo)
    let (sksFrom, sksTo) = splitAt numFrom sks
        cmpSignersList = map makeSigner sksFrom
        cmpSigners = NE.fromList cmpSignersList
        txOutAuxInputs = map (generateTxOutAux amountEachFrom) sksFrom
        txOutAuxOutputs = map (generateTxOutAux amountEachTo) sksTo
        cmpInputSelectionPolicy = inputSelectionPolicy
        -- distinct tx ids, output index 0: one utxo item per address
        cmpUtxo = M.fromList
            [(TxInUtxo (unsafeIntegerToTxId $ fromIntegral k) 0, txOutAux) |
                (k, txOutAux) <- zip [0..numFrom-1] txOutAuxInputs]
        cmpOutputs = NE.fromList txOutAuxOutputs
        cmpAddrData = ()
    pure CreateMTxParams {..}
-- | Special case of 'makeManyAddressesToManyParams' with a single output.
makeManyAddressesTo1Params :: InputSelectionPolicy -> Int -> Integer -> Integer -> Gen CreateMTxParams
makeManyAddressesTo1Params inputSelectionPolicy numFrom amountEachFrom amountEachTo =
    makeManyAddressesToManyParams inputSelectionPolicy numFrom amountEachFrom 1 amountEachTo
-- | Sanity check: every output the created tx spends must come from the
-- supplied utxo.
ensureTxMakesSense
    :: HasTxpConfigurations
    => TxWithSpendings -> Utxo -> TxOutputs -> TxpTestProperty ()
ensureTxMakesSense (_, neTxOut) utxo _ = do
    unless (S.fromList txOutUsed `S.isSubsetOf` S.fromList txOutAvailable) $
        stopProperty $
            sformat ("Used some inputs that were not available!\n"%
                     "Available: "%shown%"\n"%
                     "Used: "%shown
                    ) txOutAvailable txOutUsed
  where
    txOutAvailable = map toaOut $ M.elems utxo
    txOutUsed = NE.toList neTxOut
-- | Make a 'TxId' from an integer by rendering it as a 64-character,
-- zero-padded hex string; panics if that string fails to decode as a hash.
unsafeIntegerToTxId :: Integer -> TxId
unsafeIntegerToTxId n =
    leftToPanic "unsafeIntegerToTxId: " $ decodeHash $
        sformat (left 64 '0' %. hex) n
-- | Build a 'TxOutAux' paying @amount@ coins to @addr@; panics via
-- 'unsafeIntegerToCoin' on an invalid coin amount.
makeTxOutAux :: Integer -> Address -> TxOutAux
makeTxOutAux amount addr =
    TxOutAux $ TxOut addr (unsafeIntegerToCoin amount)
-- | 'TxOutAux' paying @amount@ to the address derived from a secret key.
generateTxOutAux :: Integer -> SecretKey -> TxOutAux
generateTxOutAux amount sk =
    makeTxOutAux amount (secretKeyToAddress sk)
-- | 'TxOutAux' paying @amount@ to the redeem address of a redeem key.
generateRedeemTxOutAux :: Integer -> RedeemSecretKey -> TxOutAux
generateRedeemTxOutAux amount rsk =
    makeTxOutAux amount (makeRedeemAddress $ redeemToPublic rsk)
-- | Bootstrap-era public-key address for a secret key.
secretKeyToAddress :: SecretKey -> Address
secretKeyToAddress sk = makePubKeyAddressBoot (toPublic sk)
-- | Fake signer plus its address for a secret key (test-only signing).
makeSigner :: SecretKey -> (SafeSigner, Address)
makeSigner sk = (fakeSigner sk, secretKeyToAddress sk)
-- | Run an action with the adopted block version data's fee policy
-- replaced by the linear policy given by coefficients @a@ and @b@.
withTxFeePolicy
    :: HasTxpConfigurations
    => Coeff -> Coeff -> TxpTestProperty () -> TxpTestProperty ()
withTxFeePolicy a b action = do
    let policy = TxFeePolicyTxSizeLinear $ TxSizeLinear a b
    bvd <- gsAdoptedBVData
    withBVData bvd{ bvdTxFeePolicy = policy } action
|
4caa01990cfa5e9dfb80b43329a37b15ddcebf94100351a3b97fe43f4ba45788 | fulcro-legacy/fulcro-incubator | routing_ws.cljs | (ns fulcro.incubator.routing-ws
(:require-macros [fulcro.incubator.dynamic-routing :refer [defsc-route-target defrouter]])
(:require
[fulcro.incubator.ui-state-machines :as uism :refer [defstatemachine]]
[fulcro-spec.core :refer [assertions component]]
[nubank.workspaces.core :as ws]
[fulcro.client.primitives :as prim :refer [defsc]]
[fulcro.client.mutations :refer [defmutation]]
[fulcro.server :as server]
[fulcro-spec.core :refer [specification assertions]]
[fulcro.client.dom :as dom]
[nubank.workspaces.model :as wsm]
[nubank.workspaces.card-types.fulcro :as ct.fulcro]
[nubank.workspaces.lib.fulcro-portal :as f.portal]
[fulcro.client.mutations :as m]
[fulcro.client.data-fetch :as df]
[fulcro.incubator.dynamic-routing :as dr]
[fulcro.client :as fc]
[fulcro.logging :as log]))
;; so we can see UI state machine debug messages
(log/set-level! :debug)

;; Holds the running Fulcro app so callbacks outside React (e.g. the User
;; route's abort-request!) can reach it.
(defonce fulcro-app (atom nil))
;; Static routing target ("pane1"): enters immediately, logs on leave.
(defsc-route-target Pane1 [this {:keys [:y] :as props}]
  {:query           [:y]
   :ident           (fn [] [:COMPONENT/by-id :pane1])
   :initial-state   {:y 1}
   :route-segment   (fn [] ["pane1"])
   :route-cancelled (fn [_])
   :will-enter      (fn [_ _] (dr/route-immediate [:COMPONENT/by-id :pane1]))
   :will-leave      (fn [_]
                      (js/console.log "Leaving pane1")
                      true)}
  (dom/div (str "PANE 1")))
(declare SettingsPaneRouter)

;; Mutation used by Pane2's deferred route: after "doing stuff" it signals
;; the router that the target is ready to display.
(defmutation do-stuff-and-finish-routing [params]
  (action [{:keys [reconciler]}]
    (js/console.log "Pretending to do stuff to app state before finishing the route")
    (dr/target-ready! reconciler [:COMPONENT/by-id :pane2])))
;; Deferred routing target ("pane2"): entering transacts
;; do-stuff-and-finish-routing, which calls target-ready! to complete the
;; route. Renders a button demonstrating a relative route back to pane1.
(defsc-route-target Pane2 [this {:keys [:x] :as props}]
  {:query           [:x]
   :ident           (fn [] [:COMPONENT/by-id :pane2])
   :initial-state   {:x 1}
   :route-segment   (fn [] ["pane2"])
   :route-cancelled (fn [_])
   :will-enter      (fn [reconciler _] (dr/route-deferred [:COMPONENT/by-id :pane2]
                                         (fn [] (prim/transact! reconciler `[(do-stuff-and-finish-routing {})]))))
   :will-leave      (fn [_] (js/console.log "Leave pane2")
                      true)}
  (dom/div
    (dom/button {:onClick #(dr/change-route-relative this SettingsPaneRouter ["pane1"])} "Relative route to pane 1")
    (str "PANE 2")))
;; Nested router switching between the two panes, plus its React factory.
(defrouter SettingsPaneRouter [this props]
  {:router-targets [Pane1 Pane2]})

(def ui-settings-pane-router (prim/factory SettingsPaneRouter))
;; Routing target ("settings") that embeds the nested SettingsPaneRouter.
(defsc-route-target Settings [this {:keys [:x :panes] :as props}]
  {:query           [:x {:panes (prim/get-query SettingsPaneRouter)}]
   :ident           (fn [] [:COMPONENT/by-id :settings])
   :initial-state   {:x     :param/x
                     :panes {}}
   :route-segment   (fn [] ["settings"])
   :route-cancelled (fn [_])
   :will-enter      (fn [_ _] (dr/route-immediate [:COMPONENT/by-id :settings]))
   :will-leave      (fn [_] (js/console.log "Leaving settings") true)}
  (dom/div
    (str "Settings: x = " x)
    (ui-settings-pane-router panes)))
;; Routing target ("user" :user-id): defers the route on a server load of
;; the user entity; dr/target-ready is run as the load's post-mutation.
(defsc-route-target User [this {:keys [user/id user/name] :as props}]
  {:query           [:user/id :user/name]
   :ident           [:user/id :user/id]
   :route-segment   (fn [] ["user" :user-id])
   :route-cancelled (fn [params]
                      ;; js network mock doesn't support cancel, but this is how you'd do it:
                      (when @fulcro-app
                        (fc/abort-request! @fulcro-app :user-load)))
   :will-enter      (fn [reconciler {:keys [user-id]}]
                      ;; path params arrive as strings; a non-numeric id yields nil (no route)
                      (when-let [user-id (some-> user-id (js/parseInt))]
                        (dr/route-deferred [:user/id user-id]
                          #(df/load reconciler [:user/id user-id] User {:post-mutation        `dr/target-ready
                                                                       :abort-id             :user-load
                                                                       :marker               false
                                                                       :post-mutation-params {:target [:user/id user-id]}}))))
   :will-leave      (fn [props] (js/console.log "Leaving user " (:user/id props)) true)}
  (dom/div (str "User: name = " name " with computed props " (prim/get-computed this))))
;; Top-level router over Settings and User. componentDidUpdate logs the
;; routing state machine's pending and current path for debugging; the body
;; renders busy/error placeholders keyed on the router's current-state.
(defrouter RootRouter2 [this {:keys [current-state pending-path-segment route-factory route-props]}]
  {:router-targets     [Settings User]
   :componentDidUpdate (fn [pp ps]
                         (let [current-state        (uism/get-active-state this ::RootRouter2)
                               sm-env               (uism/state-machine-env (prim/component->state-map this)
                                                      nil ::RootRouter2 :noop {})
                               pending-path-segment (uism/retrieve sm-env :pending-path-segment)
                               current-path         (uism/retrieve sm-env :path-segment)]
                           (js/console.log :rr2-updated current-state :pending-path pending-path-segment
                             :current-path current-path)))}
  (case current-state
    :pending (dom/div "Loading a user..."
               (dom/button {:onClick #(dr/change-route this ["settings" "pane2"])} "cancel"))
    :failed  (dom/div
               (dom/div "Ooops!")
               (dom/button {:onClick #(dr/change-route this ["settings"])} "Go to settings"))
    (dom/div "No route selected.")))

(def ui-root-router-2 (prim/factory RootRouter2))
;; Demo root: buttons exercising the various routes plus the root router.
(defsc Root2 [this {:keys [router] :as props}]
  {:query         [{:router (prim/get-query RootRouter2)}]
   :initial-state {:router {:x 2}}}
  (dom/div
    (dom/button {:onClick (fn [] (dr/change-route this ["user" 1] {:deferred-timeout 10
                                                                   :error-timeout    100}))} "Change route to /user/1")
    (dom/button {:onClick (fn [] (dr/change-route this ["settings"]))} "Change route to /settings")
    (dom/button {:onClick (fn [] (dr/change-route this ["settings" "pane1"]))} "Change route to /settings/pane1")
    (dom/button {:onClick (fn [] (dr/change-route this ["settings" "pane2"]))} "Change route to /settings/pane2")
    (dom/button {:onClick (fn [] (js/console.log (dr/current-route this this)))} "Log current route")
    (ui-root-router-2 (prim/computed router {:X 1}))))
;; Mock server-side resolver: answers [:user/id id] loads with a stub user.
(server/defquery-entity :user/id
  (value [env id params]
    {:user/id   id
     :user/name (str "User " id)}))
;; Workspaces card hosting the demo app with a 2s-latency mock server.
(ws/defcard router-2-demo-card
  {::wsm/card-width  2
   ::wsm/align       {:flex 1}
   ::wsm/card-height 13}
  (ct.fulcro/fulcro-card
    {::f.portal/root       Root2
     ::f.portal/wrap-root? false
     ::f.portal/app        {:started-callback (fn [{:keys [reconciler] :as app}]
                                                (reset! fulcro-app app)
                                                ;; simulate what it will look like on the first frame
                                                (js/setTimeout
                                                  #(dr/change-route reconciler ["settings"])
                                                  2000))
                            :networking       (server/new-server-emulator (server/fulcro-parser) 2000)}}))
| null | https://raw.githubusercontent.com/fulcro-legacy/fulcro-incubator/72885fcc944e16bf98aa42b5d1e518c0ce4574db/src/workspaces/fulcro/incubator/routing_ws.cljs | clojure | so we can see UI state machine debug messages
js network mock doesn't support cancel, but this is how you'd do it: | (ns fulcro.incubator.routing-ws
(:require-macros [fulcro.incubator.dynamic-routing :refer [defsc-route-target defrouter]])
(:require
[fulcro.incubator.ui-state-machines :as uism :refer [defstatemachine]]
[fulcro-spec.core :refer [assertions component]]
[nubank.workspaces.core :as ws]
[fulcro.client.primitives :as prim :refer [defsc]]
[fulcro.client.mutations :refer [defmutation]]
[fulcro.server :as server]
[fulcro-spec.core :refer [specification assertions]]
[fulcro.client.dom :as dom]
[nubank.workspaces.model :as wsm]
[nubank.workspaces.card-types.fulcro :as ct.fulcro]
[nubank.workspaces.lib.fulcro-portal :as f.portal]
[fulcro.client.mutations :as m]
[fulcro.client.data-fetch :as df]
[fulcro.incubator.dynamic-routing :as dr]
[fulcro.client :as fc]
[fulcro.logging :as log]))
(log/set-level! :debug)
(defonce fulcro-app (atom nil))
(defsc-route-target Pane1 [this {:keys [:y] :as props}]
{:query [:y]
:ident (fn [] [:COMPONENT/by-id :pane1])
:initial-state {:y 1}
:route-segment (fn [] ["pane1"])
:route-cancelled (fn [_])
:will-enter (fn [_ _] (dr/route-immediate [:COMPONENT/by-id :pane1]))
:will-leave (fn [_]
(js/console.log "Leaving pane1")
true)}
(dom/div (str "PANE 1")))
(declare SettingsPaneRouter)
(defmutation do-stuff-and-finish-routing [params]
(action [{:keys [reconciler]}]
(js/console.log "Pretending to do stuff to app state before finishing the route")
(dr/target-ready! reconciler [:COMPONENT/by-id :pane2])))
(defsc-route-target Pane2 [this {:keys [:x] :as props}]
{:query [:x]
:ident (fn [] [:COMPONENT/by-id :pane2])
:initial-state {:x 1}
:route-segment (fn [] ["pane2"])
:route-cancelled (fn [_])
:will-enter (fn [reconciler _] (dr/route-deferred [:COMPONENT/by-id :pane2]
(fn [] (prim/transact! reconciler `[(do-stuff-and-finish-routing {})]))))
:will-leave (fn [_] (js/console.log "Leave pane2")
true)}
(dom/div
(dom/button {:onClick #(dr/change-route-relative this SettingsPaneRouter ["pane1"])} "Relative route to pane 1")
(str "PANE 2")))
(defrouter SettingsPaneRouter [this props]
{:router-targets [Pane1 Pane2]})
(def ui-settings-pane-router (prim/factory SettingsPaneRouter))
(defsc-route-target Settings [this {:keys [:x :panes] :as props}]
{:query [:x {:panes (prim/get-query SettingsPaneRouter)}]
:ident (fn [] [:COMPONENT/by-id :settings])
:initial-state {:x :param/x
:panes {}}
:route-segment (fn [] ["settings"])
:route-cancelled (fn [_])
:will-enter (fn [_ _] (dr/route-immediate [:COMPONENT/by-id :settings]))
:will-leave (fn [_] (js/console.log "Leaving settings") true)}
(dom/div
(str "Settings: x = " x)
(ui-settings-pane-router panes)))
(defsc-route-target User [this {:keys [user/id user/name] :as props}]
{:query [:user/id :user/name]
:ident [:user/id :user/id]
:route-segment (fn [] ["user" :user-id])
:route-cancelled (fn [params]
(when @fulcro-app
(fc/abort-request! @fulcro-app :user-load)))
:will-enter (fn [reconciler {:keys [user-id]}]
(when-let [user-id (some-> user-id (js/parseInt))]
(dr/route-deferred [:user/id user-id]
#(df/load reconciler [:user/id user-id] User {:post-mutation `dr/target-ready
:abort-id :user-load
:marker false
:post-mutation-params {:target [:user/id user-id]}}))))
:will-leave (fn [props] (js/console.log "Leaving user " (:user/id props)) true)}
(dom/div (str "User: name = " name " with computed props " (prim/get-computed this))))
(defrouter RootRouter2 [this {:keys [current-state pending-path-segment route-factory route-props]}]
{:router-targets [Settings User]
:componentDidUpdate (fn [pp ps]
(let [current-state (uism/get-active-state this ::RootRouter2)
sm-env (uism/state-machine-env (prim/component->state-map this)
nil ::RootRouter2 :noop {})
pending-path-segment (uism/retrieve sm-env :pending-path-segment)
current-path (uism/retrieve sm-env :path-segment)]
(js/console.log :rr2-updated current-state :pending-path pending-path-segment
:current-path current-path)))}
(case current-state
:pending (dom/div "Loading a user..."
(dom/button {:onClick #(dr/change-route this ["settings" "pane2"])} "cancel"))
:failed (dom/div
(dom/div "Ooops!")
(dom/button {:onClick #(dr/change-route this ["settings"])} "Go to settings"))
(dom/div "No route selected.")))
(def ui-root-router-2 (prim/factory RootRouter2))
(defsc Root2 [this {:keys [router] :as props}]
{:query [{:router (prim/get-query RootRouter2)}]
:initial-state {:router {:x 2}}}
(dom/div
(dom/button {:onClick (fn [] (dr/change-route this ["user" 1] {:deferred-timeout 10
:error-timeout 100}))} "Change route to /user/1")
(dom/button {:onClick (fn [] (dr/change-route this ["settings"]))} "Change route to /settings")
(dom/button {:onClick (fn [] (dr/change-route this ["settings" "pane1"]))} "Change route to /settings/pane1")
(dom/button {:onClick (fn [] (dr/change-route this ["settings" "pane2"]))} "Change route to /settings/pane2")
(dom/button {:onClick (fn [] (js/console.log (dr/current-route this this)))} "Log current route")
(ui-root-router-2 (prim/computed router {:X 1}))))
(server/defquery-entity :user/id
(value [env id params]
{:user/id id
:user/name (str "User " id)}))
(ws/defcard router-2-demo-card
{::wsm/card-width 2
::wsm/align {:flex 1}
::wsm/card-height 13}
(ct.fulcro/fulcro-card
{::f.portal/root Root2
::f.portal/wrap-root? false
::f.portal/app {:started-callback (fn [{:keys [reconciler] :as app}]
(reset! fulcro-app app)
simulate what it will look like on the first frame
(js/setTimeout
#(dr/change-route reconciler ["settings"])
2000))
:networking (server/new-server-emulator (server/fulcro-parser) 2000)}}))
|
5e87e536ea750f13b2b1d3e75d44b0da1a1217e1d58f1d3ce22f614d3479d83c | oscaro/clj-gcloud-common | coerce_test.clj | (ns clj-gcloud.coerce-test
(:require
[clj-gcloud.coerce :as sut]
[clojure.test :refer [deftest is testing]])
(:import
(com.google.pubsub.v1 Topic)))
;; Generate a ->clj coercion for Topic exposing only its :name getter.
(sut/create-clj-coerce Topic [:name])

(deftest coercion-test
  (testing "It should map getters to keywords"
    ;; Build a protobuf Topic and check getName surfaces as :name.
    (let [^Topic topic (-> (Topic/newBuilder) (.setName "test") .build)]
      (is (= (.getName topic) (:name (sut/->clj topic)))))))
| null | https://raw.githubusercontent.com/oscaro/clj-gcloud-common/fe1709759e2633cc968652b4c27d78bf6ef0243b/test/clj_gcloud/coerce_test.clj | clojure | (ns clj-gcloud.coerce-test
(:require
[clj-gcloud.coerce :as sut]
[clojure.test :refer [deftest is testing]])
(:import
(com.google.pubsub.v1 Topic)))
(sut/create-clj-coerce Topic [:name])
(deftest coercion-test
(testing "It should map getters to keywords"
(let [^Topic topic (-> (Topic/newBuilder) (.setName "test") .build)]
(is (= (.getName topic) (:name (sut/->clj topic)))))))
|
|
8f08fecbf267062b4f7c8df3d59705923b91f7b124e59a0d9a670e9df5f36977 | copton/ocram | Ruab.hs | # LANGUAGE GeneralizedNewtypeDeriving #
module Ocram.Ruab where
imports { { { 1
import Text.JSON
import Control.Applicative ((<$>))
import Control.Arrow ((***))
import Control.Monad (guard)
import qualified Data.ByteString.Char8 as BS
{ { { 1
encode_debug_info = BS.pack . encodeStrict
{ { { 1
decode_debug_info string = (resultToEither . decodeStrict . BS.unpack) string
types { { { 1
{ { { 2
= TRow {getTRow :: Int}
deriving (Eq, Num, Ord)
{ { { 2
= PRow {getPRow :: Int}
deriving (Eq, Num, Ord)
{ { { 2
= ERow {getERow :: Int}
deriving (Eq, Num, Ord)
type ThreadId = Int
{ { { 2
-- |Map between P-rows and E-rows
= [(PLocation, [ERow])]
{ { { 3
-- |A location in the P-code
plThread :: Maybe ThreadId
, plRow :: PRow
, plBlockingCall :: Bool
} deriving (Eq, Ord)
{ { { 2
-- |Map between T-rows and P-rows
MapTP {
mtpMaxTRow :: TRow
, mtpMaxPRow :: PRow
, mtpMapping :: [(TRow, PRow)]
}
{ { { 2
-- |Mapping of Variable names
= [(Scope, RenameMap)]
{ { { 3
Scope {
scStart :: PRow
, scEnd :: PRow
} deriving (Eq, Ord)
{ { { 3
{ { { 3
= AutomaticVariable {
varThread :: ThreadId
, varTName :: String
}
| StaticVariable {
varTName :: String
}
{ { { 3
-- |A fully qualified name of a variable
= String
{ { { 2
-- |Information about a source file
fileName :: FilePath
, fileChecksum :: String
}
{ { { 2
-- |Information about a T-thread
threadId :: Int
, threadStart :: String
, threadExecution :: String
, threadCritical :: [String]
} deriving Show
{ { { 2
|The debugging information that is exchanged between Ocram and Ruab
diTcode :: File -- ^the T-code file
, diPcode :: BS.ByteString -- ^the P-code
, diEcode :: File -- ^the E-code file
, diMtp :: MapTP -- ^mapping between T- and P-rows
, diMpe :: MapPE -- ^mapping between P- and E-rows
, diVm :: VarMap -- ^renaming of variables defined in critical functions
, diThreads :: [Thread] -- ^the T-threads
, diOsApi :: [String] -- ^name of T-code API functions
, diNcfs :: [String] -- ^name non-critical functions
}
instances { { { 1
{ { { 2
readJSON val = TRow <$> readJSON val
showJSON = showJSON . getTRow
{ { { 2
readJSON val = PRow <$> readJSON val
showJSON = showJSON . getPRow
{ { { 2
readJSON val = ERow <$> readJSON val
showJSON = showJSON . getERow
{ { { 2
readJSON val = do
(t, r, b) <- readJSON val
return $ PLocation (decodeTid t) r b
showJSON (PLocation t r b) = showJSON (encodeTid t, r, b)
{ { { 2
readJSON (JSObject obj) = do
let [mr, ma] = map snd $ fromJSObject obj
(mtr, mpr) <- readJSON mr
ma' <- readJSON ma
return $ MapTP (TRow mtr) (PRow mpr) (map (TRow *** PRow) ma')
readJSON x = readFail "MapTP" x
showJSON (MapTP (TRow mtr) (PRow mpr) ma) = (JSObject . toJSObject) [
("max", showJSON (mtr, mpr))
, ("map", showJSON (map (getTRow *** getPRow) ma))
]
{ { { 2
readJSON val = do
(s, e) <- readJSON val
return $ Scope s e
showJSON (Scope s e) = showJSON (s, e)
{ { { 2
readJSON val = do
(c, o) <- readJSON val
case c :: Int of
0 -> do
(t, n) <- readJSON o
return $ AutomaticVariable t n
1 -> do
n <- readJSON o
return $ StaticVariable n
_ -> readFail "Variable" val
showJSON (AutomaticVariable t n) = showJSON (0 :: Int, showJSON (t, n))
showJSON (StaticVariable n) = showJSON (1 :: Int, showJSON n)
{ { { 2
readJSON val = readJSON val >>= \[n,c] -> return $ File n c
showJSON (File n c) = showJSON [n, c]
{ { { 2
showJSON (Thread tid ts te tc) = (JSObject . toJSObject) [
("id", showJSON tid)
, ("start", showJSON ts)
, ("execute" , showJSON te)
, ("critical", showJSON tc)
]
readJSON (JSObject obj) = do
let [tid, ts, te, tc] = map snd $ fromJSObject obj
tid' <- readJSON tid
ts' <- readJSON ts
te' <- readJSON te
tc' <- readJSON tc
return $ Thread tid' ts' te' tc'
readJSON x = readFail "Thread" x
{ { { 2
showJSON (DebugInfo tcode pcode ecode mtp mpe vm ts api ncfs) = (JSObject . toJSObject) [
("tcode", showJSON tcode)
, ("pcode", showJSON pcode)
, ("ecode", showJSON ecode)
, ("mtp", showJSON mtp)
, ("mpe", showJSON mpe)
, ("vm", showJSON vm)
, ("threads", showJSON ts)
, ("api", showJSON api)
, ("ncfs", showJSON ncfs)
]
readJSON (JSObject obj) = do
let [tcode, pcode, ecode, mtp, mpe, vm, ts, api, ncfs] = map snd $ fromJSObject obj
[tcode', ecode'] <- mapM readJSON [tcode, ecode]
pcode' <- readJSON pcode
mtp' <- readJSON mtp
mpe' <- readJSON mpe
vm' <- readJSON vm
ts' <- readJSON ts
api' <- readJSON api
ncfs' <- readJSON ncfs
return $ DebugInfo tcode' pcode' ecode' mtp' mpe' vm' ts' api' ncfs'
readJSON x = readFail "DebugInfo" x
{ { { 2
show (TRow x) = show x
{ { { 2
show (ERow x) = show x
{ { { 2
show (PRow x) = show x
utils { { { 1
-- | Uniform parse error for an unexpected JSON value of the named type.
readFail :: String -> JSValue -> Result a
readFail type_ x =
    Error $ concat ["unexpected JSON value for ", type_, " type: '", show x, "'"]
-- | Encode an optional thread id as an Int, using -1 as the sentinel for
-- 'Nothing' (inverse of 'decodeTid').
encodeTid :: Maybe ThreadId -> Int
encodeTid = maybe (-1) id
-- | Decode a thread id, mapping the sentinel -1 back to 'Nothing'
-- (inverse of 'encodeTid').
decodeTid :: Int -> Maybe ThreadId
decodeTid tid
    | tid == -1 = Nothing
    | otherwise = Just tid
mapper { { { 1
{ { { 2
-- | Map a T-code row to its P-code row using the MapTP translation table.
-- Returns 'Nothing' when the row is non-positive or maps past the last
-- P-row.
t2p_row (MapTP _ prows locs) trow = do
    guard (trow > 0)
    -- the last mapping entry whose T-row is at or before trow
    let (src, dst) = (last . takeWhile ((<=trow) . fst)) locs
    let prow = dst + (PRow . getTRow) (trow - src + 1)
    guard (prow <= prows)
    return prow
{ { { 2
-- | Map a P-code row back to its T-code row. The candidate is validated by
-- round-tripping through 't2p_row'; rows with no T-code counterpart yield
-- 'Nothing'.
p2t_row mtp@(MapTP trows _ locs) prow = do
    guard (prow > 0)
    -- the last mapping entry whose P-row is at or before prow
    let (src, dst) = (last . takeWhile ((<=prow) . snd)) locs
    let trow = src + (TRow . getPRow) (prow - dst - 1)
    guard (trow <= trows)
    prow' <- t2p_row mtp trow
    guard (prow' == prow)
    return trow
| null | https://raw.githubusercontent.com/copton/ocram/c7166eab0187868a52a61017c6d3687e5a1a6162/ruab/src/Ocram/Ruab.hs | haskell | |Map between P-rows and E-rows
|A location in the P-code
|Map between T-rows and P-rows
|Mapping of Variable names
|A fully qualified name of a variable
|Information about a source file
|Information about a T-thread
^the T-code file
^the P-code
^the E-code file
^mapping between T- and P-rows
^mapping between P- and E-rows
^renaming of variables defined in critical functions
^the T-threads
^name of T-code API functions
^name non-critical functions | # LANGUAGE GeneralizedNewtypeDeriving #
module Ocram.Ruab where
imports { { { 1
import Text.JSON
import Control.Applicative ((<$>))
import Control.Arrow ((***))
import Control.Monad (guard)
import qualified Data.ByteString.Char8 as BS
{ { { 1
encode_debug_info = BS.pack . encodeStrict
{ { { 1
decode_debug_info string = (resultToEither . decodeStrict . BS.unpack) string
types { { { 1
{ { { 2
= TRow {getTRow :: Int}
deriving (Eq, Num, Ord)
{ { { 2
= PRow {getPRow :: Int}
deriving (Eq, Num, Ord)
{ { { 2
= ERow {getERow :: Int}
deriving (Eq, Num, Ord)
type ThreadId = Int
{ { { 2
= [(PLocation, [ERow])]
{ { { 3
plThread :: Maybe ThreadId
, plRow :: PRow
, plBlockingCall :: Bool
} deriving (Eq, Ord)
{ { { 2
MapTP {
mtpMaxTRow :: TRow
, mtpMaxPRow :: PRow
, mtpMapping :: [(TRow, PRow)]
}
{ { { 2
= [(Scope, RenameMap)]
{ { { 3
Scope {
scStart :: PRow
, scEnd :: PRow
} deriving (Eq, Ord)
{ { { 3
{ { { 3
= AutomaticVariable {
varThread :: ThreadId
, varTName :: String
}
| StaticVariable {
varTName :: String
}
{ { { 3
= String
{ { { 2
fileName :: FilePath
, fileChecksum :: String
}
{ { { 2
threadId :: Int
, threadStart :: String
, threadExecution :: String
, threadCritical :: [String]
} deriving Show
{ { { 2
|The debugging information that is exchanged between Ocram and Ruab
}
instances { { { 1
{ { { 2
readJSON val = TRow <$> readJSON val
showJSON = showJSON . getTRow
{ { { 2
readJSON val = PRow <$> readJSON val
showJSON = showJSON . getPRow
{ { { 2
readJSON val = ERow <$> readJSON val
showJSON = showJSON . getERow
{ { { 2
readJSON val = do
(t, r, b) <- readJSON val
return $ PLocation (decodeTid t) r b
showJSON (PLocation t r b) = showJSON (encodeTid t, r, b)
{ { { 2
readJSON (JSObject obj) = do
let [mr, ma] = map snd $ fromJSObject obj
(mtr, mpr) <- readJSON mr
ma' <- readJSON ma
return $ MapTP (TRow mtr) (PRow mpr) (map (TRow *** PRow) ma')
readJSON x = readFail "MapTP" x
showJSON (MapTP (TRow mtr) (PRow mpr) ma) = (JSObject . toJSObject) [
("max", showJSON (mtr, mpr))
, ("map", showJSON (map (getTRow *** getPRow) ma))
]
{ { { 2
readJSON val = do
(s, e) <- readJSON val
return $ Scope s e
showJSON (Scope s e) = showJSON (s, e)
{ { { 2
readJSON val = do
(c, o) <- readJSON val
case c :: Int of
0 -> do
(t, n) <- readJSON o
return $ AutomaticVariable t n
1 -> do
n <- readJSON o
return $ StaticVariable n
_ -> readFail "Variable" val
showJSON (AutomaticVariable t n) = showJSON (0 :: Int, showJSON (t, n))
showJSON (StaticVariable n) = showJSON (1 :: Int, showJSON n)
{ { { 2
readJSON val = readJSON val >>= \[n,c] -> return $ File n c
showJSON (File n c) = showJSON [n, c]
{ { { 2
showJSON (Thread tid ts te tc) = (JSObject . toJSObject) [
("id", showJSON tid)
, ("start", showJSON ts)
, ("execute" , showJSON te)
, ("critical", showJSON tc)
]
readJSON (JSObject obj) = do
let [tid, ts, te, tc] = map snd $ fromJSObject obj
tid' <- readJSON tid
ts' <- readJSON ts
te' <- readJSON te
tc' <- readJSON tc
return $ Thread tid' ts' te' tc'
readJSON x = readFail "Thread" x
{ { { 2
showJSON (DebugInfo tcode pcode ecode mtp mpe vm ts api ncfs) = (JSObject . toJSObject) [
("tcode", showJSON tcode)
, ("pcode", showJSON pcode)
, ("ecode", showJSON ecode)
, ("mtp", showJSON mtp)
, ("mpe", showJSON mpe)
, ("vm", showJSON vm)
, ("threads", showJSON ts)
, ("api", showJSON api)
, ("ncfs", showJSON ncfs)
]
readJSON (JSObject obj) = do
let [tcode, pcode, ecode, mtp, mpe, vm, ts, api, ncfs] = map snd $ fromJSObject obj
[tcode', ecode'] <- mapM readJSON [tcode, ecode]
pcode' <- readJSON pcode
mtp' <- readJSON mtp
mpe' <- readJSON mpe
vm' <- readJSON vm
ts' <- readJSON ts
api' <- readJSON api
ncfs' <- readJSON ncfs
return $ DebugInfo tcode' pcode' ecode' mtp' mpe' vm' ts' api' ncfs'
readJSON x = readFail "DebugInfo" x
{ { { 2
show (TRow x) = show x
{ { { 2
show (ERow x) = show x
{ { { 2
show (PRow x) = show x
utils { { { 1
readFail :: String -> JSValue -> Result a
readFail type_ x = Error $ "unexpected JSON value for " ++ type_ ++ " type: '" ++ show x ++ "'"
encodeTid :: Maybe ThreadId -> Int
encodeTid Nothing = -1
encodeTid (Just tid) = tid
decodeTid :: Int -> Maybe ThreadId
decodeTid (-1) = Nothing
decodeTid tid = Just tid
mapper { { { 1
{ { { 2
t2p_row (MapTP _ prows locs) trow = do
guard (trow > 0)
let (src, dst) = (last . takeWhile ((<=trow) . fst)) locs
let prow = dst + (PRow . getTRow) (trow - src + 1)
guard (prow <= prows)
return prow
{ { { 2
p2t_row mtp@(MapTP trows _ locs) prow = do
guard (prow > 0)
let (src, dst) = (last . takeWhile ((<=prow) . snd)) locs
let trow = src + (TRow . getPRow) (prow - dst - 1)
guard (trow <= trows)
prow' <- t2p_row mtp trow
guard (prow' == prow)
return trow
|
5145e3c7dc3a004c9d10bb1e20d72af1e4f81e5e4f4608c6bf55d1e79389bc38 | dinosaure/art | monolith.ml | open Monolith
open PPrint
module Map = Map.Make (struct type t = Art.key let compare (a : Art.key) (b : Art.key) =
String.compare (a :> string) (b :> string) end)
let char_without_d0 () = match Gen.char () with
| '\000' -> Gen.reject ()
| chr -> chr
let key =
easily_constructible
Gen.(fun () -> Art.unsafe_key (string (int 100) char_without_d0 ()))
(fun (key : Art.key) -> string (key :> string))
let value = lt 16
module type INDEX = sig
type t
val make : unit -> t
val insert : t -> Art.key -> int -> unit
val find_opt : t -> Art.key -> int option
val is_empty : t -> bool
end
module Make (R : INDEX) (C : INDEX) = struct
let run t fuel =
declare "make" (unit ^> t) R.make C.make;
declare "is_empty" (t ^> bool) R.is_empty C.is_empty;
declare "insert" (t ^> key ^> value ^> unit) R.insert C.insert;
declare "find_opt" (t ^> key ^> option value) R.find_opt C.find_opt;
main fuel
end
module Reference = struct
type t = (Art.key, int) Hashtbl.t
let make () = Hashtbl.create 0x100
let is_empty tbl = Hashtbl.length tbl = 0
let insert tbl key value = Hashtbl.add tbl key value
let find_opt tbl key = match Hashtbl.find tbl key with
| v -> Some v
| exception Not_found -> None
end
module Equivalence = Make (Reference) (struct include Art type t = int Art.t end)
let () =
let t = declare_abstract_type () in
Equivalence.run t 5
| null | https://raw.githubusercontent.com/dinosaure/art/742474da86def9c64d62ed2dca866584c5c46134/fuzz/monolith.ml | ocaml | open Monolith
open PPrint
module Map = Map.Make (struct type t = Art.key let compare (a : Art.key) (b : Art.key) =
String.compare (a :> string) (b :> string) end)
let char_without_d0 () = match Gen.char () with
| '\000' -> Gen.reject ()
| chr -> chr
let key =
easily_constructible
Gen.(fun () -> Art.unsafe_key (string (int 100) char_without_d0 ()))
(fun (key : Art.key) -> string (key :> string))
let value = lt 16
module type INDEX = sig
type t
val make : unit -> t
val insert : t -> Art.key -> int -> unit
val find_opt : t -> Art.key -> int option
val is_empty : t -> bool
end
module Make (R : INDEX) (C : INDEX) = struct
let run t fuel =
declare "make" (unit ^> t) R.make C.make;
declare "is_empty" (t ^> bool) R.is_empty C.is_empty;
declare "insert" (t ^> key ^> value ^> unit) R.insert C.insert;
declare "find_opt" (t ^> key ^> option value) R.find_opt C.find_opt;
main fuel
end
module Reference = struct
type t = (Art.key, int) Hashtbl.t
let make () = Hashtbl.create 0x100
let is_empty tbl = Hashtbl.length tbl = 0
let insert tbl key value = Hashtbl.add tbl key value
let find_opt tbl key = match Hashtbl.find tbl key with
| v -> Some v
| exception Not_found -> None
end
module Equivalence = Make (Reference) (struct include Art type t = int Art.t end)
let () =
let t = declare_abstract_type () in
Equivalence.run t 5
|
|
dab9badbd296895ca377e45d805c885c6206843fca9335579322fb9dacde54b3 | Hans-Halverson/myte | x86_64_calling_convention.ml | open Asm_calling_convention
open Asm_register
let system_v_calling_convention =
object
inherit calling_convention
val general_params = [| `DI; `SI; `D; `C; `R8; `R9 |]
val float_params = [| `XMM0; `XMM1; `XMM2; `XMM3; `XMM4; `XMM5; `XMM6; `XMM7 |]
val callee_saved_registers = RegSet.of_list [`B; `SP; `BP; `R12; `R13; `R14; `R15]
val caller_saved_registers =
RegSet.of_list
[
`A;
`C;
`D;
`SI;
`DI;
`R8;
`R9;
`R10;
`R11;
`XMM0;
`XMM1;
`XMM2;
`XMM3;
`XMM4;
`XMM5;
`XMM6;
`XMM7;
`XMM8;
`XMM9;
`XMM10;
`XMM11;
`XMM12;
`XMM13;
`XMM14;
`XMM15;
]
method general_params = general_params
method float_params = float_params
method callee_saved_registers = callee_saved_registers
method caller_saved_registers = caller_saved_registers
method calculate_return_register (return_mir_type : Mir_type.Type.t) : Register.t =
match return_mir_type with
| Double -> `XMM0
| _ -> `A
end
| null | https://raw.githubusercontent.com/Hans-Halverson/myte/6ed56a9a7840e70414390deacc2155bd73f755ca/src/asm/x86_64/x86_64_calling_convention.ml | ocaml | open Asm_calling_convention
open Asm_register
let system_v_calling_convention =
object
inherit calling_convention
val general_params = [| `DI; `SI; `D; `C; `R8; `R9 |]
val float_params = [| `XMM0; `XMM1; `XMM2; `XMM3; `XMM4; `XMM5; `XMM6; `XMM7 |]
val callee_saved_registers = RegSet.of_list [`B; `SP; `BP; `R12; `R13; `R14; `R15]
val caller_saved_registers =
RegSet.of_list
[
`A;
`C;
`D;
`SI;
`DI;
`R8;
`R9;
`R10;
`R11;
`XMM0;
`XMM1;
`XMM2;
`XMM3;
`XMM4;
`XMM5;
`XMM6;
`XMM7;
`XMM8;
`XMM9;
`XMM10;
`XMM11;
`XMM12;
`XMM13;
`XMM14;
`XMM15;
]
method general_params = general_params
method float_params = float_params
method callee_saved_registers = callee_saved_registers
method caller_saved_registers = caller_saved_registers
method calculate_return_register (return_mir_type : Mir_type.Type.t) : Register.t =
match return_mir_type with
| Double -> `XMM0
| _ -> `A
end
|
|
b59e13449d9a238c11f6e3dae4cce6081bfcdf3948ea2617d645c00025d87813 | reactiveml/rml | scene_json.ml | Graph viewer
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open Scene
let array_stringify f ch l =
Format.fprintf ch "@[<1>[0";
Array.iter (fun e -> Format.fprintf ch ",@,%a" f e) l;
Format.fprintf ch "]@]"
let string_stringify ch s =
(*XXX Escape! *)
Format.fprintf ch "\"%s\"" s
let color_stringify ch c =
match c with
None ->
Format.fprintf ch "0"
| Some (r, g, b) ->
let h v = truncate (v *. 255.99) in
Format.fprintf ch "@[<1>[0,@,%a]@]"
string_stringify (Format.sprintf "#%02x%02x%02x" (h r) (h g) (h b))
let font_stringify ch (font, size) =
Format.fprintf ch "%a" string_stringify (Format.sprintf "%gpx %s" size font)
let command_stringify ch c =
match c with
Move_to (x, y) ->
Format.fprintf ch "@[<1>[0,@,%g,@,%g]@]" x y
| Curve_to (x1, y1, x2, y2, x3, y3) ->
Format.fprintf ch "@[<1>[1,@,%g,@,%g,@,%g,@,%g,@,%g,@,%g]@]"
x1 y1 x2 y2 x3 y3
let commands_stringify = array_stringify command_stringify
let point_stringify ch (x, y) = Format.fprintf ch "@[<1>[0,@,%g,@,%g]@]" x y
let points_stringify = array_stringify point_stringify
let rect_stringify ch (x1, y1, x2, y2) =
Format.fprintf ch "@[<1>[0,@,%g,@,%g,@,%g,@,%g]@]" x1 y1 x2 y2
let rect_array_stringify = array_stringify rect_stringify
let element_stringify ch e =
match e with
Path (cmds, fill, stroke) ->
Format.fprintf ch "@[<1>[0,@,%a,@,%a,@,%a]@]"
commands_stringify cmds color_stringify fill color_stringify stroke
| Polygon (l, fill, stroke) ->
Format.fprintf ch "@[<1>[1,@,%a,@,%a,@,%a]@]"
points_stringify l color_stringify fill color_stringify stroke
| Ellipse (cx, cy, rx, ry, fill, stroke) ->
Format.fprintf ch "@[<1>[2,@,%g,@,%g,@,%g,@,%g,@,%a,@,%a]@]"
cx cy rx ry color_stringify fill color_stringify stroke
| Text (x, y, txt, font, fill, stroke) ->
Format.fprintf ch "@[<1>[3,@,%g,@,%g,@,%a,@,%a,@,%a,@,%a]@]"
x y string_stringify txt font_stringify font
color_stringify fill color_stringify stroke
let stringify = array_stringify element_stringify
| null | https://raw.githubusercontent.com/reactiveml/rml/d178d49ed923290fa7eee642541bdff3ee90b3b4/toplevel-alt/js/js-of-ocaml/examples/graph_viewer/scene_json.ml | ocaml | XXX Escape! | Graph viewer
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open Scene
let array_stringify f ch l =
Format.fprintf ch "@[<1>[0";
Array.iter (fun e -> Format.fprintf ch ",@,%a" f e) l;
Format.fprintf ch "]@]"
let string_stringify ch s =
Format.fprintf ch "\"%s\"" s
let color_stringify ch c =
match c with
None ->
Format.fprintf ch "0"
| Some (r, g, b) ->
let h v = truncate (v *. 255.99) in
Format.fprintf ch "@[<1>[0,@,%a]@]"
string_stringify (Format.sprintf "#%02x%02x%02x" (h r) (h g) (h b))
let font_stringify ch (font, size) =
Format.fprintf ch "%a" string_stringify (Format.sprintf "%gpx %s" size font)
let command_stringify ch c =
match c with
Move_to (x, y) ->
Format.fprintf ch "@[<1>[0,@,%g,@,%g]@]" x y
| Curve_to (x1, y1, x2, y2, x3, y3) ->
Format.fprintf ch "@[<1>[1,@,%g,@,%g,@,%g,@,%g,@,%g,@,%g]@]"
x1 y1 x2 y2 x3 y3
let commands_stringify = array_stringify command_stringify
let point_stringify ch (x, y) = Format.fprintf ch "@[<1>[0,@,%g,@,%g]@]" x y
let points_stringify = array_stringify point_stringify
let rect_stringify ch (x1, y1, x2, y2) =
Format.fprintf ch "@[<1>[0,@,%g,@,%g,@,%g,@,%g]@]" x1 y1 x2 y2
let rect_array_stringify = array_stringify rect_stringify
let element_stringify ch e =
match e with
Path (cmds, fill, stroke) ->
Format.fprintf ch "@[<1>[0,@,%a,@,%a,@,%a]@]"
commands_stringify cmds color_stringify fill color_stringify stroke
| Polygon (l, fill, stroke) ->
Format.fprintf ch "@[<1>[1,@,%a,@,%a,@,%a]@]"
points_stringify l color_stringify fill color_stringify stroke
| Ellipse (cx, cy, rx, ry, fill, stroke) ->
Format.fprintf ch "@[<1>[2,@,%g,@,%g,@,%g,@,%g,@,%a,@,%a]@]"
cx cy rx ry color_stringify fill color_stringify stroke
| Text (x, y, txt, font, fill, stroke) ->
Format.fprintf ch "@[<1>[3,@,%g,@,%g,@,%a,@,%a,@,%a,@,%a]@]"
x y string_stringify txt font_stringify font
color_stringify fill color_stringify stroke
let stringify = array_stringify element_stringify
|
af1cc0008f8aae630ef661c5d1947c37adf04d34149f0b6f146a9d7a1a3b095f | mxthevs/Caml_bot | storage_trusted_users.ml | type trusted_user = { username : string }
type external_user = {
id : string;
username : string;
}
let ( let* ) = Lwt.bind
module Async = struct
exception User_not_found
let index () =
let read_all =
[%rapper
get_many
{sql|
SELECT @string{id}, @string{username}
FROM trusted_users
|sql}
record_out]
()
in
let* users = Database.dispatch read_all in
users |> List.map (fun { username; _ } -> { username }) |> Lwt.return
let show username =
let read_one =
[%rapper
get_opt
{sql|
SELECT @string{id}, @string{username}
FROM trusted_users
WHERE username = %string{username}
|sql}
record_out]
in
let* database_user = Database.dispatch (read_one ~username) in
let user =
match database_user with
| Some { username; _ } -> Some { username }
| None -> None
in
Lwt.return user
let store username =
let insert =
[%rapper
execute
{sql|
INSERT INTO trusted_users
VALUES(%string{id}, %string{username})
|sql}
record_in]
in
let id = Uuidm.create `V4 |> Uuidm.to_string in
Database.dispatch (insert { id; username })
let destroy name =
let* database_user = show name in
match database_user with
| Some user ->
let delete =
[%rapper
execute
{sql|
DELETE FROM trusted_users
WHERE username = %string{username}
|sql}]
in
Database.dispatch (delete ~username:user.username)
| None -> raise User_not_found
end
let index () =
try Ok (Lwt_main.run (Async.index ())) with
| Database.Query_failed error -> Error (Printf.sprintf "Could not retrieve users: %s" error)
let show name =
try Ok (Lwt_main.run (Async.show name)) with
| Database.Query_failed error -> Error (Printf.sprintf "Could not retrieve user: %s" error)
let store username =
try Ok (Lwt_main.run (Async.store username)) with
| Database.Query_failed error -> Error (`Msg (Printf.sprintf "Could not create user: %s" error))
let destroy name =
try Ok (Lwt_main.run (Async.destroy name)) with
| Async.User_not_found -> Error (`Not_found (Printf.sprintf "user %s not found" name))
| Database.Query_failed error -> Error (`Msg (Printf.sprintf "Could not destroy user: %s" error))
| null | https://raw.githubusercontent.com/mxthevs/Caml_bot/619ce00ef0b7c07ccb9b41425b5c048c71be0ff5/src/storage/storage_trusted_users.ml | ocaml | type trusted_user = { username : string }
type external_user = {
id : string;
username : string;
}
let ( let* ) = Lwt.bind
module Async = struct
exception User_not_found
let index () =
let read_all =
[%rapper
get_many
{sql|
SELECT @string{id}, @string{username}
FROM trusted_users
|sql}
record_out]
()
in
let* users = Database.dispatch read_all in
users |> List.map (fun { username; _ } -> { username }) |> Lwt.return
let show username =
let read_one =
[%rapper
get_opt
{sql|
SELECT @string{id}, @string{username}
FROM trusted_users
WHERE username = %string{username}
|sql}
record_out]
in
let* database_user = Database.dispatch (read_one ~username) in
let user =
match database_user with
| Some { username; _ } -> Some { username }
| None -> None
in
Lwt.return user
let store username =
let insert =
[%rapper
execute
{sql|
INSERT INTO trusted_users
VALUES(%string{id}, %string{username})
|sql}
record_in]
in
let id = Uuidm.create `V4 |> Uuidm.to_string in
Database.dispatch (insert { id; username })
let destroy name =
let* database_user = show name in
match database_user with
| Some user ->
let delete =
[%rapper
execute
{sql|
DELETE FROM trusted_users
WHERE username = %string{username}
|sql}]
in
Database.dispatch (delete ~username:user.username)
| None -> raise User_not_found
end
let index () =
try Ok (Lwt_main.run (Async.index ())) with
| Database.Query_failed error -> Error (Printf.sprintf "Could not retrieve users: %s" error)
let show name =
try Ok (Lwt_main.run (Async.show name)) with
| Database.Query_failed error -> Error (Printf.sprintf "Could not retrieve user: %s" error)
let store username =
try Ok (Lwt_main.run (Async.store username)) with
| Database.Query_failed error -> Error (`Msg (Printf.sprintf "Could not create user: %s" error))
let destroy name =
try Ok (Lwt_main.run (Async.destroy name)) with
| Async.User_not_found -> Error (`Not_found (Printf.sprintf "user %s not found" name))
| Database.Query_failed error -> Error (`Msg (Printf.sprintf "Could not destroy user: %s" error))
|
|
5a867f5a3dba1e38151cfc4c8c89834f072b2013a989fac99b3157298840e6f0 | DSiSc/why3 | typing.ml | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
open Wstdlib
open Ident
open Ptree
open Ty
open Term
open Decl
open Theory
open Dterm
open Ity
open Expr
open Pdecl
open Pmodule
(** debug flags *)
let debug_parse_only = Debug.register_flag "parse_only"
~desc:"Stop@ after@ parsing."
let debug_type_only = Debug.register_flag "type_only"
~desc:"Stop@ after@ type-checking."
(** symbol lookup *)
let rec qloc = function
| Qdot (p, id) -> Loc.join (qloc p) id.id_loc
| Qident id -> id.id_loc
let qloc_last = function
| Qdot (_, id) | Qident id -> id.id_loc
let rec print_qualid fmt = function
| Qdot (p, id) ->
Format.fprintf fmt "%a.%a" print_qualid p Ident.print_decoded id.id_str
| Qident id -> Ident.print_decoded fmt id.id_str
let string_list_of_qualid q =
let rec sloq acc = function
| Qdot (p, id) -> sloq (id.id_str :: acc) p
| Qident id -> id.id_str :: acc in
sloq [] q
exception UnboundSymbol of qualid
let find_qualid get_id find ns q =
let sl = string_list_of_qualid q in
let r = try find ns sl with Not_found ->
Loc.error ~loc:(qloc q) (UnboundSymbol q) in
if Debug.test_flag Glob.flag then Glob.use ~kind:"" (qloc_last q) (get_id r);
r
let find_prop_ns ns q = find_qualid (fun pr -> pr.pr_name) ns_find_pr ns q
let find_tysymbol_ns ns q = find_qualid (fun ts -> ts.ts_name) ns_find_ts ns q
let find_lsymbol_ns ns q = find_qualid (fun ls -> ls.ls_name) ns_find_ls ns q
let find_fsymbol_ns ns q =
let ls = find_lsymbol_ns ns q in
if ls.ls_value <> None then ls else
Loc.error ~loc:(qloc q) (FunctionSymbolExpected ls)
let find_psymbol_ns ns q =
let ls = find_lsymbol_ns ns q in
if ls.ls_value = None then ls else
Loc.error ~loc:(qloc q) (PredicateSymbolExpected ls)
let find_tysymbol tuc q = find_tysymbol_ns (Theory.get_namespace tuc) q
let find_lsymbol tuc q = find_lsymbol_ns (Theory.get_namespace tuc) q
let find_fsymbol tuc q = find_fsymbol_ns (Theory.get_namespace tuc) q
let find_psymbol tuc q = find_psymbol_ns (Theory.get_namespace tuc) q
let find_prop tuc q = find_prop_ns (Theory.get_namespace tuc) q
let find_prop_of_kind k tuc q =
let pr = find_prop tuc q in
match (Mid.find pr.pr_name tuc.uc_known).d_node with
| Dind _ when k = Paxiom -> pr
| Dprop (l,_,_) when l = k -> pr
| _ -> Loc.errorm ~loc:(qloc q) "proposition %a is not %s"
print_qualid q (match k with
| Plemma -> "a lemma" | Paxiom -> "an axiom" | Pgoal -> "a goal")
let find_itysymbol_ns ns q =
find_qualid (fun s -> s.its_ts.ts_name) Pmodule.ns_find_its ns q
let find_xsymbol_ns ns q =
find_qualid (fun s -> s.xs_name) Pmodule.ns_find_xs ns q
let find_prog_symbol_ns ns p =
let get_id_ps = function
| PV pv -> pv.pv_vs.vs_name
| RS rs -> rs.rs_name
(* FIXME: this is incorrect, but we cannot
know the correct symbol at this stage *)
| OO ss -> (Srs.choose ss).rs_name in
find_qualid get_id_ps ns_find_prog_symbol ns p
(** Parsing types *)
let ty_of_pty ns pty =
let rec get_ty = function
| PTtyvar {id_str = x} ->
ty_var (tv_of_string x)
| PTtyapp (q, tyl) ->
let ts = find_tysymbol_ns ns q in
let tyl = List.map get_ty tyl in
Loc.try2 ~loc:(qloc q) ty_app ts tyl
| PTtuple tyl ->
let s = its_tuple (List.length tyl) in
ty_app s.its_ts (List.map get_ty tyl)
| PTarrow (ty1, ty2) ->
ty_func (get_ty ty1) (get_ty ty2)
| PTpure ty | PTparen ty ->
get_ty ty
in
get_ty pty
let dty_of_pty ns pty =
Dterm.dty_of_ty (ty_of_pty ns pty)
let dty_of_opt ns = function
| Some pty -> dty_of_pty ns pty
| None -> Dterm.dty_fresh ()
* typing using destructive type variables
parsed trees intermediate trees typed trees
( Ptree ) ( Dterm ) ( Term )
-----------------------------------------------------------
ppure_type ---dty--- > > ty
lexpr --dterm-- > dterm --term-- > term
parsed trees intermediate trees typed trees
(Ptree) (Dterm) (Term)
-----------------------------------------------------------
ppure_type ---dty---> dty ---ty---> ty
lexpr --dterm--> dterm --term--> term
*)
(** Typing patterns, terms, and formulas *)
let create_user_id {id_str = n; id_ats = attrs; id_loc = loc} =
let get_attrs (attrs, loc) = function
| ATstr attr -> Sattr.add attr attrs, loc | ATpos loc -> attrs, loc in
let attrs, loc = List.fold_left get_attrs (Sattr.empty, loc) attrs in
id_user ~attrs n loc
let parse_record ~loc ns km get_val fl =
let fl = List.map (fun (q,e) -> find_lsymbol_ns ns q, e) fl in
let cs,pjl,flm = Loc.try2 ~loc parse_record km fl in
let get_val pj = get_val cs pj (Mls.find_opt pj flm) in
cs, List.map get_val pjl
let rec dpattern ns km { pat_desc = desc; pat_loc = loc } =
match desc with
| Ptree.Pparen p -> dpattern ns km p
| _ -> (* creative indentation ahead *)
Dterm.dpattern ~loc (match desc with
| Ptree.Pwild -> DPwild
| Ptree.Pparen _ -> assert false (* never *)
| Ptree.Pvar x -> DPvar (create_user_id x)
| Ptree.Papp (q,pl) ->
let pl = List.map (dpattern ns km) pl in
DPapp (find_lsymbol_ns ns q, pl)
| Ptree.Ptuple pl ->
let pl = List.map (dpattern ns km) pl in
DPapp (fs_tuple (List.length pl), pl)
| Ptree.Prec fl ->
let get_val _ _ = function
| Some p -> dpattern ns km p
| None -> Dterm.dpattern DPwild in
let cs,fl = parse_record ~loc ns km get_val fl in
DPapp (cs,fl)
| Ptree.Pas (p,x,false) -> DPas (dpattern ns km p, create_user_id x)
| Ptree.Por (p,q) -> DPor (dpattern ns km p, dpattern ns km q)
| Ptree.Pcast (p,ty) -> DPcast (dpattern ns km p, dty_of_pty ns ty)
| Ptree.Pghost _ | Ptree.Pas (_,_,true) ->
Loc.errorm ~loc "ghost patterns are only allowed in programs")
let quant_var ns (loc, id, gh, ty) =
if gh then Loc.errorm ~loc "ghost variables are only allowed in programs";
Opt.map create_user_id id, dty_of_opt ns ty, Some loc
let loc_cutoff loc13 loc23 loc2 =
let f,l,b,e = Loc.get loc13 in
let _,_,_,w = Loc.get loc23 in
let _,_,_,m = Loc.get loc2 in
Loc.user_position f l b (e - (w - m))
let is_reusable dt = match dt.dt_node with
| DTvar _ | DTgvar _ | DTconst _ | DTtrue | DTfalse -> true
| DTapp (_,[]) -> true
| _ -> false
let mk_var crcmap n dt =
let dty = match dt.dt_dty with
| None -> dty_of_ty ty_bool
| Some dty -> dty in
Dterm.dterm crcmap ?loc:dt.dt_loc (DTvar (n, dty))
let mk_let crcmap ~loc n dt node =
DTlet (dt, id_user n loc, Dterm.dterm crcmap ~loc node)
let mk_closure crcmap loc ls =
let mk dt = Dterm.dterm crcmap ~loc dt in
let mk_v i _ =
Some (id_user ("y" ^ string_of_int i) loc), dty_fresh (), None in
let mk_t (id, dty, _) = mk (DTvar ((Opt.get id).pre_name, dty)) in
let vl = Lists.mapi mk_v ls.ls_args in
DTquant (DTlambda, vl, [], mk (DTapp (ls, List.map mk_t vl)))
(* track the use of labels *)
let at_uses = Hstr.create 5
let rec dterm ns km crcmap gvars at denv {term_desc = desc; term_loc = loc} =
let func_app e el =
List.fold_left (fun e1 (loc, e2) ->
DTfapp (Dterm.dterm crcmap ~loc e1, e2)) e el
in
let rec apply_ls loc ls al l el = match l, el with
| (_::l), (e::el) -> apply_ls loc ls (e::al) l el
| [], _ -> func_app (DTapp (ls, List.rev_map snd al)) el
| _, [] -> func_app (mk_closure crcmap loc ls) (List.rev_append al el)
in
let qualid_app q el = match gvars at q with
| Some v ->
begin match at with
| Some l -> (* check for impact *)
let u = Opt.get (gvars None q) in
if not (pv_equal v u) then
Hstr.replace at_uses l true
| None -> ()
end;
func_app (DTgvar v.pv_vs) el
| None ->
let ls = find_lsymbol_ns ns q in
apply_ls (qloc q) ls [] ls.ls_args el
in
let qualid_app q el = match q with
| Qident {id_str = n} ->
(match denv_get_opt denv n with
| Some d -> func_app d el
| None -> qualid_app q el)
| _ -> qualid_app q el
in
let rec unfold_app e1 e2 el = match e1.term_desc with
| Ptree.Tapply (e11,e12) ->
let e12 = dterm ns km crcmap gvars at denv e12 in
unfold_app e11 e12 ((e1.term_loc, e2)::el)
| Ptree.Tident q ->
qualid_app q ((e1.term_loc, e2)::el)
| _ ->
func_app (DTfapp (dterm ns km crcmap gvars at denv e1, e2)) el
in
Dterm.dterm crcmap ~loc (match desc with
| Ptree.Tident q ->
qualid_app q []
| Ptree.Tidapp (q, tl) ->
let tl = List.map (dterm ns km crcmap gvars at denv) tl in
DTapp (find_lsymbol_ns ns q, tl)
| Ptree.Tapply (e1, e2) ->
unfold_app e1 (dterm ns km crcmap gvars at denv e2) []
| Ptree.Ttuple tl ->
let tl = List.map (dterm ns km crcmap gvars at denv) tl in
DTapp (fs_tuple (List.length tl), tl)
| Ptree.Tinfix (e1, op1, e23)
| Ptree.Tinnfix (e1, op1, e23) ->
let apply loc de1 op de2 =
if op.id_str = Ident.op_neq then
let op = { op with id_str = Ident.op_equ } in
let ls = find_lsymbol_ns ns (Qident op) in
DTnot (Dterm.dterm crcmap ~loc (DTapp (ls, [de1;de2])))
else
DTapp (find_lsymbol_ns ns (Qident op), [de1;de2]) in
let rec chain loc de1 op1 = function
| { term_desc = Ptree.Tinfix (e2, op2, e3); term_loc = loc23 } ->
let de2 = dterm ns km crcmap gvars at denv e2 in
let loc12 = loc_cutoff loc loc23 e2.term_loc in
let de12 = Dterm.dterm crcmap ~loc:loc12 (apply loc12 de1 op1 de2) in
let de23 = Dterm.dterm crcmap ~loc:loc23 (chain loc23 de2 op2 e3) in
DTbinop (DTand, de12, de23)
| e23 ->
apply loc de1 op1 (dterm ns km crcmap gvars at denv e23) in
chain loc (dterm ns km crcmap gvars at denv e1) op1 e23
| Ptree.Tconst (Number.ConstInt _ as c) ->
DTconst (c, dty_int)
| Ptree.Tconst (Number.ConstReal _ as c) ->
DTconst (c, dty_real)
| Ptree.Tlet (x, e1, e2) ->
let id = create_user_id x in
let e1 = dterm ns km crcmap gvars at denv e1 in
let denv = denv_add_let denv e1 id in
let e2 = dterm ns km crcmap gvars at denv e2 in
DTlet (e1, id, e2)
| Ptree.Tcase (e1, bl) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let branch (p, e) =
let p = dpattern ns km p in
let denv = denv_add_term_pat denv p e1 in
p, dterm ns km crcmap gvars at denv e in
DTcase (e1, List.map branch bl)
| Ptree.Tif (e1, e2, e3) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let e2 = dterm ns km crcmap gvars at denv e2 in
let e3 = dterm ns km crcmap gvars at denv e3 in
DTif (e1, e2, e3)
| Ptree.Ttrue ->
DTtrue
| Ptree.Tfalse ->
DTfalse
| Ptree.Tnot e1 ->
DTnot (dterm ns km crcmap gvars at denv e1)
| Ptree.Tbinop (e1, Dterm.DTiff, e23)
| Ptree.Tbinnop (e1, Dterm.DTiff, e23) ->
let rec chain loc de1 = function
| { term_desc = Ptree.Tbinop (e2, DTiff, e3); term_loc = loc23 } ->
let de2 = dterm ns km crcmap gvars at denv e2 in
let loc12 = loc_cutoff loc loc23 e2.term_loc in
let de12 = Dterm.dterm crcmap ~loc:loc12 (DTbinop (DTiff, de1, de2)) in
let de23 = Dterm.dterm crcmap ~loc:loc23 (chain loc23 de2 e3) in
DTbinop (DTand, de12, de23)
| { term_desc = Ptree.Tbinop (_, DTimplies, _); term_loc = loc23 } ->
Loc.errorm ~loc:loc23 "An unparenthesized implication cannot be \
placed at the right hand side of an equivalence"
| e23 ->
DTbinop (DTiff, de1, (dterm ns km crcmap gvars at denv e23)) in
chain loc (dterm ns km crcmap gvars at denv e1) e23
| Ptree.Tbinop (e1, op, e2)
| Ptree.Tbinnop (e1, op, e2) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let e2 = dterm ns km crcmap gvars at denv e2 in
DTbinop (op, e1, e2)
| Ptree.Tquant (q, uqu, trl, e1) ->
let qvl = List.map (quant_var ns) uqu in
let denv = denv_add_quant denv qvl in
let dterm e = dterm ns km crcmap gvars at denv e in
let trl = List.map (List.map dterm) trl in
let e1 = dterm e1 in
DTquant (q, qvl, trl, e1)
| Ptree.Trecord fl ->
let get_val _cs pj = function
| Some e -> dterm ns km crcmap gvars at denv e
| None -> Loc.error ~loc (RecordFieldMissing pj) in
let cs, fl = parse_record ~loc ns km get_val fl in
DTapp (cs, fl)
| Ptree.Tupdate (e1, fl) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let re = is_reusable e1 in
let v = if re then e1 else mk_var crcmap "q " e1 in
let get_val _ pj = function
| Some e -> dterm ns km crcmap gvars at denv e
| None -> Dterm.dterm crcmap ~loc (DTapp (pj,[v])) in
let cs, fl = parse_record ~loc ns km get_val fl in
let d = DTapp (cs, fl) in
if re then d else mk_let crcmap ~loc "q " e1 d
| Ptree.Tat (e1, ({id_str = l; id_loc = loc} as id)) ->
Hstr.add at_uses l false;
let id = { id with id_str = "" } in
(* check if the label has actually been defined *)
ignore (Loc.try2 ~loc gvars (Some l) (Qident id));
let e1 = dterm ns km crcmap gvars (Some l) denv e1 in
if not (Hstr.find at_uses l) then Loc.errorm ~loc
"this `at'/`old' operator is never used";
Hstr.remove at_uses l;
DTattr (e1, Sattr.empty)
| Ptree.Tscope (q, e1) ->
let ns = import_namespace ns (string_list_of_qualid q) in
DTattr (dterm ns km crcmap gvars at denv e1, Sattr.empty)
| Ptree.Tattr (ATpos uloc, e1) ->
DTuloc (dterm ns km crcmap gvars at denv e1, uloc)
| Ptree.Tattr (ATstr attr, e1) ->
DTattr (dterm ns km crcmap gvars at denv e1, Sattr.singleton attr)
| Ptree.Tcast ({term_desc = Ptree.Tconst c}, pty) ->
DTconst (c, dty_of_pty ns pty)
| Ptree.Tcast (e1, pty) ->
let d1 = dterm ns km crcmap gvars at denv e1 in
DTcast (d1, dty_of_pty ns pty))
(* A "global variables" resolver for pure contexts: it never resolves
   any symbol, and it rejects every use of the `at'/`old' operators. *)
let no_gvars at q =
  match at with
  | None -> None
  | Some _ ->
      Loc.errorm ~loc:(qloc q)
        "`at' and `old' can only be used in program annotations"
(* Type-check a pure term against namespace [ns]: program variables and
   `at'/`old' are forbidden (see [no_gvars]). *)
let type_term_in_namespace ns km crcmap t =
  Dterm.term ~strict:true ~keep_loc:true
    (dterm ns km crcmap no_gvars None Dterm.denv_empty t)

(* Same as [type_term_in_namespace], but the result must be a formula. *)
let type_fmla_in_namespace ns km crcmap f =
  Dterm.fmla ~strict:true ~keep_loc:true
    (dterm ns km crcmap no_gvars None Dterm.denv_empty f)
(** typing program expressions *)
open Dexpr
(* Specialize [ty_of_pty] to a theory under construction.
   NOTE(review): the [get_namespace] referenced here is whichever one is
   in scope at this point (presumably Theory's), not the module-level
   [get_namespace] defined on the next line — confirm against the
   original file's ordering. *)
let ty_of_pty tuc = ty_of_pty (get_namespace tuc)
(* Namespace of the innermost open scope of a module under construction. *)
let get_namespace muc = List.hd muc.Pmodule.muc_import
(* Specialize the generic [dterm] to a module under construction: terms
   are typed against the module's theory namespace, known declarations,
   and coercion map. *)
let dterm muc =
  let uc = muc.muc_theory in
  dterm (Theory.get_namespace uc) uc.uc_known uc.uc_crcmap
(* Qualified-identifier lookups relative to the current module scope. *)
let find_xsymbol muc q = find_xsymbol_ns (get_namespace muc) q
let find_itysymbol muc q = find_itysymbol_ns (get_namespace muc) q
let find_prog_symbol muc q = find_prog_symbol_ns (get_namespace muc) q
(* Resolve [q] to a routine symbol satisfying predicate [test]; [nm]
   names the expected kind of symbol in error messages.  An overloaded
   notation (OO) is accepted only when filtering by [test] leaves
   exactly one candidate. *)
let find_special muc test nm q =
  match find_prog_symbol muc q with
  | RS s when test s -> s
  | OO ss ->
      begin match Srs.elements (Srs.filter test ss) with
      | [s] -> s
      | _::_ -> Loc.errorm ~loc:(qloc q)
          "Ambiguous %s notation: %a" nm print_qualid q
      | [] -> Loc.errorm ~loc:(qloc q) "Not a %s: %a" nm print_qualid q
      end
  | _ -> Loc.errorm ~loc:(qloc q) "Not a %s: %a" nm print_qualid q
(* Translate a parsed type expression into a program type (ity),
   resolving type symbols in the current module scope. *)
let ity_of_pty muc pty =
  let rec get_ity = function
    | PTtyvar {id_str = x} ->
        ity_var (tv_of_string x)
    | PTtyapp (q, tyl) ->
        let s = find_itysymbol_ns (get_namespace muc) q in
        let tyl = List.map get_ity tyl in
        Loc.try3 ~loc:(qloc q) ity_app s tyl []
    | PTtuple tyl ->
        ity_tuple (List.map get_ity tyl)
    | PTarrow (ty1, ty2) ->
        ity_func (get_ity ty1) (get_ity ty2)
    | PTpure ty ->
        ity_purify (get_ity ty)
    | PTparen ty ->
        get_ity ty
  in
  get_ity pty
(* Parsed type -> destructible (unification-friendly) program type. *)
let dity_of_pty muc pty =
  Dexpr.dity_of_ity (ity_of_pty muc pty)
(* Absent type annotations become fresh unification variables. *)
let dity_of_opt muc = function
  | Some pty -> dity_of_pty muc pty
  | None -> Dexpr.dity_fresh ()
(* records *)
(* Resolve [q] to a record-field symbol (rs_field is set). *)
let find_record_field muc q =
  let test rs = rs.rs_field <> None in
  find_special muc test "record field" q
(* Same, threading through the field's associated value. *)
let find_record_field2 muc (q,e) = find_record_field muc q, e
(* Given a list of (field symbol, value) pairs, recover the record's
   unique constructor, the full ordered field list, and a map from
   field to supplied value.  Raises EmptyRecord, BadRecordField, or
   DuplicateRecordField on malformed input. *)
let parse_record muc fll =
  (* we assume that every rsymbol in fll was resolved
     using find_record_field, so they are all fields *)
  let ls_of_rs rs = match rs.rs_logic with
    | RLls ls -> ls | _ -> assert false in
  let rs = match fll with
    | (rs, _)::_ -> rs
    | [] -> raise EmptyRecord in
  (* the record type is the (single) argument type of any field *)
  let its = match rs.rs_cty.cty_args with
    | [{pv_ity = {ity_node = (Ityreg {reg_its = s} | Ityapp (s,_,_))}}] -> s
    | _ -> raise (BadRecordField (ls_of_rs rs)) in
  let itd = find_its_defn muc.muc_known its in
  let check v s = match s.rs_field with
    | Some u -> pv_equal v u
    | _ -> false in
  (* a record has exactly one constructor whose arguments are the fields *)
  let cs = match itd.itd_constructors with
    | [cs] when Lists.equal check cs.rs_cty.cty_args itd.itd_fields -> cs
    | _ -> raise (BadRecordField (ls_of_rs rs)) in
  let pjs = Srs.of_list itd.itd_fields in
  let flm = List.fold_left (fun m (pj,v) -> if Srs.mem pj pjs then
      Mrs.add_new (DuplicateRecordField (ls_of_rs pj)) pj v m
    else raise (BadRecordField (ls_of_rs pj))) Mrs.empty fll in
  cs, itd.itd_fields, flm
(* Resolve the field names in [fl], then build the constructor argument
   list by calling [get_val cs pj (Some v | None)] for every declared
   field, in declaration order. *)
let parse_record ~loc muc get_val fl =
  let fl = List.map (find_record_field2 muc) fl in
  let cs,pjl,flm = Loc.try2 ~loc parse_record muc fl in
  let get_val pj = get_val cs pj (Mrs.find_opt pj flm) in
  cs, List.map get_val pjl
(* patterns *)
(* Resolve [q] to a constructor symbol (an lsymbol with ls_constr > 0). *)
let find_constructor muc q =
  let test rs = match rs.rs_logic with
    | RLls {ls_constr = c} -> c > 0
    | _ -> false in
  find_special muc test "constructor" q
(* Elaborate a parsed pattern; [gh] is the accumulated ghost status,
   switched on by an enclosing Pghost and propagated to variables. *)
let rec dpattern muc gh { pat_desc = desc; pat_loc = loc } =
  match desc with
  | Ptree.Pparen p -> dpattern muc gh p
  | Ptree.Pghost p -> dpattern muc true p
  | _ -> (* creative indentation ahead *)
  Dexpr.dpattern ~loc (match desc with
    | Ptree.Pwild -> DPwild
    | Ptree.Pparen _ | Ptree.Pghost _ -> assert false
    | Ptree.Pvar x -> DPvar (create_user_id x, gh)
    | Ptree.Papp (q,pl) ->
        DPapp (find_constructor muc q, List.map (dpattern muc gh) pl)
    | Ptree.Prec fl ->
        (* a missing field matches anything *)
        let get_val _ _ = function
          | Some p -> dpattern muc gh p
          | None -> Dexpr.dpattern DPwild in
        let cs,fl = parse_record ~loc muc get_val fl in
        DPapp (cs,fl)
    | Ptree.Ptuple pl ->
        DPapp (rs_tuple (List.length pl), List.map (dpattern muc gh) pl)
    | Ptree.Pcast (p,pty) -> DPcast (dpattern muc gh p, dity_of_pty muc pty)
    | Ptree.Pas (p,x,g) -> DPas (dpattern muc gh p, create_user_id x, gh || g)
    | Ptree.Por (p,q) -> DPor (dpattern muc gh p, dpattern muc gh q))
(* Entry point: patterns start out non-ghost. *)
let dpattern muc pat = dpattern muc false pat
(* specifications *)
(* Best-effort lookup of a program variable in the module scope;
   any resolution failure is mapped to None. *)
let find_global_pv muc q = try match find_prog_symbol muc q with
  | PV v -> Some v | _ -> None with _ -> None

(* Local bindings in [lvm] shadow globals for unqualified names. *)
let find_local_pv muc lvm q = match q with
  | Qdot _ -> find_global_pv muc q
  | Qident id -> let ovs = Mstr.find_opt id.id_str lvm in
      if ovs = None then find_global_pv muc q else ovs
(* Build the gvars callback for term typing: resolves [q] to a pvsymbol
   (local map [lvm] first) and, when an `at'/`old' label [at] is given,
   wraps the variable with [old]. *)
let mk_gvars muc lvm old = fun at q ->
  match find_local_pv muc lvm q, at with
  | Some v, Some l -> Some (old l v)
  | None, Some l ->
      begin match q with
      (* normally, we have no reason to call "old" without
         a pvsymbol, but we make an exception for an empty
         ident to check if the label is valid at Tat *)
      | Qident {id_str = ""} -> Opt.map (old l) None
      | _ -> None end
  | v, None -> v
(* Type-check a term appearing in a program annotation, with local
   variables [lvm] and the `old' interpretation [old]. *)
let type_term muc lvm old t =
  let gvars = mk_gvars muc lvm old in
  let t = dterm muc gvars None Dterm.denv_empty t in
  Dterm.term ~strict:true ~keep_loc:true t

(* Same as [type_term], but the result must be a formula. *)
let type_fmla muc lvm old f =
  let gvars = mk_gvars muc lvm old in
  let f = dterm muc gvars None Dterm.denv_empty f in
  Dterm.fmla ~strict:true ~keep_loc:true f
(* Type-check a list of precondition formulas. *)
let dpre muc pl lvm old =
  let dpre f = type_fmla muc lvm old f in
  List.map dpre pl
(* Type-check postconditions of result type [ity].  Each postcondition
   is a (pattern, formula) list; a variable or wildcard pattern binds
   the result directly, while any other pattern list is desugared into
   a match over a hidden "(null)" result variable. *)
let dpost muc ql lvm old ity =
  let rec dpost (loc,pfl) = match pfl with
    | [{ pat_desc = Ptree.Pparen p; pat_loc = loc}, f] ->
        dpost (loc, [p,f])
    | [{ pat_desc = Ptree.Pwild | Ptree.Ptuple [] }, f] ->
        let v = create_pvsymbol (id_fresh "result") ity in
        v, Loc.try3 ~loc type_fmla muc lvm old f
    | [{ pat_desc = Ptree.Pvar id }, f] ->
        let v = create_pvsymbol (create_user_id id) ity in
        let lvm = Mstr.add id.id_str v lvm in
        v, Loc.try3 ~loc type_fmla muc lvm old f
    | _ ->
        (* general case: match the result against the pattern list;
           "(null)" cannot clash with any user-written identifier *)
        let v = create_pvsymbol (id_fresh "result") ity in
        let i = { id_str = "(null)"; id_loc = loc; id_ats = [] } in
        let t = { term_desc = Tident (Qident i); term_loc = loc } in
        let f = { term_desc = Ptree.Tcase (t, pfl); term_loc = loc } in
        let lvm = Mstr.add "(null)" v lvm in
        v, Loc.try3 ~loc type_fmla muc lvm old f in
  List.map dpost ql
(* Type-check exceptional postconditions: group the clauses by
   exception symbol (locally declared exceptions in [xsm] take
   precedence), then elaborate each group with [dpost] at the
   exception's carried type. *)
let dxpost muc ql lvm xsm old =
  let add_exn (q,pf) m =
    let xs = match q with
      | Qident i ->
          begin try Mstr.find i.id_str xsm with
          | Not_found -> find_xsymbol muc q end
      | _ -> find_xsymbol muc q in
    Mxs.change (fun l -> match pf, l with
      | Some pf, Some l -> Some (pf :: l)
      | Some pf, None -> Some (pf :: [])
      | None, None -> Some []
      | None, Some _ -> l) xs m in
  let mk_xpost loc xs pfl =
    if pfl = [] then [] else
    dpost muc [loc,pfl] lvm old xs.xs_ity in
  let exn_map (loc,xpfl) =
    let m = List.fold_right add_exn xpfl Mxs.empty in
    Mxs.mapi (fun xs pfl -> mk_xpost loc xs pfl) m in
  let add_map ql m =
    Mxs.union (fun _ l r -> Some (l @ r)) (exn_map ql) m in
  List.fold_right add_map ql Mxs.empty
(* A `reads' clause must list plain variables. *)
let dreads muc rl lvm =
  let dreads q = match find_local_pv muc lvm q with Some v -> v
    | None -> Loc.errorm ~loc:(qloc q) "Not a variable: %a" print_qualid q in
  List.map dreads rl
(* A `writes' clause is a list of terms; `at'/`old' are rejected. *)
let dwrites muc wl lvm =
  let old _ _ = Loc.errorm
    "`at' and `old' cannot be used in the `writes' clause" in
  let dwrites t = type_term muc lvm old t in
  List.map dwrites wl
(* A variant's ordering relation must be a binary predicate over a
   single type. *)
let find_variant_ls muc q = match find_lsymbol muc.muc_theory q with
  | { ls_args = [u;v]; ls_value = None } as ls when ty_equal u v -> ls
  | s -> Loc.errorm ~loc:(qloc q) "Not an order relation: %a" Pretty.print_ls s
(* Type-check a variant clause: measure terms with optional orderings. *)
let dvariant muc varl lvm _xsm old =
  let dvar t = type_term muc lvm old t in
  let dvar (t,q) = dvar t, Opt.map (find_variant_ls muc) q in
  List.map dvar varl
(* Elaborate a full routine specification (pre/post/xpost/reads/writes)
   against the result type [ity]. *)
let dspec muc sp lvm xsm old ity = {
  ds_pre = dpre muc sp.sp_pre lvm old;
  ds_post = dpost muc sp.sp_post lvm old ity;
  ds_xpost = dxpost muc sp.sp_xpost lvm xsm old;
  ds_reads = dreads muc sp.sp_reads lvm;
  ds_writes = dwrites muc sp.sp_writes lvm;
  ds_checkrw = sp.sp_checkrw;
  ds_diverge = sp.sp_diverge; }
(* Like [dspec], but reject a `variant' clause (used for non-recursive
   definitions); returns a partial application awaiting lvm/xsm/old/ity. *)
let dspec_no_variant muc sp = match sp.sp_variant with
  | ({term_loc = loc},_)::_ ->
      Loc.errorm ~loc "unexpected 'variant' clause"
  | _ -> dspec muc sp
(* Assertions and loop/type invariants are plain formulas. *)
let dassert muc f lvm _xsm old = type_fmla muc lvm old f
let dinvariant muc f lvm _xsm old = dpre muc f lvm old
(* abstract values *)
(* Abstract parameter: the type annotation is mandatory. *)
let dparam muc (_,id,gh,pty) =
  Opt.map create_user_id id, gh, dity_of_pty muc pty
(* Binder: a missing type annotation becomes a fresh type variable. *)
let dbinder muc (_,id,gh,opt) =
  Opt.map create_user_id id, gh, dity_of_opt muc opt
(* expressions *)
(* An expression can be duplicated without changing semantics or cost
   only if it is a bare variable or symbol reference. *)
let is_reusable de = match de.de_node with
  | DEvar _ | DEsym _ -> true | _ -> false
(* Reference a previously let-bound intermediate named [n]. *)
let mk_var n de =
  Dexpr.dexpr ?loc:de.de_loc (DEvar (n, de.de_dvty))
(* Bind [de] to [n] around [node], to avoid recomputing it. *)
let mk_let ~loc n de node =
  let de1 = Dexpr.dexpr ~loc node in
  DElet ((id_user n loc, false, RKnone, de), de1)
(* If [e] is an `any' expression, overwrite its routine kind with
   [kind]; every other expression is returned unchanged. *)
let update_any kind e =
  match e.expr_desc with
  | Ptree.Eany (pl, _, pty, msk, sp) ->
      let desc = Ptree.Eany (pl, kind, pty, msk, sp) in
      { e with expr_desc = desc }
  | _ -> e
(* Logical functions and predicates defined inside a program body are
   bound with the local kind; all other kinds pass through. *)
let local_kind k =
  match k with
  | RKfunc -> RKlocal
  | RKpred -> RKlocal
  | other -> other
(* Elaborate a term occurring in an effect position (e.g. an `alias'
   clause) as a program expression.  Only a small applicative fragment
   is accepted; everything else is rejected with an error.  The two
   [qualid_app] definitions deliberately shadow: the second one first
   consults the local binding environment [denv]. *)
let rec eff_dterm muc denv {term_desc = desc; term_loc = loc} =
  let expr_app loc e el =
    List.fold_left (fun e1 e2 ->
      DEapp (Dexpr.dexpr ~loc e1, e2)) e el
  in
  let qualid_app loc q el =
    (* program symbols take precedence over pure logic symbols *)
    let e = try DEsym (find_prog_symbol muc q) with
      | _ -> DEls_pure (find_lsymbol muc.muc_theory q, false) in
    expr_app loc e el
  in
  let qualid_app loc q el = match q with
    | Qident {id_str = n} ->
        (match denv_get_opt denv n with
        | Some d -> expr_app loc d el
        | None -> qualid_app loc q el)
    | _ -> qualid_app loc q el
  in
  Dexpr.dexpr ~loc (match desc with
  | Ptree.Tident q ->
      qualid_app loc q []
  | Ptree.Tidapp (q, [e1]) ->
      qualid_app loc q [eff_dterm muc denv e1]
  | Ptree.Tapply (e1, e2) ->
      DEapp (eff_dterm muc denv e1, eff_dterm muc denv e2)
  | Ptree.Tscope (q, e1) ->
      let muc = open_scope muc "dummy" in
      let muc = import_scope muc (string_list_of_qualid q) in
      DEattr (eff_dterm muc denv e1, Sattr.empty)
  | Ptree.Tattr (ATpos uloc, e1) ->
      DEuloc (eff_dterm muc denv e1, uloc)
  | Ptree.Tattr (ATstr attr, e1) ->
      DEattr (eff_dterm muc denv e1, Sattr.singleton attr)
  | Ptree.Tcast (e1, pty) ->
      let d1 = eff_dterm muc denv e1 in
      DEcast (d1, dity_of_pty muc pty)
  | Ptree.Tat _ -> Loc.errorm ~loc "`at' and `old' cannot be used here"
  | Ptree.Tidapp _ | Ptree.Tconst _ | Ptree.Tinfix _ | Ptree.Tinnfix _
  | Ptree.Ttuple _ | Ptree.Tlet _ | Ptree.Tcase _ | Ptree.Tif _
  | Ptree.Ttrue | Ptree.Tfalse | Ptree.Tnot _ | Ptree.Tbinop _ | Ptree.Tbinnop _
  | Ptree.Tquant _ | Ptree.Trecord _ | Ptree.Tupdate _ ->
      Loc.errorm ~loc "unsupported effect expression")
(* Elaborate a parsed program expression into a dexpr.  The shadowed
   [qualid_app]/[qualid_app_pure] helpers first consult the local
   binding environment [denv] before falling back to a scope lookup;
   names like "q " (with a trailing space) are deliberately chosen so
   they cannot clash with user identifiers. *)
let rec dexpr muc denv {expr_desc = desc; expr_loc = loc} =
  let expr_app loc e el =
    List.fold_left (fun e1 e2 ->
      DEapp (Dexpr.dexpr ~loc e1, e2)) e el
  in
  let qualid_app loc q el =
    (* program symbols take precedence over pure logic symbols *)
    let e = try DEsym (find_prog_symbol muc q) with
      | _ -> DEls_pure (find_lsymbol muc.muc_theory q, false) in
    expr_app loc e el
  in
  let qualid_app loc q el = match q with
    | Qident {id_str = n} ->
        (match denv_get_opt denv n with
        | Some d -> expr_app loc d el
        | None -> qualid_app loc q el)
    | _ -> qualid_app loc q el
  in
  let qualid_app_pure loc q el =
    let e = match find_global_pv muc q with
      | Some v -> DEpv_pure v
      | None -> DEls_pure (find_lsymbol muc.muc_theory q, true) in
    expr_app loc e el
  in
  let qualid_app_pure loc q el = match q with
    | Qident {id_str = n} ->
        (match denv_get_pure_opt denv n with
        | Some d -> expr_app loc d el
        | None -> qualid_app_pure loc q el)
    | _ -> qualid_app_pure loc q el
  in
  (* locally declared exceptions shadow global exception symbols *)
  let find_dxsymbol q = match q with
    | Qident {id_str = n} ->
        (try denv_get_exn denv n with _
        -> DEgexn (find_xsymbol muc q))
    | _ -> DEgexn (find_xsymbol muc q)
  in
  Dexpr.dexpr ~loc begin match desc with
  | Ptree.Eident q ->
      qualid_app loc q []
  | Ptree.Eidpur q ->
      qualid_app_pure loc q []
  | Ptree.Eidapp (q, el) ->
      qualid_app loc q (List.map (dexpr muc denv) el)
  | Ptree.Eapply (e1, e2) ->
      DEapp (dexpr muc denv e1, dexpr muc denv e2)
  | Ptree.Etuple el ->
      let e = DEsym (RS (rs_tuple (List.length el))) in
      expr_app loc e (List.map (dexpr muc denv) el)
  | Ptree.Einfix (e1, op1, e23)
  | Ptree.Einnfix (e1, op1, e23) ->
      (* a <> b is sugar for not (a = b) *)
      let apply loc de1 op de2 =
        if op.id_str = Ident.op_neq then
          let oq = Qident { op with id_str = Ident.op_equ } in
          let dt = qualid_app op.id_loc oq [de1;de2] in
          DEnot (Dexpr.dexpr ~loc dt)
        else
          qualid_app op.id_loc (Qident op) [de1;de2] in
      (* chained comparisons a < b < c become a < b && b < c, binding
         the shared middle operand once when it is not reusable *)
      let rec chain n1 n2 loc de1 op1 = function
        | { expr_desc = Ptree.Einfix (e2, op2, e3); expr_loc = loc23 } ->
            let de2 = dexpr muc denv e2 in
            let re = is_reusable de2 in
            let v = if re then de2 else mk_var n1 de2 in
            let loc12 = loc_cutoff loc loc23 e2.expr_loc in
            let de12 = Dexpr.dexpr ~loc:loc12 (apply loc12 de1 op1 v) in
            let de23 = Dexpr.dexpr ~loc:loc23 (chain n2 n1 loc23 v op2 e3) in
            let d = DEand (de12, de23) in
            if re then d else mk_let ~loc n1 de2 d
        | e23 ->
            apply loc de1 op1 (dexpr muc denv e23) in
      chain "q1 " "q2 " loc (dexpr muc denv e1) op1 e23
  | Ptree.Econst (Number.ConstInt _ as c) ->
      (* with range types in scope, the literal's type is inferred *)
      let dty = if Mts.is_empty muc.muc_theory.uc_ranges
        then dity_int else dity_fresh () in
      DEconst(c, dty)
  | Ptree.Econst (Number.ConstReal _ as c) ->
      let dty = if Mts.is_empty muc.muc_theory.uc_floats
        then dity_real else dity_fresh () in
      DEconst(c, dty)
  | Ptree.Erecord fl ->
      let ls_of_rs rs = match rs.rs_logic with
        | RLls ls -> ls | _ -> assert false in
      (* a record construction must supply every field *)
      let get_val _cs pj = function
        | None -> Loc.error ~loc (Decl.RecordFieldMissing (ls_of_rs pj))
        | Some e -> dexpr muc denv e in
      let cs,fl = parse_record ~loc muc get_val fl in
      expr_app loc (DEsym (RS cs)) fl
  | Ptree.Eupdate (e1, fl) ->
      (* missing fields are copied from e1, bound once if needed *)
      let e1 = dexpr muc denv e1 in
      let re = is_reusable e1 in
      let v = if re then e1 else mk_var "q " e1 in
      let get_val _ pj = function
        | None ->
            let pj = Dexpr.dexpr ~loc (DEsym (RS pj)) in
            Dexpr.dexpr ~loc (DEapp (pj, v))
        | Some e -> dexpr muc denv e in
      let cs,fl = parse_record ~loc muc get_val fl in
      let d = expr_app loc (DEsym (RS cs)) fl in
      if re then d else mk_let ~loc "q " e1 d
  | Ptree.Elet (id, gh, kind, e1, e2) ->
      let e1 = update_any kind e1 in
      let kind = local_kind kind in
      let ld = create_user_id id, gh, kind, dexpr muc denv e1 in
      DElet (ld, dexpr muc (denv_add_let denv ld) e2)
  | Ptree.Erec (fdl, e1) ->
      let update_kind (id, gh, k, bl, pty, msk, sp, e) =
        id, gh, local_kind k, bl, pty, msk, sp, e in
      let fdl = List.map update_kind fdl in
      let denv, rd = drec_defn muc denv fdl in
      DErec (rd, dexpr muc denv e1)
  | Ptree.Efun (bl, pty, msk, sp, e) ->
      let bl = List.map (dbinder muc) bl in
      let ds = dspec_no_variant muc sp in
      let dity = dity_of_opt muc pty in
      let denv = denv_add_args denv bl in
      DEfun (bl, dity, msk, ds, dexpr muc denv e)
  | Ptree.Eany (pl, kind, pty, msk, sp) ->
      let pl = List.map (dparam muc) pl in
      let ds = dspec_no_variant muc sp in
      (* the result type may be implied by the routine kind *)
      let ity = match kind, pty with
        | _, Some pty -> ity_of_pty muc pty
        | RKlemma, None -> ity_unit
        | RKpred, None -> ity_bool
        | _ -> Loc.errorm ~loc "cannot determine the type of the result" in
      let dity = dity_of_ity ity in
      let res = Some (id_fresh "result"), false, dity in
      let denv = denv_add_args denv (res::pl) in
      (* alias pairs are only type-checked (unified), then discarded *)
      let add_alias (t1, t2) =
        let bty = dity_fresh () in
        let dt1 = eff_dterm muc denv t1 in
        let dt2 = eff_dterm muc denv t2 in
        ignore (Dexpr.dexpr ~loc:t1.term_loc (DEcast (dt1, bty)));
        ignore (Dexpr.dexpr ~loc:t2.term_loc (DEcast (dt2, bty)));
      in
      List.iter add_alias sp.sp_alias;
      DEany (pl, dity, msk, ds)
  | Ptree.Ematch (e1, bl, xl) ->
      let e1 = dexpr muc denv e1 in
      let rbranch (pp, e) =
        let pp = dpattern muc pp in
        let denv = denv_add_expr_pat denv pp e1 in
        pp, dexpr muc denv e in
      let xbranch (q, pp, e) =
        let xs = find_dxsymbol q in
        (* a unit-carrying exception may omit its argument pattern *)
        let mb_unit = match xs with
          | DEgexn xs -> ity_equal xs.xs_ity ity_unit
          | DElexn _ -> true in
        let pp = match pp with
          | Some pp -> dpattern muc pp
          | None when mb_unit -> Dexpr.dpattern ~loc (DPapp (rs_void, []))
          | _ -> Loc.errorm ~loc "exception argument expected" in
        let denv = denv_add_exn_pat denv pp xs in
        let e = dexpr muc denv e in
        xs, pp, e in
      DEmatch (e1, List.map rbranch bl, List.map xbranch xl)
  | Ptree.Eif (e1, e2, e3) ->
      let e1 = dexpr muc denv e1 in
      let e2 = dexpr muc denv e2 in
      let e3 = dexpr muc denv e3 in
      DEif (e1, e2, e3)
  | Ptree.Enot e1 ->
      DEnot (dexpr muc denv e1)
  | Ptree.Eand (e1, e2) ->
      DEand (dexpr muc denv e1, dexpr muc denv e2)
  | Ptree.Eor (e1, e2) ->
      DEor (dexpr muc denv e1, dexpr muc denv e2)
  | Ptree.Etrue -> DEtrue
  | Ptree.Efalse -> DEfalse
  | Ptree.Esequence (e1, e2) ->
      (* the first expression of a sequence must have type unit *)
      let e1 = { e1 with expr_desc = Ecast (e1, PTtuple []) } in
      let e1 = dexpr muc denv e1 in
      let e2 = dexpr muc denv e2 in
      DElet ((id_user "_" loc, false, RKnone, e1), e2)
  | Ptree.Ewhile (e1, inv, var, e2) ->
      let e1 = dexpr muc denv e1 in
      let e2 = dexpr muc denv e2 in
      let inv = dinvariant muc inv in
      let var = dvariant muc var in
      DEwhile (e1, inv, var, e2)
  | Ptree.Efor (id, efrom, dir, eto, inv, e1) ->
      let efrom = dexpr muc denv efrom in
      let eto = dexpr muc denv eto in
      let inv = dinvariant muc inv in
      let id = create_user_id id in
      let denv = denv_add_for_index denv id efrom.de_dvty in
      DEfor (id, efrom, dir, eto, inv, dexpr muc denv e1)
  | Ptree.Eassign asl ->
      let mk_assign (e1,q,e2) =
        dexpr muc denv e1, find_record_field muc q, dexpr muc denv e2 in
      DEassign (List.map mk_assign asl)
  | Ptree.Eraise (q, e1) ->
      let xs = find_dxsymbol q in
      let mb_unit = match xs with
        | DEgexn xs -> ity_equal xs.xs_ity ity_unit
        | DElexn _ -> true in
      let e1 = match e1 with
        | Some e1 -> dexpr muc denv e1
        | None when mb_unit -> Dexpr.dexpr ~loc (DEsym (RS rs_void))
        | _ -> Loc.errorm ~loc "exception argument expected" in
      DEraise (xs, e1)
  | Ptree.Eghost e1 ->
      DEghost (dexpr muc denv e1)
  | Ptree.Eexn (id, pty, mask, e1) ->
      let id = create_user_id id in
      let dity = dity_of_pty muc pty in
      let denv = denv_add_exn denv id dity in
      DEexn (id, dity, mask, dexpr muc denv e1)
  | Ptree.Eabsurd ->
      DEabsurd
  | Ptree.Epure t ->
      (* a pure block is typed as a term; its dtype defaults to bool *)
      let get_term lvm _xsm old = type_term muc lvm old t in
      let gvars _at q = try match find_prog_symbol muc q with
        | PV v -> Some v | _ -> None with _ -> None in
      let get_dty pure_denv =
        let dt = dterm muc gvars None pure_denv t in
        match dt.dt_dty with Some dty -> dty | None -> dty_bool in
      DEpure (get_term, denv_pure denv get_dty)
  | Ptree.Eassert (ak, f) ->
      DEassert (ak, dassert muc f)
  | Ptree.Eoptexn (id, mask, e1) ->
      let dity = dity_fresh () in
      let id = create_user_id id in
      let denv = denv_add_exn denv id dity in
      DEoptexn (id, dity, mask, dexpr muc denv e1)
  | Ptree.Elabel (id, e1) ->
      DElabel (create_user_id id, dexpr muc denv e1)
  | Ptree.Escope (q, e1) ->
      let muc = open_scope muc "dummy" in
      let muc = import_scope muc (string_list_of_qualid q) in
      DEattr (dexpr muc denv e1, Sattr.empty)
  | Ptree.Eattr (ATpos uloc, e1) ->
      DEuloc (dexpr muc denv e1, uloc)
  | Ptree.Eattr (ATstr attr, e1) ->
      DEattr (dexpr muc denv e1, Sattr.singleton attr)
  | Ptree.Ecast ({expr_desc = Ptree.Econst c}, pty) ->
      DEconst (c, dity_of_pty muc pty)
  | Ptree.Ecast (e1, pty) ->
      let d1 = dexpr muc denv e1 in
      DEcast (d1, dity_of_pty muc pty)
  end
(* Elaborate a mutually recursive definition group: binders and result
   types are prepared eagerly, while specs, variants and bodies are
   deferred behind [pre] until all names are in the environment. *)
and drec_defn muc denv fdl =
  let prep (id, gh, kind, bl, pty, msk, sp, e) =
    let bl = List.map (dbinder muc) bl in
    let dity = dity_of_opt muc pty in
    let pre denv =
      let denv = denv_add_args denv bl in
      let dv = dvariant muc sp.sp_variant in
      dspec muc sp, dv, dexpr muc denv e in
    create_user_id id, gh, kind, bl, dity, msk, pre in
  Dexpr.drec_defn denv (List.map prep fdl)
(** Typing declarations *)
open Pdecl
open Pmodule
(* Record the declared names for the Glob index (when enabled), then
   add the program declaration to the module. *)
let add_pdecl ~vc muc d =
  if Debug.test_flag Glob.flag then Sid.iter (Glob.def ~kind:"") d.pd_news;
  add_pdecl ~vc muc d
(* Pure declarations never generate verification conditions. *)
let add_decl muc d = add_pdecl ~vc:false muc (create_pure_decl d)
(* Type a term in a pure context: only the local variables in [lvm]
   are visible, and `at'/`old' are rejected. *)
let type_pure muc lvm denv e =
  let gvars at q = match at, q with
    | Some _, _ -> Loc.errorm ~loc:(qloc q)
        "`at' and `old' can only be used in program annotations"
    | None, Qident x -> Mstr.find_opt x.id_str lvm
    | None, Qdot _ -> None in
  dterm muc gvars None denv e
(* Pure term / formula entry points built on [type_pure]. *)
let type_term_pure muc lvm denv e =
  Dterm.term ~strict:true ~keep_loc:true (type_pure muc lvm denv e)
let type_fmla_pure muc lvm denv e =
  Dterm.fmla ~strict:true ~keep_loc:true (type_pure muc lvm denv e)
(* Alias/algebraic/range/float type definitions must be fully public:
   no visibility restriction, mutability, invariant, or witness. *)
let check_public ~loc d name =
  if d.td_vis <> Public || d.td_mut then
    Loc.errorm ~loc "%s types cannot be abstract, private, or mutable" name;
  if d.td_inv <> [] then
    Loc.errorm ~loc "%s types cannot have invariants" name;
  if d.td_wit <> [] then
    Loc.errorm ~loc "%s types cannot have witnesses" name
(* Elaborate a group of (possibly mutually recursive) type definitions.
   [visit] walks each definition on demand; [hts]/[htd] memoize the
   created symbols/declarations, [alias] (a set of type names) detects
   cyclic alias definitions, and [alg] carries the algebraic types of
   the current recursion group.
   Fix: the initial [~alias] argument must be the empty string SET
   (Sstr.empty) — the original text passed Mstr.empty, which does not
   match the Sstr.add/Sstr.mem operations applied to [alias]. *)
let add_types muc tdl =
  let add m ({td_ident = {id_str = x}; td_loc = loc} as d) =
    Mstr.add_new (Loc.Located (loc, ClashSymbol x)) x d m in
  let def = List.fold_left add Mstr.empty tdl in
  let hts = Hstr.create 5 in
  let htd = Hstr.create 5 in
  let rec visit ~alias ~alg x d = if not (Hstr.mem htd x) then
    let id = create_user_id d.td_ident and loc = d.td_loc in
    let args = List.map (fun id -> tv_of_string id.id_str) d.td_params in
    match d.td_def with
    | TDalias pty ->
        check_public ~loc d "Alias";
        let alias = Sstr.add x alias in
        let ity = parse ~loc ~alias ~alg pty in
        (* [parse] may have recursively completed this very entry *)
        if not (Hstr.mem htd x) then
        let itd = create_alias_decl id args ity in
        Hstr.add hts x itd.itd_its; Hstr.add htd x itd
    | TDalgebraic csl ->
        check_public ~loc d "Algebraic";
        let hfd = Hstr.create 5 in
        let alias = Sstr.empty in
        let alg = Mstr.add x (id,args) alg in
        (* shared fields of distinct constructors must agree in type
           and ghost status; [hfd] memoizes them by name *)
        let get_pj nms (_, id, ghost, pty) = match id with
          | Some ({id_str = nm} as id) ->
              let exn = Loc.Located (id.id_loc, Loc.Message ("Field " ^
                nm ^ " is used more than once in the same constructor")) in
              let nms = Sstr.add_new exn nm nms in
              let ity = parse ~loc ~alias ~alg pty in
              let v = try Hstr.find hfd nm with Not_found ->
                let v = create_pvsymbol (create_user_id id) ~ghost ity in
                Hstr.add hfd nm v;
                v in
              if not (ity_equal v.pv_ity ity && ghost = v.pv_ghost) then
                Loc.errorm ~loc "Conflicting definitions for field %s" nm;
              nms, (true, v)
          | None ->
              let ity = parse ~loc ~alias ~alg pty in
              nms, (false, create_pvsymbol (id_fresh "a") ~ghost ity) in
        (* every constructor must carry exactly the same named fields *)
        let get_cs oms (_, id, pjl) =
          let nms, pjl = Lists.map_fold_left get_pj Sstr.empty pjl in
          if Sstr.equal oms nms then create_user_id id, pjl else
            let df = Sstr.union (Sstr.diff oms nms) (Sstr.diff nms oms) in
            Loc.errorm ~loc "Field %s is missing in some constructors"
              (Sstr.choose df) in
        let csl = match csl with
          | (_, id, pjl)::csl ->
              let oms, pjl = Lists.map_fold_left get_pj Sstr.empty pjl in
              (create_user_id id, pjl) :: List.map (get_cs oms) csl
          | [] -> assert false in
        if not (Hstr.mem htd x) then
        begin match try Some (Hstr.find hts x) with Not_found -> None with
        | Some s ->
            (* a recursive symbol was pre-created by [parse] *)
            Hstr.add htd x (create_rec_variant_decl s csl)
        | None ->
            let itd = create_plain_variant_decl id args csl in
            Hstr.add hts x itd.itd_its; Hstr.add htd x itd end
    | TDrecord fl ->
        let alias = Sstr.empty in
        let alg = Mstr.add x (id,args) alg in
        let get_fd nms fd =
          let {id_str = nm; id_loc = loc} = fd.f_ident in
          let id = create_user_id fd.f_ident in
          let ity = parse ~loc ~alias ~alg fd.f_pty in
          let ghost = d.td_vis = Abstract || fd.f_ghost in
          let pv = create_pvsymbol id ~ghost ity in
          let exn = Loc.Located (loc, Loc.Message ("Field " ^
            nm ^ " is used more than once in a record")) in
          Mstr.add_new exn nm pv nms, (fd.f_mutable, pv) in
        let nms,fl = Lists.map_fold_left get_fd Mstr.empty fl in
        if not (Hstr.mem htd x) then
        begin match try Some (Hstr.find hts x) with Not_found -> None with
        | Some s ->
            check_public ~loc d "Recursive";
            let get_fd (mut, fd) = if mut then Loc.errorm ~loc
              "Recursive types cannot have mutable fields" else fd in
            Hstr.add htd x (create_rec_record_decl s (List.map get_fd fl))
        | None ->
            (* empty records are automatically private, otherwise they are
               just unit types that can be neither constructed nor refined *)
            let priv = d.td_vis <> Public || fl = [] and mut = d.td_mut in
            let add_fd m (_, v) = Mstr.add v.pv_vs.vs_name.id_string v m in
            let gvars = List.fold_left add_fd Mstr.empty fl in
            let type_inv f = type_fmla_pure muc gvars Dterm.denv_empty f in
            let inv = List.map type_inv d.td_inv in
            (* witnesses: one defining expression per field, cast to
               the field's type *)
            let add_w m (q,e) =
              let v = try match q with
                | Qident x -> Mstr.find x.id_str nms
                | Qdot _ -> raise Not_found
              with Not_found -> Loc.errorm ~loc:(qloc q)
                "Unknown field %a" print_qualid q in
              let dity = dity_of_ity v.pv_ity in
              let de = dexpr muc denv_empty e in
              let de = Dexpr.dexpr ?loc:de.de_loc (DEcast (de, dity)) in
              Mpv.add v (expr ~keep_loc:true de) m in
            let wit = List.fold_left add_w Mpv.empty d.td_wit in
            let wit = if d.td_wit = [] then [] else
              List.map (fun (_,v) -> try Mpv.find v wit with
                | _ -> Loc.errorm ?loc:v.pv_vs.vs_name.Ident.id_loc
                  "Missing field %s" v.pv_vs.vs_name.id_string) fl in
            let itd = create_plain_record_decl ~priv ~mut id args fl inv wit in
            Hstr.add hts x itd.itd_its; Hstr.add htd x itd
        end
    | TDrange (lo,hi) ->
        check_public ~loc d "Range";
        let ir = Number.create_range lo hi in
        let itd = create_range_decl id ir in
        Hstr.add hts x itd.itd_its; Hstr.add htd x itd
    | TDfloat (eb,sb) ->
        check_public ~loc d "Floating-point";
        let fp = { Number.fp_exponent_digits = eb;
                   Number.fp_significand_digits = sb } in
        let itd = create_float_decl id fp in
        Hstr.add hts x itd.itd_its; Hstr.add htd x itd
  and parse ~loc ~alias ~alg pty =
    let rec down = function
      | PTtyvar id ->
          ity_var (tv_of_string id.id_str)
      | PTtyapp (q,tyl) ->
          let s = match q with
            | Qident {id_str = x} when Sstr.mem x alias ->
                Loc.errorm ~loc "Cyclic type definition"
            | Qident {id_str = x} when Hstr.mem hts x ->
                Hstr.find hts x
            | Qident {id_str = x} when Mstr.mem x alg ->
                (* recursive occurrence inside the current group:
                   pre-create the symbol, then elaborate the definition *)
                let id, args = Mstr.find x alg in
                let s = create_rec_itysymbol id args in
                Hstr.add hts x s;
                visit ~alias ~alg x (Mstr.find x def);
                s
            | Qident {id_str = x} when Mstr.mem x def ->
                visit ~alias ~alg x (Mstr.find x def);
                Hstr.find hts x
            | _ ->
                find_itysymbol muc q in
          Loc.try3 ~loc:(qloc q) ity_app s (List.map down tyl) []
      | PTtuple tyl -> ity_tuple (List.map down tyl)
      | PTarrow (ty1,ty2) -> ity_func (down ty1) (down ty2)
      | PTpure ty -> ity_purify (down ty)
      | PTparen ty -> down ty in
    down pty in
  Mstr.iter (visit ~alias:Sstr.empty ~alg:Mstr.empty) def;
  let tdl = List.map (fun d -> Hstr.find htd d.td_ident.id_str) tdl in
  let add muc d = add_pdecl ~vc:true muc d in
  List.fold_left add muc (create_type_decl tdl)
(* Translate the parameter types of a pure declaration; ghost
   parameters are not allowed in logic. *)
let tyl_of_params {muc_theory = tuc} pl =
  let ty_of_param (loc,_,gh,ty) =
    if gh then Loc.errorm ~loc
      "ghost parameters are not allowed in pure declarations";
    ty_of_pty tuc ty in
  List.map ty_of_param pl
(* Add a group of (possibly mutually recursive) logic declarations.
   Phase 1 declares all symbols abstractly so that bodies can refer to
   each other; phase 2 type-checks the bodies; finally, parameters and
   a single definition group are added to the module.
   Fix: the two phase comments had lost their (* ... *) delimiters in
   the source, which is a syntax error; they are restored here. *)
let add_logics muc dl =
  let lsymbols = Hstr.create 17 in
  (* 1. create all symbols and make an environment with these symbols *)
  let create_symbol mkk d =
    let id = create_user_id d.ld_ident in
    let pl = tyl_of_params muc d.ld_params in
    let ty = Opt.map (ty_of_pty muc.muc_theory) d.ld_type in
    let ls = create_lsymbol id pl ty in
    Hstr.add lsymbols d.ld_ident.id_str ls;
    Loc.try2 ~loc:d.ld_loc add_decl mkk (create_param_decl ls) in
  let mkk = List.fold_left create_symbol muc dl in
  (* 2. then type-check all definitions *)
  let type_decl d (abst,defn) =
    let ls = Hstr.find lsymbols d.ld_ident.id_str in
    let create_var (loc,x,_,_) ty =
      let id = match x with
        | Some id -> create_user_id id
        | None -> id_user "_" loc in
      create_vsymbol id ty in
    let vl = List.map2 create_var d.ld_params ls.ls_args in
    let add_var mvs (_,x,_,_) vs = match x with
      | Some {id_str = x} -> Mstr.add_new (DuplicateVar x) x (DTgvar vs) mvs
      | None -> mvs in
    let denv = List.fold_left2 add_var Dterm.denv_empty d.ld_params vl in
    match d.ld_def, d.ld_type with
    | None, _ -> ls :: abst, defn
    | Some e, None -> (* predicate *)
        let f = type_fmla_pure mkk Mstr.empty denv e in
        abst, (make_ls_defn ls vl f) :: defn
    | Some e, Some ty -> (* function *)
        let e = { e with term_desc = Tcast (e, ty) } in
        let t = type_term_pure mkk Mstr.empty denv e in
        abst, (make_ls_defn ls vl t) :: defn in
  let abst,defn = List.fold_right type_decl dl ([],[]) in
  let add_param muc s = add_decl muc (create_param_decl s) in
  let add_logic muc l = add_decl muc (create_logic_decl l) in
  let muc = List.fold_left add_param muc abst in
  if defn = [] then muc else add_logic muc defn
(* Add a group of inductive predicate declarations of sign [s].
   Phase 1 declares the predicate symbols; phase 2 type-checks the
   clauses; declaration errors are re-raised with precise locations.
   Fix: the two phase comments had lost their (* ... *) delimiters in
   the source, which is a syntax error; they are restored here. *)
let add_inductives muc s dl =
  (* 1. create all symbols and make an environment with these symbols *)
  let psymbols = Hstr.create 17 in
  let create_symbol mkk d =
    let id = create_user_id d.in_ident in
    let pl = tyl_of_params muc d.in_params in
    let ps = create_psymbol id pl in
    Hstr.add psymbols d.in_ident.id_str ps;
    Loc.try2 ~loc:d.in_loc add_decl mkk (create_param_decl ps) in
  let mkk = List.fold_left create_symbol muc dl in
  (* 2. then type-check all definitions *)
  let propsyms = Hstr.create 17 in
  let type_decl d =
    let ps = Hstr.find psymbols d.in_ident.id_str in
    let clause (loc, id, f) =
      Hstr.replace propsyms id.id_str loc;
      let f = type_fmla_pure mkk Mstr.empty Dterm.denv_empty f in
      create_prsymbol (create_user_id id), f in
    ps, List.map clause d.in_def in
  let loc_of_id id = Opt.get id.Ident.id_loc in
  try add_decl muc (create_ind_decl s (List.map type_decl dl))
  with
  | ClashSymbol s ->
      Loc.error ~loc:(Hstr.find propsyms s) (ClashSymbol s)
  | InvalidIndDecl (ls,pr) ->
      Loc.error ~loc:(loc_of_id pr.pr_name) (InvalidIndDecl (ls,pr))
  | NonPositiveIndDecl (ls,pr,s) ->
      Loc.error ~loc:(loc_of_id pr.pr_name) (NonPositiveIndDecl (ls,pr,s))
(* Add an axiom/lemma/goal [k] named [s] with body [f]. *)
let add_prop muc k s f =
  let pr = create_prsymbol (create_user_id s) in
  let f = type_fmla_pure muc Mstr.empty Dterm.denv_empty f in
  add_decl muc (create_prop_decl k pr f)
(* parse declarations *)
(* Resolve a module reference: an unqualified name is first looked up
   among the modules of the current [file], then loaded from [env];
   a qualified name is always loaded from [env]. *)
let find_module env file q =
  let m = match q with
    | Qident {id_str = nm} ->
        (try Mstr.find nm file with Not_found -> read_module env [] nm)
    | Qdot (p, {id_str = nm}) -> read_module env (string_list_of_qualid p) nm in
  if Debug.test_flag Glob.flag then
    Glob.use ~kind:"theory" (qloc_last q) m.mod_theory.th_name;
  m
(* Build a module instantiation (clone substitution) from the parsed
   substitution items [s]: each item maps a symbol of the cloned module
   [m] to a symbol/type/proposition-kind of the current module. *)
let type_inst ({muc_theory = tuc} as muc) ({mod_theory = t} as m) s =
  let add_inst s = function
    | CStsym (p,[],PTtyapp (q,[])) ->
        (* direct symbol-to-symbol type instantiation *)
        let ts1 = find_tysymbol_ns t.th_export p in
        let ts2 = find_itysymbol muc q in
        if Mts.mem ts1 s.mi_ty then Loc.error ~loc:(qloc p)
          (ClashSymbol ts1.ts_name.id_string);
        { s with mi_ts = Loc.try4 ~loc:(qloc p) Mts.add_new
            (ClashSymbol ts1.ts_name.id_string) ts1 ts2 s.mi_ts }
    | CStsym (p,tvl,pty) ->
        (* general case: instantiate with a type expression, via a
           freshly created alias symbol; when the alias is trivial
           (a symbol applied to the very same type variables), fall
           back to a symbol-to-symbol instantiation *)
        let ts1 = find_tysymbol_ns t.th_export p in
        let tvl = List.map (fun id -> tv_of_string id.id_str) tvl in
        let ts2 = Loc.try3 ~loc:(qloc p) create_alias_itysymbol
          (id_clone ts1.ts_name) tvl (ity_of_pty muc pty) in
        let ty2 = ity_app ts2 (List.map ity_var ts1.ts_args) [] in
        let check v ty = match ty.ity_node with
          | Ityvar u -> tv_equal u v | _ -> false in
        begin match ty2.ity_node with
        | Ityapp (ts2, tyl, _) | Ityreg { reg_its = ts2; reg_args = tyl }
          when Lists.equal check tvl tyl ->
            if Mts.mem ts1 s.mi_ty then Loc.error ~loc:(qloc p)
              (ClashSymbol ts1.ts_name.id_string);
            { s with mi_ts = Loc.try4 ~loc:(qloc p) Mts.add_new
                (ClashSymbol ts1.ts_name.id_string) ts1 ts2 s.mi_ts }
        | _ ->
            if Mts.mem ts1 s.mi_ts then Loc.error ~loc:(qloc p)
              (ClashSymbol ts1.ts_name.id_string);
            { s with mi_ty = Loc.try4 ~loc:(qloc p) Mts.add_new
                (ClashSymbol ts1.ts_name.id_string) ts1 ty2 s.mi_ty }
        end
    | CSfsym (p,q) ->
        let ls1 = find_fsymbol_ns t.th_export p in
        let ls2 = find_fsymbol tuc q in
        { s with mi_ls = Loc.try4 ~loc:(qloc p) Mls.add_new
            (ClashSymbol ls1.ls_name.id_string) ls1 ls2 s.mi_ls }
    | CSpsym (p,q) ->
        let ls1 = find_psymbol_ns t.th_export p in
        let ls2 = find_psymbol tuc q in
        { s with mi_ls = Loc.try4 ~loc:(qloc p) Mls.add_new
            (ClashSymbol ls1.ls_name.id_string) ls1 ls2 s.mi_ls }
    | CSvsym (p,q) ->
        (* program symbols: both sides must agree on function/constant *)
        let rs1 = find_prog_symbol_ns m.mod_export p in
        let rs2 = find_prog_symbol muc q in
        begin match rs1, rs2 with
        | RS rs1, RS rs2 ->
            { s with mi_rs = Loc.try4 ~loc:(qloc p) Mrs.add_new
                (ClashSymbol rs1.rs_name.id_string) rs1 rs2 s.mi_rs }
        | PV pv1, PV pv2 ->
            { s with mi_pv = Loc.try4 ~loc:(qloc p) Mvs.add_new
                (ClashSymbol pv1.pv_vs.vs_name.id_string) pv1.pv_vs pv2 s.mi_pv }
        | PV _, RS _ ->
            Loc.errorm ~loc:(qloc q) "program constant expected"
        | RS _, PV _ ->
            Loc.errorm ~loc:(qloc q) "program function expected"
        | OO _, _ | _, OO _ ->
            Loc.errorm ~loc:(qloc q) "ambiguous notation"
        end
    | CSxsym (p,q) ->
        let xs1 = find_xsymbol_ns m.mod_export p in
        let xs2 = find_xsymbol muc q in
        { s with mi_xs = Loc.try4 ~loc:(qloc p) Mxs.add_new
            (ClashSymbol xs1.xs_name.id_string) xs1 xs2 s.mi_xs }
    | CSaxiom p ->
        let pr = find_prop_ns t.th_export p in
        { s with mi_pk = Loc.try4 ~loc:(qloc p) Mpr.add_new
            (ClashSymbol pr.pr_name.id_string) pr Paxiom s.mi_pk }
    | CSlemma p ->
        let pr = find_prop_ns t.th_export p in
        { s with mi_pk = Loc.try4 ~loc:(qloc p) Mpr.add_new
            (ClashSymbol pr.pr_name.id_string) pr Plemma s.mi_pk }
    | CSgoal p ->
        let pr = find_prop_ns t.th_export p in
        { s with mi_pk = Loc.try4 ~loc:(qloc p) Mpr.add_new
            (ClashSymbol pr.pr_name.id_string) pr Pgoal s.mi_pk }
    | CSprop k ->
        (* TODO: check for multiple settings *)
        { s with mi_df = k }
  in
  List.fold_left add_inst (empty_mod_inst m) s
(* [add_decl muc env file d] type-checks one parsed declaration [d] and
   adds it to the module-under-construction [muc]. [env] and [file] are
   only used to resolve module references in "use" and "clone". *)
let add_decl muc env file d =
  (* Generate VCs only for modules outside the library path, and only
     when we are not stopping right after type-checking. *)
  let vc = muc.muc_theory.uc_path = [] &&
    Debug.test_noflag debug_type_only in
  match d with
  | Ptree.Dtype dl ->
      add_types muc dl
  | Ptree.Dlogic dl ->
      add_logics muc dl
  | Ptree.Dind (s,dl) ->
      add_inductives muc s dl
  | Ptree.Dprop (k,s,f) ->
      add_prop muc k s f
  | Ptree.Dmeta (id,al) ->
      let tuc = muc.muc_theory in
      (* Convert a parsed meta argument into a typed one. The first
         clause must stay before the general [Mty] clause: a nullary
         type application is passed as a type symbol, not as a type. *)
      let convert = function
        | Ptree.Mty (PTtyapp (q,[]))
            -> MAts (find_tysymbol tuc q)
        | Ptree.Mty ty -> MAty (ty_of_pty tuc ty)
        | Ptree.Mfs q -> MAls (find_fsymbol tuc q)
        | Ptree.Mps q -> MAls (find_psymbol tuc q)
        | Ptree.Max q -> MApr (find_prop_of_kind Paxiom tuc q)
        | Ptree.Mlm q -> MApr (find_prop_of_kind Plemma tuc q)
        | Ptree.Mgl q -> MApr (find_prop_of_kind Pgoal tuc q)
        | Ptree.Mstr s -> MAstr s
        | Ptree.Mint i -> MAint i in
      add_meta muc (lookup_meta id.id_str) (List.map convert al)
  | Ptree.Dlet (id, gh, kind, e) ->
      (* An "any" body inherits the declaration kind (val vs let). *)
      let e = update_any kind e in
      let ld = create_user_id id, gh, kind, dexpr muc denv_empty e in
      add_pdecl ~vc muc (create_let_decl (let_defn ~keep_loc:true ld))
  | Ptree.Drec fdl ->
      let _, rd = drec_defn muc denv_empty fdl in
      add_pdecl ~vc muc (create_let_decl (rec_defn ~keep_loc:true rd))
  | Ptree.Dexn (id, pty, mask) ->
      let ity = ity_of_pty muc pty in
      let xs = create_xsymbol (create_user_id id) ~mask ity in
      add_pdecl ~vc muc (create_exn_decl xs)
  | Ptree.Duse use ->
      use_export muc (find_module env file use)
  | Ptree.Dclone (use, inst) ->
      let m = find_module env file use in
      (* Cloning a non-abstract module duplicates its axioms: warn. *)
      warn_clone_not_abstract (qloc use) m.mod_theory;
      clone_export muc m (type_inst muc m inst)
(* incremental parsing *)
(* The state of one file being parsed incrementally. *)
type slice = {
  env : Env.env;            (* environment used to resolve "use"/"clone" *)
  path : Env.pathname;      (* library path of the file; [] for toplevel *)
  mutable file : pmodule Mstr.t;   (* modules already completed, by name *)
  mutable muc : pmodule_uc option; (* module currently open, if any *)
}
(* Stack of files currently being processed; the top is the active file. *)
let state : slice Stack.t = Stack.create ()
(* Start parsing a new file located at [path] in environment [env]. *)
let open_file env path =
  (* Either no file is being processed, or the enclosing file still has
     its current module open. *)
  assert (Stack.is_empty state || (Stack.top state).muc <> None);
  let fresh = { env; path; file = Mstr.empty; muc = None } in
  Stack.push fresh state
(* Finish the current file and return its completed modules, by name.
   The file must not have a module still open. *)
let close_file () =
  assert (not (Stack.is_empty state) && (Stack.top state).muc = None);
  let slice = Stack.pop state in
  slice.file
(* Open a new module named [id] in the current file. *)
let open_module ({id_str = nm; id_loc = loc} as id) =
  assert (not (Stack.is_empty state) && (Stack.top state).muc = None);
  let top = Stack.top state in
  (* Reject duplicate module names within a single file. *)
  if Mstr.mem nm top.file then
    Loc.errorm ~loc "module %s is already defined in this file" nm;
  top.muc <- Some (create_module top.env ~path:top.path (create_user_id id))
(* Close the module currently open in the top slice and record it in the
   file's module map. NOTE(review): this [let] is not [rec], so the inner
   call to [close_module] below resolves to [Pmodule.close_module], not
   to this function. *)
let close_module loc =
  assert (not (Stack.is_empty state) && (Stack.top state).muc <> None);
  let slice = Stack.top state in
  (* In parse-only mode the module is simply discarded. *)
  if Debug.test_noflag debug_parse_only then begin
    let m = Loc.try1 ~loc close_module (Opt.get slice.muc) in
    if Debug.test_flag Glob.flag then
      Glob.def ~kind:"theory" m.mod_theory.th_name;
    slice.file <- Mstr.add m.mod_theory.th_name.id_string m slice.file;
  end;
  slice.muc <- None
(* Return the module currently open in [slice]. When no module is open,
   implicitly create one named "Top" — this is only permitted for
   non-library (toplevel) files that contain no completed module yet. *)
let top_muc_on_demand loc slice =
  match slice.muc with
  | None ->
      assert (Mstr.is_empty slice.file);
      if slice.path <> [] then
        Loc.errorm ~loc
          "All declarations in library files must be inside modules";
      let fresh = create_module slice.env ~path:[] (id_fresh "Top") in
      slice.muc <- Some fresh;
      fresh
  | Some muc -> muc
(* Open a named scope inside the current module (creating an implicit
   "Top" module if needed). NOTE(review): the inner call to [open_scope]
   resolves to [Pmodule.open_scope] — this [let] is not [rec]. *)
let open_scope loc nm =
  assert (not (Stack.is_empty state));
  let slice = Stack.top state in
  let muc = top_muc_on_demand loc slice in
  (* Skipped entirely in parse-only mode. *)
  if Debug.test_noflag debug_parse_only then
    slice.muc <- Some (open_scope muc nm.id_str)
(* Close the innermost scope of the current module; [import] controls
   whether its symbols are re-exported into the enclosing namespace.
   NOTE(review): the inner call to [close_scope] resolves to
   [Pmodule.close_scope] — this [let] is not [rec]. *)
let close_scope loc ~import =
  assert (not (Stack.is_empty state) && (Stack.top state).muc <> None);
  if Debug.test_noflag debug_parse_only then
    let slice = Stack.top state in
    let muc = Loc.try1 ~loc (close_scope ~import) (Opt.get slice.muc) in
    slice.muc <- Some muc
(* Import the scope denoted by qualified name [q] into the current
   module (creating an implicit "Top" module if needed). NOTE(review):
   the inner call to [import_scope] resolves to [Pmodule.import_scope]
   — this [let] is not [rec]. *)
let import_scope loc q =
  assert (not (Stack.is_empty state));
  let slice = Stack.top state in
  let muc = top_muc_on_demand loc slice in
  if Debug.test_noflag debug_parse_only then
    let muc = Loc.try2 ~loc import_scope muc (string_list_of_qualid q) in
    slice.muc <- Some muc
(* Entry point used by the parser: type-check declaration [d] and add it
   to the current module (creating an implicit "Top" module if needed).
   NOTE(review): the inner call to [add_decl] resolves to the earlier
   [add_decl muc env file d] of this file — this [let] is not [rec]. *)
let add_decl loc d =
  assert (not (Stack.is_empty state));
  let slice = Stack.top state in
  let muc = top_muc_on_demand loc slice in
  (* Skipped entirely in parse-only mode. *)
  if Debug.test_noflag debug_parse_only then
    let muc = Loc.try4 ~loc add_decl muc slice.env slice.file d in
    slice.muc <- Some muc
(** Exception printing *)
(* Pretty-print the typing exceptions of this module; any exception we
   do not recognize is re-raised for the next registered printer. *)
let () = Exn_printer.register (fun fmt exn ->
  match exn with
  | UnboundSymbol q ->
      Format.fprintf fmt "unbound symbol '%a'" print_qualid q
  | _ -> raise exn)
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/parser/typing.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
* debug flags
* symbol lookup
FIXME: this is incorrect, but we cannot
know the correct symbol at this stage
* Parsing types
* Typing patterns, terms, and formulas
creative indentation ahead
never
track the use of labels
check for impact
check if the label has actually been defined
* typing program expressions
records
patterns
creative indentation ahead
specifications
normally, we have no reason to call "old" without
a pvsymbol, but we make an exception for an empty
ident to check if the label is valid at Tat
abstract values
expressions
* Typing declarations
empty records are automatically private, otherwise they are
just unit types that can be neither constructed nor refined
predicate
function
parse declarations
TODO: check for multiple settings
incremental parsing
* Exception printing | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
open Wstdlib
open Ident
open Ptree
open Ty
open Term
open Decl
open Theory
open Dterm
open Ity
open Expr
open Pdecl
open Pmodule
let debug_parse_only = Debug.register_flag "parse_only"
~desc:"Stop@ after@ parsing."
let debug_type_only = Debug.register_flag "type_only"
~desc:"Stop@ after@ type-checking."
let rec qloc = function
| Qdot (p, id) -> Loc.join (qloc p) id.id_loc
| Qident id -> id.id_loc
let qloc_last = function
| Qdot (_, id) | Qident id -> id.id_loc
let rec print_qualid fmt = function
| Qdot (p, id) ->
Format.fprintf fmt "%a.%a" print_qualid p Ident.print_decoded id.id_str
| Qident id -> Ident.print_decoded fmt id.id_str
let string_list_of_qualid q =
let rec sloq acc = function
| Qdot (p, id) -> sloq (id.id_str :: acc) p
| Qident id -> id.id_str :: acc in
sloq [] q
exception UnboundSymbol of qualid
let find_qualid get_id find ns q =
let sl = string_list_of_qualid q in
let r = try find ns sl with Not_found ->
Loc.error ~loc:(qloc q) (UnboundSymbol q) in
if Debug.test_flag Glob.flag then Glob.use ~kind:"" (qloc_last q) (get_id r);
r
let find_prop_ns ns q = find_qualid (fun pr -> pr.pr_name) ns_find_pr ns q
let find_tysymbol_ns ns q = find_qualid (fun ts -> ts.ts_name) ns_find_ts ns q
let find_lsymbol_ns ns q = find_qualid (fun ls -> ls.ls_name) ns_find_ls ns q
let find_fsymbol_ns ns q =
let ls = find_lsymbol_ns ns q in
if ls.ls_value <> None then ls else
Loc.error ~loc:(qloc q) (FunctionSymbolExpected ls)
let find_psymbol_ns ns q =
let ls = find_lsymbol_ns ns q in
if ls.ls_value = None then ls else
Loc.error ~loc:(qloc q) (PredicateSymbolExpected ls)
let find_tysymbol tuc q = find_tysymbol_ns (Theory.get_namespace tuc) q
let find_lsymbol tuc q = find_lsymbol_ns (Theory.get_namespace tuc) q
let find_fsymbol tuc q = find_fsymbol_ns (Theory.get_namespace tuc) q
let find_psymbol tuc q = find_psymbol_ns (Theory.get_namespace tuc) q
let find_prop tuc q = find_prop_ns (Theory.get_namespace tuc) q
let find_prop_of_kind k tuc q =
let pr = find_prop tuc q in
match (Mid.find pr.pr_name tuc.uc_known).d_node with
| Dind _ when k = Paxiom -> pr
| Dprop (l,_,_) when l = k -> pr
| _ -> Loc.errorm ~loc:(qloc q) "proposition %a is not %s"
print_qualid q (match k with
| Plemma -> "a lemma" | Paxiom -> "an axiom" | Pgoal -> "a goal")
let find_itysymbol_ns ns q =
find_qualid (fun s -> s.its_ts.ts_name) Pmodule.ns_find_its ns q
let find_xsymbol_ns ns q =
find_qualid (fun s -> s.xs_name) Pmodule.ns_find_xs ns q
let find_prog_symbol_ns ns p =
let get_id_ps = function
| PV pv -> pv.pv_vs.vs_name
| RS rs -> rs.rs_name
| OO ss -> (Srs.choose ss).rs_name in
find_qualid get_id_ps ns_find_prog_symbol ns p
let ty_of_pty ns pty =
let rec get_ty = function
| PTtyvar {id_str = x} ->
ty_var (tv_of_string x)
| PTtyapp (q, tyl) ->
let ts = find_tysymbol_ns ns q in
let tyl = List.map get_ty tyl in
Loc.try2 ~loc:(qloc q) ty_app ts tyl
| PTtuple tyl ->
let s = its_tuple (List.length tyl) in
ty_app s.its_ts (List.map get_ty tyl)
| PTarrow (ty1, ty2) ->
ty_func (get_ty ty1) (get_ty ty2)
| PTpure ty | PTparen ty ->
get_ty ty
in
get_ty pty
let dty_of_pty ns pty =
Dterm.dty_of_ty (ty_of_pty ns pty)
let dty_of_opt ns = function
| Some pty -> dty_of_pty ns pty
| None -> Dterm.dty_fresh ()
* typing using destructive type variables
parsed trees intermediate trees typed trees
( Ptree ) ( Dterm ) ( Term )
-----------------------------------------------------------
ppure_type ---dty--- > > ty
lexpr --dterm-- > dterm --term-- > term
parsed trees intermediate trees typed trees
(Ptree) (Dterm) (Term)
-----------------------------------------------------------
ppure_type ---dty---> dty ---ty---> ty
lexpr --dterm--> dterm --term--> term
*)
let create_user_id {id_str = n; id_ats = attrs; id_loc = loc} =
let get_attrs (attrs, loc) = function
| ATstr attr -> Sattr.add attr attrs, loc | ATpos loc -> attrs, loc in
let attrs, loc = List.fold_left get_attrs (Sattr.empty, loc) attrs in
id_user ~attrs n loc
let parse_record ~loc ns km get_val fl =
let fl = List.map (fun (q,e) -> find_lsymbol_ns ns q, e) fl in
let cs,pjl,flm = Loc.try2 ~loc parse_record km fl in
let get_val pj = get_val cs pj (Mls.find_opt pj flm) in
cs, List.map get_val pjl
let rec dpattern ns km { pat_desc = desc; pat_loc = loc } =
match desc with
| Ptree.Pparen p -> dpattern ns km p
Dterm.dpattern ~loc (match desc with
| Ptree.Pwild -> DPwild
| Ptree.Pvar x -> DPvar (create_user_id x)
| Ptree.Papp (q,pl) ->
let pl = List.map (dpattern ns km) pl in
DPapp (find_lsymbol_ns ns q, pl)
| Ptree.Ptuple pl ->
let pl = List.map (dpattern ns km) pl in
DPapp (fs_tuple (List.length pl), pl)
| Ptree.Prec fl ->
let get_val _ _ = function
| Some p -> dpattern ns km p
| None -> Dterm.dpattern DPwild in
let cs,fl = parse_record ~loc ns km get_val fl in
DPapp (cs,fl)
| Ptree.Pas (p,x,false) -> DPas (dpattern ns km p, create_user_id x)
| Ptree.Por (p,q) -> DPor (dpattern ns km p, dpattern ns km q)
| Ptree.Pcast (p,ty) -> DPcast (dpattern ns km p, dty_of_pty ns ty)
| Ptree.Pghost _ | Ptree.Pas (_,_,true) ->
Loc.errorm ~loc "ghost patterns are only allowed in programs")
let quant_var ns (loc, id, gh, ty) =
if gh then Loc.errorm ~loc "ghost variables are only allowed in programs";
Opt.map create_user_id id, dty_of_opt ns ty, Some loc
let loc_cutoff loc13 loc23 loc2 =
let f,l,b,e = Loc.get loc13 in
let _,_,_,w = Loc.get loc23 in
let _,_,_,m = Loc.get loc2 in
Loc.user_position f l b (e - (w - m))
let is_reusable dt = match dt.dt_node with
| DTvar _ | DTgvar _ | DTconst _ | DTtrue | DTfalse -> true
| DTapp (_,[]) -> true
| _ -> false
let mk_var crcmap n dt =
let dty = match dt.dt_dty with
| None -> dty_of_ty ty_bool
| Some dty -> dty in
Dterm.dterm crcmap ?loc:dt.dt_loc (DTvar (n, dty))
let mk_let crcmap ~loc n dt node =
DTlet (dt, id_user n loc, Dterm.dterm crcmap ~loc node)
let mk_closure crcmap loc ls =
let mk dt = Dterm.dterm crcmap ~loc dt in
let mk_v i _ =
Some (id_user ("y" ^ string_of_int i) loc), dty_fresh (), None in
let mk_t (id, dty, _) = mk (DTvar ((Opt.get id).pre_name, dty)) in
let vl = Lists.mapi mk_v ls.ls_args in
DTquant (DTlambda, vl, [], mk (DTapp (ls, List.map mk_t vl)))
let at_uses = Hstr.create 5
let rec dterm ns km crcmap gvars at denv {term_desc = desc; term_loc = loc} =
let func_app e el =
List.fold_left (fun e1 (loc, e2) ->
DTfapp (Dterm.dterm crcmap ~loc e1, e2)) e el
in
let rec apply_ls loc ls al l el = match l, el with
| (_::l), (e::el) -> apply_ls loc ls (e::al) l el
| [], _ -> func_app (DTapp (ls, List.rev_map snd al)) el
| _, [] -> func_app (mk_closure crcmap loc ls) (List.rev_append al el)
in
let qualid_app q el = match gvars at q with
| Some v ->
begin match at with
let u = Opt.get (gvars None q) in
if not (pv_equal v u) then
Hstr.replace at_uses l true
| None -> ()
end;
func_app (DTgvar v.pv_vs) el
| None ->
let ls = find_lsymbol_ns ns q in
apply_ls (qloc q) ls [] ls.ls_args el
in
let qualid_app q el = match q with
| Qident {id_str = n} ->
(match denv_get_opt denv n with
| Some d -> func_app d el
| None -> qualid_app q el)
| _ -> qualid_app q el
in
let rec unfold_app e1 e2 el = match e1.term_desc with
| Ptree.Tapply (e11,e12) ->
let e12 = dterm ns km crcmap gvars at denv e12 in
unfold_app e11 e12 ((e1.term_loc, e2)::el)
| Ptree.Tident q ->
qualid_app q ((e1.term_loc, e2)::el)
| _ ->
func_app (DTfapp (dterm ns km crcmap gvars at denv e1, e2)) el
in
Dterm.dterm crcmap ~loc (match desc with
| Ptree.Tident q ->
qualid_app q []
| Ptree.Tidapp (q, tl) ->
let tl = List.map (dterm ns km crcmap gvars at denv) tl in
DTapp (find_lsymbol_ns ns q, tl)
| Ptree.Tapply (e1, e2) ->
unfold_app e1 (dterm ns km crcmap gvars at denv e2) []
| Ptree.Ttuple tl ->
let tl = List.map (dterm ns km crcmap gvars at denv) tl in
DTapp (fs_tuple (List.length tl), tl)
| Ptree.Tinfix (e1, op1, e23)
| Ptree.Tinnfix (e1, op1, e23) ->
let apply loc de1 op de2 =
if op.id_str = Ident.op_neq then
let op = { op with id_str = Ident.op_equ } in
let ls = find_lsymbol_ns ns (Qident op) in
DTnot (Dterm.dterm crcmap ~loc (DTapp (ls, [de1;de2])))
else
DTapp (find_lsymbol_ns ns (Qident op), [de1;de2]) in
let rec chain loc de1 op1 = function
| { term_desc = Ptree.Tinfix (e2, op2, e3); term_loc = loc23 } ->
let de2 = dterm ns km crcmap gvars at denv e2 in
let loc12 = loc_cutoff loc loc23 e2.term_loc in
let de12 = Dterm.dterm crcmap ~loc:loc12 (apply loc12 de1 op1 de2) in
let de23 = Dterm.dterm crcmap ~loc:loc23 (chain loc23 de2 op2 e3) in
DTbinop (DTand, de12, de23)
| e23 ->
apply loc de1 op1 (dterm ns km crcmap gvars at denv e23) in
chain loc (dterm ns km crcmap gvars at denv e1) op1 e23
| Ptree.Tconst (Number.ConstInt _ as c) ->
DTconst (c, dty_int)
| Ptree.Tconst (Number.ConstReal _ as c) ->
DTconst (c, dty_real)
| Ptree.Tlet (x, e1, e2) ->
let id = create_user_id x in
let e1 = dterm ns km crcmap gvars at denv e1 in
let denv = denv_add_let denv e1 id in
let e2 = dterm ns km crcmap gvars at denv e2 in
DTlet (e1, id, e2)
| Ptree.Tcase (e1, bl) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let branch (p, e) =
let p = dpattern ns km p in
let denv = denv_add_term_pat denv p e1 in
p, dterm ns km crcmap gvars at denv e in
DTcase (e1, List.map branch bl)
| Ptree.Tif (e1, e2, e3) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let e2 = dterm ns km crcmap gvars at denv e2 in
let e3 = dterm ns km crcmap gvars at denv e3 in
DTif (e1, e2, e3)
| Ptree.Ttrue ->
DTtrue
| Ptree.Tfalse ->
DTfalse
| Ptree.Tnot e1 ->
DTnot (dterm ns km crcmap gvars at denv e1)
| Ptree.Tbinop (e1, Dterm.DTiff, e23)
| Ptree.Tbinnop (e1, Dterm.DTiff, e23) ->
let rec chain loc de1 = function
| { term_desc = Ptree.Tbinop (e2, DTiff, e3); term_loc = loc23 } ->
let de2 = dterm ns km crcmap gvars at denv e2 in
let loc12 = loc_cutoff loc loc23 e2.term_loc in
let de12 = Dterm.dterm crcmap ~loc:loc12 (DTbinop (DTiff, de1, de2)) in
let de23 = Dterm.dterm crcmap ~loc:loc23 (chain loc23 de2 e3) in
DTbinop (DTand, de12, de23)
| { term_desc = Ptree.Tbinop (_, DTimplies, _); term_loc = loc23 } ->
Loc.errorm ~loc:loc23 "An unparenthesized implication cannot be \
placed at the right hand side of an equivalence"
| e23 ->
DTbinop (DTiff, de1, (dterm ns km crcmap gvars at denv e23)) in
chain loc (dterm ns km crcmap gvars at denv e1) e23
| Ptree.Tbinop (e1, op, e2)
| Ptree.Tbinnop (e1, op, e2) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let e2 = dterm ns km crcmap gvars at denv e2 in
DTbinop (op, e1, e2)
| Ptree.Tquant (q, uqu, trl, e1) ->
let qvl = List.map (quant_var ns) uqu in
let denv = denv_add_quant denv qvl in
let dterm e = dterm ns km crcmap gvars at denv e in
let trl = List.map (List.map dterm) trl in
let e1 = dterm e1 in
DTquant (q, qvl, trl, e1)
| Ptree.Trecord fl ->
let get_val _cs pj = function
| Some e -> dterm ns km crcmap gvars at denv e
| None -> Loc.error ~loc (RecordFieldMissing pj) in
let cs, fl = parse_record ~loc ns km get_val fl in
DTapp (cs, fl)
| Ptree.Tupdate (e1, fl) ->
let e1 = dterm ns km crcmap gvars at denv e1 in
let re = is_reusable e1 in
let v = if re then e1 else mk_var crcmap "q " e1 in
let get_val _ pj = function
| Some e -> dterm ns km crcmap gvars at denv e
| None -> Dterm.dterm crcmap ~loc (DTapp (pj,[v])) in
let cs, fl = parse_record ~loc ns km get_val fl in
let d = DTapp (cs, fl) in
if re then d else mk_let crcmap ~loc "q " e1 d
| Ptree.Tat (e1, ({id_str = l; id_loc = loc} as id)) ->
Hstr.add at_uses l false;
let id = { id with id_str = "" } in
ignore (Loc.try2 ~loc gvars (Some l) (Qident id));
let e1 = dterm ns km crcmap gvars (Some l) denv e1 in
if not (Hstr.find at_uses l) then Loc.errorm ~loc
"this `at'/`old' operator is never used";
Hstr.remove at_uses l;
DTattr (e1, Sattr.empty)
| Ptree.Tscope (q, e1) ->
let ns = import_namespace ns (string_list_of_qualid q) in
DTattr (dterm ns km crcmap gvars at denv e1, Sattr.empty)
| Ptree.Tattr (ATpos uloc, e1) ->
DTuloc (dterm ns km crcmap gvars at denv e1, uloc)
| Ptree.Tattr (ATstr attr, e1) ->
DTattr (dterm ns km crcmap gvars at denv e1, Sattr.singleton attr)
| Ptree.Tcast ({term_desc = Ptree.Tconst c}, pty) ->
DTconst (c, dty_of_pty ns pty)
| Ptree.Tcast (e1, pty) ->
let d1 = dterm ns km crcmap gvars at denv e1 in
DTcast (d1, dty_of_pty ns pty))
let no_gvars at q = match at with
| Some _ -> Loc.errorm ~loc:(qloc q)
"`at' and `old' can only be used in program annotations"
| None -> None
let type_term_in_namespace ns km crcmap t =
let t = dterm ns km crcmap no_gvars None Dterm.denv_empty t in
Dterm.term ~strict:true ~keep_loc:true t
let type_fmla_in_namespace ns km crcmap f =
let f = dterm ns km crcmap no_gvars None Dterm.denv_empty f in
Dterm.fmla ~strict:true ~keep_loc:true f
open Dexpr
let ty_of_pty tuc = ty_of_pty (get_namespace tuc)
let get_namespace muc = List.hd muc.Pmodule.muc_import
let dterm muc =
let uc = muc.muc_theory in
dterm (Theory.get_namespace uc) uc.uc_known uc.uc_crcmap
let find_xsymbol muc q = find_xsymbol_ns (get_namespace muc) q
let find_itysymbol muc q = find_itysymbol_ns (get_namespace muc) q
let find_prog_symbol muc q = find_prog_symbol_ns (get_namespace muc) q
let find_special muc test nm q =
match find_prog_symbol muc q with
| RS s when test s -> s
| OO ss ->
begin match Srs.elements (Srs.filter test ss) with
| [s] -> s
| _::_ -> Loc.errorm ~loc:(qloc q)
"Ambiguous %s notation: %a" nm print_qualid q
| [] -> Loc.errorm ~loc:(qloc q) "Not a %s: %a" nm print_qualid q
end
| _ -> Loc.errorm ~loc:(qloc q) "Not a %s: %a" nm print_qualid q
let ity_of_pty muc pty =
let rec get_ity = function
| PTtyvar {id_str = x} ->
ity_var (tv_of_string x)
| PTtyapp (q, tyl) ->
let s = find_itysymbol_ns (get_namespace muc) q in
let tyl = List.map get_ity tyl in
Loc.try3 ~loc:(qloc q) ity_app s tyl []
| PTtuple tyl ->
ity_tuple (List.map get_ity tyl)
| PTarrow (ty1, ty2) ->
ity_func (get_ity ty1) (get_ity ty2)
| PTpure ty ->
ity_purify (get_ity ty)
| PTparen ty ->
get_ity ty
in
get_ity pty
let dity_of_pty muc pty =
Dexpr.dity_of_ity (ity_of_pty muc pty)
let dity_of_opt muc = function
| Some pty -> dity_of_pty muc pty
| None -> Dexpr.dity_fresh ()
let find_record_field muc q =
let test rs = rs.rs_field <> None in
find_special muc test "record field" q
let find_record_field2 muc (q,e) = find_record_field muc q, e
let parse_record muc fll =
we assume that every rsymbol in fll was resolved
using find_record_field , so they are all fields
using find_record_field, so they are all fields *)
let ls_of_rs rs = match rs.rs_logic with
| RLls ls -> ls | _ -> assert false in
let rs = match fll with
| (rs, _)::_ -> rs
| [] -> raise EmptyRecord in
let its = match rs.rs_cty.cty_args with
| [{pv_ity = {ity_node = (Ityreg {reg_its = s} | Ityapp (s,_,_))}}] -> s
| _ -> raise (BadRecordField (ls_of_rs rs)) in
let itd = find_its_defn muc.muc_known its in
let check v s = match s.rs_field with
| Some u -> pv_equal v u
| _ -> false in
let cs = match itd.itd_constructors with
| [cs] when Lists.equal check cs.rs_cty.cty_args itd.itd_fields -> cs
| _ -> raise (BadRecordField (ls_of_rs rs)) in
let pjs = Srs.of_list itd.itd_fields in
let flm = List.fold_left (fun m (pj,v) -> if Srs.mem pj pjs then
Mrs.add_new (DuplicateRecordField (ls_of_rs pj)) pj v m
else raise (BadRecordField (ls_of_rs pj))) Mrs.empty fll in
cs, itd.itd_fields, flm
let parse_record ~loc muc get_val fl =
let fl = List.map (find_record_field2 muc) fl in
let cs,pjl,flm = Loc.try2 ~loc parse_record muc fl in
let get_val pj = get_val cs pj (Mrs.find_opt pj flm) in
cs, List.map get_val pjl
let find_constructor muc q =
let test rs = match rs.rs_logic with
| RLls {ls_constr = c} -> c > 0
| _ -> false in
find_special muc test "constructor" q
let rec dpattern muc gh { pat_desc = desc; pat_loc = loc } =
match desc with
| Ptree.Pparen p -> dpattern muc gh p
| Ptree.Pghost p -> dpattern muc true p
Dexpr.dpattern ~loc (match desc with
| Ptree.Pwild -> DPwild
| Ptree.Pparen _ | Ptree.Pghost _ -> assert false
| Ptree.Pvar x -> DPvar (create_user_id x, gh)
| Ptree.Papp (q,pl) ->
DPapp (find_constructor muc q, List.map (dpattern muc gh) pl)
| Ptree.Prec fl ->
let get_val _ _ = function
| Some p -> dpattern muc gh p
| None -> Dexpr.dpattern DPwild in
let cs,fl = parse_record ~loc muc get_val fl in
DPapp (cs,fl)
| Ptree.Ptuple pl ->
DPapp (rs_tuple (List.length pl), List.map (dpattern muc gh) pl)
| Ptree.Pcast (p,pty) -> DPcast (dpattern muc gh p, dity_of_pty muc pty)
| Ptree.Pas (p,x,g) -> DPas (dpattern muc gh p, create_user_id x, gh || g)
| Ptree.Por (p,q) -> DPor (dpattern muc gh p, dpattern muc gh q))
let dpattern muc pat = dpattern muc false pat
let find_global_pv muc q = try match find_prog_symbol muc q with
| PV v -> Some v | _ -> None with _ -> None
let find_local_pv muc lvm q = match q with
| Qdot _ -> find_global_pv muc q
| Qident id -> let ovs = Mstr.find_opt id.id_str lvm in
if ovs = None then find_global_pv muc q else ovs
let mk_gvars muc lvm old = fun at q ->
match find_local_pv muc lvm q, at with
| Some v, Some l -> Some (old l v)
| None, Some l ->
begin match q with
| Qident {id_str = ""} -> Opt.map (old l) None
| _ -> None end
| v, None -> v
let type_term muc lvm old t =
let gvars = mk_gvars muc lvm old in
let t = dterm muc gvars None Dterm.denv_empty t in
Dterm.term ~strict:true ~keep_loc:true t
let type_fmla muc lvm old f =
let gvars = mk_gvars muc lvm old in
let f = dterm muc gvars None Dterm.denv_empty f in
Dterm.fmla ~strict:true ~keep_loc:true f
let dpre muc pl lvm old =
let dpre f = type_fmla muc lvm old f in
List.map dpre pl
let dpost muc ql lvm old ity =
let rec dpost (loc,pfl) = match pfl with
| [{ pat_desc = Ptree.Pparen p; pat_loc = loc}, f] ->
dpost (loc, [p,f])
| [{ pat_desc = Ptree.Pwild | Ptree.Ptuple [] }, f] ->
let v = create_pvsymbol (id_fresh "result") ity in
v, Loc.try3 ~loc type_fmla muc lvm old f
| [{ pat_desc = Ptree.Pvar id }, f] ->
let v = create_pvsymbol (create_user_id id) ity in
let lvm = Mstr.add id.id_str v lvm in
v, Loc.try3 ~loc type_fmla muc lvm old f
| _ ->
let v = create_pvsymbol (id_fresh "result") ity in
let i = { id_str = "(null)"; id_loc = loc; id_ats = [] } in
let t = { term_desc = Tident (Qident i); term_loc = loc } in
let f = { term_desc = Ptree.Tcase (t, pfl); term_loc = loc } in
let lvm = Mstr.add "(null)" v lvm in
v, Loc.try3 ~loc type_fmla muc lvm old f in
List.map dpost ql
let dxpost muc ql lvm xsm old =
let add_exn (q,pf) m =
let xs = match q with
| Qident i ->
begin try Mstr.find i.id_str xsm with
| Not_found -> find_xsymbol muc q end
| _ -> find_xsymbol muc q in
Mxs.change (fun l -> match pf, l with
| Some pf, Some l -> Some (pf :: l)
| Some pf, None -> Some (pf :: [])
| None, None -> Some []
| None, Some _ -> l) xs m in
let mk_xpost loc xs pfl =
if pfl = [] then [] else
dpost muc [loc,pfl] lvm old xs.xs_ity in
let exn_map (loc,xpfl) =
let m = List.fold_right add_exn xpfl Mxs.empty in
Mxs.mapi (fun xs pfl -> mk_xpost loc xs pfl) m in
let add_map ql m =
Mxs.union (fun _ l r -> Some (l @ r)) (exn_map ql) m in
List.fold_right add_map ql Mxs.empty
let dreads muc rl lvm =
let dreads q = match find_local_pv muc lvm q with Some v -> v
| None -> Loc.errorm ~loc:(qloc q) "Not a variable: %a" print_qualid q in
List.map dreads rl
let dwrites muc wl lvm =
let old _ _ = Loc.errorm
"`at' and `old' cannot be used in the `writes' clause" in
let dwrites t = type_term muc lvm old t in
List.map dwrites wl
let find_variant_ls muc q = match find_lsymbol muc.muc_theory q with
| { ls_args = [u;v]; ls_value = None } as ls when ty_equal u v -> ls
| s -> Loc.errorm ~loc:(qloc q) "Not an order relation: %a" Pretty.print_ls s
let dvariant muc varl lvm _xsm old =
let dvar t = type_term muc lvm old t in
let dvar (t,q) = dvar t, Opt.map (find_variant_ls muc) q in
List.map dvar varl
let dspec muc sp lvm xsm old ity = {
ds_pre = dpre muc sp.sp_pre lvm old;
ds_post = dpost muc sp.sp_post lvm old ity;
ds_xpost = dxpost muc sp.sp_xpost lvm xsm old;
ds_reads = dreads muc sp.sp_reads lvm;
ds_writes = dwrites muc sp.sp_writes lvm;
ds_checkrw = sp.sp_checkrw;
ds_diverge = sp.sp_diverge; }
let dspec_no_variant muc sp = match sp.sp_variant with
| ({term_loc = loc},_)::_ ->
Loc.errorm ~loc "unexpected 'variant' clause"
| _ -> dspec muc sp
let dassert muc f lvm _xsm old = type_fmla muc lvm old f
let dinvariant muc f lvm _xsm old = dpre muc f lvm old
let dparam muc (_,id,gh,pty) =
Opt.map create_user_id id, gh, dity_of_pty muc pty
let dbinder muc (_,id,gh,opt) =
Opt.map create_user_id id, gh, dity_of_opt muc opt
let is_reusable de = match de.de_node with
| DEvar _ | DEsym _ -> true | _ -> false
let mk_var n de =
Dexpr.dexpr ?loc:de.de_loc (DEvar (n, de.de_dvty))
let mk_let ~loc n de node =
let de1 = Dexpr.dexpr ~loc node in
DElet ((id_user n loc, false, RKnone, de), de1)
let update_any kind e = match e.expr_desc with
| Ptree.Eany (pl, _, pty, msk, sp) ->
{ e with expr_desc = Ptree.Eany (pl, kind, pty, msk, sp) }
| _ -> e
let local_kind = function
| RKfunc | RKpred -> RKlocal
| k -> k
let rec eff_dterm muc denv {term_desc = desc; term_loc = loc} =
let expr_app loc e el =
List.fold_left (fun e1 e2 ->
DEapp (Dexpr.dexpr ~loc e1, e2)) e el
in
let qualid_app loc q el =
let e = try DEsym (find_prog_symbol muc q) with
| _ -> DEls_pure (find_lsymbol muc.muc_theory q, false) in
expr_app loc e el
in
let qualid_app loc q el = match q with
| Qident {id_str = n} ->
(match denv_get_opt denv n with
| Some d -> expr_app loc d el
| None -> qualid_app loc q el)
| _ -> qualid_app loc q el
in
Dexpr.dexpr ~loc (match desc with
| Ptree.Tident q ->
qualid_app loc q []
| Ptree.Tidapp (q, [e1]) ->
qualid_app loc q [eff_dterm muc denv e1]
| Ptree.Tapply (e1, e2) ->
DEapp (eff_dterm muc denv e1, eff_dterm muc denv e2)
| Ptree.Tscope (q, e1) ->
let muc = open_scope muc "dummy" in
let muc = import_scope muc (string_list_of_qualid q) in
DEattr (eff_dterm muc denv e1, Sattr.empty)
| Ptree.Tattr (ATpos uloc, e1) ->
DEuloc (eff_dterm muc denv e1, uloc)
| Ptree.Tattr (ATstr attr, e1) ->
DEattr (eff_dterm muc denv e1, Sattr.singleton attr)
| Ptree.Tcast (e1, pty) ->
let d1 = eff_dterm muc denv e1 in
DEcast (d1, dity_of_pty muc pty)
| Ptree.Tat _ -> Loc.errorm ~loc "`at' and `old' cannot be used here"
| Ptree.Tidapp _ | Ptree.Tconst _ | Ptree.Tinfix _ | Ptree.Tinnfix _
| Ptree.Ttuple _ | Ptree.Tlet _ | Ptree.Tcase _ | Ptree.Tif _
| Ptree.Ttrue | Ptree.Tfalse | Ptree.Tnot _ | Ptree.Tbinop _ | Ptree.Tbinnop _
| Ptree.Tquant _ | Ptree.Trecord _ | Ptree.Tupdate _ ->
Loc.errorm ~loc "unsupported effect expression")
let rec dexpr muc denv {expr_desc = desc; expr_loc = loc} =
let expr_app loc e el =
List.fold_left (fun e1 e2 ->
DEapp (Dexpr.dexpr ~loc e1, e2)) e el
in
let qualid_app loc q el =
let e = try DEsym (find_prog_symbol muc q) with
| _ -> DEls_pure (find_lsymbol muc.muc_theory q, false) in
expr_app loc e el
in
let qualid_app loc q el = match q with
| Qident {id_str = n} ->
(match denv_get_opt denv n with
| Some d -> expr_app loc d el
| None -> qualid_app loc q el)
| _ -> qualid_app loc q el
in
let qualid_app_pure loc q el =
let e = match find_global_pv muc q with
| Some v -> DEpv_pure v
| None -> DEls_pure (find_lsymbol muc.muc_theory q, true) in
expr_app loc e el
in
let qualid_app_pure loc q el = match q with
| Qident {id_str = n} ->
(match denv_get_pure_opt denv n with
| Some d -> expr_app loc d el
| None -> qualid_app_pure loc q el)
| _ -> qualid_app_pure loc q el
in
let find_dxsymbol q = match q with
| Qident {id_str = n} ->
(try denv_get_exn denv n with _
-> DEgexn (find_xsymbol muc q))
| _ -> DEgexn (find_xsymbol muc q)
in
Dexpr.dexpr ~loc begin match desc with
| Ptree.Eident q ->
qualid_app loc q []
| Ptree.Eidpur q ->
qualid_app_pure loc q []
| Ptree.Eidapp (q, el) ->
qualid_app loc q (List.map (dexpr muc denv) el)
| Ptree.Eapply (e1, e2) ->
DEapp (dexpr muc denv e1, dexpr muc denv e2)
| Ptree.Etuple el ->
let e = DEsym (RS (rs_tuple (List.length el))) in
expr_app loc e (List.map (dexpr muc denv) el)
| Ptree.Einfix (e1, op1, e23)
| Ptree.Einnfix (e1, op1, e23) ->
let apply loc de1 op de2 =
if op.id_str = Ident.op_neq then
let oq = Qident { op with id_str = Ident.op_equ } in
let dt = qualid_app op.id_loc oq [de1;de2] in
DEnot (Dexpr.dexpr ~loc dt)
else
qualid_app op.id_loc (Qident op) [de1;de2] in
let rec chain n1 n2 loc de1 op1 = function
| { expr_desc = Ptree.Einfix (e2, op2, e3); expr_loc = loc23 } ->
let de2 = dexpr muc denv e2 in
let re = is_reusable de2 in
let v = if re then de2 else mk_var n1 de2 in
let loc12 = loc_cutoff loc loc23 e2.expr_loc in
let de12 = Dexpr.dexpr ~loc:loc12 (apply loc12 de1 op1 v) in
let de23 = Dexpr.dexpr ~loc:loc23 (chain n2 n1 loc23 v op2 e3) in
let d = DEand (de12, de23) in
if re then d else mk_let ~loc n1 de2 d
| e23 ->
apply loc de1 op1 (dexpr muc denv e23) in
chain "q1 " "q2 " loc (dexpr muc denv e1) op1 e23
| Ptree.Econst (Number.ConstInt _ as c) ->
let dty = if Mts.is_empty muc.muc_theory.uc_ranges
then dity_int else dity_fresh () in
DEconst(c, dty)
| Ptree.Econst (Number.ConstReal _ as c) ->
let dty = if Mts.is_empty muc.muc_theory.uc_floats
then dity_real else dity_fresh () in
DEconst(c, dty)
| Ptree.Erecord fl ->
let ls_of_rs rs = match rs.rs_logic with
| RLls ls -> ls | _ -> assert false in
let get_val _cs pj = function
| None -> Loc.error ~loc (Decl.RecordFieldMissing (ls_of_rs pj))
| Some e -> dexpr muc denv e in
let cs,fl = parse_record ~loc muc get_val fl in
expr_app loc (DEsym (RS cs)) fl
| Ptree.Eupdate (e1, fl) ->
let e1 = dexpr muc denv e1 in
let re = is_reusable e1 in
let v = if re then e1 else mk_var "q " e1 in
let get_val _ pj = function
| None ->
let pj = Dexpr.dexpr ~loc (DEsym (RS pj)) in
Dexpr.dexpr ~loc (DEapp (pj, v))
| Some e -> dexpr muc denv e in
let cs,fl = parse_record ~loc muc get_val fl in
let d = expr_app loc (DEsym (RS cs)) fl in
if re then d else mk_let ~loc "q " e1 d
| Ptree.Elet (id, gh, kind, e1, e2) ->
let e1 = update_any kind e1 in
let kind = local_kind kind in
let ld = create_user_id id, gh, kind, dexpr muc denv e1 in
DElet (ld, dexpr muc (denv_add_let denv ld) e2)
| Ptree.Erec (fdl, e1) ->
let update_kind (id, gh, k, bl, pty, msk, sp, e) =
id, gh, local_kind k, bl, pty, msk, sp, e in
let fdl = List.map update_kind fdl in
let denv, rd = drec_defn muc denv fdl in
DErec (rd, dexpr muc denv e1)
| Ptree.Efun (bl, pty, msk, sp, e) ->
let bl = List.map (dbinder muc) bl in
let ds = dspec_no_variant muc sp in
let dity = dity_of_opt muc pty in
let denv = denv_add_args denv bl in
DEfun (bl, dity, msk, ds, dexpr muc denv e)
| Ptree.Eany (pl, kind, pty, msk, sp) ->
let pl = List.map (dparam muc) pl in
let ds = dspec_no_variant muc sp in
let ity = match kind, pty with
| _, Some pty -> ity_of_pty muc pty
| RKlemma, None -> ity_unit
| RKpred, None -> ity_bool
| _ -> Loc.errorm ~loc "cannot determine the type of the result" in
let dity = dity_of_ity ity in
let res = Some (id_fresh "result"), false, dity in
let denv = denv_add_args denv (res::pl) in
let add_alias (t1, t2) =
let bty = dity_fresh () in
let dt1 = eff_dterm muc denv t1 in
let dt2 = eff_dterm muc denv t2 in
ignore (Dexpr.dexpr ~loc:t1.term_loc (DEcast (dt1, bty)));
ignore (Dexpr.dexpr ~loc:t2.term_loc (DEcast (dt2, bty)));
in
List.iter add_alias sp.sp_alias;
DEany (pl, dity, msk, ds)
| Ptree.Ematch (e1, bl, xl) ->
let e1 = dexpr muc denv e1 in
let rbranch (pp, e) =
let pp = dpattern muc pp in
let denv = denv_add_expr_pat denv pp e1 in
pp, dexpr muc denv e in
let xbranch (q, pp, e) =
let xs = find_dxsymbol q in
let mb_unit = match xs with
| DEgexn xs -> ity_equal xs.xs_ity ity_unit
| DElexn _ -> true in
let pp = match pp with
| Some pp -> dpattern muc pp
| None when mb_unit -> Dexpr.dpattern ~loc (DPapp (rs_void, []))
| _ -> Loc.errorm ~loc "exception argument expected" in
let denv = denv_add_exn_pat denv pp xs in
let e = dexpr muc denv e in
xs, pp, e in
DEmatch (e1, List.map rbranch bl, List.map xbranch xl)
| Ptree.Eif (e1, e2, e3) ->
let e1 = dexpr muc denv e1 in
let e2 = dexpr muc denv e2 in
let e3 = dexpr muc denv e3 in
DEif (e1, e2, e3)
| Ptree.Enot e1 ->
DEnot (dexpr muc denv e1)
| Ptree.Eand (e1, e2) ->
DEand (dexpr muc denv e1, dexpr muc denv e2)
| Ptree.Eor (e1, e2) ->
DEor (dexpr muc denv e1, dexpr muc denv e2)
| Ptree.Etrue -> DEtrue
| Ptree.Efalse -> DEfalse
| Ptree.Esequence (e1, e2) ->
let e1 = { e1 with expr_desc = Ecast (e1, PTtuple []) } in
let e1 = dexpr muc denv e1 in
let e2 = dexpr muc denv e2 in
DElet ((id_user "_" loc, false, RKnone, e1), e2)
| Ptree.Ewhile (e1, inv, var, e2) ->
let e1 = dexpr muc denv e1 in
let e2 = dexpr muc denv e2 in
let inv = dinvariant muc inv in
let var = dvariant muc var in
DEwhile (e1, inv, var, e2)
| Ptree.Efor (id, efrom, dir, eto, inv, e1) ->
let efrom = dexpr muc denv efrom in
let eto = dexpr muc denv eto in
let inv = dinvariant muc inv in
let id = create_user_id id in
let denv = denv_add_for_index denv id efrom.de_dvty in
DEfor (id, efrom, dir, eto, inv, dexpr muc denv e1)
| Ptree.Eassign asl ->
let mk_assign (e1,q,e2) =
dexpr muc denv e1, find_record_field muc q, dexpr muc denv e2 in
DEassign (List.map mk_assign asl)
| Ptree.Eraise (q, e1) ->
let xs = find_dxsymbol q in
let mb_unit = match xs with
| DEgexn xs -> ity_equal xs.xs_ity ity_unit
| DElexn _ -> true in
let e1 = match e1 with
| Some e1 -> dexpr muc denv e1
| None when mb_unit -> Dexpr.dexpr ~loc (DEsym (RS rs_void))
| _ -> Loc.errorm ~loc "exception argument expected" in
DEraise (xs, e1)
| Ptree.Eghost e1 ->
DEghost (dexpr muc denv e1)
| Ptree.Eexn (id, pty, mask, e1) ->
let id = create_user_id id in
let dity = dity_of_pty muc pty in
let denv = denv_add_exn denv id dity in
DEexn (id, dity, mask, dexpr muc denv e1)
| Ptree.Eabsurd ->
DEabsurd
| Ptree.Epure t ->
let get_term lvm _xsm old = type_term muc lvm old t in
let gvars _at q = try match find_prog_symbol muc q with
| PV v -> Some v | _ -> None with _ -> None in
let get_dty pure_denv =
let dt = dterm muc gvars None pure_denv t in
match dt.dt_dty with Some dty -> dty | None -> dty_bool in
DEpure (get_term, denv_pure denv get_dty)
| Ptree.Eassert (ak, f) ->
DEassert (ak, dassert muc f)
| Ptree.Eoptexn (id, mask, e1) ->
let dity = dity_fresh () in
let id = create_user_id id in
let denv = denv_add_exn denv id dity in
DEoptexn (id, dity, mask, dexpr muc denv e1)
| Ptree.Elabel (id, e1) ->
DElabel (create_user_id id, dexpr muc denv e1)
| Ptree.Escope (q, e1) ->
let muc = open_scope muc "dummy" in
let muc = import_scope muc (string_list_of_qualid q) in
DEattr (dexpr muc denv e1, Sattr.empty)
| Ptree.Eattr (ATpos uloc, e1) ->
DEuloc (dexpr muc denv e1, uloc)
| Ptree.Eattr (ATstr attr, e1) ->
DEattr (dexpr muc denv e1, Sattr.singleton attr)
| Ptree.Ecast ({expr_desc = Ptree.Econst c}, pty) ->
DEconst (c, dity_of_pty muc pty)
| Ptree.Ecast (e1, pty) ->
let d1 = dexpr muc denv e1 in
DEcast (d1, dity_of_pty muc pty)
end
(* Type-check a block of mutually recursive program function definitions.
   Each parsed definition is prepared into the intermediate form expected
   by Dexpr.drec_defn; spec, variant and body are elaborated lazily once
   the recursive environment is known. *)
and drec_defn muc denv fdl =
(* [prep] converts one parsed definition into
   (id, ghost, kind, binders, result dity, mask, pre). *)
let prep (id, gh, kind, bl, pty, msk, sp, e) =
let bl = List.map (dbinder muc) bl in
let dity = dity_of_opt muc pty in
let pre denv =
(* the function's own binders are in scope in spec, variant, and body *)
let denv = denv_add_args denv bl in
let dv = dvariant muc sp.sp_variant in
dspec muc sp, dv, dexpr muc denv e in
create_user_id id, gh, kind, bl, dity, msk, pre in
Dexpr.drec_defn denv (List.map prep fdl)
open Pdecl
open Pmodule
(* Add a program declaration to the module under construction.
   When the Glob tracking flag is set, record every symbol introduced
   by [d]. Note: this intentionally shadows the imported add_pdecl,
   which it delegates to. *)
let add_pdecl ~vc muc d =
if Debug.test_flag Glob.flag then Sid.iter (Glob.def ~kind:"") d.pd_news;
add_pdecl ~vc muc d
(* Add a pure (logic-level) declaration; no VC generation involved. *)
let add_decl muc d = add_pdecl ~vc:false muc (create_pure_decl d)
(* Elaborate a term/formula in a pure context. [lvm] maps names to
   pre-typed variables; `at`/`old` are rejected because there is no
   program state to refer to outside program annotations. *)
let type_pure muc lvm denv e =
let gvars at q = match at, q with
| Some _, _ -> Loc.errorm ~loc:(qloc q)
"`at' and `old' can only be used in program annotations"
| None, Qident x -> Mstr.find_opt x.id_str lvm
| None, Qdot _ -> None in
dterm muc gvars None denv e
(* Strict, location-preserving elaboration of a pure term. *)
let type_term_pure muc lvm denv e =
Dterm.term ~strict:true ~keep_loc:true (type_pure muc lvm denv e)
(* Strict, location-preserving elaboration of a pure formula. *)
let type_fmla_pure muc lvm denv e =
Dterm.fmla ~strict:true ~keep_loc:true (type_pure muc lvm denv e)
(* Ensure a type declaration of kind [name] (alias, range, float, ...)
   carries none of the features reserved for plain record/variant types:
   it must be public, immutable, invariant-free, and witness-free. *)
let check_public ~loc d name =
if d.td_vis <> Public || d.td_mut then
Loc.errorm ~loc "%s types cannot be abstract, private, or mutable" name;
if d.td_inv <> [] then
Loc.errorm ~loc "%s types cannot have invariants" name;
if d.td_wit <> [] then
Loc.errorm ~loc "%s types cannot have witnesses" name
(* Elaborate a block of (possibly mutually recursive) type declarations.
   [def] maps each declared name to its declaration; [hts]/[htd] memoize
   the created type symbols and full declarations. [visit] processes one
   declaration, and [parse] converts a parsed type, recursing into [visit]
   for names declared in the same block; the [alias] set detects cyclic
   alias definitions and [alg] tracks algebraic types under construction. *)
let add_types muc tdl =
let add m ({td_ident = {id_str = x}; td_loc = loc} as d) =
Mstr.add_new (Loc.Located (loc, ClashSymbol x)) x d m in
let def = List.fold_left add Mstr.empty tdl in
let hts = Hstr.create 5 in
let htd = Hstr.create 5 in
let rec visit ~alias ~alg x d = if not (Hstr.mem htd x) then
let id = create_user_id d.td_ident and loc = d.td_loc in
let args = List.map (fun id -> tv_of_string id.id_str) d.td_params in
match d.td_def with
(* type alias: fully expanded; cycles are detected via [alias] *)
| TDalias pty ->
check_public ~loc d "Alias";
let alias = Sstr.add x alias in
let ity = parse ~loc ~alias ~alg pty in
if not (Hstr.mem htd x) then
let itd = create_alias_decl id args ity in
Hstr.add hts x itd.itd_its; Hstr.add htd x itd
(* algebraic (variant) type: check field consistency across constructors *)
| TDalgebraic csl ->
check_public ~loc d "Algebraic";
let hfd = Hstr.create 5 in
let alias = Sstr.empty in
let alg = Mstr.add x (id,args) alg in
let get_pj nms (_, id, ghost, pty) = match id with
| Some ({id_str = nm} as id) ->
let exn = Loc.Located (id.id_loc, Loc.Message ("Field " ^
nm ^ " is used more than once in the same constructor")) in
let nms = Sstr.add_new exn nm nms in
let ity = parse ~loc ~alias ~alg pty in
(* named fields are shared between constructors via [hfd] *)
let v = try Hstr.find hfd nm with Not_found ->
let v = create_pvsymbol (create_user_id id) ~ghost ity in
Hstr.add hfd nm v;
v in
if not (ity_equal v.pv_ity ity && ghost = v.pv_ghost) then
Loc.errorm ~loc "Conflicting definitions for field %s" nm;
nms, (true, v)
| None ->
let ity = parse ~loc ~alias ~alg pty in
nms, (false, create_pvsymbol (id_fresh "a") ~ghost ity) in
(* every constructor must declare exactly the same named fields *)
let get_cs oms (_, id, pjl) =
let nms, pjl = Lists.map_fold_left get_pj Sstr.empty pjl in
if Sstr.equal oms nms then create_user_id id, pjl else
let df = Sstr.union (Sstr.diff oms nms) (Sstr.diff nms oms) in
Loc.errorm ~loc "Field %s is missing in some constructors"
(Sstr.choose df) in
let csl = match csl with
| (_, id, pjl)::csl ->
let oms, pjl = Lists.map_fold_left get_pj Sstr.empty pjl in
(create_user_id id, pjl) :: List.map (get_cs oms) csl
| [] -> assert false in
if not ( Hstr.mem htd x ) then
(* a symbol already in [hts] means the type was entered recursively *)
begin match try Some (Hstr.find hts x) with Not_found -> None with
| Some s ->
Hstr.add htd x (create_rec_variant_decl s csl)
| None ->
let itd = create_plain_variant_decl id args csl in
Hstr.add hts x itd.itd_its; Hstr.add htd x itd end
(* record type: may be private/mutable, with invariants and witnesses *)
| TDrecord fl ->
let alias = Sstr.empty in
let alg = Mstr.add x (id,args) alg in
let get_fd nms fd =
let {id_str = nm; id_loc = loc} = fd.f_ident in
let id = create_user_id fd.f_ident in
let ity = parse ~loc ~alias ~alg fd.f_pty in
let ghost = d.td_vis = Abstract || fd.f_ghost in
let pv = create_pvsymbol id ~ghost ity in
let exn = Loc.Located (loc, Loc.Message ("Field " ^
nm ^ " is used more than once in a record")) in
Mstr.add_new exn nm pv nms, (fd.f_mutable, pv) in
let nms,fl = Lists.map_fold_left get_fd Mstr.empty fl in
if not ( Hstr.mem htd x ) then
begin match try Some (Hstr.find hts x) with Not_found -> None with
| Some s ->
check_public ~loc d "Recursive";
let get_fd (mut, fd) = if mut then Loc.errorm ~loc
"Recursive types cannot have mutable fields" else fd in
Hstr.add htd x (create_rec_record_decl s (List.map get_fd fl))
| None ->
let priv = d.td_vis <> Public || fl = [] and mut = d.td_mut in
(* fields are visible as variables inside the type invariant *)
let add_fd m (_, v) = Mstr.add v.pv_vs.vs_name.id_string v m in
let gvars = List.fold_left add_fd Mstr.empty fl in
let type_inv f = type_fmla_pure muc gvars Dterm.denv_empty f in
let inv = List.map type_inv d.td_inv in
(* elaborate witness expressions, one per field, cast to field type *)
let add_w m (q,e) =
let v = try match q with
| Qident x -> Mstr.find x.id_str nms
| Qdot _ -> raise Not_found
with Not_found -> Loc.errorm ~loc:(qloc q)
"Unknown field %a" print_qualid q in
let dity = dity_of_ity v.pv_ity in
let de = dexpr muc denv_empty e in
let de = Dexpr.dexpr ?loc:de.de_loc (DEcast (de, dity)) in
Mpv.add v (expr ~keep_loc:true de) m in
let wit = List.fold_left add_w Mpv.empty d.td_wit in
let wit = if d.td_wit = [] then [] else
List.map (fun (_,v) -> try Mpv.find v wit with
| _ -> Loc.errorm ?loc:v.pv_vs.vs_name.Ident.id_loc
"Missing field %s" v.pv_vs.vs_name.id_string) fl in
let itd = create_plain_record_decl ~priv ~mut id args fl inv wit in
Hstr.add hts x itd.itd_its; Hstr.add htd x itd
end
(* bounded integer range type *)
| TDrange (lo,hi) ->
check_public ~loc d "Range";
let ir = Number.create_range lo hi in
let itd = create_range_decl id ir in
Hstr.add hts x itd.itd_its; Hstr.add htd x itd
(* IEEE-style floating-point type *)
| TDfloat (eb,sb) ->
check_public ~loc d "Floating-point";
let fp = { Number.fp_exponent_digits = eb;
Number.fp_significand_digits = sb } in
let itd = create_float_decl id fp in
Hstr.add hts x itd.itd_its; Hstr.add htd x itd
(* convert a parsed type, resolving names declared in this block on demand *)
and parse ~loc ~alias ~alg pty =
let rec down = function
| PTtyvar id ->
ity_var (tv_of_string id.id_str)
| PTtyapp (q,tyl) ->
let s = match q with
| Qident {id_str = x} when Sstr.mem x alias ->
Loc.errorm ~loc "Cyclic type definition"
| Qident {id_str = x} when Hstr.mem hts x ->
Hstr.find hts x
| Qident {id_str = x} when Mstr.mem x alg ->
(* recursive occurrence: pre-create the symbol, then visit *)
let id, args = Mstr.find x alg in
let s = create_rec_itysymbol id args in
Hstr.add hts x s;
visit ~alias ~alg x ( Mstr.find x def ) ;
s
| Qident {id_str = x} when Mstr.mem x def ->
visit ~alias ~alg x (Mstr.find x def);
Hstr.find hts x
| _ ->
find_itysymbol muc q in
Loc.try3 ~loc:(qloc q) ity_app s (List.map down tyl) []
| PTtuple tyl -> ity_tuple (List.map down tyl)
| PTarrow (ty1,ty2) -> ity_func (down ty1) (down ty2)
| PTpure ty -> ity_purify (down ty)
| PTparen ty -> down ty in
down pty in
Mstr.iter (visit ~alias:Mstr.empty ~alg:Mstr.empty) def;
let tdl = List.map (fun d -> Hstr.find htd d.td_ident.id_str) tdl in
let add muc d = add_pdecl ~vc:true muc d in
List.fold_left add muc (create_type_decl tdl)
(* Convert a parsed parameter list into logic types, rejecting ghost
   parameters: ghost status is meaningless in pure declarations. *)
let tyl_of_params {muc_theory = tuc} pl =
let ty_of_param (loc,_,gh,ty) =
if gh then Loc.errorm ~loc
"ghost parameters are not allowed in pure declarations";
ty_of_pty tuc ty in
List.map ty_of_param pl
(* Elaborate a block of (possibly mutually recursive) pure logic
   declarations: functions and predicates.
   NOTE: the extraction that produced this chunk had destroyed the two
   numbered comments (left as bare text, a syntax error) and dropped the
   two [Some e, ...] match arms; both are restored here. *)
let add_logics muc dl =
  let lsymbols = Hstr.create 17 in
  (* 1. create all symbols and make an environment with these symbols *)
  let create_symbol mkk d =
    let id = create_user_id d.ld_ident in
    let pl = tyl_of_params muc d.ld_params in
    let ty = Opt.map (ty_of_pty muc.muc_theory) d.ld_type in
    let ls = create_lsymbol id pl ty in
    Hstr.add lsymbols d.ld_ident.id_str ls;
    Loc.try2 ~loc:d.ld_loc add_decl mkk (create_param_decl ls) in
  let mkk = List.fold_left create_symbol muc dl in
  (* 2. then type-check all definitions *)
  let type_decl d (abst,defn) =
    let ls = Hstr.find lsymbols d.ld_ident.id_str in
    let create_var (loc,x,_,_) ty =
      let id = match x with
        | Some id -> create_user_id id
        | None -> id_user "_" loc in
      create_vsymbol id ty in
    let vl = List.map2 create_var d.ld_params ls.ls_args in
    let add_var mvs (_,x,_,_) vs = match x with
      | Some {id_str = x} -> Mstr.add_new (DuplicateVar x) x (DTgvar vs) mvs
      | None -> mvs in
    let denv = List.fold_left2 add_var Dterm.denv_empty d.ld_params vl in
    match d.ld_def, d.ld_type with
    | None, _ -> ls :: abst, defn
    | Some e, None -> (* predicate *)
        let f = type_fmla_pure mkk Mstr.empty denv e in
        abst, (make_ls_defn ls vl f) :: defn
    | Some e, Some ty -> (* function *)
        let e = { e with term_desc = Tcast (e, ty) } in
        let t = type_term_pure mkk Mstr.empty denv e in
        abst, (make_ls_defn ls vl t) :: defn in
  let abst,defn = List.fold_right type_decl dl ([],[]) in
  let add_param muc s = add_decl muc (create_param_decl s) in
  let add_logic muc l = add_decl muc (create_logic_decl l) in
  let muc = List.fold_left add_param muc abst in
  if defn = [] then muc else add_logic muc defn
(* Elaborate an (co)inductive predicate block of sign [s].
   The two numbered comments below had lost their (* *) delimiters in
   extraction (bare text, a syntax error); they are restored here. *)
let add_inductives muc s dl =
  (* 1. create all symbols and make an environment with these symbols *)
  let psymbols = Hstr.create 17 in
  let create_symbol mkk d =
    let id = create_user_id d.in_ident in
    let pl = tyl_of_params muc d.in_params in
    let ps = create_psymbol id pl in
    Hstr.add psymbols d.in_ident.id_str ps;
    Loc.try2 ~loc:d.in_loc add_decl mkk (create_param_decl ps) in
  let mkk = List.fold_left create_symbol muc dl in
  (* 2. then type-check all definitions *)
  let propsyms = Hstr.create 17 in
  let type_decl d =
    let ps = Hstr.find psymbols d.in_ident.id_str in
    let clause (loc, id, f) =
      Hstr.replace propsyms id.id_str loc;
      let f = type_fmla_pure mkk Mstr.empty Dterm.denv_empty f in
      create_prsymbol (create_user_id id), f in
    ps, List.map clause d.in_def in
  let loc_of_id id = Opt.get id.Ident.id_loc in
  (* re-raise declaration errors with a precise source location *)
  try add_decl muc (create_ind_decl s (List.map type_decl dl))
  with
  | ClashSymbol s ->
      Loc.error ~loc:(Hstr.find propsyms s) (ClashSymbol s)
  | InvalidIndDecl (ls,pr) ->
      Loc.error ~loc:(loc_of_id pr.pr_name) (InvalidIndDecl (ls,pr))
  | NonPositiveIndDecl (ls,pr,s) ->
      Loc.error ~loc:(loc_of_id pr.pr_name) (NonPositiveIndDecl (ls,pr,s))
(* Add a proposition of kind [k] (axiom/lemma/goal) named [s]
   with body [f] to the module under construction. *)
let add_prop muc k s f =
let pr = create_prsymbol (create_user_id s) in
let f = type_fmla_pure muc Mstr.empty Dterm.denv_empty f in
add_decl muc (create_prop_decl k pr f)
(* Resolve a module reference: an unqualified name is first looked up
   among the modules of the current file, then on the loadpath; a
   qualified name always goes through the loadpath. *)
let find_module env file q =
let m = match q with
| Qident {id_str = nm} ->
(try Mstr.find nm file with Not_found -> read_module env [] nm)
| Qdot (p, {id_str = nm}) -> read_module env (string_list_of_qualid p) nm in
if Debug.test_flag Glob.flag then
Glob.use ~kind:"theory" (qloc_last q) m.mod_theory.th_name;
m
(* Build a clone substitution for instantiating module [m] inside [muc],
   from the parsed substitution items [s]. Each item maps a symbol of the
   cloned module to a symbol (or expression-level entity) of the current
   module, with clash detection on duplicate mappings. *)
let type_inst ({muc_theory = tuc} as muc) ({mod_theory = t} as m) s =
let add_inst s = function
(* nullary type symbol mapped directly to another type symbol *)
| CStsym (p,[],PTtyapp (q,[])) ->
let ts1 = find_tysymbol_ns t.th_export p in
let ts2 = find_itysymbol muc q in
if Mts.mem ts1 s.mi_ty then Loc.error ~loc:(qloc p)
(ClashSymbol ts1.ts_name.id_string);
{ s with mi_ts = Loc.try4 ~loc:(qloc p) Mts.add_new
(ClashSymbol ts1.ts_name.id_string) ts1 ts2 s.mi_ts }
(* general case: map a type symbol to a parametric type expression;
   if the alias reduces to a plain symbol application on the same
   variables, store a symbol-to-symbol mapping, otherwise a type one *)
| CStsym (p,tvl,pty) ->
let ts1 = find_tysymbol_ns t.th_export p in
let tvl = List.map (fun id -> tv_of_string id.id_str) tvl in
let ts2 = Loc.try3 ~loc:(qloc p) create_alias_itysymbol
(id_clone ts1.ts_name) tvl (ity_of_pty muc pty) in
let ty2 = ity_app ts2 (List.map ity_var ts1.ts_args) [] in
let check v ty = match ty.ity_node with
| Ityvar u -> tv_equal u v | _ -> false in
begin match ty2.ity_node with
| Ityapp (ts2, tyl, _) | Ityreg { reg_its = ts2; reg_args = tyl }
when Lists.equal check tvl tyl ->
if Mts.mem ts1 s.mi_ty then Loc.error ~loc:(qloc p)
(ClashSymbol ts1.ts_name.id_string);
{ s with mi_ts = Loc.try4 ~loc:(qloc p) Mts.add_new
(ClashSymbol ts1.ts_name.id_string) ts1 ts2 s.mi_ts }
| _ ->
if Mts.mem ts1 s.mi_ts then Loc.error ~loc:(qloc p)
(ClashSymbol ts1.ts_name.id_string);
{ s with mi_ty = Loc.try4 ~loc:(qloc p) Mts.add_new
(ClashSymbol ts1.ts_name.id_string) ts1 ty2 s.mi_ty }
end
(* logic function symbol instantiation *)
| CSfsym (p,q) ->
let ls1 = find_fsymbol_ns t.th_export p in
let ls2 = find_fsymbol tuc q in
{ s with mi_ls = Loc.try4 ~loc:(qloc p) Mls.add_new
(ClashSymbol ls1.ls_name.id_string) ls1 ls2 s.mi_ls }
(* predicate symbol instantiation *)
| CSpsym (p,q) ->
let ls1 = find_psymbol_ns t.th_export p in
let ls2 = find_psymbol tuc q in
{ s with mi_ls = Loc.try4 ~loc:(qloc p) Mls.add_new
(ClashSymbol ls1.ls_name.id_string) ls1 ls2 s.mi_ls }
(* program symbol instantiation: both sides must agree on
   routine-vs-constant; overloaded notation is rejected *)
| CSvsym (p,q) ->
let rs1 = find_prog_symbol_ns m.mod_export p in
let rs2 = find_prog_symbol muc q in
begin match rs1, rs2 with
| RS rs1, RS rs2 ->
{ s with mi_rs = Loc.try4 ~loc:(qloc p) Mrs.add_new
(ClashSymbol rs1.rs_name.id_string) rs1 rs2 s.mi_rs }
| PV pv1, PV pv2 ->
{ s with mi_pv = Loc.try4 ~loc:(qloc p) Mvs.add_new
(ClashSymbol pv1.pv_vs.vs_name.id_string) pv1.pv_vs pv2 s.mi_pv }
| PV _, RS _ ->
Loc.errorm ~loc:(qloc q) "program constant expected"
| RS _, PV _ ->
Loc.errorm ~loc:(qloc q) "program function expected"
| OO _, _ | _, OO _ ->
Loc.errorm ~loc:(qloc q) "ambiguous notation"
end
(* exception symbol instantiation *)
| CSxsym (p,q) ->
let xs1 = find_xsymbol_ns m.mod_export p in
let xs2 = find_xsymbol muc q in
{ s with mi_xs = Loc.try4 ~loc:(qloc p) Mxs.add_new
(ClashSymbol xs1.xs_name.id_string) xs1 xs2 s.mi_xs }
(* re-kind individual propositions of the cloned module *)
| CSaxiom p ->
let pr = find_prop_ns t.th_export p in
{ s with mi_pk = Loc.try4 ~loc:(qloc p) Mpr.add_new
(ClashSymbol pr.pr_name.id_string) pr Paxiom s.mi_pk }
| CSlemma p ->
let pr = find_prop_ns t.th_export p in
{ s with mi_pk = Loc.try4 ~loc:(qloc p) Mpr.add_new
(ClashSymbol pr.pr_name.id_string) pr Plemma s.mi_pk }
| CSgoal p ->
let pr = find_prop_ns t.th_export p in
{ s with mi_pk = Loc.try4 ~loc:(qloc p) Mpr.add_new
(ClashSymbol pr.pr_name.id_string) pr Pgoal s.mi_pk }
(* default proposition kind for everything not listed explicitly *)
| CSprop k ->
{ s with mi_df = k }
in
List.fold_left add_inst (empty_mod_inst m) s
(* Dispatch one parsed top-level declaration into the module [muc].
   VCs are generated only outside the library path and when type-only
   mode is off. Shadows the earlier pure add_decl. *)
let add_decl muc env file d =
let vc = muc.muc_theory.uc_path = [] &&
Debug.test_noflag debug_type_only in
match d with
| Ptree.Dtype dl ->
add_types muc dl
| Ptree.Dlogic dl ->
add_logics muc dl
| Ptree.Dind (s,dl) ->
add_inductives muc s dl
| Ptree.Dprop (k,s,f) ->
add_prop muc k s f
(* meta declaration: convert each parsed argument to a meta argument *)
| Ptree.Dmeta (id,al) ->
let tuc = muc.muc_theory in
let convert = function
| Ptree.Mty (PTtyapp (q,[]))
-> MAts (find_tysymbol tuc q)
| Ptree.Mty ty -> MAty (ty_of_pty tuc ty)
| Ptree.Mfs q -> MAls (find_fsymbol tuc q)
| Ptree.Mps q -> MAls (find_psymbol tuc q)
| Ptree.Max q -> MApr (find_prop_of_kind Paxiom tuc q)
| Ptree.Mlm q -> MApr (find_prop_of_kind Plemma tuc q)
| Ptree.Mgl q -> MApr (find_prop_of_kind Pgoal tuc q)
| Ptree.Mstr s -> MAstr s
| Ptree.Mint i -> MAint i in
add_meta muc (lookup_meta id.id_str) (List.map convert al)
(* program declarations *)
| Ptree.Dlet (id, gh, kind, e) ->
let e = update_any kind e in
let ld = create_user_id id, gh, kind, dexpr muc denv_empty e in
add_pdecl ~vc muc (create_let_decl (let_defn ~keep_loc:true ld))
| Ptree.Drec fdl ->
let _, rd = drec_defn muc denv_empty fdl in
add_pdecl ~vc muc (create_let_decl (rec_defn ~keep_loc:true rd))
| Ptree.Dexn (id, pty, mask) ->
let ity = ity_of_pty muc pty in
let xs = create_xsymbol (create_user_id id) ~mask ity in
add_pdecl ~vc muc (create_exn_decl xs)
(* module use/clone *)
| Ptree.Duse use ->
use_export muc (find_module env file use)
| Ptree.Dclone (use, inst) ->
let m = find_module env file use in
warn_clone_not_abstract (qloc use) m.mod_theory;
clone_export muc m (type_inst muc m inst)
(* One source file being processed: [file] accumulates the modules
   already closed in the file and [muc] is the module currently under
   construction, if any. *)
type slice = {
env : Env.env;
path : Env.pathname;
mutable file : pmodule Mstr.t;
mutable muc : pmodule_uc option;
}
(* Stack of nested file slices (files can be opened recursively). *)
let state = (Stack.create () : slice Stack.t)
(* Start processing a new source file in environment [env] at [path]. *)
let open_file env path =
assert (Stack.is_empty state || (Stack.top state).muc <> None);
Stack.push { env = env; path = path; file = Mstr.empty; muc = None } state
(* Finish the current file and return its modules. *)
let close_file () =
assert (not (Stack.is_empty state) && (Stack.top state).muc = None);
(Stack.pop state).file
(* Begin a new module [id] in the current file; duplicate names are
   rejected with a located error. *)
let open_module ({id_str = nm; id_loc = loc} as id) =
assert (not (Stack.is_empty state) && (Stack.top state).muc = None);
let slice = Stack.top state in
if Mstr.mem nm slice.file then Loc.errorm ~loc
"module %s is already defined in this file" nm;
let muc = create_module slice.env ~path:slice.path (create_user_id id) in
slice.muc <- Some muc
(* Close the module under construction and record it in the file map
   (skipped entirely in parse-only mode). *)
let close_module loc =
assert (not (Stack.is_empty state) && (Stack.top state).muc <> None);
let slice = Stack.top state in
if Debug.test_noflag debug_parse_only then begin
let m = Loc.try1 ~loc close_module (Opt.get slice.muc) in
if Debug.test_flag Glob.flag then
Glob.def ~kind:"theory" m.mod_theory.th_name;
slice.file <- Mstr.add m.mod_theory.th_name.id_string m slice.file;
end;
slice.muc <- None
(* Return the current module, creating an implicit "Top" module for
   declarations outside any module; this is only legal outside the
   library path. *)
let top_muc_on_demand loc slice = match slice.muc with
| Some muc -> muc
| None ->
assert (Mstr.is_empty slice.file);
if slice.path <> [] then Loc.errorm ~loc
"All declarations in library files must be inside modules";
let muc = create_module slice.env ~path:[] (id_fresh "Top") in
slice.muc <- Some muc;
muc
(* Open a nested scope [nm] in the current module. *)
let open_scope loc nm =
assert (not (Stack.is_empty state));
let slice = Stack.top state in
let muc = top_muc_on_demand loc slice in
if Debug.test_noflag debug_parse_only then
slice.muc <- Some (open_scope muc nm.id_str)
(* Close the innermost scope, optionally importing its names. *)
let close_scope loc ~import =
assert (not (Stack.is_empty state) && (Stack.top state).muc <> None);
if Debug.test_noflag debug_parse_only then
let slice = Stack.top state in
let muc = Loc.try1 ~loc (close_scope ~import) (Opt.get slice.muc) in
slice.muc <- Some muc
(* Import the names of scope [q] into the current namespace. *)
let import_scope loc q =
assert (not (Stack.is_empty state));
let slice = Stack.top state in
let muc = top_muc_on_demand loc slice in
if Debug.test_noflag debug_parse_only then
let muc = Loc.try2 ~loc import_scope muc (string_list_of_qualid q) in
slice.muc <- Some muc
(* Type-check and add one parsed declaration to the current module. *)
let add_decl loc d =
assert (not (Stack.is_empty state));
let slice = Stack.top state in
let muc = top_muc_on_demand loc slice in
if Debug.test_noflag debug_parse_only then
let muc = Loc.try4 ~loc add_decl muc slice.env slice.file d in
slice.muc <- Some muc
(* Pretty-print this module's own exceptions; re-raise all others so
   the next registered printer can handle them. *)
let () = Exn_printer.register (fun fmt e -> match e with
| UnboundSymbol q ->
Format.fprintf fmt "unbound symbol '%a'" print_qualid q
| _ -> raise e)
|
915ef47dd1832f17038b9179faee3d258bebe361b9a90694b948d8075e1b95c6 | mgsloan/instance-templates | Lattices.hs | # LANGUAGE TemplateHaskell , ConstraintKinds , ScopedTypeVariables , FlexibleInstances , FlexibleContexts ,
UndecidableInstances #
Not actually using this , just necessary for an AST quote
From lattices library
module Lattices where
import Data.Monoid (Monoid)
import Language.Haskell.InstanceTemplates
import Language.Haskell.TH.Syntax -- This is just for prettier -ddump-splices
-- | A algebraic structure with element joins: <>
--
-- Associativity: x `join` (y `join` z) == (x `join` y) `join` z
-- Commutativity: x `join` y == y `join` x
-- Idempotency: x `join` x == x
--
-- Partial-Order: x `leq` y == (x `join` y == y)
class JoinSemiLattice a where
join :: a -> a -> a
-- | The partial ordering induced by the join-semilattice structure
joinLeq :: (Eq a, JoinSemiLattice a) => a -> a -> Bool
joinLeq x y = x `join` y == y
| The join of at a list of join - semilattice elements ( of length at least one )
joins1 :: JoinSemiLattice a => [a] -> a
joins1 = foldr1 join
-- | A algebraic structure with element meets: <>
--
-- Associativity: x `meet` (y `meet` z) == (x `meet` y) `meet` z
-- Commutativity: x `meet` y == y `meet` x
-- Idempotency: x `meet` x == x
--
-- Partial-Order: x `leq` y == (x `meet` y == x)
class MeetSemiLattice a where
meet :: a -> a -> a
-- | The partial ordering induced by the meet-semilattice structure
meetLeq :: (Eq a, MeetSemiLattice a) => a -> a -> Bool
meetLeq x y = x `meet` y == x
| The meet of at a list of meet - semilattice elements ( of length at least one )
meets1 :: MeetSemiLattice a => [a] -> a
meets1 = foldr1 meet
| The combination of two semi lattices makes a lattice if the absorption law holds :
-- see <> and <(order)>
--
-- Absorption: a `join` (a `meet` b) == a `meet` (a `join` b) == a
type Lattice a = (JoinSemiLattice a, MeetSemiLattice a)
$(mkTemplate =<< [d|
class Lattice a where
instance Inherit (Instance (JoinSemiLattice a, MeetSemiLattice a)) where
|])
{- Instance template syntax
deriving class Lattice a where
instance (JoinSemiLattice a, MeetSemiLattice a)
-}
-- | A join-semilattice with some element |bottom| that `join` approaches.
--
-- Identity: x `join` bottom == x
class JoinSemiLattice a => BoundedJoinSemiLattice a where
bottom :: a
-- | The join of a list of join-semilattice elements
joins :: BoundedJoinSemiLattice a => [a] -> a
joins = foldr join bottom
-- | A meet-semilattice with some element |top| that `meet` approaches.
--
-- Identity: x `meet` top == x
class MeetSemiLattice a => BoundedMeetSemiLattice a where
top :: a
-- | The meet of a list of meet-semilattice elements
meets :: BoundedMeetSemiLattice a => [a] -> a
meets = foldr meet top
-- | Lattices with both bounds
--
Partial - Order : bottom ` leq ` top
type BoundedLattice a = (Lattice a, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a)
$(mkTemplate =<< [d|
class BoundedLattice a where
instance Inherit (Instance
( JoinSemiLattice a, MeetSemiLattice a
, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a
)) where
|])
{- Instance template syntax
deriving class BoundedLattice a where
instance (Lattice a, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a)
-}
-- | Boolean lattices have a complement and are distributive over join and meet:
-- <>
--
-- Complement Join:  complement x `join` x == top
-- Complement Meet:  complement x `meet` x == bottom
-- Involution:       complement (complement x) == x
-- Order-Reversing:  complement x `leq` complement y == y `leq` x
class BoundedLattice a => BooleanLattice a where
    complement :: a -> a
-- | Lattices with residuals for the Monoid: <>
--
-- TODO: MeetSemiLattice variant?
--
-- (y `leq` residueR x z) === (mappend x y `leq` z) === (x `leq` residueL z y)
class (JoinSemiLattice a, Monoid a) => ResiduatedLattice a where
    residualL, residualR :: a -> a -> a
-- | Lattices with implication - Heyting Algebras: <>
-- class (BoundedLattice a => Heyting a) where
--
-- Partial-Order: a `leq` b == (a `diff` b == top)
-- Distributive:  a `diff` (b `join` c) == (a `diff` b) `join` (a `diff` c)
--                a `join` (a `diff` b) == a `join` b
--                b `join` (a `diff` b) == b
class (BoundedJoinSemiLattice a, BoundedMeetSemiLattice a) => DiffLattice a where
    diff :: a -> a -> a

-- More issues representing constraint kinds in TH. The above should be
-- class BoundedLattice a => DiffLattice a where

type MonoidDiffLattice a = (BoundedLattice a, ResiduatedLattice a, DiffLattice a)
$(mkTemplate =<< [d|
    class MonoidDiffLattice a where
      instance Inherit (Instance
        ( JoinSemiLattice a, MeetSemiLattice a
        , BoundedJoinSemiLattice a, BoundedMeetSemiLattice a
        , DiffLattice a
        )) where

      -- When the Monoid operations are the same as the BoundedJoinSemiLattice,
      -- residueL === residueR === diff
      instance ResiduatedLattice a where
        residualL = diff
        residualR = diff
  |])
| A algebraic structure with element joins: <>
Associativity: x `join` (y `join` z) == (x `join` y) `join` z
Commutativity: x `join` y == y `join` x
Idempotency: x `join` x == x
Partial-Order: x `leq` y == (x `join` y == y)
| The partial ordering induced by the join-semilattice structure
| A algebraic structure with element meets: <>
Associativity: x `meet` (y `meet` z) == (x `meet` y) `meet` z
Commutativity: x `meet` y == y `meet` x
Idempotency: x `meet` x == x
Partial-Order: x `leq` y == (x `meet` y == x)
| The partial ordering induced by the meet-semilattice structure
see <> and <(order)>
Absorption: a `join` (a `meet` b) == a `meet` (a `join` b) == a
Instance template syntax
deriving class Lattice a where
instance (JoinSemiLattice a, MeetSemiLattice a)
| A join-semilattice with some element |bottom| that `join` approaches.
Identity: x `join` bottom == x
| The join of a list of join-semilattice elements
| A meet-semilattice with some element |top| that `meet` approaches.
Identity: x `meet` top == x
| The meet of a list of meet-semilattice elements
| Lattices with both bounds
Instance template syntax
deriving class BoundedLattice a where
instance (Lattice a, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a)
| Boolean lattices have a complement and are distributive over join and meet:
<>
Complement Join: complement x `join` x == top
Complement Meet: complement x `meet` x == bottom
Involution: complement (complement x) == x
| Lattices with residuals for the Monoid: <>
TODO: MeetSemiLattice variant?
| Lattices with implication - Heyting Algebras: <>
class (BoundedLattice a => Heyting a) where
Distributive: a `diff` (b `join` c) == (a `diff` b) `join` (a `diff` c)
a `join` (a `diff` b) == a `join` b
b `join` (a `diff` b) == b
class BoundedLattice a => DiffLattice a where
When the Monoid operations are the same as the BoundedJoinSemiLattice,
residueL === residueR === diff | # LANGUAGE TemplateHaskell , ConstraintKinds , ScopedTypeVariables , FlexibleInstances , FlexibleContexts ,
UndecidableInstances #
Not actually using this , just necessary for an AST quote
From lattices library
module Lattices where
import Data.Monoid (Monoid)
import Language.Haskell.InstanceTemplates
class JoinSemiLattice a where
join :: a -> a -> a
joinLeq :: (Eq a, JoinSemiLattice a) => a -> a -> Bool
joinLeq x y = x `join` y == y
| The join of at a list of join - semilattice elements ( of length at least one )
joins1 :: JoinSemiLattice a => [a] -> a
joins1 = foldr1 join
class MeetSemiLattice a where
meet :: a -> a -> a
meetLeq :: (Eq a, MeetSemiLattice a) => a -> a -> Bool
meetLeq x y = x `meet` y == x
| The meet of at a list of meet - semilattice elements ( of length at least one )
meets1 :: MeetSemiLattice a => [a] -> a
meets1 = foldr1 meet
| The combination of two semi lattices makes a lattice if the absorption law holds :
type Lattice a = (JoinSemiLattice a, MeetSemiLattice a)
$(mkTemplate =<< [d|
class Lattice a where
instance Inherit (Instance (JoinSemiLattice a, MeetSemiLattice a)) where
|])
class JoinSemiLattice a => BoundedJoinSemiLattice a where
bottom :: a
joins :: BoundedJoinSemiLattice a => [a] -> a
joins = foldr join bottom
class MeetSemiLattice a => BoundedMeetSemiLattice a where
top :: a
meets :: BoundedMeetSemiLattice a => [a] -> a
meets = foldr meet top
Partial - Order : bottom ` leq ` top
type BoundedLattice a = (Lattice a, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a)
$(mkTemplate =<< [d|
class BoundedLattice a where
instance Inherit (Instance
( JoinSemiLattice a, MeetSemiLattice a
, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a
)) where
|])
Order - Reversing : complement x ` leq ` complement y = = y ` leq ` x
class BoundedLattice a => BooleanLattice a where
complement :: a -> a
( y ` leq ` residueR x z ) = = = ( mappend x y ` leq ` z ) = = = ( x ` leq ` residueL z y )
class (JoinSemiLattice a, Monoid a) => ResiduatedLattice a where
residualL, residualR :: a -> a -> a
Partial - Order : a ` leq ` b = = ( a ` diff ` b = = top )
class (BoundedJoinSemiLattice a, BoundedMeetSemiLattice a) => DiffLattice a where
diff :: a -> a -> a
More issues representing constraint kinds in TH . The above should be
type MonoidDiffLattice a = (BoundedLattice a, ResiduatedLattice a, DiffLattice a)
$(mkTemplate =<< [d|
class MonoidDiffLattice a where
instance Inherit (Instance
( JoinSemiLattice a, MeetSemiLattice a
, BoundedJoinSemiLattice a, BoundedMeetSemiLattice a
, DiffLattice a
)) where
instance ResiduatedLattice a where
residualL = diff
residualR = diff
|]) |
65660f8afadb2fb4c2827acb41dd8cc3879d2b07bba6e80d4ad0bcb7d114fa3b | jeromesimeon/Galax | df_analysis_context.ml | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : df_analysis_context.ml , v 1.3 2007/02/01 22:08:54 simeon Exp $
(* Module: Df_analysis_context
Description:
This module provides a functional context for the XQuery core
data flow analysis algorithm by storing a mapping between
variable names and graphs.
*)
open Xquery_common_ast
open Error
open Df_struct
type ('a, 'b) df_analysis_context = (cvname * ('a, 'b) dfgraph) list
let build_df_analysis_context () =
[]
let add_var_dfgraph ctxt cvname dfgraph =
(cvname, dfgraph) :: ctxt
let get_var_dfgraph ctxt cvname =
try
List.assoc cvname ctxt
with
| _ ->
raise (Query (Streaming_XPath ("Cannot find variable $"^(Namespace_names.prefixed_string_of_rqname cvname)^" in data flow analysis context.")))
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/streaming_xpath/df_analysis_context.ml | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Df_analysis_context
Description:
This module provides a functional context for the XQuery core
data flow analysis algorithm by storing a mapping between
variable names and graphs.
| Copyright 2001 - 2007 .
$ I d : df_analysis_context.ml , v 1.3 2007/02/01 22:08:54 simeon Exp $
open Xquery_common_ast
open Error
open Df_struct
type ('a, 'b) df_analysis_context = (cvname * ('a, 'b) dfgraph) list
let build_df_analysis_context () =
[]
let add_var_dfgraph ctxt cvname dfgraph =
(cvname, dfgraph) :: ctxt
let get_var_dfgraph ctxt cvname =
try
List.assoc cvname ctxt
with
| _ ->
raise (Query (Streaming_XPath ("Cannot find variable $"^(Namespace_names.prefixed_string_of_rqname cvname)^" in data flow analysis context.")))
|
821210e3ee65e239f3c3177b40d7669d8afbfe28b56eb18bdcee18ea207b2071 | vyzo/gerbil | optimize-xform__0.scm | (declare (block) (standard-bindings) (extended-bindings))
(begin
(define gxc#&identity-expression
(make-promise
(lambda ()
(let ((_tbl18216_ (make-table 'test: eq?)))
(table-set! _tbl18216_ '%#begin-annotation gxc#xform-identity)
(table-set! _tbl18216_ '%#lambda gxc#xform-identity)
(table-set! _tbl18216_ '%#case-lambda gxc#xform-identity)
(table-set! _tbl18216_ '%#let-values gxc#xform-identity)
(table-set! _tbl18216_ '%#letrec-values gxc#xform-identity)
(table-set! _tbl18216_ '%#letrec*-values gxc#xform-identity)
(table-set! _tbl18216_ '%#quote gxc#xform-identity)
(table-set! _tbl18216_ '%#quote-syntax gxc#xform-identity)
(table-set! _tbl18216_ '%#call gxc#xform-identity)
(table-set! _tbl18216_ '%#if gxc#xform-identity)
(table-set! _tbl18216_ '%#ref gxc#xform-identity)
(table-set! _tbl18216_ '%#set! gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-instance? gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-direct-instance? gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-ref gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-set! gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-direct-ref gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-direct-set! gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-unchecked-ref gxc#xform-identity)
(table-set! _tbl18216_ '%#struct-unchecked-set! gxc#xform-identity)
_tbl18216_))))
(define gxc#&identity-special-form
(make-promise
(lambda ()
(let ((_tbl18212_ (make-table 'test: eq?)))
(table-set! _tbl18212_ '%#begin gxc#xform-identity)
(table-set! _tbl18212_ '%#begin-syntax gxc#xform-identity)
(table-set! _tbl18212_ '%#begin-foreign gxc#xform-identity)
(table-set! _tbl18212_ '%#module gxc#xform-identity)
(table-set! _tbl18212_ '%#import gxc#xform-identity)
(table-set! _tbl18212_ '%#export gxc#xform-identity)
(table-set! _tbl18212_ '%#provide gxc#xform-identity)
(table-set! _tbl18212_ '%#extern gxc#xform-identity)
(table-set! _tbl18212_ '%#define-values gxc#xform-identity)
(table-set! _tbl18212_ '%#define-syntax gxc#xform-identity)
(table-set! _tbl18212_ '%#define-alias gxc#xform-identity)
(table-set! _tbl18212_ '%#declare gxc#xform-identity)
_tbl18212_))))
(define gxc#&identity
(make-promise
(lambda ()
(let ((_tbl18208_ (make-table 'test: eq?)))
(hash-copy! _tbl18208_ (force gxc#&identity-special-form))
(hash-copy! _tbl18208_ (force gxc#&identity-expression))
_tbl18208_))))
(define gxc#&basic-xform-expression
(make-promise
(lambda ()
(let ((_tbl18204_ (make-table 'test: eq?)))
(table-set!
_tbl18204_
'%#begin-annotation
gxc#xform-begin-annotation%)
(table-set! _tbl18204_ '%#lambda gxc#xform-lambda%)
(table-set! _tbl18204_ '%#case-lambda gxc#xform-case-lambda%)
(table-set! _tbl18204_ '%#let-values gxc#xform-let-values%)
(table-set! _tbl18204_ '%#letrec-values gxc#xform-let-values%)
(table-set! _tbl18204_ '%#letrec*-values gxc#xform-let-values%)
(table-set! _tbl18204_ '%#quote gxc#xform-identity)
(table-set! _tbl18204_ '%#quote-syntax gxc#xform-identity)
(table-set! _tbl18204_ '%#call gxc#xform-operands)
(table-set! _tbl18204_ '%#if gxc#xform-operands)
(table-set! _tbl18204_ '%#ref gxc#xform-identity)
(table-set! _tbl18204_ '%#set! gxc#xform-setq%)
(table-set! _tbl18204_ '%#struct-instance? gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-direct-instance? gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-ref gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-set! gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-direct-ref gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-direct-set! gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-unchecked-ref gxc#xform-operands)
(table-set! _tbl18204_ '%#struct-unchecked-set! gxc#xform-operands)
_tbl18204_))))
(define gxc#&basic-xform
(make-promise
(lambda ()
(let ((_tbl18200_ (make-table 'test: eq?)))
(hash-copy! _tbl18200_ (force gxc#&basic-xform-expression))
(hash-copy! _tbl18200_ (force gxc#&identity))
(table-set! _tbl18200_ '%#begin gxc#xform-begin%)
(table-set! _tbl18200_ '%#begin-syntax gxc#xform-begin-syntax%)
(table-set! _tbl18200_ '%#module gxc#xform-module%)
(table-set! _tbl18200_ '%#define-values gxc#xform-define-values%)
(table-set! _tbl18200_ '%#define-syntax gxc#xform-define-syntax%)
_tbl18200_))))
(define gxc#&collect-mutators
(make-promise
(lambda ()
(let ((_tbl18196_ (make-table 'test: eq?)))
(hash-copy! _tbl18196_ (force gxc#&void))
(table-set! _tbl18196_ '%#begin gxc#collect-begin%)
(table-set! _tbl18196_ '%#begin-syntax gxc#collect-begin-syntax%)
(table-set!
_tbl18196_
'%#begin-annotation
gxc#collect-begin-annotation%)
(table-set! _tbl18196_ '%#module gxc#collect-module%)
(table-set! _tbl18196_ '%#define-values gxc#collect-define-values%)
(table-set! _tbl18196_ '%#define-syntax gxc#collect-define-syntax%)
(table-set! _tbl18196_ '%#lambda gxc#collect-body-lambda%)
(table-set! _tbl18196_ '%#case-lambda gxc#collect-body-case-lambda%)
(table-set! _tbl18196_ '%#let-values gxc#collect-body-let-values%)
(table-set! _tbl18196_ '%#letrec-values gxc#collect-body-let-values%)
(table-set! _tbl18196_ '%#letrec*-values gxc#collect-body-let-values%)
(table-set! _tbl18196_ '%#call gxc#collect-operands)
(table-set! _tbl18196_ '%#if gxc#collect-operands)
(table-set! _tbl18196_ '%#set! gxc#collect-mutators-setq%)
(table-set! _tbl18196_ '%#struct-instance? gxc#collect-operands)
(table-set!
_tbl18196_
'%#struct-direct-instance?
gxc#collect-operands)
(table-set! _tbl18196_ '%#struct-ref gxc#collect-operands)
(table-set! _tbl18196_ '%#struct-set! gxc#collect-operands)
(table-set! _tbl18196_ '%#struct-direct-ref gxc#collect-operands)
(table-set! _tbl18196_ '%#struct-direct-set! gxc#collect-operands)
(table-set! _tbl18196_ '%#struct-unchecked-ref gxc#collect-operands)
(table-set! _tbl18196_ '%#struct-unchecked-set! gxc#collect-operands)
_tbl18196_))))
(define gxc#apply-collect-mutators
(lambda (_stx18189_ . _args18191_)
(call-with-parameters
(lambda () (apply gxc#compile-e _stx18189_ _args18191_))
gxc#current-compile-methods
(force gxc#&collect-mutators))))
(define gxc#&expression-subst
(make-promise
(lambda ()
(let ((_tbl18186_ (make-table 'test: eq?)))
(hash-copy! _tbl18186_ (force gxc#&basic-xform-expression))
(table-set! _tbl18186_ '%#begin gxc#xform-begin%)
(table-set! _tbl18186_ '%#ref gxc#expression-subst-ref%)
(table-set! _tbl18186_ '%#set! gxc#expression-subst-setq%)
_tbl18186_))))
(define gxc#apply-expression-subst
(lambda (_stx18179_ . _args18181_)
(call-with-parameters
(lambda () (apply gxc#compile-e _stx18179_ _args18181_))
gxc#current-compile-methods
(force gxc#&expression-subst))))
(define gxc#&expression-subst*
(make-promise
(lambda ()
(let ((_tbl18176_ (make-table 'test: eq?)))
(hash-copy! _tbl18176_ (force gxc#&expression-subst))
(table-set! _tbl18176_ '%#ref gxc#expression-subst*-ref%)
(table-set! _tbl18176_ '%#set! gxc#expression-subst*-setq%)
_tbl18176_))))
(define gxc#apply-expression-subst*
(lambda (_stx18169_ . _args18171_)
(call-with-parameters
(lambda () (apply gxc#compile-e _stx18169_ _args18171_))
gxc#current-compile-methods
(force gxc#&expression-subst*))))
(define gxc#&find-expression
(make-promise
(lambda ()
(let ((_tbl18166_ (make-table 'test: eq?)))
(hash-copy! _tbl18166_ (force gxc#&false-expression))
(table-set! _tbl18166_ '%#begin gxc#find-body%)
(table-set! _tbl18166_ '%#begin-annotation gxc#find-begin-annotation%)
(table-set! _tbl18166_ '%#lambda gxc#find-lambda%)
(table-set! _tbl18166_ '%#case-lambda gxc#find-case-lambda%)
(table-set! _tbl18166_ '%#let-values gxc#find-let-values%)
(table-set! _tbl18166_ '%#letrec-values gxc#find-let-values%)
(table-set! _tbl18166_ '%#letrec*-values gxc#find-let-values%)
(table-set! _tbl18166_ '%#call gxc#find-body%)
(table-set! _tbl18166_ '%#if gxc#find-body%)
(table-set! _tbl18166_ '%#set! gxc#find-setq%)
(table-set! _tbl18166_ '%#struct-instance? gxc#find-body%)
(table-set! _tbl18166_ '%#struct-direct-instance? gxc#find-body%)
(table-set! _tbl18166_ '%#struct-ref gxc#find-body%)
(table-set! _tbl18166_ '%#struct-set! gxc#find-body%)
(table-set! _tbl18166_ '%#struct-direct-ref gxc#find-body%)
(table-set! _tbl18166_ '%#struct-direct-set! gxc#find-body%)
(table-set! _tbl18166_ '%#struct-unchecked-ref gxc#find-body%)
(table-set! _tbl18166_ '%#struct-unchecked-set! gxc#find-body%)
_tbl18166_))))
(define gxc#&find-var-refs
(make-promise
(lambda ()
(let ((_tbl18162_ (make-table 'test: eq?)))
(hash-copy! _tbl18162_ (force gxc#&find-expression))
(table-set! _tbl18162_ '%#ref gxc#find-var-refs-ref%)
(table-set! _tbl18162_ '%#set! gxc#find-var-refs-setq%)
_tbl18162_))))
(define gxc#apply-find-var-refs
(lambda (_stx18155_ . _args18157_)
(call-with-parameters
(lambda () (apply gxc#compile-e _stx18155_ _args18157_))
gxc#current-compile-methods
(force gxc#&find-var-refs))))
(define gxc#&collect-runtime-refs
(make-promise
(lambda ()
(let ((_tbl18152_ (make-table 'test: eq?)))
(hash-copy! _tbl18152_ (force gxc#&collect-expression-refs))
(table-set! _tbl18152_ '%#ref gxc#collect-runtime-refs-ref%)
(table-set! _tbl18152_ '%#set! gxc#collect-runtime-refs-setq%)
_tbl18152_))))
(define gxc#apply-collect-runtime-refs
(lambda (_stx18145_ . _args18147_)
(call-with-parameters
(lambda () (apply gxc#compile-e _stx18145_ _args18147_))
gxc#current-compile-methods
(force gxc#&collect-runtime-refs))))
(define gxc#xform-identity (lambda (_stx18142_ . _args18143_) _stx18142_))
(define gxc#xform-wrap-source
(lambda (_stx18139_ _src-stx18140_)
(gx#stx-wrap-source _stx18139_ (gx#stx-source _src-stx18140_))))
(define gxc#xform-apply-compile-e
(lambda (_args18133_)
(lambda (_g1813418136_)
(apply gxc#compile-e _g1813418136_ _args18133_))))
(define gxc#xform-begin%
(lambda (_stx18092_ . _args18093_)
(let* ((_g1809518105_
(lambda (_g1809618102_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1809618102_)))
(_g1809418130_
(lambda (_g1809618108_)
(if (gx#stx-pair? _g1809618108_)
(let ((_e1809818110_ (gx#stx-e _g1809618108_)))
(let ((_hd1809918113_ (##car _e1809818110_))
(_tl1810018115_ (##cdr _e1809818110_)))
((lambda (_L18118_)
(let ((_forms18128_
(map (gxc#xform-apply-compile-e _args18093_)
_L18118_)))
(gxc#xform-wrap-source
(cons '%#begin _forms18128_)
_stx18092_)))
_tl1810018115_)))
(_g1809518105_ _g1809618108_)))))
(_g1809418130_ _stx18092_))))
(define gxc#xform-begin-syntax%
(lambda (_stx18050_ . _args18051_)
(let* ((_g1805318063_
(lambda (_g1805418060_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1805418060_)))
(_g1805218089_
(lambda (_g1805418066_)
(if (gx#stx-pair? _g1805418066_)
(let ((_e1805618068_ (gx#stx-e _g1805418066_)))
(let ((_hd1805718071_ (##car _e1805618068_))
(_tl1805818073_ (##cdr _e1805618068_)))
((lambda (_L18076_)
(call-with-parameters
(lambda ()
(let ((_forms18087_
(map (gxc#xform-apply-compile-e
_args18051_)
_L18076_)))
(gxc#xform-wrap-source
(cons '%#begin-syntax _forms18087_)
_stx18050_)))
gx#current-expander-phi
(fx+ (gx#current-expander-phi) '1)))
_tl1805818073_)))
(_g1805318063_ _g1805418066_)))))
(_g1805218089_ _stx18050_))))
(define gxc#xform-module%
(lambda (_stx17987_ . _args17988_)
(let* ((_g1799018004_
(lambda (_g1799118001_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1799118001_)))
(_g1798918047_
(lambda (_g1799118007_)
(if (gx#stx-pair? _g1799118007_)
(let ((_e1799418009_ (gx#stx-e _g1799118007_)))
(let ((_hd1799518012_ (##car _e1799418009_))
(_tl1799618014_ (##cdr _e1799418009_)))
(if (gx#stx-pair? _tl1799618014_)
(let ((_e1799718017_ (gx#stx-e _tl1799618014_)))
(let ((_hd1799818020_ (##car _e1799718017_))
(_tl1799918022_ (##cdr _e1799718017_)))
((lambda (_L18025_ _L18026_)
(let* ((_ctx18039_
(gx#syntax-local-e__0 _L18026_))
(_code18041_
(##structure-ref
_ctx18039_
'11
gx#module-context::t
'#f))
(_code18044_
(call-with-parameters
(lambda ()
(apply gxc#compile-e
_code18041_
_args17988_))
gx#current-expander-context
_ctx18039_)))
(##structure-set!
_ctx18039_
_code18044_
'11
gx#module-context::t
'#f)
(gxc#xform-wrap-source
(cons '%#module
(cons _L18026_
(cons _code18044_ '())))
_stx17987_)))
_tl1799918022_
_hd1799818020_)))
(_g1799018004_ _g1799118007_))))
(_g1799018004_ _g1799118007_)))))
(_g1798918047_ _stx17987_))))
(define gxc#xform-define-values%
(lambda (_stx17917_ . _args17918_)
(let* ((_g1792017937_
(lambda (_g1792117934_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1792117934_)))
(_g1791917984_
(lambda (_g1792117940_)
(if (gx#stx-pair? _g1792117940_)
(let ((_e1792417942_ (gx#stx-e _g1792117940_)))
(let ((_hd1792517945_ (##car _e1792417942_))
(_tl1792617947_ (##cdr _e1792417942_)))
(if (gx#stx-pair? _tl1792617947_)
(let ((_e1792717950_ (gx#stx-e _tl1792617947_)))
(let ((_hd1792817953_ (##car _e1792717950_))
(_tl1792917955_ (##cdr _e1792717950_)))
(if (gx#stx-pair? _tl1792917955_)
(let ((_e1793017958_
(gx#stx-e _tl1792917955_)))
(let ((_hd1793117961_
(##car _e1793017958_))
(_tl1793217963_
(##cdr _e1793017958_)))
(if (gx#stx-null? _tl1793217963_)
((lambda (_L17966_ _L17967_)
(let ((_expr17982_
(apply gxc#compile-e
_L17966_
_args17918_)))
(gxc#xform-wrap-source
(cons '%#define-values
(cons _L17967_
(cons _expr17982_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
'())))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
_stx17917_)))
_hd1793117961_
_hd1792817953_)
(_g1792017937_ _g1792117940_))))
(_g1792017937_ _g1792117940_))))
(_g1792017937_ _g1792117940_))))
(_g1792017937_ _g1792117940_)))))
(_g1791917984_ _stx17917_))))
(define gxc#xform-define-syntax%
(lambda (_stx17846_ . _args17847_)
(let* ((_g1784917866_
(lambda (_g1785017863_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1785017863_)))
(_g1784817914_
(lambda (_g1785017869_)
(if (gx#stx-pair? _g1785017869_)
(let ((_e1785317871_ (gx#stx-e _g1785017869_)))
(let ((_hd1785417874_ (##car _e1785317871_))
(_tl1785517876_ (##cdr _e1785317871_)))
(if (gx#stx-pair? _tl1785517876_)
(let ((_e1785617879_ (gx#stx-e _tl1785517876_)))
(let ((_hd1785717882_ (##car _e1785617879_))
(_tl1785817884_ (##cdr _e1785617879_)))
(if (gx#stx-pair? _tl1785817884_)
(let ((_e1785917887_
(gx#stx-e _tl1785817884_)))
(let ((_hd1786017890_
(##car _e1785917887_))
(_tl1786117892_
(##cdr _e1785917887_)))
(if (gx#stx-null? _tl1786117892_)
((lambda (_L17895_ _L17896_)
(call-with-parameters
(lambda ()
(let ((_expr17912_
(apply gxc#compile-e
_L17895_
_args17847_)))
(gxc#xform-wrap-source
(cons '%#define-syntax
(cons _L17896_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
(cons _expr17912_ '())))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
_stx17846_)))
gx#current-expander-phi
(fx+ (gx#current-expander-phi)
'1)))
_hd1786017890_
_hd1785717882_)
(_g1784917866_ _g1785017869_))))
(_g1784917866_ _g1785017869_))))
(_g1784917866_ _g1785017869_))))
(_g1784917866_ _g1785017869_)))))
(_g1784817914_ _stx17846_))))
(define gxc#xform-begin-annotation%
(lambda (_stx17776_ . _args17777_)
(let* ((_g1777917796_
(lambda (_g1778017793_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1778017793_)))
(_g1777817843_
(lambda (_g1778017799_)
(if (gx#stx-pair? _g1778017799_)
(let ((_e1778317801_ (gx#stx-e _g1778017799_)))
(let ((_hd1778417804_ (##car _e1778317801_))
(_tl1778517806_ (##cdr _e1778317801_)))
(if (gx#stx-pair? _tl1778517806_)
(let ((_e1778617809_ (gx#stx-e _tl1778517806_)))
(let ((_hd1778717812_ (##car _e1778617809_))
(_tl1778817814_ (##cdr _e1778617809_)))
(if (gx#stx-pair? _tl1778817814_)
(let ((_e1778917817_
(gx#stx-e _tl1778817814_)))
(let ((_hd1779017820_
(##car _e1778917817_))
(_tl1779117822_
(##cdr _e1778917817_)))
(if (gx#stx-null? _tl1779117822_)
((lambda (_L17825_ _L17826_)
(let ((_expr17841_
(apply gxc#compile-e
_L17825_
_args17777_)))
(gxc#xform-wrap-source
(cons '%#begin-annotation
(cons _L17826_
(cons _expr17841_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
'())))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
_stx17776_)))
_hd1779017820_
_hd1778717812_)
(_g1777917796_ _g1778017799_))))
(_g1777917796_ _g1778017799_))))
(_g1777917796_ _g1778017799_))))
(_g1777917796_ _g1778017799_)))))
(_g1777817843_ _stx17776_))))
(define gxc#xform-lambda%
(lambda (_stx17719_ . _args17720_)
(let* ((_g1772217736_
(lambda (_g1772317733_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1772317733_)))
(_g1772117773_
(lambda (_g1772317739_)
(if (gx#stx-pair? _g1772317739_)
(let ((_e1772617741_ (gx#stx-e _g1772317739_)))
(let ((_hd1772717744_ (##car _e1772617741_))
(_tl1772817746_ (##cdr _e1772617741_)))
(if (gx#stx-pair? _tl1772817746_)
(let ((_e1772917749_ (gx#stx-e _tl1772817746_)))
(let ((_hd1773017752_ (##car _e1772917749_))
(_tl1773117754_ (##cdr _e1772917749_)))
((lambda (_L17757_ _L17758_)
(let ((_body17771_
(map (gxc#xform-apply-compile-e
_args17720_)
_L17757_)))
(gxc#xform-wrap-source
(cons '%#lambda
(cons _L17758_ _body17771_))
_stx17719_)))
_tl1773117754_
_hd1773017752_)))
(_g1772217736_ _g1772317739_))))
(_g1772217736_ _g1772317739_)))))
(_g1772117773_ _stx17719_))))
(define gxc#xform-case-lambda%
(lambda (_stx17632_ . _args17633_)
(letrec ((_clause-e17635_
(lambda (_clause17676_)
(let* ((_g1767817689_
(lambda (_g1767917686_)
(gx#raise-syntax-error
'#f
'"Bad syntax"
_g1767917686_)))
(_g1767717716_
(lambda (_g1767917692_)
(if (gx#stx-pair? _g1767917692_)
(let ((_e1768217694_ (gx#stx-e _g1767917692_)))
(let ((_hd1768317697_ (##car _e1768217694_))
(_tl1768417699_ (##cdr _e1768217694_)))
((lambda (_L17702_ _L17703_)
(let ((_body17714_
(map (gxc#xform-apply-compile-e
_args17633_)
_L17702_)))
(cons _L17703_ _body17714_)))
_tl1768417699_
_hd1768317697_)))
(_g1767817689_ _g1767917692_)))))
(_g1767717716_ _clause17676_)))))
(let* ((_g1763717647_
(lambda (_g1763817644_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1763817644_)))
(_g1763617673_
(lambda (_g1763817650_)
(if (gx#stx-pair? _g1763817650_)
(let ((_e1764017652_ (gx#stx-e _g1763817650_)))
(let ((_hd1764117655_ (##car _e1764017652_))
(_tl1764217657_ (##cdr _e1764017652_)))
((lambda (_L17660_)
(let ((_clauses17671_
(map _clause-e17635_ _L17660_)))
(gxc#xform-wrap-source
(cons '%#case-lambda _clauses17671_)
_stx17632_)))
_tl1764217657_)))
(_g1763717647_ _g1763817650_)))))
(_g1763617673_ _stx17632_)))))
(define gxc#xform-let-values%
(lambda (_stx17426_ . _args17427_)
(let* ((_g1742917462_
(lambda (_g1743017459_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1743017459_)))
(_g1742817629_
(lambda (_g1743017465_)
(if (gx#stx-pair? _g1743017465_)
(let ((_e1743517467_ (gx#stx-e _g1743017465_)))
(let ((_hd1743617470_ (##car _e1743517467_))
(_tl1743717472_ (##cdr _e1743517467_)))
(if (gx#stx-pair? _tl1743717472_)
(let ((_e1743817475_ (gx#stx-e _tl1743717472_)))
(let ((_hd1743917478_ (##car _e1743817475_))
(_tl1744017480_ (##cdr _e1743817475_)))
(if (gx#stx-pair/null? _hd1743917478_)
(let ((_g18218_
(gx#syntax-split-splice
_hd1743917478_
'0)))
(begin
(let ((_g18219_
(if (##values? _g18218_)
(##vector-length _g18218_)
1)))
(if (not (##fx= _g18219_ 2))
(error "Context expects 2 values"
_g18219_)))
(let ((_target1744117483_
(##vector-ref _g18218_ 0))
(_tl1744317485_
(##vector-ref _g18218_ 1)))
(if (gx#stx-null? _tl1744317485_)
(letrec ((_loop1744417488_
(lambda (_hd1744217491_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
_expr1744817493_
_hd1744917495_)
(if (gx#stx-pair? _hd1744217491_)
(let ((_e1744517498_ (gx#stx-e _hd1744217491_)))
(let ((_lp-hd1744617501_ (##car _e1744517498_))
(_lp-tl1744717503_ (##cdr _e1744517498_)))
(if (gx#stx-pair? _lp-hd1744617501_)
(let ((_e1745217506_
(gx#stx-e _lp-hd1744617501_)))
(let ((_hd1745317509_ (##car _e1745217506_))
(_tl1745417511_ (##cdr _e1745217506_)))
(if (gx#stx-pair? _tl1745417511_)
(let ((_e1745517514_
(gx#stx-e _tl1745417511_)))
(let ((_hd1745617517_
(##car _e1745517514_))
(_tl1745717519_
(##cdr _e1745517514_)))
(if (gx#stx-null? _tl1745717519_)
(_loop1744417488_
_lp-tl1744717503_
(cons _hd1745617517_
_expr1744817493_)
(cons _hd1745317509_
_hd1744917495_))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_))))
(let ((_expr1745017522_ (reverse _expr1744817493_))
(_hd1745117524_ (reverse _hd1744917495_)))
((lambda (_L17527_ _L17528_ _L17529_ _L17530_)
(let* ((_g1754917565_
(lambda (_g1755017562_)
(gx#raise-syntax-error
'#f
'"Bad syntax"
_g1755017562_)))
(_g1754817619_
(lambda (_g1755017568_)
(if (gx#stx-pair/null? _g1755017568_)
(let ((_g18220_
(gx#syntax-split-splice
_g1755017568_
'0)))
(begin
(let ((_g18221_
(if (##values? _g18220_)
(##vector-length
_g18220_)
1)))
(if (not (##fx= _g18221_ 2))
(error "Context expects 2 values"
_g18221_)))
(let ((_target1755217570_
(##vector-ref _g18220_ 0))
(_tl1755417572_
(##vector-ref _g18220_ 1)))
(if (gx#stx-null?
_tl1755417572_)
(letrec ((_loop1755517575_
(lambda (_hd1755317578_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
_expr1755917580_)
(if (gx#stx-pair? _hd1755317578_)
(let ((_e1755617583_ (gx#syntax-e _hd1755317578_)))
(let ((_lp-hd1755717586_ (##car _e1755617583_))
(_lp-tl1755817588_ (##cdr _e1755617583_)))
(_loop1755517575_
_lp-tl1755817588_
(cons _lp-hd1755717586_ _expr1755917580_))))
(let ((_expr1756017591_ (reverse _expr1755917580_)))
((lambda (_L17594_)
(let ()
(let ((_body17607_
(map (gxc#xform-apply-compile-e
_args17427_)
_L17527_)))
(gxc#xform-wrap-source
(cons _L17530_
(cons (begin
(gx#syntax-check-splice-targets
_L17594_
_L17529_)
(foldr2 (lambda (_g1760817612_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
_g1760917614_
_g1761017616_)
(cons (cons _g1760917614_ (cons _g1760817612_ '()))
_g1761017616_))
'()
_L17594_
_L17529_))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
_body17607_))
_stx17426_))))
_expr1756017591_))))))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
(_loop1755517575_
_target1755217570_
'()))
(_g1754917565_
_g1755017568_)))))
(_g1754917565_ _g1755017568_)))))
(_g1754817619_
(map (gxc#xform-apply-compile-e _args17427_)
(foldr1 (lambda (_g1762117624_
_g1762217626_)
(cons _g1762117624_
_g1762217626_))
'()
_L17528_)))))
_tl1744017480_
_expr1745017522_
_hd1745117524_
_hd1743617470_))))))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
(_loop1744417488_
_target1744117483_
'()
'()))
(_g1742917462_ _g1743017465_)))))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_)))))
(_g1742817629_ _stx17426_))))
(define gxc#xform-operands
(lambda (_stx17382_ . _args17383_)
(let* ((_g1738517396_
(lambda (_g1738617393_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1738617393_)))
(_g1738417423_
(lambda (_g1738617399_)
(if (gx#stx-pair? _g1738617399_)
(let ((_e1738917401_ (gx#stx-e _g1738617399_)))
(let ((_hd1739017404_ (##car _e1738917401_))
(_tl1739117406_ (##cdr _e1738917401_)))
((lambda (_L17409_ _L17410_)
(let ((_rands17421_
(map (gxc#xform-apply-compile-e _args17383_)
_L17409_)))
(gxc#xform-wrap-source
(cons _L17410_ _rands17421_)
_stx17382_)))
_tl1739117406_
_hd1739017404_)))
(_g1738517396_ _g1738617399_)))))
(_g1738417423_ _stx17382_))))
(define gxc#xform-call% gxc#xform-operands)
(define gxc#xform-setq%
(lambda (_stx17312_ . _args17313_)
(let* ((_g1731517332_
(lambda (_g1731617329_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1731617329_)))
(_g1731417379_
(lambda (_g1731617335_)
(if (gx#stx-pair? _g1731617335_)
(let ((_e1731917337_ (gx#stx-e _g1731617335_)))
(let ((_hd1732017340_ (##car _e1731917337_))
(_tl1732117342_ (##cdr _e1731917337_)))
(if (gx#stx-pair? _tl1732117342_)
(let ((_e1732217345_ (gx#stx-e _tl1732117342_)))
(let ((_hd1732317348_ (##car _e1732217345_))
(_tl1732417350_ (##cdr _e1732217345_)))
(if (gx#stx-pair? _tl1732417350_)
(let ((_e1732517353_
(gx#stx-e _tl1732417350_)))
(let ((_hd1732617356_
(##car _e1732517353_))
(_tl1732717358_
(##cdr _e1732517353_)))
(if (gx#stx-null? _tl1732717358_)
((lambda (_L17361_ _L17362_)
(let ((_expr17377_
(apply gxc#compile-e
_L17361_
_args17313_)))
(gxc#xform-wrap-source
(cons '%#set!
(cons _L17362_
(cons _expr17377_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
'())))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
_stx17312_)))
_hd1732617356_
_hd1732317348_)
(_g1731517332_ _g1731617335_))))
(_g1731517332_ _g1731617335_))))
(_g1731517332_ _g1731617335_))))
(_g1731517332_ _g1731617335_)))))
(_g1731417379_ _stx17312_))))
(define gxc#collect-mutators-setq%
(lambda (_stx17243_)
(let* ((_g1724517262_
(lambda (_g1724617259_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1724617259_)))
(_g1724417309_
(lambda (_g1724617265_)
(if (gx#stx-pair? _g1724617265_)
(let ((_e1724917267_ (gx#stx-e _g1724617265_)))
(let ((_hd1725017270_ (##car _e1724917267_))
(_tl1725117272_ (##cdr _e1724917267_)))
(if (gx#stx-pair? _tl1725117272_)
(let ((_e1725217275_ (gx#stx-e _tl1725117272_)))
(let ((_hd1725317278_ (##car _e1725217275_))
(_tl1725417280_ (##cdr _e1725217275_)))
(if (gx#stx-pair? _tl1725417280_)
(let ((_e1725517283_
(gx#stx-e _tl1725417280_)))
(let ((_hd1725617286_
(##car _e1725517283_))
(_tl1725717288_
(##cdr _e1725517283_)))
(if (gx#stx-null? _tl1725717288_)
((lambda (_L17291_ _L17292_)
(let ((_sym17307_
(gxc#identifier-symbol
_L17292_)))
(gxc#verbose
'"collect mutator "
_sym17307_)
(table-set!
(gxc#current-compile-mutators)
_sym17307_
'#t)
(gxc#compile-e _L17291_)))
_hd1725617286_
_hd1725317278_)
(_g1724517262_ _g1724617265_))))
(_g1724517262_ _g1724617265_))))
(_g1724517262_ _g1724617265_))))
(_g1724517262_ _g1724617265_)))))
(_g1724417309_ _stx17243_))))
(define gxc#expression-subst-ref%
(lambda (_stx17190_ _id17191_ _new-id17192_)
(let* ((_g1719417207_
(lambda (_g1719517204_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1719517204_)))
(_g1719317240_
(lambda (_g1719517210_)
(if (gx#stx-pair? _g1719517210_)
(let ((_e1719717212_ (gx#stx-e _g1719517210_)))
(let ((_hd1719817215_ (##car _e1719717212_))
(_tl1719917217_ (##cdr _e1719717212_)))
(if (gx#stx-pair? _tl1719917217_)
(let ((_e1720017220_ (gx#stx-e _tl1719917217_)))
(let ((_hd1720117223_ (##car _e1720017220_))
(_tl1720217225_ (##cdr _e1720017220_)))
(if (gx#stx-null? _tl1720217225_)
((lambda (_L17228_)
(if (gx#free-identifier=?
_L17228_
_id17191_)
(gxc#xform-wrap-source
(cons '%#ref
(cons _new-id17192_ '()))
_stx17190_)
_stx17190_))
_hd1720117223_)
(_g1719417207_ _g1719517210_))))
(_g1719417207_ _g1719517210_))))
(_g1719417207_ _g1719517210_)))))
(_g1719317240_ _stx17190_))))
(define gxc#expression-subst*-ref%
(lambda (_stx17131_ _subst17132_)
(let* ((_g1713417147_
(lambda (_g1713517144_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1713517144_)))
(_g1713317187_
(lambda (_g1713517150_)
(if (gx#stx-pair? _g1713517150_)
(let ((_e1713717152_ (gx#stx-e _g1713517150_)))
(let ((_hd1713817155_ (##car _e1713717152_))
(_tl1713917157_ (##cdr _e1713717152_)))
(if (gx#stx-pair? _tl1713917157_)
(let ((_e1714017160_ (gx#stx-e _tl1713917157_)))
(let ((_hd1714117163_ (##car _e1714017160_))
(_tl1714217165_ (##cdr _e1714017160_)))
(if (gx#stx-null? _tl1714217165_)
((lambda (_L17168_)
(let ((_$e17182_
(find (lambda (_sub17180_)
(gx#free-identifier=?
_L17168_
(car _sub17180_)))
_subst17132_)))
(if _$e17182_
((lambda (_sub17185_)
(gxc#xform-wrap-source
(cons '%#ref
(cons (cdr _sub17185_)
'()))
_stx17131_))
_$e17182_)
_stx17131_)))
_hd1714117163_)
(_g1713417147_ _g1713517150_))))
(_g1713417147_ _g1713517150_))))
(_g1713417147_ _g1713517150_)))))
(_g1713317187_ _stx17131_))))
(define gxc#expression-subst-setq%
(lambda (_stx17059_ _id17060_ _new-id17061_)
(let* ((_g1706317080_
(lambda (_g1706417077_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1706417077_)))
(_g1706217128_
(lambda (_g1706417083_)
(if (gx#stx-pair? _g1706417083_)
(let ((_e1706717085_ (gx#stx-e _g1706417083_)))
(let ((_hd1706817088_ (##car _e1706717085_))
(_tl1706917090_ (##cdr _e1706717085_)))
(if (gx#stx-pair? _tl1706917090_)
(let ((_e1707017093_ (gx#stx-e _tl1706917090_)))
(let ((_hd1707117096_ (##car _e1707017093_))
(_tl1707217098_ (##cdr _e1707017093_)))
(if (gx#stx-pair? _tl1707217098_)
(let ((_e1707317101_
(gx#stx-e _tl1707217098_)))
(let ((_hd1707417104_
(##car _e1707317101_))
(_tl1707517106_
(##cdr _e1707317101_)))
(if (gx#stx-null? _tl1707517106_)
((lambda (_L17109_ _L17110_)
(let ((_new-expr17125_
(gxc#compile-e
_L17109_
_id17060_
_new-id17061_))
(_new-xid17126_
(if (gx#free-identifier=?
_L17110_
_id17060_)
_new-id17061_
_L17110_)))
(gxc#xform-wrap-source
(cons '%#set!
(cons _new-xid17126_
(cons _new-expr17125_
;;<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
'())))
;;>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
_stx17059_)))
_hd1707417104_
_hd1707117096_)
(_g1706317080_ _g1706417083_))))
(_g1706317080_ _g1706417083_))))
(_g1706317080_ _g1706417083_))))
(_g1706317080_ _g1706417083_)))))
(_g1706217128_ _stx17059_))))
;; gxc-generated (Gerbil compiler output); prefer regenerating over hand edits.
;; Substitute identifiers in a (%#set! id expr) form: destructures stx as
;; (_ id expr), recursively compiles expr under the substitution alist, and
;; replaces id with (cdr sub) for the first sub whose (car sub) is
;; free-identifier=? to it; rebuilds %#set! preserving stx's source location.
(define gxc#expression-subst*-setq%
  (lambda (_stx16983_ _subst16984_)
    (let* ((_g1698617003_
            ;; failure continuation: malformed %#set! form
            (lambda (_g1698717000_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1698717000_)))
           (_g1698517056_
            (lambda (_g1698717006_)
              (if (gx#stx-pair? _g1698717006_)
                  (let ((_e1699017008_ (gx#stx-e _g1698717006_)))
                    (let ((_hd1699117011_ (##car _e1699017008_))
                          (_tl1699217013_ (##cdr _e1699017008_)))
                      (if (gx#stx-pair? _tl1699217013_)
                          (let ((_e1699317016_ (gx#stx-e _tl1699217013_)))
                            (let ((_hd1699417019_ (##car _e1699317016_))
                                  (_tl1699517021_ (##cdr _e1699317016_)))
                              (if (gx#stx-pair? _tl1699517021_)
                                  (let ((_e1699617024_
                                         (gx#stx-e _tl1699517021_)))
                                    (let ((_hd1699717027_
                                           (##car _e1699617024_))
                                          (_tl1699817029_
                                           (##cdr _e1699617024_)))
                                      (if (gx#stx-null? _tl1699817029_)
                                          ;; _L17032_ = expr, _L17033_ = target id
                                          ((lambda (_L17032_ _L17033_)
                                             (let ((_new-expr17053_
                                                    (gxc#compile-e
                                                     _L17032_
                                                     _subst16984_))
                                                   (_new-xid17054_
                                                    ;; subst is an alist of (old-id . new-id)
                                                    (let ((_$e17050_
                                                           (find (lambda (_sub17048_)
                                                                   (gx#free-identifier=? _L17033_ (car _sub17048_)))
                                                                 _subst16984_)))
                                                      (if _$e17050_ (cdr _$e17050_) _L17033_))))
                                               (gxc#xform-wrap-source
                                                (cons '%#set!
                                                      (cons _new-xid17054_
                                                            (cons _new-expr17053_
                                                                  '())))
                                                _stx16983_)))
                                           _hd1699717027_
                                           _hd1699417019_)
                                          (_g1698617003_ _g1698717006_))))
                                  (_g1698617003_ _g1698717006_))))
                          (_g1698617003_ _g1698717006_))))
                  (_g1698617003_ _g1698717006_)))))
      (_g1698517056_ _stx16983_))))
;; gxc-generated. For a (%#ref id) form, increments the reference count for
;; id's symbol in the hash table ht (hash-update! with 1+ and default 0).
(define gxc#collect-runtime-refs-ref%
  (lambda (_stx16929_ _ht16930_)
    (let* ((_g1693216945_
            ;; failure continuation: malformed %#ref form
            (lambda (_g1693316942_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1693316942_)))
           (_g1693116980_
            (lambda (_g1693316948_)
              (if (gx#stx-pair? _g1693316948_)
                  (let ((_e1693516950_ (gx#stx-e _g1693316948_)))
                    (let ((_hd1693616953_ (##car _e1693516950_))
                          (_tl1693716955_ (##cdr _e1693516950_)))
                      (if (gx#stx-pair? _tl1693716955_)
                          (let ((_e1693816958_ (gx#stx-e _tl1693716955_)))
                            (let ((_hd1693916961_ (##car _e1693816958_))
                                  (_tl1694016963_ (##cdr _e1693816958_)))
                              (if (gx#stx-null? _tl1694016963_)
                                  ;; _L16966_ = referenced identifier
                                  ((lambda (_L16966_)
                                     (let ((_eid16978_
                                            (gxc#identifier-symbol
                                             _L16966_)))
                                       (hash-update!
                                        _ht16930_
                                        _eid16978_
                                        1+
                                        '0)))
                                   _hd1693916961_)
                                  (_g1693216945_ _g1693316948_))))
                          (_g1693216945_ _g1693316948_))))
                  (_g1693216945_ _g1693316948_)))))
      (_g1693116980_ _stx16929_))))
;; gxc-generated. For a (%#set! id expr) form: bumps id's reference count in
;; ht (as in collect-runtime-refs-ref%) and then recurses into expr.
(define gxc#collect-runtime-refs-setq%
  (lambda (_stx16859_ _ht16860_)
    (let* ((_g1686216879_
            ;; failure continuation: malformed %#set! form
            (lambda (_g1686316876_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1686316876_)))
           (_g1686116926_
            (lambda (_g1686316882_)
              (if (gx#stx-pair? _g1686316882_)
                  (let ((_e1686616884_ (gx#stx-e _g1686316882_)))
                    (let ((_hd1686716887_ (##car _e1686616884_))
                          (_tl1686816889_ (##cdr _e1686616884_)))
                      (if (gx#stx-pair? _tl1686816889_)
                          (let ((_e1686916892_ (gx#stx-e _tl1686816889_)))
                            (let ((_hd1687016895_ (##car _e1686916892_))
                                  (_tl1687116897_ (##cdr _e1686916892_)))
                              (if (gx#stx-pair? _tl1687116897_)
                                  (let ((_e1687216900_
                                         (gx#stx-e _tl1687116897_)))
                                    (let ((_hd1687316903_
                                           (##car _e1687216900_))
                                          (_tl1687416905_
                                           (##cdr _e1687216900_)))
                                      (if (gx#stx-null? _tl1687416905_)
                                          ;; _L16908_ = expr, _L16909_ = target id
                                          ((lambda (_L16908_ _L16909_)
                                             (let ((_eid16924_
                                                    (gxc#identifier-symbol
                                                     _L16909_)))
                                               (hash-update!
                                                _ht16860_
                                                _eid16924_
                                                1+
                                                '0)
                                               (gxc#compile-e
                                                _L16908_
                                                _ht16860_)))
                                           _hd1687316903_
                                           _hd1687016895_)
                                          (_g1686216879_ _g1686316882_))))
                                  (_g1686216879_ _g1686316882_))))
                          (_g1686216879_ _g1686316882_))))
                  (_g1686216879_ _g1686316882_)))))
      (_g1686116926_ _stx16859_))))
;; gxc-generated. Generic "find" traversal over a form's subexpressions:
;; destructures (_ expr ...), collects the exprs via a splice loop, and
;; returns the first non-false result of compiling each with arg (ormap1).
(define gxc#find-body%
  (lambda (_stx16772_ _arg16773_)
    (let* ((_g1677516794_
            ;; failure continuation: malformed form
            (lambda (_g1677616791_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1677616791_)))
           (_g1677416856_
            (lambda (_g1677616797_)
              (if (gx#stx-pair? _g1677616797_)
                  (let ((_e1677816799_ (gx#stx-e _g1677616797_)))
                    (let ((_hd1677916802_ (##car _e1677816799_))
                          (_tl1678016804_ (##cdr _e1677816799_)))
                      (if (gx#stx-pair/null? _tl1678016804_)
                          (let ((_g18222_
                                 (gx#syntax-split-splice _tl1678016804_ '0)))
                            (begin
                              ;; syntax-split-splice must return 2 values
                              (let ((_g18223_
                                     (if (##values? _g18222_)
                                         (##vector-length _g18222_)
                                         1)))
                                (if (not (##fx= _g18223_ 2))
                                    (error "Context expects 2 values"
                                           _g18223_)))
                              (let ((_target1678116807_
                                     (##vector-ref _g18222_ 0))
                                    (_tl1678316809_
                                     (##vector-ref _g18222_ 1)))
                                (if (gx#stx-null? _tl1678316809_)
                                    ;; accumulate subexpressions, reversed at the end
                                    (letrec ((_loop1678416812_
                                              (lambda (_hd1678216815_
                                                       _expr1678816817_)
                                                (if (gx#stx-pair?
                                                     _hd1678216815_)
                                                    (let ((_e1678516820_
                                                           (gx#stx-e
                                                            _hd1678216815_)))
                                                      (let ((_lp-hd1678616823_
                                                             (##car _e1678516820_))
                                                            (_lp-tl1678716825_
                                                             (##cdr _e1678516820_)))
                                                        (_loop1678416812_
                                                         _lp-tl1678716825_
                                                         (cons _lp-hd1678616823_
                                                               _expr1678816817_))))
                                                    (let ((_expr1678916828_ (reverse _expr1678816817_)))
                                                      ((lambda (_L16831_)
                                                         (ormap1 (lambda (_g1684416846_)
                                                                   (gxc#compile-e _g1684416846_ _arg16773_))
                                                                 (foldr1 (lambda (_g1684816851_ _g1684916853_)
                                                                           (cons _g1684816851_ _g1684916853_))
                                                                         '()
                                                                         _L16831_)))
                                                       _expr1678916828_))))))
                                      (_loop1678416812_
                                       _target1678116807_
                                       '()))
                                    (_g1677516794_ _g1677616797_)))))
                          (_g1677516794_ _g1677616797_))))
                  (_g1677516794_ _g1677616797_)))))
      (_g1677416856_ _stx16772_))))
;; gxc-generated. For (%#begin-annotation ann expr): recurses only into expr
;; (the annotation itself is not searched).
(define gxc#find-begin-annotation%
  (lambda (_stx16704_ _arg16705_)
    (let* ((_g1670716724_
            ;; failure continuation: malformed %#begin-annotation form
            (lambda (_g1670816721_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1670816721_)))
           (_g1670616769_
            (lambda (_g1670816727_)
              (if (gx#stx-pair? _g1670816727_)
                  (let ((_e1671116729_ (gx#stx-e _g1670816727_)))
                    (let ((_hd1671216732_ (##car _e1671116729_))
                          (_tl1671316734_ (##cdr _e1671116729_)))
                      (if (gx#stx-pair? _tl1671316734_)
                          (let ((_e1671416737_ (gx#stx-e _tl1671316734_)))
                            (let ((_hd1671516740_ (##car _e1671416737_))
                                  (_tl1671616742_ (##cdr _e1671416737_)))
                              (if (gx#stx-pair? _tl1671616742_)
                                  (let ((_e1671716745_
                                         (gx#stx-e _tl1671616742_)))
                                    (let ((_hd1671816748_
                                           (##car _e1671716745_))
                                          (_tl1671916750_
                                           (##cdr _e1671716745_)))
                                      (if (gx#stx-null? _tl1671916750_)
                                          ;; _L16753_ = expr, _L16754_ = annotation (unused)
                                          ((lambda (_L16753_ _L16754_)
                                             (gxc#compile-e
                                              _L16753_
                                              _arg16705_))
                                           _hd1671816748_
                                           _hd1671516740_)
                                          (_g1670716724_ _g1670816727_))))
                                  (_g1670716724_ _g1670816727_))))
                          (_g1670716724_ _g1670816727_))))
                  (_g1670716724_ _g1670816727_)))))
      (_g1670616769_ _stx16704_))))
;; gxc-generated. For (%#lambda formals body): recurses only into body
;; (formals are not searched).
(define gxc#find-lambda%
  (lambda (_stx16636_ _arg16637_)
    (let* ((_g1663916656_
            ;; failure continuation: malformed %#lambda form
            (lambda (_g1664016653_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1664016653_)))
           (_g1663816701_
            (lambda (_g1664016659_)
              (if (gx#stx-pair? _g1664016659_)
                  (let ((_e1664316661_ (gx#stx-e _g1664016659_)))
                    (let ((_hd1664416664_ (##car _e1664316661_))
                          (_tl1664516666_ (##cdr _e1664316661_)))
                      (if (gx#stx-pair? _tl1664516666_)
                          (let ((_e1664616669_ (gx#stx-e _tl1664516666_)))
                            (let ((_hd1664716672_ (##car _e1664616669_))
                                  (_tl1664816674_ (##cdr _e1664616669_)))
                              (if (gx#stx-pair? _tl1664816674_)
                                  (let ((_e1664916677_
                                         (gx#stx-e _tl1664816674_)))
                                    (let ((_hd1665016680_
                                           (##car _e1664916677_))
                                          (_tl1665116682_
                                           (##cdr _e1664916677_)))
                                      (if (gx#stx-null? _tl1665116682_)
                                          ;; _L16685_ = body, _L16686_ = formals (unused)
                                          ((lambda (_L16685_ _L16686_)
                                             (gxc#compile-e
                                              _L16685_
                                              _arg16637_))
                                           _hd1665016680_
                                           _hd1664716672_)
                                          (_g1663916656_ _g1664016659_))))
                                  (_g1663916656_ _g1664016659_))))
                          (_g1663916656_ _g1664016659_))))
                  (_g1663916656_ _g1664016659_)))))
      (_g1663816701_ _stx16636_))))
;; gxc-generated. For (%#case-lambda (formals body) ...): collects every
;; clause body (splice loop, accumulators reversed at the end) and returns
;; the first non-false result of compiling each body with arg (ormap1).
(define gxc#find-case-lambda%
  (lambda (_stx16518_ _arg16519_)
    (let* ((_g1652116549_
            ;; failure continuation: malformed %#case-lambda form
            (lambda (_g1652216546_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1652216546_)))
           (_g1652016633_
            (lambda (_g1652216552_)
              (if (gx#stx-pair? _g1652216552_)
                  (let ((_e1652516554_ (gx#stx-e _g1652216552_)))
                    (let ((_hd1652616557_ (##car _e1652516554_))
                          (_tl1652716559_ (##cdr _e1652516554_)))
                      (if (gx#stx-pair/null? _tl1652716559_)
                          (let ((_g18224_
                                 (gx#syntax-split-splice _tl1652716559_ '0)))
                            (begin
                              ;; syntax-split-splice must return 2 values
                              (let ((_g18225_
                                     (if (##values? _g18224_)
                                         (##vector-length _g18224_)
                                         1)))
                                (if (not (##fx= _g18225_ 2))
                                    (error "Context expects 2 values"
                                           _g18225_)))
                              (let ((_target1652816562_
                                     (##vector-ref _g18224_ 0))
                                    (_tl1653016564_
                                     (##vector-ref _g18224_ 1)))
                                (if (gx#stx-null? _tl1653016564_)
                                    ;; walk the clauses, splitting each into (formals body)
                                    (letrec ((_loop1653116567_
                                              (lambda (_hd1652916570_
                                                       _body1653516572_
                                                       _hd1653616574_)
                                                (if (gx#stx-pair?
                                                     _hd1652916570_)
                                                    (let ((_e1653216577_
                                                           (gx#stx-e
                                                            _hd1652916570_)))
                                                      (let ((_lp-hd1653316580_
                                                             (##car _e1653216577_))
                                                            (_lp-tl1653416582_
                                                             (##cdr _e1653216577_)))
                                                        (if (gx#stx-pair?
                                                             _lp-hd1653316580_)
                                                            (let ((_e1653916585_
                                                                   (gx#stx-e _lp-hd1653316580_)))
                                                              (let ((_hd1654016588_ (##car _e1653916585_))
                                                                    (_tl1654116590_ (##cdr _e1653916585_)))
                                                                (if (gx#stx-pair? _tl1654116590_)
                                                                    (let ((_e1654216593_ (gx#stx-e _tl1654116590_)))
                                                                      (let ((_hd1654316596_ (##car _e1654216593_))
                                                                            (_tl1654416598_ (##cdr _e1654216593_)))
                                                                        (if (gx#stx-null? _tl1654416598_)
                                                                            (_loop1653116567_
                                                                             _lp-tl1653416582_
                                                                             (cons _hd1654316596_ _body1653516572_)
                                                                             (cons _hd1654016588_ _hd1653616574_))
                                                                            (_g1652116549_ _g1652216552_))))
                                                                    (_g1652116549_ _g1652216552_))))
                                                            (_g1652116549_ _g1652216552_))))
                                                    (let ((_body1653716601_ (reverse _body1653516572_))
                                                          (_hd1653816603_ (reverse _hd1653616574_)))
                                                      ;; _L16606_ = bodies, _L16607_ = formals (unused)
                                                      ((lambda (_L16606_ _L16607_)
                                                         (ormap1 (lambda (_g1662116623_)
                                                                   (gxc#compile-e _g1662116623_ _arg16519_))
                                                                 (foldr1 (lambda (_g1662516628_ _g1662616630_)
                                                                           (cons _g1662516628_ _g1662616630_))
                                                                         '()
                                                                         _L16606_)))
                                                       _body1653716601_
                                                       _hd1653816603_))))))
                                      (_loop1653116567_
                                       _target1652816562_
                                       '()
                                       '()))
                                    (_g1652116549_ _g1652216552_)))))
                          (_g1652116549_ _g1652216552_))))
                  (_g1652116549_ _g1652216552_)))))
      (_g1652016633_ _stx16518_))))
;; gxc-generated. For (%#let-values ((bind expr) ...) body): collects the
;; binding exprs (splice loop), returns the first non-false result of
;; compiling each expr with arg; if none matches, recurses into body.
(define gxc#find-let-values%
  (lambda (_stx16368_ _arg16369_)
    (let* ((_g1637116406_
            ;; failure continuation: malformed %#let-values form
            (lambda (_g1637216403_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1637216403_)))
           (_g1637016515_
            (lambda (_g1637216409_)
              (if (gx#stx-pair? _g1637216409_)
                  (let ((_e1637616411_ (gx#stx-e _g1637216409_)))
                    (let ((_hd1637716414_ (##car _e1637616411_))
                          (_tl1637816416_ (##cdr _e1637616411_)))
                      (if (gx#stx-pair? _tl1637816416_)
                          (let ((_e1637916419_ (gx#stx-e _tl1637816416_)))
                            (let ((_hd1638016422_ (##car _e1637916419_))
                                  (_tl1638116424_ (##cdr _e1637916419_)))
                              (if (gx#stx-pair/null? _hd1638016422_)
                                  (let ((_g18226_
                                         (gx#syntax-split-splice
                                          _hd1638016422_
                                          '0)))
                                    (begin
                                      ;; syntax-split-splice must return 2 values
                                      (let ((_g18227_
                                             (if (##values? _g18226_)
                                                 (##vector-length _g18226_)
                                                 1)))
                                        (if (not (##fx= _g18227_ 2))
                                            (error "Context expects 2 values"
                                                   _g18227_)))
                                      (let ((_target1638216427_
                                             (##vector-ref _g18226_ 0))
                                            (_tl1638416429_
                                             (##vector-ref _g18226_ 1)))
                                        (if (gx#stx-null? _tl1638416429_)
                                            ;; walk bindings, splitting each into (bind expr)
                                            (letrec ((_loop1638516432_
                                                      (lambda (_hd1638316435_
                                                               _expr1638916437_
                                                               _bind1639016439_)
                                                        (if (gx#stx-pair? _hd1638316435_)
                                                            (let ((_e1638616442_ (gx#stx-e _hd1638316435_)))
                                                              (let ((_lp-hd1638716445_ (##car _e1638616442_))
                                                                    (_lp-tl1638816447_ (##cdr _e1638616442_)))
                                                                (if (gx#stx-pair? _lp-hd1638716445_)
                                                                    (let ((_e1639316450_
                                                                           (gx#stx-e _lp-hd1638716445_)))
                                                                      (let ((_hd1639416453_ (##car _e1639316450_))
                                                                            (_tl1639516455_ (##cdr _e1639316450_)))
                                                                        (if (gx#stx-pair? _tl1639516455_)
                                                                            (let ((_e1639616458_
                                                                                   (gx#stx-e _tl1639516455_)))
                                                                              (let ((_hd1639716461_
                                                                                     (##car _e1639616458_))
                                                                                    (_tl1639816463_
                                                                                     (##cdr _e1639616458_)))
                                                                                (if (gx#stx-null? _tl1639816463_)
                                                                                    (_loop1638516432_
                                                                                     _lp-tl1638816447_
                                                                                     (cons _hd1639716461_
                                                                                           _expr1638916437_)
                                                                                     (cons _hd1639416453_
                                                                                           _bind1639016439_))
                                                                                    (_g1637116406_ _g1637216409_))))
                                                                            (_g1637116406_ _g1637216409_))))
                                                                    (_g1637116406_ _g1637216409_))))
                                                            (let ((_expr1639116466_ (reverse _expr1638916437_))
                                                                  (_bind1639216468_ (reverse _bind1639016439_)))
                                                              (if (gx#stx-pair? _tl1638116424_)
                                                                  (let ((_e1639916471_ (gx#stx-e _tl1638116424_)))
                                                                    (let ((_hd1640016474_ (##car _e1639916471_))
                                                                          (_tl1640116476_ (##cdr _e1639916471_)))
                                                                      (if (gx#stx-null? _tl1640116476_)
                                                                          ;; _L16479_ = body, _L16480_ = exprs, _L16481_ = binds
                                                                          ((lambda (_L16479_ _L16480_ _L16481_)
                                                                             (let ((_$e16512_
                                                                                    (ormap1 (lambda (_g1650016502_)
                                                                                              (gxc#compile-e
                                                                                               _g1650016502_
                                                                                               _arg16369_))
                                                                                            (foldr1 (lambda (_g1650416507_
                                                                                                             _g1650516509_)
                                                                                                      (cons _g1650416507_ _g1650516509_))
                                                                                                    '()
                                                                                                    _L16480_))))
                                                                               ;; fall through to the body when no expr matched
                                                                               (if _$e16512_
                                                                                   _$e16512_
                                                                                   (gxc#compile-e
                                                                                    _L16479_
                                                                                    _arg16369_))))
                                                                           _hd1640016474_
                                                                           _expr1639116466_
                                                                           _bind1639216468_)
                                                                          (_g1637116406_ _g1637216409_))))
                                                                  (_g1637116406_ _g1637216409_)))))))
                                              (_loop1638516432_
                                               _target1638216427_
                                               '()
                                               '()))
                                            (_g1637116406_ _g1637216409_)))))
                                  (_g1637116406_ _g1637216409_))))
                          (_g1637116406_ _g1637216409_))))
                  (_g1637116406_ _g1637216409_)))))
      (_g1637016515_ _stx16368_))))
;; gxc-generated. For (%#set! id expr): recurses only into expr (the
;; assigned identifier is not searched).
(define gxc#find-setq%
  (lambda (_stx16300_ _arg16301_)
    (let* ((_g1630316320_
            ;; failure continuation: malformed %#set! form
            (lambda (_g1630416317_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1630416317_)))
           (_g1630216365_
            (lambda (_g1630416323_)
              (if (gx#stx-pair? _g1630416323_)
                  (let ((_e1630716325_ (gx#stx-e _g1630416323_)))
                    (let ((_hd1630816328_ (##car _e1630716325_))
                          (_tl1630916330_ (##cdr _e1630716325_)))
                      (if (gx#stx-pair? _tl1630916330_)
                          (let ((_e1631016333_ (gx#stx-e _tl1630916330_)))
                            (let ((_hd1631116336_ (##car _e1631016333_))
                                  (_tl1631216338_ (##cdr _e1631016333_)))
                              (if (gx#stx-pair? _tl1631216338_)
                                  (let ((_e1631316341_
                                         (gx#stx-e _tl1631216338_)))
                                    (let ((_hd1631416344_
                                           (##car _e1631316341_))
                                          (_tl1631516346_
                                           (##cdr _e1631316341_)))
                                      (if (gx#stx-null? _tl1631516346_)
                                          ;; _L16349_ = expr, _L16350_ = target id (unused)
                                          ((lambda (_L16349_ _L16350_)
                                             (gxc#compile-e
                                              _L16349_
                                              _arg16301_))
                                           _hd1631416344_
                                           _hd1631116336_)
                                          (_g1630316320_ _g1630416323_))))
                                  (_g1630316320_ _g1630416323_))))
                          (_g1630316320_ _g1630416323_))))
                  (_g1630316320_ _g1630416323_)))))
      (_g1630216365_ _stx16300_))))
;; gxc-generated. For (%#ref id): returns the first identifier in ids that
;; is free-identifier=? to id (SRFI-1 style find), or #f when none matches.
(define gxc#find-var-refs-ref%
  (lambda (_stx16244_ _ids16245_)
    (let* ((_g1624716260_
            ;; failure continuation: malformed %#ref form
            (lambda (_g1624816257_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1624816257_)))
           (_g1624616297_
            (lambda (_g1624816263_)
              (if (gx#stx-pair? _g1624816263_)
                  (let ((_e1625016265_ (gx#stx-e _g1624816263_)))
                    (let ((_hd1625116268_ (##car _e1625016265_))
                          (_tl1625216270_ (##cdr _e1625016265_)))
                      (if (gx#stx-pair? _tl1625216270_)
                          (let ((_e1625316273_ (gx#stx-e _tl1625216270_)))
                            (let ((_hd1625416276_ (##car _e1625316273_))
                                  (_tl1625516278_ (##cdr _e1625316273_)))
                              (if (gx#stx-null? _tl1625516278_)
                                  ;; _L16281_ = referenced identifier
                                  ((lambda (_L16281_)
                                     (find (lambda (_g1629216294_)
                                             (gx#free-identifier=?
                                              _L16281_
                                              _g1629216294_))
                                           _ids16245_))
                                   _hd1625416276_)
                                  (_g1624716260_ _g1624816263_))))
                          (_g1624716260_ _g1624816263_))))
                  (_g1624716260_ _g1624816263_)))))
      (_g1624616297_ _stx16244_))))
;; gxc-generated. For (%#set! id expr): returns the matching identifier from
;; ids when id is free-identifier=? to one of them; otherwise recurses into
;; expr.  NOTE(review): the final line carries one extra closing paren that
;; terminates an enclosing form opened earlier in the file — keep it.
(define gxc#find-var-refs-setq%
  (lambda (_stx16168_ _ids16169_)
    (let* ((_g1617116188_
            ;; failure continuation: malformed %#set! form
            (lambda (_g1617216185_)
              (gx#raise-syntax-error '#f '"Bad syntax" _g1617216185_)))
           (_g1617016241_
            (lambda (_g1617216191_)
              (if (gx#stx-pair? _g1617216191_)
                  (let ((_e1617516193_ (gx#stx-e _g1617216191_)))
                    (let ((_hd1617616196_ (##car _e1617516193_))
                          (_tl1617716198_ (##cdr _e1617516193_)))
                      (if (gx#stx-pair? _tl1617716198_)
                          (let ((_e1617816201_ (gx#stx-e _tl1617716198_)))
                            (let ((_hd1617916204_ (##car _e1617816201_))
                                  (_tl1618016206_ (##cdr _e1617816201_)))
                              (if (gx#stx-pair? _tl1618016206_)
                                  (let ((_e1618116209_
                                         (gx#stx-e _tl1618016206_)))
                                    (let ((_hd1618216212_
                                           (##car _e1618116209_))
                                          (_tl1618316214_
                                           (##cdr _e1618116209_)))
                                      (if (gx#stx-null? _tl1618316214_)
                                          ;; _L16217_ = expr, _L16218_ = target id
                                          ((lambda (_L16217_ _L16218_)
                                             (let ((_$e16238_
                                                    (find (lambda (_g1623316235_)
                                                            (gx#free-identifier=?
                                                             _L16218_
                                                             _g1623316235_))
                                                          _ids16169_)))
                                               (if _$e16238_
                                                   _$e16238_
                                                   (gxc#compile-e
                                                    _L16217_
                                                    _ids16169_))))
                                           _hd1618216212_
                                           _hd1617916204_)
                                          (_g1617116188_ _g1617216191_))))
                                  (_g1617116188_ _g1617216191_))))
                          (_g1617116188_ _g1617216191_))))
                  (_g1617116188_ _g1617216191_)))))
      (_g1617016241_ _stx16168_)))))
| null | https://raw.githubusercontent.com/vyzo/gerbil/17fbcb95a8302c0de3f88380be1a3eb6fe891b95/src/bootstrap/gerbil/compiler/optimize-xform__0.scm | scheme | <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> | (declare (block) (standard-bindings) (extended-bindings))
(begin
  ;; gxc-generated. Promise-wrapped dispatch table (eq? keys): maps every core
  ;; expression form to the identity transform.
  (define gxc#&identity-expression
    (make-promise
     (lambda ()
       (let ((_tbl18216_ (make-table 'test: eq?)))
         (table-set! _tbl18216_ '%#begin-annotation gxc#xform-identity)
         (table-set! _tbl18216_ '%#lambda gxc#xform-identity)
         (table-set! _tbl18216_ '%#case-lambda gxc#xform-identity)
         (table-set! _tbl18216_ '%#let-values gxc#xform-identity)
         (table-set! _tbl18216_ '%#letrec-values gxc#xform-identity)
         (table-set! _tbl18216_ '%#letrec*-values gxc#xform-identity)
         (table-set! _tbl18216_ '%#quote gxc#xform-identity)
         (table-set! _tbl18216_ '%#quote-syntax gxc#xform-identity)
         (table-set! _tbl18216_ '%#call gxc#xform-identity)
         (table-set! _tbl18216_ '%#if gxc#xform-identity)
         (table-set! _tbl18216_ '%#ref gxc#xform-identity)
         (table-set! _tbl18216_ '%#set! gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-instance? gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-direct-instance? gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-ref gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-set! gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-direct-ref gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-direct-set! gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-unchecked-ref gxc#xform-identity)
         (table-set! _tbl18216_ '%#struct-unchecked-set! gxc#xform-identity)
         _tbl18216_))))
  ;; gxc-generated. Promise-wrapped dispatch table: maps every special
  ;; (non-expression) form to the identity transform.
  (define gxc#&identity-special-form
    (make-promise
     (lambda ()
       (let ((_tbl18212_ (make-table 'test: eq?)))
         (table-set! _tbl18212_ '%#begin gxc#xform-identity)
         (table-set! _tbl18212_ '%#begin-syntax gxc#xform-identity)
         (table-set! _tbl18212_ '%#begin-foreign gxc#xform-identity)
         (table-set! _tbl18212_ '%#module gxc#xform-identity)
         (table-set! _tbl18212_ '%#import gxc#xform-identity)
         (table-set! _tbl18212_ '%#export gxc#xform-identity)
         (table-set! _tbl18212_ '%#provide gxc#xform-identity)
         (table-set! _tbl18212_ '%#extern gxc#xform-identity)
         (table-set! _tbl18212_ '%#define-values gxc#xform-identity)
         (table-set! _tbl18212_ '%#define-syntax gxc#xform-identity)
         (table-set! _tbl18212_ '%#define-alias gxc#xform-identity)
         (table-set! _tbl18212_ '%#declare gxc#xform-identity)
         _tbl18212_))))
  ;; gxc-generated. Union of the special-form and expression identity tables.
  (define gxc#&identity
    (make-promise
     (lambda ()
       (let ((_tbl18208_ (make-table 'test: eq?)))
         (hash-copy! _tbl18208_ (force gxc#&identity-special-form))
         (hash-copy! _tbl18208_ (force gxc#&identity-expression))
         _tbl18208_))))
  ;; gxc-generated. Base expression-transform table: structural forms recurse
  ;; via their xform-*% walkers, leaf forms (quote/ref) are identity, and
  ;; operand-bearing forms use the generic operand walker.
  (define gxc#&basic-xform-expression
    (make-promise
     (lambda ()
       (let ((_tbl18204_ (make-table 'test: eq?)))
         (table-set!
          _tbl18204_
          '%#begin-annotation
          gxc#xform-begin-annotation%)
         (table-set! _tbl18204_ '%#lambda gxc#xform-lambda%)
         (table-set! _tbl18204_ '%#case-lambda gxc#xform-case-lambda%)
         (table-set! _tbl18204_ '%#let-values gxc#xform-let-values%)
         (table-set! _tbl18204_ '%#letrec-values gxc#xform-let-values%)
         (table-set! _tbl18204_ '%#letrec*-values gxc#xform-let-values%)
         (table-set! _tbl18204_ '%#quote gxc#xform-identity)
         (table-set! _tbl18204_ '%#quote-syntax gxc#xform-identity)
         (table-set! _tbl18204_ '%#call gxc#xform-operands)
         (table-set! _tbl18204_ '%#if gxc#xform-operands)
         (table-set! _tbl18204_ '%#ref gxc#xform-identity)
         (table-set! _tbl18204_ '%#set! gxc#xform-setq%)
         (table-set! _tbl18204_ '%#struct-instance? gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-direct-instance? gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-ref gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-set! gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-direct-ref gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-direct-set! gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-unchecked-ref gxc#xform-operands)
         (table-set! _tbl18204_ '%#struct-unchecked-set! gxc#xform-operands)
         _tbl18204_))))
  ;; gxc-generated. Full transform table: basic expression transforms plus
  ;; identity defaults, with top-level forms overridden to recurse.
  (define gxc#&basic-xform
    (make-promise
     (lambda ()
       (let ((_tbl18200_ (make-table 'test: eq?)))
         (hash-copy! _tbl18200_ (force gxc#&basic-xform-expression))
         (hash-copy! _tbl18200_ (force gxc#&identity))
         (table-set! _tbl18200_ '%#begin gxc#xform-begin%)
         (table-set! _tbl18200_ '%#begin-syntax gxc#xform-begin-syntax%)
         (table-set! _tbl18200_ '%#module gxc#xform-module%)
         (table-set! _tbl18200_ '%#define-values gxc#xform-define-values%)
         (table-set! _tbl18200_ '%#define-syntax gxc#xform-define-syntax%)
         _tbl18200_))))
  ;; gxc-generated. Collection pass table for finding mutated bindings:
  ;; defaults to &void, walks bodies/operands, and hooks %#set! into
  ;; collect-mutators-setq% to record the assignment target.
  (define gxc#&collect-mutators
    (make-promise
     (lambda ()
       (let ((_tbl18196_ (make-table 'test: eq?)))
         (hash-copy! _tbl18196_ (force gxc#&void))
         (table-set! _tbl18196_ '%#begin gxc#collect-begin%)
         (table-set! _tbl18196_ '%#begin-syntax gxc#collect-begin-syntax%)
         (table-set!
          _tbl18196_
          '%#begin-annotation
          gxc#collect-begin-annotation%)
         (table-set! _tbl18196_ '%#module gxc#collect-module%)
         (table-set! _tbl18196_ '%#define-values gxc#collect-define-values%)
         (table-set! _tbl18196_ '%#define-syntax gxc#collect-define-syntax%)
         (table-set! _tbl18196_ '%#lambda gxc#collect-body-lambda%)
         (table-set! _tbl18196_ '%#case-lambda gxc#collect-body-case-lambda%)
         (table-set! _tbl18196_ '%#let-values gxc#collect-body-let-values%)
         (table-set! _tbl18196_ '%#letrec-values gxc#collect-body-let-values%)
         (table-set! _tbl18196_ '%#letrec*-values gxc#collect-body-let-values%)
         (table-set! _tbl18196_ '%#call gxc#collect-operands)
         (table-set! _tbl18196_ '%#if gxc#collect-operands)
         (table-set! _tbl18196_ '%#set! gxc#collect-mutators-setq%)
         (table-set! _tbl18196_ '%#struct-instance? gxc#collect-operands)
         (table-set!
          _tbl18196_
          '%#struct-direct-instance?
          gxc#collect-operands)
         (table-set! _tbl18196_ '%#struct-ref gxc#collect-operands)
         (table-set! _tbl18196_ '%#struct-set! gxc#collect-operands)
         (table-set! _tbl18196_ '%#struct-direct-ref gxc#collect-operands)
         (table-set! _tbl18196_ '%#struct-direct-set! gxc#collect-operands)
         (table-set! _tbl18196_ '%#struct-unchecked-ref gxc#collect-operands)
         (table-set! _tbl18196_ '%#struct-unchecked-set! gxc#collect-operands)
         _tbl18196_))))
  ;; gxc-generated. Runs compile-e on stx with the collect-mutators method
  ;; table installed as the current compile methods (dynamic parameter).
  (define gxc#apply-collect-mutators
    (lambda (_stx18189_ . _args18191_)
      (call-with-parameters
       (lambda () (apply gxc#compile-e _stx18189_ _args18191_))
       gxc#current-compile-methods
       (force gxc#&collect-mutators))))
  ;; gxc-generated. Identifier-substitution table: basic expression transforms
  ;; with %#ref/%#set! overridden to perform the substitution.
  (define gxc#&expression-subst
    (make-promise
     (lambda ()
       (let ((_tbl18186_ (make-table 'test: eq?)))
         (hash-copy! _tbl18186_ (force gxc#&basic-xform-expression))
         (table-set! _tbl18186_ '%#begin gxc#xform-begin%)
         (table-set! _tbl18186_ '%#ref gxc#expression-subst-ref%)
         (table-set! _tbl18186_ '%#set! gxc#expression-subst-setq%)
         _tbl18186_))))
  ;; gxc-generated. Runs compile-e with the single-identifier substitution
  ;; method table installed.
  (define gxc#apply-expression-subst
    (lambda (_stx18179_ . _args18181_)
      (call-with-parameters
       (lambda () (apply gxc#compile-e _stx18179_ _args18181_))
       gxc#current-compile-methods
       (force gxc#&expression-subst))))
  ;; gxc-generated. Alist (multi-identifier) substitution table: extends
  ;; &expression-subst with the *-variants of the ref/set! handlers.
  (define gxc#&expression-subst*
    (make-promise
     (lambda ()
       (let ((_tbl18176_ (make-table 'test: eq?)))
         (hash-copy! _tbl18176_ (force gxc#&expression-subst))
         (table-set! _tbl18176_ '%#ref gxc#expression-subst*-ref%)
         (table-set! _tbl18176_ '%#set! gxc#expression-subst*-setq%)
         _tbl18176_))))
  ;; gxc-generated. Runs compile-e with the alist-substitution method table
  ;; installed.
  (define gxc#apply-expression-subst*
    (lambda (_stx18169_ . _args18171_)
      (call-with-parameters
       (lambda () (apply gxc#compile-e _stx18169_ _args18171_))
       gxc#current-compile-methods
       (force gxc#&expression-subst*))))
  ;; gxc-generated. "Find" traversal table: defaults to &false-expression,
  ;; with structural forms routed through the find-*% walkers above.
  (define gxc#&find-expression
    (make-promise
     (lambda ()
       (let ((_tbl18166_ (make-table 'test: eq?)))
         (hash-copy! _tbl18166_ (force gxc#&false-expression))
         (table-set! _tbl18166_ '%#begin gxc#find-body%)
         (table-set! _tbl18166_ '%#begin-annotation gxc#find-begin-annotation%)
         (table-set! _tbl18166_ '%#lambda gxc#find-lambda%)
         (table-set! _tbl18166_ '%#case-lambda gxc#find-case-lambda%)
         (table-set! _tbl18166_ '%#let-values gxc#find-let-values%)
         (table-set! _tbl18166_ '%#letrec-values gxc#find-let-values%)
         (table-set! _tbl18166_ '%#letrec*-values gxc#find-let-values%)
         (table-set! _tbl18166_ '%#call gxc#find-body%)
         (table-set! _tbl18166_ '%#if gxc#find-body%)
         (table-set! _tbl18166_ '%#set! gxc#find-setq%)
         (table-set! _tbl18166_ '%#struct-instance? gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-direct-instance? gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-ref gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-set! gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-direct-ref gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-direct-set! gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-unchecked-ref gxc#find-body%)
         (table-set! _tbl18166_ '%#struct-unchecked-set! gxc#find-body%)
         _tbl18166_))))
  ;; gxc-generated. Find-variable-references table: the find traversal with
  ;; %#ref/%#set! hooked into the var-refs matchers.
  (define gxc#&find-var-refs
    (make-promise
     (lambda ()
       (let ((_tbl18162_ (make-table 'test: eq?)))
         (hash-copy! _tbl18162_ (force gxc#&find-expression))
         (table-set! _tbl18162_ '%#ref gxc#find-var-refs-ref%)
         (table-set! _tbl18162_ '%#set! gxc#find-var-refs-setq%)
         _tbl18162_))))
  ;; gxc-generated. Runs compile-e with the find-var-refs method table
  ;; installed.
  (define gxc#apply-find-var-refs
    (lambda (_stx18155_ . _args18157_)
      (call-with-parameters
       (lambda () (apply gxc#compile-e _stx18155_ _args18157_))
       gxc#current-compile-methods
       (force gxc#&find-var-refs))))
  ;; gxc-generated. Runtime-reference counting table: the expression-refs
  ;; collector with %#ref/%#set! hooked into the counting handlers above.
  (define gxc#&collect-runtime-refs
    (make-promise
     (lambda ()
       (let ((_tbl18152_ (make-table 'test: eq?)))
         (hash-copy! _tbl18152_ (force gxc#&collect-expression-refs))
         (table-set! _tbl18152_ '%#ref gxc#collect-runtime-refs-ref%)
         (table-set! _tbl18152_ '%#set! gxc#collect-runtime-refs-setq%)
         _tbl18152_))))
  ;; gxc-generated. Runs compile-e with the collect-runtime-refs method table
  ;; installed.
  (define gxc#apply-collect-runtime-refs
    (lambda (_stx18145_ . _args18147_)
      (call-with-parameters
       (lambda () (apply gxc#compile-e _stx18145_ _args18147_))
       gxc#current-compile-methods
       (force gxc#&collect-runtime-refs))))
(define gxc#xform-identity (lambda (_stx18142_ . _args18143_) _stx18142_))
(define gxc#xform-wrap-source
(lambda (_stx18139_ _src-stx18140_)
(gx#stx-wrap-source _stx18139_ (gx#stx-source _src-stx18140_))))
(define gxc#xform-apply-compile-e
(lambda (_args18133_)
(lambda (_g1813418136_)
(apply gxc#compile-e _g1813418136_ _args18133_))))
  ;; gxc-generated. Transform a (%#begin form ...): compiles every subform
  ;; with the extra args and rebuilds %#begin with the original source.
  (define gxc#xform-begin%
    (lambda (_stx18092_ . _args18093_)
      (let* ((_g1809518105_
              ;; failure continuation: malformed %#begin form
              (lambda (_g1809618102_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1809618102_)))
             (_g1809418130_
              (lambda (_g1809618108_)
                (if (gx#stx-pair? _g1809618108_)
                    (let ((_e1809818110_ (gx#stx-e _g1809618108_)))
                      (let ((_hd1809918113_ (##car _e1809818110_))
                            (_tl1810018115_ (##cdr _e1809818110_)))
                        ;; _L18118_ = list of body forms
                        ((lambda (_L18118_)
                           (let ((_forms18128_
                                  (map (gxc#xform-apply-compile-e _args18093_)
                                       _L18118_)))
                             (gxc#xform-wrap-source
                              (cons '%#begin _forms18128_)
                              _stx18092_)))
                         _tl1810018115_)))
                    (_g1809518105_ _g1809618108_)))))
        (_g1809418130_ _stx18092_))))
  ;; gxc-generated. Transform (%#begin-syntax form ...): like xform-begin%,
  ;; but compiled with the expander phi parameter bumped by one (syntax phase).
  (define gxc#xform-begin-syntax%
    (lambda (_stx18050_ . _args18051_)
      (let* ((_g1805318063_
              ;; failure continuation: malformed %#begin-syntax form
              (lambda (_g1805418060_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1805418060_)))
             (_g1805218089_
              (lambda (_g1805418066_)
                (if (gx#stx-pair? _g1805418066_)
                    (let ((_e1805618068_ (gx#stx-e _g1805418066_)))
                      (let ((_hd1805718071_ (##car _e1805618068_))
                            (_tl1805818073_ (##cdr _e1805618068_)))
                        ((lambda (_L18076_)
                           (call-with-parameters
                            (lambda ()
                              (let ((_forms18087_
                                     (map (gxc#xform-apply-compile-e
                                           _args18051_)
                                          _L18076_)))
                                (gxc#xform-wrap-source
                                 (cons '%#begin-syntax _forms18087_)
                                 _stx18050_)))
                            gx#current-expander-phi
                            (fx+ (gx#current-expander-phi) '1)))
                         _tl1805818073_)))
                    (_g1805318063_ _g1805418066_)))))
        (_g1805218089_ _stx18050_))))
  ;; gxc-generated. Transform (%#module id body): resolves id to its module
  ;; context, compiles the context's stored code (structure slot 11) with the
  ;; expander context parameterized to that module, writes the compiled code
  ;; back into the context (in-place mutation), and rebuilds the %#module form.
  (define gxc#xform-module%
    (lambda (_stx17987_ . _args17988_)
      (let* ((_g1799018004_
              ;; failure continuation: malformed %#module form
              (lambda (_g1799118001_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1799118001_)))
             (_g1798918047_
              (lambda (_g1799118007_)
                (if (gx#stx-pair? _g1799118007_)
                    (let ((_e1799418009_ (gx#stx-e _g1799118007_)))
                      (let ((_hd1799518012_ (##car _e1799418009_))
                            (_tl1799618014_ (##cdr _e1799418009_)))
                        (if (gx#stx-pair? _tl1799618014_)
                            (let ((_e1799718017_ (gx#stx-e _tl1799618014_)))
                              (let ((_hd1799818020_ (##car _e1799718017_))
                                    (_tl1799918022_ (##cdr _e1799718017_)))
                                ;; _L18025_ = rest (unused), _L18026_ = module id
                                ((lambda (_L18025_ _L18026_)
                                   (let* ((_ctx18039_
                                           (gx#syntax-local-e__0 _L18026_))
                                          (_code18041_
                                           ;; slot 11 of module-context holds the code
                                           (##structure-ref
                                            _ctx18039_
                                            '11
                                            gx#module-context::t
                                            '#f))
                                          (_code18044_
                                           (call-with-parameters
                                            (lambda ()
                                              (apply gxc#compile-e
                                                     _code18041_
                                                     _args17988_))
                                            gx#current-expander-context
                                            _ctx18039_)))
                                     (##structure-set!
                                      _ctx18039_
                                      _code18044_
                                      '11
                                      gx#module-context::t
                                      '#f)
                                     (gxc#xform-wrap-source
                                      (cons '%#module
                                            (cons _L18026_
                                                  (cons _code18044_ '())))
                                      _stx17987_)))
                                 _tl1799918022_
                                 _hd1799818020_)))
                            (_g1799018004_ _g1799118007_))))
                    (_g1799018004_ _g1799118007_)))))
        (_g1798918047_ _stx17987_))))
  ;; gxc-generated. Transform (%#define-values hd expr): compiles expr with
  ;; the extra args and rebuilds the form, keeping hd and source intact.
  (define gxc#xform-define-values%
    (lambda (_stx17917_ . _args17918_)
      (let* ((_g1792017937_
              ;; failure continuation: malformed %#define-values form
              (lambda (_g1792117934_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1792117934_)))
             (_g1791917984_
              (lambda (_g1792117940_)
                (if (gx#stx-pair? _g1792117940_)
                    (let ((_e1792417942_ (gx#stx-e _g1792117940_)))
                      (let ((_hd1792517945_ (##car _e1792417942_))
                            (_tl1792617947_ (##cdr _e1792417942_)))
                        (if (gx#stx-pair? _tl1792617947_)
                            (let ((_e1792717950_ (gx#stx-e _tl1792617947_)))
                              (let ((_hd1792817953_ (##car _e1792717950_))
                                    (_tl1792917955_ (##cdr _e1792717950_)))
                                (if (gx#stx-pair? _tl1792917955_)
                                    (let ((_e1793017958_
                                           (gx#stx-e _tl1792917955_)))
                                      (let ((_hd1793117961_
                                             (##car _e1793017958_))
                                            (_tl1793217963_
                                             (##cdr _e1793017958_)))
                                        (if (gx#stx-null? _tl1793217963_)
                                            ;; _L17966_ = expr, _L17967_ = binding head
                                            ((lambda (_L17966_ _L17967_)
                                               (let ((_expr17982_
                                                      (apply gxc#compile-e
                                                             _L17966_
                                                             _args17918_)))
                                                 (gxc#xform-wrap-source
                                                  (cons '%#define-values
                                                        (cons _L17967_
                                                              (cons _expr17982_
                                                                    '())))
                                                  _stx17917_)))
                                             _hd1793117961_
                                             _hd1792817953_)
                                            (_g1792017937_ _g1792117940_))))
                                    (_g1792017937_ _g1792117940_))))
                            (_g1792017937_ _g1792117940_))))
                    (_g1792017937_ _g1792117940_)))))
        (_g1791917984_ _stx17917_))))
  ;; gxc-generated. Transform (%#define-syntax id expr): compiles expr with
  ;; the expander phi bumped by one (the expression lives at syntax phase)
  ;; and rebuilds the form with the original source.
  (define gxc#xform-define-syntax%
    (lambda (_stx17846_ . _args17847_)
      (let* ((_g1784917866_
              ;; failure continuation: malformed %#define-syntax form
              (lambda (_g1785017863_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1785017863_)))
             (_g1784817914_
              (lambda (_g1785017869_)
                (if (gx#stx-pair? _g1785017869_)
                    (let ((_e1785317871_ (gx#stx-e _g1785017869_)))
                      (let ((_hd1785417874_ (##car _e1785317871_))
                            (_tl1785517876_ (##cdr _e1785317871_)))
                        (if (gx#stx-pair? _tl1785517876_)
                            (let ((_e1785617879_ (gx#stx-e _tl1785517876_)))
                              (let ((_hd1785717882_ (##car _e1785617879_))
                                    (_tl1785817884_ (##cdr _e1785617879_)))
                                (if (gx#stx-pair? _tl1785817884_)
                                    (let ((_e1785917887_
                                           (gx#stx-e _tl1785817884_)))
                                      (let ((_hd1786017890_
                                             (##car _e1785917887_))
                                            (_tl1786117892_
                                             (##cdr _e1785917887_)))
                                        (if (gx#stx-null? _tl1786117892_)
                                            ;; _L17895_ = expr, _L17896_ = macro id
                                            ((lambda (_L17895_ _L17896_)
                                               (call-with-parameters
                                                (lambda ()
                                                  (let ((_expr17912_
                                                         (apply gxc#compile-e
                                                                _L17895_
                                                                _args17847_)))
                                                    (gxc#xform-wrap-source
                                                     (cons '%#define-syntax
                                                           (cons _L17896_
                                                                 (cons _expr17912_ '())))
                                                     _stx17846_)))
                                                gx#current-expander-phi
                                                (fx+ (gx#current-expander-phi)
                                                     '1)))
                                             _hd1786017890_
                                             _hd1785717882_)
                                            (_g1784917866_ _g1785017869_))))
                                    (_g1784917866_ _g1785017869_))))
                            (_g1784917866_ _g1785017869_))))
                    (_g1784917866_ _g1785017869_)))))
        (_g1784817914_ _stx17846_))))
  ;; gxc-generated. Transform (%#begin-annotation ann expr): compiles expr
  ;; with the extra args and rebuilds the form, keeping the annotation.
  (define gxc#xform-begin-annotation%
    (lambda (_stx17776_ . _args17777_)
      (let* ((_g1777917796_
              ;; failure continuation: malformed %#begin-annotation form
              (lambda (_g1778017793_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1778017793_)))
             (_g1777817843_
              (lambda (_g1778017799_)
                (if (gx#stx-pair? _g1778017799_)
                    (let ((_e1778317801_ (gx#stx-e _g1778017799_)))
                      (let ((_hd1778417804_ (##car _e1778317801_))
                            (_tl1778517806_ (##cdr _e1778317801_)))
                        (if (gx#stx-pair? _tl1778517806_)
                            (let ((_e1778617809_ (gx#stx-e _tl1778517806_)))
                              (let ((_hd1778717812_ (##car _e1778617809_))
                                    (_tl1778817814_ (##cdr _e1778617809_)))
                                (if (gx#stx-pair? _tl1778817814_)
                                    (let ((_e1778917817_
                                           (gx#stx-e _tl1778817814_)))
                                      (let ((_hd1779017820_
                                             (##car _e1778917817_))
                                            (_tl1779117822_
                                             (##cdr _e1778917817_)))
                                        (if (gx#stx-null? _tl1779117822_)
                                            ;; _L17825_ = expr, _L17826_ = annotation
                                            ((lambda (_L17825_ _L17826_)
                                               (let ((_expr17841_
                                                      (apply gxc#compile-e
                                                             _L17825_
                                                             _args17777_)))
                                                 (gxc#xform-wrap-source
                                                  (cons '%#begin-annotation
                                                        (cons _L17826_
                                                              (cons _expr17841_
                                                                    '())))
                                                  _stx17776_)))
                                             _hd1779017820_
                                             _hd1778717812_)
                                            (_g1777917796_ _g1778017799_))))
                                    (_g1777917796_ _g1778017799_))))
                            (_g1777917796_ _g1778017799_))))
                    (_g1777917796_ _g1778017799_)))))
        (_g1777817843_ _stx17776_))))
  ;; gxc-generated. Transform (%#lambda formals body ...): compiles every
  ;; body form with the extra args; formals pass through untouched.
  (define gxc#xform-lambda%
    (lambda (_stx17719_ . _args17720_)
      (let* ((_g1772217736_
              ;; failure continuation: malformed %#lambda form
              (lambda (_g1772317733_)
                (gx#raise-syntax-error '#f '"Bad syntax" _g1772317733_)))
             (_g1772117773_
              (lambda (_g1772317739_)
                (if (gx#stx-pair? _g1772317739_)
                    (let ((_e1772617741_ (gx#stx-e _g1772317739_)))
                      (let ((_hd1772717744_ (##car _e1772617741_))
                            (_tl1772817746_ (##cdr _e1772617741_)))
                        (if (gx#stx-pair? _tl1772817746_)
                            (let ((_e1772917749_ (gx#stx-e _tl1772817746_)))
                              (let ((_hd1773017752_ (##car _e1772917749_))
                                    (_tl1773117754_ (##cdr _e1772917749_)))
                                ;; _L17757_ = body forms, _L17758_ = formals
                                ((lambda (_L17757_ _L17758_)
                                   (let ((_body17771_
                                          (map (gxc#xform-apply-compile-e
                                                _args17720_)
                                               _L17757_)))
                                     (gxc#xform-wrap-source
                                      (cons '%#lambda
                                            (cons _L17758_ _body17771_))
                                      _stx17719_)))
                                 _tl1773117754_
                                 _hd1773017752_)))
                            (_g1772217736_ _g1772317739_))))
                    (_g1772217736_ _g1772317739_)))))
        (_g1772117773_ _stx17719_))))
  ;; gxc-generated. Transform (%#case-lambda clause ...): for each clause
  ;; (formals body ...) compiles the body forms with the extra args (local
  ;; helper clause-e), then rebuilds %#case-lambda with the original source.
  (define gxc#xform-case-lambda%
    (lambda (_stx17632_ . _args17633_)
      (letrec ((_clause-e17635_
                ;; per-clause transform: (formals body ...) -> same shape,
                ;; with each body form compiled
                (lambda (_clause17676_)
                  (let* ((_g1767817689_
                          (lambda (_g1767917686_)
                            (gx#raise-syntax-error
                             '#f
                             '"Bad syntax"
                             _g1767917686_)))
                         (_g1767717716_
                          (lambda (_g1767917692_)
                            (if (gx#stx-pair? _g1767917692_)
                                (let ((_e1768217694_ (gx#stx-e _g1767917692_)))
                                  (let ((_hd1768317697_ (##car _e1768217694_))
                                        (_tl1768417699_ (##cdr _e1768217694_)))
                                    ((lambda (_L17702_ _L17703_)
                                       (let ((_body17714_
                                              (map (gxc#xform-apply-compile-e
                                                    _args17633_)
                                                   _L17702_)))
                                         (cons _L17703_ _body17714_)))
                                     _tl1768417699_
                                     _hd1768317697_)))
                                (_g1767817689_ _g1767917692_)))))
                    (_g1767717716_ _clause17676_)))))
        (let* ((_g1763717647_
                ;; failure continuation: malformed %#case-lambda form
                (lambda (_g1763817644_)
                  (gx#raise-syntax-error '#f '"Bad syntax" _g1763817644_)))
               (_g1763617673_
                (lambda (_g1763817650_)
                  (if (gx#stx-pair? _g1763817650_)
                      (let ((_e1764017652_ (gx#stx-e _g1763817650_)))
                        (let ((_hd1764117655_ (##car _e1764017652_))
                              (_tl1764217657_ (##cdr _e1764017652_)))
                          ((lambda (_L17660_)
                             (let ((_clauses17671_
                                    (map _clause-e17635_ _L17660_)))
                               (gxc#xform-wrap-source
                                (cons '%#case-lambda _clauses17671_)
                                _stx17632_)))
                           _tl1764217657_)))
                      (_g1763717647_ _g1763817650_)))))
          (_g1763617673_ _stx17632_)))))
(define gxc#xform-let-values%
(lambda (_stx17426_ . _args17427_)
(let* ((_g1742917462_
(lambda (_g1743017459_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1743017459_)))
(_g1742817629_
(lambda (_g1743017465_)
(if (gx#stx-pair? _g1743017465_)
(let ((_e1743517467_ (gx#stx-e _g1743017465_)))
(let ((_hd1743617470_ (##car _e1743517467_))
(_tl1743717472_ (##cdr _e1743517467_)))
(if (gx#stx-pair? _tl1743717472_)
(let ((_e1743817475_ (gx#stx-e _tl1743717472_)))
(let ((_hd1743917478_ (##car _e1743817475_))
(_tl1744017480_ (##cdr _e1743817475_)))
(if (gx#stx-pair/null? _hd1743917478_)
(let ((_g18218_
(gx#syntax-split-splice
_hd1743917478_
'0)))
(begin
(let ((_g18219_
(if (##values? _g18218_)
(##vector-length _g18218_)
1)))
(if (not (##fx= _g18219_ 2))
(error "Context expects 2 values"
_g18219_)))
(let ((_target1744117483_
(##vector-ref _g18218_ 0))
(_tl1744317485_
(##vector-ref _g18218_ 1)))
(if (gx#stx-null? _tl1744317485_)
(letrec ((_loop1744417488_
(lambda (_hd1744217491_
_expr1744817493_
_hd1744917495_)
(if (gx#stx-pair? _hd1744217491_)
(let ((_e1744517498_ (gx#stx-e _hd1744217491_)))
(let ((_lp-hd1744617501_ (##car _e1744517498_))
(_lp-tl1744717503_ (##cdr _e1744517498_)))
(if (gx#stx-pair? _lp-hd1744617501_)
(let ((_e1745217506_
(gx#stx-e _lp-hd1744617501_)))
(let ((_hd1745317509_ (##car _e1745217506_))
(_tl1745417511_ (##cdr _e1745217506_)))
(if (gx#stx-pair? _tl1745417511_)
(let ((_e1745517514_
(gx#stx-e _tl1745417511_)))
(let ((_hd1745617517_
(##car _e1745517514_))
(_tl1745717519_
(##cdr _e1745517514_)))
(if (gx#stx-null? _tl1745717519_)
(_loop1744417488_
_lp-tl1744717503_
(cons _hd1745617517_
_expr1744817493_)
(cons _hd1745317509_
_hd1744917495_))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_))))
(let ((_expr1745017522_ (reverse _expr1744817493_))
(_hd1745117524_ (reverse _hd1744917495_)))
((lambda (_L17527_ _L17528_ _L17529_ _L17530_)
(let* ((_g1754917565_
(lambda (_g1755017562_)
(gx#raise-syntax-error
'#f
'"Bad syntax"
_g1755017562_)))
(_g1754817619_
(lambda (_g1755017568_)
(if (gx#stx-pair/null? _g1755017568_)
(let ((_g18220_
(gx#syntax-split-splice
_g1755017568_
'0)))
(begin
(let ((_g18221_
(if (##values? _g18220_)
(##vector-length
_g18220_)
1)))
(if (not (##fx= _g18221_ 2))
(error "Context expects 2 values"
_g18221_)))
(let ((_target1755217570_
(##vector-ref _g18220_ 0))
(_tl1755417572_
(##vector-ref _g18220_ 1)))
(if (gx#stx-null?
_tl1755417572_)
(letrec ((_loop1755517575_
(lambda (_hd1755317578_
_expr1755917580_)
(if (gx#stx-pair? _hd1755317578_)
(let ((_e1755617583_ (gx#syntax-e _hd1755317578_)))
(let ((_lp-hd1755717586_ (##car _e1755617583_))
(_lp-tl1755817588_ (##cdr _e1755617583_)))
(_loop1755517575_
_lp-tl1755817588_
(cons _lp-hd1755717586_ _expr1755917580_))))
(let ((_expr1756017591_ (reverse _expr1755917580_)))
((lambda (_L17594_)
(let ()
(let ((_body17607_
(map (gxc#xform-apply-compile-e
_args17427_)
_L17527_)))
(gxc#xform-wrap-source
(cons _L17530_
(cons (begin
(gx#syntax-check-splice-targets
_L17594_
_L17529_)
(foldr2 (lambda (_g1760817612_
_g1760917614_
_g1761017616_)
(cons (cons _g1760917614_ (cons _g1760817612_ '()))
_g1761017616_))
'()
_L17594_
_L17529_))
_body17607_))
_stx17426_))))
_expr1756017591_))))))
(_loop1755517575_
_target1755217570_
'()))
(_g1754917565_
_g1755017568_)))))
(_g1754917565_ _g1755017568_)))))
(_g1754817619_
(map (gxc#xform-apply-compile-e _args17427_)
(foldr1 (lambda (_g1762117624_
_g1762217626_)
(cons _g1762117624_
_g1762217626_))
'()
_L17528_)))))
_tl1744017480_
_expr1745017522_
_hd1745117524_
_hd1743617470_))))))
(_loop1744417488_
_target1744117483_
'()
'()))
(_g1742917462_ _g1743017465_)))))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_))))
(_g1742917462_ _g1743017465_)))))
(_g1742817629_ _stx17426_))))
(define gxc#xform-operands
(lambda (_stx17382_ . _args17383_)
(let* ((_g1738517396_
(lambda (_g1738617393_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1738617393_)))
(_g1738417423_
(lambda (_g1738617399_)
(if (gx#stx-pair? _g1738617399_)
(let ((_e1738917401_ (gx#stx-e _g1738617399_)))
(let ((_hd1739017404_ (##car _e1738917401_))
(_tl1739117406_ (##cdr _e1738917401_)))
((lambda (_L17409_ _L17410_)
(let ((_rands17421_
(map (gxc#xform-apply-compile-e _args17383_)
_L17409_)))
(gxc#xform-wrap-source
(cons _L17410_ _rands17421_)
_stx17382_)))
_tl1739117406_
_hd1739017404_)))
(_g1738517396_ _g1738617399_)))))
(_g1738417423_ _stx17382_))))
(define gxc#xform-call% gxc#xform-operands)
(define gxc#xform-setq%
(lambda (_stx17312_ . _args17313_)
(let* ((_g1731517332_
(lambda (_g1731617329_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1731617329_)))
(_g1731417379_
(lambda (_g1731617335_)
(if (gx#stx-pair? _g1731617335_)
(let ((_e1731917337_ (gx#stx-e _g1731617335_)))
(let ((_hd1732017340_ (##car _e1731917337_))
(_tl1732117342_ (##cdr _e1731917337_)))
(if (gx#stx-pair? _tl1732117342_)
(let ((_e1732217345_ (gx#stx-e _tl1732117342_)))
(let ((_hd1732317348_ (##car _e1732217345_))
(_tl1732417350_ (##cdr _e1732217345_)))
(if (gx#stx-pair? _tl1732417350_)
(let ((_e1732517353_
(gx#stx-e _tl1732417350_)))
(let ((_hd1732617356_
(##car _e1732517353_))
(_tl1732717358_
(##cdr _e1732517353_)))
(if (gx#stx-null? _tl1732717358_)
((lambda (_L17361_ _L17362_)
(let ((_expr17377_
(apply gxc#compile-e
_L17361_
_args17313_)))
(gxc#xform-wrap-source
(cons '%#set!
(cons _L17362_
(cons _expr17377_
'())))
_stx17312_)))
_hd1732617356_
_hd1732317348_)
(_g1731517332_ _g1731617335_))))
(_g1731517332_ _g1731617335_))))
(_g1731517332_ _g1731617335_))))
(_g1731517332_ _g1731617335_)))))
(_g1731417379_ _stx17312_))))
(define gxc#collect-mutators-setq%
(lambda (_stx17243_)
(let* ((_g1724517262_
(lambda (_g1724617259_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1724617259_)))
(_g1724417309_
(lambda (_g1724617265_)
(if (gx#stx-pair? _g1724617265_)
(let ((_e1724917267_ (gx#stx-e _g1724617265_)))
(let ((_hd1725017270_ (##car _e1724917267_))
(_tl1725117272_ (##cdr _e1724917267_)))
(if (gx#stx-pair? _tl1725117272_)
(let ((_e1725217275_ (gx#stx-e _tl1725117272_)))
(let ((_hd1725317278_ (##car _e1725217275_))
(_tl1725417280_ (##cdr _e1725217275_)))
(if (gx#stx-pair? _tl1725417280_)
(let ((_e1725517283_
(gx#stx-e _tl1725417280_)))
(let ((_hd1725617286_
(##car _e1725517283_))
(_tl1725717288_
(##cdr _e1725517283_)))
(if (gx#stx-null? _tl1725717288_)
((lambda (_L17291_ _L17292_)
(let ((_sym17307_
(gxc#identifier-symbol
_L17292_)))
(gxc#verbose
'"collect mutator "
_sym17307_)
(table-set!
(gxc#current-compile-mutators)
_sym17307_
'#t)
(gxc#compile-e _L17291_)))
_hd1725617286_
_hd1725317278_)
(_g1724517262_ _g1724617265_))))
(_g1724517262_ _g1724617265_))))
(_g1724517262_ _g1724617265_))))
(_g1724517262_ _g1724617265_)))))
(_g1724417309_ _stx17243_))))
(define gxc#expression-subst-ref%
(lambda (_stx17190_ _id17191_ _new-id17192_)
(let* ((_g1719417207_
(lambda (_g1719517204_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1719517204_)))
(_g1719317240_
(lambda (_g1719517210_)
(if (gx#stx-pair? _g1719517210_)
(let ((_e1719717212_ (gx#stx-e _g1719517210_)))
(let ((_hd1719817215_ (##car _e1719717212_))
(_tl1719917217_ (##cdr _e1719717212_)))
(if (gx#stx-pair? _tl1719917217_)
(let ((_e1720017220_ (gx#stx-e _tl1719917217_)))
(let ((_hd1720117223_ (##car _e1720017220_))
(_tl1720217225_ (##cdr _e1720017220_)))
(if (gx#stx-null? _tl1720217225_)
((lambda (_L17228_)
(if (gx#free-identifier=?
_L17228_
_id17191_)
(gxc#xform-wrap-source
(cons '%#ref
(cons _new-id17192_ '()))
_stx17190_)
_stx17190_))
_hd1720117223_)
(_g1719417207_ _g1719517210_))))
(_g1719417207_ _g1719517210_))))
(_g1719417207_ _g1719517210_)))))
(_g1719317240_ _stx17190_))))
(define gxc#expression-subst*-ref%
(lambda (_stx17131_ _subst17132_)
(let* ((_g1713417147_
(lambda (_g1713517144_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1713517144_)))
(_g1713317187_
(lambda (_g1713517150_)
(if (gx#stx-pair? _g1713517150_)
(let ((_e1713717152_ (gx#stx-e _g1713517150_)))
(let ((_hd1713817155_ (##car _e1713717152_))
(_tl1713917157_ (##cdr _e1713717152_)))
(if (gx#stx-pair? _tl1713917157_)
(let ((_e1714017160_ (gx#stx-e _tl1713917157_)))
(let ((_hd1714117163_ (##car _e1714017160_))
(_tl1714217165_ (##cdr _e1714017160_)))
(if (gx#stx-null? _tl1714217165_)
((lambda (_L17168_)
(let ((_$e17182_
(find (lambda (_sub17180_)
(gx#free-identifier=?
_L17168_
(car _sub17180_)))
_subst17132_)))
(if _$e17182_
((lambda (_sub17185_)
(gxc#xform-wrap-source
(cons '%#ref
(cons (cdr _sub17185_)
'()))
_stx17131_))
_$e17182_)
_stx17131_)))
_hd1714117163_)
(_g1713417147_ _g1713517150_))))
(_g1713417147_ _g1713517150_))))
(_g1713417147_ _g1713517150_)))))
(_g1713317187_ _stx17131_))))
(define gxc#expression-subst-setq%
(lambda (_stx17059_ _id17060_ _new-id17061_)
(let* ((_g1706317080_
(lambda (_g1706417077_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1706417077_)))
(_g1706217128_
(lambda (_g1706417083_)
(if (gx#stx-pair? _g1706417083_)
(let ((_e1706717085_ (gx#stx-e _g1706417083_)))
(let ((_hd1706817088_ (##car _e1706717085_))
(_tl1706917090_ (##cdr _e1706717085_)))
(if (gx#stx-pair? _tl1706917090_)
(let ((_e1707017093_ (gx#stx-e _tl1706917090_)))
(let ((_hd1707117096_ (##car _e1707017093_))
(_tl1707217098_ (##cdr _e1707017093_)))
(if (gx#stx-pair? _tl1707217098_)
(let ((_e1707317101_
(gx#stx-e _tl1707217098_)))
(let ((_hd1707417104_
(##car _e1707317101_))
(_tl1707517106_
(##cdr _e1707317101_)))
(if (gx#stx-null? _tl1707517106_)
((lambda (_L17109_ _L17110_)
(let ((_new-expr17125_
(gxc#compile-e
_L17109_
_id17060_
_new-id17061_))
(_new-xid17126_
(if (gx#free-identifier=?
_L17110_
_id17060_)
_new-id17061_
_L17110_)))
(gxc#xform-wrap-source
(cons '%#set!
(cons _new-xid17126_
(cons _new-expr17125_
'())))
_stx17059_)))
_hd1707417104_
_hd1707117096_)
(_g1706317080_ _g1706417083_))))
(_g1706317080_ _g1706417083_))))
(_g1706317080_ _g1706417083_))))
(_g1706317080_ _g1706417083_)))))
(_g1706217128_ _stx17059_))))
(define gxc#expression-subst*-setq%
(lambda (_stx16983_ _subst16984_)
(let* ((_g1698617003_
(lambda (_g1698717000_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1698717000_)))
(_g1698517056_
(lambda (_g1698717006_)
(if (gx#stx-pair? _g1698717006_)
(let ((_e1699017008_ (gx#stx-e _g1698717006_)))
(let ((_hd1699117011_ (##car _e1699017008_))
(_tl1699217013_ (##cdr _e1699017008_)))
(if (gx#stx-pair? _tl1699217013_)
(let ((_e1699317016_ (gx#stx-e _tl1699217013_)))
(let ((_hd1699417019_ (##car _e1699317016_))
(_tl1699517021_ (##cdr _e1699317016_)))
(if (gx#stx-pair? _tl1699517021_)
(let ((_e1699617024_
(gx#stx-e _tl1699517021_)))
(let ((_hd1699717027_
(##car _e1699617024_))
(_tl1699817029_
(##cdr _e1699617024_)))
(if (gx#stx-null? _tl1699817029_)
((lambda (_L17032_ _L17033_)
(let ((_new-expr17053_
(gxc#compile-e
_L17032_
_subst16984_))
(_new-xid17054_
(let ((_$e17050_
(find (lambda (_sub17048_)
(gx#free-identifier=? _L17033_ (car _sub17048_)))
_subst16984_)))
(if _$e17050_ (cdr _$e17050_) _L17033_))))
(gxc#xform-wrap-source
(cons '%#set!
(cons _new-xid17054_
(cons _new-expr17053_
'())))
_stx16983_)))
_hd1699717027_
_hd1699417019_)
(_g1698617003_ _g1698717006_))))
(_g1698617003_ _g1698717006_))))
(_g1698617003_ _g1698717006_))))
(_g1698617003_ _g1698717006_)))))
(_g1698517056_ _stx16983_))))
(define gxc#collect-runtime-refs-ref%
(lambda (_stx16929_ _ht16930_)
(let* ((_g1693216945_
(lambda (_g1693316942_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1693316942_)))
(_g1693116980_
(lambda (_g1693316948_)
(if (gx#stx-pair? _g1693316948_)
(let ((_e1693516950_ (gx#stx-e _g1693316948_)))
(let ((_hd1693616953_ (##car _e1693516950_))
(_tl1693716955_ (##cdr _e1693516950_)))
(if (gx#stx-pair? _tl1693716955_)
(let ((_e1693816958_ (gx#stx-e _tl1693716955_)))
(let ((_hd1693916961_ (##car _e1693816958_))
(_tl1694016963_ (##cdr _e1693816958_)))
(if (gx#stx-null? _tl1694016963_)
((lambda (_L16966_)
(let ((_eid16978_
(gxc#identifier-symbol
_L16966_)))
(hash-update!
_ht16930_
_eid16978_
1+
'0)))
_hd1693916961_)
(_g1693216945_ _g1693316948_))))
(_g1693216945_ _g1693316948_))))
(_g1693216945_ _g1693316948_)))))
(_g1693116980_ _stx16929_))))
(define gxc#collect-runtime-refs-setq%
(lambda (_stx16859_ _ht16860_)
(let* ((_g1686216879_
(lambda (_g1686316876_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1686316876_)))
(_g1686116926_
(lambda (_g1686316882_)
(if (gx#stx-pair? _g1686316882_)
(let ((_e1686616884_ (gx#stx-e _g1686316882_)))
(let ((_hd1686716887_ (##car _e1686616884_))
(_tl1686816889_ (##cdr _e1686616884_)))
(if (gx#stx-pair? _tl1686816889_)
(let ((_e1686916892_ (gx#stx-e _tl1686816889_)))
(let ((_hd1687016895_ (##car _e1686916892_))
(_tl1687116897_ (##cdr _e1686916892_)))
(if (gx#stx-pair? _tl1687116897_)
(let ((_e1687216900_
(gx#stx-e _tl1687116897_)))
(let ((_hd1687316903_
(##car _e1687216900_))
(_tl1687416905_
(##cdr _e1687216900_)))
(if (gx#stx-null? _tl1687416905_)
((lambda (_L16908_ _L16909_)
(let ((_eid16924_
(gxc#identifier-symbol
_L16909_)))
(hash-update!
_ht16860_
_eid16924_
1+
'0)
(gxc#compile-e
_L16908_
_ht16860_)))
_hd1687316903_
_hd1687016895_)
(_g1686216879_ _g1686316882_))))
(_g1686216879_ _g1686316882_))))
(_g1686216879_ _g1686316882_))))
(_g1686216879_ _g1686316882_)))))
(_g1686116926_ _stx16859_))))
(define gxc#find-body%
(lambda (_stx16772_ _arg16773_)
(let* ((_g1677516794_
(lambda (_g1677616791_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1677616791_)))
(_g1677416856_
(lambda (_g1677616797_)
(if (gx#stx-pair? _g1677616797_)
(let ((_e1677816799_ (gx#stx-e _g1677616797_)))
(let ((_hd1677916802_ (##car _e1677816799_))
(_tl1678016804_ (##cdr _e1677816799_)))
(if (gx#stx-pair/null? _tl1678016804_)
(let ((_g18222_
(gx#syntax-split-splice _tl1678016804_ '0)))
(begin
(let ((_g18223_
(if (##values? _g18222_)
(##vector-length _g18222_)
1)))
(if (not (##fx= _g18223_ 2))
(error "Context expects 2 values"
_g18223_)))
(let ((_target1678116807_
(##vector-ref _g18222_ 0))
(_tl1678316809_
(##vector-ref _g18222_ 1)))
(if (gx#stx-null? _tl1678316809_)
(letrec ((_loop1678416812_
(lambda (_hd1678216815_
_expr1678816817_)
(if (gx#stx-pair?
_hd1678216815_)
(let ((_e1678516820_
(gx#stx-e
_hd1678216815_)))
(let ((_lp-hd1678616823_
(##car _e1678516820_))
(_lp-tl1678716825_
(##cdr _e1678516820_)))
(_loop1678416812_
_lp-tl1678716825_
(cons _lp-hd1678616823_
_expr1678816817_))))
(let ((_expr1678916828_ (reverse _expr1678816817_)))
((lambda (_L16831_)
(ormap1 (lambda (_g1684416846_)
(gxc#compile-e _g1684416846_ _arg16773_))
(foldr1 (lambda (_g1684816851_ _g1684916853_)
(cons _g1684816851_ _g1684916853_))
'()
_L16831_)))
_expr1678916828_))))))
(_loop1678416812_
_target1678116807_
'()))
(_g1677516794_ _g1677616797_)))))
(_g1677516794_ _g1677616797_))))
(_g1677516794_ _g1677616797_)))))
(_g1677416856_ _stx16772_))))
(define gxc#find-begin-annotation%
(lambda (_stx16704_ _arg16705_)
(let* ((_g1670716724_
(lambda (_g1670816721_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1670816721_)))
(_g1670616769_
(lambda (_g1670816727_)
(if (gx#stx-pair? _g1670816727_)
(let ((_e1671116729_ (gx#stx-e _g1670816727_)))
(let ((_hd1671216732_ (##car _e1671116729_))
(_tl1671316734_ (##cdr _e1671116729_)))
(if (gx#stx-pair? _tl1671316734_)
(let ((_e1671416737_ (gx#stx-e _tl1671316734_)))
(let ((_hd1671516740_ (##car _e1671416737_))
(_tl1671616742_ (##cdr _e1671416737_)))
(if (gx#stx-pair? _tl1671616742_)
(let ((_e1671716745_
(gx#stx-e _tl1671616742_)))
(let ((_hd1671816748_
(##car _e1671716745_))
(_tl1671916750_
(##cdr _e1671716745_)))
(if (gx#stx-null? _tl1671916750_)
((lambda (_L16753_ _L16754_)
(gxc#compile-e
_L16753_
_arg16705_))
_hd1671816748_
_hd1671516740_)
(_g1670716724_ _g1670816727_))))
(_g1670716724_ _g1670816727_))))
(_g1670716724_ _g1670816727_))))
(_g1670716724_ _g1670816727_)))))
(_g1670616769_ _stx16704_))))
(define gxc#find-lambda%
(lambda (_stx16636_ _arg16637_)
(let* ((_g1663916656_
(lambda (_g1664016653_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1664016653_)))
(_g1663816701_
(lambda (_g1664016659_)
(if (gx#stx-pair? _g1664016659_)
(let ((_e1664316661_ (gx#stx-e _g1664016659_)))
(let ((_hd1664416664_ (##car _e1664316661_))
(_tl1664516666_ (##cdr _e1664316661_)))
(if (gx#stx-pair? _tl1664516666_)
(let ((_e1664616669_ (gx#stx-e _tl1664516666_)))
(let ((_hd1664716672_ (##car _e1664616669_))
(_tl1664816674_ (##cdr _e1664616669_)))
(if (gx#stx-pair? _tl1664816674_)
(let ((_e1664916677_
(gx#stx-e _tl1664816674_)))
(let ((_hd1665016680_
(##car _e1664916677_))
(_tl1665116682_
(##cdr _e1664916677_)))
(if (gx#stx-null? _tl1665116682_)
((lambda (_L16685_ _L16686_)
(gxc#compile-e
_L16685_
_arg16637_))
_hd1665016680_
_hd1664716672_)
(_g1663916656_ _g1664016659_))))
(_g1663916656_ _g1664016659_))))
(_g1663916656_ _g1664016659_))))
(_g1663916656_ _g1664016659_)))))
(_g1663816701_ _stx16636_))))
(define gxc#find-case-lambda%
(lambda (_stx16518_ _arg16519_)
(let* ((_g1652116549_
(lambda (_g1652216546_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1652216546_)))
(_g1652016633_
(lambda (_g1652216552_)
(if (gx#stx-pair? _g1652216552_)
(let ((_e1652516554_ (gx#stx-e _g1652216552_)))
(let ((_hd1652616557_ (##car _e1652516554_))
(_tl1652716559_ (##cdr _e1652516554_)))
(if (gx#stx-pair/null? _tl1652716559_)
(let ((_g18224_
(gx#syntax-split-splice _tl1652716559_ '0)))
(begin
(let ((_g18225_
(if (##values? _g18224_)
(##vector-length _g18224_)
1)))
(if (not (##fx= _g18225_ 2))
(error "Context expects 2 values"
_g18225_)))
(let ((_target1652816562_
(##vector-ref _g18224_ 0))
(_tl1653016564_
(##vector-ref _g18224_ 1)))
(if (gx#stx-null? _tl1653016564_)
(letrec ((_loop1653116567_
(lambda (_hd1652916570_
_body1653516572_
_hd1653616574_)
(if (gx#stx-pair?
_hd1652916570_)
(let ((_e1653216577_
(gx#stx-e
_hd1652916570_)))
(let ((_lp-hd1653316580_
(##car _e1653216577_))
(_lp-tl1653416582_
(##cdr _e1653216577_)))
(if (gx#stx-pair?
_lp-hd1653316580_)
(let ((_e1653916585_
(gx#stx-e _lp-hd1653316580_)))
(let ((_hd1654016588_ (##car _e1653916585_))
(_tl1654116590_ (##cdr _e1653916585_)))
(if (gx#stx-pair? _tl1654116590_)
(let ((_e1654216593_ (gx#stx-e _tl1654116590_)))
(let ((_hd1654316596_ (##car _e1654216593_))
(_tl1654416598_ (##cdr _e1654216593_)))
(if (gx#stx-null? _tl1654416598_)
(_loop1653116567_
_lp-tl1653416582_
(cons _hd1654316596_ _body1653516572_)
(cons _hd1654016588_ _hd1653616574_))
(_g1652116549_ _g1652216552_))))
(_g1652116549_ _g1652216552_))))
(_g1652116549_ _g1652216552_))))
(let ((_body1653716601_ (reverse _body1653516572_))
(_hd1653816603_ (reverse _hd1653616574_)))
((lambda (_L16606_ _L16607_)
(ormap1 (lambda (_g1662116623_)
(gxc#compile-e _g1662116623_ _arg16519_))
(foldr1 (lambda (_g1662516628_ _g1662616630_)
(cons _g1662516628_ _g1662616630_))
'()
_L16606_)))
_body1653716601_
_hd1653816603_))))))
(_loop1653116567_
_target1652816562_
'()
'()))
(_g1652116549_ _g1652216552_)))))
(_g1652116549_ _g1652216552_))))
(_g1652116549_ _g1652216552_)))))
(_g1652016633_ _stx16518_))))
(define gxc#find-let-values%
(lambda (_stx16368_ _arg16369_)
(let* ((_g1637116406_
(lambda (_g1637216403_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1637216403_)))
(_g1637016515_
(lambda (_g1637216409_)
(if (gx#stx-pair? _g1637216409_)
(let ((_e1637616411_ (gx#stx-e _g1637216409_)))
(let ((_hd1637716414_ (##car _e1637616411_))
(_tl1637816416_ (##cdr _e1637616411_)))
(if (gx#stx-pair? _tl1637816416_)
(let ((_e1637916419_ (gx#stx-e _tl1637816416_)))
(let ((_hd1638016422_ (##car _e1637916419_))
(_tl1638116424_ (##cdr _e1637916419_)))
(if (gx#stx-pair/null? _hd1638016422_)
(let ((_g18226_
(gx#syntax-split-splice
_hd1638016422_
'0)))
(begin
(let ((_g18227_
(if (##values? _g18226_)
(##vector-length _g18226_)
1)))
(if (not (##fx= _g18227_ 2))
(error "Context expects 2 values"
_g18227_)))
(let ((_target1638216427_
(##vector-ref _g18226_ 0))
(_tl1638416429_
(##vector-ref _g18226_ 1)))
(if (gx#stx-null? _tl1638416429_)
(letrec ((_loop1638516432_
(lambda (_hd1638316435_
_expr1638916437_
_bind1639016439_)
(if (gx#stx-pair? _hd1638316435_)
(let ((_e1638616442_ (gx#stx-e _hd1638316435_)))
(let ((_lp-hd1638716445_ (##car _e1638616442_))
(_lp-tl1638816447_ (##cdr _e1638616442_)))
(if (gx#stx-pair? _lp-hd1638716445_)
(let ((_e1639316450_
(gx#stx-e _lp-hd1638716445_)))
(let ((_hd1639416453_ (##car _e1639316450_))
(_tl1639516455_ (##cdr _e1639316450_)))
(if (gx#stx-pair? _tl1639516455_)
(let ((_e1639616458_
(gx#stx-e _tl1639516455_)))
(let ((_hd1639716461_
(##car _e1639616458_))
(_tl1639816463_
(##cdr _e1639616458_)))
(if (gx#stx-null? _tl1639816463_)
(_loop1638516432_
_lp-tl1638816447_
(cons _hd1639716461_
_expr1638916437_)
(cons _hd1639416453_
_bind1639016439_))
(_g1637116406_ _g1637216409_))))
(_g1637116406_ _g1637216409_))))
(_g1637116406_ _g1637216409_))))
(let ((_expr1639116466_ (reverse _expr1638916437_))
(_bind1639216468_ (reverse _bind1639016439_)))
(if (gx#stx-pair? _tl1638116424_)
(let ((_e1639916471_ (gx#stx-e _tl1638116424_)))
(let ((_hd1640016474_ (##car _e1639916471_))
(_tl1640116476_ (##cdr _e1639916471_)))
(if (gx#stx-null? _tl1640116476_)
((lambda (_L16479_ _L16480_ _L16481_)
(let ((_$e16512_
(ormap1 (lambda (_g1650016502_)
(gxc#compile-e
_g1650016502_
_arg16369_))
(foldr1 (lambda (_g1650416507_
_g1650516509_)
(cons _g1650416507_ _g1650516509_))
'()
_L16480_))))
(if _$e16512_
_$e16512_
(gxc#compile-e
_L16479_
_arg16369_))))
_hd1640016474_
_expr1639116466_
_bind1639216468_)
(_g1637116406_ _g1637216409_))))
(_g1637116406_ _g1637216409_)))))))
(_loop1638516432_
_target1638216427_
'()
'()))
(_g1637116406_ _g1637216409_)))))
(_g1637116406_ _g1637216409_))))
(_g1637116406_ _g1637216409_))))
(_g1637116406_ _g1637216409_)))))
(_g1637016515_ _stx16368_))))
(define gxc#find-setq%
(lambda (_stx16300_ _arg16301_)
(let* ((_g1630316320_
(lambda (_g1630416317_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1630416317_)))
(_g1630216365_
(lambda (_g1630416323_)
(if (gx#stx-pair? _g1630416323_)
(let ((_e1630716325_ (gx#stx-e _g1630416323_)))
(let ((_hd1630816328_ (##car _e1630716325_))
(_tl1630916330_ (##cdr _e1630716325_)))
(if (gx#stx-pair? _tl1630916330_)
(let ((_e1631016333_ (gx#stx-e _tl1630916330_)))
(let ((_hd1631116336_ (##car _e1631016333_))
(_tl1631216338_ (##cdr _e1631016333_)))
(if (gx#stx-pair? _tl1631216338_)
(let ((_e1631316341_
(gx#stx-e _tl1631216338_)))
(let ((_hd1631416344_
(##car _e1631316341_))
(_tl1631516346_
(##cdr _e1631316341_)))
(if (gx#stx-null? _tl1631516346_)
((lambda (_L16349_ _L16350_)
(gxc#compile-e
_L16349_
_arg16301_))
_hd1631416344_
_hd1631116336_)
(_g1630316320_ _g1630416323_))))
(_g1630316320_ _g1630416323_))))
(_g1630316320_ _g1630416323_))))
(_g1630316320_ _g1630416323_)))))
(_g1630216365_ _stx16300_))))
(define gxc#find-var-refs-ref%
(lambda (_stx16244_ _ids16245_)
(let* ((_g1624716260_
(lambda (_g1624816257_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1624816257_)))
(_g1624616297_
(lambda (_g1624816263_)
(if (gx#stx-pair? _g1624816263_)
(let ((_e1625016265_ (gx#stx-e _g1624816263_)))
(let ((_hd1625116268_ (##car _e1625016265_))
(_tl1625216270_ (##cdr _e1625016265_)))
(if (gx#stx-pair? _tl1625216270_)
(let ((_e1625316273_ (gx#stx-e _tl1625216270_)))
(let ((_hd1625416276_ (##car _e1625316273_))
(_tl1625516278_ (##cdr _e1625316273_)))
(if (gx#stx-null? _tl1625516278_)
((lambda (_L16281_)
(find (lambda (_g1629216294_)
(gx#free-identifier=?
_L16281_
_g1629216294_))
_ids16245_))
_hd1625416276_)
(_g1624716260_ _g1624816263_))))
(_g1624716260_ _g1624816263_))))
(_g1624716260_ _g1624816263_)))))
(_g1624616297_ _stx16244_))))
(define gxc#find-var-refs-setq%
(lambda (_stx16168_ _ids16169_)
(let* ((_g1617116188_
(lambda (_g1617216185_)
(gx#raise-syntax-error '#f '"Bad syntax" _g1617216185_)))
(_g1617016241_
(lambda (_g1617216191_)
(if (gx#stx-pair? _g1617216191_)
(let ((_e1617516193_ (gx#stx-e _g1617216191_)))
(let ((_hd1617616196_ (##car _e1617516193_))
(_tl1617716198_ (##cdr _e1617516193_)))
(if (gx#stx-pair? _tl1617716198_)
(let ((_e1617816201_ (gx#stx-e _tl1617716198_)))
(let ((_hd1617916204_ (##car _e1617816201_))
(_tl1618016206_ (##cdr _e1617816201_)))
(if (gx#stx-pair? _tl1618016206_)
(let ((_e1618116209_
(gx#stx-e _tl1618016206_)))
(let ((_hd1618216212_
(##car _e1618116209_))
(_tl1618316214_
(##cdr _e1618116209_)))
(if (gx#stx-null? _tl1618316214_)
((lambda (_L16217_ _L16218_)
(let ((_$e16238_
(find (lambda (_g1623316235_)
(gx#free-identifier=?
_L16218_
_g1623316235_))
_ids16169_)))
(if _$e16238_
_$e16238_
(gxc#compile-e
_L16217_
_ids16169_))))
_hd1618216212_
_hd1617916204_)
(_g1617116188_ _g1617216191_))))
(_g1617116188_ _g1617216191_))))
(_g1617116188_ _g1617216191_))))
(_g1617116188_ _g1617216191_)))))
(_g1617016241_ _stx16168_)))))
|
e518447463f85777ad0de61b417640b24ae26376761b65d078d909eedbc84359 | unisonweb/unison | PinBoard.hs | # LANGUAGE MagicHash #
# LANGUAGE UnboxedTuples #
module Unison.Test.Util.PinBoard
( test,
)
where
import qualified Data.ByteString as ByteString
import EasyTest
import GHC.Exts (isTrue#, reallyUnsafePtrEquality#, touch#)
import GHC.IO (IO (IO))
import System.Mem (performGC)
import qualified Unison.Util.PinBoard as PinBoard
test :: Test ()
test =
scope "util.pinboard" . tests $
[ scope "pinning equal values stores only one" $ do
let b0 = ByteString.singleton 0
let b1 = ByteString.copy b0
board <- PinBoard.new
pinning a thing for the first time returns it
b0' <- PinBoard.pin board b0
expectSamePointer b0 b0'
pinning an equal thing returns the first
b1' <- PinBoard.pin board b1
expectSamePointer b0 b1'
the board should only have one value in it
expect' . (== 1) <$> io (PinBoard.debugSize board)
-- keep b0 alive until here
touch b0
-- observe that the board doesn't keep its value alive
io performGC
expect' . (== 0) <$> io (PinBoard.debugSize board)
ok
]
expectSamePointer :: a -> a -> Test ()
expectSamePointer x y =
expect' (isTrue# (reallyUnsafePtrEquality# x y))
touch :: a -> Test ()
touch x =
io (IO \s -> (# touch# x s, () #))
| null | https://raw.githubusercontent.com/unisonweb/unison/477371ba97a019fe36294a76faed52190ef29d75/parser-typechecker/tests/Unison/Test/Util/PinBoard.hs | haskell | keep b0 alive until here
observe that the board doesn't keep its value alive | # LANGUAGE MagicHash #
# LANGUAGE UnboxedTuples #
module Unison.Test.Util.PinBoard
( test,
)
where
import qualified Data.ByteString as ByteString
import EasyTest
import GHC.Exts (isTrue#, reallyUnsafePtrEquality#, touch#)
import GHC.IO (IO (IO))
import System.Mem (performGC)
import qualified Unison.Util.PinBoard as PinBoard
test :: Test ()
test =
scope "util.pinboard" . tests $
[ scope "pinning equal values stores only one" $ do
let b0 = ByteString.singleton 0
let b1 = ByteString.copy b0
board <- PinBoard.new
pinning a thing for the first time returns it
b0' <- PinBoard.pin board b0
expectSamePointer b0 b0'
pinning an equal thing returns the first
b1' <- PinBoard.pin board b1
expectSamePointer b0 b1'
the board should only have one value in it
expect' . (== 1) <$> io (PinBoard.debugSize board)
touch b0
io performGC
expect' . (== 0) <$> io (PinBoard.debugSize board)
ok
]
expectSamePointer :: a -> a -> Test ()
expectSamePointer x y =
expect' (isTrue# (reallyUnsafePtrEquality# x y))
touch :: a -> Test ()
touch x =
io (IO \s -> (# touch# x s, () #))
|
c0f35e671fe0eb2c539edb8aa336fd2c94cba1b22653d7476156f357d2619acc | tonymorris/geo-gpx | Ptseg.hs | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
-- | Complex Type: @ptsegType@ </#type_ptsegType>
module Data.Geo.GPX.Type.Ptseg(
Ptseg
, ptseg
, runPtseg
) where
import Data.Geo.GPX.Type.Pt
import Text.XML.HXT.Arrow.Pickle
import Control.Newtype
newtype Ptseg = Ptseg [Pt]
deriving (Eq, Ord)
ptseg ::
[Pt] -- ^ The points (pt).
-> Ptseg
ptseg =
Ptseg
runPtseg ::
Ptseg
-> [Pt]
runPtseg (Ptseg p) =
p
instance XmlPickler Ptseg where
xpickle =
xpWrap (ptseg, \(Ptseg k) -> k) (xpList (xpElem "pt" xpickle))
instance Newtype Ptseg [Pt] where
pack =
Ptseg
unpack (Ptseg x) =
x
| null | https://raw.githubusercontent.com/tonymorris/geo-gpx/526b59ec403293c810c2ba08d2c006dc526e8bf9/src/Data/Geo/GPX/Type/Ptseg.hs | haskell | | Complex Type: @ptsegType@ </#type_ptsegType>
^ The points (pt). | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
module Data.Geo.GPX.Type.Ptseg(
Ptseg
, ptseg
, runPtseg
) where
import Data.Geo.GPX.Type.Pt
import Text.XML.HXT.Arrow.Pickle
import Control.Newtype
newtype Ptseg = Ptseg [Pt]
deriving (Eq, Ord)
ptseg ::
-> Ptseg
ptseg =
Ptseg
runPtseg ::
Ptseg
-> [Pt]
runPtseg (Ptseg p) =
p
instance XmlPickler Ptseg where
xpickle =
xpWrap (ptseg, \(Ptseg k) -> k) (xpList (xpElem "pt" xpickle))
instance Newtype Ptseg [Pt] where
pack =
Ptseg
unpack (Ptseg x) =
x
|
3dfb05f994778a9c4cede133efc2a0142925e5f0fbff2c1c58c492d1517b8c01 | expipiplus1/update-nix-fetchgit | Main.hs | # OPTIONS_GHC -Wno - orphans #
module Main
( main
) where
import Data.Bool
import Data.Foldable
import qualified Data.Text.IO as T
import Data.Version ( showVersion )
import Options.Applicative
import Options.Generic
import Paths_update_nix_fetchgit ( version )
import Say
import Text.ParserCombinators.ReadP ( char
, eof
, readP_to_S
, readS_to_P
, sepBy
)
import Text.Regex.TDFA
import Update.Nix.FetchGit
import Update.Nix.FetchGit.Types
main :: IO ()
main = do
(o, fs) <- parseOpts
let e = env o
let goStd = T.putStr =<< processText e =<< T.getContents
case fs of
[] -> goStd
_ -> for_ fs $ \f -> if f == "-" then goStd else processFile e f
----------------------------------------------------------------
-- Env
----------------------------------------------------------------
env :: Options Unwrapped -> Env
env Options {..} =
let sayLog
| verbose = const sayErr
| quiet = \case
Verbose -> const (pure ())
Normal -> const (pure ())
Quiet -> sayErr
| otherwise = \case
Verbose -> const (pure ())
Normal -> sayErr
Quiet -> sayErr
updateLocations = [ (l, c) | Position l c <- location ]
attrPatterns = attribute
dryness = bool Wet Dry dryRun
in Env { .. }
----------------------------------------------------------------
-- Options
----------------------------------------------------------------
data Options w = Options
{ verbose :: w ::: Bool <!> "False"
, quiet :: w ::: Bool <!> "False"
, location
:: w ::: [Position] <?> "Source location to limit updates to, Combined using inclusive or"
, attribute
:: w ::: [Regex] <?> "Pattern (POSIX regex) to limit updates to expressions under matching names in attrsets and let bindings. Combined using inclusive or, if this isn't specified then no expressions will be filtered by attribute name"
, dryRun :: w ::: Bool <!> "False" <?> "Don't modify the file"
, onlyCommented
:: w ::: Bool <!> "False" <?> "Only update from Git sources which have a comment on the 'rev' (or 'url' for fetchTarball from GitHub) attribute"
}
deriving stock Generic
parseOpts :: IO (Options Unwrapped, [FilePath])
parseOpts = customExecParser (prefs $ multiSuffix "...")
(info optParser (progDesc desc))
where
desc = unlines
[ "Update fetchers in Nix expressions."
, "Without any files, stdin and stdout will be used"
]
optParser :: Parser (Options Unwrapped, [FilePath])
optParser =
versionOption
<*> ( (,)
<$> (unwrap <$> parseRecordWithModifiers defaultModifiers
{ shortNameModifier = \case
"attribute" -> Just 'A'
n -> firstLetter n
, fieldNameModifier = \case
"dryRun" -> "dry-run"
"onlyCommented" -> "only-commented"
n -> n
}
)
<*> many
(strArgument
( help "Nix files to update"
<> Options.Applicative.metavar "FILE"
)
)
)
where
versionString = "update-nix-fetchgit-" <> showVersion version
versionOption :: Parser (a -> a)
versionOption = infoOption
versionString
(long "version" <> help ("print " <> versionString))
instance ParseRecord (Options Wrapped)
data Position = Position Int Int
deriving Show
instance Read Position where
readsPrec _ = readP_to_S $ do
[line, col] <- sepBy (readS_to_P reads) (char ':')
eof
pure $ Position line col
instance ParseField Position where
metavar _ = "LINE:COL"
instance Read Regex where
readsPrec _ s = case makeRegexM s of
Nothing -> []
Just r -> [(r, "")]
instance ParseField Regex where
metavar _ = "REGEX"
readField = eitherReader makeRegexM
| null | https://raw.githubusercontent.com/expipiplus1/update-nix-fetchgit/ee0efa386a747b2524d374585f42fc0bddecfefd/app/Main.hs | haskell | --------------------------------------------------------------
Env
--------------------------------------------------------------
--------------------------------------------------------------
Options
-------------------------------------------------------------- | # OPTIONS_GHC -Wno - orphans #
module Main
( main
) where
import Data.Bool
import Data.Foldable
import qualified Data.Text.IO as T
import Data.Version ( showVersion )
import Options.Applicative
import Options.Generic
import Paths_update_nix_fetchgit ( version )
import Say
import Text.ParserCombinators.ReadP ( char
, eof
, readP_to_S
, readS_to_P
, sepBy
)
import Text.Regex.TDFA
import Update.Nix.FetchGit
import Update.Nix.FetchGit.Types
-- | Entry point: parse options, then update each named file in place;
-- with no file arguments (or the pseudo-file "-") filter stdin to stdout.
main :: IO ()
main = do
  (o, fs) <- parseOpts
  let e = env o
  let goStd = T.putStr =<< processText e =<< T.getContents
  case fs of
    [] -> goStd
    _  -> for_ fs $ \f -> if f == "-" then goStd else processFile e f
-- | Translate parsed command-line 'Options' into the runtime 'Env'.
env :: Options Unwrapped -> Env
env Options {..} =
  let -- Verbosity policy: --verbose logs every level to stderr, --quiet
      -- only Quiet-level messages, the default suppresses Verbose only.
      sayLog
        | verbose = const sayErr
        | quiet = \case
            Verbose -> const (pure ())
            Normal -> const (pure ())
            Quiet -> sayErr
        | otherwise = \case
            Verbose -> const (pure ())
            Normal -> sayErr
            Quiet -> sayErr
      -- Restrict updates to these (line, column) positions, if any given.
      updateLocations = [ (l, c) | Position l c <- location ]
      attrPatterns = attribute
      -- --dry-run selects Dry (no files are modified).
      dryness = bool Wet Dry dryRun
  in  Env { .. }
data Options w = Options
{ verbose :: w ::: Bool <!> "False"
, quiet :: w ::: Bool <!> "False"
, location
:: w ::: [Position] <?> "Source location to limit updates to, Combined using inclusive or"
, attribute
:: w ::: [Regex] <?> "Pattern (POSIX regex) to limit updates to expressions under matching names in attrsets and let bindings. Combined using inclusive or, if this isn't specified then no expressions will be filtered by attribute name"
, dryRun :: w ::: Bool <!> "False" <?> "Don't modify the file"
, onlyCommented
:: w ::: Bool <!> "False" <?> "Only update from Git sources which have a comment on the 'rev' (or 'url' for fetchTarball from GitHub) attribute"
}
deriving stock Generic
parseOpts :: IO (Options Unwrapped, [FilePath])
parseOpts = customExecParser (prefs $ multiSuffix "...")
(info optParser (progDesc desc))
where
desc = unlines
[ "Update fetchers in Nix expressions."
, "Without any files, stdin and stdout will be used"
]
optParser :: Parser (Options Unwrapped, [FilePath])
optParser =
versionOption
<*> ( (,)
<$> (unwrap <$> parseRecordWithModifiers defaultModifiers
{ shortNameModifier = \case
"attribute" -> Just 'A'
n -> firstLetter n
, fieldNameModifier = \case
"dryRun" -> "dry-run"
"onlyCommented" -> "only-commented"
n -> n
}
)
<*> many
(strArgument
( help "Nix files to update"
<> Options.Applicative.metavar "FILE"
)
)
)
where
versionString = "update-nix-fetchgit-" <> showVersion version
versionOption :: Parser (a -> a)
versionOption = infoOption
versionString
(long "version" <> help ("print " <> versionString))
instance ParseRecord (Options Wrapped)
data Position = Position Int Int
deriving Show
instance Read Position where
readsPrec _ = readP_to_S $ do
[line, col] <- sepBy (readS_to_P reads) (char ':')
eof
pure $ Position line col
instance ParseField Position where
metavar _ = "LINE:COL"
instance Read Regex where
readsPrec _ s = case makeRegexM s of
Nothing -> []
Just r -> [(r, "")]
instance ParseField Regex where
metavar _ = "REGEX"
readField = eitherReader makeRegexM
|
5a757a819abdde1ed6a27c2c6ecf9fcb9c7fff965b4ca5094f9c4392228f13df | quil-lang/quilc | state-prep-tests.lisp | state-prep-tests.lisp
;;;;
Author :
(in-package #:cl-quil-tests)
(defun wf-to-matrix (wf)
  "Convert a sequence WF to a corresponding column vector."
  (let* ((entries (copy-seq wf))
         (shape (list (length wf) 1)))
    (quil::from-array entries shape)))
(defun check-state-prep (source-wf target-wf matrix)
  "Checks whether SOURCE-WF maps to TARGET-WF under the specified MATRIX.
Quantum states are only defined up to a global phase, so MATRIX * SOURCE-WF
is rescaled so that its first entry agrees with TARGET-WF's first entry
before demanding matrix equality.  NOTE(review): assumes the first entries
involved are nonzero -- TODO confirm callers guarantee this."
  (let* ((result (magicl:@ matrix
                           (wf-to-matrix source-wf)))
         (prefactor (/ (aref target-wf 0) (magicl:tref result 0 0))))
    (is (quil::matrix-equality (magicl:scale result prefactor)
                               (wf-to-matrix target-wf)))))
(deftest test-state-prep-formation ()
  "Checks that STATE-PREP-APPLICATION (with SOURCE-WF in the ground state) correctly compiles into native instructions."
  ;; Prepare a random 4-qubit target from |0000>, compile the
  ;; STATE-PREP-APPLICATION to native gates on an 8Q chip, and verify the
  ;; resulting circuit's matrix carries source to target.
  (let* ((qubits (mapcar #'quil:qubit (list 0 1 2 3)))
         (target-wf (quil::random-wavefunction (length qubits)))
         (source-wf (quil::build-ground-state (length qubits)))
         (instr (make-instance 'quil::state-prep-application
                               :arguments qubits
                               :target-wf target-wf
                               :source-wf source-wf)))
    (let* ((output-matrix (quil::make-matrix-from-quil
                           (quil::expand-to-native-instructions
                            (list instr)
                            (quil::build-8Q-chip)))))
      (check-state-prep source-wf target-wf output-matrix))))
(deftest test-aqvm-unlink-refuses-large-GHZ-state ()
  "Checks that an AQVM correctly assembles a GHZ state and then correctly disables itself."
  (let ((aqvm (quil::build-aqvm 8))
        (quil (quil::parse-quil "
H 0
CNOT 0 1
CNOT 1 2
CNOT 2 3
")))
    (dolist (instr (coerce (quil::parsed-program-executable-code quil) 'list))
      (quil::aqvm-apply-instruction aqvm instr))
    ;; check that the correct state was constructed:
    ;; a 4-qubit GHZ state (|0000> + |1111>)/sqrt(2)
    (destructuring-bind (wf qc-complex)
        (quil::aqvm-extract-single-wf-component aqvm 0)
      (declare (ignore qc-complex))
      (is (and (quil::double~ (/ (sqrt 2)) (aref wf 0))
               (quil::double~ (/ (sqrt 2)) (aref wf 15)))))
    ;; now check that unlinking the AQVM kills this state, since it is too entangled
    (quil::aqvm-unlink aqvm)
    (loop :for i :below 4
          :do (destructuring-bind (wf qc-complex)
                  (quil::aqvm-extract-single-wf-component aqvm i)
                (declare (ignore qc-complex))
                (is (eql ':not-simulated wf))))
    ;; qubit 4 was never touched, so it must still be in the ground state.
    ;; BUG FIX: the EVERY form's result was previously discarded, so this
    ;; check asserted nothing; wrap it in IS so a failure is reported.
    (destructuring-bind (wf qc-complex)
        (quil::aqvm-extract-single-wf-component aqvm 4)
      (declare (ignore qc-complex))
      (is (every #'quil::double~ wf (quil::build-ground-state 1))))))
(deftest test-state-prep-2Q-source-and-target ()
  "Checks that STATE-PREP-APPLICATION (both with arbitrary and with adversarial SOURCE-WF and TARGET-WF) correctly compiles into native instructions."
  (flet ((grab-row (m j)
           ;; Column J of the 4x4 matrix M as a simple complex vector.
           (make-array 4
                       :element-type '(complex double-float)
                       :initial-contents (loop :for i :below 4
                                               :collect (magicl:tref m i j))))
         (build-state-prep (source-wf target-wf)
           (make-instance 'quil::state-prep-application
                          :operator (named-operator "STATE-PREP")
                          :arguments (list (qubit 1) (qubit 0))
                          :source-wf source-wf
                          :target-wf target-wf)))
    (let ((chip (quil::build-8q-chip)))
      (dotimes (j 10)
        ;; Draw one product (unentangled) unitary and one generic unitary,
        ;; then exercise every combination of entangled/unentangled source
        ;; and target states taken from their columns.
        (let* ((unentangled-matrix (quil::make-matrix-from-quil
                                    (list (quil::anon-gate "U0" (quil::random-special-unitary 2) 0)
                                          (quil::anon-gate "U1" (quil::random-special-unitary 2) 1))))
               (entangled-matrix (quil::random-special-unitary 4)))
          (loop :for (source-wf target-wf) :in (list ;; entangled-entangled
                                                     (list (grab-row entangled-matrix 0)
                                                           (grab-row entangled-matrix 1))
                                                     ;; unentangled-entangled
                                                     (list (grab-row unentangled-matrix 0)
                                                           (grab-row entangled-matrix 2))
                                                     ;; entangled-unentangled
                                                     (list (grab-row entangled-matrix 3)
                                                           (grab-row unentangled-matrix 1))
                                                     ;; unentangled-unentangled
                                                     (list (grab-row unentangled-matrix 2)
                                                           (grab-row unentangled-matrix 3)))
                :for state-instr := (build-state-prep source-wf target-wf)
                :for state-circuit := (quil::expand-to-native-instructions
                                       (quil::state-prep-2Q-compiler state-instr) chip)
                :for circuit-result := (quil::nondestructively-apply-instrs-to-wf state-circuit source-wf
                                                                                  (list 0 1))
                ;; Equality up to global phase is all we can demand.
                :do (is (quil::collinearp target-wf circuit-result))))))))
(deftest test-state-prep-1q-source-and-target ()
  "Checks that STATE-PREP-APPLICATION (with arbitrary SOURCE-WF and TARGET-WF) correctly compiles into native instructions."
  ;; Single-qubit case: both endpoints are random 1-qubit states.
  (let* ((source-wf (quil::random-wavefunction 1))
         (target-wf (quil::random-wavefunction 1))
         (instr (make-instance 'quil::state-prep-application
                               :arguments (mapcar #'quil::qubit (list 0))
                               :target-wf target-wf
                               :source-wf source-wf)))
    (let* ((output-matrix (quil::make-matrix-from-quil
                           (quil::expand-to-native-instructions
                            (list instr)
                            (quil::build-8Q-chip)))))
      (check-state-prep source-wf target-wf output-matrix))))
(deftest test-state-prep-4q-compiler ()
  "Check that STATE-PREP-4Q-COMPILER (with arbitrary SOURCE-WF and TARGET-WF) correctly compiles into native instructions."
  ;; Unlike the generic expansion tests above, this drives the 4Q state
  ;; prep compiler directly and then lowers its output to native gates.
  (let* ((qubits (mapcar #'quil::qubit (list 0 1 2 3)))
         (source-wf (quil::random-wavefunction 4))
         (target-wf (quil::random-wavefunction 4))
         (instr (make-instance 'quil::state-prep-application
                               :arguments qubits
                               :target-wf target-wf
                               :source-wf source-wf))
         (output-matrix (quil::make-matrix-from-quil
                         (quil::expand-to-native-instructions
                          (quil::state-prep-4q-compiler instr)
                          (quil::build-8Q-chip)))))
    (check-state-prep source-wf target-wf output-matrix)))
(deftest test-schmidt-decomposition ()
  "Check that a random wavefunction can be reconstructed from its SCHMIDT-DECOMPOSITION."
  (flet ((matrix-column (m i)
           ;; Column I of M as an (nrows x 1) matrix.
           (magicl::slice m
                          (list 0 i)
                          (list (magicl:nrows m) (1+ i)))))
    (let* ((random-wf (quil::random-wavefunction 4)))
      ;; Split the 4-qubit state as 2 qubits (x) 2 qubits, then rebuild it
      ;; as sum_i c_i * (U_i (x) V_i) and compare with the original.
      (multiple-value-bind (c U V) (quil::schmidt-decomposition random-wf 2 2)
        (let* ((schmidt-terms (loop :for i :from 0 :below 4
                                    :collect (magicl:scale (magicl::kron
                                                            (matrix-column U i)
                                                            (matrix-column V i))
                                                           (aref c i))))
               (reconstructed-wf (reduce #'magicl:.+ schmidt-terms)))
          (is (quil::matrix-equality reconstructed-wf
                                     (wf-to-matrix random-wf))))))))
(deftest test-aqvm-unlink-on-10Q ()
(let ((quil::*aqvm-correlation-threshold* 4)
(aqvm (quil::build-aqvm 11))
(pp (quil::parse-quil "
# set up wf
RX(1.0) 3
RX(1.3) 1
RX(1.4) 0
RX(-0.2) 6
RX(-0.4) 7
RX(-0.6) 8
RX(-0.8) 9
RX(1.2) 2
RX(0.5) 5
RX(0.7) 4
CNOT 5 2
CNOT 6 7
CNOT 7 8
CNOT 8 9
CNOT 5 1
CNOT 0 4
# formally entangle qubits
CNOT 1 4
CNOT 1 4
CNOT 6 5
CNOT 6 5
CNOT 3 5
CNOT 3 5
")))
(dolist (instr (coerce (parsed-program-executable-code pp) 'list))
(quil::aqvm-apply-instruction aqvm instr))
(quil::aqvm-stop-simulating aqvm 10)
(destructuring-bind (wf qubit-list)
(quil::aqvm-extract-state aqvm (list 0 1 2 3 4 5 6 7 8 9)
:destructive-update nil)
(quil::aqvm-unlink aqvm)
(destructuring-bind (new-wf new-qubit-list)
(quil::aqvm-extract-state aqvm (list 0 1 2 3 4 5 6 7 8 9))
;; check wf against new-wf
(is (loop :for j :below (ash 1 10)
:always (let ((wf-index (loop :for i :from 0
:for idx :in qubit-list
:sum (ash (ldb (byte 1 idx) j) (- 9 i))))
(new-wf-index (loop :for i :from 0
:for idx :in new-qubit-list
:sum (ash (ldb (byte 1 idx) j) (- 9 i)))))
(quil::double= (aref wf wf-index)
(aref new-wf new-wf-index)))))
;; check new-wf has small components
(is (loop :for wf :across (quil::antisocial-qvm-wfs aqvm)
:for expected-size :in (list 4 8 8 2 4 8 16 16 16 16 ':not-simulated)
:always (if (eql ':not-simulated wf)
(eql ':not-simulated expected-size)
(= expected-size (array-total-size wf)))))))))
(deftest test-canonicalize-wf ()
  ;; CANONICALIZE-WF returns (values M V); V must equal M applied to the
  ;; input, and the canonical form must have a real second amplitude and
  ;; vanishing third and fourth amplitudes.
  (dotimes (n 100)
    (let ((wf (quil::random-wavefunction 2)))
      (multiple-value-bind (m v)
          (quil::canonicalize-wf wf)
        ;; V really is M * WF ...
        (is (every #'quil::double=
                   v
                   (quil::nondestructively-apply-matrix-to-vector m wf)))
        ;; ... and is in canonical form.
        (is (quil::double= 0d0 (imagpart (aref v 1))))
        (is (quil::double= 0d0 (aref v 2)))
        (is (quil::double= 0d0 (aref v 3)))))))
| null | https://raw.githubusercontent.com/quil-lang/quilc/3f3260aaa65cdde25a4f9c0027959e37ceef9d64/tests/state-prep-tests.lisp | lisp |
check that the correct state was constructed
entangled-entangled
unentangled-entangled
entangled-unentangled
unentangled-unentangled
check wf against new-wf
check new-wf has small components | state-prep-tests.lisp
Author :
(in-package #:cl-quil-tests)
(defun wf-to-matrix (wf)
"Convert a sequence WF to a corresponding column vector."
(quil::from-array (copy-seq wf)
(list (length wf) 1)))
(defun check-state-prep (source-wf target-wf matrix)
"Checks whether SOURCE-WF maps to TARGET-WF under the specified MATRIX."
(let* ((result (magicl:@ matrix
(wf-to-matrix source-wf)))
(prefactor (/ (aref target-wf 0) (magicl:tref result 0 0))))
(is (quil::matrix-equality (magicl:scale result prefactor)
(wf-to-matrix target-wf)))))
(deftest test-state-prep-formation ()
"Checks that STATE-PREP-APPLICATION (with SOURCE-WF in the ground state) correctly compiles into native instructions."
(let* ((qubits (mapcar #'quil:qubit (list 0 1 2 3)))
(target-wf (quil::random-wavefunction (length qubits)))
(source-wf (quil::build-ground-state (length qubits)))
(instr (make-instance 'quil::state-prep-application
:arguments qubits
:target-wf target-wf
:source-wf source-wf)))
(let* ((output-matrix (quil::make-matrix-from-quil
(quil::expand-to-native-instructions
(list instr)
(quil::build-8Q-chip)))))
(check-state-prep source-wf target-wf output-matrix))))
(deftest test-aqvm-unlink-refuses-large-GHZ-state ()
"Checks that an AQVM correctly assembles a GHZ state and then correctly disables itself."
(let ((aqvm (quil::build-aqvm 8))
(quil (quil::parse-quil "
H 0
CNOT 0 1
CNOT 1 2
CNOT 2 3
")))
(dolist (instr (coerce (quil::parsed-program-executable-code quil) 'list))
(quil::aqvm-apply-instruction aqvm instr))
(destructuring-bind (wf qc-complex)
(quil::aqvm-extract-single-wf-component aqvm 0)
(declare (ignore qc-complex))
(is (and (quil::double~ (/ (sqrt 2)) (aref wf 0))
(quil::double~ (/ (sqrt 2)) (aref wf 15)))))
now check that unlinking the AQVM kills this state , since it is too entangled
(quil::aqvm-unlink aqvm)
(loop :for i :below 4
:do (destructuring-bind (wf qc-complex)
(quil::aqvm-extract-single-wf-component aqvm i)
(declare (ignore qc-complex))
(is (eql ':not-simulated wf))))
(destructuring-bind (wf qc-complex)
(quil::aqvm-extract-single-wf-component aqvm 4)
(declare (ignore qc-complex))
(every #'quil::double~ wf (quil::build-ground-state 1)))))
(deftest test-state-prep-2Q-source-and-target ()
"Checks that STATE-PREP-APPLICATION (both with arbitrary and with adversarial SOURCE-WF and TARGET-WF) correctly compiles into native instructions."
(flet ((grab-row (m j)
(make-array 4
:element-type '(complex double-float)
:initial-contents (loop :for i :below 4
:collect (magicl:tref m i j))))
(build-state-prep (source-wf target-wf)
(make-instance 'quil::state-prep-application
:operator (named-operator "STATE-PREP")
:arguments (list (qubit 1) (qubit 0))
:source-wf source-wf
:target-wf target-wf)))
(let ((chip (quil::build-8q-chip)))
(dotimes (j 10)
(let* ((unentangled-matrix (quil::make-matrix-from-quil
(list (quil::anon-gate "U0" (quil::random-special-unitary 2) 0)
(quil::anon-gate "U1" (quil::random-special-unitary 2) 1))))
(entangled-matrix (quil::random-special-unitary 4)))
(list (grab-row entangled-matrix 0)
(grab-row entangled-matrix 1))
(list (grab-row unentangled-matrix 0)
(grab-row entangled-matrix 2))
(list (grab-row entangled-matrix 3)
(grab-row unentangled-matrix 1))
(list (grab-row unentangled-matrix 2)
(grab-row unentangled-matrix 3)))
:for state-instr := (build-state-prep source-wf target-wf)
:for state-circuit := (quil::expand-to-native-instructions
(quil::state-prep-2Q-compiler state-instr) chip)
:for circuit-result := (quil::nondestructively-apply-instrs-to-wf state-circuit source-wf
(list 0 1))
:do (is (quil::collinearp target-wf circuit-result))))))))
(deftest test-state-prep-1q-source-and-target ()
"Checks that STATE-PREP-APPLICATION (with arbitrary SOURCE-WF and TARGET-WF) correctly compiles into native instructions."
(let* ((source-wf (quil::random-wavefunction 1))
(target-wf (quil::random-wavefunction 1))
(instr (make-instance 'quil::state-prep-application
:arguments (mapcar #'quil::qubit (list 0))
:target-wf target-wf
:source-wf source-wf)))
(let* ((output-matrix (quil::make-matrix-from-quil
(quil::expand-to-native-instructions
(list instr)
(quil::build-8Q-chip)))))
(check-state-prep source-wf target-wf output-matrix))))
(deftest test-state-prep-4q-compiler ()
"Check that STATE-PREP-4Q-COMPILER (with arbitrary SOURCE-WF and TARGET-WF) correctly compiles into native instructions."
(let* ((qubits (mapcar #'quil::qubit (list 0 1 2 3)))
(source-wf (quil::random-wavefunction 4))
(target-wf (quil::random-wavefunction 4))
(instr (make-instance 'quil::state-prep-application
:arguments qubits
:target-wf target-wf
:source-wf source-wf))
(output-matrix (quil::make-matrix-from-quil
(quil::expand-to-native-instructions
(quil::state-prep-4q-compiler instr)
(quil::build-8Q-chip)))))
(check-state-prep source-wf target-wf output-matrix)))
(deftest test-schmidt-decomposition ()
"Check that a random wavefunction can be reconstructed from its SCHMIDT-DECOMPOSITION."
(flet ((matrix-column (m i)
(magicl::slice m
(list 0 i)
(list (magicl:nrows m) (1+ i)))))
(let* ((random-wf (quil::random-wavefunction 4)))
(multiple-value-bind (c U V) (quil::schmidt-decomposition random-wf 2 2)
(let* ((schmidt-terms (loop :for i :from 0 :below 4
:collect (magicl:scale (magicl::kron
(matrix-column U i)
(matrix-column V i))
(aref c i))))
(reconstructed-wf (reduce #'magicl:.+ schmidt-terms)))
(is (quil::matrix-equality reconstructed-wf
(wf-to-matrix random-wf))))))))
(deftest test-aqvm-unlink-on-10Q ()
(let ((quil::*aqvm-correlation-threshold* 4)
(aqvm (quil::build-aqvm 11))
(pp (quil::parse-quil "
# set up wf
RX(1.0) 3
RX(1.3) 1
RX(1.4) 0
RX(-0.2) 6
RX(-0.4) 7
RX(-0.6) 8
RX(-0.8) 9
RX(1.2) 2
RX(0.5) 5
RX(0.7) 4
CNOT 5 2
CNOT 6 7
CNOT 7 8
CNOT 8 9
CNOT 5 1
CNOT 0 4
# formally entangle qubits
CNOT 1 4
CNOT 1 4
CNOT 6 5
CNOT 6 5
CNOT 3 5
CNOT 3 5
")))
(dolist (instr (coerce (parsed-program-executable-code pp) 'list))
(quil::aqvm-apply-instruction aqvm instr))
(quil::aqvm-stop-simulating aqvm 10)
(destructuring-bind (wf qubit-list)
(quil::aqvm-extract-state aqvm (list 0 1 2 3 4 5 6 7 8 9)
:destructive-update nil)
(quil::aqvm-unlink aqvm)
(destructuring-bind (new-wf new-qubit-list)
(quil::aqvm-extract-state aqvm (list 0 1 2 3 4 5 6 7 8 9))
(is (loop :for j :below (ash 1 10)
:always (let ((wf-index (loop :for i :from 0
:for idx :in qubit-list
:sum (ash (ldb (byte 1 idx) j) (- 9 i))))
(new-wf-index (loop :for i :from 0
:for idx :in new-qubit-list
:sum (ash (ldb (byte 1 idx) j) (- 9 i)))))
(quil::double= (aref wf wf-index)
(aref new-wf new-wf-index)))))
(is (loop :for wf :across (quil::antisocial-qvm-wfs aqvm)
:for expected-size :in (list 4 8 8 2 4 8 16 16 16 16 ':not-simulated)
:always (if (eql ':not-simulated wf)
(eql ':not-simulated expected-size)
(= expected-size (array-total-size wf)))))))))
(deftest test-canonicalize-wf ()
(dotimes (n 100)
(let ((wf (quil::random-wavefunction 2)))
(multiple-value-bind (m v)
(quil::canonicalize-wf wf)
(is (every #'quil::double=
v
(quil::nondestructively-apply-matrix-to-vector m wf)))
(is (quil::double= 0d0 (imagpart (aref v 1))))
(is (quil::double= 0d0 (aref v 2)))
(is (quil::double= 0d0 (aref v 3)))))))
|
a9bd335ddfe3a26f535c5791869c519adb23f186858a64d1bcad1668320e8821 | cblp/python5 | dict.hs | main = do
let animals = dict[ "dog" := int(1),
"cat" := 2 ]
-- total <- animals.get("dog") + animals.get("cat")
-- ^ error: Couldn't match type ‘Maybe Integer’ with ‘Integer’
total <- animals.getdefault("dog", 0) + animals.getdefault("cat", 0)
print(total)
| null | https://raw.githubusercontent.com/cblp/python5/897b7bbb7b522fa5653eff10b9ae616a4e01b6ff/examples/dict.hs | haskell | total <- animals.get("dog") + animals.get("cat")
^ error: Couldn't match type ‘Maybe Integer’ with ‘Integer’ | main = do
let animals = dict[ "dog" := int(1),
"cat" := 2 ]
total <- animals.getdefault("dog", 0) + animals.getdefault("cat", 0)
print(total)
|
ba4977983d2ba7b06b26140d1061bc6d536ab7c2b6521bfc9aab8708423b4c06 | emqx/emqx | emqx_limiter_server_sup.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_limiter_server_sup).
-behaviour(supervisor).
%% API
-export([start_link/0, start/1, start/2, stop/1]).
%% Supervisor callbacks
-export([init/1]).
%%--------------------------------------------------------------------
%% API functions
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% @doc
%% Starts the supervisor
%% @end
%%--------------------------------------------------------------------
%% The supervisor registers itself locally under the module name, so a
%% single instance exists per node.
-spec start_link() ->
    {ok, Pid :: pid()}
    | {error, {already_started, Pid :: pid()}}
    | {error, {shutdown, term()}}
    | {error, term()}
    | ignore.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% @doc Start a limiter server child for Type, building its child spec
%% from the application configuration.
-spec start(emqx_limiter_schema:limiter_type()) -> _.
start(Type) ->
    supervisor:start_child(?MODULE, make_child(Type)).
%% @doc Start a limiter server child for Type with an explicit config.
-spec start(emqx_limiter_schema:limiter_type(), hocons:config()) -> _.
start(Type, Cfg) ->
    supervisor:start_child(?MODULE, make_child(Type, Cfg)).
%% @doc Stop the limiter server for Type.
%% A child must be terminated before its spec can be deleted; the result
%% of terminate_child/2 is deliberately ignored so that a missing child
%% still allows the spec to be removed.
stop(Type) ->
    Id = emqx_limiter_server:name(Type),
    _ = supervisor:terminate_child(?MODULE, Id),
    supervisor:delete_child(?MODULE, Id).
%%--------------------------------------------------------------------
%% Supervisor callbacks
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
@private
%% @doc
%% Whenever a supervisor is started using supervisor:start_link/[2,3],
%% this function is called by the new process to find out about
%% restart strategy, maximum restart intensity, and child
%% specifications.
%% @end
%%--------------------------------------------------------------------
-spec init(Args :: term()) ->
    {ok, {SupFlags :: supervisor:sup_flags(), [ChildSpec :: supervisor:child_spec()]}}
    | ignore.
init([]) ->
    %% one_for_one: restart each limiter server independently; tolerate at
    %% most 10 restarts per hour before the supervisor itself gives up.
    SupFlags = #{
        strategy => one_for_one,
        intensity => 10,
        period => 3600
    },
    {ok, {SupFlags, childs()}}.
%%--==================================================================
%% Internal functions
%%--==================================================================
%% Build the child spec for Type from the application environment.
make_child(Type) ->
    make_child(Type, emqx:get_config([limiter, Type])).
%% Child spec for a single emqx_limiter_server instance.
make_child(Type, Cfg) ->
    %% The child id doubles as the server's registered name.
    Id = emqx_limiter_server:name(Type),
    #{
        id => Id,
        start => {emqx_limiter_server, start_link, [Type, Cfg]},
        %% transient: restarted only if it terminates abnormally.
        restart => transient,
        shutdown => 5000,
        type => worker,
        modules => [emqx_limiter_server]
    }.
%% One child spec per configured limiter type.
childs() ->
    lists:map(fun make_child/1, emqx_limiter_schema:types()).
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx/src/emqx_limiter/src/emqx_limiter_server_sup.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
API
Supervisor callbacks
--------------------------------------------------------------------
API functions
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Starts the supervisor
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
Supervisor callbacks
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Whenever a supervisor is started using supervisor:start_link/[2,3],
this function is called by the new process to find out about
restart strategy, maximum restart intensity, and child
specifications.
@end
--------------------------------------------------------------------
--==================================================================
Internal functions
--================================================================== | Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_limiter_server_sup).
-behaviour(supervisor).
-export([start_link/0, start/1, start/2, stop/1]).
-export([init/1]).
-spec start_link() ->
{ok, Pid :: pid()}
| {error, {already_started, Pid :: pid()}}
| {error, {shutdown, term()}}
| {error, term()}
| ignore.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec start(emqx_limiter_schema:limiter_type()) -> _.
start(Type) ->
Spec = make_child(Type),
supervisor:start_child(?MODULE, Spec).
-spec start(emqx_limiter_schema:limiter_type(), hocons:config()) -> _.
start(Type, Cfg) ->
Spec = make_child(Type, Cfg),
supervisor:start_child(?MODULE, Spec).
stop(Type) ->
Id = emqx_limiter_server:name(Type),
_ = supervisor:terminate_child(?MODULE, Id),
supervisor:delete_child(?MODULE, Id).
@private
-spec init(Args :: term()) ->
{ok, {SupFlags :: supervisor:sup_flags(), [ChildSpec :: supervisor:child_spec()]}}
| ignore.
init([]) ->
SupFlags = #{
strategy => one_for_one,
intensity => 10,
period => 3600
},
{ok, {SupFlags, childs()}}.
make_child(Type) ->
Cfg = emqx:get_config([limiter, Type]),
make_child(Type, Cfg).
make_child(Type, Cfg) ->
Id = emqx_limiter_server:name(Type),
#{
id => Id,
start => {emqx_limiter_server, start_link, [Type, Cfg]},
restart => transient,
shutdown => 5000,
type => worker,
modules => [emqx_limiter_server]
}.
childs() ->
[make_child(Type) || Type <- emqx_limiter_schema:types()].
|
5b8c0211210abda016f71362e930cef5fdc04ff24fe176f13ce9a1553673c716 | ocamllabs/ocaml-effects | odoc_analyse.ml | (***********************************************************************)
(* *)
(* OCamldoc *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
(** Analysis of source files. This module is strongly inspired from
driver/main.ml :-) *)
let print_DEBUG s = print_string s ; print_newline ()
open Config
open Clflags
open Misc
open Format
open Typedtree
* Initialize the search path .
The current directory is always searched first ,
then the directories specified with the -I option ( in command - line order ) ,
then the standard library directory .
The current directory is always searched first,
then the directories specified with the -I option (in command-line order),
then the standard library directory. *)
let init_path () =
  (* "" = current directory, searched first; include_dirs accumulated in
     reverse command-line order, hence the List.rev.  The stdlib directory
     ends up last.  Resetting Env's cache invalidates any .cmi lookups
     made with the previous path. *)
  load_path :=
    "" :: List.rev (Config.standard_library :: !Clflags.include_dirs);
  Env.reset_cache ()
(** Return the initial environment in which compilation proceeds. *)
let initial_env () =
  (* Pick the initial typing environment according to -unsafe-string. *)
  let initial =
    if !Clflags.unsafe_string then Env.initial_unsafe_string
    else Env.initial_safe_string
  in
  try
    (* Unless -nopervasives was given, open Pervasives so its names are
       available unqualified, as in normal compilation. *)
    if !Clflags.nopervasives then initial else
    Env.open_pers_signature "Pervasives" initial
  with Not_found ->
    fatal_error "cannot open pervasives.cmi"
(** Optionally preprocess a source file *)
let preprocess sourcefile =
  try
    Pparse.preprocess sourcefile
  with Pparse.Error err ->
    (* A preprocessor failure is fatal for the whole ocamldoc run. *)
    Format.eprintf "Preprocessing error@.%a@."
      Pparse.report_error err;
    exit 2
let (++) x f = f x
(** Analysis of an implementation file. Returns (Some typedtree) if
no error occured, else None and an error message is printed.*)
let tool_name = "ocamldoc"
(* NOTE(review): [ppf] is unused; diagnostics go to Format.err_formatter. *)
let process_implementation_file ppf sourcefile =
  init_path ();
  (* Derive the module name from the file name, as the compiler does. *)
  let prefixname = Filename.chop_extension sourcefile in
  let modulename = String.capitalize_ascii(Filename.basename prefixname) in
  Env.set_unit_name modulename;
  let inputfile = preprocess sourcefile in
  let env = initial_env () in
  try
    let parsetree =
      Pparse.file ~tool_name Format.err_formatter inputfile
        Parse.implementation ast_impl_magic_number
    in
    let typedtree =
      Typemod.type_implementation
        sourcefile prefixname modulename env parsetree
    in
    (Some (parsetree, typedtree), inputfile)
  with
    e ->
      match e with
        Syntaxerr.Error err ->
          (* Report syntax errors and signal failure with None. *)
          fprintf Format.err_formatter "@[%a@]@."
            Syntaxerr.report_error err;
          None, inputfile
      | Failure s ->
          prerr_endline s;
          incr Odoc_global.errors ;
          None, inputfile
      | e ->
          (* Anything else propagates to the caller. *)
          raise e
(** Analysis of an interface file. Returns (Some signature) if
no error occured, else None and an error message is printed.*)
(* NOTE(review): [ppf] is unused; diagnostics go to Format.err_formatter. *)
let process_interface_file ppf sourcefile =
  init_path ();
  let prefixname = Filename.chop_extension sourcefile in
  let modulename = String.capitalize_ascii(Filename.basename prefixname) in
  Env.set_unit_name modulename;
  let inputfile = preprocess sourcefile in
  let ast =
    Pparse.file ~tool_name Format.err_formatter inputfile
      Parse.interface ast_intf_magic_number
  in
  let sg = Typemod.type_interface (initial_env()) ast in
  (* Turn any pending warnings-as-errors into an exception before
     reporting success. *)
  Warnings.check_fatal ();
  (ast, sg, inputfile)
(** The module used to analyse the parsetree and signature of an implementation file.*)
module Ast_analyser = Odoc_ast.Analyser (Odoc_comments.Basic_info_retriever)
(** The module used to analyse the parse tree and typed tree of an interface file.*)
module Sig_analyser = Odoc_sig.Analyser (Odoc_comments.Basic_info_retriever)
(** Handle an error. *)
let process_error exn =
  (* Prefer the compiler's own located error report when the exception is
     one it knows how to render; otherwise fall back to Printexc. *)
  match Location.error_of_exn exn with
  | Some err ->
      fprintf Format.err_formatter "@[%a@]@." Location.report_error err
  | None ->
      fprintf Format.err_formatter
        "Compilation error(%s). Use the OCaml compiler to get more details.@."
        (Printexc.to_string exn)
* Process the given file , according to its extension . Return the Module.t created , if any .
(* Dispatch on the kind of input file (.ml implementation, .mli interface,
   or plain text) and return the analysed Odoc module, or None after
   reporting an error and bumping Odoc_global.errors. *)
let process_file ppf sourcefile =
  if !Odoc_global.verbose then
    (
     let f = match sourcefile with
       Odoc_global.Impl_file f
     | Odoc_global.Intf_file f -> f
     | Odoc_global.Text_file f -> f
     in
     print_string (Odoc_messages.analysing f) ;
     print_newline ();
    );
  match sourcefile with
    Odoc_global.Impl_file file ->
      (
       Location.input_name := file;
       try
         let (parsetree_typedtree_opt, input_file) = process_implementation_file ppf file in
         match parsetree_typedtree_opt with
           None ->
             None
         | Some (parsetree, typedtree) ->
             let file_module = Ast_analyser.analyse_typed_tree file
                 !Location.input_name parsetree typedtree
             in
             (* Record which other modules this unit depends on. *)
             file_module.Odoc_module.m_top_deps <- Odoc_dep.impl_dependencies parsetree ;
             if !Odoc_global.verbose then
               (
                print_string Odoc_messages.ok;
                print_newline ()
               );
             Pparse.remove_preprocessed input_file;
             Some file_module
       with
       | Sys_error s
       | Failure s ->
           prerr_endline s ;
           incr Odoc_global.errors ;
           None
       | e ->
           process_error e ;
           incr Odoc_global.errors ;
           None
      )
  | Odoc_global.Intf_file file ->
      (
       Location.input_name := file;
       try
         let (ast, signat, input_file) = process_interface_file ppf file in
         let file_module = Sig_analyser.analyse_signature file
             !Location.input_name ast signat.sig_type
         in
         file_module.Odoc_module.m_top_deps <- Odoc_dep.intf_dependencies ast ;
         if !Odoc_global.verbose then
           (
            print_string Odoc_messages.ok;
            print_newline ()
           );
         Pparse.remove_preprocessed input_file;
         Some file_module
       with
       | Sys_error s
       | Failure s ->
           prerr_endline s;
           incr Odoc_global.errors ;
           None
       | e ->
           process_error e ;
           incr Odoc_global.errors ;
           None
      )
  | Odoc_global.Text_file file ->
      (* A text file becomes a synthetic module containing just a comment
         element; parse errors in the text markup are turned into Failure. *)
      Location.input_name := file;
      try
        let mod_name =
          let s =
            try Filename.chop_extension file
            with _ -> file
          in
          String.capitalize_ascii (Filename.basename s)
        in
        let txt =
          try Odoc_text.Texter.text_of_string (Odoc_misc.input_file_as_string file)
          with Odoc_text.Text_syntax (l, c, s) ->
            raise (Failure (Odoc_messages.text_parse_error l c s))
        in
        let m =
          {
            Odoc_module.m_name = mod_name ;
            Odoc_module.m_type = Types.Mty_signature [] ;
            Odoc_module.m_info = None ;
            Odoc_module.m_is_interface = true ;
            Odoc_module.m_file = file ;
            Odoc_module.m_kind = Odoc_module.Module_struct
              [Odoc_module.Element_module_comment txt] ;
            Odoc_module.m_loc =
              { Odoc_types.loc_impl = None ;
                Odoc_types.loc_inter = Some (Location.in_file file) } ;
            Odoc_module.m_top_deps = [] ;
            Odoc_module.m_code = None ;
            Odoc_module.m_code_intf = None ;
            Odoc_module.m_text_only = true ;
          }
        in
        Some m
      with
      | Sys_error s
      | Failure s ->
          prerr_endline s;
          incr Odoc_global.errors ;
          None
      | e ->
          process_error e ;
          incr Odoc_global.errors ;
          None
(** Drop the class elements that lie between stop special comments.
    A class comment consisting solely of ["/*"] toggles the [keep] flag;
    elements are retained only while [keep] is true, and the stop comment
    itself is always discarded. *)
let rec remove_class_elements_between_stop keep eles =
  match eles with
  | [] -> []
  | (Odoc_class.Class_comment [ Odoc_types.Raw "/*" ]) :: rest ->
      (* Stop comment: flip the flag and drop the marker itself. *)
      remove_class_elements_between_stop (not keep) rest
  | ele :: rest ->
      let tail = remove_class_elements_between_stop keep rest in
      if keep then ele :: tail else tail
(** Remove the class elements between the stop special comments,
    recursively, inside a class kind. *)
let rec remove_class_elements_between_stop_in_class_kind k =
  match k with
  | Odoc_class.Class_structure (inheritance, elements) ->
      Odoc_class.Class_structure
        (inheritance, remove_class_elements_between_stop true elements)
  | Odoc_class.Class_constraint (ck, ctk) ->
      (* Clean both the class expression and its constraining type. *)
      Odoc_class.Class_constraint
        (remove_class_elements_between_stop_in_class_kind ck,
         remove_class_elements_between_stop_in_class_type_kind ctk)
  | Odoc_class.Class_apply _
  | Odoc_class.Class_constr _ ->
      (* Nothing to filter in applications or constructor references. *)
      k

(** Remove the class elements between the stop special comments,
    recursively, inside a class type kind. *)
and remove_class_elements_between_stop_in_class_type_kind tk =
  match tk with
  | Odoc_class.Class_signature (inheritance, elements) ->
      Odoc_class.Class_signature
        (inheritance, remove_class_elements_between_stop true elements)
  | Odoc_class.Class_type _ -> tk
(** Remove the module elements between the stop special comments. *)
(* [remove_module_elements_between_stop keep eles] walks [eles].  A module
   comment consisting solely of "/*" is a stop marker that toggles [keep];
   elements are retained only while [keep] is true, and the marker itself
   is always dropped.  Retained sub-structures (modules, module types,
   classes, class types) are cleaned recursively by mutating their kind
   fields in place. *)
let rec remove_module_elements_between_stop keep eles =
  let f = remove_module_elements_between_stop in
  match eles with
    [] -> []
  | ele :: q ->
      match ele with
        Odoc_module.Element_module_comment [ Odoc_types.Raw "/*" ] ->
          (* Stop comment: flip the flag and discard the marker. *)
          f (not keep) q
      | Odoc_module.Element_module_comment _ ->
          if keep then
            ele :: (f keep q)
          else
            f keep q
      | Odoc_module.Element_module m ->
          if keep then
            (
             (* Clean the submodule's contents in place before keeping it. *)
             m.Odoc_module.m_kind <- remove_module_elements_between_stop_in_module_kind m.Odoc_module.m_kind ;
             (Odoc_module.Element_module m) :: (f keep q)
            )
          else
            f keep q
      | Odoc_module.Element_module_type mt ->
          if keep then
            (
             (* mt_kind is optional, hence the apply_opt wrapper. *)
             mt.Odoc_module.mt_kind <- Odoc_misc.apply_opt
                 remove_module_elements_between_stop_in_module_type_kind mt.Odoc_module.mt_kind ;
             (Odoc_module.Element_module_type mt) :: (f keep q)
            )
          else
            f keep q
      | Odoc_module.Element_included_module _ ->
          if keep then
            ele :: (f keep q)
          else
            f keep q
      | Odoc_module.Element_class c ->
          if keep then
            (
             c.Odoc_class.cl_kind <- remove_class_elements_between_stop_in_class_kind c.Odoc_class.cl_kind ;
             (Odoc_module.Element_class c) :: (f keep q)
            )
          else
            f keep q
      | Odoc_module.Element_class_type ct ->
          if keep then
            (
             ct.Odoc_class.clt_kind <- remove_class_elements_between_stop_in_class_type_kind ct.Odoc_class.clt_kind ;
             (Odoc_module.Element_class_type ct) :: (f keep q)
            )
          else
            f keep q
      | Odoc_module.Element_value _
      | Odoc_module.Element_type_extension _
      | Odoc_module.Element_exception _
      | Odoc_module.Element_type _ ->
          (* Leaf elements: kept or dropped as a whole, nothing to recurse
             into. *)
          if keep then
            ele :: (f keep q)
          else
            f keep q

(** Remove the module elements between the stop special comments, in the given module kind. *)
and remove_module_elements_between_stop_in_module_kind k =
  match k with
  | Odoc_module.Module_struct l -> Odoc_module.Module_struct (remove_module_elements_between_stop true l)
  | Odoc_module.Module_alias _ -> k
  | Odoc_module.Module_functor (params, k2) ->
      Odoc_module.Module_functor (params, remove_module_elements_between_stop_in_module_kind k2)
  | Odoc_module.Module_apply (k1, k2) ->
      Odoc_module.Module_apply (remove_module_elements_between_stop_in_module_kind k1,
                                remove_module_elements_between_stop_in_module_kind k2)
  | Odoc_module.Module_with (mtkind, s) ->
      Odoc_module.Module_with (remove_module_elements_between_stop_in_module_type_kind mtkind, s)
  | Odoc_module.Module_constraint (k2, mtkind) ->
      Odoc_module.Module_constraint (remove_module_elements_between_stop_in_module_kind k2,
                                     remove_module_elements_between_stop_in_module_type_kind mtkind)
  | Odoc_module.Module_typeof _ -> k
  | Odoc_module.Module_unpack _ -> k

(** Remove the module elements between the stop special comment, in the given module type kind. *)
and remove_module_elements_between_stop_in_module_type_kind tk =
  match tk with
  | Odoc_module.Module_type_struct l -> Odoc_module.Module_type_struct (remove_module_elements_between_stop true l)
  | Odoc_module.Module_type_functor (params, tk2) ->
      Odoc_module.Module_type_functor (params, remove_module_elements_between_stop_in_module_type_kind tk2)
  | Odoc_module.Module_type_alias _ -> tk
  | Odoc_module.Module_type_with (tk2, s) ->
      Odoc_module.Module_type_with (remove_module_elements_between_stop_in_module_type_kind tk2, s)
  | Odoc_module.Module_type_typeof _ -> tk
(** Strip the stop-delimited elements from every module of the list,
    updating each module's kind in place. *)
let remove_elements_between_stop module_list =
  let strip m =
    m.Odoc_module.m_kind <-
      remove_module_elements_between_stop_in_module_kind m.Odoc_module.m_kind;
    m
  in
  List.map strip module_list
(** This function builds the modules from the given list of source files. *)
(* [analyse_files ?init files] analyses every source file of [files] and
   returns the resulting modules, merged and cross-referenced.  [init] is
   a list of already-built modules prepended before analysis.  Per-file
   failures are reported on stderr and counted in [Odoc_global.errors]
   without aborting the whole run. *)
let analyse_files ?(init=[]) files =
  let modules_pre =
    init @
    (List.fold_left
       (fun acc -> fun file ->
         try
           match process_file Format.err_formatter file with
             None ->
               (* The file could not be analysed; the error was already
                  reported by [process_file]. *)
               acc
           | Some m ->
               acc @ [ m ]
         with
           Failure s ->
             prerr_endline s ;
             incr Odoc_global.errors ;
             acc
       )
       []
       files
    )
  in
  (* Remove elements between the stop special comments, if needed. *)
  let modules =
    if !Odoc_global.no_stop then
      modules_pre
    else
      remove_elements_between_stop modules_pre
  in
  if !Odoc_global.verbose then
    (
     print_string Odoc_messages.merging;
     print_newline ()
    );
  let merged_modules = Odoc_merge.merge !Odoc_global.merge_options modules in
  if !Odoc_global.verbose then
    (
     print_string Odoc_messages.ok;
     print_newline ();
    );
  (* Build the full list of modules for cross-referencing: the top-level
     merged modules (used as the fold seed) plus all their submodules. *)
  let modules_list =
    (List.fold_left
       (fun acc -> fun m -> acc @ (Odoc_module.module_all_submodules ~trans: false m))
       merged_modules
       merged_modules
    )
  in
  if !Odoc_global.verbose then
    (
     print_string Odoc_messages.cross_referencing;
     print_newline ()
    );
  (* Associate cross-references; performed for its side effects only. *)
  let _ = Odoc_cross.associate modules_list in
  if !Odoc_global.verbose then
    (
     print_string Odoc_messages.ok;
     print_newline ();
    );
  if !Odoc_global.sort_modules then
    List.sort (fun m1 m2 -> compare m1.Odoc_module.m_name m2.Odoc_module.m_name) merged_modules
  else
    merged_modules
(** [dump_modules file modules] marshals [modules] to [file] in binary
    form (see [load_modules]).  Raises [Failure] with the system message
    if the file cannot be written. *)
let dump_modules file (modules : Odoc_module.t_module list) =
  try
    let chanout = open_out_bin file in
    let dump = Odoc_types.make_dump modules in
    (* Close the channel even if marshalling fails (e.g. [output_value]
       raising [Invalid_argument] on an unmarshalable value), so the
       file descriptor is not leaked; the exception is then re-raised. *)
    (try output_value chanout dump
     with e -> close_out_noerr chanout; raise e);
    close_out chanout
  with
    Sys_error s ->
      raise (Failure s)
(** [load_modules file] reads back a module list previously written by
    [dump_modules].  Raises [Failure] with the system message if the
    file cannot be read. *)
let load_modules file =
  try
    let chanin = open_in_bin file in
    (* Close the channel even when unmarshalling fails (e.g.
       [input_value] raising [End_of_file] or [Failure] on a truncated
       or corrupted dump), so the file descriptor is not leaked. *)
    let dump =
      try input_value chanin
      with e -> close_in_noerr chanin; raise e
    in
    close_in chanin ;
    let (l : Odoc_module.t_module list) = Odoc_types.open_dump dump in
    l
  with
    Sys_error s ->
      raise (Failure s)
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-effects/36008b741adc201bf9b547545344507da603ae31/ocamldoc/odoc_analyse.ml | ocaml | *********************************************************************
OCamldoc
*********************************************************************
* Analysis of source files. This module is strongly inspired from
driver/main.ml :-)
* Return the initial environment in which compilation proceeds.
* Optionally preprocess a source file
* Analysis of an implementation file. Returns (Some typedtree) if
no error occured, else None and an error message is printed.
* Analysis of an interface file. Returns (Some signature) if
no error occured, else None and an error message is printed.
* The module used to analyse the parsetree and signature of an implementation file.
* The module used to analyse the parse tree and typed tree of an interface file.
* Handle an error.
* Remove the class elements between the stop special comments.
* Remove the class elements between the stop special comments in a class kind.
* Remove the class elements beetween the stop special comments in a class type kind.
* Remove the module elements between the stop special comments.
* Remove the module elements between the stop special comments, in the given module kind.
* Remove the module elements between the stop special comment, in the given module type kind.
* Remove elements between the stop special comment.
* This function builds the modules from the given list of source files.
Remove elements between the stop special comments, if needed. | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
let print_DEBUG s = print_string s ; print_newline ()
open Config
open Clflags
open Misc
open Format
open Typedtree
* Initialize the search path .
The current directory is always searched first ,
then the directories specified with the -I option ( in command - line order ) ,
then the standard library directory .
The current directory is always searched first,
then the directories specified with the -I option (in command-line order),
then the standard library directory. *)
let init_path () =
load_path :=
"" :: List.rev (Config.standard_library :: !Clflags.include_dirs);
Env.reset_cache ()
let initial_env () =
let initial =
if !Clflags.unsafe_string then Env.initial_unsafe_string
else Env.initial_safe_string
in
try
if !Clflags.nopervasives then initial else
Env.open_pers_signature "Pervasives" initial
with Not_found ->
fatal_error "cannot open pervasives.cmi"
let preprocess sourcefile =
try
Pparse.preprocess sourcefile
with Pparse.Error err ->
Format.eprintf "Preprocessing error@.%a@."
Pparse.report_error err;
exit 2
let (++) x f = f x
let tool_name = "ocamldoc"
let process_implementation_file ppf sourcefile =
init_path ();
let prefixname = Filename.chop_extension sourcefile in
let modulename = String.capitalize_ascii(Filename.basename prefixname) in
Env.set_unit_name modulename;
let inputfile = preprocess sourcefile in
let env = initial_env () in
try
let parsetree =
Pparse.file ~tool_name Format.err_formatter inputfile
Parse.implementation ast_impl_magic_number
in
let typedtree =
Typemod.type_implementation
sourcefile prefixname modulename env parsetree
in
(Some (parsetree, typedtree), inputfile)
with
e ->
match e with
Syntaxerr.Error err ->
fprintf Format.err_formatter "@[%a@]@."
Syntaxerr.report_error err;
None, inputfile
| Failure s ->
prerr_endline s;
incr Odoc_global.errors ;
None, inputfile
| e ->
raise e
let process_interface_file ppf sourcefile =
init_path ();
let prefixname = Filename.chop_extension sourcefile in
let modulename = String.capitalize_ascii(Filename.basename prefixname) in
Env.set_unit_name modulename;
let inputfile = preprocess sourcefile in
let ast =
Pparse.file ~tool_name Format.err_formatter inputfile
Parse.interface ast_intf_magic_number
in
let sg = Typemod.type_interface (initial_env()) ast in
Warnings.check_fatal ();
(ast, sg, inputfile)
module Ast_analyser = Odoc_ast.Analyser (Odoc_comments.Basic_info_retriever)
module Sig_analyser = Odoc_sig.Analyser (Odoc_comments.Basic_info_retriever)
let process_error exn =
match Location.error_of_exn exn with
| Some err ->
fprintf Format.err_formatter "@[%a@]@." Location.report_error err
| None ->
fprintf Format.err_formatter
"Compilation error(%s). Use the OCaml compiler to get more details.@."
(Printexc.to_string exn)
* Process the given file , according to its extension . Return the Module.t created , if any .
let process_file ppf sourcefile =
if !Odoc_global.verbose then
(
let f = match sourcefile with
Odoc_global.Impl_file f
| Odoc_global.Intf_file f -> f
| Odoc_global.Text_file f -> f
in
print_string (Odoc_messages.analysing f) ;
print_newline ();
);
match sourcefile with
Odoc_global.Impl_file file ->
(
Location.input_name := file;
try
let (parsetree_typedtree_opt, input_file) = process_implementation_file ppf file in
match parsetree_typedtree_opt with
None ->
None
| Some (parsetree, typedtree) ->
let file_module = Ast_analyser.analyse_typed_tree file
!Location.input_name parsetree typedtree
in
file_module.Odoc_module.m_top_deps <- Odoc_dep.impl_dependencies parsetree ;
if !Odoc_global.verbose then
(
print_string Odoc_messages.ok;
print_newline ()
);
Pparse.remove_preprocessed input_file;
Some file_module
with
| Sys_error s
| Failure s ->
prerr_endline s ;
incr Odoc_global.errors ;
None
| e ->
process_error e ;
incr Odoc_global.errors ;
None
)
| Odoc_global.Intf_file file ->
(
Location.input_name := file;
try
let (ast, signat, input_file) = process_interface_file ppf file in
let file_module = Sig_analyser.analyse_signature file
!Location.input_name ast signat.sig_type
in
file_module.Odoc_module.m_top_deps <- Odoc_dep.intf_dependencies ast ;
if !Odoc_global.verbose then
(
print_string Odoc_messages.ok;
print_newline ()
);
Pparse.remove_preprocessed input_file;
Some file_module
with
| Sys_error s
| Failure s ->
prerr_endline s;
incr Odoc_global.errors ;
None
| e ->
process_error e ;
incr Odoc_global.errors ;
None
)
| Odoc_global.Text_file file ->
Location.input_name := file;
try
let mod_name =
let s =
try Filename.chop_extension file
with _ -> file
in
String.capitalize_ascii (Filename.basename s)
in
let txt =
try Odoc_text.Texter.text_of_string (Odoc_misc.input_file_as_string file)
with Odoc_text.Text_syntax (l, c, s) ->
raise (Failure (Odoc_messages.text_parse_error l c s))
in
let m =
{
Odoc_module.m_name = mod_name ;
Odoc_module.m_type = Types.Mty_signature [] ;
Odoc_module.m_info = None ;
Odoc_module.m_is_interface = true ;
Odoc_module.m_file = file ;
Odoc_module.m_kind = Odoc_module.Module_struct
[Odoc_module.Element_module_comment txt] ;
Odoc_module.m_loc =
{ Odoc_types.loc_impl = None ;
Odoc_types.loc_inter = Some (Location.in_file file) } ;
Odoc_module.m_top_deps = [] ;
Odoc_module.m_code = None ;
Odoc_module.m_code_intf = None ;
Odoc_module.m_text_only = true ;
}
in
Some m
with
| Sys_error s
| Failure s ->
prerr_endline s;
incr Odoc_global.errors ;
None
| e ->
process_error e ;
incr Odoc_global.errors ;
None
let rec remove_class_elements_between_stop keep eles =
match eles with
[] -> []
| ele :: q ->
match ele with
Odoc_class.Class_comment [ Odoc_types.Raw "/*" ] ->
remove_class_elements_between_stop (not keep) q
| Odoc_class.Class_attribute _
| Odoc_class.Class_method _
| Odoc_class.Class_comment _ ->
if keep then
ele :: (remove_class_elements_between_stop keep q)
else
remove_class_elements_between_stop keep q
let rec remove_class_elements_between_stop_in_class_kind k =
match k with
Odoc_class.Class_structure (inher, l) ->
Odoc_class.Class_structure (inher, remove_class_elements_between_stop true l)
| Odoc_class.Class_apply _ -> k
| Odoc_class.Class_constr _ -> k
| Odoc_class.Class_constraint (k1, ctk) ->
Odoc_class.Class_constraint (remove_class_elements_between_stop_in_class_kind k1,
remove_class_elements_between_stop_in_class_type_kind ctk)
and remove_class_elements_between_stop_in_class_type_kind tk =
match tk with
Odoc_class.Class_signature (inher, l) ->
Odoc_class.Class_signature (inher, remove_class_elements_between_stop true l)
| Odoc_class.Class_type _ -> tk
let rec remove_module_elements_between_stop keep eles =
let f = remove_module_elements_between_stop in
match eles with
[] -> []
| ele :: q ->
match ele with
Odoc_module.Element_module_comment [ Odoc_types.Raw "/*" ] ->
f (not keep) q
| Odoc_module.Element_module_comment _ ->
if keep then
ele :: (f keep q)
else
f keep q
| Odoc_module.Element_module m ->
if keep then
(
m.Odoc_module.m_kind <- remove_module_elements_between_stop_in_module_kind m.Odoc_module.m_kind ;
(Odoc_module.Element_module m) :: (f keep q)
)
else
f keep q
| Odoc_module.Element_module_type mt ->
if keep then
(
mt.Odoc_module.mt_kind <- Odoc_misc.apply_opt
remove_module_elements_between_stop_in_module_type_kind mt.Odoc_module.mt_kind ;
(Odoc_module.Element_module_type mt) :: (f keep q)
)
else
f keep q
| Odoc_module.Element_included_module _ ->
if keep then
ele :: (f keep q)
else
f keep q
| Odoc_module.Element_class c ->
if keep then
(
c.Odoc_class.cl_kind <- remove_class_elements_between_stop_in_class_kind c.Odoc_class.cl_kind ;
(Odoc_module.Element_class c) :: (f keep q)
)
else
f keep q
| Odoc_module.Element_class_type ct ->
if keep then
(
ct.Odoc_class.clt_kind <- remove_class_elements_between_stop_in_class_type_kind ct.Odoc_class.clt_kind ;
(Odoc_module.Element_class_type ct) :: (f keep q)
)
else
f keep q
| Odoc_module.Element_value _
| Odoc_module.Element_type_extension _
| Odoc_module.Element_exception _
| Odoc_module.Element_type _ ->
if keep then
ele :: (f keep q)
else
f keep q
and remove_module_elements_between_stop_in_module_kind k =
match k with
| Odoc_module.Module_struct l -> Odoc_module.Module_struct (remove_module_elements_between_stop true l)
| Odoc_module.Module_alias _ -> k
| Odoc_module.Module_functor (params, k2) ->
Odoc_module.Module_functor (params, remove_module_elements_between_stop_in_module_kind k2)
| Odoc_module.Module_apply (k1, k2) ->
Odoc_module.Module_apply (remove_module_elements_between_stop_in_module_kind k1,
remove_module_elements_between_stop_in_module_kind k2)
| Odoc_module.Module_with (mtkind, s) ->
Odoc_module.Module_with (remove_module_elements_between_stop_in_module_type_kind mtkind, s)
| Odoc_module.Module_constraint (k2, mtkind) ->
Odoc_module.Module_constraint (remove_module_elements_between_stop_in_module_kind k2,
remove_module_elements_between_stop_in_module_type_kind mtkind)
| Odoc_module.Module_typeof _ -> k
| Odoc_module.Module_unpack _ -> k
and remove_module_elements_between_stop_in_module_type_kind tk =
match tk with
| Odoc_module.Module_type_struct l -> Odoc_module.Module_type_struct (remove_module_elements_between_stop true l)
| Odoc_module.Module_type_functor (params, tk2) ->
Odoc_module.Module_type_functor (params, remove_module_elements_between_stop_in_module_type_kind tk2)
| Odoc_module.Module_type_alias _ -> tk
| Odoc_module.Module_type_with (tk2, s) ->
Odoc_module.Module_type_with (remove_module_elements_between_stop_in_module_type_kind tk2, s)
| Odoc_module.Module_type_typeof _ -> tk
let remove_elements_between_stop module_list =
List.map
(fun m ->
m.Odoc_module.m_kind <- remove_module_elements_between_stop_in_module_kind m.Odoc_module.m_kind;
m
)
module_list
let analyse_files ?(init=[]) files =
let modules_pre =
init @
(List.fold_left
(fun acc -> fun file ->
try
match process_file Format.err_formatter file with
None ->
acc
| Some m ->
acc @ [ m ]
with
Failure s ->
prerr_endline s ;
incr Odoc_global.errors ;
acc
)
[]
files
)
in
let modules =
if !Odoc_global.no_stop then
modules_pre
else
remove_elements_between_stop modules_pre
in
if !Odoc_global.verbose then
(
print_string Odoc_messages.merging;
print_newline ()
);
let merged_modules = Odoc_merge.merge !Odoc_global.merge_options modules in
if !Odoc_global.verbose then
(
print_string Odoc_messages.ok;
print_newline ();
);
let modules_list =
(List.fold_left
(fun acc -> fun m -> acc @ (Odoc_module.module_all_submodules ~trans: false m))
merged_modules
merged_modules
)
in
if !Odoc_global.verbose then
(
print_string Odoc_messages.cross_referencing;
print_newline ()
);
let _ = Odoc_cross.associate modules_list in
if !Odoc_global.verbose then
(
print_string Odoc_messages.ok;
print_newline ();
);
if !Odoc_global.sort_modules then
List.sort (fun m1 m2 -> compare m1.Odoc_module.m_name m2.Odoc_module.m_name) merged_modules
else
merged_modules
let dump_modules file (modules : Odoc_module.t_module list) =
try
let chanout = open_out_bin file in
let dump = Odoc_types.make_dump modules in
output_value chanout dump;
close_out chanout
with
Sys_error s ->
raise (Failure s)
let load_modules file =
try
let chanin = open_in_bin file in
let dump = input_value chanin in
close_in chanin ;
let (l : Odoc_module.t_module list) = Odoc_types.open_dump dump in
l
with
Sys_error s ->
raise (Failure s)
|
874d23b1c9b225a552361acc513291952e967f4c36d270394c2028f751da1b6f | rtoy/cmucl | ctak.lisp | (in-package "USER")
CTAK -- A version of the TAKeuchi function that uses the CATCH / THROW facility .
(defun ctak (x y z)
(declare (fixnum x y z))
(catch 'ctak (ctak-aux x y z)))
(defun ctak-aux (x y z)
(declare (fixnum x y z))
(cond ((not (< y x))
(throw 'ctak z))
(t (ctak-aux
(catch 'ctak
(ctak-aux (the fixnum (1- x))
y
z))
(catch 'ctak
(ctak-aux (the fixnum (1- y))
z
x))
(catch 'ctak
(ctak-aux (the fixnum (1- z))
x
y))))))
(defun time-ctak ()
(time (ctak 18 12 6)))
| null | https://raw.githubusercontent.com/rtoy/cmucl/9b1abca53598f03a5b39ded4185471a5b8777dea/src/benchmarks/gabriel/ctak.lisp | lisp | (in-package "USER")
CTAK -- A version of the TAKeuchi function that uses the CATCH / THROW facility .
(defun ctak (x y z)
(declare (fixnum x y z))
(catch 'ctak (ctak-aux x y z)))
(defun ctak-aux (x y z)
(declare (fixnum x y z))
(cond ((not (< y x))
(throw 'ctak z))
(t (ctak-aux
(catch 'ctak
(ctak-aux (the fixnum (1- x))
y
z))
(catch 'ctak
(ctak-aux (the fixnum (1- y))
z
x))
(catch 'ctak
(ctak-aux (the fixnum (1- z))
x
y))))))
(defun time-ctak ()
(time (ctak 18 12 6)))
|
|
2588f014544189a532b38ed09eae7e667ba3f25383e936ee40e06efa3d4a7191 | jkk/formative | bootstrap.cljc | (ns formative.render.bootstrap
(:require [formative.render :refer [render-form render-field
render-problems]]
[formative.util :as util]))
(defn render-bootstrap-row [field]
(let [field-id (util/get-field-id field)
field (assoc field :id field-id)
field (if (= :submit (:type field))
(assoc field :class (str (:class field)
" btn btn-primary"))
field)]
[:div {:id (util/get-field-container-id field)
:class (str (if (= :submit (:type field))
"form-actions submit-group "
"field-group ")
(when-not (#{:heading :html} (:type field))
" control-group ")
(name (:type field :text)) "-row"
(when (:problem field) " error problem"))}
(if (= :heading (:type field))
[:legend (render-field field)]
(list
[:div {:class (if (#{:checkbox :submit} (:type field))
"empty-shell"
"label-shell")}
(when (and (not (#{:checkbox} (:type field))) (:label field))
[:label.control-label {:for field-id}
(:label field)])]
[:div {:class (str "input-shell" (when-not (#{:submit :html} (:type field))
" controls"))}
(when (:prefix field)
[:span.prefix (:prefix field)])
(if (= :checkbox (:type field))
[:label.checkbox {:for field-id} " "
(render-field field) " "
[:span.cb-label (:label field)]]
(render-field field))
(when (:suffix field)
[:span.suffix (:suffix field)])
(when (and (= :submit (:type field))
(:cancel-href field))
[:span.cancel-link " " [:a.btn {:href (:cancel-href field)}
(:cancel-label field)]])
(when (:note field)
[:div.note.help-inline (:note field)])]))]))
(defn- group-fieldsets [fields]
(loop [ret []
group []
fields fields]
(if (empty? fields)
(if (seq group)
(conj ret group)
ret)
(if (#{:heading :submit} (:type (first fields)))
(recur (if (seq group) (conj ret group) ret)
[(first fields)]
(rest fields))
(recur ret
(conj group (first fields))
(rest fields))))))
(defn render-bootstrap-form [form-attrs fields class opts]
(let [[hidden-fields visible-fields] ((juxt filter remove)
#(= :hidden (:type %)) fields)
submit-only? (and (= 1 (count visible-fields))
(= :submit (:type (first visible-fields))))
shell-attrs {:class (str class
" bootstrap-form"
(when submit-only? " submit-only"))}
shell-attrs (if (:id form-attrs)
(assoc shell-attrs :id (str (name (:id form-attrs))
"-shell"))
shell-attrs)]
[:div shell-attrs
(when-let [problems (:problems opts)]
(when (map? (first problems))
(render-problems problems fields)))
[:form (dissoc form-attrs :renderer)
(list
(map render-field hidden-fields)
(for [fieldset (group-fieldsets visible-fields)]
[:fieldset {:class (str "fieldset-" (name (:name (first fieldset))))}
(map render-bootstrap-row fieldset)]))]]))
(defmethod render-form :bootstrap-horizontal [form-attrs fields opts]
(render-bootstrap-form form-attrs fields "form-shell form-horizontal" opts))
(defmethod render-form :bootstrap-stacked [form-attrs fields opts]
(render-bootstrap-form form-attrs fields "form-shell" opts))
| null | https://raw.githubusercontent.com/jkk/formative/e1b69161c05438a48d3186bd2eb5126377ffe2e3/src/formative/render/bootstrap.cljc | clojure | (ns formative.render.bootstrap
(:require [formative.render :refer [render-form render-field
render-problems]]
[formative.util :as util]))
(defn render-bootstrap-row [field]
(let [field-id (util/get-field-id field)
field (assoc field :id field-id)
field (if (= :submit (:type field))
(assoc field :class (str (:class field)
" btn btn-primary"))
field)]
[:div {:id (util/get-field-container-id field)
:class (str (if (= :submit (:type field))
"form-actions submit-group "
"field-group ")
(when-not (#{:heading :html} (:type field))
" control-group ")
(name (:type field :text)) "-row"
(when (:problem field) " error problem"))}
(if (= :heading (:type field))
[:legend (render-field field)]
(list
[:div {:class (if (#{:checkbox :submit} (:type field))
"empty-shell"
"label-shell")}
(when (and (not (#{:checkbox} (:type field))) (:label field))
[:label.control-label {:for field-id}
(:label field)])]
[:div {:class (str "input-shell" (when-not (#{:submit :html} (:type field))
" controls"))}
(when (:prefix field)
[:span.prefix (:prefix field)])
(if (= :checkbox (:type field))
[:label.checkbox {:for field-id} " "
(render-field field) " "
[:span.cb-label (:label field)]]
(render-field field))
(when (:suffix field)
[:span.suffix (:suffix field)])
(when (and (= :submit (:type field))
(:cancel-href field))
[:span.cancel-link " " [:a.btn {:href (:cancel-href field)}
(:cancel-label field)]])
(when (:note field)
[:div.note.help-inline (:note field)])]))]))
(defn- group-fieldsets [fields]
(loop [ret []
group []
fields fields]
(if (empty? fields)
(if (seq group)
(conj ret group)
ret)
(if (#{:heading :submit} (:type (first fields)))
(recur (if (seq group) (conj ret group) ret)
[(first fields)]
(rest fields))
(recur ret
(conj group (first fields))
(rest fields))))))
(defn render-bootstrap-form [form-attrs fields class opts]
(let [[hidden-fields visible-fields] ((juxt filter remove)
#(= :hidden (:type %)) fields)
submit-only? (and (= 1 (count visible-fields))
(= :submit (:type (first visible-fields))))
shell-attrs {:class (str class
" bootstrap-form"
(when submit-only? " submit-only"))}
shell-attrs (if (:id form-attrs)
(assoc shell-attrs :id (str (name (:id form-attrs))
"-shell"))
shell-attrs)]
[:div shell-attrs
(when-let [problems (:problems opts)]
(when (map? (first problems))
(render-problems problems fields)))
[:form (dissoc form-attrs :renderer)
(list
(map render-field hidden-fields)
(for [fieldset (group-fieldsets visible-fields)]
[:fieldset {:class (str "fieldset-" (name (:name (first fieldset))))}
(map render-bootstrap-row fieldset)]))]]))
(defmethod render-form :bootstrap-horizontal [form-attrs fields opts]
(render-bootstrap-form form-attrs fields "form-shell form-horizontal" opts))
(defmethod render-form :bootstrap-stacked [form-attrs fields opts]
(render-bootstrap-form form-attrs fields "form-shell" opts))
|
|
3366216f1e56cf11577fd8a87f4d2d03bda94237009b1de44c753969d5eaf2ce | softwarelanguageslab/maf | R5RS_gambit_cat-4.scm | ; Changes:
* removed : 0
* added : 1
* swaps : 1
; * negated predicates: 0
; * swapped branches: 0
* calls to i d fun : 1
(letrec ((inport #f)
(outport #f)
(catport (lambda (port)
(<change>
()
outport)
(let ((x (read-char port)))
(if (eof-object? x)
(close-output-port outport)
(begin
(write-char x outport)
(catport port))))))
(go (lambda ()
(<change>
(set! inport (open-input-file "input.txt"))
(set! outport (open-output-file "output.txt")))
(<change>
(set! outport (open-output-file "output.txt"))
(set! inport (open-input-file "input.txt")))
(<change>
(catport inport)
((lambda (x) x) (catport inport)))
(close-input-port inport))))
(go)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_gambit_cat-4.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0 | * removed : 0
* added : 1
* swaps : 1
* calls to i d fun : 1
(letrec ((inport #f)
(outport #f)
(catport (lambda (port)
(<change>
()
outport)
(let ((x (read-char port)))
(if (eof-object? x)
(close-output-port outport)
(begin
(write-char x outport)
(catport port))))))
(go (lambda ()
(<change>
(set! inport (open-input-file "input.txt"))
(set! outport (open-output-file "output.txt")))
(<change>
(set! outport (open-output-file "output.txt"))
(set! inport (open-input-file "input.txt")))
(<change>
(catport inport)
((lambda (x) x) (catport inport)))
(close-input-port inport))))
(go)) |
53d11b9b067f2468a15ff8b7ab4006a705809a18ae86d77e63d0cff6980aed0c | 8c6794b6/guile-tjit | eval.scm | ;;; eval.scm --- The R6RS `eval' library
Copyright ( C ) 2010 Free Software Foundation , Inc.
;;
;; This library is free software; you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;
;; This library is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; Lesser General Public License for more details.
;;
You should have received a copy of the GNU Lesser General Public
;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
(library (rnrs eval (6))
(export eval environment)
(import (only (guile) eval
make-module
module-uses
beautify-user-module!
set-module-uses!)
(rnrs base (6))
(rnrs io simple (6))
(rnrs lists (6)))
(define (environment . import-specs)
(let ((module (make-module))
(needs-purify? (not (member '(guile) import-specs))))
(beautify-user-module! module)
(for-each (lambda (import-spec) (eval (list 'import import-spec) module))
import-specs)
(if needs-purify? (set-module-uses! module (cdr (module-uses module))))
module))
)
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/module/rnrs/eval.scm | scheme | eval.scm --- The R6RS `eval' library
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software |
Copyright ( C ) 2010 Free Software Foundation , Inc.
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
(library (rnrs eval (6))
(export eval environment)
(import (only (guile) eval
make-module
module-uses
beautify-user-module!
set-module-uses!)
(rnrs base (6))
(rnrs io simple (6))
(rnrs lists (6)))
(define (environment . import-specs)
(let ((module (make-module))
(needs-purify? (not (member '(guile) import-specs))))
(beautify-user-module! module)
(for-each (lambda (import-spec) (eval (list 'import import-spec) module))
import-specs)
(if needs-purify? (set-module-uses! module (cdr (module-uses module))))
module))
)
|
6c938e82df6d8cf6cbc847588fcc39ee5261560a25c9af51054d2a22ddaa6444 | zenspider/schemers | exercise.3.30.scm | #lang racket/base
(require "../lib/queue.scm")
(require "../lib/circuits.scm")
(require "../lib/test.rkt")
(require (only-in srfi/1 fold))
Exercise 3.30
* Note Figure 3 - 27 : : shows a " ripple - carry adder "
;; formed by stringing together n full-adders. This is the simplest
form of parallel adder for adding two n - bit binary numbers . The
inputs A_1 , A_2 , A_3 , ... , A_n and , B_2 , B_3 , ... , B_n are the
two binary numbers to be added ( each A_k and B_k is a 0 or a 1 ) .
The circuit generates S_1 , S_2 , S_3 , ... , S_n , the n bits of the
;; sum, and C, the carry from the addition. Write a procedure
;; `ripple-carry-adder' that generates this circuit. The procedure
should take as arguments three lists of n wires each -- the A_k , the
B_k , and the S_k -- and also another wire C. The major drawback of
;; the ripple-carry adder is the need to wait for the carry signals
;; to propagate.
;;
* Figure 3.27 :* A ripple - carry adder for n - bit numbers .
;;
;; : : :
: A_1 B_1 C_1 A_2 B_2 C_2 A_3 B_3 C_3 : : A_n B_n C_n=0
;; : | | +---+ | | +---+ | | +----- : | | +-
;; | | | | | | | | | | | | : : | | |
;; : ++---+---++ | ++---+---++ | ++---+---++ : : ++---+---++
;; : | FA | | | FA | | | FA | : : | FA |
;; : +--+---+--+ | +--+---+--+ | +--+---+--+ : : +--+---+--+
;; : | | | | | | | | : : | |
;; C ------+ | +-----+ | +-----+ | : ------+ |
;; : | C_1 | C_2 | : :C_(n-1) |
;; : | | | : : |
;; S_1 S_2 S_3 S_n
;; in ruby:
;;
a_s.zip(b_s , s_s).inject(c ) { |c_in , ( a , b ,
;; c_out = make_wire
full_adder(a , b , c_in , s , c_out )
;; c_out
;; }
;; (listof Wire) (listof Wire) (listof Wire) Wire -> Wire
;; Wire up an n-bit ripple-carry adder over the A, B and S wire lists,
;; threading the carry wire c through the chain of full-adders.
(define (ripple-carry-adder a-s b-s s-s c)
  ;; fold's accumulator is the carry-in of the current stage; the fresh
  ;; c-out wire made here becomes the carry-in of the next stage, and the
  ;; wire returned from the whole fold is the final carry.
  (fold (lambda (a b s c-in)
          (let ((c-out (make-wire)))
            ;; NOTE(review): the argument order (a c-in b s c-out) assumes
            ;; the full-adder signature in ../lib/circuits.scm — confirm
            ;; against that library.
            (full-adder a c-in b s c-out)
            c-out))
        c a-s b-s s-s))
;; What is the delay needed to obtain the complete output from an
;; n-bit ripple-carry adder, expressed in terms of the delays for
;; and-gates, or-gates, and inverters?
;; A: full adder cost is 2 * half adder + or
;; half adder is (and or) + not + and
;; so ripple would be: n * (2 * ((and or) + not + and) + or)
;; or 2n * (and or) + 2n * not + 2n * and + n * or
(test-group "3.3.0"
(define a1 (make-wire))
(define a2 (make-wire))
(define b1 (make-wire))
(define b2 (make-wire))
(define s1 (make-wire))
(define s2 (make-wire))
(define c (make-wire))
(define A (list a1 a2))
(define B (list b1 b2))
(define S (list s1 s2))
(set-signal! (ripple-carry-adder A B S c) 0)
(test-group "01 + 00 = 001"
(set-signal! a1 1)
(propagate)
(test 1 (get-signal s1))
(test 0 (get-signal s2))
(test 0 (get-signal c)))
(test-group "11 + 00 = 011"
(set-signal! a2 1)
(propagate)
(test 1 (get-signal s1))
(test 1 (get-signal s2))
(test 0 (get-signal c)))
(test-group "11 + 11 = 110"
(set-signal! b1 1)
(set-signal! b2 1)
(propagate)
(test 0 (get-signal s1))
(test 1 (get-signal s2))
(test 1 (get-signal c))))
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_3/exercise.3.30.scm | scheme | formed by stringing together n full-adders. This is the simplest
sum, and C, the carry from the addition. Write a procedure
`ripple-carry-adder' that generates this circuit. The procedure
the ripple-carry adder is the need to wait for the carry signals
to propagate.
: : :
: | | +---+ | | +---+ | | +----- : | | +-
| | | | | | | | | | | | : : | | |
: ++---+---++ | ++---+---++ | ++---+---++ : : ++---+---++
: | FA | | | FA | | | FA | : : | FA |
: +--+---+--+ | +--+---+--+ | +--+---+--+ : : +--+---+--+
: | | | | | | | | : : | |
C ------+ | +-----+ | +-----+ | : ------+ |
: | C_1 | C_2 | : :C_(n-1) |
: | | | : : |
S_1 S_2 S_3 S_n
in ruby:
c_out = make_wire
c_out
}
What is the delay needed to obtain the complete output from an
n-bit ripple-carry adder, expressed in terms of the delays for
and-gates, or-gates, and inverters? | #lang racket/base
(require "../lib/queue.scm")
(require "../lib/circuits.scm")
(require "../lib/test.rkt")
(require (only-in srfi/1 fold))
Exercise 3.30
* Note Figure 3 - 27 : : shows a " ripple - carry adder "
form of parallel adder for adding two n - bit binary numbers . The
inputs A_1 , A_2 , A_3 , ... , A_n and , B_2 , B_3 , ... , B_n are the
two binary numbers to be added ( each A_k and B_k is a 0 or a 1 ) .
The circuit generates S_1 , S_2 , S_3 , ... , S_n , the n bits of the
should take as arguments three lists of n wires each -- the A_k , the
B_k , and the S_k -- and also another wire C. The major drawback of
* Figure 3.27 :* A ripple - carry adder for n - bit numbers .
: A_1 B_1 C_1 A_2 B_2 C_2 A_3 B_3 C_3 : : A_n B_n C_n=0
a_s.zip(b_s , s_s).inject(c ) { |c_in , ( a , b ,
full_adder(a , b , c_in , s , c_out )
(define (ripple-carry-adder a-s b-s s-s c)
(fold (lambda (a b s c-in)
(let ((c-out (make-wire)))
(full-adder a c-in b s c-out)
c-out))
c a-s b-s s-s))
A : full adder cost is 2 * half adder + or
half adder is ( and or ) + not + and
so ripple would be : n * ( 2 * ( ( and or ) + not + and ) + or )
or 2n * ( and or ) + 2n * not + 2n * and + n * or
(test-group "3.3.0"
(define a1 (make-wire))
(define a2 (make-wire))
(define b1 (make-wire))
(define b2 (make-wire))
(define s1 (make-wire))
(define s2 (make-wire))
(define c (make-wire))
(define A (list a1 a2))
(define B (list b1 b2))
(define S (list s1 s2))
(set-signal! (ripple-carry-adder A B S c) 0)
(test-group "01 + 00 = 001"
(set-signal! a1 1)
(propagate)
(test 1 (get-signal s1))
(test 0 (get-signal s2))
(test 0 (get-signal c)))
(test-group "11 + 00 = 011"
(set-signal! a2 1)
(propagate)
(test 1 (get-signal s1))
(test 1 (get-signal s2))
(test 0 (get-signal c)))
(test-group "11 + 11 = 110"
(set-signal! b1 1)
(set-signal! b2 1)
(propagate)
(test 0 (get-signal s1))
(test 1 (get-signal s2))
(test 1 (get-signal c))))
|
779ebbd947bfa3112ab13feae5c7df32145232e8d151249c025181162bec004d | basho/riak_cs | riak_cs_json.erl | %% ---------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
@doc A collection functions for going to or from JSON to an erlang
%% record type.
-module(riak_cs_json).
-include("riak_cs.hrl").
-include("list_objects.hrl").
-include("oos_api.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% Public API
-export([from_json/1,
get/2,
to_json/1,
value_or_default/2]).
-type attributes() :: [{atom(), string()}].
-type external_node() :: {atom(), [string()]}.
-type internal_node() :: {atom(), [internal_node() | external_node()]} |
{atom(), attributes(), [internal_node() | external_node()]}.
%% ===================================================================
%% Public API
%% ===================================================================
%% @doc Decode a JSON document with mochijson2.  Returns the decoded
%% term (a {struct, _} object or a list) or {error, decode_failed} if
%% the document cannot be parsed.
-spec from_json(string()) -> {struct, term()} | [term()] | {error, decode_failed}.
from_json(JsonString) ->
    %% The original used the old-style `case catch ...`, which conflates
    %% thrown values with errors/exits and loses the stacktrace.  This
    %% try expression keeps the same observable behaviour: errors and
    %% exits (old 'EXIT' tuples) map to {error, decode_failed}, while a
    %% value thrown by the decoder is returned as-is, exactly as `catch`
    %% would have returned it.
    try
        mochijson2:decode(JsonString)
    catch
        throw:{'EXIT', _} ->
            {error, decode_failed};
        throw:Thrown ->
            Thrown;
        _:_ ->
            {error, decode_failed}
    end.
-type match_spec() :: {index, non_neg_integer()} | {key, binary(), binary()}.
-type path_query() :: {find, match_spec()}.
-type path() :: [binary() | tuple() | path_query()].
%% @doc Walk a decoded JSON term along `Path'.  A path element may be a
%% binary key, a tuple of keys (multi-value lookup), or a {find, MatchSpec}
%% query that searches an array.  Returns {ok, Value} or {error, Reason}.
-spec get({struct, term()} | [term()] | undefined, path()) -> term().
%% JSON object: delegate the whole path to follow_path/2.
get({struct, _}=Object, Path) ->
    follow_path(Object, Path);
%% JSON array: a find-query selects an element, then the rest of the
%% path is followed from there.
get(Array, [{find, Query} | RestPath]) when is_list(Array) ->
    follow_path(find(Array, Query), RestPath);
%% A find-query applied to a non-array value is a path error.
get(_Array, [{find, _Query} | _RestPath]) ->
    {error, invalid_path};
%% Lookup misses bubbled up from find/2 and proplists lookups.
get(undefined, _) ->
    {error, not_found};
get(not_found, _) ->
    {error, not_found}.
%% @doc Encode a Keystone auth request, a Riak CS user, or a {users, _}
%% list of users as a JSON binary via mochijson2.
%% NOTE: the undefined/[] clauses return the empty list rather than a
%% binary, so the spec now admits both result shapes (the previous spec
%% claimed binary() only and did not match these clauses).
-spec to_json(term()) -> binary() | [].
to_json(?KEYSTONE_S3_AUTH_REQ{}=Req) ->
    %% Wrap access/signature/token in a "credentials" object.
    Inner = {struct, [{<<"access">>, Req?KEYSTONE_S3_AUTH_REQ.access},
                      {<<"signature">>, Req?KEYSTONE_S3_AUTH_REQ.signature},
                      {<<"token">>, Req?KEYSTONE_S3_AUTH_REQ.token}]},
    iolist_to_binary(mochijson2:encode({struct, [{<<"credentials">>, Inner}]}));
to_json(?RCS_USER{}=Req) ->
    iolist_to_binary(mochijson2:encode(user_object(Req)));
to_json({users, Users}) ->
    UserList = [user_object(User) || User <- Users],
    iolist_to_binary(mochijson2:encode(UserList));
to_json(undefined) ->
    [];
to_json([]) ->
    [].
%% @doc Unwrap an {ok, Value} result; on {error, Reason} log the reason
%% at debug level and fall back to the supplied default.
-spec value_or_default({ok, term()} | {error, term()}, term()) -> term().
value_or_default({ok, Value}, _Default) ->
    Value;
value_or_default({error, Reason}, Default) ->
    _ = lager:debug("JSON error: ~p", [Reason]),
    Default.
%% ===================================================================
Internal functions
%% ===================================================================
%% @doc Recursive worker behind get/2: walk the decoded JSON term along
%% the remaining path elements, wrapping the final value as {ok, Value}.
%% The spec now also admits {error, invalid_path}, which the fourth
%% clause can produce by re-entering get/2 (the previous spec omitted it).
-spec follow_path(tuple() | [term()] | undefined, path()) ->
        {ok, term()} | {error, not_found | invalid_path}.
follow_path(undefined, _) ->
    {error, not_found};
follow_path(Value, []) ->
    {ok, Value};
follow_path(JsonItems, [{find, Query}]) ->
    follow_path(find(JsonItems, Query), []);
%% More path remains after a find-query: hand back to get/2, which knows
%% how to dispatch on whatever shape the query selected.
follow_path(JsonItems, [{find, Query} | RestPath]) ->
    get(find(JsonItems, Query), RestPath);
%% A tuple path element selects several keys of one object at once.
follow_path({struct, JsonItems}, [Key]) when is_tuple(Key) ->
    follow_path(target_tuple_values(Key, JsonItems), []);
follow_path({struct, JsonItems}, [Key]) ->
    follow_path(proplists:get_value(Key, JsonItems), []);
follow_path({struct, JsonItems}, [Key | RestPath]) ->
    Value = proplists:get_value(Key, JsonItems),
    follow_path(Value, RestPath).
%% @doc Search `Array' (a decoded JSON array) according to the match spec:
%%   {key, Key, Value} — return a {struct, _} element whose first entry
%%     for `Key' (via lists:keyfind) equals `Value'; if several elements
%%     match, the fold keeps replacing its accumulator, so the LAST match
%%     wins; `not_found' when nothing matches.
%%   {index, N}        — return the N-th (1-based) element, or `undefined'
%%     when N is out of range.
%% NOTE: the previous spec claimed `undefined | {struct, term()}', but the
%% key search yields `not_found' on a miss and the index lookup can yield
%% any element term; get/2 and follow_path/2 handle all of these.
-spec find([term()], match_spec()) -> undefined | not_found | term().
find(Array, {key, Key, Value}) ->
    lists:foldl(key_folder(Key, Value), not_found, Array);
find(Array, {index, Index}) when Index =< length(Array) ->
    lists:nth(Index, Array);
find(_, {index, _}) ->
    undefined.

%% @doc Fold fun for find/2: keep a JSON object whose `Key' field equals
%% `Value'; elements that are not {struct, _} objects are skipped.
key_folder(Key, Value) ->
    fun({struct, Items}=X, Acc) ->
            case lists:keyfind(Key, 1, Items) of
                {Key, Value} ->
                    X;
                _ ->
                    Acc
            end;
       (_, Acc) ->
            Acc
    end.
%% @doc Look up each key of the `Keys' tuple in the proplist and return
%% the values as a tuple in the same order (`undefined' for missing keys).
-spec target_tuple_values(tuple(), proplists:proplist()) -> tuple().
target_tuple_values(Keys, JsonItems) ->
    Values = [proplists:get_value(Key, JsonItems) || Key <- tuple_to_list(Keys)],
    list_to_tuple(Values).
%% @doc Build the mochijson2 {struct, Proplist} representation of a user
%% record.  The field order here is fixed and becomes the key order of
%% the emitted JSON object, so it must not be reordered.
-spec user_object(rcs_user()) -> {struct, proplists:proplist()}.
user_object(?RCS_USER{email=Email,
                      display_name=DisplayName,
                      name=Name,
                      key_id=KeyID,
                      key_secret=KeySecret,
                      canonical_id=CanonicalID,
                      status=Status}) ->
    %% any status other than the atom 'enabled' is reported as disabled
    StatusBin = case Status of
                    enabled ->
                        <<"enabled">>;
                    _ ->
                        <<"disabled">>
                end,
    UserData = [{email, list_to_binary(Email)},
                {display_name, list_to_binary(DisplayName)},
                {name, list_to_binary(Name)},
                {key_id, list_to_binary(KeyID)},
                {key_secret, list_to_binary(KeySecret)},
                {id, list_to_binary(CanonicalID)},
                {status, StatusBin}],
    {struct, UserData}.
%% ===================================================================
Eunit tests
%% ===================================================================
-ifdef(TEST).
%% Fetch a single key from a flat object and from a nested object,
%% including the not-found paths.
get_single_key_test() ->
    Object1 = "{\"abc\":\"123\", \"def\":\"456\", \"ghi\":\"789\"}",
    Object2 = "{\"test\":{\"abc\":\"123\", \"def\":\"456\", \"ghi\":\"789\"}}",
    ?assertEqual({ok, <<"123">>}, get(from_json(Object1), [<<"abc">>])),
    ?assertEqual({ok, <<"456">>}, get(from_json(Object1), [<<"def">>])),
    ?assertEqual({ok, <<"789">>}, get(from_json(Object1), [<<"ghi">>])),
    ?assertEqual({ok, <<"123">>}, get(from_json(Object2), [<<"test">>, <<"abc">>])),
    ?assertEqual({ok, <<"456">>}, get(from_json(Object2), [<<"test">>, <<"def">>])),
    ?assertEqual({ok, <<"789">>}, get(from_json(Object2), [<<"test">>, <<"ghi">>])),
    ?assertEqual({error, not_found}, get(from_json(Object1), [<<"zzz">>])),
    ?assertEqual({error, not_found}, get(from_json(Object2), [<<"test">>, <<"zzz">>])).
%% Index-based lookups into a JSON array: 1-based indexing, and an
%% out-of-range index reports not_found.
get_array_test() ->
    Array = "[\"abc\", \"123\", \"def\", \"456\", 7]",
    ?assertEqual({ok, <<"abc">>}, get(from_json(Array), [{find, {index, 1}}])),
    ?assertEqual({ok, <<"123">>}, get(from_json(Array), [{find, {index, 2}}])),
    ?assertEqual({ok, <<"def">>}, get(from_json(Array), [{find, {index, 3}}])),
    ?assertEqual({ok, <<"456">>}, get(from_json(Array), [{find, {index, 4}}])),
    ?assertEqual({ok, 7}, get(from_json(Array), [{find, {index, 5}}])),
    ?assertEqual({error, not_found}, get(from_json(Array), [{find, {index, 6}}])).
%% Tuple path elements select several keys of one object at once and the
%% values come back as a tuple in the same order.
get_multi_key_test() ->
    Object1 = "{\"test\":{\"abc\":\"123\", \"def\":\"456\", \"ghi\":\"789\"}}",
    Object2 = "{\"test\":{\"abc\":{\"123\":123,\"456\":456,\"789\":789},\"def\""
        ":{\"123\":123,\"456\":456,\"789\":789},\"ghi\":{\"123\":123,\"456\""
        ":456,\"789\":789}}}",
    ?assertEqual({ok, {<<"123">>, <<"789">>}}, get(from_json(Object1), [<<"test">>, {<<"abc">>, <<"ghi">>}])),
    ?assertEqual({ok, {123, 789}}, get(from_json(Object2), [<<"test">>, <<"abc">>, {<<"123">>, <<"789">>}])),
    ?assertEqual({ok, {123, 789}}, get(from_json(Object2), [<<"test">>, <<"def">>, {<<"123">>, <<"789">>}])),
    ?assertEqual({ok, {123, 789}}, get(from_json(Object2), [<<"test">>, <<"ghi">>, {<<"123">>, <<"789">>}])).
%% key-based find queries locate an object inside an array by any of its
%% fields; each lookup returns the whole matching object.
get_embedded_key_from_array_test() ->
    Object = "{\"test\":{\"objects\":[{\"key1\":\"a1\",\"key2\":\"a2\",\"key3\""
        ":\"a3\"},{\"key1\":\"b1\",\"key2\":\"b2\",\"key3\":\"b3\"},{\"key1\""
        ":\"c1\",\"key2\":\"c2\",\"key3\":\"c3\"}]}}",
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"a1">>}, {<<"key2">>, <<"a2">>}, {<<"key3">>, <<"a3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key1">>, <<"a1">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"a1">>}, {<<"key2">>, <<"a2">>}, {<<"key3">>, <<"a3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key2">>, <<"a2">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"a1">>}, {<<"key2">>, <<"a2">>}, {<<"key3">>, <<"a3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key3">>, <<"a3">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"b1">>}, {<<"key2">>, <<"b2">>}, {<<"key3">>, <<"b3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key1">>, <<"b1">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"b1">>}, {<<"key2">>, <<"b2">>}, {<<"key3">>, <<"b3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key2">>, <<"b2">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"b1">>}, {<<"key2">>, <<"b2">>}, {<<"key3">>, <<"b3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key3">>, <<"b3">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"c1">>}, {<<"key2">>, <<"c2">>}, {<<"key3">>, <<"c3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key1">>, <<"c1">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"c1">>}, {<<"key2">>, <<"c2">>}, {<<"key3">>, <<"c3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key2">>, <<"c2">>}}])),
    ?assertEqual({ok, {struct, [{<<"key1">>, <<"c1">>}, {<<"key2">>, <<"c2">>}, {<<"key3">>, <<"c3">>}]}},
                 get(from_json(Object),
                     [<<"test">>, <<"objects">>, {find, {key, <<"key3">>, <<"c3">>}}])).
-endif.
| null | https://raw.githubusercontent.com/basho/riak_cs/c0c1012d1c9c691c74c8c5d9f69d388f5047bcd2/src/riak_cs_json.erl | erlang | ---------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------------
record type.
Public API
===================================================================
Public API
===================================================================
===================================================================
===================================================================
===================================================================
=================================================================== | Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc A collection functions for going to or from JSON to an erlang
-module(riak_cs_json).
-include("riak_cs.hrl").
-include("list_objects.hrl").
-include("oos_api.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([from_json/1,
get/2,
to_json/1,
value_or_default/2]).
-type attributes() :: [{atom(), string()}].
-type external_node() :: {atom(), [string()]}.
-type internal_node() :: {atom(), [internal_node() | external_node()]} |
{atom(), attributes(), [internal_node() | external_node()]}.
-spec from_json(string()) -> {struct, term()} | [term()] | {error, decode_failed}.
from_json(JsonString) ->
case catch mochijson2:decode(JsonString) of
{'EXIT', _} ->
{error, decode_failed};
Result ->
Result
end.
-type match_spec() :: {index, non_neg_integer()} | {key, binary(), binary()}.
-type path_query() :: {find, match_spec()}.
-type path() :: [binary() | tuple() | path_query()].
-spec get({struct, term()} | [term()] | undefined, path()) -> term().
get({struct, _}=Object, Path) ->
follow_path(Object, Path);
get(Array, [{find, Query} | RestPath]) when is_list(Array) ->
follow_path(find(Array, Query), RestPath);
get(_Array, [{find, _Query} | _RestPath]) ->
{error, invalid_path};
get(undefined, _) ->
{error, not_found};
get(not_found, _) ->
{error, not_found}.
-spec to_json(term()) -> binary().
to_json(?KEYSTONE_S3_AUTH_REQ{}=Req) ->
Inner = {struct, [{<<"access">>, Req?KEYSTONE_S3_AUTH_REQ.access},
{<<"signature">>, Req?KEYSTONE_S3_AUTH_REQ.signature},
{<<"token">>, Req?KEYSTONE_S3_AUTH_REQ.token}]},
iolist_to_binary(mochijson2:encode({struct, [{<<"credentials">>, Inner}]}));
to_json(?RCS_USER{}=Req) ->
iolist_to_binary(mochijson2:encode(user_object(Req)));
to_json({users, Users}) ->
UserList = [user_object(User) || User <- Users],
iolist_to_binary(mochijson2:encode(UserList));
to_json(undefined) ->
[];
to_json([]) ->
[].
-spec value_or_default({ok, term()} | {error, term()}, term()) -> term().
value_or_default({error, Reason}, Default) ->
_ = lager:debug("JSON error: ~p", [Reason]),
Default;
value_or_default({ok, Value}, _) ->
Value.
Internal functions
-spec follow_path(tuple() | [term()] | undefined, path()) ->
{ok, term()} | {error, not_found}.
follow_path(undefined, _) ->
{error, not_found};
follow_path(Value, []) ->
{ok, Value};
follow_path(JsonItems, [{find, Query}]) ->
follow_path(find(JsonItems, Query), []);
follow_path(JsonItems, [{find, Query} | RestPath]) ->
get(find(JsonItems, Query), RestPath);
follow_path({struct, JsonItems}, [Key]) when is_tuple(Key) ->
follow_path(target_tuple_values(Key, JsonItems), []);
follow_path({struct, JsonItems}, [Key]) ->
follow_path(proplists:get_value(Key, JsonItems), []);
follow_path({struct, JsonItems}, [Key | RestPath]) ->
Value = proplists:get_value(Key, JsonItems),
follow_path(Value, RestPath).
-spec find([term()], match_spec()) -> undefined | {struct, term()}.
find(Array, {key, Key, Value}) ->
lists:foldl(key_folder(Key, Value), not_found, Array);
find(Array, {index, Index}) when Index =< length(Array) ->
lists:nth(Index, Array);
find(_, {index, _}) ->
undefined.
key_folder(Key, Value) ->
fun({struct, Items}=X, Acc) ->
case lists:keyfind(Key, 1, Items) of
{Key, Value} ->
X;
_ ->
Acc
end;
(_, Acc) ->
Acc
end.
-spec target_tuple_values(tuple(), proplists:proplist()) -> tuple().
target_tuple_values(Keys, JsonItems) ->
list_to_tuple(
[proplists:get_value(element(Index, Keys), JsonItems)
|| Index <- lists:seq(1, tuple_size(Keys))]).
-spec user_object(rcs_user()) -> {struct, proplists:proplist()}.
user_object(?RCS_USER{email=Email,
display_name=DisplayName,
name=Name,
key_id=KeyID,
key_secret=KeySecret,
canonical_id=CanonicalID,
status=Status}) ->
StatusBin = case Status of
enabled ->
<<"enabled">>;
_ ->
<<"disabled">>
end,
UserData = [{email, list_to_binary(Email)},
{display_name, list_to_binary(DisplayName)},
{name, list_to_binary(Name)},
{key_id, list_to_binary(KeyID)},
{key_secret, list_to_binary(KeySecret)},
{id, list_to_binary(CanonicalID)},
{status, StatusBin}],
{struct, UserData}.
Eunit tests
-ifdef(TEST).
get_single_key_test() ->
Object1 = "{\"abc\":\"123\", \"def\":\"456\", \"ghi\":\"789\"}",
Object2 = "{\"test\":{\"abc\":\"123\", \"def\":\"456\", \"ghi\":\"789\"}}",
?assertEqual({ok, <<"123">>}, get(from_json(Object1), [<<"abc">>])),
?assertEqual({ok, <<"456">>}, get(from_json(Object1), [<<"def">>])),
?assertEqual({ok, <<"789">>}, get(from_json(Object1), [<<"ghi">>])),
?assertEqual({ok, <<"123">>}, get(from_json(Object2), [<<"test">>, <<"abc">>])),
?assertEqual({ok, <<"456">>}, get(from_json(Object2), [<<"test">>, <<"def">>])),
?assertEqual({ok, <<"789">>}, get(from_json(Object2), [<<"test">>, <<"ghi">>])),
?assertEqual({error, not_found}, get(from_json(Object1), [<<"zzz">>])),
?assertEqual({error, not_found}, get(from_json(Object2), [<<"test">>, <<"zzz">>])).
get_array_test() ->
Array = "[\"abc\", \"123\", \"def\", \"456\", 7]",
?assertEqual({ok, <<"abc">>}, get(from_json(Array), [{find, {index, 1}}])),
?assertEqual({ok, <<"123">>}, get(from_json(Array), [{find, {index, 2}}])),
?assertEqual({ok, <<"def">>}, get(from_json(Array), [{find, {index, 3}}])),
?assertEqual({ok, <<"456">>}, get(from_json(Array), [{find, {index, 4}}])),
?assertEqual({ok, 7}, get(from_json(Array), [{find, {index, 5}}])),
?assertEqual({error, not_found}, get(from_json(Array), [{find, {index, 6}}])).
get_multi_key_test() ->
Object1 = "{\"test\":{\"abc\":\"123\", \"def\":\"456\", \"ghi\":\"789\"}}",
Object2 = "{\"test\":{\"abc\":{\"123\":123,\"456\":456,\"789\":789},\"def\""
":{\"123\":123,\"456\":456,\"789\":789},\"ghi\":{\"123\":123,\"456\""
":456,\"789\":789}}}",
?assertEqual({ok, {<<"123">>, <<"789">>}}, get(from_json(Object1), [<<"test">>, {<<"abc">>, <<"ghi">>}])),
?assertEqual({ok, {123, 789}}, get(from_json(Object2), [<<"test">>, <<"abc">>, {<<"123">>, <<"789">>}])),
?assertEqual({ok, {123, 789}}, get(from_json(Object2), [<<"test">>, <<"def">>, {<<"123">>, <<"789">>}])),
?assertEqual({ok, {123, 789}}, get(from_json(Object2), [<<"test">>, <<"ghi">>, {<<"123">>, <<"789">>}])).
get_embedded_key_from_array_test() ->
Object = "{\"test\":{\"objects\":[{\"key1\":\"a1\",\"key2\":\"a2\",\"key3\""
":\"a3\"},{\"key1\":\"b1\",\"key2\":\"b2\",\"key3\":\"b3\"},{\"key1\""
":\"c1\",\"key2\":\"c2\",\"key3\":\"c3\"}]}}",
?assertEqual({ok, {struct, [{<<"key1">>, <<"a1">>}, {<<"key2">>, <<"a2">>}, {<<"key3">>, <<"a3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key1">>, <<"a1">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"a1">>}, {<<"key2">>, <<"a2">>}, {<<"key3">>, <<"a3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key2">>, <<"a2">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"a1">>}, {<<"key2">>, <<"a2">>}, {<<"key3">>, <<"a3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key3">>, <<"a3">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"b1">>}, {<<"key2">>, <<"b2">>}, {<<"key3">>, <<"b3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key1">>, <<"b1">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"b1">>}, {<<"key2">>, <<"b2">>}, {<<"key3">>, <<"b3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key2">>, <<"b2">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"b1">>}, {<<"key2">>, <<"b2">>}, {<<"key3">>, <<"b3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key3">>, <<"b3">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"c1">>}, {<<"key2">>, <<"c2">>}, {<<"key3">>, <<"c3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key1">>, <<"c1">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"c1">>}, {<<"key2">>, <<"c2">>}, {<<"key3">>, <<"c3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key2">>, <<"c2">>}}])),
?assertEqual({ok, {struct, [{<<"key1">>, <<"c1">>}, {<<"key2">>, <<"c2">>}, {<<"key3">>, <<"c3">>}]}},
get(from_json(Object),
[<<"test">>, <<"objects">>, {find, {key, <<"key3">>, <<"c3">>}}])).
-endif.
|
800986cd9cdad100b1e5975c8c0400e0ca083ecdff28ff186ed7c0f0ad5156fe | GaloisInc/ivory | BoundedInteger.hs | # LANGUAGE ScopedTypeVariables #
module Ivory.Language.BoundedInteger where
import Text.Printf
import Ivory.Language.Proxy
import qualified Ivory.Language.Syntax as I
import Ivory.Language.Type
--------------------------------------------------------------------------------
-- | It is an error if a constant implicitly underflows/overflows.
boundedFromInteger :: forall a b . (Num a, IvoryType a, Bounded b, Integral b)
=> (I.Expr -> a) -> b -> Integer -> a
boundedFromInteger constr _ i
| i > snd bounds
= error $ printf "The constant %d is too large to cast to type %s." i tyStr
| i < fst bounds
= error $ printf "The constant %d is too small to cast to type %s." i tyStr
| otherwise
= constr (fromInteger i)
where
ty = ivoryType (Proxy :: Proxy a)
tyStr = show ty
bounds :: (Integer, Integer)
bounds = (fromIntegral (minBound :: b), fromIntegral (maxBound :: b))
| null | https://raw.githubusercontent.com/GaloisInc/ivory/53a0795b4fbeb0b7da0f6cdaccdde18849a78cd6/ivory/src/Ivory/Language/BoundedInteger.hs | haskell | ------------------------------------------------------------------------------
| It is an error if a constant implicitly underflows/overflows. | # LANGUAGE ScopedTypeVariables #
module Ivory.Language.BoundedInteger where
import Text.Printf
import Ivory.Language.Proxy
import qualified Ivory.Language.Syntax as I
import Ivory.Language.Type
boundedFromInteger :: forall a b . (Num a, IvoryType a, Bounded b, Integral b)
=> (I.Expr -> a) -> b -> Integer -> a
boundedFromInteger constr _ i
| i > snd bounds
= error $ printf "The constant %d is too large to cast to type %s." i tyStr
| i < fst bounds
= error $ printf "The constant %d is too small to cast to type %s." i tyStr
| otherwise
= constr (fromInteger i)
where
ty = ivoryType (Proxy :: Proxy a)
tyStr = show ty
bounds :: (Integer, Integer)
bounds = (fromIntegral (minBound :: b), fromIntegral (maxBound :: b))
|
b57545b14ef73eabb2f6f1a683e9969bbbd9fa86cc6b91a49a9c2d6cb2e5849d | emaphis/HtDP2e-solutions | ex059.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex059) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
HtDP 2e - 4 Enumerations and Intervals
4.7 Finite State Worlds
;; Ex 59:
Finish the design of a world program that simulates the
(require 2htdp/image)
(require 2htdp/universe)
;; constants
bulb 's redius
(define SPACE (/ RAD 2)) ; the space between bulbs
(define MT (empty-scene (* RAD 10) (* RAD 3)))
traffic light FSA .
TrafficLight is on of three Strings
;; - "red"
;; - "green"
;; - "yellow"
interp . one of three colors at any given time in order of
#;
(define (fn-for-tl tl) ; template
(cond [(string=? tl "red") (... tl)]
[(string=? tl "yellow") (... tl)]
[(string=? tl "green") (... tl)]))
;; functions
TrafficLight - >
; yields the next state given current state cs
(check-expect (tl-next "red") "green")
(check-expect (tl-next "green") "yellow")
(check-expect (tl-next "yellow") "red")
;(define (tl-next cs) cs) ; stub
(define (tl-next tl)
(cond [(string=? tl "red") "green"]
[(string=? tl "yellow") "red"]
[(string=? tl "green") "yellow"]))
TrafficLight - > Image
; renderS the current state cs as an image
(check-expect (tl-render "red")
(overlay
(beside (circle RAD "solid" "red")
(square SPACE "outline" "white")
(circle RAD "outline" "yellow")
(square SPACE "outline" "white")
(circle RAD "outline" "green"))
MT))
(check-expect (tl-render "yellow")
(overlay
(beside (circle RAD "outline" "red")
(square SPACE "outline" "white")
(circle RAD "solid" "yellow")
(square SPACE "outline" "white")
(circle RAD "outline" "green"))
MT))
(check-expect (tl-render "green")
(overlay
(beside (circle RAD "outline" "red")
(square SPACE "outline" "white")
(circle RAD "outline" "yellow")
(square SPACE "outline" "white")
(circle RAD "solid" "green"))
MT))
;(define (tl-render cs) ; stub
( empty - scene 90 30 ) )
(define (tl-render cs)
(overlay
(beside (bulb cs "red")
(square SPACE "outline" "white")
(bulb cs "yellow")
(square SPACE "outline" "white")
(bulb cs "green"))
MT))
- > Image
; render the c colored bulb of the traffic light,
; when on is the current state
(define (bulb on c)
(if (string=? on c)
(circle RAD "solid" c)
(circle RAD "outline" c)))
TrafficLight - >
simulate a clock - based American traffic light
; run: (traffic-light-simulation "red")
(define (traffic-light-simulation initial-state)
(big-bang initial-state
[to-draw tl-render]
[on-tick tl-next 1]))
| null | https://raw.githubusercontent.com/emaphis/HtDP2e-solutions/ecb60b9a7bbf9b8999c0122b6ea152a3301f0a68/1-Fixed-Size-Data/04-Intervals/ex059.rkt | racket | about the language level of this file in a form that our tools can easily process.
Ex 59:
constants
the space between bulbs
- "red"
- "green"
- "yellow"
template
functions
yields the next state given current state cs
(define (tl-next cs) cs) ; stub
renderS the current state cs as an image
(define (tl-render cs) ; stub
render the c colored bulb of the traffic light,
when on is the current state
run: (traffic-light-simulation "red") | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex059) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
HtDP 2e - 4 Enumerations and Intervals
4.7 Finite State Worlds
Finish the design of a world program that simulates the
(require 2htdp/image)
(require 2htdp/universe)
bulb 's redius
(define MT (empty-scene (* RAD 10) (* RAD 3)))
traffic light FSA .
TrafficLight is on of three Strings
interp . one of three colors at any given time in order of
(cond [(string=? tl "red") (... tl)]
[(string=? tl "yellow") (... tl)]
[(string=? tl "green") (... tl)]))
TrafficLight - >
(check-expect (tl-next "red") "green")
(check-expect (tl-next "green") "yellow")
(check-expect (tl-next "yellow") "red")
(define (tl-next tl)
(cond [(string=? tl "red") "green"]
[(string=? tl "yellow") "red"]
[(string=? tl "green") "yellow"]))
TrafficLight - > Image
(check-expect (tl-render "red")
(overlay
(beside (circle RAD "solid" "red")
(square SPACE "outline" "white")
(circle RAD "outline" "yellow")
(square SPACE "outline" "white")
(circle RAD "outline" "green"))
MT))
(check-expect (tl-render "yellow")
(overlay
(beside (circle RAD "outline" "red")
(square SPACE "outline" "white")
(circle RAD "solid" "yellow")
(square SPACE "outline" "white")
(circle RAD "outline" "green"))
MT))
(check-expect (tl-render "green")
(overlay
(beside (circle RAD "outline" "red")
(square SPACE "outline" "white")
(circle RAD "outline" "yellow")
(square SPACE "outline" "white")
(circle RAD "solid" "green"))
MT))
( empty - scene 90 30 ) )
(define (tl-render cs)
(overlay
(beside (bulb cs "red")
(square SPACE "outline" "white")
(bulb cs "yellow")
(square SPACE "outline" "white")
(bulb cs "green"))
MT))
- > Image
(define (bulb on c)
(if (string=? on c)
(circle RAD "solid" c)
(circle RAD "outline" c)))
TrafficLight - >
simulate a clock - based American traffic light
(define (traffic-light-simulation initial-state)
(big-bang initial-state
[to-draw tl-render]
[on-tick tl-next 1]))
|
68558c0d9a6d64a99360b8166653c9716bc9fc73c987cfceca8a634debefa082 | bartavelle/stateWriter | Lazy.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
module Control.Monad.Trans.RSS.Lazy (
* The RWS monad
RSS,
rss,
runRSS,
evalRSS,
execRSS,
withRSS,
-- * The RSST monad transformer
RSST,
runRSST,
evalRSST,
execRSST,
withRSST,
-- * Helpers
liftCatch
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Except
import Control.Monad.Signatures
import Data.Functor.Identity
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.RWS
-- | A monad containing an environment of type @r@, output of type @w@
-- and an updatable state of type @s@.
type RSS r w s = RSST r w s Identity
-- | Construct an RSS computation from a function.
-- (The inverse of 'runRSS'.)
rss :: Monoid w => (r -> s -> (a, s, w)) -> RSS r w s a
rss f = RSST stepper
  where
    -- the incoming running log @w@ is extended with this step's output
    stepper r (s, w) =
      let (a, s', w') = f r s
      in Identity (a, (s', w <> w'))
-- | Unwrap an RSS computation as a function.
-- (The inverse of 'rss'.)
runRSS :: Monoid w => RSS r w s a -> r -> s -> (a,s,w)
runRSS m r = runIdentity . runRSST m r
-- | Evaluate a computation with the given initial state and environment,
-- returning the final value and output, discarding the final state.
evalRSS :: Monoid w
        => RSS r w s a -- ^RWS computation to execute
        -> r           -- ^initial environment
        -> s           -- ^initial value
        -> (a, w)      -- ^final value and output
evalRSS m r s = (a, w)
  where
    -- tuple bindings in 'where' are lazy, matching the original 'let'
    (a, _, w) = runRSS m r s
-- | Evaluate a computation with the given initial state and environment,
-- returning the final state and output, discarding the final value.
execRSS :: Monoid w
        => RSS r w s a -- ^RWS computation to execute
        -> r           -- ^initial environment
        -> s           -- ^initial value
        -> (s, w)      -- ^final state and output
execRSS m r s = (s', w)
  where
    -- lazy tuple binding, as in the original 'let' form
    (_, s', w) = runRSS m r s
-- | @'withRSS' f m@ executes action @m@ with an initial environment
-- and state modified by applying @f@.
--
-- * @'runRSS' ('withRSS' f m) r s = 'uncurry' ('runRSS' m) (f r s)@
withRSS :: (r' -> s -> (r, s)) -> RSS r w s a -> RSS r' w s a
withRSS = withRSST
---------------------------------------------------------------------------
-- | A monad transformer adding reading an environment of type @r@,
-- collecting an output of type @w@ and updating a state of type @s@
-- to an inner monad @m@.
--
-- Unlike the classic RWST, the running log @w@ is threaded alongside the
-- state in a @(s, w)@ pair, so 'tell' appends to it in place.
newtype RSST r w s m a = RSST { runRSST' :: r -> (s,w) -> m (a, (s, w)) }
-- | Unwrap an 'RSST' computation, starting from state @s@ and an empty log.
runRSST :: (Monoid w, Monad m) => RSST r w s m a -> r -> s -> m (a, s, w)
runRSST m r s = flatten <$> runRSST' m r (s, mempty)
  where
    -- lazy pattern preserves the non-strictness of the original do-bind
    flatten ~(a, (s', w)) = (a, s', w)
-- | Evaluate a computation with the given initial state and environment,
-- returning the final value and output, discarding the final state.
evalRSST :: (Monad m, Monoid w)
         => RSST r w s m a -- ^computation to execute
         -> r              -- ^initial environment
         -> s              -- ^initial value
         -> m (a,w)        -- ^computation yielding final value and output
evalRSST m r s = pick <$> runRSST' m r (s, mempty)
  where
    -- lazy pattern preserves the non-strictness of the original do-bind
    pick ~(a, (_, w)) = (a, w)
-- | Evaluate a computation with the given initial state and environment,
-- returning the final state and output, discarding the final value.
execRSST :: (Monad m, Monoid w)
         => RSST r w s m a -- ^computation to execute
         -> r              -- ^initial environment
         -> s              -- ^initial value
         -> m (s, w)       -- ^computation yielding final state and output
execRSST m r s = pick <$> runRSST' m r (s, mempty)
  where
    -- lazy pattern preserves the non-strictness of the original do-bind
    pick ~(_, (s', w)) = (s', w)
-- | @'withRSST' f m@ executes action @m@ with an initial environment
-- and state modified by applying @f@.
--
-- * @'runRSST' ('withRSST' f m) r s = 'uncurry' ('runRSST' m) (f r s)@
withRSST :: (r' -> s -> (r, s)) -> RSST r w s m a -> RSST r' w s m a
withRSST f m = RSST $ \r (s,w) ->
  -- lazy 'let': @f r s@ is not forced until the inner action demands it
  let (r',s') = f r s
  in runRSST' m r' (s',w)
-- | Map over the result value; the (state, log) pair is left untouched.
instance (Functor m) => Functor (RSST r w s m) where
  fmap f m = RSST $ \r s ->
    fmap (\ ~(a, (s', w)) -> (f a, (s', w))) $ runRSST' m r s

-- | Sequencing threads the (state, log) pair from one action to the next.
instance (Monad m) => Monad (RSST r w s m) where
  return = pure
  -- lazy pattern keeps bind non-strict in the intermediate result
  m >>= k = RSST $ \r s -> do
    ~(a, (s', w)) <- runRSST' m r s
    runRSST' (k a) r (s',w)
-- | 'fail' ignores environment and state and delegates to the inner monad.
instance (MonadFail m) => MonadFail (RSST r w s m) where
  fail msg = RSST $ \_ _ -> fail msg

-- | Delegates to the 'Alternative' instance below.
instance (MonadPlus m) => MonadPlus (RSST r w s m) where
  mzero = empty
  mplus = (<|>)

-- | 'pure' leaves the (state, log) pair untouched; '<*>' is monadic 'ap'.
instance (Functor m, Monad m) => Applicative (RSST r w s m) where
  pure a = RSST $ \_ s -> pure (a, s)
  (<*>) = ap

-- | Choice between two computations, both started from the same state.
instance (Functor m, MonadPlus m) => Alternative (RSST r w s m) where
  empty = RSST $ \_ _ -> empty
  m <|> n = RSST $ \r s -> runRSST' m r s <|> runRSST' n r s

-- | The fixpoint is tied through the result value only (lazy pattern).
instance (MonadFix m) => MonadFix (RSST r w s m) where
  mfix f = RSST $ \r s -> mfix $ \ ~(a, _) -> runRSST' (f a) r s
-- | Lifted actions cannot observe or modify the environment, state or log.
instance MonadTrans (RSST r w s) where
  lift m = RSST $ \_ s -> do
    a <- m
    return (a, s)

instance (MonadIO m) => MonadIO (RSST r w s m) where
  liftIO = lift . liftIO

-- | State operations act on the @s@ half of the (state, log) pair only.
instance Monad m => MonadState s (RSST r w s m) where
  get = RSST $ \_ (s,w) -> return (s,(s,w))
  put ns = RSST $ \_ (_,w) -> return ((),(ns,w))
  state f = RSST $ \_ (s,w) -> case f s of
    (a,s') -> return (a, (s', w))

-- | Reader operations; 'local' rewrites the environment for a sub-action.
instance Monad m => MonadReader r (RSST r w s m) where
  ask = RSST $ \r s -> return (r, s)
  local f rw = RSST $ \r s -> runRSST' rw (f r) s
  reader f = RSST $ \r s -> return (f r, s)
-- | Writer operations. The log lives in the threaded (state, log) pair,
-- so 'tell' appends to the running log in place with '<>'.
instance (Monoid w, Monad m) => MonadWriter w (RSST r w s m) where
  writer (a,w) = tell w >> return a
  tell w = RSST $ \_ (s, ow) ->
    let nw = ow <> w
    in return ((), (s, nw))
  -- run the sub-action on an empty log so exactly its own output can be
  -- observed, then merge that output back into the outer log
  listen rw = RSST $ \r (s, w) -> do
    (a, (ns, nw)) <- runRSST' rw r (s, mempty)
    return ((a, nw), (ns, w <> nw))
  -- apply the returned function to the sub-action's log only
  pass rw = RSST $ \r (s, w) -> do
    ( (a, fw), (s', w') ) <- runRSST' rw r (s, mempty)
    return (a, (s', w `mappend` fw w'))

instance (Monoid w, Monad m) => MonadRWS r w s (RSST r w s m)

instance (Monoid w, MonadError e m) => MonadError e (RSST r w s m) where
  throwError = lift . throwError
  catchError = liftCatch catchError
-- | Lift a @catchE@ operation to the new monad.
-- The handler resumes from the (state, log) pair the failing action
-- started with.
liftCatch :: Catch e m (a,(s,w)) -> Catch e (RSST r w s m) a
liftCatch catchE m h =
  RSST $ \ r s -> runRSST' m r s `catchE` \ e -> runRSST' (h e) r s
{-# INLINE liftCatch #-}
| null | https://raw.githubusercontent.com/bartavelle/stateWriter/2f7bfc9171ea6b7e1d3f564f08b6f5a74f049b34/Control/Monad/Trans/RSS/Lazy.hs | haskell | * The RSST monad transformer
* Helpers
| A monad containing an environment of type @r@, output of type @w@
and an updatable state of type @s@.
| Unwrap an RSS computation as a function.
(The inverse of 'rws'.)
| Evaluate a computation with the given initial state and environment,
returning the final value and output, discarding the final state.
^RWS computation to execute
^initial environment
^initial value
^final value and output
| Evaluate a computation with the given initial state and environment,
returning the final state and output, discarding the final value.
^RWS computation to execute
^initial environment
^initial value
^final state and output
-------------------------------------------------------------------------
| A monad transformer adding reading an environment of type @r@,
collecting an output of type @w@ and updating a state of type @s@
to an inner monad @m@.
| Evaluate a computation with the given initial state and environment,
returning the final value and output, discarding the final state.
^computation to execute
^initial environment
^initial value
^computation yielding final value and output
| Evaluate a computation with the given initial state and environment,
returning the final state and output, discarding the final value.
^computation to execute
^initial environment
^initial value
^computation yielding final state and output
| @'withRSST' f m@ executes action @m@ with an initial environment
| Lift a @catchE@ operation to the new monad. | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
module Control.Monad.Trans.RSS.Lazy (
* The RWS monad
RSS,
rss,
runRSS,
evalRSS,
execRSS,
withRSS,
RSST,
runRSST,
evalRSST,
execRSST,
withRSST,
liftCatch
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Except
import Control.Monad.Signatures
import Data.Functor.Identity
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.RWS
type RSS r w s = RSST r w s Identity
| Construct an RSS computation from a function .
( The inverse of ' runRSS ' . )
rss :: Monoid w => (r -> s -> (a, s, w)) -> RSS r w s a
rss f = RSST $ \r (s,w) -> let (a,s',w') = f r s
in Identity (a, (s', w <> w'))
runRSS :: Monoid w => RSS r w s a -> r -> s -> (a,s,w)
runRSS m r s = runIdentity (runRSST m r s)
evalRSS :: Monoid w
evalRSS m r s = let
(a, _, w) = runRSS m r s
in (a, w)
execRSS :: Monoid w
execRSS m r s = let
(_, s', w) = runRSS m r s
in (s', w)
and state modified by applying @f@.
* @'runRSS ' ( ' withRSS ' f m ) r s = ' uncurry ' ( ' runRSS ' m ) ( f r s)@
withRSS :: (r' -> s -> (r, s)) -> RSS r w s a -> RSS r' w s a
withRSS = withRSST
newtype RSST r w s m a = RSST { runRSST' :: r -> (s,w) -> m (a, (s, w)) }
runRSST :: (Monoid w, Monad m) => RSST r w s m a -> r -> s -> m (a, s, w)
runRSST m r s = do
~(a,(s',w)) <- runRSST' m r (s,mempty)
return (a,s',w)
evalRSST :: (Monad m, Monoid w)
evalRSST m r s = do
~(a, (_, w)) <- runRSST' m r (s,mempty)
return (a, w)
execRSST :: (Monad m, Monoid w)
execRSST m r s = do
~(_, (s', w)) <- runRSST' m r (s,mempty)
return (s', w)
and state modified by applying @f@.
* ( ' withRSST ' f m ) r s = ' uncurry ' ( ' runRSST ' m ) ( f r s)@
withRSST :: (r' -> s -> (r, s)) -> RSST r w s m a -> RSST r' w s m a
withRSST f m = RSST $ \r (s,w) ->
let (r',s') = f r s
in runRSST' m r' (s',w)
instance (Functor m) => Functor (RSST r w s m) where
fmap f m = RSST $ \r s ->
fmap (\ ~(a, (s', w)) -> (f a, (s', w))) $ runRSST' m r s
instance (Monad m) => Monad (RSST r w s m) where
return = pure
m >>= k = RSST $ \r s -> do
~(a, (s', w)) <- runRSST' m r s
runRSST' (k a) r (s',w)
instance (MonadFail m) => MonadFail (RSST r w s m) where
fail msg = RSST $ \_ _ -> fail msg
instance (MonadPlus m) => MonadPlus (RSST r w s m) where
mzero = empty
mplus = (<|>)
instance (Functor m, Monad m) => Applicative (RSST r w s m) where
pure a = RSST $ \_ s -> pure (a, s)
(<*>) = ap
instance (Functor m, MonadPlus m) => Alternative (RSST r w s m) where
empty = RSST $ \_ _ -> empty
m <|> n = RSST $ \r s -> runRSST' m r s <|> runRSST' n r s
instance (MonadFix m) => MonadFix (RSST r w s m) where
mfix f = RSST $ \r s -> mfix $ \ ~(a, _) -> runRSST' (f a) r s
instance MonadTrans (RSST r w s) where
lift m = RSST $ \_ s -> do
a <- m
return (a, s)
instance (MonadIO m) => MonadIO (RSST r w s m) where
liftIO = lift . liftIO
instance Monad m => MonadState s (RSST r w s m) where
get = RSST $ \_ (s,w) -> return (s,(s,w))
put ns = RSST $ \_ (_,w) -> return ((),(ns,w))
state f = RSST $ \_ (s,w) -> case f s of
(a,s') -> return (a, (s', w))
instance Monad m => MonadReader r (RSST r w s m) where
ask = RSST $ \r s -> return (r, s)
local f rw = RSST $ \r s -> runRSST' rw (f r) s
reader f = RSST $ \r s -> return (f r, s)
instance (Monoid w, Monad m) => MonadWriter w (RSST r w s m) where
writer (a,w) = tell w >> return a
tell w = RSST $ \_ (s, ow) ->
let nw = ow <> w
in return ((), (s, nw))
listen rw = RSST $ \r (s, w) -> do
(a, (ns, nw)) <- runRSST' rw r (s, mempty)
return ((a, nw), (ns, w <> nw))
pass rw = RSST $ \r (s, w) -> do
( (a, fw), (s', w') ) <- runRSST' rw r (s, mempty)
return (a, (s', w `mappend` fw w'))
instance (Monoid w, Monad m) => MonadRWS r w s (RSST r w s m)
instance (Monoid w, MonadError e m) => MonadError e (RSST r w s m) where
throwError = lift . throwError
catchError = liftCatch catchError
liftCatch :: Catch e m (a,(s,w)) -> Catch e (RSST r w s m) a
liftCatch catchE m h =
RSST $ \ r s -> runRSST' m r s `catchE` \ e -> runRSST' (h e) r s
# INLINE liftCatch #
|
6315b9a49e671b9bd63c59924564bace1caba69e735f0965347c628cd140220e | slipstream/SlipStreamServer | connector_template.clj | (ns com.sixsq.slipstream.ssclj.resources.connector-template
(:require
[clojure.tools.logging :as log]
[com.sixsq.slipstream.auth.acl :as a]
[com.sixsq.slipstream.ssclj.resources.common.crud :as crud]
[com.sixsq.slipstream.ssclj.resources.common.schema :as c]
[com.sixsq.slipstream.ssclj.resources.common.utils :as u]
[com.sixsq.slipstream.ssclj.resources.spec.connector-template]
[com.sixsq.slipstream.util.response :as r]))
;; Naming for the ConnectorTemplate resource/collection and their schema URIs.
(def ^:const resource-tag :connectorTemplates)
(def ^:const resource-name "ConnectorTemplate")
(def ^:const resource-url (u/de-camelcase resource-name))
(def ^:const collection-name "ConnectorTemplateCollection")
(def ^:const resource-uri (str c/slipstream-schema-uri resource-name))
(def ^:const collection-uri (str c/slipstream-schema-uri collection-name))

;; ACLs: ADMIN owns both resources and collection; any USER may view.
(def user-can-view {:principal "USER"
                    :type "ROLE"
                    :right "VIEW"})

(def resource-acl {:owner {:principal "ADMIN"
                           :type "ROLE"}
                   :rules [{:principal "ADMIN"
                            :type "ROLE"
                            :right "VIEW"}
                           user-can-view]})

(def collection-acl {:owner {:principal "ADMIN"
                             :type "ROLE"}
                     :rules [{:principal "ADMIN"
                              :type "ROLE"
                              :right "VIEW"}
                             user-can-view]})
;;
;; Resource defaults
;;

;; Placeholder: templates require the user to supply a real instance name.
(def connector-instance-name-default
  {:instanceName "Provide valid connector instance name."})

;; Defaults for the attributes every connector must carry.
(def connector-mandatory-reference-attrs-defaults
  {:orchestratorImageid ""
   :quotaVm "20"
   :maxIaasWorkers 5})

;; Defaults for the reference attributes shared by most connectors.
(def connector-reference-attrs-defaults
  {:endpoint ""
   :objectStoreEndpoint ""
   :nativeContextualization "linux-only"
   :orchestratorSSHUsername ""
   :orchestratorSSHPassword ""
   :securityGroups "slipstream_managed"
   :updateClientURL ""})
;;
;; atoms to keep track of the loaded ConnectorTemplate resources
;;

;; id -> complete template resource
(def templates (atom {}))
;; id -> parameter-description map served by the /describe action
(def descriptions (atom {}))
;; id -> name->keyword map registered alongside the template
(def name->kw (atom {}))
(defn collection-wrapper-fn
  "Specialized version of this function that removes the adding
   of operations to the collection and entries. These are already
   part of the stored resources."
  [resource-name collection-acl collection-uri collection-key]
  ;; returned fn ignores the request and simply attaches the entries
  ;; under collection-key in a minimal collection skeleton
  (fn [_ entries]
    (assoc {:acl collection-acl
            :resourceURI collection-uri
            :id (u/de-camelcase resource-name)}
      collection-key entries)))
(defn complete-resource
  "Completes the given document with server-managed information:
   resourceURI, timestamps, operations, and ACL.
   Returns nil when :cloudServiceType is missing."
  [{:keys [cloudServiceType] :as resource}]
  (when cloudServiceType
    (let [id (str resource-url "/" cloudServiceType)
          href (str id "/describe")
          ops [{:rel (:describe c/action-uri) :href href}]]
      (-> resource
          (merge {:id id
                  :resourceURI resource-uri
                  :acl resource-acl
                  :operations ops})
          ;; NOTE(review): defaults are merged *over* the template, so they
          ;; override any values the template supplied — confirm intended
          (merge connector-mandatory-reference-attrs-defaults)
          (merge connector-instance-name-default)
          u/update-timestamps))))
(defn register
  "Registers a given ConnectorTemplate resource and its description
   with the server. The resource document (resource) and the description
   (desc) must be valid. The key will be used to create the id of
   the resource as 'connector-template/key'."
  [resource desc & [name-kw-map]]
  (when-let [full-resource (complete-resource resource)]
    (let [id (:id full-resource)]
      (swap! templates assoc id full-resource)
      (log/info "loaded ConnectorTemplate" id)
      (when desc
        ;; the description inherits the resource's ACL
        (let [acl (:acl full-resource)
              full-desc (assoc desc :acl acl)]
          (swap! descriptions assoc id full-desc))
        (log/info "loaded ConnectorTemplate description" id))
      (when name-kw-map
        (swap! name->kw assoc id name-kw-map)
        (log/info "added name->kw mapping from ConnectorTemplate" id)))))
;; Parameter descriptions shared by every ConnectorTemplate, layered over
;; the generic CIMI parameter descriptions; :order controls UI placement.
(def ConnectorTemplateDescription
  (merge c/CommonParameterDescription
         {:cloudServiceType {:displayName "Cloud Service Type"
                             :category "general"
                             :description "type of cloud service targeted by connector"
                             :type "string"
                             :mandatory true
                             :readOnly true
                             :order 0}
          :instanceName {:displayName "Connector Instance Name"
                         :category "general"
                         :description "Connector Instance Name"
                         :type "string"
                         :mandatory true
                         :readOnly false
                         :order 1}
          ;; Mandatory reference attributes. Can go into a separate .edn.
          :orchestratorImageid {:displayName "orchestrator.imageid"
                                :type "string"
                                :category ""
                                :description "Image Id of the orchestrator for the connector"
                                :mandatory true
                                :readOnly false
                                :order 15}
          :quotaVm {:displayName "quota.vm"
                    :type "string"
                    :category ""
                    :description "VM quota for the connector (i.e. maximum number of VMs allowed)"
                    :mandatory true
                    :readOnly false
                    :order 910}
          :maxIaasWorkers {:displayName "max.iaas.workers"
                           :type "string"
                           :category ""
                           :description "Max number of concurrently provisioned VMs by orchestrator"
                           :mandatory true
                           :readOnly false
                           :order 915}}))
;; Descriptions for the optional/reference attributes; connector
;; implementations merge these into their own description maps.
(def connector-reference-attrs-description
  {:endpoint
   {:displayName "endpoint"
    :type "string"
    :category ""
    :description "Service endpoint for the connector (e.g. :5000)"
    :mandatory true
    :readOnly false
    :order 10}
   :objectStoreEndpoint
   {:displayName "object.store.endpoint"
    :type "string"
    :category ""
    :description "Cloud Object Store Service endpoint (e.g. :5000)"
    :mandatory true
    :readOnly false
    :order 10}
   :securityGroups
   {:displayName "security.groups"
    :type "string"
    :category ""
    :description "Orchestrator security groups (comma separated list)"
    :mandatory true
    :readOnly false
    :order 11}
   :orchestratorSSHUsername
   {:displayName "orchestrator.ssh.username"
    :type "string"
    :category ""
    :description "Orchestrator username"
    :mandatory true
    :readOnly false
    :order 20
    :instructions (str "Username used to contextualize the orchestrator VM. Leave this "
                       "field empty if you are using a native Cloud contextualization.")}
   :orchestratorSSHPassword
   {:displayName "orchestrator.ssh.password"
    :type "password"
    :category ""
    :description "Orchestrator password"
    :mandatory true
    :readOnly false
    :order 21
    :instructions (str "Password used to contextualize the orchestrator VM. Leave this "
                       "field empty if you are using a native Cloud contextualization.")}
   :nativeContextualization
   {:displayName "native-contextualization"
    :type "enum"
    :category ""
    :description "Use native cloud contextualisation"
    :mandatory true
    :readOnly false
    :order 30
    :enum ["never" "linux-only" "windows-only" "always"]
    :instructions (str "Here you can define when SlipStream should use the native Cloud "
                       "contextualization or when it should try other methods like SSH and WinRM. <br/>")}
   :updateClientURL
   {:displayName "update.clienturl"
    :type "string"
    :category ""
    :description "URL pointing to the tarball containing the client for the connector"
    :mandatory true
    :readOnly false
    :order 31}})
;;
;; multimethods for validation
;;

(defmulti validate-subtype
          "Validates the given resource against the specific
           ConnectorTemplate subtype schema."
          :cloudServiceType)

;; an unknown cloudServiceType is a configuration error: fail loudly
(defmethod validate-subtype :default
  [resource]
  (throw (ex-info (str "unknown ConnectorTemplate type: " (:cloudServiceType resource)) resource)))

(defmethod crud/validate
  resource-uri
  [resource]
  (validate-subtype resource))
;;
;; CRUD operations
;;

;; templates are read-only: creation through the API is rejected (405)
(defmethod crud/add resource-name
  [request]
  (throw (r/ex-bad-method request)))

(defmethod crud/retrieve resource-name
  [{{uuid :uuid} :params :as request}]
  (try
    (let [id (str resource-url "/" uuid)]
      (-> (get @templates id)
          (a/can-view? request)
          (r/json-response)))
    ;; ex-info from the ACL check carries a ring response in ex-data;
    ;; rethrow anything else
    (catch Exception e
      (or (ex-data e) (throw e)))))
;; must override the default implementation so that the
;; data can be pulled from the atom rather than the database
(defmethod crud/retrieve-by-id resource-url
  [id]
  ;; a plain map lookup cannot throw, so the previous try/catch around
  ;; this body was dead code; return the template (or nil) directly
  (get @templates id))
;; templates are immutable through the API: edit and delete answer 405
(defmethod crud/edit resource-name
  [request]
  (throw (r/ex-bad-method request)))

(defmethod crud/delete resource-name
  [request]
  (throw (r/ex-bad-method request)))
;; list all registered templates from the atom (no paging/filtering yet)
(defmethod crud/query resource-name
  [request]
  (a/can-view? {:acl collection-acl} request)
  (let [wrapper-fn (collection-wrapper-fn resource-name collection-acl collection-uri resource-tag)
        ;; FIXME: At least the paging options should be supported.
        ;; NOTE(review): `options` is computed but never used below
        options (select-keys request [:identity :query-params :cimi-params :user-name :user-roles])
        [count-before-pagination entries] ((juxt count vals) @templates)
        wrapped-entries (wrapper-fn request entries)
        entries-and-count (assoc wrapped-entries :count count-before-pagination)]
    (r/json-response entries-and-count)))
;;
;; actions
;;

;; /describe returns the parameter-description map registered for a template
(defmethod crud/do-action [resource-url "describe"]
  [{{uuid :uuid} :params :as request}]
  (try
    (let [id (str resource-url "/" uuid)]
      (-> (get @descriptions id)
          (a/can-view? request)
          (r/json-response)))
    (catch Exception e
      (or (ex-data e) (throw e)))))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi-resources/src/com/sixsq/slipstream/ssclj/resources/connector_template.clj | clojure |
Resource defaults
Mandatory reference attributes. Can go into a separate .edn.
multimethods for validation
CRUD operations
must override the default implementation so that the
data can be pulled from the atom rather than the database
FIXME: At least the paging options should be supported.
actions
| (ns com.sixsq.slipstream.ssclj.resources.connector-template
(:require
[clojure.tools.logging :as log]
[com.sixsq.slipstream.auth.acl :as a]
[com.sixsq.slipstream.ssclj.resources.common.crud :as crud]
[com.sixsq.slipstream.ssclj.resources.common.schema :as c]
[com.sixsq.slipstream.ssclj.resources.common.utils :as u]
[com.sixsq.slipstream.ssclj.resources.spec.connector-template]
[com.sixsq.slipstream.util.response :as r]))
(def ^:const resource-tag :connectorTemplates)
(def ^:const resource-name "ConnectorTemplate")
(def ^:const resource-url (u/de-camelcase resource-name))
(def ^:const collection-name "ConnectorTemplateCollection")
(def ^:const resource-uri (str c/slipstream-schema-uri resource-name))
(def ^:const collection-uri (str c/slipstream-schema-uri collection-name))
(def user-can-view {:principal "USER"
:type "ROLE"
:right "VIEW"})
(def resource-acl {:owner {:principal "ADMIN"
:type "ROLE"}
:rules [{:principal "ADMIN"
:type "ROLE"
:right "VIEW"}
user-can-view]})
(def collection-acl {:owner {:principal "ADMIN"
:type "ROLE"}
:rules [{:principal "ADMIN"
:type "ROLE"
:right "VIEW"}
user-can-view]})
(def connector-instance-name-default
{:instanceName "Provide valid connector instance name."})
(def connector-mandatory-reference-attrs-defaults
{:orchestratorImageid ""
:quotaVm "20"
:maxIaasWorkers 5})
(def connector-reference-attrs-defaults
{:endpoint ""
:objectStoreEndpoint ""
:nativeContextualization "linux-only"
:orchestratorSSHUsername ""
:orchestratorSSHPassword ""
:securityGroups "slipstream_managed"
:updateClientURL ""
})
atom to keep track of the loaded ConnectorTemplate resources
(def templates (atom {}))
(def descriptions (atom {}))
(def name->kw (atom {}))
(defn collection-wrapper-fn
"Specialized version of this function that removes the adding
of operations to the collection and entries. These are already
part of the stored resources."
[resource-name collection-acl collection-uri collection-key]
(fn [_ entries]
(let [skeleton {:acl collection-acl
:resourceURI collection-uri
:id (u/de-camelcase resource-name)}]
(assoc skeleton collection-key entries))))
(defn complete-resource
"Completes the given document with server-managed information:
resourceURI, timestamps, operations, and ACL."
[{:keys [cloudServiceType] :as resource}]
(when cloudServiceType
(let [id (str resource-url "/" cloudServiceType)
href (str id "/describe")
ops [{:rel (:describe c/action-uri) :href href}]]
(-> resource
(merge {:id id
:resourceURI resource-uri
:acl resource-acl
:operations ops})
(merge connector-mandatory-reference-attrs-defaults)
(merge connector-instance-name-default)
u/update-timestamps))))
(defn register
"Registers a given ConnectorTemplate resource and its description
with the server. The resource document (resource) and the description
(desc) must be valid. The key will be used to create the id of
the resource as 'connector-template/key'."
[resource desc & [name-kw-map]]
(when-let [full-resource (complete-resource resource)]
(let [id (:id full-resource)]
(swap! templates assoc id full-resource)
(log/info "loaded ConnectorTemplate" id)
(when desc
(let [acl (:acl full-resource)
full-desc (assoc desc :acl acl)]
(swap! descriptions assoc id full-desc))
(log/info "loaded ConnectorTemplate description" id))
(when name-kw-map
(swap! name->kw assoc id name-kw-map)
(log/info "added name->kw mapping from ConnectorTemplate" id)))))
(def ConnectorTemplateDescription
(merge c/CommonParameterDescription
{:cloudServiceType {:displayName "Cloud Service Type"
:category "general"
:description "type of cloud service targeted by connector"
:type "string"
:mandatory true
:readOnly true
:order 0}
:instanceName {:displayName "Connector Instance Name"
:category "general"
:description "Connector Instance Name"
:type "string"
:mandatory true
:readOnly false
:order 1}
:orchestratorImageid {:displayName "orchestrator.imageid"
:type "string"
:category ""
:description "Image Id of the orchestrator for the connector"
:mandatory true
:readOnly false
:order 15}
:quotaVm {:displayName "quota.vm"
:type "string"
:category ""
:description "VM quota for the connector (i.e. maximum number of VMs allowed)"
:mandatory true
:readOnly false
:order 910}
:maxIaasWorkers {:displayName "max.iaas.workers"
:type "string"
:category ""
:description "Max number of concurrently provisioned VMs by orchestrator"
:mandatory true
:readOnly false
:order 915}}))
(def connector-reference-attrs-description
{:endpoint
{:displayName "endpoint"
:type "string"
:category ""
:description "Service endpoint for the connector (e.g. :5000)"
:mandatory true
:readOnly false
:order 10}
:objectStoreEndpoint
{:displayName "object.store.endpoint"
:type "string"
:category ""
:description "Cloud Object Store Service endpoint (e.g. :5000)"
:mandatory true
:readOnly false
:order 10}
:securityGroups
{:displayName "security.groups"
:type "string"
:category ""
:description "Orchestrator security groups (comma separated list)"
:mandatory true
:readOnly false
:order 11}
:orchestratorSSHUsername
{:displayName "orchestrator.ssh.username"
:type "string"
:category ""
:description "Orchestrator username"
:mandatory true
:readOnly false
:order 20
:instructions (str "Username used to contextualize the orchestrator VM. Leave this "
"field empty if you are using a native Cloud contextualization.")}
:orchestratorSSHPassword
{:displayName "orchestrator.ssh.password"
:type "password"
:category ""
:description "Orchestrator password"
:mandatory true
:readOnly false
:order 21
:instructions (str "Password used to contextualize the orchestrator VM. Leave this "
"field empty if you are using a native Cloud contextualization.")}
:nativeContextualization
{:displayName "native-contextualization"
:type "enum"
:category ""
:description "Use native cloud contextualisation"
:mandatory true
:readOnly false
:order 30
:enum ["never" "linux-only" "windows-only" "always"]
:instructions (str "Here you can define when SlipStream should use the native Cloud "
"contextualization or when it should try other methods like SSH and WinRM. <br/>")}
:updateClientURL
{:displayName "update.clienturl"
:type "string"
:category ""
:description "URL pointing to the tarball containing the client for the connector"
:mandatory true
:readOnly false
:order 31}})
(defmulti validate-subtype
"Validates the given resource against the specific
ConnectorTemplate subtype schema."
:cloudServiceType)
(defmethod validate-subtype :default
[resource]
(throw (ex-info (str "unknown ConnectorTemplate type: " (:cloudServiceType resource)) resource)))
(defmethod crud/validate
resource-uri
[resource]
(validate-subtype resource))
(defmethod crud/add resource-name
[request]
(throw (r/ex-bad-method request)))
(defmethod crud/retrieve resource-name
[{{uuid :uuid} :params :as request}]
(try
(let [id (str resource-url "/" uuid)]
(-> (get @templates id)
(a/can-view? request)
(r/json-response)))
(catch Exception e
(or (ex-data e) (throw e)))))
(defmethod crud/retrieve-by-id resource-url
[id]
(try
(get @templates id)
(catch Exception e
(or (ex-data e) (throw e)))))
(defmethod crud/edit resource-name
[request]
(throw (r/ex-bad-method request)))
(defmethod crud/delete resource-name
[request]
(throw (r/ex-bad-method request)))
(defmethod crud/query resource-name
[request]
(a/can-view? {:acl collection-acl} request)
(let [wrapper-fn (collection-wrapper-fn resource-name collection-acl collection-uri resource-tag)
options (select-keys request [:identity :query-params :cimi-params :user-name :user-roles])
[count-before-pagination entries] ((juxt count vals) @templates)
wrapped-entries (wrapper-fn request entries)
entries-and-count (assoc wrapped-entries :count count-before-pagination)]
(r/json-response entries-and-count)))
(defmethod crud/do-action [resource-url "describe"]
[{{uuid :uuid} :params :as request}]
(try
(let [id (str resource-url "/" uuid)]
(-> (get @descriptions id)
(a/can-view? request)
(r/json-response)))
(catch Exception e
(or (ex-data e) (throw e)))))
|
cee2dd0ba0e4a49ebe2b99116fa2d8c40eb2885ef8d87cb979a1839c12d0b81a | thosmos/riverdb | routes.cljs | (ns riverdb.ui.routes
(:import [goog History])
(:require
[edn-query-language.core :as eql]
[goog.events :as gevents]
[clojure.string :as str]
[com.fulcrologic.fulcro.application :as fapp]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.rad.routing :as rroute]
[com.fulcrologic.rad.routing.html5-history :as html5 :refer [url->route apply-route!]]
[pushy.core :as pushy]
[theta.log :as log :refer [debug]]
[clojure.string :as str]
[taoensso.timbre :as timbre]
[riverdb.application :refer [SPA]]))
( defonce page - history
; (doto (History.)
; (.setEnabled true)))
;
( defn listen - nav - change [ f ]
; (gevents/listen page-history "navigate" #(f % (.-token %))))
;
( defn change - route [ path ]
; (.setToken page-history (str/join "/" path)))
;
( defn path->route [ path ]
( let [ r - segments ( vec ( rest ( str / split path " / " ) ) ) ]
; (if (seq r-segments)
; r-segments
; ["main"])))
;
( defn change - route - from - nav - event [ app ]
; (fn [_ path]
( / change - route app ( path->route path ) ) ) )
;
( comment ( change - route ( dr / path - to SearchPage ) ) )
;
( defn start - routing ! [ ]
( listen - nav - change # ( change - route - from - nav - event SPA ) ) )
(declare replace!)
(declare route-to!)
(defn find-route-target
  "Walk the app's router query AST along `new-route`, descending through
   nested dynamic routers, and return the final route-target component
   (or nil when no router/target matches)."
  [app-or-comp router new-route]
  (let [app (comp/any->app app-or-comp)
        state-map (fapp/current-state app)
        root-query (comp/get-query router state-map)
        ast (eql/query->ast root-query)
        root (dr/ast-node-for-route ast new-route)]
    (loop [{:keys [component]} root path new-route]
      (when (and component (dr/router? component))
        (let [{:keys [target matching-prefix]} (dr/route-target component path)
              target-ast (some-> target (comp/get-query state-map) eql/query->ast)
              prefix-length (count matching-prefix)
              remaining-path (vec (drop prefix-length path))]
          (if (seq remaining-path)
            ;; segments remain: recurse into the matched target's subtree
            (recur (dr/ast-node-for-route target-ast remaining-path) remaining-path)
            target))))))
(defn path->route
  "Convert a URL path into a route path and parameter map. Returns:
   ```
   {:route [\"path\" \"segment\"]
    :params {:param value}}
   ```
   You can save this value and later use it with `html5/apply-route!`."
  [path]
  (let [segments (str/split path #"/")]
    ;; the first split segment is the empty string before the leading "/"
    {:route  (into [] (rest segments))
     :params {}}))
(defn route-to-path!
  "Apply a URL path as an HTML5 route. The target's :check-session
   component option (if any) is consulted against the current session;
   rejected or empty routes fall back to [\"main\"]."
  [path]
  (let [route (:route (path->route path))
        target (find-route-target SPA
                 (comp/registry-key->class :riverdb.ui.root/TopRouter) route)
        target-opts (comp/component-options target)
        check-fn (:check-session target-opts)
        session (get-in (fapp/current-state SPA) [:component/id :session])
        sesh-valid? (:session/valid? session)
        _ (log/info "URL Routing" path "=>" route "target?" target "target-opts?" target-opts)
        ;; only run the target's check-fn when the session itself is valid
        check-result (if sesh-valid?
                       (if check-fn
                         (check-fn SPA session)
                         true)
                       false)
        reject? (= false check-result)]
    (debug "ROUTE Target" target "reject?" reject? "check-result" check-result)
    (cond
      (and (seq route) reject?)
      (html5/apply-route! SPA ["main"])

      (seq route)
      (html5/apply-route! SPA route)

      :else
      (html5/apply-route! SPA ["main"]))))
;; pushy history: translates each URL token into router segments, runs the
;; target's :check-session gate, and routes (falling back to "/" on reject).
(defonce history
  (pushy/pushy
    (fn [p]
      (let [r-segments (vec (rest (str/split p "/")))
            target (find-route-target SPA
                     (comp/registry-key->class :riverdb.ui.root/TopRouter) r-segments)
            target-opts (comp/component-options target)
            check-fn (:check-session target-opts)
            session (get-in (fapp/current-state SPA) [:component/id :session])
            sesh-valid? (:session/valid? session)
            _ (log/info "URL Routing" p "=>" r-segments "target?" target "target-opts?" target-opts)
            ;; only run the target's check-fn when the session itself is valid
            check-result (if sesh-valid?
                           (if check-fn
                             (check-fn SPA session)
                             true)
                           false)
            reject? (= false check-result)]
        (debug "ROUTE Target" target "reject?" reject? "check-result" check-result)
        (cond
          (and (seq r-segments) reject?)
          (replace! "/")

          (seq r-segments)
          (dr/change-route SPA r-segments)

          :else
          (dr/change-route SPA ["main"]))))
    ;; identity: the history token is already the path string to dispatch
    identity))
(defn start!
  "Begin listening for HTML5 history (pushState) events."
  []
  (pushy/start! history)
  #_(listen-nav-change #(change-route-from-nav-event SPA)))

(defn route-to!
  "Push a new history token (triggers the dispatch fn above)."
  [route-str]
  (pushy/set-token! history route-str)
  #_(.setToken page-history route-str))

(defn replace!
  "Replace the current history token without adding a new entry."
  [route-str]
  (pushy/replace-token! history route-str))
| null | https://raw.githubusercontent.com/thosmos/riverdb/c0a31710e4430113924178bd0a3b9e8304f58009/src/main/riverdb/ui/routes.cljs | clojure | (doto (History.)
(.setEnabled true)))
(gevents/listen page-history "navigate" #(f % (.-token %))))
(.setToken page-history (str/join "/" path)))
(if (seq r-segments)
r-segments
["main"])))
(fn [_ path]
_ (debug "FIND ROUTER TARGET" router state-map) | (ns riverdb.ui.routes
(:import [goog History])
(:require
[edn-query-language.core :as eql]
[goog.events :as gevents]
[clojure.string :as str]
[com.fulcrologic.fulcro.application :as fapp]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.rad.routing :as rroute]
[com.fulcrologic.rad.routing.html5-history :as html5 :refer [url->route apply-route!]]
[pushy.core :as pushy]
[theta.log :as log :refer [debug]]
[clojure.string :as str]
[taoensso.timbre :as timbre]
[riverdb.application :refer [SPA]]))
( defonce page - history
( defn listen - nav - change [ f ]
( defn change - route [ path ]
( defn path->route [ path ]
( let [ r - segments ( vec ( rest ( str / split path " / " ) ) ) ]
( defn change - route - from - nav - event [ app ]
( / change - route app ( path->route path ) ) ) )
( comment ( change - route ( dr / path - to SearchPage ) ) )
( defn start - routing ! [ ]
( listen - nav - change # ( change - route - from - nav - event SPA ) ) )
(declare replace!)
(declare route-to!)
(defn find-route-target [app-or-comp router new-route]
  ;; Walk ROUTER's query AST along NEW-ROUTE and return the deepest route
  ;; target component that the route reaches, or nil when no router/target
  ;; matches.
  (let [app        (comp/any->app app-or-comp)
        state-map  (fapp/current-state app)
        root-query (comp/get-query router state-map)
        ast        (eql/query->ast root-query)
        root       (dr/ast-node-for-route ast new-route)]
    (loop [{:keys [component]} root
           path new-route]
      ;; Stop when we fall off the router chain: only router components can
      ;; consume further route segments.
      (when (and component (dr/router? component))
        (let [{:keys [target matching-prefix]} (dr/route-target component path)
              target-ast     (some-> target (comp/get-query state-map) eql/query->ast)
              prefix-length  (count matching-prefix)
              remaining-path (vec (drop prefix-length path))]
          (if (seq remaining-path)
            ;; Segments remain: descend into the matched target's own query.
            (recur (dr/ast-node-for-route target-ast remaining-path) remaining-path)
            target))))))
(defn path->route
  "Convert a URL path into a route path and parameter map. Returns:
  ```
  {:route [\"path\" \"segment\"]
   :params {:param value}}
  ```
  You can save this value and later use it with `html5/apply-route!`.
  "
  [path]
  (let [segments (str/split path #"/")]
    ;; A leading \"/\" produces an empty first segment; drop it.
    {:route  (vec (rest segments))
     :params {}}))
(defn route-to-path! [path]
  ;; NOTE(review): despite the name, this does not merely translate a route to
  ;; a path — it performs the navigation itself via `html5/apply-route!`.
  (let [route        (:route (path->route path))
        ;; Resolve the route against the top-level router to find the target
        ;; component (may be nil when nothing matches).
        target       (find-route-target SPA
                       (comp/registry-key->class :riverdb.ui.root/TopRouter) route)
        target-opts  (comp/component-options target)
        ;; Optional per-target guard: a :check-session component option may
        ;; veto navigation based on the current session.
        check-fn     (:check-session target-opts)
        session      (get-in (fapp/current-state SPA) [:component/id :session])
        sesh-valid?  (:session/valid? session)
        _            (log/info "URL Routing" path "=>" route "target?" target "target-opts?" target-opts)
        ;; Without a valid session the check fails outright; with one, the
        ;; target's check-fn (when present) gets the final say.
        check-result (if sesh-valid?
                       (if check-fn
                         (check-fn SPA session)
                         true)
                       false)
        ;; Only an explicit #f rejects; nil from a check-fn does NOT reject.
        reject?      (= false check-result)]
    (debug "ROUTE Target" target "reject?" reject? "check-result" check-result)
    (cond
      ;; Rejected non-empty route: fall back to the "main" screen.
      (and (seq route) reject?)
      (html5/apply-route! SPA ["main"])
      ;; Accepted non-empty route: navigate to it.
      (seq route)
      (html5/apply-route! SPA route)
      ;; Empty route (e.g. "/"): default to "main".
      :else
      (html5/apply-route! SPA ["main"]))))
(defonce history
  ;; Browser-history listener: pushy invokes the handler below with the new
  ;; path string on every URL change.  The session/guard logic here mirrors
  ;; `route-to-path!` above — keep the two in sync.
  (pushy/pushy
    (fn [p]
      ;; NOTE(review): split is called with the string "/" here but with the
      ;; regex #"/" in path->route — confirm both behave identically in cljs.
      (let [r-segments   (vec (rest (str/split p "/")))
            target       (find-route-target SPA
                           (comp/registry-key->class :riverdb.ui.root/TopRouter) r-segments)
            target-opts  (comp/component-options target)
            ;; Optional :check-session guard declared on the target component.
            check-fn     (:check-session target-opts)
            session      (get-in (fapp/current-state SPA) [:component/id :session])
            sesh-valid?  (:session/valid? session)
            _            (log/info "URL Routing" p "=>" r-segments "target?" target "target-opts?" target-opts)
            ;; Allow the route only for a valid session, further subject to
            ;; the target's check-fn when one is present.
            check-result (if sesh-valid?
                           (if check-fn
                             (check-fn SPA session)
                             true)
                           false)
            ;; Only an explicit #f rejects.
            reject?      (= false check-result)]
        (debug "ROUTE Target" target "reject?" reject? "check-result" check-result)
        (cond
          ;; Rejected: rewrite the URL back to the root.
          (and (seq r-segments) reject?)
          (replace! "/")
          ;; Accepted: hand the segments to the dynamic router.
          (seq r-segments)
          (dr/change-route SPA r-segments)
          ;; Empty path: route to the default "main" screen.
          :else
          (dr/change-route SPA ["main"]))))
    identity))
(defn start!
  "Start the pushy history listener (see `history` above)."
  []
  (pushy/start! history)
  #_(listen-nav-change #(change-route-from-nav-event SPA)))

(defn route-to!
  "Navigate by pushing ROUTE-STR as a new browser-history token."
  [route-str]
  (pushy/set-token! history route-str)
  #_(.setToken page-history route-str))

(defn replace!
  "Navigate by replacing the current browser-history token with ROUTE-STR
  (adds no new history entry)."
  [route-str]
  (pushy/replace-token! history route-str))
|
a68c20f385ee4d0252eeb250fdbeda6c85a26cdaa43206653df81edb4e97cb22 | Gabriella439/HasCal | HourClock.hs | | This is based on the [ HourClock example]( / tlaplus / Examples / blob / master / specifications / SpecifyingSystems / HourClock / HourClock.tla )
from figure 2.1 on page 20 in Lamport 's \"Specifying Systems\ " book :
> ---------------------- MODULE HourClock ----------------------
> EXTENDS Naturals
> VARIABLE hr
> HCini = = hr \in ( 1 .. 12 )
> HCnxt = = hr ' = IF hr # 12 THEN hr + 1 ELSE 1
> HC = = HCini /\ [ ] [ HCnxt]_hr
> --------------------------------------------------------------
> THEOREM HC = > [ ] HCini
> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
from figure 2.1 on page 20 in Lamport's \"Specifying Systems\" book:
> ---------------------- MODULE HourClock ----------------------
> EXTENDS Naturals
> VARIABLE hr
> HCini == hr \in (1 .. 12)
> HCnxt == hr' = IF hr # 12 THEN hr + 1 ELSE 1
> HC == HCini /\ [][HCnxt]_hr
> --------------------------------------------------------------
> THEOREM HC => []HCini
> ==============================================================
-}
# #
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
module HasCal.Test.HourClock where
import HasCal
import Prelude hiding ((.))
import Test.Tasty (TestTree)
import qualified Test.Tasty.HUnit as HUnit
import qualified Control.Monad as Monad
-- | The single piece of global state: the current hour reading.
data Global = Global { _hr :: Int }
    deriving (Eq, Generic, Hashable, Show, ToJSON)

-- | Coroutine labels: the initial state and the "advance one hour" step
-- (HCini / HCnxt in the TLA+ spec quoted in the module header).
data Label = Ini | Nxt
    deriving (Eq, Generic, Hashable, Show, ToJSON)

-- Generates the `hr` lens for the `Global` record.
makeLenses ''Global
-- | Advance the clock by one hour, wrapping from 12 back to 1.
tick :: Int -> Int
tick hour = succ (hour `mod` 12)
-- | Model-check the hour clock: starting from every hour in 1..12 and
-- ticking forever, the hour must always remain within 1..12 (the HCini
-- invariant from the TLA+ spec in the module header).
test_hourClock :: TestTree
test_hourClock = HUnit.testCase "Hour clock" do
    model defaultModel
        { termination = False  -- the clock runs forever; don't demand termination
        , startingGlobals = do
            -- Explore every possible starting hour.
            _hr <- [1 .. 12]
            return Global{..}
        , coroutine = Coroutine
            { startingLabel  = Ini
            , startingLocals = pure ()
            , process = Monad.forever do
                yield Nxt
                global.hr %= tick
            }
          -- Invariant checked in every reachable state.
        , property = always . viewing (state . hr . to (`elem` [ 1 .. 12 ]))
        }
| null | https://raw.githubusercontent.com/Gabriella439/HasCal/cbc58a99eaca8a1b30440fc0cd68ca1f98af4c2f/tasty/HasCal/Test/HourClock.hs | haskell | -------------------- MODULE HourClock ----------------------
------------------------------------------------------------
-------------------- MODULE HourClock ----------------------
------------------------------------------------------------
# LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric # | | This is based on the [ HourClock example]( / tlaplus / Examples / blob / master / specifications / SpecifyingSystems / HourClock / HourClock.tla )
from figure 2.1 on page 20 in Lamport 's \"Specifying Systems\ " book :
> EXTENDS Naturals
> VARIABLE hr
> HCini = = hr \in ( 1 .. 12 )
> HCnxt = = hr ' = IF hr # 12 THEN hr + 1 ELSE 1
> HC = = HCini /\ [ ] [ HCnxt]_hr
> THEOREM HC = > [ ] HCini
> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
from figure 2.1 on page 20 in Lamport's \"Specifying Systems\" book:
> EXTENDS Naturals
> VARIABLE hr
> HCini == hr \in (1 .. 12)
> HCnxt == hr' = IF hr # 12 THEN hr + 1 ELSE 1
> HC == HCini /\ [][HCnxt]_hr
> THEOREM HC => []HCini
> ==============================================================
-}
# #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
module HasCal.Test.HourClock where
import HasCal
import Prelude hiding ((.))
import Test.Tasty (TestTree)
import qualified Test.Tasty.HUnit as HUnit
import qualified Control.Monad as Monad
data Global = Global { _hr :: Int }
deriving (Eq, Generic, Hashable, Show, ToJSON)
data Label = Ini | Nxt
deriving (Eq, Generic, Hashable, Show, ToJSON)
makeLenses ''Global
-- | Advance the clock by one hour, wrapping from 12 back to 1.
tick :: Int -> Int
tick hour = hour `mod` 12 + 1
test_hourClock :: TestTree
test_hourClock = HUnit.testCase "Hour clock" do
model defaultModel
{ termination = False
, startingGlobals = do
_hr <- [1 .. 12]
return Global{..}
, coroutine = Coroutine
{ startingLabel = Ini
, startingLocals = pure ()
, process = Monad.forever do
yield Nxt
global.hr %= tick
}
, property = always . viewing (state . hr . to (`elem` [ 1 .. 12 ]))
}
|
1ea4fe19940e072eec42d87f3f357278f0c46bbc898823e732d98ac2d413ef37 | broadinstitute/wfl | automation_test.clj | (ns wfl.system.automation-test
(:require [clojure.test :refer [deftest is]]
[wfl.tools.fixtures :as fixtures]
[wfl.tools.resources :as resources]
[wfl.service.cromwell :refer [final?]]
[wfl.tools.endpoints :as endpoints]
[wfl.tools.workloads :as workloads]))
;; Test fixtures for the sarscov2_illumina_full end-to-end run.
;; NOTE(review): several of these (method-configuration, snapshot-column,
;; snapshot-readers, source-dataset, source-table, workspace-table) are not
;; referenced below — confirm they are still needed.
(def firecloud-group "workflow-launcher-dev")
(def method-configuration "cdc-covid-surveillance/sarscov2_illumina_full")
(def snapshot-column "run_date")
(def snapshot-readers [""])
(def source-dataset "cd25d59e-1451-44d0-8a24-7669edb9a8f8")
(def source-table "flowcells")
(def workspace-table "flowcell")
;; Template workspace cloned once per test run (see the deftest below).
(def workspace-to-clone "wfl-dev/CDC_Viral_Sequencing")
(defn ^:private covid-workload-request
  "Build a covid workload request: a fixed TDR snapshot as source, the
  sarscov2_illumina_full method configuration as executor, and WORKSPACE as
  both execution workspace and sink."
  [workspace]
  {:source   {:name      "TDR Snapshots"
              :snapshots ["f9242ab8-c522-4305-966d-7c51419377ab"]}
   :executor {:name                "Terra"
              :workspace           workspace
              :methodConfiguration "wfl-dev/sarscov2_illumina_full"
              :fromSource          "importSnapshot"}
   :sink     {:name        "Terra Workspace"
              :workspace   workspace
              :entityType  "run_date"
              :identifier  "run_date"
              ;; Mapping from workflow outputs to sink entity attributes.
              :fromOutputs (resources/read-resource
                            "sarscov2_illumina_full/entity-from-outputs.edn")
              :skipValidation true}
   :project  @workloads/project
   :creator  @workloads/email
   :labels   ["hornet:test"]})
(deftest test-automate-sarscov2-illumina-full
  ;; End-to-end: clone the template workspace, create and start a workload
  ;; against the clone, then wait until every workflow reaches a final
  ;; Cromwell status.
  (fixtures/with-temporary-workspace-clone
    workspace-to-clone
    firecloud-group
    (fn [workspace]
      (let [finished? (comp final? :status)
            workload  (endpoints/create-workload
                       (covid-workload-request workspace))]
        (endpoints/start-workload workload)
        (workloads/when-finished
         #(is (every? finished? (endpoints/get-workflows %)))
         workload)))))
| null | https://raw.githubusercontent.com/broadinstitute/wfl/d6cedb8a2ab97615b21936b045ebb368589a7083/api/test/wfl/system/automation_test.clj | clojure | (ns wfl.system.automation-test
(:require [clojure.test :refer [deftest is]]
[wfl.tools.fixtures :as fixtures]
[wfl.tools.resources :as resources]
[wfl.service.cromwell :refer [final?]]
[wfl.tools.endpoints :as endpoints]
[wfl.tools.workloads :as workloads]))
(def firecloud-group "workflow-launcher-dev")
(def method-configuration "cdc-covid-surveillance/sarscov2_illumina_full")
(def snapshot-column "run_date")
(def snapshot-readers [""])
(def source-dataset "cd25d59e-1451-44d0-8a24-7669edb9a8f8")
(def source-table "flowcells")
(def workspace-table "flowcell")
(def workspace-to-clone "wfl-dev/CDC_Viral_Sequencing")
(defn ^:private covid-workload-request
"Build a covid workload request."
[workspace]
{:source {:name "TDR Snapshots"
:snapshots ["f9242ab8-c522-4305-966d-7c51419377ab"]}
:executor {:name "Terra"
:workspace workspace
:methodConfiguration "wfl-dev/sarscov2_illumina_full"
:fromSource "importSnapshot"}
:sink {:name "Terra Workspace"
:workspace workspace
:entityType "run_date"
:identifier "run_date"
:fromOutputs (resources/read-resource
"sarscov2_illumina_full/entity-from-outputs.edn")
:skipValidation true}
:project @workloads/project
:creator @workloads/email
:labels ["hornet:test"]})
(deftest test-automate-sarscov2-illumina-full
(fixtures/with-temporary-workspace-clone
workspace-to-clone
firecloud-group
(fn [workspace]
(let [finished? (comp final? :status)
workload (endpoints/create-workload
(covid-workload-request workspace))]
(endpoints/start-workload workload)
(workloads/when-finished
#(is (every? finished? (endpoints/get-workflows %)))
workload)))))
|
|
1645de9bdb522f99c37ae045a2149e0a944d0589c83dffc8720812467ba657d6 | facebook/flow | dependency_sigs.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Context requirements for this analysis: read-only accessors for compiler
    options plus sinks that record checks discovered during traversal. *)
module type C = sig
  type t

  (* Option / environment accessors. *)
  val enable_enums : t -> bool

  val file : t -> File_key.t

  val jsx : t -> Options.jsx_mode

  val react_runtime : t -> Options.react_runtime

  val enable_const_params : t -> bool

  val lti : t -> bool

  (* Sinks recording checks to be verified later. *)
  val add_literal_subtypes : t -> ALoc.t * Env_api.literal_check -> unit

  val add_matching_props : t -> string * ALoc.t * ALoc.t -> unit

  val add_exhaustive_check : t -> ALoc.t -> ALoc.t list * bool -> unit

  (* Reads back the data recorded by [add_exhaustive_check]. *)
  val exhaustive_check : t -> ALoc.t -> ALoc.t list * bool
end
(** Error-reporting interface, abstracted over the context type [cx]. *)
module type F = sig
  type cx

  val add_output : cx -> ?trace:Type.trace -> ALoc.t Error_message.t' -> unit
end
| null | https://raw.githubusercontent.com/facebook/flow/261a19c10fbd3f3ae9a02336bbe93c5ae644be3e/src/analysis/env_builder/dependency_sigs.ml | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
module type C = sig
type t
val enable_enums : t -> bool
val file : t -> File_key.t
val jsx : t -> Options.jsx_mode
val react_runtime : t -> Options.react_runtime
val enable_const_params : t -> bool
val lti : t -> bool
val add_literal_subtypes : t -> ALoc.t * Env_api.literal_check -> unit
val add_matching_props : t -> string * ALoc.t * ALoc.t -> unit
val add_exhaustive_check : t -> ALoc.t -> ALoc.t list * bool -> unit
val exhaustive_check : t -> ALoc.t -> ALoc.t list * bool
end
module type F = sig
type cx
val add_output : cx -> ?trace:Type.trace -> ALoc.t Error_message.t' -> unit
end
|
|
99376ce48d235ce995384df33e761867a3492020ba51d1be0a057786668dbc64 | zenspider/schemers | exercise.3.21.scm | #lang racket/base
(require "../lib/testes.scm")
(require "../lib/myutils.scm")
;; Exercise 3.21
;; Ben Bitdiddle decides to test the queue
;; implementation described above. He types in the procedures to the
;; Lisp interpreter and proceeds to try them out:
;;
;; (define q1 (make-queue))
;;
;; (insert-queue! q1 'a)
;; ((a) a)
;;
;; (insert-queue! q1 'b)
;; ((a b) b)
;;
;; (delete-queue! q1)
;; ((b) b)
;;
;; (delete-queue! q1)
;; (() b)
;;
;; "It's all wrong!" he complains. "The interpreter's response shows
;; that the last item is inserted into the queue twice. And when I
delete both items , the second ` b ' is still there , so the queue
is n't empty , even though it 's supposed to be . "
suggests that has misunderstood what is happening . " It 's not
;; that the items are going into the queue twice," she explains.
;; "It's just that the standard Lisp printer doesn't know how to make
;; sense of the queue representation. If you want to see the queue
;; printed correctly, you'll have to define your own print procedure
for queues . " Explain what is talking about . In particular ,
show why 's examples produce the printed results that they do .
;; Define a procedure `print-queue' that takes a queue as input and
;; prints the sequence of items in the queue.
;; The queue is a pair whose car is the list of items (the front pointer).
(define front-ptr car)
;; Display only the item list, hiding the duplicated rear-pointer cell that
;; makes the raw printed representation confusing (see transcript above).
(define (print-queue q) (display (front-ptr q)))
;; (define q1 (make-queue))
;; (print-queue (insert-queue! q1 'a)) ; => (a)
;; (print-queue (insert-queue! q1 'b)) ; => (a b)
;; (print-queue (delete-queue! q1)) ; => (b)
;; (print-queue (delete-queue! q1)) ; => ()
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_3/exercise.3.21.scm | scheme | implementation described above. He types in the procedures to the
Lisp interpreter and proceeds to try them out:
(define q1 (make-queue))
(insert-queue! q1 'a)
((a) a)
(insert-queue! q1 'b)
((a b) b)
(delete-queue! q1)
((b) b)
(delete-queue! q1)
(() b)
"It's all wrong!" he complains. "The interpreter's response shows
that the last item is inserted into the queue twice. And when I
that the items are going into the queue twice," she explains.
"It's just that the standard Lisp printer doesn't know how to make
sense of the queue representation. If you want to see the queue
printed correctly, you'll have to define your own print procedure
Define a procedure `print-queue' that takes a queue as input and
prints the sequence of items in the queue.
(define q1 (make-queue))
(print-queue (insert-queue! q1 'a)) ; => (a)
(print-queue (insert-queue! q1 'b)) ; => (a b)
(print-queue (delete-queue! q1)) ; => (b)
(print-queue (delete-queue! q1)) ; => () | #lang racket/base
(require "../lib/testes.scm")
(require "../lib/myutils.scm")
Exercise 3.21
decides to test the queue
delete both items , the second ` b ' is still there , so the queue
is n't empty , even though it 's supposed to be . "
suggests that has misunderstood what is happening . " It 's not
for queues . " Explain what is talking about . In particular ,
show why 's examples produce the printed results that they do .
(define front-ptr car)
(define (print-queue q) (display (front-ptr q)))
|
6079ac4fd065401b62ebbbd5384e0ba2965396b0af6442cef3a8016db94ad27b | MLstate/opalang | gen_opa_manpage.ml |
Copyright © 2011 , 2012 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011, 2012 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
(* FIXME: should be moved in tools *)

(** Print the manpage for the opa compiler. S3 version. *)

(* Load warnings of opa s3 applications:
   required by some toplevel side effects in the modules of OpaEnv. *)
let _ = WarningClass.load_set S3Warnings.warning_set

(* Write the manpage to stdout... *)
let _ = OpaEnv.Options.write_manpage stdout

(* ...and exit cleanly through OManager. *)
let () = OManager.exit 0
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/compiler/opa/gen_opa_manpage.ml | ocaml | FIXME: should be moved in tools
Load warnings of opa s3 applications |
Copyright © 2011 , 2012 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011, 2012 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
(** Print the manpage for the opa compiler. S3 version. *)

(* Load warnings of opa s3 applications:
   required by some toplevel side effects in the modules of OpaEnv. *)
let _ = WarningClass.load_set S3Warnings.warning_set

(* Write the manpage to stdout and exit cleanly through OManager. *)
let _ = OpaEnv.Options.write_manpage stdout

let () = OManager.exit 0
|
0051e90a4cae276c283927eec0a5843f5af06af233971381511c8cf1c91750ae | a-sassmannshausen/guile-config | getopt-long.scm | Config --- Configuration specification in GNU
Copyright © 2017 < >
;;;
This file is part of Guile - Config .
;;;
;;; Config is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation ; either version 3 of the License , or ( at your option )
;;; any later version.
;;;
;;; Config is distributed in the hope that it will be useful, but WITHOUT ANY
;;; WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
;;; FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
;;; details.
;;;
You should have received a copy of the GNU General Public License
along with Guile - Config ; if not , contact :
;;;
;;; Free Software Foundation Voice: +1-617-542-5942
59 Temple Place - Suite 330 Fax : +1 - 617 - 542 - 2652
Boston , MA 02111 - 1307 , USA
(define-module (config getopt-long)
#:use-module (config api)
#:use-module (ice-9 getopt-long)
#:use-module (ice-9 match)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-26)
#:export (read-commandline))
We no longer use predicate functionality because using it
;; would result in running an option's handler twice during parsing. This is
;; problematic when that handler performs side effects.
(define (read-commandline commandline settings codex)
"Return a codex, with commandline merged into codex, getopt-long style."
Turn Codex into - long construct , then query with
;; commandline values and feed those back into codex.
;;
;; Specifically we pass reagents modified (not raw) commandline values to
;; getopt-long.
;; Replace codex with updated codex
;; getopt-long here causes exits. For testing purposes these can be caught
;; with catch of 'quit signal.
;; The error messages it emits are:
;; $script: option must be specified: --$option
;; $script: no such option: --$option
;; $script: option must be specified with argument: --$option
(define (augment-keywords)
"Return additional switches for help, usage, version, if requested."
(filter-map (match-lambda
((id chr #t)
(switch
(name id) (character chr) (test boolean?) (default #f)
(handler identity) (optional? #f)))
((_ _ (or #f ($ <empty>))) #f)
(n (throw 'augment-keywords "no matching pattern" n)))
`((help #\h ,(codex-metadatum 'generate-help? codex))
(usage #f ,(codex-metadatum 'generate-usage? codex))
(version #f ,(codex-metadatum 'generate-version? codex))
(cmdtree #f ,(codex-metadatum 'generate-cmdtree? codex)))))
(let* ((vls (codex-valus codex))
(kwds (append (valus-keywords vls) (augment-keywords)))
(args (valus-arguments vls))
;; Here we insert the subcommand path to the command we're
executing in commandline , so that emits the full
;; path
(err-line (cons (string-join (cons "error:" (full-command codex)))
commandline))
(gtl (getopt-long err-line (codex->getopt-spec kwds))))
(set-codex-valus
codex
(valus
(map (lambda (kwd)
;; In the specific cases that we are dealing with a switch or a
;; setting, and a value is provided on the commandline, we need
;; to run that value through handler before updating the keyword
;; value. This is done in the code below.
(cond ((secret? kwd) kwd) ; <secret> is never updated
< switch > just takes cmdline value
(match (option-ref gtl (switch-name kwd) (empty))
(($ <empty>) kwd) ; No need to run handler or test
(value
Update our option value from cmdline after running
;; handler and test.
(set-keyword-default
kwd (test-kwd/arg ((switch-handler kwd) value)
(switch-name kwd)
(switch-test kwd) codex
"keyword")))))
< setting > : config - file
(match (option-ref gtl (setting-name kwd) (empty))
(($ <empty>)
;; Update from config file after running tests &
;; handler or use default
(or (set-keyword-default
kwd
(and=> (assoc (setting-name kwd) settings)
(match-lambda
((name . value)
(test-kwd/arg value name
(setting-test kwd) codex
"keyword")))))
kwd))
(value
Update our option value from cmdline after running
;; handler and test.
(set-keyword-default
kwd (test-kwd/arg ((setting-handler kwd) value)
(setting-name kwd)
(setting-test kwd) codex
"keyword")))))))
kwds)
Arguments ca n't be retrieved by name with . Instead ,
;; fetch all args, then handle them ourselves.
(parse-arguments args (option-ref gtl '() '()) codex)))))
(define* (parse-arguments arguments cmd-values codex #:optional (result '()))
  "Cycle through ARGUMENTS, the configuration's defined arguments, &
CMD-VALUES the arguments provided on the command-line, returning a new list of
arguments in RESULT after testing them & updating them from CMD-VALUES."
  (cond ((null? arguments)
         ;; Processed all arguments, -> done.
         (reverse result))
        ((null? cmd-values)
         ;; We're out of cmdline arguments, -> defaults for rest.
         ;; Each remaining argument must be optional or we abort (see
         ;; optional-argument below).
         (append (reverse result) (map (cut optional-argument <> codex)
                                       arguments)))
        (else
         ;; Pair the next defined argument with the next commandline value:
         ;; run its handler, check its test, then recurse on the rest.
         (parse-arguments
          (cdr arguments)
          (cdr cmd-values)
          codex
          (cons
           (set-argument-default
            (first arguments)
            (test-kwd/arg
             ((argument-handler (first arguments)) (first cmd-values))
             (argument-name (first arguments))
             (argument-test (first arguments))
             codex "argument"))
           result)))))
(define (optional-argument argument codex)
  "Return ARGUMENT if it is optional or emit an error."
  (match (argument-optional? argument)
    (#t argument)
    (_
     ;; Mandatory argument missing from the commandline: report and abort
     ;; through the 'quit exception.
     (format
      ;; We want to emit:
      ;; error: FULL-COMMAND: argument must be specified: NAME
      #t "error: ~a: argument must be specified: ~a~%"
      (string-join (full-command codex)) (argument-name argument))
     (throw 'quit 'optional-arg))))
(define (test-kwd/arg value name test codex type)
  "Return VALUE if it passes TEST or throw an error pointing at NAME of TYPE."
  (match (test value)
    (#f
     ;; Predicate failed: report (keywords get a -- prefix, arguments none)
     ;; and abort through the 'quit exception.
     (format
      ;; We want to emit:
      ;; error: FULL-COMMAND: ARGUMENT|KEYWORD predicate failed: [--]NAME
      #t "error: ~a: ~a predicate failed: ~a~a~%"
      (string-join (full-command codex)) type
      (match type ("argument" "") (_ "--")) name)
     (throw 'quit 'test-kwd/arg))
    (_ value)))
(define (codex->getopt-spec keywords)
  "Return the getopt-long option-spec corresponding to the <setting> and
<switch> keywords in KEYWORDS."
  (reverse
   (fold
    (lambda (kwd done)
      (let ((character (keyword-character kwd)))
        (match kwd
          ;; A switch is only mandatory if optional is #f
          ;; and it has no default value (<empty>).
          (($ <switch> name ($ <empty>) test handler _ _ _ _ #f)
           (cons (getopt-spec name test handler character #f #t) done))
          (($ <switch> name _ test handler _ _ _ _ optional)
           (cons (getopt-spec name test handler character optional #f) done))
          ;; Settings are never passed as required here; they may also be
          ;; satisfied from configuration files.
          (($ <setting> name ($ <empty>) test handler _ _ _ _ optional)
           (cons (getopt-spec name test handler character optional #f) done))
          (($ <setting> name _ test handler _ _ _ _ optional)
           (cons (getopt-spec name test handler character optional #f) done))
          ;; Anything else contributes nothing to the spec.
          (_ done))))
    '()
    ;; <secret> keywords never appear on the commandline.
    (filter (negate secret?)
            keywords))))
(define (getopt-spec name test handler single-char optional? required)
  "Create a getopt-long spec entry from NAME, TEST, HANDLER, SINGLE-CHAR,
OPTIONAL? and REQUIRED."
  (define (value-entry)
    (match (procedure-name test)
      ;; If our test is boolean, we parse params as flags
      ('boolean? '((value #f)))
      ;; If optional?, parse param value as optional, else as valued.
      (_ (if optional?
             '((value optional))
             '((value #t))))))
  ;; NOTE(review): dispatching on `procedure-name` means only a predicate
  ;; literally named `boolean?` is treated as a flag — confirm wrapped or
  ;; anonymous predicates are intended to take a value.
  (apply list name `(required? ,required)
         (match single-char
           ((? char?) (cons `(single-char ,single-char)
                            (value-entry)))
           (#f (value-entry))
           (n (throw 'getopt-spec "no matching pattern" n)))))
| null | https://raw.githubusercontent.com/a-sassmannshausen/guile-config/b05957743ee8ab8d111683697a56e46a82429b6f/config/getopt-long.scm | scheme |
Config is free software; you can redistribute it and/or modify it under
either version 3 of the License , or ( at your option )
any later version.
Config is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
if not , contact :
Free Software Foundation Voice: +1-617-542-5942
would result in running an option's handler twice during parsing. This is
problematic when that handler performs side effects.
commandline values and feed those back into codex.
Specifically we pass reagents modified (not raw) commandline values to
getopt-long.
Replace codex with updated codex
getopt-long here causes exits. For testing purposes these can be caught
with catch of 'quit signal.
The error messages it emits are:
$script: option must be specified: --$option
$script: no such option: --$option
$script: option must be specified with argument: --$option
Here we insert the subcommand path to the command we're
path
In the specific cases that we are dealing with a switch or a
setting, and a value is provided on the commandline, we need
to run that value through handler before updating the keyword
value. This is done in the code below.
<secret> is never updated
No need to run handler or test
handler and test.
Update from config file after running tests &
handler or use default
handler and test.
fetch all args, then handle them ourselves.
Processed all arguments, -> done.
We're out of cmdline arguments, -> defaults for rest
We want to emit:
error: FULL-COMMAND: argument must be specified: NAME
We want to emit:
A switch is only mandatory if optional is #f
If our test is boolean, we parse params as flags
If optional?, parse param value as optional, else as valued. | Config --- Configuration specification in GNU
Copyright © 2017 < >
This file is part of Guile - Config .
the terms of the GNU General Public License as published by the Free
You should have received a copy of the GNU General Public License
59 Temple Place - Suite 330 Fax : +1 - 617 - 542 - 2652
Boston , MA 02111 - 1307 , USA
(define-module (config getopt-long)
#:use-module (config api)
#:use-module (ice-9 getopt-long)
#:use-module (ice-9 match)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-26)
#:export (read-commandline))
We no longer use predicate functionality because using it
(define (read-commandline commandline settings codex)
"Return a codex, with commandline merged into codex, getopt-long style."
Turn Codex into - long construct , then query with
(define (augment-keywords)
"Return additional switches for help, usage, version, if requested."
(filter-map (match-lambda
((id chr #t)
(switch
(name id) (character chr) (test boolean?) (default #f)
(handler identity) (optional? #f)))
((_ _ (or #f ($ <empty>))) #f)
(n (throw 'augment-keywords "no matching pattern" n)))
`((help #\h ,(codex-metadatum 'generate-help? codex))
(usage #f ,(codex-metadatum 'generate-usage? codex))
(version #f ,(codex-metadatum 'generate-version? codex))
(cmdtree #f ,(codex-metadatum 'generate-cmdtree? codex)))))
(let* ((vls (codex-valus codex))
(kwds (append (valus-keywords vls) (augment-keywords)))
(args (valus-arguments vls))
executing in commandline , so that emits the full
(err-line (cons (string-join (cons "error:" (full-command codex)))
commandline))
(gtl (getopt-long err-line (codex->getopt-spec kwds))))
(set-codex-valus
codex
(valus
(map (lambda (kwd)
< switch > just takes cmdline value
(match (option-ref gtl (switch-name kwd) (empty))
(value
Update our option value from cmdline after running
(set-keyword-default
kwd (test-kwd/arg ((switch-handler kwd) value)
(switch-name kwd)
(switch-test kwd) codex
"keyword")))))
< setting > : config - file
(match (option-ref gtl (setting-name kwd) (empty))
(($ <empty>)
(or (set-keyword-default
kwd
(and=> (assoc (setting-name kwd) settings)
(match-lambda
((name . value)
(test-kwd/arg value name
(setting-test kwd) codex
"keyword")))))
kwd))
(value
Update our option value from cmdline after running
(set-keyword-default
kwd (test-kwd/arg ((setting-handler kwd) value)
(setting-name kwd)
(setting-test kwd) codex
"keyword")))))))
kwds)
Arguments ca n't be retrieved by name with . Instead ,
(parse-arguments args (option-ref gtl '() '()) codex)))))
(define* (parse-arguments arguments cmd-values codex #:optional (result '()))
"Cycle through ARGUMENTS, the configuration's defined arguments, &
CMD-VALUES the arguments provided on the command-line, returning a new list of
arguments in RESULT after testing them & updating them from CMD-VALUES."
(cond ((null? arguments)
(reverse result))
((null? cmd-values)
(append (reverse result) (map (cut optional-argument <> codex)
arguments)))
(else
(parse-arguments
(cdr arguments)
(cdr cmd-values)
codex
(cons
(set-argument-default
(first arguments)
(test-kwd/arg
((argument-handler (first arguments)) (first cmd-values))
(argument-name (first arguments))
(argument-test (first arguments))
codex "argument"))
result)))))
(define (optional-argument argument codex)
  "Return ARGUMENT if it is optional or emit an error."
  (if (eq? #t (argument-optional? argument))
      argument
      (begin
        ;; A mandatory argument is missing: report it and bail out.
        (format
         #t "error: ~a: argument must be specified: ~a~%"
         (string-join (full-command codex)) (argument-name argument))
        (throw 'quit 'optional-arg))))
(define (test-kwd/arg value name test codex type)
  "Return VALUE if it passes TEST or throw an error pointing at NAME of TYPE."
  (match (test value)
    (#f
     ;; error: FULL-COMMAND: ARGUMENT|KEYWORD predicate failed: [--]NAME
     ;; (keywords get a "--" prefix in the report; arguments do not)
     (format
      #t "error: ~a: ~a predicate failed: ~a~a~%"
      (string-join (full-command codex)) type
      (match type ("argument" "") (_ "--")) name)
     (throw 'quit 'test-kwd/arg))
    (_ value)))
(define (codex->getopt-spec keywords)
  "Return the getopt-long option-spec corresponding to the <setting> and
<switch> keywords in KEYWORDS."
  ;; Secret keywords never surface on the command line, so drop them first.
  (let ((public-keywords (filter (negate secret?) keywords)))
    (reverse
     (fold
      (lambda (keyword acc)
        (let ((char (keyword-character keyword)))
          (match keyword
            ;; A <switch> with no default and non-optional value is required.
            (($ <switch> name ($ <empty>) test handler _ _ _ _ #f)
             (cons (getopt-spec name test handler char #f #t) acc))
            (($ <switch> name _ test handler _ _ _ _ optional)
             (cons (getopt-spec name test handler char optional #f) acc))
            (($ <setting> name ($ <empty>) test handler _ _ _ _ optional)
             (cons (getopt-spec name test handler char optional #f) acc))
            (($ <setting> name _ test handler _ _ _ _ optional)
             (cons (getopt-spec name test handler char optional #f) acc))
            ;; Anything else contributes no spec entry.
            (_ acc))))
      '()
      public-keywords))))
(define (getopt-spec name test handler single-char optional? required)
  "Create a getopt-long spec entry from NAME, TEST, HANDLER, SINGLE-CHAR,
OPTIONAL? and REQUIRED."
  (define (value-entry)
    ;; Boolean flags take no value; otherwise the value may be optional
    ;; or mandatory depending on OPTIONAL?.
    (if (eq? (procedure-name test) 'boolean?)
        '((value #f))
        (if optional?
            '((value optional))
            '((value #t)))))
  (apply list name `(required? ,required)
         (cond ((char? single-char)
                (cons `(single-char ,single-char) (value-entry)))
               ((not single-char)
                (value-entry))
               (else
                (throw 'getopt-spec "no matching pattern" single-char)))))
-- Source file: ProofOfBurn.hs (iagon-tech/proof-of-burn-cardano)
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-ignore-interface-pragmas #-}
-- | This is a burner contract,
--
-- And add function implementations (and rename them to
-- something suitable) for the endpoints:
-- * lock - which locks the value to be redeemed by an addressee;
-- * redeem - which redeems the locked value;
-- * burn - which burns the value instead of locking it.
--
-- In order to check that value was indeed burned, one needs to run a `burned` script with the same commitment value.
module ProofOfBurn where
import Control.Monad (void, when, forM, forM_)
import Control.Applicative (liftA3)
import Control.Lens ( foldOf, folded, Field1(_1), Field4(_4), to )
import Data.Bits (xor)
import Data.Functor (($>))
import Data.Char (ord, chr)
import qualified Data.Aeson as Aeson
import Data.Aeson.TH
import Data.Sequences (snoc, unsnoc)
import Data.Maybe (fromJust, catMaybes)
import qualified Data.Map.Strict as Map
import qualified Data.List.NonEmpty as NonEmpty(toList)
import Plutus.Contract
( (>>),
logInfo,
logError,
endpoint,
ownPubKeyHash,
submitTxConstraints,
submitTxConstraintsSpending,
awaitTxConfirmed,
utxosAt,
utxosTxOutTxAt,
collectFromScript,
selectList,
tell,
Endpoint,
Contract,
Promise,
AsContractError,
type (.\/), ContractError (OtherError) )
import Plutus.ChainIndex.Client ()
import qualified PlutusTx
import qualified PlutusTx.IsData as PlutusTx
import PlutusTx.Prelude
( otherwise,
return,
(>>=),
Bool(False),
Maybe(..),
Either(Right, Left),
(.),
flip,
sha3_256,
either,
elem,
const,
($),
traceError,
traceIfFalse,
BuiltinByteString,
Eq((==)),
Functor(fmap),
Semigroup((<>)), Monoid(mempty), foldr, BuiltinString )
import PlutusTx.Builtins.Internal (BuiltinByteString(..), decodeUtf8)
import Ledger (Address(..), Datum(..), scriptAddress, datumHash)
import Ledger.AddressMap (UtxoMap)
import qualified Ledger.Constraints as Constraints
import qualified Ledger.Typed.Scripts as Scripts
import Ledger.Typed.Scripts.Validators(ValidatorType)
import Ledger.Value (Value, isZero, valueOf)
import Ledger.Tx
( TxOutRef,
ChainIndexTxOut(PublicKeyChainIndexTxOut, ScriptChainIndexTxOut,
_ciTxOutDatum),
_ScriptChainIndexTxOut,
getCardanoTxId,
Address, getCardanoTxUnspentOutputsTx )
import Plutus.V1.Ledger.Crypto
( PubKey, PubKeyHash(getPubKeyHash) )
import Plutus.V1.Ledger.Credential ()
import Plutus.V1.Ledger.Contexts
( ScriptContext(ScriptContext, scriptContextTxInfo),
TxInfo(txInfoSignatories) )
import Plutus.V1.Ledger.Tx (Tx(..), TxOutRef, TxOutTx(..), TxOut(..), txData, txOutDatum)
import qualified Plutus.V1.Ledger.Scripts as Plutus
import Playground.Contract
( mkKnownCurrencies, mkSchemaDefinitions, KnownCurrency )
import qualified Data.Text as T
import Wallet.Emulator.Wallet ()
import Ledger.Crypto ( pubKeyHash )
import qualified Prelude
import qualified Data.ByteString.Short as SBS
import qualified Data.ByteString.Lazy as LBS
import Codec.Serialise ( serialise )
import Cardano.Api.Shelley (PlutusScript (..), PlutusScriptV1)
import Plutus.ChainIndex.Tx
( ChainIndexTx(ChainIndexTx, _citxData) )
import Cardano.Prelude ( Set, MonadError (throwError, catchError) )
import qualified Data.Set as Set
import Text.ParserCombinators.ReadPrec
import GHC.Read
import qualified Text.Read.Lex as L
import Plutus.V1.Ledger.Address
import Plutus.V1.Ledger.Crypto
-- | 'MyDatum' holds either:
-- * the hash of the address that can redeem the value, if it was locked;
-- * the hash with the last bit flipped, if it was burned.
newtype MyDatum = MyDatum { fromMyDatum :: BuiltinByteString }
  deriving (Prelude.Show)
PlutusTx.makeLift ''MyDatum
PlutusTx.unstableMakeIsData ''MyDatum
-- | Hand-written 'Read' instance: parses @MyDatum <bytes>@ by reading the
-- underlying bytestring and wrapping it in 'BuiltinByteString'.
instance Prelude.Read MyDatum where
  readPrec =
    parens
      (do expectP (L.Ident "MyDatum")
          x <- step readPrec
          return $ MyDatum (BuiltinByteString x)
      )
  readListPrec = readListPrecDefault
  readList = readListDefault
-- | Redeemer holds no data, since the address is taken from the context.
newtype MyRedeemer = MyRedeemer { _nothing :: () }
  deriving (Prelude.Read, Prelude.Show)
PlutusTx.makeLift ''MyRedeemer
PlutusTx.unstableMakeIsData ''MyRedeemer
-- | One observable event per endpoint invocation, reported via 'tell'.
data ContractAction =
    BurnedValueValidated (Maybe Value)       -- ^ result of a "validateBurn" query
  | BurnedValue Value BuiltinByteString      -- ^ value burned under a commitment hash
  | LockedValue Value BuiltinByteString      -- ^ value locked for an addressee hash
  | Redeemed Value BuiltinByteString         -- ^ value redeemed by a key hash
  deriving Prelude.Show
$(deriveJSON defaultOptions ''ContractAction)
-- | Log of contract actions, built as a cons-chain with the newest
-- action at the head ('ContractStateAction') and 'None' as the end.
data ContractState = ContractStateAction ContractAction ContractState
                   | None
                   deriving Prelude.Show
$(deriveJSON defaultOptions ''ContractState)
instance Prelude.Monoid ContractState where
  mempty = None
-- NOTE(review): '<>' keeps only the head action of the right operand and
-- discards its tail (_s); presumably each 'tell' contributes a single-action
-- state (see 'tellAction'), so nothing is lost — confirm against callers.
instance Prelude.Semigroup ContractState where
  prevState <> (ContractStateAction a _s) = ContractStateAction a prevState
  prevState <> None = prevState
-- | Flatten the state chain into a list of actions, oldest first.
stateActions :: ContractState -> [ContractAction]
stateActions = go []
  where
    go acc None                      = acc
    go acc (ContractStateAction a s) = go (a : acc) s

-- | Drop the newest action, yielding the preceding state.
previousState :: ContractState -> ContractState
previousState None                      = None
previousState (ContractStateAction _ s) = s

-- | The newest action in the state, if any.
contractAction :: ContractState -> Maybe ContractAction
contractAction None                      = Nothing
contractAction (ContractStateAction a _) = Just a
-- | Report a single ACTION to the contract's observable state; the
-- 'Semigroup' instance merges it onto the accumulated state.
tellAction :: ContractAction -> Contract ContractState Schema e ()
tellAction action = tell (ContractStateAction action None)
{-# INLINABLE validateSpend #-}
-- | Spending validator checks that hash of the redeeming address is the same as the datum.
validateSpend :: ValidatorType Burner
validateSpend (MyDatum addrHash) _myRedeemerValue ScriptContext { scriptContextTxInfo = txinfo } =
    traceIfFalse traceMsg (addrHash `elem` allPubkeyHashes)
  where
    -- Keys that signed the spending transaction.
    requiredSigs :: [PubKeyHash]
    requiredSigs = txInfoSignatories txinfo
    -- SHA3-256 hashes of the signing keys, to compare against the datum.
    allPubkeyHashes :: [BuiltinByteString]
    allPubkeyHashes = fmap (sha3_256 . getPubKeyHash) requiredSigs
    sigsS :: BuiltinString
    sigsS = "[" <> foldr (\a b -> decodeUtf8 a <> ", " <> b) mempty allPubkeyHashes <> "]"
    traceMsg :: BuiltinString
    traceMsg
      | allPubkeyHashes == [] = "No required signatures attached. Owner has not signed."
      | otherwise = "Owner has not signed, expected " <> decodeUtf8 addrHash <> ", but got: " <> sigsS
-- | The address of the contract (the hash of its validator script).
contractAddress :: Address
contractAddress = Ledger.scriptAddress (Scripts.validatorScript burnerTypedValidator)
-- | Type level tag for the script.
data Burner
instance Scripts.ValidatorTypes Burner where
    type instance RedeemerType Burner = MyRedeemer -- Argument given to redeem value, if possible (empty)
    type instance DatumType Burner = MyDatum -- Validator script argument
-- | The script instance is the compiled validator (ready to go onto the chain)
burnerTypedValidator :: Scripts.TypedValidator Burner
burnerTypedValidator = Scripts.mkTypedValidator @Burner
    $$(PlutusTx.compile [|| validateSpend ||])
    $$(PlutusTx.compile [|| wrap ||])
  where
    -- Adapts the typed validator to the untyped on-chain calling convention.
    wrap = Scripts.wrapValidator @MyDatum @MyRedeemer
-- | The schema of the contract, with four endpoints.
type Schema = Endpoint "lock" (PubKeyHash, Value)          -- lock the value
          .\/ Endpoint "burn" (BuiltinByteString, Value)   -- burn the value
          .\/ Endpoint "validateBurn" BuiltinByteString    -- validate a burn
          .\/ Endpoint "redeem" ()                         -- redeem the locked value
-- | The raw (untyped) validator script.
burnerScript :: Plutus.Script
burnerScript = Plutus.unValidatorScript burnerValidator

-- | The compiled validator, extracted from the typed wrapper.
burnerValidator :: Plutus.Validator
burnerValidator = Scripts.validatorScript burnerTypedValidator

-- | CBOR-serialised validator as a short bytestring.
burnerSBS :: SBS.ShortByteString
burnerSBS = SBS.toShort . LBS.toStrict $ serialise burnerScript

-- | Serialised script in the envelope format used by cardano-api.
burnerSerialised :: PlutusScript PlutusScriptV1
burnerSerialised = PlutusScriptSerialised burnerSBS
-- | Main contract loop: wait for any endpoint to be called, handle it, repeat.
contract :: Contract ContractState Schema ContractError ()
contract = selectList [lock, burn, validateBurn, redeem] >> contract
-- | The "lock" contract endpoint.
--
-- Lock the value to the given addressee.
lock :: AsContractError e => Promise ContractState Schema e ()
lock = endpoint @"lock" lock'
lock' :: AsContractError e => (PubKeyHash, Value) -> Contract ContractState Schema e ()
lock' (addr, lockedFunds) = do
pubk <- ownPubKeyHash
let hash = sha3_256 $ getPubKeyHash addr
let txConstraint = Constraints.mustPayToTheScript (MyDatum hash) lockedFunds <> Constraints.mustBeSignedBy pubk
tx <- submitTxConstraints burnerTypedValidator txConstraint
awaitTxConfirmed $ getCardanoTxId tx
logInfo @Prelude.String "Tx locked"
tellAction (LockedValue lockedFunds hash)
-- | The "burn" contract endpoint.
--
-- Burn the value with the given commitment.
burn :: AsContractError e => Promise ContractState Schema e ()
burn = endpoint @"burn" burn'
burn' :: AsContractError e => (BuiltinByteString, Value) -> Contract ContractState Schema e ()
burn' (aCommitment, burnedFunds) = do
let hash = flipCommitment $ sha3_256 aCommitment
let txConstraint = Constraints.mustPayToTheScript (MyDatum hash) burnedFunds
tx <- submitTxConstraints burnerTypedValidator txConstraint
awaitTxConfirmed $ getCardanoTxId tx
logInfo @Prelude.String "Tx burned"
tellAction (BurnedValue burnedFunds hash)
-- | Flip lowest bit in the commitment.
--
-- Requires non-empty bytestring as argument.
flipCommitment :: BuiltinByteString -> BuiltinByteString
flipCommitment (BuiltinByteString bs) =
  case unsnoc bs of
    Nothing ->
      -- Empty input: abort on-chain evaluation.
      BuiltinByteString (traceError "Hash was empty")
    Just (initBytes, lastByte) ->
      BuiltinByteString (snoc initBytes (lastByte `xor` 1))
-- | The "redeem" contract endpoint.
--
-- Can redeem the value, if it was published, not burned.
redeem :: Promise ContractState Schema ContractError ()
redeem = endpoint @"redeem" $ \() -> do
pubk <- ownPubKeyHash
unspentOutputs' <- utxosTxOutTxAt contractAddress
let relevantOutputs = filterUTxOs unspentOutputs' (filterByPubKey pubk)
txInputs = Map.map Prelude.fst $ relevantOutputs
when (Map.null relevantOutputs) $ do
logError @Prelude.String $ "No UTxO to redeem from"
throwError (OtherError $ T.pack "No UTxO to redeem from")
let redeemer = MyRedeemer ()
vals <- forM (Map.toList txInputs) $ \(k, v) -> do
let txInput = Map.singleton k v
txConstraint = collectFromScript txInput redeemer <> Constraints.mustBeSignedBy pubk
tx <- try $ submitTxConstraintsSpending burnerTypedValidator txInput txConstraint
case tx of
Left e -> do
logError @Prelude.String $ "Error redeeming tx: " <> Prelude.show e
return Nothing
Right tx -> do
awaitTxConfirmed . getCardanoTxId $ tx
let val = foldOf (folded . to txOutValue) . getCardanoTxUnspentOutputsTx $ tx
logInfo @Prelude.String ("Tx redeemed with value " <> Prelude.show val)
return (Just val)
tellAction (Redeemed (Prelude.mconcat $ catMaybes vals) (getPubKeyHash pubk))
where
filterUTxOs :: Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx)
-> ((ChainIndexTxOut, ChainIndexTx) -> Bool)
-> Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx)
filterUTxOs txMap p = flip Map.filter txMap p
filterByPubKey :: PubKeyHash -> (ChainIndexTxOut, ChainIndexTx) -> Bool
filterByPubKey pubk txtpl = fmap fromMyDatum (getMyDatum txtpl) == Just (sha3_256 (getPubKeyHash pubk))
-- | Report whether any value was burned under the given commitment.
validateBurn' :: AsContractError e => BuiltinByteString -> Contract ContractState Schema e ()
validateBurn' aCommitment = do
  burnedVal <- burned aCommitment
  if isZero burnedVal
    then do
      logInfo @Prelude.String "Nothing burned with given commitment"
      tellAction (BurnedValueValidated Nothing)
    else do
      logInfo @Prelude.String ("Value burned with given commitment: " <> Prelude.show (valueOf burnedVal "" ""))
      tellAction (BurnedValueValidated (Just burnedVal))

-- | The "validateBurn" contract endpoint.
validateBurn :: Promise ContractState Schema ContractError ()
validateBurn = endpoint @"validateBurn" validateBurn'
-- | The "burned" confirmation endpoint.
--
-- Confirms that the value was burned with the given commitment.
-- It returns total value locked with a given commitment.
burned :: AsContractError e => BuiltinByteString -> Contract w Schema e Value
burned aCommitment = do
unspentOutputs :: Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx) <- utxosTxOutTxAt contractAddress
return $ totalValue (withCommitment `Map.filter` unspentOutputs)
where
-- | Check if the given transaction output is commited to burn with the same hash as `aCommitment`.
withCommitment :: (ChainIndexTxOut, ChainIndexTx) -> Bool
withCommitment txtpl = fmap fromMyDatum (getMyDatum txtpl) == Just commitmentHash
commitmentHash = flipCommitment $ sha3_256 aCommitment
-- | Total value of Utxos in a `UtxoMap`.
totalValue :: Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx) -> Value
totalValue = foldOf (folded . _1 . _ScriptChainIndexTxOut . _4)
-- | Extract 'MyDatum' from a script output, resolving a datum hash against
-- the producing transaction's datum table when the datum is not inline.
getMyDatum :: (ChainIndexTxOut, ChainIndexTx) -> Maybe MyDatum
getMyDatum (PublicKeyChainIndexTxOut {}, _) = Nothing
getMyDatum (ScriptChainIndexTxOut { _ciTxOutDatum = Left dh }, ChainIndexTx{ _citxData = txData }) = do
    -- lookup Datum from the hash
    Datum d <- dh `Map.lookup` txData
    -- Unpack from PlutusTx.Datum type.
    PlutusTx.fromBuiltinData d
getMyDatum (ScriptChainIndexTxOut { _ciTxOutDatum = Right (Datum datum) }, _) =
    PlutusTx.fromBuiltinData datum
-- | Script endpoints available for calling the contract.
endpoints :: Contract ContractState Schema ContractError ()
endpoints = contract
-- | Run a contract action, capturing any error as a 'Left'.
try :: Contract w s e a -> Contract w s e (Either e a)
try act = fmap Right act `catchError` (return . Left)
-- Playground boilerplate: expose the schema and an empty currency list.
mkSchemaDefinitions ''Schema
$(mkKnownCurrencies [])
| null | https://raw.githubusercontent.com/iagon-tech/proof-of-burn-cardano/fc7738f85d64cf134405c16549f43a36473e8a52/src/ProofOfBurn.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE MagicHash #
# LANGUAGE MultiWayIf #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE OverloadedStrings #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators #
| This is a burner contract,
And add function implementations (and rename them to
something suitable) for the endpoints:
* lock - which locks the value to be redeemed by an addressee;
* redeem - which redeems the locked value;
* burn - which burns the value instead of locking it.
In order to check that value was indeed burned, one needs to run a `burned` script with the same commitment value.
* the hash of the address that can redeem the value, if it was locked;
* the hash with the last bit flipped, if it was burned.
| Redeemer holds no data, since the address is taken from the context.
| The address of the contract (the hash of its validator script).
Argument given to redeem value, if possible (empty)
Validator script argument
| The script instance is the compiled validator (ready to go onto the chain)
lock the value
burn the value
validate a burn
redeem the locked value
| The "lock" contract endpoint.
Lock the value to the given addressee.
| The "burn" contract endpoint.
Burn the value with the given commitment.
| Flip lowest bit in the commitment
Requires non-empty bytestring as argument
input was empty
| The "redeem" contract endpoint.
Can redeem the value, if it was published, not burned.
| The "burned" confirmation endpoint.
Confirms that the value was burned with the given commitment.
It returns total value locked with a given commitment.
| Check if the given transaction output is commited to burn with the same hash as `aCommitment`.
| Script endpoints available for calling the contract. | # LANGUAGE FlexibleContexts #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - ignore - interface - pragmas #
module ProofOfBurn where
import Control.Monad (void, when, forM, forM_)
import Control.Applicative (liftA3)
import Control.Lens ( foldOf, folded, Field1(_1), Field4(_4), to )
import Data.Bits (xor)
import Data.Functor (($>))
import Data.Char (ord, chr)
import qualified Data.Aeson as Aeson
import Data.Aeson.TH
import Data.Sequences (snoc, unsnoc)
import Data.Maybe (fromJust, catMaybes)
import qualified Data.Map.Strict as Map
import qualified Data.List.NonEmpty as NonEmpty(toList)
import Plutus.Contract
( (>>),
logInfo,
logError,
endpoint,
ownPubKeyHash,
submitTxConstraints,
submitTxConstraintsSpending,
awaitTxConfirmed,
utxosAt,
utxosTxOutTxAt,
collectFromScript,
selectList,
tell,
Endpoint,
Contract,
Promise,
AsContractError,
type (.\/), ContractError (OtherError) )
import Plutus.ChainIndex.Client ()
import qualified PlutusTx
import qualified PlutusTx.IsData as PlutusTx
import PlutusTx.Prelude
( otherwise,
return,
(>>=),
Bool(False),
Maybe(..),
Either(Right, Left),
(.),
flip,
sha3_256,
either,
elem,
const,
($),
traceError,
traceIfFalse,
BuiltinByteString,
Eq((==)),
Functor(fmap),
Semigroup((<>)), Monoid(mempty), foldr, BuiltinString )
import PlutusTx.Builtins.Internal (BuiltinByteString(..), decodeUtf8)
import Ledger (Address(..), Datum(..), scriptAddress, datumHash)
import Ledger.AddressMap (UtxoMap)
import qualified Ledger.Constraints as Constraints
import qualified Ledger.Typed.Scripts as Scripts
import Ledger.Typed.Scripts.Validators(ValidatorType)
import Ledger.Value (Value, isZero, valueOf)
import Ledger.Tx
( TxOutRef,
ChainIndexTxOut(PublicKeyChainIndexTxOut, ScriptChainIndexTxOut,
_ciTxOutDatum),
_ScriptChainIndexTxOut,
getCardanoTxId,
Address, getCardanoTxUnspentOutputsTx )
import Plutus.V1.Ledger.Crypto
( PubKey, PubKeyHash(getPubKeyHash) )
import Plutus.V1.Ledger.Credential ()
import Plutus.V1.Ledger.Contexts
( ScriptContext(ScriptContext, scriptContextTxInfo),
TxInfo(txInfoSignatories) )
import Plutus.V1.Ledger.Tx (Tx(..), TxOutRef, TxOutTx(..), TxOut(..), txData, txOutDatum)
import qualified Plutus.V1.Ledger.Scripts as Plutus
import Playground.Contract
( mkKnownCurrencies, mkSchemaDefinitions, KnownCurrency )
import qualified Data.Text as T
import Wallet.Emulator.Wallet ()
import Ledger.Crypto ( pubKeyHash )
import qualified Prelude
import qualified Data.ByteString.Short as SBS
import qualified Data.ByteString.Lazy as LBS
import Codec.Serialise ( serialise )
import Cardano.Api.Shelley (PlutusScript (..), PlutusScriptV1)
import Plutus.ChainIndex.Tx
( ChainIndexTx(ChainIndexTx, _citxData) )
import Cardano.Prelude ( Set, MonadError (throwError, catchError) )
import qualified Data.Set as Set
import Text.ParserCombinators.ReadPrec
import GHC.Read
import qualified Text.Read.Lex as L
import Plutus.V1.Ledger.Address
import Plutus.V1.Ledger.Crypto
| holds either :
newtype MyDatum = MyDatum { fromMyDatum :: BuiltinByteString }
deriving (Prelude.Show)
PlutusTx.makeLift ''MyDatum
PlutusTx.unstableMakeIsData ''MyDatum
instance Prelude.Read MyDatum where
readPrec =
parens
(do expectP (L.Ident "MyDatum")
x <- step readPrec
return $ MyDatum (BuiltinByteString x)
)
readListPrec = readListPrecDefault
readList = readListDefault
newtype MyRedeemer = MyRedeemer { _nothing :: () }
deriving (Prelude.Read, Prelude.Show)
PlutusTx.makeLift ''MyRedeemer
PlutusTx.unstableMakeIsData ''MyRedeemer
data ContractAction =
BurnedValueValidated (Maybe Value)
| BurnedValue Value BuiltinByteString
| LockedValue Value BuiltinByteString
| Redeemed Value BuiltinByteString
deriving Prelude.Show
$(deriveJSON defaultOptions ''ContractAction)
data ContractState = ContractStateAction ContractAction ContractState
| None
deriving Prelude.Show
$(deriveJSON defaultOptions ''ContractState)
instance Prelude.Monoid ContractState where
mempty = None
instance Prelude.Semigroup ContractState where
prevState <> (ContractStateAction a _s) = ContractStateAction a prevState
prevState <> None = prevState
stateActions :: ContractState -> [ContractAction]
stateActions None = []
stateActions (ContractStateAction a s) = stateActions s <> [a]
previousState :: ContractState -> ContractState
previousState (ContractStateAction _ s) = s
previousState None = None
contractAction :: ContractState -> Maybe ContractAction
contractAction (ContractStateAction a _) = Just a
contractAction None = Nothing
tellAction :: ContractAction -> Contract ContractState Schema e ()
tellAction action = tell (ContractStateAction action None)
# INLINABLE validateSpend #
| Spending validator checks that hash of the redeeming address is the same as the datum .
validateSpend :: ValidatorType Burner
validateSpend (MyDatum addrHash) _myRedeemerValue ScriptContext { scriptContextTxInfo = txinfo } =
traceIfFalse traceMsg (addrHash `elem` allPubkeyHashes)
where
requiredSigs :: [PubKeyHash]
requiredSigs = txInfoSignatories txinfo
allPubkeyHashes :: [BuiltinByteString]
allPubkeyHashes = fmap (sha3_256 . getPubKeyHash) requiredSigs
sigsS :: BuiltinString
sigsS = "[" <> foldr (\a b -> decodeUtf8 a <> ", " <> b) mempty allPubkeyHashes <> "]"
traceMsg :: BuiltinString
traceMsg
| allPubkeyHashes == [] = "No required signatures attached. Owner has not signed."
| otherwise = "Owner has not signed, expected " <> decodeUtf8 addrHash <> ", but got: " <> sigsS
contractAddress :: Address
contractAddress = Ledger.scriptAddress (Scripts.validatorScript burnerTypedValidator)
| Type level tag for the script .
data Burner
instance Scripts.ValidatorTypes Burner where
burnerTypedValidator :: Scripts.TypedValidator Burner
burnerTypedValidator = Scripts.mkTypedValidator @Burner
$$(PlutusTx.compile [|| validateSpend ||])
$$(PlutusTx.compile [|| wrap ||])
where
wrap = Scripts.wrapValidator @MyDatum @MyRedeemer
| The schema of the contract , with two endpoints .
burnerScript :: Plutus.Script
burnerScript = Plutus.unValidatorScript burnerValidator
burnerValidator :: Plutus.Validator
burnerValidator = Scripts.validatorScript burnerTypedValidator
burnerSBS :: SBS.ShortByteString
burnerSBS = SBS.toShort . LBS.toStrict $ serialise burnerScript
burnerSerialised :: PlutusScript PlutusScriptV1
burnerSerialised = PlutusScriptSerialised burnerSBS
contract :: Contract ContractState Schema ContractError ()
contract = selectList [lock, burn, validateBurn, redeem] >> contract
lock :: AsContractError e => Promise ContractState Schema e ()
lock = endpoint @"lock" lock'
lock' :: AsContractError e => (PubKeyHash, Value) -> Contract ContractState Schema e ()
lock' (addr, lockedFunds) = do
pubk <- ownPubKeyHash
let hash = sha3_256 $ getPubKeyHash addr
let txConstraint = Constraints.mustPayToTheScript (MyDatum hash) lockedFunds <> Constraints.mustBeSignedBy pubk
tx <- submitTxConstraints burnerTypedValidator txConstraint
awaitTxConfirmed $ getCardanoTxId tx
logInfo @Prelude.String "Tx locked"
tellAction (LockedValue lockedFunds hash)
burn :: AsContractError e => Promise ContractState Schema e ()
burn = endpoint @"burn" burn'
burn' :: AsContractError e => (BuiltinByteString, Value) -> Contract ContractState Schema e ()
burn' (aCommitment, burnedFunds) = do
let hash = flipCommitment $ sha3_256 aCommitment
let txConstraint = Constraints.mustPayToTheScript (MyDatum hash) burnedFunds
tx <- submitTxConstraints burnerTypedValidator txConstraint
awaitTxConfirmed $ getCardanoTxId tx
logInfo @Prelude.String "Tx burned"
tellAction (BurnedValue burnedFunds hash)
flipCommitment :: BuiltinByteString -> BuiltinByteString
flipCommitment (BuiltinByteString bs) = BuiltinByteString $ do
case unsnoc bs of
Just (seq, lastElt) -> snoc seq
$ lastElt `xor` 1
redeem :: Promise ContractState Schema ContractError ()
redeem = endpoint @"redeem" $ \() -> do
pubk <- ownPubKeyHash
unspentOutputs' <- utxosTxOutTxAt contractAddress
let relevantOutputs = filterUTxOs unspentOutputs' (filterByPubKey pubk)
txInputs = Map.map Prelude.fst $ relevantOutputs
when (Map.null relevantOutputs) $ do
logError @Prelude.String $ "No UTxO to redeem from"
throwError (OtherError $ T.pack "No UTxO to redeem from")
let redeemer = MyRedeemer ()
vals <- forM (Map.toList txInputs) $ \(k, v) -> do
let txInput = Map.singleton k v
txConstraint = collectFromScript txInput redeemer <> Constraints.mustBeSignedBy pubk
tx <- try $ submitTxConstraintsSpending burnerTypedValidator txInput txConstraint
case tx of
Left e -> do
logError @Prelude.String $ "Error redeeming tx: " <> Prelude.show e
return Nothing
Right tx -> do
awaitTxConfirmed . getCardanoTxId $ tx
let val = foldOf (folded . to txOutValue) . getCardanoTxUnspentOutputsTx $ tx
logInfo @Prelude.String ("Tx redeemed with value " <> Prelude.show val)
return (Just val)
tellAction (Redeemed (Prelude.mconcat $ catMaybes vals) (getPubKeyHash pubk))
where
filterUTxOs :: Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx)
-> ((ChainIndexTxOut, ChainIndexTx) -> Bool)
-> Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx)
filterUTxOs txMap p = flip Map.filter txMap p
filterByPubKey :: PubKeyHash -> (ChainIndexTxOut, ChainIndexTx) -> Bool
filterByPubKey pubk txtpl = fmap fromMyDatum (getMyDatum txtpl) == Just (sha3_256 (getPubKeyHash pubk))
validateBurn' :: AsContractError e => BuiltinByteString -> Contract ContractState Schema e ()
validateBurn' aCommitment = do
burnedVal <- burned aCommitment
if | isZero burnedVal -> do
logInfo @Prelude.String "Nothing burned with given commitment"
tellAction (BurnedValueValidated Nothing)
| otherwise -> do
logInfo @Prelude.String ("Value burned with given commitment: " <> Prelude.show (valueOf burnedVal "" ""))
tellAction (BurnedValueValidated (Just burnedVal))
validateBurn :: Promise ContractState Schema ContractError ()
validateBurn = endpoint @"validateBurn" $ validateBurn'
burned :: AsContractError e => BuiltinByteString -> Contract w Schema e Value
burned aCommitment = do
unspentOutputs :: Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx) <- utxosTxOutTxAt contractAddress
return $ totalValue (withCommitment `Map.filter` unspentOutputs)
where
withCommitment :: (ChainIndexTxOut, ChainIndexTx) -> Bool
withCommitment txtpl = fmap fromMyDatum (getMyDatum txtpl) == Just commitmentHash
commitmentHash = flipCommitment $ sha3_256 aCommitment
| Total value of Utxos in a ` UtxoMap ` .
totalValue :: Map.Map TxOutRef (ChainIndexTxOut, ChainIndexTx) -> Value
totalValue = foldOf (folded . _1 . _ScriptChainIndexTxOut . _4)
getMyDatum :: (ChainIndexTxOut, ChainIndexTx) -> Maybe MyDatum
getMyDatum (PublicKeyChainIndexTxOut {}, _) = Nothing
getMyDatum (ScriptChainIndexTxOut { _ciTxOutDatum = Left dh }, ChainIndexTx{ _citxData = txData }) = do
lookup Datum from the hash
Unpack from PlutusTx . Datum type .
getMyDatum (ScriptChainIndexTxOut { _ciTxOutDatum = Right (Datum datum) }, _) =
PlutusTx.fromBuiltinData datum
endpoints :: Contract ContractState Schema ContractError ()
endpoints = contract
try :: Contract w s e a -> Contract w s e (Either e a)
try a = catchError (fmap Right $ a) (\e -> return (Left e))
mkSchemaDefinitions ''Schema
$(mkKnownCurrencies [])
|
0642fd212043f39d317cd8017af14bc153d5bedf77f1f7daac0919018e41bcf1 | dbuenzli/hyperbib | entity_html.mli | ---------------------------------------------------------------------------
Copyright ( c ) 2021 University of Bern . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2021 University of Bern. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
(** Entity HTML commonalities. *)
open Hyperbib.Std
* { 1 : description_field Description field }
val edit_description :
(module Entity.DESCRIBABLE with type t = 't) ->
?textarea_at:At.t list -> ?at:At.t list -> 't -> El.html
val view_description :
(module Entity.DESCRIBABLE with type t = 't) ->
?at:At.t list -> 't -> El.html
* { 1 : note_field Note field }
val edit_note :
(module Entity.ANNOTABLE with type t = 't) ->
?textarea_at:At.t list -> ?at:At.t list -> 't -> El.html
val view_note :
(module Entity.ANNOTABLE with type t = 't) ->
?at:At.t list -> 't -> El.html
* { 1 : private_note Private note field }
val edit_private_note :
(module Entity.PRIVATELY_ANNOTABLE with type t = 't) ->
?textarea_at:At.t list -> ?at:At.t list -> 't -> El.html
val view_private_note :
(module Entity.PRIVATELY_ANNOTABLE with type t = 't) -> Page.Gen.t ->
?at:At.t list -> 't -> El.html
* { 1 : public Public field }
val edit_public :
(module Entity.PUBLICABLE with type t = 't) ->
?at:At.t list -> 't -> El.html
val viz : (module Entity.PUBLICABLE with type t = 't) -> 't -> At.t
(** [viz (module E) e] is {!At.void} when [e] is public and
{!Hclass.private'} otherwise. *)
* { 1 : person_inputs Person inputs }
* { 1 : contributors Contributor inputs }
val person_input :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> Person.t -> El.html
(** [person_input uf ~for_list ~input_name ~role p] inputs [p] via a hidden
field named [input_name] and whose name is [s]'s [id]. If [for_list] is
[true] this is for a list of subjects; if [false] it's for selecting a
single person. *)
val person_input_create :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> Person.t -> El.html
(** [person_input_create uf ~input_name p] inputs [p] for creation
via hidden fields listed for now in {!Hquery}. [input_name] is
used in case the creation is removed, to replace it with a
{!person_input_finder}. *)
val person_input_finder :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> El.html
(** [person_input_finder] is a text field to search for a person to input
in an hidden input element named [input_name]. If [for_list] is [true]
this is for a list of persons; if [false] it's for selecting a single
subject. *)
val person_input_finder_results :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> creatable:Person.t option -> Person.t list ->
El.html
(** [person_input_finder_results] is a list of persons to choose from
to input a person in a hidden input element named [input_name].
If [creatable] is provided the given person can be created. *)
* { 1 : subject_inputs Subject inputs }
val subject_input :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
Subject.t -> El.html
(** [subject_input uf ~for_list ~input_name s] inputs [s] via a hidden field
named [input_name] and whose name is [s]'s [id]. If [for_list] is [true]
this is for a list of subjects; if [false] it's for selecting a single
subject. *)
val subject_input_finder :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name -> El.html
(** [subject_input_finder] is a text field to search for a subjec to input
in an hidden input element named [input_name]. If [for_list] is [true]
this is for a list of subjects; if [false] it's for selecting a single
subject. *)
val subject_input_finder_results :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
parents:Subject.t Id.Map.t -> Subject.t list -> El.html
(** [subject_input_finder_results] is a list of subjects to choose from
to input a subject in a hidden input element named [input_name].
If [creatable] is provided the given subject can be created. *)
* { 1 : container_inputs Container inputs }
val container_input :
Kurl.fmt -> input_name:Entity.Url.input_name -> Container.t -> El.html
(** [container_input uf ~input_name c] inputs [c] via a hidden field
named [input_name] and whose value is [c]'s [id]. *)
val container_input_create :
Kurl.fmt -> input_name:Entity.Url.input_name -> Container.t -> El.html
(** [container_input_create uf ~input_name c] inputs [c] for creation
via hidden fields listed for now in {!Hquery}. [input_name] is
used in case the creation is removed, to replace it with a
{!container_input_finder}. *)
val container_input_finder :
Kurl.fmt -> input_name:Entity.Url.input_name -> El.html
(** [input_container] is a text field to search for a container to input
in an hidden input element named [input_name]. *)
val container_input_finder_results :
Kurl.fmt -> input_name:Entity.Url.input_name ->
creatable:Container.t option -> Container.t list -> El.html
(** [container_input_finder_results] is a list of containers to choose from
to input a container in a hidden input element named [input_name].
If [creatable] is provided the given container can be created. *)
---------------------------------------------------------------------------
Copyright ( c ) 2021 University of Bern
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2021 University of Bern
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/hyperbib/b17d1ef8cc3a215d04c31c0e56c2de414911c55c/src/html/entity_html.mli | ocaml | * Entity HTML commonalities.
* [viz (module E) e] is {!At.void} when [e] is public and
{!Hclass.private'} otherwise.
* [person_input uf ~for_list ~input_name ~role p] inputs [p] via a hidden
field named [input_name] and whose name is [s]'s [id]. If [for_list] is
[true] this is for a list of subjects; if [false] it's for selecting a
single person.
* [person_input_create uf ~input_name p] inputs [p] for creation
via hidden fields listed for now in {!Hquery}. [input_name] is
used in case the creation is removed, to replace it with a
{!person_input_finder}.
* [person_input_finder] is a text field to search for a person to input
in an hidden input element named [input_name]. If [for_list] is [true]
this is for a list of persons; if [false] it's for selecting a single
subject.
* [person_input_finder_results] is a list of persons to choose from
to input a person in a hidden input element named [input_name].
If [creatable] is provided the given person can be created.
* [subject_input uf ~for_list ~input_name s] inputs [s] via a hidden field
named [input_name] and whose name is [s]'s [id]. If [for_list] is [true]
this is for a list of subjects; if [false] it's for selecting a single
subject.
* [subject_input_finder] is a text field to search for a subjec to input
in an hidden input element named [input_name]. If [for_list] is [true]
this is for a list of subjects; if [false] it's for selecting a single
subject.
* [subject_input_finder_results] is a list of subjects to choose from
to input a subject in a hidden input element named [input_name].
If [creatable] is provided the given subject can be created.
* [container_input uf ~input_name c] inputs [c] via a hidden field
named [input_name] and whose value is [c]'s [id].
* [container_input_create uf ~input_name c] inputs [c] for creation
via hidden fields listed for now in {!Hquery}. [input_name] is
used in case the creation is removed, to replace it with a
{!container_input_finder}.
* [input_container] is a text field to search for a container to input
in an hidden input element named [input_name].
* [container_input_finder_results] is a list of containers to choose from
to input a container in a hidden input element named [input_name].
If [creatable] is provided the given container can be created. | ---------------------------------------------------------------------------
Copyright ( c ) 2021 University of Bern . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2021 University of Bern. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open Hyperbib.Std
* { 1 : description_field Description field }
val edit_description :
(module Entity.DESCRIBABLE with type t = 't) ->
?textarea_at:At.t list -> ?at:At.t list -> 't -> El.html
val view_description :
(module Entity.DESCRIBABLE with type t = 't) ->
?at:At.t list -> 't -> El.html
* { 1 : note_field Note field }
val edit_note :
(module Entity.ANNOTABLE with type t = 't) ->
?textarea_at:At.t list -> ?at:At.t list -> 't -> El.html
val view_note :
(module Entity.ANNOTABLE with type t = 't) ->
?at:At.t list -> 't -> El.html
* { 1 : private_note Private note field }
val edit_private_note :
(module Entity.PRIVATELY_ANNOTABLE with type t = 't) ->
?textarea_at:At.t list -> ?at:At.t list -> 't -> El.html
val view_private_note :
(module Entity.PRIVATELY_ANNOTABLE with type t = 't) -> Page.Gen.t ->
?at:At.t list -> 't -> El.html
* { 1 : public Public field }
val edit_public :
(module Entity.PUBLICABLE with type t = 't) ->
?at:At.t list -> 't -> El.html
val viz : (module Entity.PUBLICABLE with type t = 't) -> 't -> At.t
* { 1 : person_inputs Person inputs }
* { 1 : contributors Contributor inputs }
val person_input :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> Person.t -> El.html
val person_input_create :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> Person.t -> El.html
val person_input_finder :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> El.html
val person_input_finder_results :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
role:Person.role option -> creatable:Person.t option -> Person.t list ->
El.html
* { 1 : subject_inputs Subject inputs }
val subject_input :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
Subject.t -> El.html
val subject_input_finder :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name -> El.html
val subject_input_finder_results :
Kurl.fmt -> for_list:bool -> input_name:Entity.Url.input_name ->
parents:Subject.t Id.Map.t -> Subject.t list -> El.html
* { 1 : container_inputs Container inputs }
val container_input :
Kurl.fmt -> input_name:Entity.Url.input_name -> Container.t -> El.html
val container_input_create :
Kurl.fmt -> input_name:Entity.Url.input_name -> Container.t -> El.html
val container_input_finder :
Kurl.fmt -> input_name:Entity.Url.input_name -> El.html
val container_input_finder_results :
Kurl.fmt -> input_name:Entity.Url.input_name ->
creatable:Container.t option -> Container.t list -> El.html
---------------------------------------------------------------------------
Copyright ( c ) 2021 University of Bern
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2021 University of Bern
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
3196b87614ccf33d77e69c00a939f99978bd2d885a785d5d560a3c5e17f72ed8 | armon/bloomd_ring | br_wm_ping.erl | -module(br_wm_ping).
-export([init/1, to_html/2]).
-include_lib("webmachine/include/webmachine.hrl").
init([]) ->
{ok, nostate}.
to_html(ReqData, Context) ->
Result = io_lib:format("Result: ~p", [bloomd_ring:ping()]),
{"<html><head><title>bloomd</title></head><body>" ++ Result ++ "</body></html>", ReqData, Context}.
| null | https://raw.githubusercontent.com/armon/bloomd_ring/ca51d00e2f0e1b6f12f061403df8421d7207d078/src/br_wm_ping.erl | erlang | -module(br_wm_ping).
-export([init/1, to_html/2]).
-include_lib("webmachine/include/webmachine.hrl").
init([]) ->
{ok, nostate}.
to_html(ReqData, Context) ->
Result = io_lib:format("Result: ~p", [bloomd_ring:ping()]),
{"<html><head><title>bloomd</title></head><body>" ++ Result ++ "</body></html>", ReqData, Context}.
|
|
17520c0e62f3e484854f71d7008dabb0257090c56939245d65af1f1089945c7d | fiddlerwoaroof/lisp-sandbox | jsonarr_to_table.lisp | (defpackage json-to-table
(:use :cl :alexandria :serapeum :fw.lu))
| null | https://raw.githubusercontent.com/fiddlerwoaroof/lisp-sandbox/38ff817c95af35db042faf760b477675264220d2/jsonarr_to_table.lisp | lisp | (defpackage json-to-table
(:use :cl :alexandria :serapeum :fw.lu))
|
|
fce61311f44088b13154f5dca1f8d601d4a1c473126d6de15fbdbf68bbbd795b | maximedenes/native-coq | indschemes.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Pp
open Names
open Term
open Environ
open Libnames
open Glob_term
open Genarg
open Vernacexpr
open Ind_tables
* See also Auto_ind_decl , Indrec , Eqscheme , Ind_tables , ...
(** Build and register the boolean equalities associated to an inductive type *)
val declare_beq_scheme : mutual_inductive -> unit
val declare_eq_decidability : mutual_inductive -> unit
(** Build and register a congruence scheme for an equality-like inductive type *)
val declare_congr_scheme : inductive -> unit
(** Build and register rewriting schemes for an equality-like inductive type *)
val declare_rewriting_schemes : inductive -> unit
(** Mutual Minimality/Induction scheme *)
val do_mutual_induction_scheme :
(identifier located * bool * inductive * glob_sort) list -> unit
(** Main calls to interpret the Scheme command *)
val do_scheme : (identifier located option * scheme) list -> unit
(** Combine a list of schemes into a conjunction of them *)
val build_combined_scheme : env -> constant list -> constr * types
val do_combined_scheme : identifier located -> identifier located list -> unit
(** Hook called at each inductive type definition *)
val declare_default_schemes : mutual_inductive -> unit
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/toplevel/indschemes.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Build and register the boolean equalities associated to an inductive type
* Build and register a congruence scheme for an equality-like inductive type
* Build and register rewriting schemes for an equality-like inductive type
* Mutual Minimality/Induction scheme
* Main calls to interpret the Scheme command
* Combine a list of schemes into a conjunction of them
* Hook called at each inductive type definition | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Pp
open Names
open Term
open Environ
open Libnames
open Glob_term
open Genarg
open Vernacexpr
open Ind_tables
* See also Auto_ind_decl , Indrec , Eqscheme , Ind_tables , ...
val declare_beq_scheme : mutual_inductive -> unit
val declare_eq_decidability : mutual_inductive -> unit
val declare_congr_scheme : inductive -> unit
val declare_rewriting_schemes : inductive -> unit
val do_mutual_induction_scheme :
(identifier located * bool * inductive * glob_sort) list -> unit
val do_scheme : (identifier located option * scheme) list -> unit
val build_combined_scheme : env -> constant list -> constr * types
val do_combined_scheme : identifier located -> identifier located list -> unit
val declare_default_schemes : mutual_inductive -> unit
|
c821cf3f3315651d56ff064232832026ac25e361a6df652fd3fe8ae9a1165fc4 | mmottl/lacaml | lin_reg.ml | File : lin_reg.ml
Copyright ( C ) 2001 - 2005
email :
WWW :
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
Copyright (C) 2001-2005
Markus Mottl
email:
WWW:
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
open Format
open Lacaml.D
open Lacaml.Io
let () =
let m = 5 in
let n = 3 in
let data_mat = Mat.create m n in
let data_mat_copy = Mat.create m n in
let res_len = max 1 (max m n) in
let res_mat = Mat.create_mvec res_len in
let res_mat_copy = Mat.create_mvec res_len in
for i = 1 to m do
let v_ref = ref 0.0 in
for j = 1 to n do
let randf = Random.float 200.0 -. 100.0 in
v_ref := !v_ref +. float j *. randf;
data_mat.{i, j} <- randf;
data_mat_copy.{i, j} <- randf;
done;
let v = !v_ref in
res_mat.{i, 1} <- v;
res_mat_copy.{i, 1} <- v;
done;
printf
"\
@[<2>Predictor variables:\n\
@\n\
%a@]\n\
@\n\
@[<2>Response variable:\n\
@\n\
%a@]@\n\
@\n"
pp_fmat data_mat
pp_rfvec (Mat.col res_mat 1);
let rank = gelsd data_mat res_mat in
printf
"\
@[<2>Regression weights:\n\
@\n\
%a@]\n\
@\n\
Rank: %d@\n@\n"
pp_rfvec (Mat.col res_mat 1)
rank;
let y = gemv data_mat_copy (Mat.col res_mat 1) in
let b = Mat.col res_mat_copy 1 in
printf
"\
@[<2>Check result (must be close to 0):\n\
@\n\
%a@]@\n"
pp_rfvec (Vec.sub y b)
| null | https://raw.githubusercontent.com/mmottl/lacaml/2e01c0747e740e54ab9a23ea59b29ea0d929b50f/examples/lin_reg.ml | ocaml | File : lin_reg.ml
Copyright ( C ) 2001 - 2005
email :
WWW :
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
Copyright (C) 2001-2005
Markus Mottl
email:
WWW:
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
open Format
open Lacaml.D
open Lacaml.Io
let () =
let m = 5 in
let n = 3 in
let data_mat = Mat.create m n in
let data_mat_copy = Mat.create m n in
let res_len = max 1 (max m n) in
let res_mat = Mat.create_mvec res_len in
let res_mat_copy = Mat.create_mvec res_len in
for i = 1 to m do
let v_ref = ref 0.0 in
for j = 1 to n do
let randf = Random.float 200.0 -. 100.0 in
v_ref := !v_ref +. float j *. randf;
data_mat.{i, j} <- randf;
data_mat_copy.{i, j} <- randf;
done;
let v = !v_ref in
res_mat.{i, 1} <- v;
res_mat_copy.{i, 1} <- v;
done;
printf
"\
@[<2>Predictor variables:\n\
@\n\
%a@]\n\
@\n\
@[<2>Response variable:\n\
@\n\
%a@]@\n\
@\n"
pp_fmat data_mat
pp_rfvec (Mat.col res_mat 1);
let rank = gelsd data_mat res_mat in
printf
"\
@[<2>Regression weights:\n\
@\n\
%a@]\n\
@\n\
Rank: %d@\n@\n"
pp_rfvec (Mat.col res_mat 1)
rank;
let y = gemv data_mat_copy (Mat.col res_mat 1) in
let b = Mat.col res_mat_copy 1 in
printf
"\
@[<2>Check result (must be close to 0):\n\
@\n\
%a@]@\n"
pp_rfvec (Vec.sub y b)
|
|
7bbb147f7a888d6d69937e1018f644c1dcf4506993f44868855166e89abd60d7 | duelinmarkers/clj-record | thing_one.clj | (ns clj-record.test-model.thing-one
(:require clj-record.boot)
(:use clj-record.test-model.config))
(clj-record.core/init-model
:table-name "thing_one"
(:associations
(has-many thing-twos)
(belongs-to person :fk owner_person_id)))
| null | https://raw.githubusercontent.com/duelinmarkers/clj-record/2dd5b9a1fcb8828565acfdf9919330bf4b5dbfaa/test/clj_record/test_model/thing_one.clj | clojure | (ns clj-record.test-model.thing-one
(:require clj-record.boot)
(:use clj-record.test-model.config))
(clj-record.core/init-model
:table-name "thing_one"
(:associations
(has-many thing-twos)
(belongs-to person :fk owner_person_id)))
|
|
247d803286b9e74ef31b2fff7e30e9cf19587398558abf5777ff3d4aa7399320 | facebook/infer | Sink.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
module type Kind = sig
include TaintTraceElem.Kind
val get : Procname.t -> HilExp.t list -> CallFlags.t -> Tenv.t -> (t * IntSet.t) list
end
module type S = sig
include TaintTraceElem.S
val get : CallSite.t -> HilExp.t list -> CallFlags.t -> Tenv.t -> t list
val indexes : t -> IntSet.t
val with_indexes : t -> IntSet.t -> t
end
module Make (Kind : Kind) = struct
module Kind = Kind
type t = {kind: Kind.t; site: CallSite.t; indexes: IntSet.t} [@@deriving compare, equal]
let kind t = t.kind
let call_site t = t.site
let indexes t = t.indexes
let make ?(indexes = IntSet.empty) kind site = {kind; site; indexes}
let get site actuals call_flags tenv =
Kind.get (CallSite.pname site) actuals call_flags tenv
|> List.rev_map ~f:(fun (kind, indexes) -> {kind; site; indexes})
let with_callsite t callee_site = {t with site= callee_site}
let with_indexes t indexes = {t with indexes}
let pp fmt s = F.fprintf fmt "%a(%a)" Kind.pp s.kind CallSite.pp s.site
module Set = PrettyPrintable.MakePPSet (struct
type nonrec t = t [@@deriving compare]
let pp = pp
end)
end
| null | https://raw.githubusercontent.com/facebook/infer/d2e59e6df24858729129debcc2813ae3915c4f0a/infer/src/absint/Sink.ml | ocaml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
module type Kind = sig
include TaintTraceElem.Kind
val get : Procname.t -> HilExp.t list -> CallFlags.t -> Tenv.t -> (t * IntSet.t) list
end
module type S = sig
include TaintTraceElem.S
val get : CallSite.t -> HilExp.t list -> CallFlags.t -> Tenv.t -> t list
val indexes : t -> IntSet.t
val with_indexes : t -> IntSet.t -> t
end
module Make (Kind : Kind) = struct
module Kind = Kind
type t = {kind: Kind.t; site: CallSite.t; indexes: IntSet.t} [@@deriving compare, equal]
let kind t = t.kind
let call_site t = t.site
let indexes t = t.indexes
let make ?(indexes = IntSet.empty) kind site = {kind; site; indexes}
let get site actuals call_flags tenv =
Kind.get (CallSite.pname site) actuals call_flags tenv
|> List.rev_map ~f:(fun (kind, indexes) -> {kind; site; indexes})
let with_callsite t callee_site = {t with site= callee_site}
let with_indexes t indexes = {t with indexes}
let pp fmt s = F.fprintf fmt "%a(%a)" Kind.pp s.kind CallSite.pp s.site
module Set = PrettyPrintable.MakePPSet (struct
type nonrec t = t [@@deriving compare]
let pp = pp
end)
end
|
|
ec13b89dc1677183ff254e664110166f0a220670000ad6ce4d9b6d661f8dd030 | lamdu/lamdu | ToVersion13.hs | module Lamdu.Data.Export.JSON.Migration.ToVersion13 (migrate) where
import qualified Control.Lens as Lens
import Control.Lens.Extended ((~~>))
import qualified Data.Aeson as Aeson
import Data.Aeson.Instances ()
import Data.Aeson.Lens (_Array, _Object, _String)
import Data.Binary.Extended (encodeS)
import Data.UUID (UUID)
import qualified Data.UUID.Utils as UUIDUtils
import qualified Data.Vector as Vector
import Lamdu.Data.Export.JSON.Migration.Common (migrateToVer)
import Lamdu.Prelude
type LamId = Text
type TagId = Text
type LamParamsMap = Map LamId [TagId]
collectLamParams :: Aeson.Value -> Either Text LamParamsMap
collectLamParams (Aeson.Object obj) =
case (obj ^? Lens.ix "lamId" . _String, obj ^? Lens.ix "lamFieldParams" . _Array) of
(Just lamId, Just raw) ->
case traverse (^? _String) raw of
Nothing -> Left "Malformed 'lamFieldParams'"
Just params -> mempty & Lens.at lamId ?~ params ^.. Lens.folded & Right
_ -> Right mempty
collectLamParams _ = Right mempty
encodeParamList :: UUID -> Maybe [TagId] -> Aeson.Object
encodeParamList _ Nothing = mempty
encodeParamList baseId (Just params) =
"record" ~~>
(Aeson.Array . Vector.fromList)
((zip [0 :: Int ..] params <&> uncurry mkField) <> [rowTail])
where
rowTail =
"rowId" ~~> Aeson.toJSON (UUIDUtils.augment "tail" baseId)
& Aeson.Object
mkField i tagId =
mempty
& addId "id"
& addId "rowId"
& Lens.at "rowTag" ?~ Aeson.String tagId
& Aeson.Object
where
addId :: _ -> _ -> _
addId t x = x & Lens.at t ?~ Aeson.toJSON (UUIDUtils.augment (encodeS (i, t)) baseId)
migrateExpr :: LamParamsMap -> _ -> Either Text _
migrateExpr lamsMap obj =
(traverse . _Object) (migrateExpr lamsMap) obj <&>
case (obj ^? Lens.ix "lamVar", obj ^? Lens.ix "id" . _String, obj ^. Lens.at "id" <&> Aeson.fromJSON) of
(Just{}, Just lamId, Just (Aeson.Success lamExprId))->
Lens.at "lamParamType" ?~
( encodeParamList typId (lamsMap ^. Lens.at lamId)
& Lens.at "id" ?~ Aeson.toJSON typId
& Aeson.Object
)
where
typId = UUIDUtils.augment "to-version-13" lamExprId
_ -> id
migrateRowFields :: Aeson.Object -> Either Text [Aeson.Value]
migrateRowFields obj =
traverse (uncurry mkField) (obj ^@.. Lens.ifolded)
where
mkField key (Aeson.Object val) =
val
& Lens.at "rowTag" ?~ Aeson.toJSON key
& Aeson.Object & Right
mkField _ _ = Left "Malformed row item"
migrateRow :: Aeson.Value -> Either Text Aeson.Value
migrateRow (Aeson.Object obj) =
migrateRowFields obj <&> Aeson.Array . Vector.fromList
migrateRow (Aeson.Array v) =
case v ^.. traverse of
[Aeson.Object obj, Aeson.String rest] ->
migrateRowFields obj
<&> (<> ["rowVar" ~~> Aeson.String rest & Aeson.Object])
<&> Aeson.Array . Vector.fromList
_ -> Left "Malformed row"
migrateRow _ = Left "Malformed row"
migrateType :: _ -> Either Text _
migrateType obj =
(traverse . _Object) migrateType obj
>>= (traverse . _Array . traverse . _Object) migrateType
>>= Lens.ix "variant" migrateRow
>>= Lens.ix "record" migrateRow
migrateScheme :: Aeson.Value -> Either Text Aeson.Value
migrateScheme (Aeson.Object obj) = obj & (Lens.ix "schemeType" . _Object) migrateType <&> Aeson.Object
migrateScheme _ = Left "Malformed scheme"
migrateFrozenDeps :: _ -> Either Text _
migrateFrozenDeps obj =
obj
& (Lens.ix "defTypes" . _Object . traverse) migrateScheme
>>= (Lens.ix "nominals" . _Object . traverse . _Object . Lens.ix "nomType") migrateScheme
migrateEntity :: LamParamsMap -> Aeson.Value -> Either Text [Aeson.Value]
migrateEntity lamsMap (Aeson.Object obj)
| Lens.has (Lens.ix "lamId") obj =
Right
[ obj
& Lens.at "lamFieldParams" .~ Nothing
& Lens.at "lamId" .~ Nothing
& Aeson.Object
]
| otherwise =
obj
& (Lens.ix "val" . _Object) (migrateExpr lamsMap)
>>= (Lens.ix "frozenDeps" . _Object) migrateFrozenDeps
>>= (Lens.ix "repl" . _Object . Lens.ix "val" . _Object) (migrateExpr lamsMap)
>>= (Lens.ix "repl" . _Object . Lens.ix "frozenDeps" . _Object) migrateFrozenDeps
>>= Lens.ix "typ" migrateScheme
>>= Lens.ix "nomType" migrateScheme
<&> (:[]) . Aeson.Object
migrateEntity _ x = Right [x]
migrate :: Aeson.Value -> Either Text Aeson.Value
migrate =
migrateToVer 13 $
\xs ->
do
lamsMap <- traverse collectLamParams xs <&> (^. traverse)
traverse (migrateEntity lamsMap) xs <&> Vector.fromList . concat
| null | https://raw.githubusercontent.com/lamdu/lamdu/e31e36f5cef7b3c5d9123d799b45bb55a1d78efa/src/Lamdu/Data/Export/JSON/Migration/ToVersion13.hs | haskell | module Lamdu.Data.Export.JSON.Migration.ToVersion13 (migrate) where
import qualified Control.Lens as Lens
import Control.Lens.Extended ((~~>))
import qualified Data.Aeson as Aeson
import Data.Aeson.Instances ()
import Data.Aeson.Lens (_Array, _Object, _String)
import Data.Binary.Extended (encodeS)
import Data.UUID (UUID)
import qualified Data.UUID.Utils as UUIDUtils
import qualified Data.Vector as Vector
import Lamdu.Data.Export.JSON.Migration.Common (migrateToVer)
import Lamdu.Prelude
type LamId = Text
type TagId = Text
type LamParamsMap = Map LamId [TagId]
collectLamParams :: Aeson.Value -> Either Text LamParamsMap
collectLamParams (Aeson.Object obj) =
case (obj ^? Lens.ix "lamId" . _String, obj ^? Lens.ix "lamFieldParams" . _Array) of
(Just lamId, Just raw) ->
case traverse (^? _String) raw of
Nothing -> Left "Malformed 'lamFieldParams'"
Just params -> mempty & Lens.at lamId ?~ params ^.. Lens.folded & Right
_ -> Right mempty
collectLamParams _ = Right mempty
encodeParamList :: UUID -> Maybe [TagId] -> Aeson.Object
encodeParamList _ Nothing = mempty
encodeParamList baseId (Just params) =
"record" ~~>
(Aeson.Array . Vector.fromList)
((zip [0 :: Int ..] params <&> uncurry mkField) <> [rowTail])
where
rowTail =
"rowId" ~~> Aeson.toJSON (UUIDUtils.augment "tail" baseId)
& Aeson.Object
mkField i tagId =
mempty
& addId "id"
& addId "rowId"
& Lens.at "rowTag" ?~ Aeson.String tagId
& Aeson.Object
where
addId :: _ -> _ -> _
addId t x = x & Lens.at t ?~ Aeson.toJSON (UUIDUtils.augment (encodeS (i, t)) baseId)
migrateExpr :: LamParamsMap -> _ -> Either Text _
migrateExpr lamsMap obj =
(traverse . _Object) (migrateExpr lamsMap) obj <&>
case (obj ^? Lens.ix "lamVar", obj ^? Lens.ix "id" . _String, obj ^. Lens.at "id" <&> Aeson.fromJSON) of
(Just{}, Just lamId, Just (Aeson.Success lamExprId))->
Lens.at "lamParamType" ?~
( encodeParamList typId (lamsMap ^. Lens.at lamId)
& Lens.at "id" ?~ Aeson.toJSON typId
& Aeson.Object
)
where
typId = UUIDUtils.augment "to-version-13" lamExprId
_ -> id
migrateRowFields :: Aeson.Object -> Either Text [Aeson.Value]
migrateRowFields obj =
traverse (uncurry mkField) (obj ^@.. Lens.ifolded)
where
mkField key (Aeson.Object val) =
val
& Lens.at "rowTag" ?~ Aeson.toJSON key
& Aeson.Object & Right
mkField _ _ = Left "Malformed row item"
migrateRow :: Aeson.Value -> Either Text Aeson.Value
migrateRow (Aeson.Object obj) =
migrateRowFields obj <&> Aeson.Array . Vector.fromList
migrateRow (Aeson.Array v) =
case v ^.. traverse of
[Aeson.Object obj, Aeson.String rest] ->
migrateRowFields obj
<&> (<> ["rowVar" ~~> Aeson.String rest & Aeson.Object])
<&> Aeson.Array . Vector.fromList
_ -> Left "Malformed row"
migrateRow _ = Left "Malformed row"
migrateType :: _ -> Either Text _
migrateType obj =
(traverse . _Object) migrateType obj
>>= (traverse . _Array . traverse . _Object) migrateType
>>= Lens.ix "variant" migrateRow
>>= Lens.ix "record" migrateRow
migrateScheme :: Aeson.Value -> Either Text Aeson.Value
migrateScheme (Aeson.Object obj) = obj & (Lens.ix "schemeType" . _Object) migrateType <&> Aeson.Object
migrateScheme _ = Left "Malformed scheme"
migrateFrozenDeps :: _ -> Either Text _
migrateFrozenDeps obj =
obj
& (Lens.ix "defTypes" . _Object . traverse) migrateScheme
>>= (Lens.ix "nominals" . _Object . traverse . _Object . Lens.ix "nomType") migrateScheme
migrateEntity :: LamParamsMap -> Aeson.Value -> Either Text [Aeson.Value]
migrateEntity lamsMap (Aeson.Object obj)
| Lens.has (Lens.ix "lamId") obj =
Right
[ obj
& Lens.at "lamFieldParams" .~ Nothing
& Lens.at "lamId" .~ Nothing
& Aeson.Object
]
| otherwise =
obj
& (Lens.ix "val" . _Object) (migrateExpr lamsMap)
>>= (Lens.ix "frozenDeps" . _Object) migrateFrozenDeps
>>= (Lens.ix "repl" . _Object . Lens.ix "val" . _Object) (migrateExpr lamsMap)
>>= (Lens.ix "repl" . _Object . Lens.ix "frozenDeps" . _Object) migrateFrozenDeps
>>= Lens.ix "typ" migrateScheme
>>= Lens.ix "nomType" migrateScheme
<&> (:[]) . Aeson.Object
migrateEntity _ x = Right [x]
migrate :: Aeson.Value -> Either Text Aeson.Value
migrate =
migrateToVer 13 $
\xs ->
do
lamsMap <- traverse collectLamParams xs <&> (^. traverse)
traverse (migrateEntity lamsMap) xs <&> Vector.fromList . concat
|
|
8cee8f49fc333d83978b1103b45e17a1f48f2d2199e71f5e1d9262789cecce90 | janestreet/bonsai | main.ml | open! Core
open Bonsai_web
let widget uri : Vdom.Node.t =
[ init ] is called whenever [ uri ] changes , updating the favicon . The DOM element
produced by the widget is unused .
produced by the widget is unused. *)
Vdom.Node.widget
~id:(Type_equal.Id.create ~name:"favicon" (const [%sexp "favicon"]))
~init:(fun () ->
let open Js_of_ocaml in
let icon_node = Dom_html.document##querySelector (Js.string {|link[rel="icon"]|}) in
let href_value = uri |> Uri.to_string in
(match Js.Opt.to_option icon_node with
| Some icon_node ->
icon_node##setAttribute (Js.string "href") (Js.string href_value)
| None ->
let head = Dom_html.document##querySelector (Js.string "head") in
let link =
let open Vdom in
Node.create
"link"
~attr:
(Attr.many
[ Attr.type_ "image/svg+xml"
; Attr.create "rel" "icon"
; Attr.href href_value
])
[]
|> Node.to_dom
in
let link = (link :> Dom.node Js.t) in
Js.Opt.iter head (fun head -> ignore (head##appendChild link : Dom.node Js.t)));
(), Vdom.Node.to_dom Vdom.Node.none)
()
;;
let slider ~min ~max ~value ~inject =
let open Vdom in
Node.input
~attr:
(Attr.many
[ Attr.type_ "range"
; Attr.min min
; Attr.max max
; Attr.value (value |> string_of_int)
; Attr.on_input (fun _ev value -> inject (int_of_string value))
])
()
;;
let component =
let open Bonsai.Let_syntax in
let%sub text = Bonsai.state_opt (module String) ~default_model:"🤯" in
let%sub size = Bonsai.state (module Int) ~default_model:80 in
let%sub pos_x = Bonsai.state (module Int) ~default_model:50 in
let%sub pos_y = Bonsai.state (module Int) ~default_model:50 in
let%sub fg_color = Bonsai.state (module String) ~default_model:"#000000" in
let%sub bg_color = Bonsai.state (module String) ~default_model:"#ffffff" in
let%arr text, inject_text = text
and size, inject_size = size
and pos_x, inject_pos_x = pos_x
and pos_y, inject_pos_y = pos_y
and fg_color, inject_fg_color = fg_color
and bg_color, inject_bg_color = bg_color in
let open Vdom in
let text_box =
Vdom_input_widgets.Entry.text
~merge_behavior:Legacy_dont_merge
~value:text
~on_input:inject_text
()
in
let size_slider = slider ~min:1. ~max:200. ~value:size ~inject:inject_size in
let x_slider = slider ~min:0. ~max:100. ~value:pos_x ~inject:inject_pos_x in
let y_slider = slider ~min:0. ~max:100. ~value:pos_y ~inject:inject_pos_y in
let background_color =
if String.equal "#ffffff" bg_color then None else Some (`Hex bg_color)
in
let favicon =
Favicon_svg.of_unicode
~font_size:(Percent.of_percentage (float_of_int size))
~pos_x:(Percent.of_percentage (float_of_int pos_x))
~pos_y:(Percent.of_percentage (float_of_int pos_y))
?background_color
~font_color:(`Hex fg_color)
(Option.value text ~default:"")
in
let uri = Favicon_svg.to_embedded_url favicon in
let image =
Node.create
"img"
~attr:(Attr.many [ Attr.src (Uri.to_string uri); Attr.class_ "svg-preview" ])
[]
in
let code_section =
match text with
| None -> []
| Some text ->
let attr fmt cond value = Option.some_if (cond value) (fmt value ^ "\n ") in
let non eq x y = not (eq x y) in
let attrs =
[ attr
(sprintf "~font_size:(Percent.of_percentage %.1f)")
(Fn.const true)
(float_of_int size)
; attr
(sprintf "~pos_x:(Percent.of_percentage %.1f)")
(non Float.equal 50.)
(float_of_int pos_x)
; attr
(sprintf "~pos_y:(Percent.of_percentage %.1f)")
(non Float.equal 50.)
(float_of_int pos_y)
; attr (sprintf {|~font_color:(`Hex "%s")|}) (non String.equal "#000000") fg_color
; attr
(sprintf {|~background_color:(`Hex "%s")|})
(non String.equal "#ffffff")
bg_color
]
in
let attrs = List.filter_opt attrs |> String.concat in
[ Node.div ~attr:(Attr.class_ "section-header") [ Node.text "Code:" ]
; Node.pre
[ [%string {|
Favicon_svg.of_unicode
%{attrs}"%{text}" |}]
|> String.strip
|> Node.text
]
]
in
let spacer x = Node.div ~attr:(Attr.style (Css_gen.height (`Px x))) [] in
Node.div
~attr:(Attr.class_ "container")
([ widget uri
; Node.h3
[ Node.text "What would you like "
; Node.create "i" [ Node.text "your" ]
; Node.text " favicon to say?"
]
; spacer 10
; text_box
; spacer 20
; Node.div ~attr:(Attr.class_ "section-header") [ Node.text "Preview:" ]
; spacer 10
; image
; spacer 20
; Node.div ~attr:(Attr.class_ "section-header") [ Node.text "Fine tuning:" ]
; spacer 5
; Node.div [ Node.text "Size: "; size_slider ]
; Node.div [ Node.text "Pos x: "; x_slider ]
; Node.div [ Node.text "Pos y: "; y_slider ]
; Node.div
[ Node.text "Text color: "
; Node.input
~attr:
(Attr.many
[ Attr.type_ "color"
; Attr.value fg_color
; Attr.on_input (fun _ev value -> inject_fg_color value)
])
()
]
; spacer 5
; Node.div
[ Node.text "Background color: "
; Node.input
~attr:
(Attr.many
[ Attr.type_ "color"
; Attr.value bg_color
; Attr.on_input (fun _ev value -> inject_bg_color value)
])
()
]
; spacer 20
]
@ code_section)
;;
let run () =
Async_js.init ();
Bonsai_web.Start.start component
;;
let () = run ()
| null | https://raw.githubusercontent.com/janestreet/bonsai/782fecd000a1f97b143a3f24b76efec96e36a398/examples/favicon_svg/main.ml | ocaml | open! Core
open Bonsai_web
let widget uri : Vdom.Node.t =
[ init ] is called whenever [ uri ] changes , updating the favicon . The DOM element
produced by the widget is unused .
produced by the widget is unused. *)
Vdom.Node.widget
~id:(Type_equal.Id.create ~name:"favicon" (const [%sexp "favicon"]))
~init:(fun () ->
let open Js_of_ocaml in
let icon_node = Dom_html.document##querySelector (Js.string {|link[rel="icon"]|}) in
let href_value = uri |> Uri.to_string in
(match Js.Opt.to_option icon_node with
| Some icon_node ->
icon_node##setAttribute (Js.string "href") (Js.string href_value)
| None ->
let head = Dom_html.document##querySelector (Js.string "head") in
let link =
let open Vdom in
Node.create
"link"
~attr:
(Attr.many
[ Attr.type_ "image/svg+xml"
; Attr.create "rel" "icon"
; Attr.href href_value
])
[]
|> Node.to_dom
in
let link = (link :> Dom.node Js.t) in
Js.Opt.iter head (fun head -> ignore (head##appendChild link : Dom.node Js.t)));
(), Vdom.Node.to_dom Vdom.Node.none)
()
;;
let slider ~min ~max ~value ~inject =
let open Vdom in
Node.input
~attr:
(Attr.many
[ Attr.type_ "range"
; Attr.min min
; Attr.max max
; Attr.value (value |> string_of_int)
; Attr.on_input (fun _ev value -> inject (int_of_string value))
])
()
;;
let component =
let open Bonsai.Let_syntax in
let%sub text = Bonsai.state_opt (module String) ~default_model:"🤯" in
let%sub size = Bonsai.state (module Int) ~default_model:80 in
let%sub pos_x = Bonsai.state (module Int) ~default_model:50 in
let%sub pos_y = Bonsai.state (module Int) ~default_model:50 in
let%sub fg_color = Bonsai.state (module String) ~default_model:"#000000" in
let%sub bg_color = Bonsai.state (module String) ~default_model:"#ffffff" in
let%arr text, inject_text = text
and size, inject_size = size
and pos_x, inject_pos_x = pos_x
and pos_y, inject_pos_y = pos_y
and fg_color, inject_fg_color = fg_color
and bg_color, inject_bg_color = bg_color in
let open Vdom in
let text_box =
Vdom_input_widgets.Entry.text
~merge_behavior:Legacy_dont_merge
~value:text
~on_input:inject_text
()
in
let size_slider = slider ~min:1. ~max:200. ~value:size ~inject:inject_size in
let x_slider = slider ~min:0. ~max:100. ~value:pos_x ~inject:inject_pos_x in
let y_slider = slider ~min:0. ~max:100. ~value:pos_y ~inject:inject_pos_y in
let background_color =
if String.equal "#ffffff" bg_color then None else Some (`Hex bg_color)
in
let favicon =
Favicon_svg.of_unicode
~font_size:(Percent.of_percentage (float_of_int size))
~pos_x:(Percent.of_percentage (float_of_int pos_x))
~pos_y:(Percent.of_percentage (float_of_int pos_y))
?background_color
~font_color:(`Hex fg_color)
(Option.value text ~default:"")
in
let uri = Favicon_svg.to_embedded_url favicon in
let image =
Node.create
"img"
~attr:(Attr.many [ Attr.src (Uri.to_string uri); Attr.class_ "svg-preview" ])
[]
in
let code_section =
match text with
| None -> []
| Some text ->
let attr fmt cond value = Option.some_if (cond value) (fmt value ^ "\n ") in
let non eq x y = not (eq x y) in
let attrs =
[ attr
(sprintf "~font_size:(Percent.of_percentage %.1f)")
(Fn.const true)
(float_of_int size)
; attr
(sprintf "~pos_x:(Percent.of_percentage %.1f)")
(non Float.equal 50.)
(float_of_int pos_x)
; attr
(sprintf "~pos_y:(Percent.of_percentage %.1f)")
(non Float.equal 50.)
(float_of_int pos_y)
; attr (sprintf {|~font_color:(`Hex "%s")|}) (non String.equal "#000000") fg_color
; attr
(sprintf {|~background_color:(`Hex "%s")|})
(non String.equal "#ffffff")
bg_color
]
in
let attrs = List.filter_opt attrs |> String.concat in
[ Node.div ~attr:(Attr.class_ "section-header") [ Node.text "Code:" ]
; Node.pre
[ [%string {|
Favicon_svg.of_unicode
%{attrs}"%{text}" |}]
|> String.strip
|> Node.text
]
]
in
let spacer x = Node.div ~attr:(Attr.style (Css_gen.height (`Px x))) [] in
Node.div
~attr:(Attr.class_ "container")
([ widget uri
; Node.h3
[ Node.text "What would you like "
; Node.create "i" [ Node.text "your" ]
; Node.text " favicon to say?"
]
; spacer 10
; text_box
; spacer 20
; Node.div ~attr:(Attr.class_ "section-header") [ Node.text "Preview:" ]
; spacer 10
; image
; spacer 20
; Node.div ~attr:(Attr.class_ "section-header") [ Node.text "Fine tuning:" ]
; spacer 5
; Node.div [ Node.text "Size: "; size_slider ]
; Node.div [ Node.text "Pos x: "; x_slider ]
; Node.div [ Node.text "Pos y: "; y_slider ]
; Node.div
[ Node.text "Text color: "
; Node.input
~attr:
(Attr.many
[ Attr.type_ "color"
; Attr.value fg_color
; Attr.on_input (fun _ev value -> inject_fg_color value)
])
()
]
; spacer 5
; Node.div
[ Node.text "Background color: "
; Node.input
~attr:
(Attr.many
[ Attr.type_ "color"
; Attr.value bg_color
; Attr.on_input (fun _ev value -> inject_bg_color value)
])
()
]
; spacer 20
]
@ code_section)
;;
let run () =
Async_js.init ();
Bonsai_web.Start.start component
;;
let () = run ()
|
|
aadc449826d9430c3f49fa591fa9073bb94dc6867ea103e97227e19de3199654 | pentlandedge/s4607 | dwell.erl | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Copyright 2016 Pentland Edge Ltd.
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
%% use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
%% License for the specific language governing permissions and limitations
%% under the License.
%%
-module(dwell).
%% Main API functions.
-export([
decode/1,
encode/1,
new/1,
payload_size/1,
payload_size/2,
to_dict/1,
display/1,
to_csv_iolist/1,
set_dwell_time/2,
update_targets/2]).
Accessor functions for accessing elements of the dwell segment .
-export([
get_existence_mask/1,
get_revisit_index/1,
get_dwell_index/1,
get_last_dwell_of_revisit/1,
get_target_report_count/1,
get_dwell_time/1,
get_sensor_lat/1,
get_sensor_lon/1,
get_sensor_alt/1,
get_lat_scale_factor/1,
get_lon_scale_factor/1,
get_spu_along_track/1,
get_spu_cross_track/1,
get_spu_alt/1,
get_sensor_track/1,
get_sensor_speed/1,
get_sensor_vert_vel/1,
get_sensor_track_unc/1,
get_sensor_speed_unc/1,
get_sensor_vert_vel_unc/1,
get_platform_heading/1,
get_platform_pitch/1,
get_platform_roll/1,
get_dwell_center_lat/1,
get_dwell_center_lon/1,
get_dwell_range_half_extent/1,
get_dwell_angle_half_extent/1,
get_sensor_heading/1,
get_sensor_pitch/1,
get_sensor_roll/1,
get_mdv/1,
get_targets/1]).
-record(dwell_segment, {
existence_mask,
revisit_index,
dwell_index,
last_dwell_of_revisit,
target_report_count,
dwell_time,
sensor_lat,
sensor_lon,
sensor_alt,
lat_scale_factor,
lon_scale_factor,
spu_along_track,
spu_cross_track,
spu_alt,
sensor_track,
sensor_speed,
sensor_vert_vel,
sensor_track_unc,
sensor_speed_unc,
sensor_vert_vel_unc,
platform_heading,
platform_pitch,
platform_roll,
dwell_center_lat,
dwell_center_lon,
dwell_range_half_extent,
dwell_angle_half_extent,
sensor_heading,
sensor_pitch,
sensor_roll,
mdv,
targets}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Type specifications.
-opaque dwell_segment() :: #dwell_segment{}.
-export_type([dwell_segment/0]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Dwell segment decoding functions .
%% @doc Decode a binary dwell segment into structured form.
decode(<<EM:8/binary,RI:16/integer-unsigned-big,
DI:16/integer-unsigned-big,LD,TRC:16/integer-unsigned-big,
DT:32/integer-unsigned-big,SLat:4/binary,SLon:4/binary,SAlt:4/binary,
Rest/binary>>) ->
% Fixed part of the dwell segement is pattern matched above, remainder
% depends on the existence mask.
EMrec = exist_mask:decode(EM),
{LatScaleFactor, Bin1} = sutils:conditional_extract(
Rest,
exist_mask:get_lat_scale_factor(EMrec),
4,
fun stanag_types:sa32_to_float/1,
1.0),
{LonScaleFactor, Bin2} = sutils:conditional_extract(
Bin1,
exist_mask:get_lon_scale_factor(EMrec),
4,
fun stanag_types:ba32_to_float/1,
1.0),
{SpuAlongTrack, Bin3} = sutils:conditional_extract(
Bin2,
exist_mask:get_spu_cross_track(EMrec),
4,
fun stanag_types:i32_to_integer/1,
0),
{SpuCrossTrack, Bin4} = sutils:conditional_extract(
Bin3,
exist_mask:get_spu_cross_track(EMrec),
4,
fun stanag_types:i32_to_integer/1,
0),
{SpuAlt, Bin5} = sutils:conditional_extract(
Bin4,
exist_mask:get_spu_alt(EMrec),
2,
fun stanag_types:i16_to_integer/1,
0),
{SensorTrack, Bin6} = sutils:conditional_extract(
Bin5,
exist_mask:get_sensor_track(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{SensorSpeed, Bin7} = sutils:conditional_extract(
Bin6,
exist_mask:get_sensor_speed(EMrec),
4,
fun stanag_types:i32_to_integer/1,
0),
{SensorVertVel, Bin8} = sutils:conditional_extract(
Bin7,
exist_mask:get_sensor_vert_vel(EMrec),
1,
fun stanag_types:s8_to_integer/1,
0),
{SensorTrackUnc, Bin9} = sutils:conditional_extract(
Bin8,
exist_mask:get_sensor_track_unc(EMrec),
1,
fun stanag_types:i8_to_integer/1,
0),
{SensorSpeedUnc, Bin10} = sutils:conditional_extract(
Bin9,
exist_mask:get_sensor_speed_unc(EMrec),
2,
fun stanag_types:i16_to_integer/1,
0),
{SensorVertVelUnc, Bin11} = sutils:conditional_extract(
Bin10,
exist_mask:get_sensor_vert_vel_unc(EMrec),
2,
fun stanag_types:i16_to_integer/1,
0),
{PlatHeading, Bin12} = sutils:conditional_extract(
Bin11,
exist_mask:get_platform_heading(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{PlatPitch, Bin13} = sutils:conditional_extract(
Bin12,
exist_mask:get_platform_pitch(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{PlatRoll, Bin14} = sutils:conditional_extract(
Bin13,
exist_mask:get_platform_roll(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{DwellCenterLat, Bin15} = sutils:conditional_extract(
Bin14,
exist_mask:get_dwell_center_lat(EMrec),
4,
fun stanag_types:sa32_to_float/1,
0.0),
{DwellCenterLon, Bin16} = sutils:conditional_extract(
Bin15,
exist_mask:get_dwell_center_lon(EMrec),
4,
fun stanag_types:ba32_to_float/1,
0.0),
{DwellRangeHalfExtent, Bin17} = sutils:conditional_extract(
Bin16,
exist_mask:get_dwell_range_half_extent(EMrec),
2,
fun stanag_types:b16_to_float/1,
0.0),
{DwellAngleHalfExtent, Bin18} = sutils:conditional_extract(
Bin17,
exist_mask:get_dwell_angle_half_extent(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{SensorHeading, Bin19} = sutils:conditional_extract(
Bin18,
exist_mask:get_sensor_heading(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{SensorPitch, Bin20} = sutils:conditional_extract(
Bin19,
exist_mask:get_sensor_pitch(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{SensorRoll, Bin21} = sutils:conditional_extract(
Bin20,
exist_mask:get_sensor_roll(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{MDV, Bin22} = sutils:conditional_extract(
Bin21,
exist_mask:get_mdv(EMrec),
1,
fun stanag_types:i8_to_integer/1,
0),
TgtRepList = decode_target_report_list(Bin22, EMrec, TRC),
{ok, #dwell_segment{
existence_mask = EMrec,
revisit_index = RI,
dwell_index = DI,
last_dwell_of_revisit = decode_last_dwell_of_revisit(LD),
target_report_count = TRC,
dwell_time = DT,
sensor_lat = stanag_types:sa32_to_float(SLat),
sensor_lon = stanag_types:ba32_to_float(SLon),
sensor_alt = stanag_types:s32_to_integer(SAlt),
lat_scale_factor = LatScaleFactor,
lon_scale_factor = LonScaleFactor,
spu_along_track = SpuAlongTrack,
spu_cross_track = SpuCrossTrack,
spu_alt = SpuAlt,
sensor_track = SensorTrack,
sensor_speed = SensorSpeed,
sensor_vert_vel = SensorVertVel,
sensor_track_unc = SensorTrackUnc,
sensor_speed_unc = SensorSpeedUnc,
sensor_vert_vel_unc = SensorVertVelUnc,
platform_heading = PlatHeading,
platform_pitch = PlatPitch,
platform_roll = PlatRoll,
dwell_center_lat = DwellCenterLat,
dwell_center_lon = DwellCenterLon,
dwell_range_half_extent = DwellRangeHalfExtent,
dwell_angle_half_extent = DwellAngleHalfExtent,
sensor_heading = SensorHeading,
sensor_pitch = SensorPitch,
sensor_roll = SensorRoll,
mdv = MDV,
targets = TgtRepList}};
decode(_) ->
{error, dwell_mismatch}.
%% @doc Encode a dwell segment record in its binary form.
encode(DS) ->
% Extract the existence mask from the incoming dwell segment.
EM = get_existence_mask(DS),
% Create a local function to wrap the check of the existence mask and
% the parameter encoding/appending.
% Exploits the trick that the function to access the dwell segment
% fields is the same as that to access the corresponding existence mask
% field.
F = fun({EvalFun, EncFun}, Acc) ->
case exist_mask:EvalFun(EM) of
1 ->
Param = dwell:EvalFun(DS),
PB = EncFun(Param),
<<Acc/binary,PB/binary>>;
0 ->
Acc
end
end,
%% Encode does not yet cater for adding the actual target reports.
ParamTable = [
{get_revisit_index, fun stanag_types:integer_to_i16/1},
{get_dwell_index, fun stanag_types:integer_to_i16/1},
{get_last_dwell_of_revisit, fun encode_last_dwell_of_revisit/1},
{get_target_report_count, fun stanag_types:integer_to_i16/1},
{get_dwell_time, fun stanag_types:integer_to_i32/1},
{get_sensor_lat, fun stanag_types:float_to_sa32/1},
{get_sensor_lon, fun stanag_types:float_to_ba32/1},
{get_sensor_alt, fun stanag_types:integer_to_s32/1},
{get_lat_scale_factor, fun stanag_types:float_to_sa32/1},
{get_lon_scale_factor, fun stanag_types:float_to_ba32/1},
{get_spu_along_track, fun stanag_types:integer_to_i32/1},
{get_spu_cross_track, fun stanag_types:integer_to_i32/1},
{get_spu_alt, fun stanag_types:integer_to_i16/1},
{get_sensor_track, fun stanag_types:float_to_ba16/1},
{get_sensor_speed, fun stanag_types:integer_to_i32/1},
{get_sensor_vert_vel, fun stanag_types:integer_to_s8/1},
{get_sensor_track_unc, fun stanag_types:integer_to_i8/1},
{get_sensor_speed_unc, fun stanag_types:integer_to_i16/1},
{get_sensor_vert_vel_unc, fun stanag_types:integer_to_i16/1},
{get_platform_heading, fun stanag_types:float_to_ba16/1},
{get_platform_pitch, fun stanag_types:float_to_sa16/1},
{get_platform_roll, fun stanag_types:float_to_sa16/1},
{get_dwell_center_lat, fun stanag_types:float_to_sa32/1},
{get_dwell_center_lon, fun stanag_types:float_to_ba32/1},
{get_dwell_range_half_extent, fun stanag_types:float_to_b16/1},
{get_dwell_angle_half_extent, fun stanag_types:float_to_ba16/1},
{get_sensor_heading, fun stanag_types:float_to_sa16/1},
{get_sensor_pitch, fun stanag_types:float_to_sa16/1},
{get_sensor_roll, fun stanag_types:float_to_sa16/1},
{get_mdv, fun stanag_types:integer_to_i8/1}],
EMenc = exist_mask:encode(EM),
% Produce a binary dwell segment, missing only the target reports.
Bin1 = lists:foldl(F, EMenc, ParamTable),
% Append any target reports.
encode_target_reports(get_target_report_count(DS), get_targets(DS), EM, Bin1).
%% @doc Helper function for the dwell encode: encodes the target reports.
InitBin can be set to the dwell report prior to adding the target reports
%% so that the target reports are automatically added to the end of the
%% dwell segment.
encode_target_reports(0, _RepList, _EM, InitBin) ->
InitBin;
encode_target_reports(RepCount, RepList, EM, InitBin)
when RepCount =:= length(RepList) ->
F = fun(Rep, Acc) ->
Bin = tgt_report:encode(Rep, EM),
<<Acc/binary,Bin/binary>>
end,
lists:foldl(F, InitBin, RepList).
%% @doc Create a new dwell segment record from a property list of fields.
%% Any field not supplied in Fields receives a default: 0 for scalar
%% parameters and [] for the list of target reports.
new(Fields) ->
    %% Fetch a parameter from the supplied property list, falling back to
    %% the given default when the key is absent.
    Lookup = fun(Key, Proplist, Default) ->
                 case lists:keyfind(Key, 1, Proplist) of
                     {Key, Value} -> Value;
                     false        -> Default
                 end
             end,
    #dwell_segment{
        existence_mask = Lookup(existence_mask, Fields, 0),
        revisit_index = Lookup(revisit_index, Fields, 0),
        dwell_index = Lookup(dwell_index, Fields, 0),
        last_dwell_of_revisit = Lookup(last_dwell_of_revisit, Fields, 0),
        target_report_count = Lookup(target_report_count, Fields, 0),
        dwell_time = Lookup(dwell_time, Fields, 0),
        sensor_lat = Lookup(sensor_lat, Fields, 0),
        sensor_lon = Lookup(sensor_lon, Fields, 0),
        sensor_alt = Lookup(sensor_alt, Fields, 0),
        lat_scale_factor = Lookup(lat_scale_factor, Fields, 0),
        lon_scale_factor = Lookup(lon_scale_factor, Fields, 0),
        spu_along_track = Lookup(spu_along_track, Fields, 0),
        spu_cross_track = Lookup(spu_cross_track, Fields, 0),
        spu_alt = Lookup(spu_alt, Fields, 0),
        sensor_track = Lookup(sensor_track, Fields, 0),
        sensor_speed = Lookup(sensor_speed, Fields, 0),
        sensor_vert_vel = Lookup(sensor_vert_vel, Fields, 0),
        sensor_track_unc = Lookup(sensor_track_unc, Fields, 0),
        sensor_speed_unc = Lookup(sensor_speed_unc, Fields, 0),
        sensor_vert_vel_unc = Lookup(sensor_vert_vel_unc, Fields, 0),
        platform_heading = Lookup(platform_heading, Fields, 0),
        platform_pitch = Lookup(platform_pitch, Fields, 0),
        platform_roll = Lookup(platform_roll, Fields, 0),
        dwell_center_lat = Lookup(dwell_center_lat, Fields, 0),
        dwell_center_lon = Lookup(dwell_center_lon, Fields, 0),
        dwell_range_half_extent = Lookup(dwell_range_half_extent, Fields, 0),
        dwell_angle_half_extent = Lookup(dwell_angle_half_extent, Fields, 0),
        sensor_heading = Lookup(sensor_heading, Fields, 0),
        sensor_pitch = Lookup(sensor_pitch, Fields, 0),
        sensor_roll = Lookup(sensor_roll, Fields, 0),
        mdv = Lookup(mdv, Fields, 0),
        targets = Lookup(targets, Fields, [])}.
%% @doc Calculate the expected size of a dwell segment once encoded.
%% Delegates to payload_size/2 using the segment's own existence mask
%% and target report count.
payload_size(DS) ->
    payload_size(get_existence_mask(DS), get_target_report_count(DS)).
%% @doc Calculate the size of an encoded dwell segment payload from the
%% existence mask and the target report count.
payload_size(EM, TgtRepCount) ->
    %% Size in octets of each conditional field, keyed by the existence
    %% mask accessor that indicates whether the field is present.
    FieldSizes = [
        {get_revisit_index, 2},
        {get_dwell_index, 2},
        {get_last_dwell_of_revisit, 1},
        {get_target_report_count, 2},
        {get_dwell_time, 4},
        {get_sensor_lat, 4},
        {get_sensor_lon, 4},
        {get_sensor_alt, 4},
        {get_lat_scale_factor, 4},
        {get_lon_scale_factor, 4},
        {get_spu_along_track, 4},
        {get_spu_cross_track, 4},
        {get_spu_alt, 2},
        {get_sensor_track, 2},
        {get_sensor_speed, 4},
        {get_sensor_vert_vel, 1},
        {get_sensor_track_unc, 1},
        {get_sensor_speed_unc, 2},
        {get_sensor_vert_vel_unc, 2},
        {get_platform_heading, 2},
        {get_platform_pitch, 2},
        {get_platform_roll, 2},
        {get_dwell_center_lat, 4},
        {get_dwell_center_lon, 4},
        {get_dwell_range_half_extent, 2},
        {get_dwell_angle_half_extent, 2},
        {get_sensor_heading, 2},
        {get_sensor_pitch, 2},
        {get_sensor_roll, 2},
        {get_mdv, 1}],
    %% Add a field's size to the running total only when its existence
    %% mask bit is set.
    AddIfPresent =
        fun({GetFn, Octets}, Total) ->
            case exist_mask:GetFn(EM) of
                1 -> Total + Octets;
                0 -> Total
            end
        end,
    %% Accumulate the total size of all included parameters (excluding
    %% the target reports). The initial total of 8 octets accounts for
    %% the existence mask itself.
    DwellSize = lists:foldl(AddIfPresent, 8, FieldSizes),
    %% Add the size contributed by the target reports and return.
    DwellSize + TgtRepCount * tgt_report:payload_size(EM).
%% @doc Convert the dwell segment into a dictionary. Only parameters
%% flagged as present in the existence mask are stored. The target
%% reports are converted to a list of dictionaries and stored under the
%% key 'targets'.
to_dict(DS) ->
    % Extract the existence mask from the incoming dwell segment.
    EM = get_existence_mask(DS),
    % Table definition of the form [{Name, Accessor}].
    % The accessor function name is used with both the dwell segment and the
    % existence mask.
    ParamTable = [
        {revisit_index, get_revisit_index},
        {dwell_index, get_dwell_index},
        {last_dwell_of_revisit, get_last_dwell_of_revisit},
        {target_report_count, get_target_report_count},
        {dwell_time, get_dwell_time},
        {sensor_lat, get_sensor_lat},
        {sensor_lon, get_sensor_lon},
        {sensor_alt, get_sensor_alt},
        {lat_scale_factor, get_lat_scale_factor},
        {lon_scale_factor, get_lon_scale_factor},
        {spu_along_track, get_spu_along_track},
        {spu_cross_track, get_spu_cross_track},
        {spu_alt, get_spu_alt},
        {sensor_track, get_sensor_track},
        {sensor_speed, get_sensor_speed},
        {sensor_vert_vel, get_sensor_vert_vel},
        {sensor_track_unc, get_sensor_track_unc},
        {sensor_speed_unc, get_sensor_speed_unc},
        {sensor_vert_vel_unc, get_sensor_vert_vel_unc},
        {platform_heading, get_platform_heading},
        {platform_pitch, get_platform_pitch},
        {platform_roll, get_platform_roll},
        {dwell_center_lat, get_dwell_center_lat},
        {dwell_center_lon, get_dwell_center_lon},
        {dwell_range_half_extent, get_dwell_range_half_extent},
        {dwell_angle_half_extent, get_dwell_angle_half_extent},
        {sensor_heading, get_sensor_heading},
        {sensor_pitch, get_sensor_pitch},
        {sensor_roll, get_sensor_roll},
        % Bug fix: this entry previously read {get_mdv, get_mdv}, which
        % stored the value under the key 'get_mdv' instead of 'mdv',
        % inconsistent with every other entry and the record field name.
        {mdv, get_mdv}],
    % Function to convert each field present in the existence mask into a
    % dictionary element.
    F = fun({Param, GetFn}, Acc) ->
            case exist_mask:GetFn(EM) of
                1 ->
                    P = dwell:GetFn(DS),
                    dict:store(Param, P, Acc);
                0 ->
                    Acc
            end
        end,
    Dict1 = lists:foldl(F, dict:new(), ParamTable),
    % Add the target reports. Convert a list of records to a list of
    % dictionaries.
    Targets = dwell:get_targets(DS),
    T = fun(TgtRep) ->
            tgt_report:to_dict(TgtRep, EM)
        end,
    TgtDictList = lists:map(T, Targets),
    dict:store(targets, TgtDictList, Dict1).
%% @doc Decode the "last dwell of revisit" flag octet to a descriptive
%% atom. Any value other than 0 or 1 crashes with function_clause.
decode_last_dwell_of_revisit(1) -> no_additional_dwells;
decode_last_dwell_of_revisit(0) -> additional_dwells.
%% @doc Encode the "last dwell of revisit" atom as a single octet binary.
encode_last_dwell_of_revisit(additional_dwells)    -> <<0:8>>;
encode_last_dwell_of_revisit(no_additional_dwells) -> <<1:8>>.
%% Function to walk through a binary containing a number of target
%% reports, decoding each in turn and returning them as a list.
decode_target_report_list(Bin, EM, TgtCount) ->
    decode_target_report_list(Bin, EM, TgtCount, []).

%% Helper carrying an accumulator of the reports decoded so far, in
%% reverse order; reversed once the requested count has been consumed.
decode_target_report_list(_Bin, _EM, 0, Acc) ->
    lists:reverse(Acc);
decode_target_report_list(Bin, EM, N, Acc) when N > 0 ->
    {ok, Report, Rest} = tgt_report:decode(Bin, EM),
    decode_target_report_list(Rest, EM, N - 1, [Report|Acc]).
%% @doc Display the contents of a dwell segment in the console.
%% Mandatory fields are printed unconditionally; the remaining fields are
%% printed only when flagged as present in the segment's existence mask.
display(DS) ->
    io:format("****************************************~n"),
    io:format("** @dwell~n"),
    % The existence mask governs which conditional fields are shown.
    EM = DS#dwell_segment.existence_mask,
    exist_mask:display(EM),
    % Mandatory fields.
    io:format("Revisit index: ~p~n", [get_revisit_index(DS)]),
    io:format("Dwell index: ~p~n", [get_dwell_index(DS)]),
    io:format("Last dwell of revisit: ~p~n", [get_last_dwell_of_revisit(DS)]),
    io:format("Target report count: ~p~n", [get_target_report_count(DS)]),
    io:format("Dwell time: ~p~n", [get_dwell_time(DS)]),
    io:format("Sensor Lat.: ~p~n", [get_sensor_lat(DS)]),
    io:format("Sensor Lon.: ~p~n", [get_sensor_lon(DS)]),
    io:format("Sensor alt. (cm): ~p~n", [get_sensor_alt(DS)]),
    % Conditional fields: shown only when the matching existence mask bit
    % is set.
    sutils:conditional_display("Lat. scale factor: ~p~n", [get_lat_scale_factor(DS)], exist_mask:get_lat_scale_factor(EM)),
    sutils:conditional_display("Lon. scale factor: ~p~n", [get_lon_scale_factor(DS)], exist_mask:get_lon_scale_factor(EM)),
    sutils:conditional_display("SPU along track: ~p~n", [get_spu_along_track(DS)], exist_mask:get_spu_along_track(EM)),
    sutils:conditional_display("SPU cross track: ~p~n", [get_spu_cross_track(DS)], exist_mask:get_spu_cross_track(EM)),
    sutils:conditional_display("SPU alt: ~p~n", [get_spu_alt(DS)], exist_mask:get_spu_alt(EM)),
    sutils:conditional_display("Sensor track: ~p~n", [get_sensor_track(DS)], exist_mask:get_sensor_track(EM)),
    sutils:conditional_display("Sensor speed: ~p~n", [get_sensor_speed(DS)], exist_mask:get_sensor_speed(EM)),
    sutils:conditional_display("Sensor vert. vel.: ~p~n", [get_sensor_vert_vel(DS)], exist_mask:get_sensor_vert_vel(EM)),
    sutils:conditional_display("Sensor track unc.: ~p~n", [get_sensor_track_unc(DS)], exist_mask:get_sensor_track_unc(EM)),
    sutils:conditional_display("Sensor speed unc.: ~p~n", [get_sensor_speed_unc(DS)], exist_mask:get_sensor_speed_unc(EM)),
    sutils:conditional_display("Sensor vert. vel. unc.: ~p~n", [get_sensor_vert_vel_unc(DS)], exist_mask:get_sensor_vert_vel_unc(EM)),
    sutils:conditional_display("Platform heading: ~p~n", [get_platform_heading(DS)], exist_mask:get_platform_heading(EM)),
    sutils:conditional_display("Platform pitch: ~p~n", [get_platform_pitch(DS)], exist_mask:get_platform_pitch(EM)),
    sutils:conditional_display("Platform roll: ~p~n", [get_platform_roll(DS)], exist_mask:get_platform_roll(EM)),
    sutils:conditional_display("Dwell centre Lat.: ~p~n", [get_dwell_center_lat(DS)], exist_mask:get_dwell_center_lat(EM)),
    sutils:conditional_display("Dwell centre Lon.: ~p~n", [get_dwell_center_lon(DS)], exist_mask:get_dwell_center_lon(EM)),
    sutils:conditional_display("Dwell range half extent: ~p~n", [get_dwell_range_half_extent(DS)], exist_mask:get_dwell_range_half_extent(EM)),
    sutils:conditional_display("Dwell angle half extent: ~p~n", [get_dwell_angle_half_extent(DS)], exist_mask:get_dwell_angle_half_extent(EM)),
    sutils:conditional_display("Sensor heading: ~p~n", [get_sensor_heading(DS)], exist_mask:get_sensor_heading(EM)),
    sutils:conditional_display("Sensor pitch: ~p~n", [get_sensor_pitch(DS)], exist_mask:get_sensor_pitch(EM)),
    sutils:conditional_display("Sensor roll: ~p~n", [get_sensor_roll(DS)], exist_mask:get_sensor_roll(EM)),
    sutils:conditional_display("MDV: ~p~n", [get_mdv(DS)], exist_mask:get_mdv(EM)),
    % Display each of the target reports in turn.
    % NOTE(review): lists:map is used for its side effects here; the list
    % of results from tgt_report:display/2 becomes display/1's return value.
    F = fun(TR) -> tgt_report:display(TR, EM) end,
    lists:map(F, DS#dwell_segment.targets).
%% @doc Convert a dwell segment to a CSV iolist: one line of dwell
%% parameters (prefixed with the "DS" line identifier), followed by one
%% line per target report. Parameters whose existence mask bit is clear
%% are emitted as empty fields.
-spec to_csv_iolist(DS::dwell_segment()) -> iolist().
to_csv_iolist(DS) ->
    EM = dwell:get_existence_mask(DS),
    % Format a single parameter, producing an empty field when the
    % corresponding existence mask bit is not set.
    F = fun({FmtStr, FnName}) ->
            Args = [dwell:FnName(DS)],
            ExistBit = exist_mask:FnName(EM),
            sutils:conditional_format(FmtStr, Args, ExistBit)
        end,
    % Table of {format string, accessor} pairs; the accessor name is used
    % with both the dwell segment and the existence mask.
    Params =
        [{"~p,", get_revisit_index},
        {"~p,", get_dwell_index},
        {"~p,", get_last_dwell_of_revisit},
        {"~p,", get_target_report_count},
        {"~p,", get_dwell_time},
        {"~p,", get_sensor_lat},
        {"~p,", get_sensor_lon},
        {"~p,", get_sensor_alt},
        {"~p,", get_lat_scale_factor},
        {"~p,", get_lon_scale_factor},
        {"~p,", get_spu_along_track},
        {"~p,", get_spu_cross_track},
        {"~p,", get_spu_alt},
        {"~p,", get_sensor_track},
        {"~p,", get_sensor_speed},
        {"~p,", get_sensor_vert_vel},
        {"~p,", get_sensor_track_unc},
        {"~p,", get_sensor_speed_unc},
        {"~p,", get_sensor_vert_vel_unc},
        {"~p,", get_platform_heading},
        {"~p,", get_platform_pitch},
        {"~p,", get_platform_roll},
        {"~p,", get_dwell_center_lat},
        {"~p,", get_dwell_center_lon},
        {"~p,", get_dwell_range_half_extent},
        {"~p,", get_dwell_angle_half_extent},
        {"~p,", get_sensor_heading},
        {"~p,", get_sensor_pitch},
        {"~p,", get_sensor_roll},
        {"~p", get_mdv}],
    DwellList = lists:map(F, Params),
    Tgts = get_targets(DS),
    TgtIO = [tgt_report:to_csv_iolist(T, EM) || T <- Tgts],
    %% Prefix the line identifier and add all the targets (which will span
    %% multiple lines).
    %% Also add an empty field placeholder for the existence mask.
    ["DS,,"|DwellList] ++ io_lib:format("~n", []) ++ TgtIO.
%% @doc Set the dwell time in an existing dwell segment record, returning
%% a copy with only the dwell_time field changed. Used for data replay
%% tasks.
set_dwell_time(DS, DwellTimeMS) when is_record(DS, dwell_segment) ->
    DS#dwell_segment{dwell_time = DwellTimeMS}.
%% @doc Replace the target reports in a dwell segment, keeping the
%% target report count field consistent with the length of the new list.
update_targets(#dwell_segment{} = DS, NewTargets) when is_list(NewTargets) ->
    DS#dwell_segment{
        target_report_count = length(NewTargets),
        targets = NewTargets}.
%% Accessor functions to allow access to the record fields without
%% creating client dependencies on the actual structure.
%% @doc Accessor function for the existence mask.
get_existence_mask(#dwell_segment{existence_mask = X}) -> X.
%% @doc Accessor function for the revisit index.
get_revisit_index(#dwell_segment{revisit_index = X}) -> X.
%% @doc Accessor function for the dwell index.
get_dwell_index(#dwell_segment{dwell_index = X}) -> X.
%% @doc Accessor function for the last dwell of revisit flag.
get_last_dwell_of_revisit(#dwell_segment{last_dwell_of_revisit = X}) -> X.
%% @doc Accessor function for the target report count.
get_target_report_count(#dwell_segment{target_report_count = X}) -> X.
%% @doc Accessor function for the dwell time.
get_dwell_time(#dwell_segment{dwell_time = X}) -> X.
%% @doc Accessor function for the sensor Latitude.
get_sensor_lat(#dwell_segment{sensor_lat = X}) -> X.
%% @doc Accessor function for the sensor Longitude.
get_sensor_lon(#dwell_segment{sensor_lon = X}) -> X.
%% @doc Accessor function for the sensor Altitude.
get_sensor_alt(#dwell_segment{sensor_alt = X}) -> X.
%% @doc Accessor function for the Latitude scale factor.
get_lat_scale_factor(#dwell_segment{lat_scale_factor = X}) -> X.
%% @doc Accessor function for the Longitude scale factor.
get_lon_scale_factor(#dwell_segment{lon_scale_factor = X}) -> X.
%% @doc Accessor function for the sensor position uncertainty along track.
get_spu_along_track(#dwell_segment{spu_along_track = X}) -> X.
%% @doc Accessor function for the sensor position uncertainty cross track.
get_spu_cross_track(#dwell_segment{spu_cross_track = X}) -> X.
%% @doc Accessor function for the sensor altitude uncertainty.
get_spu_alt(#dwell_segment{spu_alt = X}) -> X.
%% @doc Accessor function for the sensor track.
get_sensor_track(#dwell_segment{sensor_track = X}) -> X.
%% @doc Accessor function for the sensor speed.
get_sensor_speed(#dwell_segment{sensor_speed = X}) -> X.
%% @doc Accessor function for the sensor vertical velocity.
get_sensor_vert_vel(#dwell_segment{sensor_vert_vel = X}) -> X.
%% @doc Accessor function for the sensor track uncertainty.
get_sensor_track_unc(#dwell_segment{sensor_track_unc = X}) -> X.
%% @doc Accessor function for the sensor speed uncertainty.
get_sensor_speed_unc(#dwell_segment{sensor_speed_unc = X}) -> X.
%% @doc Accessor function for the sensor vertical velocity uncertainty.
get_sensor_vert_vel_unc(#dwell_segment{sensor_vert_vel_unc = X}) -> X.
%% @doc Accessor function for the platform heading.
get_platform_heading(#dwell_segment{platform_heading = X}) -> X.
%% @doc Accessor function for the platform pitch.
get_platform_pitch(#dwell_segment{platform_pitch = X}) -> X.
%% @doc Accessor function for the platform roll.
get_platform_roll(#dwell_segment{platform_roll = X}) -> X.
%% @doc Accessor function for the dwell center Latitude.
get_dwell_center_lat(#dwell_segment{dwell_center_lat = X}) -> X.
%% @doc Accessor function for the dwell center Longitude.
get_dwell_center_lon(#dwell_segment{dwell_center_lon = X}) -> X.
%% @doc Accessor function for the dwell range half extent.
get_dwell_range_half_extent(#dwell_segment{dwell_range_half_extent = X}) -> X.
%% @doc Accessor function for the dwell angle half extent.
get_dwell_angle_half_extent(#dwell_segment{dwell_angle_half_extent = X}) -> X.
%% @doc Accessor function for the sensor heading.
get_sensor_heading(#dwell_segment{sensor_heading = X}) -> X.
%% @doc Accessor function for the sensor pitch.
get_sensor_pitch(#dwell_segment{sensor_pitch = X}) -> X.
%% @doc Accessor function for the sensor roll.
get_sensor_roll(#dwell_segment{sensor_roll = X}) -> X.
%% @doc Accessor function for the minimum detectable velocity (MDV).
get_mdv(#dwell_segment{mdv = X}) -> X.
%% @doc Accessor function for the target reports.
get_targets(#dwell_segment{targets = X}) -> X.
| null | https://raw.githubusercontent.com/pentlandedge/s4607/b3cdae404ac5bd50419f33259dfa62af9d1a8d60/src/dwell.erl | erlang |
use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
Main API functions.
Type specifications.
@doc Decode a binary dwell segment into structured form.
Fixed part of the dwell segement is pattern matched above, remainder
depends on the existence mask.
@doc Encode a dwell segment record in its binary form.
Extract the existence mask from the incoming dwell segment.
Create a local function to wrap the check of the existence mask and
the parameter encoding/appending.
Exploits the trick that the function to access the dwell segment
fields is the same as that to access the corresponding existence mask
field.
Encode does not yet cater for adding the actual target reports.
Produce a binary dwell segment, missing only the target reports.
Append any target reports.
@doc Helper function for the dwell encode: encodes the target reports.
so that the target reports are automatically added to the end of the
dwell segment.
@doc Create a new dwell report structure from the specified fields.
Local function to pull the parameter from the list or supply a default
value.
@doc Calculate the expected size of a dwell segment once encoded.
@doc Calculate the size of an encoded dwell segment payload from the
existence mask and the target report count.
Define a function to accumulate the size.
Accumulate the total size for all the included parameters (excluding
mask itself.
Calculate the size for the target reports.
Return the combined total of the dwell and the target reports.
@doc Convert the dwell segment into a dictionary.
Extract the existence mask from the incoming dwell segment.
Table definition with of the form [{Name, Accessor}].
The accessor function name is used with both the dwell segment and the
existence mask.
Function to convert each field present in the existence mask into a
dictionary element.
Add the target reports. Convert a list of records to a list of
dictionaries.
Function to walk through a binary containing a number of target reports,
decoding each returning as a list of reports.
Helper function with the accumulator.
@doc Display the contents of a dwell segment in the console.
@doc Function to display the contents of a dwell segment.
Prefix the line identifier and add all the targets (which will span
multiple lines).
Also add an empty field placeholder for the existence mask.
@doc Set the dwell time in an existing dwell segment record. Used for data
replay tasks.
@doc Update the targets in a dwell segment. Updates the target report count
in the dwell segment too.
client dependencies on the actual structure. | Copyright 2016 Pentland Edge Ltd.
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(dwell).
-export([
decode/1,
encode/1,
new/1,
payload_size/1,
payload_size/2,
to_dict/1,
display/1,
to_csv_iolist/1,
set_dwell_time/2,
update_targets/2]).
Accessor functions for accessing elements of the dwell segment .
-export([
get_existence_mask/1,
get_revisit_index/1,
get_dwell_index/1,
get_last_dwell_of_revisit/1,
get_target_report_count/1,
get_dwell_time/1,
get_sensor_lat/1,
get_sensor_lon/1,
get_sensor_alt/1,
get_lat_scale_factor/1,
get_lon_scale_factor/1,
get_spu_along_track/1,
get_spu_cross_track/1,
get_spu_alt/1,
get_sensor_track/1,
get_sensor_speed/1,
get_sensor_vert_vel/1,
get_sensor_track_unc/1,
get_sensor_speed_unc/1,
get_sensor_vert_vel_unc/1,
get_platform_heading/1,
get_platform_pitch/1,
get_platform_roll/1,
get_dwell_center_lat/1,
get_dwell_center_lon/1,
get_dwell_range_half_extent/1,
get_dwell_angle_half_extent/1,
get_sensor_heading/1,
get_sensor_pitch/1,
get_sensor_roll/1,
get_mdv/1,
get_targets/1]).
-record(dwell_segment, {
existence_mask,
revisit_index,
dwell_index,
last_dwell_of_revisit,
target_report_count,
dwell_time,
sensor_lat,
sensor_lon,
sensor_alt,
lat_scale_factor,
lon_scale_factor,
spu_along_track,
spu_cross_track,
spu_alt,
sensor_track,
sensor_speed,
sensor_vert_vel,
sensor_track_unc,
sensor_speed_unc,
sensor_vert_vel_unc,
platform_heading,
platform_pitch,
platform_roll,
dwell_center_lat,
dwell_center_lon,
dwell_range_half_extent,
dwell_angle_half_extent,
sensor_heading,
sensor_pitch,
sensor_roll,
mdv,
targets}).
-opaque dwell_segment() :: #dwell_segment{}.
-export_type([dwell_segment/0]).
Dwell segment decoding functions .
decode(<<EM:8/binary,RI:16/integer-unsigned-big,
DI:16/integer-unsigned-big,LD,TRC:16/integer-unsigned-big,
DT:32/integer-unsigned-big,SLat:4/binary,SLon:4/binary,SAlt:4/binary,
Rest/binary>>) ->
EMrec = exist_mask:decode(EM),
{LatScaleFactor, Bin1} = sutils:conditional_extract(
Rest,
exist_mask:get_lat_scale_factor(EMrec),
4,
fun stanag_types:sa32_to_float/1,
1.0),
{LonScaleFactor, Bin2} = sutils:conditional_extract(
Bin1,
exist_mask:get_lon_scale_factor(EMrec),
4,
fun stanag_types:ba32_to_float/1,
1.0),
{SpuAlongTrack, Bin3} = sutils:conditional_extract(
Bin2,
exist_mask:get_spu_cross_track(EMrec),
4,
fun stanag_types:i32_to_integer/1,
0),
{SpuCrossTrack, Bin4} = sutils:conditional_extract(
Bin3,
exist_mask:get_spu_cross_track(EMrec),
4,
fun stanag_types:i32_to_integer/1,
0),
{SpuAlt, Bin5} = sutils:conditional_extract(
Bin4,
exist_mask:get_spu_alt(EMrec),
2,
fun stanag_types:i16_to_integer/1,
0),
{SensorTrack, Bin6} = sutils:conditional_extract(
Bin5,
exist_mask:get_sensor_track(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{SensorSpeed, Bin7} = sutils:conditional_extract(
Bin6,
exist_mask:get_sensor_speed(EMrec),
4,
fun stanag_types:i32_to_integer/1,
0),
{SensorVertVel, Bin8} = sutils:conditional_extract(
Bin7,
exist_mask:get_sensor_vert_vel(EMrec),
1,
fun stanag_types:s8_to_integer/1,
0),
{SensorTrackUnc, Bin9} = sutils:conditional_extract(
Bin8,
exist_mask:get_sensor_track_unc(EMrec),
1,
fun stanag_types:i8_to_integer/1,
0),
{SensorSpeedUnc, Bin10} = sutils:conditional_extract(
Bin9,
exist_mask:get_sensor_speed_unc(EMrec),
2,
fun stanag_types:i16_to_integer/1,
0),
{SensorVertVelUnc, Bin11} = sutils:conditional_extract(
Bin10,
exist_mask:get_sensor_vert_vel_unc(EMrec),
2,
fun stanag_types:i16_to_integer/1,
0),
{PlatHeading, Bin12} = sutils:conditional_extract(
Bin11,
exist_mask:get_platform_heading(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{PlatPitch, Bin13} = sutils:conditional_extract(
Bin12,
exist_mask:get_platform_pitch(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{PlatRoll, Bin14} = sutils:conditional_extract(
Bin13,
exist_mask:get_platform_roll(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{DwellCenterLat, Bin15} = sutils:conditional_extract(
Bin14,
exist_mask:get_dwell_center_lat(EMrec),
4,
fun stanag_types:sa32_to_float/1,
0.0),
{DwellCenterLon, Bin16} = sutils:conditional_extract(
Bin15,
exist_mask:get_dwell_center_lon(EMrec),
4,
fun stanag_types:ba32_to_float/1,
0.0),
{DwellRangeHalfExtent, Bin17} = sutils:conditional_extract(
Bin16,
exist_mask:get_dwell_range_half_extent(EMrec),
2,
fun stanag_types:b16_to_float/1,
0.0),
{DwellAngleHalfExtent, Bin18} = sutils:conditional_extract(
Bin17,
exist_mask:get_dwell_angle_half_extent(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{SensorHeading, Bin19} = sutils:conditional_extract(
Bin18,
exist_mask:get_sensor_heading(EMrec),
2,
fun stanag_types:ba16_to_float/1,
0.0),
{SensorPitch, Bin20} = sutils:conditional_extract(
Bin19,
exist_mask:get_sensor_pitch(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{SensorRoll, Bin21} = sutils:conditional_extract(
Bin20,
exist_mask:get_sensor_roll(EMrec),
2,
fun stanag_types:sa16_to_float/1,
0.0),
{MDV, Bin22} = sutils:conditional_extract(
Bin21,
exist_mask:get_mdv(EMrec),
1,
fun stanag_types:i8_to_integer/1,
0),
TgtRepList = decode_target_report_list(Bin22, EMrec, TRC),
{ok, #dwell_segment{
existence_mask = EMrec,
revisit_index = RI,
dwell_index = DI,
last_dwell_of_revisit = decode_last_dwell_of_revisit(LD),
target_report_count = TRC,
dwell_time = DT,
sensor_lat = stanag_types:sa32_to_float(SLat),
sensor_lon = stanag_types:ba32_to_float(SLon),
sensor_alt = stanag_types:s32_to_integer(SAlt),
lat_scale_factor = LatScaleFactor,
lon_scale_factor = LonScaleFactor,
spu_along_track = SpuAlongTrack,
spu_cross_track = SpuCrossTrack,
spu_alt = SpuAlt,
sensor_track = SensorTrack,
sensor_speed = SensorSpeed,
sensor_vert_vel = SensorVertVel,
sensor_track_unc = SensorTrackUnc,
sensor_speed_unc = SensorSpeedUnc,
sensor_vert_vel_unc = SensorVertVelUnc,
platform_heading = PlatHeading,
platform_pitch = PlatPitch,
platform_roll = PlatRoll,
dwell_center_lat = DwellCenterLat,
dwell_center_lon = DwellCenterLon,
dwell_range_half_extent = DwellRangeHalfExtent,
dwell_angle_half_extent = DwellAngleHalfExtent,
sensor_heading = SensorHeading,
sensor_pitch = SensorPitch,
sensor_roll = SensorRoll,
mdv = MDV,
targets = TgtRepList}};
decode(_) ->
{error, dwell_mismatch}.
encode(DS) ->
EM = get_existence_mask(DS),
F = fun({EvalFun, EncFun}, Acc) ->
case exist_mask:EvalFun(EM) of
1 ->
Param = dwell:EvalFun(DS),
PB = EncFun(Param),
<<Acc/binary,PB/binary>>;
0 ->
Acc
end
end,
ParamTable = [
{get_revisit_index, fun stanag_types:integer_to_i16/1},
{get_dwell_index, fun stanag_types:integer_to_i16/1},
{get_last_dwell_of_revisit, fun encode_last_dwell_of_revisit/1},
{get_target_report_count, fun stanag_types:integer_to_i16/1},
{get_dwell_time, fun stanag_types:integer_to_i32/1},
{get_sensor_lat, fun stanag_types:float_to_sa32/1},
{get_sensor_lon, fun stanag_types:float_to_ba32/1},
{get_sensor_alt, fun stanag_types:integer_to_s32/1},
{get_lat_scale_factor, fun stanag_types:float_to_sa32/1},
{get_lon_scale_factor, fun stanag_types:float_to_ba32/1},
{get_spu_along_track, fun stanag_types:integer_to_i32/1},
{get_spu_cross_track, fun stanag_types:integer_to_i32/1},
{get_spu_alt, fun stanag_types:integer_to_i16/1},
{get_sensor_track, fun stanag_types:float_to_ba16/1},
{get_sensor_speed, fun stanag_types:integer_to_i32/1},
{get_sensor_vert_vel, fun stanag_types:integer_to_s8/1},
{get_sensor_track_unc, fun stanag_types:integer_to_i8/1},
{get_sensor_speed_unc, fun stanag_types:integer_to_i16/1},
{get_sensor_vert_vel_unc, fun stanag_types:integer_to_i16/1},
{get_platform_heading, fun stanag_types:float_to_ba16/1},
{get_platform_pitch, fun stanag_types:float_to_sa16/1},
{get_platform_roll, fun stanag_types:float_to_sa16/1},
{get_dwell_center_lat, fun stanag_types:float_to_sa32/1},
{get_dwell_center_lon, fun stanag_types:float_to_ba32/1},
{get_dwell_range_half_extent, fun stanag_types:float_to_b16/1},
{get_dwell_angle_half_extent, fun stanag_types:float_to_ba16/1},
{get_sensor_heading, fun stanag_types:float_to_sa16/1},
{get_sensor_pitch, fun stanag_types:float_to_sa16/1},
{get_sensor_roll, fun stanag_types:float_to_sa16/1},
{get_mdv, fun stanag_types:integer_to_i8/1}],
EMenc = exist_mask:encode(EM),
Bin1 = lists:foldl(F, EMenc, ParamTable),
encode_target_reports(get_target_report_count(DS), get_targets(DS), EM, Bin1).
InitBin can be set to the dwell report prior to adding the target reports
encode_target_reports(0, _RepList, _EM, InitBin) ->
InitBin;
encode_target_reports(RepCount, RepList, EM, InitBin)
when RepCount =:= length(RepList) ->
F = fun(Rep, Acc) ->
Bin = tgt_report:encode(Rep, EM),
<<Acc/binary,Bin/binary>>
end,
lists:foldl(F, InitBin, RepList).
new(Fields) ->
F = fun(P, L, Default) ->
case lists:keyfind(P, 1, L) of
{P, V} -> V;
false -> Default
end
end,
#dwell_segment{
existence_mask = F(existence_mask, Fields, 0),
revisit_index = F(revisit_index, Fields, 0),
dwell_index = F(dwell_index, Fields, 0),
last_dwell_of_revisit = F(last_dwell_of_revisit, Fields, 0),
target_report_count = F(target_report_count, Fields, 0),
dwell_time = F(dwell_time, Fields, 0),
sensor_lat = F(sensor_lat, Fields, 0),
sensor_lon = F(sensor_lon, Fields, 0),
sensor_alt = F(sensor_alt, Fields, 0),
lat_scale_factor = F(lat_scale_factor, Fields, 0),
lon_scale_factor = F(lon_scale_factor, Fields, 0),
spu_along_track = F(spu_along_track, Fields, 0),
spu_cross_track = F(spu_cross_track, Fields, 0),
spu_alt = F(spu_alt, Fields, 0),
sensor_track = F(sensor_track, Fields, 0),
sensor_speed = F(sensor_speed, Fields, 0),
sensor_vert_vel = F(sensor_vert_vel, Fields, 0),
sensor_track_unc = F(sensor_track_unc, Fields, 0),
sensor_speed_unc = F(sensor_speed_unc, Fields, 0),
sensor_vert_vel_unc = F(sensor_vert_vel_unc, Fields, 0),
platform_heading = F(platform_heading, Fields, 0),
platform_pitch = F(platform_pitch, Fields, 0),
platform_roll = F(platform_roll, Fields, 0),
dwell_center_lat = F(dwell_center_lat, Fields, 0),
dwell_center_lon = F(dwell_center_lon, Fields, 0),
dwell_range_half_extent = F(dwell_range_half_extent, Fields, 0),
dwell_angle_half_extent = F(dwell_angle_half_extent, Fields, 0),
sensor_heading = F(sensor_heading, Fields, 0),
sensor_pitch = F(sensor_pitch, Fields, 0),
sensor_roll = F(sensor_roll, Fields, 0),
mdv = F(mdv, Fields, 0),
targets = F(targets, Fields, [])}.
payload_size(#dwell_segment{existence_mask = EM, target_report_count = TC}) ->
payload_size(EM, TC).
payload_size(EM, TgtRepCount) ->
SizeList = [
{get_revisit_index, 2},
{get_dwell_index, 2},
{get_last_dwell_of_revisit, 1},
{get_target_report_count, 2},
{get_dwell_time, 4},
{get_sensor_lat, 4},
{get_sensor_lon, 4},
{get_sensor_alt, 4},
{get_lat_scale_factor, 4},
{get_lon_scale_factor, 4},
{get_spu_along_track, 4},
{get_spu_cross_track, 4},
{get_spu_alt, 2},
{get_sensor_track, 2},
{get_sensor_speed, 4},
{get_sensor_vert_vel, 1},
{get_sensor_track_unc, 1},
{get_sensor_speed_unc, 2},
{get_sensor_vert_vel_unc, 2},
{get_platform_heading, 2},
{get_platform_pitch, 2},
{get_platform_roll, 2},
{get_dwell_center_lat, 4},
{get_dwell_center_lon, 4},
{get_dwell_range_half_extent, 2},
{get_dwell_angle_half_extent, 2},
{get_sensor_heading, 2},
{get_sensor_pitch, 2},
{get_sensor_roll, 2},
{get_mdv, 1}],
F = fun({GetF, Size}, Acc) ->
case exist_mask:GetF(EM) of
1 -> Acc + Size;
0 -> Acc
end
end,
the target reports ) . Initial size of 8 is to allow for the existence
DwellSize = lists:foldl(F, 8, SizeList),
TgtRepSize = TgtRepCount * tgt_report:payload_size(EM),
DwellSize + TgtRepSize.
to_dict(DS) ->
EM = get_existence_mask(DS),
ParamTable = [
{revisit_index, get_revisit_index},
{dwell_index, get_dwell_index},
{last_dwell_of_revisit, get_last_dwell_of_revisit},
{target_report_count, get_target_report_count},
{dwell_time, get_dwell_time},
{sensor_lat, get_sensor_lat},
{sensor_lon, get_sensor_lon},
{sensor_alt, get_sensor_alt},
{lat_scale_factor, get_lat_scale_factor},
{lon_scale_factor, get_lon_scale_factor},
{spu_along_track, get_spu_along_track},
{spu_cross_track, get_spu_cross_track},
{spu_alt, get_spu_alt},
{sensor_track, get_sensor_track},
{sensor_speed, get_sensor_speed},
{sensor_vert_vel, get_sensor_vert_vel},
{sensor_track_unc, get_sensor_track_unc},
{sensor_speed_unc, get_sensor_speed_unc},
{sensor_vert_vel_unc, get_sensor_vert_vel_unc},
{platform_heading, get_platform_heading},
{platform_pitch, get_platform_pitch},
{platform_roll, get_platform_roll},
{dwell_center_lat, get_dwell_center_lat},
{dwell_center_lon, get_dwell_center_lon},
{dwell_range_half_extent, get_dwell_range_half_extent},
{dwell_angle_half_extent, get_dwell_angle_half_extent},
{sensor_heading, get_sensor_heading},
{sensor_pitch, get_sensor_pitch},
{sensor_roll, get_sensor_roll},
{get_mdv, get_mdv}],
F = fun({Param, GetFn}, Acc) ->
case exist_mask:GetFn(EM) of
1 ->
P = dwell:GetFn(DS),
dict:store(Param, P, Acc);
0 ->
Acc
end
end,
Dict1 = lists:foldl(F, dict:new(), ParamTable),
Targets = dwell:get_targets(DS),
T = fun(TgtRep) ->
tgt_report:to_dict(TgtRep, EM)
end,
TgtDictList = lists:map(T, Targets),
dict:store(targets, TgtDictList, Dict1).
decode_last_dwell_of_revisit(0) -> additional_dwells;
decode_last_dwell_of_revisit(1) -> no_additional_dwells.
encode_last_dwell_of_revisit(additional_dwells) -> <<0>>;
encode_last_dwell_of_revisit(no_additional_dwells) -> <<1>>.
decode_target_report_list(Bin, EM, TgtCount) ->
decode_target_report_list(Bin, EM, TgtCount, []).
decode_target_report_list(_Bin, _EM, 0, AccTgts) ->
lists:reverse(AccTgts);
decode_target_report_list(Bin, EM, TgtCount, AccTgts) when TgtCount > 0 ->
{ok, TR, Rem} = tgt_report:decode(Bin, EM),
decode_target_report_list(Rem, EM, TgtCount-1, [TR|AccTgts]).
%% @doc Print a human readable summary of the dwell segment to stdout.
%% Fields guarded by the existence mask are only printed when the matching
%% mask bit is 1 (via sutils:conditional_display/3).
display(DS) ->
    io:format("****************************************~n"),
    io:format("** @dwell~n"),
    EM = DS#dwell_segment.existence_mask,
    exist_mask:display(EM),
    %% Mandatory fields.
    io:format("Revisit index: ~p~n", [get_revisit_index(DS)]),
    io:format("Dwell index: ~p~n", [get_dwell_index(DS)]),
    io:format("Last dwell of revisit: ~p~n", [get_last_dwell_of_revisit(DS)]),
    io:format("Target report count: ~p~n", [get_target_report_count(DS)]),
    io:format("Dwell time: ~p~n", [get_dwell_time(DS)]),
    io:format("Sensor Lat.: ~p~n", [get_sensor_lat(DS)]),
    io:format("Sensor Lon.: ~p~n", [get_sensor_lon(DS)]),
    io:format("Sensor alt. (cm): ~p~n", [get_sensor_alt(DS)]),
    %% Optional fields, controlled by the existence mask.
    sutils:conditional_display("Lat. scale factor: ~p~n", [get_lat_scale_factor(DS)], exist_mask:get_lat_scale_factor(EM)),
    sutils:conditional_display("Lon. scale factor: ~p~n", [get_lon_scale_factor(DS)], exist_mask:get_lon_scale_factor(EM)),
    sutils:conditional_display("SPU along track: ~p~n", [get_spu_along_track(DS)], exist_mask:get_spu_along_track(EM)),
    sutils:conditional_display("SPU cross track: ~p~n", [get_spu_cross_track(DS)], exist_mask:get_spu_cross_track(EM)),
    sutils:conditional_display("SPU alt: ~p~n", [get_spu_alt(DS)], exist_mask:get_spu_alt(EM)),
    sutils:conditional_display("Sensor track: ~p~n", [get_sensor_track(DS)], exist_mask:get_sensor_track(EM)),
    sutils:conditional_display("Sensor speed: ~p~n", [get_sensor_speed(DS)], exist_mask:get_sensor_speed(EM)),
    sutils:conditional_display("Sensor vert. vel.: ~p~n", [get_sensor_vert_vel(DS)], exist_mask:get_sensor_vert_vel(EM)),
    sutils:conditional_display("Sensor track unc.: ~p~n", [get_sensor_track_unc(DS)], exist_mask:get_sensor_track_unc(EM)),
    sutils:conditional_display("Sensor speed unc.: ~p~n", [get_sensor_speed_unc(DS)], exist_mask:get_sensor_speed_unc(EM)),
    sutils:conditional_display("Sensor vert. vel. unc.: ~p~n", [get_sensor_vert_vel_unc(DS)], exist_mask:get_sensor_vert_vel_unc(EM)),
    sutils:conditional_display("Platform heading: ~p~n", [get_platform_heading(DS)], exist_mask:get_platform_heading(EM)),
    sutils:conditional_display("Platform pitch: ~p~n", [get_platform_pitch(DS)], exist_mask:get_platform_pitch(EM)),
    sutils:conditional_display("Platform roll: ~p~n", [get_platform_roll(DS)], exist_mask:get_platform_roll(EM)),
    sutils:conditional_display("Dwell centre Lat.: ~p~n", [get_dwell_center_lat(DS)], exist_mask:get_dwell_center_lat(EM)),
    sutils:conditional_display("Dwell centre Lon.: ~p~n", [get_dwell_center_lon(DS)], exist_mask:get_dwell_center_lon(EM)),
    sutils:conditional_display("Dwell range half extent: ~p~n", [get_dwell_range_half_extent(DS)], exist_mask:get_dwell_range_half_extent(EM)),
    sutils:conditional_display("Dwell angle half extent: ~p~n", [get_dwell_angle_half_extent(DS)], exist_mask:get_dwell_angle_half_extent(EM)),
    sutils:conditional_display("Sensor heading: ~p~n", [get_sensor_heading(DS)], exist_mask:get_sensor_heading(EM)),
    sutils:conditional_display("Sensor pitch: ~p~n", [get_sensor_pitch(DS)], exist_mask:get_sensor_pitch(EM)),
    sutils:conditional_display("Sensor roll: ~p~n", [get_sensor_roll(DS)], exist_mask:get_sensor_roll(EM)),
    sutils:conditional_display("MDV: ~p~n", [get_mdv(DS)], exist_mask:get_mdv(EM)),
    %% NOTE(review): lists:map/2 is used purely for its side effects here;
    %% lists:foreach/2 would express that intent (callers would then see ok
    %% rather than a list of results) — confirm no caller uses the return.
    F = fun(TR) -> tgt_report:display(TR, EM) end,
    lists:map(F, DS#dwell_segment.targets).
%% @doc Convert a dwell segment to a CSV iolist. Each field is formatted by
%% sutils:conditional_format/3, which emits nothing when the field's
%% existence-mask bit is clear; target reports follow on subsequent lines.
-spec to_csv_iolist(DS::dwell_segment()) -> iolist().
to_csv_iolist(DS) ->
    EM = dwell:get_existence_mask(DS),
    %% Field order matches the dwell segment definition; the final field has
    %% no trailing comma.
    Fields =
        [{"~p,", get_revisit_index},
         {"~p,", get_dwell_index},
         {"~p,", get_last_dwell_of_revisit},
         {"~p,", get_target_report_count},
         {"~p,", get_dwell_time},
         {"~p,", get_sensor_lat},
         {"~p,", get_sensor_lon},
         {"~p,", get_sensor_alt},
         {"~p,", get_lat_scale_factor},
         {"~p,", get_lon_scale_factor},
         {"~p,", get_spu_along_track},
         {"~p,", get_spu_cross_track},
         {"~p,", get_spu_alt},
         {"~p,", get_sensor_track},
         {"~p,", get_sensor_speed},
         {"~p,", get_sensor_vert_vel},
         {"~p,", get_sensor_track_unc},
         {"~p,", get_sensor_speed_unc},
         {"~p,", get_sensor_vert_vel_unc},
         {"~p,", get_platform_heading},
         {"~p,", get_platform_pitch},
         {"~p,", get_platform_roll},
         {"~p,", get_dwell_center_lat},
         {"~p,", get_dwell_center_lon},
         {"~p,", get_dwell_range_half_extent},
         {"~p,", get_dwell_angle_half_extent},
         {"~p,", get_sensor_heading},
         {"~p,", get_sensor_pitch},
         {"~p,", get_sensor_roll},
         {"~p", get_mdv}],
    FormatField =
        fun({FmtStr, FnName}) ->
            sutils:conditional_format(FmtStr, [dwell:FnName(DS)], exist_mask:FnName(EM))
        end,
    DwellFields = [FormatField(Fld) || Fld <- Fields],
    TgtIO = [tgt_report:to_csv_iolist(T, EM) || T <- get_targets(DS)],
    ["DS,,"|DwellFields] ++ io_lib:format("~n", []) ++ TgtIO.
%% @doc Return a copy of the dwell segment with its dwell time (ms) replaced.
set_dwell_time(#dwell_segment{} = Seg, DwellTimeMS) ->
    Seg#dwell_segment{dwell_time = DwellTimeMS}.
%% @doc Replace the list of target reports, keeping the stored report count
%% consistent with the new list.
update_targets(#dwell_segment{} = Seg, NewTargets) when is_list(NewTargets) ->
    Seg#dwell_segment{targets = NewTargets,
                      target_report_count = length(NewTargets)}.
%% Accessor functions to allow access to the record fields without creating
%% a dependency on the #dwell_segment{} record definition in client code.

%% @doc Accessor function for the existence mask.
get_existence_mask(#dwell_segment{existence_mask = X}) -> X.

%% @doc Accessor function for the revisit index.
get_revisit_index(#dwell_segment{revisit_index = X}) -> X.

%% @doc Accessor function for the dwell index.
get_dwell_index(#dwell_segment{dwell_index = X}) -> X.

%% @doc Accessor function for the last dwell of revisit flag.
get_last_dwell_of_revisit(#dwell_segment{last_dwell_of_revisit = X}) -> X.

%% @doc Accessor function for the target report count.
get_target_report_count(#dwell_segment{target_report_count = X}) -> X.

%% @doc Accessor function for the dwell time.
get_dwell_time(#dwell_segment{dwell_time = X}) -> X.

%% @doc Accessor function for the sensor Latitude.
get_sensor_lat(#dwell_segment{sensor_lat = X}) -> X.

%% @doc Accessor function for the sensor Longitude.
get_sensor_lon(#dwell_segment{sensor_lon = X}) -> X.

%% @doc Accessor function for the sensor Altitude.
get_sensor_alt(#dwell_segment{sensor_alt = X}) -> X.

%% @doc Accessor function for the Latitude scale factor.
get_lat_scale_factor(#dwell_segment{lat_scale_factor = X}) -> X.

%% @doc Accessor function for the Longitude scale factor.
get_lon_scale_factor(#dwell_segment{lon_scale_factor = X}) -> X.

%% @doc Accessor function for the sensor position uncertainty along track.
get_spu_along_track(#dwell_segment{spu_along_track = X}) -> X.

%% @doc Accessor function for the sensor position uncertainty cross track.
get_spu_cross_track(#dwell_segment{spu_cross_track = X}) -> X.

%% @doc Accessor function for the sensor altitude uncertainty.
get_spu_alt(#dwell_segment{spu_alt = X}) -> X.

%% @doc Accessor function for the sensor track.
get_sensor_track(#dwell_segment{sensor_track = X}) -> X.

%% @doc Accessor function for the sensor speed.
get_sensor_speed(#dwell_segment{sensor_speed = X}) -> X.

%% @doc Accessor function for the sensor vertical velocity.
get_sensor_vert_vel(#dwell_segment{sensor_vert_vel = X}) -> X.

%% @doc Accessor function for the sensor track uncertainty.
get_sensor_track_unc(#dwell_segment{sensor_track_unc = X}) -> X.

%% @doc Accessor function for the sensor speed uncertainty.
get_sensor_speed_unc(#dwell_segment{sensor_speed_unc = X}) -> X.

%% @doc Accessor function for the sensor vertical velocity uncertainty.
get_sensor_vert_vel_unc(#dwell_segment{sensor_vert_vel_unc = X}) -> X.

%% @doc Accessor function for the platform heading.
get_platform_heading(#dwell_segment{platform_heading = X}) -> X.

%% @doc Accessor function for the platform pitch.
get_platform_pitch(#dwell_segment{platform_pitch = X}) -> X.

%% @doc Accessor function for the platform roll.
get_platform_roll(#dwell_segment{platform_roll = X}) -> X.

%% @doc Accessor function for the dwell center Latitude.
get_dwell_center_lat(#dwell_segment{dwell_center_lat = X}) -> X.

%% @doc Accessor function for the dwell center Longitude.
get_dwell_center_lon(#dwell_segment{dwell_center_lon = X}) -> X.

%% @doc Accessor function for the dwell range half extent.
get_dwell_range_half_extent(#dwell_segment{dwell_range_half_extent = X}) -> X.

%% @doc Accessor function for the dwell angle half extent.
get_dwell_angle_half_extent(#dwell_segment{dwell_angle_half_extent = X}) -> X.

%% @doc Accessor function for the sensor heading.
get_sensor_heading(#dwell_segment{sensor_heading = X}) -> X.

%% @doc Accessor function for the sensor pitch.
get_sensor_pitch(#dwell_segment{sensor_pitch = X}) -> X.

%% @doc Accessor function for the sensor roll.
get_sensor_roll(#dwell_segment{sensor_roll = X}) -> X.

%% @doc Accessor function for the minimum detectable velocity.
get_mdv(#dwell_segment{mdv = X}) -> X.

%% @doc Accessor function for the target reports.
get_targets(#dwell_segment{targets = X}) -> X.
|
e607a6f417404751d4183a9013cd11ceb0c4964593167942df6aec1d80077b92 | AccelerateHS/accelerate-llvm | Permute.hs | # LANGUAGE GADTs #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections       #-}
{-# LANGUAGE TypeApplications    #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module      : Data.Array.Accelerate.LLVM.CodeGen.Permute
-- Copyright   : [2016..2020] The Accelerate Team
-- License     : BSD3
--
-- Maintainer  : < >
-- Stability   : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.LLVM.CodeGen.Permute (
IRPermuteFun(..),
llvmOfPermuteFun,
atomicCAS_rmw,
atomicCAS_cmp,
) where
import Data.Array.Accelerate.AST
import Data.Array.Accelerate.AST.Idx
import Data.Array.Accelerate.AST.LeftHandSide
import Data.Array.Accelerate.AST.Var
import Data.Array.Accelerate.Debug.Internal
import Data.Array.Accelerate.Error
import Data.Array.Accelerate.Representation.Type
import Data.Array.Accelerate.Trafo.Substitution
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.LLVM.CodeGen.Environment
import Data.Array.Accelerate.LLVM.CodeGen.Exp
import Data.Array.Accelerate.LLVM.CodeGen.IR
import Data.Array.Accelerate.LLVM.CodeGen.Monad
import Data.Array.Accelerate.LLVM.CodeGen.Sugar
import Data.Array.Accelerate.LLVM.CodeGen.Type
import Data.Array.Accelerate.LLVM.Foreign
import LLVM.AST.Type.AddrSpace
import LLVM.AST.Type.Instruction
import LLVM.AST.Type.Instruction.Atomic
import LLVM.AST.Type.Instruction.RMW as RMW
import LLVM.AST.Type.Instruction.Volatile
import LLVM.AST.Type.Name
import LLVM.AST.Type.Operand
import LLVM.AST.Type.Representation
import Control.Applicative
import Data.Constraint ( withDict )
import System.IO.Unsafe
import Prelude
-- | A forward permutation might be specialised to use atomic instructions to
-- perform the read-modify-write of the output array directly, rather than
-- separately acquiring a lock. The basic operation is always provided in case
-- a backend does not support the atomic operation at that type, or if it is
-- executing sequentially.
--
-- For the atomicRMW case, the function is applied to the new value before
-- feeding to the atomic instruction to combine with the old.
--
data IRPermuteFun arch aenv t where
  IRPermuteFun :: { combine   :: IRFun2 arch aenv (e -> e -> e)
                  , atomicRMW :: Maybe
                      ( RMWOperation
                      , IRFun1 arch aenv (e -> e)
                      )
                  }
               -> IRPermuteFun arch aenv (e -> e -> e)
-- | Analysis and code generation for forward permutation combination function.
--
-- Specialisation for atomic operations is currently limited to direct
-- applications of the function; that is, we don't dig down underneath
-- let-bindings.
--
llvmOfPermuteFun
    :: forall arch aenv e. Foreign arch
    => Fun aenv (e -> e -> e)
    -> Gamma aenv
    -> IRPermuteFun arch aenv (e -> e -> e)
llvmOfPermuteFun fun aenv = IRPermuteFun{..}
  where
    combine = llvmOfFun2 fun aenv
    atomicRMW
      -- If the old value is not used (i.e. permute const) then we can just
      -- store the new value directly. Since we do not require the return value
      -- we can do this for any scalar value with a regular Store. However,
      -- as we use an unzipped struct-of-array representation for product
      -- types, the multiple store instructions for the different fields
      -- could come from different threads, so we only allow the non-atomic
      -- version if the flag @-ffast-permute-const@ is set.
      --
      | Lam lhs (Lam (LeftHandSideWildcard tp) (Body body)) <- fun
      , True <- fast tp
      , fun' <- llvmOfFun1 (Lam lhs (Body body)) aenv
      = Just (Exchange, fun')

      -- LLVM natively supports atomic operations on integral types only.
      -- However different targets may support atomic instructions on other
      -- scalar types (for example the NVPTX target supports atomic add and
      -- subtract on floating point values).
      --
      -- Additionally it is possible to implement atomic instructions using
      -- atomic compare-and-swap, which is likely to be more performant than the
      -- generic spin-lock based approach.
      --
      | Lam lhs@(LeftHandSideSingle _) (Lam (LeftHandSideSingle _) (Body body)) <- fun
      , Just (rmw, x) <- rmwOp body
      , Just x' <- strengthenE latest x
      , fun' <- llvmOfFun1 (Lam lhs (Body x')) aenv
      = Just (rmw, fun')

      | otherwise
      = Nothing

    -- Non-atomic stores are always allowed for single scalars; for product
    -- types they are gated behind the -ffast-permute-const debug flag.
    fast :: TypeR e -> Bool
    fast tp
      | TupRsingle{} <- tp = True
      | otherwise          = unsafePerformIO (getFlag fast_permute_const)

    -- XXX: This doesn't work for newtypes because the coercion gets in the
    --      way. This should be generalised to work for product types (e.g.
    --      complex numbers) and take this factor into account as well.
    --      TLM-2019-09-27
    --
    rmwOp :: OpenExp (((),e),e) aenv e -> Maybe (RMWOperation, OpenExp (((),e),e) aenv e)
    rmwOp (PrimApp f xs)
      | PrimAdd{}  <- f = (RMW.Add,) <$> extract xs
      | PrimSub{}  <- f = (RMW.Sub,) <$> extract xs
      | PrimMin{}  <- f = (RMW.Min,) <$> extract xs
      | PrimMax{}  <- f = (RMW.Max,) <$> extract xs
      | PrimBOr{}  <- f = (RMW.Or,)  <$> extract xs
      | PrimBAnd{} <- f = (RMW.And,) <$> extract xs
      | PrimBXor{} <- f = (RMW.Xor,) <$> extract xs
    rmwOp _ = Nothing

    -- Determine which argument to a binary function was the new value being
    -- combined. This only works when the old value is used unmodified, but that
    -- is sufficient for us because otherwise it would not be suitable for the
    -- atomic update operation.
    --
    -- In the permutation function, the old value is given as the second
    -- argument, corresponding to ZeroIdx.
    --
    extract :: OpenExp (((),e),e) aenv (e,e) -> Maybe (OpenExp (((),e),e) aenv e)
    extract (Pair x y)
      | Evar (Var _ ZeroIdx) <- x = Just y
      | Evar (Var _ ZeroIdx) <- y = Just x
    extract _
      = Nothing

    -- Used with 'strengthenE' to ensure that the expression does not make use
    -- of the old value except in the combination function.
    latest :: (((),e),e) :?> ((),e)
    latest ZeroIdx      = Nothing
    latest (SuccIdx ix) = Just ix
-- Implementation of atomic RMW operation (e.g. (+), (-)) using atomic
-- compare-and-swap instructions, for targets which do not support the native
-- instruction at this type but do support CAS at this bit width.
--
-- > void casAdd(double *addr, double val)
-- > {
-- >     uint64_t* addr_i = reinterpret_cast<uint64_t*> addr;
-- >     uint64_t old = *addr_i;
-- >
-- >     do {
-- >         uint64_t expected = old;
-- >         uint64_t new = reinterpret_cast<uint64_t>(val + reinterpret_cast<double>(expected));
-- >
-- >         uint64_t old = atomicCAS(addr_i, expected, new);
-- >     }
-- >     while (old != expected);
-- > }
--
atomicCAS_rmw
    :: forall arch e. HasCallStack
    => SingleType e
    -> (Operands e -> CodeGen arch (Operands e))
    -> Operand (Ptr e)
    -> CodeGen arch ()
atomicCAS_rmw t update addr =
  case t of
    NumSingleType (FloatingNumType f) -> floating f
    NumSingleType (IntegralNumType i) -> integral i
  where
    -- Floating-point values are CAS-ed through an unsigned integer of the
    -- same bit width.
    floating :: FloatingType t -> CodeGen arch ()
    floating TypeHalf{}   = atomicCAS_rmw' t (integralType :: IntegralType Word16) update addr
    floating TypeFloat{}  = atomicCAS_rmw' t (integralType :: IntegralType Word32) update addr
    floating TypeDouble{} = atomicCAS_rmw' t (integralType :: IntegralType Word64) update addr

    integral :: IntegralType t -> CodeGen arch ()
    integral i = atomicCAS_rmw' t i update addr
-- Worker for atomicCAS_rmw: emit a spin loop that repeatedly applies the
-- update function and attempts a cmpxchg at the bit-equivalent integral
-- type, retrying until the exchange succeeds.
atomicCAS_rmw'
    :: HasCallStack
    => SingleType t
    -> IntegralType i
    -> (Operands t -> CodeGen arch (Operands t))
    -> Operand (Ptr t)
    -> CodeGen arch ()
atomicCAS_rmw' t i update addr = withDict (integralElt i) $ do
  let si = SingleScalarType (NumSingleType (IntegralNumType i))
  --
  spin  <- newBlock "rmw.spin"
  exit  <- newBlock "rmw.exit"

  -- View the memory as the integral type and take an initial sample.
  addr' <- instr' $ PtrCast (PtrPrimType (ScalarPrimType si) defaultAddrSpace) addr
  init' <- instr' $ Load si NonVolatile addr'
  old'  <- fresh $ TupRsingle si
  top   <- br spin

  setBlock spin
  -- Compute the candidate new value from the current (assumed) old value,
  -- then attempt the exchange.
  old   <- instr' $ BitCast (SingleScalarType t) (op i old')
  val   <- update $ ir t old
  val'  <- instr' $ BitCast si (op t val)
  r     <- instr' $ CmpXchg i NonVolatile addr' (op i old') val' (CrossThread, AcquireRelease) Monotonic
  done  <- instr' $ ExtractValue scalarType PairIdxRight r
  next' <- instr' $ ExtractValue si PairIdxLeft r

  -- NOTE(review): reconstructed comment — since Bool was removed from the
  -- set of primitive types Accelerate supports, we have to do a small hack
  -- to have the success flag considered at its correct type of a 1-bit
  -- integer (rather than the 8-bits it is actually stored as)
  done' <- case done of
             LocalReference _ (UnName n) -> return $ OP_Bool (LocalReference type' (UnName n))
             _                           -> internalError "expected unnamed local reference"

  bot   <- cbr done' exit spin
  -- Feed the freshly observed value back into the next loop iteration.
  _     <- phi' (TupRsingle si) spin old' [(ir i init',top), (ir i next',bot)]

  setBlock exit
-- Implementation of atomic comparison operators (i.e. min, max) using
-- compare-and-swap, for targets which do not support the native instruction at
-- this type but do support CAS at this bit width. The old value is discarded.
--
-- For example, atomicMin is implemented similarly to the following (however the
-- loop condition is more complex):
--
-- > void casMin(double *addr, double val)
-- > {
-- >     double old = *addr;
-- >     uint64_t val_i = reinterpret_cast<uint64_t>(val);
-- >     uint64_t addr_i = reinterpret_cast<uint64_t*>(addr);
-- >
-- >     while (val < old) {
-- >         uint64_t assumed_i = reinterpret_cast<uint64_t>(old);
-- >         uint64_t old_i = atomicCAS(addr_i, assumed_i, val_i);
-- >         old = reinterpret_cast<double>(old_i);
-- >     }
-- > }
--
-- If the function returns 'True', then the given value should be written to the
-- address.
--
atomicCAS_cmp
    :: forall arch e. HasCallStack
    => SingleType e
    -> (SingleType e -> Operands e -> Operands e -> CodeGen arch (Operands Bool))
    -> Operand (Ptr e)
    -> Operand e
    -> CodeGen arch ()
atomicCAS_cmp t cmp addr val =
  case t of
    NumSingleType (FloatingNumType f) -> floating f
    NumSingleType (IntegralNumType i) -> integral i
  where
    -- Floating-point values are CAS-ed through an unsigned integer of the
    -- same bit width.
    floating :: FloatingType t -> CodeGen arch ()
    floating TypeHalf{}   = atomicCAS_cmp' t (integralType :: IntegralType Word16) cmp addr val
    floating TypeFloat{}  = atomicCAS_cmp' t (integralType :: IntegralType Word32) cmp addr val
    floating TypeDouble{} = atomicCAS_cmp' t (integralType :: IntegralType Word64) cmp addr val

    integral :: IntegralType t -> CodeGen arch ()
    integral i = atomicCAS_cmp' t i cmp addr val
-- Worker for atomicCAS_cmp: emit a test/retry loop that only attempts the
-- exchange while the comparison against the current contents holds.
atomicCAS_cmp'
    :: HasCallStack
    => SingleType t     -- actual type of elements
    -> IntegralType i   -- unsigned integral type of same bit size as 't'
    -> (SingleType t -> Operands t -> Operands t -> CodeGen arch (Operands Bool))
    -> Operand (Ptr t)
    -> Operand t
    -> CodeGen arch ()
atomicCAS_cmp' t i cmp addr val = withDict (singleElt t) $ do
  let si = SingleScalarType (NumSingleType (IntegralNumType i))
  --
  test <- newBlock "cas.cmp"
  spin <- newBlock "cas.retry"
  exit <- newBlock "cas.exit"

  -- The new value and address to swap cast to integral type
  addr' <- instr' $ PtrCast (PtrPrimType (ScalarPrimType si) defaultAddrSpace) addr
  val'  <- instr' $ BitCast si val
  old   <- fresh $ TupRsingle $ SingleScalarType t

  -- Read the current value at the address
  start <- instr' $ Load (SingleScalarType t) NonVolatile addr
  top   <- br test

  -- Compare the new value with the current contents at that memory slot. If the
  -- comparison fails (e.g. we are computing atomicMin but the new value is
  -- already larger than the current value) then exit.
  setBlock test
  yes <- cmp t (ir t val) old
  _   <- cbr yes spin exit

  -- Attempt to exchange the memory at this location with the new value. The
  -- CmpXchg instruction returns the old value together with a flag indicating
  -- whether or not the swap occurred. If the swap is successful we are done,
  -- otherwise reapply the comparison value with the newly acquired value.
  setBlock spin
  old'  <- instr' $ BitCast si (op t old)
  r     <- instr' $ CmpXchg i NonVolatile addr' old' val' (CrossThread, AcquireRelease) Monotonic
  done  <- instr' $ ExtractValue scalarType PairIdxRight r
  next  <- instr' $ ExtractValue si PairIdxLeft r
  next' <- instr' $ BitCast (SingleScalarType t) next

  -- Coerce the 8-bit success flag back to a 1-bit boolean (see atomicCAS_rmw').
  done' <- case done of
             LocalReference _ (UnName n) -> return $ OP_Bool (LocalReference type' (UnName n))
             _                           -> internalError "expected unnamed local reference"

  bot <- cbr done' exit test
  -- Feed the newly observed value back into the comparison on retry.
  _   <- phi' (TupRsingle $ SingleScalarType t) test old [(ir t start,top), (ir t next',bot)]

  setBlock exit
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate-llvm/57136bfc67f0da8ea0a0aba0172397a2e92d3378/accelerate-llvm/src/Data/Array/Accelerate/LLVM/CodeGen/Permute.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeOperators #
# OPTIONS_HADDOCK hide #
|
Module : Data.Array.Accelerate.LLVM.CodeGen.Permute
License : BSD3
Stability : experimental
| A forward permutation might be specialised to use atomic instructions to
perform the read-modify-write of the output array directly, rather than
separately acquiring a lock. The basic operation is always provided in case
a backend does not support the atomic operation at that type, or if it is
executing sequentially.
For the atomicRMW case, the function is applied to the new value before
feeding to the atomic instruction to combine with the old.
| Analysis and code generation for forward permutation combination function.
Specialisation for atomic operations is currently limited to direct
applications of the function; that is, we don't dig down underneath
let-bindings.
If the old value is not used (i.e. permute const) then we can just
store the new value directly. Since we do not require the return value
as we use an unzipped struct-of-array representation for product
types, the multiple store instructions for the different fields
could come from different threads, so we only allow the non-atomic
version if the flag @-ffast-permute-const@ is set.
LLVM natively supports atomic operations on integral types only.
However different targets may support atomic instructions on other
scalar types (for example the NVPTX target supports atomic add and
subtract on floating point values).
Additionally it is possible to implement atomic instructions using
atomic compare-and-swap, which is likely to be more performant than the
generic spin-lock based approach.
XXX: This doesn't work for newtypes because the coercion gets in the
way. This should be generalised to work for product types (e.g.
complex numbers) and take this factor into account as well.
Determine which argument to a binary function was the new value being
combined. This only works when the old value is used unmodified, but that
is sufficient for us because otherwise it would not be suitable for the
atomic update operation.
Used with 'strengthenE' to ensure that the expression does not make use
of the old value except in the combination function.
compare-and-swap instructions, for targets which do not support the native
> {
> uint64_t* addr_i = reinterpret_cast<uint64_t*> addr;
> uint64_t old = *addr_i;
>
> do {
> uint64_t expected = old;
> uint64_t new = reinterpret_cast<uint64_t>(val + reinterpret_cast<double>(expected));
>
> uint64_t old = atomicCAS(addr_i, expected, new);
> }
> while (old != expected);
> }
stored as)
compare-and-swap, for targets which do not support the native instruction at
For example, atomicMin is implemented similarly to the following (however the
loop condition is more complex):
> {
> double old = *addr;
> uint64_t addr_i = reinterpret_cast<uint64_t*>(addr);
>
> uint64_t assumed_i = reinterpret_cast<uint64_t>(old);
> old = reinterpret_cast<double>(old_i);
> }
> }
If the function returns 'True', then the given value should be written to the
address.
actual type of elements
unsigned integral type of same bit size as 't'
The new value and address to swap cast to integral type
Read the current value at the address
Compare the new value with the current contents at that memory slot. If the
comparison fails (e.g. we are computing atomicMin but the new value is
already larger than the current value) then exit.
Attempt to exchange the memory at this location with the new value. The
CmpXchg instruction returns the old value together with a flag indicating
whether or not the swap occurred. If the swap is successful we are done,
otherwise reapply the comparison value with the newly acquired value. | # LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeApplications #
Copyright : [ 2016 .. 2020 ] The Accelerate Team
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.LLVM.CodeGen.Permute (
IRPermuteFun(..),
llvmOfPermuteFun,
atomicCAS_rmw,
atomicCAS_cmp,
) where
import Data.Array.Accelerate.AST
import Data.Array.Accelerate.AST.Idx
import Data.Array.Accelerate.AST.LeftHandSide
import Data.Array.Accelerate.AST.Var
import Data.Array.Accelerate.Debug.Internal
import Data.Array.Accelerate.Error
import Data.Array.Accelerate.Representation.Type
import Data.Array.Accelerate.Trafo.Substitution
import Data.Array.Accelerate.Type
import Data.Array.Accelerate.LLVM.CodeGen.Environment
import Data.Array.Accelerate.LLVM.CodeGen.Exp
import Data.Array.Accelerate.LLVM.CodeGen.IR
import Data.Array.Accelerate.LLVM.CodeGen.Monad
import Data.Array.Accelerate.LLVM.CodeGen.Sugar
import Data.Array.Accelerate.LLVM.CodeGen.Type
import Data.Array.Accelerate.LLVM.Foreign
import LLVM.AST.Type.AddrSpace
import LLVM.AST.Type.Instruction
import LLVM.AST.Type.Instruction.Atomic
import LLVM.AST.Type.Instruction.RMW as RMW
import LLVM.AST.Type.Instruction.Volatile
import LLVM.AST.Type.Name
import LLVM.AST.Type.Operand
import LLVM.AST.Type.Representation
import Control.Applicative
import Data.Constraint ( withDict )
import System.IO.Unsafe
import Prelude
data IRPermuteFun arch aenv t where
IRPermuteFun :: { combine :: IRFun2 arch aenv (e -> e -> e)
, atomicRMW :: Maybe
( RMWOperation
, IRFun1 arch aenv (e -> e)
)
}
-> IRPermuteFun arch aenv (e -> e -> e)
llvmOfPermuteFun
:: forall arch aenv e. Foreign arch
=> Fun aenv (e -> e -> e)
-> Gamma aenv
-> IRPermuteFun arch aenv (e -> e -> e)
llvmOfPermuteFun fun aenv = IRPermuteFun{..}
where
combine = llvmOfFun2 fun aenv
atomicRMW
we can do this for any scalar value with a regular Store . However ,
| Lam lhs (Lam (LeftHandSideWildcard tp) (Body body)) <- fun
, True <- fast tp
, fun' <- llvmOfFun1 (Lam lhs (Body body)) aenv
= Just (Exchange, fun')
| Lam lhs@(LeftHandSideSingle _) (Lam (LeftHandSideSingle _) (Body body)) <- fun
, Just (rmw, x) <- rmwOp body
, Just x' <- strengthenE latest x
, fun' <- llvmOfFun1 (Lam lhs (Body x')) aenv
= Just (rmw, fun')
| otherwise
= Nothing
fast :: TypeR e -> Bool
fast tp
| TupRsingle{} <- tp = True
| otherwise = unsafePerformIO (getFlag fast_permute_const)
TLM-2019 - 09 - 27
rmwOp :: OpenExp (((),e),e) aenv e -> Maybe (RMWOperation, OpenExp (((),e),e) aenv e)
rmwOp (PrimApp f xs)
| PrimAdd{} <- f = (RMW.Add,) <$> extract xs
| PrimSub{} <- f = (RMW.Sub,) <$> extract xs
| PrimMin{} <- f = (RMW.Min,) <$> extract xs
| PrimMax{} <- f = (RMW.Max,) <$> extract xs
| PrimBOr{} <- f = (RMW.Or,) <$> extract xs
| PrimBAnd{} <- f = (RMW.And,) <$> extract xs
| PrimBXor{} <- f = (RMW.Xor,) <$> extract xs
rmwOp _ = Nothing
In the permutation function , the old value is given as the second
argument , corresponding to ZeroIdx .
extract :: OpenExp (((),e),e) aenv (e,e) -> Maybe (OpenExp (((),e),e) aenv e)
extract (Pair x y)
| Evar (Var _ ZeroIdx) <- x = Just y
| Evar (Var _ ZeroIdx) <- y = Just x
extract _
= Nothing
latest :: (((),e),e) :?> ((),e)
latest ZeroIdx = Nothing
latest (SuccIdx ix) = Just ix
Implementation of atomic RMW operation ( e.g. ( + ) , ( - ) ) using atomic
instruction at this type but do support CAS at this bit width .
> void casAdd(double * addr , double val )
atomicCAS_rmw
:: forall arch e. HasCallStack
=> SingleType e
-> (Operands e -> CodeGen arch (Operands e))
-> Operand (Ptr e)
-> CodeGen arch ()
atomicCAS_rmw t update addr =
case t of
NumSingleType (FloatingNumType f) -> floating f
NumSingleType (IntegralNumType i) -> integral i
where
floating :: FloatingType t -> CodeGen arch ()
floating TypeHalf{} = atomicCAS_rmw' t (integralType :: IntegralType Word16) update addr
floating TypeFloat{} = atomicCAS_rmw' t (integralType :: IntegralType Word32) update addr
floating TypeDouble{} = atomicCAS_rmw' t (integralType :: IntegralType Word64) update addr
integral :: IntegralType t -> CodeGen arch ()
integral i = atomicCAS_rmw' t i update addr
atomicCAS_rmw'
:: HasCallStack
=> SingleType t
-> IntegralType i
-> (Operands t -> CodeGen arch (Operands t))
-> Operand (Ptr t)
-> CodeGen arch ()
atomicCAS_rmw' t i update addr = withDict (integralElt i) $ do
let si = SingleScalarType (NumSingleType (IntegralNumType i))
spin <- newBlock "rmw.spin"
exit <- newBlock "rmw.exit"
addr' <- instr' $ PtrCast (PtrPrimType (ScalarPrimType si) defaultAddrSpace) addr
init' <- instr' $ Load si NonVolatile addr'
old' <- fresh $ TupRsingle si
top <- br spin
setBlock spin
old <- instr' $ BitCast (SingleScalarType t) (op i old')
val <- update $ ir t old
val' <- instr' $ BitCast si (op t val)
r <- instr' $ CmpXchg i NonVolatile addr' (op i old') val' (CrossThread, AcquireRelease) Monotonic
done <- instr' $ ExtractValue scalarType PairIdxRight r
next' <- instr' $ ExtractValue si PairIdxLeft r
Since we removed from the set of primitive types Accelerate
supports , we have to do a small hack to have consider this as its
correct type of a 1 - bit integer ( rather than the 8 - bits it is actually
done' <- case done of
LocalReference _ (UnName n) -> return $ OP_Bool (LocalReference type' (UnName n))
_ -> internalError "expected unnamed local reference"
bot <- cbr done' exit spin
_ <- phi' (TupRsingle si) spin old' [(ir i init',top), (ir i next',bot)]
setBlock exit
Implementation of atomic comparison operators ( i.e. min , ) using
this type but do support CAS at this bit width . The old value is discarded .
> void casMin(double * addr , double val )
> uint64_t val_i = reinterpret_cast < uint64_t>(val ) ;
> while ( < old ) {
> uint64_t old_i = atomicCAS(addr_i , assumed_i , val_i ) ;
atomicCAS_cmp
:: forall arch e. HasCallStack
=> SingleType e
-> (SingleType e -> Operands e -> Operands e -> CodeGen arch (Operands Bool))
-> Operand (Ptr e)
-> Operand e
-> CodeGen arch ()
atomicCAS_cmp t cmp addr val =
case t of
NumSingleType (FloatingNumType f) -> floating f
NumSingleType (IntegralNumType i) -> integral i
where
floating :: FloatingType t -> CodeGen arch ()
floating TypeHalf{} = atomicCAS_cmp' t (integralType :: IntegralType Word16) cmp addr val
floating TypeFloat{} = atomicCAS_cmp' t (integralType :: IntegralType Word32) cmp addr val
floating TypeDouble{} = atomicCAS_cmp' t (integralType :: IntegralType Word64) cmp addr val
integral :: IntegralType t -> CodeGen arch ()
integral i = atomicCAS_cmp' t i cmp addr val
-- | Worker for 'atomicCAS_cmp': emit the CAS spin loop, performing the
-- exchange at the unsigned integral type @i@ of the same bit width as the
-- element type @t@.
--
-- NOTE(review): two constraint lines of this signature were lost when the
-- file's comments were stripped (the original read `:: HasCallStack` followed
-- directly by the comparison-function argument, leaving @t@ and @i@ unbound).
-- They are restored here as @SingleType t@ / @IntegralType i@ to match the
-- call sites in 'atomicCAS_cmp' — confirm against the upstream source.
atomicCAS_cmp'
    :: forall arch t i. HasCallStack
    => SingleType t      -- actual element type
    -> IntegralType i    -- unsigned integral type of the same bit width
    -> (SingleType t -> Operands t -> Operands t -> CodeGen arch (Operands Bool))
    -> Operand (Ptr t)
    -> Operand t
    -> CodeGen arch ()
atomicCAS_cmp' t i cmp addr val = withDict (singleElt t) $ do
  let si = SingleScalarType (NumSingleType (IntegralNumType i))
  test <- newBlock "cas.cmp"
  spin <- newBlock "cas.retry"
  exit <- newBlock "cas.exit"
  -- View the pointer and the candidate value at the integral carrier type.
  addr' <- instr' $ PtrCast (PtrPrimType (ScalarPrimType si) defaultAddrSpace) addr
  val' <- instr' $ BitCast si val
  old <- fresh $ TupRsingle $ SingleScalarType t
  start <- instr' $ Load (SingleScalarType t) NonVolatile addr
  top <- br test
  -- Loop head: does the stored value still need replacing?
  setBlock test
  yes <- cmp t (ir t val) old
  _ <- cbr yes spin exit
  -- Retry block: attempt the exchange at the integral type.
  setBlock spin
  old' <- instr' $ BitCast si (op t old)
  r <- instr' $ CmpXchg i NonVolatile addr' old' val' (CrossThread, AcquireRelease) Monotonic
  done <- instr' $ ExtractValue scalarType PairIdxRight r
  next <- instr' $ ExtractValue si PairIdxLeft r
  next' <- instr' $ BitCast (SingleScalarType t) next
  -- The CmpXchg success flag is an i1; rewrap the raw local as a Bool.
  done' <- case done of
             LocalReference _ (UnName n) -> return $ OP_Bool (LocalReference type' (UnName n))
             _ -> internalError "expected unnamed local reference"
  bot <- cbr done' exit test
  _ <- phi' (TupRsingle $ SingleScalarType t) test old [(ir t start,top), (ir t next',bot)]
  setBlock exit
|
40fa325098bec4334ac2a15f1e662c98ad3a13078c623f5efdfa413d914afa15 | reagent-project/reagent-cookbook | core.cljs | (ns morris.core
(:require [reagent.dom :as rdom]
[reagent.core :as reagent]))
(defn home-render
  "Render fn: the empty div that Morris will draw the donut chart into."
  []
  [:div#donut-example])
(defn home-did-mount
  "Mount hook: once the div exists in the DOM, ask Morris to render a
  donut chart into it with three fixed sales segments."
  []
  (let [config {:element "donut-example"
                :data [{:label "Download Sales" :value 12}
                       {:label "In-Store Sales" :value 30}
                       {:label "Mail-Order Sales" :value 20}]}]
    (.Donut js/Morris (clj->js config))))
(defn home
  "Reagent component pairing the render fn with the Morris mount hook
  (Morris needs a real DOM node, hence the did-mount lifecycle)."
  []
  (reagent/create-class
   {:reagent-render      home-render
    :component-did-mount home-did-mount}))
(defn ^:export main
  "Entry point: mount the chart component onto the #app element."
  []
  (let [mount-node (.getElementById js/document "app")]
    (rdom/render [home] mount-node)))
| null | https://raw.githubusercontent.com/reagent-project/reagent-cookbook/ccda91a74377098d27e2707c306a0187243cb290/recipes/morris/src/cljs/morris/core.cljs | clojure | (ns morris.core
(:require [reagent.dom :as rdom]
[reagent.core :as reagent]))
(defn home-render []
[:div#donut-example ])
(defn home-did-mount []
(.Donut js/Morris (clj->js {:element "donut-example"
:data [{:label "Download Sales" :value 12}
{:label "In-Store Sales" :value 30}
{:label "Mail-Order Sales" :value 20}]})))
(defn home []
(reagent/create-class {:reagent-render home-render
:component-did-mount home-did-mount}))
(defn ^:export main []
(rdom/render [home]
(.getElementById js/document "app")))
|
|
143ea975f68cc31897060b6024878bcf6d5f436a5f9c94983d1e802e802efcda | ocsigen/ocaml-eliom | divint.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Gallium , INRIA Rocquencourt
(* *)
Copyright 2013 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Printf
Test integer division and modulus , esp . ocamlopt 's optimization
when the divisor is a constant .
when the divisor is a constant. *)
let error = ref false
(* Oracle-based checker for [int] division and modulus.  The reference
   results are computed with a divisor read from a ref cell at run time, so
   the compiler cannot constant-fold them; the candidate closures passed to
   [do_test] use literal divisors and exercise ocamlopt's optimisation. *)
module WithInt = struct
  (* Current divisor; set by [do_test] before any comparison runs. *)
  let d = ref 0

  (* Reference implementations with a runtime (non-constant) divisor. *)
  let divref n = n / !d
  let modref n = n mod !d

  (* Compare the candidate pair [df]/[mf] against the oracle on one input,
     recording any mismatch in the file-level [error] flag. *)
  let test_one (df: int -> int) (mf: int -> int) x =
    let div_ok = df x = divref x in
    let mod_ok = mf x = modref x in
    if not (div_ok && mod_ok) then begin
      printf "mismatch for %d\n" x;
      error := true
    end

  (* Check [df]/[mf] on fixed edge cases (zero, small values, powers of ten,
     both int extremes) plus 1000 values from a linear congruential
     generator. *)
  let do_test divisor (df: int -> int) (mf: int -> int) =
    d := divisor;
    let fixed =
      [0; 1; 2; 3; 4; 5; 6; 7; 8; 9; 10;
       100; 1000; 10000; 100000; 1000000; max_int - 1; max_int;
       -1; -2; -3; -4; -5; -6; -7; -8; -9; -10;
       -100; -1000; -10000; -100000; -1000000; min_int + 1; min_int]
    in
    List.iter (test_one df mf) fixed;
    let seed = ref 0 in
    for _i = 1 to 1000 do
      seed := !seed * 69069 + 25173;
      test_one df mf !seed
    done
end
(* Same oracle scheme as [WithInt], but for [nativeint] operands: reference
   results use [Nativeint.div]/[Nativeint.rem] with a runtime divisor, while
   the candidate closures use literal [_n] constants. *)
module WithNat = struct
  (* Current divisor; set by [do_test] before any comparison runs. *)
  let d = ref 0n
  let divref n = Nativeint.div n !d
  let modref n = Nativeint.rem n !d
  (* Compare candidate pair [df]/[mf] against the oracle on one input,
     recording any mismatch in the file-level [error] flag. *)
  let test_one (df: nativeint -> nativeint) (mf: nativeint -> nativeint) x =
    if not (df x = divref x && mf x = modref x) then begin
      printf "mismatch for %nd\n" x;
      error := true
    end
  (* Fixed edge cases (including both nativeint extremes) plus 1000 values
     from the same linear congruential generator as [WithInt]. *)
  let do_test divisor (df: nativeint -> nativeint) (mf: nativeint -> nativeint) =
    d := Nativeint.of_int divisor;
    List.iter (test_one df mf)
      [0n; 1n; 2n; 3n; 4n; 5n; 6n; 7n; 8n; 9n; 10n;
       100n; 1000n; 10000n; 100000n; 1000000n;
       Nativeint.(pred max_int); Nativeint.max_int;
       -1n; -2n; -3n; -4n; -5n; -6n; -7n; -8n; -9n; -10n;
       -100n; -1000n; -10000n; -100000n; -1000000n;
       Nativeint.(succ min_int); Nativeint.min_int];
    let seed = ref 0n in
    for i = 1 to 1000 do
      seed := Nativeint.(add (mul !seed 69069n) 25173n);
      test_one df mf !seed
    done
end
(* Drive every (divisor, operation) pair, once on [int] and once on
   [nativeint].  The divisor inside each [fun] MUST remain a literal
   constant: the whole point of this test is to compare ocamlopt's
   constant-divisor strength reduction against the runtime-divisor oracle,
   so these calls cannot be folded into a loop over a list of divisors. *)
let _ =
  printf "1 int\n"; WithInt.do_test 1 (fun x -> x / 1)(fun x -> x mod 1);
  printf "2 int\n"; WithInt.do_test 2 (fun x -> x / 2)(fun x -> x mod 2);
  printf "3 int\n"; WithInt.do_test 3 (fun x -> x / 3)(fun x -> x mod 3);
  printf "4 int\n"; WithInt.do_test 4 (fun x -> x / 4)(fun x -> x mod 4);
  printf "5 int\n"; WithInt.do_test 5 (fun x -> x / 5)(fun x -> x mod 5);
  printf "6 int\n"; WithInt.do_test 6 (fun x -> x / 6)(fun x -> x mod 6);
  printf "7 int\n"; WithInt.do_test 7 (fun x -> x / 7)(fun x -> x mod 7);
  printf "9 int\n"; WithInt.do_test 9 (fun x -> x / 9)(fun x -> x mod 9);
  printf "10 int\n"; WithInt.do_test 10 (fun x -> x / 10)(fun x -> x mod 10);
  printf "11 int\n"; WithInt.do_test 11 (fun x -> x / 11)(fun x -> x mod 11);
  printf "12 int\n"; WithInt.do_test 12 (fun x -> x / 12)(fun x -> x mod 12);
  printf "25 int\n"; WithInt.do_test 25 (fun x -> x / 25)(fun x -> x mod 25);
  printf "55 int\n"; WithInt.do_test 55 (fun x -> x / 55)(fun x -> x mod 55);
  printf "125 int\n";
  WithInt.do_test 125 (fun x -> x / 125)(fun x -> x mod 125);
  printf "625 int\n";
  WithInt.do_test 625 (fun x -> x / 625)(fun x -> x mod 625);
  (* Negative constant divisors exercise the sign-handling of the rewrite. *)
  printf "-1 int\n";
  WithInt.do_test (-1) (fun x -> x / (-1))(fun x -> x mod (-1));
  printf "-2 int\n";
  WithInt.do_test (-2) (fun x -> x / (-2))(fun x -> x mod (-2));
  printf "-3 int\n";
  WithInt.do_test (-3) (fun x -> x / (-3))(fun x -> x mod (-3));
  printf "1 nat\n";
  WithNat.do_test 1 (fun x -> Nativeint.div x 1n)(fun x -> Nativeint.rem x 1n);
  printf "2 nat\n";
  WithNat.do_test 2 (fun x -> Nativeint.div x 2n)(fun x -> Nativeint.rem x 2n);
  printf "3 nat\n";
  WithNat.do_test 3 (fun x -> Nativeint.div x 3n)(fun x -> Nativeint.rem x 3n);
  printf "4 nat\n";
  WithNat.do_test 4 (fun x -> Nativeint.div x 4n)(fun x -> Nativeint.rem x 4n);
  printf "5 nat\n";
  WithNat.do_test 5 (fun x -> Nativeint.div x 5n)(fun x -> Nativeint.rem x 5n);
  printf "6 nat\n";
  WithNat.do_test 6 (fun x -> Nativeint.div x 6n)(fun x -> Nativeint.rem x 6n);
  printf "7 nat\n";
  WithNat.do_test 7 (fun x -> Nativeint.div x 7n)(fun x -> Nativeint.rem x 7n);
  printf "9 nat\n";
  WithNat.do_test 9 (fun x -> Nativeint.div x 9n)(fun x -> Nativeint.rem x 9n);
  printf "10 nat\n";
  WithNat.do_test 10 (fun x -> Nativeint.div x 10n)
                     (fun x -> Nativeint.rem x 10n);
  printf "11 nat\n";
  WithNat.do_test 11 (fun x -> Nativeint.div x 11n)
                     (fun x -> Nativeint.rem x 11n);
  printf "12 nat\n";
  WithNat.do_test 12 (fun x -> Nativeint.div x 12n)
                     (fun x -> Nativeint.rem x 12n);
  printf "25 nat\n";
  WithNat.do_test 25 (fun x -> Nativeint.div x 25n)
                     (fun x -> Nativeint.rem x 25n);
  printf "55 nat\n";
  WithNat.do_test 55 (fun x -> Nativeint.div x 55n)
                     (fun x -> Nativeint.rem x 55n);
  printf "125 nat\n";
  WithNat.do_test 125 (fun x -> Nativeint.div x 125n)
                      (fun x -> Nativeint.rem x 125n);
  printf "625 nat\n";
  WithNat.do_test 625 (fun x -> Nativeint.div x 625n)
                      (fun x -> Nativeint.rem x 625n);
  printf "-1 nat\n";
  WithNat.do_test (-1) (fun x -> Nativeint.div x (-1n))
                       (fun x -> Nativeint.rem x (-1n));
  printf "-2 nat\n";
  WithNat.do_test (-2) (fun x -> Nativeint.div x (-2n))
                       (fun x -> Nativeint.rem x (-2n));
  printf "-3 nat\n";
  WithNat.do_test (-3) (fun x -> Nativeint.div x (-3n))
                       (fun x -> Nativeint.rem x (-3n));
  (* Single exit report: any mismatch recorded above fails the whole run. *)
  if !error then printf "TEST FAILED.\n" else printf "Test passed.\n"
(* Regression test for OCaml PR#6879: [1 mod n] with a statically unknown
   divisor must not be mis-simplified by the compiler — TODO(review): confirm
   the exact failure mode against the PR.  [f 1] checks the normal case. *)
let f n = assert (1 mod n = 0)
let () = f 1
| null | https://raw.githubusercontent.com/ocsigen/ocaml-eliom/497c6707f477cb3086dc6d8124384e74a8c379ae/testsuite/tests/basic/divint.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
PR#6879 | , projet Gallium , INRIA Rocquencourt
Copyright 2013 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Printf
Test integer division and modulus , esp . ocamlopt 's optimization
when the divisor is a constant .
when the divisor is a constant. *)
let error = ref false
module WithInt = struct
let d = ref 0
let divref n = n / !d
let modref n = n mod !d
let test_one (df: int -> int) (mf: int -> int) x =
if not (df x = divref x && mf x = modref x) then begin
printf "mismatch for %d\n" x;
error := true
end
let do_test divisor (df: int -> int) (mf: int -> int) =
d := divisor;
List.iter (test_one df mf)
[0; 1; 2; 3; 4; 5; 6; 7; 8; 9; 10;
100; 1000; 10000; 100000; 1000000; max_int - 1; max_int;
-1; -2; -3; -4; -5; -6; -7; -8; -9; -10;
-100; -1000; -10000; -100000; -1000000; min_int + 1; min_int];
let seed = ref 0 in
for i = 1 to 1000 do
seed := !seed * 69069 + 25173;
test_one df mf !seed
done
end
module WithNat = struct
let d = ref 0n
let divref n = Nativeint.div n !d
let modref n = Nativeint.rem n !d
let test_one (df: nativeint -> nativeint) (mf: nativeint -> nativeint) x =
if not (df x = divref x && mf x = modref x) then begin
printf "mismatch for %nd\n" x;
error := true
end
let do_test divisor (df: nativeint -> nativeint) (mf: nativeint -> nativeint) =
d := Nativeint.of_int divisor;
List.iter (test_one df mf)
[0n; 1n; 2n; 3n; 4n; 5n; 6n; 7n; 8n; 9n; 10n;
100n; 1000n; 10000n; 100000n; 1000000n;
Nativeint.(pred max_int); Nativeint.max_int;
-1n; -2n; -3n; -4n; -5n; -6n; -7n; -8n; -9n; -10n;
-100n; -1000n; -10000n; -100000n; -1000000n;
Nativeint.(succ min_int); Nativeint.min_int];
let seed = ref 0n in
for i = 1 to 1000 do
seed := Nativeint.(add (mul !seed 69069n) 25173n);
test_one df mf !seed
done
end
let _ =
printf "1 int\n"; WithInt.do_test 1 (fun x -> x / 1)(fun x -> x mod 1);
printf "2 int\n"; WithInt.do_test 2 (fun x -> x / 2)(fun x -> x mod 2);
printf "3 int\n"; WithInt.do_test 3 (fun x -> x / 3)(fun x -> x mod 3);
printf "4 int\n"; WithInt.do_test 4 (fun x -> x / 4)(fun x -> x mod 4);
printf "5 int\n"; WithInt.do_test 5 (fun x -> x / 5)(fun x -> x mod 5);
printf "6 int\n"; WithInt.do_test 6 (fun x -> x / 6)(fun x -> x mod 6);
printf "7 int\n"; WithInt.do_test 7 (fun x -> x / 7)(fun x -> x mod 7);
printf "9 int\n"; WithInt.do_test 9 (fun x -> x / 9)(fun x -> x mod 9);
printf "10 int\n"; WithInt.do_test 10 (fun x -> x / 10)(fun x -> x mod 10);
printf "11 int\n"; WithInt.do_test 11 (fun x -> x / 11)(fun x -> x mod 11);
printf "12 int\n"; WithInt.do_test 12 (fun x -> x / 12)(fun x -> x mod 12);
printf "25 int\n"; WithInt.do_test 25 (fun x -> x / 25)(fun x -> x mod 25);
printf "55 int\n"; WithInt.do_test 55 (fun x -> x / 55)(fun x -> x mod 55);
printf "125 int\n";
WithInt.do_test 125 (fun x -> x / 125)(fun x -> x mod 125);
printf "625 int\n";
WithInt.do_test 625 (fun x -> x / 625)(fun x -> x mod 625);
printf "-1 int\n";
WithInt.do_test (-1) (fun x -> x / (-1))(fun x -> x mod (-1));
printf "-2 int\n";
WithInt.do_test (-2) (fun x -> x / (-2))(fun x -> x mod (-2));
printf "-3 int\n";
WithInt.do_test (-3) (fun x -> x / (-3))(fun x -> x mod (-3));
printf "1 nat\n";
WithNat.do_test 1 (fun x -> Nativeint.div x 1n)(fun x -> Nativeint.rem x 1n);
printf "2 nat\n";
WithNat.do_test 2 (fun x -> Nativeint.div x 2n)(fun x -> Nativeint.rem x 2n);
printf "3 nat\n";
WithNat.do_test 3 (fun x -> Nativeint.div x 3n)(fun x -> Nativeint.rem x 3n);
printf "4 nat\n";
WithNat.do_test 4 (fun x -> Nativeint.div x 4n)(fun x -> Nativeint.rem x 4n);
printf "5 nat\n";
WithNat.do_test 5 (fun x -> Nativeint.div x 5n)(fun x -> Nativeint.rem x 5n);
printf "6 nat\n";
WithNat.do_test 6 (fun x -> Nativeint.div x 6n)(fun x -> Nativeint.rem x 6n);
printf "7 nat\n";
WithNat.do_test 7 (fun x -> Nativeint.div x 7n)(fun x -> Nativeint.rem x 7n);
printf "9 nat\n";
WithNat.do_test 9 (fun x -> Nativeint.div x 9n)(fun x -> Nativeint.rem x 9n);
printf "10 nat\n";
WithNat.do_test 10 (fun x -> Nativeint.div x 10n)
(fun x -> Nativeint.rem x 10n);
printf "11 nat\n";
WithNat.do_test 11 (fun x -> Nativeint.div x 11n)
(fun x -> Nativeint.rem x 11n);
printf "12 nat\n";
WithNat.do_test 12 (fun x -> Nativeint.div x 12n)
(fun x -> Nativeint.rem x 12n);
printf "25 nat\n";
WithNat.do_test 25 (fun x -> Nativeint.div x 25n)
(fun x -> Nativeint.rem x 25n);
printf "55 nat\n";
WithNat.do_test 55 (fun x -> Nativeint.div x 55n)
(fun x -> Nativeint.rem x 55n);
printf "125 nat\n";
WithNat.do_test 125 (fun x -> Nativeint.div x 125n)
(fun x -> Nativeint.rem x 125n);
printf "625 nat\n";
WithNat.do_test 625 (fun x -> Nativeint.div x 625n)
(fun x -> Nativeint.rem x 625n);
printf "-1 nat\n";
WithNat.do_test (-1) (fun x -> Nativeint.div x (-1n))
(fun x -> Nativeint.rem x (-1n));
printf "-2 nat\n";
WithNat.do_test (-2) (fun x -> Nativeint.div x (-2n))
(fun x -> Nativeint.rem x (-2n));
printf "-3 nat\n";
WithNat.do_test (-3) (fun x -> Nativeint.div x (-3n))
(fun x -> Nativeint.rem x (-3n));
if !error then printf "TEST FAILED.\n" else printf "Test passed.\n"
let f n = assert (1 mod n = 0)
let () = f 1
|
4aeb3e1772b5f41e90e2fd43ee1ac82509ad4da92e22cb6cfdf0cc12411479da | khigia/ocaml-fuzlog | fuzzySetTest.ml | open Fuzlog
(* Approximate float equality: true iff [a] and [b] differ by less than
   0.01 in absolute value. *)
let _cmp_float a b = abs_float (a -. b) < 0.01
(* Pointwise approximate equality of two coordinate lists; each (x, y) pair
   is compared with [_cmp_float].  Like the stdlib binary iterators, raises
   Invalid_argument when the lists differ in length. *)
let _cmp_points l1 l2 =
  List.for_all2
    (fun (xa, ya) (xb, yb) -> _cmp_float xa xb && _cmp_float ya yb)
    l1 l2
(* Assert via OUnit that the membership function of fuzzy set [s] evaluated
   at [x] equals [expected], using the _cmp_float tolerance. *)
let _mu_check s x expected =
  let y = FuzzySet.mu s x in
  (*let _ = Printf.printf "mu(%f) = %f\n" x y in*)
  OUnit.assert_equal
    ~cmp:_cmp_float
    ~msg:"membership function"
    ~printer:(fun v -> Printf.sprintf "mu(%f)=%f" x v)
    expected
    y
let _ = Tests.register "create" (fun () ->
let s = FuzzySet.create [
(2.0, 0.8);
(3.0, 0.0);
(1.0, 0.0);
(2.0, 1.0);
] in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"create from point list"
[(1.0, 0.0); (2.0, 1.0); (3.0, 0.0);]
(FuzzySet.points s)
;
OUnit.assert_raises
(FuzzySet.InvalidPossibilityValue (-0.1))
(fun () -> FuzzySet.create [ (2.0, -0.1); ])
;
OUnit.assert_raises
(FuzzySet.InvalidPossibilityValue (1.1))
(fun () -> FuzzySet.create [ (2.0, 1.1); ])
)
let _ = Tests.register "Create triangle" (fun () ->
let s = FuzzySet.create_triangle 1.0 3.0 in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"create skew triangle"
[(1.0, 0.0); (2.0, 1.0); (3.0, 0.0);]
(FuzzySet.points s)
)
let _ = Tests.register "to_s" (fun () ->
let s = FuzzySet.create_triangle 12.0 42.0 in
let _ = FuzzySet.to_s s in
OUnit.assert_bool "to_s failure" true
)
let _ = Tests.register "mu" (fun () ->
let s = FuzzySet.create_triangle 1.0 3.0 in
let _ = _mu_check s 0.5 0.0 in
let _ = _mu_check s 1.0 0.0 in
let _ = _mu_check s 1.3 0.3 in
let _ = _mu_check s 1.5 0.5 in
let _ = _mu_check s 1.6 0.6 in
let _ = _mu_check s 2.0 1.0 in
let _ = _mu_check s 2.5 0.5 in
let _ = _mu_check s 3.0 0.0 in
let _ = _mu_check s 3.5 0.0 in
()
)
let _ = Tests.register "product" (fun () ->
let s0 = FuzzySet.create [
(-1.0, 0.2);
(2.0, 1.0);
(5.5, 0.8);
(6.5, 0.3);
] in
let s1 = FuzzySet.product s0 2.0 in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"product with saturation"
[(-1.0, 0.4); (2.0, 1.0); (5.5, 1.0); (6.5, 0.6);]
(FuzzySet.points s1)
)
let _ = Tests.register "combine by maximum" (fun () ->
let s1 = FuzzySet.create_triangle 0.0 4.0 in
let s2 = FuzzySet.create_triangle 2.0 6.0 in
let max = FuzzySet.combine_max s1 s2 in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"combine maximum"
[(0.0, 0.0); (2.0, 1.0); (3.0, 0.5); (4.0, 1.0); (6.0, 0.0);]
(FuzzySet.points max)
)
let _ = Tests.register "x of barycenter" (fun () ->
let s1 = FuzzySet.create_triangle 0.0 4.0 in
let s2 = FuzzySet.create_triangle 2.0 6.0 in
let comb = FuzzySet.combine_max s1 s2 in
let tester = fun set expected ->
let x = FuzzySet.x_cog set in
OUnit.assert_equal
~cmp:_cmp_float
~msg:"x_cog"
~printer:(fun v -> Printf.sprintf "x_cog=%f" v)
expected
x
in
tester s1 2.0;
tester s2 4.0;
tester comb 3.0
)
let _ = Tests.run "FuzzySet test suite"
| null | https://raw.githubusercontent.com/khigia/ocaml-fuzlog/cf8effd33eb21ef62ed770c03a6f236ba0ee49ef/test/fuzzySetTest.ml | ocaml | let _ = Printf.printf "mu(%f) = %f\n" x y in | open Fuzlog
let _cmp_float a b =
let d = a -. b in
abs_float d < 0.01
let _cmp_points l1 l2 =
List.fold_left2
(fun acc (xa, ya) (xb, yb) ->
let r1 = _cmp_float xa xb in
let r2 = _cmp_float ya yb in
acc && r1 && r2
)
true
l1
l2
let _mu_check s x expected =
let y = FuzzySet.mu s x in
OUnit.assert_equal
~cmp:_cmp_float
~msg:"membership function"
~printer:(fun v -> Printf.sprintf "mu(%f)=%f" x v)
expected
y
let _ = Tests.register "create" (fun () ->
let s = FuzzySet.create [
(2.0, 0.8);
(3.0, 0.0);
(1.0, 0.0);
(2.0, 1.0);
] in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"create from point list"
[(1.0, 0.0); (2.0, 1.0); (3.0, 0.0);]
(FuzzySet.points s)
;
OUnit.assert_raises
(FuzzySet.InvalidPossibilityValue (-0.1))
(fun () -> FuzzySet.create [ (2.0, -0.1); ])
;
OUnit.assert_raises
(FuzzySet.InvalidPossibilityValue (1.1))
(fun () -> FuzzySet.create [ (2.0, 1.1); ])
)
let _ = Tests.register "Create triangle" (fun () ->
let s = FuzzySet.create_triangle 1.0 3.0 in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"create skew triangle"
[(1.0, 0.0); (2.0, 1.0); (3.0, 0.0);]
(FuzzySet.points s)
)
let _ = Tests.register "to_s" (fun () ->
let s = FuzzySet.create_triangle 12.0 42.0 in
let _ = FuzzySet.to_s s in
OUnit.assert_bool "to_s failure" true
)
let _ = Tests.register "mu" (fun () ->
let s = FuzzySet.create_triangle 1.0 3.0 in
let _ = _mu_check s 0.5 0.0 in
let _ = _mu_check s 1.0 0.0 in
let _ = _mu_check s 1.3 0.3 in
let _ = _mu_check s 1.5 0.5 in
let _ = _mu_check s 1.6 0.6 in
let _ = _mu_check s 2.0 1.0 in
let _ = _mu_check s 2.5 0.5 in
let _ = _mu_check s 3.0 0.0 in
let _ = _mu_check s 3.5 0.0 in
()
)
let _ = Tests.register "product" (fun () ->
let s0 = FuzzySet.create [
(-1.0, 0.2);
(2.0, 1.0);
(5.5, 0.8);
(6.5, 0.3);
] in
let s1 = FuzzySet.product s0 2.0 in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"product with saturation"
[(-1.0, 0.4); (2.0, 1.0); (5.5, 1.0); (6.5, 0.6);]
(FuzzySet.points s1)
)
let _ = Tests.register "combine by maximum" (fun () ->
let s1 = FuzzySet.create_triangle 0.0 4.0 in
let s2 = FuzzySet.create_triangle 2.0 6.0 in
let max = FuzzySet.combine_max s1 s2 in
OUnit.assert_equal
~cmp:_cmp_points
~msg:"combine maximum"
[(0.0, 0.0); (2.0, 1.0); (3.0, 0.5); (4.0, 1.0); (6.0, 0.0);]
(FuzzySet.points max)
)
let _ = Tests.register "x of barycenter" (fun () ->
let s1 = FuzzySet.create_triangle 0.0 4.0 in
let s2 = FuzzySet.create_triangle 2.0 6.0 in
let comb = FuzzySet.combine_max s1 s2 in
let tester = fun set expected ->
let x = FuzzySet.x_cog set in
OUnit.assert_equal
~cmp:_cmp_float
~msg:"x_cog"
~printer:(fun v -> Printf.sprintf "x_cog=%f" v)
expected
x
in
tester s1 2.0;
tester s2 4.0;
tester comb 3.0
)
let _ = Tests.run "FuzzySet test suite"
|
e5a7ef11fb9a6a183cf852c93d14e2982dd48d574f447e84cd757db3b9742124 | skanev/playground | 40-tests.scm | (require rackunit rackunit/text-ui)
(load "../40.scm")
(define sicp-2.40-tests
(test-suite
"Tests for SICP exercise 2.40"
(check-equal? (enumerate-interval 1 5) '(1 2 3 4 5))
(check-equal? (unique-pairs 2) '((1 2)))
(check-equal? (unique-pairs 3) '((1 2) (1 3) (2 3)))
(check-equal? (unique-pairs 4) '((1 2) (1 3) (1 4) (2 3) (2 4) (3 4)))
(check-equal? (prime-sum-pairs 6) '((1 2) (1 4) (1 6) (2 3) (2 5) (3 4) (5 6)))
))
(run-tests sicp-2.40-tests)
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/02/tests/40-tests.scm | scheme | (require rackunit rackunit/text-ui)
(load "../40.scm")
(define sicp-2.40-tests
(test-suite
"Tests for SICP exercise 2.40"
(check-equal? (enumerate-interval 1 5) '(1 2 3 4 5))
(check-equal? (unique-pairs 2) '((1 2)))
(check-equal? (unique-pairs 3) '((1 2) (1 3) (2 3)))
(check-equal? (unique-pairs 4) '((1 2) (1 3) (1 4) (2 3) (2 4) (3 4)))
(check-equal? (prime-sum-pairs 6) '((1 2) (1 4) (1 6) (2 3) (2 5) (3 4) (5 6)))
))
(run-tests sicp-2.40-tests)
|
|
0b4d674c0a2b22932bec9971e0ea78b9e514029fe5a20adf2b63fee6ae7e1cc4 | tomahawkins/atom | Probes.hs | |
Module : Probes
Description : Example usage of probes in Atom
Copyright : ( c ) 2015
This demonstrates the usage of Atom 's probe functionality . In this case , it
simply uses @printf@ to log a probe 's value . Most POSIX systems should be able
to build and run the generated C code .
Module: Probes
Description: Example usage of probes in Atom
Copyright: (c) 2015 Chris Hodapp
This demonstrates the usage of Atom's probe functionality. In this case, it
simply uses @printf@ to log a probe's value. Most POSIX systems should be able
to build and run the generated C code.
-}
module Language.Atom.Example.Probes where
import Data.Word
import Language.Atom
-- | Invoke the Atom compiler: generate C for the 'example' rule set and
-- print the computed schedule.  (Fix: the Haddock marker on this comment
-- was stripped during extraction, leaving invalid Haskell.)
main :: IO ()
main = do
  -- Disable rule-coverage tracking and splice in our own C wrapper code.
  let atomCfg = defaults { cCode = prePostCode , cRuleCoverage = False }
  (sched, _, _, _, _) <- compile "probe_example" atomCfg example
  putStrLn $ reportSchedule sched
-- | Generate a code comment about the given probe.
probeStr :: (Name, Type) -> String
probeStr (n, t) = concat ["// Probe: ", n, ", type: ", show t]
-- | Use 'action' to call @PROBE_PRINTF@ on a probe given as (name, value).
-- This will work only on integer-valued probes.
logProbe :: (String, UE) -> Atom ()
logProbe (str, ue_) = action emit [ue_]
  where
    -- Build the PROBE_PRINTF call; the code generator substitutes the
    -- probe's C expression for the single element of the argument list.
    emit args =
      concat [ "PROBE_PRINTF(\"%u, ", str
             , ": %i\\n\", __global_clock, ", head args, ")" ]
-- | Top-level rule
example :: Atom ()
example = do
-- Include in the once-per-second clock:
sec <- tickSecond
Compute minutes and hours as well ( probes take arbitrary expressions ):
probe "Minutes" $ (value sec) `div_` 60
probe "Hours" $ (value sec) `div_` 3600
At 1/200 of our base rate ( ~ 5 seconds ) , we call ' logProbe ' on all of the
-- probes that are in use.
period 200 $ atom "monitor" $ do
mapM_ logProbe =<< probes
-- | Build the (preamble, postamble) C text wrapped around the generated
-- code: the preamble defines PROBE_PRINTF, pulls in POSIX headers, and
-- lists every probe as a comment; the postamble is a polling @main@.
-- (Fix: the comment marker on the "Basic stub" line was stripped during
-- extraction, leaving invalid Haskell; restored below.)
prePostCode :: [Name] -> [Name] -> [(Name, Type)] -> (String, String)
prePostCode _ _ probeList =
  ( unlines $ [ "// ---- This source is automatically generated by Atom ----"
              , "#define PROBE_PRINTF printf"
              , "#include <stdio.h>"
              , "#include <stdlib.h>"
              , "#include <unistd.h>"
              ] ++ map probeStr probeList
  -- Basic stub to call with a 1 millisecond delay (do not attempt anything like
  -- this in production - use an interrupt):
  , unlines [ "int main(void) {"
            , " while (true) {"
            , " probe_example();"
            , " usleep(1000);"
            , " }"
            , " return 0;"
            , "}"
            , "// ---- End automatically-generated source ----"
            ])
-- | Count up seconds of runtime, assuming our base rate is 1 millisecond.
-- (Fix: the Haddock marker on this comment was stripped during extraction,
-- leaving invalid Haskell.)
tickSecond :: Atom (V Word64)
tickSecond = do
  sec <- word64 "seconds" 0
  -- Add a probe to the clock:
  probe "Seconds" $ value sec
  -- Fire every 1000 base ticks at phase 0, incrementing the counter.
  period 1000 $ exactPhase 0 $ atom "second" $ incr sec
  return sec
| null | https://raw.githubusercontent.com/tomahawkins/atom/e552e18859c6d249af4b293e9d2197878c0fd4fd/Language/Atom/Example/Probes.hs | haskell | | Generate a code comment about the given probe.
| Use 'action' to call @PROBE_PRINTF@ on a probe given as (name, value).
This will work only on integer-valued probes.
| Top-level rule
Include in the once-per-second clock:
probes that are in use.
this in production - use an interrupt):
Add a probe to the clock: | |
Module : Probes
Description : Example usage of probes in Atom
Copyright : ( c ) 2015
This demonstrates the usage of Atom 's probe functionality . In this case , it
simply uses @printf@ to log a probe 's value . Most POSIX systems should be able
to build and run the generated C code .
Module: Probes
Description: Example usage of probes in Atom
Copyright: (c) 2015 Chris Hodapp
This demonstrates the usage of Atom's probe functionality. In this case, it
simply uses @printf@ to log a probe's value. Most POSIX systems should be able
to build and run the generated C code.
-}
module Language.Atom.Example.Probes where
import Data.Word
import Language.Atom
| Invoke the Atom compiler
main :: IO ()
main = do
let atomCfg = defaults { cCode = prePostCode , cRuleCoverage = False }
(sched, _, _, _, _) <- compile "probe_example" atomCfg example
putStrLn $ reportSchedule sched
probeStr :: (Name, Type) -> String
probeStr (n, t) = "// Probe: " ++ n ++ ", type: " ++ show t
logProbe :: (String, UE) -> Atom ()
logProbe (str, ue_) = action probeFn [ue_]
where probeFn v = "PROBE_PRINTF(\"%u, " ++ str ++
": %i\\n\", __global_clock, " ++ head v ++ ")"
example :: Atom ()
example = do
sec <- tickSecond
Compute minutes and hours as well ( probes take arbitrary expressions ):
probe "Minutes" $ (value sec) `div_` 60
probe "Hours" $ (value sec) `div_` 3600
At 1/200 of our base rate ( ~ 5 seconds ) , we call ' logProbe ' on all of the
period 200 $ atom "monitor" $ do
mapM_ logProbe =<< probes
prePostCode :: [Name] -> [Name] -> [(Name, Type)] -> (String, String)
prePostCode _ _ probeList =
( unlines $ [ "// ---- This source is automatically generated by Atom ----"
, "#define PROBE_PRINTF printf"
, "#include <stdio.h>"
, "#include <stdlib.h>"
, "#include <unistd.h>"
] ++ map probeStr probeList
Basic stub to call with a 1 millisecond delay ( do not attempt anything like
, unlines [ "int main(void) {"
, " while (true) {"
, " probe_example();"
, " usleep(1000);"
, " }"
, " return 0;"
, "}"
, "// ---- End automatically-generated source ----"
])
| Count up seconds of runtime , assuming our base rate is 1 millisecond :
tickSecond :: Atom (V Word64)
tickSecond = do
sec <- word64 "seconds" 0
probe "Seconds" $ value sec
period 1000 $ exactPhase 0 $ atom "second" $ incr sec
return sec
|
99063858c137cd38a672372c490beda75ff48ff4336e3a520fcbddbd58482ef3 | strint/sicpAns | 2023_list_for_each.scm | (define (for-each-i p l)
(if (null? l)
#t
(begin (p (car l))
(for-each-i p (cdr l)))))
| null | https://raw.githubusercontent.com/strint/sicpAns/efc4bdfaab7583117d42f2141d4b3b9e12792b79/2.2.1-1_ListClosure/2023_list_for_each.scm | scheme | (define (for-each-i p l)
(if (null? l)
#t
(begin (p (car l))
(for-each-i p (cdr l)))))
|
|
ad711a476c1a10dacf8d1e34ec359d125c6517f4751eb7a2434f8c0909229ab9 | tek/proteome | Quit.hs | module Proteome.Quit where
import Conc (Lock)
import Ribosome (Handler, PersistError, Rpc, RpcError, resumeReport)
import Ribosome.Effect.Persist (Persist)
import Proteome.Data.Env (Env)
import Proteome.Data.PersistBuffers (PersistBuffers)
import Proteome.PersistBuffers (StoreBuffersLock, storeBuffers)
-- | Handler run on quit: persist the current buffer state via
-- 'storeBuffers'.  'resumeReport' lifts any 'PersistError' from the
-- persistence backend or 'RpcError' from Neovim into user-facing reports
-- instead of crashing the plugin.
proQuit ::
  Member (Persist PersistBuffers !! PersistError) r =>
  Members [Lock @@ StoreBuffersLock, AtomicState Env, Rpc !! RpcError, Resource, Embed IO] r =>
  Handler r ()
proQuit =
  resumeReport @(Persist _) $ resumeReport @Rpc do
    storeBuffers
| null | https://raw.githubusercontent.com/tek/proteome/274e36e99d801219bdf4e74899c509827e8f7275/packages/proteome/lib/Proteome/Quit.hs | haskell | module Proteome.Quit where
import Conc (Lock)
import Ribosome (Handler, PersistError, Rpc, RpcError, resumeReport)
import Ribosome.Effect.Persist (Persist)
import Proteome.Data.Env (Env)
import Proteome.Data.PersistBuffers (PersistBuffers)
import Proteome.PersistBuffers (StoreBuffersLock, storeBuffers)
proQuit ::
Member (Persist PersistBuffers !! PersistError) r =>
Members [Lock @@ StoreBuffersLock, AtomicState Env, Rpc !! RpcError, Resource, Embed IO] r =>
Handler r ()
proQuit =
resumeReport @(Persist _) $ resumeReport @Rpc do
storeBuffers
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.