Columns (dtype and observed value range):
  _id            stringlengths   64 .. 64
  repository     stringlengths    6 .. 84
  name           stringlengths    4 .. 110
  content        stringlengths    0 .. 248k
  license        null
  download_url   stringlengths   89 .. 454
  language       stringclasses   7 values
  comments       stringlengths    0 .. 74.6k
  code           stringlengths    0 .. 248k
(A minimal loading sketch using these columns follows the last record below.)
622fcc9938d272ddc3865dc55a225b20ae335c0a41ab584f1860c0a041ec3c72
WorksHub/client
landing.cljc
(ns wh.styles.landing) (def page "landing__page--2REJ9") (def page__main "landing__page__main--2aEJv") (def side-column "landing__side-column--1krwu") (def side-column--left "landing__side-column--left--1Ucjl") (def tablet-only "landing__tablet-only--18Cc3") (def main-column "landing__main-column--3d3VB") (def card "landing__card--1Kley") (def card--tag-picker "landing__card--tag-picker--2fgJO") (def card--blog-published "landing__card--blog-published--1t1Ub") (def card--job-published "landing__card--job-published--3kvVB") (def card--matching-issues "landing__card--matching-issues--3l9_0") (def card--company-stats "landing__card--company-stats--2p-JS") (def card--issue-started "landing__card--issue-started--OHecJ") (def card--matching-jobs "landing__card--matching-jobs--1xlgC") (def loader "landing__loader--3T80b") (def prev-next-buttons "landing__prev-next-buttons--NZ9Q3") (def prev-next-button__text "landing__prev-next-button__text---fDJ8") (def separator "landing__separator--3E_kH")
null
https://raw.githubusercontent.com/WorksHub/client/a51729585c2b9d7692e57b3edcd5217c228cf47c/common/src/wh/styles/landing.cljc
clojure
(ns wh.styles.landing) (def page "landing__page--2REJ9") (def page__main "landing__page__main--2aEJv") (def side-column "landing__side-column--1krwu") (def side-column--left "landing__side-column--left--1Ucjl") (def tablet-only "landing__tablet-only--18Cc3") (def main-column "landing__main-column--3d3VB") (def card "landing__card--1Kley") (def card--tag-picker "landing__card--tag-picker--2fgJO") (def card--blog-published "landing__card--blog-published--1t1Ub") (def card--job-published "landing__card--job-published--3kvVB") (def card--matching-issues "landing__card--matching-issues--3l9_0") (def card--company-stats "landing__card--company-stats--2p-JS") (def card--issue-started "landing__card--issue-started--OHecJ") (def card--matching-jobs "landing__card--matching-jobs--1xlgC") (def loader "landing__loader--3T80b") (def prev-next-buttons "landing__prev-next-buttons--NZ9Q3") (def prev-next-button__text "landing__prev-next-button__text---fDJ8") (def separator "landing__separator--3E_kH")
8e5f24d82447bce4ed010fa60ad4122a34528e7f6c625083cb936b4869c6ae3d
robrix/starlight
TVar.hs
{-# LANGUAGE ExplicitForAll #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} -- | A 'Control.Concurrent.STM.TVar.TVar'-backed carrier for 'State'. Individual 'get's and 'put's are run 'atomically', but NB that 'modify' is /not/ atomic, so this is likely unsuitable for complex interleaving of concurrent reads and writes. module Control.Carrier.State.STM.TVar ( -- * State carrier runStateVar , runState , evalState , execState , StateC(..) -- * State effect , module Control.Effect.State ) where import Control.Algebra import Control.Carrier.Lift import Control.Carrier.Reader import Control.Concurrent.STM.TVar import Control.Effect.State import Control.Monad.Fix import Control.Monad.IO.Class import Control.Monad.STM import Control.Monad.Trans.Class runStateVar :: TVar s -> StateC s m a -> m a runStateVar var (StateC m) = runReader var m runState :: forall s m a sig . Has (Lift IO) sig m => s -> StateC s m a -> m (s, a) runState s m = do var <- sendM (newTVarIO s) a <- runStateVar var m s' <- sendM (readTVarIO var) pure (s', a) evalState :: forall s m a sig . Has (Lift IO) sig m => s -> StateC s m a -> m a evalState s = fmap snd . runState s execState :: forall s m a sig . Has (Lift IO) sig m => s -> StateC s m a -> m s execState s = fmap fst . runState s newtype StateC s m a = StateC { runStateC :: ReaderC (TVar s) m a } deriving (Applicative, Functor, Monad, MonadFail, MonadFix, MonadIO, MonadTrans) instance Has (Lift IO) sig m => Algebra (State s :+: sig) (StateC s m) where alg hdl sig ctx = case sig of L Get -> (<$ ctx) <$> (StateC ask >>= sendM . readTVarIO) L (Put s) -> do var <- StateC ask ctx <$ StateC (sendM (atomically (writeTVar var s))) R other -> StateC (alg (runStateC . hdl) (R other) ctx)
null
https://raw.githubusercontent.com/robrix/starlight/ad80ab74dc2eedbb52a75ac8ce507661d32f488e/src/Control/Carrier/State/STM/TVar.hs
haskell
# LANGUAGE ExplicitForAll # # LANGUAGE GADTs # * State carrier * State effect
# LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE MultiParamTypeClasses # # LANGUAGE TypeOperators # # LANGUAGE UndecidableInstances # | A ' Control . Concurrent . STM.TVar . TVar'-backed carrier for ' State ' . Individual ' get 's and ' put 's are run ' atomically ' , but NB that ' modify ' is /not/ atomic , so this is likely unsuitable for complex interleaving of concurrent reads and writes . module Control.Carrier.State.STM.TVar runStateVar , runState , evalState , execState , StateC(..) , module Control.Effect.State ) where import Control.Algebra import Control.Carrier.Lift import Control.Carrier.Reader import Control.Concurrent.STM.TVar import Control.Effect.State import Control.Monad.Fix import Control.Monad.IO.Class import Control.Monad.STM import Control.Monad.Trans.Class runStateVar :: TVar s -> StateC s m a -> m a runStateVar var (StateC m) = runReader var m runState :: forall s m a sig . Has (Lift IO) sig m => s -> StateC s m a -> m (s, a) runState s m = do var <- sendM (newTVarIO s) a <- runStateVar var m s' <- sendM (readTVarIO var) pure (s', a) evalState :: forall s m a sig . Has (Lift IO) sig m => s -> StateC s m a -> m a evalState s = fmap snd . runState s execState :: forall s m a sig . Has (Lift IO) sig m => s -> StateC s m a -> m s execState s = fmap fst . runState s newtype StateC s m a = StateC { runStateC :: ReaderC (TVar s) m a } deriving (Applicative, Functor, Monad, MonadFail, MonadFix, MonadIO, MonadTrans) instance Has (Lift IO) sig m => Algebra (State s :+: sig) (StateC s m) where alg hdl sig ctx = case sig of L Get -> (<$ ctx) <$> (StateC ask >>= sendM . readTVarIO) L (Put s) -> do var <- StateC ask ctx <$ StateC (sendM (atomically (writeTVar var s))) R other -> StateC (alg (runStateC . hdl) (R other) ctx)
3bc99d7d57f607e61fe46c4fd619d54aa41ef447ca4dcee5b93b44761a2a9e69
psg-mit/probzelus-haskell
AssocList.hs
module Util.AssocList where import Data.List (nubBy, groupBy, partition) import Data.Function (on) import Util.AssocListSet (Set (Set)) import qualified Util.AssocListSet as S newtype Map k v = Map [(k, v)] empty :: Map k v empty = Map [] singleton :: k -> v -> Map k v singleton k v = Map [(k, v)] fromList :: Eq k => [(k, v)] -> Map k v fromList = Map . nubBy ((==) `on` fst) fromListWith :: Eq k => (v -> v -> v) -> [(k, v)] -> Map k v fromListWith f = Map . map (\xs -> (fst (head xs), foldr1 f (map snd xs))) . groupBy ((==) `on` fst) fromSet :: (k -> v) -> Set k -> Map k v fromSet f (Set ks) = Map [ (k, f k) | k <- ks ] insertWith' :: Eq k => (v -> v -> v) -> k -> v -> [(k, v)] -> [(k, v)] insertWith' f k v [] = [(k, v)] insertWith' f k v ((k', v') : xs) = if k == k' then (k, f v v') : xs else (k', v') : insertWith' f k v xs insertWith :: Eq k => (v -> v -> v) -> k -> v -> Map k v -> Map k v insertWith f k v (Map xs) = Map (insertWith' f k v xs) unionWith :: Eq k => (v -> v -> v) -> Map k v -> Map k v -> Map k v unionWith f (Map xs) m = foldr (uncurry (insertWith f)) m xs partition :: (k -> Bool) -> Map k v -> (Map k v, Map k v) partition f (Map xs) = (Map ys, Map zs) where (ys, zs) = Data.List.partition (f . fst) xs toList :: Map k v -> [(k, v)] toList (Map xs) = xs size :: Map k v -> Int size (Map xs) = length xs
null
https://raw.githubusercontent.com/psg-mit/probzelus-haskell/a4b66631451b6156938a9c5420cfff2999ecbbc6/haskell/src/Util/AssocList.hs
haskell
module Util.AssocList where import Data.List (nubBy, groupBy, partition) import Data.Function (on) import Util.AssocListSet (Set (Set)) import qualified Util.AssocListSet as S newtype Map k v = Map [(k, v)] empty :: Map k v empty = Map [] singleton :: k -> v -> Map k v singleton k v = Map [(k, v)] fromList :: Eq k => [(k, v)] -> Map k v fromList = Map . nubBy ((==) `on` fst) fromListWith :: Eq k => (v -> v -> v) -> [(k, v)] -> Map k v fromListWith f = Map . map (\xs -> (fst (head xs), foldr1 f (map snd xs))) . groupBy ((==) `on` fst) fromSet :: (k -> v) -> Set k -> Map k v fromSet f (Set ks) = Map [ (k, f k) | k <- ks ] insertWith' :: Eq k => (v -> v -> v) -> k -> v -> [(k, v)] -> [(k, v)] insertWith' f k v [] = [(k, v)] insertWith' f k v ((k', v') : xs) = if k == k' then (k, f v v') : xs else (k', v') : insertWith' f k v xs insertWith :: Eq k => (v -> v -> v) -> k -> v -> Map k v -> Map k v insertWith f k v (Map xs) = Map (insertWith' f k v xs) unionWith :: Eq k => (v -> v -> v) -> Map k v -> Map k v -> Map k v unionWith f (Map xs) m = foldr (uncurry (insertWith f)) m xs partition :: (k -> Bool) -> Map k v -> (Map k v, Map k v) partition f (Map xs) = (Map ys, Map zs) where (ys, zs) = Data.List.partition (f . fst) xs toList :: Map k v -> [(k, v)] toList (Map xs) = xs size :: Map k v -> Int size (Map xs) = length xs
b3d48c727756207dccd2437ca507cae866b6468435d03f3277d2a16a52141e11
komi1230/kai
launch.lisp
;;;; launch.lisp --- File opener ;;; ;;; This code has been placed in the Public Domain. All warranties ;;; are disclaimed. ;;; ;;; This file handles opening js files with system commands. (in-package :cl-user) (defpackage :kai.plotly.launch (:use :cl) (:import-from :kai.util :check-file-exist) (:export :open-browser)) (in-package :kai.plotly.launch) ;;;; Open browser ;;; ;;; When launching js file in the browser, we use system command ;;; to open browser. (defun open-browser () (let ((path-to-html (check-file-exist "plotly" "kai.html"))) (uiop:run-program #+(or win32 mswindows windows) (format nil "explorer file:///~A" path-to-html) #+(or macos darwin) (format nil "open ~A" path-to-html) #-(or win32 mswindows macos darwin windows) (format nil "xdg-open ~A" path-to-html))))
null
https://raw.githubusercontent.com/komi1230/kai/7481aae3ca11a79c117dd6fbc4e3bf2122a89627/src/plotly/launch.lisp
lisp
launch.lisp --- File opener This code has been placed in the Public Domain. All warranties are disclaimed. This file handles opening js files with system commands. Open browser When launching js file in the browser, we use system command to open browser.
(in-package :cl-user) (defpackage :kai.plotly.launch (:use :cl) (:import-from :kai.util :check-file-exist) (:export :open-browser)) (in-package :kai.plotly.launch) (defun open-browser () (let ((path-to-html (check-file-exist "plotly" "kai.html"))) (uiop:run-program #+(or win32 mswindows windows) (format nil "explorer file:///~A" path-to-html) #+(or macos darwin) (format nil "open ~A" path-to-html) #-(or win32 mswindows macos darwin windows) (format nil "xdg-open ~A" path-to-html))))
bd1e7a4e41b1756682fa3b0697a76890f38eab8fddc730b565974e665d3066a4
inria-parkas/sundialsml
test_sunmatrix_dense.ml
(* ----------------------------------------------------------------- * Programmer(s): Daniel Reynolds @ SMU * David Gardner @ LLNL * ----------------------------------------------------------------- * OCaml port: Timothy Bourke, Inria, Jun 2018. * ----------------------------------------------------------------- * LLNS/SMU Copyright Start * Copyright (c) 2017, Southern Methodist University and * Lawrence Livermore National Security * * This work was performed under the auspices of the U.S. Department * of Energy by Southern Methodist University and Lawrence Livermore * National Laboratory under Contract DE-AC52-07NA27344. * Produced at Southern Methodist University and the Lawrence * Livermore National Laboratory. * * All rights reserved. * For details, see the LICENSE file. * LLNS/SMU Copyright End * ----------------------------------------------------------------- * This is the testing routine to check the SUNMatrix Dense module * implementation.
* ----------------------------------------------------------------- *) module Matrix = Sundials.Matrix let printf = Format.printf let (+=) r x = r := !r + x let compat2_3 = match Sundials.Config.sundials_version with | 2,_,_ -> true | 3,_,_ -> true | _ -> false module Dense_tests = struct type k = Matrix.standard type m = Matrix.Dense.t type nd = Nvector_serial.data type nk = Nvector_serial.kind type t = nk Matrix.dense type nvec = Nvector_serial.t let rewrap = Matrix.wrap_dense let check_matrix a b tol = let ca, cb = Matrix.(unwrap a, unwrap b) in let ((ma, na) as sa), sb = Matrix.Dense.(size ca, size cb) in let adata, bdata = Matrix.Dense.(unwrap ca, unwrap cb) in if sa <> sb then (printf ">>> ERROR: check_matrix: Different data array lengths @\n"; true) else begin let failure = ref 0 in for i = 0 to ma - 1 do for j = 0 to na - 1 do failure += Test_matrix.fneq adata.{j, i} bdata.{j, i} tol done done; !failure > 0 end let check_matrix_entry a v tol = let ca = Matrix.unwrap a in let ma, na = Matrix.Dense.size ca in let adata = Matrix.Dense.unwrap ca in let failure = ref 0 in for i = 0 to ma - 1 do for j = 0 to na - 1 do failure += Test_matrix.fneq adata.{j, i} v tol done done; !failure > 0 let is_square a = let ca = Matrix.unwrap a in let ma, na = Matrix.Dense.size ca in ma = na let check_vector x y tol = let xdata, ydata = Nvector.(unwrap x, unwrap y) in let nx, ny = Sundials.RealArray.(length xdata, length ydata) in if nx <> ny then (printf ">>> ERROR: check_vector: Different data array lengths @\n"; true) else begin let failure = ref 0 in for i = 0 to nx - 1 do failure += Test_matrix.fneq xdata.{i} ydata.{i} tol done; if !failure > 0 then begin printf "Check_vector failures:@\n"; for i = 0 to nx - 1 do if Test_matrix.fneq xdata.{i} ydata.{i} tol <> 0 then printf " xdata[%d] = %g != %g (err = %g)@\n" i xdata.{i} ydata.{i} (abs_float (xdata.{i} -. ydata.{i})) done; true end else false end let nvec_pp = Nvector_serial.pp end module Test = Test_matrix.Test (Dense_tests) (Nvector_serial.Ops) (* ---------------------------------------------------------------------- * Main Matrix Testing Routine * --------------------------------------------------------------------*) let main () = let fails = ref 0 in (* check input and set vector length *) if Array.length Sys.argv < 4 then (printf "ERROR: THREE (3) Input required: matrix rows, matrix cols, print timing @\n"; exit (-1)); let matrows = int_of_string Sys.argv.(1) in if matrows <= 0 then (printf "ERROR: number of rows must be a positive integer @\n"; exit (-1)); let matcols = int_of_string Sys.argv.(2) in if matcols <= 0 then (printf "ERROR: number of cols must be a positive integer @\n"; exit (-1)); let print_timing = int_of_string Sys.argv.(3) in let _ = Test.set_timing (print_timing <> 0) in let square = (matrows = matcols) in printf "@\nDense matrix test: size %d by %d@\n@\n" matrows matcols; (* Create vectors and matrices *) let x = Nvector_serial.make matcols 0.0 and y = Nvector_serial.make matrows 0.0 and a = Matrix.dense ~m:matrows matcols and i = Matrix.dense matcols in let adata = Matrix.(Dense.unwrap (unwrap a)) in for j = 0 to matcols -1 do for i = 0 to matrows - 1 do adata.{j, i} <- float_of_int ((j + 1) * (i + j)) done done; let idata = Matrix.(Dense.unwrap (unwrap i)) in if square then begin for i = 0 to matrows -1 do idata.{i, i} <- 1.0 done end; let xdata = Nvector.unwrap x in for i = 0 to matcols - 1 do xdata.{i} <- 1.0 /. 
float_of_int (i + 1) done; let ydata = Nvector.unwrap y in for i = 0 to matrows - 1 do let m = float_of_int i in let n = m +. float_of_int (matcols - 1) in ydata.{i} <- 0.5 *. (n +. 1. -. m) *. (n +. m) done; SUNMatrix Tests fails += Test.test_sunmatgetid a Matrix.Dense 0; fails += Test.test_sunmatclone a 0; fails += Test.test_sunmatcopy a 0; fails += Test.test_sunmatzero a 0; if compat2_3 then fails += Test.test_sunmatscaleadd a i 0; if square then fails += Test.test_sunmatscaleaddi a i 0; fails += Test.test_sunmatmatvec a x y 0; fails += Test.test_sunmatspace a 0; (* Print result *) if !fails <> 0 then begin printf "FAIL: SUNMatrix module failed %d tests @\n @\n" !fails; printf "@\nA = %a@\n" Matrix.Dense.pp (Matrix.unwrap a); if square then printf "@\nI = %a@\n" Matrix.Dense.pp (Matrix.unwrap i); printf "@\nx = %a@\n" Nvector_serial.pp x; printf "@\ny = %a@\n" Nvector_serial.pp y end else printf "SUCCESS: SUNMatrix module passed all tests @\n @\n" (* Check environment variables for extra arguments. *) let reps = try int_of_string (Unix.getenv "NUM_REPS") with Not_found | Failure _ -> 1 let gc_at_end = try int_of_string (Unix.getenv "GC_AT_END") <> 0 with Not_found | Failure _ -> false let gc_each_rep = try int_of_string (Unix.getenv "GC_EACH_REP") <> 0 with Not_found | Failure _ -> false (* Entry point *) let _ = for _ = 1 to reps do main (); if gc_each_rep then Gc.compact () done; if gc_at_end then Gc.compact ()
null
https://raw.githubusercontent.com/inria-parkas/sundialsml/a1848318cac2e340c32ddfd42671bef07b1390db/examples/matrix/dense/test_sunmatrix_dense.ml
ocaml
---------------------------------------------------------------------- * Main Matrix Testing Routine * -------------------------------------------------------------------- check input and set vector length Create vectors and matrices Print result Check environment variables for extra arguments. Entry point
* ----------------------------------------------------------------- * Programmer(s): Daniel Reynolds @ SMU * David Gardner @ LLNL * ----------------------------------------------------------------- * OCaml port: Timothy Bourke, Inria, Jun 2018. * ----------------------------------------------------------------- * LLNS/SMU Copyright Start * Copyright (c) 2017, Southern Methodist University and * Lawrence Livermore National Security * * This work was performed under the auspices of the U.S. Department * of Energy by Southern Methodist University and Lawrence Livermore * National Laboratory under Contract DE-AC52-07NA27344. * Produced at Southern Methodist University and the Lawrence * Livermore National Laboratory. * * All rights reserved. * For details, see the LICENSE file. * LLNS/SMU Copyright End * ----------------------------------------------------------------- * This is the testing routine to check the SUNMatrix Dense module * implementation.
* ----------------------------------------------------------------- *) module Matrix = Sundials.Matrix let printf = Format.printf let (+=) r x = r := !r + x let compat2_3 = match Sundials.Config.sundials_version with | 2,_,_ -> true | 3,_,_ -> true | _ -> false module Dense_tests = struct type k = Matrix.standard type m = Matrix.Dense.t type nd = Nvector_serial.data type nk = Nvector_serial.kind type t = nk Matrix.dense type nvec = Nvector_serial.t let rewrap = Matrix.wrap_dense let check_matrix a b tol = let ca, cb = Matrix.(unwrap a, unwrap b) in let ((ma, na) as sa), sb = Matrix.Dense.(size ca, size cb) in let adata, bdata = Matrix.Dense.(unwrap ca, unwrap cb) in if sa <> sb then (printf ">>> ERROR: check_matrix: Different data array lengths @\n"; true) else begin let failure = ref 0 in for i = 0 to ma - 1 do for j = 0 to na - 1 do failure += Test_matrix.fneq adata.{j, i} bdata.{j, i} tol done done; !failure > 0 end let check_matrix_entry a v tol = let ca = Matrix.unwrap a in let ma, na = Matrix.Dense.size ca in let adata = Matrix.Dense.unwrap ca in let failure = ref 0 in for i = 0 to ma - 1 do for j = 0 to na - 1 do failure += Test_matrix.fneq adata.{j, i} v tol done done; !failure > 0 let is_square a = let ca = Matrix.unwrap a in let ma, na = Matrix.Dense.size ca in ma = na let check_vector x y tol = let xdata, ydata = Nvector.(unwrap x, unwrap y) in let nx, ny = Sundials.RealArray.(length xdata, length ydata) in if nx <> ny then (printf ">>> ERROR: check_vector: Different data array lengths @\n"; true) else begin let failure = ref 0 in for i = 0 to nx - 1 do failure += Test_matrix.fneq xdata.{i} ydata.{i} tol done; if !failure > 0 then begin printf "Check_vector failures:@\n"; for i = 0 to nx - 1 do if Test_matrix.fneq xdata.{i} ydata.{i} tol <> 0 then printf " xdata[%d] = %g != %g (err = %g)@\n" i xdata.{i} ydata.{i} (abs_float (xdata.{i} -. ydata.{i})) done; true end else false end let nvec_pp = Nvector_serial.pp end module Test = Test_matrix.Test (Dense_tests) (Nvector_serial.Ops) let main () = let fails = ref 0 in if Array.length Sys.argv < 4 then (printf "ERROR: THREE (3) Input required: matrix rows, matrix cols, print timing @\n"; exit (-1)); let matrows = int_of_string Sys.argv.(1) in if matrows <= 0 then (printf "ERROR: number of rows must be a positive integer @\n"; exit (-1)); let matcols = int_of_string Sys.argv.(2) in if matcols <= 0 then (printf "ERROR: number of cols must be a positive integer @\n"; exit (-1)); let print_timing = int_of_string Sys.argv.(3) in let _ = Test.set_timing (print_timing <> 0) in let square = (matrows = matcols) in printf "@\nDense matrix test: size %d by %d@\n@\n" matrows matcols; let x = Nvector_serial.make matcols 0.0 and y = Nvector_serial.make matrows 0.0 and a = Matrix.dense ~m:matrows matcols and i = Matrix.dense matcols in let adata = Matrix.(Dense.unwrap (unwrap a)) in for j = 0 to matcols -1 do for i = 0 to matrows - 1 do adata.{j, i} <- float_of_int ((j + 1) * (i + j)) done done; let idata = Matrix.(Dense.unwrap (unwrap i)) in if square then begin for i = 0 to matrows -1 do idata.{i, i} <- 1.0 done end; let xdata = Nvector.unwrap x in for i = 0 to matcols - 1 do xdata.{i} <- 1.0 /. float_of_int (i + 1) done; let ydata = Nvector.unwrap y in for i = 0 to matrows - 1 do let m = float_of_int i in let n = m +. float_of_int (matcols - 1) in ydata.{i} <- 0.5 *. (n +. 1. -. m) *. (n +. 
m) done; SUNMatrix Tests fails += Test.test_sunmatgetid a Matrix.Dense 0; fails += Test.test_sunmatclone a 0; fails += Test.test_sunmatcopy a 0; fails += Test.test_sunmatzero a 0; if compat2_3 then fails += Test.test_sunmatscaleadd a i 0; if square then fails += Test.test_sunmatscaleaddi a i 0; fails += Test.test_sunmatmatvec a x y 0; fails += Test.test_sunmatspace a 0; if !fails <> 0 then begin printf "FAIL: SUNMatrix module failed %d tests @\n @\n" !fails; printf "@\nA = %a@\n" Matrix.Dense.pp (Matrix.unwrap a); if square then printf "@\nI = %a@\n" Matrix.Dense.pp (Matrix.unwrap i); printf "@\nx = %a@\n" Nvector_serial.pp x; printf "@\ny = %a@\n" Nvector_serial.pp y end else printf "SUCCESS: SUNMatrix module passed all tests @\n @\n" let reps = try int_of_string (Unix.getenv "NUM_REPS") with Not_found | Failure _ -> 1 let gc_at_end = try int_of_string (Unix.getenv "GC_AT_END") <> 0 with Not_found | Failure _ -> false let gc_each_rep = try int_of_string (Unix.getenv "GC_EACH_REP") <> 0 with Not_found | Failure _ -> false let _ = for _ = 1 to reps do main (); if gc_each_rep then Gc.compact () done; if gc_at_end then Gc.compact ()
09a649027a8069e0f9f2966dc3059d14f61932b13b19797d6d9b300f587e91cc
pirapira/coq2rust
scheme.ml
(************************************************************************) v * The Coq Proof Assistant / The Coq Development Team < O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012 \VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * (* // * This file is distributed under the terms of the *) (* * GNU Lesser General Public License Version 2.1 *) (************************************************************************) (*s Production of Scheme syntax. *) open Pp open Errors open Util open Names open Miniml open Mlutil open Table open Common (*s Scheme renaming issues. *) let keywords = List.fold_right (fun s -> Id.Set.add (Id.of_string s)) [ "define"; "let"; "lambda"; "lambdas"; "match"; "apply"; "car"; "cdr"; "error"; "delay"; "force"; "_"; "__"] Id.Set.empty let pp_comment s = str";; "++h 0 s++fnl () let pp_header_comment = function | None -> mt () | Some com -> pp_comment com ++ fnl () ++ fnl () let preamble _ comment _ usf = pp_header_comment comment ++ str ";; This extracted scheme code relies on some additional macros\n" ++ str ";; available at -paris-diderot.fr/~letouzey/scheme\n" ++ str "(load \"macros_extr.scm\")\n\n" ++ (if usf.mldummy then str "(define __ (lambda (_) __))\n\n" else mt ()) let pr_id id = let s = Id.to_string id in for i = 0 to String.length s - 1 do if s.[i] == '\'' then s.[i] <- '~' done; str s let paren = pp_par true let pp_abst st = function | [] -> assert false | [id] -> paren (str "lambda " ++ paren (pr_id id) ++ spc () ++ st) | l -> paren (str "lambdas " ++ paren (prlist_with_sep spc pr_id l) ++ spc () ++ st) let pp_apply st _ = function | [] -> st | [a] -> hov 2 (paren (st ++ spc () ++ a)) | args -> hov 2 (paren (str "@ " ++ st ++ (prlist_strict (fun x -> spc () ++ x) args))) (*s The pretty-printer for Scheme syntax *) let pp_global k r = str (Common.pp_global k r) (*s Pretty-printing of expressions. *) let rec pp_expr env args = let apply st = pp_apply st true args in function | MLrel n -> let id = get_db_name n env in apply (pr_id id) | MLapp (f,args') -> let stl = List.map (pp_expr env []) args' in pp_expr env (stl @ args) f | MLlam _ as a -> let fl,a' = collect_lams a in let fl,env' = push_vars (List.map id_of_mlid fl) env in apply (pp_abst (pp_expr env' [] a') (List.rev fl)) | MLletin (id,a1,a2) -> let i,env' = push_vars [id_of_mlid id] env in apply (hv 0 (hov 2 (paren (str "let " ++ paren (paren (pr_id (List.hd i) ++ spc () ++ pp_expr env [] a1)) ++ spc () ++ hov 0 (pp_expr env' [] a2))))) | MLglob r -> apply (pp_global Term r) | MLcons (_,r,args') -> assert (List.is_empty args); let st = str "`" ++ paren (pp_global Cons r ++ (if List.is_empty args' then mt () else spc ()) ++ prlist_with_sep spc (pp_cons_args env) args') in if is_coinductive r then paren (str "delay " ++ st) else st | MLtuple _ -> error "Cannot handle tuples in Scheme yet." | MLcase (_,_,pv) when not (is_regular_match pv) -> error "Cannot handle general patterns in Scheme yet." 
| MLcase (_,t,pv) when is_custom_match pv -> let mkfun (ids,_,e) = if not (List.is_empty ids) then named_lams (List.rev ids) e else dummy_lams (ast_lift 1 e) 1 in apply (paren (hov 2 (str (find_custom_match pv) ++ fnl () ++ prvect (fun tr -> pp_expr env [] (mkfun tr) ++ fnl ()) pv ++ pp_expr env [] t))) | MLcase (typ,t, pv) -> let e = if not (is_coinductive_type typ) then pp_expr env [] t else paren (str "force" ++ spc () ++ pp_expr env [] t) in apply (v 3 (paren (str "match " ++ e ++ fnl () ++ pp_pat env pv))) | MLfix (i,ids,defs) -> let ids',env' = push_vars (List.rev (Array.to_list ids)) env in pp_fix env' i (Array.of_list (List.rev ids'),defs) args | MLexn s -> An [ MLexn ] may be applied , but I do n't really care . paren (str "error" ++ spc () ++ qs s) | MLdummy -> str "__" (* An [MLdummy] may be applied, but I don't really care. *) | MLmagic a -> pp_expr env args a | MLaxiom -> paren (str "error \"AXIOM TO BE REALIZED\"") and pp_cons_args env = function | MLcons (_,r,args) when is_coinductive r -> paren (pp_global Cons r ++ (if List.is_empty args then mt () else spc ()) ++ prlist_with_sep spc (pp_cons_args env) args) | e -> str "," ++ pp_expr env [] e and pp_one_pat env (ids,p,t) = let r = match p with | Pusual r -> r | Pcons (r,l) -> r (* cf. the check [is_regular_match] above *) | _ -> assert false in let ids,env' = push_vars (List.rev_map id_of_mlid ids) env in let args = if List.is_empty ids then mt () else (str " " ++ prlist_with_sep spc pr_id (List.rev ids)) in (pp_global Cons r ++ args), (pp_expr env' [] t) and pp_pat env pv = prvect_with_sep fnl (fun x -> let s1,s2 = pp_one_pat env x in hov 2 (str "((" ++ s1 ++ str ")" ++ spc () ++ s2 ++ str ")")) pv (*s names of the functions ([ids]) are already pushed in [env], and passed here just for convenience. *) and pp_fix env j (ids,bl) args = paren (str "letrec " ++ (v 0 (paren (prvect_with_sep fnl (fun (fi,ti) -> paren ((pr_id fi) ++ spc () ++ (pp_expr env [] ti))) (Array.map2 (fun id b -> (id,b)) ids bl)) ++ fnl () ++ hov 2 (pp_apply (pr_id (ids.(j))) true args)))) (*s Pretty-printing of a declaration. 
*) let pp_decl = function | Dind _ -> mt () | Dtype _ -> mt () | Dfix (rv, defs,_) -> let names = Array.map (fun r -> if is_inline_custom r then mt () else pp_global Term r) rv in prvecti (fun i r -> let void = is_inline_custom r || (not (is_custom r) && match defs.(i) with MLexn "UNUSED" -> true | _ -> false) in if void then mt () else hov 2 (paren (str "define " ++ names.(i) ++ spc () ++ (if is_custom r then str (find_custom r) else pp_expr (empty_env ()) [] defs.(i))) ++ fnl ()) ++ fnl ()) rv | Dterm (r, a, _) -> if is_inline_custom r then mt () else hov 2 (paren (str "define " ++ pp_global Term r ++ spc () ++ (if is_custom r then str (find_custom r) else pp_expr (empty_env ()) [] a))) ++ fnl2 () let rec pp_structure_elem = function | (l,SEdecl d) -> pp_decl d | (l,SEmodule m) -> pp_module_expr m.ml_mod_expr | (l,SEmodtype m) -> mt () (* for the moment we simply discard module type *) and pp_module_expr = function | MEstruct (mp,sel) -> prlist_strict pp_structure_elem sel | MEfunctor _ -> mt () (* for the moment we simply discard unapplied functors *) | MEident _ | MEapply _ -> assert false (* should be expansed in extract_env *) let pp_struct = let pp_sel (mp,sel) = push_visible mp []; let p = prlist_strict pp_structure_elem sel in pop_visible (); p in prlist_strict pp_sel let scheme_descr = { keywords = keywords; file_suffix = ".scm"; preamble = preamble; pp_struct = pp_struct; sig_suffix = None; sig_preamble = (fun _ _ _ _ -> mt ()); pp_sig = (fun _ -> mt ()); pp_decl = pp_decl; }
null
https://raw.githubusercontent.com/pirapira/coq2rust/22e8aaefc723bfb324ca2001b2b8e51fcc923543/plugins/extraction/scheme.ml
ocaml
********************************************************************** // * This file is distributed under the terms of the * GNU Lesser General Public License Version 2.1 ********************************************************************** s Production of Scheme syntax. s Scheme renaming issues. s The pretty-printer for Scheme syntax s Pretty-printing of expressions. An [MLdummy] may be applied, but I don't really care. cf. the check [is_regular_match] above s names of the functions ([ids]) are already pushed in [env], and passed here just for convenience. s Pretty-printing of a declaration. for the moment we simply discard module type for the moment we simply discard unapplied functors should be expansed in extract_env
v * The Coq Proof Assistant / The Coq Development Team < O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012 \VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * open Pp open Errors open Util open Names open Miniml open Mlutil open Table open Common let keywords = List.fold_right (fun s -> Id.Set.add (Id.of_string s)) [ "define"; "let"; "lambda"; "lambdas"; "match"; "apply"; "car"; "cdr"; "error"; "delay"; "force"; "_"; "__"] Id.Set.empty let pp_comment s = str";; "++h 0 s++fnl () let pp_header_comment = function | None -> mt () | Some com -> pp_comment com ++ fnl () ++ fnl () let preamble _ comment _ usf = pp_header_comment comment ++ str ";; This extracted scheme code relies on some additional macros\n" ++ str ";; available at -paris-diderot.fr/~letouzey/scheme\n" ++ str "(load \"macros_extr.scm\")\n\n" ++ (if usf.mldummy then str "(define __ (lambda (_) __))\n\n" else mt ()) let pr_id id = let s = Id.to_string id in for i = 0 to String.length s - 1 do if s.[i] == '\'' then s.[i] <- '~' done; str s let paren = pp_par true let pp_abst st = function | [] -> assert false | [id] -> paren (str "lambda " ++ paren (pr_id id) ++ spc () ++ st) | l -> paren (str "lambdas " ++ paren (prlist_with_sep spc pr_id l) ++ spc () ++ st) let pp_apply st _ = function | [] -> st | [a] -> hov 2 (paren (st ++ spc () ++ a)) | args -> hov 2 (paren (str "@ " ++ st ++ (prlist_strict (fun x -> spc () ++ x) args))) let pp_global k r = str (Common.pp_global k r) let rec pp_expr env args = let apply st = pp_apply st true args in function | MLrel n -> let id = get_db_name n env in apply (pr_id id) | MLapp (f,args') -> let stl = List.map (pp_expr env []) args' in pp_expr env (stl @ args) f | MLlam _ as a -> let fl,a' = collect_lams a in let fl,env' = push_vars (List.map id_of_mlid fl) env in apply (pp_abst (pp_expr env' [] a') (List.rev fl)) | MLletin (id,a1,a2) -> let i,env' = push_vars [id_of_mlid id] env in apply (hv 0 (hov 2 (paren (str "let " ++ paren (paren (pr_id (List.hd i) ++ spc () ++ pp_expr env [] a1)) ++ spc () ++ hov 0 (pp_expr env' [] a2))))) | MLglob r -> apply (pp_global Term r) | MLcons (_,r,args') -> assert (List.is_empty args); let st = str "`" ++ paren (pp_global Cons r ++ (if List.is_empty args' then mt () else spc ()) ++ prlist_with_sep spc (pp_cons_args env) args') in if is_coinductive r then paren (str "delay " ++ st) else st | MLtuple _ -> error "Cannot handle tuples in Scheme yet." | MLcase (_,_,pv) when not (is_regular_match pv) -> error "Cannot handle general patterns in Scheme yet." | MLcase (_,t,pv) when is_custom_match pv -> let mkfun (ids,_,e) = if not (List.is_empty ids) then named_lams (List.rev ids) e else dummy_lams (ast_lift 1 e) 1 in apply (paren (hov 2 (str (find_custom_match pv) ++ fnl () ++ prvect (fun tr -> pp_expr env [] (mkfun tr) ++ fnl ()) pv ++ pp_expr env [] t))) | MLcase (typ,t, pv) -> let e = if not (is_coinductive_type typ) then pp_expr env [] t else paren (str "force" ++ spc () ++ pp_expr env [] t) in apply (v 3 (paren (str "match " ++ e ++ fnl () ++ pp_pat env pv))) | MLfix (i,ids,defs) -> let ids',env' = push_vars (List.rev (Array.to_list ids)) env in pp_fix env' i (Array.of_list (List.rev ids'),defs) args | MLexn s -> An [ MLexn ] may be applied , but I do n't really care . 
paren (str "error" ++ spc () ++ qs s) | MLdummy -> | MLmagic a -> pp_expr env args a | MLaxiom -> paren (str "error \"AXIOM TO BE REALIZED\"") and pp_cons_args env = function | MLcons (_,r,args) when is_coinductive r -> paren (pp_global Cons r ++ (if List.is_empty args then mt () else spc ()) ++ prlist_with_sep spc (pp_cons_args env) args) | e -> str "," ++ pp_expr env [] e and pp_one_pat env (ids,p,t) = let r = match p with | Pusual r -> r | _ -> assert false in let ids,env' = push_vars (List.rev_map id_of_mlid ids) env in let args = if List.is_empty ids then mt () else (str " " ++ prlist_with_sep spc pr_id (List.rev ids)) in (pp_global Cons r ++ args), (pp_expr env' [] t) and pp_pat env pv = prvect_with_sep fnl (fun x -> let s1,s2 = pp_one_pat env x in hov 2 (str "((" ++ s1 ++ str ")" ++ spc () ++ s2 ++ str ")")) pv and pp_fix env j (ids,bl) args = paren (str "letrec " ++ (v 0 (paren (prvect_with_sep fnl (fun (fi,ti) -> paren ((pr_id fi) ++ spc () ++ (pp_expr env [] ti))) (Array.map2 (fun id b -> (id,b)) ids bl)) ++ fnl () ++ hov 2 (pp_apply (pr_id (ids.(j))) true args)))) let pp_decl = function | Dind _ -> mt () | Dtype _ -> mt () | Dfix (rv, defs,_) -> let names = Array.map (fun r -> if is_inline_custom r then mt () else pp_global Term r) rv in prvecti (fun i r -> let void = is_inline_custom r || (not (is_custom r) && match defs.(i) with MLexn "UNUSED" -> true | _ -> false) in if void then mt () else hov 2 (paren (str "define " ++ names.(i) ++ spc () ++ (if is_custom r then str (find_custom r) else pp_expr (empty_env ()) [] defs.(i))) ++ fnl ()) ++ fnl ()) rv | Dterm (r, a, _) -> if is_inline_custom r then mt () else hov 2 (paren (str "define " ++ pp_global Term r ++ spc () ++ (if is_custom r then str (find_custom r) else pp_expr (empty_env ()) [] a))) ++ fnl2 () let rec pp_structure_elem = function | (l,SEdecl d) -> pp_decl d | (l,SEmodule m) -> pp_module_expr m.ml_mod_expr | (l,SEmodtype m) -> mt () and pp_module_expr = function | MEstruct (mp,sel) -> prlist_strict pp_structure_elem sel | MEfunctor _ -> mt () | MEident _ | MEapply _ -> assert false let pp_struct = let pp_sel (mp,sel) = push_visible mp []; let p = prlist_strict pp_structure_elem sel in pop_visible (); p in prlist_strict pp_sel let scheme_descr = { keywords = keywords; file_suffix = ".scm"; preamble = preamble; pp_struct = pp_struct; sig_suffix = None; sig_preamble = (fun _ _ _ _ -> mt ()); pp_sig = (fun _ -> mt ()); pp_decl = pp_decl; }
371f0fd1916acfe6c1b774affcac32c9d937ac9d2b0fef5cfcee6cdbcefbcd6f
fetburner/Coq2SML
pfedit.mli
(************************************************************************) v * The Coq Proof Assistant / The Coq Development Team < O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014 \VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * (* // * This file is distributed under the terms of the *) (* * GNU Lesser General Public License Version 2.1 *) (************************************************************************) open Util open Pp open Names open Term open Sign open Environ open Decl_kinds open Tacmach open Tacexpr * Several proofs can be opened simultaneously but at most one is focused at some time . The following functions work by side - effect on current set of open proofs . In this module , ` ` proofs '' means an open proof ( something started by vernacular command [ Goal ] , [ Lemma ] or [ Theorem ] ) , and ` ` goal '' means a subgoal of the current focused proof focused at some time. The following functions work by side-effect on current set of open proofs. In this module, ``proofs'' means an open proof (something started by vernacular command [Goal], [Lemma] or [Theorem]), and ``goal'' means a subgoal of the current focused proof *) * { 6 ... } * [ refining ( ) ] tells if there is some proof in progress , even if a not focused one focused one *) val refining : unit -> bool (** [check_no_pending_proofs ()] fails if there is still some proof in progress *) val check_no_pending_proofs : unit -> unit * { 6 ... } (** [delete_proof name] deletes proof of name [name] or fails if no proof has this name *) val delete_proof : identifier located -> unit (** [delete_current_proof ()] deletes current focused proof or fails if no proof is focused *) val delete_current_proof : unit -> unit (** [delete_all_proofs ()] deletes all open proofs if any *) val delete_all_proofs : unit -> unit * { 6 ... } (** [undo n] undoes the effect of the last [n] tactics applied to the current proof; it fails if no proof is focused or if the ``undo'' stack is exhausted *) val undo : int -> unit (** [undo_todepth n] resets the proof to its nth step (does [undo (d-n)] where d is the depth of the undo stack). *) val undo_todepth : int -> unit (** Returns the depth of the current focused proof stack, this is used to put informations in coq prompt (in emacs mode). *) val current_proof_depth: unit -> int * { 6 ... } * [ start_proof s str env t hook tac ] starts a proof of name [ s ] and conclusion [ t ] ; [ hook ] is optionally a function to be applied at proof end ( e.g. to declare the built constructions as a coercion or a setoid morphism ) ; init_tac is possibly a tactic to systematically apply at initialization time ( e.g. to start the proof of mutually dependent theorems ) conclusion [t]; [hook] is optionally a function to be applied at proof end (e.g. to declare the built constructions as a coercion or a setoid morphism); init_tac is possibly a tactic to systematically apply at initialization time (e.g. to start the proof of mutually dependent theorems) *) type lemma_possible_guards = Proof_global.lemma_possible_guards val start_proof : identifier -> goal_kind -> named_context_val -> constr -> ?init_tac:tactic -> ?compute_guard:lemma_possible_guards -> declaration_hook -> unit (** [restart_proof ()] restarts the current focused proof from the beginning or fails if no proof is focused *) val restart_proof : unit -> unit * { 6 ... 
} (** [cook_proof opacity] turns the current proof (assumed completed) into a constant with its name, kind and possible hook (see [start_proof]); it fails if there is no current proof of if it is not completed; it also tells if the guardness condition has to be inferred. *) val cook_proof : (Proof.proof -> unit) -> identifier * (Entries.definition_entry * lemma_possible_guards * goal_kind * declaration_hook) (** To export completed proofs to xml *) val set_xml_cook_proof : (goal_kind * Proof.proof -> unit) -> unit * { 6 ... } * [ get_Proof.proof ( ) ] returns the current focused pending proof or raises [ UserError " no focused proof " ] raises [UserError "no focused proof"] *) val get_pftreestate : unit -> Proof.proof * [ get_goal_context n ] returns the context of the [ n]th subgoal of the current focused proof or raises a [ UserError ] if there is no focused proof or if there is no more subgoals the current focused proof or raises a [UserError] if there is no focused proof or if there is no more subgoals *) val get_goal_context : int -> Evd.evar_map * env * [ get_current_goal_context ( ) ] works as [ get_goal_context 1 ] val get_current_goal_context : unit -> Evd.evar_map * env (** [current_proof_statement] *) val current_proof_statement : unit -> identifier * goal_kind * types * declaration_hook * { 6 ... } (** [get_current_proof_name ()] return the name of the current focused proof or failed if no proof is focused *) val get_current_proof_name : unit -> identifier * [ get_all_proof_names ( ) ] returns the list of all pending proof names . The first name is the current proof , the other names may come in any order . The first name is the current proof, the other names may come in any order. *) val get_all_proof_names : unit -> identifier list * { 6 ... } * [ tac ] applies tactic [ tac ] to all subgoal generate by [ solve_nth ] by [solve_nth] *) val set_end_tac : tactic -> unit * { 6 ... } (** [set_used_variables l] declares that section variables [l] will be used in the proof *) val set_used_variables : identifier list -> unit val get_used_variables : unit -> Sign.section_context option * { 6 ... 
} * [ solve_nth n tac ] applies tactic [ tac ] to the [ n]th subgoal of the current focused proof or raises a UserError if no proof is focused or if there is no [ n]th subgoal current focused proof or raises a UserError if no proof is focused or if there is no [n]th subgoal *) val solve_nth : ?with_end_tac:bool -> int -> tactic -> unit * [ by tac ] applies tactic [ tac ] to the 1st subgoal of the current focused proof or raises a UserError if there is no focused proof or if there is no more subgoals focused proof or raises a UserError if there is no focused proof or if there is no more subgoals *) val by : tactic -> unit * [ instantiate_nth_evar_com n c ] instantiate the [ n]th undefined existential variable of the current focused proof by [ c ] or raises a UserError if no proof is focused or if there is no such [ n]th existential variable existential variable of the current focused proof by [c] or raises a UserError if no proof is focused or if there is no such [n]th existential variable *) val instantiate_nth_evar_com : int -> Topconstr.constr_expr -> unit (** [build_by_tactic typ tac] returns a term of type [typ] by calling [tac] *) val build_constant_by_tactic : identifier -> named_context_val -> types -> tactic -> Entries.definition_entry val build_by_tactic : env -> types -> tactic -> constr (** Declare the default tactic to fill implicit arguments *) val declare_implicit_tactic : tactic -> unit (* Raise Exit if cannot solve *) val solve_by_implicit_tactic : env -> Evd.evar_map -> existential -> constr
null
https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/proofs/pfedit.mli
ocaml
********************************************************************** // * This file is distributed under the terms of the * GNU Lesser General Public License Version 2.1 ********************************************************************** * [check_no_pending_proofs ()] fails if there is still some proof in progress * [delete_proof name] deletes proof of name [name] or fails if no proof has this name * [delete_current_proof ()] deletes current focused proof or fails if no proof is focused * [delete_all_proofs ()] deletes all open proofs if any * [undo n] undoes the effect of the last [n] tactics applied to the current proof; it fails if no proof is focused or if the ``undo'' stack is exhausted * [undo_todepth n] resets the proof to its nth step (does [undo (d-n)] where d is the depth of the undo stack). * Returns the depth of the current focused proof stack, this is used to put informations in coq prompt (in emacs mode). * [restart_proof ()] restarts the current focused proof from the beginning or fails if no proof is focused * [cook_proof opacity] turns the current proof (assumed completed) into a constant with its name, kind and possible hook (see [start_proof]); it fails if there is no current proof of if it is not completed; it also tells if the guardness condition has to be inferred. * To export completed proofs to xml * [current_proof_statement] * [get_current_proof_name ()] return the name of the current focused proof or failed if no proof is focused * [set_used_variables l] declares that section variables [l] will be used in the proof * [build_by_tactic typ tac] returns a term of type [typ] by calling [tac] * Declare the default tactic to fill implicit arguments Raise Exit if cannot solve
v * The Coq Proof Assistant / The Coq Development Team < O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014 \VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * open Util open Pp open Names open Term open Sign open Environ open Decl_kinds open Tacmach open Tacexpr * Several proofs can be opened simultaneously but at most one is focused at some time . The following functions work by side - effect on current set of open proofs . In this module , ` ` proofs '' means an open proof ( something started by vernacular command [ Goal ] , [ Lemma ] or [ Theorem ] ) , and ` ` goal '' means a subgoal of the current focused proof focused at some time. The following functions work by side-effect on current set of open proofs. In this module, ``proofs'' means an open proof (something started by vernacular command [Goal], [Lemma] or [Theorem]), and ``goal'' means a subgoal of the current focused proof *) * { 6 ... } * [ refining ( ) ] tells if there is some proof in progress , even if a not focused one focused one *) val refining : unit -> bool val check_no_pending_proofs : unit -> unit * { 6 ... } val delete_proof : identifier located -> unit val delete_current_proof : unit -> unit val delete_all_proofs : unit -> unit * { 6 ... } val undo : int -> unit val undo_todepth : int -> unit val current_proof_depth: unit -> int * { 6 ... } * [ start_proof s str env t hook tac ] starts a proof of name [ s ] and conclusion [ t ] ; [ hook ] is optionally a function to be applied at proof end ( e.g. to declare the built constructions as a coercion or a setoid morphism ) ; init_tac is possibly a tactic to systematically apply at initialization time ( e.g. to start the proof of mutually dependent theorems ) conclusion [t]; [hook] is optionally a function to be applied at proof end (e.g. to declare the built constructions as a coercion or a setoid morphism); init_tac is possibly a tactic to systematically apply at initialization time (e.g. to start the proof of mutually dependent theorems) *) type lemma_possible_guards = Proof_global.lemma_possible_guards val start_proof : identifier -> goal_kind -> named_context_val -> constr -> ?init_tac:tactic -> ?compute_guard:lemma_possible_guards -> declaration_hook -> unit val restart_proof : unit -> unit * { 6 ... } val cook_proof : (Proof.proof -> unit) -> identifier * (Entries.definition_entry * lemma_possible_guards * goal_kind * declaration_hook) val set_xml_cook_proof : (goal_kind * Proof.proof -> unit) -> unit * { 6 ... } * [ get_Proof.proof ( ) ] returns the current focused pending proof or raises [ UserError " no focused proof " ] raises [UserError "no focused proof"] *) val get_pftreestate : unit -> Proof.proof * [ get_goal_context n ] returns the context of the [ n]th subgoal of the current focused proof or raises a [ UserError ] if there is no focused proof or if there is no more subgoals the current focused proof or raises a [UserError] if there is no focused proof or if there is no more subgoals *) val get_goal_context : int -> Evd.evar_map * env * [ get_current_goal_context ( ) ] works as [ get_goal_context 1 ] val get_current_goal_context : unit -> Evd.evar_map * env val current_proof_statement : unit -> identifier * goal_kind * types * declaration_hook * { 6 ... } val get_current_proof_name : unit -> identifier * [ get_all_proof_names ( ) ] returns the list of all pending proof names . The first name is the current proof , the other names may come in any order . 
The first name is the current proof, the other names may come in any order. *) val get_all_proof_names : unit -> identifier list * { 6 ... } * [ tac ] applies tactic [ tac ] to all subgoal generate by [ solve_nth ] by [solve_nth] *) val set_end_tac : tactic -> unit * { 6 ... } val set_used_variables : identifier list -> unit val get_used_variables : unit -> Sign.section_context option * { 6 ... } * [ solve_nth n tac ] applies tactic [ tac ] to the [ n]th subgoal of the current focused proof or raises a UserError if no proof is focused or if there is no [ n]th subgoal current focused proof or raises a UserError if no proof is focused or if there is no [n]th subgoal *) val solve_nth : ?with_end_tac:bool -> int -> tactic -> unit * [ by tac ] applies tactic [ tac ] to the 1st subgoal of the current focused proof or raises a UserError if there is no focused proof or if there is no more subgoals focused proof or raises a UserError if there is no focused proof or if there is no more subgoals *) val by : tactic -> unit * [ instantiate_nth_evar_com n c ] instantiate the [ n]th undefined existential variable of the current focused proof by [ c ] or raises a UserError if no proof is focused or if there is no such [ n]th existential variable existential variable of the current focused proof by [c] or raises a UserError if no proof is focused or if there is no such [n]th existential variable *) val instantiate_nth_evar_com : int -> Topconstr.constr_expr -> unit val build_constant_by_tactic : identifier -> named_context_val -> types -> tactic -> Entries.definition_entry val build_by_tactic : env -> types -> tactic -> constr val declare_implicit_tactic : tactic -> unit val solve_by_implicit_tactic : env -> Evd.evar_map -> existential -> constr
9fa15f23215e9ad124570096b67402597a41679c360079dbcc6076ba9685db38
dmitryvk/sbcl-win32-threads
support.lisp
;;;; This software is part of the SBCL system. See the README file for ;;;; more information. ;;;; ;;;; This software is derived from the CMU CL system, which was ;;;; written at Carnegie Mellon University and released into the ;;;; public domain. The software is in the public domain and is ;;;; provided with absolutely no warranty. See the COPYING and CREDITS ;;;; files for more information. (in-package "SB!VM") ;;; The :full-call assembly-routines must use the same full-call ;;; unknown-values return convention as a normal call, as some ;;; of the routines will tail-chain to a static-function. The ;;; routines themselves, however, take all of their arguments ;;; in registers (this will typically be one or two arguments, ;;; and is one of the lower bounds on the number of argument- ;;; passing registers), and thus don't need a call frame, which ;;; simplifies things for the normal call/return case. When it ;;; is neccessary for one of the assembly-functions to call a ;;; static-function it will construct the required call frame. ;;; Also, none of the assembly-routines return other than one ;;; value, which again simplifies the return path. ;;; ;;; -- AB, 2006/Feb/05. (!def-vm-support-routine generate-call-sequence (name style vop) (ecase style ((:raw :none) (values `((inst call (make-fixup ',name :assembly-routine))) nil)) (:full-call (values `((note-this-location ,vop :call-site) (inst call (make-fixup ',name :assembly-routine)) (note-this-location ,vop :single-value-return) (cond ((member :cmov *backend-subfeatures*) (inst cmov :c esp-tn ebx-tn)) (t (let ((single-value (gen-label))) (inst jmp :nc single-value) (move esp-tn ebx-tn) (emit-label single-value))))) '((:save-p :compute-only)))))) (!def-vm-support-routine generate-return-sequence (style) (ecase style (:raw `(inst ret)) (:full-call `((inst clc) (inst ret))) (:none)))
null
https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/assembly/x86/support.lisp
lisp
more information. public domain. The software is in the public domain and is provided with absolutely no warranty. See the COPYING and CREDITS files for more information. The :full-call assembly-routines must use the same full-call unknown-values return convention as a normal call, as some of the routines will tail-chain to a static-function. The routines themselves, however, take all of their arguments and is one of the lower bounds on the number of argument- passing registers), and thus don't need a call frame, which simplifies things for the normal call/return case. When it static-function it will construct the required call frame. value, which again simplifies the return path.
This software is part of the SBCL system . See the README file for This software is derived from the CMU CL system , which was written at Carnegie Mellon University and released into the (in-package "SB!VM") in registers ( this will typically be one or two arguments , is neccessary for one of the assembly - functions to call a Also , none of the assembly - routines return other than one -- AB , 2006 / Feb/05 . (!def-vm-support-routine generate-call-sequence (name style vop) (ecase style ((:raw :none) (values `((inst call (make-fixup ',name :assembly-routine))) nil)) (:full-call (values `((note-this-location ,vop :call-site) (inst call (make-fixup ',name :assembly-routine)) (note-this-location ,vop :single-value-return) (cond ((member :cmov *backend-subfeatures*) (inst cmov :c esp-tn ebx-tn)) (t (let ((single-value (gen-label))) (inst jmp :nc single-value) (move esp-tn ebx-tn) (emit-label single-value))))) '((:save-p :compute-only)))))) (!def-vm-support-routine generate-return-sequence (style) (ecase style (:raw `(inst ret)) (:full-call `((inst clc) (inst ret))) (:none)))
2549abc005f263461ab0bbe2cdbd690fcce2d338bf4c51d677b20a9f806cc58b
clojurians-org/haskell-example
Route.hs
{-# LANGUAGE EmptyCase #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE EmptyCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} module Common.Route where {- -- You will probably want these imports for composing Encoders. import Prelude hiding (id, (.)) import Control.Category -} import Data.Text (Text) import Data.Functor.Identity import Data.Functor.Sum import Obelisk.Route import Obelisk.Route.TH data BackendAPI :: * -> * where BackendAPI_Ping :: BackendAPI () data BackendRoute :: * -> * where -- | Used to handle unparseable routes. BackendRoute_Missing :: BackendRoute () BackendRoute_API :: BackendRoute (Maybe (R BackendAPI)) -- You can define any routes that will be handled specially by the backend here. -- i.e. These do not serve the frontend, but do something different, such as serving static files. data FrontendRoute :: * -> * where FrontendRoute_Ping :: FrontendRoute () FrontendRoute_Home :: FrontendRoute () -- This type is used to define frontend routes, i.e. ones for which the backend will serve the frontend. deriving instance Show (FrontendRoute e) backendRouteEncoder :: Encoder (Either Text) Identity (R (Sum BackendRoute (ObeliskRoute FrontendRoute))) PageName backendRouteEncoder = handleEncoder (const (InL BackendRoute_Missing :/ ())) $ pathComponentEncoder $ \case InL backendRoute -> case backendRoute of BackendRoute_Missing -> PathSegment "missing" $ unitEncoder mempty BackendRoute_API -> PathSegment "api" $ maybeEncoder (unitEncoder mempty) $ pathComponentEncoder $ \case BackendAPI_Ping -> PathSegment "ping" $ unitEncoder mempty InR obeliskRoute -> obeliskRouteSegment obeliskRoute $ \case -- The encoder given to PathEnd determines how to parse query parameters, -- in this example, we have none, so we insist on it. FrontendRoute_Ping -> PathSegment "ping" $ unitEncoder mempty FrontendRoute_Home -> PathEnd $ unitEncoder mempty concat <$> mapM deriveRouteComponent [ ''BackendRoute , ''BackendAPI , ''FrontendRoute ]
null
https://raw.githubusercontent.com/clojurians-org/haskell-example/c96b021bdef52a121e04ea203c8c3e458770a25a/ws-dashboard/common/src/Common/Route.hs
haskell
# LANGUAGE GADTs # # LANGUAGE RankNTypes # # LANGUAGE OverloadedStrings # You will probably want these imports for composing Encoders . | Used to handle unparseable routes. You can define any routes that will be handled specially by the backend here. i.e. These do not serve the frontend, but do something different, such as serving static files. This type is used to define frontend routes, i.e. ones for which the backend will serve the frontend. in this example, we have none, so we insist on it.
# LANGUAGE EmptyCase # # LANGUAGE FlexibleContexts # # LANGUAGE LambdaCase # # LANGUAGE TemplateHaskell # # LANGUAGE KindSignatures # # LANGUAGE EmptyCase # # LANGUAGE MultiParamTypeClasses # # LANGUAGE FlexibleInstances # # LANGUAGE StandaloneDeriving # module Common.Route where import Prelude hiding ( i d , ( . ) ) import Control . Category import Prelude hiding (id, (.)) import Control.Category -} import Data.Text (Text) import Data.Functor.Identity import Data.Functor.Sum import Obelisk.Route import Obelisk.Route.TH data BackendAPI :: * -> * where BackendAPI_Ping :: BackendAPI () data BackendRoute :: * -> * where BackendRoute_Missing :: BackendRoute () BackendRoute_API :: BackendRoute (Maybe (R BackendAPI)) data FrontendRoute :: * -> * where FrontendRoute_Ping :: FrontendRoute () FrontendRoute_Home :: FrontendRoute () deriving instance Show (FrontendRoute e) backendRouteEncoder :: Encoder (Either Text) Identity (R (Sum BackendRoute (ObeliskRoute FrontendRoute))) PageName backendRouteEncoder = handleEncoder (const (InL BackendRoute_Missing :/ ())) $ pathComponentEncoder $ \case InL backendRoute -> case backendRoute of BackendRoute_Missing -> PathSegment "missing" $ unitEncoder mempty BackendRoute_API -> PathSegment "api" $ maybeEncoder (unitEncoder mempty) $ pathComponentEncoder $ \case BackendAPI_Ping -> PathSegment "ping" $ unitEncoder mempty InR obeliskRoute -> obeliskRouteSegment obeliskRoute $ \case The encoder given to PathEnd determines how to parse query parameters , FrontendRoute_Ping -> PathSegment "ping" $ unitEncoder mempty FrontendRoute_Home -> PathEnd $ unitEncoder mempty concat <$> mapM deriveRouteComponent [ ''BackendRoute , ''BackendAPI , ''FrontendRoute ]
21be165c369c9580c259bce3094ac48827f86fa84e72d64e1c5b4e5a4ce35569
markbastian/partsbin
core.clj
(ns partsbin.datahike.api.core (:require [partsbin.datahike.api.alpha :as datahike] [integrant.core :as ig] [datahike.api :as d])) (derive ::database ::datahike/database) (derive ::connection ::datahike/connection) (comment (require '[partsbin.system :refer [with-system]]) (def schema [{:db/ident :name :db/valueType :db.type/string :db/cardinality :db.cardinality/one}]) (def config {::database {:db-file "tmp/datahike" :delete-on-halt? true :initial-tx schema} ::connection {:db-config (ig/ref ::database)}}) (with-system [{conn ::connection} config] (d/transact conn [{:name "Mark"} {:name "Becky"}]) (d/q '[:find [?n ...] :in $ :where [_ :name ?n]] @conn)) )
null
https://raw.githubusercontent.com/markbastian/partsbin/8dc159327f296c9625d129b5943ec79433019e54/src/partsbin/datahike/api/core.clj
clojure
(ns partsbin.datahike.api.core (:require [partsbin.datahike.api.alpha :as datahike] [integrant.core :as ig] [datahike.api :as d])) (derive ::database ::datahike/database) (derive ::connection ::datahike/connection) (comment (require '[partsbin.system :refer [with-system]]) (def schema [{:db/ident :name :db/valueType :db.type/string :db/cardinality :db.cardinality/one}]) (def config {::database {:db-file "tmp/datahike" :delete-on-halt? true :initial-tx schema} ::connection {:db-config (ig/ref ::database)}}) (with-system [{conn ::connection} config] (d/transact conn [{:name "Mark"} {:name "Becky"}]) (d/q '[:find [?n ...] :in $ :where [_ :name ?n]] @conn)) )
bde5ad8e3f58416748538312573ef5878cf30d95619acacf92612ff0d5884588
xapi-project/xen-api-client
xen_api_lwt_unix.ml
* Copyright ( C ) 2012 Citrix Systems Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation ; version 2.1 only . with the special * exception on linking described in file LICENSE . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * Copyright (C) 2012 Citrix Systems Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; version 2.1 only. with the special * exception on linking described in file LICENSE. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. *) open Xen_api open Lwt module Lwt_unix_IO = struct type 'a t = 'a Lwt.t let (>>=) = Lwt.bind let return = Lwt.return let (>>) m n = m >>= fun _ -> n type ic = (unit -> unit Lwt.t) * Lwt_io.input_channel type oc = (unit -> unit Lwt.t) * Lwt_io.output_channel type conn = Lwt_unix.file_descr let read_line (_, ic) = Lwt_io.read_line_opt ic let read (_, ic) count = Lwt.catch (fun () -> Lwt_io.read ~count ic) (function | End_of_file -> return "" | e -> Lwt.fail e) let read_exactly ( _ , ic ) buf off len = Lwt.catch ( fun ( ) - > Lwt_io.read_into_exactly ic buf off len > > return true ) ( function | End_of_file - > return false | e - > Lwt.fail e ) let read_exactly ic len = let buf = Bytes.create len in read_exactly ic buf 0 len > > = function | true - > return ( Some buf ) | false - > return None Lwt.catch (fun () -> Lwt_io.read_into_exactly ic buf off len >> return true) (function | End_of_file -> return false | e -> Lwt.fail e) let read_exactly ic len = let buf = Bytes.create len in read_exactly ic buf 0 len >>= function | true -> return (Some buf) | false -> return None *) let write (_, oc) = Lwt_io.write oc (* let write_line (_, oc) = Lwt_io.write_line oc *) let flush (_, oc) = Lwt_io.flush oc let close ((close1, _), (close2, _)) = close1 () >> close2 () let sslctx = Ssl.init (); Ssl.create_context Ssl.TLSv1_2 Ssl.Client_context let open_connection uri = (match Uri.scheme uri with | Some "file" -> return (Unix.PF_UNIX, Unix.ADDR_UNIX (Uri.path uri), false) | Some "http" | Some "https" -> Util.sockaddr_of_uri uri >|= fun (sockaddr, ssl) -> ((Unix.domain_of_sockaddr sockaddr), sockaddr, ssl) | Some x -> fail (Unsupported_scheme x) | None -> fail (Unsupported_scheme "")) >>= fun (domain, sockaddr, ssl) -> if ssl then begin let fd = Lwt_unix.socket domain Unix.SOCK_STREAM 0 in Lwt.catch (fun () -> Lwt.catch (fun () -> Lwt_unix.connect fd sockaddr ) (fun e -> Lwt_unix.close fd >>= fun () -> Lwt.fail e ) >>= fun () -> Lwt_ssl.ssl_connect fd sslctx >>= fun sock -> let ic = Lwt_ssl.in_channel_of_descr sock in let oc = Lwt_ssl.out_channel_of_descr sock in return (Ok ((return, ic), ((fun () -> Lwt_ssl.close sock), oc)))) (fun e -> return (Error e)) end else begin let fd = Lwt_unix.socket domain Unix.SOCK_STREAM 0 in Lwt.catch (fun () -> Lwt.catch (fun () -> Lwt_unix.connect fd sockaddr ) (fun e -> Lwt_unix.close fd >>= fun () -> Lwt.fail e ) >>= fun () -> let ic = Lwt_io.of_fd ~close:return 
~mode:Lwt_io.input fd in let oc = Lwt_io.of_fd ~close:(fun () -> Lwt_unix.close fd) ~mode:Lwt_io.output fd in return (Ok (((fun () -> Lwt_io.close ic), ic), ((fun () -> Lwt_io.close oc), oc)))) (fun e -> return (Error e)) end let sleep = Lwt_unix.sleep let gettimeofday = Unix.gettimeofday end module M = Make(Lwt_unix_IO) let exn_to_string = function | Api_errors.Server_error(code, params) -> Printf.sprintf "%s %s" code (String.concat " " params) | e -> Printexc.to_string e let do_it uri string = let uri = Uri.of_string uri in let connection = M.make uri in Lwt.finalize (fun () -> M.rpc connection string >>= fun result -> match result with | Ok x -> return x | Error e -> Printf.fprintf stderr "Caught: %s\n%!" (exn_to_string e); fail e) (fun () -> M.disconnect connection) (* TODO: modify do_it to accept the timeout and remove the warnings *) [@@@ocaml.warning "-27"] let make ?(timeout=30.) uri call = let string = Xmlrpc.string_of_call call in do_it uri string >>= fun result -> Lwt.return (Xmlrpc.response_of_string result) [@@@ocaml.warning "-27"] let make_json ?(timeout=30.) uri call = let string = Jsonrpc.string_of_call call in do_it uri string >>= fun result -> Lwt.return (Jsonrpc.response_of_string result) module Client = Client.ClientF(Lwt) include Client
null
https://raw.githubusercontent.com/xapi-project/xen-api-client/c3855a3dae3ea16b087855347a544bf22f2bb0dd/lwt/xen_api_lwt_unix.ml
ocaml
let write_line (_, oc) = Lwt_io.write_line oc TODO: modify do_it to accept the timeout and remove the warnings
* Copyright ( C ) 2012 Citrix Systems Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation ; version 2.1 only . with the special * exception on linking described in file LICENSE . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * Copyright (C) 2012 Citrix Systems Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; version 2.1 only. with the special * exception on linking described in file LICENSE. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. *) open Xen_api open Lwt module Lwt_unix_IO = struct type 'a t = 'a Lwt.t let (>>=) = Lwt.bind let return = Lwt.return let (>>) m n = m >>= fun _ -> n type ic = (unit -> unit Lwt.t) * Lwt_io.input_channel type oc = (unit -> unit Lwt.t) * Lwt_io.output_channel type conn = Lwt_unix.file_descr let read_line (_, ic) = Lwt_io.read_line_opt ic let read (_, ic) count = Lwt.catch (fun () -> Lwt_io.read ~count ic) (function | End_of_file -> return "" | e -> Lwt.fail e) let read_exactly ( _ , ic ) buf off len = Lwt.catch ( fun ( ) - > Lwt_io.read_into_exactly ic buf off len > > return true ) ( function | End_of_file - > return false | e - > Lwt.fail e ) let read_exactly ic len = let buf = Bytes.create len in read_exactly ic buf 0 len > > = function | true - > return ( Some buf ) | false - > return None Lwt.catch (fun () -> Lwt_io.read_into_exactly ic buf off len >> return true) (function | End_of_file -> return false | e -> Lwt.fail e) let read_exactly ic len = let buf = Bytes.create len in read_exactly ic buf 0 len >>= function | true -> return (Some buf) | false -> return None *) let write (_, oc) = Lwt_io.write oc let flush (_, oc) = Lwt_io.flush oc let close ((close1, _), (close2, _)) = close1 () >> close2 () let sslctx = Ssl.init (); Ssl.create_context Ssl.TLSv1_2 Ssl.Client_context let open_connection uri = (match Uri.scheme uri with | Some "file" -> return (Unix.PF_UNIX, Unix.ADDR_UNIX (Uri.path uri), false) | Some "http" | Some "https" -> Util.sockaddr_of_uri uri >|= fun (sockaddr, ssl) -> ((Unix.domain_of_sockaddr sockaddr), sockaddr, ssl) | Some x -> fail (Unsupported_scheme x) | None -> fail (Unsupported_scheme "")) >>= fun (domain, sockaddr, ssl) -> if ssl then begin let fd = Lwt_unix.socket domain Unix.SOCK_STREAM 0 in Lwt.catch (fun () -> Lwt.catch (fun () -> Lwt_unix.connect fd sockaddr ) (fun e -> Lwt_unix.close fd >>= fun () -> Lwt.fail e ) >>= fun () -> Lwt_ssl.ssl_connect fd sslctx >>= fun sock -> let ic = Lwt_ssl.in_channel_of_descr sock in let oc = Lwt_ssl.out_channel_of_descr sock in return (Ok ((return, ic), ((fun () -> Lwt_ssl.close sock), oc)))) (fun e -> return (Error e)) end else begin let fd = Lwt_unix.socket domain Unix.SOCK_STREAM 0 in Lwt.catch (fun () -> Lwt.catch (fun () -> Lwt_unix.connect fd sockaddr ) (fun e -> Lwt_unix.close fd >>= fun () -> Lwt.fail e ) >>= fun () -> let ic = Lwt_io.of_fd ~close:return ~mode:Lwt_io.input fd in let oc = Lwt_io.of_fd 
~close:(fun () -> Lwt_unix.close fd) ~mode:Lwt_io.output fd in return (Ok (((fun () -> Lwt_io.close ic), ic), ((fun () -> Lwt_io.close oc), oc)))) (fun e -> return (Error e)) end let sleep = Lwt_unix.sleep let gettimeofday = Unix.gettimeofday end module M = Make(Lwt_unix_IO) let exn_to_string = function | Api_errors.Server_error(code, params) -> Printf.sprintf "%s %s" code (String.concat " " params) | e -> Printexc.to_string e let do_it uri string = let uri = Uri.of_string uri in let connection = M.make uri in Lwt.finalize (fun () -> M.rpc connection string >>= fun result -> match result with | Ok x -> return x | Error e -> Printf.fprintf stderr "Caught: %s\n%!" (exn_to_string e); fail e) (fun () -> M.disconnect connection) [@@@ocaml.warning "-27"] let make ?(timeout=30.) uri call = let string = Xmlrpc.string_of_call call in do_it uri string >>= fun result -> Lwt.return (Xmlrpc.response_of_string result) [@@@ocaml.warning "-27"] let make_json ?(timeout=30.) uri call = let string = Jsonrpc.string_of_call call in do_it uri string >>= fun result -> Lwt.return (Jsonrpc.response_of_string result) module Client = Client.ClientF(Lwt) include Client
52e7d4c0053dd968d3b54d8635f9a1d0d2b644c5a50a5a203a6defcf4135b1fa
jordanthayer/ocaml-search
setcover_problem.ml
$ I d : problem.ml , v 1.2 2004/08/17 20:02:10 ruml Exp $ instances of weighted set cover instances of weighted set cover *) type subset = float * int list type t = { num_objs : int; subsets : subset array; } let instance_root = User_paths.instance_root ^ "setcover/" let num_subsets p = Array.length p.subsets let max_depth p = Array.length p.subsets (** [max_depth p] gets an upper bound on the maximum problem depth. *) (***** I/O *****) let read ch = let num_objs = Wrio.input_int ch in let num_subsets = Wrio.input_int ch in let subsets = Array.init num_subsets (fun _ -> let v = Wrio.input_float ch in let elts = Wrstr.parse_ints (input_line ch) in v, elts) in { num_objs = num_objs; subsets = subsets; } let output ch p = (** prints many vals as floats, not ints! *) Printf.fprintf ch "%d\n%d\n" p.num_objs (Array.length p.subsets); Array.iter (fun (v, elts) -> Wrutils.pf ch "%f " v; Wrlist.fprint ch string_of_int " " elts; Wrutils.newline ch) p.subsets (*********** some utilities ***********) let subset_eq (c1,s1) (c2,s2) = (c1 = c2) && (Wrlist.set_equal s1 s2) let subset_better (c1,s1) (c2,s2) = (** more or equal stuff for less or equal cost *) (Wrlist.is_subset s2 s1) && (c1 <= c2) let undominated p = let subsets = ref [] in Array.iteri (fun i s -> if not (List.exists (fun i -> subset_eq s p.subsets.(i)) !subsets) then Wrutils.push i subsets) p.subsets; List.filter (fun i -> not (List.exists (fun j -> (j != i) && (subset_better p.subsets.(j) p.subsets.(i))) !subsets)) !subsets let remove_dominated p = let subsets = Array.of_list (List.map (fun i -> p.subsets.(i)) (undominated p)) in { p with subsets = subsets } let uncovered num_objs subsets = (** returns list of objs not in any subset *) let coverable = Array.make num_objs false in List.iter (fun (_,elts) -> List.iter (fun i -> coverable.(i) <- true) elts) subsets; Wrutils.map_n_opt (fun i -> if coverable.(i) then None else Some i) (num_objs - 1) let ensure_feasible p = Array.iter (fun (c,s) -> if not (Math.finite_p c) then failwith "bad subset cost"; List.iter (fun x -> if (x < 0) || (x >= p.num_objs) then failwith (Wrutils.str "%d invalid (num objs %d)" x p.num_objs)) s; if Wrlist.duplicates_p s then failwith "subset contains duplicates!") p.subsets; match uncovered p.num_objs (Array.to_list p.subsets) with [] -> () | _ -> failwith "some elements are uncoverable!" (********** constructing random instances **********) let make_uniform_subset num_objs max_proportion min_value max_value = * [ make_uniform_subset num_objs max_proportion min_value max_value ] makes a subset of elements which are choosen uniformly from a superset with [ num_objs ] objects . The size of the set is uniformly choosen from [ 1 , num_objs * . max_proportion ) . The value is uniformly choosen in the range [ min_value , max_value ) . max_value] makes a subset of elements which are choosen uniformly from a superset with [num_objs] objects. The size of the set is uniformly choosen from [1, num_objs *. max_proportion). The value is uniformly choosen in the range [min_value, max_value). *) let subset_size = truncate (Wrrandom.float_in_range 1. ((float num_objs) *. max_proportion)) and weight = (Random.float (max_value -. min_value)) +. min_value in let elts = Wrutils.map_ntimes (fun () -> Random.int num_objs) subset_size in weight, (Wrlist.remove_duplicates elts) * Makes a random instance . @param num_objs is the number of objects in the set . @param num_subsets is the number of subsets . @param max_value is the maximum value of a subset . 
@param max_proportion is the proportion of the number of elements in the set to use as an upper - bound for subset size . Subsets are between 1 and [ max_proportion * . num_objs ] elements . @param num_objs is the number of objects in the set. @param num_subsets is the number of subsets. @param max_value is the maximum value of a subset. @param max_proportion is the proportion of the number of elements in the set to use as an upper-bound for subset size. Subsets are between 1 and [max_proportion *. num_objs] elements. *) let random_instance num_objs num_subsets min_value max_value max_proportion = let make_subset () = make_uniform_subset num_objs max_proportion min_value max_value in let most = Wrutils.map_ntimes make_subset (num_subsets - 1) in let extra = (match uncovered num_objs most with [] -> make_subset () | l -> (Random.float max_value), l) in { num_objs = num_objs; subsets = Array.of_list (extra :: most); } let make_uniform num_objs num_subsets max_proportion min_value max_value count = let attrs = [ "model", "uniform"; "objects", string_of_int num_objs; "subsets", string_of_int num_subsets; "min value", string_of_float min_value; "max value", string_of_float max_value; "max proportion", string_of_float max_proportion; ] in for c = 1 to count do let inst_attrs = attrs @ [ "num", string_of_int c ] in let path = Rdb.path_for instance_root inst_attrs in if not (Sys.file_exists path) then begin Wrutils.pr "Saving %s\n%!" path; Wrio.with_outfile path (fun ch -> output ch (random_instance num_objs num_subsets min_value max_value max_proportion)) end else Wrutils.pr "Skipping %s\n%!" path; done (********** solutions *****************) type solution = int list let default_solution p = (** useful for returning when no leaf was reached *) Wrutils.map_n Fn.identity ((num_subsets p) - 1) let check_sol p s = (** returns cost or raises failure *) match uncovered p.num_objs (List.map (fun i -> p.subsets.(i)) s) with [] -> Wrlist.sum_floats (fun i -> fst p.subsets.(i)) s | l -> Wrutils.pr "Subsets: "; Wrlist.fprint stdout string_of_int " " s; Wrutils.pr "\nUncovered: "; Wrlist.fprint stdout string_of_int " " l; Wrutils.newline stdout; flush_all (); failwith "solution leaves element(s) uncovered!" let print_sol ch s = Wrlist.fprint ch string_of_int " " s EOF
null
https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/setcover/setcover_problem.ml
ocaml
* [max_depth p] gets an upper bound on the maximum problem depth. **** I/O **** * prints many vals as floats, not ints! ********** some utilities ********** * more or equal stuff for less or equal cost * returns list of objs not in any subset ********* constructing random instances ********* ********* solutions **************** * useful for returning when no leaf was reached * returns cost or raises failure
$ I d : problem.ml , v 1.2 2004/08/17 20:02:10 ruml Exp $ instances of weighted set cover instances of weighted set cover *) type subset = float * int list type t = { num_objs : int; subsets : subset array; } let instance_root = User_paths.instance_root ^ "setcover/" let num_subsets p = Array.length p.subsets let max_depth p = Array.length p.subsets let read ch = let num_objs = Wrio.input_int ch in let num_subsets = Wrio.input_int ch in let subsets = Array.init num_subsets (fun _ -> let v = Wrio.input_float ch in let elts = Wrstr.parse_ints (input_line ch) in v, elts) in { num_objs = num_objs; subsets = subsets; } let output ch p = Printf.fprintf ch "%d\n%d\n" p.num_objs (Array.length p.subsets); Array.iter (fun (v, elts) -> Wrutils.pf ch "%f " v; Wrlist.fprint ch string_of_int " " elts; Wrutils.newline ch) p.subsets let subset_eq (c1,s1) (c2,s2) = (c1 = c2) && (Wrlist.set_equal s1 s2) let subset_better (c1,s1) (c2,s2) = (Wrlist.is_subset s2 s1) && (c1 <= c2) let undominated p = let subsets = ref [] in Array.iteri (fun i s -> if not (List.exists (fun i -> subset_eq s p.subsets.(i)) !subsets) then Wrutils.push i subsets) p.subsets; List.filter (fun i -> not (List.exists (fun j -> (j != i) && (subset_better p.subsets.(j) p.subsets.(i))) !subsets)) !subsets let remove_dominated p = let subsets = Array.of_list (List.map (fun i -> p.subsets.(i)) (undominated p)) in { p with subsets = subsets } let uncovered num_objs subsets = let coverable = Array.make num_objs false in List.iter (fun (_,elts) -> List.iter (fun i -> coverable.(i) <- true) elts) subsets; Wrutils.map_n_opt (fun i -> if coverable.(i) then None else Some i) (num_objs - 1) let ensure_feasible p = Array.iter (fun (c,s) -> if not (Math.finite_p c) then failwith "bad subset cost"; List.iter (fun x -> if (x < 0) || (x >= p.num_objs) then failwith (Wrutils.str "%d invalid (num objs %d)" x p.num_objs)) s; if Wrlist.duplicates_p s then failwith "subset contains duplicates!") p.subsets; match uncovered p.num_objs (Array.to_list p.subsets) with [] -> () | _ -> failwith "some elements are uncoverable!" let make_uniform_subset num_objs max_proportion min_value max_value = * [ make_uniform_subset num_objs max_proportion min_value max_value ] makes a subset of elements which are choosen uniformly from a superset with [ num_objs ] objects . The size of the set is uniformly choosen from [ 1 , num_objs * . max_proportion ) . The value is uniformly choosen in the range [ min_value , max_value ) . max_value] makes a subset of elements which are choosen uniformly from a superset with [num_objs] objects. The size of the set is uniformly choosen from [1, num_objs *. max_proportion). The value is uniformly choosen in the range [min_value, max_value). *) let subset_size = truncate (Wrrandom.float_in_range 1. ((float num_objs) *. max_proportion)) and weight = (Random.float (max_value -. min_value)) +. min_value in let elts = Wrutils.map_ntimes (fun () -> Random.int num_objs) subset_size in weight, (Wrlist.remove_duplicates elts) * Makes a random instance . @param num_objs is the number of objects in the set . @param num_subsets is the number of subsets . @param max_value is the maximum value of a subset . @param max_proportion is the proportion of the number of elements in the set to use as an upper - bound for subset size . Subsets are between 1 and [ max_proportion * . num_objs ] elements . @param num_objs is the number of objects in the set. @param num_subsets is the number of subsets. @param max_value is the maximum value of a subset. 
@param max_proportion is the proportion of the number of elements in the set to use as an upper-bound for subset size. Subsets are between 1 and [max_proportion *. num_objs] elements. *) let random_instance num_objs num_subsets min_value max_value max_proportion = let make_subset () = make_uniform_subset num_objs max_proportion min_value max_value in let most = Wrutils.map_ntimes make_subset (num_subsets - 1) in let extra = (match uncovered num_objs most with [] -> make_subset () | l -> (Random.float max_value), l) in { num_objs = num_objs; subsets = Array.of_list (extra :: most); } let make_uniform num_objs num_subsets max_proportion min_value max_value count = let attrs = [ "model", "uniform"; "objects", string_of_int num_objs; "subsets", string_of_int num_subsets; "min value", string_of_float min_value; "max value", string_of_float max_value; "max proportion", string_of_float max_proportion; ] in for c = 1 to count do let inst_attrs = attrs @ [ "num", string_of_int c ] in let path = Rdb.path_for instance_root inst_attrs in if not (Sys.file_exists path) then begin Wrutils.pr "Saving %s\n%!" path; Wrio.with_outfile path (fun ch -> output ch (random_instance num_objs num_subsets min_value max_value max_proportion)) end else Wrutils.pr "Skipping %s\n%!" path; done type solution = int list let default_solution p = Wrutils.map_n Fn.identity ((num_subsets p) - 1) let check_sol p s = match uncovered p.num_objs (List.map (fun i -> p.subsets.(i)) s) with [] -> Wrlist.sum_floats (fun i -> fst p.subsets.(i)) s | l -> Wrutils.pr "Subsets: "; Wrlist.fprint stdout string_of_int " " s; Wrutils.pr "\nUncovered: "; Wrlist.fprint stdout string_of_int " " l; Wrutils.newline stdout; flush_all (); failwith "solution leaves element(s) uncovered!" let print_sol ch s = Wrlist.fprint ch string_of_int " " s EOF
6bc9580dc7f827ba9fad414a81733cdf21c5445d242c0967887b05291c1d902a
clojusc/friend-oauth2-examples
google_handler.clj
(ns friend-oauth2-examples.google-handler (:require [compojure.core :refer :all] [compojure.handler :as handler] [compojure.route :as route] [cemerick.friend :as friend] [friend-oauth2.workflow :as oauth2] [friend-oauth2.util :refer [format-config-uri]] [cheshire.core :as j] (cemerick.friend [workflows :as workflows] [credentials :as creds]))) (defn credential-fn [token] ;;lookup token in DB or whatever to fetch appropriate :roles {:identity token :roles #{::user}}) (def client-config {:client-id "" :client-secret "" :callback {:domain "" :path "/oauth2callback"}}) (def uri-config {:authentication-uri {:url "" :query {:client_id (:client-id client-config) :response_type "code" :redirect_uri (format-config-uri client-config) :scope "email"}} :access-token-uri {:url "" :query {:client_id (:client-id client-config) :client_secret (:client-secret client-config) :grant_type "authorization_code" :redirect_uri (format-config-uri client-config)}}}) (defroutes ring-app (GET "/" request "open.") (GET "/status" request (let [count (:count (:session request) 0) session (assoc (:session request) :count (inc count))] (-> (ring.util.response/response (str "<p>We've hit the session page " (:count session) " times.</p><p>The current session: " session "</p>")) (assoc :session session)))) (GET "/authlink" request (friend/authorize #{::user} "Authorized page.")) (GET "/authlink2" request (friend/authorize #{::user} "Authorized page 2.")) (GET "/admin" request (friend/authorize #{::admin} "Only admins can see this page.")) (friend/logout (ANY "/logout" request (ring.util.response/redirect "/")))) (def app (handler/site (friend/authenticate ring-app {:allow-anon? true :workflows [(oauth2/workflow {:client-config client-config :uri-config uri-config :credential-fn credential-fn})]})))
null
https://raw.githubusercontent.com/clojusc/friend-oauth2-examples/3ea1ca7e51644f8f1985566097652d78abfca457/src/friend_oauth2_examples/google_handler.clj
clojure
lookup token in DB or whatever to fetch appropriate :roles
(ns friend-oauth2-examples.google-handler (:require [compojure.core :refer :all] [compojure.handler :as handler] [compojure.route :as route] [cemerick.friend :as friend] [friend-oauth2.workflow :as oauth2] [friend-oauth2.util :refer [format-config-uri]] [cheshire.core :as j] (cemerick.friend [workflows :as workflows] [credentials :as creds]))) (defn credential-fn [token] {:identity token :roles #{::user}}) (def client-config {:client-id "" :client-secret "" :callback {:domain "" :path "/oauth2callback"}}) (def uri-config {:authentication-uri {:url "" :query {:client_id (:client-id client-config) :response_type "code" :redirect_uri (format-config-uri client-config) :scope "email"}} :access-token-uri {:url "" :query {:client_id (:client-id client-config) :client_secret (:client-secret client-config) :grant_type "authorization_code" :redirect_uri (format-config-uri client-config)}}}) (defroutes ring-app (GET "/" request "open.") (GET "/status" request (let [count (:count (:session request) 0) session (assoc (:session request) :count (inc count))] (-> (ring.util.response/response (str "<p>We've hit the session page " (:count session) " times.</p><p>The current session: " session "</p>")) (assoc :session session)))) (GET "/authlink" request (friend/authorize #{::user} "Authorized page.")) (GET "/authlink2" request (friend/authorize #{::user} "Authorized page 2.")) (GET "/admin" request (friend/authorize #{::admin} "Only admins can see this page.")) (friend/logout (ANY "/logout" request (ring.util.response/redirect "/")))) (def app (handler/site (friend/authenticate ring-app {:allow-anon? true :workflows [(oauth2/workflow {:client-config client-config :uri-config uri-config :credential-fn credential-fn})]})))
7f7a44c4c1154d277786cd9fe3ef1f56bf41b0083547a9839911ce6bd81dd21d
kadena-io/chainweb-mining-client
External.hs
# LANGUAGE DeriveGeneric # # LANGUAGE LambdaCase # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE ScopedTypeVariables # -- | -- Module: Worker.External Copyright : Copyright © 2020 Kadena LLC . License : MIT Maintainer : < > -- Stability: experimental -- TODO -- module Worker.External ( externalWorker ) where import Control.Concurrent.Async import Control.Monad.Catch import qualified Data.ByteArray.Encoding as BA import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as B8 import qualified Data.ByteString.Short as BS import Data.Char import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.IO as T import GHC.Generics import System.Exit import System.IO import System.LogLevel import qualified System.Process as P -- internal modules import Logger import Worker -- -------------------------------------------------------------------------- -- -- Exceptions newtype ExternalWorkerException = ExternalWorkerException T.Text deriving (Show, Eq, Ord, Generic) instance Exception ExternalWorkerException -- -------------------------------------------------------------------------- -- -- -- | Run an external worker: -- -- External workers is an external operating system process that is provided by -- it's file system path. -- -- It is invoked with -- * the target as first parameter ( hex bytes in little endian encoding ) , -- followed by -- * what ever extra arguments are configured by the user. -- The work bytes are provided to stdin as raw bytes . -- -- On finding a solution it is expected to print the nonce (encoded in hex in big endian byte order ) and to exit with an exit code of 0 . -- In case of an exit code other than zero any outputs are logged and discarded . -- externalWorker :: Logger -> String -- ^ worker command -> Worker externalWorker logger cmd _nonce target (Work work) = withLogTag logger "Worker" $ \workerLogger -> P.withCreateProcess workerProc (go workerLogger) where targetArg = T.unpack $ targetToText16Le target workerProc = (P.shell $ cmd <> " " <> targetArg) { P.std_in = P.CreatePipe , P.std_out = P.CreatePipe , P.std_err = P.CreatePipe } go workerLogger (Just hstdin) (Just hstdout) (Just hstderr) ph = do B.hPut hstdin $ BS.fromShort work hClose hstdin writeLog logger Info "send command to external worker" writeLog logger Debug $ "external worker command: " <> T.pack (cmd <> " " <> targetArg) withAsync (B.hGetContents hstdout) $ \stdoutThread -> withAsync (errThread workerLogger hstderr) $ \stderrThread -> do code <- P.waitForProcess ph (outbytes, _) <- (,) <$> wait stdoutThread <*> wait stderrThread writeLog logger Info "received nonce for solved work from external worker" if code /= ExitSuccess then throwM $ ExternalWorkerException $ "External worker failed with code: " <> (T.pack . show) code -- FIXME: handle non-printable characters else do nonceB16 <- case B8.splitWith isSpace outbytes of [] -> throwM $ ExternalWorkerException $ "expected nonce from miner, got: " <> T.decodeUtf8 outbytes -- FIXME: handle non-printable characters (a:_) -> return a -- reverse -- we want little-endian case BA.convertFromBase BA.Base16 nonceB16 of Left e -> throwM $ ExternalWorkerException $ "failed to decode nonce bytes: " <> (T.pack . show) e Right bs | B.length bs /= 8 -> throwM $ ExternalWorkerException "process returned short nonce" | otherwise -> return $ Work $! 
BS.toShort $ B.take 278 (BS.fromShort work) <> B.reverse bs go _ _ _ _ _ = throwM $ ExternalWorkerException "impossible: process is opened with CreatePipe in/out/err" errThread l hstderr = next where next = hIsEOF hstderr >>= \case True -> return () False -> do T.hGetLine hstderr >>= writeLog l Debug next
null
https://raw.githubusercontent.com/kadena-io/chainweb-mining-client/e051c1faab0ffb85d7c0d454763e77af2646384d/src/Worker/External.hs
haskell
# LANGUAGE OverloadedStrings # | Module: Worker.External Stability: experimental internal modules -------------------------------------------------------------------------- -- Exceptions -------------------------------------------------------------------------- -- | Run an external worker: External workers is an external operating system process that is provided by it's file system path. It is invoked with followed by * what ever extra arguments are configured by the user. On finding a solution it is expected to print the nonce (encoded in hex in ^ worker command FIXME: handle non-printable characters FIXME: handle non-printable characters reverse -- we want little-endian
# LANGUAGE DeriveGeneric # # LANGUAGE LambdaCase # # LANGUAGE ScopedTypeVariables # Copyright : Copyright © 2020 Kadena LLC . License : MIT Maintainer : < > TODO module Worker.External ( externalWorker ) where import Control.Concurrent.Async import Control.Monad.Catch import qualified Data.ByteArray.Encoding as BA import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as B8 import qualified Data.ByteString.Short as BS import Data.Char import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.IO as T import GHC.Generics import System.Exit import System.IO import System.LogLevel import qualified System.Process as P import Logger import Worker newtype ExternalWorkerException = ExternalWorkerException T.Text deriving (Show, Eq, Ord, Generic) instance Exception ExternalWorkerException * the target as first parameter ( hex bytes in little endian encoding ) , The work bytes are provided to stdin as raw bytes . big endian byte order ) and to exit with an exit code of 0 . In case of an exit code other than zero any outputs are logged and discarded . externalWorker :: Logger -> String -> Worker externalWorker logger cmd _nonce target (Work work) = withLogTag logger "Worker" $ \workerLogger -> P.withCreateProcess workerProc (go workerLogger) where targetArg = T.unpack $ targetToText16Le target workerProc = (P.shell $ cmd <> " " <> targetArg) { P.std_in = P.CreatePipe , P.std_out = P.CreatePipe , P.std_err = P.CreatePipe } go workerLogger (Just hstdin) (Just hstdout) (Just hstderr) ph = do B.hPut hstdin $ BS.fromShort work hClose hstdin writeLog logger Info "send command to external worker" writeLog logger Debug $ "external worker command: " <> T.pack (cmd <> " " <> targetArg) withAsync (B.hGetContents hstdout) $ \stdoutThread -> withAsync (errThread workerLogger hstderr) $ \stderrThread -> do code <- P.waitForProcess ph (outbytes, _) <- (,) <$> wait stdoutThread <*> wait stderrThread writeLog logger Info "received nonce for solved work from external worker" if code /= ExitSuccess then throwM $ ExternalWorkerException $ "External worker failed with code: " <> (T.pack . show) code else do nonceB16 <- case B8.splitWith isSpace outbytes of [] -> throwM $ ExternalWorkerException $ "expected nonce from miner, got: " <> T.decodeUtf8 outbytes (a:_) -> return a case BA.convertFromBase BA.Base16 nonceB16 of Left e -> throwM $ ExternalWorkerException $ "failed to decode nonce bytes: " <> (T.pack . show) e Right bs | B.length bs /= 8 -> throwM $ ExternalWorkerException "process returned short nonce" | otherwise -> return $ Work $! BS.toShort $ B.take 278 (BS.fromShort work) <> B.reverse bs go _ _ _ _ _ = throwM $ ExternalWorkerException "impossible: process is opened with CreatePipe in/out/err" errThread l hstderr = next where next = hIsEOF hstderr >>= \case True -> return () False -> do T.hGetLine hstderr >>= writeLog l Debug next
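The module header above describes a small protocol rather than a library API: the worker process receives the target as its first command-line argument (hex, little-endian), reads the raw work bytes from stdin, and, on finding a solution, prints the nonce as big-endian hex and exits with status 0. Below is a minimal sketch of a process that honours that contract; the fixed placeholder nonce and the absence of any real proof-of-work search are assumptions made purely for illustration and are not part of chainweb-mining-client.

module Main (main) where

import qualified Data.ByteString as B
import Data.Word (Word64)
import System.Exit (exitSuccess)
import Text.Printf (printf)

main :: IO ()
main = do
  work <- B.getContents              -- raw work bytes arrive on stdin
  let _searched = B.length work      -- a real worker would search for a nonce here
  -- print the "solution": 16 hex characters, big-endian byte order,
  -- which the parent process parses and then reverses to little-endian
  printf "%016x\n" (0x0123456789abcdef :: Word64)
  exitSuccess                        -- exit code 0 signals success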
5d32e83a02945d0f96a3eef0c2feece9a1dfcf772df41b3bae0205bbf8c66e15
dannypsnl/racket-tree-sitter
main.rkt
#lang racket/base (provide (all-from-out "types.rkt" "language.rkt" "parser.rkt" "node.rkt" "tree.rkt" "tree-cursor.rkt" "query.rkt" "query-cursor.rkt")) (require "types.rkt" "language.rkt" "parser.rkt" "node.rkt" "tree.rkt" "tree-cursor.rkt" "query.rkt" "query-cursor.rkt") (module+ main (require ffi/unsafe ffi/unsafe/define) (define-ffi-definer define-racket (ffi-lib "./zig-out/lib/libtree-sitter-racket" '(#f))) (define-racket rkt-language (_fun -> _TSLanguageRef) #:c-id tree_sitter_racket) (define p (parser-new)) (set-language p (rkt-language)) (define source-code " (+ 1 2) (define x 1) (define (foo a b) (+ a b x)) ") (define tree (parse-string p #f source-code)) (define root (root-node tree)) (displayln (node->string root)) (tree-delete tree))
null
https://raw.githubusercontent.com/dannypsnl/racket-tree-sitter/d5193cb8af43428372db34188924f4fea372ba8d/main.rkt
racket
#lang racket/base (provide (all-from-out "types.rkt" "language.rkt" "parser.rkt" "node.rkt" "tree.rkt" "tree-cursor.rkt" "query.rkt" "query-cursor.rkt")) (require "types.rkt" "language.rkt" "parser.rkt" "node.rkt" "tree.rkt" "tree-cursor.rkt" "query.rkt" "query-cursor.rkt") (module+ main (require ffi/unsafe ffi/unsafe/define) (define-ffi-definer define-racket (ffi-lib "./zig-out/lib/libtree-sitter-racket" '(#f))) (define-racket rkt-language (_fun -> _TSLanguageRef) #:c-id tree_sitter_racket) (define p (parser-new)) (set-language p (rkt-language)) (define source-code " (+ 1 2) (define x 1) (define (foo a b) (+ a b x)) ") (define tree (parse-string p #f source-code)) (define root (root-node tree)) (displayln (node->string root)) (tree-delete tree))
8178f07b4e6b66c197ae758971de69d87ed84e18067ac9d73993f40a1734f1e5
AccelerateHS/accelerate
Bounded.hs
{-# LANGUAGE ConstraintKinds #-} # LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # # LANGUAGE TemplateHaskell # # LANGUAGE TypeApplications # # OPTIONS_GHC -fno - warn - orphans # -- | -- Module : Data.Array.Accelerate.Classes.Bounded Copyright : [ 2016 .. 2020 ] The Accelerate Team -- License : BSD3 -- Maintainer : < > -- Stability : experimental Portability : non - portable ( GHC extensions ) -- module Data.Array.Accelerate.Classes.Bounded ( Bounded, P.minBound, P.maxBound, ) where import Data.Array.Accelerate.Array.Data import Data.Array.Accelerate.Pattern import Data.Array.Accelerate.Smart import Data.Array.Accelerate.Sugar.Elt import Data.Array.Accelerate.Type import Prelude ( ($), (<$>), Num(..), Char, Bool, show, concat, map, mapM ) import Language.Haskell.TH.Extra hiding ( Exp ) import qualified Prelude as P -- | Name the upper and lower limits of a type. Types which are not totally -- ordered may still have upper and lower bounds. -- type Bounded a = (Elt a, P.Bounded (Exp a)) instance P.Bounded (Exp ()) where minBound = constant () maxBound = constant () instance P.Bounded (Exp Int) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int8) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int16) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int32) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int64) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word8) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word16) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word32) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word64) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp CShort) where minBound = mkBitcast (mkMinBound @Int16) maxBound = mkBitcast (mkMaxBound @Int16) instance P.Bounded (Exp CUShort) where minBound = mkBitcast (mkMinBound @Word16) maxBound = mkBitcast (mkMaxBound @Word16) instance P.Bounded (Exp CInt) where minBound = mkBitcast (mkMinBound @Int32) maxBound = mkBitcast (mkMaxBound @Int32) instance P.Bounded (Exp CUInt) where minBound = mkBitcast (mkMinBound @Word32) maxBound = mkBitcast (mkMaxBound @Word32) instance P.Bounded (Exp CLong) where minBound = mkBitcast (mkMinBound @HTYPE_CLONG) maxBound = mkBitcast (mkMaxBound @HTYPE_CLONG) instance P.Bounded (Exp CULong) where minBound = mkBitcast (mkMinBound @HTYPE_CULONG) maxBound = mkBitcast (mkMaxBound @HTYPE_CULONG) instance P.Bounded (Exp CLLong) where minBound = mkBitcast (mkMinBound @Int64) maxBound = mkBitcast (mkMaxBound @Int64) instance P.Bounded (Exp CULLong) where minBound = mkBitcast (mkMinBound @Word64) maxBound = mkBitcast (mkMaxBound @Word64) instance P.Bounded (Exp Bool) where minBound = constant P.minBound maxBound = constant P.maxBound instance P.Bounded (Exp Char) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp CChar) where minBound = mkBitcast (mkMinBound @HTYPE_CCHAR) maxBound = mkBitcast (mkMaxBound @HTYPE_CCHAR) instance P.Bounded (Exp CSChar) where minBound = mkBitcast (mkMinBound @Int8) maxBound = mkBitcast (mkMaxBound @Int8) instance P.Bounded (Exp CUChar) where minBound = mkBitcast (mkMinBound @Word8) maxBound = mkBitcast (mkMaxBound @Word8) $(runQ $ do let mkInstance :: Int -> Q [Dec] mkInstance n = let xs = [ mkName ('x':show i) | i <- [0 .. 
n-1] ] cst = tupT (map (\x -> [t| Bounded $(varT x) |]) xs) res = tupT (map varT xs) app x = appsE (conE (mkName ('T':show n)) : P.replicate n x) in [d| instance $cst => P.Bounded (Exp $res) where minBound = $(app [| P.minBound |]) maxBound = $(app [| P.maxBound |]) |] -- concat <$> mapM mkInstance [2..16] )
null
https://raw.githubusercontent.com/AccelerateHS/accelerate/7c769b761d0b2a91f318096b9dd3fced94616961/src/Data/Array/Accelerate/Classes/Bounded.hs
haskell
# LANGUAGE ConstraintKinds # | Module : Data.Array.Accelerate.Classes.Bounded License : BSD3 Stability : experimental | Name the upper and lower limits of a type. Types which are not totally ordered may still have upper and lower bounds.
# LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # # LANGUAGE TemplateHaskell # # LANGUAGE TypeApplications # # OPTIONS_GHC -fno - warn - orphans # Copyright : [ 2016 .. 2020 ] The Accelerate Team Maintainer : < > Portability : non - portable ( GHC extensions ) module Data.Array.Accelerate.Classes.Bounded ( Bounded, P.minBound, P.maxBound, ) where import Data.Array.Accelerate.Array.Data import Data.Array.Accelerate.Pattern import Data.Array.Accelerate.Smart import Data.Array.Accelerate.Sugar.Elt import Data.Array.Accelerate.Type import Prelude ( ($), (<$>), Num(..), Char, Bool, show, concat, map, mapM ) import Language.Haskell.TH.Extra hiding ( Exp ) import qualified Prelude as P type Bounded a = (Elt a, P.Bounded (Exp a)) instance P.Bounded (Exp ()) where minBound = constant () maxBound = constant () instance P.Bounded (Exp Int) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int8) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int16) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int32) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Int64) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word8) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word16) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word32) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp Word64) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp CShort) where minBound = mkBitcast (mkMinBound @Int16) maxBound = mkBitcast (mkMaxBound @Int16) instance P.Bounded (Exp CUShort) where minBound = mkBitcast (mkMinBound @Word16) maxBound = mkBitcast (mkMaxBound @Word16) instance P.Bounded (Exp CInt) where minBound = mkBitcast (mkMinBound @Int32) maxBound = mkBitcast (mkMaxBound @Int32) instance P.Bounded (Exp CUInt) where minBound = mkBitcast (mkMinBound @Word32) maxBound = mkBitcast (mkMaxBound @Word32) instance P.Bounded (Exp CLong) where minBound = mkBitcast (mkMinBound @HTYPE_CLONG) maxBound = mkBitcast (mkMaxBound @HTYPE_CLONG) instance P.Bounded (Exp CULong) where minBound = mkBitcast (mkMinBound @HTYPE_CULONG) maxBound = mkBitcast (mkMaxBound @HTYPE_CULONG) instance P.Bounded (Exp CLLong) where minBound = mkBitcast (mkMinBound @Int64) maxBound = mkBitcast (mkMaxBound @Int64) instance P.Bounded (Exp CULLong) where minBound = mkBitcast (mkMinBound @Word64) maxBound = mkBitcast (mkMaxBound @Word64) instance P.Bounded (Exp Bool) where minBound = constant P.minBound maxBound = constant P.maxBound instance P.Bounded (Exp Char) where minBound = mkMinBound maxBound = mkMaxBound instance P.Bounded (Exp CChar) where minBound = mkBitcast (mkMinBound @HTYPE_CCHAR) maxBound = mkBitcast (mkMaxBound @HTYPE_CCHAR) instance P.Bounded (Exp CSChar) where minBound = mkBitcast (mkMinBound @Int8) maxBound = mkBitcast (mkMaxBound @Int8) instance P.Bounded (Exp CUChar) where minBound = mkBitcast (mkMinBound @Word8) maxBound = mkBitcast (mkMaxBound @Word8) $(runQ $ do let mkInstance :: Int -> Q [Dec] mkInstance n = let xs = [ mkName ('x':show i) | i <- [0 .. 
n-1] ] cst = tupT (map (\x -> [t| Bounded $(varT x) |]) xs) res = tupT (map varT xs) app x = appsE (conE (mkName ('T':show n)) : P.replicate n x) in [d| instance $cst => P.Bounded (Exp $res) where minBound = $(app [| P.minBound |]) maxBound = $(app [| P.maxBound |]) |] concat <$> mapM mkInstance [2..16] )
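The splice at the end of the module writes the tuple instances mechanically, so none of them appear literally in the source. As a sketch of roughly what it expands to for the two-component case (fresh type-variable names chosen here for readability; the real splice generates arities 2 through 16 in the same way):

-- schematic expansion of mkInstance 2, not copied from the library
instance (Bounded x0, Bounded x1) => P.Bounded (Exp (x0, x1)) where
  minBound = T2 P.minBound P.minBound
  maxBound = T2 P.maxBound P.maxBound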
5937089bce9f531915648601997b8e7748eaaf19514601926a27075aefacfb16
fpottier/pprint
PPrintBench.ml
(******************************************************************************) (* *) PPrint (* *) , (* *) Copyright 2007 - 2022 Inria . All rights reserved . This file is distributed under the terms of the GNU Library General Public (* License, with an exception, as described in the file LICENSE. *) (* *) (******************************************************************************) (* ------------------------------------------------------------------------- *) (* The following signature is common to the old and new engines. *) module type ENGINE = sig type document val empty: document val char: char -> document val string: string -> document val substring: string -> int -> int -> document val fancystring: string -> int -> document val fancysubstring : string -> int -> int -> int -> document val utf8string: string -> document val hardline: document val blank: int -> document val break: int -> document val (^^): document -> document -> document val nest: int -> document -> document val group: document -> document val ifflat: document -> document -> document module ToBuffer : PPrint.RENDERER with type channel = Buffer.t and type document = document end (* ------------------------------------------------------------------------- *) We use our own abstract syntax of documents . We produce random documents in this syntax first , then ( as part of the timed test ) translate them to the engine 's syntax . This allows timing the engine 's document construction code too . in this syntax first, then (as part of the timed test) translate them to the engine's syntax. This allows timing the engine's document construction code too. *) type mydoc = | MyEmpty | MyChar of char | MyString of string | MySubString of string * int * int | MyUtf8String of string | MyHardLine | MyBlank of int | MyBreak of int | MyCat of mydoc * mydoc | MyNest of int * mydoc | MyGroup of mydoc | MyIfFlat of mydoc * mydoc (* ------------------------------------------------------------------------- *) (* [measure v] measures the size of an OCaml value [v] in bytes. *) let measure v = (* String.length (Marshal.to_string v []) *) Size.size_b v (* ------------------------------------------------------------------------- *) [ split n ] produces two numbers [ n1 ] and [ n2 ] comprised between [ 0 ] and [ n ] ( inclusive ) whose sum is [ n ] . (inclusive) whose sum is [n]. *) let split n = let n1 = Random.int (n + 1) in let n2 = n - n1 in n1, n2 (* [choose xs] randomly and uniformly chooses between the elements of the array [xs]. *) let choose xs = Array.unsafe_get xs (Random.int (Array.length xs)) (* [pick] is analogous, but each element comes with a relative integer weight. *) let pick wxs = (* Compute the total weight. *) let weight = List.fold_left (fun weight (w, _) -> weight + w) 0 wxs in assert (weight > 0); (* Pick a random integer between 0 and the total weight. *) let i = Random.int weight in (* Find the corresponding element. *) let rec loop i wxs = match wxs with | [] -> assert false | (w, x) :: wxs -> if i < w then x else loop (i - w) wxs in loop i wxs (* ------------------------------------------------------------------------- *) (* A random document generator. *) let leaf = [| MyChar 'c'; MyString "hello"; MySubString ("the cat", 4, 3); MyUtf8String "étoile"; MyHardLine; MyBlank 2; MyBreak 2 |] let rec random (n : int) : mydoc = (* If the budget is 0, produce an empty document. *) if n = 0 then MyEmpty If the budget is 1 , produce a leaf . 
else if n = 1 then choose leaf (* Otherwise, decrement the budget, and produce a node of nonzero arity, spending the rest of the budget on the children. *) else let n = n - 1 in Lazy.force (pick [ 10, lazy (let n1, n2 = split n in MyCat (random n1, random n2)); 2, lazy (MyNest (2, random n)); 10, lazy (MyGroup (random n)); 2, lazy (let n1, n2 = split n in MyIfFlat (random n1, random n2)) ]) (* ------------------------------------------------------------------------- *) (* Building documents for a particular engine. *) module Build (E : ENGINE) = struct open E let rec build (doc : mydoc) : document = match doc with | MyEmpty -> empty | MyChar c -> char c | MyString s -> string s | MySubString (s, ofs, len) -> substring s ofs len | MyUtf8String s -> utf8string s | MyHardLine -> hardline | MyBlank b -> blank b | MyBreak b -> break b | MyCat (doc1, doc2) -> build doc1 ^^ build doc2 | MyNest (i, doc) -> nest i (build doc) | MyGroup doc -> group (build doc) | MyIfFlat (doc1, doc2) -> ifflat (build doc1) (build doc2) end (* ------------------------------------------------------------------------- *) (* The rendering parameters. *) let rfrac = 0.8 let width = 80 (* ------------------------------------------------------------------------- *) (* Testing an engine, alone. *) module Test1 (E : ENGINE) = struct open E (* The size of the randomly generated documents. *) let n = 1000 (* The number of runs. *) let runs = 10000 let () = let module B = Build(E) in let s = ref 0 in for _r = 1 to runs do let document = B.build (random n) in s := !s + measure document; let buffer = Buffer.create 32768 in ToBuffer.pretty rfrac width buffer document; let buffer = Buffer.create 32768 in ToBuffer.compact buffer document done; Printf.printf "Test 1: success.\n%!"; let average = float_of_int !s /. float_of_int runs in Printf.printf "Average document size: %d bytes.\n%!" (truncate average) end (* ------------------------------------------------------------------------- *) Testing two engines and comparing their output . module Test2 (E1 : ENGINE) (E2 : ENGINE) = struct (* The size of the randomly generated documents. *) let n = 1000 (* The number of runs. *) let runs = 10000 let () = let module B1 = Build(E1) in let module B2 = Build(E2) in for _r = 1 to runs do let document = random n in let document1 = B1.build document in let document2 = B2.build document in let buffer1 = Buffer.create 32768 in E1.ToBuffer.pretty rfrac width buffer1 document1; let buffer2 = Buffer.create 32768 in E2.ToBuffer.pretty rfrac width buffer2 document2; assert (Buffer.contents buffer1 = Buffer.contents buffer2) done; Printf.printf "Test 2: success.\n%!" end (* ------------------------------------------------------------------------- *) (* Timing an engine, alone. *) module Time1 (E : ENGINE) (D : sig val n: int val runs: int val docs : mydoc array end) = struct open E open D let gc = false let time f x = if gc then Gc.major(); let start = Unix.gettimeofday() in let y = f x in let finish = Unix.gettimeofday() in y, finish -. start let () = let module B = Build(E) in Printf.printf "Time: building documents...\n%!"; let docs, duration = time (fun () -> Array.map B.build docs) () in Printf.printf "Time: built %d documents of size %d in %.2f seconds.\n%!" runs n duration; let size = Array.fold_left (fun accu doc -> accu + measure doc) 0 docs in let average = float_of_int size /. float_of_int runs in Printf.printf "Average document size: %d bytes.\n%!" 
(truncate average); let buffer = Buffer.create 32768 in Printf.printf "Time: rendering documents...\n%!"; let (), duration = time (fun () -> Array.iter (fun document -> ToBuffer.pretty rfrac width buffer document; Buffer.clear buffer ) docs ) () in Printf.printf "Time: rendered %d documents of size %d in %.2f seconds.\n%!" runs n duration end (* ------------------------------------------------------------------------- *) (* Main. *) let test1 () = (* Testing both engines on the same set of documents. *) Printf.printf "Testing old engine...\n"; let state = Random.get_state() in let module T = Test1(OldPPrintEngine) in Random.set_state state; Printf.printf "Testing new engine...\n"; let module T = Test1(PPrintEngine) in () let test2 () = Comparing the two engines . Printf.printf "Comparing old and new engines...\n"; let module T = Test2(OldPPrintEngine)(PPrintEngine) in () type engine = Old | New let test3 engine = The timing test . Best to run it separately on each engine ( in two different processes ) , as there are otherwise GC effects . If a major GC is triggered , the timing test is severely affected . (in two different processes), as there are otherwise GC effects. If a major GC is triggered, the timing test is severely affected. *) let module D = struct (* The size of the randomly generated documents. *) let n = 10000 (* The number of runs. *) let runs = 1000 let () = Printf.printf "Generating %d documents of size %d...\n%!" runs n let docs = Array.init runs (fun _ -> random n) end in match engine with | Old -> Printf.printf "Timing old engine...\n"; let module T = Time1(OldPPrintEngine)(D) in () | New -> Printf.printf "Timing new engine...\n"; let module T = Time1(PPrintEngine)(D) in () let () = (* The comparison between the old and new engines is now disabled, because the new engine removes trailing blank characters on every line, whereas the old engine does not. *) if false then test2(); test3 New
null
https://raw.githubusercontent.com/fpottier/pprint/fbf9ed81e6de690f60f20c8c16a1374753722868/bench/PPrintBench.ml
ocaml
**************************************************************************** License, with an exception, as described in the file LICENSE. **************************************************************************** ------------------------------------------------------------------------- The following signature is common to the old and new engines. ------------------------------------------------------------------------- ------------------------------------------------------------------------- [measure v] measures the size of an OCaml value [v] in bytes. String.length (Marshal.to_string v []) ------------------------------------------------------------------------- [choose xs] randomly and uniformly chooses between the elements of the array [xs]. [pick] is analogous, but each element comes with a relative integer weight. Compute the total weight. Pick a random integer between 0 and the total weight. Find the corresponding element. ------------------------------------------------------------------------- A random document generator. If the budget is 0, produce an empty document. Otherwise, decrement the budget, and produce a node of nonzero arity, spending the rest of the budget on the children. ------------------------------------------------------------------------- Building documents for a particular engine. ------------------------------------------------------------------------- The rendering parameters. ------------------------------------------------------------------------- Testing an engine, alone. The size of the randomly generated documents. The number of runs. ------------------------------------------------------------------------- The size of the randomly generated documents. The number of runs. ------------------------------------------------------------------------- Timing an engine, alone. ------------------------------------------------------------------------- Main. Testing both engines on the same set of documents. The size of the randomly generated documents. The number of runs. The comparison between the old and new engines is now disabled, because the new engine removes trailing blank characters on every line, whereas the old engine does not.
PPrint , Copyright 2007 - 2022 Inria . All rights reserved . This file is distributed under the terms of the GNU Library General Public module type ENGINE = sig type document val empty: document val char: char -> document val string: string -> document val substring: string -> int -> int -> document val fancystring: string -> int -> document val fancysubstring : string -> int -> int -> int -> document val utf8string: string -> document val hardline: document val blank: int -> document val break: int -> document val (^^): document -> document -> document val nest: int -> document -> document val group: document -> document val ifflat: document -> document -> document module ToBuffer : PPrint.RENDERER with type channel = Buffer.t and type document = document end We use our own abstract syntax of documents . We produce random documents in this syntax first , then ( as part of the timed test ) translate them to the engine 's syntax . This allows timing the engine 's document construction code too . in this syntax first, then (as part of the timed test) translate them to the engine's syntax. This allows timing the engine's document construction code too. *) type mydoc = | MyEmpty | MyChar of char | MyString of string | MySubString of string * int * int | MyUtf8String of string | MyHardLine | MyBlank of int | MyBreak of int | MyCat of mydoc * mydoc | MyNest of int * mydoc | MyGroup of mydoc | MyIfFlat of mydoc * mydoc let measure v = Size.size_b v [ split n ] produces two numbers [ n1 ] and [ n2 ] comprised between [ 0 ] and [ n ] ( inclusive ) whose sum is [ n ] . (inclusive) whose sum is [n]. *) let split n = let n1 = Random.int (n + 1) in let n2 = n - n1 in n1, n2 let choose xs = Array.unsafe_get xs (Random.int (Array.length xs)) let pick wxs = let weight = List.fold_left (fun weight (w, _) -> weight + w) 0 wxs in assert (weight > 0); let i = Random.int weight in let rec loop i wxs = match wxs with | [] -> assert false | (w, x) :: wxs -> if i < w then x else loop (i - w) wxs in loop i wxs let leaf = [| MyChar 'c'; MyString "hello"; MySubString ("the cat", 4, 3); MyUtf8String "étoile"; MyHardLine; MyBlank 2; MyBreak 2 |] let rec random (n : int) : mydoc = if n = 0 then MyEmpty If the budget is 1 , produce a leaf . else if n = 1 then choose leaf else let n = n - 1 in Lazy.force (pick [ 10, lazy (let n1, n2 = split n in MyCat (random n1, random n2)); 2, lazy (MyNest (2, random n)); 10, lazy (MyGroup (random n)); 2, lazy (let n1, n2 = split n in MyIfFlat (random n1, random n2)) ]) module Build (E : ENGINE) = struct open E let rec build (doc : mydoc) : document = match doc with | MyEmpty -> empty | MyChar c -> char c | MyString s -> string s | MySubString (s, ofs, len) -> substring s ofs len | MyUtf8String s -> utf8string s | MyHardLine -> hardline | MyBlank b -> blank b | MyBreak b -> break b | MyCat (doc1, doc2) -> build doc1 ^^ build doc2 | MyNest (i, doc) -> nest i (build doc) | MyGroup doc -> group (build doc) | MyIfFlat (doc1, doc2) -> ifflat (build doc1) (build doc2) end let rfrac = 0.8 let width = 80 module Test1 (E : ENGINE) = struct open E let n = 1000 let runs = 10000 let () = let module B = Build(E) in let s = ref 0 in for _r = 1 to runs do let document = B.build (random n) in s := !s + measure document; let buffer = Buffer.create 32768 in ToBuffer.pretty rfrac width buffer document; let buffer = Buffer.create 32768 in ToBuffer.compact buffer document done; Printf.printf "Test 1: success.\n%!"; let average = float_of_int !s /. 
float_of_int runs in Printf.printf "Average document size: %d bytes.\n%!" (truncate average) end Testing two engines and comparing their output . module Test2 (E1 : ENGINE) (E2 : ENGINE) = struct let n = 1000 let runs = 10000 let () = let module B1 = Build(E1) in let module B2 = Build(E2) in for _r = 1 to runs do let document = random n in let document1 = B1.build document in let document2 = B2.build document in let buffer1 = Buffer.create 32768 in E1.ToBuffer.pretty rfrac width buffer1 document1; let buffer2 = Buffer.create 32768 in E2.ToBuffer.pretty rfrac width buffer2 document2; assert (Buffer.contents buffer1 = Buffer.contents buffer2) done; Printf.printf "Test 2: success.\n%!" end module Time1 (E : ENGINE) (D : sig val n: int val runs: int val docs : mydoc array end) = struct open E open D let gc = false let time f x = if gc then Gc.major(); let start = Unix.gettimeofday() in let y = f x in let finish = Unix.gettimeofday() in y, finish -. start let () = let module B = Build(E) in Printf.printf "Time: building documents...\n%!"; let docs, duration = time (fun () -> Array.map B.build docs) () in Printf.printf "Time: built %d documents of size %d in %.2f seconds.\n%!" runs n duration; let size = Array.fold_left (fun accu doc -> accu + measure doc) 0 docs in let average = float_of_int size /. float_of_int runs in Printf.printf "Average document size: %d bytes.\n%!" (truncate average); let buffer = Buffer.create 32768 in Printf.printf "Time: rendering documents...\n%!"; let (), duration = time (fun () -> Array.iter (fun document -> ToBuffer.pretty rfrac width buffer document; Buffer.clear buffer ) docs ) () in Printf.printf "Time: rendered %d documents of size %d in %.2f seconds.\n%!" runs n duration end let test1 () = Printf.printf "Testing old engine...\n"; let state = Random.get_state() in let module T = Test1(OldPPrintEngine) in Random.set_state state; Printf.printf "Testing new engine...\n"; let module T = Test1(PPrintEngine) in () let test2 () = Comparing the two engines . Printf.printf "Comparing old and new engines...\n"; let module T = Test2(OldPPrintEngine)(PPrintEngine) in () type engine = Old | New let test3 engine = The timing test . Best to run it separately on each engine ( in two different processes ) , as there are otherwise GC effects . If a major GC is triggered , the timing test is severely affected . (in two different processes), as there are otherwise GC effects. If a major GC is triggered, the timing test is severely affected. *) let module D = struct let n = 10000 let runs = 1000 let () = Printf.printf "Generating %d documents of size %d...\n%!" runs n let docs = Array.init runs (fun _ -> random n) end in match engine with | Old -> Printf.printf "Timing old engine...\n"; let module T = Time1(OldPPrintEngine)(D) in () | New -> Printf.printf "Timing new engine...\n"; let module T = Time1(PPrintEngine)(D) in () let () = if false then test2(); test3 New
61df1aa0ce1297a9fb5134fee739a15ac3c34aa5b4919cfa19025a07f7a72049
expipiplus1/orbits
Test.hs
{-# LANGUAGE DataKinds #-} # LANGUAGE FlexibleContexts # # LANGUAGE GeneralizedNewtypeDeriving # {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TemplateHaskell #-} module Main ( main ) where import Data.CReal ( CReal ) import Data.CReal.QuickCheck ( ) import Data.Coerce ( coerce ) import Data.Constants.Mechanics.Extra import Data.Maybe import Data.Metrology hiding ( (%) ) import Data.Metrology.Extra import Data.Units.SI.Parser import Physics.Orbit import Physics.Orbit.QuickCheck import Test.QuickCheck.Arbitrary ( Arbitrary ) import Test.QuickCheck.Checkers ( inverse , inverseL ) import Test.QuickCheck.Extra ( slowTest , slowTestQCRatio ) import Test.Tasty ( TestTree , defaultIngredients , defaultMainWithIngredients , includingOptions , testGroup ) import Test.Tasty.QuickCheck ( (===) , (==>) , testProperty ) import Test.Tasty.TH ( testGroupGenerator ) import WrappedAngle ( WrappedAngle(..) ) import qualified Test.StateVectors # ANN module ( " HLint : ignore Reduce duplication " : : String ) # -- | The type used for tests which require exact arithmetic. They are compared at a resolution of 2 ^ 32 type Exact = CReal 32 -------------------------------------------------------------------------------- -- The tests -------------------------------------------------------------------------------- test_sanity :: [TestTree] test_sanity = [ testProperty "circular isValid" (\(CircularOrbit o) -> isValid (o :: Orbit Double)) , testProperty "elliptic isValid" (\(EllipticOrbit o) -> isValid (o :: Orbit Double)) , testProperty "parabolic isValid" (\(ParabolicOrbit o) -> isValid (o :: Orbit Double)) , testProperty "hyperbolic isValid" (\(HyperbolicOrbit o) -> isValid (o :: Orbit Double)) ] test_classify :: [TestTree] test_classify = [ testProperty "circular" (\(CircularOrbit o) -> classify (o :: Orbit Double) === Elliptic) , testProperty "elliptic" (\(EllipticOrbit o) -> classify (o :: Orbit Double) === Elliptic) , testProperty "parabolic" (\(ParabolicOrbit o) -> classify (o :: Orbit Double) === Parabolic) , testProperty "hyperbolic" (\(HyperbolicOrbit o) -> classify (o :: Orbit Double) === Hyperbolic) ] test_semiMajorAxis :: [TestTree] test_semiMajorAxis = [ testProperty "circular" (\(CircularOrbit o) -> fromJust (semiMajorAxis (o :: Orbit Double)) === periapsis o) , testProperty "elliptic" (\(EllipticOrbit o) -> fromJust (semiMajorAxis (o :: Orbit Double)) > zero) , testProperty "parabolic" (\(ParabolicOrbit o) -> semiMajorAxis (o :: Orbit Double) === Nothing) , testProperty "hyperbolic" (\(HyperbolicOrbit o) -> fromJust (semiMajorAxis (o :: Orbit Double)) < zero) ] test_semiMinorAxis :: [TestTree] test_semiMinorAxis = [ testGroup "range" [ testProperty "elliptic: b > 0" (\(EllipticOrbit o) -> semiMinorAxis (o :: Orbit Double) > zero) , testProperty "parabolic: b = 0" (\(ParabolicOrbit o) -> semiMinorAxis (o :: Orbit Double) === zero) , testProperty "hyperbolic: b < 0" (\(HyperbolicOrbit o) -> semiMinorAxis (o :: Orbit Double) < zero) ] , testProperty "semiMinorAxis circular = q" (\(CircularOrbit o) -> semiMinorAxis (o :: Orbit Double) === periapsis o) , testGroup "b^2 = a * l" [ testProperty "elliptic" (\(EllipticOrbit o) -> let a = fromJust (semiMajorAxis (o :: Orbit Exact)) b = semiMinorAxis o l = semiLatusRectum o in b |*| b === a |*| l) , testProperty "hyperbolic" (\(HyperbolicOrbit o) -> let a = fromJust (semiMajorAxis (o :: Orbit Exact)) b = semiMinorAxis o l = semiLatusRectum o in b |*| b === qNegate (a |*| l)) ] ] test_apoapsis :: [TestTree] test_apoapsis = [ testProperty 
"ap > q" (\(EllipticOrbit o) -> eccentricity (o :: Orbit Double) /= 0 ==> fromJust (apoapsis o) > periapsis o) , testProperty "circular: ap = q" (\(CircularOrbit o) -> fromJust (apoapsis (o :: Orbit Double)) === periapsis o) , testProperty "parabolic: no ap" (\(ParabolicOrbit o) -> apoapsis (o :: Orbit Double) === Nothing) , testProperty "hyperbolic: no ap" (\(HyperbolicOrbit o) -> apoapsis (o :: Orbit Double) === Nothing) ] test_meanMotion :: [TestTree] test_meanMotion = [ testProperty "n > 0" (\o -> meanMotion (o :: Orbit Double) > zero) ] test_period :: [TestTree] test_period = [ testProperty "p > 0" (\(EllipticOrbit o) -> fromJust (period (o :: Orbit Double)) > zero) , testProperty "4 π a^3 / p^2 = μ" (\(EllipticOrbit o) -> let Just p = period (o :: Orbit Exact) Just a = semiMajorAxis o μ = primaryGravitationalParameter o in (4 * qSq pi) |*| qCube a |/| qSq p === μ) , testProperty "parabolic: no p" (\(ParabolicOrbit o) -> period (o :: Orbit Double) === Nothing) , testProperty "hyperbolic: no p" (\(HyperbolicOrbit o) -> period (o :: Orbit Double) === Nothing) ] -- TODO: Put converge test here test_hyperbolicAngles :: [TestTree] test_hyperbolicAngles = [ testProperty "parabolic approach" (\(ParabolicOrbit o) -> fromJust (hyperbolicApproachAngle (o :: Orbit Double)) === qNegate halfTurn) , testProperty "parabolic departure" (\(ParabolicOrbit o) -> fromJust (hyperbolicDepartureAngle (o :: Orbit Double)) === halfTurn) , testProperty "hyperbolic symmetry" (\(HyperbolicOrbit o) -> fromJust (hyperbolicDepartureAngle (o :: Orbit Double)) === qNegate (fromJust (hyperbolicApproachAngle o))) , testProperty "elliptic: no approach" (\(EllipticOrbit o) -> hyperbolicApproachAngle (o :: Orbit Double) === Nothing) , testProperty "elliptic: no departure" (\(EllipticOrbit o) -> hyperbolicDepartureAngle (o :: Orbit Double) === Nothing) ] anomalyConversionTests :: (forall a. (RealFloat a, Show a, Arbitrary a, Converge [a]) => Orbit a -> Angle a -> Angle a) -> String -> String -> [TestTree] anomalyConversionTests convertAnomaly fromName toName = [ testProperty (toName ++ " when " ++ fromName ++ " = 0") (\(EllipticOrbit o) -> let to = convertAnomaly (o :: Orbit Double) zero in to === zero) , testProperty (toName ++ " when " ++ fromName ++ " = π") (\(EllipticOrbit o) -> let to = convertAnomaly (o :: Orbit Double) halfTurn in to === halfTurn) , testProperty (toName ++ " when " ++ fromName ++ " = 2π") (\(EllipticOrbit o) -> let to = convertAnomaly (o :: Orbit Double) turn in to === turn) , testProperty "identity on circular orbits" (\(CircularOrbit o) from -> let to = convertAnomaly (o :: Orbit Exact) from in from === to) , testProperty "orbit number preservation" (\(EllipticOrbit o) from -> let to = convertAnomaly (o :: Orbit Double) from in from `div'` turn === (to `div'` turn :: Unitless Integer)) ] timeAnomalyConversionTests :: (forall a. 
(RealFloat a, Show a, Arbitrary a, Converge [a]) => Orbit a -> Time a -> Angle a) -> String -> [TestTree] timeAnomalyConversionTests timeToAnomaly toName = [ testProperty (toName ++ " when time = 0") (\(EllipticOrbit o) -> let to = timeToAnomaly (o :: Orbit Double) zero in to === zero) , testProperty (toName ++ " when time = p/2") (\(EllipticOrbit o) -> let to = timeToAnomaly (o :: Orbit Exact) (p|/|2) Just p = period o in to === halfTurn) , testProperty (toName ++ " when time = p") (\(EllipticOrbit o) -> let to = timeToAnomaly (o :: Orbit Exact) p Just p = period o in to === turn) , testProperty "identity on the unit orbit (modulo units!)" (\time -> let o = unitOrbit to = timeToAnomaly (o :: Orbit Exact) time in time # [si|s|] === to # [si|rad|]) , testProperty "orbit number preservation" (\(EllipticOrbit o) time -> let to = timeToAnomaly (o :: Orbit Double) time Just p = period o in time `div'` p === (to `div'` turn :: Unitless Integer)) ] anomalyTimeConversionTests :: (forall a. (RealFloat a, Show a, Arbitrary a, Converge [a]) => Orbit a -> Angle a -> Time a) -> String -> [TestTree] anomalyTimeConversionTests anomalyToTime fromName = [ testProperty ("time when " ++ fromName ++ " = 0") (\(EllipticOrbit o) -> let t = anomalyToTime (o :: Orbit Double) zero in t === zero) , testProperty ("time when " ++ fromName ++ " = π") (\(EllipticOrbit o) -> let t = anomalyToTime (o :: Orbit Double) halfTurn Just p = period o in t === p |/| 2) , testProperty ("time when " ++ fromName ++ " = 2π") (\(EllipticOrbit o) -> let t = anomalyToTime (o :: Orbit Double) turn Just p = period o in t === p) , testProperty "identity on the unit orbit (modulo units!)" (\from -> let o = unitOrbit t = anomalyToTime (o :: Orbit Exact) from in from # [si|rad|] === t # [si|s|]) , testProperty "orbit number preservation" (\(EllipticOrbit o) from -> let t = anomalyToTime (o :: Orbit Double) from Just p = period o in from `div'` turn === (t `div'` p :: Unitless Integer)) ] (.:) :: (a -> b) -> (c -> d -> a) -> c -> d -> b f .: g = \x y -> f (g x y) (~>) :: Bool -> Bool -> Bool a ~> b = not a || b test_conversions :: [TestTree] test_conversions = [ conversionToTime , conversionToMeanAnomaly , conversionToEccentricAnomaly , conversionToTrueAnomaly , conversionInverses ] where conversionToTime = testGroup "conversion to time" [ testGroup "from mean anomaly" (anomalyTimeConversionTests timeAtMeanAnomaly "mean anomaly") , testGroup "from eccentric anomaly" (anomalyTimeConversionTests (fromJust .: timeAtEccentricAnomaly) "eccentric anomaly") , testGroup "from true anomaly" (anomalyTimeConversionTests (fromJust .: timeAtTrueAnomaly) "true anomaly") , testProperty "from true anomaly out of bounds parabolic" (\ν (ParabolicOrbitF o) -> validTrueAnomaly o ν ~> isJust (timeAtTrueAnomaly o ν)) ] conversionToMeanAnomaly = let s = "mean anomaly" in testGroup ("conversion to " ++ s) [ testGroup "from time" (timeAnomalyConversionTests meanAnomalyAtTime s) , testGroup "from eccentric anomaly" (anomalyConversionTests (fromJust .: meanAnomalyAtEccentricAnomaly) "eccentric anomaly" s) , testGroup "from true anomaly" (anomalyConversionTests (fromJust .: meanAnomalyAtTrueAnomaly) "true anomaly" s) ] conversionToEccentricAnomaly = let s = "eccentric anomaly" in testGroup ("conversion to " ++ s) [ testGroup "from time" (timeAnomalyConversionTests (fromJust .: eccentricAnomalyAtTime) s) , testGroup "from mean anomaly" (anomalyConversionTests (fromJust .: eccentricAnomalyAtMeanAnomaly) "mean anomaly" s) , testGroup "from true anomaly" 
(anomalyConversionTests (fromJust .: eccentricAnomalyAtTrueAnomaly) "true anomaly" s) ] conversionToTrueAnomaly = let s = "true anomaly" in testGroup ("conversion to " ++ s) [ testGroup "from time" (timeAnomalyConversionTests trueAnomalyAtTime s) , testGroup "from mean anomaly" (anomalyConversionTests trueAnomalyAtMeanAnomaly "mean anomaly" s) , testGroup "from eccentric anomaly" (anomalyConversionTests (fromJust .: trueAnomalyAtEccentricAnomaly) "eccentric anomaly" s) ] conversionInverses = testGroup "conversionInverses" [ testProperty "mean time inverse" (\o -> inverse (meanAnomalyAtTime (o :: Orbit Exact)) (timeAtMeanAnomaly o)) , slowTest $ testProperty "mean eccentric inverse" (\(EllipticOrbit o) -> inverse (coerce (fromJust . meanAnomalyAtEccentricAnomaly (o :: Orbit Exact)) :: WrappedAngle Exact -> WrappedAngle Exact) (coerce (fromJust . eccentricAnomalyAtMeanAnomaly o))) , slowTest $ testProperty "mean hyperbolic inverse" (\(HyperbolicOrbit o) -> inverseL (fromJust . meanAnomalyAtHyperbolicAnomaly @Exact o) (fromJust . hyperbolicAnomalyAtMeanAnomaly o)) , slowTest $ testProperty "mean true inverse" (\(EllipticOrbit o) -> inverse (fromJust . meanAnomalyAtTrueAnomaly (o :: Orbit Exact)) (trueAnomalyAtMeanAnomaly o)) , slowTest $ testProperty "time true inverse elliptic" (\(EllipticOrbit o) -> inverse (fromJust . timeAtTrueAnomaly (o :: Orbit Exact)) (trueAnomalyAtTime o)) , slowTest $ testProperty "true time inverse parabolic" (\(ParabolicOrbit o) -> -- Use inverseL because there doesn't exist a time for every true -- anomaly inverseL (fromJust . timeAtTrueAnomaly (o :: Orbit Exact)) (trueAnomalyAtTime o) ) , testProperty "time eccentric inverse" (\(EllipticOrbit o) -> inverse (fromJust . timeAtEccentricAnomaly (o :: Orbit Exact)) (fromJust . eccentricAnomalyAtTime o)) -- , slowTest $ testProperty "time hyperbolic inverse" -- (\(HyperbolicOrbit o) -> inverseL ( fromJust . timeAtHyperbolicAnomaly @Exact o ) -- (fromJust . hyperbolicAnomalyAtTime o)) , testProperty "eccentric true inverse" (\(EllipticOrbit o) -> inverse (coerce (fromJust . eccentricAnomalyAtTrueAnomaly (o:: Orbit Exact)) :: WrappedAngle Exact -> WrappedAngle Exact) (fromJust . coerce (trueAnomalyAtEccentricAnomaly o))) , testProperty "hyperbolic true inverse" (\(HyperbolicOrbit o) -> inverseL (fromJust . hyperbolicAnomalyAtTrueAnomaly o) (fromJust . 
trueAnomalyAtHyperbolicAnomaly @Exact o)) ] test_anomalies :: [TestTree] test_anomalies = [ slowTest $ testProperty "hyperbolic true" (\(HyperbolicOrbit o) _M -> let Just _H = hyperbolicAnomalyAtMeanAnomaly @Exact o _M ν = trueAnomalyAtMeanAnomaly o _M e = eccentricity o in qCosh _H === (qCos ν + e) / (1 + e * qCos ν) ) ] -- TODO: Put parabolic and hyperbolic tests here test_areal :: [TestTree] test_areal = [ testProperty "elliptic areal area" (\(EllipticOrbit o) -> let Just a = semiMajorAxis (o :: Orbit Exact) b = semiMinorAxis o area = pi |*| a |*| b Just p = period o in area === p |*| arealVelocity o) ] test_orbitalEnergy :: [TestTree] test_orbitalEnergy = [ testProperty "negative elliptical energy" (\(EllipticOrbitF o) -> specificOrbitalEnergy o < zero) , testProperty "zero parabolic energy" (\(ParabolicOrbitF o) -> specificOrbitalEnergy o === zero) , testProperty "positive hyperbolic energy" (\(HyperbolicOrbitF o) -> specificOrbitalEnergy o > zero) , testGroup "potential + kinetic" (overAllClasses (\o ν -> specificOrbitalEnergy @Exact o === specificPotentialEnergyAtTrueAnomaly o ν |+| specificKineticEnergyAtTrueAnomaly o ν ) ) ] test_radius :: [TestTree] test_radius = [ testGroup "periapsis when ν = 0" (overAllClasses (\o -> radiusAtTrueAnomaly @Exact o zero === periapsis o)) , testProperty "constant on circular" (\(CircularOrbitF o) ν -> radiusAtTrueAnomaly o ν === periapsis o) , testProperty "apoapsis when ν == π for elliptic" (\(EllipticOrbit o) -> radiusAtTrueAnomaly @Exact o halfTurn === fromJust (apoapsis o) ) , testGroup "l when ν == π/2" (overAllClasses (\o -> radiusAtTrueAnomaly @Exact o (halfTurn |*| (-0.5)) === semiLatusRectum o ) ) , testGroup "l when ν == -π/2" (overAllClasses (\o -> radiusAtTrueAnomaly @Exact o (halfTurn |*| (-0.5)) === semiLatusRectum o ) ) , testProperty "from E" (\(EllipticOrbit o) ν -> let Just _E = eccentricAnomalyAtTrueAnomaly @Exact o ν in radiusAtTrueAnomaly o ν === fromJust (semiMajorAxis o) |*| (1 - eccentricity o |*| qCos _E) ) ] test_speed :: [TestTree] test_speed = [ testProperty "constant on circular" (\(CircularOrbitF o) ν ν' -> speedAtTrueAnomaly o ν === speedAtTrueAnomaly o ν' ) , testProperty "zero at apex" (\(ParabolicOrbitF o) -> speedAtTrueAnomaly o halfTurn === zero) , testProperty "below escape velocity for elliptical" (\(EllipticOrbitF o) ν -> speedAtTrueAnomaly o ν < escapeVelocityAtDistance (primaryGravitationalParameter o) (radiusAtTrueAnomaly o ν) ) , testProperty "escape velocity for parabolic" (\(ParabolicOrbitF o) ν -> speedAtTrueAnomaly o ν === escapeVelocityAtDistance (primaryGravitationalParameter o) (radiusAtTrueAnomaly o ν) ) , testProperty "above escape velocity for hyperbolic" (\(HyperbolicOrbitF o) _M -> let ν = trueAnomalyAtMeanAnomaly o _M in speedAtTrueAnomaly o ν > escapeVelocityAtDistance (primaryGravitationalParameter o) (radiusAtTrueAnomaly o ν) ) ] test_angularMomentum :: [TestTree] test_angularMomentum = [ testProperty "negative elliptical energy" (\(EllipticOrbitF o) -> specificOrbitalEnergy o < zero) , testProperty "zero parabolic energy" (\(ParabolicOrbitF o) -> specificOrbitalEnergy o === zero) , testProperty "positive hyperbolic energy" (\(HyperbolicOrbitF o) -> specificOrbitalEnergy o > zero) ] test_stateVectors :: [TestTree] test_stateVectors = [Test.StateVectors.tests] main :: IO () main = do let is = includingOptions [slowTestQCRatio] : defaultIngredients defaultMainWithIngredients is $(testGroupGenerator) ---------------------------------------------------------------- Orbit utils 
---------------------------------------------------------------- validTrueAnomaly :: (Floating a, Ord a) => Orbit a -> Angle a -> Bool validTrueAnomaly o ν = case hyperbolicDepartureAngle o of Nothing -> True Just d -> qAbs ν < d
null
https://raw.githubusercontent.com/expipiplus1/orbits/6cc5c43f50aadc7517a6677ca04363b492474a6a/test/Test.hs
haskell
# LANGUAGE DataKinds # # LANGUAGE QuasiQuotes # # LANGUAGE RankNTypes # # LANGUAGE TemplateHaskell # | The type used for tests which require exact arithmetic. They are compared ------------------------------------------------------------------------------ The tests ------------------------------------------------------------------------------ TODO: Put converge test here Use inverseL because there doesn't exist a time for every true anomaly , slowTest $ testProperty "time hyperbolic inverse" (\(HyperbolicOrbit o) -> (fromJust . hyperbolicAnomalyAtTime o)) TODO: Put parabolic and hyperbolic tests here -------------------------------------------------------------- --------------------------------------------------------------
# LANGUAGE FlexibleContexts # # LANGUAGE GeneralizedNewtypeDeriving # module Main ( main ) where import Data.CReal ( CReal ) import Data.CReal.QuickCheck ( ) import Data.Coerce ( coerce ) import Data.Constants.Mechanics.Extra import Data.Maybe import Data.Metrology hiding ( (%) ) import Data.Metrology.Extra import Data.Units.SI.Parser import Physics.Orbit import Physics.Orbit.QuickCheck import Test.QuickCheck.Arbitrary ( Arbitrary ) import Test.QuickCheck.Checkers ( inverse , inverseL ) import Test.QuickCheck.Extra ( slowTest , slowTestQCRatio ) import Test.Tasty ( TestTree , defaultIngredients , defaultMainWithIngredients , includingOptions , testGroup ) import Test.Tasty.QuickCheck ( (===) , (==>) , testProperty ) import Test.Tasty.TH ( testGroupGenerator ) import WrappedAngle ( WrappedAngle(..) ) import qualified Test.StateVectors # ANN module ( " HLint : ignore Reduce duplication " : : String ) # at a resolution of 2 ^ 32 type Exact = CReal 32 test_sanity :: [TestTree] test_sanity = [ testProperty "circular isValid" (\(CircularOrbit o) -> isValid (o :: Orbit Double)) , testProperty "elliptic isValid" (\(EllipticOrbit o) -> isValid (o :: Orbit Double)) , testProperty "parabolic isValid" (\(ParabolicOrbit o) -> isValid (o :: Orbit Double)) , testProperty "hyperbolic isValid" (\(HyperbolicOrbit o) -> isValid (o :: Orbit Double)) ] test_classify :: [TestTree] test_classify = [ testProperty "circular" (\(CircularOrbit o) -> classify (o :: Orbit Double) === Elliptic) , testProperty "elliptic" (\(EllipticOrbit o) -> classify (o :: Orbit Double) === Elliptic) , testProperty "parabolic" (\(ParabolicOrbit o) -> classify (o :: Orbit Double) === Parabolic) , testProperty "hyperbolic" (\(HyperbolicOrbit o) -> classify (o :: Orbit Double) === Hyperbolic) ] test_semiMajorAxis :: [TestTree] test_semiMajorAxis = [ testProperty "circular" (\(CircularOrbit o) -> fromJust (semiMajorAxis (o :: Orbit Double)) === periapsis o) , testProperty "elliptic" (\(EllipticOrbit o) -> fromJust (semiMajorAxis (o :: Orbit Double)) > zero) , testProperty "parabolic" (\(ParabolicOrbit o) -> semiMajorAxis (o :: Orbit Double) === Nothing) , testProperty "hyperbolic" (\(HyperbolicOrbit o) -> fromJust (semiMajorAxis (o :: Orbit Double)) < zero) ] test_semiMinorAxis :: [TestTree] test_semiMinorAxis = [ testGroup "range" [ testProperty "elliptic: b > 0" (\(EllipticOrbit o) -> semiMinorAxis (o :: Orbit Double) > zero) , testProperty "parabolic: b = 0" (\(ParabolicOrbit o) -> semiMinorAxis (o :: Orbit Double) === zero) , testProperty "hyperbolic: b < 0" (\(HyperbolicOrbit o) -> semiMinorAxis (o :: Orbit Double) < zero) ] , testProperty "semiMinorAxis circular = q" (\(CircularOrbit o) -> semiMinorAxis (o :: Orbit Double) === periapsis o) , testGroup "b^2 = a * l" [ testProperty "elliptic" (\(EllipticOrbit o) -> let a = fromJust (semiMajorAxis (o :: Orbit Exact)) b = semiMinorAxis o l = semiLatusRectum o in b |*| b === a |*| l) , testProperty "hyperbolic" (\(HyperbolicOrbit o) -> let a = fromJust (semiMajorAxis (o :: Orbit Exact)) b = semiMinorAxis o l = semiLatusRectum o in b |*| b === qNegate (a |*| l)) ] ] test_apoapsis :: [TestTree] test_apoapsis = [ testProperty "ap > q" (\(EllipticOrbit o) -> eccentricity (o :: Orbit Double) /= 0 ==> fromJust (apoapsis o) > periapsis o) , testProperty "circular: ap = q" (\(CircularOrbit o) -> fromJust (apoapsis (o :: Orbit Double)) === periapsis o) , testProperty "parabolic: no ap" (\(ParabolicOrbit o) -> apoapsis (o :: Orbit Double) === Nothing) , testProperty "hyperbolic: no ap" 
(\(HyperbolicOrbit o) -> apoapsis (o :: Orbit Double) === Nothing) ] test_meanMotion :: [TestTree] test_meanMotion = [ testProperty "n > 0" (\o -> meanMotion (o :: Orbit Double) > zero) ] test_period :: [TestTree] test_period = [ testProperty "p > 0" (\(EllipticOrbit o) -> fromJust (period (o :: Orbit Double)) > zero) , testProperty "4 π a^3 / p^2 = μ" (\(EllipticOrbit o) -> let Just p = period (o :: Orbit Exact) Just a = semiMajorAxis o μ = primaryGravitationalParameter o in (4 * qSq pi) |*| qCube a |/| qSq p === μ) , testProperty "parabolic: no p" (\(ParabolicOrbit o) -> period (o :: Orbit Double) === Nothing) , testProperty "hyperbolic: no p" (\(HyperbolicOrbit o) -> period (o :: Orbit Double) === Nothing) ] test_hyperbolicAngles :: [TestTree] test_hyperbolicAngles = [ testProperty "parabolic approach" (\(ParabolicOrbit o) -> fromJust (hyperbolicApproachAngle (o :: Orbit Double)) === qNegate halfTurn) , testProperty "parabolic departure" (\(ParabolicOrbit o) -> fromJust (hyperbolicDepartureAngle (o :: Orbit Double)) === halfTurn) , testProperty "hyperbolic symmetry" (\(HyperbolicOrbit o) -> fromJust (hyperbolicDepartureAngle (o :: Orbit Double)) === qNegate (fromJust (hyperbolicApproachAngle o))) , testProperty "elliptic: no approach" (\(EllipticOrbit o) -> hyperbolicApproachAngle (o :: Orbit Double) === Nothing) , testProperty "elliptic: no departure" (\(EllipticOrbit o) -> hyperbolicDepartureAngle (o :: Orbit Double) === Nothing) ] anomalyConversionTests :: (forall a. (RealFloat a, Show a, Arbitrary a, Converge [a]) => Orbit a -> Angle a -> Angle a) -> String -> String -> [TestTree] anomalyConversionTests convertAnomaly fromName toName = [ testProperty (toName ++ " when " ++ fromName ++ " = 0") (\(EllipticOrbit o) -> let to = convertAnomaly (o :: Orbit Double) zero in to === zero) , testProperty (toName ++ " when " ++ fromName ++ " = π") (\(EllipticOrbit o) -> let to = convertAnomaly (o :: Orbit Double) halfTurn in to === halfTurn) , testProperty (toName ++ " when " ++ fromName ++ " = 2π") (\(EllipticOrbit o) -> let to = convertAnomaly (o :: Orbit Double) turn in to === turn) , testProperty "identity on circular orbits" (\(CircularOrbit o) from -> let to = convertAnomaly (o :: Orbit Exact) from in from === to) , testProperty "orbit number preservation" (\(EllipticOrbit o) from -> let to = convertAnomaly (o :: Orbit Double) from in from `div'` turn === (to `div'` turn :: Unitless Integer)) ] timeAnomalyConversionTests :: (forall a. (RealFloat a, Show a, Arbitrary a, Converge [a]) => Orbit a -> Time a -> Angle a) -> String -> [TestTree] timeAnomalyConversionTests timeToAnomaly toName = [ testProperty (toName ++ " when time = 0") (\(EllipticOrbit o) -> let to = timeToAnomaly (o :: Orbit Double) zero in to === zero) , testProperty (toName ++ " when time = p/2") (\(EllipticOrbit o) -> let to = timeToAnomaly (o :: Orbit Exact) (p|/|2) Just p = period o in to === halfTurn) , testProperty (toName ++ " when time = p") (\(EllipticOrbit o) -> let to = timeToAnomaly (o :: Orbit Exact) p Just p = period o in to === turn) , testProperty "identity on the unit orbit (modulo units!)" (\time -> let o = unitOrbit to = timeToAnomaly (o :: Orbit Exact) time in time # [si|s|] === to # [si|rad|]) , testProperty "orbit number preservation" (\(EllipticOrbit o) time -> let to = timeToAnomaly (o :: Orbit Double) time Just p = period o in time `div'` p === (to `div'` turn :: Unitless Integer)) ] anomalyTimeConversionTests :: (forall a. 
(RealFloat a, Show a, Arbitrary a, Converge [a]) => Orbit a -> Angle a -> Time a) -> String -> [TestTree] anomalyTimeConversionTests anomalyToTime fromName = [ testProperty ("time when " ++ fromName ++ " = 0") (\(EllipticOrbit o) -> let t = anomalyToTime (o :: Orbit Double) zero in t === zero) , testProperty ("time when " ++ fromName ++ " = π") (\(EllipticOrbit o) -> let t = anomalyToTime (o :: Orbit Double) halfTurn Just p = period o in t === p |/| 2) , testProperty ("time when " ++ fromName ++ " = 2π") (\(EllipticOrbit o) -> let t = anomalyToTime (o :: Orbit Double) turn Just p = period o in t === p) , testProperty "identity on the unit orbit (modulo units!)" (\from -> let o = unitOrbit t = anomalyToTime (o :: Orbit Exact) from in from # [si|rad|] === t # [si|s|]) , testProperty "orbit number preservation" (\(EllipticOrbit o) from -> let t = anomalyToTime (o :: Orbit Double) from Just p = period o in from `div'` turn === (t `div'` p :: Unitless Integer)) ] (.:) :: (a -> b) -> (c -> d -> a) -> c -> d -> b f .: g = \x y -> f (g x y) (~>) :: Bool -> Bool -> Bool a ~> b = not a || b test_conversions :: [TestTree] test_conversions = [ conversionToTime , conversionToMeanAnomaly , conversionToEccentricAnomaly , conversionToTrueAnomaly , conversionInverses ] where conversionToTime = testGroup "conversion to time" [ testGroup "from mean anomaly" (anomalyTimeConversionTests timeAtMeanAnomaly "mean anomaly") , testGroup "from eccentric anomaly" (anomalyTimeConversionTests (fromJust .: timeAtEccentricAnomaly) "eccentric anomaly") , testGroup "from true anomaly" (anomalyTimeConversionTests (fromJust .: timeAtTrueAnomaly) "true anomaly") , testProperty "from true anomaly out of bounds parabolic" (\ν (ParabolicOrbitF o) -> validTrueAnomaly o ν ~> isJust (timeAtTrueAnomaly o ν)) ] conversionToMeanAnomaly = let s = "mean anomaly" in testGroup ("conversion to " ++ s) [ testGroup "from time" (timeAnomalyConversionTests meanAnomalyAtTime s) , testGroup "from eccentric anomaly" (anomalyConversionTests (fromJust .: meanAnomalyAtEccentricAnomaly) "eccentric anomaly" s) , testGroup "from true anomaly" (anomalyConversionTests (fromJust .: meanAnomalyAtTrueAnomaly) "true anomaly" s) ] conversionToEccentricAnomaly = let s = "eccentric anomaly" in testGroup ("conversion to " ++ s) [ testGroup "from time" (timeAnomalyConversionTests (fromJust .: eccentricAnomalyAtTime) s) , testGroup "from mean anomaly" (anomalyConversionTests (fromJust .: eccentricAnomalyAtMeanAnomaly) "mean anomaly" s) , testGroup "from true anomaly" (anomalyConversionTests (fromJust .: eccentricAnomalyAtTrueAnomaly) "true anomaly" s) ] conversionToTrueAnomaly = let s = "true anomaly" in testGroup ("conversion to " ++ s) [ testGroup "from time" (timeAnomalyConversionTests trueAnomalyAtTime s) , testGroup "from mean anomaly" (anomalyConversionTests trueAnomalyAtMeanAnomaly "mean anomaly" s) , testGroup "from eccentric anomaly" (anomalyConversionTests (fromJust .: trueAnomalyAtEccentricAnomaly) "eccentric anomaly" s) ] conversionInverses = testGroup "conversionInverses" [ testProperty "mean time inverse" (\o -> inverse (meanAnomalyAtTime (o :: Orbit Exact)) (timeAtMeanAnomaly o)) , slowTest $ testProperty "mean eccentric inverse" (\(EllipticOrbit o) -> inverse (coerce (fromJust . meanAnomalyAtEccentricAnomaly (o :: Orbit Exact)) :: WrappedAngle Exact -> WrappedAngle Exact) (coerce (fromJust . eccentricAnomalyAtMeanAnomaly o))) , slowTest $ testProperty "mean hyperbolic inverse" (\(HyperbolicOrbit o) -> inverseL (fromJust . 
meanAnomalyAtHyperbolicAnomaly @Exact o) (fromJust . hyperbolicAnomalyAtMeanAnomaly o)) , slowTest $ testProperty "mean true inverse" (\(EllipticOrbit o) -> inverse (fromJust . meanAnomalyAtTrueAnomaly (o :: Orbit Exact)) (trueAnomalyAtMeanAnomaly o)) , slowTest $ testProperty "time true inverse elliptic" (\(EllipticOrbit o) -> inverse (fromJust . timeAtTrueAnomaly (o :: Orbit Exact)) (trueAnomalyAtTime o)) , slowTest $ testProperty "true time inverse parabolic" (\(ParabolicOrbit o) -> inverseL (fromJust . timeAtTrueAnomaly (o :: Orbit Exact)) (trueAnomalyAtTime o) ) , testProperty "time eccentric inverse" (\(EllipticOrbit o) -> inverse (fromJust . timeAtEccentricAnomaly (o :: Orbit Exact)) (fromJust . eccentricAnomalyAtTime o)) inverseL ( fromJust . timeAtHyperbolicAnomaly @Exact o ) , testProperty "eccentric true inverse" (\(EllipticOrbit o) -> inverse (coerce (fromJust . eccentricAnomalyAtTrueAnomaly (o:: Orbit Exact)) :: WrappedAngle Exact -> WrappedAngle Exact) (fromJust . coerce (trueAnomalyAtEccentricAnomaly o))) , testProperty "hyperbolic true inverse" (\(HyperbolicOrbit o) -> inverseL (fromJust . hyperbolicAnomalyAtTrueAnomaly o) (fromJust . trueAnomalyAtHyperbolicAnomaly @Exact o)) ] test_anomalies :: [TestTree] test_anomalies = [ slowTest $ testProperty "hyperbolic true" (\(HyperbolicOrbit o) _M -> let Just _H = hyperbolicAnomalyAtMeanAnomaly @Exact o _M ν = trueAnomalyAtMeanAnomaly o _M e = eccentricity o in qCosh _H === (qCos ν + e) / (1 + e * qCos ν) ) ] test_areal :: [TestTree] test_areal = [ testProperty "elliptic areal area" (\(EllipticOrbit o) -> let Just a = semiMajorAxis (o :: Orbit Exact) b = semiMinorAxis o area = pi |*| a |*| b Just p = period o in area === p |*| arealVelocity o) ] test_orbitalEnergy :: [TestTree] test_orbitalEnergy = [ testProperty "negative elliptical energy" (\(EllipticOrbitF o) -> specificOrbitalEnergy o < zero) , testProperty "zero parabolic energy" (\(ParabolicOrbitF o) -> specificOrbitalEnergy o === zero) , testProperty "positive hyperbolic energy" (\(HyperbolicOrbitF o) -> specificOrbitalEnergy o > zero) , testGroup "potential + kinetic" (overAllClasses (\o ν -> specificOrbitalEnergy @Exact o === specificPotentialEnergyAtTrueAnomaly o ν |+| specificKineticEnergyAtTrueAnomaly o ν ) ) ] test_radius :: [TestTree] test_radius = [ testGroup "periapsis when ν = 0" (overAllClasses (\o -> radiusAtTrueAnomaly @Exact o zero === periapsis o)) , testProperty "constant on circular" (\(CircularOrbitF o) ν -> radiusAtTrueAnomaly o ν === periapsis o) , testProperty "apoapsis when ν == π for elliptic" (\(EllipticOrbit o) -> radiusAtTrueAnomaly @Exact o halfTurn === fromJust (apoapsis o) ) , testGroup "l when ν == π/2" (overAllClasses (\o -> radiusAtTrueAnomaly @Exact o (halfTurn |*| (-0.5)) === semiLatusRectum o ) ) , testGroup "l when ν == -π/2" (overAllClasses (\o -> radiusAtTrueAnomaly @Exact o (halfTurn |*| (-0.5)) === semiLatusRectum o ) ) , testProperty "from E" (\(EllipticOrbit o) ν -> let Just _E = eccentricAnomalyAtTrueAnomaly @Exact o ν in radiusAtTrueAnomaly o ν === fromJust (semiMajorAxis o) |*| (1 - eccentricity o |*| qCos _E) ) ] test_speed :: [TestTree] test_speed = [ testProperty "constant on circular" (\(CircularOrbitF o) ν ν' -> speedAtTrueAnomaly o ν === speedAtTrueAnomaly o ν' ) , testProperty "zero at apex" (\(ParabolicOrbitF o) -> speedAtTrueAnomaly o halfTurn === zero) , testProperty "below escape velocity for elliptical" (\(EllipticOrbitF o) ν -> speedAtTrueAnomaly o ν < escapeVelocityAtDistance (primaryGravitationalParameter o) 
(radiusAtTrueAnomaly o ν) ) , testProperty "escape velocity for parabolic" (\(ParabolicOrbitF o) ν -> speedAtTrueAnomaly o ν === escapeVelocityAtDistance (primaryGravitationalParameter o) (radiusAtTrueAnomaly o ν) ) , testProperty "above escape velocity for hyperbolic" (\(HyperbolicOrbitF o) _M -> let ν = trueAnomalyAtMeanAnomaly o _M in speedAtTrueAnomaly o ν > escapeVelocityAtDistance (primaryGravitationalParameter o) (radiusAtTrueAnomaly o ν) ) ] test_angularMomentum :: [TestTree] test_angularMomentum = [ testProperty "negative elliptical energy" (\(EllipticOrbitF o) -> specificOrbitalEnergy o < zero) , testProperty "zero parabolic energy" (\(ParabolicOrbitF o) -> specificOrbitalEnergy o === zero) , testProperty "positive hyperbolic energy" (\(HyperbolicOrbitF o) -> specificOrbitalEnergy o > zero) ] test_stateVectors :: [TestTree] test_stateVectors = [Test.StateVectors.tests] main :: IO () main = do let is = includingOptions [slowTestQCRatio] : defaultIngredients defaultMainWithIngredients is $(testGroupGenerator) Orbit utils validTrueAnomaly :: (Floating a, Ord a) => Orbit a -> Angle a -> Bool validTrueAnomaly o ν = case hyperbolicDepartureAngle o of Nothing -> True Just d -> qAbs ν < d
4eefcc3f957ccfdb5026995418337525b4da8afa37ec4dffc67d09c421c5c228
eslick/cl-registry
bundle-widgets.lisp
(in-package :registry) ;; ;; View elements for bundles ;; ;; We can get away with less widgets ;; ;; Creation protocol: 1 ) When a page bundle is to be rendered , we first walk it ;; and generate a set of widgets to render the content. 2 ) Bundles do not necessarily translation 1:1 to widgets 3 ) Configuration of widgets is set based on current rendering context ;; e.g. the depth of the widget in a possible layout 4 ) Presentation types are created for embedded questions 5 ) Any layout / presentation constraints ( i.e. CSS ) are applied to ;; set the configuration of the widgets and questions ;; Rendering protocol: ;; ;; The rendering protocol is now fairly simple, we simply accommodate the ;; parameterized widget's render-body method to render its' context and ;; then render the children. ;; ;; SIMPLE CONTEXT ;; ;; Track my path in the tree ;; Provides my parent, and my current depth... ;; (defclass rendering-context () ((path :accessor context-path :initarg :path :initform nil))) (defun make-rendering-context (&rest args) (apply #'make-instance 'rendering-context args)) (defmethod update-context (widget context system) (make-rendering-context :path (append widget (context-path context)))) (defun context-depth (context) (length (context-path context))) (defun context-parent (context) (first (context-path context))) ;; ;; BASE WIDGET CLASS ;; ;; We should not need to touch the DB to render a page, everything should ;; be cached. ;; Children are automatically rendered so default render method is nil. (defwidget bundle-widget () ((bundle :accessor bundle :initarg :bundle :initform nil :documentation "A reference to the defining bundle for debugging") (layout-info :accessor layout-info :initarg :layout-info :initform nil :documentation "A reference to any CSS-style constraints") (value-cache :accessor value-cache :initarg :value-cache :initform nil :documentation "To avoid accesses to the DB, an alist of text to render based on current translation")) (:documentation "Abstract base-class")) ;; Called by a widget which wishes to display page content or forms ;; Could also be done via a cascading stylesheet model w/ default hints, ;; removing need for a presentation system override. (defun make-bundle-widget-tree (top-bundle stylesheet system) "Main entry point to buildling bundle-widget tree" (let ((context (make-rendering-context)) (widget (make-bundle-widget top-bundle system))) (configure-bundle-widget widget context stylesheet system))) (defgeneric make-bundle-widget (bundle system) (:documentation "Turn a bundle for a presentation system which dictates default behaviors") (:method ((bundle bundle) system) (error "Bundle-widget is an abstract class and should not be instantiated by ~A for system ~A" bundle system))) (defgeneric configure-bundle-widget (bundle-widget context stylesheet system) (:documentation "Configure a bundle widget given context, stylesheet and system") (:method ((widget bundle-widget) context stylesheet system) "Default behavior is to cache translatable values and configure our children widgets" (cache-slot-translations widget) (mapcar (lambda (child) (configure-bundle-widget child (update-context bundle-widget context system) stylesheet system)) (widget-children widget)))) ;; PAGE LAYOUTS ;; (defwidget page-widget () ((header-p :accessor render-header-p :initarg :header-p :initform t) (help-p :accessor render-header-help-p :initarg :help-p :initform nil))) Should we default to a 1:1 assumption of bundles to containers ? 
;; Some of this should be factored out to the base class (defmethod make-bundle-widget ((bundle page-bundle) system) "Mostly to select the widget instance we need" (let ((widget (make-instance 'page-widget :bundle bundle))) (setf (widget-children widget) (mapcar (f (bundle) (make-bundle-widget bundle system)) (children bundle))))) (defmethod configure-bundle-widget ((widget page-widget) context stylesheet system) "Placeholder example: add stylesheet support for help state, etc?" (when (> (context-depth context) 0) (setf (render-header-p widget) nil)) (call-next-method)) (defmethod render-widget-body ((widget page-widget) &rest args) "This renders the page header. Weblocks by default will then render all of the children bundle-widgets inside the overall widget header." (declare (ignore args)) (when (render-header-p widget) (with-html (:div :class "page-widget-header" (:span :class "page-widget-title" (get-cached-slot-translation widget 'title)) (:span :class "page-widget-description" (get-cached-slot-translation widget 'description)) (:span :class "page-header-help-link" (render-link (f* (toggle-slot widget 'help-p)) "Help...")) (when (render-header-help-p widget) (htm (:span :class "page-widget-help" (get-cached-slot-translation widget 'help)))))))) ;; ;; QUESTIONS (as presentation as widgets) ;; (defmethod make-bundle-widget ((bundle question)) (make-presentation question (current-id))) (defmethod configure-bundle-widget ((p web-field-presentation) context stylesheet system) (declare (ignore context stylesheet system)) (when (equal (type-of (context-parent context)) 'row-bundle-widget) ;; set inhibit prompt p nil)) ;; ;; Presentations as widgets ;; (defmethod render-widget-body ((p web-field-presentation) &rest args) "Support basic widget protocol for presentations" (declare (ignore args)) (render-presentation p)) (defmethod render-widget-children ((p web-field-presentation) &rest args) "Support basic widget protocol for presentations" (declare (ignore args)) nil) ;; UTILITIES ;; (defun toggle-slot (object slotname) (setf (slot-value object slotname) (not (slot-value object slotname)))) ;; ;; Cheap value cache ;; (defun cache-bundle-value ((widget bundle-widget) key text) (assoc-setf (value-cache widget) key text)) (defun get-bundle-value ((widget bundle-widget) key) (assoc-get (value-cache widget) key)) ;; ;; Special case translation slot values ;; (defmethod cache-slot-translations ((widget bundle-widget) &optional slotnames) (mapcar (lambda (slotname) (cache-bundle-value widget slotname (slot-value-translation (bundle widget) slotname))) (or slotnames (translate-fields (bundle widget))))) (defmethod get-cached-slot-translation ((widget bundle-widget) slotname) (aif-ret (get-bundle-value widget slotname) (slot-value-translation (bundle widget) slotname)))
null
https://raw.githubusercontent.com/eslick/cl-registry/d4015c400dc6abf0eeaf908ed9056aac956eee82/src/data-model/bundle-widgets.lisp
lisp
View elements for bundles We can get away with less widgets Creation protocol: and generate a set of widgets to render the content. e.g. the depth of the widget in a possible layout set the configuration of the widgets and questions Rendering protocol: The rendering protocol is now fairly simple, we simply accommodate the parameterized widget's render-body method to render its' context and then render the children. SIMPLE CONTEXT Track my path in the tree Provides my parent, and my current depth... BASE WIDGET CLASS We should not need to touch the DB to render a page, everything should be cached. Children are automatically rendered so default render method is nil. Called by a widget which wishes to display page content or forms Could also be done via a cascading stylesheet model w/ default hints, removing need for a presentation system override. Some of this should be factored out to the base class QUESTIONS (as presentation as widgets) set inhibit prompt p Presentations as widgets Cheap value cache Special case translation slot values
(in-package :registry) 1 ) When a page bundle is to be rendered , we first walk it 2 ) Bundles do not necessarily translation 1:1 to widgets 3 ) Configuration of widgets is set based on current rendering context 4 ) Presentation types are created for embedded questions 5 ) Any layout / presentation constraints ( i.e. CSS ) are applied to (defclass rendering-context () ((path :accessor context-path :initarg :path :initform nil))) (defun make-rendering-context (&rest args) (apply #'make-instance 'rendering-context args)) (defmethod update-context (widget context system) (make-rendering-context :path (append widget (context-path context)))) (defun context-depth (context) (length (context-path context))) (defun context-parent (context) (first (context-path context))) (defwidget bundle-widget () ((bundle :accessor bundle :initarg :bundle :initform nil :documentation "A reference to the defining bundle for debugging") (layout-info :accessor layout-info :initarg :layout-info :initform nil :documentation "A reference to any CSS-style constraints") (value-cache :accessor value-cache :initarg :value-cache :initform nil :documentation "To avoid accesses to the DB, an alist of text to render based on current translation")) (:documentation "Abstract base-class")) (defun make-bundle-widget-tree (top-bundle stylesheet system) "Main entry point to buildling bundle-widget tree" (let ((context (make-rendering-context)) (widget (make-bundle-widget top-bundle system))) (configure-bundle-widget widget context stylesheet system))) (defgeneric make-bundle-widget (bundle system) (:documentation "Turn a bundle for a presentation system which dictates default behaviors") (:method ((bundle bundle) system) (error "Bundle-widget is an abstract class and should not be instantiated by ~A for system ~A" bundle system))) (defgeneric configure-bundle-widget (bundle-widget context stylesheet system) (:documentation "Configure a bundle widget given context, stylesheet and system") (:method ((widget bundle-widget) context stylesheet system) "Default behavior is to cache translatable values and configure our children widgets" (cache-slot-translations widget) (mapcar (lambda (child) (configure-bundle-widget child (update-context bundle-widget context system) stylesheet system)) (widget-children widget)))) PAGE LAYOUTS (defwidget page-widget () ((header-p :accessor render-header-p :initarg :header-p :initform t) (help-p :accessor render-header-help-p :initarg :help-p :initform nil))) Should we default to a 1:1 assumption of bundles to containers ? (defmethod make-bundle-widget ((bundle page-bundle) system) "Mostly to select the widget instance we need" (let ((widget (make-instance 'page-widget :bundle bundle))) (setf (widget-children widget) (mapcar (f (bundle) (make-bundle-widget bundle system)) (children bundle))))) (defmethod configure-bundle-widget ((widget page-widget) context stylesheet system) "Placeholder example: add stylesheet support for help state, etc?" (when (> (context-depth context) 0) (setf (render-header-p widget) nil)) (call-next-method)) (defmethod render-widget-body ((widget page-widget) &rest args) "This renders the page header. Weblocks by default will then render all of the children bundle-widgets inside the overall widget header." 
(declare (ignore args)) (when (render-header-p widget) (with-html (:div :class "page-widget-header" (:span :class "page-widget-title" (get-cached-slot-translation widget 'title)) (:span :class "page-widget-description" (get-cached-slot-translation widget 'description)) (:span :class "page-header-help-link" (render-link (f* (toggle-slot widget 'help-p)) "Help...")) (when (render-header-help-p widget) (htm (:span :class "page-widget-help" (get-cached-slot-translation widget 'help)))))))) (defmethod make-bundle-widget ((bundle question)) (make-presentation question (current-id))) (defmethod configure-bundle-widget ((p web-field-presentation) context stylesheet system) (declare (ignore context stylesheet system)) (when (equal (type-of (context-parent context)) 'row-bundle-widget) nil)) (defmethod render-widget-body ((p web-field-presentation) &rest args) "Support basic widget protocol for presentations" (declare (ignore args)) (render-presentation p)) (defmethod render-widget-children ((p web-field-presentation) &rest args) "Support basic widget protocol for presentations" (declare (ignore args)) nil) UTILITIES (defun toggle-slot (object slotname) (setf (slot-value object slotname) (not (slot-value object slotname)))) (defun cache-bundle-value ((widget bundle-widget) key text) (assoc-setf (value-cache widget) key text)) (defun get-bundle-value ((widget bundle-widget) key) (assoc-get (value-cache widget) key)) (defmethod cache-slot-translations ((widget bundle-widget) &optional slotnames) (mapcar (lambda (slotname) (cache-bundle-value widget slotname (slot-value-translation (bundle widget) slotname))) (or slotnames (translate-fields (bundle widget))))) (defmethod get-cached-slot-translation ((widget bundle-widget) slotname) (aif-ret (get-bundle-value widget slotname) (slot-value-translation (bundle widget) slotname)))
3c0618349d0e81efd1b7a23d1c7bda5b858a6ba83ed4832e445b6b89032fab6f
janestreet/hardcaml
wave.mli
module B : Hardcaml.Comb.S with type t = Hardcaml.Bits.t type exarray = { mutable len : int ; mutable data : B.t array } val make : unit -> exarray val extend : exarray -> unit val set : exarray -> int -> B.t -> unit val get : exarray -> int -> B.t val length : exarray -> int val data : exarray -> B.t array type wave = { name : string ; nbits : int ; data : exarray } type waves = wave array val wrap : B.t Hardcaml.Cyclesim.Api.cyclesim -> B.t Hardcaml.Cyclesim.Api.cyclesim * wave array module Gui : sig val render_1 : int * int -> int * int -> int -> Dom_html.canvasRenderingContext2D Js.t -> B.t array -> unit val render_n : ('a -> string) -> int * int -> int * int -> int -> Dom_html.canvasRenderingContext2D Js.t -> 'a array -> unit val mk_wave_table : #Dom.node Js.t -> int -> int -> wave array -> unit end
null
https://raw.githubusercontent.com/janestreet/hardcaml/4126f65f39048fef5853ba9b8d766143f678a9e4/js/wave.mli
ocaml
module B : Hardcaml.Comb.S with type t = Hardcaml.Bits.t type exarray = { mutable len : int ; mutable data : B.t array } val make : unit -> exarray val extend : exarray -> unit val set : exarray -> int -> B.t -> unit val get : exarray -> int -> B.t val length : exarray -> int val data : exarray -> B.t array type wave = { name : string ; nbits : int ; data : exarray } type waves = wave array val wrap : B.t Hardcaml.Cyclesim.Api.cyclesim -> B.t Hardcaml.Cyclesim.Api.cyclesim * wave array module Gui : sig val render_1 : int * int -> int * int -> int -> Dom_html.canvasRenderingContext2D Js.t -> B.t array -> unit val render_n : ('a -> string) -> int * int -> int * int -> int -> Dom_html.canvasRenderingContext2D Js.t -> 'a array -> unit val mk_wave_table : #Dom.node Js.t -> int -> int -> wave array -> unit end
493da4070798c872ee4823fa0c16372aa08fd17c78a3db5fa61309dca03b3f65
basho/riak_test
ensemble_remove_node.erl
%% -------------------------------------------------------------------
%% Copyright (c) 2013-2014 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   -2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ensemble_remove_node). -export([confirm/0]). -include_lib("eunit/include/eunit.hrl"). -compile({parse_transform, rt_intercept_pt}). -define(M, riak_kv_ensemble_backend_orig). confirm() -> NumNodes = 3, NVal = 3, Config = ensemble_util:fast_config(NVal), lager:info("Building cluster and waiting for ensemble to stabilize"), Nodes = ensemble_util:build_cluster(NumNodes, Config, NVal), [Node, Node2, Node3] = Nodes, ok = ensemble_util:wait_until_stable(Node, NVal), lager:info("Store a value in the root ensemble"), {ok, _} = riak_ensemble_client:kput_once(Node, root, testerooni, testvalue, 1000), lager:info("Read value from the root ensemble"), {ok, _} = riak_ensemble_client:kget(Node, root, testerooni, 1000), EnsembleStatusPid = spawn(fun()-> ensemble_status_server([]) end), rt_intercept:add(Node, {riak_kv_ensemble_backend, [{{maybe_async_update, 2}, {[EnsembleStatusPid], fun(Changes, State) -> case lists:keyfind(del, 1, Changes) of false -> ?M:maybe_async_update_orig(Changes, State); {del, {_, Node}}-> {ok, Ring} = riak_core_ring_manager:get_raw_ring(), ExitingMembers = riak_core_ring:members(Ring, [exiting]), EnsembleStatusPid ! {exiting_members, Node, ExitingMembers}, ?M:maybe_async_update_orig(Changes, State) end end}}]}), lager:info("Removing Nodes 2 and 3 from the cluster"), rt:leave(Node2), ok = ensemble_util:wait_until_stable(Node, NVal), rt:leave(Node3), ok = ensemble_util:wait_until_stable(Node, NVal), Remaining = Nodes -- [Node2, Node3], rt:wait_until_nodes_agree_about_ownership(Remaining), ok = rt:wait_until_unpingable(Node2), ok = rt:wait_until_unpingable(Node3), lager:info("Read value from the root ensemble"), {ok, _Obj} = riak_ensemble_client:kget(Node, root, testerooni, 1000), Members3 = rpc:call(Node, riak_ensemble_manager, get_members, [root]), ?assertEqual(1, length(Members3)), Cluster = rpc:call(Node, riak_ensemble_manager, cluster, []), ?assertEqual(1, length(Cluster)), EnsembleStatusPid ! {get_errors, self()}, ExitingErrors = receive E -> E end, ?assertEqual(ExitingErrors, []), pass. ensemble_status_server(Errors) -> receive {exiting_members, Node, ExitingMembers} -> case lists:member(Node, ExitingMembers) of false -> ensemble_status_server(Errors); true -> E = {invalid_exiting_status, Node, ExitingMembers}, ensemble_status_server([E | Errors]) end; {get_errors, From} -> From ! Errors end.
null
https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/tests/ensemble_remove_node.erl
erlang
------------------------------------------------------------------- Version 2.0 (the "License"); you may not use this file a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -------------------------------------------------------------------
Copyright ( c ) 2013 - 2014 Basho Technologies , Inc. This file is provided to you under the Apache License , except in compliance with the License . You may obtain software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY -module(ensemble_remove_node). -export([confirm/0]). -include_lib("eunit/include/eunit.hrl"). -compile({parse_transform, rt_intercept_pt}). -define(M, riak_kv_ensemble_backend_orig). confirm() -> NumNodes = 3, NVal = 3, Config = ensemble_util:fast_config(NVal), lager:info("Building cluster and waiting for ensemble to stablize"), Nodes = ensemble_util:build_cluster(NumNodes, Config, NVal), [Node, Node2, Node3] = Nodes, ok = ensemble_util:wait_until_stable(Node, NVal), lager:info("Store a value in the root ensemble"), {ok, _} = riak_ensemble_client:kput_once(Node, root, testerooni, testvalue, 1000), lager:info("Read value from the root ensemble"), {ok, _} = riak_ensemble_client:kget(Node, root, testerooni, 1000), EnsembleStatusPid = spawn(fun()-> ensemble_status_server([]) end), rt_intercept:add(Node, {riak_kv_ensemble_backend, [{{maybe_async_update, 2}, {[EnsembleStatusPid], fun(Changes, State) -> case lists:keyfind(del, 1, Changes) of false -> ?M:maybe_async_update_orig(Changes, State); {del, {_, Node}}-> {ok, Ring} = riak_core_ring_manager:get_raw_ring(), ExitingMembers = riak_core_ring:members(Ring, [exiting]), EnsembleStatusPid ! {exiting_members, Node, ExitingMembers}, ?M:maybe_async_update_orig(Changes, State) end end}}]}), lager:info("Removing Nodes 2 and 3 from the cluster"), rt:leave(Node2), ok = ensemble_util:wait_until_stable(Node, NVal), rt:leave(Node3), ok = ensemble_util:wait_until_stable(Node, NVal), Remaining = Nodes -- [Node2, Node3], rt:wait_until_nodes_agree_about_ownership(Remaining), ok = rt:wait_until_unpingable(Node2), ok = rt:wait_until_unpingable(Node3), lager:info("Read value from the root ensemble"), {ok, _Obj} = riak_ensemble_client:kget(Node, root, testerooni, 1000), Members3 = rpc:call(Node, riak_ensemble_manager, get_members, [root]), ?assertEqual(1, length(Members3)), Cluster = rpc:call(Node, riak_ensemble_manager, cluster, []), ?assertEqual(1, length(Cluster)), EnsembleStatusPid ! {get_errors, self()}, ExitingErrors = receive E -> E end, ?assertEqual(ExitingErrors, []), pass. ensemble_status_server(Errors) -> receive {exiting_members, Node, ExitingMembers} -> case lists:member(Node, ExitingMembers) of false -> ensemble_status_server(Errors); true -> E = {invalid_exiting_status, Node, ExitingMembers}, ensemble_status_server([E | Errors]) end; {get_errors, From} -> From ! Errors end.
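The ensemble_remove_node.erl record above ends with ensemble_status_server, a small receive loop that accumulates invalid exiting-member observations and hands them back when asked. Below is a rough Python analogue of that collector pattern, using a thread and queues; every name here is illustrative and not part of riak_test.

import queue
import threading

def status_server(inbox):
    # accumulate errors until a get_errors request arrives, then reply and stop
    errors = []
    while True:
        msg = inbox.get()
        if msg[0] == "exiting_members":
            _tag, node, exiting_members = msg
            if node in exiting_members:   # a removed node should no longer be 'exiting'
                errors.append(("invalid_exiting_status", node, exiting_members))
        elif msg[0] == "get_errors":
            msg[1].put(errors)            # msg[1] is the reply queue
            return

inbox = queue.Queue()
threading.Thread(target=status_server, args=(inbox,), daemon=True).start()
inbox.put(("exiting_members", "dev2", ["dev3"]))  # ok: dev2 is not listed as exiting
reply = queue.Queue()
inbox.put(("get_errors", reply))
assert reply.get(timeout=1) == []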
e08c8a4197ff2a8400fbb24d1a6ed9ced2c113147e67b69edc8dde3b412b84ad
caradoc-org/caradoc
fetchimpl.ml
(*****************************************************************************) (* Caradoc: a PDF parser and validator *) Copyright ( C ) 2015 ANSSI Copyright ( C ) 2015 - 2017 (* *) (* This program is free software; you can redistribute it and/or modify *) it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation . (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU General Public License for more details. *) (* *) You should have received a copy of the GNU General Public License along with this program ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA . (*****************************************************************************) open Fetchcommon open Fetch open Fetchcomp module rec FetchImpl : FetchT = MakeFetch(FetchCompImpl) and FetchCompImpl : FetchCompT = MakeFetchComp(FetchImpl)
null
https://raw.githubusercontent.com/caradoc-org/caradoc/100f53bc55ef682049e10fabf24869bc019dc6ce/src/parser/relaxed/fetchimpl.ml
ocaml
*************************************************************************** Caradoc: a PDF parser and validator This program is free software; you can redistribute it and/or modify This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. ***************************************************************************
Copyright ( C ) 2015 ANSSI Copyright ( C ) 2015 - 2017 it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation . You should have received a copy of the GNU General Public License along with this program ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA . open Fetchcommon open Fetch open Fetchcomp module rec FetchImpl : FetchT = MakeFetch(FetchCompImpl) and FetchCompImpl : FetchCompT = MakeFetchComp(FetchImpl)
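fetchimpl.ml above exists only to tie a recursive knot: `module rec FetchImpl = MakeFetch(FetchCompImpl) and FetchCompImpl = MakeFetchComp(FetchImpl)` gives each functor application a view of the other. A loose Python sketch of the same knot-tying idea, with factories and late binding standing in for functors; the even/odd pair is purely illustrative, not the parser semantics.

def make_even(get_odd):
    # "functor": builds is_even given a way to reach is_odd later
    def is_even(n):
        return True if n == 0 else get_odd()(n - 1)
    return is_even

def make_odd(get_even):
    def is_odd(n):
        return False if n == 0 else get_even()(n - 1)
    return is_odd

# tie the knot: each side receives a thunk that resolves to the other
is_even = make_even(lambda: is_odd)
is_odd = make_odd(lambda: is_even)
assert is_even(10) is True and is_even(7) is False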
0d98d4df44c3264c2fef09e3d464c1cfd4c93c8ee00e71adb3ac179f070e5b6f
fujimura/hi
doctests.hs
import Control.Applicative import System.Process import Test.DocTest main :: IO () main = doctest =<< lines <$> readProcess "git" ["ls-files", "src"] []
null
https://raw.githubusercontent.com/fujimura/hi/a55cf5ee87e3607f398a08aeb078f5ef63e6f4c5/test/doctests.hs
haskell
import Control.Applicative import System.Process import Test.DocTest main :: IO () main = doctest =<< lines <$> readProcess "git" ["ls-files", "src"] []
242f9ae1cddf345aedf741e91d424253074dbda95751b7a06859fcd45c58825a
Ongy/pulseaudio
Test.hs
{-# LANGUAGE OverloadedStrings #-} import Data.Text (Text) import qualified Data.Text as Text import qualified Data.Text.IO as Text import Sound.Pulse.Context import Sound.Pulse.Mainloop.Simple import Data.Word (Word32) import Control.Monad (void) import Sound.Pulse.Mainloop import Sound.Pulse.Sinkinfo import Sound.Pulse.Subscribe import Sound.Pulse.Serverinfo import Sound.Pulse.Volume printSink :: Sinkinfo -> IO () printSink sink = do let vol = cVolumeToLinear $ siVolume sink let base = volumeToLinear $ siBaseVolume sink putStrLn . show $ map (\v -> v / base * 100) vol startLoop :: Context -> Sinkinfo -> IO () startLoop cxt info = do printSink info void $ subscribeEvents cxt [SubscriptionMaskSink] fun where fun :: ((SubscriptionEventFacility, SubscriptionEventType) -> Word32 -> IO ()) fun _ 0 = void $ getContextSinkByIndex cxt (siIndex info) printSink fun _ _ = return () getDefaultSink :: Context -> IO () getDefaultSink cxt = void $ getServerInfo cxt fun where fun :: ServerInfo -> IO () fun serv = let name = defaultSinkName serv in do Text.putStrLn ("Default sink: " <> name) void $ getContextSinkByName cxt name (startLoop cxt) main :: IO () main = do impl <- getMainloopImpl cxt <- getContext impl "hs-test" setStateCallback cxt $ do state <- getContextState cxt putStrLn ("State: " ++ show state) case state of ContextFailed -> do putStr "PulseError: " Text.putStrLn =<< getContextErrStr cxt quitLoop impl =<< getContextErr cxt ContextReady -> getDefaultSink cxt _ -> return () connectContext cxt Nothing [] void $ doLoop impl
null
https://raw.githubusercontent.com/Ongy/pulseaudio/593d096e9386224f2e05bdc14c72c52ff9cd7d87/bin/Test.hs
haskell
# LANGUAGE OverloadedStrings #
import Data.Text (Text) import qualified Data.Text as Text import qualified Data.Text.IO as Text import Sound.Pulse.Context import Sound.Pulse.Mainloop.Simple import Data.Word (Word32) import Control.Monad (void) import Sound.Pulse.Mainloop import Sound.Pulse.Sinkinfo import Sound.Pulse.Subscribe import Sound.Pulse.Serverinfo import Sound.Pulse.Volume printSink :: Sinkinfo -> IO () printSink sink = do let vol = cVolumeToLinear $ siVolume sink let base = volumeToLinear $ siBaseVolume sink putStrLn . show $ map (\v -> v / base * 100) vol startLoop :: Context -> Sinkinfo -> IO () startLoop cxt info = do printSink info void $ subscribeEvents cxt [SubscriptionMaskSink] fun where fun :: ((SubscriptionEventFacility, SubscriptionEventType) -> Word32 -> IO ()) fun _ 0 = void $ getContextSinkByIndex cxt (siIndex info) printSink fun _ _ = return () getDefaultSink :: Context -> IO () getDefaultSink cxt = void $ getServerInfo cxt fun where fun :: ServerInfo -> IO () fun serv = let name = defaultSinkName serv in do Text.putStrLn ("Default sink: " <> name) void $ getContextSinkByName cxt name (startLoop cxt) main :: IO () main = do impl <- getMainloopImpl cxt <- getContext impl "hs-test" setStateCallback cxt $ do state <- getContextState cxt putStrLn ("State: " ++ show state) case state of ContextFailed -> do putStr "PulseError: " Text.putStrLn =<< getContextErrStr cxt quitLoop impl =<< getContextErr cxt ContextReady -> getDefaultSink cxt _ -> return () connectContext cxt Nothing [] void $ doLoop impl
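printSink in the Test.hs record above reports each channel volume as a percentage of the sink's base volume (v / base * 100 after the conversion to linear). The arithmetic, stripped of the pulseaudio bindings (the function name below is made up):

def channel_percentages(linear_volumes, base_volume):
    # each channel's linear volume relative to the base volume, in percent
    return [v / base_volume * 100 for v in linear_volumes]

print(channel_percentages([0.5, 0.25], 0.5))  # [100.0, 50.0]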
b769b4a1cf176db26b71e88b86169ddc8e5dcf6b89f02388847adf838fd05293
chrovis/cljam
sequence_test.clj
(ns cljam.io.sequence-test (:require [clojure.test :refer [deftest is are testing]] [clojure.java.io :as cio] [clojure.string :as cstr] [cljam.test-common :refer [with-before-after prepare-cache! clean-cache! not-throw? same-file? same-sequence-contents? http-server temp-dir test-fa-file test-fai-file test-twobit-file test-twobit-n-file test-twobit-be-file test-twobit-be-n-file medium-fa-file medium-fai-file medium-fa-bgz-file medium-twobit-file]] [cljam.io.fasta.core :as fa-core] [cljam.io.sequence :as cseq] [cljam.io.protocols :as protocols])) (def temp-test-fa-file (str temp-dir "/test.fa")) (def temp-medium-fa-file (str temp-dir "/medium.fa")) (deftest reader-test (testing "fasta" (with-open [rdr (cseq/reader test-fa-file) cloned (cseq/reader rdr)] (is (instance? cljam.io.fasta.reader.FASTAReader rdr)) (is (instance? cljam.io.fasta.reader.FASTAReader cloned))) (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [tmp (cio/file temp-dir "temp-fasta-file-without-suffix")] (cio/copy (cio/file test-fa-file) tmp) (with-open [rdr (cseq/reader tmp)] (is (instance? cljam.io.fasta.reader.FASTAReader rdr)))))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file) cloned (cseq/reader rdr)] (is (instance? cljam.io.twobit.reader.TwoBitReader rdr)) (is (instance? cljam.io.twobit.reader.TwoBitReader cloned))) (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [tmp (cio/file temp-dir "temp-2bit-file-without-suffix")] (cio/copy (cio/file test-twobit-file) tmp) (with-open [rdr (cseq/reader tmp)] (is (instance? cljam.io.twobit.reader.TwoBitReader rdr)))))) (testing "throws Exception" (are [f] (thrown? Exception (cseq/reader f)) "test-resources/fasta/not-found.fa" test-fai-file (Object.)))) (deftest multithread-reader-test (with-open [f (cseq/reader medium-fa-file) b (cseq/reader medium-fa-bgz-file) t (cseq/reader medium-twobit-file)] (let [xs (cseq/read-seq-summaries f)] (is (->> (repeatedly #(let [{:keys [name len]} (rand-nth xs) [s e] (sort [(inc (rand-int len)) (inc (rand-int len))])] {:chr name :start s :end e})) (take 100) (pmap (fn [region] (with-open [fc (cseq/reader f) bc (cseq/reader b) tc (cseq/reader t)] (= (cseq/read-sequence fc region) (cseq/read-sequence bc region) (cseq/read-sequence tc region))))) (every? 
true?)))))) (deftest read-seq-summaries-test (testing "fasta" (with-open [rdr (cseq/reader test-fa-file)] (is (= (cseq/read-seq-summaries rdr) [{:name "ref", :len 45} {:name "ref2", :len 40}])))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file)] (is (= (cseq/read-seq-summaries rdr) [{:name "ref", :len 45} {:name "ref2", :len 40}]))))) (deftest read-indices-test (testing "fasta" (with-open [rdr (cseq/reader test-fa-file)] (is (= (cseq/read-indices rdr) [{:name "ref", :len 45, :offset 5, :line-blen 45, :line-len 46} {:name "ref2", :len 40, :offset 57, :line-blen 40, :line-len 41}])))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file)] (is (= (cseq/read-indices rdr) [{:name "ref", :len 45, :offset 33, :ambs {}, :masks {}, :header-offset 16} {:name "ref2", :len 40, :offset 61, :ambs {}, :masks {1 40}, :header-offset 24}]))))) (deftest read-sequence-test (with-open [fa-rdr (cseq/reader test-fa-file) tb-rdr (cseq/reader test-twobit-file)] (are [?reg ?opts ?expect] (= (cseq/read-sequence fa-rdr ?reg ?opts) (cseq/read-sequence tb-rdr ?reg ?opts) ?expect) {} {} nil {:chr "badref"} {} nil {:chr "ref" :start -1 :end 0} {} nil {:chr "ref" :start 0 :end 0} {} nil {:chr "ref" :start -1 :end 1} {} "A" {:chr "ref" :start 0 :end 1} {} "A" {:chr "ref" :start 1 :end 2} {} "AG" {:chr "ref" :start 44 :end 45} {} "AT" {:chr "ref" :start 45 :end 45} {} "T" {:chr "ref" :start 45 :end 46} {} "T" {:chr "ref" :start 46 :end 46} {} nil {:chr "ref" :start 46 :end 47} {} nil {:chr "ref" :start 5 :end 10} {:mask? false} "TGTTAG" {:chr "ref" :start 5 :end 10} {:mask? true} "TGTTAG" {:chr "ref2" :start 1 :end 16} {:mask? false} "AGGTTTTATAAAACAA" {:chr "ref2" :start 1 :end 16} {:mask? true} "aggttttataaaacaa" {:chr "ref2" :start 1 :end 45} {:mask? false} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref2" :start 10} {:mask? false} "AAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref2" :end 10} {:mask? false} "AGGTTTTATA" {:chr "ref"} {:mask? false} "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" {:chr "ref"} {:mask? true} "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" {:chr "ref2"} {:mask? false} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref2"} {:mask? true} "aggttttataaaacaattaagtctacagagcaactacgcg")) (with-open [fa-rdr (cseq/reader test-fa-file) tb-rdr (cseq/reader test-twobit-file)] (are [?reg ?expect] (= (cseq/read-sequence fa-rdr ?reg) (cseq/read-sequence tb-rdr ?reg) ?expect) {:chr "ref" :start 5 :end 10} "TGTTAG" {:chr "ref2" :start 1 :end 16} "AGGTTTTATAAAACAA" {:chr "ref2" :start 0 :end 45} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref"} "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" {:chr "ref2"} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG")) (with-open [fa-rdr (cseq/reader test-fa-file) tb-rdr (cseq/reader test-twobit-file)] (is (= (protocols/read-in-region fa-rdr {:chr "ref2" :start 1 :end 16}) (protocols/read-in-region tb-rdr {:chr "ref2" :start 1 :end 16}) "AGGTTTTATAAAACAA")) (is (= (protocols/read-in-region fa-rdr {:chr "ref2" :start 1 :end 16} {:mask? false}) (protocols/read-in-region tb-rdr {:chr "ref2" :start 1 :end 16} {:mask? false}) "AGGTTTTATAAAACAA")) (is (= (protocols/read-in-region fa-rdr {:chr "ref2" :start 1 :end 16} {:mask? true}) (protocols/read-in-region tb-rdr {:chr "ref2" :start 1 :end 16} {:mask? 
true}) "aggttttataaaacaa")))) (deftest read-sequence-medium-fasta-test (let [expect (str "tgaatcaCATCAATTAAGAACTTTCTTCACCACCCCTTCGCTGTCATC" "CTTTTCTCTCCACTATTCACCCAACATCATCCGGGACCAGAACTAATGTC" "AGCAAAGC") u-expect (cstr/upper-case expect)] (doseq [file [medium-fa-file medium-fa-bgz-file]] (with-open [rdr (cseq/fasta-reader file)] (is (= (cseq/read-sequence rdr {:chr "chr3" :start 2053 :end 2158}) u-expect)) (is (= (cseq/read-sequence rdr {:chr "chr3" :start 2053 :end 2158} {:mask? false}) u-expect)) (is (= (cseq/read-sequence rdr {:chr "chr3" :start 2053 :end 2158} {:mask? true}) expect)))))) (deftest read-sequence-twobit-test (testing "reference test" (with-open [r (cseq/twobit-reader test-twobit-file) c (cseq/reader r)] (is (= (for [i (range 1 45) j (range i 46)] (cseq/read-sequence r {:chr "ref" :start i :end j})) (for [i (range 1 45) j (range i 46)] (cseq/read-sequence c {:chr "ref" :start i :end j})) (for [i (range 1 45) j (range i 46)] (subs "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" (dec i) j)))) (is (= (protocols/read-in-region r {:chr "ref2" :start 1 :end 40}) (protocols/read-in-region c {:chr "ref2" :start 1 :end 40}) "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG")) (is (= (protocols/read-in-region r {:chr "ref2" :start 1 :end 40} {:mask? false}) (protocols/read-in-region c {:chr "ref2" :start 1 :end 40} {:mask? false}) "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG")) (is (= (protocols/read-in-region r {:chr "ref2" :start 1 :end 40} {:mask? true}) (protocols/read-in-region c {:chr "ref2" :start 1 :end 40} {:mask? true}) "aggttttataaaacaattaagtctacagagcaactacgcg")))) (testing "reference test with N" (with-open [r (cseq/twobit-reader test-twobit-n-file) c (cseq/reader r)] (are [?region ?opt ?result] (= (cseq/read-sequence r ?region ?opt) (cseq/read-sequence c ?region ?opt) ?result) {} {} nil {:chr "badref"} {} nil {:chr "ref"} {} "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" {:chr "ref2"} {} "AGNNNTTATAAAACAATTANNNCTACAGAGCAACTANNNN" {:chr "ref2"} {:mask? true} "agNNNttataaaacaattaNNNctacagagcaactaNNNN" {:chr "ref" :start 10} {} "GATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" {:chr "ref" :end 10} {} "NNNNNGTTAG" {:chr "ref" :start -3 :end 0} {} nil {:chr "ref" :start -3 :end 1} {} "N" {:chr "ref" :start 46 :end 50} {} nil {:chr "ref" :start 45 :end 50} {} "T" {:chr "ref" :start 1 :end 4} {} "NNNN" {:chr "ref" :start 0 :end 4} {} "NNNN" {:chr "ref" :start 41 :end 50} {} "NCCAT" {:chr "ref" :start 1 :end 45} {} "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" {:chr "ref2" :start 1 :end 40} {} "AGNNNTTATAAAACAATTANNNCTACAGAGCAACTANNNN" {:chr "ref2" :start 1 :end 40} {:mask? true} "agNNNttataaaacaattaNNNctacagagcaactaNNNN" {:chr "chr1" :start 1 :end 40} {} nil) (is (= (for [i (range 1 45) j (range i 46)] (cseq/read-sequence r {:chr "ref" :start i :end j})) (for [i (range 1 45) j (range i 46)] (subs "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" (dec i) j)))))) (testing "big endian" (with-open [r (cseq/twobit-reader test-twobit-be-file)] (is (= (cseq/read-sequence r {:chr "ref"}) "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT"))) (with-open [r (cseq/twobit-reader test-twobit-be-n-file)] (is (= (cseq/read-sequence r {:chr "ref"}) "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT"))))) (deftest indexed?-test (testing "fasta" (are [f] (with-open [rdr (cseq/reader f)] (true? (cseq/indexed? rdr))) test-fa-file medium-fa-file) (with-before-after {:before (do (prepare-cache!) 
(cio/copy (cio/file test-fa-file) (cio/file temp-test-fa-file)) (cio/copy (cio/file medium-fa-file) (cio/file temp-medium-fa-file))) :after (clean-cache!)} (are [f] (with-open [rdr (cseq/reader f)] (false? (cseq/indexed? rdr))) temp-test-fa-file temp-medium-fa-file))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file)] (true? (cseq/indexed? rdr))))) (deftest writer-test (testing "fasta" (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (with-open [wtr (cseq/writer (.getAbsolutePath (cio/file temp-dir "temp.fa")))] (is (instance? cljam.io.fasta.writer.FASTAWriter wtr))))) (testing "twobit" (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (with-open [wtr (cseq/writer (.getAbsolutePath (cio/file temp-dir "temp.2bit")))] (is (instance? cljam.io.twobit.writer.TwoBitWriter wtr))))) (testing "throws Exception" (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (are [f] (thrown? Exception (cseq/writer (.getAbsolutePath (cio/file temp-dir f)))) "temp.fsta" "temp.fa.fai")))) (deftest write-sequences-fasta-test (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [f (str temp-dir "/test.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f test-fa-file)) (is (same-file? f test-fa-file)) (is (same-file? (str f ".fai") test-fai-file)) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader test-fa-file)] (is (= (map (juxt :rname :seq) (fa-core/read r1)) (map (juxt :rname :seq) (fa-core/read r2)))))) (let [f (str temp-dir "/test2.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (map (fn [s] (update s :seq (fn [x] (map cstr/upper-case x)))) (fa-core/read r)))) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader test-fa-file)] (is (= (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r1)) (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r2)))))) (let [f (str temp-dir "/medium.fa")] (with-open [r (cseq/fasta-reader medium-fa-file) w (cseq/fasta-writer f {:cols 50})] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f medium-fa-file)) (is (same-file? f medium-fa-file)) (is (same-file? (str f ".fai") medium-fai-file)) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader medium-fa-file)] (is (= (map (juxt :rname :seq) (fa-core/read r1)) (map (juxt :rname :seq) (fa-core/read r2)))))) (let [f (str temp-dir "/test.fa.gz")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f test-fa-file))) (let [f (str temp-dir "/test.fa.bz2")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f test-fa-file))) (let [f (str temp-dir "/test3.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/sequential-read r))) (is (same-sequence-contents? f test-fa-file)) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader test-fa-file)] (is (= (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r1)) (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r2)))))) (let [f (str temp-dir "/test4.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f {:create-index? false})] (cseq/write-sequences w (fa-core/read r))) (is (thrown? 
java.io.FileNotFoundException (with-open [r (cseq/fasta-reader f)] (cseq/read-sequence r {:chr "ref"}))))))) (deftest write-sequences-twobit-test (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [f (str temp-dir "/test.2bit")] (with-open [r (cseq/twobit-reader test-twobit-file) w (cseq/twobit-writer f)] (cseq/write-sequences w (cseq/read-all-sequences r {:mask? true}))) (is (same-file? f test-twobit-file)) (with-open [r1 (cseq/twobit-reader f) r2 (cseq/twobit-reader test-twobit-file)] (is (= (cseq/read-all-sequences r1 {:mask? true}) (cseq/read-all-sequences r2 {:mask? true}))))) (let [f (str temp-dir "/test-n.2bit")] (with-open [r (cseq/twobit-reader test-twobit-n-file) w (cseq/twobit-writer f)] (cseq/write-sequences w (cseq/read-all-sequences r {:mask? true}))) (is (same-file? f test-twobit-n-file))) (let [f (str temp-dir "/test-2.2bit") s [{:name "SEQ1" :sequence "AAAA"} {:name "SEQ2" :sequence "AAAAG"} {:name "SEQ3" :sequence "AAAAGC"} {:name "SEQ4" :sequence "AAAAGCT"} {:name "SEQ5" :sequence "AAAAGCTA"}]] (with-open [w (cseq/writer f)] (cseq/write-sequences w s)) (with-open [r (cseq/reader f)] (is (= (cseq/read-all-sequences r) s)))))) (deftest source-type-test (testing "reader" (are [x] (with-open [rdr (cseq/reader x)] (= (cseq/read-sequence rdr {:chr "ref" :start 44 :end 45} {}) "AT")) test-fa-file (cio/file test-fa-file) (cio/as-url (cio/file test-fa-file)) test-twobit-file (cio/file test-twobit-file) (cio/as-url (cio/file test-twobit-file)))) (testing "reader (non-file URL is not supported)" (with-open [server (http-server)] (are [x] (thrown? Exception (with-open [rdr (cseq/reader x)] (cseq/read-sequence rdr {:chr "ref" :start 44 :end 45} {}))) (cio/as-url (str (:uri server) "/fasta/test.fa")) (cio/as-url (str (:uri server) "/twobit/test.2bit"))))) (testing "writer" (let [temp-test-twobit-file (.getPath (cio/file temp-dir "test.2bit"))] (are [x] (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (with-open [rdr (cseq/reader test-fa-file) wtr (cseq/writer x)] (not-throw? (cseq/write-sequences wtr (cseq/read-all-sequences rdr))))) temp-test-fa-file (cio/file temp-test-fa-file) (cio/as-url (cio/file temp-test-fa-file)) temp-test-twobit-file (cio/file temp-test-twobit-file) (cio/as-url (cio/file temp-test-twobit-file))))))
null
https://raw.githubusercontent.com/chrovis/cljam/2b8e7386765be8efdbbbb4f18dbc52447f4a08af/test/cljam/io/sequence_test.clj
clojure
(ns cljam.io.sequence-test (:require [clojure.test :refer [deftest is are testing]] [clojure.java.io :as cio] [clojure.string :as cstr] [cljam.test-common :refer [with-before-after prepare-cache! clean-cache! not-throw? same-file? same-sequence-contents? http-server temp-dir test-fa-file test-fai-file test-twobit-file test-twobit-n-file test-twobit-be-file test-twobit-be-n-file medium-fa-file medium-fai-file medium-fa-bgz-file medium-twobit-file]] [cljam.io.fasta.core :as fa-core] [cljam.io.sequence :as cseq] [cljam.io.protocols :as protocols])) (def temp-test-fa-file (str temp-dir "/test.fa")) (def temp-medium-fa-file (str temp-dir "/medium.fa")) (deftest reader-test (testing "fasta" (with-open [rdr (cseq/reader test-fa-file) cloned (cseq/reader rdr)] (is (instance? cljam.io.fasta.reader.FASTAReader rdr)) (is (instance? cljam.io.fasta.reader.FASTAReader cloned))) (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [tmp (cio/file temp-dir "temp-fasta-file-without-suffix")] (cio/copy (cio/file test-fa-file) tmp) (with-open [rdr (cseq/reader tmp)] (is (instance? cljam.io.fasta.reader.FASTAReader rdr)))))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file) cloned (cseq/reader rdr)] (is (instance? cljam.io.twobit.reader.TwoBitReader rdr)) (is (instance? cljam.io.twobit.reader.TwoBitReader cloned))) (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [tmp (cio/file temp-dir "temp-2bit-file-without-suffix")] (cio/copy (cio/file test-twobit-file) tmp) (with-open [rdr (cseq/reader tmp)] (is (instance? cljam.io.twobit.reader.TwoBitReader rdr)))))) (testing "throws Exception" (are [f] (thrown? Exception (cseq/reader f)) "test-resources/fasta/not-found.fa" test-fai-file (Object.)))) (deftest multithread-reader-test (with-open [f (cseq/reader medium-fa-file) b (cseq/reader medium-fa-bgz-file) t (cseq/reader medium-twobit-file)] (let [xs (cseq/read-seq-summaries f)] (is (->> (repeatedly #(let [{:keys [name len]} (rand-nth xs) [s e] (sort [(inc (rand-int len)) (inc (rand-int len))])] {:chr name :start s :end e})) (take 100) (pmap (fn [region] (with-open [fc (cseq/reader f) bc (cseq/reader b) tc (cseq/reader t)] (= (cseq/read-sequence fc region) (cseq/read-sequence bc region) (cseq/read-sequence tc region))))) (every? 
true?)))))) (deftest read-seq-summaries-test (testing "fasta" (with-open [rdr (cseq/reader test-fa-file)] (is (= (cseq/read-seq-summaries rdr) [{:name "ref", :len 45} {:name "ref2", :len 40}])))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file)] (is (= (cseq/read-seq-summaries rdr) [{:name "ref", :len 45} {:name "ref2", :len 40}]))))) (deftest read-indices-test (testing "fasta" (with-open [rdr (cseq/reader test-fa-file)] (is (= (cseq/read-indices rdr) [{:name "ref", :len 45, :offset 5, :line-blen 45, :line-len 46} {:name "ref2", :len 40, :offset 57, :line-blen 40, :line-len 41}])))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file)] (is (= (cseq/read-indices rdr) [{:name "ref", :len 45, :offset 33, :ambs {}, :masks {}, :header-offset 16} {:name "ref2", :len 40, :offset 61, :ambs {}, :masks {1 40}, :header-offset 24}]))))) (deftest read-sequence-test (with-open [fa-rdr (cseq/reader test-fa-file) tb-rdr (cseq/reader test-twobit-file)] (are [?reg ?opts ?expect] (= (cseq/read-sequence fa-rdr ?reg ?opts) (cseq/read-sequence tb-rdr ?reg ?opts) ?expect) {} {} nil {:chr "badref"} {} nil {:chr "ref" :start -1 :end 0} {} nil {:chr "ref" :start 0 :end 0} {} nil {:chr "ref" :start -1 :end 1} {} "A" {:chr "ref" :start 0 :end 1} {} "A" {:chr "ref" :start 1 :end 2} {} "AG" {:chr "ref" :start 44 :end 45} {} "AT" {:chr "ref" :start 45 :end 45} {} "T" {:chr "ref" :start 45 :end 46} {} "T" {:chr "ref" :start 46 :end 46} {} nil {:chr "ref" :start 46 :end 47} {} nil {:chr "ref" :start 5 :end 10} {:mask? false} "TGTTAG" {:chr "ref" :start 5 :end 10} {:mask? true} "TGTTAG" {:chr "ref2" :start 1 :end 16} {:mask? false} "AGGTTTTATAAAACAA" {:chr "ref2" :start 1 :end 16} {:mask? true} "aggttttataaaacaa" {:chr "ref2" :start 1 :end 45} {:mask? false} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref2" :start 10} {:mask? false} "AAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref2" :end 10} {:mask? false} "AGGTTTTATA" {:chr "ref"} {:mask? false} "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" {:chr "ref"} {:mask? true} "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" {:chr "ref2"} {:mask? false} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref2"} {:mask? true} "aggttttataaaacaattaagtctacagagcaactacgcg")) (with-open [fa-rdr (cseq/reader test-fa-file) tb-rdr (cseq/reader test-twobit-file)] (are [?reg ?expect] (= (cseq/read-sequence fa-rdr ?reg) (cseq/read-sequence tb-rdr ?reg) ?expect) {:chr "ref" :start 5 :end 10} "TGTTAG" {:chr "ref2" :start 1 :end 16} "AGGTTTTATAAAACAA" {:chr "ref2" :start 0 :end 45} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG" {:chr "ref"} "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" {:chr "ref2"} "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG")) (with-open [fa-rdr (cseq/reader test-fa-file) tb-rdr (cseq/reader test-twobit-file)] (is (= (protocols/read-in-region fa-rdr {:chr "ref2" :start 1 :end 16}) (protocols/read-in-region tb-rdr {:chr "ref2" :start 1 :end 16}) "AGGTTTTATAAAACAA")) (is (= (protocols/read-in-region fa-rdr {:chr "ref2" :start 1 :end 16} {:mask? false}) (protocols/read-in-region tb-rdr {:chr "ref2" :start 1 :end 16} {:mask? false}) "AGGTTTTATAAAACAA")) (is (= (protocols/read-in-region fa-rdr {:chr "ref2" :start 1 :end 16} {:mask? true}) (protocols/read-in-region tb-rdr {:chr "ref2" :start 1 :end 16} {:mask? 
true}) "aggttttataaaacaa")))) (deftest read-sequence-medium-fasta-test (let [expect (str "tgaatcaCATCAATTAAGAACTTTCTTCACCACCCCTTCGCTGTCATC" "CTTTTCTCTCCACTATTCACCCAACATCATCCGGGACCAGAACTAATGTC" "AGCAAAGC") u-expect (cstr/upper-case expect)] (doseq [file [medium-fa-file medium-fa-bgz-file]] (with-open [rdr (cseq/fasta-reader file)] (is (= (cseq/read-sequence rdr {:chr "chr3" :start 2053 :end 2158}) u-expect)) (is (= (cseq/read-sequence rdr {:chr "chr3" :start 2053 :end 2158} {:mask? false}) u-expect)) (is (= (cseq/read-sequence rdr {:chr "chr3" :start 2053 :end 2158} {:mask? true}) expect)))))) (deftest read-sequence-twobit-test (testing "reference test" (with-open [r (cseq/twobit-reader test-twobit-file) c (cseq/reader r)] (is (= (for [i (range 1 45) j (range i 46)] (cseq/read-sequence r {:chr "ref" :start i :end j})) (for [i (range 1 45) j (range i 46)] (cseq/read-sequence c {:chr "ref" :start i :end j})) (for [i (range 1 45) j (range i 46)] (subs "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT" (dec i) j)))) (is (= (protocols/read-in-region r {:chr "ref2" :start 1 :end 40}) (protocols/read-in-region c {:chr "ref2" :start 1 :end 40}) "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG")) (is (= (protocols/read-in-region r {:chr "ref2" :start 1 :end 40} {:mask? false}) (protocols/read-in-region c {:chr "ref2" :start 1 :end 40} {:mask? false}) "AGGTTTTATAAAACAATTAAGTCTACAGAGCAACTACGCG")) (is (= (protocols/read-in-region r {:chr "ref2" :start 1 :end 40} {:mask? true}) (protocols/read-in-region c {:chr "ref2" :start 1 :end 40} {:mask? true}) "aggttttataaaacaattaagtctacagagcaactacgcg")))) (testing "reference test with N" (with-open [r (cseq/twobit-reader test-twobit-n-file) c (cseq/reader r)] (are [?region ?opt ?result] (= (cseq/read-sequence r ?region ?opt) (cseq/read-sequence c ?region ?opt) ?result) {} {} nil {:chr "badref"} {} nil {:chr "ref"} {} "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" {:chr "ref2"} {} "AGNNNTTATAAAACAATTANNNCTACAGAGCAACTANNNN" {:chr "ref2"} {:mask? true} "agNNNttataaaacaattaNNNctacagagcaactaNNNN" {:chr "ref" :start 10} {} "GATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" {:chr "ref" :end 10} {} "NNNNNGTTAG" {:chr "ref" :start -3 :end 0} {} nil {:chr "ref" :start -3 :end 1} {} "N" {:chr "ref" :start 46 :end 50} {} nil {:chr "ref" :start 45 :end 50} {} "T" {:chr "ref" :start 1 :end 4} {} "NNNN" {:chr "ref" :start 0 :end 4} {} "NNNN" {:chr "ref" :start 41 :end 50} {} "NCCAT" {:chr "ref" :start 1 :end 45} {} "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" {:chr "ref2" :start 1 :end 40} {} "AGNNNTTATAAAACAATTANNNCTACAGAGCAACTANNNN" {:chr "ref2" :start 1 :end 40} {:mask? true} "agNNNttataaaacaattaNNNctacagagcaactaNNNN" {:chr "chr1" :start 1 :end 40} {} nil) (is (= (for [i (range 1 45) j (range i 46)] (cseq/read-sequence r {:chr "ref" :start i :end j})) (for [i (range 1 45) j (range i 46)] (subs "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT" (dec i) j)))))) (testing "big endian" (with-open [r (cseq/twobit-reader test-twobit-be-file)] (is (= (cseq/read-sequence r {:chr "ref"}) "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT"))) (with-open [r (cseq/twobit-reader test-twobit-be-n-file)] (is (= (cseq/read-sequence r {:chr "ref"}) "NNNNNGTTAGATAAGATAGCNNTGCTAGTAGGCAGTCNNNNCCAT"))))) (deftest indexed?-test (testing "fasta" (are [f] (with-open [rdr (cseq/reader f)] (true? (cseq/indexed? rdr))) test-fa-file medium-fa-file) (with-before-after {:before (do (prepare-cache!) 
(cio/copy (cio/file test-fa-file) (cio/file temp-test-fa-file)) (cio/copy (cio/file medium-fa-file) (cio/file temp-medium-fa-file))) :after (clean-cache!)} (are [f] (with-open [rdr (cseq/reader f)] (false? (cseq/indexed? rdr))) temp-test-fa-file temp-medium-fa-file))) (testing "twobit" (with-open [rdr (cseq/reader test-twobit-file)] (true? (cseq/indexed? rdr))))) (deftest writer-test (testing "fasta" (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (with-open [wtr (cseq/writer (.getAbsolutePath (cio/file temp-dir "temp.fa")))] (is (instance? cljam.io.fasta.writer.FASTAWriter wtr))))) (testing "twobit" (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (with-open [wtr (cseq/writer (.getAbsolutePath (cio/file temp-dir "temp.2bit")))] (is (instance? cljam.io.twobit.writer.TwoBitWriter wtr))))) (testing "throws Exception" (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (are [f] (thrown? Exception (cseq/writer (.getAbsolutePath (cio/file temp-dir f)))) "temp.fsta" "temp.fa.fai")))) (deftest write-sequences-fasta-test (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [f (str temp-dir "/test.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f test-fa-file)) (is (same-file? f test-fa-file)) (is (same-file? (str f ".fai") test-fai-file)) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader test-fa-file)] (is (= (map (juxt :rname :seq) (fa-core/read r1)) (map (juxt :rname :seq) (fa-core/read r2)))))) (let [f (str temp-dir "/test2.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (map (fn [s] (update s :seq (fn [x] (map cstr/upper-case x)))) (fa-core/read r)))) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader test-fa-file)] (is (= (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r1)) (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r2)))))) (let [f (str temp-dir "/medium.fa")] (with-open [r (cseq/fasta-reader medium-fa-file) w (cseq/fasta-writer f {:cols 50})] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f medium-fa-file)) (is (same-file? f medium-fa-file)) (is (same-file? (str f ".fai") medium-fai-file)) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader medium-fa-file)] (is (= (map (juxt :rname :seq) (fa-core/read r1)) (map (juxt :rname :seq) (fa-core/read r2)))))) (let [f (str temp-dir "/test.fa.gz")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f test-fa-file))) (let [f (str temp-dir "/test.fa.bz2")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/read r))) (is (same-sequence-contents? f test-fa-file))) (let [f (str temp-dir "/test3.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f)] (cseq/write-sequences w (fa-core/sequential-read r))) (is (same-sequence-contents? f test-fa-file)) (with-open [r1 (cseq/fasta-reader f) r2 (cseq/fasta-reader test-fa-file)] (is (= (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r1)) (map (juxt :rname (comp cstr/upper-case :seq)) (fa-core/read r2)))))) (let [f (str temp-dir "/test4.fa")] (with-open [r (cseq/fasta-reader test-fa-file) w (cseq/fasta-writer f {:create-index? false})] (cseq/write-sequences w (fa-core/read r))) (is (thrown? 
java.io.FileNotFoundException (with-open [r (cseq/fasta-reader f)] (cseq/read-sequence r {:chr "ref"}))))))) (deftest write-sequences-twobit-test (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (let [f (str temp-dir "/test.2bit")] (with-open [r (cseq/twobit-reader test-twobit-file) w (cseq/twobit-writer f)] (cseq/write-sequences w (cseq/read-all-sequences r {:mask? true}))) (is (same-file? f test-twobit-file)) (with-open [r1 (cseq/twobit-reader f) r2 (cseq/twobit-reader test-twobit-file)] (is (= (cseq/read-all-sequences r1 {:mask? true}) (cseq/read-all-sequences r2 {:mask? true}))))) (let [f (str temp-dir "/test-n.2bit")] (with-open [r (cseq/twobit-reader test-twobit-n-file) w (cseq/twobit-writer f)] (cseq/write-sequences w (cseq/read-all-sequences r {:mask? true}))) (is (same-file? f test-twobit-n-file))) (let [f (str temp-dir "/test-2.2bit") s [{:name "SEQ1" :sequence "AAAA"} {:name "SEQ2" :sequence "AAAAG"} {:name "SEQ3" :sequence "AAAAGC"} {:name "SEQ4" :sequence "AAAAGCT"} {:name "SEQ5" :sequence "AAAAGCTA"}]] (with-open [w (cseq/writer f)] (cseq/write-sequences w s)) (with-open [r (cseq/reader f)] (is (= (cseq/read-all-sequences r) s)))))) (deftest source-type-test (testing "reader" (are [x] (with-open [rdr (cseq/reader x)] (= (cseq/read-sequence rdr {:chr "ref" :start 44 :end 45} {}) "AT")) test-fa-file (cio/file test-fa-file) (cio/as-url (cio/file test-fa-file)) test-twobit-file (cio/file test-twobit-file) (cio/as-url (cio/file test-twobit-file)))) (testing "reader (non-file URL is not supported)" (with-open [server (http-server)] (are [x] (thrown? Exception (with-open [rdr (cseq/reader x)] (cseq/read-sequence rdr {:chr "ref" :start 44 :end 45} {}))) (cio/as-url (str (:uri server) "/fasta/test.fa")) (cio/as-url (str (:uri server) "/twobit/test.2bit"))))) (testing "writer" (let [temp-test-twobit-file (.getPath (cio/file temp-dir "test.2bit"))] (are [x] (with-before-after {:before (prepare-cache!) :after (clean-cache!)} (with-open [rdr (cseq/reader test-fa-file) wtr (cseq/writer x)] (not-throw? (cseq/write-sequences wtr (cseq/read-all-sequences rdr))))) temp-test-fa-file (cio/file temp-test-fa-file) (cio/as-url (cio/file temp-test-fa-file)) temp-test-twobit-file (cio/file temp-test-twobit-file) (cio/as-url (cio/file temp-test-twobit-file))))))
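The cljam tests above pin down a 1-based, end-inclusive region convention for read-sequence (e.g. start 5, end 10 on "ref" yields "TGTTAG", and out-of-range coordinates are clamped or return nil). A small sketch of that convention on a plain string; this is not cljam's implementation, just the coordinate arithmetic the expectations imply.

REF = "AGCATGTTAGATAAGATAGCTGTGCTAGTAGGCAGTCAGCGCCAT"  # the test "ref" sequence

def read_region(seq, start, end):
    # 1-based, end-inclusive coordinates, clamped to the sequence bounds
    start = max(start, 1)
    end = min(end, len(seq))
    if start > end:
        return None
    return seq[start - 1:end]

assert read_region(REF, 5, 10) == "TGTTAG"
assert read_region(REF, 44, 45) == "AT"
assert read_region(REF, -1, 1) == "A"      # clamped start
assert read_region(REF, 46, 47) is None    # entirely past the end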
b9b44786bcdc4928a45b1f14f2d9b5245e38f16f5d5b01a9660ef15c96f81d01
j-mie6/ParsleyHaskell
Identifiers.hs
# OPTIONS_GHC -Wno - incomplete - patterns # # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving # GeneralizedNewtypeDeriving #-} | Module : Parsley . Internal . Backend . Machine . Identifiers Description : frontend specific identifiers . License : BSD-3 - Clause Maintainer : : experimental This module defines " identifiers " , which are used to uniquely identify different nodes in the combinator tree ( and abstract machine ) . @since 1.0.0.0 Module : Parsley.Internal.Backend.Machine.Identifiers Description : frontend specific identifiers. License : BSD-3-Clause Maintainer : Jamie Willis Stability : experimental This module defines "identifiers", which are used to uniquely identify different nodes in the combinator tree (and abstract machine). @since 1.0.0.0 -} module Parsley.Internal.Core.Identifiers ( MVar(..), IMVar, ΣVar(..), IΣVar, SomeΣVar(..), getIΣVar ) where import Data.Array (Ix) import Data.Function (on) import Data.GADT.Compare (GEq, GCompare, gcompare, geq, GOrdering(..)) import Data.Kind (Type) import Data.Typeable ((:~:)(Refl)) import Data.Word (Word64) import Unsafe.Coerce (unsafeCoerce) | An identifier representing concrete registers and mutable state . @since 0.1.0.0 An identifier representing concrete registers and mutable state. @since 0.1.0.0 -} newtype ΣVar (a :: Type) = ΣVar IΣVar | An identifier representing let - bound parsers , recursion , and iteration . @since 0.1.0.0 An identifier representing let-bound parsers, recursion, and iteration. @since 0.1.0.0 -} newtype MVar (a :: Type) = MVar IMVar | Underlying untyped identifier , which is numeric but otherwise opaque . @since 0.1.0.0 Underlying untyped identifier, which is numeric but otherwise opaque. @since 0.1.0.0 -} newtype IMVar = IMVar Word64 deriving newtype (Ord, Eq, Num, Enum, Show, Ix) | Underlying untyped identifier , which is numeric but otherwise opaque . @since 0.1.0.0 Underlying untyped identifier, which is numeric but otherwise opaque. @since 0.1.0.0 -} newtype IΣVar = IΣVar Word64 deriving newtype (Ord, Eq, Num, Enum, Show, Ix) instance Show (MVar a) where show (MVar μ) = "μ" ++ show μ instance Show (ΣVar a) where show (ΣVar σ) = "σ" ++ show σ instance GEq ΣVar where geq (ΣVar u) (ΣVar v) | u == v = Just (unsafeCoerce Refl) | otherwise = Nothing instance GCompare ΣVar where gcompare σ1@(ΣVar u) σ2@(ΣVar v) = case compare u v of LT -> GLT EQ -> case geq σ1 σ2 of Just Refl -> GEQ GT -> GGT | An identifier representing some concrete register , but where the type is existential . @since 0.1.0.0 An identifier representing some concrete register, but where the type is existential. @since 0.1.0.0 -} data SomeΣVar = forall r. SomeΣVar (ΣVar r) instance Eq SomeΣVar where (==) = (==) `on` getIΣVar instance Ord SomeΣVar where compare = compare `on` getIΣVar instance Show SomeΣVar where show (SomeΣVar σ) = show σ | Fetch the untyped identifier from under the existential . @since 0.1.0.0 Fetch the untyped identifier from under the existential. @since 0.1.0.0 -} getIΣVar :: SomeΣVar -> IΣVar getIΣVar (SomeΣVar (ΣVar σ)) = σ instance GEq MVar where geq (MVar u) (MVar v) | u == v = Just (unsafeCoerce Refl) | otherwise = Nothing instance GCompare MVar where gcompare μ1@(MVar u) μ2@(MVar v) = case compare u v of LT -> GLT EQ -> case geq μ1 μ2 of Just Refl -> GEQ GT -> GGT
null
https://raw.githubusercontent.com/j-mie6/ParsleyHaskell/045ab78ed7af0cbb52cf8b42b6aeef5dd7f91ab2/parsley-core/src/ghc/Parsley/Internal/Core/Identifiers.hs
haskell
# OPTIONS_GHC -Wno - incomplete - patterns # # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving # GeneralizedNewtypeDeriving #-} | Module : Parsley . Internal . Backend . Machine . Identifiers Description : frontend specific identifiers . License : BSD-3 - Clause Maintainer : : experimental This module defines " identifiers " , which are used to uniquely identify different nodes in the combinator tree ( and abstract machine ) . @since 1.0.0.0 Module : Parsley.Internal.Backend.Machine.Identifiers Description : frontend specific identifiers. License : BSD-3-Clause Maintainer : Jamie Willis Stability : experimental This module defines "identifiers", which are used to uniquely identify different nodes in the combinator tree (and abstract machine). @since 1.0.0.0 -} module Parsley.Internal.Core.Identifiers ( MVar(..), IMVar, ΣVar(..), IΣVar, SomeΣVar(..), getIΣVar ) where import Data.Array (Ix) import Data.Function (on) import Data.GADT.Compare (GEq, GCompare, gcompare, geq, GOrdering(..)) import Data.Kind (Type) import Data.Typeable ((:~:)(Refl)) import Data.Word (Word64) import Unsafe.Coerce (unsafeCoerce) | An identifier representing concrete registers and mutable state . @since 0.1.0.0 An identifier representing concrete registers and mutable state. @since 0.1.0.0 -} newtype ΣVar (a :: Type) = ΣVar IΣVar | An identifier representing let - bound parsers , recursion , and iteration . @since 0.1.0.0 An identifier representing let-bound parsers, recursion, and iteration. @since 0.1.0.0 -} newtype MVar (a :: Type) = MVar IMVar | Underlying untyped identifier , which is numeric but otherwise opaque . @since 0.1.0.0 Underlying untyped identifier, which is numeric but otherwise opaque. @since 0.1.0.0 -} newtype IMVar = IMVar Word64 deriving newtype (Ord, Eq, Num, Enum, Show, Ix) | Underlying untyped identifier , which is numeric but otherwise opaque . @since 0.1.0.0 Underlying untyped identifier, which is numeric but otherwise opaque. @since 0.1.0.0 -} newtype IΣVar = IΣVar Word64 deriving newtype (Ord, Eq, Num, Enum, Show, Ix) instance Show (MVar a) where show (MVar μ) = "μ" ++ show μ instance Show (ΣVar a) where show (ΣVar σ) = "σ" ++ show σ instance GEq ΣVar where geq (ΣVar u) (ΣVar v) | u == v = Just (unsafeCoerce Refl) | otherwise = Nothing instance GCompare ΣVar where gcompare σ1@(ΣVar u) σ2@(ΣVar v) = case compare u v of LT -> GLT EQ -> case geq σ1 σ2 of Just Refl -> GEQ GT -> GGT | An identifier representing some concrete register , but where the type is existential . @since 0.1.0.0 An identifier representing some concrete register, but where the type is existential. @since 0.1.0.0 -} data SomeΣVar = forall r. SomeΣVar (ΣVar r) instance Eq SomeΣVar where (==) = (==) `on` getIΣVar instance Ord SomeΣVar where compare = compare `on` getIΣVar instance Show SomeΣVar where show (SomeΣVar σ) = show σ | Fetch the untyped identifier from under the existential . @since 0.1.0.0 Fetch the untyped identifier from under the existential. @since 0.1.0.0 -} getIΣVar :: SomeΣVar -> IΣVar getIΣVar (SomeΣVar (ΣVar σ)) = σ instance GEq MVar where geq (MVar u) (MVar v) | u == v = Just (unsafeCoerce Refl) | otherwise = Nothing instance GCompare MVar where gcompare μ1@(MVar u) μ2@(MVar v) = case compare u v of LT -> GLT EQ -> case geq μ1 μ2 of Just Refl -> GEQ GT -> GGT
5381d39add6f785c2744347958e7ec95a5d55378869311a2f62ee42c83ddd241
mmottl/gsl-ocaml
eigen.ml
gsl - ocaml - OCaml interface to GSL Copyright ( © ) 2002 - 2012 - Olivier Andrieu Distributed under the terms of the GPL version 3 open Vectmat let () = Error.init () type symm_ws external _symm_alloc : int -> symm_ws = "ml_gsl_eigen_symm_alloc" external _symm_free : symm_ws -> unit = "ml_gsl_eigen_symm_free" let make_symm_ws s = let ws = _symm_alloc s in Gc.finalise _symm_free ws ; ws external _symm : mat -> vec -> symm_ws -> unit = "ml_gsl_eigen_symm" let symm ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let ws = _symm_alloc n in begin try _symm a' (`V v) ws with exn -> _symm_free ws ; raise exn end ; _symm_free ws ; v type symmv_ws external _symmv_alloc_v : int -> symmv_ws = "ml_gsl_eigen_symmv_alloc" external _symmv_free_v : symmv_ws -> unit = "ml_gsl_eigen_symmv_free" let make_symmv_ws s = let ws = _symmv_alloc_v s in Gc.finalise _symmv_free_v ws ; ws external _symmv : mat -> vec -> mat -> symmv_ws -> unit = "ml_gsl_eigen_symmv" let symmv ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let evec = Matrix.create n n in let ws = _symmv_alloc_v n in begin try _symmv a' (`V v) (`M evec) ws with exn -> _symmv_free_v ws ; raise exn end ; _symmv_free_v ws ; (v, evec) type sort = | VAL_ASC | VAL_DESC | ABS_ASC | ABS_DESC external symmv_sort : Vector.vector * Matrix.matrix -> sort -> unit = "ml_gsl_eigen_symmv_sort" Complex Hermitian Matrices type herm_ws external _herm_alloc : int -> herm_ws = "ml_gsl_eigen_herm_alloc" external _herm_free : herm_ws -> unit = "ml_gsl_eigen_herm_free" let make_herm_ws s = let ws = _herm_alloc s in Gc.finalise _herm_free ws ; ws external _herm : cmat -> vec -> herm_ws -> unit = "ml_gsl_eigen_herm" let herm ?protect a = let a' = Vectmat.cmat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let ws = _herm_alloc n in begin try _herm a' (`V v) ws with exn -> _herm_free ws ; raise exn end ; _herm_free ws ; v type hermv_ws external _hermv_alloc_v : int -> hermv_ws = "ml_gsl_eigen_hermv_alloc" external _hermv_free_v : hermv_ws -> unit = "ml_gsl_eigen_hermv_free" let make_hermv_ws s = let ws = _hermv_alloc_v s in Gc.finalise _hermv_free_v ws ; ws external _hermv : cmat -> vec -> cmat -> hermv_ws -> unit = "ml_gsl_eigen_hermv" let hermv ?protect a = let a' = Vectmat.cmat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let evec = Matrix_complex.create n n in let ws = _hermv_alloc_v n in begin try _hermv a' (`V v) (`CM evec) ws with exn -> _hermv_free_v ws ; raise exn end ; _hermv_free_v ws ; (v, evec) external hermv_sort : Vector.vector * Matrix_complex.matrix -> sort -> unit = "ml_gsl_eigen_hermv_sort" (** Real Nonsymmetric Matrices *) type nonsymm_ws external _nonsymm_alloc : int -> nonsymm_ws = "ml_gsl_eigen_nonsymm_alloc" external _nonsymm_free : nonsymm_ws -> unit = "ml_gsl_eigen_nonsymm_free" let make_nonsymm_ws s = let ws = _nonsymm_alloc s in Gc.finalise _nonsymm_free ws ; ws external _nonsymm : mat -> cvec -> nonsymm_ws -> unit = "ml_gsl_eigen_nonsymm" external _nonsymm_Z : mat -> cvec -> mat -> nonsymm_ws -> unit = "ml_gsl_eigen_nonsymm_Z" let nonsymm ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector_complex.create n in let ws = _nonsymm_alloc n in begin try _nonsymm a' (`CV v) ws with exn -> _nonsymm_free ws ; raise exn end ; _nonsymm_free ws ; v type nonsymmv_ws external _nonsymmv_alloc_v : int -> nonsymmv_ws 
= "ml_gsl_eigen_nonsymmv_alloc" external _nonsymmv_free_v : nonsymmv_ws -> unit = "ml_gsl_eigen_nonsymmv_free" let make_nonsymmv_ws s = let ws = _nonsymmv_alloc_v s in Gc.finalise _nonsymmv_free_v ws ; ws external _nonsymmv : mat -> cvec -> cmat -> nonsymmv_ws -> unit = "ml_gsl_eigen_nonsymmv" external _nonsymmv_Z : mat -> cvec -> cmat -> mat -> nonsymmv_ws -> unit = "ml_gsl_eigen_nonsymmv_Z" let nonsymmv ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector_complex.create n in let evec = Matrix_complex.create n n in let ws = _nonsymmv_alloc_v n in begin try _nonsymmv a' (`CV v) (`CM evec) ws with exn -> _nonsymmv_free_v ws ; raise exn end ; _nonsymmv_free_v ws ; (v, evec) external nonsymmv_sort : Vector_complex.vector * Matrix_complex.matrix -> sort -> unit = "ml_gsl_eigen_nonsymmv_sort"
null
https://raw.githubusercontent.com/mmottl/gsl-ocaml/76f8d93cccc1f23084f4a33d3e0a8f1289450580/src/eigen.ml
ocaml
* Real Nonsymmetric Matrices
gsl - ocaml - OCaml interface to GSL Copyright ( © ) 2002 - 2012 - Olivier Andrieu Distributed under the terms of the GPL version 3 open Vectmat let () = Error.init () type symm_ws external _symm_alloc : int -> symm_ws = "ml_gsl_eigen_symm_alloc" external _symm_free : symm_ws -> unit = "ml_gsl_eigen_symm_free" let make_symm_ws s = let ws = _symm_alloc s in Gc.finalise _symm_free ws ; ws external _symm : mat -> vec -> symm_ws -> unit = "ml_gsl_eigen_symm" let symm ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let ws = _symm_alloc n in begin try _symm a' (`V v) ws with exn -> _symm_free ws ; raise exn end ; _symm_free ws ; v type symmv_ws external _symmv_alloc_v : int -> symmv_ws = "ml_gsl_eigen_symmv_alloc" external _symmv_free_v : symmv_ws -> unit = "ml_gsl_eigen_symmv_free" let make_symmv_ws s = let ws = _symmv_alloc_v s in Gc.finalise _symmv_free_v ws ; ws external _symmv : mat -> vec -> mat -> symmv_ws -> unit = "ml_gsl_eigen_symmv" let symmv ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let evec = Matrix.create n n in let ws = _symmv_alloc_v n in begin try _symmv a' (`V v) (`M evec) ws with exn -> _symmv_free_v ws ; raise exn end ; _symmv_free_v ws ; (v, evec) type sort = | VAL_ASC | VAL_DESC | ABS_ASC | ABS_DESC external symmv_sort : Vector.vector * Matrix.matrix -> sort -> unit = "ml_gsl_eigen_symmv_sort" Complex Hermitian Matrices type herm_ws external _herm_alloc : int -> herm_ws = "ml_gsl_eigen_herm_alloc" external _herm_free : herm_ws -> unit = "ml_gsl_eigen_herm_free" let make_herm_ws s = let ws = _herm_alloc s in Gc.finalise _herm_free ws ; ws external _herm : cmat -> vec -> herm_ws -> unit = "ml_gsl_eigen_herm" let herm ?protect a = let a' = Vectmat.cmat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let ws = _herm_alloc n in begin try _herm a' (`V v) ws with exn -> _herm_free ws ; raise exn end ; _herm_free ws ; v type hermv_ws external _hermv_alloc_v : int -> hermv_ws = "ml_gsl_eigen_hermv_alloc" external _hermv_free_v : hermv_ws -> unit = "ml_gsl_eigen_hermv_free" let make_hermv_ws s = let ws = _hermv_alloc_v s in Gc.finalise _hermv_free_v ws ; ws external _hermv : cmat -> vec -> cmat -> hermv_ws -> unit = "ml_gsl_eigen_hermv" let hermv ?protect a = let a' = Vectmat.cmat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector.create n in let evec = Matrix_complex.create n n in let ws = _hermv_alloc_v n in begin try _hermv a' (`V v) (`CM evec) ws with exn -> _hermv_free_v ws ; raise exn end ; _hermv_free_v ws ; (v, evec) external hermv_sort : Vector.vector * Matrix_complex.matrix -> sort -> unit = "ml_gsl_eigen_hermv_sort" type nonsymm_ws external _nonsymm_alloc : int -> nonsymm_ws = "ml_gsl_eigen_nonsymm_alloc" external _nonsymm_free : nonsymm_ws -> unit = "ml_gsl_eigen_nonsymm_free" let make_nonsymm_ws s = let ws = _nonsymm_alloc s in Gc.finalise _nonsymm_free ws ; ws external _nonsymm : mat -> cvec -> nonsymm_ws -> unit = "ml_gsl_eigen_nonsymm" external _nonsymm_Z : mat -> cvec -> mat -> nonsymm_ws -> unit = "ml_gsl_eigen_nonsymm_Z" let nonsymm ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector_complex.create n in let ws = _nonsymm_alloc n in begin try _nonsymm a' (`CV v) ws with exn -> _nonsymm_free ws ; raise exn end ; _nonsymm_free ws ; v type nonsymmv_ws external _nonsymmv_alloc_v : int -> nonsymmv_ws = "ml_gsl_eigen_nonsymmv_alloc" 
external _nonsymmv_free_v : nonsymmv_ws -> unit = "ml_gsl_eigen_nonsymmv_free" let make_nonsymmv_ws s = let ws = _nonsymmv_alloc_v s in Gc.finalise _nonsymmv_free_v ws ; ws external _nonsymmv : mat -> cvec -> cmat -> nonsymmv_ws -> unit = "ml_gsl_eigen_nonsymmv" external _nonsymmv_Z : mat -> cvec -> cmat -> mat -> nonsymmv_ws -> unit = "ml_gsl_eigen_nonsymmv_Z" let nonsymmv ?protect a = let a' = Vectmat.mat_convert ?protect a in let (n, _) = Vectmat.dims a' in let v = Vector_complex.create n in let evec = Matrix_complex.create n n in let ws = _nonsymmv_alloc_v n in begin try _nonsymmv a' (`CV v) (`CM evec) ws with exn -> _nonsymmv_free_v ws ; raise exn end ; _nonsymmv_free_v ws ; (v, evec) external nonsymmv_sort : Vector_complex.vector * Matrix_complex.matrix -> sort -> unit = "ml_gsl_eigen_nonsymmv_sort"
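Every wrapper in eigen.ml above follows the same discipline: allocate a GSL workspace, run the computation, and free the workspace even if the computation raises (the `with exn -> _free ws ; raise exn` arms). That discipline reduced to a generic helper; alloc, free and compute here are placeholders, not the GSL bindings.

def with_workspace(alloc, free, compute, n):
    # allocate, compute, and always free -- mirroring the OCaml try/with arms
    ws = alloc(n)
    try:
        return compute(ws)
    finally:
        free(ws)

# toy usage: a "workspace" that is just a list of zeros
assert with_workspace(lambda n: [0] * n, lambda ws: None, sum, 4) == 0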
48581c72986a3fa60fa631ead60449331e89fffe16bfed7ece62a4a596d1dc54
KenDickey/Crosstalk
selector-lookup.scm
;;; FILE: "selector-lookup.scm"
;;; IMPLEMENTS: Selector->Method lookup strategy
;;; AUTHOR:
;;; DATE: 10 December 2017

;; Strategy for selector lookup
;;
;; In place of a Class or hash-table, each St object has a slot with
;; a vector of closures which are its methods.
;; Selectors are assigned an id when first used to refer to a method.
;; The id is used to find the appropriate method in the method vector.
;; Holes in the vector are kept track of in slot zero.
;;
;; No hashes. No caches. Holes elided.
;;
;; E.g. (#f ==> unused)
;;  Logical         Actual    ..(hole-index num-adjacent-holes)..
;;   0 | #f  |      | --> |   ((5 . 1) (9 . 2) (15 . 0))
;;   1 | m1  |      | m1  |
;;   2 | m2  |      | m2  |
;;   3 | m3  |      | m3  |
;;   4 | m4  |      | m4  |
;;   5 | #f  |      | m7  |
;;   6 | #f  |      | m8  |
;;   7 | m7  |      | m12 |
;;   8 | m8  |      | m13 |
;;   9 | #f  |      | m14 |
;;  10 | #f  |      | m16 |
;;  11 | #f  |
;;  12 | m12 |
;;  13 | m13 |
;;  14 | m14 |
;;  15 | #f  |
;;  16 | m16 |
;;
;; Tuning: Could use method Object>>doesNotUnderstand: in unused
;; slots, only condensing when the number of unused 'holes'
;; exceeds a given threshold.
;; Alt: Check against Larceny's optimized case dispatch.
;; Alt: Large # of gaps -> binary search

(define (make-selector-table) (vector nil))

(define assign-id-to-selector ;; return the selector id
  (let ( (counter 0) )
    (lambda (sym)
      (cond
       ((not (symbol? sym)) doesNotUnderstand:)
       ((selector->id sym)) ;; Answer the index
       (else
        (set! counter (+ 1 counter))
        (set-selector-index! sym counter)
        counter) ) ) ) )

(define (selector->id sym)
  (getprop sym '%%method-index%%))

(define (set-selector-index! sym idx)
  (putprop sym '%%method-index%% idx))

(define (selector+table->method sym vec)
  (let ( (idx (selector->id sym)) )
    (if (not idx)
        doesNotUnderstand:
        (let loop ( (hole-count 0) (holes-alist (vector-ref vec 0)) )
          (cond
           ((null? holes-alist)
            (let ( (index (- idx hole-count)) )
              (if (< index (vector-length vec))
                  (vector-ref vec index)
                  doesNotUnderstand:) ))
           (else
            (let ( (next-hole    (caar holes-alist))
                   (num-adjacent (cdar holes-alist)) )
              (cond
               ((< idx next-hole)
                (vector-ref vec (- idx hole-count)) )
               ((<= idx (+ next-hole num-adjacent))
                doesNotUnderstand:) ;; not in table
               (else
                (loop (+ hole-count 1 num-adjacent)
                      (cdr holes-alist))))))) ) ) ) )

;;;=================Quick Checks===============;;;
;; (define doesNotUnderstand: 'doesNotUnderstand:)
;; (map assign-id-to-selector
;;      '(m1 m2 m3 m4 m5 m6 m7 m8 m9 m10 m11 m12 m13 m14 m15 m16 m17))
;; (define mvec (vector '((5 . 1) (9 . 2) (15 . 0))
;;                      'm1 'm2 'm3 'm4 'm7 'm8 'm12 'm13 'm14 'm16))
;; (map (lambda (s) (selector+table->method s mvec))
;;      '(m1 m2 m3 m4 m5 m6 m7 m8 m9 m10 m11 m12 m13 m14 m15 m16 m17))
;;;; Expect:
;;'(m1 m2 m3 m4
;;  doesNotUnderstand: doesNotUnderstand:
;;  m7 m8
;;  doesNotUnderstand: doesNotUnderstand: doesNotUnderstand:
;;  m12 m13 m14
;;  doesNotUnderstand:
;;  m16
;;  doesNotUnderstand:)

;;; FIXME: Add & Remove Methods

(define (add-selector+method-to-mvec selector method mvec)
  ;; (unless (and (symbol? selector)
  ;;              (procedure? method)
  ;;              (vector? mvec))
  ;;   (error @@FIXME: checks@@))
  (let ( (idx      (assign-id-to-selector selector))
         (mvec-len (vector-length mvec)) )
    (let loop ( (hole-count 0) (holes-alist (vector-ref mvec 0)) ) ;; @@FIXME: checks@@
      (cond
        ((null? holes-alist) ;; go ahead and add
         (let ( (index (- idx hole-count)) )
           (if (< index mvec-len)
               ;; replace
               (vector-set! mvec index method)
               ;; add at end
               (let ( (result-vec (vector-add-at-end method)) )
                 (cond
                   ((= index mvec-len)
                    result-vec) ;; no change to holes-alist
                   (else
                    (vector-set! result-vec
                                 0
                                 (append ;; add to end of alist
                                  (vector-ref mvec 0)
                                  (list (cons ;; new hole
                                         mvec-len
                                         ;; num adjacent holes
                                         (- index mvec-len 1)))))
                    result-vec))))))
        (else
         (let ( (next-hole    (caar holes-alist))
                (num-adjacent (cdar holes-alist)) )
           (cond
             ((< idx next-hole) ;; slot already allocated for this id: replace
              (vector-set! mvec (- idx hole-count) method) )
             ((<= idx (+ next-hole num-adjacent))
              doesNotUnderstand:) ;; not in table
             (else
              (loop (+ hole-count 1 num-adjacent)
                    (cdr holes-alist))))))))))

;; --- E O F --- ;;
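;; A small worked illustration (sketch only; LOGICAL-ID->PHYSICAL-INDEX is
;; not used by the code above): walk the holes alist in slot zero exactly
;; as selector+table->method does, but answer the physical index for a
;; logical selector id -- or #f when the id falls inside a hole run.
(define (logical-id->physical-index id mvec)
  (let loop ( (hole-count 0) (holes-alist (vector-ref mvec 0)) )
    (cond
     ((null? holes-alist)
      (let ( (index (- id hole-count)) )
        (if (< index (vector-length mvec)) index #f)))
     (else
      (let ( (next-hole    (caar holes-alist))
             (num-adjacent (cdar holes-alist)) )
        (cond
         ((< id next-hole) (- id hole-count))
         ((<= id (+ next-hole num-adjacent)) #f) ;; inside a hole run
         (else (loop (+ hole-count 1 num-adjacent)
                     (cdr holes-alist)))))))))
;; With the example table sketched above: id 12 maps to physical index 7
;; (m12), while id 10 lands in the (9 . 2) hole run and answers #f.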
null
https://raw.githubusercontent.com/KenDickey/Crosstalk/c4a315baf039e714980e14f5b07860632b9bd0e6/selector-lookup.scm
scheme
FILE: "selector-lookup.scm" IMPLEMENTS: Selector->Method lookup strategy Strategy for selector lookup In place of a Class or hash-table, each St object has a slot with a vector of closures which are its methods. The id is used to find the appropriate method in the method vector. No hashes. No caches. Holes elided. E.g. (#f ==> unused) Logical Actual ..(hole-index num-adjacent-holes).. 3 | m3 | | m3 | Tuning: Could use method Object>>doesNotUnderstand: in unused slots, only condensing when the number of unused 'holes' exceeds a given threshold. Alt: Large # of gaps -> binary search return the selector id Answer the index not in table =================Quick Checks===============;;; (define doesNotUnderstand: 'doesNotUnderstand: ) (map assign-id-to-selector 'm1 'm2 'm3 'm4 'm7 'm8 'm12 'm13 'm14 'm16)) (map (lambda (s) (selector+table->method s mvec)) Expect: '(m1 m4 doesNotUnderstand: doesNotUnderstand: m7 m8 doesNotUnderstand: doesNotUnderstand: doesNotUnderstand: m13 doesNotUnderstand: m16 doesNotUnderstand:) FIXME: Add & Remove Methods (unless (and (symbol? selector) (procedure? method) (error @@FIXME: checks@@) go ahead and add replace add at end no change to holes-alist add to end of alist new hole num adjacent holes not in table --- E O F --- ;;
AUTHOR : DATE : 10 December 2017 Selectors are assigned an i d when first used to refer to a method . Holes in the vector are kept track of in slot zero . 0 | # f | | -- > | ( ( 5 . 1 ) ( 9 . 2 ) ( 15 . 0 ) ) 1 | m1 | | m1 | 2 | m2 | | m2 | 4 | m4 | | m4 | 5 | # f | | m7 | 6 | # f | | m8 | 7 | m7 | | m12 | 8 | m8 | | m13 | 9 | # f | | m14 | 10 | # f | | m16 | 11 | # f | 12 | m12 | 13 | m13 | 14 | m14 | 15 | # f | 16 | m16 | Alt : Check against Larceny 's optimized case dispatch . (define (make-selector-table) (vector nil)) (let ( (counter 0) ) (lambda (sym) (cond ((not (symbol? sym)) doesNotUnderstand: ) (else (set! counter (+ 1 counter)) (set-selector-index! sym counter) counter) ) ) ) ) (define (selector->id sym) (getprop sym '%%method-index%%)) (define (set-selector-index! sym idx) (putprop sym '%%method-index%% idx)) (define (selector+table->method sym vec) (let ( (idx (selector->id sym)) ) (if (not idx) doesNotUnderstand: (let loop ( (hole-count 0) (holes-alist (vector-ref vec 0)) ) (cond ((null? holes-alist) (let ( (index (- idx hole-count)) ) (if (< index (vector-length vec)) (vector-ref vec index) doesNotUnderstand:) )) (else (let ( (next-hole (caar holes-alist)) (num-adjacent (cdar holes-alist)) ) (cond ((< idx next-hole) (vector-ref vec (- idx hole-count)) ) ((<= idx (+ next-hole num-adjacent)) ) (else (loop (+ hole-count 1 num-adjacent) (cdr holes-alist))))))) ) ) ) ) ' ( m1 m2 m3 m4 m5 m6 m7 m8 m9 m10 m11 m12 m13 m14 m15 m16 m17 ) ) ( define ( vector ' ( ( 5 . 1 ) ( 9 . 2 ) ( 15 . 0 ) ) ' ( m1 m2 m3 m4 m5 m6 m7 m8 m9 m10 m11 m12 m13 m14 m15 m16 m17 ) ) m14 (define (add-selector+method-to-mvec selector method mvec) ( vector ? ) ) (let ( (idx (assign-id-to-selector selector)) (mvec-len (vector-length mvec)) ) (let loop ( (hole-count 0) FIXME : checks ) (cond (let ( (index (- idx hole-count)) ) (if (< index mvec-len) (vector-set! mvec index method) (let ( (result-vec (vector-add-at-end method)) ) (cond ((= index mvec-len) ) (else (vector-set! result-vec 0 (vector-ref mvec 0) (list (cons mvec-len (- index mvec-len 1) )))) result-vec) ))) ) ) (else (let ( (next-hole (caar holes-alist)) (num-adjacent (cdar holes-alist)) ) (cond ((< index next-hole) (vector-ref vec (- idx hole-count)) ) ((<= idx (+ next-hole num-adjacent)) ) (else (loop (+ hole-count 1 num-adjacent) (cdr holes-alist))))))) ) ) ) )
d52ce184b0c3a3baa58735f0d4e7e3526e6c748f88303d1e5183a2e047796d3c
Clozure/ccl
arm-arch.lisp
-*- Mode : Lisp ; Package : ( ARM : use CL ) -*- ;;; ;;; Copyright 2010 Clozure Associates ;;; Licensed under the Apache License , Version 2.0 ( the " License " ) ; ;;; you may not use this file except in compliance with the License. ;;; You may obtain a copy of the License at ;;; ;;; -2.0 ;;; ;;; Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , ;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;;; See the License for the specific language governing permissions and ;;; limitations under the License. (defpackage "ARM" (:use "CL") #+arm-target (:nicknames "TARGET")) (require "ARCH") (in-package "ARM") ;;; Lisp registers. (eval-when (:compile-toplevel :load-toplevel :execute) (defvar *arm-register-names* ()) (defun get-arm-register (name) (let* ((pair (assoc (string name) *arm-register-names* :test #'string-equal))) (if pair (cdr pair)))) (defun get-arm-gpr (name) (let* ((value (get-arm-register name))) (and value (< value 16) value))) (defun get-arm-sfpr (name) (let* ((value (get-arm-register name))) (and value (logbitp 5 value) (logand #x1f value)))) (defun get-arm-dfpr (name) (let* ((value (get-arm-register name))) (and value (logbitp 6 value) (logand #x0f value)))) ;;; This allows redefinition, which might be helpful while boostrapping . ARM - LAP - EQUATE - FORM checks for redefinition ;;; before calling this. (defun define-arm-register (name val) (let* ((value (if (typep val 'fixnum) val (get-arm-register val))) (string (string name))) (unless value (error "invalid ARM register value ~d for ~s." val name)) (let* ((pair (assoc string *arm-register-names* :test #'string-equal))) (if pair (progn (unless (eql (cdr pair) value) (when ccl::*cerror-on-constant-redefinition* (cerror "Redefine ARM register ~s to have value ~*~d." "ARM register ~s currently has value ~d." name (cdr pair) value) (setf (cdr pair) value)))) (push (cons string value) *arm-register-names*)) value))) (defmacro defarmgpr (name val) `(defconstant ,name (define-arm-register ',name ',val))) (defarmgpr r0 0) (defarmgpr r1 1) (defarmgpr r2 2) (defarmgpr r3 3) (defarmgpr r4 4) (defarmgpr r5 5) (defarmgpr r6 6) (defarmgpr r7 7) (defarmgpr r8 8) (defarmgpr r9 9) (defarmgpr r10 10) (defarmgpr r11 11) (defarmgpr r12 12) (defarmgpr r13 13) (defarmgpr r14 14) (defarmgpr r15 15) (defarmgpr imm0 r0) (defarmgpr imm1 r1) (defarmgpr imm2 r2) (defarmgpr rcontext r3) (defarmgpr arg_z r4) (defarmgpr arg_y r5) (defarmgpr arg_x r6) (defarmgpr temp0 r7) (defarmgpr temp1 r8) (defarmgpr temp2 r9) (defarmgpr vsp r10) (defarmgpr fn r11) (defarmgpr allocptr r12) (defarmgpr sp r13) (defarmgpr lr r14) (defarmgpr pc r15) ;;; Calling sequence may pass additional arguments in temp registers. ;;; "nfn" (new function) is always passed; it's the new value of "fn". (defarmgpr nfn temp2) CLOS may pass the context for , e.g .. , CALL - NEXT - METHOD in ;;;; the "next-method-context" register. 
(defarmgpr next-method-context temp1) (defarmgpr fname temp1) (defarmgpr nargs imm2) (defmacro defarmsfpr (name val) `(defconstant ,name (define-arm-register ',name ',val))) (defarmsfpr s0 32) (defarmsfpr s1 33) (defarmsfpr s2 34) (defarmsfpr s3 35) (defarmsfpr s4 36) (defarmsfpr s5 37) (defarmsfpr s6 38) (defarmsfpr s7 39) (defarmsfpr s8 40) (defarmsfpr s9 41) (defarmsfpr s10 42) (defarmsfpr s11 43) (defarmsfpr s12 44) (defarmsfpr s13 45) (defarmsfpr s14 46) (defarmsfpr s15 47) (defarmsfpr s16 48) (defarmsfpr s17 49) (defarmsfpr s18 50) (defarmsfpr s19 51) (defarmsfpr s20 52) (defarmsfpr s21 53) (defarmsfpr s22 54) (defarmsfpr s23 55) (defarmsfpr s24 56) (defarmsfpr s25 57) (defarmsfpr s26 58) (defarmsfpr s27 59) (defarmsfpr s28 60) (defarmsfpr s29 61) (defarmsfpr s30 62) (defarmsfpr s31 63) (defarmsfpr single-float-zero s30) The first 16 double - float registers overlap pairs of single - float registers ( d0 overlaps s0 - s1 , d15 overlaps s30 - s31 , etc . ) (defmacro defarmdfpr (name val) `(defconstant ,name (define-arm-register ',name ',val))) (defarmdfpr d0 64) (defarmdfpr d1 65) (defarmdfpr d2 66) (defarmdfpr d3 67) (defarmdfpr d4 68) (defarmdfpr d5 69) (defarmdfpr d6 70) (defarmdfpr d7 71) (defarmdfpr d8 72) (defarmdfpr d9 73) (defarmdfpr d10 74) (defarmdfpr d11 75) (defarmdfpr d12 76) (defarmdfpr d13 77) (defarmdfpr d14 78) (defarmdfpr d15 79) (defarmdfpr double-float-zero d15) ) (defparameter *standard-arm-register-names* *arm-register-names*) ;;; Kernel globals are allocated "below" nil. This list (used to map ;;; symbolic names to rnil-relative offsets) must (of course) exactly ;;; match the kernel's notion of where things are. ;;; The order here matches "ccl:lisp-kernel;lisp_globals.h" & the ;;; lisp_globals record in "ccl:lisp-kernel;*constants*.s" (defparameter *arm-kernel-globals* '(get-tcr ; callback to obtain (real) tcr tcr-count interrupt-signal ; used by PROCESS-INTERRUPT kernel-imports ; some things we need to have imported for us. objc-2-personality savetoc ; used to save TOC on some platforms saver13 ; used to save r13 on some platforms subprims-base ; start of dynamic subprims jump table ret1valaddr ; magic multiple-values return address. tcr-key ; tsd key for thread's tcr area-lock ; serialize access to gc exception-lock ; serialize exception handling when FREEZE is in effect default-allocation-quantum ; log2_heap_segment_size, as a fixnum. intflag ; interrupt-pending flag gc-inhibit-count ; for gc locking oldspace number of dnodes in dynamic space that are older than ; youngest generation float-abi ; non-zero if using hard float abi fixnum : GC " forwarder " call count . fixnum : GC call count . linked - list of weak macptrs . heap-start ; start of lisp heap heap-end ; end of lisp heap statically-linked ; true if the lisp kernel is statically linked stack-size ; value of --stack-size arg objc-2-begin-catch ; objc_begin_catch kernel-path all-areas ; doubly-linked area list lexpr-return ; multiple-value lexpr return address lexpr-return1v ; single-value lexpr return address non - zero when GC - ish thing active free-static-conses ; fixnum objc-2-end-catch ; _objc_end_catch low half of 1.0d0 high half of 1.0d0 static-cons-area ; exception-saved-registers ; saved registers from exception frame doublenode address of oldest ephemeral object or 0 tenured-area ; the tenured_area. address of C lib errno argv ; address of C lib argv 0 on MacOS , 1 on ARM Linux , 2 on ... batch-flag ; non-zero if --batch specified _ Unwind_Resume weak-gc-method ; weak gc algorithm. 
image-name ; current image name initial-tcr ; initial thread's context record all populations as of last GC )) The order here matches " ccl : lisp - kernel;lisp_globals.h " and the nrs record ;;; in "ccl:lisp-kernel;constants.s". (defparameter *arm-nil-relative-symbols* '(t nil ccl::%err-disp ccl::cmain eval ccl::apply-evaluated-function error ccl::%defun ccl::%defvar ccl::%defconstant ccl::%macro ccl::%kernel-restart *package* ccl::*total-bytes-freed* :allow-other-keys ccl::%toplevel-catch% ccl::%toplevel-function% ccl::%pascal-functions% ccl::restore-lisp-pointers ccl::*total-gc-microseconds* ccl::%builtin-functions% ccl::%unbound-function% ccl::%init-misc ccl::%macro-code% ccl::%closure-code% ccl::%new-gcable-ptr ccl::*gc-event-status-bits* ccl::*post-gc-hook* ccl::%handlers% ccl::%all-packages% ccl::*keyword-package* ccl::%os-init-function% ccl::%foreign-thread-control )) Old ( and slightly confusing ) name ; NIL used to be in a register . (defparameter *arm-nilreg-relative-symbols* *arm-nil-relative-symbols*) (eval-when (:compile-toplevel :load-toplevel :execute) (defparameter *arm-subprims-shift* 2) (defconstant tcr.sptab 256) (defparameter *arm-subprims-base* tcr.sptab ) ) (defvar *arm-subprims*) (let* ((origin *arm-subprims-base*) (step (ash 1 *arm-subprims-shift*))) (flet ((define-arm-subprim (name) (ccl::make-subprimitive-info :name (string name) :offset (prog1 origin (incf origin step))))) (macrolet ((defarmsubprim (name) `(define-arm-subprim ',name))) (setq *arm-subprims* (vector must be first (defarmsubprim .SPbuiltin-plus) (defarmsubprim .SPbuiltin-minus) (defarmsubprim .SPbuiltin-times) (defarmsubprim .SPbuiltin-div) (defarmsubprim .SPbuiltin-eq) (defarmsubprim .SPbuiltin-ne) (defarmsubprim .SPbuiltin-gt) (defarmsubprim .SPbuiltin-ge) (defarmsubprim .SPbuiltin-lt) (defarmsubprim .SPbuiltin-le) (defarmsubprim .SPbuiltin-eql) (defarmsubprim .SPbuiltin-length) (defarmsubprim .SPbuiltin-seqtype) (defarmsubprim .SPbuiltin-assq) (defarmsubprim .SPbuiltin-memq) (defarmsubprim .SPbuiltin-logbitp) (defarmsubprim .SPbuiltin-logior) (defarmsubprim .SPbuiltin-logand) (defarmsubprim .SPbuiltin-ash) (defarmsubprim .SPbuiltin-negate) (defarmsubprim .SPbuiltin-logxor) (defarmsubprim .SPbuiltin-aref1) (defarmsubprim .SPbuiltin-aset1) (defarmsubprim .SPfuncall) (defarmsubprim .SPmkcatch1v) (defarmsubprim .SPmkcatchmv) (defarmsubprim .SPmkunwind) (defarmsubprim .SPbind) (defarmsubprim .SPconslist) (defarmsubprim .SPconslist-star) (defarmsubprim .SPmakes32) (defarmsubprim .SPmakeu32) (defarmsubprim .SPfix-overflow) (defarmsubprim .SPmakeu64) (defarmsubprim .SPmakes64) (defarmsubprim .SPmvpass) (defarmsubprim .SPvalues) (defarmsubprim .SPnvalret) (defarmsubprim .SPthrow) (defarmsubprim .SPnthrowvalues) (defarmsubprim .SPnthrow1value) (defarmsubprim .SPbind-self) (defarmsubprim .SPbind-nil) (defarmsubprim .SPbind-self-boundp-check) (defarmsubprim .SPrplaca) (defarmsubprim .SPrplacd) (defarmsubprim .SPgvset) (defarmsubprim .SPset-hash-key) (defarmsubprim .SPstore-node-conditional) (defarmsubprim .SPset-hash-key-conditional) (defarmsubprim .SPstkconslist) (defarmsubprim .SPstkconslist-star) (defarmsubprim .SPmkstackv) (defarmsubprim .SPsetqsym) (defarmsubprim .SPprogvsave) (defarmsubprim .SPstack-misc-alloc) (defarmsubprim .SPgvector) (defarmsubprim .SPfitvals) (defarmsubprim .SPnthvalue) (defarmsubprim .SPdefault-optional-args) (defarmsubprim .SPopt-supplied-p) (defarmsubprim .SPheap-rest-arg) (defarmsubprim .SPreq-heap-rest-arg) (defarmsubprim .SPheap-cons-rest-arg) (defarmsubprim 
.SPcheck-fpu-exception) (defarmsubprim .SPdiscard_stack_object) (defarmsubprim .SPksignalerr) (defarmsubprim .SPstack-rest-arg) (defarmsubprim .SPreq-stack-rest-arg) (defarmsubprim .SPstack-cons-rest-arg) (defarmsubprim .SPcall-closure) (defarmsubprim .SPspreadargz) (defarmsubprim .SPtfuncallgen) (defarmsubprim .SPtfuncallslide) (defarmsubprim .SPjmpsym) (defarmsubprim .SPtcallsymgen) (defarmsubprim .SPtcallsymslide) (defarmsubprim .SPtcallnfngen) (defarmsubprim .SPtcallnfnslide) (defarmsubprim .SPmisc-ref) (defarmsubprim .SPsubtag-misc-ref) (defarmsubprim .SPmakestackblock) (defarmsubprim .SPmakestackblock0) (defarmsubprim .SPmakestacklist) (defarmsubprim .SPstkgvector) (defarmsubprim .SPmisc-alloc) (defarmsubprim .SPatomic-incf-node) (defarmsubprim .SPunused1) (defarmsubprim .SPunused2) (defarmsubprim .SPrecover-values) (defarmsubprim .SPinteger-sign) (defarmsubprim .SPsubtag-misc-set) (defarmsubprim .SPmisc-set) (defarmsubprim .SPspread-lexprz) (defarmsubprim .SPreset) (defarmsubprim .SPmvslide) (defarmsubprim .SPsave-values) (defarmsubprim .SPadd-values) (defarmsubprim .SPmisc-alloc-init) (defarmsubprim .SPstack-misc-alloc-init) (defarmsubprim .SPpopj) (defarmsubprim .SPudiv64by32) (defarmsubprim .SPgetu64) (defarmsubprim .SPgets64) (defarmsubprim .SPspecref) (defarmsubprim .SPspecrefcheck) (defarmsubprim .SPspecset) (defarmsubprim .SPgets32) (defarmsubprim .SPgetu32) (defarmsubprim .SPmvpasssym) (defarmsubprim .SPunbind) (defarmsubprim .SPunbind-n) (defarmsubprim .SPunbind-to) (defarmsubprim .SPprogvrestore) (defarmsubprim .SPbind-interrupt-level-0) (defarmsubprim .SPbind-interrupt-level-m1) (defarmsubprim .SPbind-interrupt-level) (defarmsubprim .SPunbind-interrupt-level) (defarmsubprim .SParef2) (defarmsubprim .SParef3) (defarmsubprim .SPaset2) (defarmsubprim .SPaset3) (defarmsubprim .SPkeyword-bind) (defarmsubprim .SPudiv32) (defarmsubprim .SPsdiv32) (defarmsubprim .SPeabi-ff-call-simple) (defarmsubprim .SPdebind) (defarmsubprim .SPeabi-callback) (defarmsubprim .SPeabi-ff-callhf) ))))) (defmacro define-storage-layout (name origin &rest cells) `(progn (ccl::defenum (:start ,origin :step 4) ,@(mapcar #'(lambda (cell) (ccl::form-symbol name "." cell)) cells)) (defconstant ,(ccl::form-symbol name ".SIZE") ,(* (length cells) 4)))) (defmacro define-lisp-object (name tagname &rest cells) `(define-storage-layout ,name ,(- (symbol-value tagname)) ,@cells)) (defmacro define-subtag (name tag subtag) `(defconstant ,(ccl::form-symbol "SUBTAG-" name) (logior ,tag (ash ,subtag ntagbits)))) (defmacro define-imm-subtag (name subtag) `(define-subtag ,name fulltag-immheader ,subtag)) (defmacro define-node-subtag (name subtag) `(define-subtag ,name fulltag-nodeheader ,subtag)) (defmacro define-fixedsized-object (name &rest non-header-cells) `(progn (define-lisp-object ,name fulltag-misc header ,@non-header-cells) (ccl::defenum () ,@(mapcar #'(lambda (cell) (ccl::form-symbol name "." cell "-CELL")) non-header-cells)) (defconstant ,(ccl::form-symbol name ".ELEMENT-COUNT") ,(length non-header-cells)))) (eval-when (:compile-toplevel :load-toplevel :execute) (defconstant nbits-in-word 32) (defconstant least-significant-bit 31) (defconstant nbits-in-byte 8) But non - header objects only use 2 (defconstant nlisptagbits 2) (defconstant nfixnumtagbits 2) ; See ? tag part of header is 8 bits wide (defconstant fixnumshift nfixnumtagbits) (defconstant fixnum-shift fixnumshift) ; A pet name for it. 
Only needed by GC / very low - level code (defconstant full-tag-mask fulltagmask) (defconstant tagmask (1- (ash 1 nlisptagbits))) (defconstant tag-mask tagmask) (defconstant fixnummask (1- (ash 1 nfixnumtagbits))) (defconstant fixnum-mask fixnummask) (defconstant subtag-mask (1- (ash 1 num-subtag-bits))) only the low 8 bits are used , currently (defconstant charcode-shift (- nbits-in-word ncharcodebits)) (defconstant word-shift 2) (defconstant word-size-in-bytes 4) (defconstant node-size 4) (defconstant dnode-size 8) (defconstant dnode-align-bits 3) (defconstant dnode-shift dnode-align-bits) (defconstant bitmap-shift 5) (defconstant target-most-negative-fixnum (ash -1 (1- (- nbits-in-word nfixnumtagbits)))) (defconstant target-most-positive-fixnum (1- (ash 1 (1- (- nbits-in-word nfixnumtagbits))))) (defconstant fixnumone (ash 1 fixnumshift)) ;; Tags. There are two - bit tags and three - bit tags . A FULLTAG is the value of the low three bits of a tagged object . A TAG is the value of the low two bits of a tagged object . A TYPECODE is either a TAG or the value of a " tag - misc " object 's header - byte . There are 4 primary TAG values . Any object which lisp can " see " can be classified by its TAG . ( Some headers have FULLTAGS that are congruent modulo 4 with the TAGS of other objects , but lisp ca n't ;; "see" headers.) (ccl::defenum () tag-fixnum ; All fixnums, whether odd or even Conses and NIL Heap - consed objects other than lists : vectors , symbols , functions , floats ... Immediate - objects : characters , UNBOUND , other markers . ) And there are 8 FULLTAG values . Note that NIL has its own FULLTAG ( congruent mod 4 to tag - list ) and that both FULLTAG - MISC and FULLTAG - IMM have header fulltags that share the same TAG . Things ;;; that walk memory (and the stack) have to be careful to look at the ;;; FULLTAG of each object that they see. (ccl::defenum () fulltag-even-fixnum ; I suppose EVENP/ODDP might care; nothing else does. NIL and nothing but . ( Note that there 's still a hidden NILSYM . ) fulltag-nodeheader ; Header of heap-allocated object that contains lisp-object pointers fulltag-imm ; a "real" immediate object. Shares TAG with fulltag-immheader. fulltag-odd-fixnum ; fulltag-cons ; a real (non-null) cons. Shares TAG with fulltag-nil. Pointer " real " tag - misc object . Shares TAG with fulltag - nodeheader . fulltag-immheader ; Header of heap-allocated object that contains unboxed data. ) (defconstant misc-header-offset (- fulltag-misc)) (defconstant misc-subtag-offset misc-header-offset) (defconstant misc-data-offset (+ misc-header-offset 4)) (defconstant misc-dfloat-offset (+ misc-header-offset 8)) (defconstant misc-complex-dfloat-offset misc-dfloat-offset) (defconstant canonical-nil-value (+ #x04000000 fulltag-nil)) (defconstant nil-value canonical-nil-value) T is almost adjacent to NIL : since NIL is a misaligned CONS , it spans two doublewords . The arithmetic difference between T and NIL is ;;; not inherently interesting; it should be possible to express that ;;; difference as an ARM constant, but that's the only real constraint. (defconstant t-offset (+ (- dnode-size fulltag-nil) fulltag-misc)) ;;; The order in which various header values are defined is significant in several ways: 1 ) Numeric subtags precede non - numeric ones ; there are further ;;; orderings among numeric subtags. 
2 ) All subtags which denote CL arrays are preceded by those that ;;; don't, with a further ordering which requires that (< ;;; header-arrayH header-vectorH ,@all-other-CL-vector-types) 3 ) The element - size of ivectors is determined by the ordering of . 4 ) All subtags are > = fulltag - immheader . ;;; Numeric subtags. (define-imm-subtag bignum 0) (define-node-subtag ratio 1) (define-imm-subtag single-float 1) ; "SINGLE" float, aka short-float in the new order. (define-imm-subtag double-float 2) (define-node-subtag complex 3) CL array types . There are more immediate types than node types ; all CL array subtags must be > than all non - CL - array subtags . So we start by defining the immediate subtags in decreasing order , starting ;;; with that subtag whose element size isn't an integral number of bits and ending with those whose element size - like all non - CL - array fulltag - immheader types - is 32 bits . (define-imm-subtag bit-vector 31) (define-imm-subtag complex-double-float-vector 30) (define-imm-subtag complex-single-float-vector 29) (define-imm-subtag double-float-vector 28) (define-imm-subtag s16-vector 27) (define-imm-subtag u16-vector 26) (defconstant min-16-bit-ivector-subtag subtag-u16-vector) (defconstant max-16-bit-ivector-subtag subtag-s16-vector) (define-imm-subtag s8-vector 25) (define-imm-subtag u8-vector 24) (defconstant min-8-bit-ivector-subtag subtag-u8-vector) (defconstant max-8-bit-ivector-subtag (logior fulltag-immheader (ash 25 ntagbits))) (define-imm-subtag simple-base-string 23) (define-imm-subtag fixnum-vector 22) (define-imm-subtag s32-vector 21) (define-imm-subtag u32-vector 20) (define-imm-subtag single-float-vector 19) (defconstant max-32-bit-ivector-subtag (logior fulltag-immheader (ash 23 ntagbits))) (defconstant min-cl-ivector-subtag subtag-single-float-vector) (define-node-subtag vectorH 30) (define-node-subtag arrayH 29) Only one such subtag (assert (< subtag-arrayH subtag-vectorH subtag-simple-vector)) ;;; So, we get the remaining subtags (n: (n < min-array-subtag)) ;;; for various immediate/node object types. 
(define-node-subtag pseudofunction 0) (define-imm-subtag macptr 3) (define-imm-subtag dead-macptr 4) (define-imm-subtag code-vector 5) (define-imm-subtag creole-object 6) (define-imm-subtag xcode-vector 7) ; code-vector for cross-development (define-imm-subtag complex-single-float 8) (define-imm-subtag complex-double-float 9) (defconstant max-non-array-imm-subtag (logior (ash 19 ntagbits) fulltag-immheader)) (define-node-subtag catch-frame 4) (defconstant min-non-numeric-node-subtag subtag-catch-frame) (define-node-subtag function 5) (define-node-subtag basic-stream 6) (define-node-subtag symbol 7) (define-node-subtag lock 8) (define-node-subtag hash-vector 9) (define-node-subtag pool 10) (define-node-subtag weak 11) (define-node-subtag package 12) (define-node-subtag slot-vector 13) (define-node-subtag instance 14) (define-node-subtag struct 15) (define-node-subtag istruct 16) (define-node-subtag value-cell 17) (define-node-subtag xfunction 18) ; Function for cross-development (defconstant max-non-array-node-subtag (logior (ash 18 ntagbits) fulltag-nodeheader)) (define-subtag stack-alloc-marker fulltag-imm 1) (define-subtag lisp-frame-marker fulltag-imm 2) (define-subtag character fulltag-imm 9) (define-subtag slot-unbound fulltag-imm 10) (defconstant slot-unbound-marker subtag-slot-unbound) (define-subtag illegal fulltag-imm 11) (defconstant illegal-marker subtag-illegal) (define-subtag go-tag fulltag-imm 12) (define-subtag block-tag fulltag-imm 24) (define-subtag no-thread-local-binding fulltag-imm 30) (define-subtag unbound fulltag-imm 6) (defconstant unbound-marker subtag-unbound) (defconstant undefined unbound-marker) (defconstant lisp-frame-marker subtag-lisp-frame-marker) (defconstant stack-alloc-marker subtag-stack-alloc-marker) (defconstant max-64-bit-constant-index 127) (defconstant max-32-bit-constant-index (ash (+ #xfff arm::misc-data-offset) -2)) (defconstant max-16-bit-constant-index (ash (+ #xfff arm::misc-data-offset) -1)) (defconstant max-8-bit-constant-index (+ #xfff arm::misc-data-offset)) (defconstant max-1-bit-constant-index (ash (+ #xfff arm::misc-data-offset) 5)) ;;; The objects themselves look something like this: ;;; Order of CAR and CDR doesn't seem to matter much - there aren't too many tricks to be played with addressing . ;;; Keep them in the confusing MCL 3.0 order, to avoid confusion. (define-lisp-object cons fulltag-cons cdr car) (define-fixedsized-object ratio numer denom) (define-fixedsized-object single-float value) (define-fixedsized-object double-float pad val-low val-high) (define-fixedsized-object complex-single-float pad realpart imagpart) (define-fixedsized-object complex-double-float pad realpart-low realpart-high imagpart-low imagpart-high) (defconstant complex-double-float.realpart complex-double-float.realpart-low) (defconstant complex-double-float.imagpart complex-double-float.imagpart-low) (defconstant double-float.value double-float.val-low) (defconstant double-float.value-cell double-float.val-low-cell) (define-fixedsized-object complex realpart imagpart ) There are two kinds of macptr ; use the length field of the header if you ;;; need to distinguish between them (define-fixedsized-object macptr address domain type ) (define-fixedsized-object xmacptr address domain type flags link ) Catch frames go on the cstack , below a lisp frame whose ;;; field references the catch exit point/unwind-protect cleanup code. 
(define-fixedsized-object catch-frame link ; tagged pointer to next older catch frame 0 if single - value , 1 if uwp or multiple - value catch-tag ; #<unbound> -> unwind-protect, else catch db-link ; value of dynamic-binding link on thread entry. xframe ; exception-frame link last-lisp-frame nfp ) (define-fixedsized-object lock _value ;finalizable pointer to kernel object kind ; '0 = recursive-lock, '1 = rwlock tcr of owning thread or 0 name whostate whostate-2 ) (define-fixedsized-object symbol pname vcell fcell package-predicate flags plist binding-index ) (define-fixedsized-object function entrypoint codevector ) (defconstant nilsym-offset (+ t-offset symbol.size)) (define-fixedsized-object vectorH logsize ; fillpointer if it has one, physsize otherwise physsize ; total size of (possibly displaced) data vector data-vector ; object this header describes true displacement or 0 flags ; has-fill-pointer,displaced-to,adjustable bits; subtype of underlying simple vector. ) (define-lisp-object arrayH fulltag-misc header ; subtag = subtag-arrayH rank ; NEVER 1 physsize ; total size of (possibly displaced) data vector data-vector ; object this header describes displacement ; true displacement or 0 flags ; has-fill-pointer,displaced-to,adjustable bits; subtype of underlying simple vector. ;; Dimensions follow ) (defconstant arrayH.rank-cell 0) (defconstant arrayH.physsize-cell 1) (defconstant arrayH.data-vector-cell 2) (defconstant arrayH.displacement-cell 3) (defconstant arrayH.flags-cell 4) (defconstant arrayH.dim0-cell 5) (defconstant arrayH.flags-cell-bits-byte (byte 8 0)) (defconstant arrayH.flags-cell-subtag-byte (byte 8 8)) (define-fixedsized-object value-cell value) ;;; The kernel uses these (rather generically named) structures ;;; to keep track of various memory regions it (or the lisp) is ;;; interested in. ;;; The gc-area record definition in "ccl:interfaces;mcl-records.lisp" ;;; matches this. (define-storage-layout area 0 pred ; pointer to preceding area in DLL succ ; pointer to next area in DLL low ; low bound on area addresses high ; high bound on area addresses. active ; low limit on stacks, high limit on heaps softlimit ; overflow bound another one code ; an area-code; see below bit vector for GC ndnodes ; "active" size of dynamic area or stack in EGC sense also for EGC h ; Handle or null pointer softprot ; protected_area structure pointer hardprot ; another one. owner ; fragment (library) which "owns" the area refbits ; bitvector for intergenerational refernces threshold ; for egc gc-count ; generational gc count. static-dnodes ; for honsing, etc. static-used ; bitvector ) (define-storage-layout protected-area 0 next start ; first byte (page-aligned) that might be protected end ; last byte (page-aligned) that could be protected nprot ; Might be 0 protsize ; number of bytes to protect why) (defconstant tcr-bias 0) (define-storage-layout tcr (- tcr-bias) prev ; in doubly-linked list next ; in doubly-linked list lisp-fpscr pad db-link ; special binding chain head catch-top ; top catch frame VSP when in foreign code save-tsp ; TSP when in foreign code cs-area ; cstack area pointer vs-area ; vstack area pointer last-lisp-frame cs-limit ; cstack overflow limit total-bytes-allocated-low total-bytes-allocated-high log2-allocation-quantum ; unboxed interrupt-pending ; fixnum xframe ; exception frame linked list errno-loc ; thread-private, maybe ffi-exception ; fpscr bits from ff-call. 
osid ; OS thread id valence ; odd when in foreign code foreign-exception-status native-thread-info native-thread-id last-allocptr save-allocptr save-allocbase reset-completion activate suspend-count suspend-context pending-exception-context suspend ; semaphore for suspension notify resume ; sempahore for resumption notify flags ; foreign, being reset, ... gc-context termination-semaphore unwinding tlb-limit tlb-pointer shutdown-count safe-ref-address architecture-version ;keep in each TCR for ease of access. nfp ) (defconstant interrupt-level-binding-index (ash 1 fixnumshift)) (define-storage-layout lockptr 0 avail owner count signal waiting malloced-ptr spinlock) (define-storage-layout rwlock 0 spin state blocked-writers blocked-readers writer reader-signal writer-signal malloced-ptr ) (arm::define-storage-layout lisp-frame 0 marker savevsp savefn savelr ) (defmacro define-header (name element-count subtag) `(defconstant ,name (logior (ash ,element-count num-subtag-bits) ,subtag))) (define-header single-float-header single-float.element-count subtag-single-float) (define-header double-float-header double-float.element-count subtag-double-float) (define-header complex-single-float-header complex-single-float.element-count subtag-complex-single-float) (define-header complex-double-float-header complex-double-float.element-count subtag-complex-double-float) (define-header one-digit-bignum-header 1 subtag-bignum) (define-header two-digit-bignum-header 2 subtag-bignum) (define-header three-digit-bignum-header 3 subtag-bignum) (define-header symbol-header symbol.element-count subtag-symbol) (define-header value-cell-header value-cell.element-count subtag-value-cell) (define-header macptr-header macptr.element-count subtag-macptr) ) (defun %kernel-global (sym) ;; Returns index relative to (- nil-value fulltag-nil) (let* ((pos (position sym arm::*arm-kernel-globals* :test #'string=))) (if pos (- (* (+ 3 pos) 4)) (error "Unknown kernel global : ~s ." sym)))) (defmacro kernel-global (sym) (let* ((pos (position sym arm::*arm-kernel-globals* :test #'string=))) (if pos (- (* (+ 3 pos) 4)) (error "Unknown kernel global : ~s ." sym)))) ;;; The kernel imports things that are defined in various other ;;; libraries for us. The objects in question are generally fixnum - tagged ; the entries in the " kernel - imports " vector are 4 ;;; bytes apart. 
(ccl::defenum (:prefix "KERNEL-IMPORT-" :start 0 :step 4) fd-setsize-bytes do-fd-set do-fd-clr do-fd-is-set do-fd-zero MakeDataExecutable GetSharedLibrary FindSymbol malloc free wait-for-signal tcr-frame-ptr register-xmacptr-dispose-function open-debug-output get-r-debug restore-soft-stack-limit egc-control lisp-bug NewThread YieldToThread DisposeThread ThreadCurrentStackSpace usage-exit save-fp-context restore-fp-context put-altivec-registers get-altivec-registers new-semaphore wait-on-semaphore signal-semaphore destroy-semaphore new-recursive-lock lock-recursive-lock unlock-recursive-lock destroy-recursive-lock suspend-other-threads resume-other-threads suspend-tcr resume-tcr rwlock-new rwlock-destroy rwlock-rlock rwlock-wlock rwlock-unlock recursive-lock-trylock foreign-name-and-offset lisp-read lisp-write lisp-open lisp-fchmod lisp-lseek lisp-close lisp-ftruncate lisp-stat lisp-fstat lisp-futex lisp-opendir lisp-readdir lisp-closedir lisp-pipe lisp-gettimeofday lisp-sigexit jvm-init lisp-lstat lisp-realpath ;; Dummy entry last-kernel-import ) (defconstant num-kernel-imports (ash kernel-import-last-kernel-import -2)) (defmacro nrs-offset (name) (let* ((pos (position name arm::*arm-nilreg-relative-symbols* :test #'eq))) (if pos (+ t-offset (* pos symbol.size))))) (defmacro with-stack-short-floats (specs &body body) (ccl::collect ((binds) (inits) (names)) (dolist (spec specs) (let ((name (first spec))) (binds `(,name (ccl::%make-sfloat))) (names name) (let ((init (second spec))) (when init (inits `(ccl::%short-float ,init ,name)))))) `(let* ,(binds) (declare (dynamic-extent ,@(names)) (short-float ,@(names))) ,@(inits) ,@body))) (defun arm-fpr-mask (value mode) (ecase (ccl::fpr-mode-value-name mode) (:single-float (ash 1 value)) ((:double-float :complex-single-float) (ash 3 (ash value 1))) (:complex-double-float (ash 15 (ash value 2))))) (defparameter *arm-target-uvector-subtags* `((:bignum . ,subtag-bignum) (:ratio . ,subtag-ratio) (:single-float . ,subtag-single-float) (:double-float . ,subtag-double-float) (:complex . ,subtag-complex ) (:complex-single-float . ,subtag-complex-single-float) (:complex-double-float . ,subtag-complex-double-float) (:symbol . ,subtag-symbol) (:function . ,subtag-function ) (:code-vector . ,subtag-code-vector) (:xcode-vector . ,subtag-xcode-vector) (:macptr . ,subtag-macptr ) (:catch-frame . ,subtag-catch-frame) (:struct . ,subtag-struct ) (:istruct . ,subtag-istruct ) (:pool . ,subtag-pool ) (:population . ,subtag-weak ) (:hash-vector . ,subtag-hash-vector ) (:package . ,subtag-package ) (:value-cell . ,subtag-value-cell) (:instance . ,subtag-instance ) (:lock . ,subtag-lock ) (:slot-vector . ,subtag-slot-vector) (:basic-stream . ,subtag-basic-stream) (:simple-string . ,subtag-simple-base-string ) (:bit-vector . ,subtag-bit-vector ) (:signed-8-bit-vector . ,subtag-s8-vector ) (:unsigned-8-bit-vector . ,subtag-u8-vector ) (:signed-16-bit-vector . ,subtag-s16-vector ) (:unsigned-16-bit-vector . ,subtag-u16-vector ) (:signed-32-bit-vector . ,subtag-s32-vector ) (:fixnum-vector . ,subtag-fixnum-vector) (:unsigned-32-bit-vector . ,subtag-u32-vector ) (:single-float-vector . ,subtag-single-float-vector) (:double-float-vector . ,subtag-double-float-vector ) (:complex-single-float-vector . ,subtag-complex-single-float-vector) (:complex-double-float-vector . ,subtag-complex-double-float-vector) (:simple-vector . ,subtag-simple-vector ) (:vector-header . ,subtag-vectorH) (:array-header . ,subtag-arrayH) (:xfunction . ,subtag-xfunction) (:pseudofunction . 
,subtag-pseudofunction) ;;; Defining the smallest cl ivector tag this way lets us use NX - LOOKUP - TARGET - UVECTOR - SUBTAG and ;;; is easier to bootsrtap than adding a slot to the ;; ARCH structure would be. (:min-cl-ivector-subtag . ,min-cl-ivector-subtag) )) ;;; This should return NIL unless it's sure of how the indicated ;;; type would be represented (in particular, it should return NIL if the element type is unknown or unspecified at compile - time . (defun arm-array-type-name-from-ctype (ctype) (when (typep ctype 'ccl::array-ctype) (let* ((element-type (ccl::array-ctype-element-type ctype))) (typecase element-type (ccl::class-ctype (let* ((class (ccl::class-ctype-class element-type))) (if (or (eq class ccl::*character-class*) (eq class ccl::*base-char-class*) (eq class ccl::*standard-char-class*)) :simple-string :simple-vector))) (ccl::numeric-ctype (if (eq (ccl::numeric-ctype-complexp element-type) :complex) (case (ccl::numeric-ctype-format element-type) (single-float :complex-single-float-vector) (double-float :complex-double-float-vector) (t :simple-vector)) (case (ccl::numeric-ctype-class element-type) (integer (let* ((low (ccl::numeric-ctype-low element-type)) (high (ccl::numeric-ctype-high element-type))) (cond ((or (null low) (null high)) :simple-vector) ((and (>= low 0) (<= high 1) :bit-vector)) ((and (>= low 0) (<= high 255)) :unsigned-8-bit-vector) ((and (>= low 0) (<= high 65535)) :unsigned-16-bit-vector) ((and (>= low 0) (<= high #xffffffff) :unsigned-32-bit-vector)) ((and (>= low -128) (<= high 127)) :signed-8-bit-vector) ((and (>= low -32768) (<= high 32767) :signed-16-bit-vector)) ((and (>= low target-most-negative-fixnum) (<= high target-most-positive-fixnum)) :fixnum-vector) ((and (>= low (ash -1 31)) (<= high (1- (ash 1 31)))) :signed-32-bit-vector) (t :simple-vector)))) (float (case (ccl::numeric-ctype-format element-type) ((double-float long-float) :double-float-vector) ((single-float short-float) :single-float-vector) (t :simple-vector))) (t :simple-vector)))) (ccl::unknown-ctype) (ccl::named-ctype (if (eq element-type ccl::*universal-type*) :simple-vector)) (t nil))))) (defun arm-misc-byte-count (subtag element-count) (declare (fixnum subtag)) (if (or (= fulltag-nodeheader (logand subtag fulltagmask)) (<= subtag max-32-bit-ivector-subtag)) (ash element-count 2) (if (<= subtag max-8-bit-ivector-subtag) element-count (if (<= subtag max-16-bit-ivector-subtag) (ash element-count 1) (if (= subtag subtag-bit-vector) (ash (+ element-count 7) -3) (if (= subtag subtag-complex-double-float-vector) (+ 4 (ash element-count 4)) (+ 4 (ash element-count 3)))))))) (defparameter *arm-target-arch* (progn (arch::make-target-arch :name :arm :lisp-node-size 4 :nil-value canonical-nil-value :fixnum-shift fixnumshift :most-positive-fixnum (1- (ash 1 (1- (- 32 fixnumshift)))) :most-negative-fixnum (- (ash 1 (1- (- 32 fixnumshift)))) :misc-data-offset misc-data-offset :misc-dfloat-offset misc-dfloat-offset :nbits-in-word 32 :ntagbits 3 :nlisptagbits 2 :uvector-subtags *arm-target-uvector-subtags* :max-64-bit-constant-index max-64-bit-constant-index :max-32-bit-constant-index max-32-bit-constant-index :max-16-bit-constant-index max-16-bit-constant-index :max-8-bit-constant-index max-8-bit-constant-index :max-1-bit-constant-index max-1-bit-constant-index :word-shift 2 :code-vector-prefix () :gvector-types '(:ratio :complex :symbol :function :catch-frame :struct :istruct :pool :population :hash-vector :package :value-cell :instance :lock :slot-vector :simple-vector :xfunction 
:pseudofunction) :1-bit-ivector-types '(:bit-vector) :8-bit-ivector-types '(:signed-8-bit-vector :unsigned-8-bit-vector) :16-bit-ivector-types '(:signed-16-bit-vector :unsigned-16-bit-vector) :32-bit-ivector-types '(:signed-32-bit-vector :unsigned-32-bit-vector :single-float-vector :fixnum-vector :single-float :double-float :bignum :simple-string) :64-bit-ivector-types '(:double-float-vector :complex-single-float-vector) :array-type-name-from-ctype-function #'arm-array-type-name-from-ctype :package-name "ARM" :t-offset t-offset :array-data-size-function #'arm-misc-byte-count :fpr-mask-function 'arm-fpr-mask :subprims-base arm::*arm-subprims-base* :subprims-shift arm::*arm-subprims-shift* :subprims-table arm::*arm-subprims* :primitive->subprims `(((0 . 23) . ,(ccl::%subprim-name->offset '.SPbuiltin-plus arm::*arm-subprims*))) :unbound-marker-value unbound-marker :slot-unbound-marker-value slot-unbound-marker :fixnum-tag tag-fixnum :single-float-tag subtag-single-float :single-float-tag-is-subtag t :double-float-tag subtag-double-float :cons-tag fulltag-cons :null-tag fulltag-nil :symbol-tag subtag-symbol :symbol-tag-is-subtag t :function-tag subtag-function :function-tag-is-subtag t :big-endian nil :misc-subtag-offset misc-subtag-offset :car-offset cons.car :cdr-offset cons.cdr :subtag-char subtag-character :charcode-shift charcode-shift :fulltagmask fulltagmask :fulltag-misc fulltag-misc :char-code-limit #x110000 ))) ;;; arch macros (defmacro defarmarchmacro (name lambda-list &body body) `(arch::defarchmacro :arm ,name ,lambda-list ,@body)) (defarmarchmacro ccl::%make-sfloat () `(ccl::%alloc-misc arm::single-float.element-count arm::subtag-single-float)) (defarmarchmacro ccl::%make-dfloat () `(ccl::%alloc-misc arm::double-float.element-count arm::subtag-double-float)) (defarmarchmacro ccl::%numerator (x) `(ccl::%svref ,x arm::ratio.numer-cell)) (defarmarchmacro ccl::%denominator (x) `(ccl::%svref ,x arm::ratio.denom-cell)) ;;; (defarmarchmacro ccl::%get-single-float-from-double-ptr (ptr offset) `(ccl::%double-float->short-float (ccl::%get-double-float ,ptr ,offset) (ccl::%alloc-misc 1 arm::subtag-single-float))) (defarmarchmacro ccl::codevec-header-p (word) `(eql arm::subtag-code-vector (logand ,word arm::subtag-mask))) (defarmarchmacro ccl::immediate-p-macro (thing) (let* ((tag (gensym))) `(let* ((,tag (ccl::lisptag ,thing))) (declare (fixnum ,tag)) (or (= ,tag arm::tag-fixnum) (= ,tag arm::tag-imm))))) (defarmarchmacro ccl::hashed-by-identity (thing) (let* ((typecode (gensym))) `(let* ((,typecode (ccl::typecode ,thing))) (declare (fixnum ,typecode)) (or (= ,typecode arm::tag-fixnum) (= ,typecode arm::tag-imm) (= ,typecode arm::subtag-symbol) (= ,typecode arm::subtag-instance))))) ;;; (defarmarchmacro ccl::%get-kernel-global (name) `(ccl::%fixnum-ref (ash (+ (- nil-value fulltag-nil) ,(%kernel-global (if (ccl::quoted-form-p name) (cadr name) name))) (- fixnumshift)))) (defarmarchmacro ccl::%get-kernel-global-ptr (name dest) `(ccl::%setf-macptr ,dest (ccl::%fixnum-ref-macptr (ash (+ (- nil-value fulltag-nil) ,(%kernel-global (if (ccl::quoted-form-p name) (cadr name) name))) (- fixnumshift))))) (defarmarchmacro ccl::%target-kernel-global (name) `(arm::%kernel-global ,name)) (defarmarchmacro ccl::lfun-vector (fun) fun) (defarmarchmacro ccl::lfun-vector-lfun (lfv) lfv) (defarmarchmacro ccl::area-code () area.code) (defarmarchmacro ccl::area-succ () area.succ) ;;; We generally don't want much code to see the function's entrypoint. 
(defarmarchmacro ccl::nth-immediate (f i) `(ccl::%svref ,f (the fixnum (+ (the fixnum ,i) 1)))) (defarmarchmacro ccl::set-nth-immediate (f i new) `(setf (ccl::%svref ,f (the fixnum (+ (the fixnum ,i) 1))) ,new)) (defarmarchmacro ccl::symptr->symvector (s) s) (defarmarchmacro ccl::symvector->symptr (s) s) (defarmarchmacro ccl::function-to-function-vector (f) f) (defarmarchmacro ccl::function-vector-to-function (v) v) (defarmarchmacro ccl::with-ffcall-results ((buf) &body body) (let* ((size (+ (* 8 4) (* 31 8)))) `(%stack-block ((,buf ,size)) ,@body))) (defconstant arg-check-trap-pc-limit 8) UUO encoding 12 bits of code 8 bits of info - NOT type info - 4 - bit reg 2 bits of lisptag info , 4 - bit reg 3 bits of fulltag info , 4 bit reg 8 bits of extended type / subtag info , 4 bit reg continuable , lisptag , reg continuable , fulltag , reg continuable , xtype , reg 4 bits of code , r1 , r0 xtypes : 8 - bit integers used to report type errors for types that ca n't ;;; be represented via tags. (defconstant xtype-unsigned-byte-24 252) (defconstant xtype-array2d 248) (defconstant xtype-array3d 244) (defconstant xtype-integer 4) (defconstant xtype-s64 8) (defconstant xtype-u64 12) (defconstant xtype-s32 16) (defconstant xtype-u32 20) (defconstant xtype-s16 24) (defconstant xtype-u16 28) (defconstant xtype-s8 32) (defconstant xtype-u8 36) (defconstant xtype-bit 40) (defconstant xtype-rational 44) (defconstant xtype-real 48) (defconstant xtype-number 52) (defconstant xtype-char-code 56) ;;; Condition field values. (ccl::defenum (:prefix "ARM-COND-") eq ne hs lo mi pl vs vc hi ls ge lt gt le al) ;;; FPSCR exception bits (defconstant ioc 0) ;invalid operation division by 0 (defconstant ofc 2) ;overflow (defconstant ufc 3) ;underflow (defconstant ixc 4) ;inexact (defconstant ioe 8) ;invalid operation enable division by 0 enable (defconstant ofe 10) ;overflow enable (defconstant ufe 11) ;underflow enable (defconstant ixe 12) ;inexact enable ;;; These are always stack-allocated, "near" where the missing lisp frame ;;; that they represent would be. (define-storage-layout fake-stack-frame 0 header type ; 'arm::fake-stack-frame sp next-sp fn lr vsp xp) #+arm-target (ccl::make-istruct-class 'fake-stack-frame ccl::*istruct-class*) (defconstant real-tags-mask (logior (ash 1 tag-fixnum) (ash 1 subtag-bignum) (ash 1 subtag-single-float) (ash 1 subtag-double-float) (ash 1 subtag-ratio))) (defconstant numeric-tags-mask (logior real-tags-mask (ash 1 subtag-complex))) (defconstant fasl-version #x66) (defconstant fasl-max-version #x66) (defconstant fasl-min-version #x66) (defparameter *image-abi-version* 1045) (provide "ARM-ARCH")
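;;; Illustration (a sketch; SUBTAG-PARTS is not part of the interface above):
;;; the DEFINE-SUBTAG scheme packs a fulltag into the low NTAGBITS of a
;;; subtag and a small per-fulltag index above it, so both pieces can be
;;; recovered with LOGAND/ASH.  Only constants already referenced above are
;;; assumed.
(defun subtag-parts (subtag)
  "Return the fulltag and the per-fulltag index encoded in SUBTAG."
  (values (logand subtag fulltagmask)
          (ash subtag (- ntagbits))))
;;; E.g. (subtag-parts subtag-character) returns FULLTAG-IMM and 9,
;;; matching (define-subtag character fulltag-imm 9) above.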
null
https://raw.githubusercontent.com/Clozure/ccl/6c1a9458f7a5437b73ec227e989aa5b825f32fd3/compiler/ARM/arm-arch.lisp
lisp
Package : ( ARM : use CL ) -*- Copyright 2010 Clozure Associates you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Lisp registers. This allows redefinition, which might be helpful while before calling this. Calling sequence may pass additional arguments in temp registers. "nfn" (new function) is always passed; it's the new value of "fn". the "next-method-context" register. Kernel globals are allocated "below" nil. This list (used to map symbolic names to rnil-relative offsets) must (of course) exactly match the kernel's notion of where things are. The order here matches "ccl:lisp-kernel;lisp_globals.h" & the lisp_globals record in "ccl:lisp-kernel;*constants*.s" callback to obtain (real) tcr used by PROCESS-INTERRUPT some things we need to have imported for us. used to save TOC on some platforms used to save r13 on some platforms start of dynamic subprims jump table magic multiple-values return address. tsd key for thread's tcr serialize access to gc serialize exception handling log2_heap_segment_size, as a fixnum. interrupt-pending flag for gc locking youngest generation non-zero if using hard float abi start of lisp heap end of lisp heap true if the lisp kernel is statically linked value of --stack-size arg objc_begin_catch doubly-linked area list multiple-value lexpr return address single-value lexpr return address fixnum _objc_end_catch saved registers from exception frame the tenured_area. address of C lib argv non-zero if --batch specified weak gc algorithm. current image name initial thread's context record in "ccl:lisp-kernel;constants.s". NIL used to be in a register . See ? A pet name for it. Tags. "see" headers.) All fixnums, whether odd or even that walk memory (and the stack) have to be careful to look at the FULLTAG of each object that they see. I suppose EVENP/ODDP might care; nothing else does. Header of heap-allocated object that contains lisp-object pointers a "real" immediate object. Shares TAG with fulltag-immheader. a real (non-null) cons. Shares TAG with fulltag-nil. Header of heap-allocated object that contains unboxed data. not inherently interesting; it should be possible to express that difference as an ARM constant, but that's the only real constraint. The order in which various header values are defined is significant in several ways: there are further orderings among numeric subtags. don't, with a further ordering which requires that (< header-arrayH header-vectorH ,@all-other-CL-vector-types) Numeric subtags. "SINGLE" float, aka short-float in the new order. all CL array subtags must be > than with that subtag whose element size isn't an integral number of bits and ending with those whose So, we get the remaining subtags (n: (n < min-array-subtag)) for various immediate/node object types. code-vector for cross-development Function for cross-development The objects themselves look something like this: Order of CAR and CDR doesn't seem to matter much - there aren't Keep them in the confusing MCL 3.0 order, to avoid confusion. use the length field of the header if you need to distinguish between them field references the catch exit point/unwind-protect cleanup code. 
tagged pointer to next older catch frame #<unbound> -> unwind-protect, else catch value of dynamic-binding link on thread entry. exception-frame link finalizable pointer to kernel object '0 = recursive-lock, '1 = rwlock fillpointer if it has one, physsize otherwise total size of (possibly displaced) data vector object this header describes has-fill-pointer,displaced-to,adjustable bits; subtype of underlying simple vector. subtag = subtag-arrayH NEVER 1 total size of (possibly displaced) data vector object this header describes true displacement or 0 has-fill-pointer,displaced-to,adjustable bits; subtype of underlying simple vector. Dimensions follow The kernel uses these (rather generically named) structures to keep track of various memory regions it (or the lisp) is interested in. The gc-area record definition in "ccl:interfaces;mcl-records.lisp" matches this. pointer to preceding area in DLL pointer to next area in DLL low bound on area addresses high bound on area addresses. low limit on stacks, high limit on heaps overflow bound an area-code; see below "active" size of dynamic area or stack Handle or null pointer protected_area structure pointer another one. fragment (library) which "owns" the area bitvector for intergenerational refernces for egc generational gc count. for honsing, etc. bitvector first byte (page-aligned) that might be protected last byte (page-aligned) that could be protected Might be 0 number of bytes to protect in doubly-linked list in doubly-linked list special binding chain head top catch frame TSP when in foreign code cstack area pointer vstack area pointer cstack overflow limit unboxed fixnum exception frame linked list thread-private, maybe fpscr bits from ff-call. OS thread id odd when in foreign code semaphore for suspension notify sempahore for resumption notify foreign, being reset, ... keep in each TCR for ease of access. Returns index relative to (- nil-value fulltag-nil) The kernel imports things that are defined in various other libraries for us. The objects in question are generally the entries in the " kernel - imports " vector are 4 bytes apart. Dummy entry Defining the smallest cl ivector tag this way is easier to bootsrtap than adding a slot to the ARCH structure would be. This should return NIL unless it's sure of how the indicated type would be represented (in particular, it should return arch macros We generally don't want much code to see the function's entrypoint. be represented via tags. Condition field values. FPSCR exception bits invalid operation overflow underflow inexact invalid operation enable overflow enable underflow enable inexact enable These are always stack-allocated, "near" where the missing lisp frame that they represent would be. 'arm::fake-stack-frame
distributed under the License is distributed on an " AS IS " BASIS , (defpackage "ARM" (:use "CL") #+arm-target (:nicknames "TARGET")) (require "ARCH") (in-package "ARM") (eval-when (:compile-toplevel :load-toplevel :execute) (defvar *arm-register-names* ()) (defun get-arm-register (name) (let* ((pair (assoc (string name) *arm-register-names* :test #'string-equal))) (if pair (cdr pair)))) (defun get-arm-gpr (name) (let* ((value (get-arm-register name))) (and value (< value 16) value))) (defun get-arm-sfpr (name) (let* ((value (get-arm-register name))) (and value (logbitp 5 value) (logand #x1f value)))) (defun get-arm-dfpr (name) (let* ((value (get-arm-register name))) (and value (logbitp 6 value) (logand #x0f value)))) boostrapping . ARM - LAP - EQUATE - FORM checks for redefinition (defun define-arm-register (name val) (let* ((value (if (typep val 'fixnum) val (get-arm-register val))) (string (string name))) (unless value (error "invalid ARM register value ~d for ~s." val name)) (let* ((pair (assoc string *arm-register-names* :test #'string-equal))) (if pair (progn (unless (eql (cdr pair) value) (when ccl::*cerror-on-constant-redefinition* (cerror "Redefine ARM register ~s to have value ~*~d." "ARM register ~s currently has value ~d." name (cdr pair) value) (setf (cdr pair) value)))) (push (cons string value) *arm-register-names*)) value))) (defmacro defarmgpr (name val) `(defconstant ,name (define-arm-register ',name ',val))) (defarmgpr r0 0) (defarmgpr r1 1) (defarmgpr r2 2) (defarmgpr r3 3) (defarmgpr r4 4) (defarmgpr r5 5) (defarmgpr r6 6) (defarmgpr r7 7) (defarmgpr r8 8) (defarmgpr r9 9) (defarmgpr r10 10) (defarmgpr r11 11) (defarmgpr r12 12) (defarmgpr r13 13) (defarmgpr r14 14) (defarmgpr r15 15) (defarmgpr imm0 r0) (defarmgpr imm1 r1) (defarmgpr imm2 r2) (defarmgpr rcontext r3) (defarmgpr arg_z r4) (defarmgpr arg_y r5) (defarmgpr arg_x r6) (defarmgpr temp0 r7) (defarmgpr temp1 r8) (defarmgpr temp2 r9) (defarmgpr vsp r10) (defarmgpr fn r11) (defarmgpr allocptr r12) (defarmgpr sp r13) (defarmgpr lr r14) (defarmgpr pc r15) (defarmgpr nfn temp2) CLOS may pass the context for , e.g .. , CALL - NEXT - METHOD in (defarmgpr next-method-context temp1) (defarmgpr fname temp1) (defarmgpr nargs imm2) (defmacro defarmsfpr (name val) `(defconstant ,name (define-arm-register ',name ',val))) (defarmsfpr s0 32) (defarmsfpr s1 33) (defarmsfpr s2 34) (defarmsfpr s3 35) (defarmsfpr s4 36) (defarmsfpr s5 37) (defarmsfpr s6 38) (defarmsfpr s7 39) (defarmsfpr s8 40) (defarmsfpr s9 41) (defarmsfpr s10 42) (defarmsfpr s11 43) (defarmsfpr s12 44) (defarmsfpr s13 45) (defarmsfpr s14 46) (defarmsfpr s15 47) (defarmsfpr s16 48) (defarmsfpr s17 49) (defarmsfpr s18 50) (defarmsfpr s19 51) (defarmsfpr s20 52) (defarmsfpr s21 53) (defarmsfpr s22 54) (defarmsfpr s23 55) (defarmsfpr s24 56) (defarmsfpr s25 57) (defarmsfpr s26 58) (defarmsfpr s27 59) (defarmsfpr s28 60) (defarmsfpr s29 61) (defarmsfpr s30 62) (defarmsfpr s31 63) (defarmsfpr single-float-zero s30) The first 16 double - float registers overlap pairs of single - float registers ( d0 overlaps s0 - s1 , d15 overlaps s30 - s31 , etc . 
) (defmacro defarmdfpr (name val) `(defconstant ,name (define-arm-register ',name ',val))) (defarmdfpr d0 64) (defarmdfpr d1 65) (defarmdfpr d2 66) (defarmdfpr d3 67) (defarmdfpr d4 68) (defarmdfpr d5 69) (defarmdfpr d6 70) (defarmdfpr d7 71) (defarmdfpr d8 72) (defarmdfpr d9 73) (defarmdfpr d10 74) (defarmdfpr d11 75) (defarmdfpr d12 76) (defarmdfpr d13 77) (defarmdfpr d14 78) (defarmdfpr d15 79) (defarmdfpr double-float-zero d15) ) (defparameter *standard-arm-register-names* *arm-register-names*) (defparameter *arm-kernel-globals* tcr-count objc-2-personality when FREEZE is in effect oldspace number of dnodes in dynamic space that are older than fixnum : GC " forwarder " call count . fixnum : GC call count . linked - list of weak macptrs . kernel-path non - zero when GC - ish thing active low half of 1.0d0 high half of 1.0d0 doublenode address of oldest ephemeral object or 0 address of C lib errno 0 on MacOS , 1 on ARM Linux , 2 on ... _ Unwind_Resume all populations as of last GC )) The order here matches " ccl : lisp - kernel;lisp_globals.h " and the nrs record (defparameter *arm-nil-relative-symbols* '(t nil ccl::%err-disp ccl::cmain eval ccl::apply-evaluated-function error ccl::%defun ccl::%defvar ccl::%defconstant ccl::%macro ccl::%kernel-restart *package* ccl::*total-bytes-freed* :allow-other-keys ccl::%toplevel-catch% ccl::%toplevel-function% ccl::%pascal-functions% ccl::restore-lisp-pointers ccl::*total-gc-microseconds* ccl::%builtin-functions% ccl::%unbound-function% ccl::%init-misc ccl::%macro-code% ccl::%closure-code% ccl::%new-gcable-ptr ccl::*gc-event-status-bits* ccl::*post-gc-hook* ccl::%handlers% ccl::%all-packages% ccl::*keyword-package* ccl::%os-init-function% ccl::%foreign-thread-control )) (defparameter *arm-nilreg-relative-symbols* *arm-nil-relative-symbols*) (eval-when (:compile-toplevel :load-toplevel :execute) (defparameter *arm-subprims-shift* 2) (defconstant tcr.sptab 256) (defparameter *arm-subprims-base* tcr.sptab ) ) (defvar *arm-subprims*) (let* ((origin *arm-subprims-base*) (step (ash 1 *arm-subprims-shift*))) (flet ((define-arm-subprim (name) (ccl::make-subprimitive-info :name (string name) :offset (prog1 origin (incf origin step))))) (macrolet ((defarmsubprim (name) `(define-arm-subprim ',name))) (setq *arm-subprims* (vector must be first (defarmsubprim .SPbuiltin-plus) (defarmsubprim .SPbuiltin-minus) (defarmsubprim .SPbuiltin-times) (defarmsubprim .SPbuiltin-div) (defarmsubprim .SPbuiltin-eq) (defarmsubprim .SPbuiltin-ne) (defarmsubprim .SPbuiltin-gt) (defarmsubprim .SPbuiltin-ge) (defarmsubprim .SPbuiltin-lt) (defarmsubprim .SPbuiltin-le) (defarmsubprim .SPbuiltin-eql) (defarmsubprim .SPbuiltin-length) (defarmsubprim .SPbuiltin-seqtype) (defarmsubprim .SPbuiltin-assq) (defarmsubprim .SPbuiltin-memq) (defarmsubprim .SPbuiltin-logbitp) (defarmsubprim .SPbuiltin-logior) (defarmsubprim .SPbuiltin-logand) (defarmsubprim .SPbuiltin-ash) (defarmsubprim .SPbuiltin-negate) (defarmsubprim .SPbuiltin-logxor) (defarmsubprim .SPbuiltin-aref1) (defarmsubprim .SPbuiltin-aset1) (defarmsubprim .SPfuncall) (defarmsubprim .SPmkcatch1v) (defarmsubprim .SPmkcatchmv) (defarmsubprim .SPmkunwind) (defarmsubprim .SPbind) (defarmsubprim .SPconslist) (defarmsubprim .SPconslist-star) (defarmsubprim .SPmakes32) (defarmsubprim .SPmakeu32) (defarmsubprim .SPfix-overflow) (defarmsubprim .SPmakeu64) (defarmsubprim .SPmakes64) (defarmsubprim .SPmvpass) (defarmsubprim .SPvalues) (defarmsubprim .SPnvalret) (defarmsubprim .SPthrow) (defarmsubprim .SPnthrowvalues) (defarmsubprim 
.SPnthrow1value) (defarmsubprim .SPbind-self) (defarmsubprim .SPbind-nil) (defarmsubprim .SPbind-self-boundp-check) (defarmsubprim .SPrplaca) (defarmsubprim .SPrplacd) (defarmsubprim .SPgvset) (defarmsubprim .SPset-hash-key) (defarmsubprim .SPstore-node-conditional) (defarmsubprim .SPset-hash-key-conditional) (defarmsubprim .SPstkconslist) (defarmsubprim .SPstkconslist-star) (defarmsubprim .SPmkstackv) (defarmsubprim .SPsetqsym) (defarmsubprim .SPprogvsave) (defarmsubprim .SPstack-misc-alloc) (defarmsubprim .SPgvector) (defarmsubprim .SPfitvals) (defarmsubprim .SPnthvalue) (defarmsubprim .SPdefault-optional-args) (defarmsubprim .SPopt-supplied-p) (defarmsubprim .SPheap-rest-arg) (defarmsubprim .SPreq-heap-rest-arg) (defarmsubprim .SPheap-cons-rest-arg) (defarmsubprim .SPcheck-fpu-exception) (defarmsubprim .SPdiscard_stack_object) (defarmsubprim .SPksignalerr) (defarmsubprim .SPstack-rest-arg) (defarmsubprim .SPreq-stack-rest-arg) (defarmsubprim .SPstack-cons-rest-arg) (defarmsubprim .SPcall-closure) (defarmsubprim .SPspreadargz) (defarmsubprim .SPtfuncallgen) (defarmsubprim .SPtfuncallslide) (defarmsubprim .SPjmpsym) (defarmsubprim .SPtcallsymgen) (defarmsubprim .SPtcallsymslide) (defarmsubprim .SPtcallnfngen) (defarmsubprim .SPtcallnfnslide) (defarmsubprim .SPmisc-ref) (defarmsubprim .SPsubtag-misc-ref) (defarmsubprim .SPmakestackblock) (defarmsubprim .SPmakestackblock0) (defarmsubprim .SPmakestacklist) (defarmsubprim .SPstkgvector) (defarmsubprim .SPmisc-alloc) (defarmsubprim .SPatomic-incf-node) (defarmsubprim .SPunused1) (defarmsubprim .SPunused2) (defarmsubprim .SPrecover-values) (defarmsubprim .SPinteger-sign) (defarmsubprim .SPsubtag-misc-set) (defarmsubprim .SPmisc-set) (defarmsubprim .SPspread-lexprz) (defarmsubprim .SPreset) (defarmsubprim .SPmvslide) (defarmsubprim .SPsave-values) (defarmsubprim .SPadd-values) (defarmsubprim .SPmisc-alloc-init) (defarmsubprim .SPstack-misc-alloc-init) (defarmsubprim .SPpopj) (defarmsubprim .SPudiv64by32) (defarmsubprim .SPgetu64) (defarmsubprim .SPgets64) (defarmsubprim .SPspecref) (defarmsubprim .SPspecrefcheck) (defarmsubprim .SPspecset) (defarmsubprim .SPgets32) (defarmsubprim .SPgetu32) (defarmsubprim .SPmvpasssym) (defarmsubprim .SPunbind) (defarmsubprim .SPunbind-n) (defarmsubprim .SPunbind-to) (defarmsubprim .SPprogvrestore) (defarmsubprim .SPbind-interrupt-level-0) (defarmsubprim .SPbind-interrupt-level-m1) (defarmsubprim .SPbind-interrupt-level) (defarmsubprim .SPunbind-interrupt-level) (defarmsubprim .SParef2) (defarmsubprim .SParef3) (defarmsubprim .SPaset2) (defarmsubprim .SPaset3) (defarmsubprim .SPkeyword-bind) (defarmsubprim .SPudiv32) (defarmsubprim .SPsdiv32) (defarmsubprim .SPeabi-ff-call-simple) (defarmsubprim .SPdebind) (defarmsubprim .SPeabi-callback) (defarmsubprim .SPeabi-ff-callhf) ))))) (defmacro define-storage-layout (name origin &rest cells) `(progn (ccl::defenum (:start ,origin :step 4) ,@(mapcar #'(lambda (cell) (ccl::form-symbol name "." 
cell)) cells)) (defconstant ,(ccl::form-symbol name ".SIZE") ,(* (length cells) 4)))) (defmacro define-lisp-object (name tagname &rest cells) `(define-storage-layout ,name ,(- (symbol-value tagname)) ,@cells)) (defmacro define-subtag (name tag subtag) `(defconstant ,(ccl::form-symbol "SUBTAG-" name) (logior ,tag (ash ,subtag ntagbits)))) (defmacro define-imm-subtag (name subtag) `(define-subtag ,name fulltag-immheader ,subtag)) (defmacro define-node-subtag (name subtag) `(define-subtag ,name fulltag-nodeheader ,subtag)) (defmacro define-fixedsized-object (name &rest non-header-cells) `(progn (define-lisp-object ,name fulltag-misc header ,@non-header-cells) (ccl::defenum () ,@(mapcar #'(lambda (cell) (ccl::form-symbol name "." cell "-CELL")) non-header-cells)) (defconstant ,(ccl::form-symbol name ".ELEMENT-COUNT") ,(length non-header-cells)))) (eval-when (:compile-toplevel :load-toplevel :execute) (defconstant nbits-in-word 32) (defconstant least-significant-bit 31) (defconstant nbits-in-byte 8) But non - header objects only use 2 (defconstant nlisptagbits 2) tag part of header is 8 bits wide (defconstant fixnumshift nfixnumtagbits) Only needed by GC / very low - level code (defconstant full-tag-mask fulltagmask) (defconstant tagmask (1- (ash 1 nlisptagbits))) (defconstant tag-mask tagmask) (defconstant fixnummask (1- (ash 1 nfixnumtagbits))) (defconstant fixnum-mask fixnummask) (defconstant subtag-mask (1- (ash 1 num-subtag-bits))) only the low 8 bits are used , currently (defconstant charcode-shift (- nbits-in-word ncharcodebits)) (defconstant word-shift 2) (defconstant word-size-in-bytes 4) (defconstant node-size 4) (defconstant dnode-size 8) (defconstant dnode-align-bits 3) (defconstant dnode-shift dnode-align-bits) (defconstant bitmap-shift 5) (defconstant target-most-negative-fixnum (ash -1 (1- (- nbits-in-word nfixnumtagbits)))) (defconstant target-most-positive-fixnum (1- (ash 1 (1- (- nbits-in-word nfixnumtagbits))))) (defconstant fixnumone (ash 1 fixnumshift)) There are two - bit tags and three - bit tags . A FULLTAG is the value of the low three bits of a tagged object . A TAG is the value of the low two bits of a tagged object . A TYPECODE is either a TAG or the value of a " tag - misc " object 's header - byte . There are 4 primary TAG values . Any object which lisp can " see " can be classified by its TAG . ( Some headers have FULLTAGS that are congruent modulo 4 with the TAGS of other objects , but lisp ca n't (ccl::defenum () Conses and NIL Heap - consed objects other than lists : vectors , symbols , functions , floats ... Immediate - objects : characters , UNBOUND , other markers . ) And there are 8 FULLTAG values . Note that NIL has its own FULLTAG ( congruent mod 4 to tag - list ) and that both FULLTAG - MISC and FULLTAG - IMM have header fulltags that share the same TAG . Things (ccl::defenum () NIL and nothing but . ( Note that there 's still a hidden NILSYM . ) Pointer " real " tag - misc object . Shares TAG with fulltag - nodeheader . ) (defconstant misc-header-offset (- fulltag-misc)) (defconstant misc-subtag-offset misc-header-offset) (defconstant misc-data-offset (+ misc-header-offset 4)) (defconstant misc-dfloat-offset (+ misc-header-offset 8)) (defconstant misc-complex-dfloat-offset misc-dfloat-offset) (defconstant canonical-nil-value (+ #x04000000 fulltag-nil)) (defconstant nil-value canonical-nil-value) T is almost adjacent to NIL : since NIL is a misaligned CONS , it spans two doublewords . 
The arithmetic difference between T and NIL is (defconstant t-offset (+ (- dnode-size fulltag-nil) fulltag-misc)) 2 ) All subtags which denote CL arrays are preceded by those that 3 ) The element - size of ivectors is determined by the ordering of . 4 ) All subtags are > = fulltag - immheader . (define-imm-subtag bignum 0) (define-node-subtag ratio 1) (define-imm-subtag double-float 2) (define-node-subtag complex 3) all non - CL - array subtags . So we start by defining the immediate subtags in decreasing order , starting element size - like all non - CL - array fulltag - immheader types - is 32 bits . (define-imm-subtag bit-vector 31) (define-imm-subtag complex-double-float-vector 30) (define-imm-subtag complex-single-float-vector 29) (define-imm-subtag double-float-vector 28) (define-imm-subtag s16-vector 27) (define-imm-subtag u16-vector 26) (defconstant min-16-bit-ivector-subtag subtag-u16-vector) (defconstant max-16-bit-ivector-subtag subtag-s16-vector) (define-imm-subtag s8-vector 25) (define-imm-subtag u8-vector 24) (defconstant min-8-bit-ivector-subtag subtag-u8-vector) (defconstant max-8-bit-ivector-subtag (logior fulltag-immheader (ash 25 ntagbits))) (define-imm-subtag simple-base-string 23) (define-imm-subtag fixnum-vector 22) (define-imm-subtag s32-vector 21) (define-imm-subtag u32-vector 20) (define-imm-subtag single-float-vector 19) (defconstant max-32-bit-ivector-subtag (logior fulltag-immheader (ash 23 ntagbits))) (defconstant min-cl-ivector-subtag subtag-single-float-vector) (define-node-subtag vectorH 30) (define-node-subtag arrayH 29) Only one such subtag (assert (< subtag-arrayH subtag-vectorH subtag-simple-vector)) (define-node-subtag pseudofunction 0) (define-imm-subtag macptr 3) (define-imm-subtag dead-macptr 4) (define-imm-subtag code-vector 5) (define-imm-subtag creole-object 6) (define-imm-subtag complex-single-float 8) (define-imm-subtag complex-double-float 9) (defconstant max-non-array-imm-subtag (logior (ash 19 ntagbits) fulltag-immheader)) (define-node-subtag catch-frame 4) (defconstant min-non-numeric-node-subtag subtag-catch-frame) (define-node-subtag function 5) (define-node-subtag basic-stream 6) (define-node-subtag symbol 7) (define-node-subtag lock 8) (define-node-subtag hash-vector 9) (define-node-subtag pool 10) (define-node-subtag weak 11) (define-node-subtag package 12) (define-node-subtag slot-vector 13) (define-node-subtag instance 14) (define-node-subtag struct 15) (define-node-subtag istruct 16) (define-node-subtag value-cell 17) (defconstant max-non-array-node-subtag (logior (ash 18 ntagbits) fulltag-nodeheader)) (define-subtag stack-alloc-marker fulltag-imm 1) (define-subtag lisp-frame-marker fulltag-imm 2) (define-subtag character fulltag-imm 9) (define-subtag slot-unbound fulltag-imm 10) (defconstant slot-unbound-marker subtag-slot-unbound) (define-subtag illegal fulltag-imm 11) (defconstant illegal-marker subtag-illegal) (define-subtag go-tag fulltag-imm 12) (define-subtag block-tag fulltag-imm 24) (define-subtag no-thread-local-binding fulltag-imm 30) (define-subtag unbound fulltag-imm 6) (defconstant unbound-marker subtag-unbound) (defconstant undefined unbound-marker) (defconstant lisp-frame-marker subtag-lisp-frame-marker) (defconstant stack-alloc-marker subtag-stack-alloc-marker) (defconstant max-64-bit-constant-index 127) (defconstant max-32-bit-constant-index (ash (+ #xfff arm::misc-data-offset) -2)) (defconstant max-16-bit-constant-index (ash (+ #xfff arm::misc-data-offset) -1)) (defconstant max-8-bit-constant-index (+ #xfff 
arm::misc-data-offset)) (defconstant max-1-bit-constant-index (ash (+ #xfff arm::misc-data-offset) 5)) too many tricks to be played with addressing . (define-lisp-object cons fulltag-cons cdr car) (define-fixedsized-object ratio numer denom) (define-fixedsized-object single-float value) (define-fixedsized-object double-float pad val-low val-high) (define-fixedsized-object complex-single-float pad realpart imagpart) (define-fixedsized-object complex-double-float pad realpart-low realpart-high imagpart-low imagpart-high) (defconstant complex-double-float.realpart complex-double-float.realpart-low) (defconstant complex-double-float.imagpart complex-double-float.imagpart-low) (defconstant double-float.value double-float.val-low) (defconstant double-float.value-cell double-float.val-low-cell) (define-fixedsized-object complex realpart imagpart ) (define-fixedsized-object macptr address domain type ) (define-fixedsized-object xmacptr address domain type flags link ) Catch frames go on the cstack , below a lisp frame whose (define-fixedsized-object catch-frame 0 if single - value , 1 if uwp or multiple - value last-lisp-frame nfp ) (define-fixedsized-object lock tcr of owning thread or 0 name whostate whostate-2 ) (define-fixedsized-object symbol pname vcell fcell package-predicate flags plist binding-index ) (define-fixedsized-object function entrypoint codevector ) (defconstant nilsym-offset (+ t-offset symbol.size)) (define-fixedsized-object vectorH true displacement or 0 ) (define-lisp-object arrayH fulltag-misc ) (defconstant arrayH.rank-cell 0) (defconstant arrayH.physsize-cell 1) (defconstant arrayH.data-vector-cell 2) (defconstant arrayH.displacement-cell 3) (defconstant arrayH.flags-cell 4) (defconstant arrayH.dim0-cell 5) (defconstant arrayH.flags-cell-bits-byte (byte 8 0)) (defconstant arrayH.flags-cell-subtag-byte (byte 8 8)) (define-fixedsized-object value-cell value) (define-storage-layout area 0 another one bit vector for GC in EGC sense also for EGC ) (define-storage-layout protected-area 0 next why) (defconstant tcr-bias 0) (define-storage-layout tcr (- tcr-bias) lisp-fpscr pad VSP when in foreign code last-lisp-frame total-bytes-allocated-low total-bytes-allocated-high foreign-exception-status native-thread-info native-thread-id last-allocptr save-allocptr save-allocbase reset-completion activate suspend-count suspend-context pending-exception-context gc-context termination-semaphore unwinding tlb-limit tlb-pointer shutdown-count safe-ref-address nfp ) (defconstant interrupt-level-binding-index (ash 1 fixnumshift)) (define-storage-layout lockptr 0 avail owner count signal waiting malloced-ptr spinlock) (define-storage-layout rwlock 0 spin state blocked-writers blocked-readers writer reader-signal writer-signal malloced-ptr ) (arm::define-storage-layout lisp-frame 0 marker savevsp savefn savelr ) (defmacro define-header (name element-count subtag) `(defconstant ,name (logior (ash ,element-count num-subtag-bits) ,subtag))) (define-header single-float-header single-float.element-count subtag-single-float) (define-header double-float-header double-float.element-count subtag-double-float) (define-header complex-single-float-header complex-single-float.element-count subtag-complex-single-float) (define-header complex-double-float-header complex-double-float.element-count subtag-complex-double-float) (define-header one-digit-bignum-header 1 subtag-bignum) (define-header two-digit-bignum-header 2 subtag-bignum) (define-header three-digit-bignum-header 3 subtag-bignum) (define-header 
symbol-header symbol.element-count subtag-symbol) (define-header value-cell-header value-cell.element-count subtag-value-cell) (define-header macptr-header macptr.element-count subtag-macptr) ) (defun %kernel-global (sym) (let* ((pos (position sym arm::*arm-kernel-globals* :test #'string=))) (if pos (- (* (+ 3 pos) 4)) (error "Unknown kernel global : ~s ." sym)))) (defmacro kernel-global (sym) (let* ((pos (position sym arm::*arm-kernel-globals* :test #'string=))) (if pos (- (* (+ 3 pos) 4)) (error "Unknown kernel global : ~s ." sym)))) (ccl::defenum (:prefix "KERNEL-IMPORT-" :start 0 :step 4) fd-setsize-bytes do-fd-set do-fd-clr do-fd-is-set do-fd-zero MakeDataExecutable GetSharedLibrary FindSymbol malloc free wait-for-signal tcr-frame-ptr register-xmacptr-dispose-function open-debug-output get-r-debug restore-soft-stack-limit egc-control lisp-bug NewThread YieldToThread DisposeThread ThreadCurrentStackSpace usage-exit save-fp-context restore-fp-context put-altivec-registers get-altivec-registers new-semaphore wait-on-semaphore signal-semaphore destroy-semaphore new-recursive-lock lock-recursive-lock unlock-recursive-lock destroy-recursive-lock suspend-other-threads resume-other-threads suspend-tcr resume-tcr rwlock-new rwlock-destroy rwlock-rlock rwlock-wlock rwlock-unlock recursive-lock-trylock foreign-name-and-offset lisp-read lisp-write lisp-open lisp-fchmod lisp-lseek lisp-close lisp-ftruncate lisp-stat lisp-fstat lisp-futex lisp-opendir lisp-readdir lisp-closedir lisp-pipe lisp-gettimeofday lisp-sigexit jvm-init lisp-lstat lisp-realpath last-kernel-import ) (defconstant num-kernel-imports (ash kernel-import-last-kernel-import -2)) (defmacro nrs-offset (name) (let* ((pos (position name arm::*arm-nilreg-relative-symbols* :test #'eq))) (if pos (+ t-offset (* pos symbol.size))))) (defmacro with-stack-short-floats (specs &body body) (ccl::collect ((binds) (inits) (names)) (dolist (spec specs) (let ((name (first spec))) (binds `(,name (ccl::%make-sfloat))) (names name) (let ((init (second spec))) (when init (inits `(ccl::%short-float ,init ,name)))))) `(let* ,(binds) (declare (dynamic-extent ,@(names)) (short-float ,@(names))) ,@(inits) ,@body))) (defun arm-fpr-mask (value mode) (ecase (ccl::fpr-mode-value-name mode) (:single-float (ash 1 value)) ((:double-float :complex-single-float) (ash 3 (ash value 1))) (:complex-double-float (ash 15 (ash value 2))))) (defparameter *arm-target-uvector-subtags* `((:bignum . ,subtag-bignum) (:ratio . ,subtag-ratio) (:single-float . ,subtag-single-float) (:double-float . ,subtag-double-float) (:complex . ,subtag-complex ) (:complex-single-float . ,subtag-complex-single-float) (:complex-double-float . ,subtag-complex-double-float) (:symbol . ,subtag-symbol) (:function . ,subtag-function ) (:code-vector . ,subtag-code-vector) (:xcode-vector . ,subtag-xcode-vector) (:macptr . ,subtag-macptr ) (:catch-frame . ,subtag-catch-frame) (:struct . ,subtag-struct ) (:istruct . ,subtag-istruct ) (:pool . ,subtag-pool ) (:population . ,subtag-weak ) (:hash-vector . ,subtag-hash-vector ) (:package . ,subtag-package ) (:value-cell . ,subtag-value-cell) (:instance . ,subtag-instance ) (:lock . ,subtag-lock ) (:slot-vector . ,subtag-slot-vector) (:basic-stream . ,subtag-basic-stream) (:simple-string . ,subtag-simple-base-string ) (:bit-vector . ,subtag-bit-vector ) (:signed-8-bit-vector . ,subtag-s8-vector ) (:unsigned-8-bit-vector . ,subtag-u8-vector ) (:signed-16-bit-vector . ,subtag-s16-vector ) (:unsigned-16-bit-vector . ,subtag-u16-vector ) (:signed-32-bit-vector . 
,subtag-s32-vector ) (:fixnum-vector . ,subtag-fixnum-vector) (:unsigned-32-bit-vector . ,subtag-u32-vector ) (:single-float-vector . ,subtag-single-float-vector) (:double-float-vector . ,subtag-double-float-vector ) (:complex-single-float-vector . ,subtag-complex-single-float-vector) (:complex-double-float-vector . ,subtag-complex-double-float-vector) (:simple-vector . ,subtag-simple-vector ) (:vector-header . ,subtag-vectorH) (:array-header . ,subtag-arrayH) (:xfunction . ,subtag-xfunction) (:pseudofunction . ,subtag-pseudofunction) lets us use NX - LOOKUP - TARGET - UVECTOR - SUBTAG and (:min-cl-ivector-subtag . ,min-cl-ivector-subtag) )) NIL if the element type is unknown or unspecified at compile - time . (defun arm-array-type-name-from-ctype (ctype) (when (typep ctype 'ccl::array-ctype) (let* ((element-type (ccl::array-ctype-element-type ctype))) (typecase element-type (ccl::class-ctype (let* ((class (ccl::class-ctype-class element-type))) (if (or (eq class ccl::*character-class*) (eq class ccl::*base-char-class*) (eq class ccl::*standard-char-class*)) :simple-string :simple-vector))) (ccl::numeric-ctype (if (eq (ccl::numeric-ctype-complexp element-type) :complex) (case (ccl::numeric-ctype-format element-type) (single-float :complex-single-float-vector) (double-float :complex-double-float-vector) (t :simple-vector)) (case (ccl::numeric-ctype-class element-type) (integer (let* ((low (ccl::numeric-ctype-low element-type)) (high (ccl::numeric-ctype-high element-type))) (cond ((or (null low) (null high)) :simple-vector) ((and (>= low 0) (<= high 1) :bit-vector)) ((and (>= low 0) (<= high 255)) :unsigned-8-bit-vector) ((and (>= low 0) (<= high 65535)) :unsigned-16-bit-vector) ((and (>= low 0) (<= high #xffffffff) :unsigned-32-bit-vector)) ((and (>= low -128) (<= high 127)) :signed-8-bit-vector) ((and (>= low -32768) (<= high 32767) :signed-16-bit-vector)) ((and (>= low target-most-negative-fixnum) (<= high target-most-positive-fixnum)) :fixnum-vector) ((and (>= low (ash -1 31)) (<= high (1- (ash 1 31)))) :signed-32-bit-vector) (t :simple-vector)))) (float (case (ccl::numeric-ctype-format element-type) ((double-float long-float) :double-float-vector) ((single-float short-float) :single-float-vector) (t :simple-vector))) (t :simple-vector)))) (ccl::unknown-ctype) (ccl::named-ctype (if (eq element-type ccl::*universal-type*) :simple-vector)) (t nil))))) (defun arm-misc-byte-count (subtag element-count) (declare (fixnum subtag)) (if (or (= fulltag-nodeheader (logand subtag fulltagmask)) (<= subtag max-32-bit-ivector-subtag)) (ash element-count 2) (if (<= subtag max-8-bit-ivector-subtag) element-count (if (<= subtag max-16-bit-ivector-subtag) (ash element-count 1) (if (= subtag subtag-bit-vector) (ash (+ element-count 7) -3) (if (= subtag subtag-complex-double-float-vector) (+ 4 (ash element-count 4)) (+ 4 (ash element-count 3)))))))) (defparameter *arm-target-arch* (progn (arch::make-target-arch :name :arm :lisp-node-size 4 :nil-value canonical-nil-value :fixnum-shift fixnumshift :most-positive-fixnum (1- (ash 1 (1- (- 32 fixnumshift)))) :most-negative-fixnum (- (ash 1 (1- (- 32 fixnumshift)))) :misc-data-offset misc-data-offset :misc-dfloat-offset misc-dfloat-offset :nbits-in-word 32 :ntagbits 3 :nlisptagbits 2 :uvector-subtags *arm-target-uvector-subtags* :max-64-bit-constant-index max-64-bit-constant-index :max-32-bit-constant-index max-32-bit-constant-index :max-16-bit-constant-index max-16-bit-constant-index :max-8-bit-constant-index max-8-bit-constant-index :max-1-bit-constant-index 
max-1-bit-constant-index :word-shift 2 :code-vector-prefix () :gvector-types '(:ratio :complex :symbol :function :catch-frame :struct :istruct :pool :population :hash-vector :package :value-cell :instance :lock :slot-vector :simple-vector :xfunction :pseudofunction) :1-bit-ivector-types '(:bit-vector) :8-bit-ivector-types '(:signed-8-bit-vector :unsigned-8-bit-vector) :16-bit-ivector-types '(:signed-16-bit-vector :unsigned-16-bit-vector) :32-bit-ivector-types '(:signed-32-bit-vector :unsigned-32-bit-vector :single-float-vector :fixnum-vector :single-float :double-float :bignum :simple-string) :64-bit-ivector-types '(:double-float-vector :complex-single-float-vector) :array-type-name-from-ctype-function #'arm-array-type-name-from-ctype :package-name "ARM" :t-offset t-offset :array-data-size-function #'arm-misc-byte-count :fpr-mask-function 'arm-fpr-mask :subprims-base arm::*arm-subprims-base* :subprims-shift arm::*arm-subprims-shift* :subprims-table arm::*arm-subprims* :primitive->subprims `(((0 . 23) . ,(ccl::%subprim-name->offset '.SPbuiltin-plus arm::*arm-subprims*))) :unbound-marker-value unbound-marker :slot-unbound-marker-value slot-unbound-marker :fixnum-tag tag-fixnum :single-float-tag subtag-single-float :single-float-tag-is-subtag t :double-float-tag subtag-double-float :cons-tag fulltag-cons :null-tag fulltag-nil :symbol-tag subtag-symbol :symbol-tag-is-subtag t :function-tag subtag-function :function-tag-is-subtag t :big-endian nil :misc-subtag-offset misc-subtag-offset :car-offset cons.car :cdr-offset cons.cdr :subtag-char subtag-character :charcode-shift charcode-shift :fulltagmask fulltagmask :fulltag-misc fulltag-misc :char-code-limit #x110000 ))) (defmacro defarmarchmacro (name lambda-list &body body) `(arch::defarchmacro :arm ,name ,lambda-list ,@body)) (defarmarchmacro ccl::%make-sfloat () `(ccl::%alloc-misc arm::single-float.element-count arm::subtag-single-float)) (defarmarchmacro ccl::%make-dfloat () `(ccl::%alloc-misc arm::double-float.element-count arm::subtag-double-float)) (defarmarchmacro ccl::%numerator (x) `(ccl::%svref ,x arm::ratio.numer-cell)) (defarmarchmacro ccl::%denominator (x) `(ccl::%svref ,x arm::ratio.denom-cell)) (defarmarchmacro ccl::%get-single-float-from-double-ptr (ptr offset) `(ccl::%double-float->short-float (ccl::%get-double-float ,ptr ,offset) (ccl::%alloc-misc 1 arm::subtag-single-float))) (defarmarchmacro ccl::codevec-header-p (word) `(eql arm::subtag-code-vector (logand ,word arm::subtag-mask))) (defarmarchmacro ccl::immediate-p-macro (thing) (let* ((tag (gensym))) `(let* ((,tag (ccl::lisptag ,thing))) (declare (fixnum ,tag)) (or (= ,tag arm::tag-fixnum) (= ,tag arm::tag-imm))))) (defarmarchmacro ccl::hashed-by-identity (thing) (let* ((typecode (gensym))) `(let* ((,typecode (ccl::typecode ,thing))) (declare (fixnum ,typecode)) (or (= ,typecode arm::tag-fixnum) (= ,typecode arm::tag-imm) (= ,typecode arm::subtag-symbol) (= ,typecode arm::subtag-instance))))) (defarmarchmacro ccl::%get-kernel-global (name) `(ccl::%fixnum-ref (ash (+ (- nil-value fulltag-nil) ,(%kernel-global (if (ccl::quoted-form-p name) (cadr name) name))) (- fixnumshift)))) (defarmarchmacro ccl::%get-kernel-global-ptr (name dest) `(ccl::%setf-macptr ,dest (ccl::%fixnum-ref-macptr (ash (+ (- nil-value fulltag-nil) ,(%kernel-global (if (ccl::quoted-form-p name) (cadr name) name))) (- fixnumshift))))) (defarmarchmacro ccl::%target-kernel-global (name) `(arm::%kernel-global ,name)) (defarmarchmacro ccl::lfun-vector (fun) fun) (defarmarchmacro ccl::lfun-vector-lfun (lfv) lfv) 
(defarmarchmacro ccl::area-code () area.code) (defarmarchmacro ccl::area-succ () area.succ) (defarmarchmacro ccl::nth-immediate (f i) `(ccl::%svref ,f (the fixnum (+ (the fixnum ,i) 1)))) (defarmarchmacro ccl::set-nth-immediate (f i new) `(setf (ccl::%svref ,f (the fixnum (+ (the fixnum ,i) 1))) ,new)) (defarmarchmacro ccl::symptr->symvector (s) s) (defarmarchmacro ccl::symvector->symptr (s) s) (defarmarchmacro ccl::function-to-function-vector (f) f) (defarmarchmacro ccl::function-vector-to-function (v) v) (defarmarchmacro ccl::with-ffcall-results ((buf) &body body) (let* ((size (+ (* 8 4) (* 31 8)))) `(%stack-block ((,buf ,size)) ,@body))) (defconstant arg-check-trap-pc-limit 8) UUO encoding 12 bits of code 8 bits of info - NOT type info - 4 - bit reg 2 bits of lisptag info , 4 - bit reg 3 bits of fulltag info , 4 bit reg 8 bits of extended type / subtag info , 4 bit reg continuable , lisptag , reg continuable , fulltag , reg continuable , xtype , reg 4 bits of code , r1 , r0 xtypes : 8 - bit integers used to report type errors for types that ca n't (defconstant xtype-unsigned-byte-24 252) (defconstant xtype-array2d 248) (defconstant xtype-array3d 244) (defconstant xtype-integer 4) (defconstant xtype-s64 8) (defconstant xtype-u64 12) (defconstant xtype-s32 16) (defconstant xtype-u32 20) (defconstant xtype-s16 24) (defconstant xtype-u16 28) (defconstant xtype-s8 32) (defconstant xtype-u8 36) (defconstant xtype-bit 40) (defconstant xtype-rational 44) (defconstant xtype-real 48) (defconstant xtype-number 52) (defconstant xtype-char-code 56) (ccl::defenum (:prefix "ARM-COND-") eq ne hs lo mi pl vs vc hi ls ge lt gt le al) division by 0 division by 0 enable (define-storage-layout fake-stack-frame 0 header sp next-sp fn lr vsp xp) #+arm-target (ccl::make-istruct-class 'fake-stack-frame ccl::*istruct-class*) (defconstant real-tags-mask (logior (ash 1 tag-fixnum) (ash 1 subtag-bignum) (ash 1 subtag-single-float) (ash 1 subtag-double-float) (ash 1 subtag-ratio))) (defconstant numeric-tags-mask (logior real-tags-mask (ash 1 subtag-complex))) (defconstant fasl-version #x66) (defconstant fasl-max-version #x66) (defconstant fasl-min-version #x66) (defparameter *image-abi-version* 1045) (provide "ARM-ARCH")
f9d76dd38492b1fb4aa3c1119ea85ddb86ca3ae734fa1ed133ff0c62deed7f25
xmonad/xmonad-contrib
DebugStack.hs
----------------------------------------------------------------------------- -- | -- Module : XMonad.Hooks.DebugStack Description : Dump the state of the StackSet . Copyright : ( c ) , 2014 -- License : BSD3-style (see LICENSE) -- -- Maintainer : -- Stability : unstable -- Portability : not portable -- Dump the state of the ' StackSet ' . A @logHook@ and @handleEventHook@ are -- also provided. -- ----------------------------------------------------------------------------- module XMonad.Hooks.DebugStack (debugStack ,debugStackFull ,debugStackString ,debugStackFullString ,debugStackLogHook ,debugStackFullLogHook ,debugStackEventHook ,debugStackFullEventHook ) where import XMonad.Core import XMonad.Prelude import qualified XMonad.StackSet as W import XMonad.Util.DebugWindow import Graphics.X11.Types (Window) import Graphics.X11.Xlib.Extras (Event) import Data.Map (member) -- | Print the state of the current window stack for the current workspace to -- @stderr@, which for most installations goes to @~/.xsession-errors@. -- "XMonad.Util.DebugWindow" is used to display the individual windows. debugStack :: X () debugStack = debugStackString >>= trace -- | Print the state of the current window stack for all workspaces to -- @stderr@, which for most installations goes to @~/.xsession-errors@. -- "XMonad.Util.DebugWindow" is used to display the individual windows. debugStackFull :: X () debugStackFull = debugStackFullString >>= trace -- | 'debugStack' packaged as a 'logHook'. (Currently this is identical.) debugStackLogHook :: X () debugStackLogHook = debugStack -- | 'debugStackFull packaged as a 'logHook'. (Currently this is identical.) debugStackFullLogHook :: X () debugStackFullLogHook = debugStackFull -- | 'debugStack' packaged as a 'handleEventHook'. You almost certainly do not -- want to use this unconditionally, as it will cause massive amounts of -- output and possibly slow @xmonad@ down severely. debugStackEventHook :: Event -> X All debugStackEventHook _ = debugStack >> return (All True) -- | 'debugStackFull' packaged as a 'handleEventHook'. You almost certainly do -- not want to use this unconditionally, as it will cause massive amounts of -- output and possibly slow @xmonad@ down severely. debugStackFullEventHook :: Event -> X All debugStackFullEventHook _ = debugStackFull >> return (All True) | Dump the state of the current workspace in the ' StackSet ' as a multiline ' String ' . debugStackString :: X String debugStackString = withWindowSet $ debugStackWs . W.workspace . W.current | Dump the state of all workspaces in the ' StackSet ' as a multiline ' String ' . @@@ this is in stackset order , which is roughly lru - ish debugStackFullString :: X String debugStackFullString = withWindowSet $ fmap (intercalate "\n") . mapM debugStackWs . W.workspaces | Dump the state of a workspace in the current ' StackSet ' as a multiline ' String ' . -- @ -- Workspace "foo:: -- mm -- * ww -- ^ww -- @ -- * indicates the focused window, ^ indicates a floating window debugStackWs :: W.Workspace String (Layout Window) Window -> X String debugStackWs w = withWindowSet $ \ws -> do let cur = if wt == W.currentTag ws then " (current)" else "" wt = W.tag w s <- emit ws $ W.integrate' . 
W.stack $ w return $ intercalate "\n" $ ("Workspace " ++ show wt ++ cur):s where emit :: WindowSet -> [Window] -> X [String] emit _ [] = return [" -empty workspace-"] emit ww ws = do (_,ss) <- foldM emit' (ww,[]) ws return ss emit' :: (WindowSet,[String]) -> Window -> X (WindowSet,[String]) emit' (ws,a) w' = do let focus = if Just w' == W.peek ws then '*' else ' ' float = if w' `member` W.floating ws then '^' else ' ' s <- debugWindow w' return (ws,(focus:float:s):a)
null
https://raw.githubusercontent.com/xmonad/xmonad-contrib/3058d1ca22d565b2fa93227fdde44d8626d6f75d/XMonad/Hooks/DebugStack.hs
haskell
--------------------------------------------------------------------------- | Module : XMonad.Hooks.DebugStack License : BSD3-style (see LICENSE) Maintainer : Stability : unstable Portability : not portable also provided. --------------------------------------------------------------------------- | Print the state of the current window stack for the current workspace to @stderr@, which for most installations goes to @~/.xsession-errors@. "XMonad.Util.DebugWindow" is used to display the individual windows. | Print the state of the current window stack for all workspaces to @stderr@, which for most installations goes to @~/.xsession-errors@. "XMonad.Util.DebugWindow" is used to display the individual windows. | 'debugStack' packaged as a 'logHook'. (Currently this is identical.) | 'debugStackFull packaged as a 'logHook'. (Currently this is identical.) | 'debugStack' packaged as a 'handleEventHook'. You almost certainly do not want to use this unconditionally, as it will cause massive amounts of output and possibly slow @xmonad@ down severely. | 'debugStackFull' packaged as a 'handleEventHook'. You almost certainly do not want to use this unconditionally, as it will cause massive amounts of output and possibly slow @xmonad@ down severely. @ Workspace "foo:: mm * ww ^ww @ * indicates the focused window, ^ indicates a floating window
Description : Dump the state of the StackSet . Copyright : ( c ) , 2014 Dump the state of the ' StackSet ' . A @logHook@ and @handleEventHook@ are module XMonad.Hooks.DebugStack (debugStack ,debugStackFull ,debugStackString ,debugStackFullString ,debugStackLogHook ,debugStackFullLogHook ,debugStackEventHook ,debugStackFullEventHook ) where import XMonad.Core import XMonad.Prelude import qualified XMonad.StackSet as W import XMonad.Util.DebugWindow import Graphics.X11.Types (Window) import Graphics.X11.Xlib.Extras (Event) import Data.Map (member) debugStack :: X () debugStack = debugStackString >>= trace debugStackFull :: X () debugStackFull = debugStackFullString >>= trace debugStackLogHook :: X () debugStackLogHook = debugStack debugStackFullLogHook :: X () debugStackFullLogHook = debugStackFull debugStackEventHook :: Event -> X All debugStackEventHook _ = debugStack >> return (All True) debugStackFullEventHook :: Event -> X All debugStackFullEventHook _ = debugStackFull >> return (All True) | Dump the state of the current workspace in the ' StackSet ' as a multiline ' String ' . debugStackString :: X String debugStackString = withWindowSet $ debugStackWs . W.workspace . W.current | Dump the state of all workspaces in the ' StackSet ' as a multiline ' String ' . @@@ this is in stackset order , which is roughly lru - ish debugStackFullString :: X String debugStackFullString = withWindowSet $ fmap (intercalate "\n") . mapM debugStackWs . W.workspaces | Dump the state of a workspace in the current ' StackSet ' as a multiline ' String ' . debugStackWs :: W.Workspace String (Layout Window) Window -> X String debugStackWs w = withWindowSet $ \ws -> do let cur = if wt == W.currentTag ws then " (current)" else "" wt = W.tag w s <- emit ws $ W.integrate' . W.stack $ w return $ intercalate "\n" $ ("Workspace " ++ show wt ++ cur):s where emit :: WindowSet -> [Window] -> X [String] emit _ [] = return [" -empty workspace-"] emit ww ws = do (_,ss) <- foldM emit' (ww,[]) ws return ss emit' :: (WindowSet,[String]) -> Window -> X (WindowSet,[String]) emit' (ws,a) w' = do let focus = if Just w' == W.peek ws then '*' else ' ' float = if w' `member` W.floating ws then '^' else ' ' s <- debugWindow w' return (ws,(focus:float:s):a)
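A short usage sketch for the XMonad.Hooks.DebugStack module in the row above; this is annotation, not part of the dataset record itself. It assumes a stock xmonad configuration built from the default config def, with everything except the log hook left at its defaults.

import XMonad
import XMonad.Hooks.DebugStack (debugStackLogHook)

main :: IO ()
main = xmonad def
    { logHook = debugStackLogHook  -- dump the focused workspace's window stack to stderr on every refresh
    }

An existing log hook can be kept alongside it by sequencing the two actions, e.g. logHook = myLogHook >> debugStackLogHook (myLogHook here is only a placeholder for whatever hook the configuration already uses).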
00f9609b63cf86e0b3b3c2cdd418ac38842c9c18be1f68331b44a6030767ba6e
aleator/CV
Corners.hs
module CV.Corners ( HarrisDesc , Corner(..) , ImageWithCorners(..) , harris , harrisCorners ) where import CV.Bindings.Types import CV.Bindings.Core import CV.Bindings.ImgProc import CV.Image import CV.Operations import CV.Iterators import System.IO.Unsafe type HarrisDesc = Float data Corner d = Corner { pos :: (Int,Int) , desc :: d } data ImageWithCorners d = ImageWithCorners { image :: Image GrayScale D32 , corners :: [Corner d] } harris :: Int -> Int -> Double -> Image GrayScale D32 -> Image GrayScale D32 harris bs as k src = unsafePerformIO $ do withCloneValue src $ \clone -> withGenImage src $ \si -> withGenImage clone $ \ci -> do c'cvCornerHarris si ci bs as k return clone -- threshold for selecting harris corners -- image _with_harris_applied_ (TODO: describe images with operations applied to them?) result is an image normalize to [ 0 .. 1 ] range , and a list of found corners harrisCorners :: Float -> Image GrayScale D32 -> ImageWithCorners HarrisDesc harrisCorners t src = (ImageWithCorners src cs) where cs = map (\(p,v) -> (Corner p v)) $ filterPixels (>t) $ normalize 1 0 NormMinMax $ harris 2 3 0.04 $ src
null
https://raw.githubusercontent.com/aleator/CV/1e2c9116bcaacdf305044c861a1b36d0d8fb71b7/CV/Corners.hs
haskell
threshold for selecting harris corners image _with_harris_applied_ (TODO: describe images with operations applied to them?)
module CV.Corners ( HarrisDesc , Corner(..) , ImageWithCorners(..) , harris , harrisCorners ) where import CV.Bindings.Types import CV.Bindings.Core import CV.Bindings.ImgProc import CV.Image import CV.Operations import CV.Iterators import System.IO.Unsafe type HarrisDesc = Float data Corner d = Corner { pos :: (Int,Int) , desc :: d } data ImageWithCorners d = ImageWithCorners { image :: Image GrayScale D32 , corners :: [Corner d] } harris :: Int -> Int -> Double -> Image GrayScale D32 -> Image GrayScale D32 harris bs as k src = unsafePerformIO $ do withCloneValue src $ \clone -> withGenImage src $ \si -> withGenImage clone $ \ci -> do c'cvCornerHarris si ci bs as k return clone result is an image normalize to [ 0 .. 1 ] range , and a list of found corners harrisCorners :: Float -> Image GrayScale D32 -> ImageWithCorners HarrisDesc harrisCorners t src = (ImageWithCorners src cs) where cs = map (\(p,v) -> (Corner p v)) $ filterPixels (>t) $ normalize 1 0 NormMinMax $ harris 2 3 0.04 $ src
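A short usage sketch for the CV.Corners API in the row above; again this is annotation rather than part of the dataset record. It assumes a grayscale D32 image has already been obtained elsewhere through the CV library's image-loading utilities, and the 0.3 threshold is only an illustrative value.

import CV.Image
import CV.Corners

-- Positions of all corners whose normalized Harris response exceeds the threshold.
strongCorners :: Image GrayScale D32 -> [(Int, Int)]
strongCorners img = map pos (corners (harrisCorners 0.3 img))

Because harrisCorners normalizes the Harris response image to the [0..1] range (NormMinMax) before thresholding, the threshold acts as a relative cut-off rather than an absolute corner strength.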
e706def4a76d9be4b78abe96a053a3374c269ef84c42d133e64ac28e28396a1c
project-oak/hafnium-verification
CostIssues.ml
* Copyright ( c ) Facebook , Inc. and its affiliates . * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree . * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open! IStd type issue_spec = { extract_cost_f: Jsonbug_t.cost_item -> Jsonbug_t.cost_info ; name: string ; threshold: int option ; complexity_increase_issue: is_on_cold_start:bool -> is_on_ui_thread:bool -> IssueType.t ; expensive_issue: is_on_cold_start:bool -> is_on_ui_thread:bool -> IssueType.t ; zero_issue: IssueType.t ; infinite_issue: IssueType.t ; top_and_bottom: bool } module CostKindMap = struct include PrettyPrintable.MakePPMap (CostKind) type no_value = | let iter2 map1 map2 ~f = let (_ : no_value t) = merge (fun k v1_opt v2_opt -> (match (v1_opt, v2_opt) with Some v1, Some v2 -> f k v1 v2 | _ -> ()) ; None ) map1 map2 in () end let enabled_cost_map = List.fold CostKind.enabled_cost_kinds ~init:CostKindMap.empty ~f:(fun acc CostKind.{kind; top_and_bottom} -> let kind_spec = { name= Format.asprintf "The %a" CostKind.pp kind ; threshold= (if Config.use_cost_threshold then CostKind.to_threshold kind else None) ; extract_cost_f= (fun c -> CostKind.to_json_cost_info c kind) ; complexity_increase_issue= (fun ~is_on_cold_start ~is_on_ui_thread -> IssueType.complexity_increase ~kind ~is_on_cold_start ~is_on_ui_thread ) ; expensive_issue= (fun ~is_on_cold_start ~is_on_ui_thread -> IssueType.expensive_cost_call ~kind ~is_on_cold_start ~is_on_ui_thread ) ; zero_issue= IssueType.zero_cost_call ~kind ; infinite_issue= IssueType.infinite_cost_call ~kind ; top_and_bottom } in CostKindMap.add kind kind_spec acc )
null
https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/base/CostIssues.ml
ocaml
* Copyright ( c ) Facebook , Inc. and its affiliates . * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree . * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. *) open! IStd type issue_spec = { extract_cost_f: Jsonbug_t.cost_item -> Jsonbug_t.cost_info ; name: string ; threshold: int option ; complexity_increase_issue: is_on_cold_start:bool -> is_on_ui_thread:bool -> IssueType.t ; expensive_issue: is_on_cold_start:bool -> is_on_ui_thread:bool -> IssueType.t ; zero_issue: IssueType.t ; infinite_issue: IssueType.t ; top_and_bottom: bool } module CostKindMap = struct include PrettyPrintable.MakePPMap (CostKind) type no_value = | let iter2 map1 map2 ~f = let (_ : no_value t) = merge (fun k v1_opt v2_opt -> (match (v1_opt, v2_opt) with Some v1, Some v2 -> f k v1 v2 | _ -> ()) ; None ) map1 map2 in () end let enabled_cost_map = List.fold CostKind.enabled_cost_kinds ~init:CostKindMap.empty ~f:(fun acc CostKind.{kind; top_and_bottom} -> let kind_spec = { name= Format.asprintf "The %a" CostKind.pp kind ; threshold= (if Config.use_cost_threshold then CostKind.to_threshold kind else None) ; extract_cost_f= (fun c -> CostKind.to_json_cost_info c kind) ; complexity_increase_issue= (fun ~is_on_cold_start ~is_on_ui_thread -> IssueType.complexity_increase ~kind ~is_on_cold_start ~is_on_ui_thread ) ; expensive_issue= (fun ~is_on_cold_start ~is_on_ui_thread -> IssueType.expensive_cost_call ~kind ~is_on_cold_start ~is_on_ui_thread ) ; zero_issue= IssueType.zero_cost_call ~kind ; infinite_issue= IssueType.infinite_cost_call ~kind ; top_and_bottom } in CostKindMap.add kind kind_spec acc )
78b993fb0927c4576ab4cb072ec23ebf4656a3519096251f4b9d9849071b16ae
basho/riak_core
riak_core_apl.erl
%% ------------------------------------------------------------------- %% riak_core : Core Active Preference Lists %% Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved . %% This file is provided to you under the Apache License , %% Version 2.0 (the "License"); you may not use this file except in compliance with the License . You may obtain %% a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either express or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% %% ------------------------------------------------------------------- %% Get active preference list - preference list with secondary nodes %% substituted. %% ------------------------------------------------------------------- -module(riak_core_apl). -export([active_owners/1, active_owners/2, get_apl/3, get_apl/4, get_apl_ann/2, get_apl_ann/3, get_apl_ann/4, get_apl_ann_with_pnum/1, get_primary_apl/3, get_primary_apl/4, get_primary_apl_chbin/4, first_up/2, offline_owners/1, offline_owners/2, offline_owners/3 ]). -export_type([preflist/0, preflist_ann/0, preflist_with_pnum_ann/0]). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -type index() :: chash:index_as_int(). -type n_val() :: non_neg_integer(). -type ring() :: riak_core_ring:riak_core_ring(). -type preflist() :: [{index(), node()}]. -type preflist_ann() :: [{{index(), node()}, primary|fallback}]. %% @type preflist_with_pnum_ann(). %% Annotated preflist where the partition value is an id/number %% (0 to ring_size-1) instead of a hash. -type preflist_with_pnum_ann() :: [{{riak_core_ring:partition_id(), node()}, primary|fallback}]. -type iterator() :: term(). -type chashbin() :: term(). -type docidx() :: chash:index(). %% @doc Return preflist of all active primary nodes (with no %% substituion of fallbacks). Used to simulate a %% preflist with N=ring_size. -spec active_owners(atom()) -> preflist_ann(). active_owners(Service) -> {ok, Ring} = riak_core_ring_manager:get_my_ring(), active_owners(Ring, riak_core_node_watcher:nodes(Service)). -spec active_owners(ring(), [node()]) -> preflist_ann(). active_owners(Ring, UpNodes) -> UpNodes1 = UpNodes, Primaries = riak_core_ring:all_owners(Ring), {Up, _Pangs} = check_up(Primaries, UpNodes1, [], []), Up. %% @doc Get the active preflist taking account of which nodes are up. -spec get_apl(docidx(), n_val(), atom()) -> preflist(). get_apl(DocIdx, N, Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), get_apl_chbin(DocIdx, N, CHBin, riak_core_node_watcher:nodes(Service)). %% @doc Get the active preflist taking account of which nodes are up %% for a given chash/upnodes list. -spec get_apl_chbin(docidx(), n_val(), chashbin:chashbin(), [node()]) -> preflist(). get_apl_chbin(DocIdx, N, CHBin, UpNodes) -> [{Partition, Node} || {{Partition, Node}, _Type} <- get_apl_ann_chbin(DocIdx, N, CHBin, UpNodes)]. %% @doc Get the active preflist taking account of which nodes are up %% for a given ring/upnodes list. -spec get_apl(docidx(), n_val(), ring(), [node()]) -> preflist(). get_apl(DocIdx, N, Ring, UpNodes) -> [{Partition, Node} || {{Partition, Node}, _Type} <- get_apl_ann(DocIdx, N, Ring, UpNodes)]. %% @doc Get the active preflist taking account of which nodes are up for a given %% chash/upnodes list and annotate each node with type of primary/fallback. 
get_apl_ann(DocIdx, N, UpNodes) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), get_apl_ann_chbin(DocIdx, N, CHBin, UpNodes). %% @doc Get the active preflist taking account of which nodes are up %% for a given ring/upnodes list and annotate each node with type of %% primary/fallback. -spec get_apl_ann(binary(), n_val(), ring(), [node()]) -> preflist_ann(). get_apl_ann(DocIdx, N, Ring, UpNodes) -> UpNodes1 = UpNodes, Preflist = riak_core_ring:preflist(DocIdx, Ring), {Primaries, Fallbacks} = lists:split(N, Preflist), {Up, Pangs} = check_up(Primaries, UpNodes1, [], []), Up ++ find_fallbacks(Pangs, Fallbacks, UpNodes1, []). %% @doc Get the active preflist for a given {bucket, key} and list of nodes %% and annotate each node with type of primary/fallback. -spec get_apl_ann(riak_core_bucket:bucket(), [node()]) -> preflist_ann(). get_apl_ann({Bucket, Key}, UpNodes) -> BucketProps = riak_core_bucket:get_bucket(Bucket), NVal = proplists:get_value(n_val, BucketProps), DocIdx = riak_core_util:chash_key({Bucket, Key}), get_apl_ann(DocIdx, NVal, UpNodes). %% @doc Get the active preflist taking account of which nodes are up %% for a given {bucket, key} and annotate each node with type of %% primary/fallback -spec get_apl_ann_with_pnum(riak_core_bucket:bucket()) -> preflist_with_pnum_ann(). get_apl_ann_with_pnum(BKey) -> {ok, Ring} = riak_core_ring_manager:get_my_ring(), UpNodes = riak_core_ring:all_members(Ring), Apl = get_apl_ann(BKey, UpNodes), Size = riak_core_ring:num_partitions(Ring), apl_with_partition_nums(Apl, Size). %% @doc Get the active preflist taking account of which nodes are up %% for a given chash/upnodes list and annotate each node with type of %% primary/fallback. -spec get_apl_ann_chbin(binary(), n_val(), chashbin(), [node()]) -> preflist_ann(). get_apl_ann_chbin(DocIdx, N, CHBin, UpNodes) -> UpNodes1 = UpNodes, Itr = chashbin:iterator(DocIdx, CHBin), {Primaries, Itr2} = chashbin:itr_pop(N, Itr), {Up, Pangs} = check_up(Primaries, UpNodes1, [], []), Up ++ find_fallbacks_chbin(Pangs, Itr2, UpNodes1, []). %% @doc Same as get_apl, but returns only the primaries. -spec get_primary_apl(binary(), n_val(), atom()) -> preflist_ann(). get_primary_apl(DocIdx, N, Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), get_primary_apl_chbin(DocIdx, N, CHBin, riak_core_node_watcher:nodes(Service)). %% @doc Same as get_apl, but returns only the primaries. -spec get_primary_apl_chbin(binary(), n_val(), chashbin(), [node()]) -> preflist_ann(). get_primary_apl_chbin(DocIdx, N, CHBin, UpNodes) -> UpNodes1 = UpNodes, Itr = chashbin:iterator(DocIdx, CHBin), {Primaries, _} = chashbin:itr_pop(N, Itr), {Up, _} = check_up(Primaries, UpNodes1, [], []), Up. %% @doc Same as get_apl, but returns only the primaries. -spec get_primary_apl(binary(), n_val(), ring(), [node()]) -> preflist_ann(). get_primary_apl(DocIdx, N, Ring, UpNodes) -> UpNodes1 = UpNodes, Preflist = riak_core_ring:preflist(DocIdx, Ring), {Primaries, _} = lists:split(N, Preflist), {Up, _} = check_up(Primaries, UpNodes1, [], []), Up. @doc Return the first entry that is up in the preflist for ` DocIdx ' . This %% will crash if all owning nodes are offline. first_up(DocIdx, Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), Itr = chashbin:iterator(DocIdx, CHBin), UpSet = ordsets:from_list(riak_core_node_watcher:nodes(Service)), Itr2 = chashbin:itr_next_while(fun({_P, Node}) -> not ordsets:is_element(Node, UpSet) end, Itr), chashbin:itr_value(Itr2). 
offline_owners(Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), offline_owners(Service, CHBin). offline_owners(Service, CHBin) -> offline_owners(Service, CHBin, []). offline_owners(Service, CHBin, OtherDownNodes) -> UpSet = ordsets:from_list(riak_core_node_watcher:nodes(Service)), DownVNodes = chashbin:to_list_filter(fun({_Index, Node}) -> (not is_up(Node, UpSet) or lists:member(Node,OtherDownNodes)) end, CHBin), DownVNodes. %% @doc Split a preference list into up and down lists. -spec check_up(preflist(), [node()], preflist_ann(), preflist()) -> {preflist_ann(), preflist()}. check_up([], _UpNodes, Up, Pangs) -> {lists:reverse(Up), lists:reverse(Pangs)}; check_up([{Partition,Node}|Rest], UpNodes, Up, Pangs) -> case is_up(Node, UpNodes) of true -> check_up(Rest, UpNodes, [{{Partition, Node}, primary} | Up], Pangs); false -> check_up(Rest, UpNodes, Up, [{Partition, Node} | Pangs]) end. %% @doc Find fallbacks for downed nodes in the preference list. -spec find_fallbacks(preflist(), preflist(), [node()], preflist_ann()) -> preflist_ann(). find_fallbacks(_Pangs, [], _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks([], _Fallbacks, _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks([{Partition, _Node}|Rest]=Pangs, [{_,FN}|Fallbacks], UpNodes, Secondaries) -> case is_up(FN, UpNodes) of true -> find_fallbacks(Rest, Fallbacks, UpNodes, [{{Partition, FN}, fallback} | Secondaries]); false -> find_fallbacks(Pangs, Fallbacks, UpNodes, Secondaries) end. %% @doc Find fallbacks for downed nodes in the preference list. -spec find_fallbacks_chbin(preflist(), iterator(),[node()], preflist_ann()) -> preflist_ann(). find_fallbacks_chbin([], _Fallbacks, _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks_chbin(_, done, _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks_chbin([{Partition, _Node}|Rest]=Pangs, Itr, UpNodes, Secondaries) -> {_, FN} = chashbin:itr_value(Itr), Itr2 = chashbin:itr_next(Itr), case is_up(FN, UpNodes) of true -> find_fallbacks_chbin(Rest, Itr2, UpNodes, [{{Partition, FN}, fallback} | Secondaries]); false -> find_fallbacks_chbin(Pangs, Itr2, UpNodes, Secondaries) end. %% @doc Return true if a node is up. is_up(Node, UpNodes) -> lists:member(Node, UpNodes). %% @doc Return annotated preflist with partition ids/nums instead of hashes. -spec apl_with_partition_nums(preflist_ann(), riak_core_ring:ring_size()) -> preflist_with_pnum_ann(). apl_with_partition_nums(Apl, Size) -> [{{riak_core_ring_util:hash_to_partition_id(Hash, Size), Node}, Ann} || {{Hash, Node}, Ann} <- Apl]. -ifdef(TEST). smallest_test() -> Ring = riak_core_ring:fresh(1,node()), ?assertEqual([{0,node()}], get_apl(last_in_ring(), 1, Ring, [node()])). 
four_node_test() -> Nodes = [nodea, nodeb, nodec, noded], Ring = perfect_ring(8, Nodes), ?assertEqual([{0,nodea}, {182687704666362864775460604089535377456991567872,nodeb}, {365375409332725729550921208179070754913983135744,nodec}], get_apl(last_in_ring(), 3, Ring, Nodes)), %% With a node down ?assertEqual([{182687704666362864775460604089535377456991567872,nodeb}, {365375409332725729550921208179070754913983135744,nodec}, {0,noded}], get_apl(last_in_ring(), 3, Ring, [nodeb, nodec, noded])), With two nodes down ?assertEqual([{365375409332725729550921208179070754913983135744,nodec}, {0,noded}, {182687704666362864775460604089535377456991567872,nodec}], get_apl(last_in_ring(), 3, Ring, [nodec, noded])), With the other two nodes down ?assertEqual([{0,nodea}, {182687704666362864775460604089535377456991567872,nodeb}, {365375409332725729550921208179070754913983135744,nodea}], get_apl(last_in_ring(), 3, Ring, [nodea, nodeb])). %% Create a perfect ring - RingSize must be a multiple of nodes perfect_ring(RingSize, Nodes) when RingSize rem length(Nodes) =:= 0 -> Ring = riak_core_ring:fresh(RingSize,node()), Owners = riak_core_ring:all_owners(Ring), TransferNode = fun({Idx,_CurOwner}, {Ring0, [NewOwner|Rest]}) -> {riak_core_ring:transfer_node(Idx, NewOwner, Ring0), Rest ++ [NewOwner]} end, {PerfectRing, _} = lists:foldl(TransferNode, {Ring, Nodes}, Owners), PerfectRing. last_in_ring() -> <<1461501637330902918203684832716283019655932542975:160/unsigned>>. six_node_test() -> its non - trivial to create a real 6 node ring , so here 's one we made %% earlier {ok, [Ring0]} = file:consult("test/my_ring"), Ring = riak_core_ring:upgrade(Ring0), riak_core_util : chash_key({<<"foo " > > , < < " bar " > > } ) , DocIdx = <<73,212,27,234,104,13,150,207,0,82,86,183,125,225,172, 154,135,46,6,112>>, Nodes = ['dev1@127.0.0.1', 'dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1', 'dev5@127.0.0.1', 'dev6@127.0.0.1'], %% Fallbacks should be selected by finding the next-highest partition after the DocIdx of the key , in this case the 433883 partition . The N %% partitions at that point are the primary partitions. If any of the primaries %% are down, the next up node found by walking the preflist is used as the %% fallback for that partition. 
?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev3@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes)), ?assertEqual([{456719261665907161938651510223838443642478919680, 'dev3@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}, {433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1'])), ?assertEqual([{479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}, {433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev6@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1', 'dev3@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev6@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev1@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev6@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1', 'dev1@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev3@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev6@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev1@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1', 'dev6@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev2@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev2@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev2@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1', 'dev5@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev3@127.0.0.1'])), ok. 
six_node_bucket_key_ann_test() -> {ok, [Ring0]} = file:consult("test/my_ring"), Nodes = ['dev1@127.0.0.1', 'dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1', 'dev5@127.0.0.1', 'dev6@127.0.0.1'], Ring = riak_core_ring:upgrade(Ring0), Bucket = <<"favorite">>, Key = <<"jethrotull">>, application:set_env(riak_core, default_bucket_props, [{n_val, 3}, {chash_keyfun,{riak_core_util,chash_std_keyfun}}]), riak_core_ring_manager:setup_ets(test), riak_core_ring_manager:set_ring_global(Ring), Size = riak_core_ring:num_partitions(Ring), ?assertEqual([{{34, 'dev5@127.0.0.1'}, primary}, {{35, 'dev6@127.0.0.1'}, primary}, {{36, 'dev1@127.0.0.1'}, primary}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes), Size)), ?assertEqual([{{35, 'dev6@127.0.0.1'}, primary}, {{36, 'dev1@127.0.0.1'}, primary}, {{34, 'dev2@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1']), Size)), ?assertEqual([{{36, 'dev1@127.0.0.1'}, primary}, {{34, 'dev2@127.0.0.1'}, fallback}, {{35, 'dev3@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev2@127.0.0.1'}, fallback}, {{35, 'dev3@127.0.0.1'}, fallback}, {{36, 'dev4@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev3@127.0.0.1'}, fallback}, {{35, 'dev4@127.0.0.1'}, fallback}, {{36, 'dev3@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1', 'dev2@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev4@127.0.0.1'}, fallback}, {{35, 'dev4@127.0.0.1'}, fallback}, {{36, 'dev4@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1', 'dev2@127.0.0.1', 'dev3@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev5@127.0.0.1'}, primary}, {{35, 'dev6@127.0.0.1'}, primary}, {{36, 'dev3@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev1@127.0.0.1', 'dev2@127.0.0.1']), Size)), riak_core_ring_manager:cleanup_ets(test), ok. chbin_test_() -> {timeout, 180, fun chbin_test_scenario/0}. chbin_test_scenario() -> [chbin_test_scenario(Size, NumNodes) || Size <- [32, 64, 128], NumNodes <- [1, 2, 3, 4, 5, 8, Size div 4]], ok. chbin_test_scenario(Size, NumNodes) -> RingTop = 1 bsl 160, Ring = riak_core_test_util:fake_ring(Size, NumNodes), Nodes = riak_core_ring:all_members(Ring), CHash = riak_core_ring:chash(Ring), CHBin = chashbin:create(CHash), Inc = chash:ring_increment(Size), HashKeys = [<<X:160/integer>> || X <- lists:seq(0, RingTop, Inc div 2)], Shuffled = riak_core_util:shuffle(Nodes), _ = CHBin, [begin Up = max(0, NumNodes - Down), UpNodes = lists:sublist(Shuffled, Up), ?assertEqual(get_apl(HashKey, N, Ring, UpNodes), get_apl_chbin(HashKey, N, CHBin, UpNodes)), ?assertEqual(get_primary_apl(HashKey, N, Ring, UpNodes), get_primary_apl_chbin(HashKey, N, CHBin, UpNodes)), ok end || HashKey <- HashKeys, N <- [1, 2, 3, 4], Down <- [0, 1, 2, Size div 2, Size-1, Size]], ok. -endif.
null
https://raw.githubusercontent.com/basho/riak_core/762ec81ae9af9a278e853f1feca418b9dcf748a3/src/riak_core_apl.erl
erlang
------------------------------------------------------------------- Version 2.0 (the "License"); you may not use this file a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ------------------------------------------------------------------- Get active preference list - preference list with secondary nodes substituted. ------------------------------------------------------------------- @type preflist_with_pnum_ann(). Annotated preflist where the partition value is an id/number (0 to ring_size-1) instead of a hash. @doc Return preflist of all active primary nodes (with no substituion of fallbacks). Used to simulate a preflist with N=ring_size. @doc Get the active preflist taking account of which nodes are up. @doc Get the active preflist taking account of which nodes are up for a given chash/upnodes list. @doc Get the active preflist taking account of which nodes are up for a given ring/upnodes list. @doc Get the active preflist taking account of which nodes are up for a given chash/upnodes list and annotate each node with type of primary/fallback. @doc Get the active preflist taking account of which nodes are up for a given ring/upnodes list and annotate each node with type of primary/fallback. @doc Get the active preflist for a given {bucket, key} and list of nodes and annotate each node with type of primary/fallback. @doc Get the active preflist taking account of which nodes are up for a given {bucket, key} and annotate each node with type of primary/fallback @doc Get the active preflist taking account of which nodes are up for a given chash/upnodes list and annotate each node with type of primary/fallback. @doc Same as get_apl, but returns only the primaries. @doc Same as get_apl, but returns only the primaries. @doc Same as get_apl, but returns only the primaries. will crash if all owning nodes are offline. @doc Split a preference list into up and down lists. @doc Find fallbacks for downed nodes in the preference list. @doc Find fallbacks for downed nodes in the preference list. @doc Return true if a node is up. @doc Return annotated preflist with partition ids/nums instead of hashes. With a node down Create a perfect ring - RingSize must be a multiple of nodes earlier Fallbacks should be selected by finding the next-highest partition after partitions at that point are the primary partitions. If any of the primaries are down, the next up node found by walking the preflist is used as the fallback for that partition.
riak_core : Core Active Preference Lists Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved . This file is provided to you under the Apache License , except in compliance with the License . You may obtain software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY -module(riak_core_apl). -export([active_owners/1, active_owners/2, get_apl/3, get_apl/4, get_apl_ann/2, get_apl_ann/3, get_apl_ann/4, get_apl_ann_with_pnum/1, get_primary_apl/3, get_primary_apl/4, get_primary_apl_chbin/4, first_up/2, offline_owners/1, offline_owners/2, offline_owners/3 ]). -export_type([preflist/0, preflist_ann/0, preflist_with_pnum_ann/0]). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). -endif. -type index() :: chash:index_as_int(). -type n_val() :: non_neg_integer(). -type ring() :: riak_core_ring:riak_core_ring(). -type preflist() :: [{index(), node()}]. -type preflist_ann() :: [{{index(), node()}, primary|fallback}]. -type preflist_with_pnum_ann() :: [{{riak_core_ring:partition_id(), node()}, primary|fallback}]. -type iterator() :: term(). -type chashbin() :: term(). -type docidx() :: chash:index(). -spec active_owners(atom()) -> preflist_ann(). active_owners(Service) -> {ok, Ring} = riak_core_ring_manager:get_my_ring(), active_owners(Ring, riak_core_node_watcher:nodes(Service)). -spec active_owners(ring(), [node()]) -> preflist_ann(). active_owners(Ring, UpNodes) -> UpNodes1 = UpNodes, Primaries = riak_core_ring:all_owners(Ring), {Up, _Pangs} = check_up(Primaries, UpNodes1, [], []), Up. -spec get_apl(docidx(), n_val(), atom()) -> preflist(). get_apl(DocIdx, N, Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), get_apl_chbin(DocIdx, N, CHBin, riak_core_node_watcher:nodes(Service)). -spec get_apl_chbin(docidx(), n_val(), chashbin:chashbin(), [node()]) -> preflist(). get_apl_chbin(DocIdx, N, CHBin, UpNodes) -> [{Partition, Node} || {{Partition, Node}, _Type} <- get_apl_ann_chbin(DocIdx, N, CHBin, UpNodes)]. -spec get_apl(docidx(), n_val(), ring(), [node()]) -> preflist(). get_apl(DocIdx, N, Ring, UpNodes) -> [{Partition, Node} || {{Partition, Node}, _Type} <- get_apl_ann(DocIdx, N, Ring, UpNodes)]. get_apl_ann(DocIdx, N, UpNodes) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), get_apl_ann_chbin(DocIdx, N, CHBin, UpNodes). -spec get_apl_ann(binary(), n_val(), ring(), [node()]) -> preflist_ann(). get_apl_ann(DocIdx, N, Ring, UpNodes) -> UpNodes1 = UpNodes, Preflist = riak_core_ring:preflist(DocIdx, Ring), {Primaries, Fallbacks} = lists:split(N, Preflist), {Up, Pangs} = check_up(Primaries, UpNodes1, [], []), Up ++ find_fallbacks(Pangs, Fallbacks, UpNodes1, []). -spec get_apl_ann(riak_core_bucket:bucket(), [node()]) -> preflist_ann(). get_apl_ann({Bucket, Key}, UpNodes) -> BucketProps = riak_core_bucket:get_bucket(Bucket), NVal = proplists:get_value(n_val, BucketProps), DocIdx = riak_core_util:chash_key({Bucket, Key}), get_apl_ann(DocIdx, NVal, UpNodes). -spec get_apl_ann_with_pnum(riak_core_bucket:bucket()) -> preflist_with_pnum_ann(). get_apl_ann_with_pnum(BKey) -> {ok, Ring} = riak_core_ring_manager:get_my_ring(), UpNodes = riak_core_ring:all_members(Ring), Apl = get_apl_ann(BKey, UpNodes), Size = riak_core_ring:num_partitions(Ring), apl_with_partition_nums(Apl, Size). -spec get_apl_ann_chbin(binary(), n_val(), chashbin(), [node()]) -> preflist_ann(). 
get_apl_ann_chbin(DocIdx, N, CHBin, UpNodes) -> UpNodes1 = UpNodes, Itr = chashbin:iterator(DocIdx, CHBin), {Primaries, Itr2} = chashbin:itr_pop(N, Itr), {Up, Pangs} = check_up(Primaries, UpNodes1, [], []), Up ++ find_fallbacks_chbin(Pangs, Itr2, UpNodes1, []). -spec get_primary_apl(binary(), n_val(), atom()) -> preflist_ann(). get_primary_apl(DocIdx, N, Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), get_primary_apl_chbin(DocIdx, N, CHBin, riak_core_node_watcher:nodes(Service)). -spec get_primary_apl_chbin(binary(), n_val(), chashbin(), [node()]) -> preflist_ann(). get_primary_apl_chbin(DocIdx, N, CHBin, UpNodes) -> UpNodes1 = UpNodes, Itr = chashbin:iterator(DocIdx, CHBin), {Primaries, _} = chashbin:itr_pop(N, Itr), {Up, _} = check_up(Primaries, UpNodes1, [], []), Up. -spec get_primary_apl(binary(), n_val(), ring(), [node()]) -> preflist_ann(). get_primary_apl(DocIdx, N, Ring, UpNodes) -> UpNodes1 = UpNodes, Preflist = riak_core_ring:preflist(DocIdx, Ring), {Primaries, _} = lists:split(N, Preflist), {Up, _} = check_up(Primaries, UpNodes1, [], []), Up. @doc Return the first entry that is up in the preflist for ` DocIdx ' . This first_up(DocIdx, Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), Itr = chashbin:iterator(DocIdx, CHBin), UpSet = ordsets:from_list(riak_core_node_watcher:nodes(Service)), Itr2 = chashbin:itr_next_while(fun({_P, Node}) -> not ordsets:is_element(Node, UpSet) end, Itr), chashbin:itr_value(Itr2). offline_owners(Service) -> {ok, CHBin} = riak_core_ring_manager:get_chash_bin(), offline_owners(Service, CHBin). offline_owners(Service, CHBin) -> offline_owners(Service, CHBin, []). offline_owners(Service, CHBin, OtherDownNodes) -> UpSet = ordsets:from_list(riak_core_node_watcher:nodes(Service)), DownVNodes = chashbin:to_list_filter(fun({_Index, Node}) -> (not is_up(Node, UpSet) or lists:member(Node,OtherDownNodes)) end, CHBin), DownVNodes. -spec check_up(preflist(), [node()], preflist_ann(), preflist()) -> {preflist_ann(), preflist()}. check_up([], _UpNodes, Up, Pangs) -> {lists:reverse(Up), lists:reverse(Pangs)}; check_up([{Partition,Node}|Rest], UpNodes, Up, Pangs) -> case is_up(Node, UpNodes) of true -> check_up(Rest, UpNodes, [{{Partition, Node}, primary} | Up], Pangs); false -> check_up(Rest, UpNodes, Up, [{Partition, Node} | Pangs]) end. -spec find_fallbacks(preflist(), preflist(), [node()], preflist_ann()) -> preflist_ann(). find_fallbacks(_Pangs, [], _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks([], _Fallbacks, _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks([{Partition, _Node}|Rest]=Pangs, [{_,FN}|Fallbacks], UpNodes, Secondaries) -> case is_up(FN, UpNodes) of true -> find_fallbacks(Rest, Fallbacks, UpNodes, [{{Partition, FN}, fallback} | Secondaries]); false -> find_fallbacks(Pangs, Fallbacks, UpNodes, Secondaries) end. -spec find_fallbacks_chbin(preflist(), iterator(),[node()], preflist_ann()) -> preflist_ann(). find_fallbacks_chbin([], _Fallbacks, _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks_chbin(_, done, _UpNodes, Secondaries) -> lists:reverse(Secondaries); find_fallbacks_chbin([{Partition, _Node}|Rest]=Pangs, Itr, UpNodes, Secondaries) -> {_, FN} = chashbin:itr_value(Itr), Itr2 = chashbin:itr_next(Itr), case is_up(FN, UpNodes) of true -> find_fallbacks_chbin(Rest, Itr2, UpNodes, [{{Partition, FN}, fallback} | Secondaries]); false -> find_fallbacks_chbin(Pangs, Itr2, UpNodes, Secondaries) end. is_up(Node, UpNodes) -> lists:member(Node, UpNodes). 
-spec apl_with_partition_nums(preflist_ann(), riak_core_ring:ring_size()) -> preflist_with_pnum_ann(). apl_with_partition_nums(Apl, Size) -> [{{riak_core_ring_util:hash_to_partition_id(Hash, Size), Node}, Ann} || {{Hash, Node}, Ann} <- Apl]. -ifdef(TEST). smallest_test() -> Ring = riak_core_ring:fresh(1,node()), ?assertEqual([{0,node()}], get_apl(last_in_ring(), 1, Ring, [node()])). four_node_test() -> Nodes = [nodea, nodeb, nodec, noded], Ring = perfect_ring(8, Nodes), ?assertEqual([{0,nodea}, {182687704666362864775460604089535377456991567872,nodeb}, {365375409332725729550921208179070754913983135744,nodec}], get_apl(last_in_ring(), 3, Ring, Nodes)), ?assertEqual([{182687704666362864775460604089535377456991567872,nodeb}, {365375409332725729550921208179070754913983135744,nodec}, {0,noded}], get_apl(last_in_ring(), 3, Ring, [nodeb, nodec, noded])), With two nodes down ?assertEqual([{365375409332725729550921208179070754913983135744,nodec}, {0,noded}, {182687704666362864775460604089535377456991567872,nodec}], get_apl(last_in_ring(), 3, Ring, [nodec, noded])), With the other two nodes down ?assertEqual([{0,nodea}, {182687704666362864775460604089535377456991567872,nodeb}, {365375409332725729550921208179070754913983135744,nodea}], get_apl(last_in_ring(), 3, Ring, [nodea, nodeb])). perfect_ring(RingSize, Nodes) when RingSize rem length(Nodes) =:= 0 -> Ring = riak_core_ring:fresh(RingSize,node()), Owners = riak_core_ring:all_owners(Ring), TransferNode = fun({Idx,_CurOwner}, {Ring0, [NewOwner|Rest]}) -> {riak_core_ring:transfer_node(Idx, NewOwner, Ring0), Rest ++ [NewOwner]} end, {PerfectRing, _} = lists:foldl(TransferNode, {Ring, Nodes}, Owners), PerfectRing. last_in_ring() -> <<1461501637330902918203684832716283019655932542975:160/unsigned>>. six_node_test() -> its non - trivial to create a real 6 node ring , so here 's one we made {ok, [Ring0]} = file:consult("test/my_ring"), Ring = riak_core_ring:upgrade(Ring0), riak_core_util : chash_key({<<"foo " > > , < < " bar " > > } ) , DocIdx = <<73,212,27,234,104,13,150,207,0,82,86,183,125,225,172, 154,135,46,6,112>>, Nodes = ['dev1@127.0.0.1', 'dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1', 'dev5@127.0.0.1', 'dev6@127.0.0.1'], the DocIdx of the key , in this case the 433883 partition . 
The N ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev3@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes)), ?assertEqual([{456719261665907161938651510223838443642478919680, 'dev3@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}, {433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1'])), ?assertEqual([{479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}, {433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev6@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1', 'dev3@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev6@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev1@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev5@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev6@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1', 'dev1@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev3@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev6@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev1@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1', 'dev6@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev2@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev2@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev2@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev4@127.0.0.1', 'dev3@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1', 'dev5@127.0.0.1'])), ?assertEqual([{433883298582611803841718934712646521460354973696, 'dev2@127.0.0.1'}, {479555224749202520035584085735030365824602865664, 'dev4@127.0.0.1'}, {456719261665907161938651510223838443642478919680, 'dev5@127.0.0.1'}], get_apl(DocIdx, 3, Ring, Nodes -- ['dev3@127.0.0.1'])), ok. 
six_node_bucket_key_ann_test() -> {ok, [Ring0]} = file:consult("test/my_ring"), Nodes = ['dev1@127.0.0.1', 'dev2@127.0.0.1', 'dev3@127.0.0.1', 'dev4@127.0.0.1', 'dev5@127.0.0.1', 'dev6@127.0.0.1'], Ring = riak_core_ring:upgrade(Ring0), Bucket = <<"favorite">>, Key = <<"jethrotull">>, application:set_env(riak_core, default_bucket_props, [{n_val, 3}, {chash_keyfun,{riak_core_util,chash_std_keyfun}}]), riak_core_ring_manager:setup_ets(test), riak_core_ring_manager:set_ring_global(Ring), Size = riak_core_ring:num_partitions(Ring), ?assertEqual([{{34, 'dev5@127.0.0.1'}, primary}, {{35, 'dev6@127.0.0.1'}, primary}, {{36, 'dev1@127.0.0.1'}, primary}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes), Size)), ?assertEqual([{{35, 'dev6@127.0.0.1'}, primary}, {{36, 'dev1@127.0.0.1'}, primary}, {{34, 'dev2@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1']), Size)), ?assertEqual([{{36, 'dev1@127.0.0.1'}, primary}, {{34, 'dev2@127.0.0.1'}, fallback}, {{35, 'dev3@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev2@127.0.0.1'}, fallback}, {{35, 'dev3@127.0.0.1'}, fallback}, {{36, 'dev4@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev3@127.0.0.1'}, fallback}, {{35, 'dev4@127.0.0.1'}, fallback}, {{36, 'dev3@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1', 'dev2@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev4@127.0.0.1'}, fallback}, {{35, 'dev4@127.0.0.1'}, fallback}, {{36, 'dev4@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev5@127.0.0.1', 'dev6@127.0.0.1', 'dev1@127.0.0.1', 'dev2@127.0.0.1', 'dev3@127.0.0.1']), Size)), ?assertEqual([{{34, 'dev5@127.0.0.1'}, primary}, {{35, 'dev6@127.0.0.1'}, primary}, {{36, 'dev3@127.0.0.1'}, fallback}], apl_with_partition_nums( get_apl_ann({Bucket, Key}, Nodes -- ['dev1@127.0.0.1', 'dev2@127.0.0.1']), Size)), riak_core_ring_manager:cleanup_ets(test), ok. chbin_test_() -> {timeout, 180, fun chbin_test_scenario/0}. chbin_test_scenario() -> [chbin_test_scenario(Size, NumNodes) || Size <- [32, 64, 128], NumNodes <- [1, 2, 3, 4, 5, 8, Size div 4]], ok. chbin_test_scenario(Size, NumNodes) -> RingTop = 1 bsl 160, Ring = riak_core_test_util:fake_ring(Size, NumNodes), Nodes = riak_core_ring:all_members(Ring), CHash = riak_core_ring:chash(Ring), CHBin = chashbin:create(CHash), Inc = chash:ring_increment(Size), HashKeys = [<<X:160/integer>> || X <- lists:seq(0, RingTop, Inc div 2)], Shuffled = riak_core_util:shuffle(Nodes), _ = CHBin, [begin Up = max(0, NumNodes - Down), UpNodes = lists:sublist(Shuffled, Up), ?assertEqual(get_apl(HashKey, N, Ring, UpNodes), get_apl_chbin(HashKey, N, CHBin, UpNodes)), ?assertEqual(get_primary_apl(HashKey, N, Ring, UpNodes), get_primary_apl_chbin(HashKey, N, CHBin, UpNodes)), ok end || HashKey <- HashKeys, N <- [1, 2, 3, 4], Down <- [0, 1, 2, Size div 2, Size-1, Size]], ok. -endif.
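The extracted commentary for riak_core_apl.erl above describes how an active preference list is built: the first N ring positions are the primaries, and any primary whose node is down keeps its partition index but is re-homed on the next up node found while walking the rest of the ring. The sketch below restates that selection rule only; it is written in Haskell because this corpus mixes languages, it is not the module's Erlang API, and the types are simplified stand-ins.

import Data.List (partition)

type Partition = Integer
type Node      = String
data Kind      = Primary | Fallback deriving (Eq, Show)

-- Take the first n preflist entries as primaries; for each primary whose
-- node is down (a "pang"), walk the remaining entries and borrow the first
-- up node found, keeping the original partition index.
getAplAnn :: Int -> [Node] -> [(Partition, Node)] -> [((Partition, Node), Kind)]
getAplAnn n upNodes preflist =
    [ (entry, Primary) | entry <- up ] ++ findFallbacks pangs rest
  where
    (primaries, rest) = splitAt n preflist
    (up, pangs)       = partition (\(_, node) -> node `elem` upNodes) primaries

    findFallbacks [] _  = []
    findFallbacks _  [] = []
    findFallbacks allPangs@((part, _) : morePangs) ((_, candidate) : more)
      | candidate `elem` upNodes =
          ((part, candidate), Fallback) : findFallbacks morePangs more
      | otherwise = findFallbacks allPangs more

-- e.g. with nodes "b" and "c" down, partition 2 keeps its index but is
-- served by the next up node in ring order:
--   getAplAnn 2 ["a","d"] [(1,"a"),(2,"b"),(3,"c"),(4,"d")]
--   == [((1,"a"),Primary),((2,"d"),Fallback)]

This mirrors the check_up/find_fallbacks pair in the Erlang source (ignoring the chashbin iterator variant) and reproduces the behaviour the six_node tests assert: with all nodes up you get primaries only, and as primaries are removed their partitions reappear as fallbacks on later up nodes.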
b021e0fb9ca57eb43a76c1c729f77a8bcf267a7067b038f74816d95aa89af3f3
puffnfresh/sonic2
Offsets.hs
module Game.Sega.Sonic.Offsets where import qualified Data.ByteString as BS import Data.Word (Word32) data Offset = Offset Word32 Word32 deriving (Eq, Ord, Show) sineData :: Offset sineData = Offset 0x33CE 0x364E angleData :: Offset angleData = Offset 0x36B4 0X37B6 paletteSonic :: Offset paletteSonic = Offset 0x29E2 0x2A02 paletteEhz :: Offset paletteEhz = Offset 0x2A22 0x2A82 curveAndResistanceMapping :: Offset curveAndResistanceMapping = Offset 0x42D50 0x42E50 collisionArray1 :: Offset collisionArray1 = Offset 0x42E50 0x43E50 collisionEhzHtzPrimary :: Offset collisionEhzHtzPrimary = Offset 0x44E50 0x44F40 layoutEhz1 :: Offset layoutEhz1 = Offset 0x45AC4 0x45C84 layoutEhz2 :: Offset layoutEhz2 = Offset 0x45C84 0x45E74 artSonic :: Offset artSonic = Offset 0x50000 0x64320 artTails :: Offset artTails = Offset 0x64320 0x6FBE0 mappingSonic :: Offset mappingSonic = Offset 0x6FBE0 0x714E0 mappingTails :: Offset mappingTails = Offset 0x739E2 0x7446C animationSonicWalk :: Offset animationSonicWalk = Offset 0x1B668 0x1B672 animationSonicRun :: Offset animationSonicRun = Offset 0x1B666 0x1B670 animationSonicRoll :: Offset animationSonicRoll = Offset 0x1B670 0x1B67A animationSonicRoll2 :: Offset animationSonicRoll2 = Offset 0x1B67A 0x1B684 animationSonicPush :: Offset animationSonicPush = Offset 0x1B684 0x1B68E animationSonicWait :: Offset animationSonicWait = Offset 0x1B68E 0x1B744 animationTailsWait :: Offset animationTailsWait = Offset 0x1D0A2 0x1D0E0 dplcSonic :: Offset dplcSonic = Offset 0x714E0 0x71D8E dplcTails :: Offset dplcTails = Offset 0x7446C 0x74876 blockEhz :: Offset blockEhz = Offset 0x94E74 0x95C24 artEhzHtz :: Offset artEhzHtz = Offset 0x95C24 0x985A4 chunkEhzHtz :: Offset chunkEhzHtz = Offset 0x99D34 0x9CFD4 startPosEhz1 :: Offset startPosEhz1 = Offset 0xC1D0 0xC1D4 data LevelOffsets = LevelOffsets { levelLayoutOffset :: Offset , levelChunksOffset :: Offset , levelBlocksOffset :: Offset , levelCollisionOffset :: Offset , levelPaletteOffset :: Offset , levelArtOffset :: Offset , levelStartPos :: Offset } deriving (Eq, Ord, Show) ehz1 :: LevelOffsets ehz1 = LevelOffsets layoutEhz1 chunkEhzHtz blockEhz collisionEhzHtzPrimary paletteEhz artEhzHtz startPosEhz1 data SpriteOffsets = SpriteOffsets { spriteArt :: Offset , spriteMapping :: Offset , spritePalette :: Offset , spriteDPLC :: Offset } deriving (Eq, Ord, Show) sonicOffsets :: SpriteOffsets sonicOffsets = SpriteOffsets artSonic mappingSonic paletteSonic dplcSonic tailsOffsets :: SpriteOffsets tailsOffsets = SpriteOffsets artTails mappingTails paletteSonic dplcTails sliceOffset :: Offset -> BS.ByteString -> BS.ByteString sliceOffset (Offset start end) = BS.take (fromIntegral count) . BS.drop (fromIntegral start) where count = end - start
null
https://raw.githubusercontent.com/puffnfresh/sonic2/0abc3e109a847582c2e16edb13e83e611419fc8a/src/Game/Sega/Sonic/Offsets.hs
haskell
module Game.Sega.Sonic.Offsets where import qualified Data.ByteString as BS import Data.Word (Word32) data Offset = Offset Word32 Word32 deriving (Eq, Ord, Show) sineData :: Offset sineData = Offset 0x33CE 0x364E angleData :: Offset angleData = Offset 0x36B4 0X37B6 paletteSonic :: Offset paletteSonic = Offset 0x29E2 0x2A02 paletteEhz :: Offset paletteEhz = Offset 0x2A22 0x2A82 curveAndResistanceMapping :: Offset curveAndResistanceMapping = Offset 0x42D50 0x42E50 collisionArray1 :: Offset collisionArray1 = Offset 0x42E50 0x43E50 collisionEhzHtzPrimary :: Offset collisionEhzHtzPrimary = Offset 0x44E50 0x44F40 layoutEhz1 :: Offset layoutEhz1 = Offset 0x45AC4 0x45C84 layoutEhz2 :: Offset layoutEhz2 = Offset 0x45C84 0x45E74 artSonic :: Offset artSonic = Offset 0x50000 0x64320 artTails :: Offset artTails = Offset 0x64320 0x6FBE0 mappingSonic :: Offset mappingSonic = Offset 0x6FBE0 0x714E0 mappingTails :: Offset mappingTails = Offset 0x739E2 0x7446C animationSonicWalk :: Offset animationSonicWalk = Offset 0x1B668 0x1B672 animationSonicRun :: Offset animationSonicRun = Offset 0x1B666 0x1B670 animationSonicRoll :: Offset animationSonicRoll = Offset 0x1B670 0x1B67A animationSonicRoll2 :: Offset animationSonicRoll2 = Offset 0x1B67A 0x1B684 animationSonicPush :: Offset animationSonicPush = Offset 0x1B684 0x1B68E animationSonicWait :: Offset animationSonicWait = Offset 0x1B68E 0x1B744 animationTailsWait :: Offset animationTailsWait = Offset 0x1D0A2 0x1D0E0 dplcSonic :: Offset dplcSonic = Offset 0x714E0 0x71D8E dplcTails :: Offset dplcTails = Offset 0x7446C 0x74876 blockEhz :: Offset blockEhz = Offset 0x94E74 0x95C24 artEhzHtz :: Offset artEhzHtz = Offset 0x95C24 0x985A4 chunkEhzHtz :: Offset chunkEhzHtz = Offset 0x99D34 0x9CFD4 startPosEhz1 :: Offset startPosEhz1 = Offset 0xC1D0 0xC1D4 data LevelOffsets = LevelOffsets { levelLayoutOffset :: Offset , levelChunksOffset :: Offset , levelBlocksOffset :: Offset , levelCollisionOffset :: Offset , levelPaletteOffset :: Offset , levelArtOffset :: Offset , levelStartPos :: Offset } deriving (Eq, Ord, Show) ehz1 :: LevelOffsets ehz1 = LevelOffsets layoutEhz1 chunkEhzHtz blockEhz collisionEhzHtzPrimary paletteEhz artEhzHtz startPosEhz1 data SpriteOffsets = SpriteOffsets { spriteArt :: Offset , spriteMapping :: Offset , spritePalette :: Offset , spriteDPLC :: Offset } deriving (Eq, Ord, Show) sonicOffsets :: SpriteOffsets sonicOffsets = SpriteOffsets artSonic mappingSonic paletteSonic dplcSonic tailsOffsets :: SpriteOffsets tailsOffsets = SpriteOffsets artTails mappingTails paletteSonic dplcTails sliceOffset :: Offset -> BS.ByteString -> BS.ByteString sliceOffset (Offset start end) = BS.take (fromIntegral count) . BS.drop (fromIntegral start) where count = end - start
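Offsets.hs above is a table of byte ranges into a Sonic 2 ROM image plus sliceOffset, which cuts one of those ranges out of a ByteString. A small usage sketch follows; the ROM file name is hypothetical, and only the module name, artSonic and sliceOffset come from the source above.

import qualified Data.ByteString as BS
import Game.Sega.Sonic.Offsets (artSonic, sliceOffset)

main :: IO ()
main = do
  rom <- BS.readFile "sonic2.bin"         -- hypothetical path to a ROM dump
  let sonicArt = sliceOffset artSonic rom -- bytes 0x50000 .. 0x64320 of the ROM
  print (BS.length sonicArt)              -- size of the sliced art region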
cbdfaffd9c805a9d08cce54c0792b2c53b2aa76991787f545dcba5981523911a
jeromesimeon/Galax
physical_load.mli
(***********************************************************************) (* *) (* GALAX *) (* XQuery Engine *) (* *) Copyright 2001 - 2007 . (* Distributed only by permission. *) (* *) (***********************************************************************) $ I d : physical_load.mli , v 1.5 2007/02/01 22:08:51 simeon Exp $ (* Module: Physical_load Description: This contains operations for loading streams as in-memory representation. *) open Streaming_types open Nodeid open Nodeid_context open Physical_value type document_id_generator = unit -> docid type load_xml_value_function = ordered_xml_stream -> item list type load_node_sequence_function = ordered_xml_stream -> Dm.node list type load_xml_document_function = ordered_xml_stream -> item list val register_load_functions : document_id_generator -> load_xml_value_function -> load_node_sequence_function -> load_xml_document_function -> unit (* Load a data model instance from an XML stream *) val load_xml_value_from_typed_stream : nodeid_context -> typed_xml_stream -> item list val load_xml_node_sequence_from_typed_stream : nodeid_context -> typed_xml_stream -> Dm.node list val load_xml_document_from_typed_stream : nodeid_context -> typed_xml_stream -> item list Note : The following function can be used to apply loading inside an existing document . This is use to support updates , notably . - The following function can be used to apply loading inside an existing document. This is use to support updates, notably. - Jerome *) val load_xml_node_sequence_from_typed_stream_for_docid : nodeid_context -> docid -> typed_xml_stream -> Dm.node list
null
https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/physicaldm/physical_load.mli
ocaml
********************************************************************* GALAX XQuery Engine Distributed only by permission. ********************************************************************* Module: Physical_load Description: This contains operations for loading streams as in-memory representation. Load a data model instance from an XML stream
Copyright 2001 - 2007 . $ I d : physical_load.mli , v 1.5 2007/02/01 22:08:51 simeon Exp $ open Streaming_types open Nodeid open Nodeid_context open Physical_value type document_id_generator = unit -> docid type load_xml_value_function = ordered_xml_stream -> item list type load_node_sequence_function = ordered_xml_stream -> Dm.node list type load_xml_document_function = ordered_xml_stream -> item list val register_load_functions : document_id_generator -> load_xml_value_function -> load_node_sequence_function -> load_xml_document_function -> unit val load_xml_value_from_typed_stream : nodeid_context -> typed_xml_stream -> item list val load_xml_node_sequence_from_typed_stream : nodeid_context -> typed_xml_stream -> Dm.node list val load_xml_document_from_typed_stream : nodeid_context -> typed_xml_stream -> item list Note : The following function can be used to apply loading inside an existing document . This is use to support updates , notably . - The following function can be used to apply loading inside an existing document. This is use to support updates, notably. - Jerome *) val load_xml_node_sequence_from_typed_stream_for_docid : nodeid_context -> docid -> typed_xml_stream -> Dm.node list
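The physical_load.mli interface above exposes loader function types together with register_load_functions, meaning the concrete loaders are installed at startup and other callers go through the registered values (a common way to break module dependency cycles in larger OCaml code bases, although the .mli itself does not state the motivation). Below is a minimal sketch of that late-registration pattern, in Haskell and with deliberately simplified stand-in types; it is not Galax's implementation.

import Data.IORef
import System.IO.Unsafe (unsafePerformIO)

-- Stand-in for "ordered_xml_stream -> item list" from the interface above.
type LoadDocument = String -> [String]

{-# NOINLINE loadDocumentRef #-}
loadDocumentRef :: IORef LoadDocument
loadDocumentRef =
  unsafePerformIO (newIORef (error "load functions not registered"))

-- Counterpart of register_load_functions: called once at initialisation.
registerLoadFunctions :: LoadDocument -> IO ()
registerLoadFunctions = writeIORef loadDocumentRef

-- Counterpart of the load_xml_* entry points: dispatches through the
-- registered implementation instead of importing it directly.
loadXmlDocument :: String -> IO [String]
loadXmlDocument stream = do
  load <- readIORef loadDocumentRef
  pure (load stream)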
be5921400af1e7b91b1b748126886d8bba64d5978b221ab3502200bae418abee
rudymatela/leancheck
LeanCheck.hs
-- | Module : Test . Copyright : ( c ) 2015 - 2020 License : 3 - Clause BSD ( see the file LICENSE ) Maintainer : < > -- LeanCheck is a simple enumerative property - based testing library . -- -- A __property__ is a function returning a 'Bool' that should be 'True' for -- all possible choices of arguments. Properties can be viewed as a -- parameterized unit tests. -- -- To check if a property ' holds ' by testing up to a thousand values , -- we evaluate: -- > holds 1000 property -- -- 'True' indicates success. 'False' indicates a bug. -- -- For example: -- -- > > import Data.List (sort) > > holds 1000 $ \xs - > length ( sort xs ) = = length ( xs::[Int ] ) -- > True -- To get the smallest ' counterExample ' by testing up to a thousand values , -- we evaluate: -- -- > counterExample 1000 property -- -- 'Nothing' indicates no counterexample was found, -- a 'Just' value indicates a counterexample. -- -- For instance: -- -- > > import Data.List (union) > > counterExample 1000 $ \xs ys - > union xs ys = = union ys ( xs : : [ Int ] ) > Just [ " [ ] " , " [ 0,0 ] " ] -- The suggested values for the number of tests to use with LeanCheck are 500 , 1 000 or 10 000 . is memory intensive and you should take -- care if you go beyond that. -- -- The function 'check' can also be used to test and report counterexamples. -- > > check $ \xs ys - > union xs ys = = union ys ( xs : : [ Int ] ) > * * * Failed ! ( after 4 tests ): > [ ] [ 0,0 ] -- -- Arguments of properties should be instances of the ' ' typeclass . ' Listable ' instances are provided for the most common types . New instances are easily defined ( see ' ' for more info ) . module Test.LeanCheck ( -- * Checking and testing holds , fails , exists -- ** Boolean (property) operators , (==>) -- ** Counterexamples and witnesses , counterExample , counterExamples , witness , witnesses -- ** Reporting , check , checkFor , checkResult , checkResultFor -- * Listing test values , Listable(..) -- ** Listing constructors , cons0 , cons1 , cons2 , cons3 , cons4 , cons5 , cons6 , cons7 , cons8 , cons9 , cons10 , cons11 , cons12 , delay , reset , ofWeight , addWeight , suchThat -- ** Combining tiers , (\/) , (\\//) , (><) , productWith -- ** Manipulating tiers , mapT , filterT , concatT , concatMapT , deleteT , normalizeT , toTiers * * Automatically deriving Listable instances , deriveListable , deriveListableCascading -- ** Specialized constructors of tiers , setCons , bagCons , noDupListCons , mapCons -- ** Products of tiers , product3With , productMaybeWith -- * Listing lists , listsOf , setsOf , bagsOf , noDupListsOf , products , listsOfLength -- ** Listing values , tiersFloating , tiersFractional , listFloating , listFractional , listIntegral , (+|) -- * Test results , Testable , results ) where import Test.LeanCheck.Basic import Test.LeanCheck.Tiers import Test.LeanCheck.Derive import Test.LeanCheck.IO -- | Tiers of 'Fractional' values. -- This can be used as the implementation of 'tiers' for 'Fractional' types. -- -- This function is deprecated. Please consider using 'listFractional' instead -- or use 'toTiers' 'listFractional'. tiersFractional :: (Ord a, Fractional a) => [[a]] tiersFractional = toTiers listFractional -- | Tiers of 'Floating' values. -- This can be used as the implementation of 'tiers' for 'Floating' types. -- -- This function is equivalent to 'tiersFractional' with positive and negative infinities included : 1/0 and -1/0 . -- @NaN@ and @-0@ are excluded from this enumeration . -- -- This function is deprecated. 
Please consider using 'listFloating' instead -- or use 'toTiers' 'listFloating'. tiersFloating :: (Ord a, Fractional a) => [[a]] tiersFloating = toTiers listFloating
null
https://raw.githubusercontent.com/rudymatela/leancheck/af299d3616a362816c21931ada6fa49a2af7f911/src/Test/LeanCheck.hs
haskell
| A __property__ is a function returning a 'Bool' that should be 'True' for all possible choices of arguments. Properties can be viewed as a parameterized unit tests. we evaluate: 'True' indicates success. 'False' indicates a bug. For example: > > import Data.List (sort) > True we evaluate: > counterExample 1000 property 'Nothing' indicates no counterexample was found, a 'Just' value indicates a counterexample. For instance: > > import Data.List (union) care if you go beyond that. The function 'check' can also be used to test and report counterexamples. * Checking and testing ** Boolean (property) operators ** Counterexamples and witnesses ** Reporting * Listing test values ** Listing constructors ** Combining tiers ** Manipulating tiers ** Specialized constructors of tiers ** Products of tiers * Listing lists ** Listing values * Test results | Tiers of 'Fractional' values. This can be used as the implementation of 'tiers' for 'Fractional' types. This function is deprecated. Please consider using 'listFractional' instead or use 'toTiers' 'listFractional'. | Tiers of 'Floating' values. This can be used as the implementation of 'tiers' for 'Floating' types. This function is equivalent to 'tiersFractional' This function is deprecated. Please consider using 'listFloating' instead or use 'toTiers' 'listFloating'.
Module : Test . Copyright : ( c ) 2015 - 2020 License : 3 - Clause BSD ( see the file LICENSE ) Maintainer : < > LeanCheck is a simple enumerative property - based testing library . To check if a property ' holds ' by testing up to a thousand values , > holds 1000 property > > holds 1000 $ \xs - > length ( sort xs ) = = length ( xs::[Int ] ) To get the smallest ' counterExample ' by testing up to a thousand values , > > counterExample 1000 $ \xs ys - > union xs ys = = union ys ( xs : : [ Int ] ) > Just [ " [ ] " , " [ 0,0 ] " ] The suggested values for the number of tests to use with LeanCheck are 500 , 1 000 or 10 000 . is memory intensive and you should take > > check $ \xs ys - > union xs ys = = union ys ( xs : : [ Int ] ) > * * * Failed ! ( after 4 tests ): > [ ] [ 0,0 ] Arguments of properties should be instances of the ' ' typeclass . ' Listable ' instances are provided for the most common types . New instances are easily defined ( see ' ' for more info ) . module Test.LeanCheck ( holds , fails , exists , (==>) , counterExample , counterExamples , witness , witnesses , check , checkFor , checkResult , checkResultFor , Listable(..) , cons0 , cons1 , cons2 , cons3 , cons4 , cons5 , cons6 , cons7 , cons8 , cons9 , cons10 , cons11 , cons12 , delay , reset , ofWeight , addWeight , suchThat , (\/) , (\\//) , (><) , productWith , mapT , filterT , concatT , concatMapT , deleteT , normalizeT , toTiers * * Automatically deriving Listable instances , deriveListable , deriveListableCascading , setCons , bagCons , noDupListCons , mapCons , product3With , productMaybeWith , listsOf , setsOf , bagsOf , noDupListsOf , products , listsOfLength , tiersFloating , tiersFractional , listFloating , listFractional , listIntegral , (+|) , Testable , results ) where import Test.LeanCheck.Basic import Test.LeanCheck.Tiers import Test.LeanCheck.Derive import Test.LeanCheck.IO tiersFractional :: (Ord a, Fractional a) => [[a]] tiersFractional = toTiers listFractional with positive and negative infinities included : 1/0 and -1/0 . @NaN@ and @-0@ are excluded from this enumeration . tiersFloating :: (Ord a, Fractional a) => [[a]] tiersFloating = toTiers listFloating
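The module documentation embedded above already demonstrates holds, counterExample and check at the GHCi prompt; the fragment below is the same exported API wrapped in a compilable program, as a copy-paste starting point. The second property is deliberately false so that counterExample has something to report; the exact counterexample printed depends on LeanCheck's enumeration order.

import Data.List (sort)
import Test.LeanCheck (check, counterExample, holds)

prop_sortKeepsLength :: [Int] -> Bool
prop_sortKeepsLength xs = length (sort xs) == length xs

prop_sortIsIdentity :: [Int] -> Bool
prop_sortIsIdentity xs = sort xs == xs      -- intentionally false

main :: IO ()
main = do
  print (holds 1000 prop_sortKeepsLength)          -- True
  print (counterExample 1000 prop_sortIsIdentity)  -- Just <some unsorted list>
  check prop_sortKeepsLength                       -- prints a pass/fail summary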
42cb920730700cfcc71dcaa10ff3eadd7a1356dd89f0f81089c1acdbb2ce6143
sanel/hubris
pwd.clj
;; example of custom command (defcommand pwd "Print current directory." [] (.getCanonicalPath (java.io.File. ".")))
null
https://raw.githubusercontent.com/sanel/hubris/e119f13a855b4ec86db8d8deb696f8dd59ceea17/commands/pwd.clj
clojure
example of custom command
(defcommand pwd "Print current directory." [] (.getCanonicalPath (java.io.File. ".")))
8d0d52fbdcc4e7654bebc55278d073ceb64d78ceb2af1be6d536df7723e278c9
haskell/cabal
setup.test.hs
import Test.Cabal.Prelude -- Test building a dynamic library/executable which uses Template -- Haskell main = setupAndCabalTest $ do skipUnless "no shared libs" =<< hasSharedLibraries setup_build ["--enable-shared", "--enable-executable-dynamic"]
null
https://raw.githubusercontent.com/haskell/cabal/00a2351789a460700a2567eb5ecc42cca0af913f/cabal-testsuite/PackageTests/TemplateHaskell/dynamic/setup.test.hs
haskell
Test building a dynamic library/executable which uses Template Haskell
import Test.Cabal.Prelude main = setupAndCabalTest $ do skipUnless "no shared libs" =<< hasSharedLibraries setup_build ["--enable-shared", "--enable-executable-dynamic"]
9ffd0dc07f5df56cba961927142a932a6c657f8b3c9e44beed0c090d84733075
eryx67/vk-api-example
Utils.hs
# LANGUAGE FlexibleContexts # {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TypeFamilies #-} -- | module VK.App.Internal.Utils where import Control.Concurrent (forkIO) import Control.Monad (forM_, void) import qualified Data.Text as T import Data.Text.Format import qualified Data.Text.Lazy as LT import Data.Typeable (Typeable) import React.Flux import React.Flux.Lifecycle formatDuration :: Int -> T.Text formatDuration v = let ss = v `rem` 60 ms = v `div` 60 `rem` 60 hs = v `div` 3600 in LT.toStrict $ format "{}:{}:{}" (hs, left 2 '0' ms, left 2 '0' ss) execAction :: (a -> [SomeStoreAction]) -> a -> IO () execAction dispatcher a = void . forkIO $ forM_ (dispatcher a) executeAction txt_ :: T.Text -> ReactElementM eventHandler () txt_ = elemText . T.unpack willUnmountView :: Typeable props => String -> IO () -> ReactView props willUnmountView name action = defineLifecycleView name () lifecycleConfig { lComponentWillUnmount = Just (\_ _ -> action) } willUnmountView_ :: String -> IO () -> ReactElementM eventHandler () willUnmountView_ name action = view (willUnmountView name action) () mempty
null
https://raw.githubusercontent.com/eryx67/vk-api-example/4ce634e2f72cf0ab6ef3b80387ad489de9d8c0ee/src/VK/App/Internal/Utils.hs
haskell
# LANGUAGE OverloadedStrings # # LANGUAGE RankNTypes # # LANGUAGE TypeFamilies # |
# LANGUAGE FlexibleContexts # module VK.App.Internal.Utils where import Control.Concurrent (forkIO) import Control.Monad (forM_, void) import qualified Data.Text as T import Data.Text.Format import qualified Data.Text.Lazy as LT import Data.Typeable (Typeable) import React.Flux import React.Flux.Lifecycle formatDuration :: Int -> T.Text formatDuration v = let ss = v `rem` 60 ms = v `div` 60 `rem` 60 hs = v `div` 3600 in LT.toStrict $ format "{}:{}:{}" (hs, left 2 '0' ms, left 2 '0' ss) execAction :: (a -> [SomeStoreAction]) -> a -> IO () execAction dispatcher a = void . forkIO $ forM_ (dispatcher a) executeAction txt_ :: T.Text -> ReactElementM eventHandler () txt_ = elemText . T.unpack willUnmountView :: Typeable props => String -> IO () -> ReactView props willUnmountView name action = defineLifecycleView name () lifecycleConfig { lComponentWillUnmount = Just (\_ _ -> action) } willUnmountView_ :: String -> IO () -> ReactElementM eventHandler () willUnmountView_ name action = view (willUnmountView name action) () mempty
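formatDuration in VK.App.Internal.Utils above turns a number of seconds into h:mm:ss text, with hours unpadded and minutes and seconds zero-padded. A quick check of what the definition produces, assuming the module is importable under the name declared in the file:

import qualified Data.Text.IO as T
import VK.App.Internal.Utils (formatDuration)

main :: IO ()
main = do
  T.putStrLn (formatDuration 59)    -- 0:00:59
  T.putStrLn (formatDuration 3661)  -- 1:01:01
  T.putStrLn (formatDuration 7322)  -- 2:02:02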
0f6e05f17c96e6c7b516ab924a2e1bc9bfe6470d24a069e207e5b4e396cf73ba
antono/guix-debian
file-systems.scm
;;; GNU Guix --- Functional package management for GNU Copyright © 2013 , 2014 < > ;;; ;;; This file is part of GNU Guix. ;;; GNU is free software ; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 3 of the License , or ( at ;;; your option) any later version. ;;; ;;; GNU Guix is distributed in the hope that it will be useful, but ;;; WITHOUT ANY WARRANTY; without even the implied warranty of ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;; GNU General Public License for more details. ;;; You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (gnu system file-systems) #:use-module (guix records) #:export (<file-system> file-system file-system? file-system-device file-system-title file-system-mount-point file-system-type file-system-needed-for-boot? file-system-flags file-system-options file-system-check? file-system-create-mount-point? %fuse-control-file-system %binary-format-file-system %shared-memory-file-system %pseudo-terminal-file-system %devtmpfs-file-system %base-file-systems)) ;;; Commentary: ;;; ;;; Declaring file systems to be mounted. ;;; ;;; Code: ;; File system declaration. (define-record-type* <file-system> file-system make-file-system file-system? (device file-system-device) ; string (title file-system-title ; 'device | 'label | 'uuid (default 'device)) (mount-point file-system-mount-point) ; string (type file-system-type) ; string (flags file-system-flags ; list of symbols (default '())) (options file-system-options ; string or #f (default #f)) Boolean (default #f)) Boolean (default #t)) Boolean (default #f))) (define %fuse-control-file-system ;; Control file system for Linux' file systems in user-space (FUSE). (file-system (device "fusectl") (mount-point "/sys/fs/fuse/connections") (type "fusectl") (check? #f))) (define %binary-format-file-system ;; Support for arbitrary executable binary format. (file-system (device "binfmt_misc") (mount-point "/proc/sys/fs/binfmt_misc") (type "binfmt_misc") (check? #f))) (define %devtmpfs-file-system ;; /dev as a 'devtmpfs' file system, needed for udev. (file-system (device "none") (mount-point "/dev") (type "devtmpfs") (check? #f) ;; Mount it from the initrd so /dev/pts & co. can then be mounted over it. (needed-for-boot? #t))) (define %tty-gid ;; ID of the 'tty' group. Allocate it statically to make it easy to refer ;; to it from here and from the 'tty' group definitions. 996) (define %pseudo-terminal-file-system ;; The pseudo-terminal file system. It needs to be mounted so that statfs(2 ) returns DEVPTS_SUPER_MAGIC like libc 's getpt(3 ) expects ( and ;; thus openpty(3) and its users, such as xterm.) (file-system (device "none") (mount-point "/dev/pts") (type "devpts") (check? #f) (needed-for-boot? #f) (create-mount-point? #t) (options (string-append "gid=" (number->string %tty-gid) ",mode=620")))) (define %shared-memory-file-system ;; Shared memory. (file-system (device "tmpfs") (mount-point "/dev/shm") (type "tmpfs") (check? #f) (flags '(no-suid no-dev)) (options "size=50%") ;TODO: make size configurable (create-mount-point? #t))) (define %base-file-systems ;; List of basic file systems to be mounted. Note that /proc and /sys are ;; currently mounted by the initrd. (list %devtmpfs-file-system %pseudo-terminal-file-system %shared-memory-file-system)) ;;; file-systems.scm ends here
null
https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/gnu/system/file-systems.scm
scheme
GNU Guix --- Functional package management for GNU This file is part of GNU Guix. you can redistribute it and/or modify it either version 3 of the License , or ( at your option) any later version. GNU Guix is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. Commentary: Declaring file systems to be mounted. Code: File system declaration. string 'device | 'label | 'uuid string string list of symbols string or #f Control file system for Linux' file systems in user-space (FUSE). Support for arbitrary executable binary format. /dev as a 'devtmpfs' file system, needed for udev. Mount it from the initrd so /dev/pts & co. can then be mounted over it. ID of the 'tty' group. Allocate it statically to make it easy to refer to it from here and from the 'tty' group definitions. The pseudo-terminal file system. It needs to be mounted so that thus openpty(3) and its users, such as xterm.) Shared memory. TODO: make size configurable List of basic file systems to be mounted. Note that /proc and /sys are currently mounted by the initrd. file-systems.scm ends here
Copyright © 2013 , 2014 < > under the terms of the GNU General Public License as published by You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (gnu system file-systems) #:use-module (guix records) #:export (<file-system> file-system file-system? file-system-device file-system-title file-system-mount-point file-system-type file-system-needed-for-boot? file-system-flags file-system-options file-system-check? file-system-create-mount-point? %fuse-control-file-system %binary-format-file-system %shared-memory-file-system %pseudo-terminal-file-system %devtmpfs-file-system %base-file-systems)) (define-record-type* <file-system> file-system make-file-system file-system? (default 'device)) (default '())) (default #f)) Boolean (default #f)) Boolean (default #t)) Boolean (default #f))) (define %fuse-control-file-system (file-system (device "fusectl") (mount-point "/sys/fs/fuse/connections") (type "fusectl") (check? #f))) (define %binary-format-file-system (file-system (device "binfmt_misc") (mount-point "/proc/sys/fs/binfmt_misc") (type "binfmt_misc") (check? #f))) (define %devtmpfs-file-system (file-system (device "none") (mount-point "/dev") (type "devtmpfs") (check? #f) (needed-for-boot? #t))) (define %tty-gid 996) (define %pseudo-terminal-file-system statfs(2 ) returns DEVPTS_SUPER_MAGIC like libc 's getpt(3 ) expects ( and (file-system (device "none") (mount-point "/dev/pts") (type "devpts") (check? #f) (needed-for-boot? #f) (create-mount-point? #t) (options (string-append "gid=" (number->string %tty-gid) ",mode=620")))) (define %shared-memory-file-system (file-system (device "tmpfs") (mount-point "/dev/shm") (type "tmpfs") (check? #f) (flags '(no-suid no-dev)) (create-mount-point? #t))) (define %base-file-systems (list %devtmpfs-file-system %pseudo-terminal-file-system %shared-memory-file-system))
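For readers who do not parse Guile's define-record-type* at a glance, the <file-system> record above amounts to the following shape. This is a rough Haskell transliteration kept purely for illustration; it is not part of Guix, and the field defaults are copied from the Scheme source.

data Title = ByDevice | ByLabel | ByUuid deriving (Eq, Show)

data FileSystem = FileSystem
  { fsDevice           :: String
  , fsTitle            :: Title        -- default ByDevice ('device)
  , fsMountPoint       :: String
  , fsType             :: String
  , fsFlags            :: [String]     -- default []
  , fsOptions          :: Maybe String -- default Nothing (#f)
  , fsNeededForBoot    :: Bool         -- default False
  , fsCheck            :: Bool         -- default True
  , fsCreateMountPoint :: Bool         -- default False
  } deriving (Eq, Show)

-- %shared-memory-file-system from the source above, spelled out:
sharedMemoryFileSystem :: FileSystem
sharedMemoryFileSystem = FileSystem
  { fsDevice = "tmpfs", fsTitle = ByDevice
  , fsMountPoint = "/dev/shm", fsType = "tmpfs"
  , fsFlags = ["no-suid", "no-dev"], fsOptions = Just "size=50%"
  , fsNeededForBoot = False, fsCheck = False, fsCreateMountPoint = True
  }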
04f4e9845086aead276a673387dca8adff998e2d7d9e01110f5d4c14b9201c2b
garrigue/lablgl
lesson3.ml
* This code was created by 99 * If you 've found this code useful , please let me know . * * Visit Jeff at / * * Ported to O'Caml / lablglut by 8/02 * For port - specific issues , comments , etc . , please * contact * This code was created by Jeff Molofee '99 * If you've found this code useful, please let me know. * * Visit Jeff at / * * Ported to O'Caml/lablglut by Jeffrey Palmer 8/02 * For port-specific issues, comments, etc., please * contact *) let init_gl width height = GlDraw.shade_model `smooth; GlClear.color (0.0, 0.0, 0.0); GlClear.depth 1.0; GlClear.clear [`color; `depth]; Gl.enable `depth_test; GlFunc.depth_func `lequal; GlMisc.hint `perspective_correction `nicest let draw_gl_scene () = GlClear.clear [`color; `depth]; GlMat.load_identity (); (* Draw the triangle *) GlMat.translate3 (-1.5, 0.0, -6.0); GlDraw.begins `triangles; GlDraw.color ( 1.0, 0.0, 0.0); GlDraw.vertex3 ( 0.0, 1.0, 0.0); GlDraw.color ( 0.0, 1.0, 0.0); GlDraw.vertex3 (-1.0, -1.0, 0.0); GlDraw.color ( 0.0, 0.0, 1.0); GlDraw.vertex3 ( 1.0, -1.0, 0.0); GlDraw.ends (); (* Draw the square *) GlMat.translate3 (3.0, 0.0, 0.0); GlDraw.begins `quads; GlDraw.color ( 0.5, 0.5, 1.0); GlDraw.vertex3 (-1.0, 1.0, 0.0); GlDraw.vertex3 ( 1.0, 1.0, 0.0); GlDraw.vertex3 ( 1.0, -1.0, 0.0); GlDraw.vertex3 (-1.0, -1.0, 0.0); GlDraw.ends (); Glut.swapBuffers () (* Handle window reshape events *) let reshape_cb ~w ~h = let ratio = (float_of_int w) /. (float_of_int h) in GlDraw.viewport 0 0 w h; GlMat.mode `projection; GlMat.load_identity (); GluMat.perspective 45.0 ratio (0.1, 100.0); GlMat.mode `modelview; GlMat.load_identity () (* Handle keyboard events *) let keyboard_cb ~key ~x ~y = match key with ESC | _ -> () let main () = let width = 640 and height = 480 in ignore (Glut.init Sys.argv); Glut.initDisplayMode ~alpha:true ~depth:true ~double_buffer:true (); Glut.initWindowSize width height; ignore (Glut.createWindow "O'Caml OpenGL Lesson 3"); Glut.displayFunc draw_gl_scene; Glut.keyboardFunc keyboard_cb; Glut.reshapeFunc reshape_cb; init_gl width height; Glut.mainLoop () let _ = main ()
null
https://raw.githubusercontent.com/garrigue/lablgl/d76e4ac834b6d803e7a6c07c3b71bff0e534614f/LablGlut/examples/nehe/lesson3.ml
ocaml
Draw the triangle Draw the square Handle window reshape events Handle keyboard events
* This code was created by 99 * If you 've found this code useful , please let me know . * * Visit Jeff at / * * Ported to O'Caml / lablglut by 8/02 * For port - specific issues , comments , etc . , please * contact * This code was created by Jeff Molofee '99 * If you've found this code useful, please let me know. * * Visit Jeff at / * * Ported to O'Caml/lablglut by Jeffrey Palmer 8/02 * For port-specific issues, comments, etc., please * contact *) let init_gl width height = GlDraw.shade_model `smooth; GlClear.color (0.0, 0.0, 0.0); GlClear.depth 1.0; GlClear.clear [`color; `depth]; Gl.enable `depth_test; GlFunc.depth_func `lequal; GlMisc.hint `perspective_correction `nicest let draw_gl_scene () = GlClear.clear [`color; `depth]; GlMat.load_identity (); GlMat.translate3 (-1.5, 0.0, -6.0); GlDraw.begins `triangles; GlDraw.color ( 1.0, 0.0, 0.0); GlDraw.vertex3 ( 0.0, 1.0, 0.0); GlDraw.color ( 0.0, 1.0, 0.0); GlDraw.vertex3 (-1.0, -1.0, 0.0); GlDraw.color ( 0.0, 0.0, 1.0); GlDraw.vertex3 ( 1.0, -1.0, 0.0); GlDraw.ends (); GlMat.translate3 (3.0, 0.0, 0.0); GlDraw.begins `quads; GlDraw.color ( 0.5, 0.5, 1.0); GlDraw.vertex3 (-1.0, 1.0, 0.0); GlDraw.vertex3 ( 1.0, 1.0, 0.0); GlDraw.vertex3 ( 1.0, -1.0, 0.0); GlDraw.vertex3 (-1.0, -1.0, 0.0); GlDraw.ends (); Glut.swapBuffers () let reshape_cb ~w ~h = let ratio = (float_of_int w) /. (float_of_int h) in GlDraw.viewport 0 0 w h; GlMat.mode `projection; GlMat.load_identity (); GluMat.perspective 45.0 ratio (0.1, 100.0); GlMat.mode `modelview; GlMat.load_identity () let keyboard_cb ~key ~x ~y = match key with ESC | _ -> () let main () = let width = 640 and height = 480 in ignore (Glut.init Sys.argv); Glut.initDisplayMode ~alpha:true ~depth:true ~double_buffer:true (); Glut.initWindowSize width height; ignore (Glut.createWindow "O'Caml OpenGL Lesson 3"); Glut.displayFunc draw_gl_scene; Glut.keyboardFunc keyboard_cb; Glut.reshapeFunc reshape_cb; init_gl width height; Glut.mainLoop () let _ = main ()
0be80e8e516f7d801f778a675ee5fb82fdf6013391e6c8f25529f747bb3a2e49
anwarmamat/cmsc330fall20
public.ml
open OUnit2 open D10.Lexer open D10.Parser open D10.Interpreter let test_lex_empty _ = assert_equal [Tok_EOF] (lexer " ") let test_lex_one _ = assert_equal [Tok_Int 1; Tok_EOF] (lexer "1") let test_lex_one_plus_two _ = assert_equal [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_EOF] (lexer "1+ 2") let test_lex_one_times_two _ = assert_equal [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_EOF] (lexer "1 * 2") let test_long_lexer_1 _ = assert_equal [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_EOF] (lexer "1*2+3") let test_long_lexer_2 _ = assert_equal [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_Mult; Tok_Int 3; Tok_EOF] (lexer "1+ 2 *3") let test_long_lexer_3 _ = assert_equal [Tok_Int 1; Tok_Mult; Tok_LParen; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_RParen; Tok_EOF] (lexer "1 * (2 + 3)") let test_long_lexer_4 _ = assert_equal [Tok_LParen; Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_RParen; Tok_Mult; Tok_Int 3; Tok_EOF] (lexer "(1 + 2) * 3") let test_parse_one _ = assert_equal (Int 1) (parser [Tok_Int 1; Tok_EOF]) let test_parse_one_plus_two _ = assert_equal (Plus (Int 1, Int 2)) (parser [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_EOF]) let test_parse_one_times_two _ = assert_equal (Mult (Int 1, Int 2)) (parser [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_EOF]) let test_long_parser_1 _ = assert_equal (Plus (Mult (Int 1, Int 2), Int 3)) (parser [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_EOF]) let test_long_parser_2 _ = assert_equal (Plus (Int 1, Mult (Int 2, Int 3))) (parser [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_Mult; Tok_Int 3; Tok_EOF]) let test_long_parser_3 _ = assert_equal (Mult (Int 1, Plus (Int 2, Int 3))) (parser [Tok_Int 1; Tok_Mult; Tok_LParen; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_RParen; Tok_EOF]) let test_long_parser_4 _ = assert_equal (Mult (Plus (Int 1, Int 2), Int 3)) (parser [Tok_LParen; Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_RParen; Tok_Mult; Tok_Int 3; Tok_EOF]) let test_right_associative _ = assert_equal (Plus (Int 1, Plus (Int 2, Int 3))) (parser [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_EOF]) let test_parentheses _ = assert_equal (Plus (Plus (Int 1, Int 2), Int 3)) (parser [Tok_LParen; Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_RParen; Tok_Plus; Tok_Int 3; Tok_EOF]) let test_one_interpreter _ = assert_equal 1 (eval (Int 1)) let test_one_plus_two_interpreter _ = assert_equal 3 (eval (Plus (Int 1, Int 2))) let test_long_interpreter_1 _ = assert_equal 6 (eval (Plus (Int 1, Plus (Int 2, Int 3)))) let test_long_interpreter_2 _ = assert_equal 6 (eval (Plus (Plus (Int 1, Int 2), Int 3))) let test_long_interpreter_3 _ = assert_equal 6 (eval (Mult (Mult (Int 1, Int 2), Int 3))) let test_long_interpreter_4 _ = assert_equal 12 (eval (Plus (Mult (Plus (Int 1, Int 2), Int 3), Int 3))) let test_long_interpreter_5 _ = assert_equal 21 (eval (Mult (Plus (Mult (Int 3, Int 2), Int 1), Int 3))) let suite = "public" >::: [ "lex_empty" >:: test_lex_empty; "lex_one" >:: test_lex_one; "lex_one_plus_two" >:: test_lex_one_plus_two; "lex_one_times_two" >:: test_lex_one_times_two; "long_lexer_1" >:: test_long_lexer_1; "long_lexer_2" >:: test_long_lexer_2; "long_lexer_3" >:: test_long_lexer_3; "long_lexer_4" >:: test_long_lexer_4; "parse_one" >:: test_parse_one; "parse_one_plus_two" >:: test_parse_one_plus_two; "parse_one_times_two" >:: test_parse_one_times_two; "long_parser_1" >:: test_long_parser_1; "long_parser_2" >:: test_long_parser_2; "long_parser_3" >:: test_long_parser_3; "long_parser_4" >:: test_long_parser_4; "right_associative" >:: test_right_associative; "parentheses" >:: test_parentheses; "one_interpreter" >:: 
test_one_interpreter; "one_plus_two_interpreter" >:: test_one_plus_two_interpreter; "long_interpreter_1" >:: test_long_interpreter_1; "long_interpreter_2" >:: test_long_interpreter_2; "long_interpreter_3" >:: test_long_interpreter_3; "long_interpreter_4" >:: test_long_interpreter_4; "long_interpreter_5" >:: test_long_interpreter_5 ] let _ = run_test_tt_main suite
null
https://raw.githubusercontent.com/anwarmamat/cmsc330fall20/1f185a757ad86c37587ec15d580ea0ff728d9472/disc10/test/public/public.ml
ocaml
open OUnit2
open D10.Lexer
open D10.Parser
open D10.Interpreter

let test_lex_empty _ = assert_equal [Tok_EOF] (lexer " ")
let test_lex_one _ = assert_equal [Tok_Int 1; Tok_EOF] (lexer "1")
let test_lex_one_plus_two _ =
  assert_equal [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_EOF] (lexer "1+ 2")
let test_lex_one_times_two _ =
  assert_equal [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_EOF] (lexer "1 * 2")
let test_long_lexer_1 _ =
  assert_equal [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_EOF]
    (lexer "1*2+3")
let test_long_lexer_2 _ =
  assert_equal [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_Mult; Tok_Int 3; Tok_EOF]
    (lexer "1+ 2 *3")
let test_long_lexer_3 _ =
  assert_equal
    [Tok_Int 1; Tok_Mult; Tok_LParen; Tok_Int 2; Tok_Plus; Tok_Int 3;
     Tok_RParen; Tok_EOF]
    (lexer "1 * (2 + 3)")
let test_long_lexer_4 _ =
  assert_equal
    [Tok_LParen; Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_RParen; Tok_Mult;
     Tok_Int 3; Tok_EOF]
    (lexer "(1 + 2) * 3")

let test_parse_one _ = assert_equal (Int 1) (parser [Tok_Int 1; Tok_EOF])
let test_parse_one_plus_two _ =
  assert_equal (Plus (Int 1, Int 2))
    (parser [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_EOF])
let test_parse_one_times_two _ =
  assert_equal (Mult (Int 1, Int 2))
    (parser [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_EOF])
let test_long_parser_1 _ =
  assert_equal (Plus (Mult (Int 1, Int 2), Int 3))
    (parser [Tok_Int 1; Tok_Mult; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_EOF])
let test_long_parser_2 _ =
  assert_equal (Plus (Int 1, Mult (Int 2, Int 3)))
    (parser [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_Mult; Tok_Int 3; Tok_EOF])
let test_long_parser_3 _ =
  assert_equal (Mult (Int 1, Plus (Int 2, Int 3)))
    (parser
       [Tok_Int 1; Tok_Mult; Tok_LParen; Tok_Int 2; Tok_Plus; Tok_Int 3;
        Tok_RParen; Tok_EOF])
let test_long_parser_4 _ =
  assert_equal (Mult (Plus (Int 1, Int 2), Int 3))
    (parser
       [Tok_LParen; Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_RParen; Tok_Mult;
        Tok_Int 3; Tok_EOF])
let test_right_associative _ =
  assert_equal (Plus (Int 1, Plus (Int 2, Int 3)))
    (parser [Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_Plus; Tok_Int 3; Tok_EOF])
let test_parentheses _ =
  assert_equal (Plus (Plus (Int 1, Int 2), Int 3))
    (parser
       [Tok_LParen; Tok_Int 1; Tok_Plus; Tok_Int 2; Tok_RParen; Tok_Plus;
        Tok_Int 3; Tok_EOF])

let test_one_interpreter _ = assert_equal 1 (eval (Int 1))
let test_one_plus_two_interpreter _ =
  assert_equal 3 (eval (Plus (Int 1, Int 2)))
let test_long_interpreter_1 _ =
  assert_equal 6 (eval (Plus (Int 1, Plus (Int 2, Int 3))))
let test_long_interpreter_2 _ =
  assert_equal 6 (eval (Plus (Plus (Int 1, Int 2), Int 3)))
let test_long_interpreter_3 _ =
  assert_equal 6 (eval (Mult (Mult (Int 1, Int 2), Int 3)))
let test_long_interpreter_4 _ =
  assert_equal 12 (eval (Plus (Mult (Plus (Int 1, Int 2), Int 3), Int 3)))
let test_long_interpreter_5 _ =
  assert_equal 21 (eval (Mult (Plus (Mult (Int 3, Int 2), Int 1), Int 3)))

let suite =
  "public" >::: [
    "lex_empty" >:: test_lex_empty;
    "lex_one" >:: test_lex_one;
    "lex_one_plus_two" >:: test_lex_one_plus_two;
    "lex_one_times_two" >:: test_lex_one_times_two;
    "long_lexer_1" >:: test_long_lexer_1;
    "long_lexer_2" >:: test_long_lexer_2;
    "long_lexer_3" >:: test_long_lexer_3;
    "long_lexer_4" >:: test_long_lexer_4;
    "parse_one" >:: test_parse_one;
    "parse_one_plus_two" >:: test_parse_one_plus_two;
    "parse_one_times_two" >:: test_parse_one_times_two;
    "long_parser_1" >:: test_long_parser_1;
    "long_parser_2" >:: test_long_parser_2;
    "long_parser_3" >:: test_long_parser_3;
    "long_parser_4" >:: test_long_parser_4;
    "right_associative" >:: test_right_associative;
    "parentheses" >:: test_parentheses;
    "one_interpreter" >:: test_one_interpreter;
    "one_plus_two_interpreter" >:: test_one_plus_two_interpreter;
    "long_interpreter_1" >:: test_long_interpreter_1;
    "long_interpreter_2" >:: test_long_interpreter_2;
    "long_interpreter_3" >:: test_long_interpreter_3;
    "long_interpreter_4" >:: test_long_interpreter_4;
    "long_interpreter_5" >:: test_long_interpreter_5
  ]

let _ = run_test_tt_main suite
2faa537a31ae4ca9dc327b9d044a706d5316b7b106d417bfb3bdb01b7035f93e
Frama-C/Frama-C-snapshot
ast_init.ml
open Cil_types

let apply _ =
  let f = Globals.Functions.find_by_name "f" in
  let s = Kernel_function.find_first_stmt f in
  let l = Kernel_function.find_all_enclosing_blocks s in
  List.iter
    (fun b ->
       b.bstmts <-
         Cil.mkStmtOneInstr (Skip (Cil_datatype.Stmt.loc s)) :: b.bstmts)
    l;
  Ast.mark_as_grown ()

let () = Ast.apply_after_computed apply

let run () = Ast.compute (); File.pretty_ast ()

let () = Db.Main.extend run
null
https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/tests/syntax/ast_init.ml
ocaml
open Cil_types

let apply _ =
  let f = Globals.Functions.find_by_name "f" in
  let s = Kernel_function.find_first_stmt f in
  let l = Kernel_function.find_all_enclosing_blocks s in
  List.iter
    (fun b ->
       b.bstmts <-
         Cil.mkStmtOneInstr (Skip (Cil_datatype.Stmt.loc s)) :: b.bstmts)
    l;
  Ast.mark_as_grown ()

let () = Ast.apply_after_computed apply

let run () = Ast.compute (); File.pretty_ast ()

let () = Db.Main.extend run
626d7065524af23b876e8b84d8c0a40cba78b8d3b783f39444b8a08831bbb564
tonsky/advent2018
day3.clj
(ns advent2018.day3
  (:require [clojure.string :as str]
            [clojure.set :as set]
            [clojure.walk :as walk]))

(def input (slurp "inputs/day3"))

(def lines (str/split input #"\n"))

(defn parse-int [s]
  (Integer/parseInt s))

(defn parse-line [l]
  (let [[_ id l t w h] (re-matches #"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)" l)]
    [id
     (parse-int l)
     (parse-int t)
     (+ (parse-int l) (parse-int w))
     (+ (parse-int t) (parse-int h))]))

(def rects (mapv parse-line lines))

(defn rect->map [[_ x0 y0 x1 y1]]
  (into {}
    (for [x (range x0 x1)
          y (range y0 y1)]
      [[x y] 1])))

(defn part1 []
  (->> (apply merge-with + (map rect->map rects))
       (filter (fn [[k v]] (> v 1)))
       (count)))

#_(part1)

(defn doesnt-overlap? [[_ x11 y11 x12 y12] [_ x21 y21 x22 y22]]
  (or (>= x21 x12)
      (<= x22 x11)
      (>= y21 y12)
      (<= y22 y11)))

(defn part2 []
  (->> (for [r1 rects
             :when (every? #(or (= % r1) (doesnt-overlap? r1 %)) rects)]
         r1)
       ffirst))

#_(part2)
null
https://raw.githubusercontent.com/tonsky/advent2018/6f8d15bf37a150a288e3447df7766c362f7086e9/src/advent2018/day3.clj
clojure
(ns advent2018.day3
  (:require [clojure.string :as str]
            [clojure.set :as set]
            [clojure.walk :as walk]))

(def input (slurp "inputs/day3"))

(def lines (str/split input #"\n"))

(defn parse-int [s]
  (Integer/parseInt s))

(defn parse-line [l]
  (let [[_ id l t w h] (re-matches #"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)" l)]
    [id
     (parse-int l)
     (parse-int t)
     (+ (parse-int l) (parse-int w))
     (+ (parse-int t) (parse-int h))]))

(def rects (mapv parse-line lines))

(defn rect->map [[_ x0 y0 x1 y1]]
  (into {}
    (for [x (range x0 x1)
          y (range y0 y1)]
      [[x y] 1])))

(defn part1 []
  (->> (apply merge-with + (map rect->map rects))
       (filter (fn [[k v]] (> v 1)))
       (count)))

#_(part1)

(defn doesnt-overlap? [[_ x11 y11 x12 y12] [_ x21 y21 x22 y22]]
  (or (>= x21 x12)
      (<= x22 x11)
      (>= y21 y12)
      (<= y22 y11)))

(defn part2 []
  (->> (for [r1 rects
             :when (every? #(or (= % r1) (doesnt-overlap? r1 %)) rects)]
         r1)
       ffirst))

#_(part2)
68f7160d3da55314b6dc5a441d4f30d08199192b7f2a2656b0015665f24a736c
facebook/duckling
Rules.hs
Copyright ( c ) 2016 - present , Facebook , Inc. -- All rights reserved. -- -- This source code is licensed under the BSD-style license found in the -- LICENSE file in the root directory of this source tree. {-# LANGUAGE GADTs #-} # LANGUAGE LambdaCase # {-# LANGUAGE OverloadedStrings #-} module Duckling.AmountOfMoney.VI.Rules ( rules ) where import Data.Maybe import Data.String import Prelude import Duckling.AmountOfMoney.Helpers import Duckling.AmountOfMoney.Types (Currency(..), AmountOfMoneyData (..)) import Duckling.Dimensions.Types import Duckling.Numeral.Helpers (isNatural, isPositive) import Duckling.Numeral.Types (NumeralData (..)) import Duckling.Types import qualified Duckling.AmountOfMoney.Types as TAmountOfMoney import qualified Duckling.Numeral.Types as TNumeral ruleUnitAmount :: Rule ruleUnitAmount = Rule { name = "<unit> <amount>" , pattern = [ Predicate isCurrencyOnly , Predicate isPositive ] , prod = \case (Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.currency = c}: Token Numeral NumeralData{TNumeral.value = v}: _) -> Just . Token AmountOfMoney . withValue v $ currencyOnly c _ -> Nothing } ruleNg :: Rule ruleNg = Rule { name = "đồng" , pattern = [ regex "đ(ồng)?" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly VND } ruleDollar :: Rule ruleDollar = Rule { name = "$" , pattern = [ regex "đô( la)?( mỹ)?" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Dollar } ruleVnd :: Rule ruleVnd = Rule { name = "VNĐ" , pattern = [ regex "vn(Đ|D|\\$)" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly VND } ruleCent :: Rule ruleCent = Rule { name = "cent" , pattern = [ regex "xen|xu|cắc|penn(y|ies)" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Cent } rulePounds :: Rule rulePounds = Rule { name = "£" , pattern = [ regex "pounds?|pao" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Pound } ruleIntersect :: Rule ruleIntersect = Rule { name = "intersect" , pattern = [ Predicate isWithoutCents , Predicate isNatural ] , prod = \case (Token AmountOfMoney fd: Token Numeral NumeralData{TNumeral.value = c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleIntersectAndNumeral :: Rule ruleIntersectAndNumeral = Rule { name = "intersect and number" , pattern = [ Predicate isWithoutCents , regex "và" , Predicate isNatural ] , prod = \case (Token AmountOfMoney fd: _: Token Numeral NumeralData{TNumeral.value = c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleIntersectXXuxen :: Rule ruleIntersectXXuxen = Rule { name = "intersect (X xu|xen)" , pattern = [ Predicate isWithoutCents , Predicate isCents ] , prod = \case (Token AmountOfMoney fd: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleIntersectVXXuxen :: Rule ruleIntersectVXXuxen = Rule { name = "intersect (và X xu|xen)" , pattern = [ Predicate isWithoutCents , regex "và" , Predicate isCents ] , prod = \case (Token AmountOfMoney fd: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleDirham :: Rule ruleDirham = Rule { name = "AED" , pattern = [ regex "AED\\.|dirhams?" ] , prod = \_ -> Just . 
Token AmountOfMoney $ currencyOnly AED } rulePrecision :: Rule rulePrecision = Rule { name = "about|exactly <amount-of-money>" , pattern = [ regex "đúng|chính xác|khoảng( chừng)?|tầm|xấp xỉ|gần" , Predicate isMoneyWithValue ] , prod = \case (_:token:_) -> Just token _ -> Nothing } ruleIntervalBetweenNumeral :: Rule ruleIntervalBetweenNumeral = Rule { name = "between|from <numeral> to|and <amount-of-money>" , pattern = [ regex "từ|giữa|khoảng" , Predicate isPositive , regex "đến|tới|và" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token Numeral NumeralData{TNumeral.value = from}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) | from < to -> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c _ -> Nothing } ruleIntervalBetween :: Rule ruleIntervalBetween = Rule { name = "từ|giữa <amount-of-money> đến|tới|và <amount-of-money>" , pattern = [ regex "từ|giữa" , Predicate isSimpleAmountOfMoney , regex "đến|tới|và" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from, TAmountOfMoney.currency = c1}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c2}: _) | from < to && c1 == c2 -> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1 _ -> Nothing } ruleIntervalNumeralDash :: Rule ruleIntervalNumeralDash = Rule { name = "<numeral> - <amount-of-money>" , pattern = [ Predicate isNatural , regex "-" , Predicate isSimpleAmountOfMoney ] , prod = \case (Token Numeral NumeralData{TNumeral.value = from}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) | from < to-> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c _ -> Nothing } ruleIntervalDash :: Rule ruleIntervalDash = Rule { name = "<amount-of-money> - <amount-of-money>" , pattern = [ Predicate isSimpleAmountOfMoney , regex "-" , Predicate isSimpleAmountOfMoney ] , prod = \case (Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from, TAmountOfMoney.currency = c1}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c2}: _) | from < to && c1 == c2 -> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1 _ -> Nothing } ruleIntervalMax :: Rule ruleIntervalMax = Rule { name = "under/less/lower/no more than <amount-of-money>" , pattern = [ regex "dưới|(bé|ít|thấp|kém|không( cao| nhiều)?) hơn|không (đến|tới|quá)|(nhiều|cao) nhất" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c _ -> Nothing } ruleIntervalMax2 :: Rule ruleIntervalMax2 = Rule { name = "under <amount-of-money>" , pattern = [ regex "từ" , Predicate isSimpleAmountOfMoney , regex "trở xuống" ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c _ -> Nothing } ruleIntervalMin :: Rule ruleIntervalMin = Rule { name = "over/above/at least/more than <amount-of-money>" , pattern = [ regex "trên|(lớn |nhiều |cao |không (thấp|ít|kém) )?hơn|(ít|thấp) nhất" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . 
Token AmountOfMoney . withMin to $ currencyOnly c _ -> Nothing } ruleIntervalMin2 :: Rule ruleIntervalMin2 = Rule { name = "above <amount-of-money>" , pattern = [ regex "từ" , Predicate isSimpleAmountOfMoney , regex "trở lên" ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . Token AmountOfMoney . withMin to $ currencyOnly c _ -> Nothing } rules :: [Rule] rules = [ ruleUnitAmount , ruleCent , ruleDirham , ruleDollar , ruleIntersect , ruleIntersectAndNumeral , ruleIntersectVXXuxen , ruleIntersectXXuxen , ruleIntervalBetweenNumeral , ruleIntervalBetween , ruleIntervalDash , ruleIntervalNumeralDash , ruleIntervalMax , ruleIntervalMax2 , ruleIntervalMin , ruleIntervalMin2 , ruleNg , rulePounds , rulePrecision , ruleVnd ]
null
https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/AmountOfMoney/VI/Rules.hs
haskell
All rights reserved. This source code is licensed under the BSD-style license found in the LICENSE file in the root directory of this source tree. # LANGUAGE GADTs # # LANGUAGE OverloadedStrings #
Copyright ( c ) 2016 - present , Facebook , Inc. # LANGUAGE LambdaCase # module Duckling.AmountOfMoney.VI.Rules ( rules ) where import Data.Maybe import Data.String import Prelude import Duckling.AmountOfMoney.Helpers import Duckling.AmountOfMoney.Types (Currency(..), AmountOfMoneyData (..)) import Duckling.Dimensions.Types import Duckling.Numeral.Helpers (isNatural, isPositive) import Duckling.Numeral.Types (NumeralData (..)) import Duckling.Types import qualified Duckling.AmountOfMoney.Types as TAmountOfMoney import qualified Duckling.Numeral.Types as TNumeral ruleUnitAmount :: Rule ruleUnitAmount = Rule { name = "<unit> <amount>" , pattern = [ Predicate isCurrencyOnly , Predicate isPositive ] , prod = \case (Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.currency = c}: Token Numeral NumeralData{TNumeral.value = v}: _) -> Just . Token AmountOfMoney . withValue v $ currencyOnly c _ -> Nothing } ruleNg :: Rule ruleNg = Rule { name = "đồng" , pattern = [ regex "đ(ồng)?" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly VND } ruleDollar :: Rule ruleDollar = Rule { name = "$" , pattern = [ regex "đô( la)?( mỹ)?" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Dollar } ruleVnd :: Rule ruleVnd = Rule { name = "VNĐ" , pattern = [ regex "vn(Đ|D|\\$)" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly VND } ruleCent :: Rule ruleCent = Rule { name = "cent" , pattern = [ regex "xen|xu|cắc|penn(y|ies)" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Cent } rulePounds :: Rule rulePounds = Rule { name = "£" , pattern = [ regex "pounds?|pao" ] , prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Pound } ruleIntersect :: Rule ruleIntersect = Rule { name = "intersect" , pattern = [ Predicate isWithoutCents , Predicate isNatural ] , prod = \case (Token AmountOfMoney fd: Token Numeral NumeralData{TNumeral.value = c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleIntersectAndNumeral :: Rule ruleIntersectAndNumeral = Rule { name = "intersect and number" , pattern = [ Predicate isWithoutCents , regex "và" , Predicate isNatural ] , prod = \case (Token AmountOfMoney fd: _: Token Numeral NumeralData{TNumeral.value = c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleIntersectXXuxen :: Rule ruleIntersectXXuxen = Rule { name = "intersect (X xu|xen)" , pattern = [ Predicate isWithoutCents , Predicate isCents ] , prod = \case (Token AmountOfMoney fd: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleIntersectVXXuxen :: Rule ruleIntersectVXXuxen = Rule { name = "intersect (và X xu|xen)" , pattern = [ Predicate isWithoutCents , regex "và" , Predicate isCents ] , prod = \case (Token AmountOfMoney fd: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}: _) -> Just . Token AmountOfMoney $ withCents c fd _ -> Nothing } ruleDirham :: Rule ruleDirham = Rule { name = "AED" , pattern = [ regex "AED\\.|dirhams?" ] , prod = \_ -> Just . 
Token AmountOfMoney $ currencyOnly AED } rulePrecision :: Rule rulePrecision = Rule { name = "about|exactly <amount-of-money>" , pattern = [ regex "đúng|chính xác|khoảng( chừng)?|tầm|xấp xỉ|gần" , Predicate isMoneyWithValue ] , prod = \case (_:token:_) -> Just token _ -> Nothing } ruleIntervalBetweenNumeral :: Rule ruleIntervalBetweenNumeral = Rule { name = "between|from <numeral> to|and <amount-of-money>" , pattern = [ regex "từ|giữa|khoảng" , Predicate isPositive , regex "đến|tới|và" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token Numeral NumeralData{TNumeral.value = from}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) | from < to -> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c _ -> Nothing } ruleIntervalBetween :: Rule ruleIntervalBetween = Rule { name = "từ|giữa <amount-of-money> đến|tới|và <amount-of-money>" , pattern = [ regex "từ|giữa" , Predicate isSimpleAmountOfMoney , regex "đến|tới|và" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from, TAmountOfMoney.currency = c1}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c2}: _) | from < to && c1 == c2 -> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1 _ -> Nothing } ruleIntervalNumeralDash :: Rule ruleIntervalNumeralDash = Rule { name = "<numeral> - <amount-of-money>" , pattern = [ Predicate isNatural , regex "-" , Predicate isSimpleAmountOfMoney ] , prod = \case (Token Numeral NumeralData{TNumeral.value = from}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) | from < to-> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c _ -> Nothing } ruleIntervalDash :: Rule ruleIntervalDash = Rule { name = "<amount-of-money> - <amount-of-money>" , pattern = [ Predicate isSimpleAmountOfMoney , regex "-" , Predicate isSimpleAmountOfMoney ] , prod = \case (Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from, TAmountOfMoney.currency = c1}: _: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c2}: _) | from < to && c1 == c2 -> Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1 _ -> Nothing } ruleIntervalMax :: Rule ruleIntervalMax = Rule { name = "under/less/lower/no more than <amount-of-money>" , pattern = [ regex "dưới|(bé|ít|thấp|kém|không( cao| nhiều)?) hơn|không (đến|tới|quá)|(nhiều|cao) nhất" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c _ -> Nothing } ruleIntervalMax2 :: Rule ruleIntervalMax2 = Rule { name = "under <amount-of-money>" , pattern = [ regex "từ" , Predicate isSimpleAmountOfMoney , regex "trở xuống" ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c _ -> Nothing } ruleIntervalMin :: Rule ruleIntervalMin = Rule { name = "over/above/at least/more than <amount-of-money>" , pattern = [ regex "trên|(lớn |nhiều |cao |không (thấp|ít|kém) )?hơn|(ít|thấp) nhất" , Predicate isSimpleAmountOfMoney ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . 
Token AmountOfMoney . withMin to $ currencyOnly c _ -> Nothing } ruleIntervalMin2 :: Rule ruleIntervalMin2 = Rule { name = "above <amount-of-money>" , pattern = [ regex "từ" , Predicate isSimpleAmountOfMoney , regex "trở lên" ] , prod = \case (_: Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to, TAmountOfMoney.currency = c}: _) -> Just . Token AmountOfMoney . withMin to $ currencyOnly c _ -> Nothing } rules :: [Rule] rules = [ ruleUnitAmount , ruleCent , ruleDirham , ruleDollar , ruleIntersect , ruleIntersectAndNumeral , ruleIntersectVXXuxen , ruleIntersectXXuxen , ruleIntervalBetweenNumeral , ruleIntervalBetween , ruleIntervalDash , ruleIntervalNumeralDash , ruleIntervalMax , ruleIntervalMax2 , ruleIntervalMin , ruleIntervalMin2 , ruleNg , rulePounds , rulePrecision , ruleVnd ]
6c9664d2b3960104f03538103e44e2ec6a6aa5d236d72bfa0832384cc281fa32
easyuc/EasyUC
ucParseFile.ml
(* UcParseFile module *)

(* Parse a DSL Specification *)

open EcLocation
open EcSymbols
open UcMessage
open UcLexer

module L = Lexing

let lexbuf_from_channel file ch =
  let lexbuf = Lexing.from_channel ch in
  lexbuf.Lexing.lex_curr_p <-
    { Lexing.pos_fname = file;
      Lexing.pos_lnum = 1;
      Lexing.pos_bol = 0;
      Lexing.pos_cnum = 0 };
  lexbuf

type file_or_id =
  (* file name, interpreted relative to working directory, if not fully
     qualified *)
  | FOID_File of string
  (* root name of .uc file, initial letter capitalized, and without ".uc"
     and without "/"s, normally located in file that was lexed *)
  | FOID_Id of symbol located

let foid_to_str foid =
  match foid with
  | FOID_File s -> s
  | FOID_Id id -> unloc id

let parse_file_or_id foid =
  let prelude_dir = UcConfig.uc_prelude_dir in
  let inc_dirs = UcState.get_include_dirs () in
  let (ch, file) =
    match foid with
    | FOID_File file ->
        (try (open_in file, file) with
         | Sys_error _ ->
             non_loc_error_message
               (fun ppf ->
                  Format.fprintf ppf "@[unable@ to@ open@ file:@ %s@]" file))
    | FOID_Id id ->
        let uid = unloc id in
        (match UcUtils.find_file uid ".uc" prelude_dir inc_dirs with
         | None ->
             error_message (loc id)
               (fun ppf ->
                  Format.fprintf ppf "@[unable@ to@ find@ .uc@ file:@ %s@]" uid)
         | Some qual_file ->
             (try (open_in qual_file, qual_file) with
              | Sys_error _ ->
                  error_message (loc id)
                    (fun ppf ->
                       Format.fprintf ppf
                         "@[unable@ to@ open@ file:@ %s@]" qual_file))) in
  let lexbuf = lexbuf_from_channel file ch in
  try
    (let res = (UcParser.spec read lexbuf, file) in
     close_in ch; res)
  with
  | UcParser.Error ->
      (error_message  (* no need to close channel *)
         (EcLocation.make lexbuf.L.lex_start_p lexbuf.L.lex_curr_p)
         (fun ppf -> Format.fprintf ppf "@[parse@ error@]"))
null
https://raw.githubusercontent.com/easyuc/EasyUC/0ee14ef8b024a8e7acde1035d06afecbdcaec990/uc-dsl/ucdsl-proj/src/ucParseFile.ml
ocaml
file name, interpreted relative to working directory, if not fully qualified no need to close channel
(* UcParseFile module *)

(* Parse a DSL Specification *)

open EcLocation
open EcSymbols
open UcMessage
open UcLexer

module L = Lexing

let lexbuf_from_channel file ch =
  let lexbuf = Lexing.from_channel ch in
  lexbuf.Lexing.lex_curr_p <-
    { Lexing.pos_fname = file;
      Lexing.pos_lnum = 1;
      Lexing.pos_bol = 0;
      Lexing.pos_cnum = 0 };
  lexbuf

type file_or_id =
  | FOID_File of string
  (* root name of .uc file, initial letter capitalized, and without ".uc"
     and without "/"s, normally located in file that was lexed *)
  | FOID_Id of symbol located

let foid_to_str foid =
  match foid with
  | FOID_File s -> s
  | FOID_Id id -> unloc id

let parse_file_or_id foid =
  let prelude_dir = UcConfig.uc_prelude_dir in
  let inc_dirs = UcState.get_include_dirs () in
  let (ch, file) =
    match foid with
    | FOID_File file ->
        (try (open_in file, file) with
         | Sys_error _ ->
             non_loc_error_message
               (fun ppf ->
                  Format.fprintf ppf "@[unable@ to@ open@ file:@ %s@]" file))
    | FOID_Id id ->
        let uid = unloc id in
        (match UcUtils.find_file uid ".uc" prelude_dir inc_dirs with
         | None ->
             error_message (loc id)
               (fun ppf ->
                  Format.fprintf ppf "@[unable@ to@ find@ .uc@ file:@ %s@]" uid)
         | Some qual_file ->
             (try (open_in qual_file, qual_file) with
              | Sys_error _ ->
                  error_message (loc id)
                    (fun ppf ->
                       Format.fprintf ppf
                         "@[unable@ to@ open@ file:@ %s@]" qual_file))) in
  let lexbuf = lexbuf_from_channel file ch in
  try
    (let res = (UcParser.spec read lexbuf, file) in
     close_in ch; res)
  with
  | UcParser.Error ->
      (error_message
         (EcLocation.make lexbuf.L.lex_start_p lexbuf.L.lex_curr_p)
         (fun ppf -> Format.fprintf ppf "@[parse@ error@]"))
edc92634da2522ca54fce9e27354b02d66d38a520dec987627b539f46b7a1e18
alan-j-hu/camyll
url.ml
let relativize ~src ~dest =
  let chop_common_prefix url1 url2 =
    let rec loop url1 url2 =
      match (url1, url2) with
      | x :: xs, y :: ys when x = y -> loop xs ys
      | url1, url2 -> (url1, url2)
    in
    loop url1 url2
  in
  if String.length dest > 0 && String.get dest 0 = '/' then
    let src_dir =
      (* chop the filename off the end of the source path *)
      match List.rev (String.split_on_char '/' src) with
      | _ :: xs -> List.rev xs
      | [] -> failwith "Unreachable"
    in
    let dest_file, dest_dir =
      (* chop the filename off the end of the dest path *)
      match List.rev (String.split_on_char '/' dest) with
      | x :: xs -> (x, List.rev xs)
      | [] -> failwith "Unreachable"
    in
    let src_dir, dest_dir = chop_common_prefix src_dir dest_dir in
    let url =
      [ dest_file ]
      |> ( @ ) dest_dir
      |> List.rev_append (List.init (List.length src_dir) (Fun.const ".."))
      |> String.concat "/"
    in
    if url = "" then "./" else url
  else dest
null
https://raw.githubusercontent.com/alan-j-hu/camyll/7caa78b2930e77c40c3906f383a5264a99588821/src/url.ml
ocaml
chop the filename off the end of the source path chop the filename off the end of the dest path
let relativize ~src ~dest =
  let chop_common_prefix url1 url2 =
    let rec loop url1 url2 =
      match (url1, url2) with
      | x :: xs, y :: ys when x = y -> loop xs ys
      | url1, url2 -> (url1, url2)
    in
    loop url1 url2
  in
  if String.length dest > 0 && String.get dest 0 = '/' then
    let src_dir =
      match List.rev (String.split_on_char '/' src) with
      | _ :: xs -> List.rev xs
      | [] -> failwith "Unreachable"
    in
    let dest_file, dest_dir =
      match List.rev (String.split_on_char '/' dest) with
      | x :: xs -> (x, List.rev xs)
      | [] -> failwith "Unreachable"
    in
    let src_dir, dest_dir = chop_common_prefix src_dir dest_dir in
    let url =
      [ dest_file ]
      |> ( @ ) dest_dir
      |> List.rev_append (List.init (List.length src_dir) (Fun.const ".."))
      |> String.concat "/"
    in
    if url = "" then "./" else url
  else dest
d4883e9504c92f9835125c2e92b4fcd811098a90680ab4fc10c7494e1db9f07a
district0x/district-designer
graphql_resolvers.cljs
(ns users.server.graphql-resolvers)

(def resolvers-map {})
null
https://raw.githubusercontent.com/district0x/district-designer/3f2a585b866b6e173f2cb7b32ecadcffa33e2804/src/users/server/graphql_resolvers.cljs
clojure
(ns users.server.graphql-resolvers)

(def resolvers-map {})
3ef5dd5f1401e1ccf23473e3dd31cd11737a112079affd0ff2ac785c84e15f5d
AbstractMachinesLab/caramel
typecore.mli
(**************************************************************************) (* *) (* OCaml *) (* *) , projet Cristal , INRIA Rocquencourt (* *) Copyright 1996 Institut National de Recherche en Informatique et (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) the GNU Lesser General Public License version 2.1 , with the (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************) (* Type inference for the core language *) open Asttypes open Types (* This variant is used to print improved error messages, and does not affect the behavior of the typechecker itself. It describes possible explanation for types enforced by a keyword of the language; e.g. "if" requires the condition to be of type bool, and the then-branch to be of type unit if there is no else branch; "for" requires indices to be of type int, and the body to be of type unit. *) type type_forcing_context = | If_conditional | If_no_else_branch | While_loop_conditional | While_loop_body | For_loop_start_index | For_loop_stop_index | For_loop_body | Assert_condition | Sequence_left_hand_side | When_guard (* The combination of a type and a "type forcing context". The intent is that it describes a type that is "expected" (required) by the context. If unifying with such a type fails, then the "explanation" field explains why it was required, in order to display a more enlightening error message. *) type type_expected = private { ty: type_expr; explanation: type_forcing_context option; } val mk_expected: ?explanation:type_forcing_context -> type_expr -> type_expected val is_nonexpansive: Typedtree.expression -> bool type existential_restriction = | At_toplevel (** no existential types at the toplevel *) | In_group (** nor with [let ... and ...] *) | In_rec (** or recursive definition *) | With_attributes (** or [let[@any_attribute] = ...] *) | In_class_args (** or in class arguments [class c (...) = ...] *) | In_class_def (** or in [class c = let ... in ...] 
*) | In_self_pattern (** or in self pattern *) val type_binding: Env.t -> rec_flag -> Parsetree.value_binding list -> Annot.ident option -> Typedtree.value_binding list * Env.t val type_let: existential_restriction -> Env.t -> rec_flag -> Parsetree.value_binding list -> Annot.ident option -> Typedtree.value_binding list * Env.t val type_expression: Env.t -> Parsetree.expression -> Typedtree.expression val type_class_arg_pattern: string -> Env.t -> Env.t -> arg_label -> Parsetree.pattern -> Typedtree.pattern * (Ident.t * Ident.t * type_expr) list * Env.t * Env.t val type_self_pattern: string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern -> Typedtree.pattern * (Ident.t * type_expr) Meths.t ref * (Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) Vars.t ref * Env.t * Env.t * Env.t val check_partial: ?lev:int -> Env.t -> type_expr -> Location.t -> Typedtree.case list -> Typedtree.partial val type_expect: ?in_function:(Location.t * type_expr) -> Env.t -> Parsetree.expression -> type_expected -> Typedtree.expression val type_exp: Env.t -> Parsetree.expression -> Typedtree.expression val type_approx: Env.t -> Parsetree.expression -> type_expr val type_argument: Env.t -> Parsetree.expression -> type_expr -> type_expr -> Typedtree.expression val option_some: Env.t -> Typedtree.expression -> Typedtree.expression val option_none: Env.t -> type_expr -> Location.t -> Typedtree.expression val extract_option_type: Env.t -> type_expr -> type_expr val generalizable: int -> type_expr -> bool type delayed_check val delayed_checks: delayed_check list ref val reset_delayed_checks: unit -> unit val force_delayed_checks: unit -> unit val name_pattern : string -> Typedtree.pattern list -> Ident.t val name_cases : string -> Typedtree.case list -> Ident.t val self_coercion : (Path.t * Location.t list ref) list ref type error = | Constructor_arity_mismatch of Longident.t * int * int | Label_mismatch of Longident.t * Ctype.Unification_trace.t | Pattern_type_clash of Ctype.Unification_trace.t * Typedtree.pattern_desc option | Or_pattern_type_clash of Ident.t * Ctype.Unification_trace.t | Multiply_bound_variable of string | Orpat_vars of Ident.t * Ident.t list | Expr_type_clash of Ctype.Unification_trace.t * type_forcing_context option * Typedtree.expression_desc option | Apply_non_function of type_expr | Apply_wrong_label of arg_label * type_expr | Label_multiply_defined of string | Label_missing of Ident.t list | Label_not_mutable of Longident.t | Wrong_name of string * type_expected * string * Path.t * string * string list | Name_type_mismatch of string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list | Invalid_format of string | Undefined_method of type_expr * string * string list option | Undefined_inherited_method of string * string list | Virtual_class of Longident.t | Private_type of type_expr | Private_label of Longident.t * type_expr | Private_constructor of constructor_description * type_expr | Unbound_instance_variable of string * string list | Instance_variable_not_mutable of string | Not_subtype of Ctype.Unification_trace.t * Ctype.Unification_trace.t | Outside_class | Value_multiply_overridden of string | Coercion_failure of type_expr * type_expr * Ctype.Unification_trace.t * bool | Too_many_arguments of bool * type_expr * type_forcing_context option | Abstract_wrong_label of arg_label * type_expr * type_forcing_context option | Scoping_let_module of string * type_expr | Not_a_variant_type of Longident.t | Incoherent_label_order | Less_general of string * 
Ctype.Unification_trace.t | Modules_not_allowed | Cannot_infer_signature | Not_a_packed_module of type_expr | Unexpected_existential of existential_restriction * string * string list | Invalid_interval | Invalid_for_loop_index | No_value_clauses | Exception_pattern_disallowed | Mixed_value_and_exception_patterns_under_guard | Inlined_record_escape | Inlined_record_expected | Unrefuted_pattern of Typedtree.pattern | Invalid_extension_constructor_payload | Not_an_extension_constructor | Literal_overflow of string | Unknown_literal of string * char | Illegal_letrec_pat | Illegal_letrec_expr | Illegal_class_expr | Empty_pattern | Letop_type_clash of string * Ctype.Unification_trace.t | Andop_type_clash of string * Ctype.Unification_trace.t | Bindings_type_clash of Ctype.Unification_trace.t exception Error of Location.t * Env.t * error exception Error_forward of Location.error val report_error: loc:Location.t -> Env.t -> error -> Location.error * @deprecated . Use { ! } , { ! } . Forward declaration , to be filled in by val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref (* Forward declaration, to be filled in by Typemod.type_open *) val type_open: (?used_slot:bool ref -> override_flag -> Env.t -> Location.t -> Longident.t loc -> Path.t * Env.t) ref (* Forward declaration, to be filled in by Typemod.type_open_decl *) val type_open_decl: (?used_slot:bool ref -> Env.t -> Parsetree.open_declaration -> Typedtree.open_declaration * Types.signature * Env.t) ref Forward declaration , to be filled in by Typeclass.class_structure val type_object: (Env.t -> Location.t -> Parsetree.class_structure -> Typedtree.class_structure * Types.class_signature * string list) ref val type_package: (Env.t -> Parsetree.module_expr -> Path.t -> Longident.t list -> Typedtree.module_expr * type_expr list) ref val create_package_type : Location.t -> Env.t -> Longident.t * (Longident.t * Parsetree.core_type) list -> Path.t * (Longident.t * Typedtree.core_type) list * Types.type_expr val constant: Parsetree.constant -> (Asttypes.constant, error) result val check_recursive_bindings : Env.t -> Typedtree.value_binding list -> unit val check_recursive_class_bindings : Env.t -> Ident.t list -> Typedtree.class_expr list -> unit
null
https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocaml-lsp-1.4.0/ocaml-lsp-server/vendor/merlin/src/ocaml/typing/410/typecore.mli
ocaml
************************************************************************ OCaml en Automatique. All rights reserved. This file is distributed under the terms of special exception on linking described in the file LICENSE. ************************************************************************ Type inference for the core language This variant is used to print improved error messages, and does not affect the behavior of the typechecker itself. It describes possible explanation for types enforced by a keyword of the language; e.g. "if" requires the condition to be of type bool, and the then-branch to be of type unit if there is no else branch; "for" requires indices to be of type int, and the body to be of type unit. The combination of a type and a "type forcing context". The intent is that it describes a type that is "expected" (required) by the context. If unifying with such a type fails, then the "explanation" field explains why it was required, in order to display a more enlightening error message. * no existential types at the toplevel * nor with [let ... and ...] * or recursive definition * or [let[@any_attribute] = ...] * or in class arguments [class c (...) = ...] * or in [class c = let ... in ...] * or in self pattern Forward declaration, to be filled in by Typemod.type_open Forward declaration, to be filled in by Typemod.type_open_decl
, projet Cristal , INRIA Rocquencourt Copyright 1996 Institut National de Recherche en Informatique et the GNU Lesser General Public License version 2.1 , with the open Asttypes open Types type type_forcing_context = | If_conditional | If_no_else_branch | While_loop_conditional | While_loop_body | For_loop_start_index | For_loop_stop_index | For_loop_body | Assert_condition | Sequence_left_hand_side | When_guard type type_expected = private { ty: type_expr; explanation: type_forcing_context option; } val mk_expected: ?explanation:type_forcing_context -> type_expr -> type_expected val is_nonexpansive: Typedtree.expression -> bool type existential_restriction = val type_binding: Env.t -> rec_flag -> Parsetree.value_binding list -> Annot.ident option -> Typedtree.value_binding list * Env.t val type_let: existential_restriction -> Env.t -> rec_flag -> Parsetree.value_binding list -> Annot.ident option -> Typedtree.value_binding list * Env.t val type_expression: Env.t -> Parsetree.expression -> Typedtree.expression val type_class_arg_pattern: string -> Env.t -> Env.t -> arg_label -> Parsetree.pattern -> Typedtree.pattern * (Ident.t * Ident.t * type_expr) list * Env.t * Env.t val type_self_pattern: string -> type_expr -> Env.t -> Env.t -> Env.t -> Parsetree.pattern -> Typedtree.pattern * (Ident.t * type_expr) Meths.t ref * (Ident.t * Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) Vars.t ref * Env.t * Env.t * Env.t val check_partial: ?lev:int -> Env.t -> type_expr -> Location.t -> Typedtree.case list -> Typedtree.partial val type_expect: ?in_function:(Location.t * type_expr) -> Env.t -> Parsetree.expression -> type_expected -> Typedtree.expression val type_exp: Env.t -> Parsetree.expression -> Typedtree.expression val type_approx: Env.t -> Parsetree.expression -> type_expr val type_argument: Env.t -> Parsetree.expression -> type_expr -> type_expr -> Typedtree.expression val option_some: Env.t -> Typedtree.expression -> Typedtree.expression val option_none: Env.t -> type_expr -> Location.t -> Typedtree.expression val extract_option_type: Env.t -> type_expr -> type_expr val generalizable: int -> type_expr -> bool type delayed_check val delayed_checks: delayed_check list ref val reset_delayed_checks: unit -> unit val force_delayed_checks: unit -> unit val name_pattern : string -> Typedtree.pattern list -> Ident.t val name_cases : string -> Typedtree.case list -> Ident.t val self_coercion : (Path.t * Location.t list ref) list ref type error = | Constructor_arity_mismatch of Longident.t * int * int | Label_mismatch of Longident.t * Ctype.Unification_trace.t | Pattern_type_clash of Ctype.Unification_trace.t * Typedtree.pattern_desc option | Or_pattern_type_clash of Ident.t * Ctype.Unification_trace.t | Multiply_bound_variable of string | Orpat_vars of Ident.t * Ident.t list | Expr_type_clash of Ctype.Unification_trace.t * type_forcing_context option * Typedtree.expression_desc option | Apply_non_function of type_expr | Apply_wrong_label of arg_label * type_expr | Label_multiply_defined of string | Label_missing of Ident.t list | Label_not_mutable of Longident.t | Wrong_name of string * type_expected * string * Path.t * string * string list | Name_type_mismatch of string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list | Invalid_format of string | Undefined_method of type_expr * string * string list option | Undefined_inherited_method of string * string list | Virtual_class of Longident.t | Private_type of type_expr | Private_label of Longident.t * type_expr | Private_constructor 
of constructor_description * type_expr | Unbound_instance_variable of string * string list | Instance_variable_not_mutable of string | Not_subtype of Ctype.Unification_trace.t * Ctype.Unification_trace.t | Outside_class | Value_multiply_overridden of string | Coercion_failure of type_expr * type_expr * Ctype.Unification_trace.t * bool | Too_many_arguments of bool * type_expr * type_forcing_context option | Abstract_wrong_label of arg_label * type_expr * type_forcing_context option | Scoping_let_module of string * type_expr | Not_a_variant_type of Longident.t | Incoherent_label_order | Less_general of string * Ctype.Unification_trace.t | Modules_not_allowed | Cannot_infer_signature | Not_a_packed_module of type_expr | Unexpected_existential of existential_restriction * string * string list | Invalid_interval | Invalid_for_loop_index | No_value_clauses | Exception_pattern_disallowed | Mixed_value_and_exception_patterns_under_guard | Inlined_record_escape | Inlined_record_expected | Unrefuted_pattern of Typedtree.pattern | Invalid_extension_constructor_payload | Not_an_extension_constructor | Literal_overflow of string | Unknown_literal of string * char | Illegal_letrec_pat | Illegal_letrec_expr | Illegal_class_expr | Empty_pattern | Letop_type_clash of string * Ctype.Unification_trace.t | Andop_type_clash of string * Ctype.Unification_trace.t | Bindings_type_clash of Ctype.Unification_trace.t exception Error of Location.t * Env.t * error exception Error_forward of Location.error val report_error: loc:Location.t -> Env.t -> error -> Location.error * @deprecated . Use { ! } , { ! } . Forward declaration , to be filled in by val type_module: (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref val type_open: (?used_slot:bool ref -> override_flag -> Env.t -> Location.t -> Longident.t loc -> Path.t * Env.t) ref val type_open_decl: (?used_slot:bool ref -> Env.t -> Parsetree.open_declaration -> Typedtree.open_declaration * Types.signature * Env.t) ref Forward declaration , to be filled in by Typeclass.class_structure val type_object: (Env.t -> Location.t -> Parsetree.class_structure -> Typedtree.class_structure * Types.class_signature * string list) ref val type_package: (Env.t -> Parsetree.module_expr -> Path.t -> Longident.t list -> Typedtree.module_expr * type_expr list) ref val create_package_type : Location.t -> Env.t -> Longident.t * (Longident.t * Parsetree.core_type) list -> Path.t * (Longident.t * Typedtree.core_type) list * Types.type_expr val constant: Parsetree.constant -> (Asttypes.constant, error) result val check_recursive_bindings : Env.t -> Typedtree.value_binding list -> unit val check_recursive_class_bindings : Env.t -> Ident.t list -> Typedtree.class_expr list -> unit
c705658cfb70e1ab8b0aea4e2ac696683c6f72bd0758dcfa4640dd5f7430fb45
racket/gui
printer-dc.rkt
#lang racket/base (require racket/class racket/math racket/draw/private/local racket/draw/private/dc racket/draw/unsafe/cairo racket/draw/private/bitmap racket/draw/private/bitmap-dc racket/draw/private/record-dc racket/draw/private/ps-setup ffi/unsafe ffi/unsafe/objc "../../lock.rkt" "dc.rkt" "frame.rkt" "cg.rkt" "utils.rkt" "types.rkt" "queue.rkt") (provide (protect-out printer-dc% show-print-setup)) (import-class NSPrintOperation NSView NSGraphicsContext NSPrintInfo NSDictionary NSPageLayout NSNumber) (define NSPortraitOrientation 0) (define NSLandscapeOrientation 1) (define-cocoa NSPrintScalingFactor _id) (define-objc-class PrinterView NSView [wxb] [-a _BOOL (knowsPageRange: [_NSRange-pointer rng]) (set-NSRange-location! rng 1) (set-NSRange-length! rng (let ([wx (->wx wxb)]) (if wx (send wx get-page-count) 0))) #t] [-a _NSRect (rectForPage: [_NSInteger n]) (let ([wx (->wx wxb)]) (if wx (send wx get-rect-for-page n) (make-NSRect (make-NSPoint 0 0) (make-NSSize 10 10))))] [-a _void (beginPageInRect: [_NSRect aRect] atPlacement: [_NSPoint location]) (let ([wx (->wx wxb)]) (when wx (send wx start-page-at aRect))) (super-tell #:type _void beginPageInRect: #:type _NSRect aRect atPlacement: #:type _NSPoint location)] [-a _void (drawPageBorderWithSize: [_NSSize size]) (let ([wx (->wx wxb)]) (when wx (send wx draw-print-page self size)))]) (define (make-print-info [prev #f]) (as-objc-allocation-with-retain (tell (tell NSPrintInfo alloc) initWithDictionary: (if prev (tell prev dictionary) (tell NSDictionary dictionary))))) (define (get-scaling-factor print-info) 10.6 only : #; (tell #:type _CGFloat print-info scalingFactor) (atomically (with-autorelease (tell #:type _double (tell (tell print-info dictionary) objectForKey: NSPrintScalingFactor) doubleValue)))) (define (install-pss-to-print-info pss print-info) (tellv print-info setOrientation: #:type _int (if (eq? (send pss get-orientation) 'landscape) NSLandscapeOrientation NSPortraitOrientation)) (let ([scale (let ([x (box 0)] [y (box 0)]) (send pss get-scaling x y) (unbox y))]) 10.6 only : #; (tellv print-info setScalingFactor: #:type _CGFloat scale) (atomically (with-autorelease (tellv (tell print-info dictionary) setObject: (tell NSNumber numberWithDouble: #:type _double scale) forKey: NSPrintScalingFactor))))) (define NSOkButton 1) (define (show-print-setup parent) (promote-to-gui!) (force-global-flush-resume) (let* ([pss (current-ps-setup)] [print-info (let ([pi (send pss get-native)]) (or pi (let ([pi (make-print-info)]) (send pss set-native pi make-print-info) pi)))]) (install-pss-to-print-info pss print-info) (if (atomically (let ([front (get-front)]) (begin0 (= (tell #:type _NSInteger (tell NSPageLayout pageLayout) runModalWithPrintInfo: print-info) NSOkButton) (when front (tellv (send front get-cocoa-window) makeKeyAndOrderFront: #f))))) (begin (let ([o (tell #:type _int print-info orientation)]) (send pss set-orientation (if (= o NSLandscapeOrientation) 'landscape 'portrait))) (let ([s (get-scaling-factor print-info)]) (send pss set-scaling s s)) #t) #f))) (define printer-dc% (class (record-dc-mixin (dc-mixin bitmap-dc-backend%)) (init [parent #f]) (super-make-object (make-object quartz-bitmap% 1 1)) (inherit get-recorded-command reset-recording) (define pages null) (define/override (end-page) (set! 
pages (cons (get-recorded-command) pages)) (reset-recording)) (define print-info (or (let-values ([(pi copier) (send (current-ps-setup) get-native-copy)]) pi) (make-print-info))) (install-pss-to-print-info (current-ps-setup) print-info) (define-values (page-width page-height page-scaling) (let ([s (NSRect-size (tell #:type _NSRect print-info imageablePageBounds))] [scaling (get-scaling-factor print-info)]) (values (NSSize-width s) (NSSize-height s) scaling))) (define/override (get-size) (values (/ page-width page-scaling) (/ page-height page-scaling))) (define/override (get-device-scale) (values page-scaling page-scaling)) (define current-page 0) (define/public (get-page-count) (length pages)) (define/public (get-rect-for-page i) (make-NSRect (make-NSPoint 0 (* (sub1 i) page-height)) (make-NSSize page-width page-height))) (define/public (start-page-at r) (set! current-page (inexact->exact (round (/ (NSPoint-y (NSRect-origin r)) page-height))))) (define/public (draw-print-page view-cocoa s) (let ([f (tell #:type _NSRect view-cocoa frame)]) (tellv view-cocoa lockFocus) (let ([cg (tell #:type _CGContextRef (tell NSGraphicsContext currentContext) graphicsPort)] [s (tell #:type _NSSize print-info paperSize)] [b (tell #:type _NSRect print-info imageablePageBounds)]) (CGContextTranslateCTM cg 0 (/ (NSSize-height s) page-scaling)) (CGContextScaleCTM cg 1 -1) (CGContextTranslateCTM cg (/ (NSPoint-x (NSRect-origin b)) page-scaling) (/ (- (NSSize-height s) (+ (NSPoint-y (NSRect-origin b)) (NSSize-height (NSRect-size b)))) page-scaling)) (let* ([surface (cairo_quartz_surface_create_for_cg_context cg (inexact->exact (ceiling (/ page-width page-scaling))) (inexact->exact (ceiling (/ page-height page-scaling))))] [cr (cairo_create surface)]) (cairo_surface_destroy surface) (let ([dc (make-object (dc-mixin (class default-dc-backend% (define/override (get-cr) cr) (super-new))))]) (let ([proc (list-ref (reverse pages) current-page)]) (proc dc))) (cairo_destroy cr))) (tellv view-cocoa unlockFocus))) (define/override (end-doc) (promote-to-gui!) (force-global-flush-resume) (define view-cocoa (as-objc-allocation-with-retain (tell (tell PrinterView alloc) initWithFrame: #:type _NSRect (make-NSRect (make-NSPoint 0 0) (make-NSSize 10 10))))) (define op-cocoa (as-objc-allocation-with-retain (tell NSPrintOperation printOperationWithView: view-cocoa printInfo: print-info))) (set-ivar! view-cocoa wxb (->wxb this)) (atomically (let ([front (get-front)]) (tellv op-cocoa runOperation) (when front (tellv (send front get-cocoa-window) makeKeyAndOrderFront: #f)))))))
null
https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-lib/mred/private/wx/cocoa/printer-dc.rkt
racket
#lang racket/base (require racket/class racket/math racket/draw/private/local racket/draw/private/dc racket/draw/unsafe/cairo racket/draw/private/bitmap racket/draw/private/bitmap-dc racket/draw/private/record-dc racket/draw/private/ps-setup ffi/unsafe ffi/unsafe/objc "../../lock.rkt" "dc.rkt" "frame.rkt" "cg.rkt" "utils.rkt" "types.rkt" "queue.rkt") (provide (protect-out printer-dc% show-print-setup)) (import-class NSPrintOperation NSView NSGraphicsContext NSPrintInfo NSDictionary NSPageLayout NSNumber) (define NSPortraitOrientation 0) (define NSLandscapeOrientation 1) (define-cocoa NSPrintScalingFactor _id) (define-objc-class PrinterView NSView [wxb] [-a _BOOL (knowsPageRange: [_NSRange-pointer rng]) (set-NSRange-location! rng 1) (set-NSRange-length! rng (let ([wx (->wx wxb)]) (if wx (send wx get-page-count) 0))) #t] [-a _NSRect (rectForPage: [_NSInteger n]) (let ([wx (->wx wxb)]) (if wx (send wx get-rect-for-page n) (make-NSRect (make-NSPoint 0 0) (make-NSSize 10 10))))] [-a _void (beginPageInRect: [_NSRect aRect] atPlacement: [_NSPoint location]) (let ([wx (->wx wxb)]) (when wx (send wx start-page-at aRect))) (super-tell #:type _void beginPageInRect: #:type _NSRect aRect atPlacement: #:type _NSPoint location)] [-a _void (drawPageBorderWithSize: [_NSSize size]) (let ([wx (->wx wxb)]) (when wx (send wx draw-print-page self size)))]) (define (make-print-info [prev #f]) (as-objc-allocation-with-retain (tell (tell NSPrintInfo alloc) initWithDictionary: (if prev (tell prev dictionary) (tell NSDictionary dictionary))))) (define (get-scaling-factor print-info) 10.6 only : (tell #:type _CGFloat print-info scalingFactor) (atomically (with-autorelease (tell #:type _double (tell (tell print-info dictionary) objectForKey: NSPrintScalingFactor) doubleValue)))) (define (install-pss-to-print-info pss print-info) (tellv print-info setOrientation: #:type _int (if (eq? (send pss get-orientation) 'landscape) NSLandscapeOrientation NSPortraitOrientation)) (let ([scale (let ([x (box 0)] [y (box 0)]) (send pss get-scaling x y) (unbox y))]) 10.6 only : (tellv print-info setScalingFactor: #:type _CGFloat scale) (atomically (with-autorelease (tellv (tell print-info dictionary) setObject: (tell NSNumber numberWithDouble: #:type _double scale) forKey: NSPrintScalingFactor))))) (define NSOkButton 1) (define (show-print-setup parent) (promote-to-gui!) (force-global-flush-resume) (let* ([pss (current-ps-setup)] [print-info (let ([pi (send pss get-native)]) (or pi (let ([pi (make-print-info)]) (send pss set-native pi make-print-info) pi)))]) (install-pss-to-print-info pss print-info) (if (atomically (let ([front (get-front)]) (begin0 (= (tell #:type _NSInteger (tell NSPageLayout pageLayout) runModalWithPrintInfo: print-info) NSOkButton) (when front (tellv (send front get-cocoa-window) makeKeyAndOrderFront: #f))))) (begin (let ([o (tell #:type _int print-info orientation)]) (send pss set-orientation (if (= o NSLandscapeOrientation) 'landscape 'portrait))) (let ([s (get-scaling-factor print-info)]) (send pss set-scaling s s)) #t) #f))) (define printer-dc% (class (record-dc-mixin (dc-mixin bitmap-dc-backend%)) (init [parent #f]) (super-make-object (make-object quartz-bitmap% 1 1)) (inherit get-recorded-command reset-recording) (define pages null) (define/override (end-page) (set! 
pages (cons (get-recorded-command) pages)) (reset-recording)) (define print-info (or (let-values ([(pi copier) (send (current-ps-setup) get-native-copy)]) pi) (make-print-info))) (install-pss-to-print-info (current-ps-setup) print-info) (define-values (page-width page-height page-scaling) (let ([s (NSRect-size (tell #:type _NSRect print-info imageablePageBounds))] [scaling (get-scaling-factor print-info)]) (values (NSSize-width s) (NSSize-height s) scaling))) (define/override (get-size) (values (/ page-width page-scaling) (/ page-height page-scaling))) (define/override (get-device-scale) (values page-scaling page-scaling)) (define current-page 0) (define/public (get-page-count) (length pages)) (define/public (get-rect-for-page i) (make-NSRect (make-NSPoint 0 (* (sub1 i) page-height)) (make-NSSize page-width page-height))) (define/public (start-page-at r) (set! current-page (inexact->exact (round (/ (NSPoint-y (NSRect-origin r)) page-height))))) (define/public (draw-print-page view-cocoa s) (let ([f (tell #:type _NSRect view-cocoa frame)]) (tellv view-cocoa lockFocus) (let ([cg (tell #:type _CGContextRef (tell NSGraphicsContext currentContext) graphicsPort)] [s (tell #:type _NSSize print-info paperSize)] [b (tell #:type _NSRect print-info imageablePageBounds)]) (CGContextTranslateCTM cg 0 (/ (NSSize-height s) page-scaling)) (CGContextScaleCTM cg 1 -1) (CGContextTranslateCTM cg (/ (NSPoint-x (NSRect-origin b)) page-scaling) (/ (- (NSSize-height s) (+ (NSPoint-y (NSRect-origin b)) (NSSize-height (NSRect-size b)))) page-scaling)) (let* ([surface (cairo_quartz_surface_create_for_cg_context cg (inexact->exact (ceiling (/ page-width page-scaling))) (inexact->exact (ceiling (/ page-height page-scaling))))] [cr (cairo_create surface)]) (cairo_surface_destroy surface) (let ([dc (make-object (dc-mixin (class default-dc-backend% (define/override (get-cr) cr) (super-new))))]) (let ([proc (list-ref (reverse pages) current-page)]) (proc dc))) (cairo_destroy cr))) (tellv view-cocoa unlockFocus))) (define/override (end-doc) (promote-to-gui!) (force-global-flush-resume) (define view-cocoa (as-objc-allocation-with-retain (tell (tell PrinterView alloc) initWithFrame: #:type _NSRect (make-NSRect (make-NSPoint 0 0) (make-NSSize 10 10))))) (define op-cocoa (as-objc-allocation-with-retain (tell NSPrintOperation printOperationWithView: view-cocoa printInfo: print-info))) (set-ivar! view-cocoa wxb (->wxb this)) (atomically (let ([front (get-front)]) (tellv op-cocoa runOperation) (when front (tellv (send front get-cocoa-window) makeKeyAndOrderFront: #f)))))))
6c72179fb6ab2c92c93d560abdee00f45f7d200d2b152c56ad8e94f5bff389a4
anonymous-admin/anonymous
interpreter.erl
%%-------------------------------------------------------------------- @author ( C ) 2011 %% @doc interpreter.erl %% @end Created : 18 Nov 2011 by %%-------------------------------------------------------------------- -module(interpreter). -author('Sorush Arefipour'). -export([create_record/1, get_tracker_response_info/1, init/1, handle_cast/2, handle_call/3]). -compile(export_all). -behaviour(gen_server). -include("defs.hrl"). %% ************************************************************************************************************ %% ********************************************** External functions ****************************************** %% ************************************************************************************************************ %%-------------------------------------------------------------------- @author %% @doc interpreter.erl %% @spec Used by the supervisor to start the process %% @end %%-------------------------------------------------------------------- start_link() -> start_link([]). start_link(_Args) -> gen_server:start_link({local, interpreter}, ?MODULE, _Args, []). %% ************************************************************************************************************ %% ******************************************** Gen_server functions ******************************************** %% ************************************************************************************************************ %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec %% @end %%-------------------------------------------------------------------- init(_Args)-> gen_server:cast(msg_controller, {subscribe, interpreter, [{torrent_filepath, -1}]}), {ok,null}. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec %% Start the torrent process so it gets the torrent_info message. %% @end %%-------------------------------------------------------------------- handle_cast({notify, torrent_filepath, {_Id, Filepath}}, _Data)-> Record = create_record(Filepath), dynamic_supervisor:start_torrent(Record), gen_server:cast(msg_controller, {notify, torrent_info, {Record#torrent.id, Record}}), {noreply,_Data}. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl %% @spec %% @end %%-------------------------------------------------------------------- handle_call(_State,_From,_Data)-> Reply = ok, {reply,Reply,_Data}. %% ************************************************************************************************************ %% ********************************************** External functions ******************************************** %% ************************************************************************************************************ %%-------------------------------------------------------------------- @author %% @doc interpreter.erl %% @spec Function gets the directory as argument of a torrent file %% and make a record of all information in the torrent file. 
%% @end %%-------------------------------------------------------------------- create_record(FileName) -> Parsed_info = read_file(FileName), Info_hash_tracker = get_info_hash_tracker(Parsed_info), Info_hash_handshake = get_info_hash_handshake(Parsed_info), Announce = get_announce(Parsed_info), Creation_date = get_creation_date(Parsed_info), Comment = get_comment(Parsed_info), Created_by = get_created_by(Parsed_info), Encoding = get_encoding(Parsed_info), Files = get_files(Parsed_info, 0), Filename = get_filename(Parsed_info), Piece_length = get_piece_length(Parsed_info), Number_of_pieces = get_number_of_pieces(Parsed_info), File_length = get_file_length(Parsed_info), Pieces = get_pieces(Parsed_info), Bitfield = get_bitfield(Parsed_info), TrackerInfo = tracker_info_record(Announce, Info_hash_tracker, File_length), #torrent{id = list_to_atom(binary_to_list(Info_hash_handshake)), info_hash_tracker = Info_hash_tracker, announce = Announce, creation_date = Creation_date, comment = Comment, created_by = Created_by, encoding = Encoding, files = Files, filename = Filename, piece_length = Piece_length, number_of_pieces = Number_of_pieces,file_length = File_length, pieces = Pieces, bitfield = Bitfield, trackers = TrackerInfo, downloaded = "0", max_peers = "50", size = integer_to_list(File_length), left = integer_to_list(File_length) }. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes a list of announce , info hash and file length and %% creates a record per each announce and put all of records %% in a list. %% @end %%-------------------------------------------------------------------- tracker_info_record([], _, _)-> []; tracker_info_record([H|T], Info_hash, File_length)-> [#tracker_info{url = H, info_hash = Info_hash, peer_id = "-AZ4004-znmphhbrij37", port = "6881", uploaded = "0", downloaded = "0", left = integer_to_list(File_length), event = started, num_want = "50", interval = "1000"}|tracker_info_record(T, Info_hash, File_length)]. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It take string of directory as argument and read the content %% of it and passes the content to parser to be parsed. %% @end %%-------------------------------------------------------------------- read_file(FileName) -> {ok, FileContents} = file:read_file(FileName), {{dict, Info}, _Remainder} = parser:decode(FileContents), {info, Info}. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets the parsed information and after fetching info %% dictonary from it, it calculate the info hash that is being %% used for communicating with tracker. %% @end %%-------------------------------------------------------------------- get_info_hash_tracker({info, Info}) -> Information = dict:fetch(<<"info">>, Info), BencodedInfo = binary_to_list(parser:encode(Information)), hash_gen(encoder_tracker(BencodedInfo),2). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets the parsed information and after fetching info %% dictonary from it, it calculate the info hash that is being %% used for handshaking. %% @end %%-------------------------------------------------------------------- get_info_hash_handshake({info, Info}) -> Information = dict:fetch(<<"info">>, Info), encoder_handshake(parser:encode(Information)). 
%%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument and checks if the torrent has one announce or a list of announces and returns %% a list of one or more trackers %% @end %%-------------------------------------------------------------------- get_announce({info, Info}) -> Info_keys = dict:fetch_keys(Info), case lists:member(<<"announce-list">>, Info_keys) of true -> {list, Announce_list} = dict:fetch(<<"announce-list">>, Info), check_tcp(to_string(Announce_list)); false -> check_tcp([binary_to_list(dict:fetch(<<"announce">>, Info))]) end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument %% and returns the creation date while checking if the field is not %% empty, and if it is, it will return novalue. %% @end %%-------------------------------------------------------------------- get_creation_date({info, Info}) -> case dict:find(<<"creation date">>, Info) of {ok, Creation_date} -> Creation_date; error -> novalue end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument %% and returns the comment while checking if the field is not %% empty, and if it is, it will return novalue. %% @end %%-------------------------------------------------------------------- get_comment({info, Info}) -> case dict:find(<<"comment">>, Info) of {ok, Comment} -> binary_to_list(Comment); error -> novalue end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument %% and returns the created by while checking if the field is not %% empty, and if it is, it will return novalue. %% @end %%-------------------------------------------------------------------- get_created_by({info, Info}) -> case dict:find(<<"created by">>, Info) of {ok, Created_by} -> binary_to_list(Created_by); error -> novalue end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument %% and returns the encoding while checking if the field is not %% empty, and if it is, it will return novalue. %% @end %%-------------------------------------------------------------------- get_encoding({info, Info}) -> case dict:find(<<"encoding">>, Info) of {ok, Encoding} -> binary_to_list(Encoding); error -> novalue end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument %% and returns the list of files (each path with its length) together %% with the accumulated total length. %% @end %%-------------------------------------------------------------------- get_files({info, Info},FileSize) -> Info_dec_keys = get_info_dec_keys(Info), case lists:member(<<"files">>, Info_dec_keys) of true -> {list, Files_dict} = get_info_dec(<<"files">>, Info), files_interpreter(Files_dict,FileSize,[]); false -> Path = get_info_dec(<<"name">>, Info), Length = get_info_dec(<<"length">>, Info), {[[[Path],Length]],Length} end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument %% and returns the name of the file. 
%% @end %%-------------------------------------------------------------------- get_filename({info, Info}) -> Name = get_info_dec(<<"name">>, Info), binary_to_list(Name). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument and %% fetches the length of each piece from the info dictionary. %% @end %%-------------------------------------------------------------------- get_piece_length({info, Info}) -> get_info_dec(<<"piece length">>, Info). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets the parsed information as argument and %% after fetching the pieces from the info dictionary it calculates %% the number of pieces. %% @end %%-------------------------------------------------------------------- get_number_of_pieces({info, Info}) -> PieceHashes = get_info_dec(<<"pieces">>, Info), HashLength = 20, round(length(binary_to_list(PieceHashes)) / HashLength). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes the parsed information as argument and calculates the size of %% the download by summing the lengths of all files. %% @end %%-------------------------------------------------------------------- get_file_length({info, Info}) -> {_, Length} = get_files({info, Info}, 0), Length. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes the parsed information as argument and gets the content of the %% pieces field. %% @end %%-------------------------------------------------------------------- get_pieces({info, Info}) -> get_info_dec(<<"pieces">>, Info). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes the parsed information as argument and generates the bitfield %% of the torrent file. %% @end %%-------------------------------------------------------------------- get_bitfield({info, Info}) -> NumberOfPieces = get_number_of_pieces({info, Info}), create_bitfield_binary(NumberOfPieces). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets the response of the tracker and returns interval, complete (seeders), incomplete (leechers), and a list of peers with their IPs and Ports by %% calling the function peers_compact(). %% @end %%-------------------------------------------------------------------- get_tracker_response_info({{dict,Response},_}) -> Interval = dict:fetch(<<"interval">>,Response), Seeds = dict:fetch(<<"complete">>,Response), Leechers = dict:fetch(<<"incomplete">>,Response), PeersList = dict:fetch(<<"peers">>,Response), % {list,Peers_dict} = dict:fetch(<<"peers">>,Response), [Interval,Seeds,Leechers,peers_compact(binary_to_list(PeersList))]. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec The function gets a list as its argument and returns a list of IPs and Ports to get_tracker_response_info(). %% @end %%-------------------------------------------------------------------- peers_compact([])-> []; peers_compact([M1,M2,M3,M4,M5,M6|T]) -> IP = lists:concat([M1,'.',M2,'.',M3,'.',M4]), <<Port:16>> = list_to_binary([M5,M6]), [[IP,Port]|peers_compact(T)]. 
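%% Worked example (input bytes made up for illustration): the compact peer
%% format packs four IP bytes followed by a 2-byte big-endian port, so
%%   peers_compact([192,168,0,1,26,225])
%% evaluates to [["192.168.0.1",6881]], since <<26,225>> read as a 16-bit
%% integer is 26*256 + 225 = 6881.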
%%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes a list as argument and puts all names and sizes %% in a list and calls the function again recursively for %% all other files. %% @end %%-------------------------------------------------------------------- files_interpreter([],FileSize,Acc) -> {Acc,FileSize}; files_interpreter([H|T],FileSize,Acc) -> {dict, Info} = H, Length = dict:fetch(<<"length">>, Info), Size = FileSize + Length, {list, Path} = dict:fetch(<<"path">>, Info), files_interpreter(T,Size, Acc ++ [[Path, Length]]). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets a list of announces and returns the udp trackers in %% a list. %% @end %%-------------------------------------------------------------------- check_udp([])-> []; check_udp([H|T]) -> [H1|_] = H, case H1 == 117 of true -> [H] ++ check_udp(T); false -> check_udp(T) end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets a list of announces and returns the tcp trackers in %% a list. %% @end %%-------------------------------------------------------------------- check_tcp([])-> []; check_tcp([H|T]) -> [H1|_] = H, case H1 == 104 of true -> [H] ++ check_tcp(T); false -> check_tcp(T) end. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes the data of the info dictionary and encodes it for the %% tracker infohash. %% @end %%-------------------------------------------------------------------- encoder_tracker(Data)-> crypto:start(), Info_hash = [ hd(integer_to_list(N, 16)) || << N:4 >> <= crypto:sha(Data) ], Info_hash. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes the data of the info dictionary and encodes it for the %% handshake infohash. %% @end %%-------------------------------------------------------------------- encoder_handshake(Data)-> crypto:start(), Info_hash = crypto:sha(Data), Info_hash. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes the string of the infohash and puts a % before each group of two %% characters. %% @end %%-------------------------------------------------------------------- hash_gen(List, Num) when Num < length(List)-> {A, B} = lists:split(Num,List), hash_gen(A ++ [37] ++ B, Num+3 ); hash_gen(List, _) -> [37] ++ List. %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It takes a list of binaries, makes each of them a list (string) and puts all of them %% in a list. %% @end %%-------------------------------------------------------------------- to_string([]) -> []; to_string([H|T]) -> {list,B} = H, [binary_to_list(hd(B))] ++ to_string(T). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl %% @spec Function takes a key and data as arguments and fetches the %% information according to the given tag name. %% @end %%-------------------------------------------------------------------- get_info_dec(Name, Info) -> {dict,{dict, Info_dec}} = {dict, dict:fetch(<<"info">>, Info)}, dict:fetch(Name, Info_dec). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets the content of the torrent file and returns the key names of the %% info dictionary of the torrent file. 
%% @end %%-------------------------------------------------------------------- get_info_dec_keys(Info) -> {dict,{dict, Info_dec}} = {dict, dict:fetch(<<"info">>, Info)}, dict:fetch_keys(Info_dec). %%-------------------------------------------------------------------- @author %% @doc interpreter.erl @spec It gets the number of pieces and generates the bitfield accordingly %% @end %%-------------------------------------------------------------------- create_bitfield_binary(0) -> <<>>; create_bitfield_binary(NumberOfPieces) -> LeaderLength = (8 - NumberOfPieces rem 8) rem 8, create_bitfield_binary(<<>>, LeaderLength, NumberOfPieces). create_bitfield_binary(Binary, 0, 0) -> Binary; create_bitfield_binary(Binary, 0, NumberOfPieces) -> NewBinary = <<Binary/bitstring, 1:1>>, create_bitfield_binary(NewBinary, 0, NumberOfPieces - 1); create_bitfield_binary(Binary, LeaderLength, NumberOfPieces) -> NewBinary = <<Binary/bitstring, 0:1>>, create_bitfield_binary(NewBinary, LeaderLength - 1, NumberOfPieces).
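%% Worked example (piece count chosen for illustration): for a torrent with
%% 3 pieces, LeaderLength = (8 - 3 rem 8) rem 8 = 5, so the bitfield is five
%% 0-bits followed by three 1-bits:
%%   create_bitfield_binary(3) =:= <<2#00000111>> =:= <<7>>.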
null
https://raw.githubusercontent.com/anonymous-admin/anonymous/0d178f8f02dce74d5f76d78f81f70da9229a77cd/Testenviro/interpreter.erl
erlang
-------------------------------------------------------------------- @doc interpreter.erl @end -------------------------------------------------------------------- ************************************************************************************************************ ********************************************** External functions ****************************************** ************************************************************************************************************ -------------------------------------------------------------------- @doc interpreter.erl @spec Used by the supervisor to start the process @end -------------------------------------------------------------------- ************************************************************************************************************ ******************************************** Gen_server functions ******************************************** ************************************************************************************************************ -------------------------------------------------------------------- @doc interpreter.erl @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl Start the torrent process so it gets the torrent_info message. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl @spec @end -------------------------------------------------------------------- ************************************************************************************************************ ********************************************** External functions ******************************************** ************************************************************************************************************ -------------------------------------------------------------------- @doc interpreter.erl @spec Function gets the directory as argument of a torrent file and make a record of all information in the torrent file. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl creates a record per each announce and put all of records in a list. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl of it and passes the content to parser to be parsed. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl dictonary from it, it calculate the info hash that is being used for communicating with tracker. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl dictonary from it, it calculate the info hash that is being used for handshaking. 
@end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl a list of tracker or trackers @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl and returns the creation date while checking if field is not empty, and if it is, it will return novalue. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl and returns the comment while checking if field is not empty, and if it is, it will return novalue. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl and returns the created by while checking if field is not empty, and if it is, it will return novalue. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl and returns the encoding while checking if field is not empty, and if it is, it will return novalue. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl and returns the creation date while checking if field is not empty, and if it is, it will return novalue. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl and returns the name of the file. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl fetches length of each piece in info dictionary. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl after feching the pieces from info dictionary it calculates number of pieces. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl file by multipying piece length and number of pieces. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl pieces field. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl of the torrent file. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl calling the function peers_compact(). @end -------------------------------------------------------------------- {list,Peers_dict} = dict:fetch(<<"peers">>,Response), -------------------------------------------------------------------- @doc interpreter.erl @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl in a list and call the the function again recursively for all other files. 
@end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl a list. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl a list. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl tracker infohash. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl handshake infohash. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl between each two character. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl in a list. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl @spec Function takes a key and data as arguments and fetches the information according to given tag name. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl info dictionary of the torrent file. @end -------------------------------------------------------------------- -------------------------------------------------------------------- @doc interpreter.erl @end --------------------------------------------------------------------
@author ( C ) 2011 Created : 18 Nov 2011 by -module(interpreter). -author('Sorush Arefipour'). -export([create_record/1, get_tracker_response_info/1, init/1, handle_cast/2, handle_call/3]). -compile(export_all). -behaviour(gen_server). -include("defs.hrl"). @author start_link() -> start_link([]). start_link(_Args) -> gen_server:start_link({local, interpreter}, ?MODULE, _Args, []). @author @spec init(_Args)-> gen_server:cast(msg_controller, {subscribe, interpreter, [{torrent_filepath, -1}]}), {ok,null}. @author @spec handle_cast({notify, torrent_filepath, {_Id, Filepath}}, _Data)-> Record = create_record(Filepath), dynamic_supervisor:start_torrent(Record), gen_server:cast(msg_controller, {notify, torrent_info, {Record#torrent.id, Record}}), {noreply,_Data}. @author handle_call(_State,_From,_Data)-> Reply = ok, {reply,Reply,_Data}. @author create_record(FileName) -> Parsed_info = read_file(FileName), Info_hash_tracker = get_info_hash_tracker(Parsed_info), Info_hash_handshake = get_info_hash_handshake(Parsed_info), Announce = get_announce(Parsed_info), Creation_date = get_creation_date(Parsed_info), Comment = get_comment(Parsed_info), Created_by = get_created_by(Parsed_info), Encoding = get_encoding(Parsed_info), Files = get_files(Parsed_info, 0), Filename = get_filename(Parsed_info), Piece_length = get_piece_length(Parsed_info), Number_of_pieces = get_number_of_pieces(Parsed_info), File_length = get_file_length(Parsed_info), Pieces = get_pieces(Parsed_info), Bitfield = get_bitfield(Parsed_info), TrackerInfo = tracker_info_record(Announce, Info_hash_tracker, File_length), #torrent{id = list_to_atom(binary_to_list(Info_hash_handshake)), info_hash_tracker = Info_hash_tracker, announce = Announce, creation_date = Creation_date, comment = Comment, created_by = Created_by, encoding = Encoding, files = Files, filename = Filename, piece_length = Piece_length, number_of_pieces = Number_of_pieces,file_length = File_length, pieces = Pieces, bitfield = Bitfield, trackers = TrackerInfo, downloaded = "0", max_peers = "50", size = integer_to_list(File_length), left = integer_to_list(File_length) }. @author @spec It takes a list of announce , info hash and file length and tracker_info_record([], _, _)-> []; tracker_info_record([H|T], Info_hash, File_length)-> [#tracker_info{url = H, info_hash = Info_hash, peer_id = "-AZ4004-znmphhbrij37", port = "6881", uploaded = "0", downloaded = "0", left = integer_to_list(File_length), event = started, num_want = "50", interval = "1000"}|tracker_info_record(T, Info_hash, File_length)]. @author @spec It take string of directory as argument and read the content read_file(FileName) -> {ok, FileContents} = file:read_file(FileName), {{dict, Info}, _Remainder} = parser:decode(FileContents), {info, Info}. @author @spec It gets the parsed information and after fetching info get_info_hash_tracker({info, Info}) -> Information = dict:fetch(<<"info">>, Info), BencodedInfo = binary_to_list(parser:encode(Information)), hash_gen(encoder_tracker(BencodedInfo),2). @author @spec It gets the parsed information and after fetching info get_info_hash_handshake({info, Info}) -> Information = dict:fetch(<<"info">>, Info), encoder_handshake(parser:encode(Information)). 
@author @spec The function gets the parsed information as argument and check if the torrent has one announce or list of announces and return get_announce({info, Info}) -> Info_keys = dict:fetch_keys(Info), case lists:member(<<"announce-list">>, Info_keys) of true -> {list, Announce_list} = dict:fetch(<<"announce-list">>, Info), check_tcp(to_string(Announce_list)); false -> check_tcp([binary_to_list(dict:fetch(<<"announce">>, Info))]) end. @author @spec The function gets the parsed information as argument and get_creation_date({info, Info}) -> case dict:find(<<"creation date">>, Info) of {ok, Creation_date} -> Creation_date; error -> novalue end. @author @spec The function gets the parsed information as argument and get_comment({info, Info}) -> case dict:find(<<"comment">>, Info) of {ok, Comment} -> binary_to_list(Comment); error -> novalue end. @author @spec The function gets the parsed information as argument and get_created_by({info, Info}) -> case dict:find(<<"created by">>, Info) of {ok, Created_by} -> binary_to_list(Created_by); error -> novalue end. @author @spec The function gets the parsed information as argument and get_encoding({info, Info}) -> case dict:find(<<"encoding">>, Info) of {ok, Encoding} -> binary_to_list(Encoding); error -> novalue end. @author @spec The function gets the parsed information as argument and get_files({info, Info},FileSize) -> Info_dec_keys = get_info_dec_keys(Info), case lists:member(<<"files">>, Info_dec_keys) of true -> {list, Files_dict} = get_info_dec(<<"files">>, Info), files_interpreter(Files_dict,FileSize,[]); false -> Path = get_info_dec(<<"name">>, Info), Length = get_info_dec(<<"length">>, Info), {[[[Path],Length]],Length} end. @author @spec The function gets the parsed information as argument and get_filename({info, Info}) -> Name = get_info_dec(<<"name">>, Info), binary_to_list(Name). @author @spec The function gets the parsed information as argument and get_piece_length({info, Info}) -> get_info_dec(<<"piece length">>, Info). @author @spec The function gets the parsed information as argument and get_number_of_pieces({info, Info}) -> PieceHashes = get_info_dec(<<"pieces">>, Info), HashLength = 20, round(length(binary_to_list(PieceHashes)) / HashLength). @author @spec It take string of directory as argument and calculate size of get_file_length({info, Info}) -> {_, Length} = get_files({info, Info}, 0), Length. @author @spec It take string of directory as argument and get the content get_pieces({info, Info}) -> get_info_dec(<<"pieces">>, Info). @author @spec It take string of directory as argument and generate bitfield get_bitfield({info, Info}) -> NumberOfPieces = get_number_of_pieces({info, Info}), create_bitfield_binary(NumberOfPieces). @author @spec It gets reponse of th tracker and returns interval , , incomplete , and list of peers with their IPs and Ports by get_tracker_response_info({{dict,Response},_}) -> Interval = dict:fetch(<<"interval">>,Response), Seeds = dict:fetch(<<"complete">>,Response), Leechers = dict:fetch(<<"incomplete">>,Response), PeersList = dict:fetch(<<"peers">>,Response), [Interval,Seeds,Leechers,peers_compact(binary_to_list(PeersList))]. @author @spec The funcion gets a list as its argument and returns a list of IPs and Ports to get_tracker_reponse_info ( ) . peers_compact([])-> []; peers_compact([M1,M2,M3,M4,M5,M6|T]) -> IP = lists:concat([M1,'.',M2,'.',M3,'.',M4]), <<Port:16>> = list_to_binary([M5,M6]), [[IP,Port]|peers_compact(T)]. 
@author @spec It takes a list as argument and put all names and size in files_interpreter([],FileSize,Acc) -> {Acc,FileSize}; files_interpreter([H|T],FileSize,Acc) -> {dict, Info} = H, Length = dict:fetch(<<"length">>, Info), Size = FileSize + Length, {list, Path} = dict:fetch(<<"path">>, Info), files_interpreter(T,Size, Acc ++ [[Path, Length]]). @author @spec It gets a list of announce and return the udp tracker in check_udp([])-> []; check_udp([H|T]) -> [H1|_] = H, case H1 == 117 of true -> [H] ++ check_udp(T); false -> check_udp(T) end. @author @spec It gets a list of announce and return the tcp tracker in check_tcp([])-> []; check_tcp([H|T]) -> [H1|_] = H, case H1 == 104 of true -> [H] ++ check_tcp(T); false -> check_tcp(T) end. @author @spec It takes the data of info dictionary and encode it for encoder_tracker(Data)-> crypto:start(), Info_hash = [ hd(integer_to_list(N, 16)) || << N:4 >> <= crypto:sha(Data) ], Info_hash. @author @spec It takes the data of info dictionary and encode it for encoder_handshake(Data)-> crypto:start(), Info_hash = crypto:sha(Data), Info_hash. @author hash_gen(List, Num) when Num < length(List)-> {A, B} = lists:split(Num,List), hash_gen(A ++ [37] ++ B, Num+3 ); hash_gen(List, _) -> [37] ++ List. @author @spec It take list and binary and make them list and put all of them to_string([]) -> []; to_string([H|T]) -> {list,B} = H, [binary_to_list(hd(B))] ++ to_string(T). @author get_info_dec(Name, Info) -> {dict,{dict, Info_dec}} = {dict, dict:fetch(<<"info">>, Info)}, dict:fetch(Name, Info_dec). @author @spec It gets content of torrent file and returns keys name of get_info_dec_keys(Info) -> {dict,{dict, Info_dec}} = {dict, dict:fetch(<<"info">>, Info)}, dict:fetch_keys(Info_dec). @author @spec It gets number of pieces and generate bitfield accordingly create_bitfield_binary(0) -> <<>>; create_bitfield_binary(NumberOfPieces) -> LeaderLength = (8 - NumberOfPieces rem 8) rem 8, create_bitfield_binary(<<>>, LeaderLength, NumberOfPieces). create_bitfield_binary(Binary, 0, 0) -> Binary; create_bitfield_binary(Binary, 0, NumberOfPieces) -> NewBinary = <<Binary/bitstring, 1:1>>, create_bitfield_binary(NewBinary, 0, NumberOfPieces - 1); create_bitfield_binary(Binary, LeaderLength, NumberOfPieces) -> NewBinary = <<Binary/bitstring, 0:1>>, create_bitfield_binary(NewBinary, LeaderLength - 1, NumberOfPieces).
c669fe245bc03ee21f791877a1dd3810d1b7177900f370dbceb8dcbe0b14acdf
jbracker/supermonad
ScopeLevel.hs
{- ****************************************************************************** * H M T C * * * * Module: ScopeLevel * * Purpose: Definition of and operation on scope level. * * Authors: Henrik Nilsson * * * * Copyright (c) Henrik Nilsson, 2013 * * * ****************************************************************************** -} -- | ScopeLevel: Definition of and operation on scope level module ScopeLevel ( ScopeLvl, -- Scope level topMajScopeLvl, -- :: Int topScopeLvl, -- :: ScopeLvl majScopeLvl, -- :: ScopeLvl -> Int minScopeLvl, -- :: ScopeLvl -> Int incMajScopeLvl, -- :: ScopeLvl -> ScopeLvl incMinScopeLvl -- :: ScopeLvl -> ScopeLvl ) where ------------------------------------------------------------------------------ -- Scope level ------------------------------------------------------------------------------ -- | Scope level. -- Pair of major (depth of procedure/function) nesting -- and minor (depth of let-command nesting) levels. type ScopeLvl = (Int, Int) topMajScopeLvl :: Int topMajScopeLvl = 0 topScopeLvl :: ScopeLvl topScopeLvl = (topMajScopeLvl, 0) majScopeLvl :: ScopeLvl -> Int majScopeLvl = fst minScopeLvl :: ScopeLvl -> Int minScopeLvl = snd incMajScopeLvl :: ScopeLvl -> ScopeLvl incMajScopeLvl (majl, _) = (majl + 1, 0) incMinScopeLvl :: ScopeLvl -> ScopeLvl incMinScopeLvl (majl, minl) = (majl, minl + 1)
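-- Usage sketch (illustrative values; assumes the (major, minor) pairing
-- documented above, with minScopeLvl reading the second component):
--   incMajScopeLvl topScopeLvl                  == (1, 0)
--   incMinScopeLvl (incMajScopeLvl topScopeLvl) == (1, 1)
--   majScopeLvl (1, 1) == 1,   minScopeLvl (1, 1) == 1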
null
https://raw.githubusercontent.com/jbracker/supermonad/2595396a225a65b1dce6ed9a1ce59960f392a55b/examples/monad/hmtc/monad-param/ScopeLevel.hs
haskell
| ScopeLevel: Definition of and operation on scope level Scope level :: Int :: ScopeLvl :: ScopeLvl -> Int :: ScopeLvl -> Int :: ScopeLvl -> ScopeLvl :: ScopeLvl -> ScopeLvl ---------------------------------------------------------------------------- Scope level ---------------------------------------------------------------------------- | Scope level. Pair of major (depth of procedure/function) nesting and minor (depth of let-command nesting) levels.
{- ****************************************************************************** * H M T C * * * * Module: ScopeLevel * * Purpose: Definition of and operation on scope level. * * Authors: Henrik Nilsson * * * * Copyright (c) Henrik Nilsson, 2013 * * * ****************************************************************************** -} module ScopeLevel ( ) where type ScopeLvl = (Int, Int) topMajScopeLvl :: Int topMajScopeLvl = 0 topScopeLvl :: ScopeLvl topScopeLvl = (topMajScopeLvl, 0) majScopeLvl :: ScopeLvl -> Int majScopeLvl = fst minScopeLvl :: ScopeLvl -> Int minScopeLvl = snd incMajScopeLvl :: ScopeLvl -> ScopeLvl incMajScopeLvl (majl, _) = (majl + 1, 0) incMinScopeLvl :: ScopeLvl -> ScopeLvl incMinScopeLvl (majl, minl) = (majl, minl + 1)
334c613277c09811287b02e3de7246bf4f4ca1cf2d5ac8fc21188cace2ff8d8a
twystd/GA144
hccforth.erl
-module(hccforth). % EXPORTS -export([step/0,go/0]). % INCLUDES -include("include/f18A.hrl"). RECORDS % API step() -> M = self(), L = spawn(fun() -> peek(M,undefined) end), P = fun(CPU) -> T = CPU#cpu.t, S = CPU#cpu.s, {I,DS} = CPU#cpu.ds, DSX = rotate(array:to_list(DS),I), L ! {peek,lists:append([T,S],DSX)} end, GA144 = ga144:init([{404,"../cucumber/404.bin"}, {405,"../cucumber/405.bin"}, {406,"../cucumber/406.bin"}, {505,"../cucumber/505.bin"} ]), ga144:probe(GA144,505,P), ga144:reset(GA144), ga144:step (GA144,97), L ! stop, receive {peek,X} -> X end. go() -> M = self(), L = spawn(fun() -> peek(M,undefined) end), P = fun(CPU) -> T = CPU#cpu.t, S = CPU#cpu.s, {I,DS} = CPU#cpu.ds, DSX = rotate(array:to_list(DS),I), L ! {peek,lists:append([T,S],DSX)}, case T of 41 -> M ! break; _else -> ok end end, GA144 = ga144:init([{404,"../cucumber/404.bin"}, {405,"../cucumber/405.bin"}, {406,"../cucumber/406.bin"}, {505,"../cucumber/505.bin"} ]), ga144:probe(GA144,505,P), ga144:reset(GA144), ga144:go (GA144), receive break -> ga144:break(GA144), timer:sleep(100), ok after 500 -> timeout end, ga144:stop(GA144), L ! stop, receive {peek,V} -> V after 100 -> none end. peek(M,X) -> receive {peek,DS} -> peek(M,DS); stop -> M ! {peek,X} end. rotate(L, 0) -> L; rotate([],_) -> []; rotate(L, N) -> {H,T} = lists:split(N,L), lists:append(T,H).
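% Worked example (values chosen for illustration): rotate/2 realigns the
% circular data stack so that the slot at index I comes first, e.g.
%   rotate([a,b,c,d,e], 2) =:= [c,d,e,a,b].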
null
https://raw.githubusercontent.com/twystd/GA144/741d2f2fca82133d594c51807115bd5121aa5350/erlang/src/hccforth.erl
erlang
EXPORTS INCLUDES API
-module(hccforth). -export([step/0,go/0]). -include("include/f18A.hrl"). RECORDS step() -> M = self(), L = spawn(fun() -> peek(M,undefined) end), P = fun(CPU) -> T = CPU#cpu.t, S = CPU#cpu.s, {I,DS} = CPU#cpu.ds, DSX = rotate(array:to_list(DS),I), L ! {peek,lists:append([T,S],DSX)} end, GA144 = ga144:init([{404,"../cucumber/404.bin"}, {405,"../cucumber/405.bin"}, {406,"../cucumber/406.bin"}, {505,"../cucumber/505.bin"} ]), ga144:probe(GA144,505,P), ga144:reset(GA144), ga144:step (GA144,97), L ! stop, receive {peek,X} -> X end. go() -> M = self(), L = spawn(fun() -> peek(M,undefined) end), P = fun(CPU) -> T = CPU#cpu.t, S = CPU#cpu.s, {I,DS} = CPU#cpu.ds, DSX = rotate(array:to_list(DS),I), L ! {peek,lists:append([T,S],DSX)}, case T of 41 -> M ! break; _else -> ok end end, GA144 = ga144:init([{404,"../cucumber/404.bin"}, {405,"../cucumber/405.bin"}, {406,"../cucumber/406.bin"}, {505,"../cucumber/505.bin"} ]), ga144:probe(GA144,505,P), ga144:reset(GA144), ga144:go (GA144), receive break -> ga144:break(GA144), timer:sleep(100), ok after 500 -> timeout end, ga144:stop(GA144), L ! stop, receive {peek,V} -> V after 100 -> none end. peek(M,X) -> receive {peek,DS} -> peek(M,DS); stop -> M ! {peek,X} end. rotate(L, 0) -> L; rotate([],_) -> []; rotate(L, N) -> {H,T} = lists:split(N,L), lists:append(T,H).
0aca163f60c98d4964bb3a34e1b3a5506eb7742b8f675f493cf291e5c9dfe3fb
travitch/foreign-inference
Escape.hs
# OPTIONS_GHC -fno - warn - orphans # # LANGUAGE OverloadedStrings , FlexibleContexts , RankNTypes # # LANGUAGE DeriveGeneric , ViewPatterns , TemplateHaskell # module Foreign.Inference.Analysis.Escape ( EscapeSummary, identifyEscapes, instructionEscapes, instructionEscapesWith, -- * Testing EscapeClass(..), escapeResultToTestFormat, -- escapeUseGraphs, -- useGraphvizRepr ) where import GHC.Generics ( Generic ) import Control.DeepSeq import Control.DeepSeq.Generics ( genericRnf ) import Control.Lens ( Lens', (^.), (%~), makeLenses ) import Control.Monad ( foldM ) import qualified Data.Foldable as F import Data.Hashable import Data.HashMap.Strict ( HashMap ) import qualified Data.HashMap.Strict as HM import Data.List ( mapAccumR ) import Data.Map ( Map ) import qualified Data.Map as M import Data.Maybe ( fromMaybe, isNothing, mapMaybe ) import Data.Set ( Set ) import qualified Data.Set as S import Data.Monoid import Safe.Failure ( at ) import Text.Printf import LLVM.Analysis import LLVM.Analysis.AccessPath import LLVM.Analysis.CallGraphSCCTraversal import Constraints.Set.Solver import Foreign.Inference.Diagnostics ( HasDiagnostics(..), Diagnostics ) import Foreign.Inference.Interface import Foreign.Inference.Internal.FlattenValue import Foreign.Inference.AnalysisMonad import Foreign.Inference.Analysis.IndirectCallResolver import System . -- import Text.Printf -- import Debug.Trace -- debug = flip trace -- | The ways a value can escape from a function data EscapeClass = DirectEscape | BrokenContractEscape | IndirectEscape | ArgumentEscape !Int -- ^ Index escaped into deriving (Eq, Ord, Read, Show) instance Hashable EscapeClass where hashWithSalt s DirectEscape = s `hashWithSalt` (76 :: Int) hashWithSalt s BrokenContractEscape = s `hashWithSalt` (699 :: Int) hashWithSalt s IndirectEscape = s `hashWithSalt` (5 :: Int) hashWithSalt s (ArgumentEscape i) = s `hashWithSalt` (77997 :: Int) `hashWithSalt` i instance NFData EscapeClass data ArgumentDescriptor = ArgumentDescriptor Function Int deriving (Eq, Ord, Show, Generic) instance NFData ArgumentDescriptor where rnf = genericRnf data Constructor = Sink { sinkClass :: EscapeClass , sinkWitness :: Instruction , sinkIntoArgument :: Maybe ArgumentDescriptor } deriving (Eq, Ord, Show, Generic) data Var = Location !Value | FieldSource { fieldSourceArgument :: !Argument , fieldSourcePath :: AbstractAccessPath } deriving (Eq, Ord, Show, Generic) type SetExp = SetExpression Var Constructor type ValueFlowGraph = SolvedSystem Var Constructor data EscapeGraph = EscapeGraph { escapeGraphFieldSourceMap :: HashMap Argument [AbstractAccessPath], escapeVFG :: ValueFlowGraph } deriving (Eq, Generic) instance NFData EscapeGraph -- | The monad in which we construct the value flow graph -- type GraphBuilder = State GraphState data EscapeSummary = EscapeSummary { _escapeGraph :: HashMap Function EscapeGraph , _escapeArguments :: HashMap Argument (EscapeClass, Instruction) , _escapeFields :: HashMap Argument (Set (EscapeClass, AbstractAccessPath, Instruction)) , _escapeIntoArguments :: HashMap Argument (EscapeClass, Function, Int) , _escapeDiagnostics :: Diagnostics } deriving (Generic) $(makeLenses ''EscapeSummary) instance Show EscapeSummary where show (EscapeSummary _ ea ef ei _) = show ea ++ "/" ++ show ef ++ "/" ++ show ei instance Eq EscapeSummary where (EscapeSummary g1 ea1 ef1 ei1 _) == (EscapeSummary g2 ea2 ef2 ei2 _) = g1 == g2 && ea1 == ea2 && ef1 == ef2 && ei1 == ei2 emptySummary :: EscapeSummary emptySummary = EscapeSummary mempty mempty mempty mempty 
mempty instance Monoid EscapeSummary where mempty = emptySummary mappend (EscapeSummary g1 as1 was1 ei1 d1) (EscapeSummary g2 as2 was2 ei2 d2) = EscapeSummary { _escapeGraph = HM.union g1 g2 , _escapeArguments = HM.union as1 as2 , _escapeFields = HM.union was1 was2 , _escapeIntoArguments = HM.union ei1 ei2 , _escapeDiagnostics = d1 `mappend` d2 } instance NFData EscapeSummary where rnf = genericRnf instance HasDiagnostics EscapeSummary where diagnosticLens = escapeDiagnostics instance SummarizeModule EscapeSummary where summarizeFunction _ _ = [] summarizeArgument = summarizeEscapeArgument type Analysis = AnalysisMonad () () -- | This is the underlying bottom-up analysis to identify which arguments escape . It builds an EscapeGraph for the function -- (incorporating information from other functions that have already -- been analyzed) and then checks to see which arguments escape using -- that graph. identifyEscapes :: (FuncLike funcLike, HasFunction funcLike) => DependencySummary -> IndirectCallSummary -> Lens' compositeSummary EscapeSummary -> ComposableAnalysis compositeSummary funcLike identifyEscapes ds ics lns = composableAnalysisM runner escapeWrapper lns where runner a = runAnalysis a ds () () escapeWrapper funcLike s = do let f = getFunction funcLike g <- buildValueFlowGraph ics s (functionInstructions f) let s' = foldr (summarizeArgumentEscapes g) s (functionParameters f) return $ (escapeGraph %~ HM.insert f g) s' extSumm ef ix = -- FIXME : Switch the builder to be a StateT so we can let this -- monadic extsumm record missing summaries case lookupArgumentSummary ds ( undefined : : EscapeSummary ) ef ix of Nothing - > True -- do -- let msg = " Missing summary for " + + show ( externalFunctionName ef ) -- emitWarning Nothing " EscapeAnalysis " msg -- return True Just annots - > PAEscape ` elem ` annots extSumm ef ix = -- FIXME: Switch the builder to be a StateT so we can let this -- monadic extsumm record missing summaries case lookupArgumentSummary ds (undefined :: EscapeSummary) ef ix of Nothing -> True -- do -- let msg = "Missing summary for " ++ show (externalFunctionName ef) -- emitWarning Nothing "EscapeAnalysis" msg -- return True Just annots -> PAEscape `elem` annots -} | A generalization of ' instructionEscapes ' . The first argument is -- a predicate that returns True if the input Instruction (which is a -- sink) should be excluded from the reachability search of the value -- flow graph. -- -- The intended use of this variant is to issue escape queries for -- instructions that are known to escape via some desired means (e.g., -- an out parameter) and to determine if they also escape via some -- other means. In that case, the @ignore@ predicate should return -- True for the store instruction that created the known escape. instructionEscapesWith :: (Instruction -> Bool) -> Instruction -> EscapeSummary -> Maybe Instruction instructionEscapesWith = instructionEscapeCore -- | Returns the instruction (if any) that causes the input instruction to escape . This does * not * cover WillEscape at all . instructionEscapes :: Instruction -> EscapeSummary -> Maybe Instruction instructionEscapes = instructionEscapeCore (const False) -- | This is shared code for all of the instruction escape queries. 
-- -- Most of the description is on 'instructionEscapesWith' instructionEscapeCore :: (Instruction -> Bool) -> Instruction -> EscapeSummary -> Maybe Instruction instructionEscapeCore ignorePred i (EscapeSummary egs _ _ _ _) = do f <- instructionFunction i EscapeGraph _ eg <- HM.lookup f egs ts@(_:_) <- leastSolution eg (Location (toValue i)) let sinks = map toSink ts sinks' = filter (not . ignorePred . sinkWitness) sinks case sinks' of [] -> Nothing s:_ -> return (sinkWitness s) summarizeEscapeArgument :: Argument -> EscapeSummary -> [(ParamAnnotation, [Witness])] summarizeEscapeArgument a er | not (isPointerType a) = [] | otherwise = case HM.lookup a (er ^. escapeArguments) of Nothing -> [] Just (DirectEscape, w@RetInst {}) -> [(PAWillEscape, [Witness w "ret"])] Just (t, w@StoreInst {}) -> [(tagToAnnot t, [Witness w "store"])] Just (t, w@CallInst {}) -> [(tagToAnnot t, [Witness w "call"])] Just (t, w@InvokeInst {}) -> [(tagToAnnot t, [Witness w "call"])] Just (t, w) -> [(tagToAnnot t, [Witness w "access"])] where tagToAnnot t = case t of DirectEscape -> PAEscape IndirectEscape -> PAFptrEscape BrokenContractEscape -> PAContractEscape ArgumentEscape ix -> PAArgEscape ix takeFirst :: a -> [Maybe a] -> a takeFirst def [] = def takeFirst def (act:rest) = case act of Nothing -> takeFirst def rest Just thing -> thing summarizeArgumentEscapes :: EscapeGraph -> Argument -> EscapeSummary -> EscapeSummary summarizeArgumentEscapes eg a s = takeFirst s [ entireArgumentEscapes eg a s , argumentFieldsEscape eg a s ] toSink :: SetExp -> Constructor toSink (ConstructedTerm e _ []) = e toSink e = error ("Foreign.Inference.Analysis.Escape.toSink: Unexpected non-constructed term: " ++ show e) entireArgumentEscapes :: EscapeGraph -> Argument -> EscapeSummary -> Maybe EscapeSummary entireArgumentEscapes (EscapeGraph _ eg) a s = do ts@(_:_) <- leastSolution eg (Location (toValue a)) let sink:_ = map toSink ts return $ (escapeArguments %~ HM.insert a (sinkClass sink, sinkWitness sink)) s argumentFieldsEscape :: EscapeGraph -> Argument -> EscapeSummary -> Maybe EscapeSummary argumentFieldsEscape (EscapeGraph fields eg) a s = do fieldPaths <- HM.lookup a fields return $ foldr fieldEscapes s fieldPaths where fieldEscapes fldPath acc = fromMaybe acc $ do ts@(_:_) <- leastSolution eg (FieldSource a fldPath) let sink:_ = map toSink ts entry = S.singleton (sinkClass sink, fldPath, sinkWitness sink) return $ (escapeFields %~ HM.insertWith S.union a entry) acc notPointer :: IsValue v => v -> Bool notPointer v = case valueType v of TypePointer _ _ -> False _ -> True buildValueFlowGraph :: IndirectCallSummary -> EscapeSummary -> [Instruction] -> Analysis EscapeGraph buildValueFlowGraph ics summ is = do (inclusionSystem, fieldSrcs) <- foldM addInclusion ([], mempty) is let Just sys = solveSystem inclusionSystem return $ EscapeGraph { escapeGraphFieldSourceMap = fieldSrcs , escapeVFG = sys } where sinkExp klass witness argDesc = atom (Sink klass witness argDesc) setExpFor v = case valueContent' v of InstructionC i@GetElementPtrInst { } -> case argumentBasedField i of Nothing -> setVariable (Location (stripBitcasts v)) Just (a, aap) -> setVariable (FieldSource a aap) InstructionC i@LoadInst { } -> case argumentBasedField i of Nothing -> setVariable (Location (stripBitcasts v)) Just (a, aap) -> setVariable (FieldSource a aap) _ -> setVariable (Location (stripBitcasts v)) addInclusion :: ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) -> Instruction -> Analysis ([Inclusion Var Constructor], HashMap Argument 
[AbstractAccessPath]) addInclusion acc@(incs, fsrcs) i = case i of RetInst { retInstValue = Just (valueContent' -> rv) } -> let s = sinkExp DirectEscape i Nothing c = s <=! setExpFor rv in return (c : incs, fsrcs) -- If this is a load of an argument field, we need to make it into a FieldSource and see what happens to it later . -- Record the argument/access path in a map somewhere for -- later lookup (otherwise we can't find the variable) GetElementPtrInst {} -> case argumentBasedField i of Just (a, aap) -> let c = setExpFor (toValue i) <=! setVariable (FieldSource a aap) srcs' = HM.insertWith (++) a [aap] fsrcs in return (c : incs, srcs') Nothing -> return acc LoadInst { loadAddress = la } | notPointer i || isNothing (argumentBasedField i) -> return acc | otherwise -> let c = setExpFor (toValue i) <=! setExpFor la in return (c : incs, fsrcs) StoreInst { storeAddress = sa , storeValue = sv } | mustEsc -> let sinkTag = maybe DirectEscape (ArgumentEscape . argumentIndex) mArg s = sinkExp sinkTag i Nothing c = s <=! setExpFor sv in return (c : incs, fsrcs) | otherwise -> May escape later if the alloca escapes let c = setExpFor sa <=! setExpFor sv in return (c : incs, fsrcs) where (mustEsc, mArg) = mustEscapeLocation sa CallInst { callFunction = callee, callArguments = (map (stripBitcasts . fst) -> args) } -> addCallConstraints i acc callee args InvokeInst { invokeFunction = callee, invokeArguments = (map (stripBitcasts . fst) -> args) } -> addCallConstraints i acc callee args SelectInst { selectTrueValue = (valueContent' -> tv) , selectFalseValue = (valueContent' -> fv) } -> let c1 = setExpFor (toValue i) <=! setExpFor tv c2 = setExpFor (toValue i) <=! setExpFor fv in return (c1 : c2 : incs, fsrcs) PhiNode { phiIncomingValues = (map (stripBitcasts . fst) -> ivs) } -> let toIncl v = setExpFor (toValue i) <=! setExpFor v cs = map toIncl ivs in return (cs ++ incs, fsrcs) _ -> return acc addCallConstraints :: Instruction -> ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) -> Value -> [Value] -> Analysis ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) addCallConstraints callInst (incs, fsrcs) callee args = case valueContent' callee of FunctionC f -> do let indexedArgs = zip [0..] args incs' <- foldM (addActualConstraint callInst f) incs indexedArgs return (incs', fsrcs) ExternalFunctionC ef -> do let indexedArgs = zip [0..] args incs' <- foldM (addActualConstraint callInst ef) incs indexedArgs return (incs', fsrcs) _ -> case indirectCallInitializers ics callee of -- No targets known; all pointer arguments indirectly escape [] -> do incs' <- foldM (addIndirectEscape callInst) incs args return (incs', fsrcs) We have at least one target ; take it as a representative (repr:_) -> do let indexedArgs = zip [0..] args incs' <- foldM (addContractEscapes callInst repr) incs indexedArgs return (incs', fsrcs) argEscapeConstraint callInst etype actual incs = FIXME ; figure out how to use the index in a field escape here let s = sinkExp etype callInst Nothing c = s <=! setExpFor actual in return $ c : incs addContractEscapes :: Instruction -> Value -> [Inclusion Var Constructor] -> (Int, Value) -> Analysis [Inclusion Var Constructor] addContractEscapes callInst repr incs (ix, actual) | notPointer actual = return incs | otherwise = do s <- lookupArgumentSummary summ repr ix case s of -- If we don't have a summary for our representative, treat -- it as an indirect call with no known target (we could do -- better by looking at the next possible representative, if -- any). 
Nothing -> addIndirectEscape callInst incs actual Just pannots -> case F.find isEscapeAnnot pannots of -- If we don't find an escape annotation, we generate a BrokenContractEscape since the argument will only -- escape if the function pointer breaks a contract Nothing -> argEscapeConstraint callInst BrokenContractEscape actual incs Just PAEscape -> argEscapeConstraint callInst DirectEscape actual incs Just PAContractEscape -> argEscapeConstraint callInst BrokenContractEscape actual incs Just PAFptrEscape -> argEscapeConstraint callInst IndirectEscape actual incs _ -> return incs addActualConstraint callInst callee incs (ix, actual) = do pannots <- lookupArgumentSummaryList summ callee ix case F.find isEscapeAnnot pannots of Nothing -> return incs Just PAEscape -> argEscapeConstraint callInst DirectEscape actual incs Just PAContractEscape -> argEscapeConstraint callInst BrokenContractEscape actual incs Just PAFptrEscape -> argEscapeConstraint callInst IndirectEscape actual incs Just (PAArgEscape argIx) | callInstActualIsAlloca callInst argIx -> return incs | otherwise -> argEscapeConstraint callInst (ArgumentEscape argIx) actual incs _ -> return incs -- Note, it isn't quite obvious what to do with PAArgEscape here. addIndirectEscape callInst incs actual | notPointer actual = return incs | otherwise = argEscapeConstraint callInst IndirectEscape actual incs -- FIXME This should be a "not address taken" alloca - that is, not -- passed to any functions. callInstActualIsAlloca :: Instruction -> Int -> Bool callInstActualIsAlloca i ix = case i of CallInst { callArguments = (map fst -> args) } -> isAlloca args InvokeInst { invokeArguments = (map fst -> args) } -> isAlloca args _ -> False where isAlloca args = fromMaybe False $ do actual <- args `at` ix actualInst <- fromValue actual case actualInst of AllocaInst {} -> return True _ -> fail "Not an alloca" isEscapeAnnot :: ParamAnnotation -> Bool isEscapeAnnot a = case a of PAEscape -> True PAArgEscape _ -> True PAContractEscape -> True PAFptrEscape -> True _ -> False Ignore PAWillEscape for now ... isPointerType :: (IsValue a) => a -> Bool isPointerType v = case valueType v of TypePointer _ _ -> True _ -> False -- Given a GetElementPtrInst, return its base and the path accessed IFF the base was an Argument . argumentBasedField :: Instruction -> Maybe (Argument, AbstractAccessPath) argumentBasedField li = do accPath <- accessPath li case valueContent' (accessPathBaseValue accPath) of ArgumentC a -> return (a, abstractAccessPath accPath) _ -> Nothing mustEscapeLocation :: Value -> (Bool, Maybe Argument) mustEscapeLocation = snd . go mempty where go visited v | S.member v visited = (visited, (False, Nothing)) | otherwise = case valueContent' v of GlobalVariableC _ -> (visited', (True, Nothing)) ExternalValueC _ -> (visited', (True, Nothing)) ArgumentC a -> (visited', (True, Just a)) InstructionC CallInst {} -> (visited', (True, Nothing)) InstructionC InvokeInst {} -> (visited', (True, Nothing)) InstructionC LoadInst { loadAddress = la } -> go visited' la InstructionC GetElementPtrInst { getElementPtrValue = base } -> go visited' base InstructionC SelectInst { } -> let (visited'', pairs) = mapAccumR go visited' (flattenValue v) argVal = mconcat $ map (First . snd) pairs in (visited'', (any fst pairs, getFirst argVal)) InstructionC PhiNode {} -> let (visited'', pairs) = mapAccumR go visited' (flattenValue v) argVal = mconcat $ map (First . 
snd) pairs in (visited'', (any fst pairs, getFirst argVal)) _ -> (visited', (False, Nothing)) where visited' = S.insert v visited -- Testing -- | Extract the arguments for each function that escape. The keys of -- the map are function names and the set elements are argument names. -- This format exposes the internal results for testing purposes. -- For actual use in a program , use one of ' functionEscapeArguments ' , ' functionWillEscapeArguments ' , or ' instructionEscapes ' instead . escapeResultToTestFormat :: EscapeSummary -> Map String (Set (EscapeClass, String)) escapeResultToTestFormat er = M.filter (not . S.null) $ foldr fieldTransform argEscapes (HM.toList fm) where directEscapes = foldr transform mempty (HM.toList m) argEscapes = foldr argTransform directEscapes (HM.toList am) m = er ^. escapeArguments fm = er ^. escapeFields am = er ^. escapeIntoArguments argTransform (a, (tag, _, _)) acc = let aname = show (argumentName a) f = argumentFunction a fname = show (functionName f) in M.insertWith' S.union fname (S.singleton (tag, aname)) acc transform (a, (tag, _)) acc = let f = argumentFunction a fname = show (functionName f) aname = show (argumentName a) in M.insertWith' S.union fname (S.singleton (tag, aname)) acc fieldTransform (a, fieldsAndInsts) acc = let f = argumentFunction a fname = show (functionName f) aname = show (argumentName a) tagsAndFields = S.toList $ S.map (\(tag, fld, _) -> (tag, fld)) fieldsAndInsts newEntries = S.fromList $ mapMaybe (toFieldRef aname) tagsAndFields in M.insertWith' S.union fname newEntries acc toFieldRef aname (tag, fld) = case abstractAccessPathComponents fld of [AccessField ix] -> Just $ (tag, printf "%s.<%d>" aname ix) _ -> Nothing
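-- Illustrative sketch (function and argument names here are hypothetical):
-- the test format maps a function name to the set of its escaping arguments
-- tagged with how they escape, e.g.
--   M.fromList [("f", S.fromList [(DirectEscape, "a")])]
-- and an escape through the first field of an argument @a@ is rendered via
-- the printf "%s.<%d>" pattern above as (DirectEscape, "a.<0>").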
null
https://raw.githubusercontent.com/travitch/foreign-inference/9cc0f7c730f7cd19afbd00d890abbc7109391cc6/src/Foreign/Inference/Analysis/Escape.hs
haskell
* Testing escapeUseGraphs, useGraphvizRepr import Text.Printf import Debug.Trace debug = flip trace | The ways a value can escape from a function ^ Index escaped into | The monad in which we construct the value flow graph type GraphBuilder = State GraphState | This is the underlying bottom-up analysis to identify which (incorporating information from other functions that have already been analyzed) and then checks to see which arguments escape using that graph. FIXME : Switch the builder to be a StateT so we can let this monadic extsumm record missing summaries do let msg = " Missing summary for " + + show ( externalFunctionName ef ) emitWarning Nothing " EscapeAnalysis " msg return True FIXME: Switch the builder to be a StateT so we can let this monadic extsumm record missing summaries do let msg = "Missing summary for " ++ show (externalFunctionName ef) emitWarning Nothing "EscapeAnalysis" msg return True a predicate that returns True if the input Instruction (which is a sink) should be excluded from the reachability search of the value flow graph. The intended use of this variant is to issue escape queries for instructions that are known to escape via some desired means (e.g., an out parameter) and to determine if they also escape via some other means. In that case, the @ignore@ predicate should return True for the store instruction that created the known escape. | Returns the instruction (if any) that causes the input | This is shared code for all of the instruction escape queries. Most of the description is on 'instructionEscapesWith' If this is a load of an argument field, we need to make it Record the argument/access path in a map somewhere for later lookup (otherwise we can't find the variable) No targets known; all pointer arguments indirectly escape If we don't have a summary for our representative, treat it as an indirect call with no known target (we could do better by looking at the next possible representative, if any). If we don't find an escape annotation, we generate a escape if the function pointer breaks a contract Note, it isn't quite obvious what to do with PAArgEscape here. FIXME This should be a "not address taken" alloca - that is, not passed to any functions. Given a GetElementPtrInst, return its base and the path accessed Testing | Extract the arguments for each function that escape. The keys of the map are function names and the set elements are argument names. This format exposes the internal results for testing purposes.
# OPTIONS_GHC -fno - warn - orphans # # LANGUAGE OverloadedStrings , FlexibleContexts , RankNTypes # # LANGUAGE DeriveGeneric , ViewPatterns , TemplateHaskell # module Foreign.Inference.Analysis.Escape ( EscapeSummary, identifyEscapes, instructionEscapes, instructionEscapesWith, EscapeClass(..), escapeResultToTestFormat, ) where import GHC.Generics ( Generic ) import Control.DeepSeq import Control.DeepSeq.Generics ( genericRnf ) import Control.Lens ( Lens', (^.), (%~), makeLenses ) import Control.Monad ( foldM ) import qualified Data.Foldable as F import Data.Hashable import Data.HashMap.Strict ( HashMap ) import qualified Data.HashMap.Strict as HM import Data.List ( mapAccumR ) import Data.Map ( Map ) import qualified Data.Map as M import Data.Maybe ( fromMaybe, isNothing, mapMaybe ) import Data.Set ( Set ) import qualified Data.Set as S import Data.Monoid import Safe.Failure ( at ) import Text.Printf import LLVM.Analysis import LLVM.Analysis.AccessPath import LLVM.Analysis.CallGraphSCCTraversal import Constraints.Set.Solver import Foreign.Inference.Diagnostics ( HasDiagnostics(..), Diagnostics ) import Foreign.Inference.Interface import Foreign.Inference.Internal.FlattenValue import Foreign.Inference.AnalysisMonad import Foreign.Inference.Analysis.IndirectCallResolver import System . data EscapeClass = DirectEscape | BrokenContractEscape | IndirectEscape deriving (Eq, Ord, Read, Show) instance Hashable EscapeClass where hashWithSalt s DirectEscape = s `hashWithSalt` (76 :: Int) hashWithSalt s BrokenContractEscape = s `hashWithSalt` (699 :: Int) hashWithSalt s IndirectEscape = s `hashWithSalt` (5 :: Int) hashWithSalt s (ArgumentEscape i) = s `hashWithSalt` (77997 :: Int) `hashWithSalt` i instance NFData EscapeClass data ArgumentDescriptor = ArgumentDescriptor Function Int deriving (Eq, Ord, Show, Generic) instance NFData ArgumentDescriptor where rnf = genericRnf data Constructor = Sink { sinkClass :: EscapeClass , sinkWitness :: Instruction , sinkIntoArgument :: Maybe ArgumentDescriptor } deriving (Eq, Ord, Show, Generic) data Var = Location !Value | FieldSource { fieldSourceArgument :: !Argument , fieldSourcePath :: AbstractAccessPath } deriving (Eq, Ord, Show, Generic) type SetExp = SetExpression Var Constructor type ValueFlowGraph = SolvedSystem Var Constructor data EscapeGraph = EscapeGraph { escapeGraphFieldSourceMap :: HashMap Argument [AbstractAccessPath], escapeVFG :: ValueFlowGraph } deriving (Eq, Generic) instance NFData EscapeGraph data EscapeSummary = EscapeSummary { _escapeGraph :: HashMap Function EscapeGraph , _escapeArguments :: HashMap Argument (EscapeClass, Instruction) , _escapeFields :: HashMap Argument (Set (EscapeClass, AbstractAccessPath, Instruction)) , _escapeIntoArguments :: HashMap Argument (EscapeClass, Function, Int) , _escapeDiagnostics :: Diagnostics } deriving (Generic) $(makeLenses ''EscapeSummary) instance Show EscapeSummary where show (EscapeSummary _ ea ef ei _) = show ea ++ "/" ++ show ef ++ "/" ++ show ei instance Eq EscapeSummary where (EscapeSummary g1 ea1 ef1 ei1 _) == (EscapeSummary g2 ea2 ef2 ei2 _) = g1 == g2 && ea1 == ea2 && ef1 == ef2 && ei1 == ei2 emptySummary :: EscapeSummary emptySummary = EscapeSummary mempty mempty mempty mempty mempty instance Monoid EscapeSummary where mempty = emptySummary mappend (EscapeSummary g1 as1 was1 ei1 d1) (EscapeSummary g2 as2 was2 ei2 d2) = EscapeSummary { _escapeGraph = HM.union g1 g2 , _escapeArguments = HM.union as1 as2 , _escapeFields = HM.union was1 was2 , _escapeIntoArguments = HM.union ei1 ei2 , 
_escapeDiagnostics = d1 `mappend` d2 } instance NFData EscapeSummary where rnf = genericRnf instance HasDiagnostics EscapeSummary where diagnosticLens = escapeDiagnostics instance SummarizeModule EscapeSummary where summarizeFunction _ _ = [] summarizeArgument = summarizeEscapeArgument type Analysis = AnalysisMonad () () arguments escape . It builds an EscapeGraph for the function identifyEscapes :: (FuncLike funcLike, HasFunction funcLike) => DependencySummary -> IndirectCallSummary -> Lens' compositeSummary EscapeSummary -> ComposableAnalysis compositeSummary funcLike identifyEscapes ds ics lns = composableAnalysisM runner escapeWrapper lns where runner a = runAnalysis a ds () () escapeWrapper funcLike s = do let f = getFunction funcLike g <- buildValueFlowGraph ics s (functionInstructions f) let s' = foldr (summarizeArgumentEscapes g) s (functionParameters f) return $ (escapeGraph %~ HM.insert f g) s' extSumm ef ix = case lookupArgumentSummary ds ( undefined : : EscapeSummary ) ef ix of Just annots - > PAEscape ` elem ` annots extSumm ef ix = case lookupArgumentSummary ds (undefined :: EscapeSummary) ef ix of Just annots -> PAEscape `elem` annots -} | A generalization of ' instructionEscapes ' . The first argument is instructionEscapesWith :: (Instruction -> Bool) -> Instruction -> EscapeSummary -> Maybe Instruction instructionEscapesWith = instructionEscapeCore instruction to escape . This does * not * cover WillEscape at all . instructionEscapes :: Instruction -> EscapeSummary -> Maybe Instruction instructionEscapes = instructionEscapeCore (const False) instructionEscapeCore :: (Instruction -> Bool) -> Instruction -> EscapeSummary -> Maybe Instruction instructionEscapeCore ignorePred i (EscapeSummary egs _ _ _ _) = do f <- instructionFunction i EscapeGraph _ eg <- HM.lookup f egs ts@(_:_) <- leastSolution eg (Location (toValue i)) let sinks = map toSink ts sinks' = filter (not . ignorePred . sinkWitness) sinks case sinks' of [] -> Nothing s:_ -> return (sinkWitness s) summarizeEscapeArgument :: Argument -> EscapeSummary -> [(ParamAnnotation, [Witness])] summarizeEscapeArgument a er | not (isPointerType a) = [] | otherwise = case HM.lookup a (er ^. 
escapeArguments) of Nothing -> [] Just (DirectEscape, w@RetInst {}) -> [(PAWillEscape, [Witness w "ret"])] Just (t, w@StoreInst {}) -> [(tagToAnnot t, [Witness w "store"])] Just (t, w@CallInst {}) -> [(tagToAnnot t, [Witness w "call"])] Just (t, w@InvokeInst {}) -> [(tagToAnnot t, [Witness w "call"])] Just (t, w) -> [(tagToAnnot t, [Witness w "access"])] where tagToAnnot t = case t of DirectEscape -> PAEscape IndirectEscape -> PAFptrEscape BrokenContractEscape -> PAContractEscape ArgumentEscape ix -> PAArgEscape ix takeFirst :: a -> [Maybe a] -> a takeFirst def [] = def takeFirst def (act:rest) = case act of Nothing -> takeFirst def rest Just thing -> thing summarizeArgumentEscapes :: EscapeGraph -> Argument -> EscapeSummary -> EscapeSummary summarizeArgumentEscapes eg a s = takeFirst s [ entireArgumentEscapes eg a s , argumentFieldsEscape eg a s ] toSink :: SetExp -> Constructor toSink (ConstructedTerm e _ []) = e toSink e = error ("Foreign.Inference.Analysis.Escape.toSink: Unexpected non-constructed term: " ++ show e) entireArgumentEscapes :: EscapeGraph -> Argument -> EscapeSummary -> Maybe EscapeSummary entireArgumentEscapes (EscapeGraph _ eg) a s = do ts@(_:_) <- leastSolution eg (Location (toValue a)) let sink:_ = map toSink ts return $ (escapeArguments %~ HM.insert a (sinkClass sink, sinkWitness sink)) s argumentFieldsEscape :: EscapeGraph -> Argument -> EscapeSummary -> Maybe EscapeSummary argumentFieldsEscape (EscapeGraph fields eg) a s = do fieldPaths <- HM.lookup a fields return $ foldr fieldEscapes s fieldPaths where fieldEscapes fldPath acc = fromMaybe acc $ do ts@(_:_) <- leastSolution eg (FieldSource a fldPath) let sink:_ = map toSink ts entry = S.singleton (sinkClass sink, fldPath, sinkWitness sink) return $ (escapeFields %~ HM.insertWith S.union a entry) acc notPointer :: IsValue v => v -> Bool notPointer v = case valueType v of TypePointer _ _ -> False _ -> True buildValueFlowGraph :: IndirectCallSummary -> EscapeSummary -> [Instruction] -> Analysis EscapeGraph buildValueFlowGraph ics summ is = do (inclusionSystem, fieldSrcs) <- foldM addInclusion ([], mempty) is let Just sys = solveSystem inclusionSystem return $ EscapeGraph { escapeGraphFieldSourceMap = fieldSrcs , escapeVFG = sys } where sinkExp klass witness argDesc = atom (Sink klass witness argDesc) setExpFor v = case valueContent' v of InstructionC i@GetElementPtrInst { } -> case argumentBasedField i of Nothing -> setVariable (Location (stripBitcasts v)) Just (a, aap) -> setVariable (FieldSource a aap) InstructionC i@LoadInst { } -> case argumentBasedField i of Nothing -> setVariable (Location (stripBitcasts v)) Just (a, aap) -> setVariable (FieldSource a aap) _ -> setVariable (Location (stripBitcasts v)) addInclusion :: ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) -> Instruction -> Analysis ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) addInclusion acc@(incs, fsrcs) i = case i of RetInst { retInstValue = Just (valueContent' -> rv) } -> let s = sinkExp DirectEscape i Nothing c = s <=! setExpFor rv in return (c : incs, fsrcs) into a FieldSource and see what happens to it later . GetElementPtrInst {} -> case argumentBasedField i of Just (a, aap) -> let c = setExpFor (toValue i) <=! setVariable (FieldSource a aap) srcs' = HM.insertWith (++) a [aap] fsrcs in return (c : incs, srcs') Nothing -> return acc LoadInst { loadAddress = la } | notPointer i || isNothing (argumentBasedField i) -> return acc | otherwise -> let c = setExpFor (toValue i) <=! 
setExpFor la in return (c : incs, fsrcs) StoreInst { storeAddress = sa , storeValue = sv } | mustEsc -> let sinkTag = maybe DirectEscape (ArgumentEscape . argumentIndex) mArg s = sinkExp sinkTag i Nothing c = s <=! setExpFor sv in return (c : incs, fsrcs) | otherwise -> May escape later if the alloca escapes let c = setExpFor sa <=! setExpFor sv in return (c : incs, fsrcs) where (mustEsc, mArg) = mustEscapeLocation sa CallInst { callFunction = callee, callArguments = (map (stripBitcasts . fst) -> args) } -> addCallConstraints i acc callee args InvokeInst { invokeFunction = callee, invokeArguments = (map (stripBitcasts . fst) -> args) } -> addCallConstraints i acc callee args SelectInst { selectTrueValue = (valueContent' -> tv) , selectFalseValue = (valueContent' -> fv) } -> let c1 = setExpFor (toValue i) <=! setExpFor tv c2 = setExpFor (toValue i) <=! setExpFor fv in return (c1 : c2 : incs, fsrcs) PhiNode { phiIncomingValues = (map (stripBitcasts . fst) -> ivs) } -> let toIncl v = setExpFor (toValue i) <=! setExpFor v cs = map toIncl ivs in return (cs ++ incs, fsrcs) _ -> return acc addCallConstraints :: Instruction -> ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) -> Value -> [Value] -> Analysis ([Inclusion Var Constructor], HashMap Argument [AbstractAccessPath]) addCallConstraints callInst (incs, fsrcs) callee args = case valueContent' callee of FunctionC f -> do let indexedArgs = zip [0..] args incs' <- foldM (addActualConstraint callInst f) incs indexedArgs return (incs', fsrcs) ExternalFunctionC ef -> do let indexedArgs = zip [0..] args incs' <- foldM (addActualConstraint callInst ef) incs indexedArgs return (incs', fsrcs) _ -> case indirectCallInitializers ics callee of [] -> do incs' <- foldM (addIndirectEscape callInst) incs args return (incs', fsrcs) We have at least one target ; take it as a representative (repr:_) -> do let indexedArgs = zip [0..] args incs' <- foldM (addContractEscapes callInst repr) incs indexedArgs return (incs', fsrcs) argEscapeConstraint callInst etype actual incs = FIXME ; figure out how to use the index in a field escape here let s = sinkExp etype callInst Nothing c = s <=! 
setExpFor actual in return $ c : incs addContractEscapes :: Instruction -> Value -> [Inclusion Var Constructor] -> (Int, Value) -> Analysis [Inclusion Var Constructor] addContractEscapes callInst repr incs (ix, actual) | notPointer actual = return incs | otherwise = do s <- lookupArgumentSummary summ repr ix case s of Nothing -> addIndirectEscape callInst incs actual Just pannots -> case F.find isEscapeAnnot pannots of BrokenContractEscape since the argument will only Nothing -> argEscapeConstraint callInst BrokenContractEscape actual incs Just PAEscape -> argEscapeConstraint callInst DirectEscape actual incs Just PAContractEscape -> argEscapeConstraint callInst BrokenContractEscape actual incs Just PAFptrEscape -> argEscapeConstraint callInst IndirectEscape actual incs _ -> return incs addActualConstraint callInst callee incs (ix, actual) = do pannots <- lookupArgumentSummaryList summ callee ix case F.find isEscapeAnnot pannots of Nothing -> return incs Just PAEscape -> argEscapeConstraint callInst DirectEscape actual incs Just PAContractEscape -> argEscapeConstraint callInst BrokenContractEscape actual incs Just PAFptrEscape -> argEscapeConstraint callInst IndirectEscape actual incs Just (PAArgEscape argIx) | callInstActualIsAlloca callInst argIx -> return incs | otherwise -> argEscapeConstraint callInst (ArgumentEscape argIx) actual incs _ -> return incs addIndirectEscape callInst incs actual | notPointer actual = return incs | otherwise = argEscapeConstraint callInst IndirectEscape actual incs callInstActualIsAlloca :: Instruction -> Int -> Bool callInstActualIsAlloca i ix = case i of CallInst { callArguments = (map fst -> args) } -> isAlloca args InvokeInst { invokeArguments = (map fst -> args) } -> isAlloca args _ -> False where isAlloca args = fromMaybe False $ do actual <- args `at` ix actualInst <- fromValue actual case actualInst of AllocaInst {} -> return True _ -> fail "Not an alloca" isEscapeAnnot :: ParamAnnotation -> Bool isEscapeAnnot a = case a of PAEscape -> True PAArgEscape _ -> True PAContractEscape -> True PAFptrEscape -> True _ -> False Ignore PAWillEscape for now ... isPointerType :: (IsValue a) => a -> Bool isPointerType v = case valueType v of TypePointer _ _ -> True _ -> False IFF the base was an Argument . argumentBasedField :: Instruction -> Maybe (Argument, AbstractAccessPath) argumentBasedField li = do accPath <- accessPath li case valueContent' (accessPathBaseValue accPath) of ArgumentC a -> return (a, abstractAccessPath accPath) _ -> Nothing mustEscapeLocation :: Value -> (Bool, Maybe Argument) mustEscapeLocation = snd . go mempty where go visited v | S.member v visited = (visited, (False, Nothing)) | otherwise = case valueContent' v of GlobalVariableC _ -> (visited', (True, Nothing)) ExternalValueC _ -> (visited', (True, Nothing)) ArgumentC a -> (visited', (True, Just a)) InstructionC CallInst {} -> (visited', (True, Nothing)) InstructionC InvokeInst {} -> (visited', (True, Nothing)) InstructionC LoadInst { loadAddress = la } -> go visited' la InstructionC GetElementPtrInst { getElementPtrValue = base } -> go visited' base InstructionC SelectInst { } -> let (visited'', pairs) = mapAccumR go visited' (flattenValue v) argVal = mconcat $ map (First . snd) pairs in (visited'', (any fst pairs, getFirst argVal)) InstructionC PhiNode {} -> let (visited'', pairs) = mapAccumR go visited' (flattenValue v) argVal = mconcat $ map (First . 
snd) pairs in (visited'', (any fst pairs, getFirst argVal)) _ -> (visited', (False, Nothing)) where visited' = S.insert v visited For actual use in a program , use one of ' functionEscapeArguments ' , ' functionWillEscapeArguments ' , or ' instructionEscapes ' instead . escapeResultToTestFormat :: EscapeSummary -> Map String (Set (EscapeClass, String)) escapeResultToTestFormat er = M.filter (not . S.null) $ foldr fieldTransform argEscapes (HM.toList fm) where directEscapes = foldr transform mempty (HM.toList m) argEscapes = foldr argTransform directEscapes (HM.toList am) m = er ^. escapeArguments fm = er ^. escapeFields am = er ^. escapeIntoArguments argTransform (a, (tag, _, _)) acc = let aname = show (argumentName a) f = argumentFunction a fname = show (functionName f) in M.insertWith' S.union fname (S.singleton (tag, aname)) acc transform (a, (tag, _)) acc = let f = argumentFunction a fname = show (functionName f) aname = show (argumentName a) in M.insertWith' S.union fname (S.singleton (tag, aname)) acc fieldTransform (a, fieldsAndInsts) acc = let f = argumentFunction a fname = show (functionName f) aname = show (argumentName a) tagsAndFields = S.toList $ S.map (\(tag, fld, _) -> (tag, fld)) fieldsAndInsts newEntries = S.fromList $ mapMaybe (toFieldRef aname) tagsAndFields in M.insertWith' S.union fname newEntries acc toFieldRef aname (tag, fld) = case abstractAccessPathComponents fld of [AccessField ix] -> Just $ (tag, printf "%s.<%d>" aname ix) _ -> Nothing
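A minimal usage sketch for the escape-analysis module in the record above. It relies only on the exported instructionEscapes and instructionEscapesWith, whose signatures appear in the record; the helper name escapeWitnesses and the caller-supplied ignoreSink predicate are assumptions introduced for illustration, not part of the original file.

import LLVM.Analysis ( Instruction )
import Foreign.Inference.Analysis.Escape
  ( EscapeSummary, instructionEscapes, instructionEscapesWith )

-- Hypothetical helper: the witness instruction (if any) through which a value
-- escapes, first by any means, then ignoring sinks the caller already accounts
-- for (e.g. a store through a known out-parameter).
escapeWitnesses :: (Instruction -> Bool) -> EscapeSummary -> Instruction
                -> (Maybe Instruction, Maybe Instruction)
escapeWitnesses ignoreSink summ i =
  ( instructionEscapes i summ
  , instructionEscapesWith ignoreSink i summ )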
a49c9dad6369454994420aa7570564373fc55f867a231cf639e2ce1f35fe52a6
mirage/cactus
leaf.mli
* Copyright ( c ) 2021 Tarides < > * Copyright ( c ) 2021 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2021 Tarides <> * Copyright (c) 2021 Gabriel Belouze <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *) include Leaf_intf.Leaf
null
https://raw.githubusercontent.com/mirage/cactus/3eb2a4abee79bf8f20de5b4b12a57c3421c3e2fe/src/leaf.mli
ocaml
* Copyright ( c ) 2021 Tarides < > * Copyright ( c ) 2021 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2021 Tarides <> * Copyright (c) 2021 Gabriel Belouze <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *) include Leaf_intf.Leaf
15dc341b1ec47c867ff52a102e8aae43ce2cc36504f5cdf769e6b3d86a634862
AbstractMachinesLab/caramel
either0.ml
type ('f, 's) t = First of 'f | Second of 's
null
https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocamlformat-0.17.0/vendor/compat/either0.ml
ocaml
type ('f, 's) t = First of 'f | Second of 's
63f0e0d1d4874a47c90e507989769f648184d5aea898314c3f7c75d6a216b2d7
ocsigen/wikidoc
wiki_menulatex.ml
Ocsimore * Copyright ( C ) 2010 * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation ; either version 2 of the License , or * ( at your option ) any later version . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU General Public License for more details . * * You should have received a copy of the GNU General Public License * along with this program ; if not , write to the Free Software * Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA . * Copyright (C) 2010 Vincent Balat * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) * Pretty print wiki menus to LaTeX @author Pretty print wiki menus to LaTeX @author Vincent Balat *) open Wikicreole open Wiki_latex let (>>=) = Lwt.bind let unsup = "unsupported syntax in menu" let failed _ _ = failwith unsup let failed1 _ = failwith unsup let offset_file = open_out ".latex_of_wiki_offsets" let item i attribs il = Lwt_list.map_s (fun x -> x) il >>= function | Menu_link (addr, _)::_ -> output_string offset_file addr; output_string offset_file " "; output_string offset_file (string_of_int (i - 2)); output_string offset_file "\n"; flush offset_file; Lwt.return (Node3 ("\\input{", [Leaf addr], "}\n")) | il -> output_string offset_file "==="; Lwt.return (Nodelist [sect i; Nodelist il; close_sect ()]) let plugin_fun = function | _ -> (true, fun () args content -> `Phrasing_without_interactive (Lwt.return (Leaf ""))) (* implement at least somthing for a_file? *) module LatexMenuBuilder = struct include Wiki_latex.LatexBuilder let p_elem = failed let pre_elem = failed let h1_elem = item 1 let h2_elem = item 2 let h3_elem = item 3 let h4_elem = item 4 let h5_elem = item 5 let h6_elem = item 6 let ol_elem = failed let dl_elem = failed let hr_elem = failed1 let table_elem = failed let a_elem_phrasing attribs addr c = Lwt_list.map_s (fun x -> x) c >>= fun c -> Lwt.return (Menu_link (addr, Node (Nodelist c, Leaf "}"))) let a_elem_flow = a_elem_phrasing let ul_elem = failed let plugin_fun = plugin_fun end let builder = (module LatexMenuBuilder : Wikicreole.Builder with type param = unit and type flow = rope Lwt.t) let menu_of_wiki s = Lwt_list.map_s (fun x -> x) (Wikicreole.from_string ~sectioning:false () builder s)
null
https://raw.githubusercontent.com/ocsigen/wikidoc/8c10fd2998297a15d58b0f8f5c4f31e78376e161/src/latex_of_wiki/wiki_menulatex.ml
ocaml
implement at least somthing for a_file?
Ocsimore * Copyright ( C ) 2010 * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation ; either version 2 of the License , or * ( at your option ) any later version . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU General Public License for more details . * * You should have received a copy of the GNU General Public License * along with this program ; if not , write to the Free Software * Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA . * Copyright (C) 2010 Vincent Balat * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) * Pretty print wiki menus to LaTeX @author Pretty print wiki menus to LaTeX @author Vincent Balat *) open Wikicreole open Wiki_latex let (>>=) = Lwt.bind let unsup = "unsupported syntax in menu" let failed _ _ = failwith unsup let failed1 _ = failwith unsup let offset_file = open_out ".latex_of_wiki_offsets" let item i attribs il = Lwt_list.map_s (fun x -> x) il >>= function | Menu_link (addr, _)::_ -> output_string offset_file addr; output_string offset_file " "; output_string offset_file (string_of_int (i - 2)); output_string offset_file "\n"; flush offset_file; Lwt.return (Node3 ("\\input{", [Leaf addr], "}\n")) | il -> output_string offset_file "==="; Lwt.return (Nodelist [sect i; Nodelist il; close_sect ()]) let plugin_fun = function | _ -> (true, fun () args content -> `Phrasing_without_interactive (Lwt.return (Leaf ""))) module LatexMenuBuilder = struct include Wiki_latex.LatexBuilder let p_elem = failed let pre_elem = failed let h1_elem = item 1 let h2_elem = item 2 let h3_elem = item 3 let h4_elem = item 4 let h5_elem = item 5 let h6_elem = item 6 let ol_elem = failed let dl_elem = failed let hr_elem = failed1 let table_elem = failed let a_elem_phrasing attribs addr c = Lwt_list.map_s (fun x -> x) c >>= fun c -> Lwt.return (Menu_link (addr, Node (Nodelist c, Leaf "}"))) let a_elem_flow = a_elem_phrasing let ul_elem = failed let plugin_fun = plugin_fun end let builder = (module LatexMenuBuilder : Wikicreole.Builder with type param = unit and type flow = rope Lwt.t) let menu_of_wiki s = Lwt_list.map_s (fun x -> x) (Wikicreole.from_string ~sectioning:false () builder s)
308d292602851aaacedfc7db11ac2070fc84833ea067c0a8e5f5a6c42d1cff08
kelamg/HtDP2e-workthrough
ex253.rkt
The first three lines of this file were inserted by . They record metadata ;; about the language level of this file in a form that our tools can easily process. #reader(lib "htdp-intermediate-reader.ss" "lang")((modname ex253) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f))) ; [Number -> Boolean] (odd? 42) (even? 42) (number? 42) [ Boolean String - > Boolean ] (define (? b s) #f) (? #false "suh dude") ; [Number Number Number -> Number] (max 1 3 7) (min 1 3 7) ; [Number -> [List-of Number]] (define (tab-sin n) (cond [(= n 0) (list (sin 0))] [else (cons (sin n) (tab-sin (sub1 n)))])) ; [[List-of Number] -> Boolean] (define (contains-1? l) (cond [(empty? l) #f] [else (or (= 1 (first l)) (contains-1? (rest l)))]))
null
https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Abstraction/ex253.rkt
racket
about the language level of this file in a form that our tools can easily process. [Number -> Boolean] [Number Number Number -> Number] [Number -> [List-of Number]] [[List-of Number] -> Boolean]
The first three lines of this file were inserted by . They record metadata #reader(lib "htdp-intermediate-reader.ss" "lang")((modname ex253) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f))) (odd? 42) (even? 42) (number? 42) [ Boolean String - > Boolean ] (define (? b s) #f) (? #false "suh dude") (max 1 3 7) (min 1 3 7) (define (tab-sin n) (cond [(= n 0) (list (sin 0))] [else (cons (sin n) (tab-sin (sub1 n)))])) (define (contains-1? l) (cond [(empty? l) #f] [else (or (= 1 (first l)) (contains-1? (rest l)))]))
e800031244c8be72f407ecc3213258caa14fde1ef7f65e07a3de579f03b2e0a0
deg/sodium
extensions.cljs
Author : ( ) Copyright ( c ) 2017 , (ns sodium.extensions (:require [clojure.spec.alpha :as s] [reagent.core :as reagent] [re-frame.core :as re-frame] [re-frame.loggers :refer [console]] [iron.re-utils :refer [<sub >evt sub->fn event->fn]] [iron.utils :refer [ci-sort validate]] [sodium.core :as na] [sodium.utils :as utils])) ;;; HEADERS ;;; Various page and section headers/dividers (defn- header-maker [title size dividing? sub?] {:pre [(validate (s/or :string string? :event vector?) title) (validate boolean? dividing?) (validate :sodium/size size)]} (na/header {:content (if (vector? title) (str (<sub title)) title) :size size :dividing? dividing? :sub? sub?})) (defn app-header "Large header. Title can be either a string or a subscription vector" [title] (header-maker title :large true false)) (defn panel-header "Medium header. Title can be either a string or a subscription vector" [title] (header-maker title :medium false false)) (defn panel-subheader "Small header. Title can be either a string or a subscription vector" [title] (header-maker title :small false false)) (defn section-header "Medium de-emphasized header. Title can be either a string or a subscription vector" [title] (header-maker title :medium false true)) (defn subsection-header "Small de-emphasized header. Title can be either a string or a subscription vector" [title] (header-maker title :small false true)) ;;; FORM FIELDS (defn labelled-field "Form field with a label and arbitrary content" [& {:keys [label content field-key errors inline?]}] (let [error (and field-key (field-key errors))] [na/form-input {:inline? inline? :label label} content (when error [na/rail {:position "right" :attached? true :class-name "errmsg"} error])])) ;;; Working text-area Semantic UI 's text - area does not work smoothly with re - frame , for reasons that I ;;; don't yet fully understand. Issues: ;;; - :value does not behave right. It causes the cursor to jump to end of text after ;;; each input. This can be fixed by using :default-value instead, but that creates ;;; problems in situations where we need to inject a value after the component has ;;; been initialized. ;;; - Text input is very slow, losing characters when typing fast. ;;; [TODO] Really should fix this right, since I assume this code is losing other niceties offered by Semantic UI 's component . (defn native-text-area "Simple HTML text-area, to bypass some problems in Semantic UI (temp?)" [{:keys [on-change placeholder rows value] :as params}] [:textarea params]) ;;; TAGSONOMY (re-frame/reg-sub ;; Default sub for getting the set of all tags. ::all-tags (fn [db _] (set (get db ::all-tags)))) (re-frame/reg-event-db ;; Default event for setting the set of all tags ::all-tags (fn [db [_ tags]] (assoc db ::all-tags (set tags)))) (re-frame/reg-sub ;; Default sub for getting the set of selected tags. ::selected-tags (fn [db _] (set (get db ::selected-tags)))) (re-frame/reg-sub ::class-of-tag ;; Default sub for getting the class of a tag (fn [db [_ tag]] "tag")) (re-frame/reg-event-db ;; Default event for setting set of selected tags ::selected-tags (fn [db [_ tags]] (assoc db ::selected-tags (set tags)))) (defn- draw-tag "Draw one tag in a list of tags. See draw-tags" [{:keys [selected-tags-sub set-selected-tags-event class-of-tag-sub selected-class unselected-class]} tag] (let [selected-tags (or ((sub->fn selected-tags-sub)) #{}) selected? (contains? selected-tags tag)] [na/list-item {:key tag :on-click #(>evt (conj set-selected-tags-event ((if selected? 
disj conj) selected-tags tag)))} [:span {:class (str (<sub (conj class-of-tag-sub tag)) " " (if selected? selected-class unselected-class))} tag]])) (defn draw-tags "Draw a list of tags. Formatting will depend on whether the tag is in the list of selected tags. Clicking on a tag will toggle it between selected and unselected. Options: - :selected-tags-sub - Re-frame subscription that returns the set of selected tags - :set-selected-tags-event - Re-frame event that sets the set of selected tags - :class-of-tag-sub - Re-frame subscription that returns the CSS class for a tag - :selected-class - CSS class name for selected tags - :unselected-class - CSS class name for unselected tags - :sort? - Should the list of tags be sorted - tags - Set or sequence of tags to display" [{:keys [selected-tags-sub set-selected-tags-event class-of-tag-sub selected-class unselected-class sort?] :or {selected-tags-sub [::selected-tags] set-selected-tags-event [::selected-tags] class-of-tag-sub [::class-of-tag] selected-class "selected-tag" unselected-class "unselected-tag" sort? true}} tags] [na/list-na {:class-name "tags" :horizontal? true} (doall (map (partial draw-tag {:selected-tags-sub selected-tags-sub :set-selected-tags-event set-selected-tags-event :class-of-tag-sub class-of-tag-sub :selected-class selected-class :unselected-class unselected-class}) (if sort? (ci-sort tags) tags)))]) (defn tag-adder "Component that lets the user add a tag (existing or new) to the set of selected tags. Options: - :all-tags-sub - Re-frame subscription that returns the set of all tags - :selected-tags-sub - Function or re-frame subscription that returns the set of selected tags - :set-selected-tags-event - Function or re-frame event that sets the external set of selected tags - :partial-tag-text - Atom to use to hold text of new tag before it is added. This parameter is not often needed, but is important if something outside us needs to watch our exact state. I use this, for example, when I don't want to let a dialog close if the user has started to create a new tag but has not yet saved it. " [{:keys [all-tags-sub selected-tags-sub set-selected-tags-event partial-tag-text] :or {all-tags-sub [::all-tags] selected-tags-sub [::selected-tags] set-selected-tags-event [::selected-tags] partial-tag-text (reagent/atom "")}}] (fn [] (let [all-tags (<sub all-tags-sub) selected-tags (or ((sub->fn selected-tags-sub)) #{}) available-tags (ci-sort (clojure.set/difference all-tags selected-tags)) list-id (str (gensym "tags-")) input-id (str (gensym "tags-input-"))] [na/grid {:container? true} [na/grid-row {} [draw-tags {:selected-tags-sub selected-tags-sub :set-selected-tags-event set-selected-tags-event} selected-tags]] [na/grid-row {} `[:datalist {:id ~list-id} ~(map (fn [tag] [:option {:key tag :value tag}]) available-tags)] [na/input {:type :text :id input-id :list list-id ;; [???] Setting :value fails subtly, updating datalist options for ;; the previous character entered as each character is entered. ;; So, :default-value and pay the piper below :default-value (or @partial-tag-text "") :on-change (na/value->atom-fn partial-tag-text) :action (when-not (empty? @partial-tag-text) {:icon "add" :on-click #(let [tags (conj selected-tags @partial-tag-text)] (if (vector? set-selected-tags-event) (>evt (conj set-selected-tags-event tags)) (set-selected-tags-event tags)) (reset! partial-tag-text "") ;; Need to clear field explicitly, because ;; :default-value above (set! 
(.-value (.getElementById js/document input-id)) ""))}) :placeholder "add tag"}]]]))) (defn tag-selector "Component that lets the user select tags Options: - :all-tags-sub - Re-frame subscription that returns the set of all tags - :selected-tags-sub - Re-frame subscription that returns the set of selected tags - :set-selected-tags-event - Re-frame event that sets the set of selected tags" [{:keys [all-tags-sub selected-tags-sub set-selected-tags-event] :or {all-tags-sub [::all-tags] selected-tags-sub [::selected-tags] set-selected-tags-event [::selected-tags]}}] (let [available-tags (ci-sort (<sub all-tags-sub)) chosen-tags (ci-sort (<sub selected-tags-sub #{}))] [na/dropdown {:multiple? true :button? true :value chosen-tags :on-change (na/value->event-fn set-selected-tags-event {:default #{} :coercer set}) :options (na/dropdown-list available-tags identity identity)}])) GOOGLE ADS (defn google-ad "Google advert component. See -ui.com/views/advertisement and . - unit, ad-client, ad-slot - Supplied by your Google ad campaign - test - Text to render instead of a real ad. You will typically supply this in your development builds." [& {:keys [unit ad-client ad-slot test]}] (reagent/create-class {:display-name "google-ad" :component-did-mount #(when (and js.window.adsbygoogle (not test)) (. js.window.adsbygoogle push {})) :reagent-render (fn [& {:keys [unit ad-client ad-slot]}] [na/advertisement {:unit unit :centered? true :test test} (when-not test [:ins {:class-name "adsbygoogle" :style {:display "block"} :data-ad-format "auto" :data-ad-client ad-client :data-ad-slot ad-slot}])])}))
null
https://raw.githubusercontent.com/deg/sodium/1855fe870c3719af7d16d85c2558df1799a9bd32/src/sodium/extensions.cljs
clojure
HEADERS Various page and section headers/dividers FORM FIELDS Working text-area don't yet fully understand. Issues: - :value does not behave right. It causes the cursor to jump to end of text after each input. This can be fixed by using :default-value instead, but that creates problems in situations where we need to inject a value after the component has been initialized. - Text input is very slow, losing characters when typing fast. [TODO] Really should fix this right, since I assume this code is losing other TAGSONOMY Default sub for getting the set of all tags. Default event for setting the set of all tags Default sub for getting the set of selected tags. Default sub for getting the class of a tag Default event for setting set of selected tags [???] Setting :value fails subtly, updating datalist options for the previous character entered as each character is entered. So, :default-value and pay the piper below Need to clear field explicitly, because :default-value above
Author : ( ) Copyright ( c ) 2017 , (ns sodium.extensions (:require [clojure.spec.alpha :as s] [reagent.core :as reagent] [re-frame.core :as re-frame] [re-frame.loggers :refer [console]] [iron.re-utils :refer [<sub >evt sub->fn event->fn]] [iron.utils :refer [ci-sort validate]] [sodium.core :as na] [sodium.utils :as utils])) (defn- header-maker [title size dividing? sub?] {:pre [(validate (s/or :string string? :event vector?) title) (validate boolean? dividing?) (validate :sodium/size size)]} (na/header {:content (if (vector? title) (str (<sub title)) title) :size size :dividing? dividing? :sub? sub?})) (defn app-header "Large header. Title can be either a string or a subscription vector" [title] (header-maker title :large true false)) (defn panel-header "Medium header. Title can be either a string or a subscription vector" [title] (header-maker title :medium false false)) (defn panel-subheader "Small header. Title can be either a string or a subscription vector" [title] (header-maker title :small false false)) (defn section-header "Medium de-emphasized header. Title can be either a string or a subscription vector" [title] (header-maker title :medium false true)) (defn subsection-header "Small de-emphasized header. Title can be either a string or a subscription vector" [title] (header-maker title :small false true)) (defn labelled-field "Form field with a label and arbitrary content" [& {:keys [label content field-key errors inline?]}] (let [error (and field-key (field-key errors))] [na/form-input {:inline? inline? :label label} content (when error [na/rail {:position "right" :attached? true :class-name "errmsg"} error])])) Semantic UI 's text - area does not work smoothly with re - frame , for reasons that I niceties offered by Semantic UI 's component . (defn native-text-area "Simple HTML text-area, to bypass some problems in Semantic UI (temp?)" [{:keys [on-change placeholder rows value] :as params}] [:textarea params]) (re-frame/reg-sub ::all-tags (fn [db _] (set (get db ::all-tags)))) (re-frame/reg-event-db ::all-tags (fn [db [_ tags]] (assoc db ::all-tags (set tags)))) (re-frame/reg-sub ::selected-tags (fn [db _] (set (get db ::selected-tags)))) (re-frame/reg-sub ::class-of-tag (fn [db [_ tag]] "tag")) (re-frame/reg-event-db ::selected-tags (fn [db [_ tags]] (assoc db ::selected-tags (set tags)))) (defn- draw-tag "Draw one tag in a list of tags. See draw-tags" [{:keys [selected-tags-sub set-selected-tags-event class-of-tag-sub selected-class unselected-class]} tag] (let [selected-tags (or ((sub->fn selected-tags-sub)) #{}) selected? (contains? selected-tags tag)] [na/list-item {:key tag :on-click #(>evt (conj set-selected-tags-event ((if selected? disj conj) selected-tags tag)))} [:span {:class (str (<sub (conj class-of-tag-sub tag)) " " (if selected? selected-class unselected-class))} tag]])) (defn draw-tags "Draw a list of tags. Formatting will depend on whether the tag is in the list of selected tags. Clicking on a tag will toggle it between selected and unselected. Options: - :selected-tags-sub - Re-frame subscription that returns the set of selected tags - :set-selected-tags-event - Re-frame event that sets the set of selected tags - :class-of-tag-sub - Re-frame subscription that returns the CSS class for a tag - :selected-class - CSS class name for selected tags - :unselected-class - CSS class name for unselected tags - :sort? 
- Should the list of tags be sorted - tags - Set or sequence of tags to display" [{:keys [selected-tags-sub set-selected-tags-event class-of-tag-sub selected-class unselected-class sort?] :or {selected-tags-sub [::selected-tags] set-selected-tags-event [::selected-tags] class-of-tag-sub [::class-of-tag] selected-class "selected-tag" unselected-class "unselected-tag" sort? true}} tags] [na/list-na {:class-name "tags" :horizontal? true} (doall (map (partial draw-tag {:selected-tags-sub selected-tags-sub :set-selected-tags-event set-selected-tags-event :class-of-tag-sub class-of-tag-sub :selected-class selected-class :unselected-class unselected-class}) (if sort? (ci-sort tags) tags)))]) (defn tag-adder "Component that lets the user add a tag (existing or new) to the set of selected tags. Options: - :all-tags-sub - Re-frame subscription that returns the set of all tags - :selected-tags-sub - Function or re-frame subscription that returns the set of selected tags - :set-selected-tags-event - Function or re-frame event that sets the external set of selected tags - :partial-tag-text - Atom to use to hold text of new tag before it is added. This parameter is not often needed, but is important if something outside us needs to watch our exact state. I use this, for example, when I don't want to let a dialog close if the user has started to create a new tag but has not yet saved it. " [{:keys [all-tags-sub selected-tags-sub set-selected-tags-event partial-tag-text] :or {all-tags-sub [::all-tags] selected-tags-sub [::selected-tags] set-selected-tags-event [::selected-tags] partial-tag-text (reagent/atom "")}}] (fn [] (let [all-tags (<sub all-tags-sub) selected-tags (or ((sub->fn selected-tags-sub)) #{}) available-tags (ci-sort (clojure.set/difference all-tags selected-tags)) list-id (str (gensym "tags-")) input-id (str (gensym "tags-input-"))] [na/grid {:container? true} [na/grid-row {} [draw-tags {:selected-tags-sub selected-tags-sub :set-selected-tags-event set-selected-tags-event} selected-tags]] [na/grid-row {} `[:datalist {:id ~list-id} ~(map (fn [tag] [:option {:key tag :value tag}]) available-tags)] [na/input {:type :text :id input-id :list list-id :default-value (or @partial-tag-text "") :on-change (na/value->atom-fn partial-tag-text) :action (when-not (empty? @partial-tag-text) {:icon "add" :on-click #(let [tags (conj selected-tags @partial-tag-text)] (if (vector? set-selected-tags-event) (>evt (conj set-selected-tags-event tags)) (set-selected-tags-event tags)) (reset! partial-tag-text "") (set! (.-value (.getElementById js/document input-id)) ""))}) :placeholder "add tag"}]]]))) (defn tag-selector "Component that lets the user select tags Options: - :all-tags-sub - Re-frame subscription that returns the set of all tags - :selected-tags-sub - Re-frame subscription that returns the set of selected tags - :set-selected-tags-event - Re-frame event that sets the set of selected tags" [{:keys [all-tags-sub selected-tags-sub set-selected-tags-event] :or {all-tags-sub [::all-tags] selected-tags-sub [::selected-tags] set-selected-tags-event [::selected-tags]}}] (let [available-tags (ci-sort (<sub all-tags-sub)) chosen-tags (ci-sort (<sub selected-tags-sub #{}))] [na/dropdown {:multiple? true :button? true :value chosen-tags :on-change (na/value->event-fn set-selected-tags-event {:default #{} :coercer set}) :options (na/dropdown-list available-tags identity identity)}])) GOOGLE ADS (defn google-ad "Google advert component. See -ui.com/views/advertisement and . 
- unit, ad-client, ad-slot - Supplied by your Google ad campaign - test - Text to render instead of a real ad. You will typically supply this in your development builds." [& {:keys [unit ad-client ad-slot test]}] (reagent/create-class {:display-name "google-ad" :component-did-mount #(when (and js.window.adsbygoogle (not test)) (. js.window.adsbygoogle push {})) :reagent-render (fn [& {:keys [unit ad-client ad-slot]}] [na/advertisement {:unit unit :centered? true :test test} (when-not test [:ins {:class-name "adsbygoogle" :style {:display "block"} :data-ad-format "auto" :data-ad-client ad-client :data-ad-slot ad-slot}])])}))
c132cac76cdab969457dbe9a6c4cf8f380db5ca4ee9d794d2a0f08c6dcb71067
Helium4Haskell/helium
Overlap2.hs
main :: Bool -> () main True = () main False = () main _ = ()
null
https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/staticwarnings/Overlap2.hs
haskell
main :: Bool -> () main True = () main False = () main _ = ()
deb2ec8c96397d687869a851976e72a04d38913cfd0d1ce9ed38b85e76b0b241
avichalp/hql
core_test.clj
(ns hql.core-test (:require [clojure.test :refer :all] [hql.core])) (defmacro deftest-graphql [& ts] `(deftest graphql-test ~@(mapv (fn [[d gq q]] `(testing ~d (is (= ~gq (hql.core/graphql ~q))))) ts))) (deftest-graphql ["Test a simple query" "query {viewer{login}}" [:query [:viewer [:login]]]] ["Test query with variables" "query ($number_of_repos: Int!){viewer{name repositories(last: $number_of_repos){nodes{name}}}}" [:query {:$number_of_repos 'Int!} [:viewer [:name] [:repositories {:last '$number_of_repos} [:nodes [:name]]]]]] ["Query last 20 closed issues in a repo from Github" "query issuesQuery ($owner: String!, $repo: String!){repo: repository(owner: $owner, name: $repo){issues: issues(last: 20, states: CLOSED){edges{node{labels(first: 5){edges{node{name}}}}}}}}" [:query "issuesQuery" {:$owner 'String! :$repo 'String!} [:repository "repo" {:owner '$owner, :name '$repo} [:issues "issues" {:last 20, :states 'CLOSED} [:edges [:node [:labels {:first 5} [:edges [:node [:name]]]]]]]]]] ["Test Mutation, add a reaction to a Github comment" "mutation AddReactionToIssue{addReaction(input: {subjectId: \"MDU6SXNzdWUyMzEzOTE1NTE=\", content: HOORAY}){reaction{content} subject{id}}}" [:mutation "AddReactionToIssue" [:addReaction {:input {:subjectId "MDU6SXNzdWUyMzEzOTE1NTE=" :content 'HOORAY}} [:reaction [:content]] [:subject [:id]]]]] ["Test a GraphQL document with query and fragments and inline fragments" "query MyQuery ($myId: Int!){...FriendFields ... on User{friends{count}} user(id: $myId){profilePic}}fragment friendFields on User{profilePic(size: 50)}" [[:query "MyQuery" {:$myId 'Int!} [:fragment "FriendFields"] [:fragment {:on 'User} [:friends [:count]]] [:user {:id '$myId} [:profilePic]]] [:fragment "friendFields" {:on 'User} [:profilePic {:size 50}]]]] ) (comment (run-tests *ns*))
null
https://raw.githubusercontent.com/avichalp/hql/b216eaa0de499d29b891a28e5c8f21b7a1cbee45/test/hql/core_test.clj
clojure
(ns hql.core-test (:require [clojure.test :refer :all] [hql.core])) (defmacro deftest-graphql [& ts] `(deftest graphql-test ~@(mapv (fn [[d gq q]] `(testing ~d (is (= ~gq (hql.core/graphql ~q))))) ts))) (deftest-graphql ["Test a simple query" "query {viewer{login}}" [:query [:viewer [:login]]]] ["Test query with variables" "query ($number_of_repos: Int!){viewer{name repositories(last: $number_of_repos){nodes{name}}}}" [:query {:$number_of_repos 'Int!} [:viewer [:name] [:repositories {:last '$number_of_repos} [:nodes [:name]]]]]] ["Query last 20 closed issues in a repo from Github" "query issuesQuery ($owner: String!, $repo: String!){repo: repository(owner: $owner, name: $repo){issues: issues(last: 20, states: CLOSED){edges{node{labels(first: 5){edges{node{name}}}}}}}}" [:query "issuesQuery" {:$owner 'String! :$repo 'String!} [:repository "repo" {:owner '$owner, :name '$repo} [:issues "issues" {:last 20, :states 'CLOSED} [:edges [:node [:labels {:first 5} [:edges [:node [:name]]]]]]]]]] ["Test Mutation, add a reaction to a Github comment" "mutation AddReactionToIssue{addReaction(input: {subjectId: \"MDU6SXNzdWUyMzEzOTE1NTE=\", content: HOORAY}){reaction{content} subject{id}}}" [:mutation "AddReactionToIssue" [:addReaction {:input {:subjectId "MDU6SXNzdWUyMzEzOTE1NTE=" :content 'HOORAY}} [:reaction [:content]] [:subject [:id]]]]] ["Test a GraphQL document with query and fragments and inline fragments" "query MyQuery ($myId: Int!){...FriendFields ... on User{friends{count}} user(id: $myId){profilePic}}fragment friendFields on User{profilePic(size: 50)}" [[:query "MyQuery" {:$myId 'Int!} [:fragment "FriendFields"] [:fragment {:on 'User} [:friends [:count]]] [:user {:id '$myId} [:profilePic]]] [:fragment "friendFields" {:on 'User} [:profilePic {:size 50}]]]] ) (comment (run-tests *ns*))
6674a746a8f4f90dfcec67f13b4d687e10caba6677ebd63ad8ad22d7417a2b7b
joaotavora/sly
slynk-retro.lisp
(defpackage :slynk-retro (:use :cl :slynk :slynk-api)) (in-package :slynk-retro) (defun ensure-slynk-package-nicknames (&rest ignored) "Nickname all SLYNK-* package to SWANK-*" (declare (ignore ignored)) (loop for package in (list-all-packages) for package-name = (package-name package) when (search "SLYNK" package-name :test #'char-equal) do (rename-package package package-name (remove-duplicates (cons (format nil "SWANK~a" (subseq package-name 5)) (package-nicknames package)) :test #'string-equal)))) (defun load-swankrcs-maybe () (find-if (lambda (homedir-file) (load (merge-pathnames (user-homedir-pathname) homedir-file) :if-does-not-exist nil)) (list (make-pathname :name ".swank" :type "lisp") (make-pathname :name ".swankrc")))) (setq slynk-rpc:*translating-swank-to-slynk* nil) (push #'ensure-slynk-package-nicknames slynk-api:*slynk-require-hook*) (ensure-slynk-package-nicknames) ;;; Take this chance to load ~/.swank.lisp and ~/.swankrc if no ;;; ~/.slynk.lisp or ~/.slynkrc have already been loaded. ;;; (unless slynk-api:*loaded-user-init-file* (setq slynk-api:*loaded-user-init-file* (load-swankrcs-maybe))) (provide :slynk/retro)
null
https://raw.githubusercontent.com/joaotavora/sly/5966d68727898fa6130fb6bb02208f70aa8d5ce3/contrib/slynk-retro.lisp
lisp
Take this chance to load ~/.swank.lisp and ~/.swankrc if no ~/.slynk.lisp or ~/.slynkrc have already been loaded.
(defpackage :slynk-retro (:use :cl :slynk :slynk-api)) (in-package :slynk-retro) (defun ensure-slynk-package-nicknames (&rest ignored) "Nickname all SLYNK-* package to SWANK-*" (declare (ignore ignored)) (loop for package in (list-all-packages) for package-name = (package-name package) when (search "SLYNK" package-name :test #'char-equal) do (rename-package package package-name (remove-duplicates (cons (format nil "SWANK~a" (subseq package-name 5)) (package-nicknames package)) :test #'string-equal)))) (defun load-swankrcs-maybe () (find-if (lambda (homedir-file) (load (merge-pathnames (user-homedir-pathname) homedir-file) :if-does-not-exist nil)) (list (make-pathname :name ".swank" :type "lisp") (make-pathname :name ".swankrc")))) (setq slynk-rpc:*translating-swank-to-slynk* nil) (push #'ensure-slynk-package-nicknames slynk-api:*slynk-require-hook*) (ensure-slynk-package-nicknames) (unless slynk-api:*loaded-user-init-file* (setq slynk-api:*loaded-user-init-file* (load-swankrcs-maybe))) (provide :slynk/retro)
6792c981f4d71d434041a70d55e5961af263664b98a77c5dbabfd24de9a63f40
backtracking/ocamlgraph
dGraphRandModel.ml
(**************************************************************************) (* *) (* This file is part of OcamlGraph. *) (* *) Copyright ( C ) 2009 - 2010 CEA ( Commissariat à l'Énergie Atomique ) (* *) (* you can redistribute it and/or modify it under the terms of the GNU *) Lesser General Public License as published by the Free Software Foundation , version 2.1 , with a linking exception . (* *) (* It is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Lesser General Public License for more details. *) (* *) (* See the file ../LICENSE for more details. *) (* *) (* Authors: *) ( ) (* - Jean-Denis Koeck () *) - ( ) (* *) (**************************************************************************) let element = function | [] -> invalid_arg "empty list in element" | l -> Random.self_init (); List.nth l (Random.int (List.length l)) let black = 0x000000 and white = 0xFFFFFF and red = 0xFF0000 and green = 0x00FF00 and blue = 0x0000FF and yellow = 0xFFFF00 and cyan = 0x00FFFF and magenta = 0xFF00FF module Vertex = struct type t = int end module Edge = struct type t = int let compare : int -> int -> int = Stdlib.compare let default = 0 end module G = Imperative.Digraph.AbstractLabeled(Vertex)(Edge) module R = Rand.I(G) module GraphAttrs = struct include G let graph_attributes _ = [] let default_vertex_attributes _ = [] let vertex_name v = string_of_int (G.V.label v) let vertex_attributes _ = let shape = element [`Ellipse; `Box; `Circle; `Doublecircle; `Diamond] in let color = element [black; white; red; green; blue; yellow; cyan; magenta] in [`Shape shape; `Color color] let default_edge_attributes _ = [] let edge_attributes _ = [] let get_subgraph _ = None end module Model = DGraphModel.Make(GraphAttrs) let create () = State Random.self_init (); let v = 100 in let e = Random.int (v*2) in let g = R.graph ~loops:true ~v ~e () in Model.from_graph g
null
https://raw.githubusercontent.com/backtracking/ocamlgraph/1c028af097339ca8bc379436f7bd9477fa3a49cd/src/dGraphRandModel.ml
ocaml
************************************************************************ This file is part of OcamlGraph. you can redistribute it and/or modify it under the terms of the GNU It is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. See the file ../LICENSE for more details. Authors: - Jean-Denis Koeck () ************************************************************************
Copyright ( C ) 2009 - 2010 CEA ( Commissariat à l'Énergie Atomique ) Lesser General Public License as published by the Free Software Foundation , version 2.1 , with a linking exception . ( ) - ( ) let element = function | [] -> invalid_arg "empty list in element" | l -> Random.self_init (); List.nth l (Random.int (List.length l)) let black = 0x000000 and white = 0xFFFFFF and red = 0xFF0000 and green = 0x00FF00 and blue = 0x0000FF and yellow = 0xFFFF00 and cyan = 0x00FFFF and magenta = 0xFF00FF module Vertex = struct type t = int end module Edge = struct type t = int let compare : int -> int -> int = Stdlib.compare let default = 0 end module G = Imperative.Digraph.AbstractLabeled(Vertex)(Edge) module R = Rand.I(G) module GraphAttrs = struct include G let graph_attributes _ = [] let default_vertex_attributes _ = [] let vertex_name v = string_of_int (G.V.label v) let vertex_attributes _ = let shape = element [`Ellipse; `Box; `Circle; `Doublecircle; `Diamond] in let color = element [black; white; red; green; blue; yellow; cyan; magenta] in [`Shape shape; `Color color] let default_edge_attributes _ = [] let edge_attributes _ = [] let get_subgraph _ = None end module Model = DGraphModel.Make(GraphAttrs) let create () = State Random.self_init (); let v = 100 in let e = Random.int (v*2) in let g = R.graph ~loops:true ~v ~e () in Model.from_graph g
264ac8f611caadbc19704db1ed5e1e2e6a70b2ad5615409bbf7ab561f8df2c7f
bos/pronk
Environment.hs
# LANGUAGE DeriveDataTypeable , OverloadedStrings , RecordWildCards # module Network.HTTP.LoadTest.Environment ( Environment(..) , environment ) where import Control.Applicative ((<$>), (<*>), empty) import Data.Aeson.Types (Value(..), FromJSON(..), ToJSON(..), (.:), (.=), object) import Data.Data (Data) import Data.Typeable (Typeable) import GHC.Conc (numCapabilities) import System.PosixCompat.Unistd (SystemID(..), getSystemID) data Environment = Environment { osName :: String , osVersion :: String , hostName :: String , numCores :: Int } deriving (Eq, Read, Show, Typeable, Data) instance ToJSON Environment where toJSON Environment{..} = object [ "osName" .= osName , "osVersion" .= osVersion , "hostName" .= hostName , "numCores" .= numCores ] instance FromJSON Environment where parseJSON (Object v) = Environment <$> v .: "osName" <*> v .: "osVersion" <*> v .: "hostName" <*> v .: "numCores" parseJSON _ = empty environment :: IO Environment environment = do SystemID{..} <- getSystemID return Environment { osName = systemName , osVersion = version , hostName = nodeName , numCores = numCapabilities }
null
https://raw.githubusercontent.com/bos/pronk/e3a0f789801237b5abdd7b2c65d15b47d00d0b98/lib/Network/HTTP/LoadTest/Environment.hs
haskell
{-# LANGUAGE DeriveDataTypeable , OverloadedStrings , RecordWildCards #-} module Network.HTTP.LoadTest.Environment ( Environment(..) , environment ) where import Control.Applicative ((<$>), (<*>), empty) import Data.Aeson.Types (Value(..), FromJSON(..), ToJSON(..), (.:), (.=), object) import Data.Data (Data) import Data.Typeable (Typeable) import GHC.Conc (numCapabilities) import System.PosixCompat.Unistd (SystemID(..), getSystemID) data Environment = Environment { osName :: String , osVersion :: String , hostName :: String , numCores :: Int } deriving (Eq, Read, Show, Typeable, Data) instance ToJSON Environment where toJSON Environment{..} = object [ "osName" .= osName , "osVersion" .= osVersion , "hostName" .= hostName , "numCores" .= numCores ] instance FromJSON Environment where parseJSON (Object v) = Environment <$> v .: "osName" <*> v .: "osVersion" <*> v .: "hostName" <*> v .: "numCores" parseJSON _ = empty environment :: IO Environment environment = do SystemID{..} <- getSystemID return Environment { osName = systemName , osVersion = version , hostName = nodeName , numCores = numCapabilities }
0e4124859e18485e6d8dc70498c6dc99278abc3813bc8d111652367fbb39229e
incoherentsoftware/defect-process
Types.hs
module Level.Room.Item.Pickup.Types ( ItemPickupIsBuyConfirmOnInteract , ItemPickupBuyConfirmStartMessages , ItemPickupThinkBuyConfirm , ItemPickupUpdateBuyConfirm , ItemPickupDrawBuyConfirm , ItemPickupBuyConfirmData(..) , ItemPickupStatus(..) , ItemPickupData(..) ) where import qualified Data.Text as T import AppEnv import Level.Room.Item.Types import Level.Room.Types import Msg.Payload import Msg.Phase import Msg.Types import Window.Graphics import World.Util type ItemPickupIsBuyConfirmOnInteract m = m Bool type ItemPickupBuyConfirmStartMessages m = RoomItem ItemPickupData -> m [Msg ThinkLevelMsgsPhase] type ItemPickupThinkBuyConfirm m = RoomItem ItemPickupData -> m [Msg ThinkLevelMsgsPhase] type ItemPickupUpdateBuyConfirm m = ItemPickupData -> m ItemPickupBuyConfirmData type ItemPickupDrawBuyConfirm m = RoomItem ItemPickupData -> m () data ItemPickupBuyConfirmData = ItemPickupBuyConfirmData { _selectDisplayText :: DisplayText , _upAliasInputDisplayText :: InputDisplayText , _downAliasInputDisplayText :: InputDisplayText , _confirmDisplayText :: DisplayText , _interactAliasInputDisplayText :: InputDisplayText , _selectedLineIndex :: Int , _replace0DisplayText :: DisplayText , _replace1DisplayText :: DisplayText , _replace2DisplayText :: DisplayText , _line0DisplayText :: DisplayText , _line1DisplayText :: DisplayText , _line1SelectedDisplayText :: DisplayText , _line2DisplayText :: DisplayText , _line2SelectedDisplayText :: DisplayText , _line3DisplayText :: DisplayText , _line3SelectedDisplayText :: DisplayText , _line4DisplayText :: DisplayText , _slotsOverlayNeutralSelectedImage :: Image , _slotsOverlayUpSelectedImage :: Image , _slotsOverlayDownSelectedImage :: Image , _literalEmptyText :: DisplayText } data ItemPickupStatus = ItemPickupNormalStatus | ItemPickupIndicatorStatus Sprite | ItemPickupReappearStatus Sprite | ItemPickupBuyConfirmStatus deriving Eq data ItemPickupData = ItemPickupData { _name :: T.Text , _buyMsgPayload :: PlayerMsgPayload , _cost :: GoldValue , _touchingPlayer :: Bool , _roomType :: RoomType , _image :: Image , _costInputDisplayText :: InputDisplayText , _buyInfoInputDisplayText :: InputDisplayText , _status :: ItemPickupStatus , _buyConfirmData :: ItemPickupBuyConfirmData , _isBuyConfirmOnInteract :: ItemPickupIsBuyConfirmOnInteract (AppEnv ThinkLevelMsgsPhase) , _buyConfirmStartMessages :: ItemPickupBuyConfirmStartMessages (AppEnv ThinkLevelMsgsPhase) , _thinkBuyConfirm :: ItemPickupThinkBuyConfirm (AppEnv ThinkLevelMsgsPhase) , _updateBuyConfirm :: ItemPickupUpdateBuyConfirm (AppEnv UpdateLevelMsgsPhase) , _drawBuyConfirmOverlay :: ItemPickupDrawBuyConfirm (AppEnv DrawMsgsPhase) }
null
https://raw.githubusercontent.com/incoherentsoftware/defect-process/4ce836f8996f5d5be050b6fe3a79c42a7ae7caa8/src/Level/Room/Item/Pickup/Types.hs
haskell
module Level.Room.Item.Pickup.Types ( ItemPickupIsBuyConfirmOnInteract , ItemPickupBuyConfirmStartMessages , ItemPickupThinkBuyConfirm , ItemPickupUpdateBuyConfirm , ItemPickupDrawBuyConfirm , ItemPickupBuyConfirmData(..) , ItemPickupStatus(..) , ItemPickupData(..) ) where import qualified Data.Text as T import AppEnv import Level.Room.Item.Types import Level.Room.Types import Msg.Payload import Msg.Phase import Msg.Types import Window.Graphics import World.Util type ItemPickupIsBuyConfirmOnInteract m = m Bool type ItemPickupBuyConfirmStartMessages m = RoomItem ItemPickupData -> m [Msg ThinkLevelMsgsPhase] type ItemPickupThinkBuyConfirm m = RoomItem ItemPickupData -> m [Msg ThinkLevelMsgsPhase] type ItemPickupUpdateBuyConfirm m = ItemPickupData -> m ItemPickupBuyConfirmData type ItemPickupDrawBuyConfirm m = RoomItem ItemPickupData -> m () data ItemPickupBuyConfirmData = ItemPickupBuyConfirmData { _selectDisplayText :: DisplayText , _upAliasInputDisplayText :: InputDisplayText , _downAliasInputDisplayText :: InputDisplayText , _confirmDisplayText :: DisplayText , _interactAliasInputDisplayText :: InputDisplayText , _selectedLineIndex :: Int , _replace0DisplayText :: DisplayText , _replace1DisplayText :: DisplayText , _replace2DisplayText :: DisplayText , _line0DisplayText :: DisplayText , _line1DisplayText :: DisplayText , _line1SelectedDisplayText :: DisplayText , _line2DisplayText :: DisplayText , _line2SelectedDisplayText :: DisplayText , _line3DisplayText :: DisplayText , _line3SelectedDisplayText :: DisplayText , _line4DisplayText :: DisplayText , _slotsOverlayNeutralSelectedImage :: Image , _slotsOverlayUpSelectedImage :: Image , _slotsOverlayDownSelectedImage :: Image , _literalEmptyText :: DisplayText } data ItemPickupStatus = ItemPickupNormalStatus | ItemPickupIndicatorStatus Sprite | ItemPickupReappearStatus Sprite | ItemPickupBuyConfirmStatus deriving Eq data ItemPickupData = ItemPickupData { _name :: T.Text , _buyMsgPayload :: PlayerMsgPayload , _cost :: GoldValue , _touchingPlayer :: Bool , _roomType :: RoomType , _image :: Image , _costInputDisplayText :: InputDisplayText , _buyInfoInputDisplayText :: InputDisplayText , _status :: ItemPickupStatus , _buyConfirmData :: ItemPickupBuyConfirmData , _isBuyConfirmOnInteract :: ItemPickupIsBuyConfirmOnInteract (AppEnv ThinkLevelMsgsPhase) , _buyConfirmStartMessages :: ItemPickupBuyConfirmStartMessages (AppEnv ThinkLevelMsgsPhase) , _thinkBuyConfirm :: ItemPickupThinkBuyConfirm (AppEnv ThinkLevelMsgsPhase) , _updateBuyConfirm :: ItemPickupUpdateBuyConfirm (AppEnv UpdateLevelMsgsPhase) , _drawBuyConfirmOverlay :: ItemPickupDrawBuyConfirm (AppEnv DrawMsgsPhase) }
397e440117fddccf395d4398961f5f3767a938c1ea77fc5bbdb6bc95ef95c5e9
dyzsr/ocaml-selectml
tophooks.mli
(**************************************************************************) (* *) (* OCaml *) (* *) , projet Cristal , INRIA Rocquencourt (* *) Copyright 1996 Institut National de Recherche en Informatique et (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) the GNU Lesser General Public License version 2.1 , with the (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************) (** This module contains sections of Topeval in native code which can be overridden, for example to change the linker. *) type lookup_fn = string -> Obj.t option type load_fn = Format.formatter -> string -> Lambda.program -> Topcommon.evaluation_outcome val lookup : lookup_fn (** Find a global symbol by name. Default implementation may be overridden with {!register_assembler}. *) val load : load_fn (** [load ppf phrase_name lambda] compiles and evaluates [lambda]. [phrase_name] may be used for temporary files and is unique. [ppf] may be used for debugging output. Default implementation may be overridden with {!register_loader}. *) val register_loader : lookup:lookup_fn -> load:load_fn -> unit (** Sets the functions used for {!lookup} and {!load}. *)
null
https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/toplevel/native/tophooks.mli
ocaml
************************************************************************ OCaml en Automatique. All rights reserved. This file is distributed under the terms of special exception on linking described in the file LICENSE. ************************************************************************ * This module contains sections of Topeval in native code which can be overridden, for example to change the linker. * Find a global symbol by name. Default implementation may be overridden with {!register_assembler}. * [load ppf phrase_name lambda] compiles and evaluates [lambda]. [phrase_name] may be used for temporary files and is unique. [ppf] may be used for debugging output. Default implementation may be overridden with {!register_loader}. * Sets the functions used for {!lookup} and {!load}.
, projet Cristal , INRIA Rocquencourt Copyright 1996 Institut National de Recherche en Informatique et the GNU Lesser General Public License version 2.1 , with the type lookup_fn = string -> Obj.t option type load_fn = Format.formatter -> string -> Lambda.program -> Topcommon.evaluation_outcome val lookup : lookup_fn val load : load_fn val register_loader : lookup:lookup_fn -> load:load_fn -> unit
32d2e2c54bb3b2a53d8eea2be1551516c19bc9e94335357d421fd08550e08143
cstar/ejabberd-old
pubsub_subscription_odbc.erl
%%% ==================================================================== ` ` The contents of this file are subject to the Erlang Public License , Version 1.1 , ( the " License " ) ; you may not use this file except in %%% compliance with the License. You should have received a copy of the %%% Erlang Public License along with this software. If not, it can be %%% retrieved via the world wide web at /. %%% Software distributed under the License is distributed on an " AS IS " %%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See %%% the License for the specific language governing rights and limitations %%% under the License. %%% The Initial Developer of the Original Code is ProcessOne . Portions created by ProcessOne are Copyright 2006 - 2009 , ProcessOne All Rights Reserved . '' This software is copyright 2006 - 2009 , ProcessOne . %%% @author > @author based on pubsub_subscription.erl by < > %%% @version {@vsn}, {@date} {@time} %%% @end %%% ==================================================================== -module(pubsub_subscription_odbc). -author(""). %% API -export([init/0, subscribe_node/3, unsubscribe_node/3, get_subscription/3, set_subscription/4, get_options_xform/2, parse_options_xform/1]). -include("pubsub.hrl"). -include("jlib.hrl"). -define(PUBSUB_DELIVER, "pubsub#deliver"). -define(PUBSUB_DIGEST, "pubsub#digest"). -define(PUBSUB_DIGEST_FREQUENCY, "pubsub#digest_frequency"). -define(PUBSUB_EXPIRE, "pubsub#expire"). -define(PUBSUB_INCLUDE_BODY, "pubsub#include_body"). -define(PUBSUB_SHOW_VALUES, "pubsub#show-values"). -define(PUBSUB_SUBSCRIPTION_TYPE, "pubsub#subscription_type"). -define(PUBSUB_SUBSCRIPTION_DEPTH, "pubsub#subscription_depth"). -define(DELIVER_LABEL, "Whether an entity wants to receive or disable notifications"). -define(DIGEST_LABEL, "Whether an entity wants to receive digests (aggregations) of notifications or all notifications individually"). -define(DIGEST_FREQUENCY_LABEL, "The minimum number of milliseconds between sending any two notification digests"). -define(EXPIRE_LABEL, "The DateTime at which a leased subscription will end or has ended"). -define(INCLUDE_BODY_LABEL, "Whether an entity wants to receive an XMPP message body in addition to the payload format"). -define(SHOW_VALUES_LABEL, "The presence states for which an entity wants to receive notifications"). -define(SUBSCRIPTION_TYPE_LABEL, "Type of notification to receive"). -define(SUBSCRIPTION_DEPTH_LABEL, "Depth from subscription for which to receive notifications"). -define(SHOW_VALUE_AWAY_LABEL, "XMPP Show Value of Away"). -define(SHOW_VALUE_CHAT_LABEL, "XMPP Show Value of Chat"). -define(SHOW_VALUE_DND_LABEL, "XMPP Show Value of DND (Do Not Disturb)"). -define(SHOW_VALUE_ONLINE_LABEL, "Mere Availability in XMPP (No Show Value)"). -define(SHOW_VALUE_XA_LABEL, "XMPP Show Value of XA (Extended Away)"). -define(SUBSCRIPTION_TYPE_VALUE_ITEMS_LABEL, "Receive notification of new items only"). -define(SUBSCRIPTION_TYPE_VALUE_NODES_LABEL, "Receive notification of new nodes only"). -define(SUBSCRIPTION_DEPTH_VALUE_ONE_LABEL, "Receive notification from direct child nodes only"). -define(SUBSCRIPTION_DEPTH_VALUE_ALL_LABEL, "Receive notification from all descendent nodes"). -define(DB_MOD, pubsub_db_odbc). %%==================================================================== %% API %%==================================================================== init() -> ok = create_table(). 
subscribe_node(_JID, _NodeID, Options) -> SubID = make_subid(), ?DB_MOD:add_subscription(#pubsub_subscription{subid = SubID, options = Options}), {result, SubID}. unsubscribe_node(_JID, _NodeID, SubID) -> case ?DB_MOD:read_subscription(SubID) of {ok, Sub} -> ?DB_MOD:delete_subscription(SubID), {result, Sub}; notfound -> {error, notfound} end. get_subscription(_JID, _NodeID, SubID) -> case ?DB_MOD:read_subscription(SubID) of {ok, Sub} -> {result, Sub}; notfound -> {error, notfound} end. set_subscription(_JID, _NodeID, SubID, Options) -> case ?DB_MOD:read_subscription(SubID) of {ok, _} -> ?DB_MOD:update_subscription(#pubsub_subscription{subid = SubID, options = Options}), {result, ok}; notfound -> ?DB_MOD:add_subscription(#pubsub_subscription{subid = SubID, options = Options}), {result, ok} end. get_options_xform(Lang, Options) -> Keys = [deliver, show_values, subscription_type, subscription_depth], XFields = [get_option_xfield(Lang, Key, Options) || Key <- Keys], {result, {xmlelement, "x", [{"xmlns", ?NS_XDATA}], [{xmlelement, "field", [{"var", "FORM_TYPE"}, {"type", "hidden"}], [{xmlelement, "value", [], [{xmlcdata, ?NS_PUBSUB_SUB_OPTIONS}]}]}] ++ XFields}}. parse_options_xform(XFields) -> case xml:remove_cdata(XFields) of [] -> {result, []}; [{xmlelement, "x", _Attrs, _Els} = XEl] -> case jlib:parse_xdata_submit(XEl) of XData when is_list(XData) -> case set_xoption(XData, []) of Opts when is_list(Opts) -> {result, Opts}; Other -> Other end; Other -> Other end; Other -> Other end. %%==================================================================== Internal functions %%==================================================================== create_table() -> ok. make_subid() -> {T1, T2, T3} = now(), lists:flatten(io_lib:fwrite("~.16B~.16B~.16B", [T1, T2, T3])). %% %% Subscription XForm processing. %% %% Return processed options, with types converted and so forth, using Opts as defaults . set_xoption([], Opts) -> Opts; set_xoption([{Var, Value} | T], Opts) -> NewOpts = case var_xfield(Var) of {error, _} -> Opts; Key -> Val = val_xfield(Key, Value), lists:keystore(Key, 1, Opts, {Key, Val}) end, set_xoption(T, NewOpts). %% Return the options list's key for an XForm var. var_xfield(?PUBSUB_DELIVER) -> deliver; var_xfield(?PUBSUB_DIGEST) -> digest; var_xfield(?PUBSUB_DIGEST_FREQUENCY) -> digest_frequency; var_xfield(?PUBSUB_EXPIRE) -> expire; var_xfield(?PUBSUB_INCLUDE_BODY) -> include_body; var_xfield(?PUBSUB_SHOW_VALUES) -> show_values; var_xfield(?PUBSUB_SUBSCRIPTION_TYPE) -> subscription_type; var_xfield(?PUBSUB_SUBSCRIPTION_DEPTH) -> subscription_depth; var_xfield(_) -> {error, badarg}. %% Convert Values for option list's Key. val_xfield(deliver, [Val]) -> xopt_to_bool(Val); val_xfield(digest, [Val]) -> xopt_to_bool(Val); val_xfield(digest_frequency, [Val]) -> list_to_integer(Val); val_xfield(expire, [Val]) -> jlib:datetime_string_to_timestamp(Val); val_xfield(include_body, [Val]) -> xopt_to_bool(Val); val_xfield(show_values, Vals) -> Vals; val_xfield(subscription_type, ["items"]) -> items; val_xfield(subscription_type, ["nodes"]) -> nodes; val_xfield(subscription_depth, ["all"]) -> all; val_xfield(subscription_depth, [Depth]) -> case catch list_to_integer(Depth) of N when is_integer(N) -> N; _ -> {error, ?ERR_NOT_ACCEPTABLE} end. Convert XForm booleans to Erlang booleans . xopt_to_bool("0") -> false; xopt_to_bool("1") -> true; xopt_to_bool("false") -> false; xopt_to_bool("true") -> true; xopt_to_bool(_) -> {error, ?ERR_NOT_ACCEPTABLE}. 
%% Return a field for an XForm for Key, with data filled in, if %% applicable, from Options. get_option_xfield(Lang, Key, Options) -> Var = xfield_var(Key), Label = xfield_label(Key), {Type, OptEls} = type_and_options(xfield_type(Key), Lang), Vals = case lists:keysearch(Key, 1, Options) of {value, {_, Val}} -> [tr_xfield_values(Vals) || Vals <- xfield_val(Key, Val)]; false -> [] end, {xmlelement, "field", [{"var", Var}, {"type", Type}, {"label", translate:translate(Lang, Label)}], OptEls ++ Vals}. type_and_options({Type, Options}, Lang) -> {Type, [tr_xfield_options(O, Lang) || O <- Options]}; type_and_options(Type, _Lang) -> {Type, []}. tr_xfield_options({Value, Label}, Lang) -> {xmlelement, "option", [{"label", translate:translate(Lang, Label)}], [{xmlelement, "value", [], [{xmlcdata, Value}]}]}. tr_xfield_values(Value) -> {xmlelement, "value", [], [{xmlcdata, Value}]}. %% Return the XForm variable name for a subscription option key. xfield_var(deliver) -> ?PUBSUB_DELIVER; xfield_var(digest) -> ?PUBSUB_DIGEST; xfield_var(digest_frequency) -> ?PUBSUB_DIGEST_FREQUENCY; xfield_var(expire) -> ?PUBSUB_EXPIRE; xfield_var(include_body) -> ?PUBSUB_INCLUDE_BODY; xfield_var(show_values) -> ?PUBSUB_SHOW_VALUES; xfield_var(subscription_type) -> ?PUBSUB_SUBSCRIPTION_TYPE; xfield_var(subscription_depth) -> ?PUBSUB_SUBSCRIPTION_DEPTH. %% Return the XForm variable type for a subscription option key. xfield_type(deliver) -> "boolean"; xfield_type(digest) -> "boolean"; xfield_type(digest_frequency) -> "text-single"; xfield_type(expire) -> "text-single"; xfield_type(include_body) -> "boolean"; xfield_type(show_values) -> {"list-multi", [{"away", ?SHOW_VALUE_AWAY_LABEL}, {"chat", ?SHOW_VALUE_CHAT_LABEL}, {"dnd", ?SHOW_VALUE_DND_LABEL}, {"online", ?SHOW_VALUE_ONLINE_LABEL}, {"xa", ?SHOW_VALUE_XA_LABEL}]}; xfield_type(subscription_type) -> {"list-single", [{"items", ?SUBSCRIPTION_TYPE_VALUE_ITEMS_LABEL}, {"nodes", ?SUBSCRIPTION_TYPE_VALUE_NODES_LABEL}]}; xfield_type(subscription_depth) -> {"list-single", [{"1", ?SUBSCRIPTION_DEPTH_VALUE_ONE_LABEL}, {"all", ?SUBSCRIPTION_DEPTH_VALUE_ALL_LABEL}]}. %% Return the XForm variable label for a subscription option key. xfield_label(deliver) -> ?DELIVER_LABEL; xfield_label(digest) -> ?DIGEST_LABEL; xfield_label(digest_frequency) -> ?DIGEST_FREQUENCY_LABEL; xfield_label(expire) -> ?EXPIRE_LABEL; xfield_label(include_body) -> ?INCLUDE_BODY_LABEL; xfield_label(show_values) -> ?SHOW_VALUES_LABEL; xfield_label(subscription_type) -> ?SUBSCRIPTION_TYPE_LABEL; xfield_label(subscription_depth) -> ?SUBSCRIPTION_DEPTH_LABEL. %% Return the XForm value for a subscription option key. xfield_val(deliver, Val) -> [bool_to_xopt(Val)]; xfield_val(digest, Val) -> [bool_to_xopt(Val)]; xfield_val(digest_frequency, Val) -> [integer_to_list(Val)]; xfield_val(expire, Val) -> [jlib:now_to_utc_string(Val)]; xfield_val(include_body, Val) -> [bool_to_xopt(Val)]; xfield_val(show_values, Val) -> Val; xfield_val(subscription_type, items) -> ["items"]; xfield_val(subscription_type, nodes) -> ["nodes"]; xfield_val(subscription_depth, all) -> ["all"]; xfield_val(subscription_depth, N) -> [integer_to_list(N)]. Convert erlang booleans to XForms . bool_to_xopt(false) -> "false"; bool_to_xopt(true) -> "true".
null
https://raw.githubusercontent.com/cstar/ejabberd-old/559f8b6b0a935710fe93e9afacb4270d6d6ea00f/src/mod_pubsub/pubsub_subscription_odbc.erl
erlang
==================================================================== compliance with the License. You should have received a copy of the Erlang Public License along with this software. If not, it can be retrieved via the world wide web at /. basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License. @version {@vsn}, {@date} {@time} @end ==================================================================== API ==================================================================== API ==================================================================== ==================================================================== ==================================================================== Subscription XForm processing. Return processed options, with types converted and so forth, using Return the options list's key for an XForm var. Convert Values for option list's Key. Return a field for an XForm for Key, with data filled in, if applicable, from Options. Return the XForm variable name for a subscription option key. Return the XForm variable type for a subscription option key. Return the XForm variable label for a subscription option key. Return the XForm value for a subscription option key.
` ` The contents of this file are subject to the Erlang Public License , Version 1.1 , ( the " License " ) ; you may not use this file except in Software distributed under the License is distributed on an " AS IS " The Initial Developer of the Original Code is ProcessOne . Portions created by ProcessOne are Copyright 2006 - 2009 , ProcessOne All Rights Reserved . '' This software is copyright 2006 - 2009 , ProcessOne . @author > @author based on pubsub_subscription.erl by < > -module(pubsub_subscription_odbc). -author(""). -export([init/0, subscribe_node/3, unsubscribe_node/3, get_subscription/3, set_subscription/4, get_options_xform/2, parse_options_xform/1]). -include("pubsub.hrl"). -include("jlib.hrl"). -define(PUBSUB_DELIVER, "pubsub#deliver"). -define(PUBSUB_DIGEST, "pubsub#digest"). -define(PUBSUB_DIGEST_FREQUENCY, "pubsub#digest_frequency"). -define(PUBSUB_EXPIRE, "pubsub#expire"). -define(PUBSUB_INCLUDE_BODY, "pubsub#include_body"). -define(PUBSUB_SHOW_VALUES, "pubsub#show-values"). -define(PUBSUB_SUBSCRIPTION_TYPE, "pubsub#subscription_type"). -define(PUBSUB_SUBSCRIPTION_DEPTH, "pubsub#subscription_depth"). -define(DELIVER_LABEL, "Whether an entity wants to receive or disable notifications"). -define(DIGEST_LABEL, "Whether an entity wants to receive digests (aggregations) of notifications or all notifications individually"). -define(DIGEST_FREQUENCY_LABEL, "The minimum number of milliseconds between sending any two notification digests"). -define(EXPIRE_LABEL, "The DateTime at which a leased subscription will end or has ended"). -define(INCLUDE_BODY_LABEL, "Whether an entity wants to receive an XMPP message body in addition to the payload format"). -define(SHOW_VALUES_LABEL, "The presence states for which an entity wants to receive notifications"). -define(SUBSCRIPTION_TYPE_LABEL, "Type of notification to receive"). -define(SUBSCRIPTION_DEPTH_LABEL, "Depth from subscription for which to receive notifications"). -define(SHOW_VALUE_AWAY_LABEL, "XMPP Show Value of Away"). -define(SHOW_VALUE_CHAT_LABEL, "XMPP Show Value of Chat"). -define(SHOW_VALUE_DND_LABEL, "XMPP Show Value of DND (Do Not Disturb)"). -define(SHOW_VALUE_ONLINE_LABEL, "Mere Availability in XMPP (No Show Value)"). -define(SHOW_VALUE_XA_LABEL, "XMPP Show Value of XA (Extended Away)"). -define(SUBSCRIPTION_TYPE_VALUE_ITEMS_LABEL, "Receive notification of new items only"). -define(SUBSCRIPTION_TYPE_VALUE_NODES_LABEL, "Receive notification of new nodes only"). -define(SUBSCRIPTION_DEPTH_VALUE_ONE_LABEL, "Receive notification from direct child nodes only"). -define(SUBSCRIPTION_DEPTH_VALUE_ALL_LABEL, "Receive notification from all descendent nodes"). -define(DB_MOD, pubsub_db_odbc). init() -> ok = create_table(). subscribe_node(_JID, _NodeID, Options) -> SubID = make_subid(), ?DB_MOD:add_subscription(#pubsub_subscription{subid = SubID, options = Options}), {result, SubID}. unsubscribe_node(_JID, _NodeID, SubID) -> case ?DB_MOD:read_subscription(SubID) of {ok, Sub} -> ?DB_MOD:delete_subscription(SubID), {result, Sub}; notfound -> {error, notfound} end. get_subscription(_JID, _NodeID, SubID) -> case ?DB_MOD:read_subscription(SubID) of {ok, Sub} -> {result, Sub}; notfound -> {error, notfound} end. set_subscription(_JID, _NodeID, SubID, Options) -> case ?DB_MOD:read_subscription(SubID) of {ok, _} -> ?DB_MOD:update_subscription(#pubsub_subscription{subid = SubID, options = Options}), {result, ok}; notfound -> ?DB_MOD:add_subscription(#pubsub_subscription{subid = SubID, options = Options}), {result, ok} end. 
get_options_xform(Lang, Options) -> Keys = [deliver, show_values, subscription_type, subscription_depth], XFields = [get_option_xfield(Lang, Key, Options) || Key <- Keys], {result, {xmlelement, "x", [{"xmlns", ?NS_XDATA}], [{xmlelement, "field", [{"var", "FORM_TYPE"}, {"type", "hidden"}], [{xmlelement, "value", [], [{xmlcdata, ?NS_PUBSUB_SUB_OPTIONS}]}]}] ++ XFields}}. parse_options_xform(XFields) -> case xml:remove_cdata(XFields) of [] -> {result, []}; [{xmlelement, "x", _Attrs, _Els} = XEl] -> case jlib:parse_xdata_submit(XEl) of XData when is_list(XData) -> case set_xoption(XData, []) of Opts when is_list(Opts) -> {result, Opts}; Other -> Other end; Other -> Other end; Other -> Other end. Internal functions create_table() -> ok. make_subid() -> {T1, T2, T3} = now(), lists:flatten(io_lib:fwrite("~.16B~.16B~.16B", [T1, T2, T3])). Opts as defaults . set_xoption([], Opts) -> Opts; set_xoption([{Var, Value} | T], Opts) -> NewOpts = case var_xfield(Var) of {error, _} -> Opts; Key -> Val = val_xfield(Key, Value), lists:keystore(Key, 1, Opts, {Key, Val}) end, set_xoption(T, NewOpts). var_xfield(?PUBSUB_DELIVER) -> deliver; var_xfield(?PUBSUB_DIGEST) -> digest; var_xfield(?PUBSUB_DIGEST_FREQUENCY) -> digest_frequency; var_xfield(?PUBSUB_EXPIRE) -> expire; var_xfield(?PUBSUB_INCLUDE_BODY) -> include_body; var_xfield(?PUBSUB_SHOW_VALUES) -> show_values; var_xfield(?PUBSUB_SUBSCRIPTION_TYPE) -> subscription_type; var_xfield(?PUBSUB_SUBSCRIPTION_DEPTH) -> subscription_depth; var_xfield(_) -> {error, badarg}. val_xfield(deliver, [Val]) -> xopt_to_bool(Val); val_xfield(digest, [Val]) -> xopt_to_bool(Val); val_xfield(digest_frequency, [Val]) -> list_to_integer(Val); val_xfield(expire, [Val]) -> jlib:datetime_string_to_timestamp(Val); val_xfield(include_body, [Val]) -> xopt_to_bool(Val); val_xfield(show_values, Vals) -> Vals; val_xfield(subscription_type, ["items"]) -> items; val_xfield(subscription_type, ["nodes"]) -> nodes; val_xfield(subscription_depth, ["all"]) -> all; val_xfield(subscription_depth, [Depth]) -> case catch list_to_integer(Depth) of N when is_integer(N) -> N; _ -> {error, ?ERR_NOT_ACCEPTABLE} end. Convert XForm booleans to Erlang booleans . xopt_to_bool("0") -> false; xopt_to_bool("1") -> true; xopt_to_bool("false") -> false; xopt_to_bool("true") -> true; xopt_to_bool(_) -> {error, ?ERR_NOT_ACCEPTABLE}. get_option_xfield(Lang, Key, Options) -> Var = xfield_var(Key), Label = xfield_label(Key), {Type, OptEls} = type_and_options(xfield_type(Key), Lang), Vals = case lists:keysearch(Key, 1, Options) of {value, {_, Val}} -> [tr_xfield_values(Vals) || Vals <- xfield_val(Key, Val)]; false -> [] end, {xmlelement, "field", [{"var", Var}, {"type", Type}, {"label", translate:translate(Lang, Label)}], OptEls ++ Vals}. type_and_options({Type, Options}, Lang) -> {Type, [tr_xfield_options(O, Lang) || O <- Options]}; type_and_options(Type, _Lang) -> {Type, []}. tr_xfield_options({Value, Label}, Lang) -> {xmlelement, "option", [{"label", translate:translate(Lang, Label)}], [{xmlelement, "value", [], [{xmlcdata, Value}]}]}. tr_xfield_values(Value) -> {xmlelement, "value", [], [{xmlcdata, Value}]}. xfield_var(deliver) -> ?PUBSUB_DELIVER; xfield_var(digest) -> ?PUBSUB_DIGEST; xfield_var(digest_frequency) -> ?PUBSUB_DIGEST_FREQUENCY; xfield_var(expire) -> ?PUBSUB_EXPIRE; xfield_var(include_body) -> ?PUBSUB_INCLUDE_BODY; xfield_var(show_values) -> ?PUBSUB_SHOW_VALUES; xfield_var(subscription_type) -> ?PUBSUB_SUBSCRIPTION_TYPE; xfield_var(subscription_depth) -> ?PUBSUB_SUBSCRIPTION_DEPTH. 
xfield_type(deliver) -> "boolean"; xfield_type(digest) -> "boolean"; xfield_type(digest_frequency) -> "text-single"; xfield_type(expire) -> "text-single"; xfield_type(include_body) -> "boolean"; xfield_type(show_values) -> {"list-multi", [{"away", ?SHOW_VALUE_AWAY_LABEL}, {"chat", ?SHOW_VALUE_CHAT_LABEL}, {"dnd", ?SHOW_VALUE_DND_LABEL}, {"online", ?SHOW_VALUE_ONLINE_LABEL}, {"xa", ?SHOW_VALUE_XA_LABEL}]}; xfield_type(subscription_type) -> {"list-single", [{"items", ?SUBSCRIPTION_TYPE_VALUE_ITEMS_LABEL}, {"nodes", ?SUBSCRIPTION_TYPE_VALUE_NODES_LABEL}]}; xfield_type(subscription_depth) -> {"list-single", [{"1", ?SUBSCRIPTION_DEPTH_VALUE_ONE_LABEL}, {"all", ?SUBSCRIPTION_DEPTH_VALUE_ALL_LABEL}]}. xfield_label(deliver) -> ?DELIVER_LABEL; xfield_label(digest) -> ?DIGEST_LABEL; xfield_label(digest_frequency) -> ?DIGEST_FREQUENCY_LABEL; xfield_label(expire) -> ?EXPIRE_LABEL; xfield_label(include_body) -> ?INCLUDE_BODY_LABEL; xfield_label(show_values) -> ?SHOW_VALUES_LABEL; xfield_label(subscription_type) -> ?SUBSCRIPTION_TYPE_LABEL; xfield_label(subscription_depth) -> ?SUBSCRIPTION_DEPTH_LABEL. xfield_val(deliver, Val) -> [bool_to_xopt(Val)]; xfield_val(digest, Val) -> [bool_to_xopt(Val)]; xfield_val(digest_frequency, Val) -> [integer_to_list(Val)]; xfield_val(expire, Val) -> [jlib:now_to_utc_string(Val)]; xfield_val(include_body, Val) -> [bool_to_xopt(Val)]; xfield_val(show_values, Val) -> Val; xfield_val(subscription_type, items) -> ["items"]; xfield_val(subscription_type, nodes) -> ["nodes"]; xfield_val(subscription_depth, all) -> ["all"]; xfield_val(subscription_depth, N) -> [integer_to_list(N)]. Convert erlang booleans to XForms . bool_to_xopt(false) -> "false"; bool_to_xopt(true) -> "true".
c6dc30824bdce17f2e39dd3cbd73b9f8a8fc56f872067fb95638a9e8a8cfd311
pedestal/pedestal-app
cljs_formatter.cljs
Copyright 2013 Relevance , Inc. ; The use and distribution terms for this software are covered by the Eclipse Public License 1.0 ( ) ; which can be found in the file epl-v10.html at the root of this distribution. ; ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. ; ; You must not remove this notice, or any other, from this software. ;; This namespace was copied from ;; -formatter. Once that library ;; has a release, we can add a dependency. (ns io.pedestal.app.render.push.cljs-formatter (:require [domina :as d] [domina.xpath :as dx] [clojure.string :as string] [goog.dom :as gdom] [goog.style :as style] [goog.color :as color] [goog.dom.classes :as classes] [goog.events :as events])) ;;; Data to HTML strings (defn span [class body] (str "<span class='" class "'>" body "</span>")) (defn literal [class x] (span class (pr-str x))) (declare html) (defn join [separator coll] (string/join (span "separator" separator) (map html coll))) (defn html-collection [class opener closer coll] (span (str "collection " class) (str (span "opener" opener) (span "contents" (join " " coll)) (span "closer" closer)))) (defn html-keyval [[k v]] (span "keyval" (str (html k) (span "separator" " ") (html v)))) (defn html-keyvals [coll] (string/join (span "separator" ", ") (map html-keyval coll))) (defn html-map [coll] (span "collection map" (str (span "opener" "{") (span "contents" (html-keyvals coll)) (span "closer" "}")))) (defn html-string [s] (span "string" (str (span "opener" "\"") (span "contents" s) (span "closer" "\"")))) (defn html [x] (cond (number? x) (literal "number" x) (keyword? x) (literal "keyword" x) (symbol? x) (literal "symbol" x) (string? x) (html-string x) (map? x) (html-map x) (set? x) (html-collection "set" "#{" "}" x) (vector? x) (html-collection "vector" "[" "]" x) (seq? x) (html-collection "seq" "(" ")" x) :else (literal "literal" x))) DOM layout (defn overflow? [child parent] (let [parent-box (.toBox (style/getBounds parent)) child-box (.toBox (style/getBounds child))] (< (.-right parent-box) (.-right child-box)))) (defn max-inline-width [elem container] (let [child (d/single-node elem) parent (.-parentNode (d/single-node elem)) container-node (d/single-node container) left-bound (.-left (.toBox (style/getBounds child))) parent-right-bound (.-right (.toBox (style/getBounds parent))) container-right-bound (.-right (.toBox (style/getBounds container-node)))] (- (min parent-right-bound container-right-bound) left-bound))) (defn width [elem] (.-width (style/getBounds (d/single-node elem)))) (declare arrange-element!) Colors chosen with the help of Adobe Kuler ;; / (def initial-arrange-state (cycle ["#e6f3f7" "#f2ffff" "#e5f2ff" "#ebf7f4" "#e5fff1"])) (def color first) (def next-state rest) (defn arrange-keyval! [state elem container] (let [[key separator val] (d/children elem)] (arrange-element! state key container) (arrange-element! state val container) (when (overflow? elem container) (d/set-styles! separator {:display "none"}) (d/set-styles! key {:display "block"}) (d/set-styles! val {:display "block" :margin-left "1em"})))) (def collection-styles {:color "black" :display "inline-block" :padding-top "1px" :padding-right "2px" :padding-bottom "2px" :padding-left "2px" :margin-bottom "1ex" :border-top-left-radius "5px" :border-top-right-radius "10px" :border-bottom-right-radius "5px" :border-bottom-left-radius "10px"}) (defn arrange-collection! [state elem container] (d/add-class! elem "arranged") (d/set-styles! 
elem (merge collection-styles {:background-color (color state)})) (let [[opener contents closer] (d/children elem)] (d/set-styles! opener {:display "inline" :vertical-align "top"}) (d/set-styles! closer {:display "inline" :vertical-align "bottom"}) (d/set-styles! contents {:display "inline-block" :vertical-align "top"}) (doseq [child (d/children contents)] (if (d/has-class? child "separator") (d/set-styles! child {:display "none"}) (do (arrange-element! (next-state state) child container) (d/set-styles! child {:display "block"})))) ;; Make containing box no wider than it needs to be (d/set-styles! elem {:width (str (+ (width contents) (width opener) (width closer)) "px")}))) (defn remove-all-styles! [elem] ;; remove-attr! doesn't always work (d/set-attr! elem :style "") (d/remove-class! elem "arranged") (doseq [child (d/children elem)] (remove-all-styles! child))) (defn condense-collection! [elem container] (let [[opener contents closer] (d/children elem) w (- (max-inline-width elem container) (* 2 (+ (width opener) (width closer))))] (d/set-styles! opener {:font-weight "bold"}) (d/set-styles! closer {:font-weight "bold"}) (d/set-styles! contents {:color "gray" :display "inline-block" :max-width (str w "px") :overflow "hidden" :text-overflow "ellipsis"}))) (defn arrange-element! [state elem container] (remove-all-styles! elem) (d/set-styles! elem {:white-space "pre"}) (when (overflow? elem container) (cond (d/has-class? elem "collection") (if (d/has-class? elem "condensed") (condense-collection! elem container) (arrange-collection! state elem container)) (d/has-class? elem "keyval") (arrange-keyval! state elem container)))) (defn arrange! [elem container] (arrange-element! initial-arrange-state elem container)) (defn find-arranged-parent [elem container] (cond (= container elem) elem (and (gdom/isElement elem) (d/has-class? elem "collection") (d/has-class? elem "arranged")) elem :else (recur (.-parentNode elem) container))) (defn toggle! [target-elem arranged-elem container] (if (d/has-class? target-elem "condensed") (d/remove-class! target-elem "condensed") (d/add-class! target-elem "condensed")) (arrange! arranged-elem container)) (defn set-toggle-on-click! [elem container] (events/listen (d/single-node elem) "click" (fn [event] (loop [t (.-target event)] (when t (if (and (gdom/isElement t) (d/has-class? t "collection") (or (d/has-class? t "condensed") (d/has-class? t "arranged"))) (do (.stopPropagation event) (.preventDefault event) (toggle! t elem container)) (recur (.-parentNode t))))))))
null
https://raw.githubusercontent.com/pedestal/pedestal-app/509ab766a54921c0fbb2dd7c6a3cb20223b8e1a1/app/src/io/pedestal/app/render/push/cljs_formatter.cljs
clojure
The use and distribution terms for this software are covered by the which can be found in the file epl-v10.html at the root of this distribution. By using this software in any fashion, you are agreeing to be bound by the terms of this license. You must not remove this notice, or any other, from this software. This namespace was copied from -formatter. Once that library has a release, we can add a dependency. Data to HTML strings / Make containing box no wider than it needs to be remove-attr! doesn't always work
Copyright 2013 Relevance , Inc. Eclipse Public License 1.0 ( ) (ns io.pedestal.app.render.push.cljs-formatter (:require [domina :as d] [domina.xpath :as dx] [clojure.string :as string] [goog.dom :as gdom] [goog.style :as style] [goog.color :as color] [goog.dom.classes :as classes] [goog.events :as events])) (defn span [class body] (str "<span class='" class "'>" body "</span>")) (defn literal [class x] (span class (pr-str x))) (declare html) (defn join [separator coll] (string/join (span "separator" separator) (map html coll))) (defn html-collection [class opener closer coll] (span (str "collection " class) (str (span "opener" opener) (span "contents" (join " " coll)) (span "closer" closer)))) (defn html-keyval [[k v]] (span "keyval" (str (html k) (span "separator" " ") (html v)))) (defn html-keyvals [coll] (string/join (span "separator" ", ") (map html-keyval coll))) (defn html-map [coll] (span "collection map" (str (span "opener" "{") (span "contents" (html-keyvals coll)) (span "closer" "}")))) (defn html-string [s] (span "string" (str (span "opener" "\"") (span "contents" s) (span "closer" "\"")))) (defn html [x] (cond (number? x) (literal "number" x) (keyword? x) (literal "keyword" x) (symbol? x) (literal "symbol" x) (string? x) (html-string x) (map? x) (html-map x) (set? x) (html-collection "set" "#{" "}" x) (vector? x) (html-collection "vector" "[" "]" x) (seq? x) (html-collection "seq" "(" ")" x) :else (literal "literal" x))) DOM layout (defn overflow? [child parent] (let [parent-box (.toBox (style/getBounds parent)) child-box (.toBox (style/getBounds child))] (< (.-right parent-box) (.-right child-box)))) (defn max-inline-width [elem container] (let [child (d/single-node elem) parent (.-parentNode (d/single-node elem)) container-node (d/single-node container) left-bound (.-left (.toBox (style/getBounds child))) parent-right-bound (.-right (.toBox (style/getBounds parent))) container-right-bound (.-right (.toBox (style/getBounds container-node)))] (- (min parent-right-bound container-right-bound) left-bound))) (defn width [elem] (.-width (style/getBounds (d/single-node elem)))) (declare arrange-element!) Colors chosen with the help of Adobe Kuler (def initial-arrange-state (cycle ["#e6f3f7" "#f2ffff" "#e5f2ff" "#ebf7f4" "#e5fff1"])) (def color first) (def next-state rest) (defn arrange-keyval! [state elem container] (let [[key separator val] (d/children elem)] (arrange-element! state key container) (arrange-element! state val container) (when (overflow? elem container) (d/set-styles! separator {:display "none"}) (d/set-styles! key {:display "block"}) (d/set-styles! val {:display "block" :margin-left "1em"})))) (def collection-styles {:color "black" :display "inline-block" :padding-top "1px" :padding-right "2px" :padding-bottom "2px" :padding-left "2px" :margin-bottom "1ex" :border-top-left-radius "5px" :border-top-right-radius "10px" :border-bottom-right-radius "5px" :border-bottom-left-radius "10px"}) (defn arrange-collection! [state elem container] (d/add-class! elem "arranged") (d/set-styles! elem (merge collection-styles {:background-color (color state)})) (let [[opener contents closer] (d/children elem)] (d/set-styles! opener {:display "inline" :vertical-align "top"}) (d/set-styles! closer {:display "inline" :vertical-align "bottom"}) (d/set-styles! contents {:display "inline-block" :vertical-align "top"}) (doseq [child (d/children contents)] (if (d/has-class? child "separator") (d/set-styles! child {:display "none"}) (do (arrange-element! 
(next-state state) child container) (d/set-styles! child {:display "block"})))) (d/set-styles! elem {:width (str (+ (width contents) (width opener) (width closer)) "px")}))) (defn remove-all-styles! [elem] (d/set-attr! elem :style "") (d/remove-class! elem "arranged") (doseq [child (d/children elem)] (remove-all-styles! child))) (defn condense-collection! [elem container] (let [[opener contents closer] (d/children elem) w (- (max-inline-width elem container) (* 2 (+ (width opener) (width closer))))] (d/set-styles! opener {:font-weight "bold"}) (d/set-styles! closer {:font-weight "bold"}) (d/set-styles! contents {:color "gray" :display "inline-block" :max-width (str w "px") :overflow "hidden" :text-overflow "ellipsis"}))) (defn arrange-element! [state elem container] (remove-all-styles! elem) (d/set-styles! elem {:white-space "pre"}) (when (overflow? elem container) (cond (d/has-class? elem "collection") (if (d/has-class? elem "condensed") (condense-collection! elem container) (arrange-collection! state elem container)) (d/has-class? elem "keyval") (arrange-keyval! state elem container)))) (defn arrange! [elem container] (arrange-element! initial-arrange-state elem container)) (defn find-arranged-parent [elem container] (cond (= container elem) elem (and (gdom/isElement elem) (d/has-class? elem "collection") (d/has-class? elem "arranged")) elem :else (recur (.-parentNode elem) container))) (defn toggle! [target-elem arranged-elem container] (if (d/has-class? target-elem "condensed") (d/remove-class! target-elem "condensed") (d/add-class! target-elem "condensed")) (arrange! arranged-elem container)) (defn set-toggle-on-click! [elem container] (events/listen (d/single-node elem) "click" (fn [event] (loop [t (.-target event)] (when t (if (and (gdom/isElement t) (d/has-class? t "collection") (or (d/has-class? t "condensed") (d/has-class? t "arranged"))) (do (.stopPropagation event) (.preventDefault event) (toggle! t elem container)) (recur (.-parentNode t))))))))
aec285238d498c132ff2f2d8c455459e45476e1eca727f27f621c1ab2d38365c
inria-parkas/sundialsml
ark_heat1D.ml
--------------------------------------------------------------- * Programmer(s ): @ SMU * --------------------------------------------------------------- * OCaml port : , , Jan 2016 . * --------------------------------------------------------------- * Copyright ( c ) 2015 , Southern Methodist University and * National Security * * This work was performed under the auspices of the U.S. Department * of Energy by Southern Methodist University and * National Laboratory under Contract DE - AC52 - 07NA27344 . * Produced at Southern Methodist University and Livermore National Laboratory . * * All rights reserved . * For details , see the LICENSE file . * --------------------------------------------------------------- * Example problem : * * The following test simulates a simple 1D heat equation , * u_t = k*u_xx + f * for t in [ 0 , 10 ] , x in [ 0 , 1 ] , with initial conditions * u(0,x ) = 0 * Dirichlet boundary conditions , i.e. * u_t(t,0 ) = u_t(t,1 ) = 0 , * and a point - source heating term , * f = 1 for x=0.5 . * * The spatial derivatives are computed using second - order * centered differences , with the data distributed over N points * on a uniform spatial grid . * * This program solves the problem with either an ERK or DIRK * method . For the DIRK method , we use a Newton iteration with * the PCG linear solver , and a user - supplied Jacobian - vector * product routine . * * 100 outputs are printed at equal intervals , and run statistics * are printed at the end . * --------------------------------------------------------------- * Programmer(s): Daniel R. Reynolds @ SMU *--------------------------------------------------------------- * OCaml port: Timothy Bourke, Inria, Jan 2016. *--------------------------------------------------------------- * Copyright (c) 2015, Southern Methodist University and * Lawrence Livermore National Security * * This work was performed under the auspices of the U.S. Department * of Energy by Southern Methodist University and Lawrence Livermore * National Laboratory under Contract DE-AC52-07NA27344. * Produced at Southern Methodist University and the Lawrence * Livermore National Laboratory. * * All rights reserved. * For details, see the LICENSE file. *--------------------------------------------------------------- * Example problem: * * The following test simulates a simple 1D heat equation, * u_t = k*u_xx + f * for t in [0, 10], x in [0, 1], with initial conditions * u(0,x) = 0 * Dirichlet boundary conditions, i.e. * u_t(t,0) = u_t(t,1) = 0, * and a point-source heating term, * f = 1 for x=0.5. * * The spatial derivatives are computed using second-order * centered differences, with the data distributed over N points * on a uniform spatial grid. * * This program solves the problem with either an ERK or DIRK * method. For the DIRK method, we use a Newton iteration with * the PCG linear solver, and a user-supplied Jacobian-vector * product routine. * * 100 outputs are printed at equal intervals, and run statistics * are printed at the end. 
*---------------------------------------------------------------*) open Sundials module ARKStep = Arkode.ARKStep let printf = Printf.printf let fprintf = Printf.fprintf let dotprod = Nvector_serial.Ops.dotprod let sundials_270_or_later = match Config.sundials_version with | 2,5,_ | 2,6,_ -> false | _ -> true (* user data structure *) type user_data = { n : int; (* number of intervals *) dx : float; (* mesh spacing *) k : float; (* diffusion coefficient *) } (* Functions called by the solver *) (* f routine to compute the ODE RHS function f(t,y). *) let f { n; dx; k } _ (y : RealArray.t) (ydot : RealArray.t) = Initialize ydot to zero (* iterate over domain, computing all equations *) let c1 = k/.dx/.dx in let c2 = -2.0*.k/.dx/.dx in let isource = n/2 in ydot.{0} <- 0.0; (* left boundary condition *) for i=1 to n-1-1 do ydot.{i} <- c1*.y.{i-1} +. c2*.y.{i} +. c1*.y.{i+1} done; ydot.{n-1} <- 0.0; (* right boundary condition *) ydot.{isource} <- ydot.{isource} +. 0.01/.dx (* source term *) Jacobian routine to compute J(t , y ) = df / dy . let jac { n; dx; k } _ (v : RealArray.t) (jv : RealArray.t) = iterate over domain , computing all Jacobian - vector products let c1 = k/.dx/.dx in let c2 = -2.0*.k/.dx/.dx in jv.{0} <- 0.0; for i=1 to n-1-1 do jv.{i} <- c1*.v.{i-1} +. c2*.v.{i} +. c1*.v.{i+1} done; jv.{n-1} <- 0.0 Main Program let main () = (* general problem parameters *) let t0 = 0.0 in (* initial time *) let tf = 1.0 in (* final time *) let nt = 10 in (* total number of output times *) let rtol = 1.e-6 in (* relative tolerance *) let atol = 1.e-10 in (* absolute tolerance *) let mesh_n = 201 in (* spatial mesh size *) let heat_k = 0.5 in (* heat conductivity *) (* general problem variables *) (* allocate and fill udata structure *) let udata = { n = mesh_n; dx = 1.0/.(1.0*.float(mesh_n)-.1.0); (* mesh spacing *) k = heat_k; } in (* Initial problem output *) printf "\n1D Heat PDE test problem:\n"; printf " N = %d\n" udata.n; printf " diffusion coefficient: k = %g\n" udata.k; Initialize data structures let data = RealArray.make mesh_n 0.0 in(* Set initial conditions *) let y = Nvector_serial.wrap data in (* Create serial vector for solution *) (* Call ARKodeInit to initialize the integrator memory and specify the hand-side side function in y'=f(t,y), the inital time t0, and the initial dependent variable vector y. Note: since this problem is fully implicit, we set f_E to NULL and f_I to f. *) let arkode_mem = ARKStep.( init (implicit ~lsolver:Spils.(solver (pcg ~maxl:mesh_n y) ~jac_times_vec:(None, jac udata) prec_none) ~linearity:(Linear true) (f udata)) (SStolerances (rtol, atol)) t0 y ) in (* Set routines *) ARKStep.set_max_num_steps arkode_mem 10000; (* Increase max num steps *) if sundials_270_or_later then ARKStep.(set_predictor_method arkode_mem MaximumOrderPredictor); (* output mesh to disk *) let fid = open_out "heat_mesh.txt" in for i=0 to mesh_n-1 do fprintf fid " %.16e\n" (udata.dx*.float i) done; close_out fid; (* Open output stream for results, access data array *) let ufid = open_out "heat1D.txt" in (* output initial condition to disk *) for i=0 to mesh_n-1 do fprintf ufid " %.16e" data.{i} done; fprintf ufid "\n"; (* Main time-stepping loop: calls ARKode to perform the integration, then prints results. 
Stops when the final time has been reached *) let dTout = (tf-.t0)/.float nt in let tout = ref (t0+.dTout) in printf " t ||u||_rms\n"; printf " -------------------------\n"; printf " %10.6f %10.6f\n" t0 (sqrt((dotprod y y)/.float mesh_n)); (try for _ = 0 to nt-1 do (* call integrator *) let t, _ = ARKStep.evolve_normal arkode_mem !tout y in (* print solution stats *) printf " %10.6f %10.6f\n" t (sqrt((dotprod y y)/.float mesh_n)); (* successful solve: update output time *) tout := !tout +. dTout; if !tout > tf then tout := tf; (* output results to disk *) for i=0 to mesh_n-1 do fprintf ufid " %.16e" data.{i} done; fprintf ufid "\n" done with _ -> (* unsuccessful solve: break *) fprintf stderr "Solver failure, stopping integration\n"); printf " -------------------------\n"; close_out ufid; (* Print some final statistics *) let open ARKStep in let nst = get_num_steps arkode_mem in let nst_a = get_num_step_attempts arkode_mem in let nfe, nfi = get_num_rhs_evals arkode_mem in let nsetups = get_num_lin_solv_setups arkode_mem in let netf = get_num_err_test_fails arkode_mem in let nni = get_num_nonlin_solv_iters arkode_mem in let ncfn = get_num_nonlin_solv_conv_fails arkode_mem in let nli = Spils.get_num_lin_iters arkode_mem in let nJv = Spils.get_num_jtimes_evals arkode_mem in let nlcf = Spils.get_num_lin_conv_fails arkode_mem in printf "\nFinal Solver Statistics:\n"; printf " Internal solver steps = %d (attempted = %d)\n" nst nst_a; printf " Total RHS evals: Fe = %d, Fi = %d\n" nfe nfi; printf " Total linear solver setups = %d\n" nsetups; printf " Total linear iterations = %d\n" nli; printf " Total number of Jacobian-vector products = %d\n" nJv; printf " Total number of linear solver convergence failures = %d\n" nlcf; printf " Total number of Newton iterations = %d\n" nni; printf " Total number of nonlinear solver convergence failures = %d\n" ncfn; printf " Total number of error test failures = %d\n" netf (* Check environment variables for extra arguments. *) let reps = try int_of_string (Unix.getenv "NUM_REPS") with Not_found | Failure _ -> 1 let gc_at_end = try int_of_string (Unix.getenv "GC_AT_END") <> 0 with Not_found | Failure _ -> false let gc_each_rep = try int_of_string (Unix.getenv "GC_EACH_REP") <> 0 with Not_found | Failure _ -> false (* Entry point *) let _ = for _ = 1 to reps do main (); if gc_each_rep then Gc.compact () done; if gc_at_end then Gc.compact ()
null
https://raw.githubusercontent.com/inria-parkas/sundialsml/a1848318cac2e340c32ddfd42671bef07b1390db/examples/arkode/C_serial/ark_heat1D.ml
ocaml
user data structure number of intervals mesh spacing diffusion coefficient Functions called by the solver f routine to compute the ODE RHS function f(t,y). iterate over domain, computing all equations left boundary condition right boundary condition source term general problem parameters initial time final time total number of output times relative tolerance absolute tolerance spatial mesh size heat conductivity general problem variables allocate and fill udata structure mesh spacing Initial problem output Set initial conditions Create serial vector for solution Call ARKodeInit to initialize the integrator memory and specify the hand-side side function in y'=f(t,y), the inital time t0, and the initial dependent variable vector y. Note: since this problem is fully implicit, we set f_E to NULL and f_I to f. Set routines Increase max num steps output mesh to disk Open output stream for results, access data array output initial condition to disk Main time-stepping loop: calls ARKode to perform the integration, then prints results. Stops when the final time has been reached call integrator print solution stats successful solve: update output time output results to disk unsuccessful solve: break Print some final statistics Check environment variables for extra arguments. Entry point
--------------------------------------------------------------- * Programmer(s ): @ SMU * --------------------------------------------------------------- * OCaml port : , , Jan 2016 . * --------------------------------------------------------------- * Copyright ( c ) 2015 , Southern Methodist University and * National Security * * This work was performed under the auspices of the U.S. Department * of Energy by Southern Methodist University and * National Laboratory under Contract DE - AC52 - 07NA27344 . * Produced at Southern Methodist University and Livermore National Laboratory . * * All rights reserved . * For details , see the LICENSE file . * --------------------------------------------------------------- * Example problem : * * The following test simulates a simple 1D heat equation , * u_t = k*u_xx + f * for t in [ 0 , 10 ] , x in [ 0 , 1 ] , with initial conditions * u(0,x ) = 0 * Dirichlet boundary conditions , i.e. * u_t(t,0 ) = u_t(t,1 ) = 0 , * and a point - source heating term , * f = 1 for x=0.5 . * * The spatial derivatives are computed using second - order * centered differences , with the data distributed over N points * on a uniform spatial grid . * * This program solves the problem with either an ERK or DIRK * method . For the DIRK method , we use a Newton iteration with * the PCG linear solver , and a user - supplied Jacobian - vector * product routine . * * 100 outputs are printed at equal intervals , and run statistics * are printed at the end . * --------------------------------------------------------------- * Programmer(s): Daniel R. Reynolds @ SMU *--------------------------------------------------------------- * OCaml port: Timothy Bourke, Inria, Jan 2016. *--------------------------------------------------------------- * Copyright (c) 2015, Southern Methodist University and * Lawrence Livermore National Security * * This work was performed under the auspices of the U.S. Department * of Energy by Southern Methodist University and Lawrence Livermore * National Laboratory under Contract DE-AC52-07NA27344. * Produced at Southern Methodist University and the Lawrence * Livermore National Laboratory. * * All rights reserved. * For details, see the LICENSE file. *--------------------------------------------------------------- * Example problem: * * The following test simulates a simple 1D heat equation, * u_t = k*u_xx + f * for t in [0, 10], x in [0, 1], with initial conditions * u(0,x) = 0 * Dirichlet boundary conditions, i.e. * u_t(t,0) = u_t(t,1) = 0, * and a point-source heating term, * f = 1 for x=0.5. * * The spatial derivatives are computed using second-order * centered differences, with the data distributed over N points * on a uniform spatial grid. * * This program solves the problem with either an ERK or DIRK * method. For the DIRK method, we use a Newton iteration with * the PCG linear solver, and a user-supplied Jacobian-vector * product routine. * * 100 outputs are printed at equal intervals, and run statistics * are printed at the end. 
*---------------------------------------------------------------*) open Sundials module ARKStep = Arkode.ARKStep let printf = Printf.printf let fprintf = Printf.fprintf let dotprod = Nvector_serial.Ops.dotprod let sundials_270_or_later = match Config.sundials_version with | 2,5,_ | 2,6,_ -> false | _ -> true type user_data = { } let f { n; dx; k } _ (y : RealArray.t) (ydot : RealArray.t) = Initialize ydot to zero let c1 = k/.dx/.dx in let c2 = -2.0*.k/.dx/.dx in let isource = n/2 in for i=1 to n-1-1 do ydot.{i} <- c1*.y.{i-1} +. c2*.y.{i} +. c1*.y.{i+1} done; Jacobian routine to compute J(t , y ) = df / dy . let jac { n; dx; k } _ (v : RealArray.t) (jv : RealArray.t) = iterate over domain , computing all Jacobian - vector products let c1 = k/.dx/.dx in let c2 = -2.0*.k/.dx/.dx in jv.{0} <- 0.0; for i=1 to n-1-1 do jv.{i} <- c1*.v.{i-1} +. c2*.v.{i} +. c1*.v.{i+1} done; jv.{n-1} <- 0.0 Main Program let main () = let udata = { n = mesh_n; k = heat_k; } in printf "\n1D Heat PDE test problem:\n"; printf " N = %d\n" udata.n; printf " diffusion coefficient: k = %g\n" udata.k; Initialize data structures let arkode_mem = ARKStep.( init (implicit ~lsolver:Spils.(solver (pcg ~maxl:mesh_n y) ~jac_times_vec:(None, jac udata) prec_none) ~linearity:(Linear true) (f udata)) (SStolerances (rtol, atol)) t0 y ) in if sundials_270_or_later then ARKStep.(set_predictor_method arkode_mem MaximumOrderPredictor); let fid = open_out "heat_mesh.txt" in for i=0 to mesh_n-1 do fprintf fid " %.16e\n" (udata.dx*.float i) done; close_out fid; let ufid = open_out "heat1D.txt" in for i=0 to mesh_n-1 do fprintf ufid " %.16e" data.{i} done; fprintf ufid "\n"; let dTout = (tf-.t0)/.float nt in let tout = ref (t0+.dTout) in printf " t ||u||_rms\n"; printf " -------------------------\n"; printf " %10.6f %10.6f\n" t0 (sqrt((dotprod y y)/.float mesh_n)); (try for _ = 0 to nt-1 do let t, _ = ARKStep.evolve_normal arkode_mem !tout y in printf " %10.6f %10.6f\n" t (sqrt((dotprod y y)/.float mesh_n)); tout := !tout +. 
dTout; if !tout > tf then tout := tf; for i=0 to mesh_n-1 do fprintf ufid " %.16e" data.{i} done; fprintf ufid "\n" done with _ -> fprintf stderr "Solver failure, stopping integration\n"); printf " -------------------------\n"; close_out ufid; let open ARKStep in let nst = get_num_steps arkode_mem in let nst_a = get_num_step_attempts arkode_mem in let nfe, nfi = get_num_rhs_evals arkode_mem in let nsetups = get_num_lin_solv_setups arkode_mem in let netf = get_num_err_test_fails arkode_mem in let nni = get_num_nonlin_solv_iters arkode_mem in let ncfn = get_num_nonlin_solv_conv_fails arkode_mem in let nli = Spils.get_num_lin_iters arkode_mem in let nJv = Spils.get_num_jtimes_evals arkode_mem in let nlcf = Spils.get_num_lin_conv_fails arkode_mem in printf "\nFinal Solver Statistics:\n"; printf " Internal solver steps = %d (attempted = %d)\n" nst nst_a; printf " Total RHS evals: Fe = %d, Fi = %d\n" nfe nfi; printf " Total linear solver setups = %d\n" nsetups; printf " Total linear iterations = %d\n" nli; printf " Total number of Jacobian-vector products = %d\n" nJv; printf " Total number of linear solver convergence failures = %d\n" nlcf; printf " Total number of Newton iterations = %d\n" nni; printf " Total number of nonlinear solver convergence failures = %d\n" ncfn; printf " Total number of error test failures = %d\n" netf let reps = try int_of_string (Unix.getenv "NUM_REPS") with Not_found | Failure _ -> 1 let gc_at_end = try int_of_string (Unix.getenv "GC_AT_END") <> 0 with Not_found | Failure _ -> false let gc_each_rep = try int_of_string (Unix.getenv "GC_EACH_REP") <> 0 with Not_found | Failure _ -> false let _ = for _ = 1 to reps do main (); if gc_each_rep then Gc.compact () done; if gc_at_end then Gc.compact ()
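The 1D heat-equation record above describes its spatial discretisation only in prose; written out as a worked equation (a restatement of the record, with dx the mesh spacing and k the diffusivity from its user_data), the centered-difference stencil computed by the record's f routine is

\[
\dot{u}_i \;=\; k\,\frac{u_{i-1} - 2u_i + u_{i+1}}{\Delta x^{2}}
\;=\; c_1\,u_{i-1} + c_2\,u_i + c_1\,u_{i+1},
\qquad c_1 = \frac{k}{\Delta x^{2}},\quad c_2 = -\frac{2k}{\Delta x^{2}},
\]

with \(\dot{u}_0 = \dot{u}_{N-1} = 0\) enforcing the Dirichlet boundaries, and the point source f = 1 from the header entering only at the midpoint index (isource = n/2 in the record).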
accbedb080ea4fdf27e7d9011130ac754d31b23045e96b90ee81da458e2dcae2
bos/rwh
Indentation.hs
{-- snippet foo --} foo = let firstDefinition = blah blah -- a comment-only line is treated as empty continuation blah -- we reduce the indentation, so this is a new definition secondDefinition = yada yada continuation yada in whatever {-- /snippet foo --}
null
https://raw.githubusercontent.com/bos/rwh/7fd1e467d54aef832f5476ebf5f4f6a898a895d1/examples/ch04/Indentation.hs
haskell
- snippet foo - a comment-only line is treated as empty we reduce the indentation, so this is a new definition - /snippet foo -
foo = let firstDefinition = blah blah continuation blah secondDefinition = yada yada continuation yada in whatever
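The Indentation.hs record above demonstrates Haskell's layout rule: a comment-only line does not close a binding, and dedenting starts a new one. Below is a self-contained sketch of the explicit-brace form that the layout rule produces for such a let block; the bindings are hypothetical stand-ins, invented only so the sketch compiles.

    -- Explicit braces and semicolons: the grouping the layout-sensitive let
    -- in the record desugars to, with placeholder right-hand sides.
    layoutDemo :: Int
    layoutDemo =
      let { firstDefinition  = 1 + 2
          ; secondDefinition = 3 * 4
          }
      in firstDefinition + secondDefinition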
1d330a036481858050081bda1da57950ee630f3fe435ecd32ba61466e6a7650f
cac-t-u-s/om-sharp
textbuffer.lisp
;============================================================================ ; om#: visual programming language for computer-assisted music composition ;============================================================================ ; ; This program is free software. For information on usage ; and redistribution, see the "LICENSE" file in this distribution. ; ; This program is distributed in the hope that it will be useful, ; but WITHOUT ANY WARRANTY; without even the implied warranty of ; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ; ;============================================================================ File author : ;============================================================================ (in-package :om) (defclass* textbuffer (named-object) ((contents :initarg :contents :initform nil :accessor contents :documentation "data or text input/output") (input-mode :initform :lines-cols :accessor input-mode :documentation "determines how input <contents> is read and converted to text") (reader :initform nil :accessor reader)) (:documentation "The class TEXTBUFFER represents a text buffer in a visual program. It can be used to enter or collect data and is editable in a text window. - <self> represents/returns the TEXTBUFFER object. - <contents> represents the data in the TEXTBUFFER As input it can be a single item (string or value) or a list, and then each item is considered as a new line in the text buffer. As output it returns the contents of the text buffer as a list formatted according to the box :output-mode - <input-mode> determines how input <contents> is read and formatted. The options are - 'lines-cols' [default] : each item of the input list becomes a line of text, each element in the line is a 'row' - 'lines' : each item of the input list becomes a raw line of text - 'value' : the input is formatted as such in the textbuffer contents " )) ;The input can be connected to a pathname to attach and fill the TextFile buffer with a file on the disk. ;(Note: use the contextual menu in order to change the TextFile attachement settings.) ;- <output-mode> determines how <contents> access is formatted. The options are ; - NIL (default) : each line is read as a string (returns a list of strings) ; - ':lines' : each line is collected in a list of values (returns a list of lists) ; - ':lines-cols' : each line is collected a value (returns a list of lists) ; - ':list' : ignores line breaks and returns a flat list (returns a list) ; - ':value' : returns the contents as read by the Lisp reader ; - ':text' : returns a single text ;;; I don't remember why :output-mode is a box attribute and :input-mode a class attrobute... :( (defmethod additional-class-attributes ((self textbuffer)) '(input-mode)) (defmethod class-attributes-menus ((self textbuffer)) '((input-mode (("2D matrix" :lines-cols) ("list of lines" :lines) ("plain" :value))))) (defmethod additional-box-attributes ((self textbuffer)) '((:output-mode "determines how <contents> text is formatted for output" (("list of lists" :lines-cols) ("list of values/line" :lines) ("single value (lisp reader)" :value) ("flat list (ignore lines)" :list) ("list of text lines" :text-list) ("plain text" :text))))) (defmethod object-default-edition-params ((self textbuffer)) '((:output-mode :lines-cols))) ;;;=================================== ;;; FORMATTING (defun format-from-text-lines (lines mode) (case mode (:lines-cols (remove nil (loop for line in lines collect (om-read-list-from-string line)))) ;; (or ... 
(list line)) (:lines (remove nil (mapcar #'(lambda (l) (ignore-errors (read-from-string l nil))) lines))) (:value (read-from-string (apply 'string+ (mapcar #'(lambda (line) (string+ (delete-lisp-comments line) " ")) lines)) nil)) (:list (flat (mapcar 'om-read-list-from-string lines) 1)) (:text-list lines) (:text (if lines (reduce #'(lambda (s1 s2) (concatenate 'string s1 (string #\Newline) s2)) lines) "")) )) (defun format-to-text-lines (data mode) (case mode (:lines-cols (loop for line in (list! data) collect (format nil "~{~a~^ ~}" (list! line)))) (:lines (loop for line in (list! data) collect (format nil "~A" line))) (:text-list data) (otherwise (list (format nil "~A" data))) )) ;;;=================================== (defmethod om-init-instance ((self textbuffer) &optional initargs) (let ((supplied-contents (find-value-in-kv-list initargs :contents))) ( in - mode ( find - value - in - kv - list : input - mode ) ) ;; in-mode would exist only if the input is explicitely out... (not unsed anyway...) (when supplied-contents ;; we're evaluating the box (setf (contents self) (format-to-text-lines supplied-contents (input-mode self)))) self)) (defmethod prepare-obj-for-request ((object textbuffer) (box omboxeditcall)) (setf (reader object) (get-edit-param box :output-mode)) object) (defmethod get-slot-val ((obj textbuffer) slot-name) (if (string-equal (string slot-name) "contents") (format-from-text-lines (contents obj) (reader obj)) (call-next-method))) ;;;=================================== (defmethod* save-as-text ((self textbuffer) &optional (path "data") (type "txt")) (save-as-text (format-from-text-lines (contents self) :text) path type)) (defmethod objfromobjs ((model pathname) (target textbuffer)) (when (probe-file model) (om-init-instance target `((:contents ,(lines-from-file model)) (:input-mode :text-list))))) (defmethod objfromobjs ((model string) (target textbuffer)) (objfromobjs (pathname model) target)) (defmethod objfromobjs ((model null) (target textbuffer)) target) ;;;======================== ;;; BOX ;;;======================== (defclass TextBufferBox (omboxeditcall) ()) (defmethod special-box-type ((class-name (eql 'textbuffer))) 'TextBufferBox) (defmethod display-modes-for-object ((self textbuffer)) '(:mini-view :text :hidden)) (defmethod get-cache-display-for-text ((self textbuffer) box) (declare (ignore box)) (list (list :text-buffer (if (contents self) (format nil "[~D lines]" (length (contents self))) "<EMPTY>")) )) (defmethod draw-mini-view ((self textbuffer) (box TextBufferBox) x y w h &optional time) (let (; (display-cache (get-display-draw box)) (font (om-def-font :small))) (om-with-font font (loop for line in (list! (contents self)) for y = 22 then (+ y 12) do (if (< y (- h 8)) (let ((line (format nil "~A" line))) (if (> (om-string-size line font) (- w 10)) (om-draw-string 5 y (concatenate 'string (subseq line 0 (min (length line) (- (round w (om-string-size "a" font)) 3))) " ...")) (om-draw-string 5 y line))) (progn (om-draw-string (- (round w 2) 10) (- h 10) "...") (return))))))) (defmethod gen-code-for-call ((self TextBufferBox) &optional args) (declare (ignore args)) `(let ((tb ,(call-next-method))) (setf (reader tb) ,(get-edit-param self :output-mode)) tb)) ;;;======================== ;;; UTILS / FUNCTIONS ;;;======================== (defmethod* textbuffer-eval ((self textbuffer)) :indoc '("a textfile object") :doc "Evaluates the contents of a TEXTBUFFER (<self>). 
Evaluation allows defining functions or data in Lisp and running commands or programs from the TEXTBUFFER." (eval (read-from-string (apply 'string+ (append '("(progn ") (contents self) '(")")))))) (defmethod* textbuffer-read ((self textbuffer) mode) :indoc '("a textfile object" "a reading mode") :initvals '(nil :lines-cols) :menuins '((1 (("value" :value) ("list" :list) ("lines" :lines) ("lines-cols" :lines-cols) ("list of text lines" :text-list) ("text" :text)))) :doc "Reads the contents of a TEXTBUFFER (<self>) as Lisp values depending on <mode>: - :value = reads the contents as a single value - :list = reads the successive values and returns a list - :lines = reads each line as a single value (returns a list) - :lines-cols = reads each line as a list of values (returns a list of lists) - :text-list = collects the contents as list of text lines (returns a list of strings) - :text = collects the contents as a single text (string with line returns) " (format-from-text-lines (contents self) mode)) ;;;======================== ;;; EDITOR ;;;======================== (defmethod object-has-editor ((self textbuffer)) t) (defmethod get-editor-class ((self textbuffer)) 'textbuffer-editor) (defclass textbuffer-editor (omeditor) ()) (defmethod editor-window-class ((self textbuffer-editor)) 'textbuffer-editor-window) (defclass textbuffer-editor-window (om-lisp::om-text-editor-window) ((editor :initarg :editor :initform nil :accessor editor))) (defmethod om-lisp::save-operation-enabled ((self textbuffer-editor-window)) nil) REDEFINED SOME EDITOR - WINDOW METHODS (defmethod open-editor-window ((self textbuffer-editor)) (if (and (window self) (om-window-open-p (window self))) (om-select-window (window self)) (let* ((textbuffer (object-value self)) (edwin (om-lisp::om-open-text-editor :contents (contents textbuffer) :class 'textbuffer-editor-window :lisp nil :title (editor-window-title self) :x (and (window-pos (object self)) (om-point-x (window-pos (object self)))) :y (and (window-pos (object self)) (om-point-y (window-pos (object self)))) :w (and (window-size (object self)) (om-point-x (window-size (object self)))) :h (and (window-size (object self)) (om-point-y (window-size (object self)))) ))) (setf (editor edwin) self) (setf (window self) edwin) edwin))) (defmethod om-lisp::om-text-editor-activate-callback ((self textbuffer-editor-window) activatep) (when (editor self) (editor-activate (editor self) activatep))) (defmethod om-lisp::om-text-editor-destroy-callback ((self textbuffer-editor-window)) (when (editor self) (editor-close (editor self)) (setf (window (editor self)) nil) (setf (g-components (editor self)) nil))) ( defmethod om - view - key - handler ( ( self textbuffer - editor - window ) key ) ; (editor-key-action (editor self) key) ; (report-modifications (editor self))) ;;; NOT CALLED ! ( defmethod om - window - resized ( ( self textbuffer - editor - window ) size ) ( when ( editor self ) ; ; ; sometimes the editor is not yet set ( e.g. editor ) ( setf ( window - size ( object ( editor self ) ) ) size ) ) ) ( defmethod om - window - moved ( ( self textbuffer - editor - window ) pos ) ( when ( editor self ) ; ; ; sometimes the editor is not yet set ( e.g. 
editor ) ( setf ( window - pos ( object ( editor self ) ) ) pos ) ) ) ;;; SPECIFIC CALLBACKS (defmethod om-lisp::om-text-editor-modified ((self textbuffer-editor-window)) (let ((textbuffer (object-value (editor self)))) (when textbuffer (setf (contents textbuffer) (om-lisp::om-get-text-editor-text self)))) (when (equal self (om-front-window)) (report-modifications (editor self)))) ;;; pb : this will generate the callback above and lock the box if the window is open... (defmethod update-to-editor ((self textbuffer-editor) (from OMBoxEditCall)) (when (window self) (om-lisp::om-set-text-editor-text (window self) (contents (object-value self))))) (defmethod om-lisp::om-text-editor-resized ((win textbuffer-editor-window) w h) (when (editor win) (setf (window-size (object (editor win))) (omp w h)))) (defmethod om-lisp::om-text-editor-moved ((win textbuffer-editor-window) x y) (when (editor win) (setf (window-pos (object (editor win))) (omp x y))))
null
https://raw.githubusercontent.com/cac-t-u-s/om-sharp/80f9537368471d0e6e4accdc9fff01ed277b879e/src/packages/basic/text/textbuffer.lisp
lisp
============================================================================ om#: visual programming language for computer-assisted music composition ============================================================================ This program is free software. For information on usage and redistribution, see the "LICENSE" file in this distribution. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ============================================================================ ============================================================================ The input can be connected to a pathname to attach and fill the TextFile buffer with a file on the disk. (Note: use the contextual menu in order to change the TextFile attachement settings.) - <output-mode> determines how <contents> access is formatted. The options are - NIL (default) : each line is read as a string (returns a list of strings) - ':lines' : each line is collected in a list of values (returns a list of lists) - ':lines-cols' : each line is collected a value (returns a list of lists) - ':list' : ignores line breaks and returns a flat list (returns a list) - ':value' : returns the contents as read by the Lisp reader - ':text' : returns a single text I don't remember why :output-mode is a box attribute and :input-mode a class attrobute... :( =================================== FORMATTING (or ... (list line)) =================================== in-mode would exist only if the input is explicitely out... (not unsed anyway...) we're evaluating the box =================================== ======================== BOX ======================== (display-cache (get-display-draw box)) ======================== UTILS / FUNCTIONS ======================== ======================== EDITOR ======================== (editor-key-action (editor self) key) (report-modifications (editor self))) NOT CALLED ! ; ; sometimes the editor is not yet set ( e.g. editor ) ; ; sometimes the editor is not yet set ( e.g. editor ) SPECIFIC CALLBACKS pb : this will generate the callback above and lock the box if the window is open...
File author : (in-package :om) (defclass* textbuffer (named-object) ((contents :initarg :contents :initform nil :accessor contents :documentation "data or text input/output") (input-mode :initform :lines-cols :accessor input-mode :documentation "determines how input <contents> is read and converted to text") (reader :initform nil :accessor reader)) (:documentation "The class TEXTBUFFER represents a text buffer in a visual program. It can be used to enter or collect data and is editable in a text window. - <self> represents/returns the TEXTBUFFER object. - <contents> represents the data in the TEXTBUFFER As input it can be a single item (string or value) or a list, and then each item is considered as a new line in the text buffer. As output it returns the contents of the text buffer as a list formatted according to the box :output-mode - <input-mode> determines how input <contents> is read and formatted. The options are - 'lines-cols' [default] : each item of the input list becomes a line of text, each element in the line is a 'row' - 'lines' : each item of the input list becomes a raw line of text - 'value' : the input is formatted as such in the textbuffer contents " )) (defmethod additional-class-attributes ((self textbuffer)) '(input-mode)) (defmethod class-attributes-menus ((self textbuffer)) '((input-mode (("2D matrix" :lines-cols) ("list of lines" :lines) ("plain" :value))))) (defmethod additional-box-attributes ((self textbuffer)) '((:output-mode "determines how <contents> text is formatted for output" (("list of lists" :lines-cols) ("list of values/line" :lines) ("single value (lisp reader)" :value) ("flat list (ignore lines)" :list) ("list of text lines" :text-list) ("plain text" :text))))) (defmethod object-default-edition-params ((self textbuffer)) '((:output-mode :lines-cols))) (defun format-from-text-lines (lines mode) (case mode (:lines (remove nil (mapcar #'(lambda (l) (ignore-errors (read-from-string l nil))) lines))) (:value (read-from-string (apply 'string+ (mapcar #'(lambda (line) (string+ (delete-lisp-comments line) " ")) lines)) nil)) (:list (flat (mapcar 'om-read-list-from-string lines) 1)) (:text-list lines) (:text (if lines (reduce #'(lambda (s1 s2) (concatenate 'string s1 (string #\Newline) s2)) lines) "")) )) (defun format-to-text-lines (data mode) (case mode (:lines-cols (loop for line in (list! data) collect (format nil "~{~a~^ ~}" (list! line)))) (:lines (loop for line in (list! 
data) collect (format nil "~A" line))) (:text-list data) (otherwise (list (format nil "~A" data))) )) (defmethod om-init-instance ((self textbuffer) &optional initargs) (let ((supplied-contents (find-value-in-kv-list initargs :contents))) ( in - mode ( find - value - in - kv - list : input - mode ) ) (when supplied-contents (setf (contents self) (format-to-text-lines supplied-contents (input-mode self)))) self)) (defmethod prepare-obj-for-request ((object textbuffer) (box omboxeditcall)) (setf (reader object) (get-edit-param box :output-mode)) object) (defmethod get-slot-val ((obj textbuffer) slot-name) (if (string-equal (string slot-name) "contents") (format-from-text-lines (contents obj) (reader obj)) (call-next-method))) (defmethod* save-as-text ((self textbuffer) &optional (path "data") (type "txt")) (save-as-text (format-from-text-lines (contents self) :text) path type)) (defmethod objfromobjs ((model pathname) (target textbuffer)) (when (probe-file model) (om-init-instance target `((:contents ,(lines-from-file model)) (:input-mode :text-list))))) (defmethod objfromobjs ((model string) (target textbuffer)) (objfromobjs (pathname model) target)) (defmethod objfromobjs ((model null) (target textbuffer)) target) (defclass TextBufferBox (omboxeditcall) ()) (defmethod special-box-type ((class-name (eql 'textbuffer))) 'TextBufferBox) (defmethod display-modes-for-object ((self textbuffer)) '(:mini-view :text :hidden)) (defmethod get-cache-display-for-text ((self textbuffer) box) (declare (ignore box)) (list (list :text-buffer (if (contents self) (format nil "[~D lines]" (length (contents self))) "<EMPTY>")) )) (defmethod draw-mini-view ((self textbuffer) (box TextBufferBox) x y w h &optional time) (font (om-def-font :small))) (om-with-font font (loop for line in (list! (contents self)) for y = 22 then (+ y 12) do (if (< y (- h 8)) (let ((line (format nil "~A" line))) (if (> (om-string-size line font) (- w 10)) (om-draw-string 5 y (concatenate 'string (subseq line 0 (min (length line) (- (round w (om-string-size "a" font)) 3))) " ...")) (om-draw-string 5 y line))) (progn (om-draw-string (- (round w 2) 10) (- h 10) "...") (return))))))) (defmethod gen-code-for-call ((self TextBufferBox) &optional args) (declare (ignore args)) `(let ((tb ,(call-next-method))) (setf (reader tb) ,(get-edit-param self :output-mode)) tb)) (defmethod* textbuffer-eval ((self textbuffer)) :indoc '("a textfile object") :doc "Evaluates the contents of a TEXTBUFFER (<self>). Evaluation allows defining functions or data in Lisp and running commands or programs from the TEXTBUFFER." 
(eval (read-from-string (apply 'string+ (append '("(progn ") (contents self) '(")")))))) (defmethod* textbuffer-read ((self textbuffer) mode) :indoc '("a textfile object" "a reading mode") :initvals '(nil :lines-cols) :menuins '((1 (("value" :value) ("list" :list) ("lines" :lines) ("lines-cols" :lines-cols) ("list of text lines" :text-list) ("text" :text)))) :doc "Reads the contents of a TEXTBUFFER (<self>) as Lisp values depending on <mode>: - :value = reads the contents as a single value - :list = reads the successive values and returns a list - :lines = reads each line as a single value (returns a list) - :lines-cols = reads each line as a list of values (returns a list of lists) - :text-list = collects the contents as list of text lines (returns a list of strings) - :text = collects the contents as a single text (string with line returns) " (format-from-text-lines (contents self) mode)) (defmethod object-has-editor ((self textbuffer)) t) (defmethod get-editor-class ((self textbuffer)) 'textbuffer-editor) (defclass textbuffer-editor (omeditor) ()) (defmethod editor-window-class ((self textbuffer-editor)) 'textbuffer-editor-window) (defclass textbuffer-editor-window (om-lisp::om-text-editor-window) ((editor :initarg :editor :initform nil :accessor editor))) (defmethod om-lisp::save-operation-enabled ((self textbuffer-editor-window)) nil) REDEFINED SOME EDITOR - WINDOW METHODS (defmethod open-editor-window ((self textbuffer-editor)) (if (and (window self) (om-window-open-p (window self))) (om-select-window (window self)) (let* ((textbuffer (object-value self)) (edwin (om-lisp::om-open-text-editor :contents (contents textbuffer) :class 'textbuffer-editor-window :lisp nil :title (editor-window-title self) :x (and (window-pos (object self)) (om-point-x (window-pos (object self)))) :y (and (window-pos (object self)) (om-point-y (window-pos (object self)))) :w (and (window-size (object self)) (om-point-x (window-size (object self)))) :h (and (window-size (object self)) (om-point-y (window-size (object self)))) ))) (setf (editor edwin) self) (setf (window self) edwin) edwin))) (defmethod om-lisp::om-text-editor-activate-callback ((self textbuffer-editor-window) activatep) (when (editor self) (editor-activate (editor self) activatep))) (defmethod om-lisp::om-text-editor-destroy-callback ((self textbuffer-editor-window)) (when (editor self) (editor-close (editor self)) (setf (window (editor self)) nil) (setf (g-components (editor self)) nil))) ( defmethod om - view - key - handler ( ( self textbuffer - editor - window ) key ) ( defmethod om - window - resized ( ( self textbuffer - editor - window ) size ) ( setf ( window - size ( object ( editor self ) ) ) size ) ) ) ( defmethod om - window - moved ( ( self textbuffer - editor - window ) pos ) ( setf ( window - pos ( object ( editor self ) ) ) pos ) ) ) (defmethod om-lisp::om-text-editor-modified ((self textbuffer-editor-window)) (let ((textbuffer (object-value (editor self)))) (when textbuffer (setf (contents textbuffer) (om-lisp::om-get-text-editor-text self)))) (when (equal self (om-front-window)) (report-modifications (editor self)))) (defmethod update-to-editor ((self textbuffer-editor) (from OMBoxEditCall)) (when (window self) (om-lisp::om-set-text-editor-text (window self) (contents (object-value self))))) (defmethod om-lisp::om-text-editor-resized ((win textbuffer-editor-window) w h) (when (editor win) (setf (window-size (object (editor win))) (omp w h)))) (defmethod om-lisp::om-text-editor-moved ((win textbuffer-editor-window) x y) (when 
(editor win) (setf (window-pos (object (editor win))) (omp x y))))
d77705462f729757a60737c8e0667248d0283d8aecee24cd848360461be3ea15
hmac/kite
ModuleGroupCompiler.hs
# LANGUAGE DuplicateRecordFields # module ModuleGroupCompiler where This module takes a ModuleGroup , compiles each module in it , and somehow -- merges it all together. import Control.Monad.Except ( MonadError ) import qualified Chez.Compile as Chez import Data.Name import ModuleGroup import Syn.Typed import Util -- We'll attempt this as follows: -- All top level declarations will be qualified with their module name. -- When a module A imports module B, we'll insert top level declarations into A which alias all the declarations in This approach should generalise well -- to import aliases, qualification, hiding etc when we support them. -- Example: -- -- module A module B module C -- import B import C -- -- a = ... b = ... c = ... -- b = B.b c = C.c a = ... -- a = C.a data CompiledModule a = CompiledModule { cModuleName :: PkgModuleName , cModuleImports :: [Import] , cModuleExports :: [Name] , cModuleEnv :: a , cModuleDeps :: [CompiledModule a] } deriving Show compileToChez :: MonadError Chez.Error m => TypedModuleGroup -> m (CompiledModule Chez.Env) compileToChez (TypedModuleGroup m deps) = do env <- foldM Chez.compileModule mempty (deps ++ [m]) pure CompiledModule { cModuleName = moduleName m , cModuleImports = moduleImports m , cModuleExports = moduleExports m , cModuleEnv = Chez.builtins <> env , cModuleDeps = [] }
null
https://raw.githubusercontent.com/hmac/kite/f58758f20310e23cb50eb41537ec04bfa820cc90/src/ModuleGroupCompiler.hs
haskell
merges it all together. We'll attempt this as follows: All top level declarations will be qualified with their module name. When a module A imports module B, we'll insert top level declarations into A to import aliases, qualification, hiding etc when we support them. Example: module A module B module C import B import C a = ... b = ... c = ... b = B.b c = C.c a = ... a = C.a
# LANGUAGE DuplicateRecordFields # module ModuleGroupCompiler where This module takes a ModuleGroup , compiles each module in it , and somehow import Control.Monad.Except ( MonadError ) import qualified Chez.Compile as Chez import Data.Name import ModuleGroup import Syn.Typed import Util which alias all the declarations in This approach should generalise well data CompiledModule a = CompiledModule { cModuleName :: PkgModuleName , cModuleImports :: [Import] , cModuleExports :: [Name] , cModuleEnv :: a , cModuleDeps :: [CompiledModule a] } deriving Show compileToChez :: MonadError Chez.Error m => TypedModuleGroup -> m (CompiledModule Chez.Env) compileToChez (TypedModuleGroup m deps) = do env <- foldM Chez.compileModule mempty (deps ++ [m]) pure CompiledModule { cModuleName = moduleName m , cModuleImports = moduleImports m , cModuleExports = moduleExports m , cModuleEnv = Chez.builtins <> env , cModuleDeps = [] }
407ee40283aebc3bac58021dc50312a5c859d70c3042b2c503bece177ecdf7f4
ygmpkk/house
Char.hs
{-# OPTIONS -fno-implicit-prelude #-} ----------------------------------------------------------------------------- -- | Module : Data . Copyright : ( c ) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : -- Stability : stable -- Portability : portable -- The type and associated operations . -- ----------------------------------------------------------------------------- module Data.Char ( Char , String -- * Character classification -- | Unicode characters are divided into letters, numbers, marks, -- punctuation, symbols, separators (including spaces) and others -- (including control characters). The full set of Unicode character attributes is not accessible -- in this library. , isAscii, isLatin1, isControl, isSpace , isLower, isUpper, isAlpha, isAlphaNum, isPrint : : Bool -- * Case conversion : : -- * Single digit characters : : Int : : -- * Numeric representations : : Int : : -- * String representations : : ShowS , lexLitChar -- :: ReadS String : : ReadS Implementation checked wrt . 98 lib report , 1/99 . ) where #ifdef __GLASGOW_HASKELL__ import GHC.Base import GHC.Show import GHC.Read (readLitChar, lexLitChar) import GHC.Unicode import GHC.Num #endif #ifdef __HUGS__ import Hugs.Char #endif #ifdef __NHC__ import Prelude import Prelude(Char,String) import Char #endif | Convert a single digit ' ' to the corresponding ' Int ' . -- This function fails unless its argument satisfies 'isHexDigit', -- but recognises both upper and lower-case hexadecimal digits ( i.e. @\'0\'@ .. @\'9\'@ , @\'a\'@ .. @\'f\'@ , @\'A\'@ .. @\'F\'@ ) . digitToInt :: Char -> Int digitToInt c | isDigit c = ord c - ord '0' | c >= 'a' && c <= 'f' = ord c - ord 'a' + 10 | c >= 'A' && c <= 'F' = ord c - ord 'A' + 10 | otherwise = error ("Char.digitToInt: not a digit " ++ show c) -- sigh
null
https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/base/Data/Char.hs
haskell
# OPTIONS -fno-implicit-prelude # --------------------------------------------------------------------------- | License : BSD-style (see the file libraries/base/LICENSE) Maintainer : Stability : stable Portability : portable --------------------------------------------------------------------------- * Character classification | Unicode characters are divided into letters, numbers, marks, punctuation, symbols, separators (including spaces) and others (including control characters). in this library. * Case conversion * Single digit characters * Numeric representations * String representations :: ReadS String This function fails unless its argument satisfies 'isHexDigit', but recognises both upper and lower-case hexadecimal digits sigh
Module : Data . Copyright : ( c ) The University of Glasgow 2001 The type and associated operations . module Data.Char ( Char , String The full set of Unicode character attributes is not accessible , isAscii, isLatin1, isControl, isSpace , isLower, isUpper, isAlpha, isAlphaNum, isPrint : : Bool : : : : Int : : : : Int : : : : ShowS : : ReadS Implementation checked wrt . 98 lib report , 1/99 . ) where #ifdef __GLASGOW_HASKELL__ import GHC.Base import GHC.Show import GHC.Read (readLitChar, lexLitChar) import GHC.Unicode import GHC.Num #endif #ifdef __HUGS__ import Hugs.Char #endif #ifdef __NHC__ import Prelude import Prelude(Char,String) import Char #endif | Convert a single digit ' ' to the corresponding ' Int ' . ( i.e. @\'0\'@ .. @\'9\'@ , @\'a\'@ .. @\'f\'@ , @\'A\'@ .. @\'F\'@ ) . digitToInt :: Char -> Int digitToInt c | isDigit c = ord c - ord '0' | c >= 'a' && c <= 'f' = ord c - ord 'a' + 10 | c >= 'A' && c <= 'F' = ord c - ord 'A' + 10
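The digitToInt documentation in the Data.Char record above is easiest to see with a few concrete evaluations; this usage sketch is not part of the record, and the results follow directly from the ord arithmetic in the definition shown.

    import Data.Char (digitToInt)

    -- '0'..'9' map to 0..9; 'a'..'f' and 'A'..'F' both map to 10..15.
    hexDigits :: [Int]
    hexDigits = map digitToInt "09afAF"   -- [0,9,10,15,10,15]

    -- Characters outside those ranges hit the "not a digit" error case,
    -- e.g. digitToInt 'g'.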
46f4d4435c8170c3473a2f1630a44c00074f3db6f6f7cd6f44ab3186fd36dcc0
discus-lang/salt
CreateMainSH.hs
module War.Task.Create.CreateMainSH where import War.Task.Create.Way import War.Task.Job () import War.Driver.Base import qualified War.Task.Job.Shell as Shell import qualified War.Task.Job.Diff as Diff import qualified Data.Set as Set -- | Run Main.sh files. create :: Way -> Set FilePath -> FilePath -> Maybe Chain create way allFiles filePath | takeFileName filePath == "Main.sh" = let sourceDir = takeDirectory filePath buildDir = sourceDir </> "war-" ++ wayName way testName = filePath mainShellStdout = buildDir </> "Main.shell.stdout" mainShellStderr = buildDir </> "Main.shell.stderr" mainShellStderrDiff = buildDir </> "Main.compile.stderr.diff" mainErrorCheck = sourceDir </> "Main.error.check" shouldSucceed = not $ Set.member mainErrorCheck allFiles shell = jobOfSpec (JobId testName (wayName way)) $ Shell.Spec filePath sourceDir buildDir mainShellStdout mainShellStderr shouldSucceed diffError = jobOfSpec (JobId testName (wayName way)) $ Diff.Spec mainErrorCheck mainShellStderr mainShellStderrDiff in Just $ Chain $ [shell] ++ (if shouldSucceed then [] else [diffError]) | otherwise = Nothing
null
https://raw.githubusercontent.com/discus-lang/salt/33c14414ac7e238fdbd8161971b8b8ac67fff569/src/war/War/Task/Create/CreateMainSH.hs
haskell
| Run Main.sh files.
module War.Task.Create.CreateMainSH where import War.Task.Create.Way import War.Task.Job () import War.Driver.Base import qualified War.Task.Job.Shell as Shell import qualified War.Task.Job.Diff as Diff import qualified Data.Set as Set create :: Way -> Set FilePath -> FilePath -> Maybe Chain create way allFiles filePath | takeFileName filePath == "Main.sh" = let sourceDir = takeDirectory filePath buildDir = sourceDir </> "war-" ++ wayName way testName = filePath mainShellStdout = buildDir </> "Main.shell.stdout" mainShellStderr = buildDir </> "Main.shell.stderr" mainShellStderrDiff = buildDir </> "Main.compile.stderr.diff" mainErrorCheck = sourceDir </> "Main.error.check" shouldSucceed = not $ Set.member mainErrorCheck allFiles shell = jobOfSpec (JobId testName (wayName way)) $ Shell.Spec filePath sourceDir buildDir mainShellStdout mainShellStderr shouldSucceed diffError = jobOfSpec (JobId testName (wayName way)) $ Diff.Spec mainErrorCheck mainShellStderr mainShellStderrDiff in Just $ Chain $ [shell] ++ (if shouldSucceed then [] else [diffError]) | otherwise = Nothing
84e9eb09b60afaecc24af6b34c5a6cca27d7ada66c925a194f1f72b5be4a39bf
fossas/fossa-cli
SbtDependencyTreeJson.hs
# LANGUAGE RecordWildCards # module Strategy.Scala.SbtDependencyTreeJson ( analyze, parseSbtArtifact, ) where import Control.Effect.Diagnostics (Diagnostics) import Control.Monad (void) import Control.Monad.Identity (Identity) import Data.Aeson ( FromJSON (parseJSON), withArray, withObject, (.:), ) import Data.Foldable (for_) import Data.Maybe (fromMaybe) import Data.String.Conversion (toText) import Data.Text (Text) import Data.Vector (toList) import Data.Void (Void) import DepTypes ( DepType (MavenType), Dependency (..), VerConstraint (CEq), ) import Effect.Grapher (GrapherC, Has, deep, direct, edge, evalGrapher, run) import Effect.ReadFS (ReadFS, readContentsJson) import Graphing (Graphing, shrinkRoots) import Path (Abs, File, Path) import Strategy.Scala.Common (SbtArtifact (..)) import Text.Megaparsec ( MonadParsec (try), Parsec, empty, errorBundlePretty, optional, runParser, some, (<|>), ) import Text.Megaparsec.Char (alphaNumChar, char) import Text.Megaparsec.Char.Lexer qualified as Lexer | Represents output of Sbt 's Treeview command . -- Ref: #L22 -- -- It looks like: -- > [ -- > { -- > "text": "groupId:artifactId:version", -- > "children": [ -- > { -- > "text": "...", -- > "children": [ -- > ... -- > ] -- > }, -- > ] -- > } -- > ] -- - newtype SbtTree = SbtTree [SbtDep] deriving (Eq, Ord, Show) data SbtDep = SbtDep { artifact :: SbtArtifact , dependsOn :: [SbtDep] } deriving (Show, Eq, Ord) instance FromJSON SbtTree where parseJSON = withArray "SbtDepRoot" $ \arr -> do SbtTree . toList <$> traverse parseJSON arr instance FromJSON SbtDep where parseJSON = withObject "SbtDep" $ \obj -> do txt <- obj .: "text" children <- obj .: "children" artifact <- case runParser parseSbtArtifact "sbtArtifact" txt of Left err -> fail $ errorBundlePretty err Right sa -> pure sa pure $ SbtDep artifact children type ParserT = Parsec Void Text parseSbtArtifact :: ParserT SbtArtifact parseSbtArtifact = do groupId <- parseValidProjectIdentifier artifactId <- (":" >> parseValidProjectIdentifier) version <- lexeme (":" >> parseValidVersionConstraint) evictedByVersion <- try . optional $ parseEviction pure $ SbtArtifact groupId artifactId (fromMaybe version evictedByVersion) where sc :: ParserT () sc = Lexer.space (void $ some $ char ' ' <|> char '\t') empty empty lexeme :: ParserT a -> ParserT a lexeme = Lexer.lexeme sc symbol :: Text -> ParserT Text symbol = Lexer.symbol sc parseEviction :: ParserT (Text) parseEviction = lexeme ("(evicted by" <|> "(evicted by:") *> parseValidProjectIdentifier <* symbol ")" parseValidProjectIdentifier :: ParserT Text parseValidProjectIdentifier = toText <$> some (alphaNumChar <|> char '.' <|> char '-' <|> char '_') parseValidVersionConstraint :: ParserT Text parseValidVersionConstraint = toText <$> some ( alphaNumChar <|> char '.' <|> char '-' <|> char '_' <|> char ',' -- If there is eviction, sbt tree may provide version constraint instead of <|> char '[' -- resolved version. -sbt.org/1.x/docs/Library-Dependencies.html <|> char ']' <|> char '(' <|> char ')' <|> char '+' ) buildGraph :: SbtTree -> Graphing Dependency buildGraph (SbtTree deps) = shrinkRoots . run . 
evalGrapher $ buildGraph' where buildGraph' :: GrapherC Dependency Identity () buildGraph' = do for_ deps $ \dep -> do direct $ toDependency (artifact dep) unfold dep unfold candidate = do let parent = toDependency (artifact candidate) deep parent for_ (dependsOn candidate) $ \child -> do let childDep = toDependency (artifact child) edge parent childDep unfold child toDependency :: SbtArtifact -> Dependency toDependency SbtArtifact{..} = Dependency { dependencyType = MavenType , dependencyName = toText groupId <> ":" <> toText artifactId , dependencyVersion = Just (CEq version) , dependencyLocations = mempty , dependencyEnvironments = mempty , dependencyTags = mempty } analyze :: (Has ReadFS sig m, Has Diagnostics sig m) => Path Abs File -> m (Graphing Dependency) analyze treeJsonPath = buildGraph <$> readContentsJson treeJsonPath
null
https://raw.githubusercontent.com/fossas/fossa-cli/ad4ab0b369995c7fc6d6056d0038141c492ad8cb/src/Strategy/Scala/SbtDependencyTreeJson.hs
haskell
Ref: #L22 It looks like: > [ > { > "text": "groupId:artifactId:version", > "children": [ > { > "text": "...", > "children": [ > ... > ] > }, > ] > } > ] - If there is eviction, sbt tree may provide version constraint instead of resolved version. -sbt.org/1.x/docs/Library-Dependencies.html
# LANGUAGE RecordWildCards # module Strategy.Scala.SbtDependencyTreeJson ( analyze, parseSbtArtifact, ) where import Control.Effect.Diagnostics (Diagnostics) import Control.Monad (void) import Control.Monad.Identity (Identity) import Data.Aeson ( FromJSON (parseJSON), withArray, withObject, (.:), ) import Data.Foldable (for_) import Data.Maybe (fromMaybe) import Data.String.Conversion (toText) import Data.Text (Text) import Data.Vector (toList) import Data.Void (Void) import DepTypes ( DepType (MavenType), Dependency (..), VerConstraint (CEq), ) import Effect.Grapher (GrapherC, Has, deep, direct, edge, evalGrapher, run) import Effect.ReadFS (ReadFS, readContentsJson) import Graphing (Graphing, shrinkRoots) import Path (Abs, File, Path) import Strategy.Scala.Common (SbtArtifact (..)) import Text.Megaparsec ( MonadParsec (try), Parsec, empty, errorBundlePretty, optional, runParser, some, (<|>), ) import Text.Megaparsec.Char (alphaNumChar, char) import Text.Megaparsec.Char.Lexer qualified as Lexer | Represents output of Sbt 's Treeview command . newtype SbtTree = SbtTree [SbtDep] deriving (Eq, Ord, Show) data SbtDep = SbtDep { artifact :: SbtArtifact , dependsOn :: [SbtDep] } deriving (Show, Eq, Ord) instance FromJSON SbtTree where parseJSON = withArray "SbtDepRoot" $ \arr -> do SbtTree . toList <$> traverse parseJSON arr instance FromJSON SbtDep where parseJSON = withObject "SbtDep" $ \obj -> do txt <- obj .: "text" children <- obj .: "children" artifact <- case runParser parseSbtArtifact "sbtArtifact" txt of Left err -> fail $ errorBundlePretty err Right sa -> pure sa pure $ SbtDep artifact children type ParserT = Parsec Void Text parseSbtArtifact :: ParserT SbtArtifact parseSbtArtifact = do groupId <- parseValidProjectIdentifier artifactId <- (":" >> parseValidProjectIdentifier) version <- lexeme (":" >> parseValidVersionConstraint) evictedByVersion <- try . optional $ parseEviction pure $ SbtArtifact groupId artifactId (fromMaybe version evictedByVersion) where sc :: ParserT () sc = Lexer.space (void $ some $ char ' ' <|> char '\t') empty empty lexeme :: ParserT a -> ParserT a lexeme = Lexer.lexeme sc symbol :: Text -> ParserT Text symbol = Lexer.symbol sc parseEviction :: ParserT (Text) parseEviction = lexeme ("(evicted by" <|> "(evicted by:") *> parseValidProjectIdentifier <* symbol ")" parseValidProjectIdentifier :: ParserT Text parseValidProjectIdentifier = toText <$> some (alphaNumChar <|> char '.' <|> char '-' <|> char '_') parseValidVersionConstraint :: ParserT Text parseValidVersionConstraint = toText <$> some ( alphaNumChar <|> char '.' <|> char '-' <|> char '_' <|> char ']' <|> char '(' <|> char ')' <|> char '+' ) buildGraph :: SbtTree -> Graphing Dependency buildGraph (SbtTree deps) = shrinkRoots . run . 
evalGrapher $ buildGraph' where buildGraph' :: GrapherC Dependency Identity () buildGraph' = do for_ deps $ \dep -> do direct $ toDependency (artifact dep) unfold dep unfold candidate = do let parent = toDependency (artifact candidate) deep parent for_ (dependsOn candidate) $ \child -> do let childDep = toDependency (artifact child) edge parent childDep unfold child toDependency :: SbtArtifact -> Dependency toDependency SbtArtifact{..} = Dependency { dependencyType = MavenType , dependencyName = toText groupId <> ":" <> toText artifactId , dependencyVersion = Just (CEq version) , dependencyLocations = mempty , dependencyEnvironments = mempty , dependencyTags = mempty } analyze :: (Has ReadFS sig m, Has Diagnostics sig m) => Path Abs File -> m (Graphing Dependency) analyze treeJsonPath = buildGraph <$> readContentsJson treeJsonPath
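The comments in the sbt record above describe the tree-view JSON and the group:artifact:version text (optionally carrying an eviction note) that parseSbtArtifact reads. A small usage sketch follows, assuming the record's module and its SbtArtifact type are importable from the same source tree; the input string is invented for illustration.

    -- Mirrors how the record's FromJSON instance invokes the parser.
    import Strategy.Scala.SbtDependencyTreeJson (parseSbtArtifact)
    import Text.Megaparsec (runParser)
    import Data.Text (pack)

    demo = runParser parseSbtArtifact "sbtArtifact" (pack "org.typelevel:cats-core:2.9.0")
    -- Expected: Right (SbtArtifact "org.typelevel" "cats-core" "2.9.0").
    -- When an eviction suffix parses, the evicted-by version replaces the
    -- declared one (the fromMaybe in parseSbtArtifact).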
37935754a926fa3de01d7e240fd3d7698fef00c6adaaee9deb804b643d5ad4b2
jhunt/shout
test.lisp
#!/usr/bin/env sbcl --script (load "build/quicklisp/setup.lisp") (require "prove") (prove:run :shout-test :reporter :list)
null
https://raw.githubusercontent.com/jhunt/shout/6b8ec9ac2f967b970dae2d9b45275c7a69c23b30/test.lisp
lisp
#!/usr/bin/env sbcl --script (load "build/quicklisp/setup.lisp") (require "prove") (prove:run :shout-test :reporter :list)
f4a94a8d392d67f436c912a68e1615aece9e2bbafb2eaff84972827c9c5c7a3f
rjnw/sham
syntax.rkt
#lang racket (require sham/md sham/ir/ast sham/ir/simple (prefix-in ll- sham/llvm/ir/simple)) (require (for-syntax racket/syntax syntax/parse) syntax/parse/define) (provide (all-defined-out)) TODO for maintaing backward compatibility with " : A DSL for Fast DSLs " paper o / w just use simple stmt (define set!^ stmt-set!) (define if^ stmt-if) (define continue^ stmt-continue) (define break^ stmt-break) (define svoid^ stmt-void) (define expr^ stmt-expr) (define return^ stmt-return) (define return-void^ stmt-return-void) (define (stmt-block stmts) (stmt-block (flatten (for/list ([s stmts]) (cond [(sham:expr? s) (stmt-expr s)] [(sham:stmt:block? s) (sham:stmt:block-stmts s)] [(sham:stmt? s) s] [(list? s) s] [(ll-inst? s) s] [else (error "block expects a stmt/expr given: " s)]))))) (define (block^ . stmts) (if (eq? (length stmts) 1) (car stmts) (stmt-block stmts))) (define (while^ expr . stmts) (stmt-while expr (stmt-block stmts))) (define-syntax (let^ stx) (syntax-parse stx [(_ ([arg (~optional val) (~datum :) typ] ...) s:expr ... e:expr) #`(let ([arg (expr-ref (gensym (quasiquote arg)))] ...) (expr-let (list arg ...) (list (~? val #f) ...) (list typ ...) (block^ s ...) e))])) (define-simple-macro (slet^ args ...) (stmt-expr (let^ args ... (expr-void)))) (define-simple-macro (switch^ v:expr [check:expr body:expr ...] ... default) (stmt-switch v (list check ...) (list (block^ body ...) ...) default)) (define-simple-macro (label^ name:id stmt ...) (stmt-label `name (block^ stmt ...))) (define-simple-macro (label-jump^ name:id) (ll-bru `name)) ;; expr (define ref^ expr-ref) (define op^ expr-op) (define-syntax (access^ stx) (syntax-parse stx [(_ struct-field value) #`(e-access struct-field value)] [(_ struct-name:id field-name:id value) #`(e-access (cons struct-name field-name) value)])) (define evoid^ expr-void) (define etype expr-etype) (define (app^ rator #:flags (flags #f) . rands) (expr-app rator flags rands)) defs (define-simple-macro (function-body^ [(args (~datum :) arg-types) ...] body ...) #:with (arg-nums ...) (build-list (length (syntax->list #`(args ...))) (λ (i) #`#,i)) (slet^ ([args (ll-val-param arg-nums) : arg-types] ...) body ...)) (define-syntax (function^ stx) (syntax-parse stx [(_ (~optional (~seq (~or (~datum #:md) (~datum #:metadata)) md)) (name:expr (args:id (~datum :) arg-types:expr) ... (~optional (~and va (~datum ...))) (~datum :) ret-type:expr) body:expr ...) #:with (arg-type-names ...) (generate-temporaries #`(args ...)) #:with va-type (if (attribute va) #`#t #`#f) #`(let ([arg-type-names arg-types] ...) (def-function (~? (~@ #:md md)) (quasiquote name) (ll-type-function arg-type-names ... va-type ret-type) (function-body^ [(args : arg-type-names) ...] body ...)))])) (define-simple-macro (efunction^ header ... ebody) (function^ header ... (return^ ebody))) (define-syntax (struct^ stx) (syntax-parse stx [(_ (~optional (~seq #:md md)) name:id (field-name:id field-type:expr) ...) #`(def-struct (~? (~@ #:md md)) (quasiquote name) `(field-name ...) (list field-type ...))])) (define-syntax (module^ stx) (syntax-parse stx [(_ (~optional (~seq #:md md)) name:id (defs ...)) #`(def-module (~? (~@ #:md md)) (quasiquote name) (list defs ...))]))
null
https://raw.githubusercontent.com/rjnw/sham/6e0524b1eb01bcda83ae7a5be6339da4257c6781/sham-base/sham/ir/syntax.rkt
racket
expr
#lang racket (require sham/md sham/ir/ast sham/ir/simple (prefix-in ll- sham/llvm/ir/simple)) (require (for-syntax racket/syntax syntax/parse) syntax/parse/define) (provide (all-defined-out)) TODO for maintaing backward compatibility with " : A DSL for Fast DSLs " paper o / w just use simple stmt (define set!^ stmt-set!) (define if^ stmt-if) (define continue^ stmt-continue) (define break^ stmt-break) (define svoid^ stmt-void) (define expr^ stmt-expr) (define return^ stmt-return) (define return-void^ stmt-return-void) (define (stmt-block stmts) (stmt-block (flatten (for/list ([s stmts]) (cond [(sham:expr? s) (stmt-expr s)] [(sham:stmt:block? s) (sham:stmt:block-stmts s)] [(sham:stmt? s) s] [(list? s) s] [(ll-inst? s) s] [else (error "block expects a stmt/expr given: " s)]))))) (define (block^ . stmts) (if (eq? (length stmts) 1) (car stmts) (stmt-block stmts))) (define (while^ expr . stmts) (stmt-while expr (stmt-block stmts))) (define-syntax (let^ stx) (syntax-parse stx [(_ ([arg (~optional val) (~datum :) typ] ...) s:expr ... e:expr) #`(let ([arg (expr-ref (gensym (quasiquote arg)))] ...) (expr-let (list arg ...) (list (~? val #f) ...) (list typ ...) (block^ s ...) e))])) (define-simple-macro (slet^ args ...) (stmt-expr (let^ args ... (expr-void)))) (define-simple-macro (switch^ v:expr [check:expr body:expr ...] ... default) (stmt-switch v (list check ...) (list (block^ body ...) ...) default)) (define-simple-macro (label^ name:id stmt ...) (stmt-label `name (block^ stmt ...))) (define-simple-macro (label-jump^ name:id) (ll-bru `name)) (define ref^ expr-ref) (define op^ expr-op) (define-syntax (access^ stx) (syntax-parse stx [(_ struct-field value) #`(e-access struct-field value)] [(_ struct-name:id field-name:id value) #`(e-access (cons struct-name field-name) value)])) (define evoid^ expr-void) (define etype expr-etype) (define (app^ rator #:flags (flags #f) . rands) (expr-app rator flags rands)) defs (define-simple-macro (function-body^ [(args (~datum :) arg-types) ...] body ...) #:with (arg-nums ...) (build-list (length (syntax->list #`(args ...))) (λ (i) #`#,i)) (slet^ ([args (ll-val-param arg-nums) : arg-types] ...) body ...)) (define-syntax (function^ stx) (syntax-parse stx [(_ (~optional (~seq (~or (~datum #:md) (~datum #:metadata)) md)) (name:expr (args:id (~datum :) arg-types:expr) ... (~optional (~and va (~datum ...))) (~datum :) ret-type:expr) body:expr ...) #:with (arg-type-names ...) (generate-temporaries #`(args ...)) #:with va-type (if (attribute va) #`#t #`#f) #`(let ([arg-type-names arg-types] ...) (def-function (~? (~@ #:md md)) (quasiquote name) (ll-type-function arg-type-names ... va-type ret-type) (function-body^ [(args : arg-type-names) ...] body ...)))])) (define-simple-macro (efunction^ header ... ebody) (function^ header ... (return^ ebody))) (define-syntax (struct^ stx) (syntax-parse stx [(_ (~optional (~seq #:md md)) name:id (field-name:id field-type:expr) ...) #`(def-struct (~? (~@ #:md md)) (quasiquote name) `(field-name ...) (list field-type ...))])) (define-syntax (module^ stx) (syntax-parse stx [(_ (~optional (~seq #:md md)) name:id (defs ...)) #`(def-module (~? (~@ #:md md)) (quasiquote name) (list defs ...))]))
fd30a0168f6ee31c576b99235c04d404243d24293a6a95755d362a7cc22c206d
ruricolist/serapeum
heap.lisp
(in-package :serapeum) Heap implementation adapted from timers package for SBCL . (defstruct (heap (:constructor make-heap (&key (size 100) (element-type t) (key #'identity) (test #'>=) &aux (vector (make-array size :adjustable t :fill-pointer 0 :element-type element-type))))) "Create an empty (max) heap. SIZE is a hint for at least how many items will be used; it is not a limit but providing it speeds up initializing the heap. ELEMENT-TYPE is like the `:element-type' argument to `make-array'. It may, or may not, restrict the element type of the heap, depending on the request type and what the Lisp implementation supports. It is for optimization, not type safety. KEY and TEST are used to order the heap elements." (vector #() :type vector :read-only t) (key #'identity :type function) (test #'>= :type function)) (declaim (inline heap-parent heap-left heap-right)) (defun heap-parent (i) (declare (array-index i)) (ash (1- i) -1)) (defun heap-left (i) (declare (array-index i)) (1+ (ash i 1))) (defun heap-right (i) (declare (array-index i)) (+ 2 (ash i 1))) (defun heapify (vec start key test) (declare (function key test) (array-index start) (vector vec)) (fbind ((ge test)) (declare (ftype (-> (t t) t) ge)) (with-vector-dispatch () vec (let ((l (heap-left start)) (r (heap-right start)) (size (length vec)) largest) (with-item-key-function (key) (setf largest (if (and (< l size) (not (ge (key (aref vec start)) (key (aref vec l))))) l start)) (when (and (< r size) (not (ge (key (aref vec largest)) (key (aref vec r))))) (setf largest r))) (when (/= largest start) (rotatef (aref vec largest) (aref vec start)) (heapify vec largest key test))) vec))) (defun heap-insert (heap new-item) "Insert NEW-ITEM into HEAP." (let ((vec (heap-vector heap))) (fbind ((ge (heap-test heap))) (vector-push-extend nil vec) (with-item-key-function (key (heap-key heap)) (loop for i = (1- (length vec)) then parent-i for parent-i = (heap-parent i) while (and (> i 0) (not (ge (key (aref vec parent-i)) (key new-item)))) do (setf (aref vec i) (aref vec parent-i)) finally (setf (aref vec i) new-item) (return-from heap-insert i)))))) (defun heap-maximum (heap) "Return (without extracting) the greatest element in HEAP." (let ((vec (heap-vector heap))) (unless (zerop (length vec)) (aref vec 0)))) (defun heap-extract (heap i) "Destructively extract the element in heap at index I, counting from the greatest element." (declare (heap heap) (array-index i)) (let ((vec (heap-vector heap))) (unless (> (length vec) i) (error "Heap underflow")) (with-accessors ((key heap-key) (test heap-test)) heap (prog1 (aref vec i) (setf (aref vec i) (aref vec (1- (length vec)))) (decf (fill-pointer vec)) (heapify vec i key test))))) (defun heap-extract-maximum (heap) "Destructively extract the greatest element of HEAP." (heap-extract heap 0)) (defun heap-extract-all (heap) "Destructively extract all the elements of HEAP from greatest to least." (declare (heap heap)) (loop while (> (length (heap-vector heap)) 0) collect (heap-extract-maximum heap)))
null
https://raw.githubusercontent.com/ruricolist/serapeum/d98b4863d7cdcb8a1ed8478cc44ab41bdad5635b/heap.lisp
lisp
it is not a
(in-package :serapeum) Heap implementation adapted from timers package for SBCL . (defstruct (heap (:constructor make-heap (&key (size 100) (element-type t) (key #'identity) (test #'>=) &aux (vector (make-array size :adjustable t :fill-pointer 0 :element-type element-type))))) "Create an empty (max) heap. limit but providing it speeds up initializing the heap. ELEMENT-TYPE is like the `:element-type' argument to `make-array'. It may, or may not, restrict the element type of the heap, depending on the request type and what the Lisp implementation supports. It is for optimization, not type safety. KEY and TEST are used to order the heap elements." (vector #() :type vector :read-only t) (key #'identity :type function) (test #'>= :type function)) (declaim (inline heap-parent heap-left heap-right)) (defun heap-parent (i) (declare (array-index i)) (ash (1- i) -1)) (defun heap-left (i) (declare (array-index i)) (1+ (ash i 1))) (defun heap-right (i) (declare (array-index i)) (+ 2 (ash i 1))) (defun heapify (vec start key test) (declare (function key test) (array-index start) (vector vec)) (fbind ((ge test)) (declare (ftype (-> (t t) t) ge)) (with-vector-dispatch () vec (let ((l (heap-left start)) (r (heap-right start)) (size (length vec)) largest) (with-item-key-function (key) (setf largest (if (and (< l size) (not (ge (key (aref vec start)) (key (aref vec l))))) l start)) (when (and (< r size) (not (ge (key (aref vec largest)) (key (aref vec r))))) (setf largest r))) (when (/= largest start) (rotatef (aref vec largest) (aref vec start)) (heapify vec largest key test))) vec))) (defun heap-insert (heap new-item) "Insert NEW-ITEM into HEAP." (let ((vec (heap-vector heap))) (fbind ((ge (heap-test heap))) (vector-push-extend nil vec) (with-item-key-function (key (heap-key heap)) (loop for i = (1- (length vec)) then parent-i for parent-i = (heap-parent i) while (and (> i 0) (not (ge (key (aref vec parent-i)) (key new-item)))) do (setf (aref vec i) (aref vec parent-i)) finally (setf (aref vec i) new-item) (return-from heap-insert i)))))) (defun heap-maximum (heap) "Return (without extracting) the greatest element in HEAP." (let ((vec (heap-vector heap))) (unless (zerop (length vec)) (aref vec 0)))) (defun heap-extract (heap i) "Destructively extract the element in heap at index I, counting from the greatest element." (declare (heap heap) (array-index i)) (let ((vec (heap-vector heap))) (unless (> (length vec) i) (error "Heap underflow")) (with-accessors ((key heap-key) (test heap-test)) heap (prog1 (aref vec i) (setf (aref vec i) (aref vec (1- (length vec)))) (decf (fill-pointer vec)) (heapify vec i key test))))) (defun heap-extract-maximum (heap) "Destructively extract the greatest element of HEAP." (heap-extract heap 0)) (defun heap-extract-all (heap) "Destructively extract all the elements of HEAP from greatest to least." (declare (heap heap)) (loop while (> (length (heap-vector heap)) 0) collect (heap-extract-maximum heap)))
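The index arithmetic in the heap record's heap-parent, heap-left and heap-right is the standard array encoding of a binary heap; written out,

\[
\mathrm{parent}(i) = \Big\lfloor \frac{i-1}{2} \Big\rfloor, \qquad
\mathrm{left}(i) = 2i + 1, \qquad
\mathrm{right}(i) = 2i + 2,
\]

which is exactly what (ash (1- i) -1), (1+ (ash i 1)) and (+ 2 (ash i 1)) compute with shifts.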
2ef49e9060fde6d4b6c11b3349300682ed3fcab2448aa68565d7a9913772fea2
zxymike93/SICP
329.rkt
#lang sicp (define (logical-not s) (cond ((= s 0) 1) ((= s 1) 0) (else (error "Invalid signal " s)))) (define (logical-and s1 s2) (cond ((and (= s1 1) (= s2 1)) 1) ((or (= s1 0) (= s2 0)) 0) (else (error "Invalid signal " s1 s2)))) (define (inverter input output) (define (invert-proc) (let ((new-value (logical-not (get-signal input)))) (after-delay inverter-delay (lambda () (set-signal! output new-value))))) (add-action! input invert-proc) 'ok) (define (and-gate a1 a2 output) (define (and-proc) (let ((new-value (logical-and (get-signal a1) (get-signal a2)))) (after-delay and-gate-delay (lambda () (set-signal! output new-value))))) (add-action! a1 and-proc) (add-action! a2 and-proc) 'ok) 因为或门是 0 0 - > 0 , 0 1 ->1 , 1 0 - > 1 , 1 1 - > 1 而与门是 1 1 - > 1 , 1 0 - > 0 , 0 1 - > 0 , 0 0 - > 0 ;; 可以将输入 o1, o2 过非门、过与门,再将输出过非门,实现与门 因此,or - delay 的时间是 3 * inverter - delay + 1 * and - gate - delay (define (or-gate o1 o2 output) (define (invert in) (let ((out (make-wire))) (inverter in out) out)) (define (or-proc) (let ((new-value (invert (logical-and (get-signal (invert o1)) (get-signal (invert o2)))))) (lambda () (set-signal! output new-value)))) (add-action! o1 or-proc) (add-action! o2 or-proc) 'ok)
null
https://raw.githubusercontent.com/zxymike93/SICP/9d8e84d6a185bf4d7f28c414fc3359741384beb5/chapter3/329.rkt
racket
可以将输入 o1, o2 过非门、过与门,再将输出过非门,实现与门
#lang sicp (define (logical-not s) (cond ((= s 0) 1) ((= s 1) 0) (else (error "Invalid signal " s)))) (define (logical-and s1 s2) (cond ((and (= s1 1) (= s2 1)) 1) ((or (= s1 0) (= s2 0)) 0) (else (error "Invalid signal " s1 s2)))) (define (inverter input output) (define (invert-proc) (let ((new-value (logical-not (get-signal input)))) (after-delay inverter-delay (lambda () (set-signal! output new-value))))) (add-action! input invert-proc) 'ok) (define (and-gate a1 a2 output) (define (and-proc) (let ((new-value (logical-and (get-signal a1) (get-signal a2)))) (after-delay and-gate-delay (lambda () (set-signal! output new-value))))) (add-action! a1 and-proc) (add-action! a2 and-proc) 'ok) 因为或门是 0 0 - > 0 , 0 1 ->1 , 1 0 - > 1 , 1 1 - > 1 而与门是 1 1 - > 1 , 1 0 - > 0 , 0 1 - > 0 , 0 0 - > 0 因此,or - delay 的时间是 3 * inverter - delay + 1 * and - gate - delay (define (or-gate o1 o2 output) (define (invert in) (let ((out (make-wire))) (inverter in out) out)) (define (or-proc) (let ((new-value (invert (logical-and (get-signal (invert o1)) (get-signal (invert o2)))))) (lambda () (set-signal! output new-value)))) (add-action! o1 or-proc) (add-action! o2 or-proc) 'ok)
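The Chinese comments in the SICP exercise record above say, roughly: since or is 00 -> 0, 01 -> 1, 10 -> 1, 11 -> 1 while and is 1 only for 11, the inputs o1 and o2 can be inverted, fed through an and-gate, and the result inverted again to realise or; the comment then gives its own delay estimate in terms of inverter-delay and and-gate-delay. The underlying identity is De Morgan's law:

\[
o_1 \lor o_2 \;=\; \lnot\,(\lnot o_1 \land \lnot o_2),
\]

which matches the record's or-proc: it applies invert to both inputs, combines them with logical-and, and inverts the result.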
eaf25d28471ff0a78e82fc4b8c52df81412093347ab1bc7e6c8e72d8f2117317
vyorkin/tiger
symbol.ml
open Core_kernel

module L = Location

type t = {
  id : int;
  name : string;
} [@@deriving compare, equal, sexp, show { with_path = false }]

let (=) x y = equal x y
let (<>) x y = not (equal x y)

let next_id =
  let n = ref (-1) in
  fun () -> incr n; !n

let mk =
  let tbl = Hashtbl.create (module String) ~size:128 in
  fun name ->
    match Hashtbl.find tbl name with
    | Some id -> { id; name }
    | None ->
      let id = next_id () in
      Hashtbl.add_exn tbl ~key:name ~data:id;
      { id; name }

let mk_unique name =
  { name; id = next_id () }

let to_string s =
  sprintf "%s <#%d>" s.name s.id

let to_string_loc s =
  sprintf "%s %s" (to_string s.L.value) (L.range_string s.L.loc)
null
https://raw.githubusercontent.com/vyorkin/tiger/54dd179c1cd291df42f7894abce3ee9064e18def/chapter8/lib/symbol.ml
ocaml
open Core_kernel

module L = Location

type t = {
  id : int;
  name : string;
} [@@deriving compare, equal, sexp, show { with_path = false }]

let (=) x y = equal x y
let (<>) x y = not (equal x y)

let next_id =
  let n = ref (-1) in
  fun () -> incr n; !n

let mk =
  let tbl = Hashtbl.create (module String) ~size:128 in
  fun name ->
    match Hashtbl.find tbl name with
    | Some id -> { id; name }
    | None ->
      let id = next_id () in
      Hashtbl.add_exn tbl ~key:name ~data:id;
      { id; name }

let mk_unique name =
  { name; id = next_id () }

let to_string s =
  sprintf "%s <#%d>" s.name s.id

let to_string_loc s =
  sprintf "%s %s" (to_string s.L.value) (L.range_string s.L.loc)
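(* A minimal usage sketch, not part of symbol.ml: it assumes the file above is
   built as the [Symbol] module of this project (with Core_kernel and the
   project's [Location] module available) and touches only mk, mk_unique, the
   shadowed (=)/(<>) and to_string defined above. *)
let () =
  let a = Symbol.mk "x" in
  let b = Symbol.mk "x" in        (* interned: shares the id of [a] *)
  let c = Symbol.mk_unique "x" in (* gets a fresh id on every call *)
  assert Symbol.(a = b);
  assert Symbol.(a <> c);
  print_endline (Symbol.to_string a)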
4d49ef3cebba5f842975955841d70986a04c2fa6665889bc8194c8847c93ff37
xapi-project/xen-api
import.ml
* Copyright ( C ) 2006 - 2009 Citrix Systems Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation ; version 2.1 only . with the special * exception on linking described in file LICENSE . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * Copyright (C) 2006-2009 Citrix Systems Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; version 2.1 only. with the special * exception on linking described in file LICENSE. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. *) (** HTTP handler for importing a VM from a stream. * @group Import and Export *) module D = Debug.Make (struct let name = "import" end) open D module Listext = Xapi_stdext_std.Listext module Xstringext = Xapi_stdext_std.Xstringext module Unixext = Xapi_stdext_unix.Unixext open Http open Importexport open Xapi_stdext_pervasives.Pervasiveext open Client type import_failure = | Some_checksums_failed | Cannot_handle_chunked | Failed_to_find_object of string | Attached_disks_not_found | Unexpected_file of string (* expected *) * string (* actual *) exception IFailure of import_failure open Xapi_vm_memory_constraints open Vm_memory_constraints type metadata_options = { (* If true, don't create any database objects. *) dry_run: bool If true , treat the import as if it is preparation for a live migration . * This has the following consequences : * - We must perform extra checks on the VM object - do we have enough memory ? Are the CPU flags compatible ? Is there an HA plan for it ? * - If the migration is a dry run we do n't need to check for , since VDI.mirror will have created them during a real migration . * - If the migration is for real , we will expect the VM export code on the source host to have mapped the VDI locations onto their * mirrored counterparts which are present on this host . * This has the following consequences: * - We must perform extra checks on the VM object - do we have enough memory? Are the CPU flags compatible? Is there an HA plan for it? * - If the migration is a dry run we don't need to check for VDIs, since VDI.mirror will have created them during a real migration. * - If the migration is for real, we will expect the VM export code on the source host to have mapped the VDI locations onto their * mirrored counterparts which are present on this host. *) live: bool An optional src VDI - > destination VDI rewrite list vdi_map: (string * string) list } type import_type = (* Import the metadata of a VM whose disks already exist. *) | Metadata_import of metadata_options Import a VM and stream its disks into the specified SR . | Full_import of API.ref_SR (** Allows the import to be customised *) type config = { (* Determines how to handle the import - see above. *) import_type: import_type true if we want to restore as a perfect backup . 
Currently we preserve the interface MAC addresses but we still regenerate ( because we lack the internal APIs to keep them interface MAC addresses but we still regenerate UUIDs (because we lack the internal APIs to keep them *) full_restore: bool ; (* true if the user has provided '--force' *) force: bool } let is_live config = match config.import_type with Metadata_import {live; _} -> live | _ -> false (** List of (datamodel classname * Reference in export * Reference in database) *) type table = (string * string * string) list (** Track the table of external reference -> internal reference and a list of cleanup functions to delete all the objects we've created, in the event of error. *) type state = { mutable table: table ; mutable created_vms: table ; mutable cleanup: (Context.t -> (Rpc.call -> Rpc.response) -> API.ref_session -> unit) list ; export: obj list } let initial_state export = {table= []; created_vms= []; cleanup= []; export} let log_reraise msg f x = try f x with e -> Backtrace.is_important e ; error "Import failed: %s" msg ; raise e let lookup x (table : table) = let id = Ref.string_of x in try let _, _, r = List.find (fun (_, i, _) -> i = id) table in Ref.of_string r with Not_found as e -> Backtrace.reraise e (IFailure (Failed_to_find_object id)) let exists x (table : table) = let id = Ref.string_of x in List.filter (fun (_, i, _) -> i = id) table <> [] Using a reference string from the original export , find the XMLRPC snapshot of the appropriate object . of the appropriate object. *) let find_in_export x export = try let obj = List.find (fun obj -> obj.id = x) export in obj.snapshot with Not_found as e -> Backtrace.reraise e (IFailure (Failed_to_find_object x)) let choose_one = function [x] -> Some x | x :: _ -> Some x | [] -> None (* Return the list of non-CDROM VDIs ie those which will be streamed-in *) let non_cdrom_vdis (x : header) = let all_vbds = List.filter (fun x -> x.cls = Datamodel_common._vbd) x.objects in let all_vbds = List.map (fun x -> API.vBD_t_of_rpc x.snapshot) all_vbds in let all_disk_vbds = List.filter (fun x -> x.API.vBD_type <> `CD) all_vbds in let all_disk_vdis = List.map (fun x -> Ref.string_of x.API.vBD_VDI) all_disk_vbds in Remove all those whose SR has content - type = " iso " let all_disk_vdis = List.filter (fun vdi -> let vdir = API.vDI_t_of_rpc (find_in_export vdi x.objects) in let sr = API.sR_t_of_rpc (find_in_export (Ref.string_of vdir.API.vDI_SR) x.objects) in sr.API.sR_content_type <> "iso" ) all_disk_vdis in let all_vdis = List.filter (fun x -> x.cls = Datamodel_common._vdi) x.objects in List.filter (fun x -> false || List.mem x.id all_disk_vdis || (API.vDI_t_of_rpc x.snapshot).API.vDI_type = `suspend ) all_vdis let get_vm_record snapshot = let vm_record = API.vM_t_of_rpc snapshot in (* Ensure that the domain_type is set correctly *) if vm_record.API.vM_domain_type = `unspecified then { vm_record with API.vM_domain_type= Xapi_vm_helpers.derive_domain_type ~hVM_boot_policy:vm_record.API.vM_HVM_boot_policy } else vm_record Check to see if another VM exists with the same MAC seed . Check VM uuids do n't already exist . Check that if a VDI exists then it is a CDROM . 
let assert_can_restore_backup ~__context rpc session_id (x : header) = let get_mac_seed vm = if List.mem_assoc Xapi_globs.mac_seed vm.API.vM_other_config then Some (List.assoc Xapi_globs.mac_seed vm.API.vM_other_config, vm) else None in let get_vm_uuid_of_snap s = let snapshot_of = Ref.string_of s.API.vM_snapshot_of in try if Xstringext.String.startswith "Ref:" snapshot_of then (* This should be a snapshot in the archive *) let v = List.find (fun v -> v.cls = Datamodel_common._vm && v.id = snapshot_of) x.objects in let v = get_vm_record v.snapshot in Some v.API.vM_uuid else if Xstringext.String.startswith Ref.ref_prefix snapshot_of then (* This should be a snapshot in a live system *) if Db.is_valid_ref __context s.API.vM_snapshot_of then Some (Db.VM.get_uuid ~__context ~self:s.API.vM_snapshot_of) else Some (List.assoc Db_names.uuid (Helpers.vm_string_to_assoc s.API.vM_snapshot_metadata) ) else None with _ -> None in This function should be called when a VM / snapshot to import has the same mac seed as an existing VM . They are considered compatible only in the following cases : - Both are VMs , and having the same uuid - Both are snapshots , and the VMs they were derived from are the same one - One is snapshot , one is VM , and the snapshot was derived from the VM mac seed as an existing VM. They are considered compatible only in the following cases: - Both are VMs, and having the same uuid - Both are snapshots, and the VMs they were derived from are the same one - One is snapshot, one is VM, and the snapshot was derived from the VM *) let is_compatible v1 v2 = match (v1.API.vM_is_a_snapshot, v2.API.vM_is_a_snapshot) with | false, false -> v1.API.vM_uuid = v2.API.vM_uuid | true, true -> let v1' = get_vm_uuid_of_snap v1 in let v2' = get_vm_uuid_of_snap v2 in v1' <> None && v2' <> None && v1' = v2' | true, false -> let v1' = get_vm_uuid_of_snap v1 in v1' = Some v2.API.vM_uuid | false, true -> let v2' = get_vm_uuid_of_snap v2 in v2' = Some v1.API.vM_uuid in let import_vms = List.filter_map (fun x -> if x.cls <> Datamodel_common._vm then None else let x = get_vm_record x.snapshot in get_mac_seed x ) x.objects in let existing_vms = List.filter_map (fun (_, v) -> get_mac_seed v) (Client.VM.get_all_records ~rpc ~session_id) in List.iter (fun (mac, vm) -> List.iter (fun (mac', vm') -> if mac = mac' && not (is_compatible vm vm') then raise Api_errors.(Server_error (duplicate_mac_seed, [mac'])) ) existing_vms ) import_vms let assert_can_live_import __context vm_record = let assert_memory_available () = let host = Helpers.get_localhost ~__context in let host_mem_available = Memory_check.host_compute_free_memory_with_maximum_compression ~__context ~host None in let main, shadow = Memory_check.vm_compute_start_memory ~__context vm_record in let mem_reqd_for_vm = Int64.add main shadow in if host_mem_available < mem_reqd_for_vm then raise (Api_errors.Server_error ( Api_errors.host_not_enough_free_memory , [ Int64.to_string mem_reqd_for_vm ; Int64.to_string host_mem_available ] ) ) in match vm_record.API.vM_power_state with | `Running | `Paused -> assert_memory_available () | _other -> () Assert that the local host , which is the host we are live - migrating the VM to , * has free capacity on a PGPU from the given VGPU 's GPU group . * has free capacity on a PGPU from the given VGPU's GPU group. 
*) let assert_can_live_import_vgpu ~__context vgpu_record = let host = Helpers.get_localhost ~__context in let local_pgpus = Db.PGPU.get_refs_where ~__context ~expr: Db_filter_types.( And ( Eq ( Field "GPU_group" , Literal (Ref.string_of vgpu_record.API.vGPU_GPU_group) ) , Eq (Field "host", Literal (Ref.string_of host)) ) ) in let capacity_exists = List.exists (fun pgpu -> try Xapi_pgpu_helpers.assert_capacity_exists_for_VGPU_type ~__context ~self:pgpu ~vgpu_type:vgpu_record.API.vGPU_type ; true with _ -> false ) local_pgpus in if not capacity_exists then raise Api_errors.( Server_error ( vm_requires_gpu , [ Ref.string_of vgpu_record.API.vGPU_VM ; Ref.string_of vgpu_record.API.vGPU_GPU_group ] ) ) (* The signature for a set of functions which we must provide to be able to import an object type. *) module type HandlerTools = sig (* A type which represents how we should deal with the import of an object. *) type precheck_t (* Compare the state of the database with the metadata to be imported. *) (* Returns a result which signals what we should do to import the metadata. *) val precheck : Context.t -> config -> (Rpc.call -> Rpc.response) -> API.ref_session -> state -> obj -> precheck_t (* Handle the result of the precheck function, but don't create any database objects. *) (* Add objects to the state table if necessary, to keep track of what would have been imported.*) val handle_dry_run : Context.t -> config -> (Rpc.call -> Rpc.response) -> API.ref_session -> state -> obj -> precheck_t -> unit (* Handle the result of the check function, creating database objects if necessary. *) (* For certain combinations of result and object type, this can be aliased to handle_dry_run. *) val handle : Context.t -> config -> (Rpc.call -> Rpc.response) -> API.ref_session -> state -> obj -> precheck_t -> unit end (* Make a handler for a set of handler functions. 
*) module MakeHandler = functor (M : HandlerTools) -> struct let handle __context config rpc session_id state obj = let dry_run = match config.import_type with | Metadata_import {dry_run= true; _} -> true | _ -> false in let precheck_result = M.precheck __context config rpc session_id state obj in if dry_run then M.handle_dry_run __context config rpc session_id state obj precheck_result else M.handle __context config rpc session_id state obj precheck_result end module Host : HandlerTools = struct type precheck_t = Found_host of API.ref_host | Found_no_host let precheck __context _config _rpc _session_id _state x = let host_record = API.host_t_of_rpc x.snapshot in try Found_host (Db.Host.get_by_uuid ~__context ~uuid:host_record.API.host_uuid) with _ -> Found_no_host let handle_dry_run __context _config _rpc _session_id state x precheck_result = let host = match precheck_result with | Found_host host' -> host' | Found_no_host -> Ref.null in state.table <- (x.cls, x.id, Ref.string_of host) :: state.table let handle = handle_dry_run end module VM : HandlerTools = struct type precheck_t = | Default_template of API.ref_VM | Replace of API.ref_VM * API.vM_t | Fail of exn | Skip | Clean_import of API.vM_t let precheck __context config _rpc _session_id _state x = let vm_record = get_vm_record x.snapshot in we ca n't import a VM if it is not in a resting state and requires DMC DMC *) let is_dmc_compatible = match vm_record.API.vM_power_state with | (`Running | `Suspended | `Paused) when not @@ Xapi_vm_helpers.is_dmc_compatible_vmr ~__context ~vmr:vm_record -> false | _ -> true (* but might require memory adjustments *) in let is_default_template = vm_record.API.vM_is_default_template || vm_record.API.vM_is_a_template && List.mem_assoc Xapi_globs.default_template_key vm_record.API.vM_other_config && List.assoc Xapi_globs.default_template_key vm_record.API.vM_other_config = "true" in if not @@ is_dmc_compatible then Fail Api_errors.( Server_error (dynamic_memory_control_unavailable, [vm_record.API.vM_uuid]) ) else if is_default_template then (* If the VM is a default template, then pick up the one with the same name. *) let template = try List.hd (Db.VM.get_by_name_label ~__context ~label:vm_record.API.vM_name_label ) with _ -> Ref.null in Default_template template else let import_action = Check for an existing VM with the same UUID - if one exists , what we do next (* will depend on the state of the VM and whether the import is forced. *) let get_vm_by_uuid () = Db.VM.get_by_uuid ~__context ~uuid:vm_record.API.vM_uuid in let vm_uuid_exists () = try ignore (get_vm_by_uuid ()) ; true with _ -> false in (* If full_restore is true then we want to keep the VM uuid - this may involve replacing an existing VM. *) if config.full_restore && vm_uuid_exists () then let vm = get_vm_by_uuid () in (* The existing VM cannot be replaced if it is running. *) (* If import is forced then skip the VM, else throw an error. *) let power_state = Db.VM.get_power_state ~__context ~self:vm in if power_state <> `Halted then if config.force then ( debug "Forced import skipping VM %s as VM to replace was not halted." vm_record.API.vM_uuid ; Skip ) else Fail (Api_errors.Server_error ( Api_errors.vm_bad_power_state , [ Ref.string_of vm ; Record_util.power_state_to_string `Halted ; Record_util.power_state_to_string power_state ] ) ) else (* The existing VM should not be replaced if the version to be imported is no newer, *) (* unless the import is forced. 
*) let existing_version = Db.VM.get_version ~__context ~self:vm in let version_to_import = vm_record.API.vM_version in if existing_version >= version_to_import && config.force = false then Fail (Api_errors.Server_error ( Api_errors.vm_to_import_is_not_newer_version , [ Ref.string_of vm ; Int64.to_string existing_version ; Int64.to_string version_to_import ] ) ) else Replace (vm, vm_record) else Clean_import vm_record in match import_action with | Replace (_, vm_record) | Clean_import vm_record -> if is_live config then assert_can_live_import __context vm_record ; import_action | _ -> import_action let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Skip -> () | Fail e -> raise e | Default_template template -> state.table <- (x.cls, x.id, Ref.string_of template) :: state.table ; state.created_vms <- (x.cls, x.id, Ref.string_of template) :: state.created_vms | Clean_import _ | Replace _ -> let dummy_vm = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vm) :: state.table let handle __context config rpc session_id state x precheck_result = This function assumes we 've already checked for and dealt with any existing VM with the same UUID . let do_import vm_record = let task_id = Ref.string_of (Context.get_task_id __context) in (* Remove the grant guest API access key unconditionally (it's only for our RHEL4 templates atm) *) let other_config = List.filter (fun (key, _) -> key <> Xapi_globs.grant_api_access) vm_record.API.vM_other_config in If not performing a full restore then generate a fresh MAC seed let other_config = if config.full_restore then other_config else (Xapi_globs.mac_seed, Uuidx.(to_string (make ()))) :: List.filter (fun (x, _) -> x <> Xapi_globs.mac_seed) other_config in let vm_record = {vm_record with API.vM_other_config= other_config} in (* Preserve genid for cross-pool migrates, because to the guest the * disk looks like it hasn't changed. * Preserve genid for templates, since they're not going to be started. * Generate a fresh genid for normal VM imports. *) let vm_record = if is_live config || vm_record.API.vM_is_a_template then vm_record else { vm_record with API.vM_generation_id= Xapi_vm_helpers.fresh_genid ~current_genid:vm_record.API.vM_generation_id () } in let vm_record = match vm_record.API.vM_power_state with | `Halted -> make sure we do n't use DMC let safe_constraints = Vm_memory_constraints.reset_to_safe_defaults ~constraints:(Vm_memory_constraints.extract ~vm_record) in debug "Disabling DMC for VM %s; dynamic_{min,max},target <- %Ld" vm_record.API.vM_uuid safe_constraints.dynamic_max ; { vm_record with API.vM_memory_static_min= safe_constraints.static_min ; vM_memory_dynamic_min= safe_constraints.dynamic_min ; vM_memory_target= safe_constraints.target ; vM_memory_dynamic_max= safe_constraints.dynamic_max ; vM_memory_static_max= safe_constraints.static_max } | _otherwise -> the precheck should make sure we do n't have a VM that requires DMC . But just to be safe , we do n't update memory settings on any VM that is not in rest requires DMC. 
But just to be safe, we don't update memory settings on any VM that is not in rest *) vm_record in let vm_record = if vm_has_field ~x ~name:"has_vendor_device" then vm_record else {vm_record with API.vM_has_vendor_device= false} in let vm_record = { vm_record with API.vM_memory_overhead= Memory_check.vm_compute_memory_overhead ~vm_record } in let vm_record = {vm_record with API.vM_protection_policy= Ref.null} in Full restore preserves UUIDs , so if we are replacing an existing VM the version number should be incremented (* to keep track of how many times this VM has been restored. If not a full restore, then we don't need to keep track. *) let vm_record = if config.full_restore then {vm_record with API.vM_version= Int64.add vm_record.API.vM_version 1L} else {vm_record with API.vM_version= 0L} in Clear the appliance field - in the case of DR we will reconstruct the appliance separately . let vm_record = {vm_record with API.vM_appliance= Ref.null} in Correct ha - restart - priority for pre boston imports let vm_record = match vm_record.API.vM_ha_restart_priority with | ("0" | "1" | "2" | "3") as order -> { vm_record with API.vM_ha_restart_priority= "restart" ; API.vM_order= Int64.of_string order } | _ -> vm_record in Initialize platform["device - model " ] if it is not set let vm_record = { vm_record with API.vM_platform= Xapi_vm_helpers.ensure_device_model_profile_present ~__context ~domain_type:vm_record.API.vM_domain_type ~is_a_template:vm_record.API.vM_is_a_template vm_record.API.vM_platform } in let vm = log_reraise ("failed to create VM with name-label " ^ vm_record.API.vM_name_label) (fun value -> let vm = Xapi_vm_helpers .create_from_record_without_checking_licence_feature_for_vendor_device ~__context rpc session_id value in if config.full_restore then Db.VM.set_uuid ~__context ~self:vm ~value:value.API.vM_uuid ; vm ) vm_record in state.cleanup <- (fun __context rpc session_id -> (* Need to get rid of the import task or we cannot destroy the VM *) Helpers.log_exn_continue (Printf.sprintf "Attempting to remove import from current_operations of VM: %s" (Ref.string_of vm) ) (fun () -> Db.VM.remove_from_current_operations ~__context ~self:vm ~key:task_id ) () ; Db.VM.set_power_state ~__context ~self:vm ~value:`Halted ; Client.VM.destroy ~rpc ~session_id ~self:vm ) :: state.cleanup ; (* Restore the last_booted_record too (critical if suspended but might as well do it all the time) *) Db.VM.set_last_booted_record ~__context ~self:vm ~value:vm_record.API.vM_last_booted_record ; Db.VM.set_last_boot_CPU_flags ~__context ~self:vm ~value:vm_record.API.vM_last_boot_CPU_flags ; TaskHelper.operate_on_db_task ~__context (fun t -> ( try Db.VM.remove_from_other_config ~__context ~self:vm ~key:Xapi_globs.import_task with _ -> () ) ; Db.VM.add_to_other_config ~__context ~self:vm ~key:Xapi_globs.import_task ~value:(Ref.string_of t) ) ; (* Set the power_state and suspend_VDI if the VM is suspended. * If anything goes wrong, still continue if forced. 
*) if vm_record.API.vM_power_state = `Suspended then ( try let vdi = (lookup vm_record.API.vM_suspend_VDI) state.table in Db.VM.set_power_state ~__context ~self:vm ~value:`Suspended ; Db.VM.set_suspend_VDI ~__context ~self:vm ~value:vdi ; let vm_metrics = Db.VM.get_metrics ~__context ~self:vm in Db.VM_metrics.set_current_domain_type ~__context ~self:vm_metrics ~value:vm_record.API.vM_domain_type with e -> if not config.force then ( Backtrace.is_important e ; let msg = "Failed to find VM's suspend_VDI: " ^ Ref.string_of vm_record.API.vM_suspend_VDI in error "Import failed: %s" msg ; raise e ) ) else Db.VM.set_power_state ~__context ~self:vm ~value:`Halted ; (* We might want to import a control domain *) Db.VM.set_is_control_domain ~__context ~self:vm ~value:vm_record.API.vM_is_control_domain ; Db.VM.set_resident_on ~__context ~self:vm ~value: ( try lookup vm_record.API.vM_resident_on state.table with _ -> Ref.null ) ; Db.VM.set_affinity ~__context ~self:vm ~value: (try lookup vm_record.API.vM_affinity state.table with _ -> Ref.null) ; (* Update the snapshot metadata. At this points, the snapshot_of field is not relevant as it use the export ref. However, as the corresponding VM object may have not been created yet, this fiels contains some useful information to update it later. *) Db.VM.set_is_a_snapshot ~__context ~self:vm ~value:vm_record.API.vM_is_a_snapshot ; Db.VM.set_snapshot_info ~__context ~self:vm ~value:vm_record.API.vM_snapshot_info ; Db.VM.set_snapshot_of ~__context ~self:vm ~value:vm_record.API.vM_snapshot_of ; Db.VM.set_snapshot_time ~__context ~self:vm ~value:vm_record.API.vM_snapshot_time ; Db.VM.set_transportable_snapshot_id ~__context ~self:vm ~value:vm_record.API.vM_transportable_snapshot_id ; (* VM might have suspend_SR that does not exist on this pool *) if None = Helpers.check_sr_exists ~__context ~self:vm_record.API.vM_suspend_SR then Db.VM.set_suspend_SR ~__context ~self:vm ~value:Ref.null ; Db.VM.set_parent ~__context ~self:vm ~value:vm_record.API.vM_parent ; ( try let gm = lookup vm_record.API.vM_guest_metrics state.table in Db.VM.set_guest_metrics ~__context ~self:vm ~value:gm with _ -> () ) ; Db.VM.set_bios_strings ~__context ~self:vm ~value:vm_record.API.vM_bios_strings ; debug "Created VM: %s (was %s)" (Ref.string_of vm) x.id ; (* Although someone could sneak in here and attempt to power on the VM, it doesn't really matter since no VBDs have been created yet. We don't bother doing this if --force is set otherwise on error the VM remains locked. *) if not config.force then Db.VM.add_to_current_operations ~__context ~self:vm ~key:task_id ~value:`import ; Xapi_vm_lifecycle.update_allowed_operations ~__context ~self:vm ; state.table <- (x.cls, x.id, Ref.string_of vm) :: state.table ; state.created_vms <- (x.cls, x.id, Ref.string_of vm) :: state.created_vms in match precheck_result with | Skip | Fail _ | Default_template _ -> handle_dry_run __context config rpc session_id state x precheck_result | Clean_import vm_record -> do_import vm_record | Replace (vm, vm_record) -> Destroy the existing VM , along with its VIFs and VBDs . 
debug "Replacing VM %s" vm_record.API.vM_uuid ; Helpers.call_api_functions ~__context (fun rpc session_id -> let vifs = Db.VM.get_VIFs ~__context ~self:vm in List.iter (fun vif -> Client.VIF.destroy ~rpc ~session_id ~self:vif) vifs ; let vbds = Db.VM.get_VBDs ~__context ~self:vm in List.iter (fun vbd -> Client.VBD.destroy ~rpc ~session_id ~self:vbd) vbds ; Client.VM.destroy ~rpc ~session_id ~self:vm ) ; do_import vm_record end (** Create the guest metrics *) module GuestMetrics : HandlerTools = struct type precheck_t = OK let precheck __context _config _rpc _session_id _state _x = OK let handle_dry_run __context _config _rpc _session_id state x _precheck_result = let dummy_gm = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_gm) :: state.table let handle __context _config _rpc _session_id state x _precheck_result = let gm_record = API.vM_guest_metrics_t_of_rpc x.snapshot in let gm = Ref.make () in Db.VM_guest_metrics.create ~__context ~ref:gm ~uuid:(Uuidx.to_string (Uuidx.make ())) ~os_version:gm_record.API.vM_guest_metrics_os_version ~pV_drivers_version:gm_record.API.vM_guest_metrics_PV_drivers_version ~pV_drivers_up_to_date: gm_record.API.vM_guest_metrics_PV_drivers_up_to_date ~memory:gm_record.API.vM_guest_metrics_memory ~disks:gm_record.API.vM_guest_metrics_disks ~networks:gm_record.API.vM_guest_metrics_networks ~pV_drivers_detected:gm_record.API.vM_guest_metrics_PV_drivers_detected ~other:gm_record.API.vM_guest_metrics_other ~last_updated:gm_record.API.vM_guest_metrics_last_updated ~other_config:gm_record.API.vM_guest_metrics_other_config ~live:gm_record.API.vM_guest_metrics_live ~can_use_hotplug_vbd:gm_record.API.vM_guest_metrics_can_use_hotplug_vbd ~can_use_hotplug_vif:gm_record.API.vM_guest_metrics_can_use_hotplug_vif ; state.table <- (x.cls, x.id, Ref.string_of gm) :: state.table end * If we 're restoring VM metadata only then lookup the SR by uuid . If we ca n't find the SR then we will still try to match later ( except CDROMs ) the SR then we will still try to match VDIs later (except CDROMs) *) module SR : HandlerTools = struct type precheck_t = | Found_SR of API.ref_SR | Found_no_SR | Will_use_SR of API.ref_SR | SR_not_needed let precheck __context config rpc session_id _state x = let sr_record = API.sR_t_of_rpc x.snapshot in match config.import_type with | Metadata_import _ -> ( try Look up the existing SR record let sr = Client.SR.get_by_uuid ~rpc ~session_id ~uuid:sr_record.API.sR_uuid in Found_SR sr with _ -> let msg = match sr_record.API.sR_content_type with | "iso" -> "- will eject disk" (* Will be handled specially in handle_vdi *) | _ -> "- will still try to find individual VDIs" in warn "Failed to find SR with UUID: %s content-type: %s %s" sr_record.API.sR_uuid sr_record.API.sR_content_type msg ; Found_no_SR ) | Full_import sr -> if sr_record.API.sR_content_type = "iso" then SR_not_needed (* this one will be ejected *) else Will_use_SR sr let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_SR sr | Will_use_SR sr -> state.table <- (x.cls, x.id, Ref.string_of sr) :: state.table | Found_no_SR | SR_not_needed -> () let handle = handle_dry_run end * If we 're restoring VM metadata only then lookup the VDI by uuid . If restoring metadata only : lookup the VDI by location , falling back to content_id if available . If importing everything : create a new VDI in the SR On any error : If the SR can not be found then we skip this VDI . 
If the SR can be found AND is an iso SR then we attempt to lookup the VDI by name_label If the SR can be found AND is not an iso SR then we attempt to create the VDI in it If restoring metadata only: lookup the VDI by location, falling back to content_id if available. If importing everything: create a new VDI in the SR On any error: If the SR cannot be found then we skip this VDI. If the SR can be found AND is an iso SR then we attempt to lookup the VDI by name_label If the SR can be found AND is not an iso SR then we attempt to create the VDI in it *) module VDI : HandlerTools = struct type precheck_t = | Found_iso of API.ref_VDI | Found_no_iso | Found_disk of API.ref_VDI | Found_no_disk of exn | Skip | Create of API.vDI_t let precheck __context config rpc session_id state x = let vdi_record = API.vDI_t_of_rpc x.snapshot in let original_sr = API.sR_t_of_rpc (find_in_export (Ref.string_of vdi_record.API.vDI_SR) state.export) in if original_sr.API.sR_content_type = "iso" then ( Best effort : locate a VDI in any shared ISO SR with a matching VDI.location let iso_srs = List.filter (fun self -> Client.SR.get_content_type ~rpc ~session_id ~self = "iso" && Client.SR.get_type ~rpc ~session_id ~self <> "udev" ) (Client.SR.get_all ~rpc ~session_id) in match List.filter (fun (_, vdir) -> vdir.API.vDI_location = vdi_record.API.vDI_location && List.mem vdir.API.vDI_SR iso_srs ) (Client.VDI.get_all_records ~rpc ~session_id) |> choose_one with | Some (vdi, _) -> Found_iso vdi | None -> warn "Found no ISO VDI with location = %s; attempting to eject" vdi_record.API.vDI_location ; Found_no_iso ) else match config.import_type with | Metadata_import {vdi_map; _} -> ( let mapto = if List.mem_assoc Constants.storage_migrate_vdi_map_key vdi_record.API.vDI_other_config then Some (Ref.of_string (List.assoc Constants.storage_migrate_vdi_map_key vdi_record.API.vDI_other_config ) ) else None in let vdi_records = Client.VDI.get_all_records ~rpc ~session_id in let find_by_sr_and_location sr location = vdi_records |> List.filter (fun (_, vdir) -> vdir.API.vDI_location = location && vdir.API.vDI_SR = sr ) |> choose_one |> Option.map fst in let find_by_uuid uuid = vdi_records |> List.filter (fun (_, vdir) -> vdir.API.vDI_uuid = uuid) |> choose_one |> Option.map fst in let _scsiid = "SCSIid" in let scsiid_of vdi_record = if List.mem_assoc _scsiid vdi_record.API.vDI_sm_config then Some (List.assoc _scsiid vdi_record.API.vDI_sm_config) else None in let find_by_scsiid x = vdi_records |> List.filter_map (fun (rf, vdir) -> if scsiid_of vdir = Some x then Some (rf, vdir) else None ) |> choose_one in let by_vdi_map = Look up the mapping by both uuid and SCSIid match if List.mem_assoc vdi_record.API.vDI_uuid vdi_map then Some (List.assoc vdi_record.API.vDI_uuid vdi_map) else match scsiid_of vdi_record with | None -> None | Some x -> if List.mem_assoc x vdi_map then Some (List.assoc x vdi_map) else None with | Some destination -> ( match find_by_uuid destination with | Some x -> Some x | None -> ( match find_by_scsiid destination with | Some (rf, rc) -> info "VDI %s (SCSIid %s) mapped to %s (SCSIid %s) by user" vdi_record.API.vDI_uuid (Option.value ~default:"None" (scsiid_of vdi_record)) rc.API.vDI_uuid (Option.value ~default:"None" (scsiid_of rc)) ; Some rf | None -> None ) ) | None -> ( match scsiid_of vdi_record with | None -> None | Some x -> ( match find_by_scsiid x with | Some (rf, rc) -> info "VDI %s (SCSIid %s) mapped to %s (SCSIid %s) by user" vdi_record.API.vDI_uuid (Option.value ~default:"None" (scsiid_of vdi_record)) 
rc.API.vDI_uuid (Option.value ~default:"None" (scsiid_of rc)) ; Some rf | None -> None ) ) in match by_vdi_map with | Some vdi -> Found_disk vdi | None -> ( match if exists vdi_record.API.vDI_SR state.table then let sr = lookup vdi_record.API.vDI_SR state.table in match find_by_sr_and_location sr vdi_record.API.vDI_location with | Some x -> Some x | None -> mapto else mapto with | Some vdi -> Found_disk vdi | None -> error "Found no VDI with location = %s: %s" vdi_record.API.vDI_location ( if config.force then "ignoring error because '--force' is set" else "treating as fatal and abandoning import" ) ; if config.force then Skip else if exists vdi_record.API.vDI_SR state.table then let sr = lookup vdi_record.API.vDI_SR state.table in Found_no_disk (Api_errors.Server_error ( Api_errors.vdi_location_missing , [Ref.string_of sr; vdi_record.API.vDI_location] ) ) else Found_no_disk (Api_errors.Server_error (Api_errors.vdi_content_id_missing, []) ) ) ) | Full_import _ -> Create vdi_record let handle_dry_run __context config _rpc _session_id state x precheck_result = match precheck_result with | Found_iso vdi | Found_disk vdi -> state.table <- (x.cls, x.id, Ref.string_of vdi) :: state.table | Found_no_iso -> VDI will be ejected . | Found_no_disk e -> ( match config.import_type with | Metadata_import {live= true; _} -> (* We expect the disk to be missing during a live migration dry run. *) debug "Ignoring missing disk %s - this will be mirrored during a real \ live migration." x.id ; Create a dummy disk in the state table so the VBD import has a disk to look up . let dummy_vdi = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vdi) :: state.table | _ -> raise e ) | Skip -> () | Create _ -> let dummy_vdi = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vdi) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_iso _ | Found_no_iso | Skip -> handle_dry_run __context config rpc session_id state x precheck_result | Found_disk vdi -> handle_dry_run __context config rpc session_id state x precheck_result ; let other_config_record = (API.vDI_t_of_rpc x.snapshot).API.vDI_other_config in List.iter (fun key -> Db.VDI.remove_from_other_config ~__context ~self:vdi ~key ; try Db.VDI.add_to_other_config ~__context ~self:vdi ~key ~value:(List.assoc key other_config_record) with Not_found -> () ) Xapi_globs.vdi_other_config_sync_keys | Found_no_disk e -> raise e | Create vdi_record -> Make a new VDI for streaming data into ; adding task - id to sm - config on VDI.create so SM backend can see this is an import let sr = lookup vdi_record.API.vDI_SR state.table in let task_id = Ref.string_of (Context.get_task_id __context) in let sm_config = List.filter (fun (k, _) -> k <> Xapi_globs.import_task) vdi_record.API.vDI_sm_config in let sm_config = (Xapi_globs.import_task, task_id) :: sm_config in let vdi = Client.VDI.create_from_record ~rpc ~session_id ~value: {vdi_record with API.vDI_SR= sr; API.vDI_sm_config= sm_config} in state.cleanup <- (fun __context rpc session_id -> Client.VDI.destroy ~rpc ~session_id ~self:vdi ) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of vdi) :: state.table end * Lookup the network by name_label only . Previously we used UUID which worked if importing to the same host that originated the export but would fail if the network UUID had changed even if ( from the user 's PoV ) the " backend network " had not . 
Since we do n't model networks it seems less confusing to match on names : whether networks are the same or different is then under the control of the user . to the same host that originated the export but would fail if the network UUID had changed even if (from the user's PoV) the "backend network" had not. Since we don't model networks it seems less confusing to match on names: whether networks are the same or different is then under the control of the user. *) module Net : HandlerTools = struct type precheck_t = Found_net of API.ref_network | Create of API.network_t let precheck __context _config rpc session_id _state x = let net_record = API.network_t_of_rpc x.snapshot in let possibilities = Client.Network.get_by_name_label ~rpc ~session_id ~label:net_record.API.network_name_label in match possibilities with | [] -> ( (* Lookup by bridge name as fallback *) let expr = "field \"bridge\"=\"" ^ net_record.API.network_bridge ^ "\"" in let nets = Client.Network.get_all_records_where ~rpc ~session_id ~expr in match nets with | [] -> Create net_record | (net, _) :: _ -> Found_net net ) | n :: _ -> Found_net n let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_net net -> state.table <- (x.cls, x.id, Ref.string_of net) :: state.table | Create _ -> let dummy_net = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_net) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_net _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create net_record -> let net = log_reraise ("failed to create Network with name_label " ^ net_record.API.network_name_label ) (fun value -> Client.Network.create_from_record ~rpc ~session_id ~value ) net_record in (* Only add task flag to networks which get created in this import *) TaskHelper.operate_on_db_task ~__context (fun t -> ( try Db.Network.remove_from_other_config ~__context ~self:net ~key:Xapi_globs.import_task with _ -> () ) ; Db.Network.add_to_other_config ~__context ~self:net ~key:Xapi_globs.import_task ~value:(Ref.string_of t) ) ; state.cleanup <- (fun __context rpc session_id -> Client.Network.destroy ~rpc ~session_id ~self:net ) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of net) :: state.table end * Lookup the GPU group by GPU_types only . Currently , the GPU_types field contains the prototype * of just a single pGPU . We would probably have to extend this function once we support GPU groups * for multiple compatible GPU types . * of just a single pGPU. We would probably have to extend this function once we support GPU groups * for multiple compatible GPU types. 
*) module GPUGroup : HandlerTools = struct type precheck_t = | Found_GPU_group of API.ref_GPU_group | Found_no_GPU_group of exn | Create of API.gPU_group_t let precheck __context config rpc session_id _state x = let gpu_group_record = API.gPU_group_t_of_rpc x.snapshot in let groups = Client.GPU_group.get_all_records ~rpc ~session_id in try let group, _ = List.find (fun (_, groupr) -> groupr.API.gPU_group_GPU_types = gpu_group_record.API.gPU_group_GPU_types ) groups in Found_GPU_group group with Not_found -> ( match config.import_type with | Metadata_import _ -> In vm_metadata_only mode the GPU group must exist let msg = Printf.sprintf "Unable to find GPU group with matching GPU_types = '[%s]'" (String.concat "," gpu_group_record.API.gPU_group_GPU_types) in error "%s" msg ; Found_no_GPU_group (Failure msg) | Full_import _ -> (* In normal mode we attempt to create any missing GPU groups *) Create gpu_group_record ) let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_GPU_group group -> state.table <- (x.cls, x.id, Ref.string_of group) :: state.table | Found_no_GPU_group e -> raise e | Create _ -> let dummy_gpu_group = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_gpu_group) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_GPU_group _ | Found_no_GPU_group _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create gpu_group_record -> let group = log_reraise ("Unable to create GPU group with GPU_types = '[%s]'" ^ String.concat "," gpu_group_record.API.gPU_group_GPU_types ) (fun value -> let group = Client.GPU_group.create ~rpc ~session_id ~name_label:value.API.gPU_group_name_label ~name_description:value.API.gPU_group_name_description ~other_config:value.API.gPU_group_other_config in Db.GPU_group.set_GPU_types ~__context ~self:group ~value:value.API.gPU_group_GPU_types ; group ) gpu_group_record in (* Only add task flag to GPU groups which get created in this import *) TaskHelper.operate_on_db_task ~__context (fun t -> ( try Db.GPU_group.remove_from_other_config ~__context ~self:group ~key:Xapi_globs.import_task with _ -> () ) ; Db.GPU_group.add_to_other_config ~__context ~self:group ~key:Xapi_globs.import_task ~value:(Ref.string_of t) ) ; state.cleanup <- (fun __context rpc session_id -> Client.GPU_group.destroy ~rpc ~session_id ~self:group ) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of group) :: state.table end * Create a new VBD record , add the reference to the table . The VM and VDI must already have been handled first . If the VDI does n't exist and the VBD is a CDROM then eject it . Note that any currently attached disk MUST be present , unless it 's an HVM guest and a CDROM in which case we eject it anyway . The VM and VDI must already have been handled first. If the VDI doesn't exist and the VBD is a CDROM then eject it. Note that any currently attached disk MUST be present, unless it's an HVM guest and a CDROM in which case we eject it anyway. 
*) module VBD : HandlerTools = struct type precheck_t = Found_VBD of API.ref_VBD | Skip | Create of API.vBD_t let precheck __context config rpc session_id state x = let vbd_record = API.vBD_t_of_rpc x.snapshot in let get_vbd () = Client.VBD.get_by_uuid ~rpc ~session_id ~uuid:vbd_record.API.vBD_uuid in let vbd_exists () = try ignore (get_vbd ()) ; true with _ -> false in if config.full_restore && vbd_exists () then let vbd = get_vbd () in Found_VBD vbd else let vm = log_reraise ("Failed to find VBD's VM: " ^ Ref.string_of vbd_record.API.vBD_VM) (lookup vbd_record.API.vBD_VM) state.table in If the VBD is supposed to be attached to a PV guest ( which does n't support currently_attached empty drives ) then throw a fatal error . currently_attached empty drives) then throw a fatal error. *) let original_vm = get_vm_record (find_in_export (Ref.string_of vbd_record.API.vBD_VM) state.export) in Note : the following is potentially inaccurate : the find out whether a running or * suspended VM has booted HVM , we must consult the VM metrics , but those are n't * available in the exported metadata . * suspended VM has booted HVM, we must consult the VM metrics, but those aren't * available in the exported metadata. *) let has_qemu = Helpers.will_have_qemu_from_record original_vm in (* In the case of dry_run live migration, don't check for missing disks as CDs will be ejected before the real migration. *) let dry_run, live = match config.import_type with | Metadata_import {dry_run; live; _} -> (dry_run, live) | _ -> (false, false) in ( if vbd_record.API.vBD_currently_attached && not (exists vbd_record.API.vBD_VDI state.table) then It 's only ok if it 's a CDROM attached to an HVM guest , or it 's part of SXM and we know the sender would eject it . let will_eject = dry_run && live && original_vm.API.vM_power_state <> `Suspended in if not (vbd_record.API.vBD_type = `CD && (has_qemu || will_eject)) then raise (IFailure Attached_disks_not_found) ) ; let vbd_record = {vbd_record with API.vBD_VM= vm} in match (vbd_record.API.vBD_type, exists vbd_record.API.vBD_VDI state.table) with | `CD, false | `Floppy, false -> if has_qemu || original_vm.API.vM_power_state <> `Suspended then Create {vbd_record with API.vBD_VDI= Ref.null; API.vBD_empty= true} (* eject *) else Create vbd_record | `Disk, false -> (* omit: cannot have empty disks *) warn "Cannot import VM's disk: was it an .iso attached as a disk rather \ than CD?" 
; Skip | _, true -> Create { vbd_record with API.vBD_VDI= lookup vbd_record.API.vBD_VDI state.table } let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VBD vbd -> state.table <- (x.cls, x.id, Ref.string_of vbd) :: state.table ; state.table <- (x.cls, Ref.string_of vbd, Ref.string_of vbd) :: state.table | Skip -> () | Create _ -> let dummy_vbd = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vbd) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VBD _ | Skip -> handle_dry_run __context config rpc session_id state x precheck_result | Create vbd_record -> let vbd = log_reraise "failed to create VBD" (fun value -> let vbd = Client.VBD.create_from_record ~rpc ~session_id ~value: { value with API.vBD_device= "" ; API.vBD_currently_attached= false } in if config.full_restore then Db.VBD.set_uuid ~__context ~self:vbd ~value:value.API.vBD_uuid ; vbd ) vbd_record in state.cleanup <- (fun __context rpc session_id -> Client.VBD.destroy ~rpc ~session_id ~self:vbd ) :: state.cleanup ; (* Now that we can import/export suspended VMs we need to preserve the currently_attached flag *) Db.VBD.set_currently_attached ~__context ~self:vbd ~value:vbd_record.API.vBD_currently_attached ; state.table <- (x.cls, x.id, Ref.string_of vbd) :: state.table end * Create a new VIF record , add the reference to the table . The VM and Network must have already been handled first . The VM and Network must have already been handled first. *) module VIF : HandlerTools = struct type precheck_t = Found_VIF of API.ref_VIF | Create of API.vIF_t let precheck __context config rpc session_id state x = let vif_record = API.vIF_t_of_rpc x.snapshot in let get_vif () = Client.VIF.get_by_uuid ~rpc ~session_id ~uuid:vif_record.API.vIF_uuid in let vif_exists () = try ignore (get_vif ()) ; true with _ -> false in if config.full_restore && vif_exists () then If there 's already a VIF with the same UUID and we 're preserving UUIDs , use that one . let vif = get_vif () in Found_VIF vif else If not restoring a full backup then blank the MAC so it is regenerated let vif_record = { vif_record with API.vIF_MAC= (if config.full_restore then vif_record.API.vIF_MAC else "") } in (* Determine the VM to which we're going to attach this VIF. *) let vm = log_reraise ("Failed to find VIF's VM: " ^ Ref.string_of vif_record.API.vIF_VM) (lookup vif_record.API.vIF_VM) state.table in (* Determine the network to which we're going to attach this VIF. *) let net = (* If we find the cross-pool migration key, attach the VIF to that network... *) if List.mem_assoc Constants.storage_migrate_vif_map_key vif_record.API.vIF_other_config then Ref.of_string (List.assoc Constants.storage_migrate_vif_map_key vif_record.API.vIF_other_config ) else (* ...otherwise fall back to looking up the network from the state table. *) log_reraise ("Failed to find VIF's Network: " ^ Ref.string_of vif_record.API.vIF_network ) (lookup vif_record.API.vIF_network) state.table in (* Make sure we remove the cross-pool migration VIF mapping key from the other_config * before creating a VIF - otherwise we'll risk sending this key on to another pool * during a future cross-pool migration and it won't make sense. *) let other_config = List.filter (fun (k, _) -> k <> Constants.storage_migrate_vif_map_key) vif_record.API.vIF_other_config in Construct the VIF record we 're going to try to create locally . 
let vif_record = if Pool_features.is_enabled ~__context Features.VIF_locking then vif_record else if vif_record.API.vIF_locking_mode = `locked then { vif_record with API.vIF_locking_mode= `network_default ; API.vIF_ipv4_allowed= [] ; API.vIF_ipv6_allowed= [] } else {vif_record with API.vIF_ipv4_allowed= []; API.vIF_ipv6_allowed= []} in let vif_record = { vif_record with API.vIF_VM= vm ; API.vIF_network= net ; API.vIF_other_config= other_config } in Create vif_record let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VIF vif -> state.table <- (x.cls, x.id, Ref.string_of vif) :: state.table ; state.table <- (x.cls, Ref.string_of vif, Ref.string_of vif) :: state.table | Create _ -> let dummy_vif = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vif) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VIF _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create vif_record -> let vif = log_reraise "failed to create VIF" (fun value -> let vif = Client.VIF.create_from_record ~rpc ~session_id ~value:{value with API.vIF_currently_attached= false} in if config.full_restore then Db.VIF.set_uuid ~__context ~self:vif ~value:value.API.vIF_uuid ; vif ) vif_record in state.cleanup <- (fun __context rpc session_id -> Client.VIF.destroy ~rpc ~session_id ~self:vif ) :: state.cleanup ; (* Now that we can import/export suspended VMs we need to preserve the currently_attached flag *) if Db.VM.get_power_state ~__context ~self:vif_record.API.vIF_VM <> `Halted then Db.VIF.set_currently_attached ~__context ~self:vif ~value:vif_record.API.vIF_currently_attached ; state.table <- (x.cls, x.id, Ref.string_of vif) :: state.table end module VGPUType : HandlerTools = struct type precheck_t = | Found_VGPU_type of API.ref_VGPU_type | Create of API.vGPU_type_t let precheck __context _config rpc session_id _state x = let vgpu_type_record = API.vGPU_type_t_of_rpc x.snapshot in First look up VGPU types using the identifier string . let compatible_types = match Client.VGPU_type.get_all_records_where ~rpc ~session_id ~expr: (Printf.sprintf "field \"identifier\"=\"%s\"" vgpu_type_record.API.vGPU_type_identifier ) with | [] -> (* If that fails, look up using the vendor name and model name. 
*) Client.VGPU_type.get_all_records_where ~rpc ~session_id ~expr: (Printf.sprintf "field \"vendor_name\"=\"%s\" and field \"model_name\"=\"%s\"" vgpu_type_record.API.vGPU_type_vendor_name vgpu_type_record.API.vGPU_type_model_name ) | types -> types in match choose_one compatible_types with | Some (vgpu_type, _) -> Found_VGPU_type vgpu_type | None -> warn "Unable to find VGPU_type (%s,%s,%s) - creating a new record" vgpu_type_record.API.vGPU_type_identifier vgpu_type_record.API.vGPU_type_vendor_name vgpu_type_record.API.vGPU_type_model_name ; Create vgpu_type_record let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VGPU_type vgpu_type -> state.table <- (x.cls, x.id, Ref.string_of vgpu_type) :: state.table ; state.table <- (x.cls, Ref.string_of vgpu_type, Ref.string_of vgpu_type) :: state.table | Create _ -> let dummy_vgpu_type = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vgpu_type) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VGPU_type _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create vgpu_type_record -> let vgpu_type = log_reraise "failed to create VGPU_type" (fun value -> size and internal_config are left as defaults for now . They 'll * be updated if and when comes across the real config file . * be updated if and when xapi comes across the real config file. *) Xapi_vgpu_type.create ~__context ~vendor_name:value.API.vGPU_type_vendor_name ~model_name:value.API.vGPU_type_model_name ~framebuffer_size:value.API.vGPU_type_framebuffer_size ~max_heads:value.API.vGPU_type_max_heads ~max_resolution_x:value.API.vGPU_type_max_resolution_x ~max_resolution_y:value.API.vGPU_type_max_resolution_y ~size:0L ~internal_config:[] ~implementation:value.API.vGPU_type_implementation ~identifier:value.API.vGPU_type_identifier ~experimental:value.API.vGPU_type_experimental ~compatible_model_names_in_vm: value.API.vGPU_type_compatible_types_in_vm ~compatible_model_names_on_pgpu:[] ) vgpu_type_record in state.cleanup <- (fun __context _ _ -> Db.VGPU_type.destroy ~__context ~self:vgpu_type) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of vgpu_type) :: state.table end * Create a new VGPU record , add the reference to the table . The VM and GPU_group must have already been handled first . The VM and GPU_group must have already been handled first. *) module VGPU : HandlerTools = struct type precheck_t = Found_VGPU of API.ref_VGPU | Create of API.vGPU_t let precheck __context config rpc session_id state x = let vgpu_record = API.vGPU_t_of_rpc x.snapshot in let get_vgpu () = Client.VGPU.get_by_uuid ~rpc ~session_id ~uuid:vgpu_record.API.vGPU_uuid in let vgpu_exists () = try ignore (get_vgpu ()) ; true with _ -> false in if config.full_restore && vgpu_exists () then let vgpu = get_vgpu () in Found_VGPU vgpu else let vm = log_reraise ("Failed to find VGPU's VM: " ^ Ref.string_of vgpu_record.API.vGPU_VM) (lookup vgpu_record.API.vGPU_VM) state.table in let group = (* If we find the cross-pool migration key, attach the vgpu to the provided gpu_group... *) if List.mem_assoc Constants.storage_migrate_vgpu_map_key vgpu_record.API.vGPU_other_config then Ref.of_string (List.assoc Constants.storage_migrate_vgpu_map_key vgpu_record.API.vGPU_other_config ) else (* ...otherwise fall back to looking up the vgpu from the state table. 
*) log_reraise ("Failed to find VGPU's GPU group: " ^ Ref.string_of vgpu_record.API.vGPU_GPU_group ) (lookup vgpu_record.API.vGPU_GPU_group) state.table in let _type = log_reraise ("Failed to find VGPU's type: " ^ Ref.string_of vgpu_record.API.vGPU_type ) (lookup vgpu_record.API.vGPU_type) state.table in Make sure we remove the cross - pool migration VGPU mapping key from the other_config * before creating a VGPU - otherwise we 'll risk sending this key on to another pool * during a future cross - pool migration and it wo n't make sense . * before creating a VGPU - otherwise we'll risk sending this key on to another pool * during a future cross-pool migration and it won't make sense. *) let other_config = List.filter (fun (k, _) -> k <> Constants.storage_migrate_vgpu_map_key) vgpu_record.API.vGPU_other_config in let vgpu_record = { vgpu_record with API.vGPU_VM= vm ; API.vGPU_GPU_group= group ; API.vGPU_type= _type ; API.vGPU_other_config= other_config } in if is_live config then assert_can_live_import_vgpu ~__context vgpu_record ; Create vgpu_record let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VGPU vgpu -> state.table <- (x.cls, x.id, Ref.string_of vgpu) :: state.table ; state.table <- (x.cls, Ref.string_of vgpu, Ref.string_of vgpu) :: state.table | Create _ -> let dummy_vgpu = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vgpu) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VGPU _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create vgpu_record -> let vgpu = log_reraise "failed to create VGPU" (fun value -> let vgpu = Client.VGPU.create ~rpc ~session_id ~vM:value.API.vGPU_VM ~gPU_group:value.API.vGPU_GPU_group ~device:value.API.vGPU_device ~other_config:value.API.vGPU_other_config ~_type:value.API.vGPU_type in if config.full_restore then Db.VGPU.set_uuid ~__context ~self:vgpu ~value:value.API.vGPU_uuid ; vgpu ) vgpu_record in state.cleanup <- (fun __context rpc session_id -> Client.VGPU.destroy ~rpc ~session_id ~self:vgpu ) :: state.cleanup ; (* Now that we can import/export suspended VMs we need to preserve the currently_attached flag *) if Db.VM.get_power_state ~__context ~self:vgpu_record.API.vGPU_VM <> `Halted then Db.VGPU.set_currently_attached ~__context ~self:vgpu ~value:vgpu_record.API.vGPU_currently_attached ; state.table <- (x.cls, x.id, Ref.string_of vgpu) :: state.table end module PVS_Proxy : HandlerTools = struct type precheck_t = | Drop (* can't find a PVS Site at destination to use *) | Create of API.pVS_proxy_t (* find a PVS site of a given [uuid] and [name] with [uuid] taking * precedence *) let find_pvs_site __context _config _rpc _session_id pvs_uuid = let sites = Db.PVS_site.get_all_records ~__context in let has_uuid (_, site) = site.API.pVS_site_PVS_uuid = pvs_uuid in let candidates = List.filter has_uuid sites in match candidates with (ref, _) :: _ -> Some ref | [] -> None * We obtain the name and uuid of the PVS site this * proxy was linked to . Then we use these trying to find * a matching site on this ( destination ) side . The result is recorded * in the [ precheck_t ] value . * proxy was linked to. Then we use these trying to find * a matching site on this (destination) side. The result is recorded * in the [precheck_t] value. 
*) let precheck __context config rpc session_id state obj = let proxy = API.pVS_proxy_t_of_rpc obj.snapshot in let site = proxy.API.pVS_proxy_site |> fun ref -> find_in_export (Ref.string_of ref) state.export |> API.pVS_site_t_of_rpc in let pvs_uuid = site.API.pVS_site_PVS_uuid in match find_pvs_site __context config rpc session_id pvs_uuid with | None -> Drop | Some site -> Create { proxy with API.pVS_proxy_site= site ; API.pVS_proxy_VIF= lookup proxy.API.pVS_proxy_VIF state.table } let handle_dry_run __context _config _rpc _session_id state obj = function | Drop -> debug "no matching PVS Site found for PVS Proxy %s" obj.id | Create _ -> let dummy = Ref.make () in state.table <- (obj.cls, obj.id, Ref.string_of dummy) :: state.table let handle __context _config rpc session_id state obj = function | Drop -> debug "no matching PVS Site found for PVS Proxy %s" obj.id | Create p -> let proxy = Client.PVS_proxy.create ~rpc ~session_id ~site:p.API.pVS_proxy_site ~vIF:p.API.pVS_proxy_VIF in debug "creating PVS Proxy %s btw PVS Site %s <-> %s VIF during import" (Ref.string_of proxy) (Ref.string_of p.API.pVS_proxy_site) (Ref.string_of p.API.pVS_proxy_VIF) ; state.cleanup <- (fun __context rpc session_id -> Client.PVS_proxy.destroy ~rpc ~session_id ~self:proxy ) :: state.cleanup ; state.table <- (obj.cls, obj.id, Ref.string_of proxy) :: state.table end module PVS_Site : HandlerTools = struct (* A PVS site is never re-created as part of the import of a VM that * refers to it. We just forget it. *) type precheck_t = unit let precheck __context _ _ _ _ _ = () let handle_dry_run __context _ _ _ _ _ () = () let handle __context _ _ _ _ _ () = () end (** Create a handler for each object type. *) module HostHandler = MakeHandler (Host) module SRHandler = MakeHandler (SR) module VDIHandler = MakeHandler (VDI) module GuestMetricsHandler = MakeHandler (GuestMetrics) module VMHandler = MakeHandler (VM) module NetworkHandler = MakeHandler (Net) module GPUGroupHandler = MakeHandler (GPUGroup) module VBDHandler = MakeHandler (VBD) module VIFHandler = MakeHandler (VIF) module VGPUTypeHandler = MakeHandler (VGPUType) module VGPUHandler = MakeHandler (VGPU) module PVS_SiteHandler = MakeHandler (PVS_Site) module PVS_ProxyHandler = MakeHandler (PVS_Proxy) (** Table mapping datamodel class names to handlers, in order we have to run them *) let handlers = [ (Datamodel_common._host, HostHandler.handle) ; (Datamodel_common._sr, SRHandler.handle) ; (Datamodel_common._vdi, VDIHandler.handle) ; (Datamodel_common._vm_guest_metrics, GuestMetricsHandler.handle) ; (Datamodel_common._vm, VMHandler.handle) ; (Datamodel_common._network, NetworkHandler.handle) ; (Datamodel_common._gpu_group, GPUGroupHandler.handle) ; (Datamodel_common._vbd, VBDHandler.handle) ; (Datamodel_common._vif, VIFHandler.handle) ; (Datamodel_common._vgpu_type, VGPUTypeHandler.handle) ; (Datamodel_common._vgpu, VGPUHandler.handle) ; (Datamodel_common._pvs_site, PVS_SiteHandler.handle) ; (Datamodel_common._pvs_proxy, PVS_ProxyHandler.handle) ] let update_snapshot_and_parent_links ~__context state = let aux (cls, _, ref) = let ref = Ref.of_string ref in ( if cls = Datamodel_common._vm && Db.VM.get_is_a_snapshot ~__context ~self:ref then let snapshot_of = Db.VM.get_snapshot_of ~__context ~self:ref in if snapshot_of <> Ref.null then ( debug "lookup for snapshot_of = '%s'" (Ref.string_of snapshot_of) ; log_reraise ("Failed to find the VM which is snapshot of " ^ Db.VM.get_name_label ~__context ~self:ref ) (fun table -> let snapshot_of = (lookup snapshot_of) table 
in Db.VM.set_snapshot_of ~__context ~self:ref ~value:snapshot_of
            )
            state.table
      )
    ) ;
    if cls = Datamodel_common._vm then (
      let parent = Db.VM.get_parent ~__context ~self:ref in
      debug "lookup for parent = '%s'" (Ref.string_of parent) ;
      try
        let parent = lookup parent state.table in
        Db.VM.set_parent ~__context ~self:ref ~value:parent
      with _ -> debug "no parent found"
    )
  in
  List.iter aux state.table

(** Take a list of objects, lookup the handlers by class name and 'handle' them *)
let handle_all __context config rpc session_id (xs : obj list) =
  let state = initial_state xs in
  try
    let one_type (cls, handler) =
      let instances = List.filter (fun x -> x.cls = cls) xs in
      debug "Importing %i %s(s)" (List.length instances) cls ;
      List.iter
        (fun x -> handler __context config rpc session_id state x)
        instances
    in
    List.iter one_type handlers ;
    let dry_run =
      match config.import_type with
      | Metadata_import {dry_run= true; _} ->
          true
      | _ ->
          false
    in
    if not dry_run then
      update_snapshot_and_parent_links ~__context state ;
    state
  with e ->
    Backtrace.is_important e ;
    error "Caught exception in import: %s" (ExnHelper.string_of_exn e) ;
    (* execute all the cleanup actions now *)
    if config.force then
      warn "Not cleaning up after import failure since --force provided: %s"
        (ExnHelper.string_of_exn e)
    else
      cleanup state.cleanup ;
    raise e

(** Read the next file in the archive as xml *)
let read_xml hdr fd =
  Unixext.really_read_string fd (Int64.to_int hdr.Tar.Header.file_size)

let assert_filename_is hdr =
  let expected = Xapi_globs.ova_xml_filename in
  let actual = hdr.Tar.Header.file_name in
  if expected <> actual then (
    let hex = Tar.Header.to_hex in
    error "import expects the next file in the stream to be [%s]; got [%s]"
      (hex expected) (hex actual) ;
    raise (IFailure (Unexpected_file (expected, actual)))
  )

(** Takes an fd and a function, tries first to read the first tar block
    and checks for the existence of 'ova.xml'. If that fails then pipe
    the lot through an appropriate decompressor and try again *)
let with_open_archive fd ?length f =
  (* Read the first header's worth into a buffer *)
  let buffer = Cstruct.create Tar.Header.length in
  let retry_with_compression = ref true in
  try
    Tar_unix.really_read fd buffer ;
    (* we assume the first block is not all zeroes *)
    let hdr = Option.get (Tar.Header.unmarshal buffer) in
    assert_filename_is hdr ;
    (* successfully opened uncompressed stream *)
    retry_with_compression := false ;
    let xml = read_xml hdr fd in
    Tar_helpers.skip fd (Tar.Header.compute_zero_padding_length hdr) ;
    f xml fd
  with e ->
    if not !retry_with_compression then raise e ;
    let decompress =
      (* If the file starts with the zstd magic string decompress with zstd;
         otherwise fall back to trying gzip.
*) let zstd_magic = "\x28\xb5\x2f\xfd" in let zstd = Cstruct.equal (Cstruct.of_string zstd_magic) (Cstruct.sub buffer 0 (String.length zstd_magic)) in if zstd then ( debug "Failed to directly open the archive; trying zstd" ; Zstd.Default.decompress ) else ( debug "Failed to directly open the archive; trying gzip" ; Gzip.Default.decompress ) in let feeder pipe_in = finally (fun () -> decompress pipe_in (fun compressed_in -> (* Write the initial buffer *) Unix.set_close_on_exec compressed_in ; debug "Writing initial buffer" ; Tar_unix.really_write compressed_in buffer ; let limit = Option.map (fun x -> Int64.sub x (Int64.of_int Tar.Header.length)) length in let n = Unixext.copy_file ?limit fd compressed_in in debug "Written a total of %d + %Ld bytes" Tar.Header.length n ) ) (fun () -> ignore_exn (fun () -> Unix.close pipe_in)) in let consumer pipe_out feeder_t = finally (fun () -> let hdr = Tar_unix.get_next_header pipe_out in assert_filename_is hdr ; let xml = read_xml hdr pipe_out in Tar_helpers.skip pipe_out (Tar.Header.compute_zero_padding_length hdr) ; f xml pipe_out ) (fun () -> ignore_exn (fun () -> Unix.close pipe_out) ; Thread.join feeder_t ) in let pipe_out, pipe_in = Unix.pipe () in let feeder_t = Thread.create feeder pipe_in in consumer pipe_out feeder_t (** Remove "import" from the current operations of all created VMs, complete the task including the VM references *) let complete_import ~__context vmrefs = debug "length of vmrefs: %d" (List.length vmrefs) ; debug "content: %s" (String.concat "," (List.map Ref.string_of vmrefs)) ; try (* Remove the "import" current operation, recompute allowed operations *) let task_id = Ref.string_of (Context.get_task_id __context) in List.iter (fun vm -> Db.VM.remove_from_current_operations ~__context ~self:vm ~key:task_id ; Xapi_vm_lifecycle.update_allowed_operations ~__context ~self:vm ) vmrefs ; (* We only keep VMs which are not snapshot *) let vmrefs = List.filter (fun vmref -> not (Db.VM.get_is_a_snapshot ~__context ~self:vmref)) vmrefs in (* We only set the result on the task since it is officially completed later. 
*) TaskHelper.set_result ~__context (Some (API.rpc_of_ref_VM_set vmrefs)) with e -> Backtrace.is_important e ; error "Caught exception completing import: %s" (ExnHelper.string_of_exn e) ; raise e let find_query_flag query key = List.mem_assoc key query && List.assoc key query = "true" let read_map_params name params = let len = String.length name + 1 in (* include ':' *) let filter_params = List.filter (fun (p, _) -> Xstringext.String.startswith name p && String.length p > len ) params in List.map (fun (k, v) -> (String.sub k len (String.length k - len), v)) filter_params let with_error_handling f = match Backtrace.with_backtraces f with | `Ok result -> result | `Error (e, backtrace) -> ( Debug.log_backtrace e backtrace ; let reraise = Backtrace.reraise e in match e with | IFailure failure -> ( match failure with | Cannot_handle_chunked -> error "import code cannot handle chunked encoding" ; reraise (Api_errors.Server_error (Api_errors.import_error_cannot_handle_chunked, []) ) | Some_checksums_failed -> error "some checksums failed" ; reraise (Api_errors.Server_error (Api_errors.import_error_some_checksums_failed, []) ) | Failed_to_find_object id -> error "Failed to find object with ID: %s" id ; reraise (Api_errors.Server_error (Api_errors.import_error_failed_to_find_object, [id]) ) | Attached_disks_not_found -> error "Cannot import guest with currently attached disks which cannot \ be found" ; reraise (Api_errors.Server_error (Api_errors.import_error_attached_disks_not_found, []) ) | Unexpected_file (expected, actual) -> let hex = Tar.Header.to_hex in error "Invalid XVA file: import expects the next file in the stream to \ be \"%s\" [%s]; got \"%s\" [%s]" expected (hex expected) actual (hex actual) ; reraise (Api_errors.Server_error (Api_errors.import_error_unexpected_file, [expected; actual]) ) ) | Api_errors.Server_error _ as e -> Backtrace.is_important e ; raise e | End_of_file -> error "Prematurely reached end-of-file during import" ; reraise (Api_errors.Server_error (Api_errors.import_error_premature_eof, [])) | e -> let msg_exn = ExnHelper.string_of_exn e in error "Import caught exception: %s" msg_exn ; reraise Api_errors.(Server_error (import_error_generic, [msg_exn])) ) (** Import metadata only *) let metadata_handler (req : Request.t) s _ = debug "metadata_handler called" ; req.Request.close <- true ; Xapi_http.with_context "VM.metadata_import" req s (fun __context -> Helpers.call_api_functions ~__context (fun rpc session_id -> let full_restore = find_query_flag req.Request.query "restore" in let force = find_query_flag req.Request.query "force" in let dry_run = find_query_flag req.Request.query "dry_run" in let live = find_query_flag req.Request.query "live" in let vdi_map = read_map_params "vdi" req.Request.query in info "VM.import_metadata: force = %b; full_restore = %b dry_run = %b; \ live = %b; vdi_map = [ %s ]" force full_restore dry_run live (String.concat "; " (List.map (fun (a, b) -> a ^ "=" ^ b) vdi_map)) ; let metadata_options = {dry_run; live; vdi_map} in let config = {import_type= Metadata_import metadata_options; full_restore; force} in let headers = Http.http_200_ok ~keep_alive:false () @ [ Http.Hdr.task_id ^ ":" ^ Ref.string_of (Context.get_task_id __context) ; content_type ] in Http_svr.headers s headers ; with_open_archive s ?length:req.Request.content_length (fun metadata s -> debug "Got XML" ; Skip trailing two zero blocks Tar_helpers.skip s (Tar.Header.length * 2) ; let header = metadata |> Xmlrpc.of_string |> header_of_rpc in assert_compatible ~__context 
header.version ;
                  if full_restore then
                    assert_can_restore_backup ~__context rpc session_id header ;
                  with_error_handling (fun () ->
                      let state =
                        handle_all __context config rpc session_id
                          header.objects
                      in
                      let table = state.table in
                      let on_cleanup_stack = state.cleanup in
                      try
                        List.iter
                          (fun (cls, id, r) ->
                            debug
                              "Imported object type %s: external ref: %s internal \
                               ref: %s"
                              cls id r
                          )
                          table ;
                        let vmrefs =
                          List.map
                            (fun (_, _, r) -> Ref.of_string r)
                            state.created_vms
                        in
                        let vmrefs = Listext.List.setify vmrefs in
                        complete_import ~__context vmrefs ;
                        info "import_metadata successful"
                      with e ->
                        Backtrace.is_important e ;
                        error "Caught exception during import: %s"
                          (ExnHelper.string_of_exn e) ;
                        if force then
                          warn
                            "Not cleaning up after import failure since --force \
                             provided: %s"
                            (ExnHelper.string_of_exn e)
                        else (
                          debug "Cleaning up after import failure: %s"
                            (ExnHelper.string_of_exn e) ;
                          cleanup on_cleanup_stack
                        ) ;
                        raise e
                  )
              )
          )
      )

let stream_import __context rpc session_id s content_length refresh_session
    config =
  with_open_archive s ?length:content_length (fun metadata s ->
      debug "Got XML" ;
      let vmrefs =
        let header = metadata |> Xmlrpc.of_string |> header_of_rpc in
        assert_compatible ~__context header.version ;
        if config.full_restore then
          assert_can_restore_backup ~__context rpc session_id header ;
        (* objects created here: *)
        let state = handle_all __context config rpc session_id header.objects in
        let table, on_cleanup_stack = (state.table, state.cleanup) in
        (* signal to GUI that objects have been created and they can now go off and remap networks *)
        TaskHelper.add_to_other_config ~__context "object_creation" "complete" ;
        try
          List.iter
            (fun (cls, id, r) ->
              debug "Imported object type %s: external ref: %s internal ref: %s"
                cls id r
            )
            table ;
          (* now stream the disks. We expect not to stream CDROMs *)
          let all_vdis = non_cdrom_vdis header in
          (* some CDROMs might be in as disks, don't stream them either *)
          let all_vdis =
            List.filter (fun x -> exists (Ref.of_string x.id) table) all_vdis
          in
          let vdis =
            List.map
              (fun x ->
                let vdir = API.vDI_t_of_rpc (find_in_export x.id state.export) in
                ( x.id
                , lookup (Ref.of_string x.id) table
                , vdir.API.vDI_virtual_size
                )
              )
              all_vdis
          in
          List.iter
            (fun (extid, intid, size) ->
              debug "Expecting to import VDI %s into %s (size=%Ld)" extid
                (Ref.string_of intid) size
            )
            vdis ;
          let checksum_table =
            Stream_vdi.recv_all refresh_session s __context rpc session_id
              header.version config.force vdis
          in
          (* Stream_vdi.recv_all only checks for task cancellation every ten
             seconds, so we need to check again now. After this point, we
             disable cancellation for this task. *)
          TaskHelper.exn_if_cancelling ~__context ;
          TaskHelper.set_not_cancellable ~__context ;
          (* Pre-miami GA exports have a checksum table at the end of the export.
             Check the calculated checksums against the table here.
             Nb. Rio GA-Miami B2 exports get their checksums checked twice! *)
          ( if header.version.export_vsn < 2 then
              let xml =
                Tar_unix.Archive.with_next_file s (fun s hdr -> read_xml hdr s)
              in
              let expected_checksums =
                xml |> Xmlrpc.of_string |> checksum_table_of_rpc
              in
              if not (compare_checksums checksum_table expected_checksums) then (
                error "Some data checksums were incorrect: VM may be corrupt" ;
                if not config.force then
                  raise (IFailure Some_checksums_failed)
                else
                  error
                    "Ignoring incorrect checksums since 'force' flag was \
                     supplied"
              )
          ) ;
          (* return vmrefs *)
          Listext.List.setify
            (List.map (fun (_, _, r) -> Ref.of_string r) state.created_vms)
        with e ->
          Backtrace.is_important e ;
          error "Caught exception during import: %s" (ExnHelper.string_of_exn e) ;
          if config.force then
            warn
              "Not cleaning up after import failure since --force provided: %s"
              (ExnHelper.string_of_exn e)
          else (
            debug "Cleaning up after import failure: %s"
              (ExnHelper.string_of_exn e) ;
            cleanup on_cleanup_stack
          ) ;
          raise e
      in
      complete_import ~__context vmrefs ;
      debug "import successful" ;
      vmrefs
  )

let handler (req : Request.t) s _ =
  req.Request.close <- true ;
  Xapi_http.assert_credentials_ok "VM.import" ~http_action:"put_import" req s ;
  debug "import handler" ;
  let full_restore = find_query_flag req.Request.query "restore" in
  let force = find_query_flag req.Request.query "force" in
  let all = req.Request.cookie @ req.Request.query in
  let subtask_of =
    if List.mem_assoc "subtask_of" all then
      Some (Ref.of_string (List.assoc "subtask_of" all))
    else
      None
  in
  (* Perform the SR reachability check using a fresh context/task because
     we don't want to complete the task in the forwarding case *)
  Server_helpers.exec_with_new_task ?subtask_of "VM.import" (fun __context ->
      Helpers.call_api_functions ~__context (fun rpc session_id ->
          let sr =
            match Importexport.sr_of_req ~__context req with
            | Some x ->
                x
            | None ->
                log_reraise
                  "request was missing both sr_id and sr_uuid: one must be \
                   provided"
                  (fun () -> Helpers.call_api_functions ~__context get_default_sr)
                  ()
          in
          info "VM.import: SR = '%s%s'; force = %b; full_restore = %b"
            (try Db.SR.get_uuid ~__context ~self:sr with _ -> "invalid")
            ( try
                Printf.sprintf " (%s)" (Db.SR.get_name_label ~__context ~self:sr)
              with _ -> ""
            )
            force full_restore ;
          if not (check_sr_availability ~__context sr) then (
            debug "sr not available - redirecting" ;
            let host = find_host_for_sr ~__context sr in
            let address =
              Http.Url.maybe_wrap_IPv6_literal
                (Db.Host.get_address ~__context ~self:host)
            in
            let url =
              Printf.sprintf "" address req.Request.uri
                (String.concat "&"
                   (List.map (fun (a, b) -> a ^ "=" ^ b) req.Request.query)
                )
            in
            let headers = Http.http_302_redirect url in
            debug "new location: %s" url ;
            Http_svr.headers s headers
          ) else
            Xapi_http.with_context "VM.import" req s (fun __context ->
                (* This is the signal to say we've taken responsibility from the CLI server for completing the task *)
                (* The GUI can deal with this itself, but the CLI is complicated by the thin cli/cli server split *)
                TaskHelper.set_progress ~__context 0.0 ;
                (* Block VM.import operation during RPU *)
                debug "Check RPU status before VM.import" ;
                if Helpers.rolling_upgrade_in_progress ~__context then (
                  warn "VM.import is not supported during RPU" ;
                  Http_svr.headers s (Http.http_400_badrequest ()) ;
                  raise
                    (Api_errors.Server_error
                       (Api_errors.not_supported_during_upgrade, [])
                    )
                ) ;
                if force then
                  warn "Force option supplied: will ignore checksum failures" ;
                (* Let's check that we're not trying to import into an iso library!
*) if Db.SR.get_content_type ~__context ~self:sr = "iso" then ( Http_svr.headers s (Http.http_400_badrequest ()) ; raise (Api_errors.Server_error (Api_errors.sr_operation_not_supported, []) ) ) ; with_error_handling (fun () -> let refresh_external = if List.mem_assoc "session_id" all then let external_session_id = List.assoc "session_id" all in Xapi_session.consider_touching_session rpc (Ref.of_string external_session_id) else fun () -> () in let refresh_internal = Xapi_session.consider_touching_session rpc session_id in let refresh_session () = refresh_external () ; refresh_internal () in debug "Importing %s" (if full_restore then "(as 'restore')" else "(as new VM)") ; let config = {import_type= Full_import sr; full_restore; force} in match (req.Request.transfer_encoding, req.Request.content_length) with | Some x, _ -> error "Encoding not yet implemented in the import code: %s" x ; Http_svr.headers s (http_403_forbidden ()) ; raise (IFailure Cannot_handle_chunked) | None, content_length -> let headers = Http.http_200_ok ~keep_alive:false () @ [ Http.Hdr.task_id ^ ":" ^ Ref.string_of (Context.get_task_id __context) ; content_type ] in Http_svr.headers s headers ; debug "Reading XML" ; ignore (stream_import __context rpc session_id s content_length refresh_session config ) ) ) ) ; debug "import successful" )
null
https://raw.githubusercontent.com/xapi-project/xen-api/e8c3575316226ac6324e94aa4f9e040a662e279a/ocaml/xapi/import.ml
ocaml
* HTTP handler for importing a VM from a stream. * @group Import and Export expected actual If true, don't create any database objects. Import the metadata of a VM whose disks already exist. * Allows the import to be customised Determines how to handle the import - see above. true if the user has provided '--force' * List of (datamodel classname * Reference in export * Reference in database) * Track the table of external reference -> internal reference and a list of cleanup functions to delete all the objects we've created, in the event of error. Return the list of non-CDROM VDIs ie those which will be streamed-in Ensure that the domain_type is set correctly This should be a snapshot in the archive This should be a snapshot in a live system The signature for a set of functions which we must provide to be able to import an object type. A type which represents how we should deal with the import of an object. Compare the state of the database with the metadata to be imported. Returns a result which signals what we should do to import the metadata. Handle the result of the precheck function, but don't create any database objects. Add objects to the state table if necessary, to keep track of what would have been imported. Handle the result of the check function, creating database objects if necessary. For certain combinations of result and object type, this can be aliased to handle_dry_run. Make a handler for a set of handler functions. but might require memory adjustments If the VM is a default template, then pick up the one with the same name. will depend on the state of the VM and whether the import is forced. If full_restore is true then we want to keep the VM uuid - this may involve replacing an existing VM. The existing VM cannot be replaced if it is running. If import is forced then skip the VM, else throw an error. The existing VM should not be replaced if the version to be imported is no newer, unless the import is forced. Remove the grant guest API access key unconditionally (it's only for our RHEL4 templates atm) Preserve genid for cross-pool migrates, because to the guest the * disk looks like it hasn't changed. * Preserve genid for templates, since they're not going to be started. * Generate a fresh genid for normal VM imports. to keep track of how many times this VM has been restored. If not a full restore, then we don't need to keep track. Need to get rid of the import task or we cannot destroy the VM Restore the last_booted_record too (critical if suspended but might as well do it all the time) Set the power_state and suspend_VDI if the VM is suspended. * If anything goes wrong, still continue if forced. We might want to import a control domain Update the snapshot metadata. At this points, the snapshot_of field is not relevant as it use the export ref. However, as the corresponding VM object may have not been created yet, this fiels contains some useful information to update it later. VM might have suspend_SR that does not exist on this pool Although someone could sneak in here and attempt to power on the VM, it doesn't really matter since no VBDs have been created yet. We don't bother doing this if --force is set otherwise on error the VM remains locked. * Create the guest metrics Will be handled specially in handle_vdi this one will be ejected We expect the disk to be missing during a live migration dry run. 
Lookup by bridge name as fallback Only add task flag to networks which get created in this import In normal mode we attempt to create any missing GPU groups Only add task flag to GPU groups which get created in this import In the case of dry_run live migration, don't check for missing disks as CDs will be ejected before the real migration. eject omit: cannot have empty disks Now that we can import/export suspended VMs we need to preserve the currently_attached flag Determine the VM to which we're going to attach this VIF. Determine the network to which we're going to attach this VIF. If we find the cross-pool migration key, attach the VIF to that network... ...otherwise fall back to looking up the network from the state table. Make sure we remove the cross-pool migration VIF mapping key from the other_config * before creating a VIF - otherwise we'll risk sending this key on to another pool * during a future cross-pool migration and it won't make sense. Now that we can import/export suspended VMs we need to preserve the currently_attached flag If that fails, look up using the vendor name and model name. If we find the cross-pool migration key, attach the vgpu to the provided gpu_group... ...otherwise fall back to looking up the vgpu from the state table. Now that we can import/export suspended VMs we need to preserve the currently_attached flag can't find a PVS Site at destination to use find a PVS site of a given [uuid] and [name] with [uuid] taking * precedence A PVS site is never re-created as part of the import of a VM that * refers to it. We just forget it. * Create a handler for each object type. * Table mapping datamodel class names to handlers, in order we have to run them * Take a list of objects, lookup the handlers by class name and 'handle' them execute all the cleanup actions now * Read the next file in the archive as xml successfully opened uncompressed stream Write the initial buffer * Remove "import" from the current operations of all created VMs, complete the task including the VM references Remove the "import" current operation, recompute allowed operations We only keep VMs which are not snapshot We only set the result on the task since it is officially completed later. include ':' * Import metadata only objects created here: signal to GUI that object have been created and they can now go off and remapp networks now stream the disks. We expect not to stream CDROMs some CDROMs might be in as disks, don't stream them either Perform the SR reachability check using a fresh context/task because we don't want to complete the task in the forwarding case This is the signal to say we've taken responsibility from the CLI server for completing the task The GUI can deal with this itself, but the CLI is complicated by the thin cli/cli server split Let's check that we're not trying to import into an iso library!
* Copyright ( C ) 2006 - 2009 Citrix Systems Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation ; version 2.1 only . with the special * exception on linking described in file LICENSE . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * Copyright (C) 2006-2009 Citrix Systems Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation; version 2.1 only. with the special * exception on linking described in file LICENSE. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. *) module D = Debug.Make (struct let name = "import" end) open D module Listext = Xapi_stdext_std.Listext module Xstringext = Xapi_stdext_std.Xstringext module Unixext = Xapi_stdext_unix.Unixext open Http open Importexport open Xapi_stdext_pervasives.Pervasiveext open Client type import_failure = | Some_checksums_failed | Cannot_handle_chunked | Failed_to_find_object of string | Attached_disks_not_found exception IFailure of import_failure open Xapi_vm_memory_constraints open Vm_memory_constraints type metadata_options = { dry_run: bool If true , treat the import as if it is preparation for a live migration . * This has the following consequences : * - We must perform extra checks on the VM object - do we have enough memory ? Are the CPU flags compatible ? Is there an HA plan for it ? * - If the migration is a dry run we do n't need to check for , since VDI.mirror will have created them during a real migration . * - If the migration is for real , we will expect the VM export code on the source host to have mapped the VDI locations onto their * mirrored counterparts which are present on this host . * This has the following consequences: * - We must perform extra checks on the VM object - do we have enough memory? Are the CPU flags compatible? Is there an HA plan for it? * - If the migration is a dry run we don't need to check for VDIs, since VDI.mirror will have created them during a real migration. * - If the migration is for real, we will expect the VM export code on the source host to have mapped the VDI locations onto their * mirrored counterparts which are present on this host. *) live: bool An optional src VDI - > destination VDI rewrite list vdi_map: (string * string) list } type import_type = | Metadata_import of metadata_options Import a VM and stream its disks into the specified SR . | Full_import of API.ref_SR type config = { import_type: import_type true if we want to restore as a perfect backup . 
Currently we preserve the interface MAC addresses but we still regenerate ( because we lack the internal APIs to keep them interface MAC addresses but we still regenerate UUIDs (because we lack the internal APIs to keep them *) full_restore: bool force: bool } let is_live config = match config.import_type with Metadata_import {live; _} -> live | _ -> false type table = (string * string * string) list type state = { mutable table: table ; mutable created_vms: table ; mutable cleanup: (Context.t -> (Rpc.call -> Rpc.response) -> API.ref_session -> unit) list ; export: obj list } let initial_state export = {table= []; created_vms= []; cleanup= []; export} let log_reraise msg f x = try f x with e -> Backtrace.is_important e ; error "Import failed: %s" msg ; raise e let lookup x (table : table) = let id = Ref.string_of x in try let _, _, r = List.find (fun (_, i, _) -> i = id) table in Ref.of_string r with Not_found as e -> Backtrace.reraise e (IFailure (Failed_to_find_object id)) let exists x (table : table) = let id = Ref.string_of x in List.filter (fun (_, i, _) -> i = id) table <> [] Using a reference string from the original export , find the XMLRPC snapshot of the appropriate object . of the appropriate object. *) let find_in_export x export = try let obj = List.find (fun obj -> obj.id = x) export in obj.snapshot with Not_found as e -> Backtrace.reraise e (IFailure (Failed_to_find_object x)) let choose_one = function [x] -> Some x | x :: _ -> Some x | [] -> None let non_cdrom_vdis (x : header) = let all_vbds = List.filter (fun x -> x.cls = Datamodel_common._vbd) x.objects in let all_vbds = List.map (fun x -> API.vBD_t_of_rpc x.snapshot) all_vbds in let all_disk_vbds = List.filter (fun x -> x.API.vBD_type <> `CD) all_vbds in let all_disk_vdis = List.map (fun x -> Ref.string_of x.API.vBD_VDI) all_disk_vbds in Remove all those whose SR has content - type = " iso " let all_disk_vdis = List.filter (fun vdi -> let vdir = API.vDI_t_of_rpc (find_in_export vdi x.objects) in let sr = API.sR_t_of_rpc (find_in_export (Ref.string_of vdir.API.vDI_SR) x.objects) in sr.API.sR_content_type <> "iso" ) all_disk_vdis in let all_vdis = List.filter (fun x -> x.cls = Datamodel_common._vdi) x.objects in List.filter (fun x -> false || List.mem x.id all_disk_vdis || (API.vDI_t_of_rpc x.snapshot).API.vDI_type = `suspend ) all_vdis let get_vm_record snapshot = let vm_record = API.vM_t_of_rpc snapshot in if vm_record.API.vM_domain_type = `unspecified then { vm_record with API.vM_domain_type= Xapi_vm_helpers.derive_domain_type ~hVM_boot_policy:vm_record.API.vM_HVM_boot_policy } else vm_record Check to see if another VM exists with the same MAC seed . Check VM uuids do n't already exist . Check that if a VDI exists then it is a CDROM . 
let assert_can_restore_backup ~__context rpc session_id (x : header) = let get_mac_seed vm = if List.mem_assoc Xapi_globs.mac_seed vm.API.vM_other_config then Some (List.assoc Xapi_globs.mac_seed vm.API.vM_other_config, vm) else None in let get_vm_uuid_of_snap s = let snapshot_of = Ref.string_of s.API.vM_snapshot_of in try if Xstringext.String.startswith "Ref:" snapshot_of then let v = List.find (fun v -> v.cls = Datamodel_common._vm && v.id = snapshot_of) x.objects in let v = get_vm_record v.snapshot in Some v.API.vM_uuid else if Xstringext.String.startswith Ref.ref_prefix snapshot_of then if Db.is_valid_ref __context s.API.vM_snapshot_of then Some (Db.VM.get_uuid ~__context ~self:s.API.vM_snapshot_of) else Some (List.assoc Db_names.uuid (Helpers.vm_string_to_assoc s.API.vM_snapshot_metadata) ) else None with _ -> None in This function should be called when a VM / snapshot to import has the same mac seed as an existing VM . They are considered compatible only in the following cases : - Both are VMs , and having the same uuid - Both are snapshots , and the VMs they were derived from are the same one - One is snapshot , one is VM , and the snapshot was derived from the VM mac seed as an existing VM. They are considered compatible only in the following cases: - Both are VMs, and having the same uuid - Both are snapshots, and the VMs they were derived from are the same one - One is snapshot, one is VM, and the snapshot was derived from the VM *) let is_compatible v1 v2 = match (v1.API.vM_is_a_snapshot, v2.API.vM_is_a_snapshot) with | false, false -> v1.API.vM_uuid = v2.API.vM_uuid | true, true -> let v1' = get_vm_uuid_of_snap v1 in let v2' = get_vm_uuid_of_snap v2 in v1' <> None && v2' <> None && v1' = v2' | true, false -> let v1' = get_vm_uuid_of_snap v1 in v1' = Some v2.API.vM_uuid | false, true -> let v2' = get_vm_uuid_of_snap v2 in v2' = Some v1.API.vM_uuid in let import_vms = List.filter_map (fun x -> if x.cls <> Datamodel_common._vm then None else let x = get_vm_record x.snapshot in get_mac_seed x ) x.objects in let existing_vms = List.filter_map (fun (_, v) -> get_mac_seed v) (Client.VM.get_all_records ~rpc ~session_id) in List.iter (fun (mac, vm) -> List.iter (fun (mac', vm') -> if mac = mac' && not (is_compatible vm vm') then raise Api_errors.(Server_error (duplicate_mac_seed, [mac'])) ) existing_vms ) import_vms let assert_can_live_import __context vm_record = let assert_memory_available () = let host = Helpers.get_localhost ~__context in let host_mem_available = Memory_check.host_compute_free_memory_with_maximum_compression ~__context ~host None in let main, shadow = Memory_check.vm_compute_start_memory ~__context vm_record in let mem_reqd_for_vm = Int64.add main shadow in if host_mem_available < mem_reqd_for_vm then raise (Api_errors.Server_error ( Api_errors.host_not_enough_free_memory , [ Int64.to_string mem_reqd_for_vm ; Int64.to_string host_mem_available ] ) ) in match vm_record.API.vM_power_state with | `Running | `Paused -> assert_memory_available () | _other -> () Assert that the local host , which is the host we are live - migrating the VM to , * has free capacity on a PGPU from the given VGPU 's GPU group . * has free capacity on a PGPU from the given VGPU's GPU group. 
*) let assert_can_live_import_vgpu ~__context vgpu_record = let host = Helpers.get_localhost ~__context in let local_pgpus = Db.PGPU.get_refs_where ~__context ~expr: Db_filter_types.( And ( Eq ( Field "GPU_group" , Literal (Ref.string_of vgpu_record.API.vGPU_GPU_group) ) , Eq (Field "host", Literal (Ref.string_of host)) ) ) in let capacity_exists = List.exists (fun pgpu -> try Xapi_pgpu_helpers.assert_capacity_exists_for_VGPU_type ~__context ~self:pgpu ~vgpu_type:vgpu_record.API.vGPU_type ; true with _ -> false ) local_pgpus in if not capacity_exists then raise Api_errors.( Server_error ( vm_requires_gpu , [ Ref.string_of vgpu_record.API.vGPU_VM ; Ref.string_of vgpu_record.API.vGPU_GPU_group ] ) ) module type HandlerTools = sig type precheck_t val precheck : Context.t -> config -> (Rpc.call -> Rpc.response) -> API.ref_session -> state -> obj -> precheck_t val handle_dry_run : Context.t -> config -> (Rpc.call -> Rpc.response) -> API.ref_session -> state -> obj -> precheck_t -> unit val handle : Context.t -> config -> (Rpc.call -> Rpc.response) -> API.ref_session -> state -> obj -> precheck_t -> unit end module MakeHandler = functor (M : HandlerTools) -> struct let handle __context config rpc session_id state obj = let dry_run = match config.import_type with | Metadata_import {dry_run= true; _} -> true | _ -> false in let precheck_result = M.precheck __context config rpc session_id state obj in if dry_run then M.handle_dry_run __context config rpc session_id state obj precheck_result else M.handle __context config rpc session_id state obj precheck_result end module Host : HandlerTools = struct type precheck_t = Found_host of API.ref_host | Found_no_host let precheck __context _config _rpc _session_id _state x = let host_record = API.host_t_of_rpc x.snapshot in try Found_host (Db.Host.get_by_uuid ~__context ~uuid:host_record.API.host_uuid) with _ -> Found_no_host let handle_dry_run __context _config _rpc _session_id state x precheck_result = let host = match precheck_result with | Found_host host' -> host' | Found_no_host -> Ref.null in state.table <- (x.cls, x.id, Ref.string_of host) :: state.table let handle = handle_dry_run end module VM : HandlerTools = struct type precheck_t = | Default_template of API.ref_VM | Replace of API.ref_VM * API.vM_t | Fail of exn | Skip | Clean_import of API.vM_t let precheck __context config _rpc _session_id _state x = let vm_record = get_vm_record x.snapshot in we ca n't import a VM if it is not in a resting state and requires DMC DMC *) let is_dmc_compatible = match vm_record.API.vM_power_state with | (`Running | `Suspended | `Paused) when not @@ Xapi_vm_helpers.is_dmc_compatible_vmr ~__context ~vmr:vm_record -> false | _ -> true in let is_default_template = vm_record.API.vM_is_default_template || vm_record.API.vM_is_a_template && List.mem_assoc Xapi_globs.default_template_key vm_record.API.vM_other_config && List.assoc Xapi_globs.default_template_key vm_record.API.vM_other_config = "true" in if not @@ is_dmc_compatible then Fail Api_errors.( Server_error (dynamic_memory_control_unavailable, [vm_record.API.vM_uuid]) ) else if is_default_template then let template = try List.hd (Db.VM.get_by_name_label ~__context ~label:vm_record.API.vM_name_label ) with _ -> Ref.null in Default_template template else let import_action = Check for an existing VM with the same UUID - if one exists , what we do next let get_vm_by_uuid () = Db.VM.get_by_uuid ~__context ~uuid:vm_record.API.vM_uuid in let vm_uuid_exists () = try ignore (get_vm_by_uuid ()) ; true with _ -> false 
in if config.full_restore && vm_uuid_exists () then let vm = get_vm_by_uuid () in let power_state = Db.VM.get_power_state ~__context ~self:vm in if power_state <> `Halted then if config.force then ( debug "Forced import skipping VM %s as VM to replace was not halted." vm_record.API.vM_uuid ; Skip ) else Fail (Api_errors.Server_error ( Api_errors.vm_bad_power_state , [ Ref.string_of vm ; Record_util.power_state_to_string `Halted ; Record_util.power_state_to_string power_state ] ) ) else let existing_version = Db.VM.get_version ~__context ~self:vm in let version_to_import = vm_record.API.vM_version in if existing_version >= version_to_import && config.force = false then Fail (Api_errors.Server_error ( Api_errors.vm_to_import_is_not_newer_version , [ Ref.string_of vm ; Int64.to_string existing_version ; Int64.to_string version_to_import ] ) ) else Replace (vm, vm_record) else Clean_import vm_record in match import_action with | Replace (_, vm_record) | Clean_import vm_record -> if is_live config then assert_can_live_import __context vm_record ; import_action | _ -> import_action let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Skip -> () | Fail e -> raise e | Default_template template -> state.table <- (x.cls, x.id, Ref.string_of template) :: state.table ; state.created_vms <- (x.cls, x.id, Ref.string_of template) :: state.created_vms | Clean_import _ | Replace _ -> let dummy_vm = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vm) :: state.table let handle __context config rpc session_id state x precheck_result = This function assumes we 've already checked for and dealt with any existing VM with the same UUID . let do_import vm_record = let task_id = Ref.string_of (Context.get_task_id __context) in let other_config = List.filter (fun (key, _) -> key <> Xapi_globs.grant_api_access) vm_record.API.vM_other_config in If not performing a full restore then generate a fresh MAC seed let other_config = if config.full_restore then other_config else (Xapi_globs.mac_seed, Uuidx.(to_string (make ()))) :: List.filter (fun (x, _) -> x <> Xapi_globs.mac_seed) other_config in let vm_record = {vm_record with API.vM_other_config= other_config} in let vm_record = if is_live config || vm_record.API.vM_is_a_template then vm_record else { vm_record with API.vM_generation_id= Xapi_vm_helpers.fresh_genid ~current_genid:vm_record.API.vM_generation_id () } in let vm_record = match vm_record.API.vM_power_state with | `Halted -> make sure we do n't use DMC let safe_constraints = Vm_memory_constraints.reset_to_safe_defaults ~constraints:(Vm_memory_constraints.extract ~vm_record) in debug "Disabling DMC for VM %s; dynamic_{min,max},target <- %Ld" vm_record.API.vM_uuid safe_constraints.dynamic_max ; { vm_record with API.vM_memory_static_min= safe_constraints.static_min ; vM_memory_dynamic_min= safe_constraints.dynamic_min ; vM_memory_target= safe_constraints.target ; vM_memory_dynamic_max= safe_constraints.dynamic_max ; vM_memory_static_max= safe_constraints.static_max } | _otherwise -> the precheck should make sure we do n't have a VM that requires DMC . But just to be safe , we do n't update memory settings on any VM that is not in rest requires DMC. 
But just to be safe, we don't update memory settings on any VM that is not in rest *) vm_record in let vm_record = if vm_has_field ~x ~name:"has_vendor_device" then vm_record else {vm_record with API.vM_has_vendor_device= false} in let vm_record = { vm_record with API.vM_memory_overhead= Memory_check.vm_compute_memory_overhead ~vm_record } in let vm_record = {vm_record with API.vM_protection_policy= Ref.null} in Full restore preserves UUIDs , so if we are replacing an existing VM the version number should be incremented let vm_record = if config.full_restore then {vm_record with API.vM_version= Int64.add vm_record.API.vM_version 1L} else {vm_record with API.vM_version= 0L} in Clear the appliance field - in the case of DR we will reconstruct the appliance separately . let vm_record = {vm_record with API.vM_appliance= Ref.null} in Correct ha - restart - priority for pre boston imports let vm_record = match vm_record.API.vM_ha_restart_priority with | ("0" | "1" | "2" | "3") as order -> { vm_record with API.vM_ha_restart_priority= "restart" ; API.vM_order= Int64.of_string order } | _ -> vm_record in Initialize platform["device - model " ] if it is not set let vm_record = { vm_record with API.vM_platform= Xapi_vm_helpers.ensure_device_model_profile_present ~__context ~domain_type:vm_record.API.vM_domain_type ~is_a_template:vm_record.API.vM_is_a_template vm_record.API.vM_platform } in let vm = log_reraise ("failed to create VM with name-label " ^ vm_record.API.vM_name_label) (fun value -> let vm = Xapi_vm_helpers .create_from_record_without_checking_licence_feature_for_vendor_device ~__context rpc session_id value in if config.full_restore then Db.VM.set_uuid ~__context ~self:vm ~value:value.API.vM_uuid ; vm ) vm_record in state.cleanup <- (fun __context rpc session_id -> Helpers.log_exn_continue (Printf.sprintf "Attempting to remove import from current_operations of VM: %s" (Ref.string_of vm) ) (fun () -> Db.VM.remove_from_current_operations ~__context ~self:vm ~key:task_id ) () ; Db.VM.set_power_state ~__context ~self:vm ~value:`Halted ; Client.VM.destroy ~rpc ~session_id ~self:vm ) :: state.cleanup ; Db.VM.set_last_booted_record ~__context ~self:vm ~value:vm_record.API.vM_last_booted_record ; Db.VM.set_last_boot_CPU_flags ~__context ~self:vm ~value:vm_record.API.vM_last_boot_CPU_flags ; TaskHelper.operate_on_db_task ~__context (fun t -> ( try Db.VM.remove_from_other_config ~__context ~self:vm ~key:Xapi_globs.import_task with _ -> () ) ; Db.VM.add_to_other_config ~__context ~self:vm ~key:Xapi_globs.import_task ~value:(Ref.string_of t) ) ; if vm_record.API.vM_power_state = `Suspended then ( try let vdi = (lookup vm_record.API.vM_suspend_VDI) state.table in Db.VM.set_power_state ~__context ~self:vm ~value:`Suspended ; Db.VM.set_suspend_VDI ~__context ~self:vm ~value:vdi ; let vm_metrics = Db.VM.get_metrics ~__context ~self:vm in Db.VM_metrics.set_current_domain_type ~__context ~self:vm_metrics ~value:vm_record.API.vM_domain_type with e -> if not config.force then ( Backtrace.is_important e ; let msg = "Failed to find VM's suspend_VDI: " ^ Ref.string_of vm_record.API.vM_suspend_VDI in error "Import failed: %s" msg ; raise e ) ) else Db.VM.set_power_state ~__context ~self:vm ~value:`Halted ; Db.VM.set_is_control_domain ~__context ~self:vm ~value:vm_record.API.vM_is_control_domain ; Db.VM.set_resident_on ~__context ~self:vm ~value: ( try lookup vm_record.API.vM_resident_on state.table with _ -> Ref.null ) ; Db.VM.set_affinity ~__context ~self:vm ~value: (try lookup vm_record.API.vM_affinity 
state.table with _ -> Ref.null) ; Db.VM.set_is_a_snapshot ~__context ~self:vm ~value:vm_record.API.vM_is_a_snapshot ; Db.VM.set_snapshot_info ~__context ~self:vm ~value:vm_record.API.vM_snapshot_info ; Db.VM.set_snapshot_of ~__context ~self:vm ~value:vm_record.API.vM_snapshot_of ; Db.VM.set_snapshot_time ~__context ~self:vm ~value:vm_record.API.vM_snapshot_time ; Db.VM.set_transportable_snapshot_id ~__context ~self:vm ~value:vm_record.API.vM_transportable_snapshot_id ; if None = Helpers.check_sr_exists ~__context ~self:vm_record.API.vM_suspend_SR then Db.VM.set_suspend_SR ~__context ~self:vm ~value:Ref.null ; Db.VM.set_parent ~__context ~self:vm ~value:vm_record.API.vM_parent ; ( try let gm = lookup vm_record.API.vM_guest_metrics state.table in Db.VM.set_guest_metrics ~__context ~self:vm ~value:gm with _ -> () ) ; Db.VM.set_bios_strings ~__context ~self:vm ~value:vm_record.API.vM_bios_strings ; debug "Created VM: %s (was %s)" (Ref.string_of vm) x.id ; if not config.force then Db.VM.add_to_current_operations ~__context ~self:vm ~key:task_id ~value:`import ; Xapi_vm_lifecycle.update_allowed_operations ~__context ~self:vm ; state.table <- (x.cls, x.id, Ref.string_of vm) :: state.table ; state.created_vms <- (x.cls, x.id, Ref.string_of vm) :: state.created_vms in match precheck_result with | Skip | Fail _ | Default_template _ -> handle_dry_run __context config rpc session_id state x precheck_result | Clean_import vm_record -> do_import vm_record | Replace (vm, vm_record) -> Destroy the existing VM , along with its VIFs and VBDs . debug "Replacing VM %s" vm_record.API.vM_uuid ; Helpers.call_api_functions ~__context (fun rpc session_id -> let vifs = Db.VM.get_VIFs ~__context ~self:vm in List.iter (fun vif -> Client.VIF.destroy ~rpc ~session_id ~self:vif) vifs ; let vbds = Db.VM.get_VBDs ~__context ~self:vm in List.iter (fun vbd -> Client.VBD.destroy ~rpc ~session_id ~self:vbd) vbds ; Client.VM.destroy ~rpc ~session_id ~self:vm ) ; do_import vm_record end module GuestMetrics : HandlerTools = struct type precheck_t = OK let precheck __context _config _rpc _session_id _state _x = OK let handle_dry_run __context _config _rpc _session_id state x _precheck_result = let dummy_gm = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_gm) :: state.table let handle __context _config _rpc _session_id state x _precheck_result = let gm_record = API.vM_guest_metrics_t_of_rpc x.snapshot in let gm = Ref.make () in Db.VM_guest_metrics.create ~__context ~ref:gm ~uuid:(Uuidx.to_string (Uuidx.make ())) ~os_version:gm_record.API.vM_guest_metrics_os_version ~pV_drivers_version:gm_record.API.vM_guest_metrics_PV_drivers_version ~pV_drivers_up_to_date: gm_record.API.vM_guest_metrics_PV_drivers_up_to_date ~memory:gm_record.API.vM_guest_metrics_memory ~disks:gm_record.API.vM_guest_metrics_disks ~networks:gm_record.API.vM_guest_metrics_networks ~pV_drivers_detected:gm_record.API.vM_guest_metrics_PV_drivers_detected ~other:gm_record.API.vM_guest_metrics_other ~last_updated:gm_record.API.vM_guest_metrics_last_updated ~other_config:gm_record.API.vM_guest_metrics_other_config ~live:gm_record.API.vM_guest_metrics_live ~can_use_hotplug_vbd:gm_record.API.vM_guest_metrics_can_use_hotplug_vbd ~can_use_hotplug_vif:gm_record.API.vM_guest_metrics_can_use_hotplug_vif ; state.table <- (x.cls, x.id, Ref.string_of gm) :: state.table end * If we 're restoring VM metadata only then lookup the SR by uuid . 
If we ca n't find the SR then we will still try to match later ( except CDROMs ) the SR then we will still try to match VDIs later (except CDROMs) *) module SR : HandlerTools = struct type precheck_t = | Found_SR of API.ref_SR | Found_no_SR | Will_use_SR of API.ref_SR | SR_not_needed let precheck __context config rpc session_id _state x = let sr_record = API.sR_t_of_rpc x.snapshot in match config.import_type with | Metadata_import _ -> ( try Look up the existing SR record let sr = Client.SR.get_by_uuid ~rpc ~session_id ~uuid:sr_record.API.sR_uuid in Found_SR sr with _ -> let msg = match sr_record.API.sR_content_type with | "iso" -> | _ -> "- will still try to find individual VDIs" in warn "Failed to find SR with UUID: %s content-type: %s %s" sr_record.API.sR_uuid sr_record.API.sR_content_type msg ; Found_no_SR ) | Full_import sr -> if sr_record.API.sR_content_type = "iso" then else Will_use_SR sr let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_SR sr | Will_use_SR sr -> state.table <- (x.cls, x.id, Ref.string_of sr) :: state.table | Found_no_SR | SR_not_needed -> () let handle = handle_dry_run end * If we 're restoring VM metadata only then lookup the VDI by uuid . If restoring metadata only : lookup the VDI by location , falling back to content_id if available . If importing everything : create a new VDI in the SR On any error : If the SR can not be found then we skip this VDI . If the SR can be found AND is an iso SR then we attempt to lookup the VDI by name_label If the SR can be found AND is not an iso SR then we attempt to create the VDI in it If restoring metadata only: lookup the VDI by location, falling back to content_id if available. If importing everything: create a new VDI in the SR On any error: If the SR cannot be found then we skip this VDI. 
If the SR can be found AND is an iso SR then we attempt to lookup the VDI by name_label If the SR can be found AND is not an iso SR then we attempt to create the VDI in it *) module VDI : HandlerTools = struct type precheck_t = | Found_iso of API.ref_VDI | Found_no_iso | Found_disk of API.ref_VDI | Found_no_disk of exn | Skip | Create of API.vDI_t let precheck __context config rpc session_id state x = let vdi_record = API.vDI_t_of_rpc x.snapshot in let original_sr = API.sR_t_of_rpc (find_in_export (Ref.string_of vdi_record.API.vDI_SR) state.export) in if original_sr.API.sR_content_type = "iso" then ( Best effort : locate a VDI in any shared ISO SR with a matching VDI.location let iso_srs = List.filter (fun self -> Client.SR.get_content_type ~rpc ~session_id ~self = "iso" && Client.SR.get_type ~rpc ~session_id ~self <> "udev" ) (Client.SR.get_all ~rpc ~session_id) in match List.filter (fun (_, vdir) -> vdir.API.vDI_location = vdi_record.API.vDI_location && List.mem vdir.API.vDI_SR iso_srs ) (Client.VDI.get_all_records ~rpc ~session_id) |> choose_one with | Some (vdi, _) -> Found_iso vdi | None -> warn "Found no ISO VDI with location = %s; attempting to eject" vdi_record.API.vDI_location ; Found_no_iso ) else match config.import_type with | Metadata_import {vdi_map; _} -> ( let mapto = if List.mem_assoc Constants.storage_migrate_vdi_map_key vdi_record.API.vDI_other_config then Some (Ref.of_string (List.assoc Constants.storage_migrate_vdi_map_key vdi_record.API.vDI_other_config ) ) else None in let vdi_records = Client.VDI.get_all_records ~rpc ~session_id in let find_by_sr_and_location sr location = vdi_records |> List.filter (fun (_, vdir) -> vdir.API.vDI_location = location && vdir.API.vDI_SR = sr ) |> choose_one |> Option.map fst in let find_by_uuid uuid = vdi_records |> List.filter (fun (_, vdir) -> vdir.API.vDI_uuid = uuid) |> choose_one |> Option.map fst in let _scsiid = "SCSIid" in let scsiid_of vdi_record = if List.mem_assoc _scsiid vdi_record.API.vDI_sm_config then Some (List.assoc _scsiid vdi_record.API.vDI_sm_config) else None in let find_by_scsiid x = vdi_records |> List.filter_map (fun (rf, vdir) -> if scsiid_of vdir = Some x then Some (rf, vdir) else None ) |> choose_one in let by_vdi_map = Look up the mapping by both uuid and SCSIid match if List.mem_assoc vdi_record.API.vDI_uuid vdi_map then Some (List.assoc vdi_record.API.vDI_uuid vdi_map) else match scsiid_of vdi_record with | None -> None | Some x -> if List.mem_assoc x vdi_map then Some (List.assoc x vdi_map) else None with | Some destination -> ( match find_by_uuid destination with | Some x -> Some x | None -> ( match find_by_scsiid destination with | Some (rf, rc) -> info "VDI %s (SCSIid %s) mapped to %s (SCSIid %s) by user" vdi_record.API.vDI_uuid (Option.value ~default:"None" (scsiid_of vdi_record)) rc.API.vDI_uuid (Option.value ~default:"None" (scsiid_of rc)) ; Some rf | None -> None ) ) | None -> ( match scsiid_of vdi_record with | None -> None | Some x -> ( match find_by_scsiid x with | Some (rf, rc) -> info "VDI %s (SCSIid %s) mapped to %s (SCSIid %s) by user" vdi_record.API.vDI_uuid (Option.value ~default:"None" (scsiid_of vdi_record)) rc.API.vDI_uuid (Option.value ~default:"None" (scsiid_of rc)) ; Some rf | None -> None ) ) in match by_vdi_map with | Some vdi -> Found_disk vdi | None -> ( match if exists vdi_record.API.vDI_SR state.table then let sr = lookup vdi_record.API.vDI_SR state.table in match find_by_sr_and_location sr vdi_record.API.vDI_location with | Some x -> Some x | None -> mapto else mapto with | 
Some vdi -> Found_disk vdi | None -> error "Found no VDI with location = %s: %s" vdi_record.API.vDI_location ( if config.force then "ignoring error because '--force' is set" else "treating as fatal and abandoning import" ) ; if config.force then Skip else if exists vdi_record.API.vDI_SR state.table then let sr = lookup vdi_record.API.vDI_SR state.table in Found_no_disk (Api_errors.Server_error ( Api_errors.vdi_location_missing , [Ref.string_of sr; vdi_record.API.vDI_location] ) ) else Found_no_disk (Api_errors.Server_error (Api_errors.vdi_content_id_missing, []) ) ) ) | Full_import _ -> Create vdi_record let handle_dry_run __context config _rpc _session_id state x precheck_result = match precheck_result with | Found_iso vdi | Found_disk vdi -> state.table <- (x.cls, x.id, Ref.string_of vdi) :: state.table | Found_no_iso -> VDI will be ejected . | Found_no_disk e -> ( match config.import_type with | Metadata_import {live= true; _} -> debug "Ignoring missing disk %s - this will be mirrored during a real \ live migration." x.id ; Create a dummy disk in the state table so the VBD import has a disk to look up . let dummy_vdi = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vdi) :: state.table | _ -> raise e ) | Skip -> () | Create _ -> let dummy_vdi = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vdi) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_iso _ | Found_no_iso | Skip -> handle_dry_run __context config rpc session_id state x precheck_result | Found_disk vdi -> handle_dry_run __context config rpc session_id state x precheck_result ; let other_config_record = (API.vDI_t_of_rpc x.snapshot).API.vDI_other_config in List.iter (fun key -> Db.VDI.remove_from_other_config ~__context ~self:vdi ~key ; try Db.VDI.add_to_other_config ~__context ~self:vdi ~key ~value:(List.assoc key other_config_record) with Not_found -> () ) Xapi_globs.vdi_other_config_sync_keys | Found_no_disk e -> raise e | Create vdi_record -> Make a new VDI for streaming data into ; adding task - id to sm - config on VDI.create so SM backend can see this is an import let sr = lookup vdi_record.API.vDI_SR state.table in let task_id = Ref.string_of (Context.get_task_id __context) in let sm_config = List.filter (fun (k, _) -> k <> Xapi_globs.import_task) vdi_record.API.vDI_sm_config in let sm_config = (Xapi_globs.import_task, task_id) :: sm_config in let vdi = Client.VDI.create_from_record ~rpc ~session_id ~value: {vdi_record with API.vDI_SR= sr; API.vDI_sm_config= sm_config} in state.cleanup <- (fun __context rpc session_id -> Client.VDI.destroy ~rpc ~session_id ~self:vdi ) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of vdi) :: state.table end * Lookup the network by name_label only . Previously we used UUID which worked if importing to the same host that originated the export but would fail if the network UUID had changed even if ( from the user 's PoV ) the " backend network " had not . Since we do n't model networks it seems less confusing to match on names : whether networks are the same or different is then under the control of the user . to the same host that originated the export but would fail if the network UUID had changed even if (from the user's PoV) the "backend network" had not. Since we don't model networks it seems less confusing to match on names: whether networks are the same or different is then under the control of the user. 
*) module Net : HandlerTools = struct type precheck_t = Found_net of API.ref_network | Create of API.network_t let precheck __context _config rpc session_id _state x = let net_record = API.network_t_of_rpc x.snapshot in let possibilities = Client.Network.get_by_name_label ~rpc ~session_id ~label:net_record.API.network_name_label in match possibilities with | [] -> ( let expr = "field \"bridge\"=\"" ^ net_record.API.network_bridge ^ "\"" in let nets = Client.Network.get_all_records_where ~rpc ~session_id ~expr in match nets with | [] -> Create net_record | (net, _) :: _ -> Found_net net ) | n :: _ -> Found_net n let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_net net -> state.table <- (x.cls, x.id, Ref.string_of net) :: state.table | Create _ -> let dummy_net = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_net) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_net _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create net_record -> let net = log_reraise ("failed to create Network with name_label " ^ net_record.API.network_name_label ) (fun value -> Client.Network.create_from_record ~rpc ~session_id ~value ) net_record in TaskHelper.operate_on_db_task ~__context (fun t -> ( try Db.Network.remove_from_other_config ~__context ~self:net ~key:Xapi_globs.import_task with _ -> () ) ; Db.Network.add_to_other_config ~__context ~self:net ~key:Xapi_globs.import_task ~value:(Ref.string_of t) ) ; state.cleanup <- (fun __context rpc session_id -> Client.Network.destroy ~rpc ~session_id ~self:net ) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of net) :: state.table end * Lookup the GPU group by GPU_types only . Currently , the GPU_types field contains the prototype * of just a single pGPU . We would probably have to extend this function once we support GPU groups * for multiple compatible GPU types . * of just a single pGPU. We would probably have to extend this function once we support GPU groups * for multiple compatible GPU types. 
*) module GPUGroup : HandlerTools = struct type precheck_t = | Found_GPU_group of API.ref_GPU_group | Found_no_GPU_group of exn | Create of API.gPU_group_t let precheck __context config rpc session_id _state x = let gpu_group_record = API.gPU_group_t_of_rpc x.snapshot in let groups = Client.GPU_group.get_all_records ~rpc ~session_id in try let group, _ = List.find (fun (_, groupr) -> groupr.API.gPU_group_GPU_types = gpu_group_record.API.gPU_group_GPU_types ) groups in Found_GPU_group group with Not_found -> ( match config.import_type with | Metadata_import _ -> In vm_metadata_only mode the GPU group must exist let msg = Printf.sprintf "Unable to find GPU group with matching GPU_types = '[%s]'" (String.concat "," gpu_group_record.API.gPU_group_GPU_types) in error "%s" msg ; Found_no_GPU_group (Failure msg) | Full_import _ -> Create gpu_group_record ) let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_GPU_group group -> state.table <- (x.cls, x.id, Ref.string_of group) :: state.table | Found_no_GPU_group e -> raise e | Create _ -> let dummy_gpu_group = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_gpu_group) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_GPU_group _ | Found_no_GPU_group _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create gpu_group_record -> let group = log_reraise ("Unable to create GPU group with GPU_types = '[%s]'" ^ String.concat "," gpu_group_record.API.gPU_group_GPU_types ) (fun value -> let group = Client.GPU_group.create ~rpc ~session_id ~name_label:value.API.gPU_group_name_label ~name_description:value.API.gPU_group_name_description ~other_config:value.API.gPU_group_other_config in Db.GPU_group.set_GPU_types ~__context ~self:group ~value:value.API.gPU_group_GPU_types ; group ) gpu_group_record in TaskHelper.operate_on_db_task ~__context (fun t -> ( try Db.GPU_group.remove_from_other_config ~__context ~self:group ~key:Xapi_globs.import_task with _ -> () ) ; Db.GPU_group.add_to_other_config ~__context ~self:group ~key:Xapi_globs.import_task ~value:(Ref.string_of t) ) ; state.cleanup <- (fun __context rpc session_id -> Client.GPU_group.destroy ~rpc ~session_id ~self:group ) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of group) :: state.table end * Create a new VBD record , add the reference to the table . The VM and VDI must already have been handled first . If the VDI does n't exist and the VBD is a CDROM then eject it . Note that any currently attached disk MUST be present , unless it 's an HVM guest and a CDROM in which case we eject it anyway . The VM and VDI must already have been handled first. If the VDI doesn't exist and the VBD is a CDROM then eject it. Note that any currently attached disk MUST be present, unless it's an HVM guest and a CDROM in which case we eject it anyway. 
*) module VBD : HandlerTools = struct type precheck_t = Found_VBD of API.ref_VBD | Skip | Create of API.vBD_t let precheck __context config rpc session_id state x = let vbd_record = API.vBD_t_of_rpc x.snapshot in let get_vbd () = Client.VBD.get_by_uuid ~rpc ~session_id ~uuid:vbd_record.API.vBD_uuid in let vbd_exists () = try ignore (get_vbd ()) ; true with _ -> false in if config.full_restore && vbd_exists () then let vbd = get_vbd () in Found_VBD vbd else let vm = log_reraise ("Failed to find VBD's VM: " ^ Ref.string_of vbd_record.API.vBD_VM) (lookup vbd_record.API.vBD_VM) state.table in If the VBD is supposed to be attached to a PV guest ( which does n't support currently_attached empty drives ) then throw a fatal error . currently_attached empty drives) then throw a fatal error. *) let original_vm = get_vm_record (find_in_export (Ref.string_of vbd_record.API.vBD_VM) state.export) in Note : the following is potentially inaccurate : the find out whether a running or * suspended VM has booted HVM , we must consult the VM metrics , but those are n't * available in the exported metadata . * suspended VM has booted HVM, we must consult the VM metrics, but those aren't * available in the exported metadata. *) let has_qemu = Helpers.will_have_qemu_from_record original_vm in let dry_run, live = match config.import_type with | Metadata_import {dry_run; live; _} -> (dry_run, live) | _ -> (false, false) in ( if vbd_record.API.vBD_currently_attached && not (exists vbd_record.API.vBD_VDI state.table) then It 's only ok if it 's a CDROM attached to an HVM guest , or it 's part of SXM and we know the sender would eject it . let will_eject = dry_run && live && original_vm.API.vM_power_state <> `Suspended in if not (vbd_record.API.vBD_type = `CD && (has_qemu || will_eject)) then raise (IFailure Attached_disks_not_found) ) ; let vbd_record = {vbd_record with API.vBD_VM= vm} in match (vbd_record.API.vBD_type, exists vbd_record.API.vBD_VDI state.table) with | `CD, false | `Floppy, false -> if has_qemu || original_vm.API.vM_power_state <> `Suspended then Create {vbd_record with API.vBD_VDI= Ref.null; API.vBD_empty= true} else Create vbd_record | `Disk, false -> warn "Cannot import VM's disk: was it an .iso attached as a disk rather \ than CD?" 
; Skip | _, true -> Create { vbd_record with API.vBD_VDI= lookup vbd_record.API.vBD_VDI state.table } let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VBD vbd -> state.table <- (x.cls, x.id, Ref.string_of vbd) :: state.table ; state.table <- (x.cls, Ref.string_of vbd, Ref.string_of vbd) :: state.table | Skip -> () | Create _ -> let dummy_vbd = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vbd) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VBD _ | Skip -> handle_dry_run __context config rpc session_id state x precheck_result | Create vbd_record -> let vbd = log_reraise "failed to create VBD" (fun value -> let vbd = Client.VBD.create_from_record ~rpc ~session_id ~value: { value with API.vBD_device= "" ; API.vBD_currently_attached= false } in if config.full_restore then Db.VBD.set_uuid ~__context ~self:vbd ~value:value.API.vBD_uuid ; vbd ) vbd_record in state.cleanup <- (fun __context rpc session_id -> Client.VBD.destroy ~rpc ~session_id ~self:vbd ) :: state.cleanup ; Db.VBD.set_currently_attached ~__context ~self:vbd ~value:vbd_record.API.vBD_currently_attached ; state.table <- (x.cls, x.id, Ref.string_of vbd) :: state.table end * Create a new VIF record , add the reference to the table . The VM and Network must have already been handled first . The VM and Network must have already been handled first. *) module VIF : HandlerTools = struct type precheck_t = Found_VIF of API.ref_VIF | Create of API.vIF_t let precheck __context config rpc session_id state x = let vif_record = API.vIF_t_of_rpc x.snapshot in let get_vif () = Client.VIF.get_by_uuid ~rpc ~session_id ~uuid:vif_record.API.vIF_uuid in let vif_exists () = try ignore (get_vif ()) ; true with _ -> false in if config.full_restore && vif_exists () then If there 's already a VIF with the same UUID and we 're preserving UUIDs , use that one . let vif = get_vif () in Found_VIF vif else If not restoring a full backup then blank the MAC so it is regenerated let vif_record = { vif_record with API.vIF_MAC= (if config.full_restore then vif_record.API.vIF_MAC else "") } in let vm = log_reraise ("Failed to find VIF's VM: " ^ Ref.string_of vif_record.API.vIF_VM) (lookup vif_record.API.vIF_VM) state.table in let net = if List.mem_assoc Constants.storage_migrate_vif_map_key vif_record.API.vIF_other_config then Ref.of_string (List.assoc Constants.storage_migrate_vif_map_key vif_record.API.vIF_other_config ) else log_reraise ("Failed to find VIF's Network: " ^ Ref.string_of vif_record.API.vIF_network ) (lookup vif_record.API.vIF_network) state.table in let other_config = List.filter (fun (k, _) -> k <> Constants.storage_migrate_vif_map_key) vif_record.API.vIF_other_config in Construct the VIF record we 're going to try to create locally . 
let vif_record = if Pool_features.is_enabled ~__context Features.VIF_locking then vif_record else if vif_record.API.vIF_locking_mode = `locked then { vif_record with API.vIF_locking_mode= `network_default ; API.vIF_ipv4_allowed= [] ; API.vIF_ipv6_allowed= [] } else {vif_record with API.vIF_ipv4_allowed= []; API.vIF_ipv6_allowed= []} in let vif_record = { vif_record with API.vIF_VM= vm ; API.vIF_network= net ; API.vIF_other_config= other_config } in Create vif_record let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VIF vif -> state.table <- (x.cls, x.id, Ref.string_of vif) :: state.table ; state.table <- (x.cls, Ref.string_of vif, Ref.string_of vif) :: state.table | Create _ -> let dummy_vif = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vif) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VIF _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create vif_record -> let vif = log_reraise "failed to create VIF" (fun value -> let vif = Client.VIF.create_from_record ~rpc ~session_id ~value:{value with API.vIF_currently_attached= false} in if config.full_restore then Db.VIF.set_uuid ~__context ~self:vif ~value:value.API.vIF_uuid ; vif ) vif_record in state.cleanup <- (fun __context rpc session_id -> Client.VIF.destroy ~rpc ~session_id ~self:vif ) :: state.cleanup ; if Db.VM.get_power_state ~__context ~self:vif_record.API.vIF_VM <> `Halted then Db.VIF.set_currently_attached ~__context ~self:vif ~value:vif_record.API.vIF_currently_attached ; state.table <- (x.cls, x.id, Ref.string_of vif) :: state.table end module VGPUType : HandlerTools = struct type precheck_t = | Found_VGPU_type of API.ref_VGPU_type | Create of API.vGPU_type_t let precheck __context _config rpc session_id _state x = let vgpu_type_record = API.vGPU_type_t_of_rpc x.snapshot in First look up VGPU types using the identifier string . let compatible_types = match Client.VGPU_type.get_all_records_where ~rpc ~session_id ~expr: (Printf.sprintf "field \"identifier\"=\"%s\"" vgpu_type_record.API.vGPU_type_identifier ) with | [] -> Client.VGPU_type.get_all_records_where ~rpc ~session_id ~expr: (Printf.sprintf "field \"vendor_name\"=\"%s\" and field \"model_name\"=\"%s\"" vgpu_type_record.API.vGPU_type_vendor_name vgpu_type_record.API.vGPU_type_model_name ) | types -> types in match choose_one compatible_types with | Some (vgpu_type, _) -> Found_VGPU_type vgpu_type | None -> warn "Unable to find VGPU_type (%s,%s,%s) - creating a new record" vgpu_type_record.API.vGPU_type_identifier vgpu_type_record.API.vGPU_type_vendor_name vgpu_type_record.API.vGPU_type_model_name ; Create vgpu_type_record let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VGPU_type vgpu_type -> state.table <- (x.cls, x.id, Ref.string_of vgpu_type) :: state.table ; state.table <- (x.cls, Ref.string_of vgpu_type, Ref.string_of vgpu_type) :: state.table | Create _ -> let dummy_vgpu_type = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vgpu_type) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VGPU_type _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create vgpu_type_record -> let vgpu_type = log_reraise "failed to create VGPU_type" (fun value -> size and internal_config are left as defaults for now . 
They 'll * be updated if and when comes across the real config file . * be updated if and when xapi comes across the real config file. *) Xapi_vgpu_type.create ~__context ~vendor_name:value.API.vGPU_type_vendor_name ~model_name:value.API.vGPU_type_model_name ~framebuffer_size:value.API.vGPU_type_framebuffer_size ~max_heads:value.API.vGPU_type_max_heads ~max_resolution_x:value.API.vGPU_type_max_resolution_x ~max_resolution_y:value.API.vGPU_type_max_resolution_y ~size:0L ~internal_config:[] ~implementation:value.API.vGPU_type_implementation ~identifier:value.API.vGPU_type_identifier ~experimental:value.API.vGPU_type_experimental ~compatible_model_names_in_vm: value.API.vGPU_type_compatible_types_in_vm ~compatible_model_names_on_pgpu:[] ) vgpu_type_record in state.cleanup <- (fun __context _ _ -> Db.VGPU_type.destroy ~__context ~self:vgpu_type) :: state.cleanup ; state.table <- (x.cls, x.id, Ref.string_of vgpu_type) :: state.table end * Create a new VGPU record , add the reference to the table . The VM and GPU_group must have already been handled first . The VM and GPU_group must have already been handled first. *) module VGPU : HandlerTools = struct type precheck_t = Found_VGPU of API.ref_VGPU | Create of API.vGPU_t let precheck __context config rpc session_id state x = let vgpu_record = API.vGPU_t_of_rpc x.snapshot in let get_vgpu () = Client.VGPU.get_by_uuid ~rpc ~session_id ~uuid:vgpu_record.API.vGPU_uuid in let vgpu_exists () = try ignore (get_vgpu ()) ; true with _ -> false in if config.full_restore && vgpu_exists () then let vgpu = get_vgpu () in Found_VGPU vgpu else let vm = log_reraise ("Failed to find VGPU's VM: " ^ Ref.string_of vgpu_record.API.vGPU_VM) (lookup vgpu_record.API.vGPU_VM) state.table in let group = if List.mem_assoc Constants.storage_migrate_vgpu_map_key vgpu_record.API.vGPU_other_config then Ref.of_string (List.assoc Constants.storage_migrate_vgpu_map_key vgpu_record.API.vGPU_other_config ) else log_reraise ("Failed to find VGPU's GPU group: " ^ Ref.string_of vgpu_record.API.vGPU_GPU_group ) (lookup vgpu_record.API.vGPU_GPU_group) state.table in let _type = log_reraise ("Failed to find VGPU's type: " ^ Ref.string_of vgpu_record.API.vGPU_type ) (lookup vgpu_record.API.vGPU_type) state.table in Make sure we remove the cross - pool migration VGPU mapping key from the other_config * before creating a VGPU - otherwise we 'll risk sending this key on to another pool * during a future cross - pool migration and it wo n't make sense . * before creating a VGPU - otherwise we'll risk sending this key on to another pool * during a future cross-pool migration and it won't make sense. 
*) let other_config = List.filter (fun (k, _) -> k <> Constants.storage_migrate_vgpu_map_key) vgpu_record.API.vGPU_other_config in let vgpu_record = { vgpu_record with API.vGPU_VM= vm ; API.vGPU_GPU_group= group ; API.vGPU_type= _type ; API.vGPU_other_config= other_config } in if is_live config then assert_can_live_import_vgpu ~__context vgpu_record ; Create vgpu_record let handle_dry_run __context _config _rpc _session_id state x precheck_result = match precheck_result with | Found_VGPU vgpu -> state.table <- (x.cls, x.id, Ref.string_of vgpu) :: state.table ; state.table <- (x.cls, Ref.string_of vgpu, Ref.string_of vgpu) :: state.table | Create _ -> let dummy_vgpu = Ref.make () in state.table <- (x.cls, x.id, Ref.string_of dummy_vgpu) :: state.table let handle __context config rpc session_id state x precheck_result = match precheck_result with | Found_VGPU _ -> handle_dry_run __context config rpc session_id state x precheck_result | Create vgpu_record -> let vgpu = log_reraise "failed to create VGPU" (fun value -> let vgpu = Client.VGPU.create ~rpc ~session_id ~vM:value.API.vGPU_VM ~gPU_group:value.API.vGPU_GPU_group ~device:value.API.vGPU_device ~other_config:value.API.vGPU_other_config ~_type:value.API.vGPU_type in if config.full_restore then Db.VGPU.set_uuid ~__context ~self:vgpu ~value:value.API.vGPU_uuid ; vgpu ) vgpu_record in state.cleanup <- (fun __context rpc session_id -> Client.VGPU.destroy ~rpc ~session_id ~self:vgpu ) :: state.cleanup ; if Db.VM.get_power_state ~__context ~self:vgpu_record.API.vGPU_VM <> `Halted then Db.VGPU.set_currently_attached ~__context ~self:vgpu ~value:vgpu_record.API.vGPU_currently_attached ; state.table <- (x.cls, x.id, Ref.string_of vgpu) :: state.table end module PVS_Proxy : HandlerTools = struct type precheck_t = | Create of API.pVS_proxy_t let find_pvs_site __context _config _rpc _session_id pvs_uuid = let sites = Db.PVS_site.get_all_records ~__context in let has_uuid (_, site) = site.API.pVS_site_PVS_uuid = pvs_uuid in let candidates = List.filter has_uuid sites in match candidates with (ref, _) :: _ -> Some ref | [] -> None * We obtain the name and uuid of the PVS site this * proxy was linked to . Then we use these trying to find * a matching site on this ( destination ) side . The result is recorded * in the [ precheck_t ] value . * proxy was linked to. Then we use these trying to find * a matching site on this (destination) side. The result is recorded * in the [precheck_t] value. 
*) let precheck __context config rpc session_id state obj = let proxy = API.pVS_proxy_t_of_rpc obj.snapshot in let site = proxy.API.pVS_proxy_site |> fun ref -> find_in_export (Ref.string_of ref) state.export |> API.pVS_site_t_of_rpc in let pvs_uuid = site.API.pVS_site_PVS_uuid in match find_pvs_site __context config rpc session_id pvs_uuid with | None -> Drop | Some site -> Create { proxy with API.pVS_proxy_site= site ; API.pVS_proxy_VIF= lookup proxy.API.pVS_proxy_VIF state.table } let handle_dry_run __context _config _rpc _session_id state obj = function | Drop -> debug "no matching PVS Site found for PVS Proxy %s" obj.id | Create _ -> let dummy = Ref.make () in state.table <- (obj.cls, obj.id, Ref.string_of dummy) :: state.table let handle __context _config rpc session_id state obj = function | Drop -> debug "no matching PVS Site found for PVS Proxy %s" obj.id | Create p -> let proxy = Client.PVS_proxy.create ~rpc ~session_id ~site:p.API.pVS_proxy_site ~vIF:p.API.pVS_proxy_VIF in debug "creating PVS Proxy %s btw PVS Site %s <-> %s VIF during import" (Ref.string_of proxy) (Ref.string_of p.API.pVS_proxy_site) (Ref.string_of p.API.pVS_proxy_VIF) ; state.cleanup <- (fun __context rpc session_id -> Client.PVS_proxy.destroy ~rpc ~session_id ~self:proxy ) :: state.cleanup ; state.table <- (obj.cls, obj.id, Ref.string_of proxy) :: state.table end module PVS_Site : HandlerTools = struct type precheck_t = unit let precheck __context _ _ _ _ _ = () let handle_dry_run __context _ _ _ _ _ () = () let handle __context _ _ _ _ _ () = () end module HostHandler = MakeHandler (Host) module SRHandler = MakeHandler (SR) module VDIHandler = MakeHandler (VDI) module GuestMetricsHandler = MakeHandler (GuestMetrics) module VMHandler = MakeHandler (VM) module NetworkHandler = MakeHandler (Net) module GPUGroupHandler = MakeHandler (GPUGroup) module VBDHandler = MakeHandler (VBD) module VIFHandler = MakeHandler (VIF) module VGPUTypeHandler = MakeHandler (VGPUType) module VGPUHandler = MakeHandler (VGPU) module PVS_SiteHandler = MakeHandler (PVS_Site) module PVS_ProxyHandler = MakeHandler (PVS_Proxy) let handlers = [ (Datamodel_common._host, HostHandler.handle) ; (Datamodel_common._sr, SRHandler.handle) ; (Datamodel_common._vdi, VDIHandler.handle) ; (Datamodel_common._vm_guest_metrics, GuestMetricsHandler.handle) ; (Datamodel_common._vm, VMHandler.handle) ; (Datamodel_common._network, NetworkHandler.handle) ; (Datamodel_common._gpu_group, GPUGroupHandler.handle) ; (Datamodel_common._vbd, VBDHandler.handle) ; (Datamodel_common._vif, VIFHandler.handle) ; (Datamodel_common._vgpu_type, VGPUTypeHandler.handle) ; (Datamodel_common._vgpu, VGPUHandler.handle) ; (Datamodel_common._pvs_site, PVS_SiteHandler.handle) ; (Datamodel_common._pvs_proxy, PVS_ProxyHandler.handle) ] let update_snapshot_and_parent_links ~__context state = let aux (cls, _, ref) = let ref = Ref.of_string ref in ( if cls = Datamodel_common._vm && Db.VM.get_is_a_snapshot ~__context ~self:ref then let snapshot_of = Db.VM.get_snapshot_of ~__context ~self:ref in if snapshot_of <> Ref.null then ( debug "lookup for snapshot_of = '%s'" (Ref.string_of snapshot_of) ; log_reraise ("Failed to find the VM which is snapshot of " ^ Db.VM.get_name_label ~__context ~self:ref ) (fun table -> let snapshot_of = (lookup snapshot_of) table in Db.VM.set_snapshot_of ~__context ~self:ref ~value:snapshot_of ) state.table ) ) ; if cls = Datamodel_common._vm then ( let parent = Db.VM.get_parent ~__context ~self:ref in debug "lookup for parent = '%s'" (Ref.string_of parent) ; try 
let parent = lookup parent state.table in Db.VM.set_parent ~__context ~self:ref ~value:parent with _ -> debug "no parent found" ) in List.iter aux state.table let handle_all __context config rpc session_id (xs : obj list) = let state = initial_state xs in try let one_type (cls, handler) = let instances = List.filter (fun x -> x.cls = cls) xs in debug "Importing %i %s(s)" (List.length instances) cls ; List.iter (fun x -> handler __context config rpc session_id state x) instances in List.iter one_type handlers ; let dry_run = match config.import_type with | Metadata_import {dry_run= true; _} -> true | _ -> false in if not dry_run then update_snapshot_and_parent_links ~__context state ; state with e -> Backtrace.is_important e ; error "Caught exception in import: %s" (ExnHelper.string_of_exn e) ; if config.force then warn "Not cleaning up after import failure since --force provided: %s" (ExnHelper.string_of_exn e) else cleanup state.cleanup ; raise e let read_xml hdr fd = Unixext.really_read_string fd (Int64.to_int hdr.Tar.Header.file_size) let assert_filename_is hdr = let expected = Xapi_globs.ova_xml_filename in let actual = hdr.Tar.Header.file_name in if expected <> actual then ( let hex = Tar.Header.to_hex in error "import expects the next file in the stream to be [%s]; got [%s]" (hex expected) (hex actual) ; raise (IFailure (Unexpected_file (expected, actual))) ) * Takes an fd and a function , tries first to read the first tar block and checks for the existence of ' ova.xml ' . If that fails then pipe the lot through an appropriate decompressor and try again and checks for the existence of 'ova.xml'. If that fails then pipe the lot through an appropriate decompressor and try again *) let with_open_archive fd ?length f = Read the first header 's worth into a buffer let buffer = Cstruct.create Tar.Header.length in let retry_with_compression = ref true in try Tar_unix.really_read fd buffer ; we assume the first block is not all zeroes let hdr = Option.get (Tar.Header.unmarshal buffer) in assert_filename_is hdr ; retry_with_compression := false ; let xml = read_xml hdr fd in Tar_helpers.skip fd (Tar.Header.compute_zero_padding_length hdr) ; f xml fd with e -> if not !retry_with_compression then raise e ; let decompress = If the file starts with the zstd magic string decompress with zstd ; otherwise fall back to trying gzip . otherwise fall back to trying gzip. 
*) let zstd_magic = "\x28\xb5\x2f\xfd" in let zstd = Cstruct.equal (Cstruct.of_string zstd_magic) (Cstruct.sub buffer 0 (String.length zstd_magic)) in if zstd then ( debug "Failed to directly open the archive; trying zstd" ; Zstd.Default.decompress ) else ( debug "Failed to directly open the archive; trying gzip" ; Gzip.Default.decompress ) in let feeder pipe_in = finally (fun () -> decompress pipe_in (fun compressed_in -> Unix.set_close_on_exec compressed_in ; debug "Writing initial buffer" ; Tar_unix.really_write compressed_in buffer ; let limit = Option.map (fun x -> Int64.sub x (Int64.of_int Tar.Header.length)) length in let n = Unixext.copy_file ?limit fd compressed_in in debug "Written a total of %d + %Ld bytes" Tar.Header.length n ) ) (fun () -> ignore_exn (fun () -> Unix.close pipe_in)) in let consumer pipe_out feeder_t = finally (fun () -> let hdr = Tar_unix.get_next_header pipe_out in assert_filename_is hdr ; let xml = read_xml hdr pipe_out in Tar_helpers.skip pipe_out (Tar.Header.compute_zero_padding_length hdr) ; f xml pipe_out ) (fun () -> ignore_exn (fun () -> Unix.close pipe_out) ; Thread.join feeder_t ) in let pipe_out, pipe_in = Unix.pipe () in let feeder_t = Thread.create feeder pipe_in in consumer pipe_out feeder_t let complete_import ~__context vmrefs = debug "length of vmrefs: %d" (List.length vmrefs) ; debug "content: %s" (String.concat "," (List.map Ref.string_of vmrefs)) ; try let task_id = Ref.string_of (Context.get_task_id __context) in List.iter (fun vm -> Db.VM.remove_from_current_operations ~__context ~self:vm ~key:task_id ; Xapi_vm_lifecycle.update_allowed_operations ~__context ~self:vm ) vmrefs ; let vmrefs = List.filter (fun vmref -> not (Db.VM.get_is_a_snapshot ~__context ~self:vmref)) vmrefs in TaskHelper.set_result ~__context (Some (API.rpc_of_ref_VM_set vmrefs)) with e -> Backtrace.is_important e ; error "Caught exception completing import: %s" (ExnHelper.string_of_exn e) ; raise e let find_query_flag query key = List.mem_assoc key query && List.assoc key query = "true" let read_map_params name params = let len = String.length name + 1 in let filter_params = List.filter (fun (p, _) -> Xstringext.String.startswith name p && String.length p > len ) params in List.map (fun (k, v) -> (String.sub k len (String.length k - len), v)) filter_params let with_error_handling f = match Backtrace.with_backtraces f with | `Ok result -> result | `Error (e, backtrace) -> ( Debug.log_backtrace e backtrace ; let reraise = Backtrace.reraise e in match e with | IFailure failure -> ( match failure with | Cannot_handle_chunked -> error "import code cannot handle chunked encoding" ; reraise (Api_errors.Server_error (Api_errors.import_error_cannot_handle_chunked, []) ) | Some_checksums_failed -> error "some checksums failed" ; reraise (Api_errors.Server_error (Api_errors.import_error_some_checksums_failed, []) ) | Failed_to_find_object id -> error "Failed to find object with ID: %s" id ; reraise (Api_errors.Server_error (Api_errors.import_error_failed_to_find_object, [id]) ) | Attached_disks_not_found -> error "Cannot import guest with currently attached disks which cannot \ be found" ; reraise (Api_errors.Server_error (Api_errors.import_error_attached_disks_not_found, []) ) | Unexpected_file (expected, actual) -> let hex = Tar.Header.to_hex in error "Invalid XVA file: import expects the next file in the stream to \ be \"%s\" [%s]; got \"%s\" [%s]" expected (hex expected) actual (hex actual) ; reraise (Api_errors.Server_error (Api_errors.import_error_unexpected_file, [expected; 
actual]) ) ) | Api_errors.Server_error _ as e -> Backtrace.is_important e ; raise e | End_of_file -> error "Prematurely reached end-of-file during import" ; reraise (Api_errors.Server_error (Api_errors.import_error_premature_eof, [])) | e -> let msg_exn = ExnHelper.string_of_exn e in error "Import caught exception: %s" msg_exn ; reraise Api_errors.(Server_error (import_error_generic, [msg_exn])) ) let metadata_handler (req : Request.t) s _ = debug "metadata_handler called" ; req.Request.close <- true ; Xapi_http.with_context "VM.metadata_import" req s (fun __context -> Helpers.call_api_functions ~__context (fun rpc session_id -> let full_restore = find_query_flag req.Request.query "restore" in let force = find_query_flag req.Request.query "force" in let dry_run = find_query_flag req.Request.query "dry_run" in let live = find_query_flag req.Request.query "live" in let vdi_map = read_map_params "vdi" req.Request.query in info "VM.import_metadata: force = %b; full_restore = %b dry_run = %b; \ live = %b; vdi_map = [ %s ]" force full_restore dry_run live (String.concat "; " (List.map (fun (a, b) -> a ^ "=" ^ b) vdi_map)) ; let metadata_options = {dry_run; live; vdi_map} in let config = {import_type= Metadata_import metadata_options; full_restore; force} in let headers = Http.http_200_ok ~keep_alive:false () @ [ Http.Hdr.task_id ^ ":" ^ Ref.string_of (Context.get_task_id __context) ; content_type ] in Http_svr.headers s headers ; with_open_archive s ?length:req.Request.content_length (fun metadata s -> debug "Got XML" ; Skip trailing two zero blocks Tar_helpers.skip s (Tar.Header.length * 2) ; let header = metadata |> Xmlrpc.of_string |> header_of_rpc in assert_compatible ~__context header.version ; if full_restore then assert_can_restore_backup ~__context rpc session_id header ; with_error_handling (fun () -> let state = handle_all __context config rpc session_id header.objects in let table = state.table in let on_cleanup_stack = state.cleanup in try List.iter (fun (cls, id, r) -> debug "Imported object type %s: external ref: %s internal \ ref: %s" cls id r ) table ; let vmrefs = List.map (fun (_, _, r) -> Ref.of_string r) state.created_vms in let vmrefs = Listext.List.setify vmrefs in complete_import ~__context vmrefs ; info "import_metadata successful" with e -> Backtrace.is_important e ; error "Caught exception during import: %s" (ExnHelper.string_of_exn e) ; if force then warn "Not cleaning up after import failure since --force \ provided: %s" (ExnHelper.string_of_exn e) else ( debug "Cleaning up after import failure: %s" (ExnHelper.string_of_exn e) ; cleanup on_cleanup_stack ) ; raise e ) ) ) ) let stream_import __context rpc session_id s content_length refresh_session config = with_open_archive s ?length:content_length (fun metadata s -> debug "Got XML" ; let vmrefs = let header = metadata |> Xmlrpc.of_string |> header_of_rpc in assert_compatible ~__context header.version ; if config.full_restore then assert_can_restore_backup ~__context rpc session_id header ; let state = handle_all __context config rpc session_id header.objects in let table, on_cleanup_stack = (state.table, state.cleanup) in TaskHelper.add_to_other_config ~__context "object_creation" "complete" ; try List.iter (fun (cls, id, r) -> debug "Imported object type %s: external ref: %s internal ref: %s" cls id r ) table ; let all_vdis = non_cdrom_vdis header in let all_vdis = List.filter (fun x -> exists (Ref.of_string x.id) table) all_vdis in let vdis = List.map (fun x -> let vdir = API.vDI_t_of_rpc (find_in_export x.id 
state.export) in ( x.id , lookup (Ref.of_string x.id) table , vdir.API.vDI_virtual_size ) ) all_vdis in List.iter (fun (extid, intid, size) -> debug "Expecting to import VDI %s into %s (size=%Ld)" extid (Ref.string_of intid) size ) vdis ; let checksum_table = Stream_vdi.recv_all refresh_session s __context rpc session_id header.version config.force vdis in : Stream_vdi.recv_all only checks for task cancellation every ten seconds , so we need to check again now . After this point , we disable cancellation for this task . every ten seconds, so we need to check again now. After this point, we disable cancellation for this task. *) TaskHelper.exn_if_cancelling ~__context ; TaskHelper.set_not_cancellable ~__context ; Pre - miami GA exports have a checksum table at the end of the export . Check the calculated checksums against the table here . Nb . Rio GA - Miami B2 exports get their checksums checked twice ! ( if header.version.export_vsn < 2 then let xml = Tar_unix.Archive.with_next_file s (fun s hdr -> read_xml hdr s) in let expected_checksums = xml |> Xmlrpc.of_string |> checksum_table_of_rpc in if not (compare_checksums checksum_table expected_checksums) then ( error "Some data checksums were incorrect: VM may be corrupt" ; if not config.force then raise (IFailure Some_checksums_failed) else error "Ignoring incorrect checksums since 'force' flag was \ supplied" ) ) ; return vmrefs Listext.List.setify (List.map (fun (_, _, r) -> Ref.of_string r) state.created_vms) with e -> Backtrace.is_important e ; error "Caught exception during import: %s" (ExnHelper.string_of_exn e) ; if config.force then warn "Not cleaning up after import failure since --force provided: %s" (ExnHelper.string_of_exn e) else ( debug "Cleaning up after import failure: %s" (ExnHelper.string_of_exn e) ; cleanup on_cleanup_stack ) ; raise e in complete_import ~__context vmrefs ; debug "import successful" ; vmrefs ) let handler (req : Request.t) s _ = req.Request.close <- true ; Xapi_http.assert_credentials_ok "VM.import" ~http_action:"put_import" req s ; debug "import handler" ; let full_restore = find_query_flag req.Request.query "restore" in let force = find_query_flag req.Request.query "force" in let all = req.Request.cookie @ req.Request.query in let subtask_of = if List.mem_assoc "subtask_of" all then Some (Ref.of_string (List.assoc "subtask_of" all)) else None in Server_helpers.exec_with_new_task ?subtask_of "VM.import" (fun __context -> Helpers.call_api_functions ~__context (fun rpc session_id -> let sr = match Importexport.sr_of_req ~__context req with | Some x -> x | None -> log_reraise "request was missing both sr_id and sr_uuid: one must be \ provided" (fun () -> Helpers.call_api_functions ~__context get_default_sr ) () in info "VM.import: SR = '%s%s'; force = %b; full_restore = %b" (try Db.SR.get_uuid ~__context ~self:sr with _ -> "invalid") ( try Printf.sprintf " (%s)" (Db.SR.get_name_label ~__context ~self:sr) with _ -> "" ) force full_restore ; if not (check_sr_availability ~__context sr) then ( debug "sr not available - redirecting" ; let host = find_host_for_sr ~__context sr in let address = Http.Url.maybe_wrap_IPv6_literal (Db.Host.get_address ~__context ~self:host) in let url = Printf.sprintf "" address req.Request.uri (String.concat "&" (List.map (fun (a, b) -> a ^ "=" ^ b) req.Request.query) ) in let headers = Http.http_302_redirect url in debug "new location: %s" url ; Http_svr.headers s headers ) else Xapi_http.with_context "VM.import" req s (fun __context -> TaskHelper.set_progress ~__context 0.0 ; 
Block VM.import operation during RPU debug "Check RPU status before VM.import" ; if Helpers.rolling_upgrade_in_progress ~__context then ( warn "VM.import is not supported during RPU" ; Http_svr.headers s (Http.http_400_badrequest ()) ; raise (Api_errors.Server_error (Api_errors.not_supported_during_upgrade, []) ) ) ; if force then warn "Force option supplied: will ignore checksum failures" ; if Db.SR.get_content_type ~__context ~self:sr = "iso" then ( Http_svr.headers s (Http.http_400_badrequest ()) ; raise (Api_errors.Server_error (Api_errors.sr_operation_not_supported, []) ) ) ; with_error_handling (fun () -> let refresh_external = if List.mem_assoc "session_id" all then let external_session_id = List.assoc "session_id" all in Xapi_session.consider_touching_session rpc (Ref.of_string external_session_id) else fun () -> () in let refresh_internal = Xapi_session.consider_touching_session rpc session_id in let refresh_session () = refresh_external () ; refresh_internal () in debug "Importing %s" (if full_restore then "(as 'restore')" else "(as new VM)") ; let config = {import_type= Full_import sr; full_restore; force} in match (req.Request.transfer_encoding, req.Request.content_length) with | Some x, _ -> error "Encoding not yet implemented in the import code: %s" x ; Http_svr.headers s (http_403_forbidden ()) ; raise (IFailure Cannot_handle_chunked) | None, content_length -> let headers = Http.http_200_ok ~keep_alive:false () @ [ Http.Hdr.task_id ^ ":" ^ Ref.string_of (Context.get_task_id __context) ; content_type ] in Http_svr.headers s headers ; debug "Reading XML" ; ignore (stream_import __context rpc session_id s content_length refresh_session config ) ) ) ) ; debug "import successful" )
c7ddf6a7a0899d8a82eca7de83f13bf9422bfabca9aa33d803b7cde64d398391
ocaml-ppx/ocamlformat
break_colon.ml
(* Bad: unboxing the function type *) external i : (int -> float[@unboxed]) = "i" "i_nat" module type M = sig val action : action (** Formatting action: input type and source, and output destination. *) val doc_atrs : (string Location.loc * payload) list -> (string Location.loc * bool) list option * (string Location.loc * payload) list from (Location.t -> Env.t -> Longident.t -> Path.t) ref val transl_modtype_longident foooooooooo fooooooooooooo foooooooooooooo foooooooooooo *) : (Location.t -> Env.t -> Longident.t -> Path.t) ref val imported_sets_of_closures_table : Simple_value_approx.function_declarations option Set_of_closures_id.Tbl.t type 'a option_decl = names:string list -> doc:string -> section:[`Formatting | `Operational] -> ?allow_inline:bool -> (config -> 'a -> config) -> (config -> 'a) -> 'a t val select : (* The fsevents context *) env -> (* Additional file descriptor to select for reading *) ?read_fdl:fd_select list -> (* Additional file descriptor to select for writing *) ?write_fdl:fd_select list Timeout ... like Unix.select timeout:float -> (* The callback for file system events *) (event list -> unit) -> unit val f : x:t (** an extremely long comment about [x] that does not fit on the same line with [x] *) -> unit val f : fooooooooooooooooo: ( fooooooooooooooo -> fooooooooooooooooooo -> foooooooooooooo -> foooooooooooooo * fooooooooooooooooo -> foooooooooooooooo ) (** an extremely long comment about [x] that does not fit on the same line with [x] *) -> unit end let ssmap : (module MapT with type key = string and type data = string and type map = SSMap.map ) = () let ssmap : (module MapT with type key = string and type data = string and type map = SSMap.map ) -> unit = () let long_function_name : type a. a long_long_type -> a -> a -> a -> wrap_wrap_wrap -> unit = fun () -> () let array_fold_transf (f : numbering -> 'a -> numbering * 'b) n (a : 'a array) : numbering * 'b array = match Array.length a with 0 -> (n, [||]) | 1 -> x let to_clambda_function (id, (function_decl : Flambda.function_declaration)) : Clambda.ufunction = All that we need in the environment , for translating one closure from a closed set of closures , is the substitutions for variables bound to the various closures in the set . Such closures will always be ... closed set of closures, is the substitutions for variables bound to the various closures in the set. Such closures will always be ... *) x
null
https://raw.githubusercontent.com/ocaml-ppx/ocamlformat/59551e34cf1eafc300be7ddbf3e270fd51f1eb66/test/passing/tests/break_colon.ml
ocaml
Bad: unboxing the function type * Formatting action: input type and source, and output destination. The fsevents context Additional file descriptor to select for reading Additional file descriptor to select for writing The callback for file system events * an extremely long comment about [x] that does not fit on the same line with [x] * an extremely long comment about [x] that does not fit on the same line with [x]
external i : (int -> float[@unboxed]) = "i" "i_nat" module type M = sig val action : action val doc_atrs : (string Location.loc * payload) list -> (string Location.loc * bool) list option * (string Location.loc * payload) list from (Location.t -> Env.t -> Longident.t -> Path.t) ref val transl_modtype_longident foooooooooo fooooooooooooo foooooooooooooo foooooooooooo *) : (Location.t -> Env.t -> Longident.t -> Path.t) ref val imported_sets_of_closures_table : Simple_value_approx.function_declarations option Set_of_closures_id.Tbl.t type 'a option_decl = names:string list -> doc:string -> section:[`Formatting | `Operational] -> ?allow_inline:bool -> (config -> 'a -> config) -> (config -> 'a) -> 'a t val select : env ?read_fdl:fd_select list ?write_fdl:fd_select list Timeout ... like Unix.select timeout:float (event list -> unit) -> unit val f : x:t -> unit val f : fooooooooooooooooo: ( fooooooooooooooo -> fooooooooooooooooooo -> foooooooooooooo -> foooooooooooooo * fooooooooooooooooo -> foooooooooooooooo ) -> unit end let ssmap : (module MapT with type key = string and type data = string and type map = SSMap.map ) = () let ssmap : (module MapT with type key = string and type data = string and type map = SSMap.map ) -> unit = () let long_function_name : type a. a long_long_type -> a -> a -> a -> wrap_wrap_wrap -> unit = fun () -> () let array_fold_transf (f : numbering -> 'a -> numbering * 'b) n (a : 'a array) : numbering * 'b array = match Array.length a with 0 -> (n, [||]) | 1 -> x let to_clambda_function (id, (function_decl : Flambda.function_declaration)) : Clambda.ufunction = All that we need in the environment , for translating one closure from a closed set of closures , is the substitutions for variables bound to the various closures in the set . Such closures will always be ... closed set of closures, is the substitutions for variables bound to the various closures in the set. Such closures will always be ... *) x
0c8439140aa819eb671d9a2d94ac351efea87c4f8bf9aa8daadbe32db13c03a0
rds13/xmlrpc
beam_util.erl
%% Copyright (C) 2009 Romuald du Song <rdusong _AT_ gmail _DOT_ com>.
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without
%% modification, are permitted provided that the following conditions
%% are met:
%% 1. Redistributions of source code must retain the above copyright
%% notice, this list of conditions and the following disclaimer.
%% 2. Redistributions in binary form must reproduce the above
%% copyright notice, this list of conditions and the following
%% disclaimer in the documentation and/or other materials provided
%% with the distribution.
%%
%% THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
%% OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
%% WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
%% ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
%% DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
%% DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
%% GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
%% INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
%% WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
%% NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
%% SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-module(beam_util).
-export([module_export_list/1, filter_arity/3]).

%% Module = string()
%% Function = atom()
module_export_list( Module ) ->
    {_Module, _Binary, Filename} = code:get_object_code(Module),
    case beam_lib:info( Filename ) of
        {error, beam_lib, _} ->
            false;
        [_, _, _] ->
            case beam_lib:chunks( Filename, [exports]) of
                {ok, {_, [{exports, Exports}]}} ->
                    Exports;
                {error, beam_lib, Er} ->
                    false
            end
    end.

%% Module = string()
%% Arity = integer()
%% Exports = list()
filter_arity( Function, Arity, Exports) ->
    case lists:filter( fun( EFName ) -> {Function, Arity} == EFName end, Exports ) of
        [{_, _}] ->
            true;
        [] ->
            false
    end.
null
https://raw.githubusercontent.com/rds13/xmlrpc/42e6e96a0fe7106830274feed915125feb1056f3/src/beam_util.erl
erlang
All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: notice, this list of conditions and the following disclaimer. copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Module = string() Function = atom() Module = string() Arity = integer() Exports = list()
Copyright ( C ) 2009 Romuald du Song < rdusong _ AT _ gmail _ DOT _ com > . 1 . Redistributions of source code must retain the above copyright 2 . Redistributions in binary form must reproduce the above THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , -module(beam_util). -export([module_export_list/1, filter_arity/3]). module_export_list( Module ) -> {_Module, _Binary, Filename} = code:get_object_code(Module), case beam_lib:info( Filename ) of {error, beam_lib, _} -> false; [ _ , _ , _ ] -> case beam_lib:chunks( Filename, [exports]) of {ok, {_, [{exports, Exports}]}} -> Exports; {error, beam_lib, Er} -> false end end. filter_arity( Function, Arity, Exports) -> case lists:filter( fun( EFName ) -> {Function, Arity} == EFName end, Exports ) of [{_, _}] -> true; [] -> false end.
5fd4bf4b06b4d8ae6a6457540d27c22f29fec050c72c6bc12678cb1c980b4048
barrucadu/yukibot
Configuration.hs
-- | Module      : Yukibot.Configuration
-- Copyright   : (c) 2016
-- License     : MIT
-- Stability   : experimental
-- Portability : portable
module Yukibot.Configuration
  ( -- * Parsing
    parseConfigFile

  -- * Accessors
  , getArray
  , getBool
  , getDouble
  , getInteger
  , getString
  , getTable
  , getTableArray
  , getUTCTime
  -- ** Array helpers
  , getStrings
  , getTables
  -- ** Table helpers
  , getNestedTable

  -- * Combining
  , override

  -- * Re-exports
  , module Text.Toml.Types
  , E.ParseError, E.Message(..), E.errorPos, E.errorMessages
  , P.SourcePos, P.Line, P.Column, P.sourceLine, P.sourceColumn
  ) where

import Data.Foldable (toList)
import qualified Data.HashMap.Strict as H
import Data.Text (Text, pack)
import Data.Time.Clock (UTCTime)
import System.FilePath (FilePath)
import qualified Text.Parsec.Error as E
import qualified Text.Parsec.Pos as P
import Text.Toml (parseTomlDoc)
import Text.Toml.Types

-- | Parse a configuration file
parseConfigFile :: FilePath -> IO (Either E.ParseError Table)
parseConfigFile path = parseTomlDoc "" . pack <$> readFile path

-------------------------------------------------------------------------------
-- Accessors

getArray :: Text -> Table -> Maybe [Node]
getArray fld tbl = case H.lookup fld tbl of
  Just (VArray a) -> Just (toList a)
  _ -> Nothing

getBool :: Text -> Table -> Maybe Bool
getBool fld tbl = case H.lookup fld tbl of
  Just (VBoolean b) -> Just b
  _ -> Nothing

getDouble :: Text -> Table -> Maybe Double
getDouble fld tbl = case H.lookup fld tbl of
  Just (VFloat f) -> Just f
  _ -> Nothing

getInteger :: Integral i => Text -> Table -> Maybe i
getInteger fld tbl = case H.lookup fld tbl of
  Just (VInteger i) -> Just (fromIntegral i)
  _ -> Nothing

getString :: Text -> Table -> Maybe Text
getString fld tbl = case H.lookup fld tbl of
  Just (VString t) -> Just t
  _ -> Nothing

getTable :: Text -> Table -> Maybe Table
getTable fld tbl = case H.lookup fld tbl of
  Just (VTable b) -> Just b
  _ -> Nothing

getTableArray :: Text -> Table -> Maybe [Table]
getTableArray fld tbl = case H.lookup fld tbl of
  Just (VTArray ts) -> Just (toList ts)
  _ -> Nothing

getUTCTime :: Text -> Table -> Maybe UTCTime
getUTCTime fld tbl = case H.lookup fld tbl of
  Just (VDatetime b) -> Just b
  _ -> Nothing

-------------------------------------------------------------------------------
-- Array helprs

-- | Assumes the field is an array of strings.
getStrings :: Text -> Table -> [Text]
getStrings fld tbl = case (getString fld tbl, getArray fld tbl) of
  (Just s, _) -> [s]
  (_, Just ss) -> [s | VString s <- ss]
  _ -> []

-- | Combines 'getTable' and 'getTableArray'.
getTables :: Text -> Table -> [Table]
getTables fld tbl = case (getTable fld tbl, getTableArray fld tbl) of
  (Just t, _) -> [t]
  (_, Just ts) -> ts
  _ -> []

-------------------------------------------------------------------------------
-- Table helpers

-- | Get a nested table.
getNestedTable :: [Text] -> Table -> Maybe Table
getNestedTable (t:ts) tbl = getTable t tbl >>= getNestedTable ts
getNestedTable [] tbl = Just tbl

-------------------------------------------------------------------------------
-- Combining

-- | Combine two tables.
--
-- The keys in the resultant table are the union of the keys in both
-- tables. The values are:
--
--  (1) where the key only appears in one table, that value is
-- used;
--
--  (2) where the key appears in both tables, and both values are
-- tables, they are combined with this function;
--
--  (3) otherwise the value from the left-hand table is used.
--
-- It is the recursive case that distinguishes this from the regular
-- '<>' for 'Table'.
override :: Table -> Table -> Table override left right = H.fromList [(k, val k) | k <- H.keys left ++ H.keys right] where val k = case (H.lookup k left, H.lookup k right) of (Just (VTable a), Just (VTable b)) -> VTable (a `override` b) (Just a, _) -> a (_, Just b) -> b _ -> error "This isn't reachable, but the type system can't tell that :("
null
https://raw.githubusercontent.com/barrucadu/yukibot/7b12153e2cc71b62f86ab3a851a250dce83343b8/yukibot-core/Yukibot/Configuration.hs
haskell
| Stability : experimental Portability : portable * Parsing * Accessors ** Array helpers ** Table helpers * Combining * Re-exports | Parse a configuration file ----------------------------------------------------------------------------- ----------------------------------------------------------------------------- Array helprs | Assumes the field is an array of strings. ----------------------------------------------------------------------------- Table helpers | Get a nested table. ----------------------------------------------------------------------------- Combining The keys in the resultant table are the union of the keys in both tables. The values are: used; tables, they are combined with this function; It is the recursive case that distinguishes this from the regular '<>' for 'Table'.
Module : . Configuration Copyright : ( c ) 2016 License : MIT module Yukibot.Configuration parseConfigFile , getArray , getBool , getDouble , getInteger , getString , getTable , getTableArray , getUTCTime , getStrings , getTables , getNestedTable , override , module Text.Toml.Types , E.ParseError, E.Message(..), E.errorPos, E.errorMessages , P.SourcePos, P.Line, P.Column, P.sourceLine, P.sourceColumn ) where import Data.Foldable (toList) import qualified Data.HashMap.Strict as H import Data.Text (Text, pack) import Data.Time.Clock (UTCTime) import System.FilePath (FilePath) import qualified Text.Parsec.Error as E import qualified Text.Parsec.Pos as P import Text.Toml (parseTomlDoc) import Text.Toml.Types parseConfigFile :: FilePath -> IO (Either E.ParseError Table) parseConfigFile path = parseTomlDoc "" . pack <$> readFile path Accessors getArray :: Text -> Table -> Maybe [Node] getArray fld tbl = case H.lookup fld tbl of Just (VArray a) -> Just (toList a) _ -> Nothing getBool :: Text -> Table -> Maybe Bool getBool fld tbl = case H.lookup fld tbl of Just (VBoolean b) -> Just b _ -> Nothing getDouble :: Text -> Table -> Maybe Double getDouble fld tbl = case H.lookup fld tbl of Just (VFloat f) -> Just f _ -> Nothing getInteger :: Integral i => Text -> Table -> Maybe i getInteger fld tbl = case H.lookup fld tbl of Just (VInteger i) -> Just (fromIntegral i) _ -> Nothing getString :: Text -> Table -> Maybe Text getString fld tbl = case H.lookup fld tbl of Just (VString t) -> Just t _ -> Nothing getTable :: Text -> Table -> Maybe Table getTable fld tbl = case H.lookup fld tbl of Just (VTable b) -> Just b _ -> Nothing getTableArray :: Text -> Table -> Maybe [Table] getTableArray fld tbl = case H.lookup fld tbl of Just (VTArray ts) -> Just (toList ts) _ -> Nothing getUTCTime :: Text -> Table -> Maybe UTCTime getUTCTime fld tbl = case H.lookup fld tbl of Just (VDatetime b) -> Just b _ -> Nothing getStrings :: Text -> Table -> [Text] getStrings fld tbl = case (getString fld tbl, getArray fld tbl) of (Just s, _) -> [s] (_, Just ss) -> [s | VString s <- ss] _ -> [] | Combines ' getTable ' and ' getTableArray ' . getTables :: Text -> Table -> [Table] getTables fld tbl = case (getTable fld tbl, getTableArray fld tbl) of (Just t, _) -> [t] (_, Just ts) -> ts _ -> [] getNestedTable :: [Text] -> Table -> Maybe Table getNestedTable (t:ts) tbl = getTable t tbl >>= getNestedTable ts getNestedTable [] tbl = Just tbl | Combine two tables . ( 1 ) where the key only appears in one table , that value is ( 2 ) where the key appears in both tables , and both values are ( 3 ) otherwise the value from the left - hand table is used . override :: Table -> Table -> Table override left right = H.fromList [(k, val k) | k <- H.keys left ++ H.keys right] where val k = case (H.lookup k left, H.lookup k right) of (Just (VTable a), Just (VTable b)) -> VTable (a `override` b) (Just a, _) -> a (_, Just b) -> b _ -> error "This isn't reachable, but the type system can't tell that :("
45852ca1d1e2b2f1985a03df60a1465682b4e83ee7f65c4b25fd46d7a1cdca3c
passy/android-lint-summary
Main.hs
module Main where import AndroidLintSummary.CLI (runCLI) import Paths_android_lint_summary (version) main :: IO () main = runCLI version
null
https://raw.githubusercontent.com/passy/android-lint-summary/8aaf7884f542176e43a6f6b4e51e6391fafd61e0/Main.hs
haskell
module Main where import AndroidLintSummary.CLI (runCLI) import Paths_android_lint_summary (version) main :: IO () main = runCLI version
db55342f10095cfb06b2aa9258e59f358f34f919fbbcf8b8ee2df88836591457
GaloisInc/tower
DeviceHandle.hs
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DataKinds #-}

module Ivory.Tower.HAL.Bus.SPI.DeviceHandle
  ( SPIDeviceHandle(..)
  ) where

import Ivory.Language

newtype SPIDeviceHandle = SPIDeviceHandle Uint8
  deriving ( IvoryType, IvoryVar, IvoryExpr, IvoryEq, IvoryOrd
           , IvoryStore, IvoryInit, IvoryZeroVal)
null
https://raw.githubusercontent.com/GaloisInc/tower/a43f5e36c6443472ea2dc15bbd49faf8643a6f87/tower-hal/src/Ivory/Tower/HAL/Bus/SPI/DeviceHandle.hs
haskell
# LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE FlexibleInstances # # LANGUAGE DataKinds # module Ivory.Tower.HAL.Bus.SPI.DeviceHandle ( SPIDeviceHandle(..) ) where import Ivory.Language newtype SPIDeviceHandle = SPIDeviceHandle Uint8 deriving ( IvoryType, IvoryVar, IvoryExpr, IvoryEq, IvoryOrd , IvoryStore, IvoryInit, IvoryZeroVal)
aa319a695f933015a8bad7b7bf38994044a2f8e1d57ff1b3c2cfad03c89a4932
RefactoringTools/HaRe
A5.hs
module Renaming.A5 where import Renaming.B5 import Renaming.C5 import Renaming.D5 main :: Tree Int ->Bool main t = isSame (sumSquares (fringe t)) (sumSquares (Renaming.B5.myFringe t)+sumSquares (Renaming.C5.myFringe t))
null
https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/test/testdata/Renaming/A5.hs
haskell
module Renaming.A5 where import Renaming.B5 import Renaming.C5 import Renaming.D5 main :: Tree Int ->Bool main t = isSame (sumSquares (fringe t)) (sumSquares (Renaming.B5.myFringe t)+sumSquares (Renaming.C5.myFringe t))
c01d1feb2b23cbd77e3ff79ed06b818ee39a23c16f524fcfabb5e7b693d4bb86
Isaac-DeFrain/KFramework
constants.ml
type sort = |SortRbinds |SortAExp |SortListensCellOpt |SortIdCellOpt |SortK |SortPbindList |SortRidCell |SortListenCell |SortUnconsumableSend |SortThreadCell |SortMlidCell |SortVarsCellOpt |SortNomoCell |SortThreadCellBag |SortThreadCellFragment |SortKItem |SortBindList |SortNames |SortUri |SortIdCell |SortRhoMap |SortLidCell |SortReceivesCellFragment |SortStateCellOpt |SortSendsCellFragment |SortTCellFragment |SortChanLen |SortContCell |SortSchanCell |SortSet |SortChanCellOpt |SortCell |SortProcs |SortWhereCellOpt |SortTupleCellOpt |SortBool |SortKResult |SortReactCell |SortRhoTuple |SortSend |SortLengthCell |SortKCell |SortMsidCellOpt |SortLbind |SortBundle |SortRnumCell |SortRhoKVPairs |SortSidCell |SortTuplespaceCellFragment |SortLidCellOpt |SortName |SortBindOcc |SortReactionCellFragment |SortRhoSet |SortSendCellBag |SortPar |SortInt |SortNumCellOpt |SortRnumCellOpt |SortCollection |SortRbind |SortWhatCellOpt |SortRecCellFragment |SortStypeCell |SortMsidCell |SortReceivesCell |SortEval |SortKCellOpt |SortStypeCellOpt |SortListenCellBag |SortSendCell |SortReactCellOpt |SortRhoList |SortReactionCell |SortMatchCellOpt |SortChanCell |SortLbindList |SortNameVar |SortLengthCellOpt |SortListensCell |SortBExp |SortConsumableSend |SortRidCellOpt |SortMap |SortRecCellBag |SortRecCell |SortContCellOpt |SortThreadsCellOpt |SortTuplespaceCell |SortNew |SortStream |SortThreadsCellFragment |SortListensCellFragment |SortWhoCell |SortReceivesCellOpt |SortProc |SortNameList |SortGround |SortString |SortWhoCellOpt |SortFloat |SortChanList |SortSendsCell |SortReactionCellOpt |SortPbind |SortSingleRec |SortThreadsCell |SortMultiRec |SortTypeCell |SortVarsCell |SortTypeCellOpt |SortSendCellFragment |SortSchanCellOpt |SortTuplespaceCellOpt |SortLbinds |SortNumCell |SortWhereCell |SortForgCellOpt |SortKVariable |SortBytes |SortWhatCell |SortSendsCellOpt |SortNomoCellOpt |SortTupleCell |SortIOError |SortStringBuffer |SortRbindList |SortTCell |SortBind |SortMlidCellOpt |SortMsgCellOpt |SortMsgCell |SortIdNum |SortKConfigVar |SortSidCellOpt |SortPbinds |SortJoinList |SortBinds |SortForgCell |SortProcList |SortId |SortList |SortStateCell |SortReceive |SortListenCellFragment |SortMatchCell type klabel = |Lbl'Hash'argv |LblisPbindList |LblisThreadCell |LblMap'Coln'lookup |LblisChanList |Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX |Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO |LblisWhoCell |LblisIdNum |LblsignExtendBitRangeInt |Lbl_'EqlsEqls'Bool__BOOL |LblisSet |LblisThreadCellBag |Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisNew |Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX |LblisStypeCell |Lbl_'_LT_Eqls'Set__SET |Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisIOError |Lbl'Hash'parse |Lbl'Hash'EALREADY_K'Hyph'IO |LblisRhoList |LblmakeList |Lbl'Hash'ESPIPE_K'Hyph'IO |Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'ENOENT_K'Hyph'IO |LblisProcs |Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX |LblisTypeCell |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ |LblisLbinds |LblisLengthCell |LblnoStateCell |LblisLbind |Lbl'Hash'ENOTTY_K'Hyph'IO |LblisForgCell |Lbl'_LT_'forg'_GT_' |LblinitChanCell |LblisProcList |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ |LblinitRnumCell |LblisRidCellOpt |LblisReceivesCellFragment |Lbl'Hash'ENOTEMPTY_K'Hyph'IO |LblisSidCellOpt |Lbl'Hash'EMSGSIZE_K'Hyph'IO |LblisKConfigVar |LblisRhoMap |Lbl'Hash'ENETRESET_K'Hyph'IO |Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO |LblnoTupleCell |LblnoSendsCell |Lbl'_LT_'thread'_GT_Hyph'fragment |LblisCell |LblisPbind |Lbl'Hash'ENOMEM_K'Hyph'IO |Lblvalues 
|Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX |LblisThreadCellFragment |LblisStateCellOpt |LblinitLidCell |LblisNameList |LblisListensCellOpt |LblisTuplespaceCellOpt |Lbl'Hash'ENXIO_K'Hyph'IO |Lbl_'_LT_'Int__INT |LblnoTypeCell |LblisSendCell |Lbl'Hash'configuration_K'Hyph'REFLECTION |LblisSendsCell |LblisFloat |Lbl'_LT_'msg'_GT_' |LblisContCell |LblchrChar |Lbl_divInt__INT |Lbl'Hash'EROFS_K'Hyph'IO |LblisWhereCellOpt |Lbl_ThreadCellBag_ |LblisProc |LblisListensCell |Lbl_'Plus'Int_ |LblisReactionCell |Lbl_orBool__BOOL |Lbl'_LT_'sid'_GT_' |Lbl'Hash'ENFILE_K'Hyph'IO |LblupdateMap |LblisReactionCellOpt |Lbl_'SCln'__GRHO'Hyph'SYNTAX |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ |LblisNomoCell |LblnoWhereCell |LblisJoinList |LblInt2String |Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'EqlsSlshEqls'K_ |LblisNumCell |LblisRecCell |Lbl_List_ |LblisMultiRec |LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'EOPNOTSUPP_K'Hyph'IO |Lbl_'PipeHyph_GT_'_ |LblisMatchCellOpt |Lbl_'Hyph'Map__MAP |Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisRhoSet |Lbl'_LT_'chan'_GT_' |LblnoForgCell |LblisReceivesCellOpt |Lbl'Hash'EMLINK_K'Hyph'IO |LblisListenCellBag |Lbl'Hash'sort |Lbl_'EqlsEqls'K_ |LblisPar |Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX |LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING |LblnoListensCell |LblnoStypeCell |Lbl'Hash'EOVERFLOW_K'Hyph'IO |Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO |LblisThreadsCellOpt |Lbl'Stop'Map |LblisVarsCell |Lbl_'EqlsSlshEqls'String__STRING |Lbl'Hash'EIO_K'Hyph'IO |LblinitMlidCell |Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX |LblisSendCellBag |LblisInt |Lbl'Hash'EFAULT_K'Hyph'IO |Lbl'Hash'fresh |Lbl_impliesBool__BOOL |Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'Star'Int__INT |Lbl'_LT_'T'_GT_' |Lbl'Hash'Thread |LblmaxInt'LPar'_'Comm'_'RPar'_INT |LblinitReceivesCell |Lbl'Hash'EDEADLK_K'Hyph'IO |Lbl_'_LT_Eqls'String__STRING |LblListenCellBagItem |LblisNames |Lbl'Hash'ENOBUFS_K'Hyph'IO |Lbl_Map_ |Lbl_'Hyph'Int__INT |Lbl'Hash'EOF_K'Hyph'IO |Lbl_'BangBang'__GRHO'Hyph'SYNTAX |LblisReactionCellFragment |Lbl_and__GRHO'Hyph'SYNTAX |Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblFloat2String |Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX |LblinitWhoCell |Lbl'_LT_'listen'_GT_' |LblnoReceivesCell |LblsizeList |Lbl'Hash'EWOULDBLOCK_K'Hyph'IO |LblString2Id |LblinitTuplespaceCell |Lbl'_LT_'thread'_GT_' |Lbl'_LT_'vars'_GT_' |Lbl_'EqlsSlshEqls'Bool__BOOL |Lbl'_LT_'length'_GT_' |LblisCollection |Lbl'Hash'EFBIG_K'Hyph'IO |LblisTCell |Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION |Lbl'Hash'EBADF_K'Hyph'IO |Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblnoLengthCell |LblinitNomoCell |Lbl'Hash'EPIPE_K'Hyph'IO |Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblnoContCell |LblisRhoTuple |Lbl_'Xor_Perc'Int___INT |LblisMsgCellOpt |Lbl'_LT_'reaction'_GT_' |LblrfindString |LblisChanCellOpt |Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO |LblnoNomoCell |Lbl'Hash'EINTR_K'Hyph'IO |Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO |LblupdateList |LblisLidCell |LblisMsgCell |Lbl'Stop'SendCellBag |LblinitContCell |LblnoReactCell |LblcategoryChar |LblSet'Coln'difference |LblisName |Lbl'Hash'EHOSTUNREACH_K'Hyph'IO |Lbl'Hash'ECONNRESET_K'Hyph'IO |LblisBundle |LblisKCellOpt |LblisForgCellOpt |Lbl'Hash'ECHILD_K'Hyph'IO |LblisRecCellFragment |LblisUnconsumableSend |LblisLbindList |LblString2Float |LblMap'Coln'lookupOrDefault |Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL |Lbl'_LT_'tuplespace'_GT_' |Lbl'Hash'ENOTCONN_K'Hyph'IO 
|Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX |Lbl'_LT_'what'_GT_' |Lbl'Hash'stdout_K'Hyph'IO |Lbl_'And'Int__INT |Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'_LT_'tuple'_GT_' |Lbl'Hash'ENAMETOOLONG_K'Hyph'IO |Lbllog2Int |Lbl_'EqlsSlshEqls'Int__INT |Lbl'Hash'stdin_K'Hyph'IO |Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'_GT_Eqls'String__STRING |LblnoSchanCell |Lbl'_LT_'react'_GT_' |LblisBindOcc |LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX |LblsizeMap |LblisWhereCell |LblnoMsgCell |LblisId |LblsubstrString |LblnoTuplespaceCell |Lbl_'Comm'__GRHO'Hyph'SYNTAX |Lbl_'Bang'__GRHO'Hyph'SYNTAX |LblisTypeCellOpt |Lblsize |Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblnoMatchCell |LblisBind |Lbl'Hash'ENETUNREACH_K'Hyph'IO |Lbl'Hash'EPROTOTYPE_K'Hyph'IO |Lbl'Star'__GRHO'Hyph'SYNTAX |Lbl'_LT_'who'_GT_' |Lbl_'Coln'__GRHO'Hyph'SYNTAX |LblnoThreadsCell |Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO |Lbl'_LT_'listens'_GT_' |LblsrandInt |Lbl'Hash'EINVAL_K'Hyph'IO |Lbl'_LT_'rid'_GT_' |LblisKItem |Lbl'Hash'ENODEV_K'Hyph'IO |Lbl'Hash'length__AUXFUN'Hyph'SYNTAX |LblisRecCellBag |LblList'Coln'set |LblisUri |LblString2Base |Lbl'Hash'noparse_K'Hyph'IO |Lblkeys |LblinitRecCell |Lbl'Hash'ESHUTDOWN_K'Hyph'IO |LblisGround |Lbl'Stop'ThreadCellBag |LblThreadCellBagItem |Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'Hash'ENOTDIR_K'Hyph'IO |Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX |Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX |LblinitSendCell |Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX |Lbl'_LT_'nomo'_GT_' |Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'_LT_Eqls'Int__INT |LblnotBool_ |LblnoNumCell |Lbl'Hash'stderr_K'Hyph'IO |LblnoKCell |Lbl'Hash'EBUSY_K'Hyph'IO |Lbl'Hash'getenv |LblisTuplespaceCell |LblisBinds |LblnoReactionCell |LblintersectSet |Lbl_in_keys'LPar'_'RPar'_MAP |LblinitMsgCell |Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblfindChar |Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX |LblSet'Coln'in |LblisK |LblisWhoCellOpt |Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ |LblisReceivesCell |LblString2Int |Lbl'_LT_'where'_GT_' |LblinitWhereCell |LblinitThreadCell |LblisSingleRec |LblisThreadsCell |LblisTupleCellOpt |LblisEval |LblisWhatCell |Lbl'Hash'ENETDOWN_K'Hyph'IO |LblisListenCellFragment |Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' |Lbl'Hash'Bottom |Lbl_'EqlsEqls'Int_ |Lbl_andThenBool__BOOL |LblisPbinds |Lbl'Hash'parseInModule |LblNil_GRHO'Hyph'SYNTAX |LblisAExp |Lbl'Hash'system |Lbl'_LT_'mlid'_GT_' |LblinitRidCell |LblisString |Lbl_'Perc'Int__INT |Lbl_'_GT__GT_'Int__INT |Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX |LblnoWhoCell |LblisList |Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO |LblisTuplespaceCellFragment |LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING |LblisBindList |LblnoChanCell |Lbl'Hash'EDESTADDRREQ_K'Hyph'IO |Lbl'Hash'EADDRINUSE_K'Hyph'IO |LblnoRnumCell |Lbl_'Xor_'Int__INT |LblfindString |Lbl'_LT_'k'_GT_' |Lbl'_LT_'reaction'_GT_Hyph'fragment |LblabsInt |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ |Lbl'Hash'EHOSTDOWN_K'Hyph'IO |Lbl_'_GT_'String__STRING |LblisSendsCellFragment |LblinitLengthCell |Lbl_'EqlsEqls'String__STRING |LblisRnumCellOpt |LblisSend |LblisKResult |LblinitStypeCell |LblList'Coln'get |Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO |LblSendCellBagItem |Lbltuple |Lbl'_LT_'id'_GT_' |LblSetItem |Lbl'_LT_'receives'_GT_' |LblisRhoKVPairs |LblunsignedBytes |LblisMsidCellOpt |Lbl'Stop'List |Lbl'Hash'ENOLCK_K'Hyph'IO |LblisSendsCellOpt |Lbl'Hash'ECONNABORTED_K'Hyph'IO |LblrandInt |Lbl'Hash'EXDEV_K'Hyph'IO |Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO |Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX 
|Lblkeys_list'LPar'_'RPar'_MAP |LblfreshId |LblinitTypeCell |Lbl_orElseBool__BOOL |LblisSchanCellOpt |Lbl'Hash'EISDIR_K'Hyph'IO |Lbl'_LT_'cont'_GT_' |LblList'Coln'range |LblinitTupleCell |LblnoIdCell |LblisKCell |Lbl'Hash'unknownIOError |Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ |Lbl_'_GT_Eqls'Int__INT |LblisSendCellFragment |Lbl'Hash'ENOSYS_K'Hyph'IO |Lbl_'Pipe'__GRHO'Hyph'SYNTAX |Lbl'Hash'ECONNREFUSED_K'Hyph'IO |Lbl'_LT_'sends'_GT_Hyph'fragment |Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO |LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING |Lbl_'_GT_'Int__INT |LblfillList |Lbl'_AT_'__GRHO'Hyph'SYNTAX |LblinitForgCell |LblbitRangeInt |Lbl_'_LT_'String__STRING |Lbl'Hash'ThreadLocal |Lbl_xorBool__BOOL |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ |LblinitReactCell |Lbl'Stop'RecCellBag |Lbl'_LT_'type'_GT_' |Lbl'_LT_'listens'_GT_Hyph'fragment |Lbl_'Plus'__GRHO'Hyph'SYNTAX |Lbl'_LT_'lid'_GT_' |Lbl_ListenCellBag_ |Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO |Lbl_'LSqB'_'RSqB'_SUBSTITUTION |LblnoMlidCell |Lbl_or__GRHO'Hyph'SYNTAX |Lbl'Hash'ETOOMANYREFS_K'Hyph'IO |Lbl'_LT_'threads'_GT_Hyph'fragment |LblinitListensCell |Lbl'Hash'ENOSPC_K'Hyph'IO |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ |LblisChanCell |LblisRnumCell |Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'Hash'logToFile |Lbl'_LT_'rec'_GT_' |Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO |LblnoLidCell |LblisNameVar |Lbl'_LT_'schan'_GT_' |LblbigEndianBytes |Lbl'_LT_'match'_GT_' |LblId2String |LblinitListenCell |Lbl'_LT_'num'_GT_' |LblisContCellOpt |LblisLidCellOpt |LblnoSidCell |Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX |LblMap'Coln'choice |Lbl_Set_ |Lbl'Hash'EEXIST_K'Hyph'IO |Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO |LblisRidCell |Lbl'_LT_'state'_GT_' |LblisListenCell |LblisBool |Lbl'Tild'Int__INT |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ |LblordChar |LblinitIdCell |Lbl_modInt__INT |LblrfindChar |LblisRbinds |LblisMlidCellOpt |Lbl'Hash'EAGAIN_K'Hyph'IO |Lbl'Stop'ListenCellBag |LblnoMsidCell |LblinitSchanCell |LbldirectionalityChar |LblisIdCell |Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO |LblinitKCell |LblRecCellBagItem |Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX |LblisBExp |Lbl'Stop'Set |LblisChanLen |LblisStateCell |Lbl'Hash'EACCES_K'Hyph'IO |Lbl'Hash'ELOOP_K'Hyph'IO |Lbl'Hash'EDOM_K'Hyph'IO |LblisSidCell |LblremoveAll |LblnoRidCell |Lbl'_LT_'threads'_GT_' |Lbl_andBool_ |Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisThreadsCellFragment |LblisLengthCellOpt |LblisRbindList |Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO |LblisConsumableSend |LbllengthString |Lbl_'Hyph'__GRHO'Hyph'SYNTAX |Lbl'_LT_'listen'_GT_Hyph'fragment |LblisReceive |Lbl'Hash'ERANGE_K'Hyph'IO |LblinitTCell |LblsignedBytes |LblFloatFormat |LblisMsidCell |Lbl'Hash'ENOTSOCK_K'Hyph'IO |Lbl_'Plus'String__STRING |Lbl_RecCellBag_ |Lbl_'Pipe'Int__INT |Lbl'Hash'EISCONN_K'Hyph'IO |LblisKVariable |Lbl_dividesInt__INT |Lbl'_LT_'rec'_GT_Hyph'fragment |Lbl'_LT_'tuplespace'_GT_Hyph'fragment |Lbl'_LT_'T'_GT_Hyph'fragment |Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ |LblisWhatCellOpt |LblSet'Coln'choice |LblisMatchCell |LblisListensCellFragment |Lbl'Hash'buffer |Lbl_'Star'__GRHO'Hyph'SYNTAX |LblinitNumCell |LblfreshInt |Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'ETIMEDOUT_K'Hyph'IO |LblinitSidCell |LblisIdCellOpt |Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX |Lbl'_LT_'sends'_GT_' |LblisSchanCell |Lbl_xorInt__INT |Lbl'Hash'EINPROGRESS_K'Hyph'IO |LblinitVarsCell |LblinitStateCell 
|LblisNumCellOpt |LblinitMatchCell |LblisMlidCell |Lbl'Hash'ENOPROTOOPT_K'Hyph'IO |LbllittleEndianBytes |Lbl'Hash'EPERM_K'Hyph'IO |LblnoWhatCell |LblinitWhatCell |Lbl'_LT_'send'_GT_' |Lbl_'_LT__LT_'Int__INT |LblBase2String |LblListItem |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ |LblisStream |Lbl_'_LT_Eqls'Map__MAP |LblnewUUID_STRING |LblnoVarsCell |LblinitThreadsCell |Lbl_SendCellBag_ |Lbl'Hash'ESRCH_K'Hyph'IO |Lbl'Hash'EMFILE_K'Hyph'IO |Lblproject'Coln'Proc |LblisReactCellOpt |Lbl'_LT_'receives'_GT_Hyph'fragment |Lbl'_LT_'stype'_GT_' |Lbl_inList_ |LblisVarsCellOpt |Lbl'Hash'ENOEXEC_K'Hyph'IO |LblminInt'LPar'_'Comm'_'RPar'_INT |LblinitReactionCell |LblisMap |LblisTupleCell |LblisReactCell |LblinitMsidCell |Lbl'_LT_'rnum'_GT_' |LblisNomoCellOpt |LblisStypeCellOpt |LblisTCellFragment |Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING |Lbl_'Slsh'Int__INT |Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP |LblisRbind |Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO |Lbl'_LT_'msid'_GT_' |LblinitSendsCell |Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX |LblgetKLabel |Lblnot__GRHO'Hyph'SYNTAX |Lbl'Hash'E2BIG_K'Hyph'IO |Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'_LT_'send'_GT_Hyph'fragment let print_sort(c: sort) : string = match c with |SortRbinds -> "Rbinds" |SortAExp -> "AExp" |SortListensCellOpt -> "ListensCellOpt" |SortIdCellOpt -> "IdCellOpt" |SortK -> "K" |SortPbindList -> "PbindList" |SortRidCell -> "RidCell" |SortListenCell -> "ListenCell" |SortUnconsumableSend -> "UnconsumableSend" |SortThreadCell -> "ThreadCell" |SortMlidCell -> "MlidCell" |SortVarsCellOpt -> "VarsCellOpt" |SortNomoCell -> "NomoCell" |SortThreadCellBag -> "ThreadCellBag" |SortThreadCellFragment -> "ThreadCellFragment" |SortKItem -> "KItem" |SortBindList -> "BindList" |SortNames -> "Names" |SortUri -> "Uri" |SortIdCell -> "IdCell" |SortRhoMap -> "RhoMap" |SortLidCell -> "LidCell" |SortReceivesCellFragment -> "ReceivesCellFragment" |SortStateCellOpt -> "StateCellOpt" |SortSendsCellFragment -> "SendsCellFragment" |SortTCellFragment -> "TCellFragment" |SortChanLen -> "ChanLen" |SortContCell -> "ContCell" |SortSchanCell -> "SchanCell" |SortSet -> "Set" |SortChanCellOpt -> "ChanCellOpt" |SortCell -> "Cell" |SortProcs -> "Procs" |SortWhereCellOpt -> "WhereCellOpt" |SortTupleCellOpt -> "TupleCellOpt" |SortBool -> "Bool" |SortKResult -> "KResult" |SortReactCell -> "ReactCell" |SortRhoTuple -> "RhoTuple" |SortSend -> "Send" |SortLengthCell -> "LengthCell" |SortKCell -> "KCell" |SortMsidCellOpt -> "MsidCellOpt" |SortLbind -> "Lbind" |SortBundle -> "Bundle" |SortRnumCell -> "RnumCell" |SortRhoKVPairs -> "RhoKVPairs" |SortSidCell -> "SidCell" |SortTuplespaceCellFragment -> "TuplespaceCellFragment" |SortLidCellOpt -> "LidCellOpt" |SortName -> "Name" |SortBindOcc -> "BindOcc" |SortReactionCellFragment -> "ReactionCellFragment" |SortRhoSet -> "RhoSet" |SortSendCellBag -> "SendCellBag" |SortPar -> "Par" |SortInt -> "Int" |SortNumCellOpt -> "NumCellOpt" |SortRnumCellOpt -> "RnumCellOpt" |SortCollection -> "Collection" |SortRbind -> "Rbind" |SortWhatCellOpt -> "WhatCellOpt" |SortRecCellFragment -> "RecCellFragment" |SortStypeCell -> "StypeCell" |SortMsidCell -> "MsidCell" |SortReceivesCell -> "ReceivesCell" |SortEval -> "Eval" |SortKCellOpt -> "KCellOpt" |SortStypeCellOpt -> "StypeCellOpt" |SortListenCellBag -> "ListenCellBag" |SortSendCell -> "SendCell" |SortReactCellOpt -> "ReactCellOpt" |SortRhoList -> "RhoList" |SortReactionCell -> "ReactionCell" |SortMatchCellOpt -> "MatchCellOpt" |SortChanCell -> "ChanCell" |SortLbindList -> "LbindList" |SortNameVar 
-> "NameVar" |SortLengthCellOpt -> "LengthCellOpt" |SortListensCell -> "ListensCell" |SortBExp -> "BExp" |SortConsumableSend -> "ConsumableSend" |SortRidCellOpt -> "RidCellOpt" |SortMap -> "Map" |SortRecCellBag -> "RecCellBag" |SortRecCell -> "RecCell" |SortContCellOpt -> "ContCellOpt" |SortThreadsCellOpt -> "ThreadsCellOpt" |SortTuplespaceCell -> "TuplespaceCell" |SortNew -> "New" |SortStream -> "Stream" |SortThreadsCellFragment -> "ThreadsCellFragment" |SortListensCellFragment -> "ListensCellFragment" |SortWhoCell -> "WhoCell" |SortReceivesCellOpt -> "ReceivesCellOpt" |SortProc -> "Proc" |SortNameList -> "NameList" |SortGround -> "Ground" |SortString -> "String" |SortWhoCellOpt -> "WhoCellOpt" |SortFloat -> "Float" |SortChanList -> "ChanList" |SortSendsCell -> "SendsCell" |SortReactionCellOpt -> "ReactionCellOpt" |SortPbind -> "Pbind" |SortSingleRec -> "SingleRec" |SortThreadsCell -> "ThreadsCell" |SortMultiRec -> "MultiRec" |SortTypeCell -> "TypeCell" |SortVarsCell -> "VarsCell" |SortTypeCellOpt -> "TypeCellOpt" |SortSendCellFragment -> "SendCellFragment" |SortSchanCellOpt -> "SchanCellOpt" |SortTuplespaceCellOpt -> "TuplespaceCellOpt" |SortLbinds -> "Lbinds" |SortNumCell -> "NumCell" |SortWhereCell -> "WhereCell" |SortForgCellOpt -> "ForgCellOpt" |SortKVariable -> "KVariable" |SortBytes -> "Bytes" |SortWhatCell -> "WhatCell" |SortSendsCellOpt -> "SendsCellOpt" |SortNomoCellOpt -> "NomoCellOpt" |SortTupleCell -> "TupleCell" |SortIOError -> "IOError" |SortStringBuffer -> "StringBuffer" |SortRbindList -> "RbindList" |SortTCell -> "TCell" |SortBind -> "Bind" |SortMlidCellOpt -> "MlidCellOpt" |SortMsgCellOpt -> "MsgCellOpt" |SortMsgCell -> "MsgCell" |SortIdNum -> "IdNum" |SortKConfigVar -> "KConfigVar" |SortSidCellOpt -> "SidCellOpt" |SortPbinds -> "Pbinds" |SortJoinList -> "JoinList" |SortBinds -> "Binds" |SortForgCell -> "ForgCell" |SortProcList -> "ProcList" |SortId -> "Id" |SortList -> "List" |SortStateCell -> "StateCell" |SortReceive -> "Receive" |SortListenCellFragment -> "ListenCellFragment" |SortMatchCell -> "MatchCell" let print_klabel(c: klabel) : string = match c with |Lbl'Hash'argv -> "#argv" |LblisPbindList -> "isPbindList" |LblisThreadCell -> "isThreadCell" |LblMap'Coln'lookup -> "`Map:lookup`" |LblisChanList -> "isChanList" |Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle0{_}_GRHO-SYNTAX`" |Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#seek(_,_)_K-IO`" |LblisWhoCell -> "isWhoCell" |LblisIdNum -> "isIdNum" |LblsignExtendBitRangeInt -> "signExtendBitRangeInt" |Lbl_'EqlsEqls'Bool__BOOL -> "`_==Bool__BOOL`" |LblisSet -> "isSet" |LblisThreadCellBag -> "isThreadCellBag" |Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bindocce(_)_AUXFUN-SYNTAX`" |LblisNew -> "isNew" |Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX -> "`{[_;_]}_GRHO-SYNTAX`" |LblisStypeCell -> "isStypeCell" |Lbl_'_LT_Eqls'Set__SET -> "`_<=Set__SET`" |Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`_#in(_)_AUXFUN-SYNTAX`" |LblisIOError -> "isIOError" |Lbl'Hash'parse -> "#parse" |Lbl'Hash'EALREADY_K'Hyph'IO -> "`#EALREADY_K-IO`" |LblisRhoList -> "isRhoList" |LblmakeList -> "makeList" |Lbl'Hash'ESPIPE_K'Hyph'IO -> "`#ESPIPE_K-IO`" |Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#unlock(_,_)_K-IO`" |Lbl'Hash'ENOENT_K'Hyph'IO -> "`#ENOENT_K-IO`" |LblisProcs -> "isProcs" |Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX -> "`_<!__GRHO-SYNTAX`" |LblisTypeCell -> "isTypeCell" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_<=__GRHO-SYNTAX1_`" |LblisLbinds -> "isLbinds" |LblisLengthCell -> 
"isLengthCell" |LblnoStateCell -> "noStateCell" |LblisLbind -> "isLbind" |Lbl'Hash'ENOTTY_K'Hyph'IO -> "`#ENOTTY_K-IO`" |LblisForgCell -> "isForgCell" |Lbl'_LT_'forg'_GT_' -> "`<forg>`" |LblinitChanCell -> "initChanCell" |LblisProcList -> "isProcList" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_-__GRHO-SYNTAX0_`" |LblinitRnumCell -> "initRnumCell" |LblisRidCellOpt -> "isRidCellOpt" |LblisReceivesCellFragment -> "isReceivesCellFragment" |Lbl'Hash'ENOTEMPTY_K'Hyph'IO -> "`#ENOTEMPTY_K-IO`" |LblisSidCellOpt -> "isSidCellOpt" |Lbl'Hash'EMSGSIZE_K'Hyph'IO -> "`#EMSGSIZE_K-IO`" |LblisKConfigVar -> "isKConfigVar" |LblisRhoMap -> "isRhoMap" |Lbl'Hash'ENETRESET_K'Hyph'IO -> "`#ENETRESET_K-IO`" |Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO -> "`#EAFNOSUPPORT_K-IO`" |LblnoTupleCell -> "noTupleCell" |LblnoSendsCell -> "noSendsCell" |Lbl'_LT_'thread'_GT_Hyph'fragment -> "`<thread>-fragment`" |LblisCell -> "isCell" |LblisPbind -> "isPbind" |Lbl'Hash'ENOMEM_K'Hyph'IO -> "`#ENOMEM_K-IO`" |Lblvalues -> "values" |Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle+{_}_GRHO-SYNTAX`" |LblisThreadCellFragment -> "isThreadCellFragment" |LblisStateCellOpt -> "isStateCellOpt" |LblinitLidCell -> "initLidCell" |LblisNameList -> "isNameList" |LblisListensCellOpt -> "isListensCellOpt" |LblisTuplespaceCellOpt -> "isTuplespaceCellOpt" |Lbl'Hash'ENXIO_K'Hyph'IO -> "`#ENXIO_K-IO`" |Lbl_'_LT_'Int__INT -> "`_<Int__INT`" |LblnoTypeCell -> "noTypeCell" |LblisSendCell -> "isSendCell" |Lbl'Hash'configuration_K'Hyph'REFLECTION -> "`#configuration_K-REFLECTION`" |LblisSendsCell -> "isSendsCell" |LblisFloat -> "isFloat" |Lbl'_LT_'msg'_GT_' -> "`<msg>`" |LblisContCell -> "isContCell" |LblchrChar -> "chrChar" |Lbl_divInt__INT -> "`_divInt__INT`" |Lbl'Hash'EROFS_K'Hyph'IO -> "`#EROFS_K-IO`" |LblisWhereCellOpt -> "isWhereCellOpt" |Lbl_ThreadCellBag_ -> "`_ThreadCellBag_`" |LblisProc -> "isProc" |LblisListensCell -> "isListensCell" |Lbl_'Plus'Int_ -> "`_+Int_`" |LblisReactionCell -> "isReactionCell" |Lbl_orBool__BOOL -> "`_orBool__BOOL`" |Lbl'_LT_'sid'_GT_' -> "`<sid>`" |Lbl'Hash'ENFILE_K'Hyph'IO -> "`#ENFILE_K-IO`" |LblupdateMap -> "updateMap" |LblisReactionCellOpt -> "isReactionCellOpt" |Lbl_'SCln'__GRHO'Hyph'SYNTAX -> "`_;__GRHO-SYNTAX`" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_<=__GRHO-SYNTAX0_`" |LblisNomoCell -> "isNomoCell" |LblnoWhereCell -> "noWhereCell" |LblisJoinList -> "isJoinList" |LblInt2String -> "`Int2String`" |Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#stype(_)_AUXFUN-SYNTAX`" |Lbl_'EqlsSlshEqls'K_ -> "`_=/=K_`" |LblisNumCell -> "isNumCell" |LblisRecCell -> "isRecCell" |Lbl_List_ -> "`_List_`" |LblisMultiRec -> "isMultiRec" |LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`toString(_)_AUXFUN-SYNTAX`" |Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#open(_,_)_K-IO`" |Lbl'Hash'EOPNOTSUPP_K'Hyph'IO -> "`#EOPNOTSUPP_K-IO`" |Lbl_'PipeHyph_GT_'_ -> "`_|->_`" |LblisMatchCellOpt -> "isMatchCellOpt" |Lbl_'Hyph'Map__MAP -> "`_-Map__MAP`" |Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#length(_)_AUXFUN-SYNTAX`" |LblisRhoSet -> "isRhoSet" |Lbl'_LT_'chan'_GT_' -> "`<chan>`" |LblnoForgCell -> "noForgCell" |LblisReceivesCellOpt -> "isReceivesCellOpt" |Lbl'Hash'EMLINK_K'Hyph'IO -> "`#EMLINK_K-IO`" |LblisListenCellBag -> "isListenCellBag" |Lbl'Hash'sort -> "#sort" |Lbl_'EqlsEqls'K_ -> "`_==K_`" |LblisPar -> "isPar" |Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "`unforgeable(_)_GRHO-SYNTAX`" |LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "`replaceFirst(_,_,_)_STRING`" 
|LblnoListensCell -> "noListensCell" |LblnoStypeCell -> "noStypeCell" |Lbl'Hash'EOVERFLOW_K'Hyph'IO -> "`#EOVERFLOW_K-IO`" |Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#putc(_,_)_K-IO`" |LblisThreadsCellOpt -> "isThreadsCellOpt" |Lbl'Stop'Map -> "`.Map`" |LblisVarsCell -> "isVarsCell" |Lbl_'EqlsSlshEqls'String__STRING -> "`_=/=String__STRING`" |Lbl'Hash'EIO_K'Hyph'IO -> "`#EIO_K-IO`" |LblinitMlidCell -> "initMlidCell" |Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "`uri(_)_GRHO-SYNTAX`" |LblisSendCellBag -> "isSendCellBag" |LblisInt -> "isInt" |Lbl'Hash'EFAULT_K'Hyph'IO -> "`#EFAULT_K-IO`" |Lbl'Hash'fresh -> "#fresh" |Lbl_impliesBool__BOOL -> "`_impliesBool__BOOL`" |Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chanlist(_)_AUXFUN-SYNTAX`" |Lbl_'Star'Int__INT -> "`_*Int__INT`" |Lbl'_LT_'T'_GT_' -> "`<T>`" |Lbl'Hash'Thread -> "#Thread" |LblmaxInt'LPar'_'Comm'_'RPar'_INT -> "`maxInt(_,_)_INT`" |LblinitReceivesCell -> "initReceivesCell" |Lbl'Hash'EDEADLK_K'Hyph'IO -> "`#EDEADLK_K-IO`" |Lbl_'_LT_Eqls'String__STRING -> "`_<=String__STRING`" |LblListenCellBagItem -> "`ListenCellBagItem`" |LblisNames -> "isNames" |Lbl'Hash'ENOBUFS_K'Hyph'IO -> "`#ENOBUFS_K-IO`" |Lbl_Map_ -> "`_Map_`" |Lbl_'Hyph'Int__INT -> "`_-Int__INT`" |Lbl'Hash'EOF_K'Hyph'IO -> "`#EOF_K-IO`" |Lbl_'BangBang'__GRHO'Hyph'SYNTAX -> "`_!!__GRHO-SYNTAX`" |LblisReactionCellFragment -> "isReactionCellFragment" |Lbl_and__GRHO'Hyph'SYNTAX -> "`_and__GRHO-SYNTAX`" |Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#lengths(_)_AUXFUN-SYNTAX`" |LblFloat2String -> "`Float2String`" |Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#append(_;_)_AUXFUN-SYNTAX`" |LblinitWhoCell -> "initWhoCell" |Lbl'_LT_'listen'_GT_' -> "`<listen>`" |LblnoReceivesCell -> "noReceivesCell" |LblsizeList -> "sizeList" |Lbl'Hash'EWOULDBLOCK_K'Hyph'IO -> "`#EWOULDBLOCK_K-IO`" |LblString2Id -> "`String2Id`" |LblinitTuplespaceCell -> "initTuplespaceCell" |Lbl'_LT_'thread'_GT_' -> "`<thread>`" |Lbl'_LT_'vars'_GT_' -> "`<vars>`" |Lbl_'EqlsSlshEqls'Bool__BOOL -> "`_=/=Bool__BOOL`" |Lbl'_LT_'length'_GT_' -> "`<length>`" |LblisCollection -> "isCollection" |Lbl'Hash'EFBIG_K'Hyph'IO -> "`#EFBIG_K-IO`" |LblisTCell -> "isTCell" |Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION -> "`_[_/_]_SUBSTITUTION`" |Lbl'Hash'EBADF_K'Hyph'IO -> "`#EBADF_K-IO`" |Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#msg(_)_AUXFUN-SYNTAX`" |LblnoLengthCell -> "noLengthCell" |LblinitNomoCell -> "initNomoCell" |Lbl'Hash'EPIPE_K'Hyph'IO -> "`#EPIPE_K-IO`" |Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bvar(_)_AUXFUN-SYNTAX`" |LblnoContCell -> "noContCell" |LblisRhoTuple -> "isRhoTuple" |Lbl_'Xor_Perc'Int___INT -> "`_^%Int___INT`" |LblisMsgCellOpt -> "isMsgCellOpt" |Lbl'_LT_'reaction'_GT_' -> "`<reaction>`" |LblrfindString -> "rfindString" |LblisChanCellOpt -> "isChanCellOpt" |Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO -> "`#ESOCKTNOSUPPORT_K-IO`" |LblnoNomoCell -> "noNomoCell" |Lbl'Hash'EINTR_K'Hyph'IO -> "`#EINTR_K-IO`" |Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO -> "`#stat(_)_K-IO`" |LblupdateList -> "updateList" |LblisLidCell -> "isLidCell" |LblisMsgCell -> "isMsgCell" |Lbl'Stop'SendCellBag -> "`.SendCellBag`" |LblinitContCell -> "initContCell" |LblnoReactCell -> "noReactCell" |LblcategoryChar -> "categoryChar" |LblSet'Coln'difference -> "`Set:difference`" |LblisName -> "isName" |Lbl'Hash'EHOSTUNREACH_K'Hyph'IO -> "`#EHOSTUNREACH_K-IO`" |Lbl'Hash'ECONNRESET_K'Hyph'IO -> "`#ECONNRESET_K-IO`" |LblisBundle -> "isBundle" |LblisKCellOpt -> "isKCellOpt" |LblisForgCellOpt -> "isForgCellOpt" 
|Lbl'Hash'ECHILD_K'Hyph'IO -> "`#ECHILD_K-IO`" |LblisRecCellFragment -> "isRecCellFragment" |LblisUnconsumableSend -> "isUnconsumableSend" |LblisLbindList -> "isLbindList" |LblString2Float -> "`String2Float`" |LblMap'Coln'lookupOrDefault -> "`Map:lookupOrDefault`" |Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL -> "`#if_#then_#else_#fi_K-EQUAL`" |Lbl'_LT_'tuplespace'_GT_' -> "`<tuplespace>`" |Lbl'Hash'ENOTCONN_K'Hyph'IO -> "`#ENOTCONN_K-IO`" |Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX -> "`_<-__GRHO-SYNTAX`" |Lbl'_LT_'what'_GT_' -> "`<what>`" |Lbl'Hash'stdout_K'Hyph'IO -> "`#stdout_K-IO`" |Lbl_'And'Int__INT -> "`_&Int__INT`" |Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#rtype(_)_AUXFUN-SYNTAX`" |Lbl'_LT_'tuple'_GT_' -> "`<tuple>`" |Lbl'Hash'ENAMETOOLONG_K'Hyph'IO -> "`#ENAMETOOLONG_K-IO`" |Lbllog2Int -> "log2Int" |Lbl_'EqlsSlshEqls'Int__INT -> "`_=/=Int__INT`" |Lbl'Hash'stdin_K'Hyph'IO -> "`#stdin_K-IO`" |Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chanlen(_)_AUXFUN-SYNTAX`" |Lbl_'_GT_Eqls'String__STRING -> "`_>=String__STRING`" |LblnoSchanCell -> "noSchanCell" |Lbl'_LT_'react'_GT_' -> "`<react>`" |LblisBindOcc -> "isBindOcc" |LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "`Set(_)_GRHO-SYNTAX`" |LblsizeMap -> "sizeMap" |LblisWhereCell -> "isWhereCell" |LblnoMsgCell -> "noMsgCell" |LblisId -> "isId" |LblsubstrString -> "substrString" |LblnoTuplespaceCell -> "noTuplespaceCell" |Lbl_'Comm'__GRHO'Hyph'SYNTAX -> "`_,__GRHO-SYNTAX`" |Lbl_'Bang'__GRHO'Hyph'SYNTAX -> "`_!__GRHO-SYNTAX`" |LblisTypeCellOpt -> "isTypeCellOpt" |Lblsize -> "size" |Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bindocc(_)_AUXFUN-SYNTAX`" |LblnoMatchCell -> "noMatchCell" |LblisBind -> "isBind" |Lbl'Hash'ENETUNREACH_K'Hyph'IO -> "`#ENETUNREACH_K-IO`" |Lbl'Hash'EPROTOTYPE_K'Hyph'IO -> "`#EPROTOTYPE_K-IO`" |Lbl'Star'__GRHO'Hyph'SYNTAX -> "`*__GRHO-SYNTAX`" |Lbl'_LT_'who'_GT_' -> "`<who>`" |Lbl_'Coln'__GRHO'Hyph'SYNTAX -> "`_:__GRHO-SYNTAX`" |LblnoThreadsCell -> "noThreadsCell" |Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO -> "`#systemResult(_,_,_)_K-IO`" |Lbl'_LT_'listens'_GT_' -> "`<listens>`" |LblsrandInt -> "srandInt" |Lbl'Hash'EINVAL_K'Hyph'IO -> "`#EINVAL_K-IO`" |Lbl'_LT_'rid'_GT_' -> "`<rid>`" |LblisKItem -> "isKItem" |Lbl'Hash'ENODEV_K'Hyph'IO -> "`#ENODEV_K-IO`" |Lbl'Hash'length__AUXFUN'Hyph'SYNTAX -> "`#length__AUXFUN-SYNTAX`" |LblisRecCellBag -> "isRecCellBag" |LblList'Coln'set -> "`List:set`" |LblisUri -> "isUri" |LblString2Base -> "`String2Base`" |Lbl'Hash'noparse_K'Hyph'IO -> "`#noparse_K-IO`" |Lblkeys -> "keys" |LblinitRecCell -> "initRecCell" |Lbl'Hash'ESHUTDOWN_K'Hyph'IO -> "`#ESHUTDOWN_K-IO`" |LblisGround -> "isGround" |Lbl'Stop'ThreadCellBag -> "`.ThreadCellBag`" |LblThreadCellBagItem -> "`ThreadCellBagItem`" |Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#cont(_)_AUXFUN-SYNTAX`" |Lbl'Hash'ENOTDIR_K'Hyph'IO -> "`#ENOTDIR_K-IO`" |Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`new_in{_}_GRHO-SYNTAX`" |Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chan(_)_AUXFUN-SYNTAX`" |Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX -> "`_#in__AUXFUN-SYNTAX`" |LblinitSendCell -> "initSendCell" |Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`{_}_GRHO-SYNTAX`" |Lbl'_LT_'nomo'_GT_' -> "`<nomo>`" |Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bnum(_)_AUXFUN-SYNTAX`" |Lbl_'_LT_Eqls'Int__INT -> "`_<=Int__INT`" |LblnotBool_ -> "`notBool_`" |LblnoNumCell -> "noNumCell" |Lbl'Hash'stderr_K'Hyph'IO -> "`#stderr_K-IO`" |LblnoKCell -> "noKCell" |Lbl'Hash'EBUSY_K'Hyph'IO -> "`#EBUSY_K-IO`" |Lbl'Hash'getenv -> 
"#getenv" |LblisTuplespaceCell -> "isTuplespaceCell" |LblisBinds -> "isBinds" |LblnoReactionCell -> "noReactionCell" |LblintersectSet -> "intersectSet" |Lbl_in_keys'LPar'_'RPar'_MAP -> "`_in_keys(_)_MAP`" |LblinitMsgCell -> "initMsgCell" |Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bind(_)_AUXFUN-SYNTAX`" |LblfindChar -> "findChar" |Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX -> "`[_]_GRHO-SYNTAX`" |LblSet'Coln'in -> "`Set:in`" |LblisK -> "isK" |LblisWhoCellOpt -> "isWhoCellOpt" |Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ -> "`#freezernot__GRHO-SYNTAX0_`" |LblisReceivesCell -> "isReceivesCell" |LblString2Int -> "`String2Int`" |Lbl'_LT_'where'_GT_' -> "`<where>`" |LblinitWhereCell -> "initWhereCell" |LblinitThreadCell -> "initThreadCell" |LblisSingleRec -> "isSingleRec" |LblisThreadsCell -> "isThreadsCell" |LblisTupleCellOpt -> "isTupleCellOpt" |LblisEval -> "isEval" |LblisWhatCell -> "isWhatCell" |Lbl'Hash'ENETDOWN_K'Hyph'IO -> "`#ENETDOWN_K-IO`" |LblisListenCellFragment -> "isListenCellFragment" |Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' -> "`_[_<-undef]`" |Lbl'Hash'Bottom -> "#Bottom" |Lbl_'EqlsEqls'Int_ -> "`_==Int_`" |Lbl_andThenBool__BOOL -> "`_andThenBool__BOOL`" |LblisPbinds -> "isPbinds" |Lbl'Hash'parseInModule -> "#parseInModule" |LblNil_GRHO'Hyph'SYNTAX -> "`Nil_GRHO-SYNTAX`" |LblisAExp -> "isAExp" |Lbl'Hash'system -> "#system" |Lbl'_LT_'mlid'_GT_' -> "`<mlid>`" |LblinitRidCell -> "initRidCell" |LblisString -> "isString" |Lbl_'Perc'Int__INT -> "`_%Int__INT`" |Lbl_'_GT__GT_'Int__INT -> "`_>>Int__INT`" |Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX -> "`_:_,__GRHO-SYNTAX`" |LblnoWhoCell -> "noWhoCell" |LblisList -> "isList" |Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO -> "`#EPROTONOSUPPORT_K-IO`" |LblisTuplespaceCellFragment -> "isTuplespaceCellFragment" |LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "`replaceAll(_,_,_)_STRING`" |LblisBindList -> "isBindList" |LblnoChanCell -> "noChanCell" |Lbl'Hash'EDESTADDRREQ_K'Hyph'IO -> "`#EDESTADDRREQ_K-IO`" |Lbl'Hash'EADDRINUSE_K'Hyph'IO -> "`#EADDRINUSE_K-IO`" |LblnoRnumCell -> "noRnumCell" |Lbl_'Xor_'Int__INT -> "`_^Int__INT`" |LblfindString -> "findString" |Lbl'_LT_'k'_GT_' -> "`<k>`" |Lbl'_LT_'reaction'_GT_Hyph'fragment -> "`<reaction>-fragment`" |LblabsInt -> "absInt" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_+__GRHO-SYNTAX1_`" |Lbl'Hash'EHOSTDOWN_K'Hyph'IO -> "`#EHOSTDOWN_K-IO`" |Lbl_'_GT_'String__STRING -> "`_>String__STRING`" |LblisSendsCellFragment -> "isSendsCellFragment" |LblinitLengthCell -> "initLengthCell" |Lbl_'EqlsEqls'String__STRING -> "`_==String__STRING`" |LblisRnumCellOpt -> "isRnumCellOpt" |LblisSend -> "isSend" |LblisKResult -> "isKResult" |LblinitStypeCell -> "initStypeCell" |LblList'Coln'get -> "`List:get`" |Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO -> "`#lstat(_)_K-IO`" |LblSendCellBagItem -> "`SendCellBagItem`" |Lbltuple -> "tuple" |Lbl'_LT_'id'_GT_' -> "`<id>`" |LblSetItem -> "`SetItem`" |Lbl'_LT_'receives'_GT_' -> "`<receives>`" |LblisRhoKVPairs -> "isRhoKVPairs" |LblunsignedBytes -> "unsignedBytes" |LblisMsidCellOpt -> "isMsidCellOpt" |Lbl'Stop'List -> "`.List`" |Lbl'Hash'ENOLCK_K'Hyph'IO -> "`#ENOLCK_K-IO`" |LblisSendsCellOpt -> "isSendsCellOpt" |Lbl'Hash'ECONNABORTED_K'Hyph'IO -> "`#ECONNABORTED_K-IO`" |LblrandInt -> "randInt" |Lbl'Hash'EXDEV_K'Hyph'IO -> "`#EXDEV_K-IO`" |Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO -> "`#close(_)_K-IO`" |Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX -> "`_;_;;__GRHO-SYNTAX`" |Lblkeys_list'LPar'_'RPar'_MAP -> "`keys_list(_)_MAP`" |LblfreshId -> "freshId" |LblinitTypeCell -> "initTypeCell" 
|Lbl_orElseBool__BOOL -> "`_orElseBool__BOOL`" |LblisSchanCellOpt -> "isSchanCellOpt" |Lbl'Hash'EISDIR_K'Hyph'IO -> "`#EISDIR_K-IO`" |Lbl'_LT_'cont'_GT_' -> "`<cont>`" |LblList'Coln'range -> "`List:range`" |LblinitTupleCell -> "initTupleCell" |LblnoIdCell -> "noIdCell" |LblisKCell -> "isKCell" |Lbl'Hash'unknownIOError -> "#unknownIOError" |Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ -> "`#freezer_and__GRHO-SYNTAX1_`" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_*__GRHO-SYNTAX0_`" |Lbl_'_GT_Eqls'Int__INT -> "`_>=Int__INT`" |LblisSendCellFragment -> "isSendCellFragment" |Lbl'Hash'ENOSYS_K'Hyph'IO -> "`#ENOSYS_K-IO`" |Lbl_'Pipe'__GRHO'Hyph'SYNTAX -> "`_|__GRHO-SYNTAX`" |Lbl'Hash'ECONNREFUSED_K'Hyph'IO -> "`#ECONNREFUSED_K-IO`" |Lbl'_LT_'sends'_GT_Hyph'fragment -> "`<sends>-fragment`" |Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#lock(_,_)_K-IO`" |Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO -> "`#EADDRNOTAVAIL_K-IO`" |LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING -> "`countAllOccurrences(_,_)_STRING`" |Lbl_'_GT_'Int__INT -> "`_>Int__INT`" |LblfillList -> "fillList" |Lbl'_AT_'__GRHO'Hyph'SYNTAX -> "`@__GRHO-SYNTAX`" |LblinitForgCell -> "initForgCell" |LblbitRangeInt -> "bitRangeInt" |Lbl_'_LT_'String__STRING -> "`_<String__STRING`" |Lbl'Hash'ThreadLocal -> "#ThreadLocal" |Lbl_xorBool__BOOL -> "`_xorBool__BOOL`" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_+__GRHO-SYNTAX0_`" |LblinitReactCell -> "initReactCell" |Lbl'Stop'RecCellBag -> "`.RecCellBag`" |Lbl'_LT_'type'_GT_' -> "`<type>`" |Lbl'_LT_'listens'_GT_Hyph'fragment -> "`<listens>-fragment`" |Lbl_'Plus'__GRHO'Hyph'SYNTAX -> "`_+__GRHO-SYNTAX`" |Lbl'_LT_'lid'_GT_' -> "`<lid>`" |Lbl_ListenCellBag_ -> "`_ListenCellBag_`" |Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO -> "`#open(_)_K-IO`" |Lbl_'LSqB'_'RSqB'_SUBSTITUTION -> "`_[_]_SUBSTITUTION`" |LblnoMlidCell -> "noMlidCell" |Lbl_or__GRHO'Hyph'SYNTAX -> "`_or__GRHO-SYNTAX`" |Lbl'Hash'ETOOMANYREFS_K'Hyph'IO -> "`#ETOOMANYREFS_K-IO`" |Lbl'_LT_'threads'_GT_Hyph'fragment -> "`<threads>-fragment`" |LblinitListensCell -> "initListensCell" |Lbl'Hash'ENOSPC_K'Hyph'IO -> "`#ENOSPC_K-IO`" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ -> "`#freezer_or__GRHO-SYNTAX0_`" |LblisChanCell -> "isChanCell" |LblisRnumCell -> "isRnumCell" |Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chanmany(_:_)_AUXFUN-SYNTAX`" |Lbl'Hash'logToFile -> "#logToFile" |Lbl'_LT_'rec'_GT_' -> "`<rec>`" |Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#read(_,_)_K-IO`" |LblnoLidCell -> "noLidCell" |LblisNameVar -> "isNameVar" |Lbl'_LT_'schan'_GT_' -> "`<schan>`" |LblbigEndianBytes -> "bigEndianBytes" |Lbl'_LT_'match'_GT_' -> "`<match>`" |LblId2String -> "`Id2String`" |LblinitListenCell -> "initListenCell" |Lbl'_LT_'num'_GT_' -> "`<num>`" |LblisContCellOpt -> "isContCellOpt" |LblisLidCellOpt -> "isLidCellOpt" |LblnoSidCell -> "noSidCell" |Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle{_}_GRHO-SYNTAX`" |LblMap'Coln'choice -> "`Map:choice`" |Lbl_Set_ -> "`_Set_`" |Lbl'Hash'EEXIST_K'Hyph'IO -> "`#EEXIST_K-IO`" |Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO -> "`#getc(_)_K-IO`" |LblisRidCell -> "isRidCell" |Lbl'_LT_'state'_GT_' -> "`<state>`" |LblisListenCell -> "isListenCell" |LblisBool -> "isBool" |Lbl'Tild'Int__INT -> "`~Int__INT`" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_*__GRHO-SYNTAX1_`" |LblordChar -> "ordChar" |LblinitIdCell -> "initIdCell" |Lbl_modInt__INT -> "`_modInt__INT`" |LblrfindChar -> "rfindChar" |LblisRbinds -> "isRbinds" |LblisMlidCellOpt -> "isMlidCellOpt" |Lbl'Hash'EAGAIN_K'Hyph'IO 
-> "`#EAGAIN_K-IO`" |Lbl'Stop'ListenCellBag -> "`.ListenCellBag`" |LblnoMsidCell -> "noMsidCell" |LblinitSchanCell -> "initSchanCell" |LbldirectionalityChar -> "directionalityChar" |LblisIdCell -> "isIdCell" |Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO -> "`#opendir(_)_K-IO`" |LblinitKCell -> "initKCell" |LblRecCellBagItem -> "`RecCellBagItem`" |Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`for(_){_}_GRHO-SYNTAX`" |LblisBExp -> "isBExp" |Lbl'Stop'Set -> "`.Set`" |LblisChanLen -> "isChanLen" |LblisStateCell -> "isStateCell" |Lbl'Hash'EACCES_K'Hyph'IO -> "`#EACCES_K-IO`" |Lbl'Hash'ELOOP_K'Hyph'IO -> "`#ELOOP_K-IO`" |Lbl'Hash'EDOM_K'Hyph'IO -> "`#EDOM_K-IO`" |LblisSidCell -> "isSidCell" |LblremoveAll -> "removeAll" |LblnoRidCell -> "noRidCell" |Lbl'_LT_'threads'_GT_' -> "`<threads>`" |Lbl_andBool_ -> "`_andBool_`" |Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`_#ine(_)_AUXFUN-SYNTAX`" |LblisThreadsCellFragment -> "isThreadsCellFragment" |LblisLengthCellOpt -> "isLengthCellOpt" |LblisRbindList -> "isRbindList" |Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO -> "`#EPFNOSUPPORT_K-IO`" |LblisConsumableSend -> "isConsumableSend" |LbllengthString -> "lengthString" |Lbl_'Hyph'__GRHO'Hyph'SYNTAX -> "`_-__GRHO-SYNTAX`" |Lbl'_LT_'listen'_GT_Hyph'fragment -> "`<listen>-fragment`" |LblisReceive -> "isReceive" |Lbl'Hash'ERANGE_K'Hyph'IO -> "`#ERANGE_K-IO`" |LblinitTCell -> "initTCell" |LblsignedBytes -> "signedBytes" |LblFloatFormat -> "`FloatFormat`" |LblisMsidCell -> "isMsidCell" |Lbl'Hash'ENOTSOCK_K'Hyph'IO -> "`#ENOTSOCK_K-IO`" |Lbl_'Plus'String__STRING -> "`_+String__STRING`" |Lbl_RecCellBag_ -> "`_RecCellBag_`" |Lbl_'Pipe'Int__INT -> "`_|Int__INT`" |Lbl'Hash'EISCONN_K'Hyph'IO -> "`#EISCONN_K-IO`" |LblisKVariable -> "isKVariable" |Lbl_dividesInt__INT -> "`_dividesInt__INT`" |Lbl'_LT_'rec'_GT_Hyph'fragment -> "`<rec>-fragment`" |Lbl'_LT_'tuplespace'_GT_Hyph'fragment -> "`<tuplespace>-fragment`" |Lbl'_LT_'T'_GT_Hyph'fragment -> "`<T>-fragment`" |Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX -> "`_<=__GRHO-SYNTAX`" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ -> "`#freezer_or__GRHO-SYNTAX1_`" |LblisWhatCellOpt -> "isWhatCellOpt" |LblSet'Coln'choice -> "`Set:choice`" |LblisMatchCell -> "isMatchCell" |LblisListensCellFragment -> "isListensCellFragment" |Lbl'Hash'buffer -> "#buffer" |Lbl_'Star'__GRHO'Hyph'SYNTAX -> "`_*__GRHO-SYNTAX`" |LblinitNumCell -> "initNumCell" |LblfreshInt -> "freshInt" |Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#write(_,_)_K-IO`" |Lbl'Hash'ETIMEDOUT_K'Hyph'IO -> "`#ETIMEDOUT_K-IO`" |LblinitSidCell -> "initSidCell" |LblisIdCellOpt -> "isIdCellOpt" |Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX -> "`(_;_)_GRHO-SYNTAX`" |Lbl'_LT_'sends'_GT_' -> "`<sends>`" |LblisSchanCell -> "isSchanCell" |Lbl_xorInt__INT -> "`_xorInt__INT`" |Lbl'Hash'EINPROGRESS_K'Hyph'IO -> "`#EINPROGRESS_K-IO`" |LblinitVarsCell -> "initVarsCell" |LblinitStateCell -> "initStateCell" |LblisNumCellOpt -> "isNumCellOpt" |LblinitMatchCell -> "initMatchCell" |LblisMlidCell -> "isMlidCell" |Lbl'Hash'ENOPROTOOPT_K'Hyph'IO -> "`#ENOPROTOOPT_K-IO`" |LbllittleEndianBytes -> "littleEndianBytes" |Lbl'Hash'EPERM_K'Hyph'IO -> "`#EPERM_K-IO`" |LblnoWhatCell -> "noWhatCell" |LblinitWhatCell -> "initWhatCell" |Lbl'_LT_'send'_GT_' -> "`<send>`" |Lbl_'_LT__LT_'Int__INT -> "`_<<Int__INT`" |LblBase2String -> "`Base2String`" |LblListItem -> "`ListItem`" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_-__GRHO-SYNTAX1_`" |LblisStream -> "isStream" |Lbl_'_LT_Eqls'Map__MAP -> "`_<=Map__MAP`" |LblnewUUID_STRING -> "`newUUID_STRING`" |LblnoVarsCell -> 
"noVarsCell" |LblinitThreadsCell -> "initThreadsCell" |Lbl_SendCellBag_ -> "`_SendCellBag_`" |Lbl'Hash'ESRCH_K'Hyph'IO -> "`#ESRCH_K-IO`" |Lbl'Hash'EMFILE_K'Hyph'IO -> "`#EMFILE_K-IO`" |Lblproject'Coln'Proc -> "`project:Proc`" |LblisReactCellOpt -> "isReactCellOpt" |Lbl'_LT_'receives'_GT_Hyph'fragment -> "`<receives>-fragment`" |Lbl'_LT_'stype'_GT_' -> "`<stype>`" |Lbl_inList_ -> "`_inList_`" |LblisVarsCellOpt -> "isVarsCellOpt" |Lbl'Hash'ENOEXEC_K'Hyph'IO -> "`#ENOEXEC_K-IO`" |LblminInt'LPar'_'Comm'_'RPar'_INT -> "`minInt(_,_)_INT`" |LblinitReactionCell -> "initReactionCell" |LblisMap -> "isMap" |LblisTupleCell -> "isTupleCell" |LblisReactCell -> "isReactCell" |LblinitMsidCell -> "initMsidCell" |Lbl'_LT_'rnum'_GT_' -> "`<rnum>`" |LblisNomoCellOpt -> "isNomoCellOpt" |LblisStypeCellOpt -> "isStypeCellOpt" |LblisTCellFragment -> "isTCellFragment" |Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING -> "`replace(_,_,_,_)_STRING`" |Lbl_'Slsh'Int__INT -> "`_/Int__INT`" |Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP -> "`_[_<-_]_MAP`" |LblisRbind -> "isRbind" |Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO -> "`#tell(_)_K-IO`" |Lbl'_LT_'msid'_GT_' -> "`<msid>`" |LblinitSendsCell -> "initSendsCell" |Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle-{_}_GRHO-SYNTAX`" |LblgetKLabel -> "getKLabel" |Lblnot__GRHO'Hyph'SYNTAX -> "`not__GRHO-SYNTAX`" |Lbl'Hash'E2BIG_K'Hyph'IO -> "`#E2BIG_K-IO`" |Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#seekEnd(_,_)_K-IO`" |Lbl'_LT_'send'_GT_Hyph'fragment -> "`<send>-fragment`" let print_klabel_string(c: klabel) : string = match c with |Lbl'Hash'argv -> "#argv" |LblisPbindList -> "isPbindList" |LblisThreadCell -> "isThreadCell" |LblMap'Coln'lookup -> "Map:lookup" |LblisChanList -> "isChanList" |Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle0{_}_GRHO-SYNTAX" |Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#seek(_,_)_K-IO" |LblisWhoCell -> "isWhoCell" |LblisIdNum -> "isIdNum" |LblsignExtendBitRangeInt -> "signExtendBitRangeInt" |Lbl_'EqlsEqls'Bool__BOOL -> "_==Bool__BOOL" |LblisSet -> "isSet" |LblisThreadCellBag -> "isThreadCellBag" |Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bindocce(_)_AUXFUN-SYNTAX" |LblisNew -> "isNew" |Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX -> "{[_;_]}_GRHO-SYNTAX" |LblisStypeCell -> "isStypeCell" |Lbl_'_LT_Eqls'Set__SET -> "_<=Set__SET" |Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "_#in(_)_AUXFUN-SYNTAX" |LblisIOError -> "isIOError" |Lbl'Hash'parse -> "#parse" |Lbl'Hash'EALREADY_K'Hyph'IO -> "#EALREADY_K-IO" |LblisRhoList -> "isRhoList" |LblmakeList -> "makeList" |Lbl'Hash'ESPIPE_K'Hyph'IO -> "#ESPIPE_K-IO" |Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#unlock(_,_)_K-IO" |Lbl'Hash'ENOENT_K'Hyph'IO -> "#ENOENT_K-IO" |LblisProcs -> "isProcs" |Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX -> "_<!__GRHO-SYNTAX" |LblisTypeCell -> "isTypeCell" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ -> "#freezer_<=__GRHO-SYNTAX1_" |LblisLbinds -> "isLbinds" |LblisLengthCell -> "isLengthCell" |LblnoStateCell -> "noStateCell" |LblisLbind -> "isLbind" |Lbl'Hash'ENOTTY_K'Hyph'IO -> "#ENOTTY_K-IO" |LblisForgCell -> "isForgCell" |Lbl'_LT_'forg'_GT_' -> "<forg>" |LblinitChanCell -> "initChanCell" |LblisProcList -> "isProcList" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ -> "#freezer_-__GRHO-SYNTAX0_" |LblinitRnumCell -> "initRnumCell" |LblisRidCellOpt -> "isRidCellOpt" |LblisReceivesCellFragment -> "isReceivesCellFragment" |Lbl'Hash'ENOTEMPTY_K'Hyph'IO -> "#ENOTEMPTY_K-IO" |LblisSidCellOpt -> "isSidCellOpt" |Lbl'Hash'EMSGSIZE_K'Hyph'IO -> 
"#EMSGSIZE_K-IO" |LblisKConfigVar -> "isKConfigVar" |LblisRhoMap -> "isRhoMap" |Lbl'Hash'ENETRESET_K'Hyph'IO -> "#ENETRESET_K-IO" |Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO -> "#EAFNOSUPPORT_K-IO" |LblnoTupleCell -> "noTupleCell" |LblnoSendsCell -> "noSendsCell" |Lbl'_LT_'thread'_GT_Hyph'fragment -> "<thread>-fragment" |LblisCell -> "isCell" |LblisPbind -> "isPbind" |Lbl'Hash'ENOMEM_K'Hyph'IO -> "#ENOMEM_K-IO" |Lblvalues -> "values" |Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle+{_}_GRHO-SYNTAX" |LblisThreadCellFragment -> "isThreadCellFragment" |LblisStateCellOpt -> "isStateCellOpt" |LblinitLidCell -> "initLidCell" |LblisNameList -> "isNameList" |LblisListensCellOpt -> "isListensCellOpt" |LblisTuplespaceCellOpt -> "isTuplespaceCellOpt" |Lbl'Hash'ENXIO_K'Hyph'IO -> "#ENXIO_K-IO" |Lbl_'_LT_'Int__INT -> "_<Int__INT" |LblnoTypeCell -> "noTypeCell" |LblisSendCell -> "isSendCell" |Lbl'Hash'configuration_K'Hyph'REFLECTION -> "#configuration_K-REFLECTION" |LblisSendsCell -> "isSendsCell" |LblisFloat -> "isFloat" |Lbl'_LT_'msg'_GT_' -> "<msg>" |LblisContCell -> "isContCell" |LblchrChar -> "chrChar" |Lbl_divInt__INT -> "_divInt__INT" |Lbl'Hash'EROFS_K'Hyph'IO -> "#EROFS_K-IO" |LblisWhereCellOpt -> "isWhereCellOpt" |Lbl_ThreadCellBag_ -> "_ThreadCellBag_" |LblisProc -> "isProc" |LblisListensCell -> "isListensCell" |Lbl_'Plus'Int_ -> "_+Int_" |LblisReactionCell -> "isReactionCell" |Lbl_orBool__BOOL -> "_orBool__BOOL" |Lbl'_LT_'sid'_GT_' -> "<sid>" |Lbl'Hash'ENFILE_K'Hyph'IO -> "#ENFILE_K-IO" |LblupdateMap -> "updateMap" |LblisReactionCellOpt -> "isReactionCellOpt" |Lbl_'SCln'__GRHO'Hyph'SYNTAX -> "_;__GRHO-SYNTAX" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ -> "#freezer_<=__GRHO-SYNTAX0_" |LblisNomoCell -> "isNomoCell" |LblnoWhereCell -> "noWhereCell" |LblisJoinList -> "isJoinList" |LblInt2String -> "Int2String" |Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#stype(_)_AUXFUN-SYNTAX" |Lbl_'EqlsSlshEqls'K_ -> "_=/=K_" |LblisNumCell -> "isNumCell" |LblisRecCell -> "isRecCell" |Lbl_List_ -> "_List_" |LblisMultiRec -> "isMultiRec" |LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "toString(_)_AUXFUN-SYNTAX" |Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#open(_,_)_K-IO" |Lbl'Hash'EOPNOTSUPP_K'Hyph'IO -> "#EOPNOTSUPP_K-IO" |Lbl_'PipeHyph_GT_'_ -> "_|->_" |LblisMatchCellOpt -> "isMatchCellOpt" |Lbl_'Hyph'Map__MAP -> "_-Map__MAP" |Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#length(_)_AUXFUN-SYNTAX" |LblisRhoSet -> "isRhoSet" |Lbl'_LT_'chan'_GT_' -> "<chan>" |LblnoForgCell -> "noForgCell" |LblisReceivesCellOpt -> "isReceivesCellOpt" |Lbl'Hash'EMLINK_K'Hyph'IO -> "#EMLINK_K-IO" |LblisListenCellBag -> "isListenCellBag" |Lbl'Hash'sort -> "#sort" |Lbl_'EqlsEqls'K_ -> "_==K_" |LblisPar -> "isPar" |Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "unforgeable(_)_GRHO-SYNTAX" |LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "replaceFirst(_,_,_)_STRING" |LblnoListensCell -> "noListensCell" |LblnoStypeCell -> "noStypeCell" |Lbl'Hash'EOVERFLOW_K'Hyph'IO -> "#EOVERFLOW_K-IO" |Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#putc(_,_)_K-IO" |LblisThreadsCellOpt -> "isThreadsCellOpt" |Lbl'Stop'Map -> ".Map" |LblisVarsCell -> "isVarsCell" |Lbl_'EqlsSlshEqls'String__STRING -> "_=/=String__STRING" |Lbl'Hash'EIO_K'Hyph'IO -> "#EIO_K-IO" |LblinitMlidCell -> "initMlidCell" |Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "uri(_)_GRHO-SYNTAX" |LblisSendCellBag -> "isSendCellBag" |LblisInt -> "isInt" |Lbl'Hash'EFAULT_K'Hyph'IO -> "#EFAULT_K-IO" |Lbl'Hash'fresh -> "#fresh" |Lbl_impliesBool__BOOL -> 
"_impliesBool__BOOL" |Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chanlist(_)_AUXFUN-SYNTAX" |Lbl_'Star'Int__INT -> "_*Int__INT" |Lbl'_LT_'T'_GT_' -> "<T>" |Lbl'Hash'Thread -> "#Thread" |LblmaxInt'LPar'_'Comm'_'RPar'_INT -> "maxInt(_,_)_INT" |LblinitReceivesCell -> "initReceivesCell" |Lbl'Hash'EDEADLK_K'Hyph'IO -> "#EDEADLK_K-IO" |Lbl_'_LT_Eqls'String__STRING -> "_<=String__STRING" |LblListenCellBagItem -> "ListenCellBagItem" |LblisNames -> "isNames" |Lbl'Hash'ENOBUFS_K'Hyph'IO -> "#ENOBUFS_K-IO" |Lbl_Map_ -> "_Map_" |Lbl_'Hyph'Int__INT -> "_-Int__INT" |Lbl'Hash'EOF_K'Hyph'IO -> "#EOF_K-IO" |Lbl_'BangBang'__GRHO'Hyph'SYNTAX -> "_!!__GRHO-SYNTAX" |LblisReactionCellFragment -> "isReactionCellFragment" |Lbl_and__GRHO'Hyph'SYNTAX -> "_and__GRHO-SYNTAX" |Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#lengths(_)_AUXFUN-SYNTAX" |LblFloat2String -> "Float2String" |Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#append(_;_)_AUXFUN-SYNTAX" |LblinitWhoCell -> "initWhoCell" |Lbl'_LT_'listen'_GT_' -> "<listen>" |LblnoReceivesCell -> "noReceivesCell" |LblsizeList -> "sizeList" |Lbl'Hash'EWOULDBLOCK_K'Hyph'IO -> "#EWOULDBLOCK_K-IO" |LblString2Id -> "String2Id" |LblinitTuplespaceCell -> "initTuplespaceCell" |Lbl'_LT_'thread'_GT_' -> "<thread>" |Lbl'_LT_'vars'_GT_' -> "<vars>" |Lbl_'EqlsSlshEqls'Bool__BOOL -> "_=/=Bool__BOOL" |Lbl'_LT_'length'_GT_' -> "<length>" |LblisCollection -> "isCollection" |Lbl'Hash'EFBIG_K'Hyph'IO -> "#EFBIG_K-IO" |LblisTCell -> "isTCell" |Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION -> "_[_/_]_SUBSTITUTION" |Lbl'Hash'EBADF_K'Hyph'IO -> "#EBADF_K-IO" |Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#msg(_)_AUXFUN-SYNTAX" |LblnoLengthCell -> "noLengthCell" |LblinitNomoCell -> "initNomoCell" |Lbl'Hash'EPIPE_K'Hyph'IO -> "#EPIPE_K-IO" |Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bvar(_)_AUXFUN-SYNTAX" |LblnoContCell -> "noContCell" |LblisRhoTuple -> "isRhoTuple" |Lbl_'Xor_Perc'Int___INT -> "_^%Int___INT" |LblisMsgCellOpt -> "isMsgCellOpt" |Lbl'_LT_'reaction'_GT_' -> "<reaction>" |LblrfindString -> "rfindString" |LblisChanCellOpt -> "isChanCellOpt" |Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO -> "#ESOCKTNOSUPPORT_K-IO" |LblnoNomoCell -> "noNomoCell" |Lbl'Hash'EINTR_K'Hyph'IO -> "#EINTR_K-IO" |Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO -> "#stat(_)_K-IO" |LblupdateList -> "updateList" |LblisLidCell -> "isLidCell" |LblisMsgCell -> "isMsgCell" |Lbl'Stop'SendCellBag -> ".SendCellBag" |LblinitContCell -> "initContCell" |LblnoReactCell -> "noReactCell" |LblcategoryChar -> "categoryChar" |LblSet'Coln'difference -> "Set:difference" |LblisName -> "isName" |Lbl'Hash'EHOSTUNREACH_K'Hyph'IO -> "#EHOSTUNREACH_K-IO" |Lbl'Hash'ECONNRESET_K'Hyph'IO -> "#ECONNRESET_K-IO" |LblisBundle -> "isBundle" |LblisKCellOpt -> "isKCellOpt" |LblisForgCellOpt -> "isForgCellOpt" |Lbl'Hash'ECHILD_K'Hyph'IO -> "#ECHILD_K-IO" |LblisRecCellFragment -> "isRecCellFragment" |LblisUnconsumableSend -> "isUnconsumableSend" |LblisLbindList -> "isLbindList" |LblString2Float -> "String2Float" |LblMap'Coln'lookupOrDefault -> "Map:lookupOrDefault" |Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL -> "#if_#then_#else_#fi_K-EQUAL" |Lbl'_LT_'tuplespace'_GT_' -> "<tuplespace>" |Lbl'Hash'ENOTCONN_K'Hyph'IO -> "#ENOTCONN_K-IO" |Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX -> "_<-__GRHO-SYNTAX" |Lbl'_LT_'what'_GT_' -> "<what>" |Lbl'Hash'stdout_K'Hyph'IO -> "#stdout_K-IO" |Lbl_'And'Int__INT -> "_&Int__INT" |Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#rtype(_)_AUXFUN-SYNTAX" |Lbl'_LT_'tuple'_GT_' -> "<tuple>" 
|Lbl'Hash'ENAMETOOLONG_K'Hyph'IO -> "#ENAMETOOLONG_K-IO" |Lbllog2Int -> "log2Int" |Lbl_'EqlsSlshEqls'Int__INT -> "_=/=Int__INT" |Lbl'Hash'stdin_K'Hyph'IO -> "#stdin_K-IO" |Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chanlen(_)_AUXFUN-SYNTAX" |Lbl_'_GT_Eqls'String__STRING -> "_>=String__STRING" |LblnoSchanCell -> "noSchanCell" |Lbl'_LT_'react'_GT_' -> "<react>" |LblisBindOcc -> "isBindOcc" |LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "Set(_)_GRHO-SYNTAX" |LblsizeMap -> "sizeMap" |LblisWhereCell -> "isWhereCell" |LblnoMsgCell -> "noMsgCell" |LblisId -> "isId" |LblsubstrString -> "substrString" |LblnoTuplespaceCell -> "noTuplespaceCell" |Lbl_'Comm'__GRHO'Hyph'SYNTAX -> "_,__GRHO-SYNTAX" |Lbl_'Bang'__GRHO'Hyph'SYNTAX -> "_!__GRHO-SYNTAX" |LblisTypeCellOpt -> "isTypeCellOpt" |Lblsize -> "size" |Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bindocc(_)_AUXFUN-SYNTAX" |LblnoMatchCell -> "noMatchCell" |LblisBind -> "isBind" |Lbl'Hash'ENETUNREACH_K'Hyph'IO -> "#ENETUNREACH_K-IO" |Lbl'Hash'EPROTOTYPE_K'Hyph'IO -> "#EPROTOTYPE_K-IO" |Lbl'Star'__GRHO'Hyph'SYNTAX -> "*__GRHO-SYNTAX" |Lbl'_LT_'who'_GT_' -> "<who>" |Lbl_'Coln'__GRHO'Hyph'SYNTAX -> "_:__GRHO-SYNTAX" |LblnoThreadsCell -> "noThreadsCell" |Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO -> "#systemResult(_,_,_)_K-IO" |Lbl'_LT_'listens'_GT_' -> "<listens>" |LblsrandInt -> "srandInt" |Lbl'Hash'EINVAL_K'Hyph'IO -> "#EINVAL_K-IO" |Lbl'_LT_'rid'_GT_' -> "<rid>" |LblisKItem -> "isKItem" |Lbl'Hash'ENODEV_K'Hyph'IO -> "#ENODEV_K-IO" |Lbl'Hash'length__AUXFUN'Hyph'SYNTAX -> "#length__AUXFUN-SYNTAX" |LblisRecCellBag -> "isRecCellBag" |LblList'Coln'set -> "List:set" |LblisUri -> "isUri" |LblString2Base -> "String2Base" |Lbl'Hash'noparse_K'Hyph'IO -> "#noparse_K-IO" |Lblkeys -> "keys" |LblinitRecCell -> "initRecCell" |Lbl'Hash'ESHUTDOWN_K'Hyph'IO -> "#ESHUTDOWN_K-IO" |LblisGround -> "isGround" |Lbl'Stop'ThreadCellBag -> ".ThreadCellBag" |LblThreadCellBagItem -> "ThreadCellBagItem" |Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#cont(_)_AUXFUN-SYNTAX" |Lbl'Hash'ENOTDIR_K'Hyph'IO -> "#ENOTDIR_K-IO" |Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "new_in{_}_GRHO-SYNTAX" |Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chan(_)_AUXFUN-SYNTAX" |Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX -> "_#in__AUXFUN-SYNTAX" |LblinitSendCell -> "initSendCell" |Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "{_}_GRHO-SYNTAX" |Lbl'_LT_'nomo'_GT_' -> "<nomo>" |Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bnum(_)_AUXFUN-SYNTAX" |Lbl_'_LT_Eqls'Int__INT -> "_<=Int__INT" |LblnotBool_ -> "notBool_" |LblnoNumCell -> "noNumCell" |Lbl'Hash'stderr_K'Hyph'IO -> "#stderr_K-IO" |LblnoKCell -> "noKCell" |Lbl'Hash'EBUSY_K'Hyph'IO -> "#EBUSY_K-IO" |Lbl'Hash'getenv -> "#getenv" |LblisTuplespaceCell -> "isTuplespaceCell" |LblisBinds -> "isBinds" |LblnoReactionCell -> "noReactionCell" |LblintersectSet -> "intersectSet" |Lbl_in_keys'LPar'_'RPar'_MAP -> "_in_keys(_)_MAP" |LblinitMsgCell -> "initMsgCell" |Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bind(_)_AUXFUN-SYNTAX" |LblfindChar -> "findChar" |Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX -> "[_]_GRHO-SYNTAX" |LblSet'Coln'in -> "Set:in" |LblisK -> "isK" |LblisWhoCellOpt -> "isWhoCellOpt" |Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ -> "#freezernot__GRHO-SYNTAX0_" |LblisReceivesCell -> "isReceivesCell" |LblString2Int -> "String2Int" |Lbl'_LT_'where'_GT_' -> "<where>" |LblinitWhereCell -> "initWhereCell" |LblinitThreadCell -> "initThreadCell" |LblisSingleRec -> "isSingleRec" |LblisThreadsCell -> "isThreadsCell" |LblisTupleCellOpt -> 
"isTupleCellOpt" |LblisEval -> "isEval" |LblisWhatCell -> "isWhatCell" |Lbl'Hash'ENETDOWN_K'Hyph'IO -> "#ENETDOWN_K-IO" |LblisListenCellFragment -> "isListenCellFragment" |Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' -> "_[_<-undef]" |Lbl'Hash'Bottom -> "#Bottom" |Lbl_'EqlsEqls'Int_ -> "_==Int_" |Lbl_andThenBool__BOOL -> "_andThenBool__BOOL" |LblisPbinds -> "isPbinds" |Lbl'Hash'parseInModule -> "#parseInModule" |LblNil_GRHO'Hyph'SYNTAX -> "Nil_GRHO-SYNTAX" |LblisAExp -> "isAExp" |Lbl'Hash'system -> "#system" |Lbl'_LT_'mlid'_GT_' -> "<mlid>" |LblinitRidCell -> "initRidCell" |LblisString -> "isString" |Lbl_'Perc'Int__INT -> "_%Int__INT" |Lbl_'_GT__GT_'Int__INT -> "_>>Int__INT" |Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX -> "_:_,__GRHO-SYNTAX" |LblnoWhoCell -> "noWhoCell" |LblisList -> "isList" |Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO -> "#EPROTONOSUPPORT_K-IO" |LblisTuplespaceCellFragment -> "isTuplespaceCellFragment" |LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "replaceAll(_,_,_)_STRING" |LblisBindList -> "isBindList" |LblnoChanCell -> "noChanCell" |Lbl'Hash'EDESTADDRREQ_K'Hyph'IO -> "#EDESTADDRREQ_K-IO" |Lbl'Hash'EADDRINUSE_K'Hyph'IO -> "#EADDRINUSE_K-IO" |LblnoRnumCell -> "noRnumCell" |Lbl_'Xor_'Int__INT -> "_^Int__INT" |LblfindString -> "findString" |Lbl'_LT_'k'_GT_' -> "<k>" |Lbl'_LT_'reaction'_GT_Hyph'fragment -> "<reaction>-fragment" |LblabsInt -> "absInt" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ -> "#freezer_+__GRHO-SYNTAX1_" |Lbl'Hash'EHOSTDOWN_K'Hyph'IO -> "#EHOSTDOWN_K-IO" |Lbl_'_GT_'String__STRING -> "_>String__STRING" |LblisSendsCellFragment -> "isSendsCellFragment" |LblinitLengthCell -> "initLengthCell" |Lbl_'EqlsEqls'String__STRING -> "_==String__STRING" |LblisRnumCellOpt -> "isRnumCellOpt" |LblisSend -> "isSend" |LblisKResult -> "isKResult" |LblinitStypeCell -> "initStypeCell" |LblList'Coln'get -> "List:get" |Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO -> "#lstat(_)_K-IO" |LblSendCellBagItem -> "SendCellBagItem" |Lbltuple -> "tuple" |Lbl'_LT_'id'_GT_' -> "<id>" |LblSetItem -> "SetItem" |Lbl'_LT_'receives'_GT_' -> "<receives>" |LblisRhoKVPairs -> "isRhoKVPairs" |LblunsignedBytes -> "unsignedBytes" |LblisMsidCellOpt -> "isMsidCellOpt" |Lbl'Stop'List -> ".List" |Lbl'Hash'ENOLCK_K'Hyph'IO -> "#ENOLCK_K-IO" |LblisSendsCellOpt -> "isSendsCellOpt" |Lbl'Hash'ECONNABORTED_K'Hyph'IO -> "#ECONNABORTED_K-IO" |LblrandInt -> "randInt" |Lbl'Hash'EXDEV_K'Hyph'IO -> "#EXDEV_K-IO" |Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO -> "#close(_)_K-IO" |Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX -> "_;_;;__GRHO-SYNTAX" |Lblkeys_list'LPar'_'RPar'_MAP -> "keys_list(_)_MAP" |LblfreshId -> "freshId" |LblinitTypeCell -> "initTypeCell" |Lbl_orElseBool__BOOL -> "_orElseBool__BOOL" |LblisSchanCellOpt -> "isSchanCellOpt" |Lbl'Hash'EISDIR_K'Hyph'IO -> "#EISDIR_K-IO" |Lbl'_LT_'cont'_GT_' -> "<cont>" |LblList'Coln'range -> "List:range" |LblinitTupleCell -> "initTupleCell" |LblnoIdCell -> "noIdCell" |LblisKCell -> "isKCell" |Lbl'Hash'unknownIOError -> "#unknownIOError" |Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ -> "#freezer_and__GRHO-SYNTAX1_" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ -> "#freezer_*__GRHO-SYNTAX0_" |Lbl_'_GT_Eqls'Int__INT -> "_>=Int__INT" |LblisSendCellFragment -> "isSendCellFragment" |Lbl'Hash'ENOSYS_K'Hyph'IO -> "#ENOSYS_K-IO" |Lbl_'Pipe'__GRHO'Hyph'SYNTAX -> "_|__GRHO-SYNTAX" |Lbl'Hash'ECONNREFUSED_K'Hyph'IO -> "#ECONNREFUSED_K-IO" |Lbl'_LT_'sends'_GT_Hyph'fragment -> "<sends>-fragment" |Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#lock(_,_)_K-IO" |Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO -> "#EADDRNOTAVAIL_K-IO" 
|LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING -> "countAllOccurrences(_,_)_STRING" |Lbl_'_GT_'Int__INT -> "_>Int__INT" |LblfillList -> "fillList" |Lbl'_AT_'__GRHO'Hyph'SYNTAX -> "@__GRHO-SYNTAX" |LblinitForgCell -> "initForgCell" |LblbitRangeInt -> "bitRangeInt" |Lbl_'_LT_'String__STRING -> "_<String__STRING" |Lbl'Hash'ThreadLocal -> "#ThreadLocal" |Lbl_xorBool__BOOL -> "_xorBool__BOOL" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ -> "#freezer_+__GRHO-SYNTAX0_" |LblinitReactCell -> "initReactCell" |Lbl'Stop'RecCellBag -> ".RecCellBag" |Lbl'_LT_'type'_GT_' -> "<type>" |Lbl'_LT_'listens'_GT_Hyph'fragment -> "<listens>-fragment" |Lbl_'Plus'__GRHO'Hyph'SYNTAX -> "_+__GRHO-SYNTAX" |Lbl'_LT_'lid'_GT_' -> "<lid>" |Lbl_ListenCellBag_ -> "_ListenCellBag_" |Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO -> "#open(_)_K-IO" |Lbl_'LSqB'_'RSqB'_SUBSTITUTION -> "_[_]_SUBSTITUTION" |LblnoMlidCell -> "noMlidCell" |Lbl_or__GRHO'Hyph'SYNTAX -> "_or__GRHO-SYNTAX" |Lbl'Hash'ETOOMANYREFS_K'Hyph'IO -> "#ETOOMANYREFS_K-IO" |Lbl'_LT_'threads'_GT_Hyph'fragment -> "<threads>-fragment" |LblinitListensCell -> "initListensCell" |Lbl'Hash'ENOSPC_K'Hyph'IO -> "#ENOSPC_K-IO" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ -> "#freezer_or__GRHO-SYNTAX0_" |LblisChanCell -> "isChanCell" |LblisRnumCell -> "isRnumCell" |Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chanmany(_:_)_AUXFUN-SYNTAX" |Lbl'Hash'logToFile -> "#logToFile" |Lbl'_LT_'rec'_GT_' -> "<rec>" |Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#read(_,_)_K-IO" |LblnoLidCell -> "noLidCell" |LblisNameVar -> "isNameVar" |Lbl'_LT_'schan'_GT_' -> "<schan>" |LblbigEndianBytes -> "bigEndianBytes" |Lbl'_LT_'match'_GT_' -> "<match>" |LblId2String -> "Id2String" |LblinitListenCell -> "initListenCell" |Lbl'_LT_'num'_GT_' -> "<num>" |LblisContCellOpt -> "isContCellOpt" |LblisLidCellOpt -> "isLidCellOpt" |LblnoSidCell -> "noSidCell" |Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle{_}_GRHO-SYNTAX" |LblMap'Coln'choice -> "Map:choice" |Lbl_Set_ -> "_Set_" |Lbl'Hash'EEXIST_K'Hyph'IO -> "#EEXIST_K-IO" |Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO -> "#getc(_)_K-IO" |LblisRidCell -> "isRidCell" |Lbl'_LT_'state'_GT_' -> "<state>" |LblisListenCell -> "isListenCell" |LblisBool -> "isBool" |Lbl'Tild'Int__INT -> "~Int__INT" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ -> "#freezer_*__GRHO-SYNTAX1_" |LblordChar -> "ordChar" |LblinitIdCell -> "initIdCell" |Lbl_modInt__INT -> "_modInt__INT" |LblrfindChar -> "rfindChar" |LblisRbinds -> "isRbinds" |LblisMlidCellOpt -> "isMlidCellOpt" |Lbl'Hash'EAGAIN_K'Hyph'IO -> "#EAGAIN_K-IO" |Lbl'Stop'ListenCellBag -> ".ListenCellBag" |LblnoMsidCell -> "noMsidCell" |LblinitSchanCell -> "initSchanCell" |LbldirectionalityChar -> "directionalityChar" |LblisIdCell -> "isIdCell" |Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO -> "#opendir(_)_K-IO" |LblinitKCell -> "initKCell" |LblRecCellBagItem -> "RecCellBagItem" |Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "for(_){_}_GRHO-SYNTAX" |LblisBExp -> "isBExp" |Lbl'Stop'Set -> ".Set" |LblisChanLen -> "isChanLen" |LblisStateCell -> "isStateCell" |Lbl'Hash'EACCES_K'Hyph'IO -> "#EACCES_K-IO" |Lbl'Hash'ELOOP_K'Hyph'IO -> "#ELOOP_K-IO" |Lbl'Hash'EDOM_K'Hyph'IO -> "#EDOM_K-IO" |LblisSidCell -> "isSidCell" |LblremoveAll -> "removeAll" |LblnoRidCell -> "noRidCell" |Lbl'_LT_'threads'_GT_' -> "<threads>" |Lbl_andBool_ -> "_andBool_" |Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "_#ine(_)_AUXFUN-SYNTAX" |LblisThreadsCellFragment -> "isThreadsCellFragment" |LblisLengthCellOpt -> "isLengthCellOpt" |LblisRbindList -> 
"isRbindList" |Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO -> "#EPFNOSUPPORT_K-IO" |LblisConsumableSend -> "isConsumableSend" |LbllengthString -> "lengthString" |Lbl_'Hyph'__GRHO'Hyph'SYNTAX -> "_-__GRHO-SYNTAX" |Lbl'_LT_'listen'_GT_Hyph'fragment -> "<listen>-fragment" |LblisReceive -> "isReceive" |Lbl'Hash'ERANGE_K'Hyph'IO -> "#ERANGE_K-IO" |LblinitTCell -> "initTCell" |LblsignedBytes -> "signedBytes" |LblFloatFormat -> "FloatFormat" |LblisMsidCell -> "isMsidCell" |Lbl'Hash'ENOTSOCK_K'Hyph'IO -> "#ENOTSOCK_K-IO" |Lbl_'Plus'String__STRING -> "_+String__STRING" |Lbl_RecCellBag_ -> "_RecCellBag_" |Lbl_'Pipe'Int__INT -> "_|Int__INT" |Lbl'Hash'EISCONN_K'Hyph'IO -> "#EISCONN_K-IO" |LblisKVariable -> "isKVariable" |Lbl_dividesInt__INT -> "_dividesInt__INT" |Lbl'_LT_'rec'_GT_Hyph'fragment -> "<rec>-fragment" |Lbl'_LT_'tuplespace'_GT_Hyph'fragment -> "<tuplespace>-fragment" |Lbl'_LT_'T'_GT_Hyph'fragment -> "<T>-fragment" |Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX -> "_<=__GRHO-SYNTAX" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ -> "#freezer_or__GRHO-SYNTAX1_" |LblisWhatCellOpt -> "isWhatCellOpt" |LblSet'Coln'choice -> "Set:choice" |LblisMatchCell -> "isMatchCell" |LblisListensCellFragment -> "isListensCellFragment" |Lbl'Hash'buffer -> "#buffer" |Lbl_'Star'__GRHO'Hyph'SYNTAX -> "_*__GRHO-SYNTAX" |LblinitNumCell -> "initNumCell" |LblfreshInt -> "freshInt" |Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#write(_,_)_K-IO" |Lbl'Hash'ETIMEDOUT_K'Hyph'IO -> "#ETIMEDOUT_K-IO" |LblinitSidCell -> "initSidCell" |LblisIdCellOpt -> "isIdCellOpt" |Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX -> "(_;_)_GRHO-SYNTAX" |Lbl'_LT_'sends'_GT_' -> "<sends>" |LblisSchanCell -> "isSchanCell" |Lbl_xorInt__INT -> "_xorInt__INT" |Lbl'Hash'EINPROGRESS_K'Hyph'IO -> "#EINPROGRESS_K-IO" |LblinitVarsCell -> "initVarsCell" |LblinitStateCell -> "initStateCell" |LblisNumCellOpt -> "isNumCellOpt" |LblinitMatchCell -> "initMatchCell" |LblisMlidCell -> "isMlidCell" |Lbl'Hash'ENOPROTOOPT_K'Hyph'IO -> "#ENOPROTOOPT_K-IO" |LbllittleEndianBytes -> "littleEndianBytes" |Lbl'Hash'EPERM_K'Hyph'IO -> "#EPERM_K-IO" |LblnoWhatCell -> "noWhatCell" |LblinitWhatCell -> "initWhatCell" |Lbl'_LT_'send'_GT_' -> "<send>" |Lbl_'_LT__LT_'Int__INT -> "_<<Int__INT" |LblBase2String -> "Base2String" |LblListItem -> "ListItem" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ -> "#freezer_-__GRHO-SYNTAX1_" |LblisStream -> "isStream" |Lbl_'_LT_Eqls'Map__MAP -> "_<=Map__MAP" |LblnewUUID_STRING -> "newUUID_STRING" |LblnoVarsCell -> "noVarsCell" |LblinitThreadsCell -> "initThreadsCell" |Lbl_SendCellBag_ -> "_SendCellBag_" |Lbl'Hash'ESRCH_K'Hyph'IO -> "#ESRCH_K-IO" |Lbl'Hash'EMFILE_K'Hyph'IO -> "#EMFILE_K-IO" |Lblproject'Coln'Proc -> "project:Proc" |LblisReactCellOpt -> "isReactCellOpt" |Lbl'_LT_'receives'_GT_Hyph'fragment -> "<receives>-fragment" |Lbl'_LT_'stype'_GT_' -> "<stype>" |Lbl_inList_ -> "_inList_" |LblisVarsCellOpt -> "isVarsCellOpt" |Lbl'Hash'ENOEXEC_K'Hyph'IO -> "#ENOEXEC_K-IO" |LblminInt'LPar'_'Comm'_'RPar'_INT -> "minInt(_,_)_INT" |LblinitReactionCell -> "initReactionCell" |LblisMap -> "isMap" |LblisTupleCell -> "isTupleCell" |LblisReactCell -> "isReactCell" |LblinitMsidCell -> "initMsidCell" |Lbl'_LT_'rnum'_GT_' -> "<rnum>" |LblisNomoCellOpt -> "isNomoCellOpt" |LblisStypeCellOpt -> "isStypeCellOpt" |LblisTCellFragment -> "isTCellFragment" |Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING -> "replace(_,_,_,_)_STRING" |Lbl_'Slsh'Int__INT -> "_/Int__INT" |Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP -> "_[_<-_]_MAP" |LblisRbind -> "isRbind" |Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO -> 
"#tell(_)_K-IO" |Lbl'_LT_'msid'_GT_' -> "<msid>" |LblinitSendsCell -> "initSendsCell" |Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle-{_}_GRHO-SYNTAX" |LblgetKLabel -> "getKLabel" |Lblnot__GRHO'Hyph'SYNTAX -> "not__GRHO-SYNTAX" |Lbl'Hash'E2BIG_K'Hyph'IO -> "#E2BIG_K-IO" |Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#seekEnd(_,_)_K-IO" |Lbl'_LT_'send'_GT_Hyph'fragment -> "<send>-fragment" let parse_sort(c: string) : sort = match c with |"Rbinds" -> SortRbinds |"AExp" -> SortAExp |"ListensCellOpt" -> SortListensCellOpt |"IdCellOpt" -> SortIdCellOpt |"K" -> SortK |"PbindList" -> SortPbindList |"RidCell" -> SortRidCell |"ListenCell" -> SortListenCell |"UnconsumableSend" -> SortUnconsumableSend |"ThreadCell" -> SortThreadCell |"MlidCell" -> SortMlidCell |"VarsCellOpt" -> SortVarsCellOpt |"NomoCell" -> SortNomoCell |"ThreadCellBag" -> SortThreadCellBag |"ThreadCellFragment" -> SortThreadCellFragment |"KItem" -> SortKItem |"BindList" -> SortBindList |"Names" -> SortNames |"Uri" -> SortUri |"IdCell" -> SortIdCell |"RhoMap" -> SortRhoMap |"LidCell" -> SortLidCell |"ReceivesCellFragment" -> SortReceivesCellFragment |"StateCellOpt" -> SortStateCellOpt |"SendsCellFragment" -> SortSendsCellFragment |"TCellFragment" -> SortTCellFragment |"ChanLen" -> SortChanLen |"ContCell" -> SortContCell |"SchanCell" -> SortSchanCell |"Set" -> SortSet |"ChanCellOpt" -> SortChanCellOpt |"Cell" -> SortCell |"Procs" -> SortProcs |"WhereCellOpt" -> SortWhereCellOpt |"TupleCellOpt" -> SortTupleCellOpt |"Bool" -> SortBool |"KResult" -> SortKResult |"ReactCell" -> SortReactCell |"RhoTuple" -> SortRhoTuple |"Send" -> SortSend |"LengthCell" -> SortLengthCell |"KCell" -> SortKCell |"MsidCellOpt" -> SortMsidCellOpt |"Lbind" -> SortLbind |"Bundle" -> SortBundle |"RnumCell" -> SortRnumCell |"RhoKVPairs" -> SortRhoKVPairs |"SidCell" -> SortSidCell |"TuplespaceCellFragment" -> SortTuplespaceCellFragment |"LidCellOpt" -> SortLidCellOpt |"Name" -> SortName |"BindOcc" -> SortBindOcc |"ReactionCellFragment" -> SortReactionCellFragment |"RhoSet" -> SortRhoSet |"SendCellBag" -> SortSendCellBag |"Par" -> SortPar |"Int" -> SortInt |"NumCellOpt" -> SortNumCellOpt |"RnumCellOpt" -> SortRnumCellOpt |"Collection" -> SortCollection |"Rbind" -> SortRbind |"WhatCellOpt" -> SortWhatCellOpt |"RecCellFragment" -> SortRecCellFragment |"StypeCell" -> SortStypeCell |"MsidCell" -> SortMsidCell |"ReceivesCell" -> SortReceivesCell |"Eval" -> SortEval |"KCellOpt" -> SortKCellOpt |"StypeCellOpt" -> SortStypeCellOpt |"ListenCellBag" -> SortListenCellBag |"SendCell" -> SortSendCell |"ReactCellOpt" -> SortReactCellOpt |"RhoList" -> SortRhoList |"ReactionCell" -> SortReactionCell |"MatchCellOpt" -> SortMatchCellOpt |"ChanCell" -> SortChanCell |"LbindList" -> SortLbindList |"NameVar" -> SortNameVar |"LengthCellOpt" -> SortLengthCellOpt |"ListensCell" -> SortListensCell |"BExp" -> SortBExp |"ConsumableSend" -> SortConsumableSend |"RidCellOpt" -> SortRidCellOpt |"Map" -> SortMap |"RecCellBag" -> SortRecCellBag |"RecCell" -> SortRecCell |"ContCellOpt" -> SortContCellOpt |"ThreadsCellOpt" -> SortThreadsCellOpt |"TuplespaceCell" -> SortTuplespaceCell |"New" -> SortNew |"Stream" -> SortStream |"ThreadsCellFragment" -> SortThreadsCellFragment |"ListensCellFragment" -> SortListensCellFragment |"WhoCell" -> SortWhoCell |"ReceivesCellOpt" -> SortReceivesCellOpt |"Proc" -> SortProc |"NameList" -> SortNameList |"Ground" -> SortGround |"String" -> SortString |"WhoCellOpt" -> SortWhoCellOpt |"Float" -> SortFloat |"ChanList" -> SortChanList |"SendsCell" -> 
SortSendsCell |"ReactionCellOpt" -> SortReactionCellOpt |"Pbind" -> SortPbind |"SingleRec" -> SortSingleRec |"ThreadsCell" -> SortThreadsCell |"MultiRec" -> SortMultiRec |"TypeCell" -> SortTypeCell |"VarsCell" -> SortVarsCell |"TypeCellOpt" -> SortTypeCellOpt |"SendCellFragment" -> SortSendCellFragment |"SchanCellOpt" -> SortSchanCellOpt |"TuplespaceCellOpt" -> SortTuplespaceCellOpt |"Lbinds" -> SortLbinds |"NumCell" -> SortNumCell |"WhereCell" -> SortWhereCell |"ForgCellOpt" -> SortForgCellOpt |"KVariable" -> SortKVariable |"Bytes" -> SortBytes |"WhatCell" -> SortWhatCell |"SendsCellOpt" -> SortSendsCellOpt |"NomoCellOpt" -> SortNomoCellOpt |"TupleCell" -> SortTupleCell |"IOError" -> SortIOError |"StringBuffer" -> SortStringBuffer |"RbindList" -> SortRbindList |"TCell" -> SortTCell |"Bind" -> SortBind |"MlidCellOpt" -> SortMlidCellOpt |"MsgCellOpt" -> SortMsgCellOpt |"MsgCell" -> SortMsgCell |"IdNum" -> SortIdNum |"KConfigVar" -> SortKConfigVar |"SidCellOpt" -> SortSidCellOpt |"Pbinds" -> SortPbinds |"JoinList" -> SortJoinList |"Binds" -> SortBinds |"ForgCell" -> SortForgCell |"ProcList" -> SortProcList |"Id" -> SortId |"List" -> SortList |"StateCell" -> SortStateCell |"Receive" -> SortReceive |"ListenCellFragment" -> SortListenCellFragment |"MatchCell" -> SortMatchCell | _ -> invalid_arg ("parse_sort: " ^ c) let parse_klabel(c: string) : klabel = match c with |"#argv" -> Lbl'Hash'argv |"isPbindList" -> LblisPbindList |"isThreadCell" -> LblisThreadCell |"Map:lookup" -> LblMap'Coln'lookup |"isChanList" -> LblisChanList |"bundle0{_}_GRHO-SYNTAX" -> Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"#seek(_,_)_K-IO" -> Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO |"isWhoCell" -> LblisWhoCell |"isIdNum" -> LblisIdNum |"signExtendBitRangeInt" -> LblsignExtendBitRangeInt |"_==Bool__BOOL" -> Lbl_'EqlsEqls'Bool__BOOL |"isSet" -> LblisSet |"isThreadCellBag" -> LblisThreadCellBag |"#bindocce(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isNew" -> LblisNew |"{[_;_]}_GRHO-SYNTAX" -> Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX |"isStypeCell" -> LblisStypeCell |"_<=Set__SET" -> Lbl_'_LT_Eqls'Set__SET |"_#in(_)_AUXFUN-SYNTAX" -> Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isIOError" -> LblisIOError |"#parse" -> Lbl'Hash'parse |"#EALREADY_K-IO" -> Lbl'Hash'EALREADY_K'Hyph'IO |"isRhoList" -> LblisRhoList |"makeList" -> LblmakeList |"#ESPIPE_K-IO" -> Lbl'Hash'ESPIPE_K'Hyph'IO |"#unlock(_,_)_K-IO" -> Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#ENOENT_K-IO" -> Lbl'Hash'ENOENT_K'Hyph'IO |"isProcs" -> LblisProcs |"_<!__GRHO-SYNTAX" -> Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX |"isTypeCell" -> LblisTypeCell |"#freezer_<=__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ |"isLbinds" -> LblisLbinds |"isLengthCell" -> LblisLengthCell |"noStateCell" -> LblnoStateCell |"isLbind" -> LblisLbind |"#ENOTTY_K-IO" -> Lbl'Hash'ENOTTY_K'Hyph'IO |"isForgCell" -> LblisForgCell |"<forg>" -> Lbl'_LT_'forg'_GT_' |"initChanCell" -> LblinitChanCell |"isProcList" -> LblisProcList |"#freezer_-__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ |"initRnumCell" -> LblinitRnumCell |"isRidCellOpt" -> LblisRidCellOpt |"isReceivesCellFragment" -> LblisReceivesCellFragment |"#ENOTEMPTY_K-IO" -> Lbl'Hash'ENOTEMPTY_K'Hyph'IO |"isSidCellOpt" -> LblisSidCellOpt |"#EMSGSIZE_K-IO" -> Lbl'Hash'EMSGSIZE_K'Hyph'IO |"isKConfigVar" -> LblisKConfigVar |"isRhoMap" -> LblisRhoMap |"#ENETRESET_K-IO" -> Lbl'Hash'ENETRESET_K'Hyph'IO |"#EAFNOSUPPORT_K-IO" -> Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO |"noTupleCell" -> 
LblnoTupleCell |"noSendsCell" -> LblnoSendsCell |"<thread>-fragment" -> Lbl'_LT_'thread'_GT_Hyph'fragment |"isCell" -> LblisCell |"isPbind" -> LblisPbind |"#ENOMEM_K-IO" -> Lbl'Hash'ENOMEM_K'Hyph'IO |"values" -> Lblvalues |"bundle+{_}_GRHO-SYNTAX" -> Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX |"isThreadCellFragment" -> LblisThreadCellFragment |"isStateCellOpt" -> LblisStateCellOpt |"initLidCell" -> LblinitLidCell |"isNameList" -> LblisNameList |"isListensCellOpt" -> LblisListensCellOpt |"isTuplespaceCellOpt" -> LblisTuplespaceCellOpt |"#ENXIO_K-IO" -> Lbl'Hash'ENXIO_K'Hyph'IO |"_<Int__INT" -> Lbl_'_LT_'Int__INT |"noTypeCell" -> LblnoTypeCell |"isSendCell" -> LblisSendCell |"#configuration_K-REFLECTION" -> Lbl'Hash'configuration_K'Hyph'REFLECTION |"isSendsCell" -> LblisSendsCell |"isFloat" -> LblisFloat |"<msg>" -> Lbl'_LT_'msg'_GT_' |"isContCell" -> LblisContCell |"chrChar" -> LblchrChar |"_divInt__INT" -> Lbl_divInt__INT |"#EROFS_K-IO" -> Lbl'Hash'EROFS_K'Hyph'IO |"isWhereCellOpt" -> LblisWhereCellOpt |"_ThreadCellBag_" -> Lbl_ThreadCellBag_ |"isProc" -> LblisProc |"isListensCell" -> LblisListensCell |"_+Int_" -> Lbl_'Plus'Int_ |"isReactionCell" -> LblisReactionCell |"_orBool__BOOL" -> Lbl_orBool__BOOL |"<sid>" -> Lbl'_LT_'sid'_GT_' |"#ENFILE_K-IO" -> Lbl'Hash'ENFILE_K'Hyph'IO |"updateMap" -> LblupdateMap |"isReactionCellOpt" -> LblisReactionCellOpt |"_;__GRHO-SYNTAX" -> Lbl_'SCln'__GRHO'Hyph'SYNTAX |"#freezer_<=__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ |"isNomoCell" -> LblisNomoCell |"noWhereCell" -> LblnoWhereCell |"isJoinList" -> LblisJoinList |"Int2String" -> LblInt2String |"#stype(_)_AUXFUN-SYNTAX" -> Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_=/=K_" -> Lbl_'EqlsSlshEqls'K_ |"isNumCell" -> LblisNumCell |"isRecCell" -> LblisRecCell |"_List_" -> Lbl_List_ |"isMultiRec" -> LblisMultiRec |"toString(_)_AUXFUN-SYNTAX" -> LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"#open(_,_)_K-IO" -> Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#EOPNOTSUPP_K-IO" -> Lbl'Hash'EOPNOTSUPP_K'Hyph'IO |"_|->_" -> Lbl_'PipeHyph_GT_'_ |"isMatchCellOpt" -> LblisMatchCellOpt |"_-Map__MAP" -> Lbl_'Hyph'Map__MAP |"#length(_)_AUXFUN-SYNTAX" -> Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isRhoSet" -> LblisRhoSet |"<chan>" -> Lbl'_LT_'chan'_GT_' |"noForgCell" -> LblnoForgCell |"isReceivesCellOpt" -> LblisReceivesCellOpt |"#EMLINK_K-IO" -> Lbl'Hash'EMLINK_K'Hyph'IO |"isListenCellBag" -> LblisListenCellBag |"#sort" -> Lbl'Hash'sort |"_==K_" -> Lbl_'EqlsEqls'K_ |"isPar" -> LblisPar |"unforgeable(_)_GRHO-SYNTAX" -> Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX |"replaceFirst(_,_,_)_STRING" -> LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING |"noListensCell" -> LblnoListensCell |"noStypeCell" -> LblnoStypeCell |"#EOVERFLOW_K-IO" -> Lbl'Hash'EOVERFLOW_K'Hyph'IO |"#putc(_,_)_K-IO" -> Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO |"isThreadsCellOpt" -> LblisThreadsCellOpt |".Map" -> Lbl'Stop'Map |"isVarsCell" -> LblisVarsCell |"_=/=String__STRING" -> Lbl_'EqlsSlshEqls'String__STRING |"#EIO_K-IO" -> Lbl'Hash'EIO_K'Hyph'IO |"initMlidCell" -> LblinitMlidCell |"uri(_)_GRHO-SYNTAX" -> Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX |"isSendCellBag" -> LblisSendCellBag |"isInt" -> LblisInt |"#EFAULT_K-IO" -> Lbl'Hash'EFAULT_K'Hyph'IO |"#fresh" -> Lbl'Hash'fresh |"_impliesBool__BOOL" -> Lbl_impliesBool__BOOL |"#chanlist(_)_AUXFUN-SYNTAX" -> Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_*Int__INT" -> Lbl_'Star'Int__INT |"<T>" -> Lbl'_LT_'T'_GT_' |"#Thread" -> Lbl'Hash'Thread |"maxInt(_,_)_INT" -> 
LblmaxInt'LPar'_'Comm'_'RPar'_INT |"initReceivesCell" -> LblinitReceivesCell |"#EDEADLK_K-IO" -> Lbl'Hash'EDEADLK_K'Hyph'IO |"_<=String__STRING" -> Lbl_'_LT_Eqls'String__STRING |"ListenCellBagItem" -> LblListenCellBagItem |"isNames" -> LblisNames |"#ENOBUFS_K-IO" -> Lbl'Hash'ENOBUFS_K'Hyph'IO |"_Map_" -> Lbl_Map_ |"_-Int__INT" -> Lbl_'Hyph'Int__INT |"#EOF_K-IO" -> Lbl'Hash'EOF_K'Hyph'IO |"_!!__GRHO-SYNTAX" -> Lbl_'BangBang'__GRHO'Hyph'SYNTAX |"isReactionCellFragment" -> LblisReactionCellFragment |"_and__GRHO-SYNTAX" -> Lbl_and__GRHO'Hyph'SYNTAX |"#lengths(_)_AUXFUN-SYNTAX" -> Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"Float2String" -> LblFloat2String |"#append(_;_)_AUXFUN-SYNTAX" -> Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX |"initWhoCell" -> LblinitWhoCell |"<listen>" -> Lbl'_LT_'listen'_GT_' |"noReceivesCell" -> LblnoReceivesCell |"sizeList" -> LblsizeList |"#EWOULDBLOCK_K-IO" -> Lbl'Hash'EWOULDBLOCK_K'Hyph'IO |"String2Id" -> LblString2Id |"initTuplespaceCell" -> LblinitTuplespaceCell |"<thread>" -> Lbl'_LT_'thread'_GT_' |"<vars>" -> Lbl'_LT_'vars'_GT_' |"_=/=Bool__BOOL" -> Lbl_'EqlsSlshEqls'Bool__BOOL |"<length>" -> Lbl'_LT_'length'_GT_' |"isCollection" -> LblisCollection |"#EFBIG_K-IO" -> Lbl'Hash'EFBIG_K'Hyph'IO |"isTCell" -> LblisTCell |"_[_/_]_SUBSTITUTION" -> Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION |"#EBADF_K-IO" -> Lbl'Hash'EBADF_K'Hyph'IO |"#msg(_)_AUXFUN-SYNTAX" -> Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"noLengthCell" -> LblnoLengthCell |"initNomoCell" -> LblinitNomoCell |"#EPIPE_K-IO" -> Lbl'Hash'EPIPE_K'Hyph'IO |"#bvar(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"noContCell" -> LblnoContCell |"isRhoTuple" -> LblisRhoTuple |"_^%Int___INT" -> Lbl_'Xor_Perc'Int___INT |"isMsgCellOpt" -> LblisMsgCellOpt |"<reaction>" -> Lbl'_LT_'reaction'_GT_' |"rfindString" -> LblrfindString |"isChanCellOpt" -> LblisChanCellOpt |"#ESOCKTNOSUPPORT_K-IO" -> Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO |"noNomoCell" -> LblnoNomoCell |"#EINTR_K-IO" -> Lbl'Hash'EINTR_K'Hyph'IO |"#stat(_)_K-IO" -> Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO |"updateList" -> LblupdateList |"isLidCell" -> LblisLidCell |"isMsgCell" -> LblisMsgCell |".SendCellBag" -> Lbl'Stop'SendCellBag |"initContCell" -> LblinitContCell |"noReactCell" -> LblnoReactCell |"categoryChar" -> LblcategoryChar |"Set:difference" -> LblSet'Coln'difference |"isName" -> LblisName |"#EHOSTUNREACH_K-IO" -> Lbl'Hash'EHOSTUNREACH_K'Hyph'IO |"#ECONNRESET_K-IO" -> Lbl'Hash'ECONNRESET_K'Hyph'IO |"isBundle" -> LblisBundle |"isKCellOpt" -> LblisKCellOpt |"isForgCellOpt" -> LblisForgCellOpt |"#ECHILD_K-IO" -> Lbl'Hash'ECHILD_K'Hyph'IO |"isRecCellFragment" -> LblisRecCellFragment |"isUnconsumableSend" -> LblisUnconsumableSend |"isLbindList" -> LblisLbindList |"String2Float" -> LblString2Float |"Map:lookupOrDefault" -> LblMap'Coln'lookupOrDefault |"#if_#then_#else_#fi_K-EQUAL" -> Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL |"<tuplespace>" -> Lbl'_LT_'tuplespace'_GT_' |"#ENOTCONN_K-IO" -> Lbl'Hash'ENOTCONN_K'Hyph'IO |"_<-__GRHO-SYNTAX" -> Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX |"<what>" -> Lbl'_LT_'what'_GT_' |"#stdout_K-IO" -> Lbl'Hash'stdout_K'Hyph'IO |"_&Int__INT" -> Lbl_'And'Int__INT |"#rtype(_)_AUXFUN-SYNTAX" -> Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"<tuple>" -> Lbl'_LT_'tuple'_GT_' |"#ENAMETOOLONG_K-IO" -> Lbl'Hash'ENAMETOOLONG_K'Hyph'IO |"log2Int" -> Lbllog2Int |"_=/=Int__INT" -> Lbl_'EqlsSlshEqls'Int__INT |"#stdin_K-IO" -> Lbl'Hash'stdin_K'Hyph'IO |"#chanlen(_)_AUXFUN-SYNTAX" -> 
Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_>=String__STRING" -> Lbl_'_GT_Eqls'String__STRING |"noSchanCell" -> LblnoSchanCell |"<react>" -> Lbl'_LT_'react'_GT_' |"isBindOcc" -> LblisBindOcc |"Set(_)_GRHO-SYNTAX" -> LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX |"sizeMap" -> LblsizeMap |"isWhereCell" -> LblisWhereCell |"noMsgCell" -> LblnoMsgCell |"isId" -> LblisId |"substrString" -> LblsubstrString |"noTuplespaceCell" -> LblnoTuplespaceCell |"_,__GRHO-SYNTAX" -> Lbl_'Comm'__GRHO'Hyph'SYNTAX |"_!__GRHO-SYNTAX" -> Lbl_'Bang'__GRHO'Hyph'SYNTAX |"isTypeCellOpt" -> LblisTypeCellOpt |"size" -> Lblsize |"#bindocc(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"noMatchCell" -> LblnoMatchCell |"isBind" -> LblisBind |"#ENETUNREACH_K-IO" -> Lbl'Hash'ENETUNREACH_K'Hyph'IO |"#EPROTOTYPE_K-IO" -> Lbl'Hash'EPROTOTYPE_K'Hyph'IO |"*__GRHO-SYNTAX" -> Lbl'Star'__GRHO'Hyph'SYNTAX |"<who>" -> Lbl'_LT_'who'_GT_' |"_:__GRHO-SYNTAX" -> Lbl_'Coln'__GRHO'Hyph'SYNTAX |"noThreadsCell" -> LblnoThreadsCell |"#systemResult(_,_,_)_K-IO" -> Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO |"<listens>" -> Lbl'_LT_'listens'_GT_' |"srandInt" -> LblsrandInt |"#EINVAL_K-IO" -> Lbl'Hash'EINVAL_K'Hyph'IO |"<rid>" -> Lbl'_LT_'rid'_GT_' |"isKItem" -> LblisKItem |"#ENODEV_K-IO" -> Lbl'Hash'ENODEV_K'Hyph'IO |"#length__AUXFUN-SYNTAX" -> Lbl'Hash'length__AUXFUN'Hyph'SYNTAX |"isRecCellBag" -> LblisRecCellBag |"List:set" -> LblList'Coln'set |"isUri" -> LblisUri |"String2Base" -> LblString2Base |"#noparse_K-IO" -> Lbl'Hash'noparse_K'Hyph'IO |"keys" -> Lblkeys |"initRecCell" -> LblinitRecCell |"#ESHUTDOWN_K-IO" -> Lbl'Hash'ESHUTDOWN_K'Hyph'IO |"isGround" -> LblisGround |".ThreadCellBag" -> Lbl'Stop'ThreadCellBag |"ThreadCellBagItem" -> LblThreadCellBagItem |"#cont(_)_AUXFUN-SYNTAX" -> Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"#ENOTDIR_K-IO" -> Lbl'Hash'ENOTDIR_K'Hyph'IO |"new_in{_}_GRHO-SYNTAX" -> Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"#chan(_)_AUXFUN-SYNTAX" -> Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_#in__AUXFUN-SYNTAX" -> Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX |"initSendCell" -> LblinitSendCell |"{_}_GRHO-SYNTAX" -> Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"<nomo>" -> Lbl'_LT_'nomo'_GT_' |"#bnum(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_<=Int__INT" -> Lbl_'_LT_Eqls'Int__INT |"notBool_" -> LblnotBool_ |"noNumCell" -> LblnoNumCell |"#stderr_K-IO" -> Lbl'Hash'stderr_K'Hyph'IO |"noKCell" -> LblnoKCell |"#EBUSY_K-IO" -> Lbl'Hash'EBUSY_K'Hyph'IO |"#getenv" -> Lbl'Hash'getenv |"isTuplespaceCell" -> LblisTuplespaceCell |"isBinds" -> LblisBinds |"noReactionCell" -> LblnoReactionCell |"intersectSet" -> LblintersectSet |"_in_keys(_)_MAP" -> Lbl_in_keys'LPar'_'RPar'_MAP |"initMsgCell" -> LblinitMsgCell |"#bind(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"findChar" -> LblfindChar |"[_]_GRHO-SYNTAX" -> Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX |"Set:in" -> LblSet'Coln'in |"isK" -> LblisK |"isWhoCellOpt" -> LblisWhoCellOpt |"#freezernot__GRHO-SYNTAX0_" -> Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ |"isReceivesCell" -> LblisReceivesCell |"String2Int" -> LblString2Int |"<where>" -> Lbl'_LT_'where'_GT_' |"initWhereCell" -> LblinitWhereCell |"initThreadCell" -> LblinitThreadCell |"isSingleRec" -> LblisSingleRec |"isThreadsCell" -> LblisThreadsCell |"isTupleCellOpt" -> LblisTupleCellOpt |"isEval" -> LblisEval |"isWhatCell" -> LblisWhatCell |"#ENETDOWN_K-IO" -> Lbl'Hash'ENETDOWN_K'Hyph'IO |"isListenCellFragment" -> LblisListenCellFragment |"_[_<-undef]" -> 
Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' |"#Bottom" -> Lbl'Hash'Bottom |"_==Int_" -> Lbl_'EqlsEqls'Int_ |"_andThenBool__BOOL" -> Lbl_andThenBool__BOOL |"isPbinds" -> LblisPbinds |"#parseInModule" -> Lbl'Hash'parseInModule |"Nil_GRHO-SYNTAX" -> LblNil_GRHO'Hyph'SYNTAX |"isAExp" -> LblisAExp |"#system" -> Lbl'Hash'system |"<mlid>" -> Lbl'_LT_'mlid'_GT_' |"initRidCell" -> LblinitRidCell |"isString" -> LblisString |"_%Int__INT" -> Lbl_'Perc'Int__INT |"_>>Int__INT" -> Lbl_'_GT__GT_'Int__INT |"_:_,__GRHO-SYNTAX" -> Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX |"noWhoCell" -> LblnoWhoCell |"isList" -> LblisList |"#EPROTONOSUPPORT_K-IO" -> Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO |"isTuplespaceCellFragment" -> LblisTuplespaceCellFragment |"replaceAll(_,_,_)_STRING" -> LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING |"isBindList" -> LblisBindList |"noChanCell" -> LblnoChanCell |"#EDESTADDRREQ_K-IO" -> Lbl'Hash'EDESTADDRREQ_K'Hyph'IO |"#EADDRINUSE_K-IO" -> Lbl'Hash'EADDRINUSE_K'Hyph'IO |"noRnumCell" -> LblnoRnumCell |"_^Int__INT" -> Lbl_'Xor_'Int__INT |"findString" -> LblfindString |"<k>" -> Lbl'_LT_'k'_GT_' |"<reaction>-fragment" -> Lbl'_LT_'reaction'_GT_Hyph'fragment |"absInt" -> LblabsInt |"#freezer_+__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ |"#EHOSTDOWN_K-IO" -> Lbl'Hash'EHOSTDOWN_K'Hyph'IO |"_>String__STRING" -> Lbl_'_GT_'String__STRING |"isSendsCellFragment" -> LblisSendsCellFragment |"initLengthCell" -> LblinitLengthCell |"_==String__STRING" -> Lbl_'EqlsEqls'String__STRING |"isRnumCellOpt" -> LblisRnumCellOpt |"isSend" -> LblisSend |"isKResult" -> LblisKResult |"initStypeCell" -> LblinitStypeCell |"List:get" -> LblList'Coln'get |"#lstat(_)_K-IO" -> Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO |"SendCellBagItem" -> LblSendCellBagItem |"tuple" -> Lbltuple |"<id>" -> Lbl'_LT_'id'_GT_' |"SetItem" -> LblSetItem |"<receives>" -> Lbl'_LT_'receives'_GT_' |"isRhoKVPairs" -> LblisRhoKVPairs |"unsignedBytes" -> LblunsignedBytes |"isMsidCellOpt" -> LblisMsidCellOpt |".List" -> Lbl'Stop'List |"#ENOLCK_K-IO" -> Lbl'Hash'ENOLCK_K'Hyph'IO |"isSendsCellOpt" -> LblisSendsCellOpt |"#ECONNABORTED_K-IO" -> Lbl'Hash'ECONNABORTED_K'Hyph'IO |"randInt" -> LblrandInt |"#EXDEV_K-IO" -> Lbl'Hash'EXDEV_K'Hyph'IO |"#close(_)_K-IO" -> Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO |"_;_;;__GRHO-SYNTAX" -> Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX |"keys_list(_)_MAP" -> Lblkeys_list'LPar'_'RPar'_MAP |"freshId" -> LblfreshId |"initTypeCell" -> LblinitTypeCell |"_orElseBool__BOOL" -> Lbl_orElseBool__BOOL |"isSchanCellOpt" -> LblisSchanCellOpt |"#EISDIR_K-IO" -> Lbl'Hash'EISDIR_K'Hyph'IO |"<cont>" -> Lbl'_LT_'cont'_GT_' |"List:range" -> LblList'Coln'range |"initTupleCell" -> LblinitTupleCell |"noIdCell" -> LblnoIdCell |"isKCell" -> LblisKCell |"#unknownIOError" -> Lbl'Hash'unknownIOError |"#freezer_and__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ |"#freezer_*__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ |"_>=Int__INT" -> Lbl_'_GT_Eqls'Int__INT |"isSendCellFragment" -> LblisSendCellFragment |"#ENOSYS_K-IO" -> Lbl'Hash'ENOSYS_K'Hyph'IO |"_|__GRHO-SYNTAX" -> Lbl_'Pipe'__GRHO'Hyph'SYNTAX |"#ECONNREFUSED_K-IO" -> Lbl'Hash'ECONNREFUSED_K'Hyph'IO |"<sends>-fragment" -> Lbl'_LT_'sends'_GT_Hyph'fragment |"#lock(_,_)_K-IO" -> Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#EADDRNOTAVAIL_K-IO" -> Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO |"countAllOccurrences(_,_)_STRING" -> LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING |"_>Int__INT" -> Lbl_'_GT_'Int__INT |"fillList" -> LblfillList |"@__GRHO-SYNTAX" -> Lbl'_AT_'__GRHO'Hyph'SYNTAX 
|"initForgCell" -> LblinitForgCell |"bitRangeInt" -> LblbitRangeInt |"_<String__STRING" -> Lbl_'_LT_'String__STRING |"#ThreadLocal" -> Lbl'Hash'ThreadLocal |"_xorBool__BOOL" -> Lbl_xorBool__BOOL |"#freezer_+__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ |"initReactCell" -> LblinitReactCell |".RecCellBag" -> Lbl'Stop'RecCellBag |"<type>" -> Lbl'_LT_'type'_GT_' |"<listens>-fragment" -> Lbl'_LT_'listens'_GT_Hyph'fragment |"_+__GRHO-SYNTAX" -> Lbl_'Plus'__GRHO'Hyph'SYNTAX |"<lid>" -> Lbl'_LT_'lid'_GT_' |"_ListenCellBag_" -> Lbl_ListenCellBag_ |"#open(_)_K-IO" -> Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO |"_[_]_SUBSTITUTION" -> Lbl_'LSqB'_'RSqB'_SUBSTITUTION |"noMlidCell" -> LblnoMlidCell |"_or__GRHO-SYNTAX" -> Lbl_or__GRHO'Hyph'SYNTAX |"#ETOOMANYREFS_K-IO" -> Lbl'Hash'ETOOMANYREFS_K'Hyph'IO |"<threads>-fragment" -> Lbl'_LT_'threads'_GT_Hyph'fragment |"initListensCell" -> LblinitListensCell |"#ENOSPC_K-IO" -> Lbl'Hash'ENOSPC_K'Hyph'IO |"#freezer_or__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ |"isChanCell" -> LblisChanCell |"isRnumCell" -> LblisRnumCell |"#chanmany(_:_)_AUXFUN-SYNTAX" -> Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX |"#logToFile" -> Lbl'Hash'logToFile |"<rec>" -> Lbl'_LT_'rec'_GT_' |"#read(_,_)_K-IO" -> Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO |"noLidCell" -> LblnoLidCell |"isNameVar" -> LblisNameVar |"<schan>" -> Lbl'_LT_'schan'_GT_' |"bigEndianBytes" -> LblbigEndianBytes |"<match>" -> Lbl'_LT_'match'_GT_' |"Id2String" -> LblId2String |"initListenCell" -> LblinitListenCell |"<num>" -> Lbl'_LT_'num'_GT_' |"isContCellOpt" -> LblisContCellOpt |"isLidCellOpt" -> LblisLidCellOpt |"noSidCell" -> LblnoSidCell |"bundle{_}_GRHO-SYNTAX" -> Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"Map:choice" -> LblMap'Coln'choice |"_Set_" -> Lbl_Set_ |"#EEXIST_K-IO" -> Lbl'Hash'EEXIST_K'Hyph'IO |"#getc(_)_K-IO" -> Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO |"isRidCell" -> LblisRidCell |"<state>" -> Lbl'_LT_'state'_GT_' |"isListenCell" -> LblisListenCell |"isBool" -> LblisBool |"~Int__INT" -> Lbl'Tild'Int__INT |"#freezer_*__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ |"ordChar" -> LblordChar |"initIdCell" -> LblinitIdCell |"_modInt__INT" -> Lbl_modInt__INT |"rfindChar" -> LblrfindChar |"isRbinds" -> LblisRbinds |"isMlidCellOpt" -> LblisMlidCellOpt |"#EAGAIN_K-IO" -> Lbl'Hash'EAGAIN_K'Hyph'IO |".ListenCellBag" -> Lbl'Stop'ListenCellBag |"noMsidCell" -> LblnoMsidCell |"initSchanCell" -> LblinitSchanCell |"directionalityChar" -> LbldirectionalityChar |"isIdCell" -> LblisIdCell |"#opendir(_)_K-IO" -> Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO |"initKCell" -> LblinitKCell |"RecCellBagItem" -> LblRecCellBagItem |"for(_){_}_GRHO-SYNTAX" -> Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX |"isBExp" -> LblisBExp |".Set" -> Lbl'Stop'Set |"isChanLen" -> LblisChanLen |"isStateCell" -> LblisStateCell |"#EACCES_K-IO" -> Lbl'Hash'EACCES_K'Hyph'IO |"#ELOOP_K-IO" -> Lbl'Hash'ELOOP_K'Hyph'IO |"#EDOM_K-IO" -> Lbl'Hash'EDOM_K'Hyph'IO |"isSidCell" -> LblisSidCell |"removeAll" -> LblremoveAll |"noRidCell" -> LblnoRidCell |"<threads>" -> Lbl'_LT_'threads'_GT_' |"_andBool_" -> Lbl_andBool_ |"_#ine(_)_AUXFUN-SYNTAX" -> Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isThreadsCellFragment" -> LblisThreadsCellFragment |"isLengthCellOpt" -> LblisLengthCellOpt |"isRbindList" -> LblisRbindList |"#EPFNOSUPPORT_K-IO" -> Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO |"isConsumableSend" -> LblisConsumableSend |"lengthString" -> LbllengthString |"_-__GRHO-SYNTAX" -> Lbl_'Hyph'__GRHO'Hyph'SYNTAX 
|"<listen>-fragment" -> Lbl'_LT_'listen'_GT_Hyph'fragment |"isReceive" -> LblisReceive |"#ERANGE_K-IO" -> Lbl'Hash'ERANGE_K'Hyph'IO |"initTCell" -> LblinitTCell |"signedBytes" -> LblsignedBytes |"FloatFormat" -> LblFloatFormat |"isMsidCell" -> LblisMsidCell |"#ENOTSOCK_K-IO" -> Lbl'Hash'ENOTSOCK_K'Hyph'IO |"_+String__STRING" -> Lbl_'Plus'String__STRING |"_RecCellBag_" -> Lbl_RecCellBag_ |"_|Int__INT" -> Lbl_'Pipe'Int__INT |"#EISCONN_K-IO" -> Lbl'Hash'EISCONN_K'Hyph'IO |"isKVariable" -> LblisKVariable |"_dividesInt__INT" -> Lbl_dividesInt__INT |"<rec>-fragment" -> Lbl'_LT_'rec'_GT_Hyph'fragment |"<tuplespace>-fragment" -> Lbl'_LT_'tuplespace'_GT_Hyph'fragment |"<T>-fragment" -> Lbl'_LT_'T'_GT_Hyph'fragment |"_<=__GRHO-SYNTAX" -> Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX |"#freezer_or__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ |"isWhatCellOpt" -> LblisWhatCellOpt |"Set:choice" -> LblSet'Coln'choice |"isMatchCell" -> LblisMatchCell |"isListensCellFragment" -> LblisListensCellFragment |"#buffer" -> Lbl'Hash'buffer |"_*__GRHO-SYNTAX" -> Lbl_'Star'__GRHO'Hyph'SYNTAX |"initNumCell" -> LblinitNumCell |"freshInt" -> LblfreshInt |"#write(_,_)_K-IO" -> Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#ETIMEDOUT_K-IO" -> Lbl'Hash'ETIMEDOUT_K'Hyph'IO |"initSidCell" -> LblinitSidCell |"isIdCellOpt" -> LblisIdCellOpt |"(_;_)_GRHO-SYNTAX" -> Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX |"<sends>" -> Lbl'_LT_'sends'_GT_' |"isSchanCell" -> LblisSchanCell |"_xorInt__INT" -> Lbl_xorInt__INT |"#EINPROGRESS_K-IO" -> Lbl'Hash'EINPROGRESS_K'Hyph'IO |"initVarsCell" -> LblinitVarsCell |"initStateCell" -> LblinitStateCell |"isNumCellOpt" -> LblisNumCellOpt |"initMatchCell" -> LblinitMatchCell |"isMlidCell" -> LblisMlidCell |"#ENOPROTOOPT_K-IO" -> Lbl'Hash'ENOPROTOOPT_K'Hyph'IO |"littleEndianBytes" -> LbllittleEndianBytes |"#EPERM_K-IO" -> Lbl'Hash'EPERM_K'Hyph'IO |"noWhatCell" -> LblnoWhatCell |"initWhatCell" -> LblinitWhatCell |"<send>" -> Lbl'_LT_'send'_GT_' |"_<<Int__INT" -> Lbl_'_LT__LT_'Int__INT |"Base2String" -> LblBase2String |"ListItem" -> LblListItem |"#freezer_-__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ |"isStream" -> LblisStream |"_<=Map__MAP" -> Lbl_'_LT_Eqls'Map__MAP |"newUUID_STRING" -> LblnewUUID_STRING |"noVarsCell" -> LblnoVarsCell |"initThreadsCell" -> LblinitThreadsCell |"_SendCellBag_" -> Lbl_SendCellBag_ |"#ESRCH_K-IO" -> Lbl'Hash'ESRCH_K'Hyph'IO |"#EMFILE_K-IO" -> Lbl'Hash'EMFILE_K'Hyph'IO |"project:Proc" -> Lblproject'Coln'Proc |"isReactCellOpt" -> LblisReactCellOpt |"<receives>-fragment" -> Lbl'_LT_'receives'_GT_Hyph'fragment |"<stype>" -> Lbl'_LT_'stype'_GT_' |"_inList_" -> Lbl_inList_ |"isVarsCellOpt" -> LblisVarsCellOpt |"#ENOEXEC_K-IO" -> Lbl'Hash'ENOEXEC_K'Hyph'IO |"minInt(_,_)_INT" -> LblminInt'LPar'_'Comm'_'RPar'_INT |"initReactionCell" -> LblinitReactionCell |"isMap" -> LblisMap |"isTupleCell" -> LblisTupleCell |"isReactCell" -> LblisReactCell |"initMsidCell" -> LblinitMsidCell |"<rnum>" -> Lbl'_LT_'rnum'_GT_' |"isNomoCellOpt" -> LblisNomoCellOpt |"isStypeCellOpt" -> LblisStypeCellOpt |"isTCellFragment" -> LblisTCellFragment |"replace(_,_,_,_)_STRING" -> Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING |"_/Int__INT" -> Lbl_'Slsh'Int__INT |"_[_<-_]_MAP" -> Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP |"isRbind" -> LblisRbind |"#tell(_)_K-IO" -> Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO |"<msid>" -> Lbl'_LT_'msid'_GT_' |"initSendsCell" -> LblinitSendsCell |"bundle-{_}_GRHO-SYNTAX" -> Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX |"getKLabel" -> LblgetKLabel |"not__GRHO-SYNTAX" -> 
Lblnot__GRHO'Hyph'SYNTAX |"#E2BIG_K-IO" -> Lbl'Hash'E2BIG_K'Hyph'IO |"#seekEnd(_,_)_K-IO" -> Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO |"<send>-fragment" -> Lbl'_LT_'send'_GT_Hyph'fragment | _ -> invalid_arg ("parse_klabel: " ^ c) let collection_for (c: klabel) : klabel = match c with |LblSetItem -> Lbl_Set_ |Lbl'_LT_'listen'_GT_' -> Lbl_ListenCellBag_ |Lbl'Stop'Set -> Lbl_Set_ |Lbl'_LT_'thread'_GT_' -> Lbl_ThreadCellBag_ |Lbl'Stop'List -> Lbl_List_ |Lbl_List_ -> Lbl_List_ |Lbl_Set_ -> Lbl_Set_ |Lbl_'PipeHyph_GT_'_ -> Lbl_Map_ |Lbl'Stop'RecCellBag -> Lbl_RecCellBag_ |Lbl_ThreadCellBag_ -> Lbl_ThreadCellBag_ |LblListenCellBagItem -> Lbl_ListenCellBag_ |Lbl'_LT_'send'_GT_' -> Lbl_SendCellBag_ |Lbl_ListenCellBag_ -> Lbl_ListenCellBag_ |Lbl_RecCellBag_ -> Lbl_RecCellBag_ |Lbl'Stop'ListenCellBag -> Lbl_ListenCellBag_ |LblListItem -> Lbl_List_ |Lbl_Map_ -> Lbl_Map_ |Lbl'Stop'Map -> Lbl_Map_ |Lbl'Stop'ThreadCellBag -> Lbl_ThreadCellBag_ |LblThreadCellBagItem -> Lbl_ThreadCellBag_ |Lbl'Stop'SendCellBag -> Lbl_SendCellBag_ |Lbl'_LT_'rec'_GT_' -> Lbl_RecCellBag_ |Lbl_SendCellBag_ -> Lbl_SendCellBag_ |LblSendCellBagItem -> Lbl_SendCellBag_ |LblRecCellBagItem -> Lbl_RecCellBag_ | _ -> invalid_arg "collection_for" let unit_for (c: klabel) : klabel = match c with |Lbl_ThreadCellBag_ -> Lbl'Stop'ThreadCellBag |Lbl_Set_ -> Lbl'Stop'Set |Lbl_List_ -> Lbl'Stop'List |Lbl_ListenCellBag_ -> Lbl'Stop'ListenCellBag |Lbl_RecCellBag_ -> Lbl'Stop'RecCellBag |Lbl_SendCellBag_ -> Lbl'Stop'SendCellBag |Lbl_Map_ -> Lbl'Stop'Map | _ -> invalid_arg "unit_for" let el_for (c: klabel) : klabel = match c with |Lbl_ThreadCellBag_ -> LblThreadCellBagItem |Lbl_Set_ -> LblSetItem |Lbl_List_ -> LblListItem |Lbl_ListenCellBag_ -> LblListenCellBagItem |Lbl_RecCellBag_ -> LblRecCellBagItem |Lbl_SendCellBag_ -> LblSendCellBagItem |Lbl_Map_ -> Lbl_'PipeHyph_GT_'_ | _ -> invalid_arg "el_for" let unit_for_array (c: sort) : klabel = match c with | _ -> invalid_arg "unit_for_array" let el_for_array (c: sort) : klabel = match c with | _ -> invalid_arg "el_for_array" module Dynarray : sig type 'a t val make : int -> 'a -> 'a t val length : 'a t -> int val get : 'a t -> int -> 'a val set : 'a t -> int -> 'a -> unit val compare : ('a list -> 'a list -> int) -> 'a t -> 'a t -> int val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a val fold_right : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b val iteri : (int -> 'a -> unit) -> 'a t -> unit end = struct type 'a t = { size: int; mutable arr: 'a array; default: 'a } let make size default = { size=size; arr=Array.make (min size 10) default; default=default} let length arr = arr.size let get arr idx = if idx >= Array.length arr.arr && idx < arr.size then arr.default else Array.get arr.arr idx let calc_size arr at_least = let double = Array.length arr.arr * 2 in let at_most = if double > arr.size then arr.size else double in if at_least > at_most then at_least else at_most let upgrade_size arr size = let old = arr.arr in arr.arr <- Array.make size arr.default; Array.blit old 0 arr.arr 0 (Array.length old) let set arr idx value= if idx >= Array.length arr.arr && idx < arr.size then upgrade_size arr (calc_size arr (idx + 1)); Array.set arr.arr idx value let compare_arr f a b = let smaller,larger = if Array.length a.arr < Array.length b.arr then a,b else b,a in upgrade_size smaller (Array.length larger.arr); f (Array.to_list a.arr) (Array.to_list b.arr) let compare f a b = let v = Pervasives.compare a.size b.size in if v = 0 then compare_arr f a b else v let fold_left f init arr = snd (Array.fold_left 
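(* The surrounding Dynarray module implements a sparse, lazily grown array with a
   fixed logical size and a default element: the backing array starts with at most
   10 slots and is enlarged on demand by upgrade_size, reads past the allocated part
   return the default, and compare, fold_left, fold_right and iteri all respect the
   logical size rather than the physical one.  It backs the Array kitem below. *)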
(fun (i,x) a -> if i > 0 then (i - 1, f x a) else (0,x)) (arr.size,init) arr.arr) let fold_right f arr init = snd (Array.fold_right (fun a (i,x) -> if i > 0 then (i - 1, x) else (0, f a x)) arr.arr (Array.length arr.arr - arr.size, init)) let iteri f arr = Array.iteri (fun i a -> if i < arr.size then f i a else ()) arr.arr end module type S = sig type m type s type t = kitem list and kitem = KToken of sort * string | InjectedKLabel of klabel | Map of sort * klabel * m | List of sort * klabel * t list | Set of sort * klabel * s | Array of sort * t * t Dynarray.t | Int of Z.t | Float of Gmp.FR.t * int * int | String of string | Bytes of bytes | StringBuffer of Buffer.t | Bool of bool | ThreadLocal | Thread of t * t * t * t | Bottom | KApply0 of klabel | KApply1 of klabel * t | KApply2 of klabel * t * t | KApply3 of klabel * t * t * t | KApply4 of klabel * t * t * t * t | KApply5 of klabel * t * t * t * t * t | KApply6 of klabel * t * t * t * t * t * t | KApply7 of klabel * t * t * t * t * t * t * t | KApply8 of klabel * t * t * t * t * t * t * t * t val compare : t -> t -> int val compare_kitem : kitem -> kitem -> int val compare_klist : t list -> t list -> int val equal_k : t -> t -> bool val hash_k : t -> int val hash_k_param : int -> t -> int end module rec K : (S with type m = K.t Map.Make(K).t and type s = Set.Make(K).t) = struct module KMap = Map.Make(K) module KSet = Set.Make(K) type m = K.t KMap.t and s = KSet.t and t = kitem list and kitem = KToken of sort * string | InjectedKLabel of klabel | Map of sort * klabel * m | List of sort * klabel * t list | Set of sort * klabel * s | Array of sort * t * t Dynarray.t | Int of Z.t | Float of Gmp.FR.t * int * int | String of string | Bytes of bytes | StringBuffer of Buffer.t | Bool of bool | ThreadLocal | Thread of t * t * t * t | Bottom | KApply0 of klabel | KApply1 of klabel * t | KApply2 of klabel * t * t | KApply3 of klabel * t * t * t | KApply4 of klabel * t * t * t * t | KApply5 of klabel * t * t * t * t * t | KApply6 of klabel * t * t * t * t * t * t | KApply7 of klabel * t * t * t * t * t * t * t | KApply8 of klabel * t * t * t * t * t * t * t * t let rec hash_k c = match c with | [] -> 1 | hd :: tl -> (hash_k tl) * 31 + hash_kitem hd and hash_kitem c = match c with | KApply0(lbl) -> (Hashtbl.hash lbl) | KApply1(lbl,k0) -> ((Hashtbl.hash lbl)) * 37 + hash_k k0 | KApply2(lbl,k0,k1) -> (((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1 | KApply3(lbl,k0,k1,k2) -> ((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2 | KApply4(lbl,k0,k1,k2,k3) -> (((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3 | KApply5(lbl,k0,k1,k2,k3,k4) -> ((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4 | KApply6(lbl,k0,k1,k2,k3,k4,k5) -> (((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4) * 37 + hash_k k5 | KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) -> ((((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4) * 37 + hash_k k5) * 37 + hash_k k6 | KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) -> (((((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4) * 37 + hash_k k5) * 37 + hash_k k6) * 37 + hash_k k7 | KToken(s, st) -> Hashtbl.hash s * 41 + Hashtbl.hash st | InjectedKLabel kl -> Hashtbl.hash kl | Map(_,k,m) -> Hashtbl.hash k * 43 + KMap.fold (fun k v accum -> 
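(* Module K (satisfying the signature S above) is the runtime representation of terms:
   a K sequence is a kitem list, and applications are arity-specialised as KApply0 ..
   KApply8 so that argument lists need not be allocated.  hash_k and hash_kitem compute
   a structural hash; hash_k_param further below is a bounded variant that stops after
   visiting roughly a caller-supplied number of subterms. *)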
accum + (hash_k k lxor hash_k v)) m 0 | List(_,k,l) -> Hashtbl.hash k * 47 + hash_klist l | Set(_,k,s) -> Hashtbl.hash k * 53 + KSet.fold (fun k accum -> accum + hash_k k) s 0 | Array(k,_,l) -> Hashtbl.hash k * 61 + (Dynarray.length l) | Int i -> Z.hash i | Float (f,_,_) -> Hashtbl.hash (Gmp.FR.to_float f) | String s -> Hashtbl.hash s | StringBuffer s -> Hashtbl.hash (Buffer.contents s) | Bytes b -> Hashtbl.hash b | Bool b -> Hashtbl.hash b | Bottom -> 1 | ThreadLocal -> 2 | Thread(k1,k2,k3,k4) -> ((((Hashtbl.hash Lbl'Hash'Thread) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 36 + hash_k k4 and hash_klist c = match c with | [] -> 1 | hd :: tl -> (hash_klist tl) * 59 + hash_k hd let rec hash_k_param_fld ((l,max) as lmax) = function | [] -> lmax | h::t -> if max < 0 then lmax else hash_k_param_fld (h::l,max-1) t let hash_k_param_add_kitem k max = hash_k_param_fld max k let rec qfld l1 h max = match l1 with | [] -> let (l2,max) = max in if l2 = [] then h else qfld l2 h ([],max) | ki :: kq -> match ki with | KApply0(lbl) -> qfld kq (31*h + Hashtbl.hash lbl) ( max) | KApply1(lbl,k0) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( max)) | KApply2(lbl,k0,k1) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( max))) | KApply3(lbl,k0,k1,k2) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( max)))) | KApply4(lbl,k0,k1,k2,k3) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( max))))) | KApply5(lbl,k0,k1,k2,k3,k4) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( max)))))) | KApply6(lbl,k0,k1,k2,k3,k4,k5) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( hash_k_param_add_kitem k5 ( max))))))) | KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( hash_k_param_add_kitem k5 ( hash_k_param_add_kitem k6 ( max)))))))) | KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( hash_k_param_add_kitem k5 ( hash_k_param_add_kitem k6 ( hash_k_param_add_kitem k7 ( max))))))))) | KToken(s, st) -> qfld kq (31*h + Hashtbl.hash s * 41 + Hashtbl.hash st) ( max) | InjectedKLabel lbl -> qfld kq (31*h + Hashtbl.hash lbl) ( max) | Map(_,lbl,m) -> qfld kq (31*h + 43 * Hashtbl.hash lbl) ( KMap.fold (fun k v max -> hash_k_param_add_kitem v (hash_k_param_add_kitem k max)) m max) | List(_,lbl,l) -> qfld kq (31*h + 47 * Hashtbl.hash lbl) ( List.fold_left (fun max k -> hash_k_param_add_kitem k max) max l) | Set(_,lbl,s) -> qfld kq (31*h + 53 * Hashtbl.hash lbl) ( KSet.fold (fun k max -> hash_k_param_add_kitem k max) s max) | Array(lbl,_,l) -> qfld kq (31*h + 61 * Hashtbl.hash lbl + Dynarray.length l) ( max) | Int i -> qfld kq (31*h + Z.hash i) ( max) | Float (f,_,_) -> qfld kq (31*h + Hashtbl.hash (Gmp.FR.to_float f)) ( max) | String s -> qfld kq (31*h + Hashtbl.hash s) ( max) | Bytes b -> qfld kq (31*h + Hashtbl.hash 
b) ( max) | StringBuffer s -> qfld kq (31*h + Hashtbl.hash (Buffer.contents s)) ( max) | Bool b -> qfld kq (31*h + Hashtbl.hash b) ( max) | Bottom -> qfld kq (31*h + 1) ( max) | ThreadLocal -> qfld kq (31*h + 2) ( max) | Thread(k1,k2,k3,k4) -> qfld kq (31*h + Hashtbl.hash Lbl'Hash'Thread) (hash_k_param_add_kitem k1 (hash_k_param_add_kitem k2 (hash_k_param_add_kitem k3 (hash_k_param_add_kitem k4 max)))) let hash_k_param max k = qfld [] 0 (hash_k_param_add_kitem k ([],max)) let rec equal_k c1 c2 = if c1 == c2 then true else match (c1, c2) with | [], [] -> true | (hd1 :: tl1), (hd2 :: tl2) -> equal_kitem hd1 hd2 && equal_k tl1 tl2 | _ -> false and equal_kitem c1 c2 = if c1 == c2 then true else match (c1, c2) with | KApply0(lbl1),KApply0(lbl2) -> lbl1 = lbl2 | KApply1(lbl1,k0_1),KApply1(lbl2,k0_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 | KApply2(lbl1,k0_1,k1_1),KApply2(lbl2,k0_2,k1_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 | KApply3(lbl1,k0_1,k1_1,k2_1),KApply3(lbl2,k0_2,k1_2,k2_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 | KApply4(lbl1,k0_1,k1_1,k2_1,k3_1),KApply4(lbl2,k0_2,k1_2,k2_2,k3_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 | KApply5(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1),KApply5(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 | KApply6(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1),KApply6(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 && equal_k k5_1 k5_2 | KApply7(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1),KApply7(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 && equal_k k5_1 k5_2 && equal_k k6_1 k6_2 | KApply8(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1,k7_1),KApply8(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2,k7_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 && equal_k k5_1 k5_2 && equal_k k6_1 k6_2 && equal_k k7_1 k7_2 | (KToken(s1, st1)), (KToken(s2, st2)) -> s1 = s2 && st1 = st2 | (InjectedKLabel kl1), (InjectedKLabel kl2) -> kl1 = kl2 | (Map (_,k1,m1)), (Map (_,k2,m2)) -> k1 = k2 && KMap.cardinal m1 = KMap.cardinal m2 && (KMap.equal) (equal_k) m1 m2 | (List (_,k1,l1)), (List (_,k2,l2)) -> k1 = k2 && equal_klist l1 l2 | (Set (_,k1,s1)), (Set (_,k2,s2)) -> k1 = k2 && KSet.cardinal s1 = KSet.cardinal s2 && (KSet.equal) s1 s2 | (Array (s1,k1,l1)), (Array (s2,k2,l2)) -> s1 = s2 && equal_k k1 k2 && l1 == l2 | (Int i1), (Int i2) -> Z.equal i1 i2 | (Float (f1,e1,p1)), (Float (f2,e2,p2)) -> e1 = e2 && p1 = p2 && Gmp.FR.compare f1 f2 = 0 | (String s1), (String s2) -> s1 = s2 | (Bytes b1), (Bytes b2) -> b1 == b2 | (StringBuffer s1), (StringBuffer s2) -> s1 == s2 | (Bool b1), (Bool b2) -> b1 = b2 | Bottom, Bottom -> true | _ -> false and equal_klist c1 c2 = if c1 == c2 then true else match (c1, c2) with | [], [] -> true | (hd1 :: tl1), (hd2 :: tl2) -> equal_k hd1 hd2 && equal_klist tl1 tl2 | _ -> false let rec compare c1 c2 = if c1 == c2 then 0 else match (c1, c2) with | [], [] -> 0 | (hd1 :: tl1), (hd2 :: tl2) -> let v = compare_kitem hd1 hd2 in if v = 0 then compare tl1 tl2 else v | (_ :: _), _ -> -1 | _ -> 1 and compare_kitem c1 c2 = if c1 == c2 then 0 else match (c1, c2) with | KApply0(lbl1),KApply0(lbl2) -> 
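(* equal_k and equal_kitem implement structural equality with physical equality as a
   fast path, while compare, compare_kitem and compare_klist define the total order
   required by Map.Make(K) and Set.Make(K); the long constructor-versus-constructor
   fallthrough at the end fixes an arbitrary order between distinct kitem shapes. *)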
Pervasives.compare lbl1 lbl2 | KApply1(lbl1,k0_1),KApply1(lbl2,k0_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then compare k0_1 k0_2 else v) | KApply2(lbl1,k0_1,k1_1),KApply2(lbl2,k0_2,k1_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then compare k1_1 k1_2 else v) else v) | KApply3(lbl1,k0_1,k1_1,k2_1),KApply3(lbl2,k0_2,k1_2,k2_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then compare k2_1 k2_2 else v) else v) else v) | KApply4(lbl1,k0_1,k1_1,k2_1,k3_1),KApply4(lbl2,k0_2,k1_2,k2_2,k3_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then compare k3_1 k3_2 else v) else v) else v) else v) | KApply5(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1),KApply5(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then compare k4_1 k4_2 else v) else v) else v) else v) else v) | KApply6(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1),KApply6(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then (let v = compare k4_1 k4_2 in if v = 0 then compare k5_1 k5_2 else v) else v) else v) else v) else v) else v) | KApply7(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1),KApply7(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then (let v = compare k4_1 k4_2 in if v = 0 then (let v = compare k5_1 k5_2 in if v = 0 then compare k6_1 k6_2 else v) else v) else v) else v) else v) else v) else v) | KApply8(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1,k7_1),KApply8(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2,k7_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then (let v = compare k4_1 k4_2 in if v = 0 then (let v = compare k5_1 k5_2 in if v = 0 then (let v = compare k6_1 k6_2 in if v = 0 then compare k7_1 k7_2 else v) else v) else v) else v) else v) else v) else v) else v) | (KToken(s1, st1)), (KToken(s2, st2)) -> let v = Pervasives.compare s1 s2 in if v = 0 then Pervasives.compare st1 st2 else v | (InjectedKLabel kl1), (InjectedKLabel kl2) -> Pervasives.compare kl1 kl2 | (Map (_,k1,m1)), (Map (_,k2,m2)) -> let v = Pervasives.compare k1 k2 in if v = 0 then (KMap.compare) compare m1 m2 else v | (List (_,k1,l1)), (List (_,k2,l2)) -> let v = Pervasives.compare k1 k2 in if v = 0 then compare_klist l1 l2 else v | (Set (_,k1,s1)), (Set (_,k2,s2)) -> let v = Pervasives.compare k1 k2 in if v = 0 then (KSet.compare) s1 s2 else v | (Array (s1,k1,l1)), (Array (s2,k2,l2)) -> let v = Pervasives.compare s1 s2 in if v = 0 then let v = compare k1 k2 in if v = 0 then Dynarray.compare compare_klist l1 l2 else v else v | (Int i1), (Int i2) -> Z.compare i1 i2 | (Float 
(f1,e1,p1)), (Float (f2,e2,p2)) -> let v = e2 - e1 in if v = 0 then let v2 = p2 - p1 in if v2 = 0 then Gmp.FR.compare f1 f2 else v2 else v | (String s1), (String s2) -> Pervasives.compare s1 s2 | (Bytes b1), (Bytes b2) -> Pervasives.compare b1 b2 | (StringBuffer s1), (StringBuffer s2) -> Pervasives.compare (Buffer.contents s1) (Buffer.contents s2) | (Bool b1), (Bool b2) -> if b1 = b2 then 0 else if b1 then -1 else 1 | Bottom, Bottom -> 0 | ThreadLocal, ThreadLocal -> 0 | Thread (k11, k12, k13, k14), Thread (k21, k22, k23, k24) -> let v = compare k11 k21 in if v = 0 then let v = compare k12 k22 in if v = 0 then let v = compare k13 k23 in if v = 0 then compare k14 k24 else v else v else v | KApply0 _, _ -> -1 | _, KApply0 _ -> 1 | KApply1 _, _ -> -1 | _, KApply1 _ -> 1 | KApply2 _, _ -> -1 | _, KApply2 _ -> 1 | KApply3 _, _ -> -1 | _, KApply3 _ -> 1 | KApply4 _, _ -> -1 | _, KApply4 _ -> 1 | KApply5 _, _ -> -1 | _, KApply5 _ -> 1 | KApply6 _, _ -> -1 | _, KApply6 _ -> 1 | KApply7 _, _ -> -1 | _, KApply7 _ -> 1 | KApply8 _, _ -> -1 | _, KApply8 _ -> 1 | KToken(_, _), _ -> -1 | _, KToken(_, _) -> 1 | InjectedKLabel(_), _ -> -1 | _, InjectedKLabel(_) -> 1 | Map(_), _ -> -1 | _, Map(_) -> 1 | List(_), _ -> -1 | _, List(_) -> 1 | Set(_), _ -> -1 | _, Set(_) -> 1 | Array(_), _ -> -1 | _, Array(_) -> 1 | Int(_), _ -> -1 | _, Int(_) -> 1 | Float(_), _ -> -1 | _, Float(_) -> 1 | String(_), _ -> -1 | _, String(_) -> 1 | Bytes(_), _ -> -1 | _, Bytes(_) -> 1 | StringBuffer(_), _ -> -1 | _, StringBuffer(_) -> 1 | Bool(_), _ -> -1 | _, Bool(_) -> 1 | Bottom, _ -> -1 | _, Bottom -> 1 | ThreadLocal, _ -> -1 | _, ThreadLocal -> 1 and compare_klist c1 c2 = match (c1, c2) with | [], [] -> 0 | (hd1 :: tl1), (hd2 :: tl2) -> let v = compare hd1 hd2 in if v = 0 then compare_klist tl1 tl2 else v | (_ :: _), _ -> -1 | _ -> 1 end type normal_kitem = KApply of klabel * K.t list | KItem of K.kitem open K let normalize (k: kitem) : normal_kitem = match k with | KApply0(lbl) -> KApply (lbl, []) | KApply1(lbl,k0) -> KApply (lbl, [k0]) | KApply2(lbl,k0,k1) -> KApply (lbl, [k0; k1]) | KApply3(lbl,k0,k1,k2) -> KApply (lbl, [k0; k1; k2]) | KApply4(lbl,k0,k1,k2,k3) -> KApply (lbl, [k0; k1; k2; k3]) | KApply5(lbl,k0,k1,k2,k3,k4) -> KApply (lbl, [k0; k1; k2; k3; k4]) | KApply6(lbl,k0,k1,k2,k3,k4,k5) -> KApply (lbl, [k0; k1; k2; k3; k4; k5]) | KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) -> KApply (lbl, [k0; k1; k2; k3; k4; k5; k6]) | KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) -> KApply (lbl, [k0; k1; k2; k3; k4; k5; k6; k7]) | v -> KItem v let denormalize (k: normal_kitem) : kitem = match k with | KApply (lbl, []) -> KApply0(lbl) | KApply (lbl, [k0]) -> KApply1(lbl,k0) | KApply (lbl, [k0; k1]) -> KApply2(lbl,k0,k1) | KApply (lbl, [k0; k1; k2]) -> KApply3(lbl,k0,k1,k2) | KApply (lbl, [k0; k1; k2; k3]) -> KApply4(lbl,k0,k1,k2,k3) | KApply (lbl, [k0; k1; k2; k3; k4]) -> KApply5(lbl,k0,k1,k2,k3,k4) | KApply (lbl, [k0; k1; k2; k3; k4; k5]) -> KApply6(lbl,k0,k1,k2,k3,k4,k5) | KApply (lbl, [k0; k1; k2; k3; k4; k5; k6]) -> KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) | KApply (lbl, [k0; k1; k2; k3; k4; k5; k6; k7]) -> KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) | KItem v -> v | KApply (_, _) -> invalid_arg "denormalize" type k = K.t let denormalize0 (c: unit) : k list = match c with () -> [] let normalize0 (c: k list) = match c with [] -> () | _ -> invalid_arg "normalize0" let denormalize1 (c: k) : k list = match c with (k0) -> [k0] let normalize1 (c: k list) = match c with [k0] -> (k0) | _ -> invalid_arg "normalize1" let denormalize2 (c: k * k) : k list = match c with 
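(* normalize and denormalize convert between the arity-specialised KApplyN
   constructors and the generic KApply (label, argument list) view used by generic
   code; the numbered helpers normalize0..normalize8 and denormalize0..denormalize8 do
   the same for argument tuples of fixed arity, raising Invalid_argument on mismatch. *)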
(k0,k1) -> [k0; k1] let normalize2 (c: k list) = match c with [k0; k1] -> (k0,k1) | _ -> invalid_arg "normalize2" let denormalize3 (c: k * k * k) : k list = match c with (k0,k1,k2) -> [k0; k1; k2] let normalize3 (c: k list) = match c with [k0; k1; k2] -> (k0,k1,k2) | _ -> invalid_arg "normalize3" let denormalize4 (c: k * k * k * k) : k list = match c with (k0,k1,k2,k3) -> [k0; k1; k2; k3] let normalize4 (c: k list) = match c with [k0; k1; k2; k3] -> (k0,k1,k2,k3) | _ -> invalid_arg "normalize4" let denormalize5 (c: k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4) -> [k0; k1; k2; k3; k4] let normalize5 (c: k list) = match c with [k0; k1; k2; k3; k4] -> (k0,k1,k2,k3,k4) | _ -> invalid_arg "normalize5" let denormalize6 (c: k * k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4,k5) -> [k0; k1; k2; k3; k4; k5] let normalize6 (c: k list) = match c with [k0; k1; k2; k3; k4; k5] -> (k0,k1,k2,k3,k4,k5) | _ -> invalid_arg "normalize6" let denormalize7 (c: k * k * k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4,k5,k6) -> [k0; k1; k2; k3; k4; k5; k6] let normalize7 (c: k list) = match c with [k0; k1; k2; k3; k4; k5; k6] -> (k0,k1,k2,k3,k4,k5,k6) | _ -> invalid_arg "normalize7" let denormalize8 (c: k * k * k * k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4,k5,k6,k7) -> [k0; k1; k2; k3; k4; k5; k6; k7] let normalize8 (c: k list) = match c with [k0; k1; k2; k3; k4; k5; k6; k7] -> (k0,k1,k2,k3,k4,k5,k6,k7) | _ -> invalid_arg "normalize8" let int0 = lazy (Int (Z.of_string "0")) let int1 = lazy (Int (Z.of_string "1")) let int2 = lazy (Int (Z.of_string "2")) let int'Hyph'1 = lazy (Int (Z.of_string "-1")) let const'Hash'EHOSTDOWN_K'Hyph'IO = KApply0(Lbl'Hash'EHOSTDOWN_K'Hyph'IO) let const'Stop'ThreadCellBag = KApply0(Lbl'Stop'ThreadCellBag) let constnoKCell = KApply0(LblnoKCell) let constinitSchanCell = KApply0(LblinitSchanCell) let const'Hash'EINTR_K'Hyph'IO = KApply0(Lbl'Hash'EINTR_K'Hyph'IO) let constnoLidCell = KApply0(LblnoLidCell) let constnoWhatCell = KApply0(LblnoWhatCell) let constinitWhatCell = KApply0(LblinitWhatCell) let const'Hash'EOVERFLOW_K'Hyph'IO = KApply0(Lbl'Hash'EOVERFLOW_K'Hyph'IO) let constnoStateCell = KApply0(LblnoStateCell) let const'Hash'ENETDOWN_K'Hyph'IO = KApply0(Lbl'Hash'ENETDOWN_K'Hyph'IO) let constnoVarsCell = KApply0(LblnoVarsCell) let constnoRnumCell = KApply0(LblnoRnumCell) let constnoMsgCell = KApply0(LblnoMsgCell) let const'Hash'EIO_K'Hyph'IO = KApply0(Lbl'Hash'EIO_K'Hyph'IO) let const'Hash'Bottom = KApply0(Lbl'Hash'Bottom) let const'Stop'ListenCellBag = KApply0(Lbl'Stop'ListenCellBag) let const'Hash'EISCONN_K'Hyph'IO = KApply0(Lbl'Hash'EISCONN_K'Hyph'IO) let const'Hash'EDOM_K'Hyph'IO = KApply0(Lbl'Hash'EDOM_K'Hyph'IO) let const'Stop'Map = KApply0(Lbl'Stop'Map) let const'Hash'EPROTOTYPE_K'Hyph'IO = KApply0(Lbl'Hash'EPROTOTYPE_K'Hyph'IO) let const'Hash'freezernot__GRHO'Hyph'SYNTAX0_ = KApply0(Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_) let constNil_GRHO'Hyph'SYNTAX = KApply0(LblNil_GRHO'Hyph'SYNTAX) let const'Hash'EADDRINUSE_K'Hyph'IO = KApply0(Lbl'Hash'EADDRINUSE_K'Hyph'IO) let const'Hash'EACCES_K'Hyph'IO = KApply0(Lbl'Hash'EACCES_K'Hyph'IO) let constinitWhereCell = KApply0(LblinitWhereCell) let constnoMlidCell = KApply0(LblnoMlidCell) let constnewUUID_STRING = KApply0(LblnewUUID_STRING) let const'Hash'ENOSPC_K'Hyph'IO = KApply0(Lbl'Hash'ENOSPC_K'Hyph'IO) let constnoTuplespaceCell = KApply0(LblnoTuplespaceCell) let constinitLidCell = KApply0(LblinitLidCell) let const'Hash'EMLINK_K'Hyph'IO = KApply0(Lbl'Hash'EMLINK_K'Hyph'IO) let 
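(* The lazy int0, int1, int2 and int'Hyph'1 values and the const... bindings in this
   region pre-allocate the common integer tokens and every nullary KApply0 application
   exactly once, evidently so the generated interpreter can share these constants
   instead of rebuilding them at each use. *)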
const'Hash'EINVAL_K'Hyph'IO = KApply0(Lbl'Hash'EINVAL_K'Hyph'IO) let constinitNomoCell = KApply0(LblinitNomoCell) let constinitSendCell = KApply0(LblinitSendCell) let constnoTupleCell = KApply0(LblnoTupleCell) let const'Hash'ENOPROTOOPT_K'Hyph'IO = KApply0(Lbl'Hash'ENOPROTOOPT_K'Hyph'IO) let const'Hash'EPERM_K'Hyph'IO = KApply0(Lbl'Hash'EPERM_K'Hyph'IO) let const'Hash'EWOULDBLOCK_K'Hyph'IO = KApply0(Lbl'Hash'EWOULDBLOCK_K'Hyph'IO) let constnoSchanCell = KApply0(LblnoSchanCell) let const'Stop'Set = KApply0(Lbl'Stop'Set) let const'Hash'ENETUNREACH_K'Hyph'IO = KApply0(Lbl'Hash'ENETUNREACH_K'Hyph'IO) let const'Stop'List = KApply0(Lbl'Stop'List) let const'Hash'ENAMETOOLONG_K'Hyph'IO = KApply0(Lbl'Hash'ENAMETOOLONG_K'Hyph'IO) let const'Hash'ECONNRESET_K'Hyph'IO = KApply0(Lbl'Hash'ECONNRESET_K'Hyph'IO) let const'Hash'ENXIO_K'Hyph'IO = KApply0(Lbl'Hash'ENXIO_K'Hyph'IO) let constnoReceivesCell = KApply0(LblnoReceivesCell) let const'Hash'EALREADY_K'Hyph'IO = KApply0(Lbl'Hash'EALREADY_K'Hyph'IO) let constinitRidCell = KApply0(LblinitRidCell) let const'Hash'ENOTCONN_K'Hyph'IO = KApply0(Lbl'Hash'ENOTCONN_K'Hyph'IO) let constnoNumCell = KApply0(LblnoNumCell) let const'Hash'ETOOMANYREFS_K'Hyph'IO = KApply0(Lbl'Hash'ETOOMANYREFS_K'Hyph'IO) let constnoForgCell = KApply0(LblnoForgCell) let constinitIdCell = KApply0(LblinitIdCell) let const'Hash'configuration_K'Hyph'REFLECTION = KApply0(Lbl'Hash'configuration_K'Hyph'REFLECTION) let const'Hash'EMSGSIZE_K'Hyph'IO = KApply0(Lbl'Hash'EMSGSIZE_K'Hyph'IO) let constnoReactionCell = KApply0(LblnoReactionCell) let const'Stop'SendCellBag = KApply0(Lbl'Stop'SendCellBag) let constinitListensCell = KApply0(LblinitListensCell) let const'Hash'EDEADLK_K'Hyph'IO = KApply0(Lbl'Hash'EDEADLK_K'Hyph'IO) let const'Hash'ENOTSOCK_K'Hyph'IO = KApply0(Lbl'Hash'ENOTSOCK_K'Hyph'IO) let constinitStateCell = KApply0(LblinitStateCell) let const'Hash'EAGAIN_K'Hyph'IO = KApply0(Lbl'Hash'EAGAIN_K'Hyph'IO) let constinitReactionCell = KApply0(LblinitReactionCell) let const'Hash'ESHUTDOWN_K'Hyph'IO = KApply0(Lbl'Hash'ESHUTDOWN_K'Hyph'IO) let constnoMsidCell = KApply0(LblnoMsidCell) let constnoSidCell = KApply0(LblnoSidCell) let constinitRnumCell = KApply0(LblinitRnumCell) let constnoThreadsCell = KApply0(LblnoThreadsCell) let const'Hash'ERANGE_K'Hyph'IO = KApply0(Lbl'Hash'ERANGE_K'Hyph'IO) let const'Hash'E2BIG_K'Hyph'IO = KApply0(Lbl'Hash'E2BIG_K'Hyph'IO) let constnoIdCell = KApply0(LblnoIdCell) let const'Hash'ECONNREFUSED_K'Hyph'IO = KApply0(Lbl'Hash'ECONNREFUSED_K'Hyph'IO) let const'Hash'ENOSYS_K'Hyph'IO = KApply0(Lbl'Hash'ENOSYS_K'Hyph'IO) let constinitMsidCell = KApply0(LblinitMsidCell) let const'Hash'ENOTDIR_K'Hyph'IO = KApply0(Lbl'Hash'ENOTDIR_K'Hyph'IO) let const'Hash'ECONNABORTED_K'Hyph'IO = KApply0(Lbl'Hash'ECONNABORTED_K'Hyph'IO) let const'Hash'EBUSY_K'Hyph'IO = KApply0(Lbl'Hash'EBUSY_K'Hyph'IO) let const'Hash'EOPNOTSUPP_K'Hyph'IO = KApply0(Lbl'Hash'EOPNOTSUPP_K'Hyph'IO) let const'Hash'ESRCH_K'Hyph'IO = KApply0(Lbl'Hash'ESRCH_K'Hyph'IO) let const'Hash'ENOMEM_K'Hyph'IO = KApply0(Lbl'Hash'ENOMEM_K'Hyph'IO) let constinitRecCell = KApply0(LblinitRecCell) let constinitLengthCell = KApply0(LblinitLengthCell) let constinitReactCell = KApply0(LblinitReactCell) let constnoMatchCell = KApply0(LblnoMatchCell) let const'Hash'ESPIPE_K'Hyph'IO = KApply0(Lbl'Hash'ESPIPE_K'Hyph'IO) let constinitNumCell = KApply0(LblinitNumCell) let constnoWhereCell = KApply0(LblnoWhereCell) let const'Hash'ENOENT_K'Hyph'IO = KApply0(Lbl'Hash'ENOENT_K'Hyph'IO) let const'Hash'stdout_K'Hyph'IO = 
KApply0(Lbl'Hash'stdout_K'Hyph'IO) let constinitStypeCell = KApply0(LblinitStypeCell) let const'Hash'ENODEV_K'Hyph'IO = KApply0(Lbl'Hash'ENODEV_K'Hyph'IO) let const'Hash'EXDEV_K'Hyph'IO = KApply0(Lbl'Hash'EXDEV_K'Hyph'IO) let const'Hash'ENOLCK_K'Hyph'IO = KApply0(Lbl'Hash'ENOLCK_K'Hyph'IO) let const'Stop'RecCellBag = KApply0(Lbl'Stop'RecCellBag) let constinitListenCell = KApply0(LblinitListenCell) let const'Hash'argv = KApply0(Lbl'Hash'argv) let constnoSendsCell = KApply0(LblnoSendsCell) let constnoLengthCell = KApply0(LblnoLengthCell) let const'Hash'ENOTEMPTY_K'Hyph'IO = KApply0(Lbl'Hash'ENOTEMPTY_K'Hyph'IO) let const'Hash'EMFILE_K'Hyph'IO = KApply0(Lbl'Hash'EMFILE_K'Hyph'IO) let const'Hash'ELOOP_K'Hyph'IO = KApply0(Lbl'Hash'ELOOP_K'Hyph'IO) let const'Hash'stderr_K'Hyph'IO = KApply0(Lbl'Hash'stderr_K'Hyph'IO) let constnoTypeCell = KApply0(LblnoTypeCell) let constnoReactCell = KApply0(LblnoReactCell) let constinitForgCell = KApply0(LblinitForgCell) let const'Hash'EPIPE_K'Hyph'IO = KApply0(Lbl'Hash'EPIPE_K'Hyph'IO) let const'Hash'ENETRESET_K'Hyph'IO = KApply0(Lbl'Hash'ENETRESET_K'Hyph'IO) let constinitVarsCell = KApply0(LblinitVarsCell) let constnoRidCell = KApply0(LblnoRidCell) let constinitTuplespaceCell = KApply0(LblinitTuplespaceCell) let constinitMlidCell = KApply0(LblinitMlidCell) let constinitChanCell = KApply0(LblinitChanCell) let constnoListensCell = KApply0(LblnoListensCell) let constinitSendsCell = KApply0(LblinitSendsCell) let const'Hash'EPFNOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO) let const'Hash'EEXIST_K'Hyph'IO = KApply0(Lbl'Hash'EEXIST_K'Hyph'IO) let const'Hash'stdin_K'Hyph'IO = KApply0(Lbl'Hash'stdin_K'Hyph'IO) let const'Hash'EADDRNOTAVAIL_K'Hyph'IO = KApply0(Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO) let const'Hash'EHOSTUNREACH_K'Hyph'IO = KApply0(Lbl'Hash'EHOSTUNREACH_K'Hyph'IO) let constnoChanCell = KApply0(LblnoChanCell) let constinitTupleCell = KApply0(LblinitTupleCell) let const'Hash'ECHILD_K'Hyph'IO = KApply0(Lbl'Hash'ECHILD_K'Hyph'IO) let const'Hash'EOF_K'Hyph'IO = KApply0(Lbl'Hash'EOF_K'Hyph'IO) let const'Hash'EDESTADDRREQ_K'Hyph'IO = KApply0(Lbl'Hash'EDESTADDRREQ_K'Hyph'IO) let const'Hash'EFBIG_K'Hyph'IO = KApply0(Lbl'Hash'EFBIG_K'Hyph'IO) let const'Hash'EBADF_K'Hyph'IO = KApply0(Lbl'Hash'EBADF_K'Hyph'IO) let const'Hash'ETIMEDOUT_K'Hyph'IO = KApply0(Lbl'Hash'ETIMEDOUT_K'Hyph'IO) let const'Hash'ESOCKTNOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO) let constnoWhoCell = KApply0(LblnoWhoCell) let const'Hash'EISDIR_K'Hyph'IO = KApply0(Lbl'Hash'EISDIR_K'Hyph'IO) let constnoNomoCell = KApply0(LblnoNomoCell) let const'Hash'ENOTTY_K'Hyph'IO = KApply0(Lbl'Hash'ENOTTY_K'Hyph'IO) let const'Hash'EFAULT_K'Hyph'IO = KApply0(Lbl'Hash'EFAULT_K'Hyph'IO) let constinitMatchCell = KApply0(LblinitMatchCell) let const'Hash'EROFS_K'Hyph'IO = KApply0(Lbl'Hash'EROFS_K'Hyph'IO) let constinitTypeCell = KApply0(LblinitTypeCell) let const'Hash'noparse_K'Hyph'IO = KApply0(Lbl'Hash'noparse_K'Hyph'IO) let const'Hash'ENFILE_K'Hyph'IO = KApply0(Lbl'Hash'ENFILE_K'Hyph'IO) let const'Hash'ThreadLocal = KApply0(Lbl'Hash'ThreadLocal) let constinitMsgCell = KApply0(LblinitMsgCell) let constnoStypeCell = KApply0(LblnoStypeCell) let constinitReceivesCell = KApply0(LblinitReceivesCell) let constinitSidCell = KApply0(LblinitSidCell) let constinitWhoCell = KApply0(LblinitWhoCell) let constnoContCell = KApply0(LblnoContCell) let constinitContCell = KApply0(LblinitContCell) let const'Hash'EAFNOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO) let 
const'Hash'ENOEXEC_K'Hyph'IO = KApply0(Lbl'Hash'ENOEXEC_K'Hyph'IO) let const'Hash'EPROTONOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO) let const'Hash'ENOBUFS_K'Hyph'IO = KApply0(Lbl'Hash'ENOBUFS_K'Hyph'IO) let const'Hash'EINPROGRESS_K'Hyph'IO = KApply0(Lbl'Hash'EINPROGRESS_K'Hyph'IO) let val_for (c: klabel) (k : k) (v : k) : normal_kitem = match c with |_ -> KApply((el_for c), [k;v])
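(* Hand-added summary comment; everything else in this file appears to be emitted by
   the K framework's kompile tool for the GroundedRho/Join definition and should not
   be edited by hand.  The normalizeN/denormalizeN helpers above convert between
   N-tuples of K terms and lists of length N, the lazy intN values are cached integer
   tokens, the const... bindings pre-build every nullary KApply, and val_for wraps a
   key/value pair under an element label obtained via el_for.  A minimal sketch of the
   tuple helpers, kept inside this comment because it only assumes two arbitrary
   values a and b of type k (names chosen here for illustration):

     let roundtrip (a : k) (b : k) : bool =
       normalize2 (denormalize2 (a, b)) = (a, b)   (* denormalize2 builds [a; b], normalize2 rebuilds (a, b) *)
*)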
null
https://raw.githubusercontent.com/Isaac-DeFrain/KFramework/54e6b833f104222ef24ea75551dc97c0ae4f588c/Languages/GroundedRho/Join/join-kompiled/constants.ml
ocaml
type sort = |SortRbinds |SortAExp |SortListensCellOpt |SortIdCellOpt |SortK |SortPbindList |SortRidCell |SortListenCell |SortUnconsumableSend |SortThreadCell |SortMlidCell |SortVarsCellOpt |SortNomoCell |SortThreadCellBag |SortThreadCellFragment |SortKItem |SortBindList |SortNames |SortUri |SortIdCell |SortRhoMap |SortLidCell |SortReceivesCellFragment |SortStateCellOpt |SortSendsCellFragment |SortTCellFragment |SortChanLen |SortContCell |SortSchanCell |SortSet |SortChanCellOpt |SortCell |SortProcs |SortWhereCellOpt |SortTupleCellOpt |SortBool |SortKResult |SortReactCell |SortRhoTuple |SortSend |SortLengthCell |SortKCell |SortMsidCellOpt |SortLbind |SortBundle |SortRnumCell |SortRhoKVPairs |SortSidCell |SortTuplespaceCellFragment |SortLidCellOpt |SortName |SortBindOcc |SortReactionCellFragment |SortRhoSet |SortSendCellBag |SortPar |SortInt |SortNumCellOpt |SortRnumCellOpt |SortCollection |SortRbind |SortWhatCellOpt |SortRecCellFragment |SortStypeCell |SortMsidCell |SortReceivesCell |SortEval |SortKCellOpt |SortStypeCellOpt |SortListenCellBag |SortSendCell |SortReactCellOpt |SortRhoList |SortReactionCell |SortMatchCellOpt |SortChanCell |SortLbindList |SortNameVar |SortLengthCellOpt |SortListensCell |SortBExp |SortConsumableSend |SortRidCellOpt |SortMap |SortRecCellBag |SortRecCell |SortContCellOpt |SortThreadsCellOpt |SortTuplespaceCell |SortNew |SortStream |SortThreadsCellFragment |SortListensCellFragment |SortWhoCell |SortReceivesCellOpt |SortProc |SortNameList |SortGround |SortString |SortWhoCellOpt |SortFloat |SortChanList |SortSendsCell |SortReactionCellOpt |SortPbind |SortSingleRec |SortThreadsCell |SortMultiRec |SortTypeCell |SortVarsCell |SortTypeCellOpt |SortSendCellFragment |SortSchanCellOpt |SortTuplespaceCellOpt |SortLbinds |SortNumCell |SortWhereCell |SortForgCellOpt |SortKVariable |SortBytes |SortWhatCell |SortSendsCellOpt |SortNomoCellOpt |SortTupleCell |SortIOError |SortStringBuffer |SortRbindList |SortTCell |SortBind |SortMlidCellOpt |SortMsgCellOpt |SortMsgCell |SortIdNum |SortKConfigVar |SortSidCellOpt |SortPbinds |SortJoinList |SortBinds |SortForgCell |SortProcList |SortId |SortList |SortStateCell |SortReceive |SortListenCellFragment |SortMatchCell type klabel = |Lbl'Hash'argv |LblisPbindList |LblisThreadCell |LblMap'Coln'lookup |LblisChanList |Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX |Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO |LblisWhoCell |LblisIdNum |LblsignExtendBitRangeInt |Lbl_'EqlsEqls'Bool__BOOL |LblisSet |LblisThreadCellBag |Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisNew |Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX |LblisStypeCell |Lbl_'_LT_Eqls'Set__SET |Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisIOError |Lbl'Hash'parse |Lbl'Hash'EALREADY_K'Hyph'IO |LblisRhoList |LblmakeList |Lbl'Hash'ESPIPE_K'Hyph'IO |Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'ENOENT_K'Hyph'IO |LblisProcs |Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX |LblisTypeCell |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ |LblisLbinds |LblisLengthCell |LblnoStateCell |LblisLbind |Lbl'Hash'ENOTTY_K'Hyph'IO |LblisForgCell |Lbl'_LT_'forg'_GT_' |LblinitChanCell |LblisProcList |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ |LblinitRnumCell |LblisRidCellOpt |LblisReceivesCellFragment |Lbl'Hash'ENOTEMPTY_K'Hyph'IO |LblisSidCellOpt |Lbl'Hash'EMSGSIZE_K'Hyph'IO |LblisKConfigVar |LblisRhoMap |Lbl'Hash'ENETRESET_K'Hyph'IO |Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO |LblnoTupleCell |LblnoSendsCell |Lbl'_LT_'thread'_GT_Hyph'fragment |LblisCell |LblisPbind |Lbl'Hash'ENOMEM_K'Hyph'IO |Lblvalues 
|Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX |LblisThreadCellFragment |LblisStateCellOpt |LblinitLidCell |LblisNameList |LblisListensCellOpt |LblisTuplespaceCellOpt |Lbl'Hash'ENXIO_K'Hyph'IO |Lbl_'_LT_'Int__INT |LblnoTypeCell |LblisSendCell |Lbl'Hash'configuration_K'Hyph'REFLECTION |LblisSendsCell |LblisFloat |Lbl'_LT_'msg'_GT_' |LblisContCell |LblchrChar |Lbl_divInt__INT |Lbl'Hash'EROFS_K'Hyph'IO |LblisWhereCellOpt |Lbl_ThreadCellBag_ |LblisProc |LblisListensCell |Lbl_'Plus'Int_ |LblisReactionCell |Lbl_orBool__BOOL |Lbl'_LT_'sid'_GT_' |Lbl'Hash'ENFILE_K'Hyph'IO |LblupdateMap |LblisReactionCellOpt |Lbl_'SCln'__GRHO'Hyph'SYNTAX |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ |LblisNomoCell |LblnoWhereCell |LblisJoinList |LblInt2String |Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'EqlsSlshEqls'K_ |LblisNumCell |LblisRecCell |Lbl_List_ |LblisMultiRec |LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'EOPNOTSUPP_K'Hyph'IO |Lbl_'PipeHyph_GT_'_ |LblisMatchCellOpt |Lbl_'Hyph'Map__MAP |Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisRhoSet |Lbl'_LT_'chan'_GT_' |LblnoForgCell |LblisReceivesCellOpt |Lbl'Hash'EMLINK_K'Hyph'IO |LblisListenCellBag |Lbl'Hash'sort |Lbl_'EqlsEqls'K_ |LblisPar |Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX |LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING |LblnoListensCell |LblnoStypeCell |Lbl'Hash'EOVERFLOW_K'Hyph'IO |Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO |LblisThreadsCellOpt |Lbl'Stop'Map |LblisVarsCell |Lbl_'EqlsSlshEqls'String__STRING |Lbl'Hash'EIO_K'Hyph'IO |LblinitMlidCell |Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX |LblisSendCellBag |LblisInt |Lbl'Hash'EFAULT_K'Hyph'IO |Lbl'Hash'fresh |Lbl_impliesBool__BOOL |Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'Star'Int__INT |Lbl'_LT_'T'_GT_' |Lbl'Hash'Thread |LblmaxInt'LPar'_'Comm'_'RPar'_INT |LblinitReceivesCell |Lbl'Hash'EDEADLK_K'Hyph'IO |Lbl_'_LT_Eqls'String__STRING |LblListenCellBagItem |LblisNames |Lbl'Hash'ENOBUFS_K'Hyph'IO |Lbl_Map_ |Lbl_'Hyph'Int__INT |Lbl'Hash'EOF_K'Hyph'IO |Lbl_'BangBang'__GRHO'Hyph'SYNTAX |LblisReactionCellFragment |Lbl_and__GRHO'Hyph'SYNTAX |Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblFloat2String |Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX |LblinitWhoCell |Lbl'_LT_'listen'_GT_' |LblnoReceivesCell |LblsizeList |Lbl'Hash'EWOULDBLOCK_K'Hyph'IO |LblString2Id |LblinitTuplespaceCell |Lbl'_LT_'thread'_GT_' |Lbl'_LT_'vars'_GT_' |Lbl_'EqlsSlshEqls'Bool__BOOL |Lbl'_LT_'length'_GT_' |LblisCollection |Lbl'Hash'EFBIG_K'Hyph'IO |LblisTCell |Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION |Lbl'Hash'EBADF_K'Hyph'IO |Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblnoLengthCell |LblinitNomoCell |Lbl'Hash'EPIPE_K'Hyph'IO |Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblnoContCell |LblisRhoTuple |Lbl_'Xor_Perc'Int___INT |LblisMsgCellOpt |Lbl'_LT_'reaction'_GT_' |LblrfindString |LblisChanCellOpt |Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO |LblnoNomoCell |Lbl'Hash'EINTR_K'Hyph'IO |Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO |LblupdateList |LblisLidCell |LblisMsgCell |Lbl'Stop'SendCellBag |LblinitContCell |LblnoReactCell |LblcategoryChar |LblSet'Coln'difference |LblisName |Lbl'Hash'EHOSTUNREACH_K'Hyph'IO |Lbl'Hash'ECONNRESET_K'Hyph'IO |LblisBundle |LblisKCellOpt |LblisForgCellOpt |Lbl'Hash'ECHILD_K'Hyph'IO |LblisRecCellFragment |LblisUnconsumableSend |LblisLbindList |LblString2Float |LblMap'Coln'lookupOrDefault |Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL |Lbl'_LT_'tuplespace'_GT_' |Lbl'Hash'ENOTCONN_K'Hyph'IO 
|Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX |Lbl'_LT_'what'_GT_' |Lbl'Hash'stdout_K'Hyph'IO |Lbl_'And'Int__INT |Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'_LT_'tuple'_GT_' |Lbl'Hash'ENAMETOOLONG_K'Hyph'IO |Lbllog2Int |Lbl_'EqlsSlshEqls'Int__INT |Lbl'Hash'stdin_K'Hyph'IO |Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'_GT_Eqls'String__STRING |LblnoSchanCell |Lbl'_LT_'react'_GT_' |LblisBindOcc |LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX |LblsizeMap |LblisWhereCell |LblnoMsgCell |LblisId |LblsubstrString |LblnoTuplespaceCell |Lbl_'Comm'__GRHO'Hyph'SYNTAX |Lbl_'Bang'__GRHO'Hyph'SYNTAX |LblisTypeCellOpt |Lblsize |Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblnoMatchCell |LblisBind |Lbl'Hash'ENETUNREACH_K'Hyph'IO |Lbl'Hash'EPROTOTYPE_K'Hyph'IO |Lbl'Star'__GRHO'Hyph'SYNTAX |Lbl'_LT_'who'_GT_' |Lbl_'Coln'__GRHO'Hyph'SYNTAX |LblnoThreadsCell |Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO |Lbl'_LT_'listens'_GT_' |LblsrandInt |Lbl'Hash'EINVAL_K'Hyph'IO |Lbl'_LT_'rid'_GT_' |LblisKItem |Lbl'Hash'ENODEV_K'Hyph'IO |Lbl'Hash'length__AUXFUN'Hyph'SYNTAX |LblisRecCellBag |LblList'Coln'set |LblisUri |LblString2Base |Lbl'Hash'noparse_K'Hyph'IO |Lblkeys |LblinitRecCell |Lbl'Hash'ESHUTDOWN_K'Hyph'IO |LblisGround |Lbl'Stop'ThreadCellBag |LblThreadCellBagItem |Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'Hash'ENOTDIR_K'Hyph'IO |Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX |Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX |LblinitSendCell |Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX |Lbl'_LT_'nomo'_GT_' |Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl_'_LT_Eqls'Int__INT |LblnotBool_ |LblnoNumCell |Lbl'Hash'stderr_K'Hyph'IO |LblnoKCell |Lbl'Hash'EBUSY_K'Hyph'IO |Lbl'Hash'getenv |LblisTuplespaceCell |LblisBinds |LblnoReactionCell |LblintersectSet |Lbl_in_keys'LPar'_'RPar'_MAP |LblinitMsgCell |Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblfindChar |Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX |LblSet'Coln'in |LblisK |LblisWhoCellOpt |Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ |LblisReceivesCell |LblString2Int |Lbl'_LT_'where'_GT_' |LblinitWhereCell |LblinitThreadCell |LblisSingleRec |LblisThreadsCell |LblisTupleCellOpt |LblisEval |LblisWhatCell |Lbl'Hash'ENETDOWN_K'Hyph'IO |LblisListenCellFragment |Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' |Lbl'Hash'Bottom |Lbl_'EqlsEqls'Int_ |Lbl_andThenBool__BOOL |LblisPbinds |Lbl'Hash'parseInModule |LblNil_GRHO'Hyph'SYNTAX |LblisAExp |Lbl'Hash'system |Lbl'_LT_'mlid'_GT_' |LblinitRidCell |LblisString |Lbl_'Perc'Int__INT |Lbl_'_GT__GT_'Int__INT |Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX |LblnoWhoCell |LblisList |Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO |LblisTuplespaceCellFragment |LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING |LblisBindList |LblnoChanCell |Lbl'Hash'EDESTADDRREQ_K'Hyph'IO |Lbl'Hash'EADDRINUSE_K'Hyph'IO |LblnoRnumCell |Lbl_'Xor_'Int__INT |LblfindString |Lbl'_LT_'k'_GT_' |Lbl'_LT_'reaction'_GT_Hyph'fragment |LblabsInt |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ |Lbl'Hash'EHOSTDOWN_K'Hyph'IO |Lbl_'_GT_'String__STRING |LblisSendsCellFragment |LblinitLengthCell |Lbl_'EqlsEqls'String__STRING |LblisRnumCellOpt |LblisSend |LblisKResult |LblinitStypeCell |LblList'Coln'get |Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO |LblSendCellBagItem |Lbltuple |Lbl'_LT_'id'_GT_' |LblSetItem |Lbl'_LT_'receives'_GT_' |LblisRhoKVPairs |LblunsignedBytes |LblisMsidCellOpt |Lbl'Stop'List |Lbl'Hash'ENOLCK_K'Hyph'IO |LblisSendsCellOpt |Lbl'Hash'ECONNABORTED_K'Hyph'IO |LblrandInt |Lbl'Hash'EXDEV_K'Hyph'IO |Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO |Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX 
|Lblkeys_list'LPar'_'RPar'_MAP |LblfreshId |LblinitTypeCell |Lbl_orElseBool__BOOL |LblisSchanCellOpt |Lbl'Hash'EISDIR_K'Hyph'IO |Lbl'_LT_'cont'_GT_' |LblList'Coln'range |LblinitTupleCell |LblnoIdCell |LblisKCell |Lbl'Hash'unknownIOError |Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ |Lbl_'_GT_Eqls'Int__INT |LblisSendCellFragment |Lbl'Hash'ENOSYS_K'Hyph'IO |Lbl_'Pipe'__GRHO'Hyph'SYNTAX |Lbl'Hash'ECONNREFUSED_K'Hyph'IO |Lbl'_LT_'sends'_GT_Hyph'fragment |Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO |LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING |Lbl_'_GT_'Int__INT |LblfillList |Lbl'_AT_'__GRHO'Hyph'SYNTAX |LblinitForgCell |LblbitRangeInt |Lbl_'_LT_'String__STRING |Lbl'Hash'ThreadLocal |Lbl_xorBool__BOOL |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ |LblinitReactCell |Lbl'Stop'RecCellBag |Lbl'_LT_'type'_GT_' |Lbl'_LT_'listens'_GT_Hyph'fragment |Lbl_'Plus'__GRHO'Hyph'SYNTAX |Lbl'_LT_'lid'_GT_' |Lbl_ListenCellBag_ |Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO |Lbl_'LSqB'_'RSqB'_SUBSTITUTION |LblnoMlidCell |Lbl_or__GRHO'Hyph'SYNTAX |Lbl'Hash'ETOOMANYREFS_K'Hyph'IO |Lbl'_LT_'threads'_GT_Hyph'fragment |LblinitListensCell |Lbl'Hash'ENOSPC_K'Hyph'IO |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ |LblisChanCell |LblisRnumCell |Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX |Lbl'Hash'logToFile |Lbl'_LT_'rec'_GT_' |Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO |LblnoLidCell |LblisNameVar |Lbl'_LT_'schan'_GT_' |LblbigEndianBytes |Lbl'_LT_'match'_GT_' |LblId2String |LblinitListenCell |Lbl'_LT_'num'_GT_' |LblisContCellOpt |LblisLidCellOpt |LblnoSidCell |Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX |LblMap'Coln'choice |Lbl_Set_ |Lbl'Hash'EEXIST_K'Hyph'IO |Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO |LblisRidCell |Lbl'_LT_'state'_GT_' |LblisListenCell |LblisBool |Lbl'Tild'Int__INT |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ |LblordChar |LblinitIdCell |Lbl_modInt__INT |LblrfindChar |LblisRbinds |LblisMlidCellOpt |Lbl'Hash'EAGAIN_K'Hyph'IO |Lbl'Stop'ListenCellBag |LblnoMsidCell |LblinitSchanCell |LbldirectionalityChar |LblisIdCell |Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO |LblinitKCell |LblRecCellBagItem |Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX |LblisBExp |Lbl'Stop'Set |LblisChanLen |LblisStateCell |Lbl'Hash'EACCES_K'Hyph'IO |Lbl'Hash'ELOOP_K'Hyph'IO |Lbl'Hash'EDOM_K'Hyph'IO |LblisSidCell |LblremoveAll |LblnoRidCell |Lbl'_LT_'threads'_GT_' |Lbl_andBool_ |Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |LblisThreadsCellFragment |LblisLengthCellOpt |LblisRbindList |Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO |LblisConsumableSend |LbllengthString |Lbl_'Hyph'__GRHO'Hyph'SYNTAX |Lbl'_LT_'listen'_GT_Hyph'fragment |LblisReceive |Lbl'Hash'ERANGE_K'Hyph'IO |LblinitTCell |LblsignedBytes |LblFloatFormat |LblisMsidCell |Lbl'Hash'ENOTSOCK_K'Hyph'IO |Lbl_'Plus'String__STRING |Lbl_RecCellBag_ |Lbl_'Pipe'Int__INT |Lbl'Hash'EISCONN_K'Hyph'IO |LblisKVariable |Lbl_dividesInt__INT |Lbl'_LT_'rec'_GT_Hyph'fragment |Lbl'_LT_'tuplespace'_GT_Hyph'fragment |Lbl'_LT_'T'_GT_Hyph'fragment |Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ |LblisWhatCellOpt |LblSet'Coln'choice |LblisMatchCell |LblisListensCellFragment |Lbl'Hash'buffer |Lbl_'Star'__GRHO'Hyph'SYNTAX |LblinitNumCell |LblfreshInt |Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'Hash'ETIMEDOUT_K'Hyph'IO |LblinitSidCell |LblisIdCellOpt |Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX |Lbl'_LT_'sends'_GT_' |LblisSchanCell |Lbl_xorInt__INT |Lbl'Hash'EINPROGRESS_K'Hyph'IO |LblinitVarsCell |LblinitStateCell 
|LblisNumCellOpt |LblinitMatchCell |LblisMlidCell |Lbl'Hash'ENOPROTOOPT_K'Hyph'IO |LbllittleEndianBytes |Lbl'Hash'EPERM_K'Hyph'IO |LblnoWhatCell |LblinitWhatCell |Lbl'_LT_'send'_GT_' |Lbl_'_LT__LT_'Int__INT |LblBase2String |LblListItem |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ |LblisStream |Lbl_'_LT_Eqls'Map__MAP |LblnewUUID_STRING |LblnoVarsCell |LblinitThreadsCell |Lbl_SendCellBag_ |Lbl'Hash'ESRCH_K'Hyph'IO |Lbl'Hash'EMFILE_K'Hyph'IO |Lblproject'Coln'Proc |LblisReactCellOpt |Lbl'_LT_'receives'_GT_Hyph'fragment |Lbl'_LT_'stype'_GT_' |Lbl_inList_ |LblisVarsCellOpt |Lbl'Hash'ENOEXEC_K'Hyph'IO |LblminInt'LPar'_'Comm'_'RPar'_INT |LblinitReactionCell |LblisMap |LblisTupleCell |LblisReactCell |LblinitMsidCell |Lbl'_LT_'rnum'_GT_' |LblisNomoCellOpt |LblisStypeCellOpt |LblisTCellFragment |Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING |Lbl_'Slsh'Int__INT |Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP |LblisRbind |Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO |Lbl'_LT_'msid'_GT_' |LblinitSendsCell |Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX |LblgetKLabel |Lblnot__GRHO'Hyph'SYNTAX |Lbl'Hash'E2BIG_K'Hyph'IO |Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO |Lbl'_LT_'send'_GT_Hyph'fragment let print_sort(c: sort) : string = match c with |SortRbinds -> "Rbinds" |SortAExp -> "AExp" |SortListensCellOpt -> "ListensCellOpt" |SortIdCellOpt -> "IdCellOpt" |SortK -> "K" |SortPbindList -> "PbindList" |SortRidCell -> "RidCell" |SortListenCell -> "ListenCell" |SortUnconsumableSend -> "UnconsumableSend" |SortThreadCell -> "ThreadCell" |SortMlidCell -> "MlidCell" |SortVarsCellOpt -> "VarsCellOpt" |SortNomoCell -> "NomoCell" |SortThreadCellBag -> "ThreadCellBag" |SortThreadCellFragment -> "ThreadCellFragment" |SortKItem -> "KItem" |SortBindList -> "BindList" |SortNames -> "Names" |SortUri -> "Uri" |SortIdCell -> "IdCell" |SortRhoMap -> "RhoMap" |SortLidCell -> "LidCell" |SortReceivesCellFragment -> "ReceivesCellFragment" |SortStateCellOpt -> "StateCellOpt" |SortSendsCellFragment -> "SendsCellFragment" |SortTCellFragment -> "TCellFragment" |SortChanLen -> "ChanLen" |SortContCell -> "ContCell" |SortSchanCell -> "SchanCell" |SortSet -> "Set" |SortChanCellOpt -> "ChanCellOpt" |SortCell -> "Cell" |SortProcs -> "Procs" |SortWhereCellOpt -> "WhereCellOpt" |SortTupleCellOpt -> "TupleCellOpt" |SortBool -> "Bool" |SortKResult -> "KResult" |SortReactCell -> "ReactCell" |SortRhoTuple -> "RhoTuple" |SortSend -> "Send" |SortLengthCell -> "LengthCell" |SortKCell -> "KCell" |SortMsidCellOpt -> "MsidCellOpt" |SortLbind -> "Lbind" |SortBundle -> "Bundle" |SortRnumCell -> "RnumCell" |SortRhoKVPairs -> "RhoKVPairs" |SortSidCell -> "SidCell" |SortTuplespaceCellFragment -> "TuplespaceCellFragment" |SortLidCellOpt -> "LidCellOpt" |SortName -> "Name" |SortBindOcc -> "BindOcc" |SortReactionCellFragment -> "ReactionCellFragment" |SortRhoSet -> "RhoSet" |SortSendCellBag -> "SendCellBag" |SortPar -> "Par" |SortInt -> "Int" |SortNumCellOpt -> "NumCellOpt" |SortRnumCellOpt -> "RnumCellOpt" |SortCollection -> "Collection" |SortRbind -> "Rbind" |SortWhatCellOpt -> "WhatCellOpt" |SortRecCellFragment -> "RecCellFragment" |SortStypeCell -> "StypeCell" |SortMsidCell -> "MsidCell" |SortReceivesCell -> "ReceivesCell" |SortEval -> "Eval" |SortKCellOpt -> "KCellOpt" |SortStypeCellOpt -> "StypeCellOpt" |SortListenCellBag -> "ListenCellBag" |SortSendCell -> "SendCell" |SortReactCellOpt -> "ReactCellOpt" |SortRhoList -> "RhoList" |SortReactionCell -> "ReactionCell" |SortMatchCellOpt -> "MatchCellOpt" |SortChanCell -> "ChanCell" |SortLbindList -> "LbindList" |SortNameVar 
-> "NameVar" |SortLengthCellOpt -> "LengthCellOpt" |SortListensCell -> "ListensCell" |SortBExp -> "BExp" |SortConsumableSend -> "ConsumableSend" |SortRidCellOpt -> "RidCellOpt" |SortMap -> "Map" |SortRecCellBag -> "RecCellBag" |SortRecCell -> "RecCell" |SortContCellOpt -> "ContCellOpt" |SortThreadsCellOpt -> "ThreadsCellOpt" |SortTuplespaceCell -> "TuplespaceCell" |SortNew -> "New" |SortStream -> "Stream" |SortThreadsCellFragment -> "ThreadsCellFragment" |SortListensCellFragment -> "ListensCellFragment" |SortWhoCell -> "WhoCell" |SortReceivesCellOpt -> "ReceivesCellOpt" |SortProc -> "Proc" |SortNameList -> "NameList" |SortGround -> "Ground" |SortString -> "String" |SortWhoCellOpt -> "WhoCellOpt" |SortFloat -> "Float" |SortChanList -> "ChanList" |SortSendsCell -> "SendsCell" |SortReactionCellOpt -> "ReactionCellOpt" |SortPbind -> "Pbind" |SortSingleRec -> "SingleRec" |SortThreadsCell -> "ThreadsCell" |SortMultiRec -> "MultiRec" |SortTypeCell -> "TypeCell" |SortVarsCell -> "VarsCell" |SortTypeCellOpt -> "TypeCellOpt" |SortSendCellFragment -> "SendCellFragment" |SortSchanCellOpt -> "SchanCellOpt" |SortTuplespaceCellOpt -> "TuplespaceCellOpt" |SortLbinds -> "Lbinds" |SortNumCell -> "NumCell" |SortWhereCell -> "WhereCell" |SortForgCellOpt -> "ForgCellOpt" |SortKVariable -> "KVariable" |SortBytes -> "Bytes" |SortWhatCell -> "WhatCell" |SortSendsCellOpt -> "SendsCellOpt" |SortNomoCellOpt -> "NomoCellOpt" |SortTupleCell -> "TupleCell" |SortIOError -> "IOError" |SortStringBuffer -> "StringBuffer" |SortRbindList -> "RbindList" |SortTCell -> "TCell" |SortBind -> "Bind" |SortMlidCellOpt -> "MlidCellOpt" |SortMsgCellOpt -> "MsgCellOpt" |SortMsgCell -> "MsgCell" |SortIdNum -> "IdNum" |SortKConfigVar -> "KConfigVar" |SortSidCellOpt -> "SidCellOpt" |SortPbinds -> "Pbinds" |SortJoinList -> "JoinList" |SortBinds -> "Binds" |SortForgCell -> "ForgCell" |SortProcList -> "ProcList" |SortId -> "Id" |SortList -> "List" |SortStateCell -> "StateCell" |SortReceive -> "Receive" |SortListenCellFragment -> "ListenCellFragment" |SortMatchCell -> "MatchCell" let print_klabel(c: klabel) : string = match c with |Lbl'Hash'argv -> "#argv" |LblisPbindList -> "isPbindList" |LblisThreadCell -> "isThreadCell" |LblMap'Coln'lookup -> "`Map:lookup`" |LblisChanList -> "isChanList" |Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle0{_}_GRHO-SYNTAX`" |Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#seek(_,_)_K-IO`" |LblisWhoCell -> "isWhoCell" |LblisIdNum -> "isIdNum" |LblsignExtendBitRangeInt -> "signExtendBitRangeInt" |Lbl_'EqlsEqls'Bool__BOOL -> "`_==Bool__BOOL`" |LblisSet -> "isSet" |LblisThreadCellBag -> "isThreadCellBag" |Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bindocce(_)_AUXFUN-SYNTAX`" |LblisNew -> "isNew" |Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX -> "`{[_;_]}_GRHO-SYNTAX`" |LblisStypeCell -> "isStypeCell" |Lbl_'_LT_Eqls'Set__SET -> "`_<=Set__SET`" |Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`_#in(_)_AUXFUN-SYNTAX`" |LblisIOError -> "isIOError" |Lbl'Hash'parse -> "#parse" |Lbl'Hash'EALREADY_K'Hyph'IO -> "`#EALREADY_K-IO`" |LblisRhoList -> "isRhoList" |LblmakeList -> "makeList" |Lbl'Hash'ESPIPE_K'Hyph'IO -> "`#ESPIPE_K-IO`" |Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#unlock(_,_)_K-IO`" |Lbl'Hash'ENOENT_K'Hyph'IO -> "`#ENOENT_K-IO`" |LblisProcs -> "isProcs" |Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX -> "`_<!__GRHO-SYNTAX`" |LblisTypeCell -> "isTypeCell" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_<=__GRHO-SYNTAX1_`" |LblisLbinds -> "isLbinds" |LblisLengthCell -> 
"isLengthCell" |LblnoStateCell -> "noStateCell" |LblisLbind -> "isLbind" |Lbl'Hash'ENOTTY_K'Hyph'IO -> "`#ENOTTY_K-IO`" |LblisForgCell -> "isForgCell" |Lbl'_LT_'forg'_GT_' -> "`<forg>`" |LblinitChanCell -> "initChanCell" |LblisProcList -> "isProcList" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_-__GRHO-SYNTAX0_`" |LblinitRnumCell -> "initRnumCell" |LblisRidCellOpt -> "isRidCellOpt" |LblisReceivesCellFragment -> "isReceivesCellFragment" |Lbl'Hash'ENOTEMPTY_K'Hyph'IO -> "`#ENOTEMPTY_K-IO`" |LblisSidCellOpt -> "isSidCellOpt" |Lbl'Hash'EMSGSIZE_K'Hyph'IO -> "`#EMSGSIZE_K-IO`" |LblisKConfigVar -> "isKConfigVar" |LblisRhoMap -> "isRhoMap" |Lbl'Hash'ENETRESET_K'Hyph'IO -> "`#ENETRESET_K-IO`" |Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO -> "`#EAFNOSUPPORT_K-IO`" |LblnoTupleCell -> "noTupleCell" |LblnoSendsCell -> "noSendsCell" |Lbl'_LT_'thread'_GT_Hyph'fragment -> "`<thread>-fragment`" |LblisCell -> "isCell" |LblisPbind -> "isPbind" |Lbl'Hash'ENOMEM_K'Hyph'IO -> "`#ENOMEM_K-IO`" |Lblvalues -> "values" |Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle+{_}_GRHO-SYNTAX`" |LblisThreadCellFragment -> "isThreadCellFragment" |LblisStateCellOpt -> "isStateCellOpt" |LblinitLidCell -> "initLidCell" |LblisNameList -> "isNameList" |LblisListensCellOpt -> "isListensCellOpt" |LblisTuplespaceCellOpt -> "isTuplespaceCellOpt" |Lbl'Hash'ENXIO_K'Hyph'IO -> "`#ENXIO_K-IO`" |Lbl_'_LT_'Int__INT -> "`_<Int__INT`" |LblnoTypeCell -> "noTypeCell" |LblisSendCell -> "isSendCell" |Lbl'Hash'configuration_K'Hyph'REFLECTION -> "`#configuration_K-REFLECTION`" |LblisSendsCell -> "isSendsCell" |LblisFloat -> "isFloat" |Lbl'_LT_'msg'_GT_' -> "`<msg>`" |LblisContCell -> "isContCell" |LblchrChar -> "chrChar" |Lbl_divInt__INT -> "`_divInt__INT`" |Lbl'Hash'EROFS_K'Hyph'IO -> "`#EROFS_K-IO`" |LblisWhereCellOpt -> "isWhereCellOpt" |Lbl_ThreadCellBag_ -> "`_ThreadCellBag_`" |LblisProc -> "isProc" |LblisListensCell -> "isListensCell" |Lbl_'Plus'Int_ -> "`_+Int_`" |LblisReactionCell -> "isReactionCell" |Lbl_orBool__BOOL -> "`_orBool__BOOL`" |Lbl'_LT_'sid'_GT_' -> "`<sid>`" |Lbl'Hash'ENFILE_K'Hyph'IO -> "`#ENFILE_K-IO`" |LblupdateMap -> "updateMap" |LblisReactionCellOpt -> "isReactionCellOpt" |Lbl_'SCln'__GRHO'Hyph'SYNTAX -> "`_;__GRHO-SYNTAX`" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_<=__GRHO-SYNTAX0_`" |LblisNomoCell -> "isNomoCell" |LblnoWhereCell -> "noWhereCell" |LblisJoinList -> "isJoinList" |LblInt2String -> "`Int2String`" |Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#stype(_)_AUXFUN-SYNTAX`" |Lbl_'EqlsSlshEqls'K_ -> "`_=/=K_`" |LblisNumCell -> "isNumCell" |LblisRecCell -> "isRecCell" |Lbl_List_ -> "`_List_`" |LblisMultiRec -> "isMultiRec" |LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`toString(_)_AUXFUN-SYNTAX`" |Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#open(_,_)_K-IO`" |Lbl'Hash'EOPNOTSUPP_K'Hyph'IO -> "`#EOPNOTSUPP_K-IO`" |Lbl_'PipeHyph_GT_'_ -> "`_|->_`" |LblisMatchCellOpt -> "isMatchCellOpt" |Lbl_'Hyph'Map__MAP -> "`_-Map__MAP`" |Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#length(_)_AUXFUN-SYNTAX`" |LblisRhoSet -> "isRhoSet" |Lbl'_LT_'chan'_GT_' -> "`<chan>`" |LblnoForgCell -> "noForgCell" |LblisReceivesCellOpt -> "isReceivesCellOpt" |Lbl'Hash'EMLINK_K'Hyph'IO -> "`#EMLINK_K-IO`" |LblisListenCellBag -> "isListenCellBag" |Lbl'Hash'sort -> "#sort" |Lbl_'EqlsEqls'K_ -> "`_==K_`" |LblisPar -> "isPar" |Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "`unforgeable(_)_GRHO-SYNTAX`" |LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "`replaceFirst(_,_,_)_STRING`" 
|LblnoListensCell -> "noListensCell" |LblnoStypeCell -> "noStypeCell" |Lbl'Hash'EOVERFLOW_K'Hyph'IO -> "`#EOVERFLOW_K-IO`" |Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#putc(_,_)_K-IO`" |LblisThreadsCellOpt -> "isThreadsCellOpt" |Lbl'Stop'Map -> "`.Map`" |LblisVarsCell -> "isVarsCell" |Lbl_'EqlsSlshEqls'String__STRING -> "`_=/=String__STRING`" |Lbl'Hash'EIO_K'Hyph'IO -> "`#EIO_K-IO`" |LblinitMlidCell -> "initMlidCell" |Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "`uri(_)_GRHO-SYNTAX`" |LblisSendCellBag -> "isSendCellBag" |LblisInt -> "isInt" |Lbl'Hash'EFAULT_K'Hyph'IO -> "`#EFAULT_K-IO`" |Lbl'Hash'fresh -> "#fresh" |Lbl_impliesBool__BOOL -> "`_impliesBool__BOOL`" |Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chanlist(_)_AUXFUN-SYNTAX`" |Lbl_'Star'Int__INT -> "`_*Int__INT`" |Lbl'_LT_'T'_GT_' -> "`<T>`" |Lbl'Hash'Thread -> "#Thread" |LblmaxInt'LPar'_'Comm'_'RPar'_INT -> "`maxInt(_,_)_INT`" |LblinitReceivesCell -> "initReceivesCell" |Lbl'Hash'EDEADLK_K'Hyph'IO -> "`#EDEADLK_K-IO`" |Lbl_'_LT_Eqls'String__STRING -> "`_<=String__STRING`" |LblListenCellBagItem -> "`ListenCellBagItem`" |LblisNames -> "isNames" |Lbl'Hash'ENOBUFS_K'Hyph'IO -> "`#ENOBUFS_K-IO`" |Lbl_Map_ -> "`_Map_`" |Lbl_'Hyph'Int__INT -> "`_-Int__INT`" |Lbl'Hash'EOF_K'Hyph'IO -> "`#EOF_K-IO`" |Lbl_'BangBang'__GRHO'Hyph'SYNTAX -> "`_!!__GRHO-SYNTAX`" |LblisReactionCellFragment -> "isReactionCellFragment" |Lbl_and__GRHO'Hyph'SYNTAX -> "`_and__GRHO-SYNTAX`" |Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#lengths(_)_AUXFUN-SYNTAX`" |LblFloat2String -> "`Float2String`" |Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#append(_;_)_AUXFUN-SYNTAX`" |LblinitWhoCell -> "initWhoCell" |Lbl'_LT_'listen'_GT_' -> "`<listen>`" |LblnoReceivesCell -> "noReceivesCell" |LblsizeList -> "sizeList" |Lbl'Hash'EWOULDBLOCK_K'Hyph'IO -> "`#EWOULDBLOCK_K-IO`" |LblString2Id -> "`String2Id`" |LblinitTuplespaceCell -> "initTuplespaceCell" |Lbl'_LT_'thread'_GT_' -> "`<thread>`" |Lbl'_LT_'vars'_GT_' -> "`<vars>`" |Lbl_'EqlsSlshEqls'Bool__BOOL -> "`_=/=Bool__BOOL`" |Lbl'_LT_'length'_GT_' -> "`<length>`" |LblisCollection -> "isCollection" |Lbl'Hash'EFBIG_K'Hyph'IO -> "`#EFBIG_K-IO`" |LblisTCell -> "isTCell" |Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION -> "`_[_/_]_SUBSTITUTION`" |Lbl'Hash'EBADF_K'Hyph'IO -> "`#EBADF_K-IO`" |Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#msg(_)_AUXFUN-SYNTAX`" |LblnoLengthCell -> "noLengthCell" |LblinitNomoCell -> "initNomoCell" |Lbl'Hash'EPIPE_K'Hyph'IO -> "`#EPIPE_K-IO`" |Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bvar(_)_AUXFUN-SYNTAX`" |LblnoContCell -> "noContCell" |LblisRhoTuple -> "isRhoTuple" |Lbl_'Xor_Perc'Int___INT -> "`_^%Int___INT`" |LblisMsgCellOpt -> "isMsgCellOpt" |Lbl'_LT_'reaction'_GT_' -> "`<reaction>`" |LblrfindString -> "rfindString" |LblisChanCellOpt -> "isChanCellOpt" |Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO -> "`#ESOCKTNOSUPPORT_K-IO`" |LblnoNomoCell -> "noNomoCell" |Lbl'Hash'EINTR_K'Hyph'IO -> "`#EINTR_K-IO`" |Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO -> "`#stat(_)_K-IO`" |LblupdateList -> "updateList" |LblisLidCell -> "isLidCell" |LblisMsgCell -> "isMsgCell" |Lbl'Stop'SendCellBag -> "`.SendCellBag`" |LblinitContCell -> "initContCell" |LblnoReactCell -> "noReactCell" |LblcategoryChar -> "categoryChar" |LblSet'Coln'difference -> "`Set:difference`" |LblisName -> "isName" |Lbl'Hash'EHOSTUNREACH_K'Hyph'IO -> "`#EHOSTUNREACH_K-IO`" |Lbl'Hash'ECONNRESET_K'Hyph'IO -> "`#ECONNRESET_K-IO`" |LblisBundle -> "isBundle" |LblisKCellOpt -> "isKCellOpt" |LblisForgCellOpt -> "isForgCellOpt" 
|Lbl'Hash'ECHILD_K'Hyph'IO -> "`#ECHILD_K-IO`" |LblisRecCellFragment -> "isRecCellFragment" |LblisUnconsumableSend -> "isUnconsumableSend" |LblisLbindList -> "isLbindList" |LblString2Float -> "`String2Float`" |LblMap'Coln'lookupOrDefault -> "`Map:lookupOrDefault`" |Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL -> "`#if_#then_#else_#fi_K-EQUAL`" |Lbl'_LT_'tuplespace'_GT_' -> "`<tuplespace>`" |Lbl'Hash'ENOTCONN_K'Hyph'IO -> "`#ENOTCONN_K-IO`" |Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX -> "`_<-__GRHO-SYNTAX`" |Lbl'_LT_'what'_GT_' -> "`<what>`" |Lbl'Hash'stdout_K'Hyph'IO -> "`#stdout_K-IO`" |Lbl_'And'Int__INT -> "`_&Int__INT`" |Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#rtype(_)_AUXFUN-SYNTAX`" |Lbl'_LT_'tuple'_GT_' -> "`<tuple>`" |Lbl'Hash'ENAMETOOLONG_K'Hyph'IO -> "`#ENAMETOOLONG_K-IO`" |Lbllog2Int -> "log2Int" |Lbl_'EqlsSlshEqls'Int__INT -> "`_=/=Int__INT`" |Lbl'Hash'stdin_K'Hyph'IO -> "`#stdin_K-IO`" |Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chanlen(_)_AUXFUN-SYNTAX`" |Lbl_'_GT_Eqls'String__STRING -> "`_>=String__STRING`" |LblnoSchanCell -> "noSchanCell" |Lbl'_LT_'react'_GT_' -> "`<react>`" |LblisBindOcc -> "isBindOcc" |LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "`Set(_)_GRHO-SYNTAX`" |LblsizeMap -> "sizeMap" |LblisWhereCell -> "isWhereCell" |LblnoMsgCell -> "noMsgCell" |LblisId -> "isId" |LblsubstrString -> "substrString" |LblnoTuplespaceCell -> "noTuplespaceCell" |Lbl_'Comm'__GRHO'Hyph'SYNTAX -> "`_,__GRHO-SYNTAX`" |Lbl_'Bang'__GRHO'Hyph'SYNTAX -> "`_!__GRHO-SYNTAX`" |LblisTypeCellOpt -> "isTypeCellOpt" |Lblsize -> "size" |Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bindocc(_)_AUXFUN-SYNTAX`" |LblnoMatchCell -> "noMatchCell" |LblisBind -> "isBind" |Lbl'Hash'ENETUNREACH_K'Hyph'IO -> "`#ENETUNREACH_K-IO`" |Lbl'Hash'EPROTOTYPE_K'Hyph'IO -> "`#EPROTOTYPE_K-IO`" |Lbl'Star'__GRHO'Hyph'SYNTAX -> "`*__GRHO-SYNTAX`" |Lbl'_LT_'who'_GT_' -> "`<who>`" |Lbl_'Coln'__GRHO'Hyph'SYNTAX -> "`_:__GRHO-SYNTAX`" |LblnoThreadsCell -> "noThreadsCell" |Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO -> "`#systemResult(_,_,_)_K-IO`" |Lbl'_LT_'listens'_GT_' -> "`<listens>`" |LblsrandInt -> "srandInt" |Lbl'Hash'EINVAL_K'Hyph'IO -> "`#EINVAL_K-IO`" |Lbl'_LT_'rid'_GT_' -> "`<rid>`" |LblisKItem -> "isKItem" |Lbl'Hash'ENODEV_K'Hyph'IO -> "`#ENODEV_K-IO`" |Lbl'Hash'length__AUXFUN'Hyph'SYNTAX -> "`#length__AUXFUN-SYNTAX`" |LblisRecCellBag -> "isRecCellBag" |LblList'Coln'set -> "`List:set`" |LblisUri -> "isUri" |LblString2Base -> "`String2Base`" |Lbl'Hash'noparse_K'Hyph'IO -> "`#noparse_K-IO`" |Lblkeys -> "keys" |LblinitRecCell -> "initRecCell" |Lbl'Hash'ESHUTDOWN_K'Hyph'IO -> "`#ESHUTDOWN_K-IO`" |LblisGround -> "isGround" |Lbl'Stop'ThreadCellBag -> "`.ThreadCellBag`" |LblThreadCellBagItem -> "`ThreadCellBagItem`" |Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#cont(_)_AUXFUN-SYNTAX`" |Lbl'Hash'ENOTDIR_K'Hyph'IO -> "`#ENOTDIR_K-IO`" |Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`new_in{_}_GRHO-SYNTAX`" |Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chan(_)_AUXFUN-SYNTAX`" |Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX -> "`_#in__AUXFUN-SYNTAX`" |LblinitSendCell -> "initSendCell" |Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`{_}_GRHO-SYNTAX`" |Lbl'_LT_'nomo'_GT_' -> "`<nomo>`" |Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bnum(_)_AUXFUN-SYNTAX`" |Lbl_'_LT_Eqls'Int__INT -> "`_<=Int__INT`" |LblnotBool_ -> "`notBool_`" |LblnoNumCell -> "noNumCell" |Lbl'Hash'stderr_K'Hyph'IO -> "`#stderr_K-IO`" |LblnoKCell -> "noKCell" |Lbl'Hash'EBUSY_K'Hyph'IO -> "`#EBUSY_K-IO`" |Lbl'Hash'getenv -> 
"#getenv" |LblisTuplespaceCell -> "isTuplespaceCell" |LblisBinds -> "isBinds" |LblnoReactionCell -> "noReactionCell" |LblintersectSet -> "intersectSet" |Lbl_in_keys'LPar'_'RPar'_MAP -> "`_in_keys(_)_MAP`" |LblinitMsgCell -> "initMsgCell" |Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#bind(_)_AUXFUN-SYNTAX`" |LblfindChar -> "findChar" |Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX -> "`[_]_GRHO-SYNTAX`" |LblSet'Coln'in -> "`Set:in`" |LblisK -> "isK" |LblisWhoCellOpt -> "isWhoCellOpt" |Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ -> "`#freezernot__GRHO-SYNTAX0_`" |LblisReceivesCell -> "isReceivesCell" |LblString2Int -> "`String2Int`" |Lbl'_LT_'where'_GT_' -> "`<where>`" |LblinitWhereCell -> "initWhereCell" |LblinitThreadCell -> "initThreadCell" |LblisSingleRec -> "isSingleRec" |LblisThreadsCell -> "isThreadsCell" |LblisTupleCellOpt -> "isTupleCellOpt" |LblisEval -> "isEval" |LblisWhatCell -> "isWhatCell" |Lbl'Hash'ENETDOWN_K'Hyph'IO -> "`#ENETDOWN_K-IO`" |LblisListenCellFragment -> "isListenCellFragment" |Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' -> "`_[_<-undef]`" |Lbl'Hash'Bottom -> "#Bottom" |Lbl_'EqlsEqls'Int_ -> "`_==Int_`" |Lbl_andThenBool__BOOL -> "`_andThenBool__BOOL`" |LblisPbinds -> "isPbinds" |Lbl'Hash'parseInModule -> "#parseInModule" |LblNil_GRHO'Hyph'SYNTAX -> "`Nil_GRHO-SYNTAX`" |LblisAExp -> "isAExp" |Lbl'Hash'system -> "#system" |Lbl'_LT_'mlid'_GT_' -> "`<mlid>`" |LblinitRidCell -> "initRidCell" |LblisString -> "isString" |Lbl_'Perc'Int__INT -> "`_%Int__INT`" |Lbl_'_GT__GT_'Int__INT -> "`_>>Int__INT`" |Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX -> "`_:_,__GRHO-SYNTAX`" |LblnoWhoCell -> "noWhoCell" |LblisList -> "isList" |Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO -> "`#EPROTONOSUPPORT_K-IO`" |LblisTuplespaceCellFragment -> "isTuplespaceCellFragment" |LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "`replaceAll(_,_,_)_STRING`" |LblisBindList -> "isBindList" |LblnoChanCell -> "noChanCell" |Lbl'Hash'EDESTADDRREQ_K'Hyph'IO -> "`#EDESTADDRREQ_K-IO`" |Lbl'Hash'EADDRINUSE_K'Hyph'IO -> "`#EADDRINUSE_K-IO`" |LblnoRnumCell -> "noRnumCell" |Lbl_'Xor_'Int__INT -> "`_^Int__INT`" |LblfindString -> "findString" |Lbl'_LT_'k'_GT_' -> "`<k>`" |Lbl'_LT_'reaction'_GT_Hyph'fragment -> "`<reaction>-fragment`" |LblabsInt -> "absInt" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_+__GRHO-SYNTAX1_`" |Lbl'Hash'EHOSTDOWN_K'Hyph'IO -> "`#EHOSTDOWN_K-IO`" |Lbl_'_GT_'String__STRING -> "`_>String__STRING`" |LblisSendsCellFragment -> "isSendsCellFragment" |LblinitLengthCell -> "initLengthCell" |Lbl_'EqlsEqls'String__STRING -> "`_==String__STRING`" |LblisRnumCellOpt -> "isRnumCellOpt" |LblisSend -> "isSend" |LblisKResult -> "isKResult" |LblinitStypeCell -> "initStypeCell" |LblList'Coln'get -> "`List:get`" |Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO -> "`#lstat(_)_K-IO`" |LblSendCellBagItem -> "`SendCellBagItem`" |Lbltuple -> "tuple" |Lbl'_LT_'id'_GT_' -> "`<id>`" |LblSetItem -> "`SetItem`" |Lbl'_LT_'receives'_GT_' -> "`<receives>`" |LblisRhoKVPairs -> "isRhoKVPairs" |LblunsignedBytes -> "unsignedBytes" |LblisMsidCellOpt -> "isMsidCellOpt" |Lbl'Stop'List -> "`.List`" |Lbl'Hash'ENOLCK_K'Hyph'IO -> "`#ENOLCK_K-IO`" |LblisSendsCellOpt -> "isSendsCellOpt" |Lbl'Hash'ECONNABORTED_K'Hyph'IO -> "`#ECONNABORTED_K-IO`" |LblrandInt -> "randInt" |Lbl'Hash'EXDEV_K'Hyph'IO -> "`#EXDEV_K-IO`" |Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO -> "`#close(_)_K-IO`" |Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX -> "`_;_;;__GRHO-SYNTAX`" |Lblkeys_list'LPar'_'RPar'_MAP -> "`keys_list(_)_MAP`" |LblfreshId -> "freshId" |LblinitTypeCell -> "initTypeCell" 
|Lbl_orElseBool__BOOL -> "`_orElseBool__BOOL`" |LblisSchanCellOpt -> "isSchanCellOpt" |Lbl'Hash'EISDIR_K'Hyph'IO -> "`#EISDIR_K-IO`" |Lbl'_LT_'cont'_GT_' -> "`<cont>`" |LblList'Coln'range -> "`List:range`" |LblinitTupleCell -> "initTupleCell" |LblnoIdCell -> "noIdCell" |LblisKCell -> "isKCell" |Lbl'Hash'unknownIOError -> "#unknownIOError" |Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ -> "`#freezer_and__GRHO-SYNTAX1_`" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_*__GRHO-SYNTAX0_`" |Lbl_'_GT_Eqls'Int__INT -> "`_>=Int__INT`" |LblisSendCellFragment -> "isSendCellFragment" |Lbl'Hash'ENOSYS_K'Hyph'IO -> "`#ENOSYS_K-IO`" |Lbl_'Pipe'__GRHO'Hyph'SYNTAX -> "`_|__GRHO-SYNTAX`" |Lbl'Hash'ECONNREFUSED_K'Hyph'IO -> "`#ECONNREFUSED_K-IO`" |Lbl'_LT_'sends'_GT_Hyph'fragment -> "`<sends>-fragment`" |Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#lock(_,_)_K-IO`" |Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO -> "`#EADDRNOTAVAIL_K-IO`" |LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING -> "`countAllOccurrences(_,_)_STRING`" |Lbl_'_GT_'Int__INT -> "`_>Int__INT`" |LblfillList -> "fillList" |Lbl'_AT_'__GRHO'Hyph'SYNTAX -> "`@__GRHO-SYNTAX`" |LblinitForgCell -> "initForgCell" |LblbitRangeInt -> "bitRangeInt" |Lbl_'_LT_'String__STRING -> "`_<String__STRING`" |Lbl'Hash'ThreadLocal -> "#ThreadLocal" |Lbl_xorBool__BOOL -> "`_xorBool__BOOL`" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ -> "`#freezer_+__GRHO-SYNTAX0_`" |LblinitReactCell -> "initReactCell" |Lbl'Stop'RecCellBag -> "`.RecCellBag`" |Lbl'_LT_'type'_GT_' -> "`<type>`" |Lbl'_LT_'listens'_GT_Hyph'fragment -> "`<listens>-fragment`" |Lbl_'Plus'__GRHO'Hyph'SYNTAX -> "`_+__GRHO-SYNTAX`" |Lbl'_LT_'lid'_GT_' -> "`<lid>`" |Lbl_ListenCellBag_ -> "`_ListenCellBag_`" |Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO -> "`#open(_)_K-IO`" |Lbl_'LSqB'_'RSqB'_SUBSTITUTION -> "`_[_]_SUBSTITUTION`" |LblnoMlidCell -> "noMlidCell" |Lbl_or__GRHO'Hyph'SYNTAX -> "`_or__GRHO-SYNTAX`" |Lbl'Hash'ETOOMANYREFS_K'Hyph'IO -> "`#ETOOMANYREFS_K-IO`" |Lbl'_LT_'threads'_GT_Hyph'fragment -> "`<threads>-fragment`" |LblinitListensCell -> "initListensCell" |Lbl'Hash'ENOSPC_K'Hyph'IO -> "`#ENOSPC_K-IO`" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ -> "`#freezer_or__GRHO-SYNTAX0_`" |LblisChanCell -> "isChanCell" |LblisRnumCell -> "isRnumCell" |Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`#chanmany(_:_)_AUXFUN-SYNTAX`" |Lbl'Hash'logToFile -> "#logToFile" |Lbl'_LT_'rec'_GT_' -> "`<rec>`" |Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#read(_,_)_K-IO`" |LblnoLidCell -> "noLidCell" |LblisNameVar -> "isNameVar" |Lbl'_LT_'schan'_GT_' -> "`<schan>`" |LblbigEndianBytes -> "bigEndianBytes" |Lbl'_LT_'match'_GT_' -> "`<match>`" |LblId2String -> "`Id2String`" |LblinitListenCell -> "initListenCell" |Lbl'_LT_'num'_GT_' -> "`<num>`" |LblisContCellOpt -> "isContCellOpt" |LblisLidCellOpt -> "isLidCellOpt" |LblnoSidCell -> "noSidCell" |Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle{_}_GRHO-SYNTAX`" |LblMap'Coln'choice -> "`Map:choice`" |Lbl_Set_ -> "`_Set_`" |Lbl'Hash'EEXIST_K'Hyph'IO -> "`#EEXIST_K-IO`" |Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO -> "`#getc(_)_K-IO`" |LblisRidCell -> "isRidCell" |Lbl'_LT_'state'_GT_' -> "`<state>`" |LblisListenCell -> "isListenCell" |LblisBool -> "isBool" |Lbl'Tild'Int__INT -> "`~Int__INT`" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_*__GRHO-SYNTAX1_`" |LblordChar -> "ordChar" |LblinitIdCell -> "initIdCell" |Lbl_modInt__INT -> "`_modInt__INT`" |LblrfindChar -> "rfindChar" |LblisRbinds -> "isRbinds" |LblisMlidCellOpt -> "isMlidCellOpt" |Lbl'Hash'EAGAIN_K'Hyph'IO 
-> "`#EAGAIN_K-IO`" |Lbl'Stop'ListenCellBag -> "`.ListenCellBag`" |LblnoMsidCell -> "noMsidCell" |LblinitSchanCell -> "initSchanCell" |LbldirectionalityChar -> "directionalityChar" |LblisIdCell -> "isIdCell" |Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO -> "`#opendir(_)_K-IO`" |LblinitKCell -> "initKCell" |LblRecCellBagItem -> "`RecCellBagItem`" |Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`for(_){_}_GRHO-SYNTAX`" |LblisBExp -> "isBExp" |Lbl'Stop'Set -> "`.Set`" |LblisChanLen -> "isChanLen" |LblisStateCell -> "isStateCell" |Lbl'Hash'EACCES_K'Hyph'IO -> "`#EACCES_K-IO`" |Lbl'Hash'ELOOP_K'Hyph'IO -> "`#ELOOP_K-IO`" |Lbl'Hash'EDOM_K'Hyph'IO -> "`#EDOM_K-IO`" |LblisSidCell -> "isSidCell" |LblremoveAll -> "removeAll" |LblnoRidCell -> "noRidCell" |Lbl'_LT_'threads'_GT_' -> "`<threads>`" |Lbl_andBool_ -> "`_andBool_`" |Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "`_#ine(_)_AUXFUN-SYNTAX`" |LblisThreadsCellFragment -> "isThreadsCellFragment" |LblisLengthCellOpt -> "isLengthCellOpt" |LblisRbindList -> "isRbindList" |Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO -> "`#EPFNOSUPPORT_K-IO`" |LblisConsumableSend -> "isConsumableSend" |LbllengthString -> "lengthString" |Lbl_'Hyph'__GRHO'Hyph'SYNTAX -> "`_-__GRHO-SYNTAX`" |Lbl'_LT_'listen'_GT_Hyph'fragment -> "`<listen>-fragment`" |LblisReceive -> "isReceive" |Lbl'Hash'ERANGE_K'Hyph'IO -> "`#ERANGE_K-IO`" |LblinitTCell -> "initTCell" |LblsignedBytes -> "signedBytes" |LblFloatFormat -> "`FloatFormat`" |LblisMsidCell -> "isMsidCell" |Lbl'Hash'ENOTSOCK_K'Hyph'IO -> "`#ENOTSOCK_K-IO`" |Lbl_'Plus'String__STRING -> "`_+String__STRING`" |Lbl_RecCellBag_ -> "`_RecCellBag_`" |Lbl_'Pipe'Int__INT -> "`_|Int__INT`" |Lbl'Hash'EISCONN_K'Hyph'IO -> "`#EISCONN_K-IO`" |LblisKVariable -> "isKVariable" |Lbl_dividesInt__INT -> "`_dividesInt__INT`" |Lbl'_LT_'rec'_GT_Hyph'fragment -> "`<rec>-fragment`" |Lbl'_LT_'tuplespace'_GT_Hyph'fragment -> "`<tuplespace>-fragment`" |Lbl'_LT_'T'_GT_Hyph'fragment -> "`<T>-fragment`" |Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX -> "`_<=__GRHO-SYNTAX`" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ -> "`#freezer_or__GRHO-SYNTAX1_`" |LblisWhatCellOpt -> "isWhatCellOpt" |LblSet'Coln'choice -> "`Set:choice`" |LblisMatchCell -> "isMatchCell" |LblisListensCellFragment -> "isListensCellFragment" |Lbl'Hash'buffer -> "#buffer" |Lbl_'Star'__GRHO'Hyph'SYNTAX -> "`_*__GRHO-SYNTAX`" |LblinitNumCell -> "initNumCell" |LblfreshInt -> "freshInt" |Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#write(_,_)_K-IO`" |Lbl'Hash'ETIMEDOUT_K'Hyph'IO -> "`#ETIMEDOUT_K-IO`" |LblinitSidCell -> "initSidCell" |LblisIdCellOpt -> "isIdCellOpt" |Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX -> "`(_;_)_GRHO-SYNTAX`" |Lbl'_LT_'sends'_GT_' -> "`<sends>`" |LblisSchanCell -> "isSchanCell" |Lbl_xorInt__INT -> "`_xorInt__INT`" |Lbl'Hash'EINPROGRESS_K'Hyph'IO -> "`#EINPROGRESS_K-IO`" |LblinitVarsCell -> "initVarsCell" |LblinitStateCell -> "initStateCell" |LblisNumCellOpt -> "isNumCellOpt" |LblinitMatchCell -> "initMatchCell" |LblisMlidCell -> "isMlidCell" |Lbl'Hash'ENOPROTOOPT_K'Hyph'IO -> "`#ENOPROTOOPT_K-IO`" |LbllittleEndianBytes -> "littleEndianBytes" |Lbl'Hash'EPERM_K'Hyph'IO -> "`#EPERM_K-IO`" |LblnoWhatCell -> "noWhatCell" |LblinitWhatCell -> "initWhatCell" |Lbl'_LT_'send'_GT_' -> "`<send>`" |Lbl_'_LT__LT_'Int__INT -> "`_<<Int__INT`" |LblBase2String -> "`Base2String`" |LblListItem -> "`ListItem`" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ -> "`#freezer_-__GRHO-SYNTAX1_`" |LblisStream -> "isStream" |Lbl_'_LT_Eqls'Map__MAP -> "`_<=Map__MAP`" |LblnewUUID_STRING -> "`newUUID_STRING`" |LblnoVarsCell -> 
"noVarsCell" |LblinitThreadsCell -> "initThreadsCell" |Lbl_SendCellBag_ -> "`_SendCellBag_`" |Lbl'Hash'ESRCH_K'Hyph'IO -> "`#ESRCH_K-IO`" |Lbl'Hash'EMFILE_K'Hyph'IO -> "`#EMFILE_K-IO`" |Lblproject'Coln'Proc -> "`project:Proc`" |LblisReactCellOpt -> "isReactCellOpt" |Lbl'_LT_'receives'_GT_Hyph'fragment -> "`<receives>-fragment`" |Lbl'_LT_'stype'_GT_' -> "`<stype>`" |Lbl_inList_ -> "`_inList_`" |LblisVarsCellOpt -> "isVarsCellOpt" |Lbl'Hash'ENOEXEC_K'Hyph'IO -> "`#ENOEXEC_K-IO`" |LblminInt'LPar'_'Comm'_'RPar'_INT -> "`minInt(_,_)_INT`" |LblinitReactionCell -> "initReactionCell" |LblisMap -> "isMap" |LblisTupleCell -> "isTupleCell" |LblisReactCell -> "isReactCell" |LblinitMsidCell -> "initMsidCell" |Lbl'_LT_'rnum'_GT_' -> "`<rnum>`" |LblisNomoCellOpt -> "isNomoCellOpt" |LblisStypeCellOpt -> "isStypeCellOpt" |LblisTCellFragment -> "isTCellFragment" |Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING -> "`replace(_,_,_,_)_STRING`" |Lbl_'Slsh'Int__INT -> "`_/Int__INT`" |Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP -> "`_[_<-_]_MAP`" |LblisRbind -> "isRbind" |Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO -> "`#tell(_)_K-IO`" |Lbl'_LT_'msid'_GT_' -> "`<msid>`" |LblinitSendsCell -> "initSendsCell" |Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "`bundle-{_}_GRHO-SYNTAX`" |LblgetKLabel -> "getKLabel" |Lblnot__GRHO'Hyph'SYNTAX -> "`not__GRHO-SYNTAX`" |Lbl'Hash'E2BIG_K'Hyph'IO -> "`#E2BIG_K-IO`" |Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "`#seekEnd(_,_)_K-IO`" |Lbl'_LT_'send'_GT_Hyph'fragment -> "`<send>-fragment`" let print_klabel_string(c: klabel) : string = match c with |Lbl'Hash'argv -> "#argv" |LblisPbindList -> "isPbindList" |LblisThreadCell -> "isThreadCell" |LblMap'Coln'lookup -> "Map:lookup" |LblisChanList -> "isChanList" |Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle0{_}_GRHO-SYNTAX" |Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#seek(_,_)_K-IO" |LblisWhoCell -> "isWhoCell" |LblisIdNum -> "isIdNum" |LblsignExtendBitRangeInt -> "signExtendBitRangeInt" |Lbl_'EqlsEqls'Bool__BOOL -> "_==Bool__BOOL" |LblisSet -> "isSet" |LblisThreadCellBag -> "isThreadCellBag" |Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bindocce(_)_AUXFUN-SYNTAX" |LblisNew -> "isNew" |Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX -> "{[_;_]}_GRHO-SYNTAX" |LblisStypeCell -> "isStypeCell" |Lbl_'_LT_Eqls'Set__SET -> "_<=Set__SET" |Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "_#in(_)_AUXFUN-SYNTAX" |LblisIOError -> "isIOError" |Lbl'Hash'parse -> "#parse" |Lbl'Hash'EALREADY_K'Hyph'IO -> "#EALREADY_K-IO" |LblisRhoList -> "isRhoList" |LblmakeList -> "makeList" |Lbl'Hash'ESPIPE_K'Hyph'IO -> "#ESPIPE_K-IO" |Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#unlock(_,_)_K-IO" |Lbl'Hash'ENOENT_K'Hyph'IO -> "#ENOENT_K-IO" |LblisProcs -> "isProcs" |Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX -> "_<!__GRHO-SYNTAX" |LblisTypeCell -> "isTypeCell" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ -> "#freezer_<=__GRHO-SYNTAX1_" |LblisLbinds -> "isLbinds" |LblisLengthCell -> "isLengthCell" |LblnoStateCell -> "noStateCell" |LblisLbind -> "isLbind" |Lbl'Hash'ENOTTY_K'Hyph'IO -> "#ENOTTY_K-IO" |LblisForgCell -> "isForgCell" |Lbl'_LT_'forg'_GT_' -> "<forg>" |LblinitChanCell -> "initChanCell" |LblisProcList -> "isProcList" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ -> "#freezer_-__GRHO-SYNTAX0_" |LblinitRnumCell -> "initRnumCell" |LblisRidCellOpt -> "isRidCellOpt" |LblisReceivesCellFragment -> "isReceivesCellFragment" |Lbl'Hash'ENOTEMPTY_K'Hyph'IO -> "#ENOTEMPTY_K-IO" |LblisSidCellOpt -> "isSidCellOpt" |Lbl'Hash'EMSGSIZE_K'Hyph'IO -> 
"#EMSGSIZE_K-IO" |LblisKConfigVar -> "isKConfigVar" |LblisRhoMap -> "isRhoMap" |Lbl'Hash'ENETRESET_K'Hyph'IO -> "#ENETRESET_K-IO" |Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO -> "#EAFNOSUPPORT_K-IO" |LblnoTupleCell -> "noTupleCell" |LblnoSendsCell -> "noSendsCell" |Lbl'_LT_'thread'_GT_Hyph'fragment -> "<thread>-fragment" |LblisCell -> "isCell" |LblisPbind -> "isPbind" |Lbl'Hash'ENOMEM_K'Hyph'IO -> "#ENOMEM_K-IO" |Lblvalues -> "values" |Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle+{_}_GRHO-SYNTAX" |LblisThreadCellFragment -> "isThreadCellFragment" |LblisStateCellOpt -> "isStateCellOpt" |LblinitLidCell -> "initLidCell" |LblisNameList -> "isNameList" |LblisListensCellOpt -> "isListensCellOpt" |LblisTuplespaceCellOpt -> "isTuplespaceCellOpt" |Lbl'Hash'ENXIO_K'Hyph'IO -> "#ENXIO_K-IO" |Lbl_'_LT_'Int__INT -> "_<Int__INT" |LblnoTypeCell -> "noTypeCell" |LblisSendCell -> "isSendCell" |Lbl'Hash'configuration_K'Hyph'REFLECTION -> "#configuration_K-REFLECTION" |LblisSendsCell -> "isSendsCell" |LblisFloat -> "isFloat" |Lbl'_LT_'msg'_GT_' -> "<msg>" |LblisContCell -> "isContCell" |LblchrChar -> "chrChar" |Lbl_divInt__INT -> "_divInt__INT" |Lbl'Hash'EROFS_K'Hyph'IO -> "#EROFS_K-IO" |LblisWhereCellOpt -> "isWhereCellOpt" |Lbl_ThreadCellBag_ -> "_ThreadCellBag_" |LblisProc -> "isProc" |LblisListensCell -> "isListensCell" |Lbl_'Plus'Int_ -> "_+Int_" |LblisReactionCell -> "isReactionCell" |Lbl_orBool__BOOL -> "_orBool__BOOL" |Lbl'_LT_'sid'_GT_' -> "<sid>" |Lbl'Hash'ENFILE_K'Hyph'IO -> "#ENFILE_K-IO" |LblupdateMap -> "updateMap" |LblisReactionCellOpt -> "isReactionCellOpt" |Lbl_'SCln'__GRHO'Hyph'SYNTAX -> "_;__GRHO-SYNTAX" |Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ -> "#freezer_<=__GRHO-SYNTAX0_" |LblisNomoCell -> "isNomoCell" |LblnoWhereCell -> "noWhereCell" |LblisJoinList -> "isJoinList" |LblInt2String -> "Int2String" |Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#stype(_)_AUXFUN-SYNTAX" |Lbl_'EqlsSlshEqls'K_ -> "_=/=K_" |LblisNumCell -> "isNumCell" |LblisRecCell -> "isRecCell" |Lbl_List_ -> "_List_" |LblisMultiRec -> "isMultiRec" |LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "toString(_)_AUXFUN-SYNTAX" |Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#open(_,_)_K-IO" |Lbl'Hash'EOPNOTSUPP_K'Hyph'IO -> "#EOPNOTSUPP_K-IO" |Lbl_'PipeHyph_GT_'_ -> "_|->_" |LblisMatchCellOpt -> "isMatchCellOpt" |Lbl_'Hyph'Map__MAP -> "_-Map__MAP" |Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#length(_)_AUXFUN-SYNTAX" |LblisRhoSet -> "isRhoSet" |Lbl'_LT_'chan'_GT_' -> "<chan>" |LblnoForgCell -> "noForgCell" |LblisReceivesCellOpt -> "isReceivesCellOpt" |Lbl'Hash'EMLINK_K'Hyph'IO -> "#EMLINK_K-IO" |LblisListenCellBag -> "isListenCellBag" |Lbl'Hash'sort -> "#sort" |Lbl_'EqlsEqls'K_ -> "_==K_" |LblisPar -> "isPar" |Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "unforgeable(_)_GRHO-SYNTAX" |LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "replaceFirst(_,_,_)_STRING" |LblnoListensCell -> "noListensCell" |LblnoStypeCell -> "noStypeCell" |Lbl'Hash'EOVERFLOW_K'Hyph'IO -> "#EOVERFLOW_K-IO" |Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#putc(_,_)_K-IO" |LblisThreadsCellOpt -> "isThreadsCellOpt" |Lbl'Stop'Map -> ".Map" |LblisVarsCell -> "isVarsCell" |Lbl_'EqlsSlshEqls'String__STRING -> "_=/=String__STRING" |Lbl'Hash'EIO_K'Hyph'IO -> "#EIO_K-IO" |LblinitMlidCell -> "initMlidCell" |Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "uri(_)_GRHO-SYNTAX" |LblisSendCellBag -> "isSendCellBag" |LblisInt -> "isInt" |Lbl'Hash'EFAULT_K'Hyph'IO -> "#EFAULT_K-IO" |Lbl'Hash'fresh -> "#fresh" |Lbl_impliesBool__BOOL -> 
"_impliesBool__BOOL" |Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chanlist(_)_AUXFUN-SYNTAX" |Lbl_'Star'Int__INT -> "_*Int__INT" |Lbl'_LT_'T'_GT_' -> "<T>" |Lbl'Hash'Thread -> "#Thread" |LblmaxInt'LPar'_'Comm'_'RPar'_INT -> "maxInt(_,_)_INT" |LblinitReceivesCell -> "initReceivesCell" |Lbl'Hash'EDEADLK_K'Hyph'IO -> "#EDEADLK_K-IO" |Lbl_'_LT_Eqls'String__STRING -> "_<=String__STRING" |LblListenCellBagItem -> "ListenCellBagItem" |LblisNames -> "isNames" |Lbl'Hash'ENOBUFS_K'Hyph'IO -> "#ENOBUFS_K-IO" |Lbl_Map_ -> "_Map_" |Lbl_'Hyph'Int__INT -> "_-Int__INT" |Lbl'Hash'EOF_K'Hyph'IO -> "#EOF_K-IO" |Lbl_'BangBang'__GRHO'Hyph'SYNTAX -> "_!!__GRHO-SYNTAX" |LblisReactionCellFragment -> "isReactionCellFragment" |Lbl_and__GRHO'Hyph'SYNTAX -> "_and__GRHO-SYNTAX" |Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#lengths(_)_AUXFUN-SYNTAX" |LblFloat2String -> "Float2String" |Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#append(_;_)_AUXFUN-SYNTAX" |LblinitWhoCell -> "initWhoCell" |Lbl'_LT_'listen'_GT_' -> "<listen>" |LblnoReceivesCell -> "noReceivesCell" |LblsizeList -> "sizeList" |Lbl'Hash'EWOULDBLOCK_K'Hyph'IO -> "#EWOULDBLOCK_K-IO" |LblString2Id -> "String2Id" |LblinitTuplespaceCell -> "initTuplespaceCell" |Lbl'_LT_'thread'_GT_' -> "<thread>" |Lbl'_LT_'vars'_GT_' -> "<vars>" |Lbl_'EqlsSlshEqls'Bool__BOOL -> "_=/=Bool__BOOL" |Lbl'_LT_'length'_GT_' -> "<length>" |LblisCollection -> "isCollection" |Lbl'Hash'EFBIG_K'Hyph'IO -> "#EFBIG_K-IO" |LblisTCell -> "isTCell" |Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION -> "_[_/_]_SUBSTITUTION" |Lbl'Hash'EBADF_K'Hyph'IO -> "#EBADF_K-IO" |Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#msg(_)_AUXFUN-SYNTAX" |LblnoLengthCell -> "noLengthCell" |LblinitNomoCell -> "initNomoCell" |Lbl'Hash'EPIPE_K'Hyph'IO -> "#EPIPE_K-IO" |Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bvar(_)_AUXFUN-SYNTAX" |LblnoContCell -> "noContCell" |LblisRhoTuple -> "isRhoTuple" |Lbl_'Xor_Perc'Int___INT -> "_^%Int___INT" |LblisMsgCellOpt -> "isMsgCellOpt" |Lbl'_LT_'reaction'_GT_' -> "<reaction>" |LblrfindString -> "rfindString" |LblisChanCellOpt -> "isChanCellOpt" |Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO -> "#ESOCKTNOSUPPORT_K-IO" |LblnoNomoCell -> "noNomoCell" |Lbl'Hash'EINTR_K'Hyph'IO -> "#EINTR_K-IO" |Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO -> "#stat(_)_K-IO" |LblupdateList -> "updateList" |LblisLidCell -> "isLidCell" |LblisMsgCell -> "isMsgCell" |Lbl'Stop'SendCellBag -> ".SendCellBag" |LblinitContCell -> "initContCell" |LblnoReactCell -> "noReactCell" |LblcategoryChar -> "categoryChar" |LblSet'Coln'difference -> "Set:difference" |LblisName -> "isName" |Lbl'Hash'EHOSTUNREACH_K'Hyph'IO -> "#EHOSTUNREACH_K-IO" |Lbl'Hash'ECONNRESET_K'Hyph'IO -> "#ECONNRESET_K-IO" |LblisBundle -> "isBundle" |LblisKCellOpt -> "isKCellOpt" |LblisForgCellOpt -> "isForgCellOpt" |Lbl'Hash'ECHILD_K'Hyph'IO -> "#ECHILD_K-IO" |LblisRecCellFragment -> "isRecCellFragment" |LblisUnconsumableSend -> "isUnconsumableSend" |LblisLbindList -> "isLbindList" |LblString2Float -> "String2Float" |LblMap'Coln'lookupOrDefault -> "Map:lookupOrDefault" |Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL -> "#if_#then_#else_#fi_K-EQUAL" |Lbl'_LT_'tuplespace'_GT_' -> "<tuplespace>" |Lbl'Hash'ENOTCONN_K'Hyph'IO -> "#ENOTCONN_K-IO" |Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX -> "_<-__GRHO-SYNTAX" |Lbl'_LT_'what'_GT_' -> "<what>" |Lbl'Hash'stdout_K'Hyph'IO -> "#stdout_K-IO" |Lbl_'And'Int__INT -> "_&Int__INT" |Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#rtype(_)_AUXFUN-SYNTAX" |Lbl'_LT_'tuple'_GT_' -> "<tuple>" 
|Lbl'Hash'ENAMETOOLONG_K'Hyph'IO -> "#ENAMETOOLONG_K-IO" |Lbllog2Int -> "log2Int" |Lbl_'EqlsSlshEqls'Int__INT -> "_=/=Int__INT" |Lbl'Hash'stdin_K'Hyph'IO -> "#stdin_K-IO" |Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chanlen(_)_AUXFUN-SYNTAX" |Lbl_'_GT_Eqls'String__STRING -> "_>=String__STRING" |LblnoSchanCell -> "noSchanCell" |Lbl'_LT_'react'_GT_' -> "<react>" |LblisBindOcc -> "isBindOcc" |LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX -> "Set(_)_GRHO-SYNTAX" |LblsizeMap -> "sizeMap" |LblisWhereCell -> "isWhereCell" |LblnoMsgCell -> "noMsgCell" |LblisId -> "isId" |LblsubstrString -> "substrString" |LblnoTuplespaceCell -> "noTuplespaceCell" |Lbl_'Comm'__GRHO'Hyph'SYNTAX -> "_,__GRHO-SYNTAX" |Lbl_'Bang'__GRHO'Hyph'SYNTAX -> "_!__GRHO-SYNTAX" |LblisTypeCellOpt -> "isTypeCellOpt" |Lblsize -> "size" |Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bindocc(_)_AUXFUN-SYNTAX" |LblnoMatchCell -> "noMatchCell" |LblisBind -> "isBind" |Lbl'Hash'ENETUNREACH_K'Hyph'IO -> "#ENETUNREACH_K-IO" |Lbl'Hash'EPROTOTYPE_K'Hyph'IO -> "#EPROTOTYPE_K-IO" |Lbl'Star'__GRHO'Hyph'SYNTAX -> "*__GRHO-SYNTAX" |Lbl'_LT_'who'_GT_' -> "<who>" |Lbl_'Coln'__GRHO'Hyph'SYNTAX -> "_:__GRHO-SYNTAX" |LblnoThreadsCell -> "noThreadsCell" |Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO -> "#systemResult(_,_,_)_K-IO" |Lbl'_LT_'listens'_GT_' -> "<listens>" |LblsrandInt -> "srandInt" |Lbl'Hash'EINVAL_K'Hyph'IO -> "#EINVAL_K-IO" |Lbl'_LT_'rid'_GT_' -> "<rid>" |LblisKItem -> "isKItem" |Lbl'Hash'ENODEV_K'Hyph'IO -> "#ENODEV_K-IO" |Lbl'Hash'length__AUXFUN'Hyph'SYNTAX -> "#length__AUXFUN-SYNTAX" |LblisRecCellBag -> "isRecCellBag" |LblList'Coln'set -> "List:set" |LblisUri -> "isUri" |LblString2Base -> "String2Base" |Lbl'Hash'noparse_K'Hyph'IO -> "#noparse_K-IO" |Lblkeys -> "keys" |LblinitRecCell -> "initRecCell" |Lbl'Hash'ESHUTDOWN_K'Hyph'IO -> "#ESHUTDOWN_K-IO" |LblisGround -> "isGround" |Lbl'Stop'ThreadCellBag -> ".ThreadCellBag" |LblThreadCellBagItem -> "ThreadCellBagItem" |Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#cont(_)_AUXFUN-SYNTAX" |Lbl'Hash'ENOTDIR_K'Hyph'IO -> "#ENOTDIR_K-IO" |Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "new_in{_}_GRHO-SYNTAX" |Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chan(_)_AUXFUN-SYNTAX" |Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX -> "_#in__AUXFUN-SYNTAX" |LblinitSendCell -> "initSendCell" |Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "{_}_GRHO-SYNTAX" |Lbl'_LT_'nomo'_GT_' -> "<nomo>" |Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bnum(_)_AUXFUN-SYNTAX" |Lbl_'_LT_Eqls'Int__INT -> "_<=Int__INT" |LblnotBool_ -> "notBool_" |LblnoNumCell -> "noNumCell" |Lbl'Hash'stderr_K'Hyph'IO -> "#stderr_K-IO" |LblnoKCell -> "noKCell" |Lbl'Hash'EBUSY_K'Hyph'IO -> "#EBUSY_K-IO" |Lbl'Hash'getenv -> "#getenv" |LblisTuplespaceCell -> "isTuplespaceCell" |LblisBinds -> "isBinds" |LblnoReactionCell -> "noReactionCell" |LblintersectSet -> "intersectSet" |Lbl_in_keys'LPar'_'RPar'_MAP -> "_in_keys(_)_MAP" |LblinitMsgCell -> "initMsgCell" |Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#bind(_)_AUXFUN-SYNTAX" |LblfindChar -> "findChar" |Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX -> "[_]_GRHO-SYNTAX" |LblSet'Coln'in -> "Set:in" |LblisK -> "isK" |LblisWhoCellOpt -> "isWhoCellOpt" |Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ -> "#freezernot__GRHO-SYNTAX0_" |LblisReceivesCell -> "isReceivesCell" |LblString2Int -> "String2Int" |Lbl'_LT_'where'_GT_' -> "<where>" |LblinitWhereCell -> "initWhereCell" |LblinitThreadCell -> "initThreadCell" |LblisSingleRec -> "isSingleRec" |LblisThreadsCell -> "isThreadsCell" |LblisTupleCellOpt -> 
"isTupleCellOpt" |LblisEval -> "isEval" |LblisWhatCell -> "isWhatCell" |Lbl'Hash'ENETDOWN_K'Hyph'IO -> "#ENETDOWN_K-IO" |LblisListenCellFragment -> "isListenCellFragment" |Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' -> "_[_<-undef]" |Lbl'Hash'Bottom -> "#Bottom" |Lbl_'EqlsEqls'Int_ -> "_==Int_" |Lbl_andThenBool__BOOL -> "_andThenBool__BOOL" |LblisPbinds -> "isPbinds" |Lbl'Hash'parseInModule -> "#parseInModule" |LblNil_GRHO'Hyph'SYNTAX -> "Nil_GRHO-SYNTAX" |LblisAExp -> "isAExp" |Lbl'Hash'system -> "#system" |Lbl'_LT_'mlid'_GT_' -> "<mlid>" |LblinitRidCell -> "initRidCell" |LblisString -> "isString" |Lbl_'Perc'Int__INT -> "_%Int__INT" |Lbl_'_GT__GT_'Int__INT -> "_>>Int__INT" |Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX -> "_:_,__GRHO-SYNTAX" |LblnoWhoCell -> "noWhoCell" |LblisList -> "isList" |Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO -> "#EPROTONOSUPPORT_K-IO" |LblisTuplespaceCellFragment -> "isTuplespaceCellFragment" |LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING -> "replaceAll(_,_,_)_STRING" |LblisBindList -> "isBindList" |LblnoChanCell -> "noChanCell" |Lbl'Hash'EDESTADDRREQ_K'Hyph'IO -> "#EDESTADDRREQ_K-IO" |Lbl'Hash'EADDRINUSE_K'Hyph'IO -> "#EADDRINUSE_K-IO" |LblnoRnumCell -> "noRnumCell" |Lbl_'Xor_'Int__INT -> "_^Int__INT" |LblfindString -> "findString" |Lbl'_LT_'k'_GT_' -> "<k>" |Lbl'_LT_'reaction'_GT_Hyph'fragment -> "<reaction>-fragment" |LblabsInt -> "absInt" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ -> "#freezer_+__GRHO-SYNTAX1_" |Lbl'Hash'EHOSTDOWN_K'Hyph'IO -> "#EHOSTDOWN_K-IO" |Lbl_'_GT_'String__STRING -> "_>String__STRING" |LblisSendsCellFragment -> "isSendsCellFragment" |LblinitLengthCell -> "initLengthCell" |Lbl_'EqlsEqls'String__STRING -> "_==String__STRING" |LblisRnumCellOpt -> "isRnumCellOpt" |LblisSend -> "isSend" |LblisKResult -> "isKResult" |LblinitStypeCell -> "initStypeCell" |LblList'Coln'get -> "List:get" |Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO -> "#lstat(_)_K-IO" |LblSendCellBagItem -> "SendCellBagItem" |Lbltuple -> "tuple" |Lbl'_LT_'id'_GT_' -> "<id>" |LblSetItem -> "SetItem" |Lbl'_LT_'receives'_GT_' -> "<receives>" |LblisRhoKVPairs -> "isRhoKVPairs" |LblunsignedBytes -> "unsignedBytes" |LblisMsidCellOpt -> "isMsidCellOpt" |Lbl'Stop'List -> ".List" |Lbl'Hash'ENOLCK_K'Hyph'IO -> "#ENOLCK_K-IO" |LblisSendsCellOpt -> "isSendsCellOpt" |Lbl'Hash'ECONNABORTED_K'Hyph'IO -> "#ECONNABORTED_K-IO" |LblrandInt -> "randInt" |Lbl'Hash'EXDEV_K'Hyph'IO -> "#EXDEV_K-IO" |Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO -> "#close(_)_K-IO" |Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX -> "_;_;;__GRHO-SYNTAX" |Lblkeys_list'LPar'_'RPar'_MAP -> "keys_list(_)_MAP" |LblfreshId -> "freshId" |LblinitTypeCell -> "initTypeCell" |Lbl_orElseBool__BOOL -> "_orElseBool__BOOL" |LblisSchanCellOpt -> "isSchanCellOpt" |Lbl'Hash'EISDIR_K'Hyph'IO -> "#EISDIR_K-IO" |Lbl'_LT_'cont'_GT_' -> "<cont>" |LblList'Coln'range -> "List:range" |LblinitTupleCell -> "initTupleCell" |LblnoIdCell -> "noIdCell" |LblisKCell -> "isKCell" |Lbl'Hash'unknownIOError -> "#unknownIOError" |Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ -> "#freezer_and__GRHO-SYNTAX1_" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ -> "#freezer_*__GRHO-SYNTAX0_" |Lbl_'_GT_Eqls'Int__INT -> "_>=Int__INT" |LblisSendCellFragment -> "isSendCellFragment" |Lbl'Hash'ENOSYS_K'Hyph'IO -> "#ENOSYS_K-IO" |Lbl_'Pipe'__GRHO'Hyph'SYNTAX -> "_|__GRHO-SYNTAX" |Lbl'Hash'ECONNREFUSED_K'Hyph'IO -> "#ECONNREFUSED_K-IO" |Lbl'_LT_'sends'_GT_Hyph'fragment -> "<sends>-fragment" |Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#lock(_,_)_K-IO" |Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO -> "#EADDRNOTAVAIL_K-IO" 
|LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING -> "countAllOccurrences(_,_)_STRING" |Lbl_'_GT_'Int__INT -> "_>Int__INT" |LblfillList -> "fillList" |Lbl'_AT_'__GRHO'Hyph'SYNTAX -> "@__GRHO-SYNTAX" |LblinitForgCell -> "initForgCell" |LblbitRangeInt -> "bitRangeInt" |Lbl_'_LT_'String__STRING -> "_<String__STRING" |Lbl'Hash'ThreadLocal -> "#ThreadLocal" |Lbl_xorBool__BOOL -> "_xorBool__BOOL" |Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ -> "#freezer_+__GRHO-SYNTAX0_" |LblinitReactCell -> "initReactCell" |Lbl'Stop'RecCellBag -> ".RecCellBag" |Lbl'_LT_'type'_GT_' -> "<type>" |Lbl'_LT_'listens'_GT_Hyph'fragment -> "<listens>-fragment" |Lbl_'Plus'__GRHO'Hyph'SYNTAX -> "_+__GRHO-SYNTAX" |Lbl'_LT_'lid'_GT_' -> "<lid>" |Lbl_ListenCellBag_ -> "_ListenCellBag_" |Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO -> "#open(_)_K-IO" |Lbl_'LSqB'_'RSqB'_SUBSTITUTION -> "_[_]_SUBSTITUTION" |LblnoMlidCell -> "noMlidCell" |Lbl_or__GRHO'Hyph'SYNTAX -> "_or__GRHO-SYNTAX" |Lbl'Hash'ETOOMANYREFS_K'Hyph'IO -> "#ETOOMANYREFS_K-IO" |Lbl'_LT_'threads'_GT_Hyph'fragment -> "<threads>-fragment" |LblinitListensCell -> "initListensCell" |Lbl'Hash'ENOSPC_K'Hyph'IO -> "#ENOSPC_K-IO" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ -> "#freezer_or__GRHO-SYNTAX0_" |LblisChanCell -> "isChanCell" |LblisRnumCell -> "isRnumCell" |Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX -> "#chanmany(_:_)_AUXFUN-SYNTAX" |Lbl'Hash'logToFile -> "#logToFile" |Lbl'_LT_'rec'_GT_' -> "<rec>" |Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#read(_,_)_K-IO" |LblnoLidCell -> "noLidCell" |LblisNameVar -> "isNameVar" |Lbl'_LT_'schan'_GT_' -> "<schan>" |LblbigEndianBytes -> "bigEndianBytes" |Lbl'_LT_'match'_GT_' -> "<match>" |LblId2String -> "Id2String" |LblinitListenCell -> "initListenCell" |Lbl'_LT_'num'_GT_' -> "<num>" |LblisContCellOpt -> "isContCellOpt" |LblisLidCellOpt -> "isLidCellOpt" |LblnoSidCell -> "noSidCell" |Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle{_}_GRHO-SYNTAX" |LblMap'Coln'choice -> "Map:choice" |Lbl_Set_ -> "_Set_" |Lbl'Hash'EEXIST_K'Hyph'IO -> "#EEXIST_K-IO" |Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO -> "#getc(_)_K-IO" |LblisRidCell -> "isRidCell" |Lbl'_LT_'state'_GT_' -> "<state>" |LblisListenCell -> "isListenCell" |LblisBool -> "isBool" |Lbl'Tild'Int__INT -> "~Int__INT" |Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ -> "#freezer_*__GRHO-SYNTAX1_" |LblordChar -> "ordChar" |LblinitIdCell -> "initIdCell" |Lbl_modInt__INT -> "_modInt__INT" |LblrfindChar -> "rfindChar" |LblisRbinds -> "isRbinds" |LblisMlidCellOpt -> "isMlidCellOpt" |Lbl'Hash'EAGAIN_K'Hyph'IO -> "#EAGAIN_K-IO" |Lbl'Stop'ListenCellBag -> ".ListenCellBag" |LblnoMsidCell -> "noMsidCell" |LblinitSchanCell -> "initSchanCell" |LbldirectionalityChar -> "directionalityChar" |LblisIdCell -> "isIdCell" |Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO -> "#opendir(_)_K-IO" |LblinitKCell -> "initKCell" |LblRecCellBagItem -> "RecCellBagItem" |Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "for(_){_}_GRHO-SYNTAX" |LblisBExp -> "isBExp" |Lbl'Stop'Set -> ".Set" |LblisChanLen -> "isChanLen" |LblisStateCell -> "isStateCell" |Lbl'Hash'EACCES_K'Hyph'IO -> "#EACCES_K-IO" |Lbl'Hash'ELOOP_K'Hyph'IO -> "#ELOOP_K-IO" |Lbl'Hash'EDOM_K'Hyph'IO -> "#EDOM_K-IO" |LblisSidCell -> "isSidCell" |LblremoveAll -> "removeAll" |LblnoRidCell -> "noRidCell" |Lbl'_LT_'threads'_GT_' -> "<threads>" |Lbl_andBool_ -> "_andBool_" |Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX -> "_#ine(_)_AUXFUN-SYNTAX" |LblisThreadsCellFragment -> "isThreadsCellFragment" |LblisLengthCellOpt -> "isLengthCellOpt" |LblisRbindList -> 
"isRbindList" |Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO -> "#EPFNOSUPPORT_K-IO" |LblisConsumableSend -> "isConsumableSend" |LbllengthString -> "lengthString" |Lbl_'Hyph'__GRHO'Hyph'SYNTAX -> "_-__GRHO-SYNTAX" |Lbl'_LT_'listen'_GT_Hyph'fragment -> "<listen>-fragment" |LblisReceive -> "isReceive" |Lbl'Hash'ERANGE_K'Hyph'IO -> "#ERANGE_K-IO" |LblinitTCell -> "initTCell" |LblsignedBytes -> "signedBytes" |LblFloatFormat -> "FloatFormat" |LblisMsidCell -> "isMsidCell" |Lbl'Hash'ENOTSOCK_K'Hyph'IO -> "#ENOTSOCK_K-IO" |Lbl_'Plus'String__STRING -> "_+String__STRING" |Lbl_RecCellBag_ -> "_RecCellBag_" |Lbl_'Pipe'Int__INT -> "_|Int__INT" |Lbl'Hash'EISCONN_K'Hyph'IO -> "#EISCONN_K-IO" |LblisKVariable -> "isKVariable" |Lbl_dividesInt__INT -> "_dividesInt__INT" |Lbl'_LT_'rec'_GT_Hyph'fragment -> "<rec>-fragment" |Lbl'_LT_'tuplespace'_GT_Hyph'fragment -> "<tuplespace>-fragment" |Lbl'_LT_'T'_GT_Hyph'fragment -> "<T>-fragment" |Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX -> "_<=__GRHO-SYNTAX" |Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ -> "#freezer_or__GRHO-SYNTAX1_" |LblisWhatCellOpt -> "isWhatCellOpt" |LblSet'Coln'choice -> "Set:choice" |LblisMatchCell -> "isMatchCell" |LblisListensCellFragment -> "isListensCellFragment" |Lbl'Hash'buffer -> "#buffer" |Lbl_'Star'__GRHO'Hyph'SYNTAX -> "_*__GRHO-SYNTAX" |LblinitNumCell -> "initNumCell" |LblfreshInt -> "freshInt" |Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#write(_,_)_K-IO" |Lbl'Hash'ETIMEDOUT_K'Hyph'IO -> "#ETIMEDOUT_K-IO" |LblinitSidCell -> "initSidCell" |LblisIdCellOpt -> "isIdCellOpt" |Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX -> "(_;_)_GRHO-SYNTAX" |Lbl'_LT_'sends'_GT_' -> "<sends>" |LblisSchanCell -> "isSchanCell" |Lbl_xorInt__INT -> "_xorInt__INT" |Lbl'Hash'EINPROGRESS_K'Hyph'IO -> "#EINPROGRESS_K-IO" |LblinitVarsCell -> "initVarsCell" |LblinitStateCell -> "initStateCell" |LblisNumCellOpt -> "isNumCellOpt" |LblinitMatchCell -> "initMatchCell" |LblisMlidCell -> "isMlidCell" |Lbl'Hash'ENOPROTOOPT_K'Hyph'IO -> "#ENOPROTOOPT_K-IO" |LbllittleEndianBytes -> "littleEndianBytes" |Lbl'Hash'EPERM_K'Hyph'IO -> "#EPERM_K-IO" |LblnoWhatCell -> "noWhatCell" |LblinitWhatCell -> "initWhatCell" |Lbl'_LT_'send'_GT_' -> "<send>" |Lbl_'_LT__LT_'Int__INT -> "_<<Int__INT" |LblBase2String -> "Base2String" |LblListItem -> "ListItem" |Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ -> "#freezer_-__GRHO-SYNTAX1_" |LblisStream -> "isStream" |Lbl_'_LT_Eqls'Map__MAP -> "_<=Map__MAP" |LblnewUUID_STRING -> "newUUID_STRING" |LblnoVarsCell -> "noVarsCell" |LblinitThreadsCell -> "initThreadsCell" |Lbl_SendCellBag_ -> "_SendCellBag_" |Lbl'Hash'ESRCH_K'Hyph'IO -> "#ESRCH_K-IO" |Lbl'Hash'EMFILE_K'Hyph'IO -> "#EMFILE_K-IO" |Lblproject'Coln'Proc -> "project:Proc" |LblisReactCellOpt -> "isReactCellOpt" |Lbl'_LT_'receives'_GT_Hyph'fragment -> "<receives>-fragment" |Lbl'_LT_'stype'_GT_' -> "<stype>" |Lbl_inList_ -> "_inList_" |LblisVarsCellOpt -> "isVarsCellOpt" |Lbl'Hash'ENOEXEC_K'Hyph'IO -> "#ENOEXEC_K-IO" |LblminInt'LPar'_'Comm'_'RPar'_INT -> "minInt(_,_)_INT" |LblinitReactionCell -> "initReactionCell" |LblisMap -> "isMap" |LblisTupleCell -> "isTupleCell" |LblisReactCell -> "isReactCell" |LblinitMsidCell -> "initMsidCell" |Lbl'_LT_'rnum'_GT_' -> "<rnum>" |LblisNomoCellOpt -> "isNomoCellOpt" |LblisStypeCellOpt -> "isStypeCellOpt" |LblisTCellFragment -> "isTCellFragment" |Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING -> "replace(_,_,_,_)_STRING" |Lbl_'Slsh'Int__INT -> "_/Int__INT" |Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP -> "_[_<-_]_MAP" |LblisRbind -> "isRbind" |Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO -> 
"#tell(_)_K-IO" |Lbl'_LT_'msid'_GT_' -> "<msid>" |LblinitSendsCell -> "initSendsCell" |Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX -> "bundle-{_}_GRHO-SYNTAX" |LblgetKLabel -> "getKLabel" |Lblnot__GRHO'Hyph'SYNTAX -> "not__GRHO-SYNTAX" |Lbl'Hash'E2BIG_K'Hyph'IO -> "#E2BIG_K-IO" |Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO -> "#seekEnd(_,_)_K-IO" |Lbl'_LT_'send'_GT_Hyph'fragment -> "<send>-fragment" let parse_sort(c: string) : sort = match c with |"Rbinds" -> SortRbinds |"AExp" -> SortAExp |"ListensCellOpt" -> SortListensCellOpt |"IdCellOpt" -> SortIdCellOpt |"K" -> SortK |"PbindList" -> SortPbindList |"RidCell" -> SortRidCell |"ListenCell" -> SortListenCell |"UnconsumableSend" -> SortUnconsumableSend |"ThreadCell" -> SortThreadCell |"MlidCell" -> SortMlidCell |"VarsCellOpt" -> SortVarsCellOpt |"NomoCell" -> SortNomoCell |"ThreadCellBag" -> SortThreadCellBag |"ThreadCellFragment" -> SortThreadCellFragment |"KItem" -> SortKItem |"BindList" -> SortBindList |"Names" -> SortNames |"Uri" -> SortUri |"IdCell" -> SortIdCell |"RhoMap" -> SortRhoMap |"LidCell" -> SortLidCell |"ReceivesCellFragment" -> SortReceivesCellFragment |"StateCellOpt" -> SortStateCellOpt |"SendsCellFragment" -> SortSendsCellFragment |"TCellFragment" -> SortTCellFragment |"ChanLen" -> SortChanLen |"ContCell" -> SortContCell |"SchanCell" -> SortSchanCell |"Set" -> SortSet |"ChanCellOpt" -> SortChanCellOpt |"Cell" -> SortCell |"Procs" -> SortProcs |"WhereCellOpt" -> SortWhereCellOpt |"TupleCellOpt" -> SortTupleCellOpt |"Bool" -> SortBool |"KResult" -> SortKResult |"ReactCell" -> SortReactCell |"RhoTuple" -> SortRhoTuple |"Send" -> SortSend |"LengthCell" -> SortLengthCell |"KCell" -> SortKCell |"MsidCellOpt" -> SortMsidCellOpt |"Lbind" -> SortLbind |"Bundle" -> SortBundle |"RnumCell" -> SortRnumCell |"RhoKVPairs" -> SortRhoKVPairs |"SidCell" -> SortSidCell |"TuplespaceCellFragment" -> SortTuplespaceCellFragment |"LidCellOpt" -> SortLidCellOpt |"Name" -> SortName |"BindOcc" -> SortBindOcc |"ReactionCellFragment" -> SortReactionCellFragment |"RhoSet" -> SortRhoSet |"SendCellBag" -> SortSendCellBag |"Par" -> SortPar |"Int" -> SortInt |"NumCellOpt" -> SortNumCellOpt |"RnumCellOpt" -> SortRnumCellOpt |"Collection" -> SortCollection |"Rbind" -> SortRbind |"WhatCellOpt" -> SortWhatCellOpt |"RecCellFragment" -> SortRecCellFragment |"StypeCell" -> SortStypeCell |"MsidCell" -> SortMsidCell |"ReceivesCell" -> SortReceivesCell |"Eval" -> SortEval |"KCellOpt" -> SortKCellOpt |"StypeCellOpt" -> SortStypeCellOpt |"ListenCellBag" -> SortListenCellBag |"SendCell" -> SortSendCell |"ReactCellOpt" -> SortReactCellOpt |"RhoList" -> SortRhoList |"ReactionCell" -> SortReactionCell |"MatchCellOpt" -> SortMatchCellOpt |"ChanCell" -> SortChanCell |"LbindList" -> SortLbindList |"NameVar" -> SortNameVar |"LengthCellOpt" -> SortLengthCellOpt |"ListensCell" -> SortListensCell |"BExp" -> SortBExp |"ConsumableSend" -> SortConsumableSend |"RidCellOpt" -> SortRidCellOpt |"Map" -> SortMap |"RecCellBag" -> SortRecCellBag |"RecCell" -> SortRecCell |"ContCellOpt" -> SortContCellOpt |"ThreadsCellOpt" -> SortThreadsCellOpt |"TuplespaceCell" -> SortTuplespaceCell |"New" -> SortNew |"Stream" -> SortStream |"ThreadsCellFragment" -> SortThreadsCellFragment |"ListensCellFragment" -> SortListensCellFragment |"WhoCell" -> SortWhoCell |"ReceivesCellOpt" -> SortReceivesCellOpt |"Proc" -> SortProc |"NameList" -> SortNameList |"Ground" -> SortGround |"String" -> SortString |"WhoCellOpt" -> SortWhoCellOpt |"Float" -> SortFloat |"ChanList" -> SortChanList |"SendsCell" -> 
SortSendsCell |"ReactionCellOpt" -> SortReactionCellOpt |"Pbind" -> SortPbind |"SingleRec" -> SortSingleRec |"ThreadsCell" -> SortThreadsCell |"MultiRec" -> SortMultiRec |"TypeCell" -> SortTypeCell |"VarsCell" -> SortVarsCell |"TypeCellOpt" -> SortTypeCellOpt |"SendCellFragment" -> SortSendCellFragment |"SchanCellOpt" -> SortSchanCellOpt |"TuplespaceCellOpt" -> SortTuplespaceCellOpt |"Lbinds" -> SortLbinds |"NumCell" -> SortNumCell |"WhereCell" -> SortWhereCell |"ForgCellOpt" -> SortForgCellOpt |"KVariable" -> SortKVariable |"Bytes" -> SortBytes |"WhatCell" -> SortWhatCell |"SendsCellOpt" -> SortSendsCellOpt |"NomoCellOpt" -> SortNomoCellOpt |"TupleCell" -> SortTupleCell |"IOError" -> SortIOError |"StringBuffer" -> SortStringBuffer |"RbindList" -> SortRbindList |"TCell" -> SortTCell |"Bind" -> SortBind |"MlidCellOpt" -> SortMlidCellOpt |"MsgCellOpt" -> SortMsgCellOpt |"MsgCell" -> SortMsgCell |"IdNum" -> SortIdNum |"KConfigVar" -> SortKConfigVar |"SidCellOpt" -> SortSidCellOpt |"Pbinds" -> SortPbinds |"JoinList" -> SortJoinList |"Binds" -> SortBinds |"ForgCell" -> SortForgCell |"ProcList" -> SortProcList |"Id" -> SortId |"List" -> SortList |"StateCell" -> SortStateCell |"Receive" -> SortReceive |"ListenCellFragment" -> SortListenCellFragment |"MatchCell" -> SortMatchCell | _ -> invalid_arg ("parse_sort: " ^ c) let parse_klabel(c: string) : klabel = match c with |"#argv" -> Lbl'Hash'argv |"isPbindList" -> LblisPbindList |"isThreadCell" -> LblisThreadCell |"Map:lookup" -> LblMap'Coln'lookup |"isChanList" -> LblisChanList |"bundle0{_}_GRHO-SYNTAX" -> Lblbundle0'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"#seek(_,_)_K-IO" -> Lbl'Hash'seek'LPar'_'Comm'_'RPar'_K'Hyph'IO |"isWhoCell" -> LblisWhoCell |"isIdNum" -> LblisIdNum |"signExtendBitRangeInt" -> LblsignExtendBitRangeInt |"_==Bool__BOOL" -> Lbl_'EqlsEqls'Bool__BOOL |"isSet" -> LblisSet |"isThreadCellBag" -> LblisThreadCellBag |"#bindocce(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bindocce'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isNew" -> LblisNew |"{[_;_]}_GRHO-SYNTAX" -> Lbl'LBraLSqB'_'SCln'_'RSqBRBra'_GRHO'Hyph'SYNTAX |"isStypeCell" -> LblisStypeCell |"_<=Set__SET" -> Lbl_'_LT_Eqls'Set__SET |"_#in(_)_AUXFUN-SYNTAX" -> Lbl_'Hash'in'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isIOError" -> LblisIOError |"#parse" -> Lbl'Hash'parse |"#EALREADY_K-IO" -> Lbl'Hash'EALREADY_K'Hyph'IO |"isRhoList" -> LblisRhoList |"makeList" -> LblmakeList |"#ESPIPE_K-IO" -> Lbl'Hash'ESPIPE_K'Hyph'IO |"#unlock(_,_)_K-IO" -> Lbl'Hash'unlock'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#ENOENT_K-IO" -> Lbl'Hash'ENOENT_K'Hyph'IO |"isProcs" -> LblisProcs |"_<!__GRHO-SYNTAX" -> Lbl_'_LT_Bang'__GRHO'Hyph'SYNTAX |"isTypeCell" -> LblisTypeCell |"#freezer_<=__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX1_ |"isLbinds" -> LblisLbinds |"isLengthCell" -> LblisLengthCell |"noStateCell" -> LblnoStateCell |"isLbind" -> LblisLbind |"#ENOTTY_K-IO" -> Lbl'Hash'ENOTTY_K'Hyph'IO |"isForgCell" -> LblisForgCell |"<forg>" -> Lbl'_LT_'forg'_GT_' |"initChanCell" -> LblinitChanCell |"isProcList" -> LblisProcList |"#freezer_-__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX0_ |"initRnumCell" -> LblinitRnumCell |"isRidCellOpt" -> LblisRidCellOpt |"isReceivesCellFragment" -> LblisReceivesCellFragment |"#ENOTEMPTY_K-IO" -> Lbl'Hash'ENOTEMPTY_K'Hyph'IO |"isSidCellOpt" -> LblisSidCellOpt |"#EMSGSIZE_K-IO" -> Lbl'Hash'EMSGSIZE_K'Hyph'IO |"isKConfigVar" -> LblisKConfigVar |"isRhoMap" -> LblisRhoMap |"#ENETRESET_K-IO" -> Lbl'Hash'ENETRESET_K'Hyph'IO |"#EAFNOSUPPORT_K-IO" -> Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO |"noTupleCell" -> 
LblnoTupleCell |"noSendsCell" -> LblnoSendsCell |"<thread>-fragment" -> Lbl'_LT_'thread'_GT_Hyph'fragment |"isCell" -> LblisCell |"isPbind" -> LblisPbind |"#ENOMEM_K-IO" -> Lbl'Hash'ENOMEM_K'Hyph'IO |"values" -> Lblvalues |"bundle+{_}_GRHO-SYNTAX" -> Lblbundle'PlusLBra'_'RBra'_GRHO'Hyph'SYNTAX |"isThreadCellFragment" -> LblisThreadCellFragment |"isStateCellOpt" -> LblisStateCellOpt |"initLidCell" -> LblinitLidCell |"isNameList" -> LblisNameList |"isListensCellOpt" -> LblisListensCellOpt |"isTuplespaceCellOpt" -> LblisTuplespaceCellOpt |"#ENXIO_K-IO" -> Lbl'Hash'ENXIO_K'Hyph'IO |"_<Int__INT" -> Lbl_'_LT_'Int__INT |"noTypeCell" -> LblnoTypeCell |"isSendCell" -> LblisSendCell |"#configuration_K-REFLECTION" -> Lbl'Hash'configuration_K'Hyph'REFLECTION |"isSendsCell" -> LblisSendsCell |"isFloat" -> LblisFloat |"<msg>" -> Lbl'_LT_'msg'_GT_' |"isContCell" -> LblisContCell |"chrChar" -> LblchrChar |"_divInt__INT" -> Lbl_divInt__INT |"#EROFS_K-IO" -> Lbl'Hash'EROFS_K'Hyph'IO |"isWhereCellOpt" -> LblisWhereCellOpt |"_ThreadCellBag_" -> Lbl_ThreadCellBag_ |"isProc" -> LblisProc |"isListensCell" -> LblisListensCell |"_+Int_" -> Lbl_'Plus'Int_ |"isReactionCell" -> LblisReactionCell |"_orBool__BOOL" -> Lbl_orBool__BOOL |"<sid>" -> Lbl'_LT_'sid'_GT_' |"#ENFILE_K-IO" -> Lbl'Hash'ENFILE_K'Hyph'IO |"updateMap" -> LblupdateMap |"isReactionCellOpt" -> LblisReactionCellOpt |"_;__GRHO-SYNTAX" -> Lbl_'SCln'__GRHO'Hyph'SYNTAX |"#freezer_<=__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'_LT_Eqls'__GRHO'Hyph'SYNTAX0_ |"isNomoCell" -> LblisNomoCell |"noWhereCell" -> LblnoWhereCell |"isJoinList" -> LblisJoinList |"Int2String" -> LblInt2String |"#stype(_)_AUXFUN-SYNTAX" -> Lbl'Hash'stype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_=/=K_" -> Lbl_'EqlsSlshEqls'K_ |"isNumCell" -> LblisNumCell |"isRecCell" -> LblisRecCell |"_List_" -> Lbl_List_ |"isMultiRec" -> LblisMultiRec |"toString(_)_AUXFUN-SYNTAX" -> LbltoString'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"#open(_,_)_K-IO" -> Lbl'Hash'open'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#EOPNOTSUPP_K-IO" -> Lbl'Hash'EOPNOTSUPP_K'Hyph'IO |"_|->_" -> Lbl_'PipeHyph_GT_'_ |"isMatchCellOpt" -> LblisMatchCellOpt |"_-Map__MAP" -> Lbl_'Hyph'Map__MAP |"#length(_)_AUXFUN-SYNTAX" -> Lbl'Hash'length'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isRhoSet" -> LblisRhoSet |"<chan>" -> Lbl'_LT_'chan'_GT_' |"noForgCell" -> LblnoForgCell |"isReceivesCellOpt" -> LblisReceivesCellOpt |"#EMLINK_K-IO" -> Lbl'Hash'EMLINK_K'Hyph'IO |"isListenCellBag" -> LblisListenCellBag |"#sort" -> Lbl'Hash'sort |"_==K_" -> Lbl_'EqlsEqls'K_ |"isPar" -> LblisPar |"unforgeable(_)_GRHO-SYNTAX" -> Lblunforgeable'LPar'_'RPar'_GRHO'Hyph'SYNTAX |"replaceFirst(_,_,_)_STRING" -> LblreplaceFirst'LPar'_'Comm'_'Comm'_'RPar'_STRING |"noListensCell" -> LblnoListensCell |"noStypeCell" -> LblnoStypeCell |"#EOVERFLOW_K-IO" -> Lbl'Hash'EOVERFLOW_K'Hyph'IO |"#putc(_,_)_K-IO" -> Lbl'Hash'putc'LPar'_'Comm'_'RPar'_K'Hyph'IO |"isThreadsCellOpt" -> LblisThreadsCellOpt |".Map" -> Lbl'Stop'Map |"isVarsCell" -> LblisVarsCell |"_=/=String__STRING" -> Lbl_'EqlsSlshEqls'String__STRING |"#EIO_K-IO" -> Lbl'Hash'EIO_K'Hyph'IO |"initMlidCell" -> LblinitMlidCell |"uri(_)_GRHO-SYNTAX" -> Lbluri'LPar'_'RPar'_GRHO'Hyph'SYNTAX |"isSendCellBag" -> LblisSendCellBag |"isInt" -> LblisInt |"#EFAULT_K-IO" -> Lbl'Hash'EFAULT_K'Hyph'IO |"#fresh" -> Lbl'Hash'fresh |"_impliesBool__BOOL" -> Lbl_impliesBool__BOOL |"#chanlist(_)_AUXFUN-SYNTAX" -> Lbl'Hash'chanlist'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_*Int__INT" -> Lbl_'Star'Int__INT |"<T>" -> Lbl'_LT_'T'_GT_' |"#Thread" -> Lbl'Hash'Thread |"maxInt(_,_)_INT" -> 
LblmaxInt'LPar'_'Comm'_'RPar'_INT |"initReceivesCell" -> LblinitReceivesCell |"#EDEADLK_K-IO" -> Lbl'Hash'EDEADLK_K'Hyph'IO |"_<=String__STRING" -> Lbl_'_LT_Eqls'String__STRING |"ListenCellBagItem" -> LblListenCellBagItem |"isNames" -> LblisNames |"#ENOBUFS_K-IO" -> Lbl'Hash'ENOBUFS_K'Hyph'IO |"_Map_" -> Lbl_Map_ |"_-Int__INT" -> Lbl_'Hyph'Int__INT |"#EOF_K-IO" -> Lbl'Hash'EOF_K'Hyph'IO |"_!!__GRHO-SYNTAX" -> Lbl_'BangBang'__GRHO'Hyph'SYNTAX |"isReactionCellFragment" -> LblisReactionCellFragment |"_and__GRHO-SYNTAX" -> Lbl_and__GRHO'Hyph'SYNTAX |"#lengths(_)_AUXFUN-SYNTAX" -> Lbl'Hash'lengths'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"Float2String" -> LblFloat2String |"#append(_;_)_AUXFUN-SYNTAX" -> Lbl'Hash'append'LPar'_'SCln'_'RPar'_AUXFUN'Hyph'SYNTAX |"initWhoCell" -> LblinitWhoCell |"<listen>" -> Lbl'_LT_'listen'_GT_' |"noReceivesCell" -> LblnoReceivesCell |"sizeList" -> LblsizeList |"#EWOULDBLOCK_K-IO" -> Lbl'Hash'EWOULDBLOCK_K'Hyph'IO |"String2Id" -> LblString2Id |"initTuplespaceCell" -> LblinitTuplespaceCell |"<thread>" -> Lbl'_LT_'thread'_GT_' |"<vars>" -> Lbl'_LT_'vars'_GT_' |"_=/=Bool__BOOL" -> Lbl_'EqlsSlshEqls'Bool__BOOL |"<length>" -> Lbl'_LT_'length'_GT_' |"isCollection" -> LblisCollection |"#EFBIG_K-IO" -> Lbl'Hash'EFBIG_K'Hyph'IO |"isTCell" -> LblisTCell |"_[_/_]_SUBSTITUTION" -> Lbl_'LSqB'_'Slsh'_'RSqB'_SUBSTITUTION |"#EBADF_K-IO" -> Lbl'Hash'EBADF_K'Hyph'IO |"#msg(_)_AUXFUN-SYNTAX" -> Lbl'Hash'msg'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"noLengthCell" -> LblnoLengthCell |"initNomoCell" -> LblinitNomoCell |"#EPIPE_K-IO" -> Lbl'Hash'EPIPE_K'Hyph'IO |"#bvar(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bvar'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"noContCell" -> LblnoContCell |"isRhoTuple" -> LblisRhoTuple |"_^%Int___INT" -> Lbl_'Xor_Perc'Int___INT |"isMsgCellOpt" -> LblisMsgCellOpt |"<reaction>" -> Lbl'_LT_'reaction'_GT_' |"rfindString" -> LblrfindString |"isChanCellOpt" -> LblisChanCellOpt |"#ESOCKTNOSUPPORT_K-IO" -> Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO |"noNomoCell" -> LblnoNomoCell |"#EINTR_K-IO" -> Lbl'Hash'EINTR_K'Hyph'IO |"#stat(_)_K-IO" -> Lbl'Hash'stat'LPar'_'RPar'_K'Hyph'IO |"updateList" -> LblupdateList |"isLidCell" -> LblisLidCell |"isMsgCell" -> LblisMsgCell |".SendCellBag" -> Lbl'Stop'SendCellBag |"initContCell" -> LblinitContCell |"noReactCell" -> LblnoReactCell |"categoryChar" -> LblcategoryChar |"Set:difference" -> LblSet'Coln'difference |"isName" -> LblisName |"#EHOSTUNREACH_K-IO" -> Lbl'Hash'EHOSTUNREACH_K'Hyph'IO |"#ECONNRESET_K-IO" -> Lbl'Hash'ECONNRESET_K'Hyph'IO |"isBundle" -> LblisBundle |"isKCellOpt" -> LblisKCellOpt |"isForgCellOpt" -> LblisForgCellOpt |"#ECHILD_K-IO" -> Lbl'Hash'ECHILD_K'Hyph'IO |"isRecCellFragment" -> LblisRecCellFragment |"isUnconsumableSend" -> LblisUnconsumableSend |"isLbindList" -> LblisLbindList |"String2Float" -> LblString2Float |"Map:lookupOrDefault" -> LblMap'Coln'lookupOrDefault |"#if_#then_#else_#fi_K-EQUAL" -> Lbl'Hash'if_'Hash'then_'Hash'else_'Hash'fi_K'Hyph'EQUAL |"<tuplespace>" -> Lbl'_LT_'tuplespace'_GT_' |"#ENOTCONN_K-IO" -> Lbl'Hash'ENOTCONN_K'Hyph'IO |"_<-__GRHO-SYNTAX" -> Lbl_'_LT_Hyph'__GRHO'Hyph'SYNTAX |"<what>" -> Lbl'_LT_'what'_GT_' |"#stdout_K-IO" -> Lbl'Hash'stdout_K'Hyph'IO |"_&Int__INT" -> Lbl_'And'Int__INT |"#rtype(_)_AUXFUN-SYNTAX" -> Lbl'Hash'rtype'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"<tuple>" -> Lbl'_LT_'tuple'_GT_' |"#ENAMETOOLONG_K-IO" -> Lbl'Hash'ENAMETOOLONG_K'Hyph'IO |"log2Int" -> Lbllog2Int |"_=/=Int__INT" -> Lbl_'EqlsSlshEqls'Int__INT |"#stdin_K-IO" -> Lbl'Hash'stdin_K'Hyph'IO |"#chanlen(_)_AUXFUN-SYNTAX" -> 
Lbl'Hash'chanlen'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_>=String__STRING" -> Lbl_'_GT_Eqls'String__STRING |"noSchanCell" -> LblnoSchanCell |"<react>" -> Lbl'_LT_'react'_GT_' |"isBindOcc" -> LblisBindOcc |"Set(_)_GRHO-SYNTAX" -> LblSet'LPar'_'RPar'_GRHO'Hyph'SYNTAX |"sizeMap" -> LblsizeMap |"isWhereCell" -> LblisWhereCell |"noMsgCell" -> LblnoMsgCell |"isId" -> LblisId |"substrString" -> LblsubstrString |"noTuplespaceCell" -> LblnoTuplespaceCell |"_,__GRHO-SYNTAX" -> Lbl_'Comm'__GRHO'Hyph'SYNTAX |"_!__GRHO-SYNTAX" -> Lbl_'Bang'__GRHO'Hyph'SYNTAX |"isTypeCellOpt" -> LblisTypeCellOpt |"size" -> Lblsize |"#bindocc(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bindocc'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"noMatchCell" -> LblnoMatchCell |"isBind" -> LblisBind |"#ENETUNREACH_K-IO" -> Lbl'Hash'ENETUNREACH_K'Hyph'IO |"#EPROTOTYPE_K-IO" -> Lbl'Hash'EPROTOTYPE_K'Hyph'IO |"*__GRHO-SYNTAX" -> Lbl'Star'__GRHO'Hyph'SYNTAX |"<who>" -> Lbl'_LT_'who'_GT_' |"_:__GRHO-SYNTAX" -> Lbl_'Coln'__GRHO'Hyph'SYNTAX |"noThreadsCell" -> LblnoThreadsCell |"#systemResult(_,_,_)_K-IO" -> Lbl'Hash'systemResult'LPar'_'Comm'_'Comm'_'RPar'_K'Hyph'IO |"<listens>" -> Lbl'_LT_'listens'_GT_' |"srandInt" -> LblsrandInt |"#EINVAL_K-IO" -> Lbl'Hash'EINVAL_K'Hyph'IO |"<rid>" -> Lbl'_LT_'rid'_GT_' |"isKItem" -> LblisKItem |"#ENODEV_K-IO" -> Lbl'Hash'ENODEV_K'Hyph'IO |"#length__AUXFUN-SYNTAX" -> Lbl'Hash'length__AUXFUN'Hyph'SYNTAX |"isRecCellBag" -> LblisRecCellBag |"List:set" -> LblList'Coln'set |"isUri" -> LblisUri |"String2Base" -> LblString2Base |"#noparse_K-IO" -> Lbl'Hash'noparse_K'Hyph'IO |"keys" -> Lblkeys |"initRecCell" -> LblinitRecCell |"#ESHUTDOWN_K-IO" -> Lbl'Hash'ESHUTDOWN_K'Hyph'IO |"isGround" -> LblisGround |".ThreadCellBag" -> Lbl'Stop'ThreadCellBag |"ThreadCellBagItem" -> LblThreadCellBagItem |"#cont(_)_AUXFUN-SYNTAX" -> Lbl'Hash'cont'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"#ENOTDIR_K-IO" -> Lbl'Hash'ENOTDIR_K'Hyph'IO |"new_in{_}_GRHO-SYNTAX" -> Lblnew_in'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"#chan(_)_AUXFUN-SYNTAX" -> Lbl'Hash'chan'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_#in__AUXFUN-SYNTAX" -> Lbl_'Hash'in__AUXFUN'Hyph'SYNTAX |"initSendCell" -> LblinitSendCell |"{_}_GRHO-SYNTAX" -> Lbl'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"<nomo>" -> Lbl'_LT_'nomo'_GT_' |"#bnum(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bnum'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"_<=Int__INT" -> Lbl_'_LT_Eqls'Int__INT |"notBool_" -> LblnotBool_ |"noNumCell" -> LblnoNumCell |"#stderr_K-IO" -> Lbl'Hash'stderr_K'Hyph'IO |"noKCell" -> LblnoKCell |"#EBUSY_K-IO" -> Lbl'Hash'EBUSY_K'Hyph'IO |"#getenv" -> Lbl'Hash'getenv |"isTuplespaceCell" -> LblisTuplespaceCell |"isBinds" -> LblisBinds |"noReactionCell" -> LblnoReactionCell |"intersectSet" -> LblintersectSet |"_in_keys(_)_MAP" -> Lbl_in_keys'LPar'_'RPar'_MAP |"initMsgCell" -> LblinitMsgCell |"#bind(_)_AUXFUN-SYNTAX" -> Lbl'Hash'bind'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"findChar" -> LblfindChar |"[_]_GRHO-SYNTAX" -> Lbl'LSqB'_'RSqB'_GRHO'Hyph'SYNTAX |"Set:in" -> LblSet'Coln'in |"isK" -> LblisK |"isWhoCellOpt" -> LblisWhoCellOpt |"#freezernot__GRHO-SYNTAX0_" -> Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_ |"isReceivesCell" -> LblisReceivesCell |"String2Int" -> LblString2Int |"<where>" -> Lbl'_LT_'where'_GT_' |"initWhereCell" -> LblinitWhereCell |"initThreadCell" -> LblinitThreadCell |"isSingleRec" -> LblisSingleRec |"isThreadsCell" -> LblisThreadsCell |"isTupleCellOpt" -> LblisTupleCellOpt |"isEval" -> LblisEval |"isWhatCell" -> LblisWhatCell |"#ENETDOWN_K-IO" -> Lbl'Hash'ENETDOWN_K'Hyph'IO |"isListenCellFragment" -> LblisListenCellFragment |"_[_<-undef]" -> 
Lbl_'LSqB'_'_LT_Hyph'undef'RSqB' |"#Bottom" -> Lbl'Hash'Bottom |"_==Int_" -> Lbl_'EqlsEqls'Int_ |"_andThenBool__BOOL" -> Lbl_andThenBool__BOOL |"isPbinds" -> LblisPbinds |"#parseInModule" -> Lbl'Hash'parseInModule |"Nil_GRHO-SYNTAX" -> LblNil_GRHO'Hyph'SYNTAX |"isAExp" -> LblisAExp |"#system" -> Lbl'Hash'system |"<mlid>" -> Lbl'_LT_'mlid'_GT_' |"initRidCell" -> LblinitRidCell |"isString" -> LblisString |"_%Int__INT" -> Lbl_'Perc'Int__INT |"_>>Int__INT" -> Lbl_'_GT__GT_'Int__INT |"_:_,__GRHO-SYNTAX" -> Lbl_'Coln'_'Comm'__GRHO'Hyph'SYNTAX |"noWhoCell" -> LblnoWhoCell |"isList" -> LblisList |"#EPROTONOSUPPORT_K-IO" -> Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO |"isTuplespaceCellFragment" -> LblisTuplespaceCellFragment |"replaceAll(_,_,_)_STRING" -> LblreplaceAll'LPar'_'Comm'_'Comm'_'RPar'_STRING |"isBindList" -> LblisBindList |"noChanCell" -> LblnoChanCell |"#EDESTADDRREQ_K-IO" -> Lbl'Hash'EDESTADDRREQ_K'Hyph'IO |"#EADDRINUSE_K-IO" -> Lbl'Hash'EADDRINUSE_K'Hyph'IO |"noRnumCell" -> LblnoRnumCell |"_^Int__INT" -> Lbl_'Xor_'Int__INT |"findString" -> LblfindString |"<k>" -> Lbl'_LT_'k'_GT_' |"<reaction>-fragment" -> Lbl'_LT_'reaction'_GT_Hyph'fragment |"absInt" -> LblabsInt |"#freezer_+__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX1_ |"#EHOSTDOWN_K-IO" -> Lbl'Hash'EHOSTDOWN_K'Hyph'IO |"_>String__STRING" -> Lbl_'_GT_'String__STRING |"isSendsCellFragment" -> LblisSendsCellFragment |"initLengthCell" -> LblinitLengthCell |"_==String__STRING" -> Lbl_'EqlsEqls'String__STRING |"isRnumCellOpt" -> LblisRnumCellOpt |"isSend" -> LblisSend |"isKResult" -> LblisKResult |"initStypeCell" -> LblinitStypeCell |"List:get" -> LblList'Coln'get |"#lstat(_)_K-IO" -> Lbl'Hash'lstat'LPar'_'RPar'_K'Hyph'IO |"SendCellBagItem" -> LblSendCellBagItem |"tuple" -> Lbltuple |"<id>" -> Lbl'_LT_'id'_GT_' |"SetItem" -> LblSetItem |"<receives>" -> Lbl'_LT_'receives'_GT_' |"isRhoKVPairs" -> LblisRhoKVPairs |"unsignedBytes" -> LblunsignedBytes |"isMsidCellOpt" -> LblisMsidCellOpt |".List" -> Lbl'Stop'List |"#ENOLCK_K-IO" -> Lbl'Hash'ENOLCK_K'Hyph'IO |"isSendsCellOpt" -> LblisSendsCellOpt |"#ECONNABORTED_K-IO" -> Lbl'Hash'ECONNABORTED_K'Hyph'IO |"randInt" -> LblrandInt |"#EXDEV_K-IO" -> Lbl'Hash'EXDEV_K'Hyph'IO |"#close(_)_K-IO" -> Lbl'Hash'close'LPar'_'RPar'_K'Hyph'IO |"_;_;;__GRHO-SYNTAX" -> Lbl_'SCln'_'SClnSCln'__GRHO'Hyph'SYNTAX |"keys_list(_)_MAP" -> Lblkeys_list'LPar'_'RPar'_MAP |"freshId" -> LblfreshId |"initTypeCell" -> LblinitTypeCell |"_orElseBool__BOOL" -> Lbl_orElseBool__BOOL |"isSchanCellOpt" -> LblisSchanCellOpt |"#EISDIR_K-IO" -> Lbl'Hash'EISDIR_K'Hyph'IO |"<cont>" -> Lbl'_LT_'cont'_GT_' |"List:range" -> LblList'Coln'range |"initTupleCell" -> LblinitTupleCell |"noIdCell" -> LblnoIdCell |"isKCell" -> LblisKCell |"#unknownIOError" -> Lbl'Hash'unknownIOError |"#freezer_and__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_and__GRHO'Hyph'SYNTAX1_ |"#freezer_*__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX0_ |"_>=Int__INT" -> Lbl_'_GT_Eqls'Int__INT |"isSendCellFragment" -> LblisSendCellFragment |"#ENOSYS_K-IO" -> Lbl'Hash'ENOSYS_K'Hyph'IO |"_|__GRHO-SYNTAX" -> Lbl_'Pipe'__GRHO'Hyph'SYNTAX |"#ECONNREFUSED_K-IO" -> Lbl'Hash'ECONNREFUSED_K'Hyph'IO |"<sends>-fragment" -> Lbl'_LT_'sends'_GT_Hyph'fragment |"#lock(_,_)_K-IO" -> Lbl'Hash'lock'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#EADDRNOTAVAIL_K-IO" -> Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO |"countAllOccurrences(_,_)_STRING" -> LblcountAllOccurrences'LPar'_'Comm'_'RPar'_STRING |"_>Int__INT" -> Lbl_'_GT_'Int__INT |"fillList" -> LblfillList |"@__GRHO-SYNTAX" -> Lbl'_AT_'__GRHO'Hyph'SYNTAX 
|"initForgCell" -> LblinitForgCell |"bitRangeInt" -> LblbitRangeInt |"_<String__STRING" -> Lbl_'_LT_'String__STRING |"#ThreadLocal" -> Lbl'Hash'ThreadLocal |"_xorBool__BOOL" -> Lbl_xorBool__BOOL |"#freezer_+__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_'Plus'__GRHO'Hyph'SYNTAX0_ |"initReactCell" -> LblinitReactCell |".RecCellBag" -> Lbl'Stop'RecCellBag |"<type>" -> Lbl'_LT_'type'_GT_' |"<listens>-fragment" -> Lbl'_LT_'listens'_GT_Hyph'fragment |"_+__GRHO-SYNTAX" -> Lbl_'Plus'__GRHO'Hyph'SYNTAX |"<lid>" -> Lbl'_LT_'lid'_GT_' |"_ListenCellBag_" -> Lbl_ListenCellBag_ |"#open(_)_K-IO" -> Lbl'Hash'open'LPar'_'RPar'_K'Hyph'IO |"_[_]_SUBSTITUTION" -> Lbl_'LSqB'_'RSqB'_SUBSTITUTION |"noMlidCell" -> LblnoMlidCell |"_or__GRHO-SYNTAX" -> Lbl_or__GRHO'Hyph'SYNTAX |"#ETOOMANYREFS_K-IO" -> Lbl'Hash'ETOOMANYREFS_K'Hyph'IO |"<threads>-fragment" -> Lbl'_LT_'threads'_GT_Hyph'fragment |"initListensCell" -> LblinitListensCell |"#ENOSPC_K-IO" -> Lbl'Hash'ENOSPC_K'Hyph'IO |"#freezer_or__GRHO-SYNTAX0_" -> Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX0_ |"isChanCell" -> LblisChanCell |"isRnumCell" -> LblisRnumCell |"#chanmany(_:_)_AUXFUN-SYNTAX" -> Lbl'Hash'chanmany'LPar'_'Coln'_'RPar'_AUXFUN'Hyph'SYNTAX |"#logToFile" -> Lbl'Hash'logToFile |"<rec>" -> Lbl'_LT_'rec'_GT_' |"#read(_,_)_K-IO" -> Lbl'Hash'read'LPar'_'Comm'_'RPar'_K'Hyph'IO |"noLidCell" -> LblnoLidCell |"isNameVar" -> LblisNameVar |"<schan>" -> Lbl'_LT_'schan'_GT_' |"bigEndianBytes" -> LblbigEndianBytes |"<match>" -> Lbl'_LT_'match'_GT_' |"Id2String" -> LblId2String |"initListenCell" -> LblinitListenCell |"<num>" -> Lbl'_LT_'num'_GT_' |"isContCellOpt" -> LblisContCellOpt |"isLidCellOpt" -> LblisLidCellOpt |"noSidCell" -> LblnoSidCell |"bundle{_}_GRHO-SYNTAX" -> Lblbundle'LBra'_'RBra'_GRHO'Hyph'SYNTAX |"Map:choice" -> LblMap'Coln'choice |"_Set_" -> Lbl_Set_ |"#EEXIST_K-IO" -> Lbl'Hash'EEXIST_K'Hyph'IO |"#getc(_)_K-IO" -> Lbl'Hash'getc'LPar'_'RPar'_K'Hyph'IO |"isRidCell" -> LblisRidCell |"<state>" -> Lbl'_LT_'state'_GT_' |"isListenCell" -> LblisListenCell |"isBool" -> LblisBool |"~Int__INT" -> Lbl'Tild'Int__INT |"#freezer_*__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'Star'__GRHO'Hyph'SYNTAX1_ |"ordChar" -> LblordChar |"initIdCell" -> LblinitIdCell |"_modInt__INT" -> Lbl_modInt__INT |"rfindChar" -> LblrfindChar |"isRbinds" -> LblisRbinds |"isMlidCellOpt" -> LblisMlidCellOpt |"#EAGAIN_K-IO" -> Lbl'Hash'EAGAIN_K'Hyph'IO |".ListenCellBag" -> Lbl'Stop'ListenCellBag |"noMsidCell" -> LblnoMsidCell |"initSchanCell" -> LblinitSchanCell |"directionalityChar" -> LbldirectionalityChar |"isIdCell" -> LblisIdCell |"#opendir(_)_K-IO" -> Lbl'Hash'opendir'LPar'_'RPar'_K'Hyph'IO |"initKCell" -> LblinitKCell |"RecCellBagItem" -> LblRecCellBagItem |"for(_){_}_GRHO-SYNTAX" -> Lblfor'LPar'_'RParLBra'_'RBra'_GRHO'Hyph'SYNTAX |"isBExp" -> LblisBExp |".Set" -> Lbl'Stop'Set |"isChanLen" -> LblisChanLen |"isStateCell" -> LblisStateCell |"#EACCES_K-IO" -> Lbl'Hash'EACCES_K'Hyph'IO |"#ELOOP_K-IO" -> Lbl'Hash'ELOOP_K'Hyph'IO |"#EDOM_K-IO" -> Lbl'Hash'EDOM_K'Hyph'IO |"isSidCell" -> LblisSidCell |"removeAll" -> LblremoveAll |"noRidCell" -> LblnoRidCell |"<threads>" -> Lbl'_LT_'threads'_GT_' |"_andBool_" -> Lbl_andBool_ |"_#ine(_)_AUXFUN-SYNTAX" -> Lbl_'Hash'ine'LPar'_'RPar'_AUXFUN'Hyph'SYNTAX |"isThreadsCellFragment" -> LblisThreadsCellFragment |"isLengthCellOpt" -> LblisLengthCellOpt |"isRbindList" -> LblisRbindList |"#EPFNOSUPPORT_K-IO" -> Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO |"isConsumableSend" -> LblisConsumableSend |"lengthString" -> LbllengthString |"_-__GRHO-SYNTAX" -> Lbl_'Hyph'__GRHO'Hyph'SYNTAX 
|"<listen>-fragment" -> Lbl'_LT_'listen'_GT_Hyph'fragment |"isReceive" -> LblisReceive |"#ERANGE_K-IO" -> Lbl'Hash'ERANGE_K'Hyph'IO |"initTCell" -> LblinitTCell |"signedBytes" -> LblsignedBytes |"FloatFormat" -> LblFloatFormat |"isMsidCell" -> LblisMsidCell |"#ENOTSOCK_K-IO" -> Lbl'Hash'ENOTSOCK_K'Hyph'IO |"_+String__STRING" -> Lbl_'Plus'String__STRING |"_RecCellBag_" -> Lbl_RecCellBag_ |"_|Int__INT" -> Lbl_'Pipe'Int__INT |"#EISCONN_K-IO" -> Lbl'Hash'EISCONN_K'Hyph'IO |"isKVariable" -> LblisKVariable |"_dividesInt__INT" -> Lbl_dividesInt__INT |"<rec>-fragment" -> Lbl'_LT_'rec'_GT_Hyph'fragment |"<tuplespace>-fragment" -> Lbl'_LT_'tuplespace'_GT_Hyph'fragment |"<T>-fragment" -> Lbl'_LT_'T'_GT_Hyph'fragment |"_<=__GRHO-SYNTAX" -> Lbl_'_LT_Eqls'__GRHO'Hyph'SYNTAX |"#freezer_or__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_or__GRHO'Hyph'SYNTAX1_ |"isWhatCellOpt" -> LblisWhatCellOpt |"Set:choice" -> LblSet'Coln'choice |"isMatchCell" -> LblisMatchCell |"isListensCellFragment" -> LblisListensCellFragment |"#buffer" -> Lbl'Hash'buffer |"_*__GRHO-SYNTAX" -> Lbl_'Star'__GRHO'Hyph'SYNTAX |"initNumCell" -> LblinitNumCell |"freshInt" -> LblfreshInt |"#write(_,_)_K-IO" -> Lbl'Hash'write'LPar'_'Comm'_'RPar'_K'Hyph'IO |"#ETIMEDOUT_K-IO" -> Lbl'Hash'ETIMEDOUT_K'Hyph'IO |"initSidCell" -> LblinitSidCell |"isIdCellOpt" -> LblisIdCellOpt |"(_;_)_GRHO-SYNTAX" -> Lbl'LPar'_'SCln'_'RPar'_GRHO'Hyph'SYNTAX |"<sends>" -> Lbl'_LT_'sends'_GT_' |"isSchanCell" -> LblisSchanCell |"_xorInt__INT" -> Lbl_xorInt__INT |"#EINPROGRESS_K-IO" -> Lbl'Hash'EINPROGRESS_K'Hyph'IO |"initVarsCell" -> LblinitVarsCell |"initStateCell" -> LblinitStateCell |"isNumCellOpt" -> LblisNumCellOpt |"initMatchCell" -> LblinitMatchCell |"isMlidCell" -> LblisMlidCell |"#ENOPROTOOPT_K-IO" -> Lbl'Hash'ENOPROTOOPT_K'Hyph'IO |"littleEndianBytes" -> LbllittleEndianBytes |"#EPERM_K-IO" -> Lbl'Hash'EPERM_K'Hyph'IO |"noWhatCell" -> LblnoWhatCell |"initWhatCell" -> LblinitWhatCell |"<send>" -> Lbl'_LT_'send'_GT_' |"_<<Int__INT" -> Lbl_'_LT__LT_'Int__INT |"Base2String" -> LblBase2String |"ListItem" -> LblListItem |"#freezer_-__GRHO-SYNTAX1_" -> Lbl'Hash'freezer_'Hyph'__GRHO'Hyph'SYNTAX1_ |"isStream" -> LblisStream |"_<=Map__MAP" -> Lbl_'_LT_Eqls'Map__MAP |"newUUID_STRING" -> LblnewUUID_STRING |"noVarsCell" -> LblnoVarsCell |"initThreadsCell" -> LblinitThreadsCell |"_SendCellBag_" -> Lbl_SendCellBag_ |"#ESRCH_K-IO" -> Lbl'Hash'ESRCH_K'Hyph'IO |"#EMFILE_K-IO" -> Lbl'Hash'EMFILE_K'Hyph'IO |"project:Proc" -> Lblproject'Coln'Proc |"isReactCellOpt" -> LblisReactCellOpt |"<receives>-fragment" -> Lbl'_LT_'receives'_GT_Hyph'fragment |"<stype>" -> Lbl'_LT_'stype'_GT_' |"_inList_" -> Lbl_inList_ |"isVarsCellOpt" -> LblisVarsCellOpt |"#ENOEXEC_K-IO" -> Lbl'Hash'ENOEXEC_K'Hyph'IO |"minInt(_,_)_INT" -> LblminInt'LPar'_'Comm'_'RPar'_INT |"initReactionCell" -> LblinitReactionCell |"isMap" -> LblisMap |"isTupleCell" -> LblisTupleCell |"isReactCell" -> LblisReactCell |"initMsidCell" -> LblinitMsidCell |"<rnum>" -> Lbl'_LT_'rnum'_GT_' |"isNomoCellOpt" -> LblisNomoCellOpt |"isStypeCellOpt" -> LblisStypeCellOpt |"isTCellFragment" -> LblisTCellFragment |"replace(_,_,_,_)_STRING" -> Lblreplace'LPar'_'Comm'_'Comm'_'Comm'_'RPar'_STRING |"_/Int__INT" -> Lbl_'Slsh'Int__INT |"_[_<-_]_MAP" -> Lbl_'LSqB'_'_LT_Hyph'_'RSqB'_MAP |"isRbind" -> LblisRbind |"#tell(_)_K-IO" -> Lbl'Hash'tell'LPar'_'RPar'_K'Hyph'IO |"<msid>" -> Lbl'_LT_'msid'_GT_' |"initSendsCell" -> LblinitSendsCell |"bundle-{_}_GRHO-SYNTAX" -> Lblbundle'HyphLBra'_'RBra'_GRHO'Hyph'SYNTAX |"getKLabel" -> LblgetKLabel |"not__GRHO-SYNTAX" -> 
Lblnot__GRHO'Hyph'SYNTAX |"#E2BIG_K-IO" -> Lbl'Hash'E2BIG_K'Hyph'IO |"#seekEnd(_,_)_K-IO" -> Lbl'Hash'seekEnd'LPar'_'Comm'_'RPar'_K'Hyph'IO |"<send>-fragment" -> Lbl'_LT_'send'_GT_Hyph'fragment | _ -> invalid_arg ("parse_klabel: " ^ c) let collection_for (c: klabel) : klabel = match c with |LblSetItem -> Lbl_Set_ |Lbl'_LT_'listen'_GT_' -> Lbl_ListenCellBag_ |Lbl'Stop'Set -> Lbl_Set_ |Lbl'_LT_'thread'_GT_' -> Lbl_ThreadCellBag_ |Lbl'Stop'List -> Lbl_List_ |Lbl_List_ -> Lbl_List_ |Lbl_Set_ -> Lbl_Set_ |Lbl_'PipeHyph_GT_'_ -> Lbl_Map_ |Lbl'Stop'RecCellBag -> Lbl_RecCellBag_ |Lbl_ThreadCellBag_ -> Lbl_ThreadCellBag_ |LblListenCellBagItem -> Lbl_ListenCellBag_ |Lbl'_LT_'send'_GT_' -> Lbl_SendCellBag_ |Lbl_ListenCellBag_ -> Lbl_ListenCellBag_ |Lbl_RecCellBag_ -> Lbl_RecCellBag_ |Lbl'Stop'ListenCellBag -> Lbl_ListenCellBag_ |LblListItem -> Lbl_List_ |Lbl_Map_ -> Lbl_Map_ |Lbl'Stop'Map -> Lbl_Map_ |Lbl'Stop'ThreadCellBag -> Lbl_ThreadCellBag_ |LblThreadCellBagItem -> Lbl_ThreadCellBag_ |Lbl'Stop'SendCellBag -> Lbl_SendCellBag_ |Lbl'_LT_'rec'_GT_' -> Lbl_RecCellBag_ |Lbl_SendCellBag_ -> Lbl_SendCellBag_ |LblSendCellBagItem -> Lbl_SendCellBag_ |LblRecCellBagItem -> Lbl_RecCellBag_ | _ -> invalid_arg "collection_for" let unit_for (c: klabel) : klabel = match c with |Lbl_ThreadCellBag_ -> Lbl'Stop'ThreadCellBag |Lbl_Set_ -> Lbl'Stop'Set |Lbl_List_ -> Lbl'Stop'List |Lbl_ListenCellBag_ -> Lbl'Stop'ListenCellBag |Lbl_RecCellBag_ -> Lbl'Stop'RecCellBag |Lbl_SendCellBag_ -> Lbl'Stop'SendCellBag |Lbl_Map_ -> Lbl'Stop'Map | _ -> invalid_arg "unit_for" let el_for (c: klabel) : klabel = match c with |Lbl_ThreadCellBag_ -> LblThreadCellBagItem |Lbl_Set_ -> LblSetItem |Lbl_List_ -> LblListItem |Lbl_ListenCellBag_ -> LblListenCellBagItem |Lbl_RecCellBag_ -> LblRecCellBagItem |Lbl_SendCellBag_ -> LblSendCellBagItem |Lbl_Map_ -> Lbl_'PipeHyph_GT_'_ | _ -> invalid_arg "el_for" let unit_for_array (c: sort) : klabel = match c with | _ -> invalid_arg "unit_for_array" let el_for_array (c: sort) : klabel = match c with | _ -> invalid_arg "el_for_array" module Dynarray : sig type 'a t val make : int -> 'a -> 'a t val length : 'a t -> int val get : 'a t -> int -> 'a val set : 'a t -> int -> 'a -> unit val compare : ('a list -> 'a list -> int) -> 'a t -> 'a t -> int val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a val fold_right : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b val iteri : (int -> 'a -> unit) -> 'a t -> unit end = struct type 'a t = { size: int; mutable arr: 'a array; default: 'a } let make size default = { size=size; arr=Array.make (min size 10) default; default=default} let length arr = arr.size let get arr idx = if idx >= Array.length arr.arr && idx < arr.size then arr.default else Array.get arr.arr idx let calc_size arr at_least = let double = Array.length arr.arr * 2 in let at_most = if double > arr.size then arr.size else double in if at_least > at_most then at_least else at_most let upgrade_size arr size = let old = arr.arr in arr.arr <- Array.make size arr.default; Array.blit old 0 arr.arr 0 (Array.length old) let set arr idx value= if idx >= Array.length arr.arr && idx < arr.size then upgrade_size arr (calc_size arr (idx + 1)); Array.set arr.arr idx value let compare_arr f a b = let smaller,larger = if Array.length a.arr < Array.length b.arr then a,b else b,a in upgrade_size smaller (Array.length larger.arr); f (Array.to_list a.arr) (Array.to_list b.arr) let compare f a b = let v = Pervasives.compare a.size b.size in if v = 0 then compare_arr f a b else v let fold_left f init arr = snd (Array.fold_left 
(fun (i,x) a -> if i > 0 then (i - 1, f x a) else (0,x)) (arr.size,init) arr.arr) let fold_right f arr init = snd (Array.fold_right (fun a (i,x) -> if i > 0 then (i - 1, x) else (0, f a x)) arr.arr (Array.length arr.arr - arr.size, init)) let iteri f arr = Array.iteri (fun i a -> if i < arr.size then f i a else ()) arr.arr end module type S = sig type m type s type t = kitem list and kitem = KToken of sort * string | InjectedKLabel of klabel | Map of sort * klabel * m | List of sort * klabel * t list | Set of sort * klabel * s | Array of sort * t * t Dynarray.t | Int of Z.t | Float of Gmp.FR.t * int * int | String of string | Bytes of bytes | StringBuffer of Buffer.t | Bool of bool | ThreadLocal | Thread of t * t * t * t | Bottom | KApply0 of klabel | KApply1 of klabel * t | KApply2 of klabel * t * t | KApply3 of klabel * t * t * t | KApply4 of klabel * t * t * t * t | KApply5 of klabel * t * t * t * t * t | KApply6 of klabel * t * t * t * t * t * t | KApply7 of klabel * t * t * t * t * t * t * t | KApply8 of klabel * t * t * t * t * t * t * t * t val compare : t -> t -> int val compare_kitem : kitem -> kitem -> int val compare_klist : t list -> t list -> int val equal_k : t -> t -> bool val hash_k : t -> int val hash_k_param : int -> t -> int end module rec K : (S with type m = K.t Map.Make(K).t and type s = Set.Make(K).t) = struct module KMap = Map.Make(K) module KSet = Set.Make(K) type m = K.t KMap.t and s = KSet.t and t = kitem list and kitem = KToken of sort * string | InjectedKLabel of klabel | Map of sort * klabel * m | List of sort * klabel * t list | Set of sort * klabel * s | Array of sort * t * t Dynarray.t | Int of Z.t | Float of Gmp.FR.t * int * int | String of string | Bytes of bytes | StringBuffer of Buffer.t | Bool of bool | ThreadLocal | Thread of t * t * t * t | Bottom | KApply0 of klabel | KApply1 of klabel * t | KApply2 of klabel * t * t | KApply3 of klabel * t * t * t | KApply4 of klabel * t * t * t * t | KApply5 of klabel * t * t * t * t * t | KApply6 of klabel * t * t * t * t * t * t | KApply7 of klabel * t * t * t * t * t * t * t | KApply8 of klabel * t * t * t * t * t * t * t * t let rec hash_k c = match c with | [] -> 1 | hd :: tl -> (hash_k tl) * 31 + hash_kitem hd and hash_kitem c = match c with | KApply0(lbl) -> (Hashtbl.hash lbl) | KApply1(lbl,k0) -> ((Hashtbl.hash lbl)) * 37 + hash_k k0 | KApply2(lbl,k0,k1) -> (((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1 | KApply3(lbl,k0,k1,k2) -> ((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2 | KApply4(lbl,k0,k1,k2,k3) -> (((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3 | KApply5(lbl,k0,k1,k2,k3,k4) -> ((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4 | KApply6(lbl,k0,k1,k2,k3,k4,k5) -> (((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4) * 37 + hash_k k5 | KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) -> ((((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4) * 37 + hash_k k5) * 37 + hash_k k6 | KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) -> (((((((((Hashtbl.hash lbl)) * 37 + hash_k k0) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 37 + hash_k k4) * 37 + hash_k k5) * 37 + hash_k k6) * 37 + hash_k k7 | KToken(s, st) -> Hashtbl.hash s * 41 + Hashtbl.hash st | InjectedKLabel kl -> Hashtbl.hash kl | Map(_,k,m) -> Hashtbl.hash k * 43 + KMap.fold (fun k v accum -> 
accum + (hash_k k lxor hash_k v)) m 0 | List(_,k,l) -> Hashtbl.hash k * 47 + hash_klist l | Set(_,k,s) -> Hashtbl.hash k * 53 + KSet.fold (fun k accum -> accum + hash_k k) s 0 | Array(k,_,l) -> Hashtbl.hash k * 61 + (Dynarray.length l) | Int i -> Z.hash i | Float (f,_,_) -> Hashtbl.hash (Gmp.FR.to_float f) | String s -> Hashtbl.hash s | StringBuffer s -> Hashtbl.hash (Buffer.contents s) | Bytes b -> Hashtbl.hash b | Bool b -> Hashtbl.hash b | Bottom -> 1 | ThreadLocal -> 2 | Thread(k1,k2,k3,k4) -> ((((Hashtbl.hash Lbl'Hash'Thread) * 37 + hash_k k1) * 37 + hash_k k2) * 37 + hash_k k3) * 36 + hash_k k4 and hash_klist c = match c with | [] -> 1 | hd :: tl -> (hash_klist tl) * 59 + hash_k hd let rec hash_k_param_fld ((l,max) as lmax) = function | [] -> lmax | h::t -> if max < 0 then lmax else hash_k_param_fld (h::l,max-1) t let hash_k_param_add_kitem k max = hash_k_param_fld max k let rec qfld l1 h max = match l1 with | [] -> let (l2,max) = max in if l2 = [] then h else qfld l2 h ([],max) | ki :: kq -> match ki with | KApply0(lbl) -> qfld kq (31*h + Hashtbl.hash lbl) ( max) | KApply1(lbl,k0) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( max)) | KApply2(lbl,k0,k1) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( max))) | KApply3(lbl,k0,k1,k2) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( max)))) | KApply4(lbl,k0,k1,k2,k3) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( max))))) | KApply5(lbl,k0,k1,k2,k3,k4) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( max)))))) | KApply6(lbl,k0,k1,k2,k3,k4,k5) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( hash_k_param_add_kitem k5 ( max))))))) | KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( hash_k_param_add_kitem k5 ( hash_k_param_add_kitem k6 ( max)))))))) | KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) -> qfld kq (31*h + Hashtbl.hash lbl) ( hash_k_param_add_kitem k0 ( hash_k_param_add_kitem k1 ( hash_k_param_add_kitem k2 ( hash_k_param_add_kitem k3 ( hash_k_param_add_kitem k4 ( hash_k_param_add_kitem k5 ( hash_k_param_add_kitem k6 ( hash_k_param_add_kitem k7 ( max))))))))) | KToken(s, st) -> qfld kq (31*h + Hashtbl.hash s * 41 + Hashtbl.hash st) ( max) | InjectedKLabel lbl -> qfld kq (31*h + Hashtbl.hash lbl) ( max) | Map(_,lbl,m) -> qfld kq (31*h + 43 * Hashtbl.hash lbl) ( KMap.fold (fun k v max -> hash_k_param_add_kitem v (hash_k_param_add_kitem k max)) m max) | List(_,lbl,l) -> qfld kq (31*h + 47 * Hashtbl.hash lbl) ( List.fold_left (fun max k -> hash_k_param_add_kitem k max) max l) | Set(_,lbl,s) -> qfld kq (31*h + 53 * Hashtbl.hash lbl) ( KSet.fold (fun k max -> hash_k_param_add_kitem k max) s max) | Array(lbl,_,l) -> qfld kq (31*h + 61 * Hashtbl.hash lbl + Dynarray.length l) ( max) | Int i -> qfld kq (31*h + Z.hash i) ( max) | Float (f,_,_) -> qfld kq (31*h + Hashtbl.hash (Gmp.FR.to_float f)) ( max) | String s -> qfld kq (31*h + Hashtbl.hash s) ( max) | Bytes b -> qfld kq (31*h + Hashtbl.hash 
b) ( max) | StringBuffer s -> qfld kq (31*h + Hashtbl.hash (Buffer.contents s)) ( max) | Bool b -> qfld kq (31*h + Hashtbl.hash b) ( max) | Bottom -> qfld kq (31*h + 1) ( max) | ThreadLocal -> qfld kq (31*h + 2) ( max) | Thread(k1,k2,k3,k4) -> qfld kq (31*h + Hashtbl.hash Lbl'Hash'Thread) (hash_k_param_add_kitem k1 (hash_k_param_add_kitem k2 (hash_k_param_add_kitem k3 (hash_k_param_add_kitem k4 max)))) let hash_k_param max k = qfld [] 0 (hash_k_param_add_kitem k ([],max)) let rec equal_k c1 c2 = if c1 == c2 then true else match (c1, c2) with | [], [] -> true | (hd1 :: tl1), (hd2 :: tl2) -> equal_kitem hd1 hd2 && equal_k tl1 tl2 | _ -> false and equal_kitem c1 c2 = if c1 == c2 then true else match (c1, c2) with | KApply0(lbl1),KApply0(lbl2) -> lbl1 = lbl2 | KApply1(lbl1,k0_1),KApply1(lbl2,k0_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 | KApply2(lbl1,k0_1,k1_1),KApply2(lbl2,k0_2,k1_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 | KApply3(lbl1,k0_1,k1_1,k2_1),KApply3(lbl2,k0_2,k1_2,k2_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 | KApply4(lbl1,k0_1,k1_1,k2_1,k3_1),KApply4(lbl2,k0_2,k1_2,k2_2,k3_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 | KApply5(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1),KApply5(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 | KApply6(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1),KApply6(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 && equal_k k5_1 k5_2 | KApply7(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1),KApply7(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 && equal_k k5_1 k5_2 && equal_k k6_1 k6_2 | KApply8(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1,k7_1),KApply8(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2,k7_2) -> lbl1 = lbl2 && equal_k k0_1 k0_2 && equal_k k1_1 k1_2 && equal_k k2_1 k2_2 && equal_k k3_1 k3_2 && equal_k k4_1 k4_2 && equal_k k5_1 k5_2 && equal_k k6_1 k6_2 && equal_k k7_1 k7_2 | (KToken(s1, st1)), (KToken(s2, st2)) -> s1 = s2 && st1 = st2 | (InjectedKLabel kl1), (InjectedKLabel kl2) -> kl1 = kl2 | (Map (_,k1,m1)), (Map (_,k2,m2)) -> k1 = k2 && KMap.cardinal m1 = KMap.cardinal m2 && (KMap.equal) (equal_k) m1 m2 | (List (_,k1,l1)), (List (_,k2,l2)) -> k1 = k2 && equal_klist l1 l2 | (Set (_,k1,s1)), (Set (_,k2,s2)) -> k1 = k2 && KSet.cardinal s1 = KSet.cardinal s2 && (KSet.equal) s1 s2 | (Array (s1,k1,l1)), (Array (s2,k2,l2)) -> s1 = s2 && equal_k k1 k2 && l1 == l2 | (Int i1), (Int i2) -> Z.equal i1 i2 | (Float (f1,e1,p1)), (Float (f2,e2,p2)) -> e1 = e2 && p1 = p2 && Gmp.FR.compare f1 f2 = 0 | (String s1), (String s2) -> s1 = s2 | (Bytes b1), (Bytes b2) -> b1 == b2 | (StringBuffer s1), (StringBuffer s2) -> s1 == s2 | (Bool b1), (Bool b2) -> b1 = b2 | Bottom, Bottom -> true | _ -> false and equal_klist c1 c2 = if c1 == c2 then true else match (c1, c2) with | [], [] -> true | (hd1 :: tl1), (hd2 :: tl2) -> equal_k hd1 hd2 && equal_klist tl1 tl2 | _ -> false let rec compare c1 c2 = if c1 == c2 then 0 else match (c1, c2) with | [], [] -> 0 | (hd1 :: tl1), (hd2 :: tl2) -> let v = compare_kitem hd1 hd2 in if v = 0 then compare tl1 tl2 else v | (_ :: _), _ -> -1 | _ -> 1 and compare_kitem c1 c2 = if c1 == c2 then 0 else match (c1, c2) with | KApply0(lbl1),KApply0(lbl2) -> 
Pervasives.compare lbl1 lbl2 | KApply1(lbl1,k0_1),KApply1(lbl2,k0_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then compare k0_1 k0_2 else v) | KApply2(lbl1,k0_1,k1_1),KApply2(lbl2,k0_2,k1_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then compare k1_1 k1_2 else v) else v) | KApply3(lbl1,k0_1,k1_1,k2_1),KApply3(lbl2,k0_2,k1_2,k2_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then compare k2_1 k2_2 else v) else v) else v) | KApply4(lbl1,k0_1,k1_1,k2_1,k3_1),KApply4(lbl2,k0_2,k1_2,k2_2,k3_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then compare k3_1 k3_2 else v) else v) else v) else v) | KApply5(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1),KApply5(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then compare k4_1 k4_2 else v) else v) else v) else v) else v) | KApply6(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1),KApply6(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then (let v = compare k4_1 k4_2 in if v = 0 then compare k5_1 k5_2 else v) else v) else v) else v) else v) else v) | KApply7(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1),KApply7(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then (let v = compare k4_1 k4_2 in if v = 0 then (let v = compare k5_1 k5_2 in if v = 0 then compare k6_1 k6_2 else v) else v) else v) else v) else v) else v) else v) | KApply8(lbl1,k0_1,k1_1,k2_1,k3_1,k4_1,k5_1,k6_1,k7_1),KApply8(lbl2,k0_2,k1_2,k2_2,k3_2,k4_2,k5_2,k6_2,k7_2) -> (let v = Pervasives.compare lbl1 lbl2 in if v = 0 then (let v = compare k0_1 k0_2 in if v = 0 then (let v = compare k1_1 k1_2 in if v = 0 then (let v = compare k2_1 k2_2 in if v = 0 then (let v = compare k3_1 k3_2 in if v = 0 then (let v = compare k4_1 k4_2 in if v = 0 then (let v = compare k5_1 k5_2 in if v = 0 then (let v = compare k6_1 k6_2 in if v = 0 then compare k7_1 k7_2 else v) else v) else v) else v) else v) else v) else v) else v) | (KToken(s1, st1)), (KToken(s2, st2)) -> let v = Pervasives.compare s1 s2 in if v = 0 then Pervasives.compare st1 st2 else v | (InjectedKLabel kl1), (InjectedKLabel kl2) -> Pervasives.compare kl1 kl2 | (Map (_,k1,m1)), (Map (_,k2,m2)) -> let v = Pervasives.compare k1 k2 in if v = 0 then (KMap.compare) compare m1 m2 else v | (List (_,k1,l1)), (List (_,k2,l2)) -> let v = Pervasives.compare k1 k2 in if v = 0 then compare_klist l1 l2 else v | (Set (_,k1,s1)), (Set (_,k2,s2)) -> let v = Pervasives.compare k1 k2 in if v = 0 then (KSet.compare) s1 s2 else v | (Array (s1,k1,l1)), (Array (s2,k2,l2)) -> let v = Pervasives.compare s1 s2 in if v = 0 then let v = compare k1 k2 in if v = 0 then Dynarray.compare compare_klist l1 l2 else v else v | (Int i1), (Int i2) -> Z.compare i1 i2 | (Float 
(f1,e1,p1)), (Float (f2,e2,p2)) -> let v = e2 - e1 in if v = 0 then let v2 = p2 - p1 in if v2 = 0 then Gmp.FR.compare f1 f2 else v2 else v | (String s1), (String s2) -> Pervasives.compare s1 s2 | (Bytes b1), (Bytes b2) -> Pervasives.compare b1 b2 | (StringBuffer s1), (StringBuffer s2) -> Pervasives.compare (Buffer.contents s1) (Buffer.contents s2) | (Bool b1), (Bool b2) -> if b1 = b2 then 0 else if b1 then -1 else 1 | Bottom, Bottom -> 0 | ThreadLocal, ThreadLocal -> 0 | Thread (k11, k12, k13, k14), Thread (k21, k22, k23, k24) -> let v = compare k11 k21 in if v = 0 then let v = compare k12 k22 in if v = 0 then let v = compare k13 k23 in if v = 0 then compare k14 k24 else v else v else v | KApply0 _, _ -> -1 | _, KApply0 _ -> 1 | KApply1 _, _ -> -1 | _, KApply1 _ -> 1 | KApply2 _, _ -> -1 | _, KApply2 _ -> 1 | KApply3 _, _ -> -1 | _, KApply3 _ -> 1 | KApply4 _, _ -> -1 | _, KApply4 _ -> 1 | KApply5 _, _ -> -1 | _, KApply5 _ -> 1 | KApply6 _, _ -> -1 | _, KApply6 _ -> 1 | KApply7 _, _ -> -1 | _, KApply7 _ -> 1 | KApply8 _, _ -> -1 | _, KApply8 _ -> 1 | KToken(_, _), _ -> -1 | _, KToken(_, _) -> 1 | InjectedKLabel(_), _ -> -1 | _, InjectedKLabel(_) -> 1 | Map(_), _ -> -1 | _, Map(_) -> 1 | List(_), _ -> -1 | _, List(_) -> 1 | Set(_), _ -> -1 | _, Set(_) -> 1 | Array(_), _ -> -1 | _, Array(_) -> 1 | Int(_), _ -> -1 | _, Int(_) -> 1 | Float(_), _ -> -1 | _, Float(_) -> 1 | String(_), _ -> -1 | _, String(_) -> 1 | Bytes(_), _ -> -1 | _, Bytes(_) -> 1 | StringBuffer(_), _ -> -1 | _, StringBuffer(_) -> 1 | Bool(_), _ -> -1 | _, Bool(_) -> 1 | Bottom, _ -> -1 | _, Bottom -> 1 | ThreadLocal, _ -> -1 | _, ThreadLocal -> 1 and compare_klist c1 c2 = match (c1, c2) with | [], [] -> 0 | (hd1 :: tl1), (hd2 :: tl2) -> let v = compare hd1 hd2 in if v = 0 then compare_klist tl1 tl2 else v | (_ :: _), _ -> -1 | _ -> 1 end type normal_kitem = KApply of klabel * K.t list | KItem of K.kitem open K let normalize (k: kitem) : normal_kitem = match k with | KApply0(lbl) -> KApply (lbl, []) | KApply1(lbl,k0) -> KApply (lbl, [k0]) | KApply2(lbl,k0,k1) -> KApply (lbl, [k0; k1]) | KApply3(lbl,k0,k1,k2) -> KApply (lbl, [k0; k1; k2]) | KApply4(lbl,k0,k1,k2,k3) -> KApply (lbl, [k0; k1; k2; k3]) | KApply5(lbl,k0,k1,k2,k3,k4) -> KApply (lbl, [k0; k1; k2; k3; k4]) | KApply6(lbl,k0,k1,k2,k3,k4,k5) -> KApply (lbl, [k0; k1; k2; k3; k4; k5]) | KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) -> KApply (lbl, [k0; k1; k2; k3; k4; k5; k6]) | KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) -> KApply (lbl, [k0; k1; k2; k3; k4; k5; k6; k7]) | v -> KItem v let denormalize (k: normal_kitem) : kitem = match k with | KApply (lbl, []) -> KApply0(lbl) | KApply (lbl, [k0]) -> KApply1(lbl,k0) | KApply (lbl, [k0; k1]) -> KApply2(lbl,k0,k1) | KApply (lbl, [k0; k1; k2]) -> KApply3(lbl,k0,k1,k2) | KApply (lbl, [k0; k1; k2; k3]) -> KApply4(lbl,k0,k1,k2,k3) | KApply (lbl, [k0; k1; k2; k3; k4]) -> KApply5(lbl,k0,k1,k2,k3,k4) | KApply (lbl, [k0; k1; k2; k3; k4; k5]) -> KApply6(lbl,k0,k1,k2,k3,k4,k5) | KApply (lbl, [k0; k1; k2; k3; k4; k5; k6]) -> KApply7(lbl,k0,k1,k2,k3,k4,k5,k6) | KApply (lbl, [k0; k1; k2; k3; k4; k5; k6; k7]) -> KApply8(lbl,k0,k1,k2,k3,k4,k5,k6,k7) | KItem v -> v | KApply (_, _) -> invalid_arg "denormalize" type k = K.t let denormalize0 (c: unit) : k list = match c with () -> [] let normalize0 (c: k list) = match c with [] -> () | _ -> invalid_arg "normalize0" let denormalize1 (c: k) : k list = match c with (k0) -> [k0] let normalize1 (c: k list) = match c with [k0] -> (k0) | _ -> invalid_arg "normalize1" let denormalize2 (c: k * k) : k list = match c with 
(k0,k1) -> [k0; k1] let normalize2 (c: k list) = match c with [k0; k1] -> (k0,k1) | _ -> invalid_arg "normalize2" let denormalize3 (c: k * k * k) : k list = match c with (k0,k1,k2) -> [k0; k1; k2] let normalize3 (c: k list) = match c with [k0; k1; k2] -> (k0,k1,k2) | _ -> invalid_arg "normalize3" let denormalize4 (c: k * k * k * k) : k list = match c with (k0,k1,k2,k3) -> [k0; k1; k2; k3] let normalize4 (c: k list) = match c with [k0; k1; k2; k3] -> (k0,k1,k2,k3) | _ -> invalid_arg "normalize4" let denormalize5 (c: k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4) -> [k0; k1; k2; k3; k4] let normalize5 (c: k list) = match c with [k0; k1; k2; k3; k4] -> (k0,k1,k2,k3,k4) | _ -> invalid_arg "normalize5" let denormalize6 (c: k * k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4,k5) -> [k0; k1; k2; k3; k4; k5] let normalize6 (c: k list) = match c with [k0; k1; k2; k3; k4; k5] -> (k0,k1,k2,k3,k4,k5) | _ -> invalid_arg "normalize6" let denormalize7 (c: k * k * k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4,k5,k6) -> [k0; k1; k2; k3; k4; k5; k6] let normalize7 (c: k list) = match c with [k0; k1; k2; k3; k4; k5; k6] -> (k0,k1,k2,k3,k4,k5,k6) | _ -> invalid_arg "normalize7" let denormalize8 (c: k * k * k * k * k * k * k * k) : k list = match c with (k0,k1,k2,k3,k4,k5,k6,k7) -> [k0; k1; k2; k3; k4; k5; k6; k7] let normalize8 (c: k list) = match c with [k0; k1; k2; k3; k4; k5; k6; k7] -> (k0,k1,k2,k3,k4,k5,k6,k7) | _ -> invalid_arg "normalize8" let int0 = lazy (Int (Z.of_string "0")) let int1 = lazy (Int (Z.of_string "1")) let int2 = lazy (Int (Z.of_string "2")) let int'Hyph'1 = lazy (Int (Z.of_string "-1")) let const'Hash'EHOSTDOWN_K'Hyph'IO = KApply0(Lbl'Hash'EHOSTDOWN_K'Hyph'IO) let const'Stop'ThreadCellBag = KApply0(Lbl'Stop'ThreadCellBag) let constnoKCell = KApply0(LblnoKCell) let constinitSchanCell = KApply0(LblinitSchanCell) let const'Hash'EINTR_K'Hyph'IO = KApply0(Lbl'Hash'EINTR_K'Hyph'IO) let constnoLidCell = KApply0(LblnoLidCell) let constnoWhatCell = KApply0(LblnoWhatCell) let constinitWhatCell = KApply0(LblinitWhatCell) let const'Hash'EOVERFLOW_K'Hyph'IO = KApply0(Lbl'Hash'EOVERFLOW_K'Hyph'IO) let constnoStateCell = KApply0(LblnoStateCell) let const'Hash'ENETDOWN_K'Hyph'IO = KApply0(Lbl'Hash'ENETDOWN_K'Hyph'IO) let constnoVarsCell = KApply0(LblnoVarsCell) let constnoRnumCell = KApply0(LblnoRnumCell) let constnoMsgCell = KApply0(LblnoMsgCell) let const'Hash'EIO_K'Hyph'IO = KApply0(Lbl'Hash'EIO_K'Hyph'IO) let const'Hash'Bottom = KApply0(Lbl'Hash'Bottom) let const'Stop'ListenCellBag = KApply0(Lbl'Stop'ListenCellBag) let const'Hash'EISCONN_K'Hyph'IO = KApply0(Lbl'Hash'EISCONN_K'Hyph'IO) let const'Hash'EDOM_K'Hyph'IO = KApply0(Lbl'Hash'EDOM_K'Hyph'IO) let const'Stop'Map = KApply0(Lbl'Stop'Map) let const'Hash'EPROTOTYPE_K'Hyph'IO = KApply0(Lbl'Hash'EPROTOTYPE_K'Hyph'IO) let const'Hash'freezernot__GRHO'Hyph'SYNTAX0_ = KApply0(Lbl'Hash'freezernot__GRHO'Hyph'SYNTAX0_) let constNil_GRHO'Hyph'SYNTAX = KApply0(LblNil_GRHO'Hyph'SYNTAX) let const'Hash'EADDRINUSE_K'Hyph'IO = KApply0(Lbl'Hash'EADDRINUSE_K'Hyph'IO) let const'Hash'EACCES_K'Hyph'IO = KApply0(Lbl'Hash'EACCES_K'Hyph'IO) let constinitWhereCell = KApply0(LblinitWhereCell) let constnoMlidCell = KApply0(LblnoMlidCell) let constnewUUID_STRING = KApply0(LblnewUUID_STRING) let const'Hash'ENOSPC_K'Hyph'IO = KApply0(Lbl'Hash'ENOSPC_K'Hyph'IO) let constnoTuplespaceCell = KApply0(LblnoTuplespaceCell) let constinitLidCell = KApply0(LblinitLidCell) let const'Hash'EMLINK_K'Hyph'IO = KApply0(Lbl'Hash'EMLINK_K'Hyph'IO) let 
const'Hash'EINVAL_K'Hyph'IO = KApply0(Lbl'Hash'EINVAL_K'Hyph'IO) let constinitNomoCell = KApply0(LblinitNomoCell) let constinitSendCell = KApply0(LblinitSendCell) let constnoTupleCell = KApply0(LblnoTupleCell) let const'Hash'ENOPROTOOPT_K'Hyph'IO = KApply0(Lbl'Hash'ENOPROTOOPT_K'Hyph'IO) let const'Hash'EPERM_K'Hyph'IO = KApply0(Lbl'Hash'EPERM_K'Hyph'IO) let const'Hash'EWOULDBLOCK_K'Hyph'IO = KApply0(Lbl'Hash'EWOULDBLOCK_K'Hyph'IO) let constnoSchanCell = KApply0(LblnoSchanCell) let const'Stop'Set = KApply0(Lbl'Stop'Set) let const'Hash'ENETUNREACH_K'Hyph'IO = KApply0(Lbl'Hash'ENETUNREACH_K'Hyph'IO) let const'Stop'List = KApply0(Lbl'Stop'List) let const'Hash'ENAMETOOLONG_K'Hyph'IO = KApply0(Lbl'Hash'ENAMETOOLONG_K'Hyph'IO) let const'Hash'ECONNRESET_K'Hyph'IO = KApply0(Lbl'Hash'ECONNRESET_K'Hyph'IO) let const'Hash'ENXIO_K'Hyph'IO = KApply0(Lbl'Hash'ENXIO_K'Hyph'IO) let constnoReceivesCell = KApply0(LblnoReceivesCell) let const'Hash'EALREADY_K'Hyph'IO = KApply0(Lbl'Hash'EALREADY_K'Hyph'IO) let constinitRidCell = KApply0(LblinitRidCell) let const'Hash'ENOTCONN_K'Hyph'IO = KApply0(Lbl'Hash'ENOTCONN_K'Hyph'IO) let constnoNumCell = KApply0(LblnoNumCell) let const'Hash'ETOOMANYREFS_K'Hyph'IO = KApply0(Lbl'Hash'ETOOMANYREFS_K'Hyph'IO) let constnoForgCell = KApply0(LblnoForgCell) let constinitIdCell = KApply0(LblinitIdCell) let const'Hash'configuration_K'Hyph'REFLECTION = KApply0(Lbl'Hash'configuration_K'Hyph'REFLECTION) let const'Hash'EMSGSIZE_K'Hyph'IO = KApply0(Lbl'Hash'EMSGSIZE_K'Hyph'IO) let constnoReactionCell = KApply0(LblnoReactionCell) let const'Stop'SendCellBag = KApply0(Lbl'Stop'SendCellBag) let constinitListensCell = KApply0(LblinitListensCell) let const'Hash'EDEADLK_K'Hyph'IO = KApply0(Lbl'Hash'EDEADLK_K'Hyph'IO) let const'Hash'ENOTSOCK_K'Hyph'IO = KApply0(Lbl'Hash'ENOTSOCK_K'Hyph'IO) let constinitStateCell = KApply0(LblinitStateCell) let const'Hash'EAGAIN_K'Hyph'IO = KApply0(Lbl'Hash'EAGAIN_K'Hyph'IO) let constinitReactionCell = KApply0(LblinitReactionCell) let const'Hash'ESHUTDOWN_K'Hyph'IO = KApply0(Lbl'Hash'ESHUTDOWN_K'Hyph'IO) let constnoMsidCell = KApply0(LblnoMsidCell) let constnoSidCell = KApply0(LblnoSidCell) let constinitRnumCell = KApply0(LblinitRnumCell) let constnoThreadsCell = KApply0(LblnoThreadsCell) let const'Hash'ERANGE_K'Hyph'IO = KApply0(Lbl'Hash'ERANGE_K'Hyph'IO) let const'Hash'E2BIG_K'Hyph'IO = KApply0(Lbl'Hash'E2BIG_K'Hyph'IO) let constnoIdCell = KApply0(LblnoIdCell) let const'Hash'ECONNREFUSED_K'Hyph'IO = KApply0(Lbl'Hash'ECONNREFUSED_K'Hyph'IO) let const'Hash'ENOSYS_K'Hyph'IO = KApply0(Lbl'Hash'ENOSYS_K'Hyph'IO) let constinitMsidCell = KApply0(LblinitMsidCell) let const'Hash'ENOTDIR_K'Hyph'IO = KApply0(Lbl'Hash'ENOTDIR_K'Hyph'IO) let const'Hash'ECONNABORTED_K'Hyph'IO = KApply0(Lbl'Hash'ECONNABORTED_K'Hyph'IO) let const'Hash'EBUSY_K'Hyph'IO = KApply0(Lbl'Hash'EBUSY_K'Hyph'IO) let const'Hash'EOPNOTSUPP_K'Hyph'IO = KApply0(Lbl'Hash'EOPNOTSUPP_K'Hyph'IO) let const'Hash'ESRCH_K'Hyph'IO = KApply0(Lbl'Hash'ESRCH_K'Hyph'IO) let const'Hash'ENOMEM_K'Hyph'IO = KApply0(Lbl'Hash'ENOMEM_K'Hyph'IO) let constinitRecCell = KApply0(LblinitRecCell) let constinitLengthCell = KApply0(LblinitLengthCell) let constinitReactCell = KApply0(LblinitReactCell) let constnoMatchCell = KApply0(LblnoMatchCell) let const'Hash'ESPIPE_K'Hyph'IO = KApply0(Lbl'Hash'ESPIPE_K'Hyph'IO) let constinitNumCell = KApply0(LblinitNumCell) let constnoWhereCell = KApply0(LblnoWhereCell) let const'Hash'ENOENT_K'Hyph'IO = KApply0(Lbl'Hash'ENOENT_K'Hyph'IO) let const'Hash'stdout_K'Hyph'IO = 
KApply0(Lbl'Hash'stdout_K'Hyph'IO) let constinitStypeCell = KApply0(LblinitStypeCell) let const'Hash'ENODEV_K'Hyph'IO = KApply0(Lbl'Hash'ENODEV_K'Hyph'IO) let const'Hash'EXDEV_K'Hyph'IO = KApply0(Lbl'Hash'EXDEV_K'Hyph'IO) let const'Hash'ENOLCK_K'Hyph'IO = KApply0(Lbl'Hash'ENOLCK_K'Hyph'IO) let const'Stop'RecCellBag = KApply0(Lbl'Stop'RecCellBag) let constinitListenCell = KApply0(LblinitListenCell) let const'Hash'argv = KApply0(Lbl'Hash'argv) let constnoSendsCell = KApply0(LblnoSendsCell) let constnoLengthCell = KApply0(LblnoLengthCell) let const'Hash'ENOTEMPTY_K'Hyph'IO = KApply0(Lbl'Hash'ENOTEMPTY_K'Hyph'IO) let const'Hash'EMFILE_K'Hyph'IO = KApply0(Lbl'Hash'EMFILE_K'Hyph'IO) let const'Hash'ELOOP_K'Hyph'IO = KApply0(Lbl'Hash'ELOOP_K'Hyph'IO) let const'Hash'stderr_K'Hyph'IO = KApply0(Lbl'Hash'stderr_K'Hyph'IO) let constnoTypeCell = KApply0(LblnoTypeCell) let constnoReactCell = KApply0(LblnoReactCell) let constinitForgCell = KApply0(LblinitForgCell) let const'Hash'EPIPE_K'Hyph'IO = KApply0(Lbl'Hash'EPIPE_K'Hyph'IO) let const'Hash'ENETRESET_K'Hyph'IO = KApply0(Lbl'Hash'ENETRESET_K'Hyph'IO) let constinitVarsCell = KApply0(LblinitVarsCell) let constnoRidCell = KApply0(LblnoRidCell) let constinitTuplespaceCell = KApply0(LblinitTuplespaceCell) let constinitMlidCell = KApply0(LblinitMlidCell) let constinitChanCell = KApply0(LblinitChanCell) let constnoListensCell = KApply0(LblnoListensCell) let constinitSendsCell = KApply0(LblinitSendsCell) let const'Hash'EPFNOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'EPFNOSUPPORT_K'Hyph'IO) let const'Hash'EEXIST_K'Hyph'IO = KApply0(Lbl'Hash'EEXIST_K'Hyph'IO) let const'Hash'stdin_K'Hyph'IO = KApply0(Lbl'Hash'stdin_K'Hyph'IO) let const'Hash'EADDRNOTAVAIL_K'Hyph'IO = KApply0(Lbl'Hash'EADDRNOTAVAIL_K'Hyph'IO) let const'Hash'EHOSTUNREACH_K'Hyph'IO = KApply0(Lbl'Hash'EHOSTUNREACH_K'Hyph'IO) let constnoChanCell = KApply0(LblnoChanCell) let constinitTupleCell = KApply0(LblinitTupleCell) let const'Hash'ECHILD_K'Hyph'IO = KApply0(Lbl'Hash'ECHILD_K'Hyph'IO) let const'Hash'EOF_K'Hyph'IO = KApply0(Lbl'Hash'EOF_K'Hyph'IO) let const'Hash'EDESTADDRREQ_K'Hyph'IO = KApply0(Lbl'Hash'EDESTADDRREQ_K'Hyph'IO) let const'Hash'EFBIG_K'Hyph'IO = KApply0(Lbl'Hash'EFBIG_K'Hyph'IO) let const'Hash'EBADF_K'Hyph'IO = KApply0(Lbl'Hash'EBADF_K'Hyph'IO) let const'Hash'ETIMEDOUT_K'Hyph'IO = KApply0(Lbl'Hash'ETIMEDOUT_K'Hyph'IO) let const'Hash'ESOCKTNOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'ESOCKTNOSUPPORT_K'Hyph'IO) let constnoWhoCell = KApply0(LblnoWhoCell) let const'Hash'EISDIR_K'Hyph'IO = KApply0(Lbl'Hash'EISDIR_K'Hyph'IO) let constnoNomoCell = KApply0(LblnoNomoCell) let const'Hash'ENOTTY_K'Hyph'IO = KApply0(Lbl'Hash'ENOTTY_K'Hyph'IO) let const'Hash'EFAULT_K'Hyph'IO = KApply0(Lbl'Hash'EFAULT_K'Hyph'IO) let constinitMatchCell = KApply0(LblinitMatchCell) let const'Hash'EROFS_K'Hyph'IO = KApply0(Lbl'Hash'EROFS_K'Hyph'IO) let constinitTypeCell = KApply0(LblinitTypeCell) let const'Hash'noparse_K'Hyph'IO = KApply0(Lbl'Hash'noparse_K'Hyph'IO) let const'Hash'ENFILE_K'Hyph'IO = KApply0(Lbl'Hash'ENFILE_K'Hyph'IO) let const'Hash'ThreadLocal = KApply0(Lbl'Hash'ThreadLocal) let constinitMsgCell = KApply0(LblinitMsgCell) let constnoStypeCell = KApply0(LblnoStypeCell) let constinitReceivesCell = KApply0(LblinitReceivesCell) let constinitSidCell = KApply0(LblinitSidCell) let constinitWhoCell = KApply0(LblinitWhoCell) let constnoContCell = KApply0(LblnoContCell) let constinitContCell = KApply0(LblinitContCell) let const'Hash'EAFNOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'EAFNOSUPPORT_K'Hyph'IO) let 
const'Hash'ENOEXEC_K'Hyph'IO = KApply0(Lbl'Hash'ENOEXEC_K'Hyph'IO) let const'Hash'EPROTONOSUPPORT_K'Hyph'IO = KApply0(Lbl'Hash'EPROTONOSUPPORT_K'Hyph'IO) let const'Hash'ENOBUFS_K'Hyph'IO = KApply0(Lbl'Hash'ENOBUFS_K'Hyph'IO) let const'Hash'EINPROGRESS_K'Hyph'IO = KApply0(Lbl'Hash'EINPROGRESS_K'Hyph'IO) let val_for (c: klabel) (k : k) (v : k) : normal_kitem = match c with |_ -> KApply((el_for c), [k;v])
5e0538d8bba6bb3570b9f37865fcc703f7f532712184535f2c7384002fe2f380
simmsb/calamity
Dsl.hs
# LANGUAGE RecursiveDo # -- | A DSL for generating commands and groups module CalamityCommands.Dsl ( -- * Commands DSL -- $dslTutorial command, command', commandA, commandA', hide, help, requires, requires', requiresPure, group, group', groupA, groupA', DSLState, DSLC, raiseDSL, fetchHandler, ) where import CalamityCommands.AliasType import CalamityCommands.Check import CalamityCommands.Command hiding (help) import CalamityCommands.CommandUtils import CalamityCommands.Context import CalamityCommands.Error import CalamityCommands.Group hiding (help) import CalamityCommands.Handler import CalamityCommands.Internal.LocalWriter import CalamityCommands.ParameterInfo import Data.HashMap.Lazy qualified as LH import Data.List.NonEmpty (NonEmpty (..)) import Data.Text qualified as T import Polysemy qualified as P import Polysemy.Fail qualified as P import Polysemy.Fixpoint qualified as P import Polysemy.Reader qualified as P import Polysemy.Tagged qualified as P $ dslTutorial This module provides a way of constructing bot commands in a declarative way . The main component of this is the ' command ' function , which takes a type - level list of command parameters , the name , and the callback and produces a command . There are also the alternatives ' command '' , ' commandA ' and ' commandA '' , for when you want to handle parsing of the input yourself , and/or want aliases of the command . The functions : ' hide ' , ' help ' , ' requires ' , and ' group ' can be used to change attributes of any commands declared inside the monadic action passed to them , for example : @ ' hide ' ' $ ' do ' command ' \@\ ' [ ] " test " \\ctx - > ' pure ' ( ) @ In the above block , any command declared inside ' hide ' will have its \'hidden\ ' flag set and will not be shown by the default help command : ' CalamityCommands . Help.helpCommand ' The ' CalamityCommands . Help.helpCommand ' function can be used to create a help command for the commands DSL action it is used in , read its doc page for more information on how it functions . The ' CalamityCommands . Utils.buildCommands ' function is used to construct a ' CommandHandler ' which can then be used with ' CalamityCommands . Utils.processCommands ' or ' CalamityCommands . Utils.handleCommands ' to process a command . This module provides a way of constructing bot commands in a declarative way. The main component of this is the 'command' function, which takes a type-level list of command parameters, the name, and the callback and produces a command. There are also the alternatives 'command'', 'commandA' and 'commandA'', for when you want to handle parsing of the input yourself, and/or want aliases of the command. The functions: 'hide', 'help', 'requires', and 'group' can be used to change attributes of any commands declared inside the monadic action passed to them, for example: @ 'hide' '$' do 'command' \@\'[] "test" \\ctx -> 'pure' () @ In the above block, any command declared inside 'hide' will have its \'hidden\' flag set and will not be shown by the default help command: 'CalamityCommands.Help.helpCommand' The 'CalamityCommands.Help.helpCommand' function can be used to create a help command for the commands DSL action it is used in, read its doc page for more information on how it functions. The 'CalamityCommands.Utils.buildCommands' function is used to construct a 'CommandHandler' which can then be used with 'CalamityCommands.Utils.processCommands' or 'CalamityCommands.Utils.handleCommands' to process a command. 
-} type DSLState m c a r = ( LocalWriter (LH.HashMap T.Text (Command m c a, AliasType)) ': LocalWriter (LH.HashMap T.Text (Group m c a, AliasType)) ': P.Reader (Maybe (Group m c a)) ': P.Tagged "hidden" (P.Reader Bool) ': P.Reader (c -> T.Text) ': P.Tagged "original-help" (P.Reader (c -> T.Text)) ': P.Reader [Check m c] ': P.Reader (CommandHandler m c a) ': P.Fixpoint ': r ) type DSLC m c a r = P.Members [ LocalWriter (LH.HashMap T.Text (Command m c a, AliasType)) , LocalWriter (LH.HashMap T.Text (Group m c a, AliasType)) , P.Reader (Maybe (Group m c a)) , P.Tagged "hidden" (P.Reader Bool) , P.Reader (c -> T.Text) , P.Tagged "original-help" (P.Reader (c -> T.Text)) , P.Reader [Check m c] , P.Reader (CommandHandler m c a) , P.Fixpoint ] r raiseDSL :: P.Sem r x -> P.Sem (DSLState m c a r) x raiseDSL = P.raise . P.raise . P.raise . P.raise . P.raise . P.raise . P.raise . P.raise . P.raise | Given the command name and parameter names , @parser@ and @callback@ for a command in the ' P.Sem ' monad , build a command by transforming the Polysemy actions into @m@ actions . Then register the command . The parent group , visibility , checks , and command help are drawn from the reader context . command in the 'P.Sem' monad, build a command by transforming the Polysemy actions into @m@ actions. Then register the command. The parent group, visibility, checks, and command help are drawn from the reader context. -} command' :: (Monad m, P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the command T.Text -> -- | The command's parameter metadata [ParameterInfo] -> -- | The parser for this command (c -> P.Sem r (Either CommandError p)) -> -- | The callback for this command ((c, p) -> P.Sem (P.Fail ': r) a) -> P.Sem r (Command m c a) command' name = commandA' name [] {- | Given the command name, aliases, and parameter names, @parser@ and @callback@ for a command in the 'P.Sem' monad, build a command by transforming the Polysemy actions into @m@ actions. Then register the command. The parent group, visibility, checks, and command help are drawn from the reader context. -} commandA' :: forall p c a m r. (Monad m, P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the command T.Text -> -- | The aliases for the command [T.Text] -> -- | The command's parameter metadata [ParameterInfo] -> -- | The parser for this command (c -> P.Sem r (Either CommandError p)) -> -- | The callback for this command ((c, p) -> P.Sem (P.Fail ': r) a) -> P.Sem r (Command m c a) commandA' name aliases params parser cb = do parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) cmd <- buildCommand' (name :| aliases) parent hidden checks params help' parser cb ltell $ LH.singleton name (cmd, Original) ltell $ LH.fromList [(name, (cmd, Alias)) | name <- aliases] pure cmd | Given the name of a command and a callback , and a type level list of the parameters , build and register a command . The parent group , visibility , checks , and command help are drawn from the reader context . Command parameters are parsed by first invoking ' CalamityCommands.Parser.parse ' for the first ' CalamityCommands . . ' , then running the next parser on the remaining input , and so on . = = = = Examples Building a command that adds two numbers . @ ' command ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ the parameters, build and register a command. 
The parent group, visibility, checks, and command help are drawn from the reader context. Command parameters are parsed by first invoking 'CalamityCommands.Parser.parse' for the first 'CalamityCommands.Parser.Parser', then running the next parser on the remaining input, and so on. ==== Examples Building a command that adds two numbers. @ 'command' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} command :: forall ps c a m r. ( Monad m , P.Member (P.Final m) r , DSLC m c a r , TypedCommandC ps c a r , CommandContext m c a ) => -- | The name of the command T.Text -> -- | The callback for this command (c -> CommandForParsers ps r a) -> P.Sem r (Command m c a) command name = commandA @ps name [] | Given the name and aliases of a command and a callback , and a type level list of the parameters , build and register a command . The parent group , visibility , checks , and command help are drawn from the reader context . = = = = Examples Building a command that adds two numbers . @ ' commandA ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " [ ] $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ the parameters, build and register a command. The parent group, visibility, checks, and command help are drawn from the reader context. ==== Examples Building a command that adds two numbers. @ 'commandA' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" [] $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} commandA :: forall ps c a m r. ( Monad m , P.Member (P.Final m) r , DSLC m c a r , TypedCommandC ps c a r , CommandContext m c a ) => -- | The name of the command T.Text -> -- | The aliases for the command [T.Text] -> -- | The callback for this command (c -> CommandForParsers ps r a) -> P.Sem r (Command m c a) commandA name aliases cmd = do parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) cmd' <- buildCommand @ps (name :| aliases) parent hidden checks help' cmd ltell $ LH.singleton name (cmd', Original) ltell $ LH.fromList [(name, (cmd', Alias)) | name <- aliases] pure cmd' {- | Set the visibility of any groups or commands registered inside the given action to hidden. -} hide :: P.Member (P.Tagged "hidden" (P.Reader Bool)) r => P.Sem r x -> P.Sem r x hide = P.tag @"hidden" . P.local @Bool (const True) . P.raise | Set the help for any groups or commands registered inside the given action . = = = = Examples @ ' help ' ( ' const ' " Add two integers " ) $ ' command ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ ==== Examples @ 'help' ('const' "Add two integers") $ 'command' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} help :: P.Member (P.Reader (c -> T.Text)) r => (c -> T.Text) -> P.Sem r a -> P.Sem r a help = P.local . const {- | Add to the list of checks for any commands registered inside the given action. -} requires :: DSLC m c a r => [Check m c] -> P.Sem r x -> P.Sem r x requires = P.local . (<>) | Construct a check and add it to the list of checks for any commands registered inside the given action . Refer to ' CalamityCommands . Check . Check ' for more info on checks . registered inside the given action. 
Refer to 'CalamityCommands.Check.Check' for more info on checks. -} requires' :: (Monad m, P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the check T.Text -> -- | The callback for the check (c -> P.Sem r (Maybe T.Text)) -> P.Sem r x -> P.Sem r x requires' name cb m = do check <- buildCheck name cb requires [check] m | Construct some pure checks and add them to the list of checks for any commands registered inside the given action . Refer to ' CalamityCommands . Check . Check ' for more info on checks . = = = = Examples @ ' requiresPure ' [ ( " always ok " , ' const ' ' Nothing ' ) ] $ ' command ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ commands registered inside the given action. Refer to 'CalamityCommands.Check.Check' for more info on checks. ==== Examples @ 'requiresPure' [("always ok", 'const' 'Nothing')] $ 'command' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} requiresPure :: (Monad m, DSLC m c a r) => [(T.Text, c -> Maybe T.Text)] -> -- A list of check names and check callbacks P.Sem r x -> P.Sem r x requiresPure checks = requires $ fmap (uncurry buildCheckPure) checks {- | Construct a group and place any commands registered in the given action into the new group. This also resets the @help@ function back to its original value, use 'group'' if you don't want that (i.e. your help function is context aware). -} group :: (Monad m, P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the group T.Text -> P.Sem r x -> P.Sem r x group name = groupA name [] {- | Construct a group with aliases and place any commands registered in the given action into the new group. The parent group, visibility, checks, and command help are drawn from the reader context. This also resets the @help@ function back to its original value, use 'group'' if you don't want that (i.e. your help function is context aware). -} groupA :: forall x c m a r. (Monad m, P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the group T.Text -> -- | The aliases of the group [T.Text] -> P.Sem r x -> P.Sem r x groupA name aliases m = mdo parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) origHelp <- fetchOrigHelp let group' = Group (name :| aliases) parent hidden commands children help' checks (children, (commands, res)) <- llisten @(LH.HashMap T.Text (Group m c a, AliasType)) $ llisten @(LH.HashMap T.Text (Command m c a, AliasType)) $ P.local @(Maybe (Group m c a)) (const $ Just group') $ P.local @(c -> T.Text) (const origHelp) m ltell $ LH.singleton name (group', Original) ltell $ LH.fromList [(name, (group', Alias)) | name <- aliases] pure res fetchOrigHelp :: P.Member (P.Tagged "original-help" (P.Reader (c -> T.Text))) r => P.Sem r (c -> T.Text) fetchOrigHelp = P.tag P.ask {- | Construct a group and place any commands registered in the given action into the new group. The parent group, visibility, checks, and command help are drawn from the reader context. Unlike 'help' this doesn't reset the @help@ function back to its original value. -} group' :: (P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the group T.Text -> P.Sem r x -> P.Sem r x group' name = groupA' name [] {- | Construct a group with aliases and place any commands registered in the given action into the new group. 
The parent group, visibility, checks, and command help are drawn from the reader context. Unlike 'help' this doesn't reset the @help@ function back to its original value. -} groupA' :: forall x c m a r. (P.Member (P.Final m) r, DSLC m c a r) => -- | The name of the group T.Text -> -- | The aliases of the group [T.Text] -> P.Sem r x -> P.Sem r x groupA' name aliases m = mdo parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) let group' = Group (name :| aliases) parent hidden commands children help' checks (children, (commands, res)) <- llisten @(LH.HashMap T.Text (Group m c a, AliasType)) $ llisten @(LH.HashMap T.Text (Command m c a, AliasType)) $ P.local @(Maybe (Group m c a)) (const $ Just group') m ltell $ LH.singleton name (group', Original) ltell $ LH.fromList [(name, (group', Alias)) | name <- aliases] pure res -- | Retrieve the final command handler for this block fetchHandler :: DSLC m c a r => P.Sem r (CommandHandler m c a) fetchHandler = P.ask
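A hedged usage sketch, not part of Dsl.hs itself: it combines the 'help' and 'command' examples from the Haddocks above into one declaration. The module name is invented; TypedCommandC, CommandContext and Named are assumed to live in the modules Dsl.hs itself imports or references (CalamityCommands.CommandUtils, CalamityCommands.Context, CalamityCommands.Parser); the context type c, the Int result type and the effect row r are illustrative choices, not anything fixed by the source.

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

module DslUsageSketch (addCommand) where

import CalamityCommands.Command (Command)
import CalamityCommands.CommandUtils (TypedCommandC)
import CalamityCommands.Context (CommandContext)
import CalamityCommands.Dsl
import CalamityCommands.Parser (Named)
import qualified Polysemy as P

-- An "add" command with a help string, in the style of the Haddock examples
-- above: two named Int parameters are parsed and summed. Checks could be
-- layered on with 'requires'/'requiresPure' in the same nested style once the
-- surrounding effect row is concrete.
addCommand ::
  ( Monad m
  , P.Member (P.Final m) r
  , DSLC m c Int r
  , TypedCommandC '[Named "a" Int, Named "b" Int] c Int r
  , CommandContext m c Int
  ) =>
  P.Sem r (Command m c Int)
addCommand =
  help (const "Add two integers") $
    command @'[Named "a" Int, Named "b" Int] "add" $ \_ctx a b ->
      pure (a + b)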
null
https://raw.githubusercontent.com/simmsb/calamity/be310255b446e87e7432673de1fbc67ef46de3ae/calamity-commands/CalamityCommands/Dsl.hs
haskell
| A DSL for generating commands and groups * Commands DSL $dslTutorial | The name of the command | The command's parameter metadata | The parser for this command | The callback for this command | Given the command name, aliases, and parameter names, @parser@ and @callback@ for a command in the 'P.Sem' monad, build a command by transforming the Polysemy actions into @m@ actions. Then register the command. The parent group, visibility, checks, and command help are drawn from the reader context. | The name of the command | The aliases for the command | The command's parameter metadata | The parser for this command | The callback for this command | The name of the command | The callback for this command | The name of the command | The aliases for the command | The callback for this command | Set the visibility of any groups or commands registered inside the given action to hidden. | Add to the list of checks for any commands registered inside the given action. | The name of the check | The callback for the check A list of check names and check callbacks | Construct a group and place any commands registered in the given action into the new group. This also resets the @help@ function back to its original value, use 'group'' if you don't want that (i.e. your help function is context aware). | The name of the group | Construct a group with aliases and place any commands registered in the given action into the new group. The parent group, visibility, checks, and command help are drawn from the reader context. This also resets the @help@ function back to its original value, use 'group'' if you don't want that (i.e. your help function is context aware). | The name of the group | The aliases of the group | Construct a group and place any commands registered in the given action into the new group. The parent group, visibility, checks, and command help are drawn from the reader context. Unlike 'help' this doesn't reset the @help@ function back to its original value. | The name of the group | Construct a group with aliases and place any commands registered in the given action into the new group. The parent group, visibility, checks, and command help are drawn from the reader context. Unlike 'help' this doesn't reset the @help@ function back to its original value. | The name of the group | The aliases of the group | Retrieve the final command handler for this block
# LANGUAGE RecursiveDo # module CalamityCommands.Dsl ( command, command', commandA, commandA', hide, help, requires, requires', requiresPure, group, group', groupA, groupA', DSLState, DSLC, raiseDSL, fetchHandler, ) where import CalamityCommands.AliasType import CalamityCommands.Check import CalamityCommands.Command hiding (help) import CalamityCommands.CommandUtils import CalamityCommands.Context import CalamityCommands.Error import CalamityCommands.Group hiding (help) import CalamityCommands.Handler import CalamityCommands.Internal.LocalWriter import CalamityCommands.ParameterInfo import Data.HashMap.Lazy qualified as LH import Data.List.NonEmpty (NonEmpty (..)) import Data.Text qualified as T import Polysemy qualified as P import Polysemy.Fail qualified as P import Polysemy.Fixpoint qualified as P import Polysemy.Reader qualified as P import Polysemy.Tagged qualified as P $ dslTutorial This module provides a way of constructing bot commands in a declarative way . The main component of this is the ' command ' function , which takes a type - level list of command parameters , the name , and the callback and produces a command . There are also the alternatives ' command '' , ' commandA ' and ' commandA '' , for when you want to handle parsing of the input yourself , and/or want aliases of the command . The functions : ' hide ' , ' help ' , ' requires ' , and ' group ' can be used to change attributes of any commands declared inside the monadic action passed to them , for example : @ ' hide ' ' $ ' do ' command ' \@\ ' [ ] " test " \\ctx - > ' pure ' ( ) @ In the above block , any command declared inside ' hide ' will have its \'hidden\ ' flag set and will not be shown by the default help command : ' CalamityCommands . Help.helpCommand ' The ' CalamityCommands . Help.helpCommand ' function can be used to create a help command for the commands DSL action it is used in , read its doc page for more information on how it functions . The ' CalamityCommands . Utils.buildCommands ' function is used to construct a ' CommandHandler ' which can then be used with ' CalamityCommands . Utils.processCommands ' or ' CalamityCommands . Utils.handleCommands ' to process a command . This module provides a way of constructing bot commands in a declarative way. The main component of this is the 'command' function, which takes a type-level list of command parameters, the name, and the callback and produces a command. There are also the alternatives 'command'', 'commandA' and 'commandA'', for when you want to handle parsing of the input yourself, and/or want aliases of the command. The functions: 'hide', 'help', 'requires', and 'group' can be used to change attributes of any commands declared inside the monadic action passed to them, for example: @ 'hide' '$' do 'command' \@\'[] "test" \\ctx -> 'pure' () @ In the above block, any command declared inside 'hide' will have its \'hidden\' flag set and will not be shown by the default help command: 'CalamityCommands.Help.helpCommand' The 'CalamityCommands.Help.helpCommand' function can be used to create a help command for the commands DSL action it is used in, read its doc page for more information on how it functions. The 'CalamityCommands.Utils.buildCommands' function is used to construct a 'CommandHandler' which can then be used with 'CalamityCommands.Utils.processCommands' or 'CalamityCommands.Utils.handleCommands' to process a command. 
-} type DSLState m c a r = ( LocalWriter (LH.HashMap T.Text (Command m c a, AliasType)) ': LocalWriter (LH.HashMap T.Text (Group m c a, AliasType)) ': P.Reader (Maybe (Group m c a)) ': P.Tagged "hidden" (P.Reader Bool) ': P.Reader (c -> T.Text) ': P.Tagged "original-help" (P.Reader (c -> T.Text)) ': P.Reader [Check m c] ': P.Reader (CommandHandler m c a) ': P.Fixpoint ': r ) type DSLC m c a r = P.Members [ LocalWriter (LH.HashMap T.Text (Command m c a, AliasType)) , LocalWriter (LH.HashMap T.Text (Group m c a, AliasType)) , P.Reader (Maybe (Group m c a)) , P.Tagged "hidden" (P.Reader Bool) , P.Reader (c -> T.Text) , P.Tagged "original-help" (P.Reader (c -> T.Text)) , P.Reader [Check m c] , P.Reader (CommandHandler m c a) , P.Fixpoint ] r raiseDSL :: P.Sem r x -> P.Sem (DSLState m c a r) x raiseDSL = P.raise . P.raise . P.raise . P.raise . P.raise . P.raise . P.raise . P.raise . P.raise | Given the command name and parameter names , @parser@ and @callback@ for a command in the ' P.Sem ' monad , build a command by transforming the Polysemy actions into @m@ actions . Then register the command . The parent group , visibility , checks , and command help are drawn from the reader context . command in the 'P.Sem' monad, build a command by transforming the Polysemy actions into @m@ actions. Then register the command. The parent group, visibility, checks, and command help are drawn from the reader context. -} command' :: (Monad m, P.Member (P.Final m) r, DSLC m c a r) => T.Text -> [ParameterInfo] -> (c -> P.Sem r (Either CommandError p)) -> ((c, p) -> P.Sem (P.Fail ': r) a) -> P.Sem r (Command m c a) command' name = commandA' name [] commandA' :: forall p c a m r. (Monad m, P.Member (P.Final m) r, DSLC m c a r) => T.Text -> [T.Text] -> [ParameterInfo] -> (c -> P.Sem r (Either CommandError p)) -> ((c, p) -> P.Sem (P.Fail ': r) a) -> P.Sem r (Command m c a) commandA' name aliases params parser cb = do parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) cmd <- buildCommand' (name :| aliases) parent hidden checks params help' parser cb ltell $ LH.singleton name (cmd, Original) ltell $ LH.fromList [(name, (cmd, Alias)) | name <- aliases] pure cmd | Given the name of a command and a callback , and a type level list of the parameters , build and register a command . The parent group , visibility , checks , and command help are drawn from the reader context . Command parameters are parsed by first invoking ' CalamityCommands.Parser.parse ' for the first ' CalamityCommands . . ' , then running the next parser on the remaining input , and so on . = = = = Examples Building a command that adds two numbers . @ ' command ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ the parameters, build and register a command. The parent group, visibility, checks, and command help are drawn from the reader context. Command parameters are parsed by first invoking 'CalamityCommands.Parser.parse' for the first 'CalamityCommands.Parser.Parser', then running the next parser on the remaining input, and so on. ==== Examples Building a command that adds two numbers. @ 'command' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} command :: forall ps c a m r. 
( Monad m , P.Member (P.Final m) r , DSLC m c a r , TypedCommandC ps c a r , CommandContext m c a ) => T.Text -> (c -> CommandForParsers ps r a) -> P.Sem r (Command m c a) command name = commandA @ps name [] | Given the name and aliases of a command and a callback , and a type level list of the parameters , build and register a command . The parent group , visibility , checks , and command help are drawn from the reader context . = = = = Examples Building a command that adds two numbers . @ ' commandA ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " [ ] $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ the parameters, build and register a command. The parent group, visibility, checks, and command help are drawn from the reader context. ==== Examples Building a command that adds two numbers. @ 'commandA' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" [] $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} commandA :: forall ps c a m r. ( Monad m , P.Member (P.Final m) r , DSLC m c a r , TypedCommandC ps c a r , CommandContext m c a ) => T.Text -> [T.Text] -> (c -> CommandForParsers ps r a) -> P.Sem r (Command m c a) commandA name aliases cmd = do parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) cmd' <- buildCommand @ps (name :| aliases) parent hidden checks help' cmd ltell $ LH.singleton name (cmd', Original) ltell $ LH.fromList [(name, (cmd', Alias)) | name <- aliases] pure cmd' hide :: P.Member (P.Tagged "hidden" (P.Reader Bool)) r => P.Sem r x -> P.Sem r x hide = P.tag @"hidden" . P.local @Bool (const True) . P.raise | Set the help for any groups or commands registered inside the given action . = = = = Examples @ ' help ' ( ' const ' " Add two integers " ) $ ' command ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ ==== Examples @ 'help' ('const' "Add two integers") $ 'command' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} help :: P.Member (P.Reader (c -> T.Text)) r => (c -> T.Text) -> P.Sem r a -> P.Sem r a help = P.local . const requires :: DSLC m c a r => [Check m c] -> P.Sem r x -> P.Sem r x requires = P.local . (<>) | Construct a check and add it to the list of checks for any commands registered inside the given action . Refer to ' CalamityCommands . Check . Check ' for more info on checks . registered inside the given action. Refer to 'CalamityCommands.Check.Check' for more info on checks. -} requires' :: (Monad m, P.Member (P.Final m) r, DSLC m c a r) => T.Text -> (c -> P.Sem r (Maybe T.Text)) -> P.Sem r x -> P.Sem r x requires' name cb m = do check <- buildCheck name cb requires [check] m | Construct some pure checks and add them to the list of checks for any commands registered inside the given action . Refer to ' CalamityCommands . Check . Check ' for more info on checks . = = = = Examples @ ' requiresPure ' [ ( " always ok " , ' const ' ' Nothing ' ) ] $ ' command ' \@\'['CalamityCommands . . Named ' " a " ' Int ' , ' CalamityCommands . . Named ' " b " ' Int ' ] " add " $ \\ctx a b - > ' pure ' ' $ ' ' Right ' ( a ' + ' b ) @ commands registered inside the given action. Refer to 'CalamityCommands.Check.Check' for more info on checks. 
==== Examples @ 'requiresPure' [("always ok", 'const' 'Nothing')] $ 'command' \@\'['CalamityCommands.Parser.Named' "a" 'Int', 'CalamityCommands.Parser.Named' "b" 'Int'] "add" $ \\ctx a b -> 'pure' '$' 'Right' (a '+' b) @ -} requiresPure :: (Monad m, DSLC m c a r) => [(T.Text, c -> Maybe T.Text)] -> P.Sem r x -> P.Sem r x requiresPure checks = requires $ fmap (uncurry buildCheckPure) checks group :: (Monad m, P.Member (P.Final m) r, DSLC m c a r) => T.Text -> P.Sem r x -> P.Sem r x group name = groupA name [] groupA :: forall x c m a r. (Monad m, P.Member (P.Final m) r, DSLC m c a r) => T.Text -> [T.Text] -> P.Sem r x -> P.Sem r x groupA name aliases m = mdo parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) origHelp <- fetchOrigHelp let group' = Group (name :| aliases) parent hidden commands children help' checks (children, (commands, res)) <- llisten @(LH.HashMap T.Text (Group m c a, AliasType)) $ llisten @(LH.HashMap T.Text (Command m c a, AliasType)) $ P.local @(Maybe (Group m c a)) (const $ Just group') $ P.local @(c -> T.Text) (const origHelp) m ltell $ LH.singleton name (group', Original) ltell $ LH.fromList [(name, (group', Alias)) | name <- aliases] pure res fetchOrigHelp :: P.Member (P.Tagged "original-help" (P.Reader (c -> T.Text))) r => P.Sem r (c -> T.Text) fetchOrigHelp = P.tag P.ask group' :: (P.Member (P.Final m) r, DSLC m c a r) => T.Text -> P.Sem r x -> P.Sem r x group' name = groupA' name [] groupA' :: forall x c m a r. (P.Member (P.Final m) r, DSLC m c a r) => T.Text -> [T.Text] -> P.Sem r x -> P.Sem r x groupA' name aliases m = mdo parent <- P.ask @(Maybe (Group m c a)) hidden <- P.tag $ P.ask @Bool checks <- P.ask @[Check m c] help' <- P.ask @(c -> T.Text) let group' = Group (name :| aliases) parent hidden commands children help' checks (children, (commands, res)) <- llisten @(LH.HashMap T.Text (Group m c a, AliasType)) $ llisten @(LH.HashMap T.Text (Command m c a, AliasType)) $ P.local @(Maybe (Group m c a)) (const $ Just group') m ltell $ LH.singleton name (group', Original) ltell $ LH.fromList [(name, (group', Alias)) | name <- aliases] pure res fetchHandler :: DSLC m c a r => P.Sem r (CommandHandler m c a) fetchHandler = P.ask
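A second hedged sketch, also not from the repository, showing the 'hide'/'group' scoping that $dslTutorial describes: every command declared inside the action passed to 'hide' carries the hidden flag, and the group nests the commands under "admin". Because the effect row stays polymorphic in this sketch, the group's command result type has to be pinned explicitly, which is what groupA's documented forall x c m a r order is used for below; in an application with a concrete effect row, plain group "admin" $ ... as shown in the tutorial is enough. Module and command names are invented.

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}

module DslGroupSketch (adminCommands) where

import CalamityCommands.CommandUtils (TypedCommandC)
import CalamityCommands.Context (CommandContext)
import CalamityCommands.Dsl
import qualified Polysemy as P

adminCommands ::
  forall m c r.
  ( Monad m
  , P.Member (P.Final m) r
  , DSLC m c () r
  , TypedCommandC '[] c () r
  , CommandContext m c ()
  ) =>
  P.Sem r ()
adminCommands =
  hide $
    -- groupA is forall x c m a r; x is the inner action's result and a is the
    -- commands' result type, both () in this sketch.
    groupA @() @c @m @() @r "admin" [] $ do
      _reload   <- command @'[] "reload"   $ \_ctx -> pure ()
      _shutdown <- command @'[] "shutdown" $ \_ctx -> pure ()
      pure ()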
e21d5128e10157519de11d92c0ad06060354f07652906012faee2d1b74fd908d
phadej/cabal-extras
Golden.hs
# OPTIONS_GHC -Wno - orphans # module Main (main) where import Data.TreeDiff import Data.TreeDiff.Golden (ediffGolden) import Distribution.ModuleName (ModuleName) import Distribution.Pretty (prettyShow) import System.FilePath ((</>)) import Test.Tasty (defaultMain, testGroup) import Test.Tasty.Golden.Advanced (goldenTest) import qualified Data.ByteString as BS import CabalDiff.Hoogle main :: IO () main = defaultMain $ testGroup "golden" [ testGroup "parser" [ golden "foldable1" , golden "optics-core" , golden "servant" , golden "singletons" , golden "vec" , golden "colour-2.3.4" , golden "colour-2.3.5" , golden "resolv" ] ] where golden name = ediffGolden goldenTest name goldenPath $ do contents <- BS.readFile hooglePath either fail return $ parseFile contents where goldenPath = "fixtures" </> (name ++ ".golden") hooglePath = "fixtures" </> (name ++ ".txt") ------------------------------------------------------------------------------- -- orphans ------------------------------------------------------------------------------- instance ToExpr ModuleName where toExpr mn = App "ModuleName" [toExpr (prettyShow mn)] instance ToExpr Key
null
https://raw.githubusercontent.com/phadej/cabal-extras/bb82ebb7fdaa939c27762e10cb4cad49249b553f/cabal-diff/test/Golden.hs
haskell
----------------------------------------------------------------------------- orphans -----------------------------------------------------------------------------
# OPTIONS_GHC -Wno - orphans # module Main (main) where import Data.TreeDiff import Data.TreeDiff.Golden (ediffGolden) import Distribution.ModuleName (ModuleName) import Distribution.Pretty (prettyShow) import System.FilePath ((</>)) import Test.Tasty (defaultMain, testGroup) import Test.Tasty.Golden.Advanced (goldenTest) import qualified Data.ByteString as BS import CabalDiff.Hoogle main :: IO () main = defaultMain $ testGroup "golden" [ testGroup "parser" [ golden "foldable1" , golden "optics-core" , golden "servant" , golden "singletons" , golden "vec" , golden "colour-2.3.4" , golden "colour-2.3.5" , golden "resolv" ] ] where golden name = ediffGolden goldenTest name goldenPath $ do contents <- BS.readFile hooglePath either fail return $ parseFile contents where goldenPath = "fixtures" </> (name ++ ".golden") hooglePath = "fixtures" </> (name ++ ".txt") instance ToExpr ModuleName where toExpr mn = App "ModuleName" [toExpr (prettyShow mn)] instance ToExpr Key
8894068f65076bc432c0cf0422de880addcf279d88f68803c3186d266bc34976
robrix/isometry
Array.hs
# LANGUAGE DataKinds # # LANGUAGE FlexibleContexts # # LANGUAGE KindSignatures # # LANGUAGE NamedFieldPuns # # LANGUAGE ScopedTypeVariables # # LANGUAGE TypeApplications # # LANGUAGE TypeOperators # module GL.Array ( Array(..) , configureInterleaved , configureSeparate , Type(..) , drawArrays , multiDrawArrays , drawArraysInstanced , drawElements , drawElementsInstanced , load , bindArray , askArray , ArrayC ) where import Control.Carrier.Fresh.Church import Control.Carrier.Reader import Control.Carrier.State.Church import Control.Effect.Finally import Control.Effect.Labelled import Control.Effect.Lift import Control.Effect.Trace import Data.Functor.I import Data.Functor.Interval import Data.Functor.K import Data.Word (Word32) import Foreign.Marshal.Array.Lift import Foreign.Ptr import qualified Foreign.Storable as S import GHC.Stack import qualified GL.Buffer as B import GL.Effect.Check import GL.Enum as GL import GL.Object import GL.Primitive import GL.Program (Program, askProgram) import GL.Shader.Vars import qualified GL.Type as GL import Graphics.GL.Core41 import Graphics.GL.Types newtype Array n = Array { unArray :: GLuint } instance Object (Array n) where gen = defaultGenWith glGenVertexArrays Array delete = defaultDeleteWith glDeleteVertexArrays unArray instance Bind (Array n) where bind = checking . sendIO . glBindVertexArray . maybe 0 unArray configureInterleaved :: forall v m sig . (HasLabelled Array (Reader (Array (v I))) sig m, HasLabelled (B.Buffer 'B.Array) (Reader (B.Buffer 'B.Array (v I))) sig m, Vars v, Has Check sig m, Has (Lift IO) sig m, Has Trace sig m) => m () configureInterleaved = askArray >> B.askBuffer @'B.Array >> evalState (Offset 0) (evalFresh 0 (configureVars @v (S.sizeOf @(Fields v) undefined) (defaultVars @v))) configureSeparate :: forall v1 v2 m sig . (HasLabelled Array (Reader (Array ((v1 :**: v2) I))) sig m, Vars v1, Vars v2, Has Check sig m, Has (Lift IO) sig m, Has Trace sig m) => B.Buffer 'B.Array (v1 I) -> B.Buffer 'B.Array (v2 I) -> m () configureSeparate b1 b2 = evalState (Offset 0) (evalFresh 0 (askArray >> B.bindBuffer b1 (configureVars @v1 stride (defaultVars @v1)) >> B.bindBuffer b2 (configureVars @v2 stride (defaultVars @v2)))) where stride = S.sizeOf @((v1 :**: v2) I) undefined configureVars :: (Vars v, Has Check sig m, Has Fresh sig m, Has (Lift IO) sig m, Has (State Offset) sig m, Has Trace sig m) => Int -> v Proxy -> m () configureVars stride = foldVarsM' (\ (Field{ location, name } :: Field Proxy a) -> do offset <- get let size = S.sizeOf @a undefined K ty = GL.glType @a K dims = GL.glDims @a put (offset <> Offset size) trace $ "configuring field " <> name <> " attrib " <> show location <> " at offset " <> show offset <> " stride " <> show stride <> " dims " <> show dims <> " type " <> show ty checking . sendIO $ glEnableVertexAttribArray (fromIntegral location) checking . 
sendIO $ glVertexAttribPointer (fromIntegral location) dims ty GL_FALSE (fromIntegral stride) (nullPtr `plusPtr` getOffset offset)) drawArrays :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> m () drawArrays mode i = askProgram >> askArray >> checking (sendIO (glDrawArrays (glEnum mode) (fromIntegral (inf i)) (fromIntegral (diameter i)))) multiDrawArrays :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> [Interval I Int] -> m () multiDrawArrays mode is | null is = pure () | otherwise = do _ <- askProgram _ <- askArray withArray (map (fromIntegral . inf) is) $ \ firsts -> withArray (map (fromIntegral . diameter) is) $ \ counts -> checking (sendIO (glMultiDrawArrays (glEnum mode) firsts counts (fromIntegral (length is)))) drawArraysInstanced :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> Int -> m () drawArraysInstanced mode i n = askProgram >> askArray >> checking (sendIO (glDrawArraysInstanced (glEnum mode) (fromIntegral (inf i)) (fromIntegral (diameter i)) (fromIntegral n))) drawElements :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasLabelled (B.Buffer 'B.ElementArray) (Reader (B.Buffer 'B.ElementArray Word32)) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> m () drawElements mode i = do _ <- askProgram _ <- askArray _ <- B.askBuffer @'B.ElementArray checking (sendIO (glDrawElements (glEnum mode) (fromIntegral (diameter i)) GL_UNSIGNED_INT (nullPtr `plusPtr` (getI (inf i) * S.sizeOf @Word32 0)))) drawElementsInstanced :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasLabelled (B.Buffer 'B.ElementArray) (Reader (B.Buffer 'B.ElementArray Word32)) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> Int -> m () drawElementsInstanced mode i n = do _ <- askProgram _ <- askArray _ <- B.askBuffer @'B.ElementArray checking (sendIO (glDrawElementsInstanced (glEnum mode) (fromIntegral (diameter i)) GL_UNSIGNED_INT (nullPtr `plusPtr` (getI (inf i) * S.sizeOf @Word32 0)) (fromIntegral n))) load :: (Vars v, S.Storable (v I), Has Check sig m, Has Finally sig m, Has (Lift IO) sig m, Has Trace sig m) => [v I] -> m (B.Buffer 'B.Array (v I), Array (v I)) load is = do b <- gen1 @(B.Buffer 'B.Array _) a <- gen1 bindArray a . B.bindBuffer b $ do B.realloc @'B.Array (length is) B.Static B.Draw B.copy @'B.Array 0 is (b, a) <$ configureInterleaved bindArray :: (Has Check sig m, Has (Lift IO) sig m) => Array (v I) -> ArrayC v m a -> m a bindArray array m = do bind (Just array) a <- runReader array (runLabelled m) a <$ bind @(Array _) Nothing askArray :: HasLabelled Array (Reader (Array (v I))) sig m => m (Array (v I)) askArray = runUnderLabel @Array ask type ArrayC v = Labelled Array (ReaderC (Array (v I)))
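GL.Array ships no usage docs, so here is a hedged sketch, not part of isometry, of how load, bindArray and drawArrays compose: upload and configure the vertex data once, then draw it with the vertex array bound. A shader program is assumed to be in scope through the labelled Program reader, exactly as drawArrays' own constraints require; the vertex type, primitive and index interval are left as parameters, and the imports follow the module's own import list plus fused-effects' Control.Algebra for Has.

{-# LANGUAGE FlexibleContexts #-}

module ArrayUsageSketch (drawOnce) where

import Control.Algebra (Has)
import Control.Carrier.Reader (Reader)
import Control.Effect.Finally (Finally)
import Control.Effect.Labelled (HasLabelled)
import Control.Effect.Lift (Lift)
import Control.Effect.Trace (Trace)
import Data.Functor.I (I)
import Data.Functor.Interval (Interval)
import qualified Foreign.Storable as S
import GL.Array (Type, bindArray, drawArrays, load)
import GL.Effect.Check (Check)
import GL.Program (Program)
import GL.Shader.Vars (Vars)

-- Upload a vertex list, then draw it once with the resulting vertex array
-- bound. The buffer handle is dropped here for brevity; a real renderer would
-- keep it (and the array) around instead of re-uploading every frame.
drawOnce ::
  ( Vars v
  , S.Storable (v I)
  , Has Check sig m
  , Has Finally sig m
  , Has (Lift IO) sig m
  , Has Trace sig m
  , HasLabelled Program (Reader (Program u v o)) sig m
  ) =>
  [v I] -> Type -> Interval I Int -> m ()
drawOnce vertices prim i = do
  (_buffer, array) <- load vertices
  bindArray array (drawArrays prim i)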
null
https://raw.githubusercontent.com/robrix/isometry/171b9261b8d7ea32c86ce6019c8c3973742f0349/src/GL/Array.hs
haskell
# LANGUAGE DataKinds # # LANGUAGE FlexibleContexts # # LANGUAGE KindSignatures # # LANGUAGE NamedFieldPuns # # LANGUAGE ScopedTypeVariables # # LANGUAGE TypeApplications # # LANGUAGE TypeOperators # module GL.Array ( Array(..) , configureInterleaved , configureSeparate , Type(..) , drawArrays , multiDrawArrays , drawArraysInstanced , drawElements , drawElementsInstanced , load , bindArray , askArray , ArrayC ) where import Control.Carrier.Fresh.Church import Control.Carrier.Reader import Control.Carrier.State.Church import Control.Effect.Finally import Control.Effect.Labelled import Control.Effect.Lift import Control.Effect.Trace import Data.Functor.I import Data.Functor.Interval import Data.Functor.K import Data.Word (Word32) import Foreign.Marshal.Array.Lift import Foreign.Ptr import qualified Foreign.Storable as S import GHC.Stack import qualified GL.Buffer as B import GL.Effect.Check import GL.Enum as GL import GL.Object import GL.Primitive import GL.Program (Program, askProgram) import GL.Shader.Vars import qualified GL.Type as GL import Graphics.GL.Core41 import Graphics.GL.Types newtype Array n = Array { unArray :: GLuint } instance Object (Array n) where gen = defaultGenWith glGenVertexArrays Array delete = defaultDeleteWith glDeleteVertexArrays unArray instance Bind (Array n) where bind = checking . sendIO . glBindVertexArray . maybe 0 unArray configureInterleaved :: forall v m sig . (HasLabelled Array (Reader (Array (v I))) sig m, HasLabelled (B.Buffer 'B.Array) (Reader (B.Buffer 'B.Array (v I))) sig m, Vars v, Has Check sig m, Has (Lift IO) sig m, Has Trace sig m) => m () configureInterleaved = askArray >> B.askBuffer @'B.Array >> evalState (Offset 0) (evalFresh 0 (configureVars @v (S.sizeOf @(Fields v) undefined) (defaultVars @v))) configureSeparate :: forall v1 v2 m sig . (HasLabelled Array (Reader (Array ((v1 :**: v2) I))) sig m, Vars v1, Vars v2, Has Check sig m, Has (Lift IO) sig m, Has Trace sig m) => B.Buffer 'B.Array (v1 I) -> B.Buffer 'B.Array (v2 I) -> m () configureSeparate b1 b2 = evalState (Offset 0) (evalFresh 0 (askArray >> B.bindBuffer b1 (configureVars @v1 stride (defaultVars @v1)) >> B.bindBuffer b2 (configureVars @v2 stride (defaultVars @v2)))) where stride = S.sizeOf @((v1 :**: v2) I) undefined configureVars :: (Vars v, Has Check sig m, Has Fresh sig m, Has (Lift IO) sig m, Has (State Offset) sig m, Has Trace sig m) => Int -> v Proxy -> m () configureVars stride = foldVarsM' (\ (Field{ location, name } :: Field Proxy a) -> do offset <- get let size = S.sizeOf @a undefined K ty = GL.glType @a K dims = GL.glDims @a put (offset <> Offset size) trace $ "configuring field " <> name <> " attrib " <> show location <> " at offset " <> show offset <> " stride " <> show stride <> " dims " <> show dims <> " type " <> show ty checking . sendIO $ glEnableVertexAttribArray (fromIntegral location) checking . 
sendIO $ glVertexAttribPointer (fromIntegral location) dims ty GL_FALSE (fromIntegral stride) (nullPtr `plusPtr` getOffset offset)) drawArrays :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> m () drawArrays mode i = askProgram >> askArray >> checking (sendIO (glDrawArrays (glEnum mode) (fromIntegral (inf i)) (fromIntegral (diameter i)))) multiDrawArrays :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> [Interval I Int] -> m () multiDrawArrays mode is | null is = pure () | otherwise = do _ <- askProgram _ <- askArray withArray (map (fromIntegral . inf) is) $ \ firsts -> withArray (map (fromIntegral . diameter) is) $ \ counts -> checking (sendIO (glMultiDrawArrays (glEnum mode) firsts counts (fromIntegral (length is)))) drawArraysInstanced :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> Int -> m () drawArraysInstanced mode i n = askProgram >> askArray >> checking (sendIO (glDrawArraysInstanced (glEnum mode) (fromIntegral (inf i)) (fromIntegral (diameter i)) (fromIntegral n))) drawElements :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasLabelled (B.Buffer 'B.ElementArray) (Reader (B.Buffer 'B.ElementArray Word32)) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> m () drawElements mode i = do _ <- askProgram _ <- askArray _ <- B.askBuffer @'B.ElementArray checking (sendIO (glDrawElements (glEnum mode) (fromIntegral (diameter i)) GL_UNSIGNED_INT (nullPtr `plusPtr` (getI (inf i) * S.sizeOf @Word32 0)))) drawElementsInstanced :: ( Has Check sig m , Has (Lift IO) sig m , HasLabelled Array (Reader (Array (v I))) sig m , HasLabelled (B.Buffer 'B.ElementArray) (Reader (B.Buffer 'B.ElementArray Word32)) sig m , HasCallStack , HasLabelled Program (Reader (Program u v o)) sig m ) => Type -> Interval I Int -> Int -> m () drawElementsInstanced mode i n = do _ <- askProgram _ <- askArray _ <- B.askBuffer @'B.ElementArray checking (sendIO (glDrawElementsInstanced (glEnum mode) (fromIntegral (diameter i)) GL_UNSIGNED_INT (nullPtr `plusPtr` (getI (inf i) * S.sizeOf @Word32 0)) (fromIntegral n))) load :: (Vars v, S.Storable (v I), Has Check sig m, Has Finally sig m, Has (Lift IO) sig m, Has Trace sig m) => [v I] -> m (B.Buffer 'B.Array (v I), Array (v I)) load is = do b <- gen1 @(B.Buffer 'B.Array _) a <- gen1 bindArray a . B.bindBuffer b $ do B.realloc @'B.Array (length is) B.Static B.Draw B.copy @'B.Array 0 is (b, a) <$ configureInterleaved bindArray :: (Has Check sig m, Has (Lift IO) sig m) => Array (v I) -> ArrayC v m a -> m a bindArray array m = do bind (Just array) a <- runReader array (runLabelled m) a <$ bind @(Array _) Nothing askArray :: HasLabelled Array (Reader (Array (v I))) sig m => m (Array (v I)) askArray = runUnderLabel @Array ask type ArrayC v = Labelled Array (ReaderC (Array (v I)))
1693b6963dc4d1e95868d9477028c6b3691b939cfd705d921ec2eb91cc098888
8c6794b6/haskell-sc-scratch
partest.hs
module Main where

import qualified Hal6.ParTest1 as P1
import qualified Hal6.ParTest2 as P2
import qualified Hal6.ParTest3 as P3
import qualified Hal6.ParTest4 as P4
import qualified Hal6.ParTest5 as P5
import qualified Hal6.ParTest6 as P6
import qualified Hal6.ParTest7 as P7

import System.Environment (getArgs)

main :: IO ()
main = do
  (n:_) <- getArgs
  case (read n :: Int) of
    1 -> P1.main
    2 -> P2.main
    3 -> P3.main
    4 -> P4.main
    5 -> P5.main
    6 -> P6.main
    7 -> P7.main
    _ -> print $ "Not implemented: " ++ n
null
https://raw.githubusercontent.com/8c6794b6/haskell-sc-scratch/22de2199359fa56f256b544609cd6513b5e40f43/Scratch/Parallel/hal6/run/partest.hs
haskell
module Main where

import qualified Hal6.ParTest1 as P1
import qualified Hal6.ParTest2 as P2
import qualified Hal6.ParTest3 as P3
import qualified Hal6.ParTest4 as P4
import qualified Hal6.ParTest5 as P5
import qualified Hal6.ParTest6 as P6
import qualified Hal6.ParTest7 as P7

import System.Environment (getArgs)

main :: IO ()
main = do
  (n:_) <- getArgs
  case (read n :: Int) of
    1 -> P1.main
    2 -> P2.main
    3 -> P3.main
    4 -> P4.main
    5 -> P5.main
    6 -> P6.main
    7 -> P7.main
    _ -> print $ "Not implemented: " ++ n
1ee160baa1a61fa9be50397803000eb4049573ecd841990cf967387cb4f44c27
jeffshrager/biobike
load.lisp
;;; -*- mode: Lisp; Syntax: Common-Lisp; Package: user; -*-

(in-package :cl-user)

(defparameter *soap-files*
  '("interface-tools"
    "kegg-goo"
    ))

(load-system* "websrc:soap;" *soap-files*)

(when (fboundp 'provides) (funcall 'provides :soap-tools))
null
https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/soap/load.lisp
lisp
-*- mode: Lisp; Syntax: Common-Lisp; Package: user; -*-
(in-package :cl-user)

(defparameter *soap-files*
  '("interface-tools"
    "kegg-goo"
    ))

(load-system* "websrc:soap;" *soap-files*)

(when (fboundp 'provides) (funcall 'provides :soap-tools))
60e92341450683b967c580338f0697c6723443d4bd6ee76a1cdb2b008aa95a7d
scrintal/heroicons-reagent
wallet.cljs
(ns com.scrintal.heroicons.mini.wallet) (defn render [] [:svg {:xmlns "" :viewBox "0 0 20 20" :fill "currentColor" :aria-hidden "true"} [:path {:d "M1 4.25a3.733 3.733 0 012.25-.75h13.5c.844 0 1.623.279 2.25.75A2.25 2.25 0 0016.75 2H3.25A2.25 2.25 0 001 4.25zM1 7.25a3.733 3.733 0 012.25-.75h13.5c.844 0 1.623.279 2.25.75A2.25 2.25 0 0016.75 5H3.25A2.25 2.25 0 001 7.25zM7 8a1 1 0 011 1 2 2 0 104 0 1 1 0 011-1h3.75A2.25 2.25 0 0119 10.25v5.5A2.25 2.25 0 0116.75 18H3.25A2.25 2.25 0 011 15.75v-5.5A2.25 2.25 0 013.25 8H7z"}]])
null
https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/mini/wallet.cljs
clojure
(ns com.scrintal.heroicons.mini.wallet) (defn render [] [:svg {:xmlns "" :viewBox "0 0 20 20" :fill "currentColor" :aria-hidden "true"} [:path {:d "M1 4.25a3.733 3.733 0 012.25-.75h13.5c.844 0 1.623.279 2.25.75A2.25 2.25 0 0016.75 2H3.25A2.25 2.25 0 001 4.25zM1 7.25a3.733 3.733 0 012.25-.75h13.5c.844 0 1.623.279 2.25.75A2.25 2.25 0 0016.75 5H3.25A2.25 2.25 0 001 7.25zM7 8a1 1 0 011 1 2 2 0 104 0 1 1 0 011-1h3.75A2.25 2.25 0 0119 10.25v5.5A2.25 2.25 0 0116.75 18H3.25A2.25 2.25 0 011 15.75v-5.5A2.25 2.25 0 013.25 8H7z"}]])
56b6e6a3cc20335c05d09d932e6bce054cdc9ce3a95cdc7e85a27b363a918e0b
pflanze/chj-schemelib
md5.scm
(require cj-u8vector-util (u8vector0 string.utf8-u8vector) test) (export md5:digest (digest md5-digest) #!optional (make-md5-context* make-md5-context) (starts md5-init) (update md5-update) (finish md5-finish)) ;; (compile #t) ( cc - opts " -O3 " ) ; over the default -O1 , for a few percent more speed ;;;; md5.scm adapted from the chicken md5 egg by christian at pflanze mine nu (include "cj-standarddeclares.scm") (c-declare " /* * RFC 1321 compliant MD5 implementation, * by Christophe Devine <> * this program is licensed under the GPL. */ #define uint8 unsigned char #define uint32 unsigned long int struct md5_context { uint32 total[2]; uint32 state[4]; uint8 buffer[64]; }; static void md5_starts( struct md5_context *ctx ); static void md5_update( struct md5_context *ctx, uint8 *input, uint32 length ); static void md5_finish( struct md5_context *ctx, uint8 digest[16] ); #include <string.h> #define GET_UINT32(n,b,i) \\ { \\ (n) = ( (b)[(i) ] ) \\ | ( (b)[(i) + 1] << 8 ) \\ | ( (b)[(i) + 2] << 16 ) \\ | ( (b)[(i) + 3] << 24 ); \\ } #define PUT_UINT32(n,b,i) \\ { \\ (b)[(i) ] = (uint8) ( (n) ); \\ (b)[(i) + 1] = (uint8) ( (n) >> 8 ); \\ (b)[(i) + 2] = (uint8) ( (n) >> 16 ); \\ (b)[(i) + 3] = (uint8) ( (n) >> 24 ); \\ } void md5_starts( struct md5_context *ctx ) { ctx->total[0] = 0; ctx->total[1] = 0; ctx->state[0] = 0x67452301; ctx->state[1] = 0xEFCDAB89; ctx->state[2] = 0x98BADCFE; ctx->state[3] = 0x10325476; } void md5_process( struct md5_context *ctx, uint8 data[64] ) { uint32 A, B, C, D, X[16]; GET_UINT32( X[0], data, 0 ); GET_UINT32( X[1], data, 4 ); GET_UINT32( X[2], data, 8 ); GET_UINT32( X[3], data, 12 ); GET_UINT32( X[4], data, 16 ); GET_UINT32( X[5], data, 20 ); GET_UINT32( X[6], data, 24 ); GET_UINT32( X[7], data, 28 ); GET_UINT32( X[8], data, 32 ); GET_UINT32( X[9], data, 36 ); GET_UINT32( X[10], data, 40 ); GET_UINT32( X[11], data, 44 ); GET_UINT32( X[12], data, 48 ); GET_UINT32( X[13], data, 52 ); GET_UINT32( X[14], data, 56 ); GET_UINT32( X[15], data, 60 ); #define S(x,n) ((x << n) | ((x & 0xFFFFFFFF) >> (32 - n))) #define P(a,b,c,d,k,s,t) \\ { \\ a = S(a , s ) + b ; \\ } A = ctx->state[0]; B = ctx->state[1]; C = ctx->state[2]; D = ctx->state[3]; #define F(x,y,z) (z ^ (x & (y ^ z))) P( A, B, C, D, 0, 7, 0xD76AA478 ); P( D, A, B, C, 1, 12, 0xE8C7B756 ); P( C, D, A, B, 2, 17, 0x242070DB ); P( B, C, D, A, 3, 22, 0xC1BDCEEE ); P( A, B, C, D, 4, 7, 0xF57C0FAF ); P( D, A, B, C, 5, 12, 0x4787C62A ); P( C, D, A, B, 6, 17, 0xA8304613 ); P( B, C, D, A, 7, 22, 0xFD469501 ); P( A, B, C, D, 8, 7, 0x698098D8 ); P( D, A, B, C, 9, 12, 0x8B44F7AF ); P( C, D, A, B, 10, 17, 0xFFFF5BB1 ); P( B, C, D, A, 11, 22, 0x895CD7BE ); P( A, B, C, D, 12, 7, 0x6B901122 ); P( D, A, B, C, 13, 12, 0xFD987193 ); P( C, D, A, B, 14, 17, 0xA679438E ); P( B, C, D, A, 15, 22, 0x49B40821 ); #undef F #define F(x,y,z) (y ^ (z & (x ^ y))) P( A, B, C, D, 1, 5, 0xF61E2562 ); P( D, A, B, C, 6, 9, 0xC040B340 ); P( C, D, A, B, 11, 14, 0x265E5A51 ); P( B, C, D, A, 0, 20, 0xE9B6C7AA ); P( A, B, C, D, 5, 5, 0xD62F105D ); P( D, A, B, C, 10, 9, 0x02441453 ); P( C, D, A, B, 15, 14, 0xD8A1E681 ); P( B, C, D, A, 4, 20, 0xE7D3FBC8 ); P( A, B, C, D, 9, 5, 0x21E1CDE6 ); P( D, A, B, C, 14, 9, 0xC33707D6 ); P( C, D, A, B, 3, 14, 0xF4D50D87 ); P( B, C, D, A, 8, 20, 0x455A14ED ); P( A, B, C, D, 13, 5, 0xA9E3E905 ); P( D, A, B, C, 2, 9, 0xFCEFA3F8 ); P( C, D, A, B, 7, 14, 0x676F02D9 ); P( B, C, D, A, 12, 20, 0x8D2A4C8A ); #undef F #define F(x,y,z) (x ^ y ^ z) P( A, B, C, D, 5, 4, 0xFFFA3942 ); P( D, A, B, C, 8, 11, 0x8771F681 ); P( C, D, A, B, 
11, 16, 0x6D9D6122 ); P( B, C, D, A, 14, 23, 0xFDE5380C ); P( A, B, C, D, 1, 4, 0xA4BEEA44 ); P( D, A, B, C, 4, 11, 0x4BDECFA9 ); P( C, D, A, B, 7, 16, 0xF6BB4B60 ); P( B, C, D, A, 10, 23, 0xBEBFBC70 ); P( A, B, C, D, 13, 4, 0x289B7EC6 ); P( D, A, B, C, 0, 11, 0xEAA127FA ); P( C, D, A, B, 3, 16, 0xD4EF3085 ); P( B, C, D, A, 6, 23, 0x04881D05 ); P( A, B, C, D, 9, 4, 0xD9D4D039 ); P( D, A, B, C, 12, 11, 0xE6DB99E5 ); P( C, D, A, B, 15, 16, 0x1FA27CF8 ); P( B, C, D, A, 2, 23, 0xC4AC5665 ); #undef F #define F(x,y,z) (y ^ (x | ~z)) P( A, B, C, D, 0, 6, 0xF4292244 ); P( D, A, B, C, 7, 10, 0x432AFF97 ); P( C, D, A, B, 14, 15, 0xAB9423A7 ); P( B, C, D, A, 5, 21, 0xFC93A039 ); P( A, B, C, D, 12, 6, 0x655B59C3 ); P( D, A, B, C, 3, 10, 0x8F0CCC92 ); P( C, D, A, B, 10, 15, 0xFFEFF47D ); P( B, C, D, A, 1, 21, 0x85845DD1 ); P( A, B, C, D, 8, 6, 0x6FA87E4F ); P( D, A, B, C, 15, 10, 0xFE2CE6E0 ); P( C, D, A, B, 6, 15, 0xA3014314 ); P( B, C, D, A, 13, 21, 0x4E0811A1 ); P( A, B, C, D, 4, 6, 0xF7537E82 ); P( D, A, B, C, 11, 10, 0xBD3AF235 ); P( C, D, A, B, 2, 15, 0x2AD7D2BB ); P( B, C, D, A, 9, 21, 0xEB86D391 ); #undef F ctx->state[0] += A; ctx->state[1] += B; ctx->state[2] += C; ctx->state[3] += D; } void md5_update( struct md5_context *ctx, uint8 *input, uint32 length ) { uint32 left, fill; if( ! length ) return; left = ( ctx->total[0] >> 3 ) & 0x3F; fill = 64 - left; ctx->total[0] += length << 3; ctx->total[1] += length >> 29; ctx->total[0] &= 0xFFFFFFFF; ctx->total[1] += ctx->total[0] < ( length << 3 ); if( left && length >= fill ) { memcpy( (void *) (ctx->buffer + left), (void *) input, fill ); md5_process( ctx, ctx->buffer ); length -= fill; input += fill; left = 0; } while( length >= 64 ) { md5_process( ctx, input ); length -= 64; input += 64; } if( length ) { memcpy( (void *) (ctx->buffer + left), (void *) input, length ); } } static uint8 md5_padding[64] = { 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; void md5_finish( struct md5_context *ctx, uint8 digest[16] ) { uint32 last, padn; uint8 msglen[8]; PUT_UINT32( ctx->total[0], msglen, 0 ); PUT_UINT32( ctx->total[1], msglen, 4 ); last = ( ctx->total[0] >> 3 ) & 0x3F; padn = ( last < 56 ) ? ( 56 - last ) : ( 120 - last ); md5_update( ctx, md5_padding, padn ); md5_update( ctx, msglen, 8 ); PUT_UINT32( ctx->state[0], digest, 0 ); PUT_UINT32( ctx->state[1], digest, 4 ); PUT_UINT32( ctx->state[2], digest, 8 ); PUT_UINT32( ctx->state[3], digest, 12 ); } ") interface for gambit by christian j : (c-define-type md5-context (struct "md5_context")) why does " struct " not work ? 
(c-declare " #include <stdlib.h> static ___SCMOBJ release_md5_context(void*p){ free(p); /* returns void */ return ___FIX(___NO_ERR); } ") (c-define-type md5-context* (pointer md5-context |md5-context*| "release_md5_context")) (define make-md5-context* (c-lambda () md5-context* " ___result_voidstar= malloc(sizeof(struct md5_context)); if (___result_voidstar==NULL) / * _ _ _ errmsg= of type _ _ _ specific Scheme string error message.*/ ")) (c-define-type uint32 unsigned-int32) (c-declare " #define ___BEGIN_CFUN_u8vector_to_pointer(scm, c, argpos) \\ if (!(___U8VECTORP(scm))) \\ ___err= ___FIX(___STOC_U8_ERR); \\ else { \\ ___err= ___FIX(___NO_ERR); \\ c= (uint8*) &(___BODY_AS(scm,___tSUBTYPED)[0]); #define ___END_CFUN_u8vector_to_pointer(scm, c, argpos) \\ } #define ___BEGIN_CFUN_notimplemented(c, scm) \\ ___err= ___FIX(___IMPL_LIMIT_ERR); #define ___END_CFUN_notimplemented(c, scm) \\ } ") (c-define-type uint8* "uint8*" "notimplemented" "u8vector_to_pointer" #f) (c-declare " #define ___BEGIN_CFUN_u8vectorOfLength16_to_pointer(scm, c, argpos) \\ if ((!(___U8VECTORP(scm))) || (___U8VECTORLENGTH(scm)!=___FIX(16))) \\ ___err= ___FIX(___STOC_U8_ERR); \\ else { \\ ___err= ___FIX(___NO_ERR); \\ c= (uint8*) &(___BODY_AS(scm,___tSUBTYPED)[0]); #define ___END_CFUN_u8vectorOfLength16_to_pointer(scm, c, argpos) \\ } ") (c-define-type |uint8[16]*| todo : c type checking for [ 16 ] impossible ? "notimplemented" "u8vectorOfLength16_to_pointer" #f) (define starts (c-lambda (md5-context*) void "md5_starts")) (define update (c-lambda (md5-context* uint8* uint32) void "md5_update")) (define finish (c-lambda (md5-context* |uint8[16]*|) void "md5_finish")) ;; TODO threadsafety. How to do that? (define alloc-context (make-md5-context*)) (define alloc-result (make-u8vector 16)) (define bufsize 4096) (define alloc-buf (make-u8vector bufsize)) (define (md5:raw-digest obj #!optional (result alloc-result)) (let* ((ctxt alloc-context)) (starts ctxt) (cond ((string? obj) (let ((v (string.utf8-u8vector obj))) (update ctxt v (u8vector-length v)))) ((u8vector? obj) (update ctxt obj (##u8vector-length obj))) ((input-port? obj) todo : only works with " Byte INPUT PORT " 's , not with string ports . (let loop () (let ((n-read (read-subu8vector alloc-buf 0 1 ;; bufsize ;todo: switch ;; back. and make ;; read-subu8vector work correctly first . obj))) (display ".") TODO error handling ? and/or correct eof handling ! ! ! (if (= n-read 0) ;; eof reached theoretically #!void ;; not finished (begin (update ctxt alloc-buf n-read) (loop)))))) (else (error "bad argument type - not a string or input-port" obj))) (finish ctxt result) result)) (define (md5:digest obj) (u8vector->hex-string-lc (md5:raw-digest obj))) (TEST > (define msg '("" "a" "abc" "message digest" "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" "12345678901234567890123456789012345678901234567890123456789012345678901234567890" ;; own tests: "Motörhead" "Garçon méchant")) > (map md5:digest msg) ("d41d8cd98f00b204e9800998ecf8427e" "0cc175b9c0f1b6a831c399e269772661" "900150983cd24fb0d6963f7d28e17f72" "f96b697d7cb7938d525a2f31aaf161d0" "c3fcd3d76192e4007dfb496cca67e13b" "d174ab98d277d9f5a5611c2c9f419d9f" "57edf4a22be3c955ac49da2e2107b67a" ;; "0d8f8b8d8c3dec7c40f0119cdb650038" "24d578412bef48b3514b5f635b4d7a6f" ))
null
https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/md5.scm
scheme
(compile #t) over the default -O1 , for a few percent more speed md5.scm \\ \\ \\ \\ \\ \\ /* returns void */ \\ \\ \\ \\ TODO threadsafety. How to do that? bufsize ;todo: switch back. and make read-subu8vector work eof reached theoretically not finished own tests:
(require cj-u8vector-util (u8vector0 string.utf8-u8vector) test) (export md5:digest (digest md5-digest) #!optional (make-md5-context* make-md5-context) (starts md5-init) (update md5-update) (finish md5-finish)) adapted from the chicken md5 egg by christian at pflanze mine nu (include "cj-standarddeclares.scm") (c-declare " /* * RFC 1321 compliant MD5 implementation, * by Christophe Devine <> * this program is licensed under the GPL. */ #define uint8 unsigned char #define uint32 unsigned long int struct md5_context { #include <string.h> #define GET_UINT32(n,b,i) \\ { \\ (n) = ( (b)[(i) ] ) \\ | ( (b)[(i) + 1] << 8 ) \\ | ( (b)[(i) + 2] << 16 ) \\ } #define PUT_UINT32(n,b,i) \\ { \\ } void md5_starts( struct md5_context *ctx ) { } void md5_process( struct md5_context *ctx, uint8 data[64] ) { #define S(x,n) ((x << n) | ((x & 0xFFFFFFFF) >> (32 - n))) #define P(a,b,c,d,k,s,t) \\ { \\ } #define F(x,y,z) (z ^ (x & (y ^ z))) #undef F #define F(x,y,z) (y ^ (z & (x ^ y))) #undef F #define F(x,y,z) (x ^ y ^ z) #undef F #define F(x,y,z) (y ^ (x | ~z)) #undef F } void md5_update( struct md5_context *ctx, uint8 *input, uint32 length ) { if( left && length >= fill ) { } while( length >= 64 ) { } if( length ) { } } static uint8 md5_padding[64] = { 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 void md5_finish( struct md5_context *ctx, uint8 digest[16] ) { } ") interface for gambit by christian j : (c-define-type md5-context (struct "md5_context")) why does " struct " not work ? (c-declare " #include <stdlib.h> static ___SCMOBJ release_md5_context(void*p){ } ") (c-define-type md5-context* (pointer md5-context |md5-context*| "release_md5_context")) (define make-md5-context* (c-lambda () md5-context* " if (___result_voidstar==NULL) / * _ _ _ errmsg= of type _ _ _ specific Scheme string error message.*/ ")) (c-define-type uint32 unsigned-int32) (c-declare " #define ___BEGIN_CFUN_u8vector_to_pointer(scm, c, argpos) \\ if (!(___U8VECTORP(scm))) \\ else { \\ #define ___END_CFUN_u8vector_to_pointer(scm, c, argpos) \\ } #define ___BEGIN_CFUN_notimplemented(c, scm) \\ #define ___END_CFUN_notimplemented(c, scm) \\ } ") (c-define-type uint8* "uint8*" "notimplemented" "u8vector_to_pointer" #f) (c-declare " #define ___BEGIN_CFUN_u8vectorOfLength16_to_pointer(scm, c, argpos) \\ if ((!(___U8VECTORP(scm))) || (___U8VECTORLENGTH(scm)!=___FIX(16))) \\ else { \\ #define ___END_CFUN_u8vectorOfLength16_to_pointer(scm, c, argpos) \\ } ") (c-define-type |uint8[16]*| todo : c type checking for [ 16 ] impossible ? "notimplemented" "u8vectorOfLength16_to_pointer" #f) (define starts (c-lambda (md5-context*) void "md5_starts")) (define update (c-lambda (md5-context* uint8* uint32) void "md5_update")) (define finish (c-lambda (md5-context* |uint8[16]*|) void "md5_finish")) (define alloc-context (make-md5-context*)) (define alloc-result (make-u8vector 16)) (define bufsize 4096) (define alloc-buf (make-u8vector bufsize)) (define (md5:raw-digest obj #!optional (result alloc-result)) (let* ((ctxt alloc-context)) (starts ctxt) (cond ((string? obj) (let ((v (string.utf8-u8vector obj))) (update ctxt v (u8vector-length v)))) ((u8vector? obj) (update ctxt obj (##u8vector-length obj))) ((input-port? obj) todo : only works with " Byte INPUT PORT " 's , not with string ports . (let loop () (let ((n-read (read-subu8vector alloc-buf 0 1 correctly first . obj))) (display ".") TODO error handling ? 
and/or correct eof handling ! ! ! (if (= n-read 0) #!void (begin (update ctxt alloc-buf n-read) (loop)))))) (else (error "bad argument type - not a string or input-port" obj))) (finish ctxt result) result)) (define (md5:digest obj) (u8vector->hex-string-lc (md5:raw-digest obj))) (TEST > (define msg '("" "a" "abc" "message digest" "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" "12345678901234567890123456789012345678901234567890123456789012345678901234567890" "Motörhead" "Garçon méchant")) > (map md5:digest msg) ("d41d8cd98f00b204e9800998ecf8427e" "0cc175b9c0f1b6a831c399e269772661" "900150983cd24fb0d6963f7d28e17f72" "f96b697d7cb7938d525a2f31aaf161d0" "c3fcd3d76192e4007dfb496cca67e13b" "d174ab98d277d9f5a5611c2c9f419d9f" "57edf4a22be3c955ac49da2e2107b67a" "0d8f8b8d8c3dec7c40f0119cdb650038" "24d578412bef48b3514b5f635b4d7a6f" ))
e8f043d606e6fcf1da6b0f167d0ebf33821a8a6e71232076c6c8a8c1ff87c768
shortishly/pgec
pgec_util.erl
%% Copyright (c) 2022 < >
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.

-module(pgec_util).

-export([snake_case/1]).
-export([tl_snake_case/1]).


snake_case([_ | _] = Labels) ->
    list_to_atom(lists:concat(lists:join("_", Labels))).

split_on_snake_case(Name) ->
    string:split(atom_to_list(Name), "_").

tl_snake_case(Name) ->
    case split_on_snake_case(Name) of
        [_] ->
            Name;
        Names ->
            snake_case(tl(Names))
    end.
null
https://raw.githubusercontent.com/shortishly/pgec/9b026bcfbc8814b77a8bfd3cb2176f797f2f44b7/src/pgec_util.erl
erlang
you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Copyright ( c ) 2022 < > Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(pgec_util). -export([snake_case/1]). -export([tl_snake_case/1]). snake_case([_ | _] = Labels) -> list_to_atom(lists:concat(lists:join("_", Labels))). split_on_snake_case(Name) -> string:split(atom_to_list(Name), "_"). tl_snake_case(Name) -> case split_on_snake_case(Name) of [_] -> Name; Names -> snake_case(tl(Names)) end.
9477b45e0ad459e9099cc58b3355d7d8a52e2101d143f084d048de4507724fbc
kadena-io/pact
Internal.hs
# LANGUAGE TupleSections # {-# LANGUAGE BangPatterns #-} {-# LANGUAGE RankNTypes #-} # LANGUAGE LambdaCase # # LANGUAGE ScopedTypeVariables # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE FlexibleContexts # # LANGUAGE RecordWildCards # -- | Module : Pact . Native . Internal Copyright : ( C ) 2016 -- License : BSD-style (see the file LICENSE) Maintainer : < > -- Internal functions for built - ins . -- module Pact.Native.Internal (success ,parseMsgKey,parseMsgKey' ,bindReduce ,enforceGuard ,defNative,defGasRNative,defRNative ,defSchema ,defConst ,setTopLevelOnly ,foldDefs ,funType,funType' ,module Pact.Types.Native ,tTyInteger,tTyDecimal,tTyTime,tTyBool ,tTyString,tTyKeySet,tTyObject,tTyObjectAny,tTyGuard ,colsToList ,module Pact.Gas ,(<>) ,getPactId,enforceGuardDef,guardForModuleCall ,provenanceOf ,enforceYield ,appToCap ,requireDefApp ,tLamToApp ) where import Bound import Control.Lens hiding (Fold) import Control.Monad import Data.Aeson hiding (Object) import qualified Data.Aeson.Lens as A import Data.Default import Data.Foldable import qualified Data.Vector as V import Data.Text (Text) import Unsafe.Coerce import Data.Functor (($>)) import Pact.Eval import Pact.Gas import Pact.Types.Capability import Pact.Types.Native import Pact.Types.PactValue import Pact.Types.Pretty import Pact.Types.Runtime import Pact.Runtime.Utils import Pact.Types.KeySet (parseAnyKeysetName) success :: Functor m => Text -> m a -> m (Term Name) success = fmap . const . toTerm colsToList :: Eval m [(Info,FieldKey)] -> Term n -> Eval m [(Info,FieldKey)] colsToList _ (TList cs _ _) = forM (V.toList cs) $ \c -> case c of TLitString col -> return (_tInfo c,FieldKey col) _ -> evalError (_tInfo c) "read: only Strings/Symbols allowed for col keys" colsToList argFail _ = argFail parseMsgKey :: (FromJSON t) => FunApp -> String -> Text -> Eval e t parseMsgKey f s t = parseMsgKey' f s (Just t) parseMsgKey' :: (FromJSON t) => FunApp -> String -> (Maybe Text) -> Eval e t parseMsgKey' i msg key = do b <- view eeMsgBody let go v = case fromJSON v of Success t -> return t Error e -> evalError' i $ prettyString msg <> ": parse failed: " <> prettyString e <> ": " <> pretty v case key of Nothing -> go b Just k -> case preview (A.key k) b of Nothing -> evalError' i $ "No such key in message: " <> pretty k Just v -> go v bindReduce :: [BindPair (Term Ref)] -> Scope Int Term Ref -> Info -> (Text -> Maybe (Term Name)) -> Eval e (Term Name) bindReduce ps bd bi lkpFun = do !(vs :: [BindPair (Term Ref)]) <- forM ps $ \(BindPair a k) -> do var' <- reduce k case var' of (TLitString s) -> case lkpFun s of Nothing -> evalError bi $ "Bad column in binding: " <> pretty s Just v -> return (BindPair a (liftTerm v)) t -> evalError bi $ "Invalid column identifier in binding: " <> pretty t let bd'' = instantiate (resolveArg bi (map _bpVal vs)) bd NB stack frame here just documents scope , but does not incur gas let prettyBindings = list $ pretty . fmap abbrev <$> vs textBindings = renderCompactText' $ "(bind: " <> prettyBindings <> ")" frame = StackFrame textBindings bi Nothing call frame $! (0,) <$> reduceBody bd'' setTopLevelOnly :: NativeDef -> NativeDef setTopLevelOnly = set (_2 . 
tNativeTopLevelOnly) True | Specify a ' NativeFun ' defNative :: NativeDefName -> NativeFun e -> FunTypes (Term Name) -> [Example] -> Text -> NativeDef defNative n fun ftype examples docs = (n, TNative n (NativeDFun n (unsafeCoerce fun)) ftype examples docs False def) | Specify a ' ' defGasRNative :: NativeDefName -> GasRNativeFun e -> FunTypes (Term Name) -> [Example] -> Text -> NativeDef defGasRNative name fun = defNative name (reduced fun) where reduced f fi as = gasUnreduced fi as (mapM reduce as) >>= \(g,as') -> f g fi as' -- | Specify a 'RNativeFun' defRNative :: NativeDefName -> RNativeFun e -> FunTypes (Term Name) -> [Example] -> Text -> NativeDef defRNative name fun = defNative name (reduced fun) where reduced f fi as = gasUnreduced fi as (mapM reduce as) >>= \(g,as') -> (g,) <$> f fi as' defSchema :: NativeDefName -> Text -> [(FieldKey, Type (Term Name))] -> NativeDef defSchema n doc fields = (n, TSchema (TypeName $ asString n) Nothing (Meta (Just doc) []) (map (\(fr,ty) -> Arg (asString fr) ty def) fields) def) defConst :: NativeDefName -> Text -> Type (Term Name) -> Term Name -> NativeDef defConst name doc ty term = (name, TConst arg Nothing cval meta def ) where arg = Arg (asString name) ty def meta = Meta (Just doc) [] cval = CVEval term term foldDefs :: Monad m => [m a] -> m [a] foldDefs = foldM (\r d -> d >>= \d' -> return (d':r)) [] funType :: Type n -> [(Text,Type n)] -> FunTypes n funType t as = funTypes $ funType' t as funType' :: Type n -> [(Text,Type n)] -> FunType n funType' t as = FunType (map (\(s,ty) -> Arg s ty def) as) t tTyInteger :: Type n; tTyInteger = TyPrim TyInteger tTyDecimal :: Type n; tTyDecimal = TyPrim TyDecimal tTyTime :: Type n; tTyTime = TyPrim TyTime tTyBool :: Type n; tTyBool = TyPrim TyBool tTyString :: Type n; tTyString = TyPrim TyString tTyKeySet :: Type n; tTyKeySet = TyPrim (TyGuard $ Just GTyKeySet) tTyObject :: Type n -> Type n; tTyObject o = TySchema TyObject o def tTyObjectAny :: Type n; tTyObjectAny = tTyObject TyAny tTyGuard :: Maybe GuardType -> Type n; tTyGuard gt = TyPrim (TyGuard gt) enforceGuardDef :: NativeDefName -> NativeDef enforceGuardDef dn = defRNative dn enforceGuard' (funType tTyBool [("guard",tTyGuard Nothing)] <> funType tTyBool [("keysetname",tTyString)]) [ LitExample $ "(" <> asString dn <> " 'admin-keyset)" , LitExample $ "(" <> asString dn <> " row-guard)" ] "Execute GUARD, or defined keyset KEYSETNAME, to enforce desired predicate logic." where enforceGuard' :: RNativeFun e enforceGuard' i as = case as of [TGuard g _] -> enforceGuard i g >> return (toTerm True) [TLitString k] -> do let f ksn = enforceGuard i (GKeySetRef ksn) $> toTerm True ifExecutionFlagSet FlagDisablePact44 (f $ KeySetName k Nothing) (case parseAnyKeysetName k of Left{} -> evalError' i "incorrect keyset name format" Right ksn -> f ksn) _ -> argsError i as | Test that first module app found in call stack is specified module , -- running 'onFound' if true, otherwise requesting module admin. guardForModuleCall :: Info -> ModuleName -> Eval e () -> Eval e () guardForModuleCall i modName onFound = findCallingModule >>= \r -> case r of (Just mn) | mn == modName -> onFound _ -> do md <- _mdModule <$> getModule i modName case md of MDModule m -> void $ acquireModuleAdmin i (_mName m) (_mGovernance m) MDInterface iface -> evalError i $ "Internal error, interface found in call stack: " <> pretty iface -- | Construct a 'Yield' endorsement with a user-supplied -- 'PactId', as opposed to introspecting on the env info -- to retrieve it. 
-- provenanceOf :: FunApp -> ChainId -- ^ target chain id -> Eval e (Maybe Provenance) provenanceOf fa tid = Just . Provenance tid . _mHash <$> getCallingModule fa -- | Enforce that 'Yield' object and provenance data matches env data -- and fail otherwise. -- enforceYield :: FunApp -> Yield -- ^ yield data to enforce -> Eval e Yield enforceYield fa y = case _yProvenance y of Nothing -> return y Just p -> do m <- getCallingModule fa cid <- view $ eePublicData . pdPublicMeta . pmChainId ifExecutionFlagSet FlagDisablePact40 (do let p' = Provenance cid (_mHash m) unless (p == p') $ evalError' fa $ "enforceYield: yield provenance " <> pretty p' <> " does not match " <> pretty p) (do let p' = Provenance cid (_mHash m):map (Provenance cid) (toList $ _mBlessed m) unless (p `elem` p') $ evalError' fa $ "enforceYield: yield provenance " <> pretty p <> " does not match " <> pretty p') return y | Validate App of indicated DefType and return Def requireDefApp :: DefType -> App (Term Ref) -> Eval e (Def Ref) requireDefApp dt App{..} = lookupFullyQualifiedTerm _appInfo _appFun >>= \case (TVar (Ref (TDef d@Def{} _)) _) -> matchDefTy d TDynamic tref tmem ti -> reduceDynamic tref tmem ti >>= \case Left v -> evalError ti $ "requireDefApp: expected module member for dynamic: " <> pretty v Right d -> matchDefTy d t -> evalError (_tInfo t) $ pretty (show t) where matchDefTy d | _dDefType d == dt = return d | otherwise = evalError _appInfo $ "Can only apply " <> pretty dt <> " here, found: " <> pretty (_dDefType d) argsToParams :: Info -> [Term Name] -> Eval e [PactValue] argsToParams i args = do elideFun <- ifExecutionFlagSet' FlagDisablePact40 id elideModRefInfo forM args $ \arg -> case toPactValue arg of Right pv -> return $ elideFun pv Left e -> evalError i $ "Invalid capability argument: " <> pretty e | Workhorse to convert App to Capability by capturing Def , -- reducing args and converting to pact value, and returning -- byproducts. appToCap :: App (Term Ref) -> Eval e (UserCapability, Def Ref, ([Term Name], FunType (Term Name))) appToCap a@App{..} = requireDefApp Defcap a >>= \d@Def{..} -> do prep@(args,_) <- prepareUserAppArgs d _appArgs _appInfo cap <- SigCapability (QualifiedName _dModule (asString _dDefName) (getInfo a)) <$> argsToParams _appInfo args return (cap,d,prep) -- | Function intended for use as a View pattern -- to convert inline-lambdas to `TApp`s for -- use within natives. tLamToApp :: Term n -> Term n tLamToApp = \case l@TLam{} -> TApp (App l [] def) def x -> x
null
https://raw.githubusercontent.com/kadena-io/pact/98dc6dbe9e917f26a238b2d4ac0f8ea90d65c5be/src/Pact/Native/Internal.hs
haskell
# LANGUAGE BangPatterns # # LANGUAGE RankNTypes # # LANGUAGE OverloadedStrings # | License : BSD-style (see the file LICENSE) | Specify a 'RNativeFun' running 'onFound' if true, otherwise requesting module admin. | Construct a 'Yield' endorsement with a user-supplied 'PactId', as opposed to introspecting on the env info to retrieve it. ^ target chain id | Enforce that 'Yield' object and provenance data matches env data and fail otherwise. ^ yield data to enforce reducing args and converting to pact value, and returning byproducts. | Function intended for use as a View pattern to convert inline-lambdas to `TApp`s for use within natives.
# LANGUAGE TupleSections # # LANGUAGE LambdaCase # # LANGUAGE ScopedTypeVariables # # LANGUAGE FlexibleContexts # # LANGUAGE RecordWildCards # Module : Pact . Native . Internal Copyright : ( C ) 2016 Maintainer : < > Internal functions for built - ins . module Pact.Native.Internal (success ,parseMsgKey,parseMsgKey' ,bindReduce ,enforceGuard ,defNative,defGasRNative,defRNative ,defSchema ,defConst ,setTopLevelOnly ,foldDefs ,funType,funType' ,module Pact.Types.Native ,tTyInteger,tTyDecimal,tTyTime,tTyBool ,tTyString,tTyKeySet,tTyObject,tTyObjectAny,tTyGuard ,colsToList ,module Pact.Gas ,(<>) ,getPactId,enforceGuardDef,guardForModuleCall ,provenanceOf ,enforceYield ,appToCap ,requireDefApp ,tLamToApp ) where import Bound import Control.Lens hiding (Fold) import Control.Monad import Data.Aeson hiding (Object) import qualified Data.Aeson.Lens as A import Data.Default import Data.Foldable import qualified Data.Vector as V import Data.Text (Text) import Unsafe.Coerce import Data.Functor (($>)) import Pact.Eval import Pact.Gas import Pact.Types.Capability import Pact.Types.Native import Pact.Types.PactValue import Pact.Types.Pretty import Pact.Types.Runtime import Pact.Runtime.Utils import Pact.Types.KeySet (parseAnyKeysetName) success :: Functor m => Text -> m a -> m (Term Name) success = fmap . const . toTerm colsToList :: Eval m [(Info,FieldKey)] -> Term n -> Eval m [(Info,FieldKey)] colsToList _ (TList cs _ _) = forM (V.toList cs) $ \c -> case c of TLitString col -> return (_tInfo c,FieldKey col) _ -> evalError (_tInfo c) "read: only Strings/Symbols allowed for col keys" colsToList argFail _ = argFail parseMsgKey :: (FromJSON t) => FunApp -> String -> Text -> Eval e t parseMsgKey f s t = parseMsgKey' f s (Just t) parseMsgKey' :: (FromJSON t) => FunApp -> String -> (Maybe Text) -> Eval e t parseMsgKey' i msg key = do b <- view eeMsgBody let go v = case fromJSON v of Success t -> return t Error e -> evalError' i $ prettyString msg <> ": parse failed: " <> prettyString e <> ": " <> pretty v case key of Nothing -> go b Just k -> case preview (A.key k) b of Nothing -> evalError' i $ "No such key in message: " <> pretty k Just v -> go v bindReduce :: [BindPair (Term Ref)] -> Scope Int Term Ref -> Info -> (Text -> Maybe (Term Name)) -> Eval e (Term Name) bindReduce ps bd bi lkpFun = do !(vs :: [BindPair (Term Ref)]) <- forM ps $ \(BindPair a k) -> do var' <- reduce k case var' of (TLitString s) -> case lkpFun s of Nothing -> evalError bi $ "Bad column in binding: " <> pretty s Just v -> return (BindPair a (liftTerm v)) t -> evalError bi $ "Invalid column identifier in binding: " <> pretty t let bd'' = instantiate (resolveArg bi (map _bpVal vs)) bd NB stack frame here just documents scope , but does not incur gas let prettyBindings = list $ pretty . fmap abbrev <$> vs textBindings = renderCompactText' $ "(bind: " <> prettyBindings <> ")" frame = StackFrame textBindings bi Nothing call frame $! (0,) <$> reduceBody bd'' setTopLevelOnly :: NativeDef -> NativeDef setTopLevelOnly = set (_2 . 
tNativeTopLevelOnly) True | Specify a ' NativeFun ' defNative :: NativeDefName -> NativeFun e -> FunTypes (Term Name) -> [Example] -> Text -> NativeDef defNative n fun ftype examples docs = (n, TNative n (NativeDFun n (unsafeCoerce fun)) ftype examples docs False def) | Specify a ' ' defGasRNative :: NativeDefName -> GasRNativeFun e -> FunTypes (Term Name) -> [Example] -> Text -> NativeDef defGasRNative name fun = defNative name (reduced fun) where reduced f fi as = gasUnreduced fi as (mapM reduce as) >>= \(g,as') -> f g fi as' defRNative :: NativeDefName -> RNativeFun e -> FunTypes (Term Name) -> [Example] -> Text -> NativeDef defRNative name fun = defNative name (reduced fun) where reduced f fi as = gasUnreduced fi as (mapM reduce as) >>= \(g,as') -> (g,) <$> f fi as' defSchema :: NativeDefName -> Text -> [(FieldKey, Type (Term Name))] -> NativeDef defSchema n doc fields = (n, TSchema (TypeName $ asString n) Nothing (Meta (Just doc) []) (map (\(fr,ty) -> Arg (asString fr) ty def) fields) def) defConst :: NativeDefName -> Text -> Type (Term Name) -> Term Name -> NativeDef defConst name doc ty term = (name, TConst arg Nothing cval meta def ) where arg = Arg (asString name) ty def meta = Meta (Just doc) [] cval = CVEval term term foldDefs :: Monad m => [m a] -> m [a] foldDefs = foldM (\r d -> d >>= \d' -> return (d':r)) [] funType :: Type n -> [(Text,Type n)] -> FunTypes n funType t as = funTypes $ funType' t as funType' :: Type n -> [(Text,Type n)] -> FunType n funType' t as = FunType (map (\(s,ty) -> Arg s ty def) as) t tTyInteger :: Type n; tTyInteger = TyPrim TyInteger tTyDecimal :: Type n; tTyDecimal = TyPrim TyDecimal tTyTime :: Type n; tTyTime = TyPrim TyTime tTyBool :: Type n; tTyBool = TyPrim TyBool tTyString :: Type n; tTyString = TyPrim TyString tTyKeySet :: Type n; tTyKeySet = TyPrim (TyGuard $ Just GTyKeySet) tTyObject :: Type n -> Type n; tTyObject o = TySchema TyObject o def tTyObjectAny :: Type n; tTyObjectAny = tTyObject TyAny tTyGuard :: Maybe GuardType -> Type n; tTyGuard gt = TyPrim (TyGuard gt) enforceGuardDef :: NativeDefName -> NativeDef enforceGuardDef dn = defRNative dn enforceGuard' (funType tTyBool [("guard",tTyGuard Nothing)] <> funType tTyBool [("keysetname",tTyString)]) [ LitExample $ "(" <> asString dn <> " 'admin-keyset)" , LitExample $ "(" <> asString dn <> " row-guard)" ] "Execute GUARD, or defined keyset KEYSETNAME, to enforce desired predicate logic." where enforceGuard' :: RNativeFun e enforceGuard' i as = case as of [TGuard g _] -> enforceGuard i g >> return (toTerm True) [TLitString k] -> do let f ksn = enforceGuard i (GKeySetRef ksn) $> toTerm True ifExecutionFlagSet FlagDisablePact44 (f $ KeySetName k Nothing) (case parseAnyKeysetName k of Left{} -> evalError' i "incorrect keyset name format" Right ksn -> f ksn) _ -> argsError i as | Test that first module app found in call stack is specified module , guardForModuleCall :: Info -> ModuleName -> Eval e () -> Eval e () guardForModuleCall i modName onFound = findCallingModule >>= \r -> case r of (Just mn) | mn == modName -> onFound _ -> do md <- _mdModule <$> getModule i modName case md of MDModule m -> void $ acquireModuleAdmin i (_mName m) (_mGovernance m) MDInterface iface -> evalError i $ "Internal error, interface found in call stack: " <> pretty iface provenanceOf :: FunApp -> ChainId -> Eval e (Maybe Provenance) provenanceOf fa tid = Just . Provenance tid . 
_mHash <$> getCallingModule fa enforceYield :: FunApp -> Yield -> Eval e Yield enforceYield fa y = case _yProvenance y of Nothing -> return y Just p -> do m <- getCallingModule fa cid <- view $ eePublicData . pdPublicMeta . pmChainId ifExecutionFlagSet FlagDisablePact40 (do let p' = Provenance cid (_mHash m) unless (p == p') $ evalError' fa $ "enforceYield: yield provenance " <> pretty p' <> " does not match " <> pretty p) (do let p' = Provenance cid (_mHash m):map (Provenance cid) (toList $ _mBlessed m) unless (p `elem` p') $ evalError' fa $ "enforceYield: yield provenance " <> pretty p <> " does not match " <> pretty p') return y | Validate App of indicated DefType and return Def requireDefApp :: DefType -> App (Term Ref) -> Eval e (Def Ref) requireDefApp dt App{..} = lookupFullyQualifiedTerm _appInfo _appFun >>= \case (TVar (Ref (TDef d@Def{} _)) _) -> matchDefTy d TDynamic tref tmem ti -> reduceDynamic tref tmem ti >>= \case Left v -> evalError ti $ "requireDefApp: expected module member for dynamic: " <> pretty v Right d -> matchDefTy d t -> evalError (_tInfo t) $ pretty (show t) where matchDefTy d | _dDefType d == dt = return d | otherwise = evalError _appInfo $ "Can only apply " <> pretty dt <> " here, found: " <> pretty (_dDefType d) argsToParams :: Info -> [Term Name] -> Eval e [PactValue] argsToParams i args = do elideFun <- ifExecutionFlagSet' FlagDisablePact40 id elideModRefInfo forM args $ \arg -> case toPactValue arg of Right pv -> return $ elideFun pv Left e -> evalError i $ "Invalid capability argument: " <> pretty e | Workhorse to convert App to Capability by capturing Def , appToCap :: App (Term Ref) -> Eval e (UserCapability, Def Ref, ([Term Name], FunType (Term Name))) appToCap a@App{..} = requireDefApp Defcap a >>= \d@Def{..} -> do prep@(args,_) <- prepareUserAppArgs d _appArgs _appInfo cap <- SigCapability (QualifiedName _dModule (asString _dDefName) (getInfo a)) <$> argsToParams _appInfo args return (cap,d,prep) tLamToApp :: Term n -> Term n tLamToApp = \case l@TLam{} -> TApp (App l [] def) def x -> x
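The helpers in the record above (defRNative, funType, the tTy* types) are the machinery with which Pact's built-in natives are declared, and enforceGuardDef shows the canonical shape of such a declaration. As a rough, hypothetical sketch of that same shape — not part of the Pact sources — a made-up "reverse-str" native could be declared as below, assuming it sits in a module with OverloadedStrings enabled and this Internal module plus Data.Text in scope.

import qualified Data.Text as T

-- Hypothetical native built on defRNative, mirroring enforceGuardDef above:
-- the handler receives already-reduced arguments and either returns a Term
-- or reports an argument error.
reverseStrDef :: NativeDef
reverseStrDef = defRNative "reverse-str" reverseStr
  (funType tTyString [("s", tTyString)])
  [LitExample "(reverse-str \"abc\")"]
  "Reverse the string S."
  where
    reverseStr :: RNativeFun e
    reverseStr _ [TLitString s] = return $ toTerm (T.reverse s)
    reverseStr i as = argsError i as

The where-bound handler with its own RNativeFun e signature follows the pattern used by enforceGuardDef in the file itself; the native name, behaviour and example are invented for illustration.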
5e08f677ccd3f24e14cd39cd59c577b8fbc0bb0bc4f7e0887dc80a828342761c
jamesdbrock/replace-attoparsec
Lazy.hs
-- | Module : Replace . . Text . Lazy Copyright : © 2019 License : BSD2 Maintainer : < > -- -- __Replace.Attoparsec__ is for finding text patterns, and also -- replacing or splitting on the found patterns. -- This activity is traditionally done with regular expressions, but _ _ Replace . Attoparsec _ _ uses " Data . Attoparsec " parsers instead for -- the pattern matching. -- -- __Replace.Attoparsec__ can be used in the same sort of “pattern capture” or “ find all ” situations in which one would use Python -- <#re.findall re.findall>, or Perl -- < m//>, -- or Unix -- </ grep>. -- -- __Replace.Attoparsec__ can be used in the same sort of “stream editing” or “ search - and - replace ” situations in which one would use Python -- <#re.sub re.sub>, or Perl < > , -- or Unix -- <-_0022s_0022-Command.html sed>, -- or -- < awk>. -- -- __Replace.Attoparsec__ can be used in the same sort of “string splitting” situations in which one would use Python -- <#re.split re.split> or Perl -- < split>. -- -- See the __[replace-attoparsec](-attoparsec)__ package README for usage examples. # LANGUAGE LambdaCase # {-# LANGUAGE BangPatterns #-} {-# LANGUAGE OverloadedStrings #-} # LANGUAGE TypeApplications # # LANGUAGE ScopedTypeVariables # module Replace.Attoparsec.Text.Lazy ( -- * Running parser -- -- | Functions in this section are /ways to run parsers/ ( like ' ' ) . They take -- as arguments a @sep@ parser and some input, run the parser on the input, -- and return a result. streamEdit , streamEditT * combinator -- -- | Functions in this section are /parser combinators/. They take -- a @sep@ parser for an argument, combine @sep@ with another parser, -- and return a new parser. , anyTill ) where import Data.Functor.Identity import Control.Applicative import Data.Attoparsec.Text.Lazy as A hiding (parseOnly) import qualified Data.Attoparsec.Text as AS import Data.List as List ( intercalate ) import qualified Data.Text.Lazy as T import qualified Data.Text.Lazy.Builder as TB import qualified Data.Text.Internal.Lazy as TI import qualified Data.Text as TS import qualified Data.Text.Internal as TIS import qualified Data.Attoparsec.Internal.Types as AT import Data.Coerce -- | -- === Stream editor -- -- Also known as “find-and-replace”, or “match-and-substitute”. Finds all -- of the sections of the stream which match the pattern @sep@, and replaces -- them with the result of the @editor@ function. -- -- ==== Access the matched section of text in the @editor@ -- -- If you want access to the matched string in the @editor@ function, -- then combine the pattern parser @sep@ -- with 'Data.Attoparsec.Text.match'. This will effectively change -- the type of the @editor@ function to @(Text,a) -> Text@. -- -- This allows us to write an @editor@ function which can choose to not -- edit the match and just leave it as it is. If the @editor@ function returns the first item in the tuple , then @streamEdit@ will not change -- the matched string. -- -- So, for all @sep@: -- -- @ -- streamEdit ('Data.Attoparsec.Text.match' sep) 'Data.Tuple.fst' ≡ 'Data.Function.id' -- @ -- -- ==== Laziness -- -- This is lazy in the input text chunks and should release processed chunks to -- the garbage collector promptly. -- -- The output is constructed by a 'TB.Builder' and is subject to the chunk size -- used there. streamEdit :: forall a. Parser a -- ^ The pattern matching parser @sep@ -> (a -> TS.Text) -- ^ The @editor@ function. Takes a parsed result of @sep@ -- and returns a new stream section for the replacement. 
-> T.Text -- ^ The input stream of text to be edited -> T.Text -- ^ The edited input stream streamEdit = coerce (streamEditT @Identity @a) # INLINABLE streamEdit # -- | -- === Stream editor -- Monad transformer version of ' streamEdit ' . -- -- The @editor@ function will run in the underlying monad context. -- -- If you want to do 'IO' operations in the @editor@ function then -- run this in 'IO'. -- -- If you want the @editor@ function to remember some state, -- then run this in a stateful monad. -- -- ==== Laziness -- -- This is lazy in the input text chunks and should release processed chunks to -- the garbage collector promptly, i.e. as soon as the presence of a @sep@ has -- been ruled out. -- -- Note that this is as only as lazy in the chunks as the selected monad allows -- it to be, i.e. if your monad requires running the entire computation before -- getting the result then this is effectively strict in the input stream. -- -- The output is constructed by a 'TB.Builder' and is subject to the chunk size -- used there. streamEditT :: (Applicative m) => Parser a -- ^ The pattern matching parser @sep@ -> (a -> m TS.Text) -- ^ The @editor@ function. Takes a parsed result of @sep@ -- and returns a new stream section for the replacement. -> T.Text -- ^ The input stream of text to be edited -> m T.Text -- ^ The edited input stream streamEditT sep editor = fmap TB.toLazyText . go mempty defP where -- Our starting parser defP = AS.parse (anyTill sep) go failRet p input = case input of -- We didn't find anything by the end of the stream, return the accumulated -- failure text TI.Empty -> pure failRet TI.Chunk c cs -> case p c of We did n't find sep or the beginning of sep in this chunk , return the -- accumulated failure text as well as this chunk, followed by the -- continued edited stream AS.Fail{} -> (failRet <>) . (TB.fromText c <>) <$> go mempty defP cs We found the beginning of sep , add to the failure text in case this -- isn't really sep and recurse on the remainder of the stream AS.Partial f -> go (failRet <> TB.fromText c) f cs We found sep , return the concatenation of the text until sep , the -- edited sep and the edited rest of the stream. AS.Done next r -> mconcat <$> sequenceA [ pure (TB.fromLazyText (fst r)) , TB.fromText <$> editor (snd r) , go mempty defP (TI.chunk next cs) ] # INLINABLE streamEditT # -- | -- === Specialized <-combinators/docs/Control-Monad-Combinators.html#v:manyTill_ manyTill_> -- Parser combinator to consume and capture input until the @sep@ pattern -- matches, equivalent to @'Control . . Data . . Text.anyChar ' sep@. -- On success, returns the prefix before the pattern match and the parsed match. -- @sep@ may be a zero - width parser , it may succeed without consuming any -- input. -- -- This combinator will produce a parser which acts like ' Data . . ' but is predicated beyond more than just the next one token . It is also like ' Data . . ' in that it is a “ high performance ” parser . -- -- ==== Laziness -- When the ' anyTill ' parser reaches the end of the current input chunk -- before finding the beginning of @sep@ then the parser will fail. -- When the ' anyTill ' parser reaches the end of the current input chunk -- while it is successfully parsing @sep@ then it will lazily fetch more -- input and continue parsing. 
anyTill :: Parser a -- ^ The pattern matching parser @sep@ -> Parser (T.Text, a) -- ^ parser anyTill sep = do begin <- getOffset (end, x) <- go prefix <- substring begin end pure (prefix, x) where go = do end <- getOffset r <- optional $ try sep case r of Nothing -> atChunkEnd >>= \case True -> empty False -> anyChar >> go Just x -> pure (end, x) -- | Always succeeds, returns 'True' if the parser is at the end of the current -- buffer and any additional input would require a 'TI.Partial' result. atChunkEnd :: Parser Bool atChunkEnd = AT.Parser $ \t pos more _lose succ' -> succ' t pos more (pos + 1 == AT.atBufferEnd (undefined :: TS.Text) t) Get the ' Data . Attoparsec . Internal . Types . ' current offset ' Data . Attoparsec . Internal . Types . Pos ' in the stream . -- Note that this is not the number of ' Data . 's which have been consumed , -- rather it is an offset into the underlying 'Data.Text.Internal.Text' -- array buffer, so you cannot use it as an argument to 'Data.Text.index'. But you use it as an argument to ' Data.Text.Internal.text ' . -- -- [“… you know you're in an uncomfortable state of sin :-)” — bos]() getOffset :: Parser Int getOffset = AT.Parser $ \t pos more _ succ' -> succ' t pos more (AT.fromPos pos) # INLINABLE getOffset # -- Extract a substring from part of the buffer that we've already visited. -- -- The idea here is that we go back and run the parser 'take' at the Pos -- which we saved from before, and then we continue from the current Pos, -- hopefully without messing up the internal parser state. -- -0.13.2.3/docs/src/Data.Attoparsec.Text.Internal.html#take -- -- Should be equivalent to the unexported function -- -0.13.2.3/docs/src/Data.Attoparsec.Text.Internal.html#substring -- -- This is a performance optimization for gathering the unmatched -- sections of the input. The alternative is to accumulate unmatched characters one anyChar at a time in a list of [ ] and then pack -- them into a Text. substring :: Int -> Int -> Parser T.Text substring !bgn !end = AT.Parser $ \t pos more lose succes -> let succes' _t _pos _more a = succes t pos more (T.fromStrict a) in AT.runParser (takeCheat (end - bgn)) t (AT.Pos bgn) more lose succes' where -- Dear reader, you deserve an explanation for 'takeCheat'. The -- alternative to running 'takeCheat' here would be the following line: -- ( A.take ( end - bgn ) ) t ( AT.Pos bgn ) more lose succes ' -- -- But 'Attoparsec.take' is not correct, and 'takeCheat' is correct. -- It is correct because the Pos which we got from 'getOffset' is an -- index into the underlying Data.Text.Array, so (end - bgn) is -- in units of the length of the Data.Text.Array, not in units of the number of . -- -- Furthermore 'takeCheat' is a lot faster because 'A.take' takes a number of and then iterates over the Text by the number of , advancing by 4 bytes when it encounters a wide . So , O(N ) . is ) . -- -- This will be fine as long as we always call 'takeCheat' on the immutable , already - visited part of the Attoparsec . Text . Buffer 's -- Data.Text.Array. Which we do. -- -- It's named 'takeCheat' because we're getting access to the Attoparsec . Text . Buffer through the Data . Text . Internal interface , even though Attoparsec is extremely vigilant about -- not exposing its buffers. 
-- -- -1.2.3.1/docs/Data-Text-Internal.html takeCheat :: Int -> Parser TS.Text takeCheat len = do (TIS.Text arr off _len) <- A.take 0 return (TIS.Text arr off len) -- These are from the latest version of attoparsec , remove them when bumping it to 0.14.0 or later and use A.parseOnly instead -- -- | Convert a 'Result' value to an 'Either' value. eitherResult' :: Result r -> Either String r eitherResult' (Done _ r) = Right r eitherResult' (Fail _ [] msg) = Left msg eitherResult' (Fail _ ctxs msg) = Left (List.intercalate " > " ctxs ++ ": " ++ msg) -- | Run a parser and convert its 'Result' to an 'Either' value. -- -- This function does not force a parser to consume all of its input. -- Instead, any residual input will be discarded. To force a parser -- to consume all of its input, use something like this: -- -- @ ' parseOnly ' ( myParser ' Control . Applicative . < * ' ' endOfInput ' ) -- @ parseOnly :: A.Parser a -> T.Text -> Either String a parseOnly p = eitherResult' . parse p # INLINE parseOnly #
null
https://raw.githubusercontent.com/jamesdbrock/replace-attoparsec/1cd9c71f9b286e1067aa39d958acf4118d3626e4/src/Replace/Attoparsec/Text/Lazy.hs
haskell
| __Replace.Attoparsec__ is for finding text patterns, and also replacing or splitting on the found patterns. This activity is traditionally done with regular expressions, the pattern matching. __Replace.Attoparsec__ can be used in the same sort of “pattern capture” <#re.findall re.findall>, < m//>, or Unix </ grep>. __Replace.Attoparsec__ can be used in the same sort of “stream editing” <#re.sub re.sub>, or Unix <-_0022s_0022-Command.html sed>, or < awk>. __Replace.Attoparsec__ can be used in the same sort of “string splitting” <#re.split re.split> < split>. See the __[replace-attoparsec](-attoparsec)__ package README for usage examples. # LANGUAGE BangPatterns # # LANGUAGE OverloadedStrings # * Running parser | Functions in this section are /ways to run parsers/ as arguments a @sep@ parser and some input, run the parser on the input, and return a result. | Functions in this section are /parser combinators/. They take a @sep@ parser for an argument, combine @sep@ with another parser, and return a new parser. | === Stream editor Also known as “find-and-replace”, or “match-and-substitute”. Finds all of the sections of the stream which match the pattern @sep@, and replaces them with the result of the @editor@ function. ==== Access the matched section of text in the @editor@ If you want access to the matched string in the @editor@ function, then combine the pattern parser @sep@ with 'Data.Attoparsec.Text.match'. This will effectively change the type of the @editor@ function to @(Text,a) -> Text@. This allows us to write an @editor@ function which can choose to not edit the match and just leave it as it is. If the @editor@ function the matched string. So, for all @sep@: @ streamEdit ('Data.Attoparsec.Text.match' sep) 'Data.Tuple.fst' ≡ 'Data.Function.id' @ ==== Laziness This is lazy in the input text chunks and should release processed chunks to the garbage collector promptly. The output is constructed by a 'TB.Builder' and is subject to the chunk size used there. ^ The pattern matching parser @sep@ ^ The @editor@ function. Takes a parsed result of @sep@ and returns a new stream section for the replacement. ^ The input stream of text to be edited ^ The edited input stream | === Stream editor The @editor@ function will run in the underlying monad context. If you want to do 'IO' operations in the @editor@ function then run this in 'IO'. If you want the @editor@ function to remember some state, then run this in a stateful monad. ==== Laziness This is lazy in the input text chunks and should release processed chunks to the garbage collector promptly, i.e. as soon as the presence of a @sep@ has been ruled out. Note that this is as only as lazy in the chunks as the selected monad allows it to be, i.e. if your monad requires running the entire computation before getting the result then this is effectively strict in the input stream. The output is constructed by a 'TB.Builder' and is subject to the chunk size used there. ^ The pattern matching parser @sep@ ^ The @editor@ function. Takes a parsed result of @sep@ and returns a new stream section for the replacement. ^ The input stream of text to be edited ^ The edited input stream Our starting parser We didn't find anything by the end of the stream, return the accumulated failure text accumulated failure text as well as this chunk, followed by the continued edited stream isn't really sep and recurse on the remainder of the stream edited sep and the edited rest of the stream. 
| === Specialized <-combinators/docs/Control-Monad-Combinators.html#v:manyTill_ manyTill_> matches, equivalent to On success, returns the prefix before the pattern match and the parsed match. input. This combinator will produce a parser which acts ==== Laziness before finding the beginning of @sep@ then the parser will fail. while it is successfully parsing @sep@ then it will lazily fetch more input and continue parsing. ^ The pattern matching parser @sep@ ^ parser | Always succeeds, returns 'True' if the parser is at the end of the current buffer and any additional input would require a 'TI.Partial' result. rather it is an offset into the underlying 'Data.Text.Internal.Text' array buffer, so you cannot use it as an argument to 'Data.Text.index'. [“… you know you're in an uncomfortable state of sin :-)” — bos]() Extract a substring from part of the buffer that we've already visited. The idea here is that we go back and run the parser 'take' at the Pos which we saved from before, and then we continue from the current Pos, hopefully without messing up the internal parser state. -0.13.2.3/docs/src/Data.Attoparsec.Text.Internal.html#take Should be equivalent to the unexported function -0.13.2.3/docs/src/Data.Attoparsec.Text.Internal.html#substring This is a performance optimization for gathering the unmatched sections of the input. The alternative is to accumulate unmatched them into a Text. Dear reader, you deserve an explanation for 'takeCheat'. The alternative to running 'takeCheat' here would be the following line: But 'Attoparsec.take' is not correct, and 'takeCheat' is correct. It is correct because the Pos which we got from 'getOffset' is an index into the underlying Data.Text.Array, so (end - bgn) is in units of the length of the Data.Text.Array, not in units of the Furthermore 'takeCheat' is a lot faster because 'A.take' takes a This will be fine as long as we always call 'takeCheat' on the Data.Text.Array. Which we do. It's named 'takeCheat' because we're getting access to not exposing its buffers. -1.2.3.1/docs/Data-Text-Internal.html | Convert a 'Result' value to an 'Either' value. | Run a parser and convert its 'Result' to an 'Either' value. This function does not force a parser to consume all of its input. Instead, any residual input will be discarded. To force a parser to consume all of its input, use something like this: @ @
Module : Replace . . Text . Lazy Copyright : © 2019 License : BSD2 Maintainer : < > but _ _ Replace . Attoparsec _ _ uses " Data . Attoparsec " parsers instead for or “ find all ” situations in which one would use Python or Perl or “ search - and - replace ” situations in which one would use Python or Perl < > , situations in which one would use Python or Perl # LANGUAGE LambdaCase # # LANGUAGE TypeApplications # # LANGUAGE ScopedTypeVariables # module Replace.Attoparsec.Text.Lazy ( ( like ' ' ) . They take streamEdit , streamEditT * combinator , anyTill ) where import Data.Functor.Identity import Control.Applicative import Data.Attoparsec.Text.Lazy as A hiding (parseOnly) import qualified Data.Attoparsec.Text as AS import Data.List as List ( intercalate ) import qualified Data.Text.Lazy as T import qualified Data.Text.Lazy.Builder as TB import qualified Data.Text.Internal.Lazy as TI import qualified Data.Text as TS import qualified Data.Text.Internal as TIS import qualified Data.Attoparsec.Internal.Types as AT import Data.Coerce returns the first item in the tuple , then @streamEdit@ will not change streamEdit :: forall a. Parser a -> (a -> TS.Text) -> T.Text -> T.Text streamEdit = coerce (streamEditT @Identity @a) # INLINABLE streamEdit # Monad transformer version of ' streamEdit ' . streamEditT :: (Applicative m) => Parser a -> (a -> m TS.Text) -> T.Text -> m T.Text streamEditT sep editor = fmap TB.toLazyText . go mempty defP where defP = AS.parse (anyTill sep) go failRet p input = case input of TI.Empty -> pure failRet TI.Chunk c cs -> case p c of We did n't find sep or the beginning of sep in this chunk , return the AS.Fail{} -> (failRet <>) . (TB.fromText c <>) <$> go mempty defP cs We found the beginning of sep , add to the failure text in case this AS.Partial f -> go (failRet <> TB.fromText c) f cs We found sep , return the concatenation of the text until sep , the AS.Done next r -> mconcat <$> sequenceA [ pure (TB.fromLazyText (fst r)) , TB.fromText <$> editor (snd r) , go mempty defP (TI.chunk next cs) ] # INLINABLE streamEditT # Parser combinator to consume and capture input until the @sep@ pattern @'Control . . Data . . Text.anyChar ' sep@. @sep@ may be a zero - width parser , it may succeed without consuming any like ' Data . . ' but is predicated beyond more than just the next one token . It is also like ' Data . . ' in that it is a “ high performance ” parser . When the ' anyTill ' parser reaches the end of the current input chunk When the ' anyTill ' parser reaches the end of the current input chunk anyTill anyTill sep = do begin <- getOffset (end, x) <- go prefix <- substring begin end pure (prefix, x) where go = do end <- getOffset r <- optional $ try sep case r of Nothing -> atChunkEnd >>= \case True -> empty False -> anyChar >> go Just x -> pure (end, x) atChunkEnd :: Parser Bool atChunkEnd = AT.Parser $ \t pos more _lose succ' -> succ' t pos more (pos + 1 == AT.atBufferEnd (undefined :: TS.Text) t) Get the ' Data . Attoparsec . Internal . Types . ' current offset ' Data . Attoparsec . Internal . Types . Pos ' in the stream . Note that this is not the number of ' Data . 's which have been consumed , But you use it as an argument to ' Data.Text.Internal.text ' . 
getOffset :: Parser Int
getOffset = AT.Parser $ \t pos more _ succ' -> succ' t pos more (AT.fromPos pos)
{-# INLINABLE getOffset #-}

-- This is a performance optimization for gathering the unmatched sections of
-- the input. The alternative is to accumulate unmatched characters one
-- 'anyChar' at a time in a list and then pack them into a Text.
substring :: Int -> Int -> Parser T.Text
substring !bgn !end = AT.Parser $ \t pos more lose succes ->
    let succes' _t _pos _more a = succes t pos more (T.fromStrict a)
    in AT.runParser (takeCheat (end - bgn)) t (AT.Pos bgn) more lose succes'
  where
    -- Dear reader, you deserve an explanation for 'takeCheat'. The alternative
    -- to running 'takeCheat' here would be the following line:
    --
    --     AT.runParser (A.take (end - bgn)) t (AT.Pos bgn) more lose succes'
    --
    -- But 'Attoparsec.take' is not correct, and 'takeCheat' is correct. It is
    -- correct because the Pos which we got from 'getOffset' is an index into
    -- the underlying Data.Text.Array, so (end - bgn) is in units of the length
    -- of the Data.Text.Array, not in units of the number of characters.
    -- Furthermore 'takeCheat' is a lot faster, because 'A.take' takes a number
    -- of characters and then iterates over the Text by that number of
    -- characters, advancing by 4 bytes when it encounters a wide character,
    -- so it is O(N), while 'takeCheat' is O(1). This will be fine as long as
    -- we always call 'takeCheat' on the immutable, already-visited part of the
    -- Attoparsec.Text.Buffer's Data.Text.Array. Which we do. It's named
    -- 'takeCheat' because we're getting access to the Attoparsec.Text.Buffer
    -- through the Data.Text.Internal interface, even though Attoparsec is
    -- extremely vigilant about not exposing its buffers.
    takeCheat :: Int -> Parser TS.Text
    takeCheat len = do
        (TIS.Text arr off _len) <- A.take 0
        return (TIS.Text arr off len)

-- These are from the latest version of attoparsec; remove them when bumping it
-- to 0.14.0 or later and use A.parseOnly instead.

-- | Convert a 'Result' value to an 'Either' value.
eitherResult' :: Result r -> Either String r
eitherResult' (Done _ r)        = Right r
eitherResult' (Fail _ [] msg)   = Left msg
eitherResult' (Fail _ ctxs msg) = Left (List.intercalate " > " ctxs ++ ": " ++ msg)

-- | Run a parser and convert its 'Result' to an 'Either' value.
--
-- This function does not force a parser to consume all of its input. Instead,
-- any residual input will be discarded. To force a parser to consume all of
-- its input, use something like this:
--
-- @
-- 'parseOnly' (myParser 'Control.Applicative.<*' 'endOfInput')
-- @
parseOnly :: A.Parser a -> T.Text -> Either String a
parseOnly p = eitherResult' . parse p
{-# INLINE parseOnly #-}
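A second sketch, for the monad-transformer variant @streamEditT@ run in 'IO',
under the same assumption that the module above compiles as shown; the names
@redact@ and @main@, the log text, and the sample input are illustrative only:

{-# LANGUAGE OverloadedStrings #-}

import Data.Attoparsec.Text (string)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TLIO
import Replace.Attoparsec.Text.Lazy (streamEditT)

-- Redact each match while logging it; the editor runs in IO, so the whole
-- edit returns the rewritten lazy Text inside IO.
redact :: TL.Text -> IO TL.Text
redact = streamEditT (string "secret") $ \matched -> do
    putStrLn ("redacting: " <> show matched)
    pure "[redacted]"

main :: IO ()
main = redact "a secret is a secret" >>= TLIO.putStrLn
-- prints "redacting: \"secret\"" twice, then "a [redacted] is a [redacted]"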
1efeac8d257aad201b4a3fc1924e3b9d21ccdf33905c1db760a30419248ebb2f
MyDataFlow/ttalk-server
jlib_SUITE.erl
-module(jlib_SUITE). -include_lib("exml/include/exml.hrl"). -include_lib("proper/include/proper.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("ejabberd/include/jlib.hrl"). -include_lib("common_test/include/ct.hrl"). -compile([export_all]). -import(prop_helper, [prop/2]). all() -> [make_iq_reply_changes_type_to_result, make_iq_reply_changes_to_to_from, make_iq_reply_switches_from_to_to, make_iq_reply_switches_to_and_from_attrs, binary_to_jid_succeeds_with_valid_binaries, binary_to_jid_fails_with_invalid_binaries, binary_to_jid_fails_with_empty_binary, make_jid_fails_on_binaries_that_are_too_long, jid_to_lower_fails_if_any_binary_is_invalid, jid_replace_resource_failes_for_invalid_resource, nodeprep_fails_with_too_long_username, nameprep_fails_with_too_long_domain, resourceprep_fails_with_too_long_resource, nodeprep_fails_with_incorrect_username, resourceprep_fails_with_incorrect_resource, nameprep_fails_with_incorrect_domain, is_nodename_fails_for_empty_binary, compare_bare_jids]. init_per_suite(C) -> application:start(p1_stringprep), C. end_per_suite(C) -> application:stop(p1_stringprep), C. make_iq_reply_switches_to_and_from_attrs(_C) -> ToJid = <<"/res">>, FromJid = <<"/res2">>, #xmlel{attrs = Attrs} = BaseIQ = base_iq(), IQWithToAndFrom = BaseIQ#xmlel{attrs = [{<<"to">>, ToJid}, {<<"from">>, FromJid} | Attrs]}, WithToFromReply = jlib:make_result_iq_reply(IQWithToAndFrom), <<"result">> = exml_query:attr(WithToFromReply, <<"type">>), FromJid = exml_query:attr(WithToFromReply, <<"to">>), ToJid = exml_query:attr(WithToFromReply, <<"from">>). make_iq_reply_switches_from_to_to(_C) -> FromJid = <<"/res2">>, #xmlel{attrs = Attrs} = BaseIQ = base_iq(), IQWithFrom = BaseIQ#xmlel{attrs = [{<<"from">>, FromJid} | Attrs]}, WithFromReply = jlib:make_result_iq_reply(IQWithFrom), <<"result">> = exml_query:attr(WithFromReply, <<"type">>), FromJid = exml_query:attr(WithFromReply, <<"to">>). make_iq_reply_changes_to_to_from(_C) -> ToJid = <<"/res">>, #xmlel{attrs = Attrs} = BaseIQ = base_iq(), IQWithTo = BaseIQ#xmlel{attrs = [{<<"to">>, ToJid} | Attrs]}, WithToReply = jlib:make_result_iq_reply(IQWithTo), <<"result">> = exml_query:attr(WithToReply, <<"type">>), ToJid = exml_query:attr(WithToReply, <<"from">>). make_iq_reply_changes_type_to_result(_) -> BaseIQReply = jlib:make_result_iq_reply(base_iq()), <<"result">> = exml_query:attr(BaseIQReply, <<"type">>). base_iq() -> #xmlel{name = <<"iq">>, attrs = [{<<"id">>, base64:encode(crypto:rand_bytes(4))}, {<<"xmlns">>, <<"jabber:client">>}, {<<"type">>, <<"set">>}], children = [#xmlel{name = <<"session">>, attrs = [{<<"xmlns">>, <<"urn:ietf:params:xml:ns:xmpp-session">>}]} ]}. binary_to_jid_succeeds_with_valid_binaries(_C) -> Prop = ?FORALL(BinJid, (jid_gen:jid()), (is_record(jlib:binary_to_jid(BinJid), jid))), prop(binary_to_jid_succeeds_with_valid_binaries, Prop). binary_to_jid_fails_with_invalid_binaries(_C) -> Prop = ?FORALL(BinJid, jid_gen:invalid_jid(), error == jlib:binary_to_jid(BinJid)), run_property(Prop, 100, 1, 42). binary_to_jid_fails_with_empty_binary(_) -> error = jlib:binary_to_jid(<<>>). make_jid_fails_on_binaries_that_are_too_long(_) -> Prop = ?FORALL({U, S, R}, {jid_gen:username(), jid_gen:domain(), jid_gen:resource()}, case element_length_is_too_big([U,S,R]) of true -> error == jlib:make_jid(U,S,R); false -> is_record(jlib:make_jid(U,S,R), jid) end), run_property(Prop, 100, 500, 1500). element_length_is_too_big(Els) -> lists:any(fun(El) -> size(El) >= 1024 end, Els). 
jid_to_lower_fails_if_any_binary_is_invalid(_) -> Prop = ?FORALL({U, S, R}, {jid_gen:maybe_valid_username(), jid_gen:maybe_valid_domain(), jid_gen:maybe_valid_resource()}, case jlib:jid_to_lower({U, S, R}) of {LU, LS, LR} -> jlib:nodeprep(U) == LU andalso jlib:nameprep(S) == LS andalso jlib:resourceprep(R) == LR; error -> jlib:nodeprep(U) == error orelse jlib:nameprep(S) == error orelse jlib:resourceprep(R) == error end), run_property(Prop, 150, 1, 42). nodeprep_fails_with_too_long_username(_C) -> Prop = ?FORALL(Bin, jid_gen:username(), error == jlib:nodeprep(Bin)), run_property(Prop, 5, 1024, 2048). nameprep_fails_with_too_long_domain(_C) -> Prop = ?FORALL(Bin, jid_gen:domain(), error == jlib:nameprep(Bin)), run_property(Prop, 5, 1024, 2048). resourceprep_fails_with_too_long_resource(_C) -> Prop = ?FORALL(Bin, jid_gen:resource(), error == jlib:resourceprep(Bin)), run_property(Prop, 5, 1024, 2048). jid_replace_resource_failes_for_invalid_resource(_) -> Prop = ?FORALL({BinJid, MaybeCorrectRes}, {jid_gen:bare_jid(), jid_gen:maybe_valid_resource()}, jid_replace_resource(BinJid, MaybeCorrectRes)), prop(jid_replace_resource, Prop). jid_replace_resource(BinJid, Res) -> Jid = jlib:binary_to_jid(BinJid), Jid2 = jlib:jid_replace_resource(Jid, Res), check_jid_replace_resource_output(Res, Jid2). check_jid_replace_resource_output(Resource, error) -> jlib:resourceprep(Resource) == error; check_jid_replace_resource_output(Resource, #jid{}) -> jlib:resourceprep(Resource) =/= error. run_property(Prop, NumTest, StartSize, StopSize) -> ?assert(proper:quickcheck(Prop, [verbose, long_result, {numtests, NumTest}, {start_size, StartSize}, {max_size, StopSize}])). nodeprep_fails_with_incorrect_username(_) -> prop(incorrect_username_property, ?FORALL(Bin, jid_gen:invalid_username(), error == jlib:nodeprep(Bin))). resourceprep_fails_with_incorrect_resource(_) -> prop(incorrect_resource_property, ?FORALL(Bin, jid_gen:invalid_resource(), error == jlib:resourceprep(Bin))). nameprep_fails_with_incorrect_domain(_) -> prop(incorrect_domain_property, ?FORALL(Bin, jid_gen:invalid_domain(), error == jlib:nameprep(Bin))). is_nodename_fails_for_empty_binary(_) -> false = jlib:is_nodename(<<>>). compare_bare_jids(_) -> prop(compare_bare_jids, ?FORALL({A, B}, {jid_gen:jid(), jid_gen:jid()}, begin AA = jid:from_binary(A), BB = jid:from_binary(B), equals(jid:are_equal(jid:to_bare(AA), jid:to_bare(BB)), jid:are_bare_equal(AA, BB)) end)).
null
https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/apps/ejabberd/test/jlib_SUITE.erl
erlang
-module(jlib_SUITE). -include_lib("exml/include/exml.hrl"). -include_lib("proper/include/proper.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("ejabberd/include/jlib.hrl"). -include_lib("common_test/include/ct.hrl"). -compile([export_all]). -import(prop_helper, [prop/2]). all() -> [make_iq_reply_changes_type_to_result, make_iq_reply_changes_to_to_from, make_iq_reply_switches_from_to_to, make_iq_reply_switches_to_and_from_attrs, binary_to_jid_succeeds_with_valid_binaries, binary_to_jid_fails_with_invalid_binaries, binary_to_jid_fails_with_empty_binary, make_jid_fails_on_binaries_that_are_too_long, jid_to_lower_fails_if_any_binary_is_invalid, jid_replace_resource_failes_for_invalid_resource, nodeprep_fails_with_too_long_username, nameprep_fails_with_too_long_domain, resourceprep_fails_with_too_long_resource, nodeprep_fails_with_incorrect_username, resourceprep_fails_with_incorrect_resource, nameprep_fails_with_incorrect_domain, is_nodename_fails_for_empty_binary, compare_bare_jids]. init_per_suite(C) -> application:start(p1_stringprep), C. end_per_suite(C) -> application:stop(p1_stringprep), C. make_iq_reply_switches_to_and_from_attrs(_C) -> ToJid = <<"/res">>, FromJid = <<"/res2">>, #xmlel{attrs = Attrs} = BaseIQ = base_iq(), IQWithToAndFrom = BaseIQ#xmlel{attrs = [{<<"to">>, ToJid}, {<<"from">>, FromJid} | Attrs]}, WithToFromReply = jlib:make_result_iq_reply(IQWithToAndFrom), <<"result">> = exml_query:attr(WithToFromReply, <<"type">>), FromJid = exml_query:attr(WithToFromReply, <<"to">>), ToJid = exml_query:attr(WithToFromReply, <<"from">>). make_iq_reply_switches_from_to_to(_C) -> FromJid = <<"/res2">>, #xmlel{attrs = Attrs} = BaseIQ = base_iq(), IQWithFrom = BaseIQ#xmlel{attrs = [{<<"from">>, FromJid} | Attrs]}, WithFromReply = jlib:make_result_iq_reply(IQWithFrom), <<"result">> = exml_query:attr(WithFromReply, <<"type">>), FromJid = exml_query:attr(WithFromReply, <<"to">>). make_iq_reply_changes_to_to_from(_C) -> ToJid = <<"/res">>, #xmlel{attrs = Attrs} = BaseIQ = base_iq(), IQWithTo = BaseIQ#xmlel{attrs = [{<<"to">>, ToJid} | Attrs]}, WithToReply = jlib:make_result_iq_reply(IQWithTo), <<"result">> = exml_query:attr(WithToReply, <<"type">>), ToJid = exml_query:attr(WithToReply, <<"from">>). make_iq_reply_changes_type_to_result(_) -> BaseIQReply = jlib:make_result_iq_reply(base_iq()), <<"result">> = exml_query:attr(BaseIQReply, <<"type">>). base_iq() -> #xmlel{name = <<"iq">>, attrs = [{<<"id">>, base64:encode(crypto:rand_bytes(4))}, {<<"xmlns">>, <<"jabber:client">>}, {<<"type">>, <<"set">>}], children = [#xmlel{name = <<"session">>, attrs = [{<<"xmlns">>, <<"urn:ietf:params:xml:ns:xmpp-session">>}]} ]}. binary_to_jid_succeeds_with_valid_binaries(_C) -> Prop = ?FORALL(BinJid, (jid_gen:jid()), (is_record(jlib:binary_to_jid(BinJid), jid))), prop(binary_to_jid_succeeds_with_valid_binaries, Prop). binary_to_jid_fails_with_invalid_binaries(_C) -> Prop = ?FORALL(BinJid, jid_gen:invalid_jid(), error == jlib:binary_to_jid(BinJid)), run_property(Prop, 100, 1, 42). binary_to_jid_fails_with_empty_binary(_) -> error = jlib:binary_to_jid(<<>>). make_jid_fails_on_binaries_that_are_too_long(_) -> Prop = ?FORALL({U, S, R}, {jid_gen:username(), jid_gen:domain(), jid_gen:resource()}, case element_length_is_too_big([U,S,R]) of true -> error == jlib:make_jid(U,S,R); false -> is_record(jlib:make_jid(U,S,R), jid) end), run_property(Prop, 100, 500, 1500). element_length_is_too_big(Els) -> lists:any(fun(El) -> size(El) >= 1024 end, Els). 
jid_to_lower_fails_if_any_binary_is_invalid(_) -> Prop = ?FORALL({U, S, R}, {jid_gen:maybe_valid_username(), jid_gen:maybe_valid_domain(), jid_gen:maybe_valid_resource()}, case jlib:jid_to_lower({U, S, R}) of {LU, LS, LR} -> jlib:nodeprep(U) == LU andalso jlib:nameprep(S) == LS andalso jlib:resourceprep(R) == LR; error -> jlib:nodeprep(U) == error orelse jlib:nameprep(S) == error orelse jlib:resourceprep(R) == error end), run_property(Prop, 150, 1, 42). nodeprep_fails_with_too_long_username(_C) -> Prop = ?FORALL(Bin, jid_gen:username(), error == jlib:nodeprep(Bin)), run_property(Prop, 5, 1024, 2048). nameprep_fails_with_too_long_domain(_C) -> Prop = ?FORALL(Bin, jid_gen:domain(), error == jlib:nameprep(Bin)), run_property(Prop, 5, 1024, 2048). resourceprep_fails_with_too_long_resource(_C) -> Prop = ?FORALL(Bin, jid_gen:resource(), error == jlib:resourceprep(Bin)), run_property(Prop, 5, 1024, 2048). jid_replace_resource_failes_for_invalid_resource(_) -> Prop = ?FORALL({BinJid, MaybeCorrectRes}, {jid_gen:bare_jid(), jid_gen:maybe_valid_resource()}, jid_replace_resource(BinJid, MaybeCorrectRes)), prop(jid_replace_resource, Prop). jid_replace_resource(BinJid, Res) -> Jid = jlib:binary_to_jid(BinJid), Jid2 = jlib:jid_replace_resource(Jid, Res), check_jid_replace_resource_output(Res, Jid2). check_jid_replace_resource_output(Resource, error) -> jlib:resourceprep(Resource) == error; check_jid_replace_resource_output(Resource, #jid{}) -> jlib:resourceprep(Resource) =/= error. run_property(Prop, NumTest, StartSize, StopSize) -> ?assert(proper:quickcheck(Prop, [verbose, long_result, {numtests, NumTest}, {start_size, StartSize}, {max_size, StopSize}])). nodeprep_fails_with_incorrect_username(_) -> prop(incorrect_username_property, ?FORALL(Bin, jid_gen:invalid_username(), error == jlib:nodeprep(Bin))). resourceprep_fails_with_incorrect_resource(_) -> prop(incorrect_resource_property, ?FORALL(Bin, jid_gen:invalid_resource(), error == jlib:resourceprep(Bin))). nameprep_fails_with_incorrect_domain(_) -> prop(incorrect_domain_property, ?FORALL(Bin, jid_gen:invalid_domain(), error == jlib:nameprep(Bin))). is_nodename_fails_for_empty_binary(_) -> false = jlib:is_nodename(<<>>). compare_bare_jids(_) -> prop(compare_bare_jids, ?FORALL({A, B}, {jid_gen:jid(), jid_gen:jid()}, begin AA = jid:from_binary(A), BB = jid:from_binary(B), equals(jid:are_equal(jid:to_bare(AA), jid:to_bare(BB)), jid:are_bare_equal(AA, BB)) end)).
f556e588cd51024d5d46732a968a0f6ed0e8c100c61310eb8375aca47192a45a
zotonic/zotonic
zotonic_fileindexer.erl
@author < > 2018 %% @doc Indexes directories of applications. Copyright 2018 %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(zotonic_fileindexer). -behaviour(application). -export([ start/0, start/2, stop/1, scan/2, scan/3, flush/0, flush/1, flush/2 ]). -include_lib("zotonic_notifier/include/zotonic_notifier.hrl"). -include_lib("../include/zotonic_fileindexer.hrl"). -type fileindex() :: #fileindex{}. -export_type([fileindex/0]). %%==================================================================== %% API %%==================================================================== start() -> ensure_started(zotonic_fileindexer). start(_StartType, _StartArgs) -> zotonic_fileindexer_sup:start_link(). %%-------------------------------------------------------------------- stop(_State) -> ok. %% @doc Scan an application/dir for files matching a file pattern -spec scan(atom(), file:filename_all()) -> {ok, list( zotonic_fileindexer:fileindex() )} | {error, term()}. scan(App, SubDir) when is_atom(App) -> scan(App, SubDir, undefined). -spec scan(atom(), file:filename_all(), string()|binary()|undefined) -> {ok, list( zotonic_fileindexer:fileindex() )} | {error, term()}. scan(App, SubDir, Pattern) when is_atom(App) -> zotonic_fileindexer_cache:find(App, SubDir, Pattern). %% @doc Clear the complete cache, force a rescan. -spec flush() -> ok. flush() -> zotonic_fileindexer_cache:flush(). %% @doc Clear the cache for the given application. Useful to force a rescan. -spec flush(atom()) -> ok. flush(App) -> flush(App, <<>>). @doc Clear the cache for the given application and subdir prefix . Useful to force a rescan . -spec flush(atom(), file:filename_all()) -> ok. flush(App, SubDir) -> zotonic_fileindexer_cache:flush(App, SubDir). %%==================================================================== Internal functions %%==================================================================== -spec ensure_started(atom()) -> ok | {error, term()}. ensure_started(App) -> case application:start(App) of ok -> ok; {error, {not_started, Dep}} -> case ensure_started(Dep) of ok -> ensure_started(App); {error, _} = Error -> Error end; {error, {already_started, App}} -> ok; {error, {Tag, Msg}} when is_list(Tag), is_list(Msg) -> {error, lists:flatten(io_lib:format("~s: ~s", [Tag, Msg]))}; {error, {bad_return, {{M, F, Args}, Return}}} -> A = string:join([io_lib:format("~p", [A])|| A <- Args], ", "), {error, lists:flatten( io_lib:format("~s failed to start due to a bad return value from call ~s:~s(~s):~n~p", [App, M, F, A, Return]))}; {error, Reason} -> {error, Reason} end.
null
https://raw.githubusercontent.com/zotonic/zotonic/6a2f139ed30fcf5d7f84aad489a6cff060e0526c/apps/zotonic_fileindexer/src/zotonic_fileindexer.erl
erlang
@doc Indexes directories of applications.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    -2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

====================================================================
API
====================================================================
--------------------------------------------------------------------

@doc Scan an application/dir for files matching a file pattern
@doc Clear the complete cache, force a rescan.
@doc Clear the cache for the given application. Useful to force a rescan.
@doc Clear the cache for the given application and subdir prefix. Useful to force a rescan.

====================================================================
Internal functions
====================================================================
%% @author < >  2018
%% Copyright 2018
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% distributed under the License is distributed on an "AS IS" BASIS,

-module(zotonic_fileindexer).

-behaviour(application).

-export([
    start/0,
    start/2,
    stop/1,

    scan/2,
    scan/3,
    flush/0,
    flush/1,
    flush/2
]).

-include_lib("zotonic_notifier/include/zotonic_notifier.hrl").
-include_lib("../include/zotonic_fileindexer.hrl").

-type fileindex() :: #fileindex{}.

-export_type([fileindex/0]).

start() ->
    ensure_started(zotonic_fileindexer).

start(_StartType, _StartArgs) ->
    zotonic_fileindexer_sup:start_link().

stop(_State) ->
    ok.

-spec scan(atom(), file:filename_all()) -> {ok, list( zotonic_fileindexer:fileindex() )} | {error, term()}.
scan(App, SubDir) when is_atom(App) ->
    scan(App, SubDir, undefined).

-spec scan(atom(), file:filename_all(), string()|binary()|undefined) -> {ok, list( zotonic_fileindexer:fileindex() )} | {error, term()}.
scan(App, SubDir, Pattern) when is_atom(App) ->
    zotonic_fileindexer_cache:find(App, SubDir, Pattern).

-spec flush() -> ok.
flush() ->
    zotonic_fileindexer_cache:flush().

-spec flush(atom()) -> ok.
flush(App) ->
    flush(App, <<>>).

%% @doc Clear the cache for the given application and subdir prefix. Useful to force a rescan.
-spec flush(atom(), file:filename_all()) -> ok.
flush(App, SubDir) ->
    zotonic_fileindexer_cache:flush(App, SubDir).

%% Internal functions

-spec ensure_started(atom()) -> ok | {error, term()}.
ensure_started(App) ->
    case application:start(App) of
        ok ->
            ok;
        {error, {not_started, Dep}} ->
            case ensure_started(Dep) of
                ok -> ensure_started(App);
                {error, _} = Error -> Error
            end;
        {error, {already_started, App}} ->
            ok;
        {error, {Tag, Msg}} when is_list(Tag), is_list(Msg) ->
            {error, lists:flatten(io_lib:format("~s: ~s", [Tag, Msg]))};
        {error, {bad_return, {{M, F, Args}, Return}}} ->
            A = string:join([io_lib:format("~p", [A]) || A <- Args], ", "),
            {error, lists:flatten(
                io_lib:format("~s failed to start due to a bad return value from call ~s:~s(~s):~n~p",
                              [App, M, F, A, Return]))};
        {error, Reason} ->
            {error, Reason}
    end.