diff --git a/flake.lock b/flake.lock index 8a66abfb9..fa38e041e 100644 --- a/flake.lock +++ b/flake.lock @@ -32,6 +32,55 @@ "type": "github" } }, + "ate-pairing": { + "flake": false, + "locked": { + "lastModified": 1499347915, + "narHash": "sha256-IMfWgKkkX7UcUaOPtGRcYD8MdVEP5Z9JOOSJ4+P07G8=", + "owner": "herumi", + "repo": "ate-pairing", + "rev": "e69890125746cdaf25b5b51227d96678f76479fe", + "type": "github" + }, + "original": { + "owner": "herumi", + "repo": "ate-pairing", + "rev": "e69890125746cdaf25b5b51227d96678f76479fe", + "type": "github" + } + }, + "blockchain-k-plugin": { + "inputs": { + "ate-pairing": "ate-pairing", + "cpp-httplib": "cpp-httplib", + "cryptopp": "cryptopp", + "flake-utils": [ + "k-framework", + "flake-utils" + ], + "libff": "libff", + "nixpkgs": [ + "k-framework", + "nixpkgs" + ], + "secp256k1": "secp256k1", + "xbyak": "xbyak" + }, + "locked": { + "lastModified": 1689159696, + "narHash": "sha256-dGICJl8YAWXjpjeY1rcRMLyWka3kMZmW74B6q3gvrhQ=", + "owner": "runtimeverification", + "repo": "blockchain-k-plugin", + "rev": "da834be67f6c0aff11140ddfc0b04561494c14b8", + "type": "github" + }, + "original": { + "owner": "runtimeverification", + "repo": "blockchain-k-plugin", + "rev": "da834be67f6c0aff11140ddfc0b04561494c14b8", + "type": "github" + } + }, "cabal-32": { "flake": false, "locked": { @@ -166,6 +215,40 @@ "type": "github" } }, + "cpp-httplib": { + "flake": false, + "locked": { + "lastModified": 1595279716, + "narHash": "sha256-cZW/iEwlaB4UQ0OQLNqboY9addncIM/OsxxPzqmATmE=", + "owner": "yhirose", + "repo": "cpp-httplib", + "rev": "72ce293fed9f9335e92c95ab7d085feed18c0ee8", + "type": "github" + }, + "original": { + "owner": "yhirose", + "repo": "cpp-httplib", + "rev": "72ce293fed9f9335e92c95ab7d085feed18c0ee8", + "type": "github" + } + }, + "cryptopp": { + "flake": false, + "locked": { + "lastModified": 1632484127, + "narHash": "sha256-a3TYaK34WvKEXN7LKAfGwQ3ZL6a3k/zMZyyVfnkQqO4=", + "owner": "weidai11", + "repo": "cryptopp", + "rev": "69bf6b53052b59ccb57ce068ce741988ae087317", + "type": "github" + }, + "original": { + "owner": "weidai11", + "repo": "cryptopp", + "rev": "69bf6b53052b59ccb57ce068ce741988ae087317", + "type": "github" + } + }, "flake-compat": { "flake": false, "locked": { @@ -763,6 +846,23 @@ "type": "github" } }, + "libff": { + "flake": false, + "locked": { + "lastModified": 1588032522, + "narHash": "sha256-I0kH2XLvHDSrdL/o4i6XozWQJV0UDv9zH6+sWS0UQHg=", + "owner": "scipr-lab", + "repo": "libff", + "rev": "5835b8c59d4029249645cf551f417608c48f2770", + "type": "github" + }, + "original": { + "owner": "scipr-lab", + "repo": "libff", + "rev": "5835b8c59d4029249645cf551f417608c48f2770", + "type": "github" + } + }, "llvm-backend": { "inputs": { "fmt-src": "fmt-src", @@ -1339,6 +1439,7 @@ }, "root": { "inputs": { + "blockchain-k-plugin": "blockchain-k-plugin", "flake-compat": "flake-compat", "haskell-backend": "haskell-backend", "haskell-nix": [ @@ -1368,6 +1469,23 @@ "type": "github" } }, + "secp256k1": { + "flake": false, + "locked": { + "lastModified": 1502408521, + "narHash": "sha256-PyqNZGER9VypH35S/aU4EBeepieI3BGXrYsJ141os24=", + "owner": "bitcoin-core", + "repo": "secp256k1", + "rev": "f532bdc9f77f7bbf7e93faabfbe9c483f0a9f75f", + "type": "github" + }, + "original": { + "owner": "bitcoin-core", + "repo": "secp256k1", + "rev": "f532bdc9f77f7bbf7e93faabfbe9c483f0a9f75f", + "type": "github" + } + }, "stackage": { "flake": false, "locked": { @@ -1460,6 +1578,23 @@ "type": "github" } }, + "xbyak": { + "flake": false, + "locked": { + "lastModified": 
1518572786, + "narHash": "sha256-jqDSNDcqK7010ig941hrJFkv1X7MbgXmhPOOE9wHmho=", + "owner": "herumi", + "repo": "xbyak", + "rev": "f0a8f7faa27121f28186c2a7f4222a9fc66c283d", + "type": "github" + }, + "original": { + "owner": "herumi", + "repo": "xbyak", + "rev": "f0a8f7faa27121f28186c2a7f4222a9fc66c283d", + "type": "github" + } + }, "z3-src": { "flake": false, "locked": { diff --git a/flake.nix b/flake.nix index a8b4fdd13..415a69b83 100644 --- a/flake.nix +++ b/flake.nix @@ -8,6 +8,10 @@ haskell-nix.follows = "haskell-backend/haskell-nix"; nixpkgs.follows = "haskell-backend/haskell-nix/nixpkgs-unstable"; + blockchain-k-plugin.url = "github:runtimeverification/blockchain-k-plugin/da834be67f6c0aff11140ddfc0b04561494c14b8"; + blockchain-k-plugin.inputs.flake-utils.follows = "k-framework/flake-utils"; + blockchain-k-plugin.inputs.nixpkgs.follows = "k-framework/nixpkgs"; + flake-compat = { url = "github:edolstra/flake-compat"; flake = false; @@ -15,7 +19,7 @@ }; outputs = - { self, nixpkgs, haskell-nix, k-framework, haskell-backend, ... }@inputs: + { self, nixpkgs, haskell-nix, k-framework, haskell-backend, blockchain-k-plugin, ... }@inputs: let inherit (nixpkgs) lib; perSystem = lib.genAttrs nixpkgs.lib.systems.flakeExposed; @@ -214,6 +218,7 @@ packages."hs-backend-booster:exe:kore-rpc-booster"; rpc-client = packages."hs-backend-booster:exe:rpc-client"; inherit (k-framework.packages.${system}) k; + blockchain-k-plugin = blockchain-k-plugin.defaultPackage.${system}; }; }); diff --git a/package.yaml b/package.yaml index 96db4a415..8c66df4c4 100644 --- a/package.yaml +++ b/package.yaml @@ -131,6 +131,7 @@ executables: - extra - filepath - hs-backend-booster + - monad-logger - network - network-run - optparse-applicative diff --git a/test/rpc-integration/default.nix b/test/rpc-integration/default.nix index f8d28a143..c3cf2f9ac 100644 --- a/test/rpc-integration/default.nix +++ b/test/rpc-integration/default.nix @@ -1,4 +1,4 @@ -{ stdenv, coreutils, lib, kore-rpc-booster, rpc-client, git, k }: +{ stdenv, coreutils, lib, kore-rpc-booster, rpc-client, git, k, blockchain-k-plugin, openssl, procps }: let mkIntegrationTest = @@ -16,7 +16,8 @@ let export ${f} '') buildFlags} export SERVER=${kore-rpc-booster}/bin/kore-rpc-booster - export CLIENT=${rpc-client}/bin/rpc-client + export CLIENT="${rpc-client}/bin/rpc-client" + export PLUGIN_DIR=${blockchain-k-plugin} patchShebangs runDirectoryTest.sh ./runDirectoryTest.sh test-${name} --time ${ @@ -46,4 +47,5 @@ in { simplify = mkIntegrationTest { name = "simplify"; }; get-model = mkIntegrationTest { name = "get-model"; }; issue212 = mkIntegrationTest { name = "issue212"; }; + foundry-bug-report.tar.gz = mkIntegrationTest { name = "foundry-bug-report.tar.gz"; nativeBuildInputs = [ k openssl procps ]; }; } diff --git a/test/rpc-integration/resources/foundry-bug-report.tar.gz.kompile b/test/rpc-integration/resources/foundry-bug-report.tar.gz.kompile new file mode 100755 index 000000000..8236c49a0 --- /dev/null +++ b/test/rpc-integration/resources/foundry-bug-report.tar.gz.kompile @@ -0,0 +1,48 @@ +set -eux + +SCRIPT_DIR=$(dirname $0) +PLUGIN_DIR=${PLUGIN_DIR:-""} +NIX_LIBS=${NIX_LIBS:-""} + + +if [ -z "$PLUGIN_DIR" ]; then + echo "PLUGIN_DIR required to link in a crypto plugin dependency" + exit 1 +else + for lib in libff libcryptopp libsecp256k1; do + LIBFILE=$(find ${PLUGIN_DIR} -name "${lib}.a" | head -1) + [ -z "$LIBFILE" ] && (echo "[Error] Unable to locate ${lib}.a"; exit 1) + PLUGIN_LIBS+="$LIBFILE " + PLUGIN_INCLUDE+="-I$(dirname $LIBFILE)/../include " + 
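The flake above pins runtimeverification/blockchain-k-plugin, re-exposes it as a package, and hands its store path to the integration-test derivation as PLUGIN_DIR. A sketch of exercising that wiring by hand, assuming the attribute set shown above is the flake's packages output and with `llvm-kompile` (from K), OpenSSL and, on Linux, procps available; the out-link name `plugin` is arbitrary (the remainder of the kompile script continues just below):

    $ nix build .#blockchain-k-plugin -o plugin
    # the script cd's into its own directory and unpacks ../test-foundry-bug-report.tar.gz,
    # so an absolute PLUGIN_DIR lets it find libff.a, libcryptopp.a and libsecp256k1.a after the cd
    $ PLUGIN_DIR=$(realpath plugin) \
        ./test/rpc-integration/resources/foundry-bug-report.tar.gz.kompile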
done + #PLUGIN_CPP=$(find ${PLUGIN_DIR}/plugin-c -name "*.cpp") + PLUGIN_CPP="${PLUGIN_DIR}/include/plugin-c/blake2.cpp ${PLUGIN_DIR}/include/plugin-c/crypto.cpp ${PLUGIN_DIR}/include/plugin-c/plugin_util.cpp" +fi + +NAME=$(basename ${0%.tar.gz.kompile}) +NAMETGZ=$(basename ${0%.kompile}) + +# unpack definition.kore and llvm_definition from tarball +# into a directory named after the tarball +cd $SCRIPT_DIR +mkdir -p $NAME +tar xzf ../test-$NAMETGZ -C $NAME definition.kore llvm_definition/ + +# provide definition +cp $NAME/definition.kore ${NAMETGZ}.kore + +# kompile llvm-definition to interpreter + +case "$OSTYPE" in + linux*) LPROCPS="-lprocps" ;; + *) LPROCPS="" ;; +esac + +llvm-kompile $NAME/llvm_definition/definition.kore $NAME/llvm_definition/dt c -- \ + -fPIC -std=c++17 -o interpreter \ + $PLUGIN_LIBS $PLUGIN_INCLUDE $PLUGIN_CPP \ + -lcrypto -lssl $LPROCPS +mv interpreter.* $NAMETGZ.dylib + +# remove temporary artefacts +rm -r $NAME diff --git a/test/rpc-integration/runDirectoryTest.sh b/test/rpc-integration/runDirectoryTest.sh index f09408885..9aebed9e4 100755 --- a/test/rpc-integration/runDirectoryTest.sh +++ b/test/rpc-integration/runDirectoryTest.sh @@ -44,7 +44,7 @@ dylib=resources/${dir#test-}.dylib echo "Running tests from $dir with definition $kore" # make sure directory and kore file exist -[ ! -d "./$dir" ] && exit 1 +[ ! -e "./$dir" ] && exit 1 if [ ! -f "./$kore" ]; then [ ! -f "./$kompile" ] && exit 2 cd resources @@ -68,20 +68,25 @@ server_pid=$! trap 'kill -2 ${server_pid}; popd' ERR EXIT echo "Server PID ${server_pid}" -sleep 2 +sleep 5 -for test in $( ls $dir/state-*.{execute,send,simplify,add-module,get-model} 2>/dev/null ); do - tmp=${test#$dir/state-} - testname=${tmp%.*} - # determine send mode from suffix - mode=${test##*.} - printf "########## Test: %10s %20s\n" "$mode" "$testname #######" - if [ -f "$dir/params-${testname}.json" ]; then - params="--param-file $dir/params-${testname}.json" - else - params="" - fi - # call rpc-client - echo "$client $mode $test $params --expect $dir/response-${testname}.json $*" - $client $mode $test $params --expect $dir/response-${testname}.json $* -done +if [ -d $dir ]; then + for test in $( ls $dir/state-*.{execute,send,simplify,add-module,get-model} 2>/dev/null ); do + tmp=${test#$dir/state-} + testname=${tmp%.*} + # determine send mode from suffix + mode=${test##*.} + printf "########## Test: %10s %20s\n" "$mode" "$testname #######" + if [ -f "$dir/params-${testname}.json" ]; then + params="--param-file $dir/params-${testname}.json" + else + params="" + fi + # call rpc-client + echo "$client $mode $test $params --expect $dir/response-${testname}.json $*" + $client $mode $test $params --expect $dir/response-${testname}.json $* + done +else + echo "$dir is a file, running a tarball test" + $client run-tarball $dir +fi diff --git a/test/rpc-integration/test-foundry-bug-report.tar.gz b/test/rpc-integration/test-foundry-bug-report.tar.gz new file mode 100644 index 000000000..a6ae91745 Binary files /dev/null and b/test/rpc-integration/test-foundry-bug-report.tar.gz differ diff --git a/tools/rpc-client/RpcClient.hs b/tools/rpc-client/RpcClient.hs index 0251ca5fb..5b1fdf675 100644 --- a/tools/rpc-client/RpcClient.hs +++ b/tools/rpc-client/RpcClient.hs @@ -20,13 +20,15 @@ import Codec.Compression.BZip qualified as BZ2 import Codec.Compression.GZip qualified as GZip import Control.Exception import Control.Monad +import Control.Monad.IO.Class +import Control.Monad.Logger import Data.Aeson qualified as Json import 
Data.Aeson.Encode.Pretty qualified as Json import Data.Aeson.Key qualified as JsonKey import Data.Aeson.KeyMap qualified as JsonKeyMap import Data.Bifunctor import Data.ByteString.Lazy.Char8 qualified as BS -import Data.Char (isDigit) +import Data.Char (isDigit, toLower) import Data.Int (Int64) import Data.List.Extra import Data.Maybe (isNothing, mapMaybe) @@ -41,64 +43,74 @@ import System.Clock import System.Directory import System.Exit import System.FilePath -import System.IO import System.IO.Extra import System.Process +import System.Time.Extra (Seconds, sleep) import Booster.JsonRpc (rpcJsonConfig) import Booster.JsonRpc.Utils (diffJson, isIdentical, renderResult) import Booster.Syntax.Json qualified as Syntax -import Debug.Trace - main :: IO () main = do Options{common, runOptions} <- execParser parseOptions case runOptions of RunTarball tarFile keepGoing -> - runTarball common.host common.port tarFile keepGoing + runTarball common tarFile keepGoing RunSingle mode optionFile options processingOptions -> do let ProcessingOptions{postProcessing, prettify, time, dryRun} = processingOptions - - request <- - trace "[Info] Preparing request data" $ - prepareRequestData mode optionFile options - if dryRun - then do - traceM "[Info] Dry-run mode, just showing request instead of sending" - let write - | Just (Expect True file) <- postProcessing = - trace ("[Info] Writing request to file " <> file) (BS.writeFile file) - | otherwise = BS.putStrLn + request <- withLogLevel common.logLevel $ do + logInfo_ "Preparing request data" + request <- prepareRequestData mode optionFile options + when dryRun $ do + logInfo_ "Dry-run mode, just showing request instead of sending" + let write :: BS.ByteString -> LoggingT IO () + write + | Just (Expect True file) <- postProcessing = \contents -> do + logInfo_ ("Writing request to file " <> file) + liftIO $ BS.writeFile file contents + | otherwise = liftIO . BS.putStrLn reformat = Json.encodePretty' rpcJsonConfig . Json.decode @Json.Value write $ if not prettify then request else reformat request - exitSuccess - else runTCPClient common.host (show common.port) $ \s -> do - makeRequest - time - (getModeFile mode) - s - 8192 - request - (postProcess prettify postProcessing) + liftIO exitSuccess + pure request + -- runTCPClient operates on IO directly, therefore repeating runStderrLogging + retryTCPClient 0.5 5 common.host (show common.port) $ \s -> + cancelIfInterrupted s $ do + withLogLevel common.logLevel $ + makeRequest + time + (getModeFile mode) + s + 8192 + request + (postProcess prettify postProcessing) shutdown s ShutdownReceive makeRequest :: - Bool -> String -> Socket -> Int64 -> BS.ByteString -> (BS.ByteString -> IO a) -> IO a + MonadLoggerIO m => + Bool -> + String -> + Socket -> + Int64 -> + BS.ByteString -> + (BS.ByteString -> m a) -> + m a makeRequest time name s bufSize request handleResponse = do - start <- getTime Monotonic - trace ("[Info] Sending request " <> name <> "...") $ - sendAll s request - response <- readResponse - end <- getTime Monotonic + start <- liftIO $ getTime Monotonic + logInfo_ $ "Sending request " <> name <> "..." + logDebug_ $ "Request JSON: " <> BS.unpack request + liftIO $ sendAll s request + response <- liftIO readResponse + end <- liftIO $ getTime Monotonic + logInfo_ "Response received." 
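The client now logs through monad-logger instead of Debug.Trace, so verbosity becomes a runtime choice: Info keeps the previous [Info] chatter, while Debug additionally emits the request and response JSON dumps logged here. A minimal single-request sketch using the `--log-level` option and the `execute` mode exercised by runDirectoryTest.sh (the state and response file names are placeholders):

    # -l / --log-level accepts Error, Warn, Info (the default) or Debug, case-insensitively
    $ rpc-client execute state-example.execute \
        --expect response-example.json --log-level debug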
+ logDebug_ $ "Response JSON: " <> BS.unpack response let timeStr = timeSpecs start end - traceM $ "[Info] Round trip time for request '" <> name <> "' was " <> timeStr - when time $ - trace ("[Info] Saving timing for " <> name) $ - writeFile (name <> ".time") timeStr - - trace "[Info] Response received." $ - handleResponse response + logInfo_ $ "Round trip time for request '" <> name <> "' was " <> timeStr + when time $ do + logInfo_ $ "Saving timing for " <> name + liftIO $ writeFile (name <> ".time") timeStr + handleResponse response where readResponse :: IO BS.ByteString readResponse = do @@ -109,6 +121,55 @@ makeRequest time name s bufSize request handleResponse = do more <- readResponse pure $ part <> more +cancelIfInterrupted :: Socket -> IO () -> IO () +cancelIfInterrupted skt operation = + catchJust isInterrupt operation $ const sendCancel + where + isInterrupt :: AsyncException -> Maybe () + isInterrupt UserInterrupt = Just () + isInterrupt _other = Nothing + + sendCancel = do + sendAll skt (Json.encode cancel) + shutdown skt ShutdownReceive + hPutStrLn stderr "Interrupted" + exitWith (ExitFailure 130) + + cancel = object ["jsonrpc" ~> "2.0", "id" ~> "1", "method" ~> "cancel"] + +retryTCPClient :: Seconds -> Int -> String -> String -> (Socket -> IO a) -> IO a +retryTCPClient delay retries host port operation + | retries < 0 || delay <= 0 = + error $ "retryTCPClient: negative parameters " <> show (delay, retries) + | retries == 0 = runTCPClient host port operation + | otherwise = + catchJust isNoSuchThing (runTCPClient host port operation) tryAgain + where + tryAgain _ = do + hPutStrLn stderr $ "[Warning] Could not connect (retrying " <> show retries <> " times)" + sleep delay + retryTCPClient delay (retries - 1) host port operation + + isNoSuchThing :: IOError -> Maybe () + isNoSuchThing IOError{ioe_type = NoSuchThing} = Just () + isNoSuchThing _other = Nothing + +---------------------------------------- +-- Logging + +-- Logging functions without location +logError_, logWarn_, logInfo_, logDebug_ :: MonadLoggerIO m => String -> m () +logError_ = logWithoutLoc "" LevelError +logWarn_ = logWithoutLoc "" LevelWarn +logInfo_ = logWithoutLoc "" LevelInfo +logDebug_ = logWithoutLoc "" LevelDebug + +withLogLevel :: LogLevel -> LoggingT IO a -> IO a +withLogLevel lvl = runStderrLoggingT . 
filterLogger (const (>= lvl)) + +---------------------------------------- +-- program options + data Options = Options { common :: CommonOptions , runOptions :: RunOptions @@ -118,6 +179,7 @@ data Options = Options data CommonOptions = CommonOptions { host :: String , port :: Int + , logLevel :: LogLevel } deriving stock (Show) @@ -195,6 +257,24 @@ parseCommonOptions = <> help "server port to connect to" <> showDefault ) + <*> option + readLogLevel + ( long "log-level" + <> short 'l' + <> metavar "LOG_LEVEL" + <> value LevelInfo + <> help "Log level, one of [Error, Warn, Info, Debug]" + <> showDefault + ) + where + readLogLevel :: ReadM LogLevel + readLogLevel = + eitherReader $ \s -> case map toLower s of + "debug" -> Right LevelDebug + "info" -> Right LevelInfo + "warn" -> Right LevelWarn + "error" -> Right LevelError + _other -> Left $ s <> ": Unsupported log level" parseOptions :: ParserInfo Options parseOptions = @@ -336,8 +416,8 @@ parseMode = ---------------------------------------- -- Running all requests contained in the `rpc_*` directory of a tarball -runTarball :: String -> Int -> FilePath -> Bool -> IO () -runTarball host port tarFile keepGoing = do +runTarball :: CommonOptions -> FilePath -> Bool -> IO () +runTarball common tarFile keepGoing = do -- unpack tar files, determining type from extension(s) let unpackTar | ".tar" == takeExtension tarFile = Tar.read @@ -350,32 +430,34 @@ runTarball host port tarFile keepGoing = do let checked = Tar.checkSecurity containedFiles -- probe server connection before doing anything, display -- instructions unless server was found. - runTCPClient host (show port) (runAllRequests checked) - `catch` noServerError + retryTCPClient 1 10 common.host (show common.port) (runAllRequests checked) + `catch` (withLogLevel common.logLevel . 
noServerError) where - runAllRequests checked skt = do - -- unpack relevant tar files (rpc_* directory only) - withTempDir $ \tmp -> do + runAllRequests :: + Tar.Entries (Either Tar.FormatError Tar.FileNameError) -> Socket -> IO () + runAllRequests checked skt = cancelIfInterrupted skt $ do + withTempDir $ \tmp -> withLogLevel common.logLevel $ do + -- unpack relevant tar files (rpc_* directories only) + logInfo_ $ unwords ["unpacking json files from tarball", tarFile, "into", tmp] jsonFiles <- - trace (unwords ["[Info] unpacking json files from tarball", tarFile, "into", tmp]) $ - Tar.foldEntries (unpackIfRpc tmp) (pure []) throwAnyError checked - traceM $ "[Info] RPC data:" <> show jsonFiles + liftIO $ Tar.foldEntries (unpackIfRpc tmp) (pure []) throwAnyError checked + logInfo_ $ "RPC data:" <> show jsonFiles let requests = mapMaybe (stripSuffix "_request.json") jsonFiles results <- forM requests $ \r -> do mbError <- runRequest skt tmp jsonFiles r case mbError of - Just err -> - trace ("[Error] Request " <> r <> " failed: " <> BS.unpack err) $ - unless keepGoing $ do - shutdown skt ShutdownReceive - exitWith (ExitFailure 2) + Just err -> do + logError_ $ "Request " <> r <> " failed: " <> BS.unpack err + unless keepGoing $ + liftIO $ + shutdown skt ShutdownReceive >> exitWith (ExitFailure 2) Nothing -> - hPutStrLn stderr "[Info] Response matched with expected" + logInfo_ $ "Response to " <> r <> " matched with expected" pure mbError - shutdown skt ShutdownReceive - exitWith (if all isNothing results then ExitSuccess else ExitFailure 2) + liftIO $ shutdown skt ShutdownReceive + liftIO $ exitWith (if all isNothing results then ExitSuccess else ExitFailure 2) -- complain on any errors in the tarball throwAnyError :: Either Tar.FormatError Tar.FileNameError -> IO a @@ -385,33 +467,41 @@ runTarball host port tarFile keepGoing = do unpackIfRpc :: FilePath -> Tar.Entry -> IO [FilePath] -> IO [FilePath] unpackIfRpc tmpDir entry acc = do case splitFileName (Tar.entryPath entry) of - -- assume single directory "rpc_" containing "*.json" files - (_, "") -- skip all directories - | Tar.Directory <- Tar.entryContent entry -> - acc - (dir, file) -- unpack json files into tmp directory + -- unpack all directories "rpc_" containing "*.json" files + (dir, "") -- directory + | Tar.Directory <- Tar.entryContent entry + , "rpc_" `isPrefixOf` dir -> do + createDirectory dir -- create rpc dir so we can unpack files there + acc -- no additional file to return + | otherwise -> + acc -- skip other directories and top-level files + (dir, file) | "rpc_" `isPrefixOf` dir , ".json" `isSuffixOf` file , Tar.NormalFile bs _size <- Tar.entryContent entry -> do - BS.writeFile (tmpDir file) bs - (file :) <$> acc - _other -> - -- skip anything else - acc + -- unpack json files into tmp directory + let newPath = dir file + -- current tarballs do not have dir entries, create dir here + createDirectoryIfMissing False $ tmpDir dir + BS.writeFile (tmpDir newPath) bs + (newPath :) <$> acc + | otherwise -> + -- skip anything else + acc - noServerError :: IOException -> IO () + noServerError :: MonadLoggerIO m => IOException -> m () noServerError e@IOError{ioe_type = NoSuchThing} = do -- show instructions how to run the server - hPutStrLn stderr $ "[Error] Could not connect to RPC server on port " <> show port - hPutStrLn stderr $ "[Error] " <> show e - hPutStrLn stderr $ + logError_ $ "Could not connect to RPC server on port " <> show common.port + logError_ $ show e + logError_ $ unlines [ "" , "To run the required RPC server, 
you need to" , "1) extract `definition.kore` and `server_instance.json` from the tarball;" , "2) look up the module name `` in `server_instance.json`;" , "3) and then run the server using" - , " $ kore-rpc definition.kore --module --server-port " <> show port + , " $ kore-rpc definition.kore --module --server-port " <> show common.port , "" , "If you want to use `kore-rpc-booster, you should also compile an LLVM backend library" , "by 1) extracting the `llvm_definition/` directory from the tarball;" @@ -420,23 +510,24 @@ runTarball host port tarFile keepGoing = do , "This will generate `interpreter.[so|dylib]` and you can run" , " `kore-rpc-booster definition.kore --main-module --llvm-backend-library interpreter.so`" ] - exitWith (ExitFailure 1) + liftIO $ exitWith (ExitFailure 1) noServerError otherError = do - hPutStrLn stderr $ "[Error] " <> show otherError - exitWith (ExitFailure 1) + logError_ $ show otherError + liftIO $ exitWith (ExitFailure 1) -- Runs one request, checking that a response is available for -- comparison. Returns Nothing if successful (identical -- response), or rendered diff or error message if failing - runRequest :: Socket -> FilePath -> [FilePath] -> String -> IO (Maybe BS.ByteString) + runRequest :: + MonadLoggerIO m => Socket -> FilePath -> [FilePath] -> String -> m (Maybe BS.ByteString) runRequest skt tmpDir jsonFiles basename | not . (`elem` jsonFiles) $ basename <> "_response.json" = pure . Just . BS.pack $ "Response file " <> basename <> "_response.json is missing." | not . (`elem` jsonFiles) $ basename <> "_request.json" = pure . Just . BS.pack $ "Request file " <> basename <> "_request.json is missing." | otherwise = do - request <- BS.readFile $ tmpDir basename <> "_request.json" - expected <- BS.readFile $ tmpDir basename <> "_response.json" + request <- liftIO . BS.readFile $ tmpDir basename <> "_request.json" + expected <- liftIO . BS.readFile $ tmpDir basename <> "_response.json" actual <- makeRequest False basename skt 8192 request pure @@ -446,23 +537,24 @@ runTarball host port tarFile keepGoing = do else pure . 
Just $ renderResult "expected response" "actual response" diff ---------------------------------------- -prepareRequestData :: Mode -> Maybe FilePath -> [(String, String)] -> IO BS.ByteString +prepareRequestData :: + MonadLoggerIO m => Mode -> Maybe FilePath -> [(String, String)] -> m BS.ByteString prepareRequestData (SendRaw file) mbFile opts = do unless (isNothing mbFile) $ - hPutStrLn stderr "[Warning] Raw mode, ignoring given option file" + logWarn_ "Raw mode, ignoring given option file" unless (null opts) $ - hPutStrLn stderr "[Warning] Raw mode, ignoring given request options" - BS.readFile file + logWarn_ "Raw mode, ignoring given request options" + liftIO $ BS.readFile file prepareRequestData (Exec file) mbOptFile opts = - prepareOneTermRequest "execute" file mbOptFile opts + liftIO $ prepareOneTermRequest "execute" file mbOptFile opts prepareRequestData (Simpl file) mbOptFile opts = - prepareOneTermRequest "simplify" file mbOptFile opts + liftIO $ prepareOneTermRequest "simplify" file mbOptFile opts prepareRequestData (AddModule file) mbOptFile opts = do unless (isNothing mbOptFile) $ - hPutStrLn stderr "[Warning] Add-module mode, ignoring given option file" + logWarn_ "Add-module mode, ignoring given option file" unless (null opts) $ - hPutStrLn stderr "[Warning] Raw mode, ignoring given request options" - moduleText <- readFile file + logWarn_ "Raw mode, ignoring given request options" + moduleText <- liftIO $ readFile file pure . Json.encode $ object [ "jsonrpc" ~> "2.0" @@ -472,7 +564,7 @@ prepareRequestData (AddModule file) mbOptFile opts = do +: "params" ~> Json.Object (object ["module" ~> moduleText]) prepareRequestData (GetModel file) mbOptFile opts = - prepareOneTermRequest "get-model" file mbOptFile opts + liftIO $ prepareOneTermRequest "get-model" file mbOptFile opts prepareRequestData (Check _file1 _file2) _mbOptFile _opts = do error "not implemented yet" @@ -538,38 +630,43 @@ infixl 4 +: (+:) :: Json.Object -> (String, Json.Value) -> Json.Object o +: (k, v) = JsonKeyMap.insert (JsonKey.fromString k) v o -postProcess :: Bool -> Maybe PostProcessing -> BS.ByteString -> IO () +postProcess :: + MonadLoggerIO m => Bool -> Maybe PostProcessing -> BS.ByteString -> m () postProcess prettify postProcessing output = case postProcessing of Nothing -> - BS.putStrLn $ if prettify then prettyOutput else output + liftIO $ BS.putStrLn $ if prettify then prettyOutput else output Just Expect{expectFile, regenerate} -> do - doesFileExist expectFile >>= \case + liftIO (doesFileExist expectFile) >>= \case False -> if regenerate then do - hPutStrLn stderr "[Info] Generating expected file for the first time." - BS.writeFile expectFile prettyOutput + logInfo_ $ "Writing file " <> expectFile <> " for the first time." + liftIO $ BS.writeFile expectFile prettyOutput else do - BS.putStrLn "[Error] The expected file does not exist. Use `--regenerate` if you wish to create it." - exitWith $ ExitFailure 1 + logError_ $ + "The expected file " + <> expectFile + <> " does not exist. Use `--regenerate` if you wish to create it." + liftIO . 
exitWith $ ExitFailure 1 True -> do - expected <- BS.readFile expectFile + expected <- liftIO $ BS.readFile expectFile when (prettyOutput /= expected) $ do - BS.writeFile "response" prettyOutput + liftIO $ BS.writeFile "response" prettyOutput (_, result, _) <- - readProcessWithExitCode "git" ["diff", "--no-index", "--color-words=.", expectFile, "response"] "" - putStrLn result + liftIO $ + readProcessWithExitCode "git" ["diff", "--no-index", "--color-words=.", expectFile, "response"] "" + liftIO $ putStrLn result if regenerate then do - hPutStrLn stderr "[Info] Re-generating expected file." - renameFile "response" expectFile + logInfo_ $ "Re-generating expected file " <> expectFile + liftIO $ renameFile "response" expectFile else do - removeFile "response" - BS.putStrLn "[Error] Not the same, sorry." - exitWith $ ExitFailure 1 - hPutStrLn stderr $ "[Info] Output matches " <> expectFile + liftIO $ removeFile "response" + logError_ $ "Response differs from expected " <> expectFile + liftIO . exitWith $ ExitFailure 1 + logInfo_ $ "Output matches " <> expectFile where prettyOutput = Json.encodePretty' rpcJsonConfig $
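End to end, the new tarball mode replays every request found under the archive's rpc_* directories against a running server and diffs each response against the recorded one, retrying the initial connection and printing the server instructions above if nothing is listening. A possible manual run for the new fixture, following those instructions (the module name is a placeholder to be read from server_instance.json, interpreter.so/.dylib is the artefact produced by the kompile step, and matching port options should be passed to both commands if their defaults differ):

    # terminal 1: serve the definition extracted from the tarball
    $ kore-rpc-booster definition.kore --main-module <MODULE> --llvm-backend-library interpreter.so
    # terminal 2: replay the recorded rpc_* requests and compare the responses
    $ rpc-client run-tarball test/rpc-integration/test-foundry-bug-report.tar.gz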