From 6513755670892983db88a6633b8c1ea6019c03d1 Mon Sep 17 00:00:00 2001 From: Ben Sima Date: Fri, 15 Nov 2024 14:55:37 -0500 Subject: Re-namespace some stuff to Omni I was getting confused about what is a product and what is internal infrastructure; I think it is good to keep those things separate. So I moved a bunch of stuff to an Omni namespace, actually most stuff went there. Only things that are explicitly external products are still in the Biz namespace. --- Omni/Bild.hs | 1144 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1144 insertions(+) create mode 100644 Omni/Bild.hs (limited to 'Omni/Bild.hs') diff --git a/Omni/Bild.hs b/Omni/Bild.hs new file mode 100644 index 0000000..60253c0 --- /dev/null +++ b/Omni/Bild.hs @@ -0,0 +1,1144 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE NoImplicitPrelude #-} + +-- | A specific-purpose build tool. +-- +-- : out bild +-- : run git +-- +-- == Design constraints +-- +-- * The only input is one or more a namespaces. No subcommands, no packages, +-- no targets. +-- +-- * No need to write specific build rules. One rule for hs, one for rs, one +-- for scm, and so on. +-- +-- * No need to distinguish between exe and lib because we only build +-- exes; 'libs' are just source files in the tree. +-- +-- * Never concerned with deployment/packaging - leave that to another tool +-- (scp? tar?) +-- +-- * Ability to do local dev builds should be preserved, while remote nix +-- builds are used for the final package. +-- +-- == Features +-- +-- * Namespace maps to filesystem +-- +-- * no need for `bild -l` for listing available targets. +-- Use `ls` or `tree` +-- +-- * you build namespaces, not files/modules/packages/etc +-- +-- * Namespace maps to language modules +-- +-- * build settings can be set in the file comments, or special 'bild' +-- args +-- +-- * pwd is always considered the the source directory, +-- no `src` vs `doc` etc. +-- +-- * Build rules automaticatly detected from file extensions +-- +-- * Flags modify the way to interact with the build, some ideas: +-- +-- * -p = turn on profiling +-- +-- * -o = optimize level +-- +-- * The build is planned out with an analysis, which can be viewed +-- beforehand with `--plan`. The analysis includes compiler flags, which +-- can be used in `repl` for testing compilation locally. +-- +-- * (WIP) Nix is used by default to build everything on a remote build +-- machine, but local, non-Nix builds can be accomplished with `--dev`. +-- +-- == Example Commands +-- +-- > bild [opts] +-- +-- The general scheme is to build the things described by the targets. A target +-- is a namespace. You can list as many as you want, but you must list at least +-- one. It could just be `:!bild %` in vim to build whatever you're working on, +-- or `bild **/*` to build everything, or `fd .hs -X bild` to build all Haskell +-- files. +-- +-- Build outputs will go into the `_` directory in the root of the project. +-- +-- > bild A/B.hs +-- +-- This will build the file at ./A/B.hs, which translates to something like +-- `ghc --make A.B`. +-- +-- == Build Metadata +-- +-- Metadata is set in the comments with a special syntax. For system-level deps, +-- we list the deps in comments in the target file, like: +-- +-- > -- : sys cmark +-- > -- : sys libssl +-- +-- The name is used to lookup the package in `nixpkgs.pkgs.`. 
Only one +-- package can be listed per line. Language-level deps can automatically +-- determined by passing parsed import statements to a package database, eg +-- `ghc-pkg find-module`. If such a package database is not available, we either +-- keep a hand-written index that maps imports to packages, or we just list the +-- name of the package with: +-- +-- > -- : dep package +-- +-- The output executable is named with: +-- +-- > -- : out my-program +-- +-- or +-- +-- > -- : out my-app.js +-- +-- When multiple compilers are possible we use the @out@ extension to determine +-- target platform. If @out@ does not have an extension, each build type falls +-- back to a default, namely an executable binary. +-- +-- This method of setting metadata in the module comments works pretty well, +-- and really only needs to be done in the entrypoint module anyway. +-- +-- Local module deps are included by just giving the repo root to the underlying +-- compiler for the target, and the compiler does the work of walking the source +-- tree. +module Omni.Bild where + +import Alpha hiding (sym, (<.>)) +import qualified Conduit +import qualified Control.Concurrent.Async as Async +import qualified Data.Aeson as Aeson +import qualified Data.ByteString as ByteString +import qualified Data.ByteString.Char8 as Char8 +import qualified Data.ByteString.Internal as BSI +import qualified Data.ByteString.Lazy as ByteString.Lazy +import qualified Data.Char as Char +import Data.Conduit ((.|)) +import qualified Data.Conduit.Combinators as Conduit +import qualified Data.Conduit.Process as Conduit +import qualified Data.List as List +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.String as String +import qualified Data.Text as Text +import qualified Data.Text.IO as Text.IO +import qualified GHC.Conc as GHC +import qualified Network.HostName as HostName +import qualified Omni.Bild.Meta as Meta +import qualified Omni.Cli as Cli +import qualified Omni.Log as Log +import Omni.Namespace (Namespace (..)) +import qualified Omni.Namespace as Namespace +import Omni.Test ((@=?)) +import qualified Omni.Test as Test +import qualified System.Directory as Dir +import qualified System.Environment as Env +import qualified System.Exit as Exit +import System.FilePath (replaceExtension, ()) +import qualified System.IO as IO +import System.IO.Unsafe (unsafePerformIO) +import qualified System.Process as Process +import qualified System.Timeout as Timeout +import qualified Text.Regex.Applicative as Regex + +main :: IO () +main = Cli.Plan help move test_ pure |> Cli.main + where + test_ = + Test.group + "Omni.Bild" + [ test_bildBild, + test_bildExamples, + test_isGitIgnored, + test_isGitHook, + test_detectPythonImports + ] + +test_bildBild :: Test.Tree +test_bildBild = + Test.unit "can bild bild" <| do + root <- Env.getEnv "CODEROOT" + path <- Dir.makeAbsolute "Omni/Bild.hs" + case Namespace.fromPath root path of + Nothing -> Test.assertFailure "can't find ns for bild" + Just ns -> + analyze mempty ns + +> build False False 1 2 + +> \case + [Exit.ExitFailure _] -> + Test.assertFailure "can't bild bild" + _ -> + pure () + +test_bildExamples :: Test.Tree +test_bildExamples = + Test.unit "can bild examples" <| do + Env.getEnv "CODEROOT" +> \root -> + ["c", "hs", "lisp", "rs"] + |> map ("Omni/Bild/Example." 
<>) + |> traverse Dir.makeAbsolute + /> map (Namespace.fromPath root) + /> catMaybes + +> foldM analyze mempty + +> build False False 4 1 + +> \case + [] -> Test.assertFailure "asdf" + xs -> all (== Exit.ExitSuccess) xs @=? True + +move :: Cli.Arguments -> IO () +move args = + IO.hSetBuffering stdout IO.NoBuffering + >> Env.getEnv "CODEROOT" + +> \root -> + Cli.getAllArgs args (Cli.argument "target") + |> filterM Dir.doesFileExist + +> filterM (\x -> isGitIgnored x /> don't) + /> filter (\x -> isGitHook x |> don't) + +> traverse Dir.makeAbsolute + +> traverse (namespaceFromPathOrDie root) + /> filter isBuildableNs + +> foldM analyze mempty + +> printOrBuild + |> Timeout.timeout (toMillis minutes) + +> \case + Nothing -> + Log.br + >> Log.fail ["bild", "timeout after " <> tshow minutes <> " minutes"] + >> Log.br + >> exitWith (ExitFailure 124) + Just s -> exitSummary s + where + minutes = + Cli.getArgWithDefault args "10" (Cli.longOption "time") + |> readMaybe + |> \case + Nothing -> panic "could not read --time argument" + Just n -> (n == 0) ?: (-1, n) + printOrBuild :: Analysis -> IO [ExitCode] + printOrBuild targets + | Map.null targets = + Log.wipe + >> Log.fail ["bild", "nothing to build"] + >> Log.br + >> exitWith (ExitFailure 1) + | args `Cli.has` Cli.longOption "plan" = + Log.wipe >> putJSON targets >> pure [Exit.ExitSuccess] + | otherwise = do + root <- Env.getEnv "CODEROOT" + nproc <- GHC.getNumProcessors + createHier root + build isTest isLoud jobs (cpus nproc) targets + cpus :: Int -> Int + cpus nproc = + Cli.longOption "cpus" + |> Cli.getArgWithDefault args (str <| (nproc - 4) `div` jobs) + |> readMaybe + |> \case + Nothing -> panic "could not read --cpus argument" + Just n -> n + jobs :: Int + jobs = + Cli.longOption "jobs" + |> Cli.getArgWithDefault args "6" + |> readMaybe + |> \case + Nothing -> panic "could not read --jobs argument" + Just n -> n + isTest = args `Cli.has` Cli.longOption "test" + isLoud = args `Cli.has` Cli.longOption "loud" + putJSON = Aeson.encode .> ByteString.Lazy.toStrict .> Char8.putStrLn + +-- | Don't try to build stuff that isn't part of the git repo. +isGitIgnored :: FilePath -> IO Bool +isGitIgnored path = + Process.readProcessWithExitCode "git" ["check-ignore", path] "" + +> \case + (ExitSuccess, _, _) -> pure True + (ExitFailure _, _, _) -> pure False + +test_isGitIgnored :: Test.Tree +test_isGitIgnored = + Test.group + "isGitIgnored" + [ Test.unit "filters one" <| do + res <- isGitIgnored "_" + res @=? True, + Test.unit "filters many" <| do + traverse isGitIgnored ["Omni/Bild.hs", "TAGS"] + +> (@=? [False, True]) + ] + +isGitHook :: FilePath -> Bool +isGitHook path = + "Omni/Ide/hooks" `List.isInfixOf` path + +test_isGitHook :: Test.Tree +test_isGitHook = + Test.group + "isGitHook" + [ Test.unit "filters pre-commit hook" <| do + root <- Env.getEnv "CODEROOT" + True @=? (isGitHook <| root <> "/Omni/Ide/hooks/pre-commit"), + Test.unit "doesn't filter non-hooks" <| do + root <- Env.getEnv "CODEROOT" + False @=? (isGitHook <| root <> "/Omni/Bild.hs") + ] + +namespaceFromPathOrDie :: FilePath -> FilePath -> IO Namespace +namespaceFromPathOrDie root path = + Namespace.fromPath root path |> \case + Just x -> pure x + Nothing -> + Log.fail ["bild", str path, "could not get namespace"] + >> Log.br + >> exitWith (ExitFailure 1) + +nixStore :: String +nixStore = "/nix/store/00000000000000000000000000000000-" + +help :: Cli.Docopt +help = + [Cli.docopt| +bild + +Usage: + bild test + bild [options] ... 
+ bild --help + +Options: + --test, -t Run tests on a target after building + --loud, -l Show all output from compiler + --plan, -p Print the build plan as JSON, don't build + --time N Set timeout to N minutes, 0 means never timeout [default: 10] + --jobs N, -j N Build up to N jobs at once [default: 6] + --cpus N, -c N Allocate up to N cpu cores per job (default: (nproc-4)/jobs) + --help, -h Print this info +|] + +exitSummary :: [Exit.ExitCode] -> IO () +exitSummary exits = + if failures > 0 + then Exit.die <| show failures + else Exit.exitSuccess + where + failures = length <| filter isFailure exits + +data Compiler + = Copy + | CPython + | Gcc + | Ghc + | Guile + | NixBuild + | Rustc + | Sbcl + deriving (Eq, Show, Generic) + +compilerExe :: (IsString a) => Compiler -> a +compilerExe = \case + Copy -> "cp" + CPython -> "python" + Gcc -> "gcc" + Ghc -> "ghc" + Guile -> "guild" + NixBuild -> "nix-build" + Rustc -> "rustc" + Sbcl -> "sbcl" + +instance Aeson.ToJSON Compiler where + toJSON = Aeson.String <. compilerExe + +instance ToNixFlag Compiler where + toNixFlag = compilerExe + +-- | Type alias for making sure that the path is qualified, meaning it starts at +-- the root of the repo, and is not an absolute path nor a subpath +type QualifiedPath = FilePath + +data Target = Target + { -- | Output name + out :: Meta.Out, + -- | Output path (into cabdir) + outPath :: FilePath, + -- | Fully qualified namespace partitioned by '.' + namespace :: Namespace, + -- | Path to file, qualified based on the root of the git directory + quapath :: QualifiedPath, + -- | Main module name, formatted as the language expects + mainModule :: String, + -- | Name of the packageset in Bild.nix to pull langdeps from + packageSet :: Text, + -- | Language-specific dependencies, required during compilation + langdeps :: Set Meta.Dep, + -- | Local source files on which this target depends + srcs :: Set FilePath, + -- | System-level dependencies, required during runtime either via PATH or + -- linking, depending on the language + sysdeps :: Set Meta.Dep, + -- | Which compiler should we use? + compiler :: Compiler, + -- | Which nix build expression? + builder :: Text, + -- | Who is building this? + user :: Text, + -- | Where are they buildint it? + host :: Text, + -- | Flags and arguments passed to 'Compiler' when building + compilerFlags :: [Text], + -- | Wrapper script (if necessary) + wrapper :: Maybe Text, + -- | Runtime dependences + rundeps :: Set Meta.Run + } + deriving (Show, Generic, Aeson.ToJSON) + +-- | Use this to just get a target to play with at the repl. +dev_getTarget :: FilePath -> IO Target +dev_getTarget fp = do + root <- Env.getEnv "CODEROOT" + path <- Dir.makeAbsolute fp + Namespace.fromPath root path + |> \case + Nothing -> panic "Could not get namespace from path" + Just ns -> + analyze mempty ns + /> Map.lookup ns + /> \case + Nothing -> panic "Could not retrieve target from analysis" + Just t -> t + +data Builder + = -- | Local + Local Text Text + | -- | Remote + Remote Text Text + deriving (Show, Generic) + +instance Aeson.ToJSON Builder where + toJSON (Local u host) = Aeson.String <| u <> "@" <> host + toJSON (Remote u host) = Aeson.String <| u <> "@" <> host + +class ToNixFlag a where + toNixFlag :: a -> String + +instance ToNixFlag Builder where + toNixFlag = \case + Local _ _ -> mempty + Remote u h -> Text.unpack <| Text.concat ["ssh://", u, "@", h, "?ssh-key=/home/", u, "/.ssh/id_rsa"] + +-- | We can't build everything yet... 
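+--
+-- As an illustrative sketch only (see the cases below for the full mapping),
+-- Haskell sources are buildable while plain Markdown files are not; the
+-- namespace used here is just the one for this very file:
+--
+-- >>> isBuildableNs (Namespace ["Omni", "Bild"] Namespace.Hs)
+-- True
+-- >>> isBuildableNs (Namespace ["Omni", "Bild"] Namespace.Md)
+-- False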
+isBuildableNs :: Namespace -> Bool +isBuildableNs = \case + (Namespace _ Namespace.C) -> True + (Namespace _ Namespace.Css) -> False + (Namespace _ Namespace.Hs) -> True + (Namespace _ Namespace.Html) -> False + (Namespace _ Namespace.Json) -> False + (Namespace _ Namespace.Keys) -> False + (Namespace _ Namespace.Lisp) -> True + (Namespace _ Namespace.Md) -> False + (Namespace path Namespace.Nix) + | path `elem` nixTargets -> True + | otherwise -> False + (Namespace _ Namespace.None) -> False + (Namespace _ Namespace.Py) -> True + (Namespace _ Namespace.Sh) -> False + (Namespace _ Namespace.Scm) -> True + (Namespace _ Namespace.Rs) -> True + (Namespace _ Namespace.Toml) -> True + where + nixTargets = + [ ["Biz", "Pie"], + ["Biz", "Que"], + ["Omni", "Cloud"], + ["Omni", "Dev"], + ["Omni", "Dev", "Lithium"], + ["Omni", "Dev", "Beryllium"], + ["Biz", "Dragons", "Analysis"] + ] + +-- | The default output directory. This is not IO because I don't want to +-- refactor all of my code right now, but it probably should be. +cab :: FilePath +cab = + Env.lookupEnv "CABDIR" + /> fromMaybe "_" + |> unsafePerformIO + +outToPath :: Meta.Out -> FilePath +outToPath = \case + Meta.Bin o -> cab "bin" o + Meta.Lib o -> cab "lib" o + Meta.None -> mempty + +outname :: Meta.Out -> FilePath +outname = \case + Meta.Bin o -> o + Meta.Lib o -> o + Meta.None -> mempty + +bindir, intdir, nixdir, vardir :: FilePath +bindir = cab "bin" +intdir = cab "int" +nixdir = cab "nix" +vardir = cab "var" + +-- | Emulate the *nix hierarchy in the cabdir. +createHier :: String -> IO () +createHier root = + traverse_ + (Dir.createDirectoryIfMissing True) + [ root (outToPath <| Meta.Bin ""), + root (outToPath <| Meta.Lib ""), + root intdir, + root nixdir, + root vardir + ] + +-- >>> removeVersion "array-0.5.4.0-DFLKGIjfsadi" +-- "array" +removeVersion :: String -> String +removeVersion = takeWhile (/= '.') .> butlast2 + where + butlast2 s = take (length s - 2) s + +type Analysis = Map Namespace Target + +analyze :: Analysis -> Namespace -> IO Analysis +analyze hmap ns = case Map.lookup ns hmap of + Nothing -> do + mTarget <- analyzeOne ns + pure <| maybe hmap (\t -> Map.insert ns t hmap) mTarget + Just _ -> pure hmap + where + analyzeOne :: Namespace -> IO (Maybe Target) + analyzeOne namespace@(Namespace _ ext) = do + let path = Namespace.toPath namespace + root <- Env.getEnv "CODEROOT" + let abspath = root path + let quapath = path + user <- Env.getEnv "USER" /> Text.pack + host <- HostName.getHostName /> Text.pack + Log.info ["bild", "analyze", str path] + contentLines <- + withFile abspath ReadMode <| \h -> + IO.hSetEncoding h IO.utf8_bom + >> Text.IO.hGetContents h + /> Text.lines + case ext of + -- basically we don't support building these + Namespace.Css -> pure Nothing + Namespace.Json -> pure Nothing + Namespace.Keys -> pure Nothing + Namespace.Md -> pure Nothing + Namespace.None -> pure Nothing + Namespace.Html -> pure Nothing + Namespace.Toml -> pure Nothing + Namespace.Py -> + contentLines + |> Meta.detectAll "#" + |> \Meta.Parsed {..} -> + detectPythonImports contentLines +> \srcs -> + Target + { builder = "python", + wrapper = Nothing, + compiler = CPython, + compilerFlags = + -- This doesn't really make sense for python, but I'll leave + -- it here for eventual --dev builds + [ "-c", + "\"import py_compile;import os;" + <> "py_compile.compile(file='" + <> str quapath + <> "', cfile=os.getenv('CODEROOT')+'/_/int/" + <> str quapath + <> "', doraise=True)\"" + ], + sysdeps = psys, + langdeps = pdep, + outPath = outToPath 
pout, + out = pout, + packageSet = "python.packages", + mainModule = Namespace.toModule namespace, + rundeps = prun, + .. + } + |> Just + |> pure + Namespace.Sh -> pure Nothing + Namespace.C -> + Meta.detectAll "//" contentLines |> \Meta.Parsed {..} -> do + Target + { langdeps = pdep, + sysdeps = psys, + wrapper = Nothing, + compiler = Gcc, + builder = "c", + out = pout, + packageSet = "c.packages", + mainModule = Namespace.toModule namespace, + compilerFlags = case pout of + Meta.Bin o -> + ["-o", o, path] <> Set.toList parg |> map Text.pack + _ -> panic "can only bild C exes, not libs", + outPath = outToPath pout, + -- implement detectCImports, then I can fill this out + srcs = Set.empty, + rundeps = prun, + .. + } + |> Just + |> pure + Namespace.Hs -> + contentLines + |> Meta.detectAll "--" + |> \Meta.Parsed {..} -> + detectHaskellImports hmap contentLines +> \(langdeps, srcs) -> + Target + { builder = "haskell", + wrapper = Nothing, + compiler = Ghc, + packageSet = "haskell.packages", + mainModule = Namespace.toModule namespace, + compilerFlags = + [ "-Wall", + "-Werror", + "-haddock", + "-Winvalid-haddock", + "-threaded", + "-i$CODEROOT", + "-odir", + ".", + "-hidir", + ".", + "--make", + "$CODEROOT" quapath + ] + ++ case pout of + Meta.Bin o -> + [ "-main-is", + Namespace.toHaskellModule namespace, + "-o", + o + ] + _ -> [] + |> map Text.pack, + sysdeps = Meta.detect (Meta.sys "--") contentLines, + outPath = outToPath pout, + rundeps = prun, + out = pout, + .. + } + |> Just + |> pure + Namespace.Lisp -> + Meta.detectOut (Meta.out ";;") contentLines |> \out -> do + langdeps <- detectLispImports contentLines + Just + (root outToPath out) <> "\" :toplevel #'main :executable t)" + ], + builder = "base", + outPath = outToPath out, + -- add local src imports to detectLispImports, then i can fill this out + srcs = Set.empty, + rundeps = Set.empty, + .. + } + Namespace.Nix -> + (host == "lithium") ?: (Local user "lithium", Remote user "dev.simatime.com") |> \builder -> + Target + { langdeps = Set.empty, + wrapper = Nothing, + sysdeps = Set.empty, + compiler = NixBuild, + compilerFlags = + [ quapath, + "--out-link", + root nixdir Namespace.toPath namespace, + "--builders", + toNixFlag builder, + "--arg", + "bild", + str <| "import " <> root "Omni/Bild.nix {}" + ] + |> map Text.pack, + out = Meta.None, + outPath = outToPath Meta.None, + srcs = Set.empty, + packageSet = "", + mainModule = Namespace.toModule namespace, + builder = "base", + rundeps = Set.empty, + .. + } + |> Just + |> pure + Namespace.Scm -> + Meta.detectAll ";;" contentLines |> \Meta.Parsed {..} -> + Target + { langdeps = pdep, + sysdeps = psys, + compiler = Guile, + packageSet = "scheme.guilePackages", + mainModule = Namespace.toModule namespace, + compilerFlags = + [ "compile", + "--r7rs", + "--load-path=" ++ root, + "--output=" ++ root intdir replaceExtension quapath ".scm.go", + quapath + ] + |> map Text.pack, + builder = "base", + outPath = outToPath pout, + out = pout, + srcs = Set.empty, -- implement detectSchemeImports + -- TODO: wrapper should just be removed, instead rely on + -- upstream nixpkgs builders to make wrappers + wrapper = + (pout == Meta.None) + ?: ( Nothing, + [ "#!/usr/bin/env bash", + "guile -C \"" + <> root + intdir + <> "\" -e main " + <> "-s " + <> Namespace.toPath namespace + <> " \"$@\"" + ] + |> joinWith "\n" + |> Text.pack + |> Just + ), + rundeps = prun, + .. 
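+            -- For reference, with a hypothetical target Omni/Repl.scm and
+            -- CODEROOT=/code (both made up for illustration), the wrapper
+            -- text built above comes out roughly as:
+            --
+            --   #!/usr/bin/env bash
+            --   guile -C "/code/_/int" -e main -s Omni/Repl.scm "$@"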
+ } + |> Just + |> pure + Namespace.Rs -> + Meta.detectAll "//" contentLines |> \Meta.Parsed {..} -> + Target + { langdeps = pdep, + -- this packageSet doesn't actually exist because everyone in + -- nix just generates nix expressions for rust dependencies with + -- Cargo.lock, so I have to make it in order to use rust deps + packageSet = "rust.packages", + mainModule = Namespace.toModule namespace, + wrapper = Nothing, + sysdeps = psys <> Set.singleton "rustc", + out = pout, + compiler = Rustc, + compilerFlags = case pout of + Meta.Bin o -> + map + Text.pack + [ "$CODEROOT" path, + "-o", + o + ] + _ -> panic "can't build rust libs", + builder = "base", + outPath = outToPath pout, + -- implement detectRustImports + srcs = Set.empty, + rundeps = prun, + .. + } + |> Just + |> pure + +detectHaskellImports :: Analysis -> [Text] -> IO (Set Meta.Dep, Set FilePath) +detectHaskellImports hmap contentLines = + Env.getEnv "CODEROOT" +> \root -> + contentLines + /> Text.unpack + /> Regex.match haskellImports + |> catMaybes + |> \imports -> + foldM ghcPkgFindModule Set.empty imports + +> \pkgs -> + filepaths imports + +> \files -> + findDeps root files + +> \deps -> + (pkgs <> deps, map (stripRoot root) files |> Set.fromList) + |> pure + where + filepaths :: [String] -> IO [FilePath] + filepaths imports = + imports + |> map Namespace.fromHaskellModule + |> map Namespace.toPath + |> traverse Dir.makeAbsolute + +> filterM Dir.doesFileExist + findDeps :: String -> [FilePath] -> IO (Set Meta.Dep) + findDeps root fps = + fps + |> traverse (pure <. Namespace.fromPath root) + /> catMaybes + -- this is still an inefficiency, because this recurses before the + -- hmap is updated by the fold, transitive imports will be + -- re-visited. you can see this with `TERM=dumb bild`. to fix this i + -- need shared state instead of a fold, or figure out how to do a + -- breadth-first search instead of depth-first. + +> foldM analyze (onlyHaskell hmap) + /> Map.elems + /> map langdeps + /> mconcat + onlyHaskell :: Analysis -> Analysis + onlyHaskell = Map.filterWithKey (\ns _ -> ext ns == Namespace.Hs) + +stripRoot :: FilePath -> FilePath -> FilePath +stripRoot root f = fromMaybe f (List.stripPrefix (root <> "/") f) + +detectLispImports :: [Text] -> IO (Set Meta.Dep) +detectLispImports contentLines = + contentLines + /> Text.unpack + /> Regex.match lispRequires + |> catMaybes + |> Set.fromList + |> pure + +-- | Finds local imports. Does not recurse to find transitive imports like +-- 'detectHaskellImports' does. Someday I will refactor these detection +-- functions and have a common, well-performing, complete solution. +detectPythonImports :: [Text] -> IO (Set FilePath) +detectPythonImports contentLines = + contentLines + /> Text.unpack + /> Regex.match pythonImport + |> catMaybes + /> Namespace.fromPythonModule + /> Namespace.toPath + |> filterM Dir.doesPathExist + /> Set.fromList + where + -- only detects 'import x' because I don't like 'from' + pythonImport :: Regex.RE Char String + pythonImport = + Regex.string "import" + *> Regex.some (Regex.psym Char.isSpace) + *> Regex.many (Regex.psym isModuleChar) + <* Regex.many Regex.anySym + +test_detectPythonImports :: Test.Tree +test_detectPythonImports = + Test.group + "detectPythonImports" + [ Test.unit "matches import statements" <| do + set <- detectPythonImports ["import Omni.Log"] + Set.fromList ["Omni/Log.py"] @=? set, + Test.unit "matches import as statements" <| do + set <- detectPythonImports ["import Omni.Log as Log"] + Set.fromList ["Omni/Log.py"] @=? 
set + ] + +ghcPkgFindModule :: Set String -> String -> IO (Set String) +ghcPkgFindModule acc m = + Env.getEnv "GHC_PACKAGE_PATH" +> \packageDb -> + Process.readProcess + "ghc-pkg" + ["--package-db", packageDb, "--names-only", "--simple-output", "find-module", m] + "" + /> String.lines + /> Set.fromList + /> Set.union acc + +isFailure :: Exit.ExitCode -> Bool +isFailure (Exit.ExitFailure _) = True +isFailure Exit.ExitSuccess = False + +isSuccess :: Exit.ExitCode -> Bool +isSuccess Exit.ExitSuccess = True +isSuccess _ = False + +test :: Bool -> Target -> IO (Exit.ExitCode, ByteString) +test loud Target {..} = case compiler of + Ghc -> do + root <- Env.getEnv "CODEROOT" + run + <| Proc + { loud = loud, + cmd = root outToPath out, + args = ["test"], + ns = namespace, + onFailure = Log.fail ["test", nschunk namespace] >> Log.br, + onSuccess = Log.pass ["test", nschunk namespace] >> Log.br + } + _ -> + Log.warn ["test", nschunk namespace, "unavailable"] + >> Log.br + >> pure (Exit.ExitFailure 1, mempty) + +build :: Bool -> Bool -> Int -> Int -> Analysis -> IO [Exit.ExitCode] +build andTest loud jobs cpus analysis = + Env.getEnv "CODEROOT" +> \root -> + forM (Map.elems analysis) <| \target@Target {..} -> + fst case out of + Meta.Bin _ -> + Log.info ["bild", "nix", "python", nschunk namespace] + >> nixBuild loud jobs cpus target + _ -> + Log.info ["bild", "nix", "python", nschunk namespace, "cannot build library"] + >> pure (Exit.ExitSuccess, mempty) + Gcc -> + Log.info ["bild", label, "gcc", nschunk namespace] + >> nixBuild loud jobs cpus target + where + label = case out of + Meta.Bin _ -> "bin" + _ -> "lib" + Ghc -> case out of + Meta.None -> pure (Exit.ExitSuccess, mempty) + Meta.Bin _ -> do + Log.info ["bild", "nix", user <> "@" <> host, nschunk namespace] + result <- nixBuild loud jobs cpus target + if andTest && (isSuccess <| fst result) + then test loud target + else pure result + Meta.Lib _ -> do + Log.info ["bild", "dev", "ghc-lib", nschunk namespace] + proc loud namespace (toNixFlag compiler) compilerFlags + Guile -> do + Log.info ["bild", "dev", "guile", nschunk namespace] + _ <- proc loud namespace (toNixFlag compiler) compilerFlags + case wrapper of + Nothing -> pure (Exit.ExitSuccess, mempty) + Just content -> do + writeFile (root outToPath out) content + p <- Dir.getPermissions <| root outToPath out + Dir.setPermissions (root outToPath out) (Dir.setOwnerExecutable True p) + pure (Exit.ExitSuccess, mempty) + NixBuild -> do + Log.info ["bild", "nix", user <> "@" <> host, nschunk namespace] + proc loud namespace (toNixFlag compiler) + <| compilerFlags + ++ [ "--max-jobs", + Text.pack <| str jobs, + "--cores", + Text.pack <| str cpus + ] + Copy -> do + Log.warn ["bild", "copy", "not implemented yet", nschunk namespace] + pure (Exit.ExitSuccess, mempty) + Rustc -> + Log.info ["bild", "dev", "rust", nschunk namespace] + >> nixBuild loud jobs cpus target + Sbcl -> do + Log.info ["bild", "dev", "lisp", nschunk namespace] + proc loud namespace (toNixFlag compiler) compilerFlags + +data Proc = Proc + { loud :: Bool, + cmd :: String, + args :: [String], + ns :: Namespace, + onFailure :: IO (), + onSuccess :: IO () + } + +-- | Convert minutes to milliseconds. +toMillis :: (Num a) => a -> a +toMillis mins = mins * 60_000_000 + +-- | Run a subprocess, streaming output if --loud is set. 
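+--
+-- A minimal usage sketch with a hypothetical command (the real call sites
+-- are 'proc' and 'nixBuild'):
+--
+-- > run Proc
+-- >   { loud = True,
+-- >     cmd = "echo",
+-- >     args = ["hello"],
+-- >     ns = Namespace ["Omni", "Bild"] Namespace.Hs,
+-- >     onFailure = Log.fail ["echo", "failed"] >> Log.br,
+-- >     onSuccess = pure ()
+-- >   }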
+run :: Proc -> IO (Exit.ExitCode, ByteString) +run Proc {..} = do + IO.hSetBuffering stdout IO.NoBuffering + loud ?| Log.info ["proc", unwords <| map str <| cmd : args] + Conduit.proc cmd args + |> (\proc_ -> proc_ {Process.create_group = True}) + |> Conduit.streamingProcess + +> \(Conduit.UseProvidedHandle, stdout_, stderr_, hdl) -> + (,,) + Async.Concurrently (loud ?: (puts stdout_, logs ns stdout_)) + <*> Async.Concurrently (loud ?: (puts stderr_, logs ns stderr_)) + |> Async.runConcurrently + +> \case + (Exit.ExitFailure n, output, outerr) -> + Conduit.closeStreamingProcessHandle hdl + >> putStr outerr + >> onFailure + >> pure (Exit.ExitFailure n, output) + (Exit.ExitSuccess, output, _) -> + Conduit.closeStreamingProcessHandle hdl + >> onSuccess + >> pure (Exit.ExitSuccess, output) + +-- | Helper for running a standard bild subprocess. +proc :: + Bool -> + Namespace -> + String -> + [Text] -> + IO (Exit.ExitCode, ByteString) +proc loud namespace cmd args = + Proc + { loud = loud, + ns = namespace, + cmd = cmd, + args = map Text.unpack args, + onFailure = Log.fail ["bild", nschunk namespace] >> Log.br, + onSuccess = Log.good ["bild", nschunk namespace] >> Log.br + } + |> run + +-- | Helper for printing during a subprocess +puts :: + Conduit.ConduitT () ByteString (Conduit.ResourceT IO) () -> + IO ByteString +puts src = + Conduit.runConduitRes + <| src + .| Conduit.iterM (liftIO <. putStr) + .| Conduit.foldC + +-- | Like 'puts' but logs the output via 'Omni.Log'. +logs :: + Namespace -> + Conduit.ConduitT () ByteString (Conduit.ResourceT IO) () -> + IO ByteString +logs ns src = + Env.lookupEnv "COLUMNS" + -- is there a better way to set a default? + /> maybe 79 (readMaybe .> fromMaybe 79) + +> \columns -> + src + .| Conduit.iterM + ( ByteString.filter (/= BSI.c2w '\n') + .> (\t -> Log.fmt ["info", "bild", nschunk ns, decodeUtf8 t]) + .> Text.take (columns - 1) + .> (<> "…\r") + .> putStr + ) + .| Conduit.foldC + |> Conduit.runConduitRes + +nschunk :: Namespace -> Text +nschunk = Namespace.toPath .> Text.pack + +haskellImports :: Regex.RE Char String +haskellImports = + Regex.string "import" + *> Regex.some (Regex.psym Char.isSpace) + *> Regex.many (Regex.psym Char.isLower) + *> Regex.many (Regex.psym Char.isSpace) + *> Regex.some (Regex.psym isModuleChar) + <* Regex.many Regex.anySym + +isModuleChar :: Char -> Bool +isModuleChar c = + elem c <| concat [['A' .. 'Z'], ['a' .. 'z'], ['.', '_'], ['0' .. '9']] + +-- | Matches on `(require :package)` forms and returns `package`. The `require` +-- function is technically deprecated in Common Lisp, but no new spec has been +-- published with a replacement, and I don't wanna use asdf, so this is what we +-- use for Lisp imports. 
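+--
+-- Illustrative matches, assuming whole-line input as fed from
+-- 'detectLispImports' (note the regex only accepts [A-Za-z0-9._] in names):
+--
+-- >>> Regex.match lispRequires "(require :alexandria)"
+-- Just "alexandria"
+-- >>> Regex.match lispRequires "(require 'uiop)"
+-- Just "uiop"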
+lispRequires :: Regex.RE Char String
+lispRequires =
+  Regex.string "(require"
+    *> Regex.some (Regex.psym Char.isSpace)
+    *> Regex.many (Regex.psym isQuote)
+    *> Regex.many (Regex.psym isModuleChar)
+    <* Regex.many (Regex.psym (== ')'))
+  where
+    isQuote :: Char -> Bool
+    isQuote c = c `elem` ['\'', ':']
+
+nixBuild :: Bool -> Int -> Int -> Target -> IO (Exit.ExitCode, ByteString)
+nixBuild loud maxJobs cores target@(Target {..}) =
+  Env.getEnv "CODEROOT" +> \root ->
+    instantiate root |> run +> \case
+      (_, "") -> panic "instantiate did not produce a drv"
+      (Exit.ExitSuccess, drv) ->
+        drv
+          |> str
+          |> chomp
+          |> str
+          |> realise
+          |> run
+          >> run symlink
+      x -> pure x
+  where
+    instantiate root =
+      Proc
+        { loud = loud,
+          ns = namespace,
+          cmd = "nix-instantiate",
+          -- Getting the args quoted correctly is harder than it should be. This
+          -- is tightly coupled with the code in the nix builder and there's no
+          -- way around that, methinks.
+          args =
+            [ ["--argstr", "analysisJSON", str <| Aeson.encode <| (Map.singleton namespace target :: Analysis)],
+              ["--arg", "bild", str <| "import " <> root </> "Omni/Bild.nix {}"],
+              [str <| root </> "Omni/Bild/Builder.nix"]
+            ]
+              |> mconcat
+              |> map Text.unpack,
+          onFailure = Log.fail ["bild", "instantiate", nschunk namespace] >> Log.br,
+          onSuccess = pure ()
+        }
+    realise drv =
+      Proc
+        { loud = loud,
+          ns = namespace,
+          cmd = "nix-store",
+          args =
+            [ "--realise",
+              drv,
+              "--add-root",
+              nixdir </> outname out,
+              "--max-jobs",
+              str maxJobs,
+              "--cores",
+              str cores
+            ],
+          onFailure = Log.fail ["bild", "realise", nschunk namespace] >> Log.br,
+          onSuccess = Log.good ["bild", nschunk namespace] >> Log.br
+        }
+    symlink =
+      Proc
+        { loud = loud,
+          ns = namespace,
+          cmd = "ln",
+          args =
+            [ "--relative",
+              "--force",
+              "--symbolic",
+              nixdir </> outname out </> "bin" </> outname out,
+              bindir </> outname out
+            ],
+          onFailure = Log.fail ["bild", "symlink", nschunk namespace] >> Log.br,
+          onSuccess = pure ()
+        }