summaryrefslogtreecommitdiff
path: root/Omni
diff options
context:
space:
mode:
Diffstat (limited to 'Omni')
-rw-r--r--Omni/App.hs71
-rw-r--r--Omni/Auth.hs141
-rw-r--r--Omni/Bild.hs1144
-rw-r--r--Omni/Bild.nix241
-rw-r--r--Omni/Bild/Builder.nix168
-rw-r--r--Omni/Bild/CcacheWrapper.nix57
-rw-r--r--Omni/Bild/Constants.nix1
-rw-r--r--Omni/Bild/Deps.hs694
-rw-r--r--Omni/Bild/Deps.nix45
-rw-r--r--Omni/Bild/Deps/C.nix1
-rw-r--r--Omni/Bild/Deps/Haskell.nix72
-rw-r--r--Omni/Bild/Deps/Python.nix1
-rw-r--r--Omni/Bild/Deps/accelerate.nix16
-rw-r--r--Omni/Bild/Deps/bitsandbytes.nix86
-rw-r--r--Omni/Bild/Deps/guile-opengl.nix32
-rw-r--r--Omni/Bild/Deps/inspekt3d.nix30
-rw-r--r--Omni/Bild/Deps/interegular.nix21
-rw-r--r--Omni/Bild/Deps/lion-pytorch.nix27
-rw-r--r--Omni/Bild/Deps/llm-ollama.nix40
-rw-r--r--Omni/Bild/Deps/llm-sentence-transformers.nix42
-rw-r--r--Omni/Bild/Deps/nostr-rs-relay.nix19
-rw-r--r--Omni/Bild/Deps/outlines.nix34
-rw-r--r--Omni/Bild/Deps/perscache.nix25
-rw-r--r--Omni/Bild/Example.c15
-rw-r--r--Omni/Bild/Example.hs45
-rw-r--r--Omni/Bild/Example.lisp4
-rw-r--r--Omni/Bild/Example.py45
-rw-r--r--Omni/Bild/Example.rs4
-rw-r--r--Omni/Bild/Functions.nix33
-rw-r--r--Omni/Bild/Haskell.nix36
-rw-r--r--Omni/Bild/Meta.hs108
-rw-r--r--Omni/Bild/Nixpkgs.nix43
-rw-r--r--Omni/Bild/Python.nix17
-rw-r--r--Omni/Bild/Sources.json198
-rw-r--r--Omni/Bild/Sources.nix207
-rw-r--r--Omni/Bot.nix61
-rwxr-xr-xOmni/Bot.scm59
-rwxr-xr-xOmni/Ci.sh62
-rw-r--r--Omni/Cli.hs61
-rw-r--r--Omni/Cloud.nix25
-rw-r--r--Omni/Cloud/Chat.nix94
-rw-r--r--Omni/Cloud/Comms.nix5
-rw-r--r--Omni/Cloud/Comms/Coturn.nix10
-rw-r--r--Omni/Cloud/Comms/Jitsi.nix17
-rw-r--r--Omni/Cloud/Comms/Mumble.nix25
-rw-r--r--Omni/Cloud/Comms/Xmpp.nix210
-rw-r--r--Omni/Cloud/Git.nix119
-rw-r--r--Omni/Cloud/Gmnisrv.nix40
-rw-r--r--Omni/Cloud/Grocy.nix17
-rw-r--r--Omni/Cloud/Hardware.nix9
-rw-r--r--Omni/Cloud/Hub.nix57
-rw-r--r--Omni/Cloud/Mail.nix54
-rw-r--r--Omni/Cloud/Networking.nix48
-rw-r--r--Omni/Cloud/NostrRelay.nix39
-rw-r--r--Omni/Cloud/Ports.nix46
-rw-r--r--Omni/Cloud/Web.nix303
-rw-r--r--Omni/Cloud/Znc.nix76
-rwxr-xr-xOmni/Cloud/post-receive.sh39
-rw-r--r--Omni/Dev/Beryllium.nix14
-rw-r--r--Omni/Dev/Beryllium/Configuration.nix115
-rw-r--r--Omni/Dev/Beryllium/Hardware.nix38
-rw-r--r--Omni/Dev/Beryllium/Ollama.nix48
-rw-r--r--Omni/Dev/Dns.nix19
-rw-r--r--Omni/Dev/Guix.nix43
-rw-r--r--Omni/Dev/Hoogle.nix81
-rw-r--r--Omni/Dev/Lithium.nix27
-rw-r--r--Omni/Dev/Lithium/Configuration.nix217
-rw-r--r--Omni/Dev/Lithium/Hardware.nix32
-rw-r--r--Omni/Dev/Networking.nix44
-rw-r--r--Omni/Dev/Vpn.nix33
-rw-r--r--Omni/Id.hs56
-rwxr-xr-xOmni/Ide/MakeTags.py105
-rwxr-xr-xOmni/Ide/ftags.sh21
-rwxr-xr-xOmni/Ide/hoog.sh17
-rwxr-xr-xOmni/Ide/hooks/commit-msg7
-rwxr-xr-xOmni/Ide/hooks/post-applypatch6
-rwxr-xr-xOmni/Ide/hooks/post-checkout20
-rwxr-xr-xOmni/Ide/hooks/post-commit6
-rwxr-xr-xOmni/Ide/hooks/post-merge6
-rwxr-xr-xOmni/Ide/hooks/post-rewrite6
-rwxr-xr-xOmni/Ide/hooks/pre-auto-gc6
-rwxr-xr-xOmni/Ide/hooks/pre-commit21
-rwxr-xr-xOmni/Ide/hooks/pre-push22
-rwxr-xr-xOmni/Ide/hooks/reference-transaction12
-rwxr-xr-xOmni/Ide/ns.sh50
-rwxr-xr-xOmni/Ide/push.sh20
-rwxr-xr-xOmni/Ide/repl.sh84
-rwxr-xr-xOmni/Ide/run.sh6
-rwxr-xr-xOmni/Ide/ship.sh25
-rwxr-xr-xOmni/Ide/tips.sh12
-rwxr-xr-xOmni/Ide/version.sh15
-rw-r--r--Omni/Keys/Ben.pub6
-rw-r--r--Omni/Keys/Deploy.pub1
-rw-r--r--Omni/Keys/Dre.pub2
-rw-r--r--Omni/Keys/Nick.pub1
-rw-r--r--Omni/Lint.hs310
-rw-r--r--Omni/Llamacpp.py43
-rw-r--r--Omni/Log.hs133
-rw-r--r--Omni/Log.py35
-rw-r--r--Omni/Look.hs187
-rw-r--r--Omni/Namespace.hs170
-rw-r--r--Omni/OsBase.nix42
-rw-r--r--Omni/Packages.nix22
-rw-r--r--Omni/Repl.py265
-rwxr-xr-xOmni/Sentry.sh40
-rw-r--r--Omni/Test.hs110
-rw-r--r--Omni/Users.nix60
-rw-r--r--Omni/Wipbase.c185
108 files changed, 8085 insertions, 0 deletions
diff --git a/Omni/App.hs b/Omni/App.hs
new file mode 100644
index 0000000..38ce91b
--- /dev/null
+++ b/Omni/App.hs
@@ -0,0 +1,71 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | General utils for apps
+module Omni.App
+ ( Area (..),
+ CSS (..),
+ HasCss (..),
+ Manifest (..),
+ Html (..),
+ )
+where
+
+import Alpha
+import qualified Clay
+import Data.Aeson (ToJSON)
+import qualified Data.Text.Lazy as Lazy
+import qualified Data.Text.Lazy.Encoding as Lazy
+import Network.HTTP.Media
+ ( (//),
+ (/:),
+ )
+import Servant.API (Accept (..), MimeRender (..))
+import qualified System.Envy as Envy
+
+data Area = Test | Live
+ deriving (Generic, Show)
+
+instance Envy.Var Area where
+ toVar = show
+ fromVar "Test" = Just Test
+ fromVar "Live" = Just Live
+ fromVar _ = Just Test
+
+newtype CSS = CSS
+ { unCSS :: Text
+ }
+
+instance Accept CSS where
+ contentType _ = "text" // "css" /: ("charset", "utf-8")
+
+instance MimeRender CSS Text where
+ mimeRender _ = Lazy.encodeUtf8 <. Lazy.fromStrict
+
+-- | The manifest describes your app for web app thumbnails, iPhone tiles, etc.
+data Manifest = Manifest
+ { name :: Text,
+ short_name :: Text,
+ start_url :: Text,
+ display :: Text,
+ theme_color :: Text,
+ description :: Text
+ }
+ deriving (Show, Eq, Generic)
+
+instance ToJSON Manifest
+
+-- | A wrapper for an HTML page. You need to provide an orphan
+-- 'Lucid.Base.ToHtml' instance in the web module of your app.
+--
+-- Ideally this would be captured in an Omni.App type, with overrides for head
+-- elements, and we wouldn't have to make the same basic orphan instance
+-- for each app.
+newtype Html a = Html a
+ deriving (Show, Eq)
+
+-- | Class for attaching some CSS to a page specifically.
+class HasCss a where
+ cssFor :: a -> Clay.Css
diff --git a/Omni/Auth.hs b/Omni/Auth.hs
new file mode 100644
index 0000000..0f1cb66
--- /dev/null
+++ b/Omni/Auth.hs
@@ -0,0 +1,141 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+
+-- | A module for common OAuth flows.
+--
+-- Consider using authenticate-oauth package
+module Omni.Auth
+ ( -- * OAuth
+ OAuthResponse (..),
+ OAuthArgs (..),
+
+ -- * GitHub Authentication
+ GitHub (..),
+ githubOauth,
+
+ -- * Servant Helpers
+ SetCookies,
+ liveCookieSettings,
+ testCookieSettings,
+ )
+where
+
+import Alpha
+import qualified Data.Aeson as Aeson
+import Network.HTTP.Req ((/:), (=:))
+import qualified Network.HTTP.Req as Req
+import Servant (Header, Headers)
+import qualified Servant.Auth.Server as Auth
+import qualified System.Envy as Envy
+
+-- | Use this instead of 'mempty' for explicitness.
+notset :: Text
+notset = "notset"
+
+-- | Wrapper around 'Auth.SetCookie' that you can put in a servant path
+-- descriptor.
+type SetCookies ret =
+ (Headers '[Header "Set-Cookie" Auth.SetCookie, Header "Set-Cookie" Auth.SetCookie] ret)
+
+liveCookieSettings :: Auth.CookieSettings
+liveCookieSettings =
+ Auth.defaultCookieSettings
+ { Auth.cookieIsSecure = Auth.Secure,
+ -- disable XSRF protection because we don't use any javascript
+ Auth.cookieXsrfSetting = Nothing
+ }
+
+testCookieSettings :: Auth.CookieSettings
+testCookieSettings =
+ Auth.defaultCookieSettings
+ { Auth.cookieIsSecure = Auth.NotSecure,
+ Auth.cookieXsrfSetting = Nothing
+ }
+
+-- | These are arguments that a 3rd-party OAuth provider needs in order for us
+-- to authenticate a user.
+data OAuthArgs = OAuthArgs
+ { clientSecret :: Text,
+ clientId :: Text,
+ clientState :: Text
+ }
+ deriving (Generic, Show)
+
+instance Envy.DefConfig OAuthArgs where
+ defConfig =
+ OAuthArgs
+ { clientSecret = notset,
+ clientId = notset,
+ clientState = notset
+ }
+
+instance Envy.FromEnv OAuthArgs
+
+-- | A type for parsing JSON auth responses, used in 'githubOauth' below.
+-- Should be moved to Omni.Auth with others.
+data OAuthResponse = OAuthResponse
+ { access_token :: Text,
+ scope :: Text,
+ token_type :: Text
+ }
+ deriving (Generic, Aeson.FromJSON)
+
+newtype GitHub = GitHub OAuthArgs
+ deriving (Show, Generic)
+
+instance Envy.DefConfig GitHub where
+ defConfig =
+ GitHub
+ <| OAuthArgs
+ { clientSecret = notset,
+ clientId = notset,
+ clientState = notset
+ }
+
+instance Envy.FromEnv GitHub where
+ fromEnv Nothing =
+ GitHub
+ </ Envy.gFromEnvCustom
+ Envy.Option
+ { Envy.dropPrefixCount = 0,
+ Envy.customPrefix = "GITHUB"
+ }
+ Nothing
+ fromEnv (Just (GitHub x)) =
+ GitHub
+ </ Envy.gFromEnvCustom
+ Envy.Option
+ { Envy.dropPrefixCount = 0,
+ Envy.customPrefix = "GITHUB"
+ }
+ (Just x)
+
+-- | POST to GitHub's OAuth service and get the user's oAuth token.
+githubOauth ::
+ GitHub ->
+ -- | This should be GitHub.Token but GitHub.Auth doesn't export Token.
+ Text ->
+ IO OAuthResponse
+githubOauth (GitHub OAuthArgs {..}) code =
+ accessTokenRequest
+ /> Req.responseBody
+ |> Req.runReq Req.defaultHttpConfig
+ where
+ accessTokenRequest :: Req.Req (Req.JsonResponse OAuthResponse)
+ accessTokenRequest =
+ Req.req
+ Req.POST
+ (Req.https "github.com" /: "login" /: "oauth" /: "access_token")
+ Req.NoReqBody
+ Req.jsonResponse
+ <| "client_id"
+ =: clientId
+ <> "client_secret"
+ =: clientSecret
+ <> "code"
+ =: code
+ <> "state"
+ =: clientState
diff --git a/Omni/Bild.hs b/Omni/Bild.hs
new file mode 100644
index 0000000..60253c0
--- /dev/null
+++ b/Omni/Bild.hs
@@ -0,0 +1,1144 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE TupleSections #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | A specific-purpose build tool.
+--
+-- : out bild
+-- : run git
+--
+-- == Design constraints
+--
+-- * The only input is one or more namespaces. No subcommands, no packages,
+-- no targets.
+--
+-- * No need to write specific build rules. One rule for hs, one for rs, one
+-- for scm, and so on.
+--
+-- * No need to distinguish between exe and lib because we only build
+-- exes; 'libs' are just source files in the tree.
+--
+-- * Never concerned with deployment/packaging - leave that to another tool
+-- (scp? tar?)
+--
+-- * Ability to do local dev builds should be preserved, while remote nix
+-- builds are used for the final package.
+--
+-- == Features
+--
+-- * Namespace maps to filesystem
+--
+-- * no need for `bild -l` for listing available targets.
+-- Use `ls` or `tree`
+--
+-- * you build namespaces, not files/modules/packages/etc
+--
+-- * Namespace maps to language modules
+--
+-- * build settings can be set in the file comments, or special 'bild'
+-- args
+--
+-- * pwd is always considered the source directory,
+-- no `src` vs `doc` etc.
+--
+-- * Build rules automatically detected from file extensions
+--
+-- * Flags modify the way to interact with the build, some ideas:
+--
+-- * -p = turn on profiling
+--
+-- * -o = optimize level
+--
+-- * The build is planned out with an analysis, which can be viewed
+-- beforehand with `--plan`. The analysis includes compiler flags, which
+-- can be used in `repl` for testing compilation locally.
+--
+-- * (WIP) Nix is used by default to build everything on a remote build
+-- machine, but local, non-Nix builds can be accomplished with `--dev`.
+--
+-- == Example Commands
+--
+-- > bild [opts] <target..>
+--
+-- The general scheme is to build the things described by the targets. A target
+-- is a namespace. You can list as many as you want, but you must list at least
+-- one. It could just be `:!bild %` in vim to build whatever you're working on,
+-- or `bild **/*` to build everything, or `fd .hs -X bild` to build all Haskell
+-- files.
+--
+-- Build outputs will go into the `_` directory in the root of the project.
+--
+-- > bild A/B.hs
+--
+-- This will build the file at ./A/B.hs, which translates to something like
+-- `ghc --make A.B`.
+--
+-- == Build Metadata
+--
+-- Metadata is set in the comments with a special syntax. For system-level deps,
+-- we list the deps in comments in the target file, like:
+--
+-- > -- : sys cmark
+-- > -- : sys libssl
+--
+-- The name is used to lookup the package in `nixpkgs.pkgs.<name>`. Only one
+-- package can be listed per line. Language-level deps can be automatically
+-- determined by passing parsed import statements to a package database, eg
+-- `ghc-pkg find-module`. If such a package database is not available, we either
+-- keep a hand-written index that maps imports to packages, or we just list the
+-- name of the package with:
+--
+-- > -- : dep package
+--
+-- The output executable is named with:
+--
+-- > -- : out my-program
+--
+-- or
+--
+-- > -- : out my-app.js
+--
+-- When multiple compilers are possible we use the @out@ extension to determine
+-- target platform. If @out@ does not have an extension, each build type falls
+-- back to a default, namely an executable binary.
+--
+-- This method of setting metadata in the module comments works pretty well,
+-- and really only needs to be done in the entrypoint module anyway.
+--
+-- Local module deps are included by just giving the repo root to the underlying
+-- compiler for the target, and the compiler does the work of walking the source
+-- tree.
+module Omni.Bild where
+
+import Alpha hiding (sym, (<.>))
+import qualified Conduit
+import qualified Control.Concurrent.Async as Async
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString as ByteString
+import qualified Data.ByteString.Char8 as Char8
+import qualified Data.ByteString.Internal as BSI
+import qualified Data.ByteString.Lazy as ByteString.Lazy
+import qualified Data.Char as Char
+import Data.Conduit ((.|))
+import qualified Data.Conduit.Combinators as Conduit
+import qualified Data.Conduit.Process as Conduit
+import qualified Data.List as List
+import qualified Data.Map as Map
+import qualified Data.Set as Set
+import qualified Data.String as String
+import qualified Data.Text as Text
+import qualified Data.Text.IO as Text.IO
+import qualified GHC.Conc as GHC
+import qualified Network.HostName as HostName
+import qualified Omni.Bild.Meta as Meta
+import qualified Omni.Cli as Cli
+import qualified Omni.Log as Log
+import Omni.Namespace (Namespace (..))
+import qualified Omni.Namespace as Namespace
+import Omni.Test ((@=?))
+import qualified Omni.Test as Test
+import qualified System.Directory as Dir
+import qualified System.Environment as Env
+import qualified System.Exit as Exit
+import System.FilePath (replaceExtension, (</>))
+import qualified System.IO as IO
+import System.IO.Unsafe (unsafePerformIO)
+import qualified System.Process as Process
+import qualified System.Timeout as Timeout
+import qualified Text.Regex.Applicative as Regex
+
+main :: IO ()
+main = Cli.Plan help move test_ pure |> Cli.main
+ where
+ test_ =
+ Test.group
+ "Omni.Bild"
+ [ test_bildBild,
+ test_bildExamples,
+ test_isGitIgnored,
+ test_isGitHook,
+ test_detectPythonImports
+ ]
+
+test_bildBild :: Test.Tree
+test_bildBild =
+ Test.unit "can bild bild" <| do
+ root <- Env.getEnv "CODEROOT"
+ path <- Dir.makeAbsolute "Omni/Bild.hs"
+ case Namespace.fromPath root path of
+ Nothing -> Test.assertFailure "can't find ns for bild"
+ Just ns ->
+ analyze mempty ns
+ +> build False False 1 2
+ +> \case
+ [Exit.ExitFailure _] ->
+ Test.assertFailure "can't bild bild"
+ _ ->
+ pure ()
+
+test_bildExamples :: Test.Tree
+test_bildExamples =
+ Test.unit "can bild examples" <| do
+ Env.getEnv "CODEROOT" +> \root ->
+ ["c", "hs", "lisp", "rs"]
+ |> map ("Omni/Bild/Example." <>)
+ |> traverse Dir.makeAbsolute
+ /> map (Namespace.fromPath root)
+ /> catMaybes
+ +> foldM analyze mempty
+ +> build False False 4 1
+ +> \case
+ [] -> Test.assertFailure "asdf"
+ xs -> all (== Exit.ExitSuccess) xs @=? True
+
+move :: Cli.Arguments -> IO ()
+move args =
+ IO.hSetBuffering stdout IO.NoBuffering
+ >> Env.getEnv "CODEROOT"
+ +> \root ->
+ Cli.getAllArgs args (Cli.argument "target")
+ |> filterM Dir.doesFileExist
+ +> filterM (\x -> isGitIgnored x /> don't)
+ /> filter (\x -> isGitHook x |> don't)
+ +> traverse Dir.makeAbsolute
+ +> traverse (namespaceFromPathOrDie root)
+ /> filter isBuildableNs
+ +> foldM analyze mempty
+ +> printOrBuild
+ |> Timeout.timeout (toMillis minutes)
+ +> \case
+ Nothing ->
+ Log.br
+ >> Log.fail ["bild", "timeout after " <> tshow minutes <> " minutes"]
+ >> Log.br
+ >> exitWith (ExitFailure 124)
+ Just s -> exitSummary s
+ where
+ minutes =
+ Cli.getArgWithDefault args "10" (Cli.longOption "time")
+ |> readMaybe
+ |> \case
+ Nothing -> panic "could not read --time argument"
+ Just n -> (n == 0) ?: (-1, n)
+ printOrBuild :: Analysis -> IO [ExitCode]
+ printOrBuild targets
+ | Map.null targets =
+ Log.wipe
+ >> Log.fail ["bild", "nothing to build"]
+ >> Log.br
+ >> exitWith (ExitFailure 1)
+ | args `Cli.has` Cli.longOption "plan" =
+ Log.wipe >> putJSON targets >> pure [Exit.ExitSuccess]
+ | otherwise = do
+ root <- Env.getEnv "CODEROOT"
+ nproc <- GHC.getNumProcessors
+ createHier root
+ build isTest isLoud jobs (cpus nproc) targets
+ cpus :: Int -> Int
+ cpus nproc =
+ Cli.longOption "cpus"
+ |> Cli.getArgWithDefault args (str <| (nproc - 4) `div` jobs)
+ |> readMaybe
+ |> \case
+ Nothing -> panic "could not read --cpus argument"
+ Just n -> n
+ jobs :: Int
+ jobs =
+ Cli.longOption "jobs"
+ |> Cli.getArgWithDefault args "6"
+ |> readMaybe
+ |> \case
+ Nothing -> panic "could not read --jobs argument"
+ Just n -> n
+ isTest = args `Cli.has` Cli.longOption "test"
+ isLoud = args `Cli.has` Cli.longOption "loud"
+ putJSON = Aeson.encode .> ByteString.Lazy.toStrict .> Char8.putStrLn
+
+-- | Don't try to build stuff that isn't part of the git repo.
+isGitIgnored :: FilePath -> IO Bool
+isGitIgnored path =
+ Process.readProcessWithExitCode "git" ["check-ignore", path] ""
+ +> \case
+ (ExitSuccess, _, _) -> pure True
+ (ExitFailure _, _, _) -> pure False
+
+test_isGitIgnored :: Test.Tree
+test_isGitIgnored =
+ Test.group
+ "isGitIgnored"
+ [ Test.unit "filters one" <| do
+ res <- isGitIgnored "_"
+ res @=? True,
+ Test.unit "filters many" <| do
+ traverse isGitIgnored ["Omni/Bild.hs", "TAGS"]
+ +> (@=? [False, True])
+ ]
+
+isGitHook :: FilePath -> Bool
+isGitHook path =
+ "Omni/Ide/hooks" `List.isInfixOf` path
+
+test_isGitHook :: Test.Tree
+test_isGitHook =
+ Test.group
+ "isGitHook"
+ [ Test.unit "filters pre-commit hook" <| do
+ root <- Env.getEnv "CODEROOT"
+ True @=? (isGitHook <| root <> "/Omni/Ide/hooks/pre-commit"),
+ Test.unit "doesn't filter non-hooks" <| do
+ root <- Env.getEnv "CODEROOT"
+ False @=? (isGitHook <| root <> "/Omni/Bild.hs")
+ ]
+
+namespaceFromPathOrDie :: FilePath -> FilePath -> IO Namespace
+namespaceFromPathOrDie root path =
+ Namespace.fromPath root path |> \case
+ Just x -> pure x
+ Nothing ->
+ Log.fail ["bild", str path, "could not get namespace"]
+ >> Log.br
+ >> exitWith (ExitFailure 1)
+
+nixStore :: String
+nixStore = "/nix/store/00000000000000000000000000000000-"
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+bild
+
+Usage:
+ bild test
+ bild [options] <target>...
+ bild --help
+
+Options:
+ --test, -t Run tests on a target after building
+ --loud, -l Show all output from compiler
+ --plan, -p Print the build plan as JSON, don't build
+ --time N Set timeout to N minutes, 0 means never timeout [default: 10]
+ --jobs N, -j N Build up to N jobs at once [default: 6]
+ --cpus N, -c N Allocate up to N cpu cores per job (default: (nproc-4)/jobs)
+ --help, -h Print this info
+|]
+
+exitSummary :: [Exit.ExitCode] -> IO ()
+exitSummary exits =
+ if failures > 0
+ then Exit.die <| show failures
+ else Exit.exitSuccess
+ where
+ failures = length <| filter isFailure exits
+
+data Compiler
+ = Copy
+ | CPython
+ | Gcc
+ | Ghc
+ | Guile
+ | NixBuild
+ | Rustc
+ | Sbcl
+ deriving (Eq, Show, Generic)
+
+compilerExe :: (IsString a) => Compiler -> a
+compilerExe = \case
+ Copy -> "cp"
+ CPython -> "python"
+ Gcc -> "gcc"
+ Ghc -> "ghc"
+ Guile -> "guild"
+ NixBuild -> "nix-build"
+ Rustc -> "rustc"
+ Sbcl -> "sbcl"
+
+instance Aeson.ToJSON Compiler where
+ toJSON = Aeson.String <. compilerExe
+
+instance ToNixFlag Compiler where
+ toNixFlag = compilerExe
+
+-- | Type alias for making sure that the path is qualified, meaning it starts at
+-- the root of the repo, and is not an absolute path nor a subpath
+type QualifiedPath = FilePath
+
+data Target = Target
+ { -- | Output name
+ out :: Meta.Out,
+ -- | Output path (into cabdir)
+ outPath :: FilePath,
+ -- | Fully qualified namespace partitioned by '.'
+ namespace :: Namespace,
+ -- | Path to file, qualified based on the root of the git directory
+ quapath :: QualifiedPath,
+ -- | Main module name, formatted as the language expects
+ mainModule :: String,
+ -- | Name of the packageset in Bild.nix to pull langdeps from
+ packageSet :: Text,
+ -- | Language-specific dependencies, required during compilation
+ langdeps :: Set Meta.Dep,
+ -- | Local source files on which this target depends
+ srcs :: Set FilePath,
+ -- | System-level dependencies, required during runtime either via PATH or
+ -- linking, depending on the language
+ sysdeps :: Set Meta.Dep,
+ -- | Which compiler should we use?
+ compiler :: Compiler,
+ -- | Which nix build expression?
+ builder :: Text,
+ -- | Who is building this?
+ user :: Text,
+ -- | Where are they building it?
+ host :: Text,
+ -- | Flags and arguments passed to 'Compiler' when building
+ compilerFlags :: [Text],
+ -- | Wrapper script (if necessary)
+ wrapper :: Maybe Text,
+ -- | Runtime dependences
+ rundeps :: Set Meta.Run
+ }
+ deriving (Show, Generic, Aeson.ToJSON)
+
+-- | Use this to just get a target to play with at the repl.
+dev_getTarget :: FilePath -> IO Target
+dev_getTarget fp = do
+ root <- Env.getEnv "CODEROOT"
+ path <- Dir.makeAbsolute fp
+ Namespace.fromPath root path
+ |> \case
+ Nothing -> panic "Could not get namespace from path"
+ Just ns ->
+ analyze mempty ns
+ /> Map.lookup ns
+ /> \case
+ Nothing -> panic "Could not retrieve target from analysis"
+ Just t -> t
+
+data Builder
+ = -- | Local <user> <host>
+ Local Text Text
+ | -- | Remote <user> <host>
+ Remote Text Text
+ deriving (Show, Generic)
+
+instance Aeson.ToJSON Builder where
+ toJSON (Local u host) = Aeson.String <| u <> "@" <> host
+ toJSON (Remote u host) = Aeson.String <| u <> "@" <> host
+
+class ToNixFlag a where
+ toNixFlag :: a -> String
+
+instance ToNixFlag Builder where
+ toNixFlag = \case
+ Local _ _ -> mempty
+ Remote u h -> Text.unpack <| Text.concat ["ssh://", u, "@", h, "?ssh-key=/home/", u, "/.ssh/id_rsa"]
+
+-- | We can't build everything yet...
+isBuildableNs :: Namespace -> Bool
+isBuildableNs = \case
+ (Namespace _ Namespace.C) -> True
+ (Namespace _ Namespace.Css) -> False
+ (Namespace _ Namespace.Hs) -> True
+ (Namespace _ Namespace.Html) -> False
+ (Namespace _ Namespace.Json) -> False
+ (Namespace _ Namespace.Keys) -> False
+ (Namespace _ Namespace.Lisp) -> True
+ (Namespace _ Namespace.Md) -> False
+ (Namespace path Namespace.Nix)
+ | path `elem` nixTargets -> True
+ | otherwise -> False
+ (Namespace _ Namespace.None) -> False
+ (Namespace _ Namespace.Py) -> True
+ (Namespace _ Namespace.Sh) -> False
+ (Namespace _ Namespace.Scm) -> True
+ (Namespace _ Namespace.Rs) -> True
+ (Namespace _ Namespace.Toml) -> True
+ where
+ nixTargets =
+ [ ["Biz", "Pie"],
+ ["Biz", "Que"],
+ ["Omni", "Cloud"],
+ ["Omni", "Dev"],
+ ["Omni", "Dev", "Lithium"],
+ ["Omni", "Dev", "Beryllium"],
+ ["Biz", "Dragons", "Analysis"]
+ ]
+
+-- | The default output directory. This is not IO because I don't want to
+-- refactor all of my code right now, but it probably should be.
+cab :: FilePath
+cab =
+ Env.lookupEnv "CABDIR"
+ /> fromMaybe "_"
+ |> unsafePerformIO
+
+outToPath :: Meta.Out -> FilePath
+outToPath = \case
+ Meta.Bin o -> cab </> "bin" </> o
+ Meta.Lib o -> cab </> "lib" </> o
+ Meta.None -> mempty
+
+outname :: Meta.Out -> FilePath
+outname = \case
+ Meta.Bin o -> o
+ Meta.Lib o -> o
+ Meta.None -> mempty
+
+bindir, intdir, nixdir, vardir :: FilePath
+bindir = cab </> "bin"
+intdir = cab </> "int"
+nixdir = cab </> "nix"
+vardir = cab </> "var"
+
+-- | Emulate the *nix hierarchy in the cabdir.
+createHier :: String -> IO ()
+createHier root =
+ traverse_
+ (Dir.createDirectoryIfMissing True)
+ [ root </> (outToPath <| Meta.Bin ""),
+ root </> (outToPath <| Meta.Lib ""),
+ root </> intdir,
+ root </> nixdir,
+ root </> vardir
+ ]
+
+-- >>> removeVersion "array-0.5.4.0-DFLKGIjfsadi"
+-- "array"
+removeVersion :: String -> String
+removeVersion = takeWhile (/= '.') .> butlast2
+ where
+ butlast2 s = take (length s - 2) s
+
+type Analysis = Map Namespace Target
+
+analyze :: Analysis -> Namespace -> IO Analysis
+analyze hmap ns = case Map.lookup ns hmap of
+ Nothing -> do
+ mTarget <- analyzeOne ns
+ pure <| maybe hmap (\t -> Map.insert ns t hmap) mTarget
+ Just _ -> pure hmap
+ where
+ analyzeOne :: Namespace -> IO (Maybe Target)
+ analyzeOne namespace@(Namespace _ ext) = do
+ let path = Namespace.toPath namespace
+ root <- Env.getEnv "CODEROOT"
+ let abspath = root </> path
+ let quapath = path
+ user <- Env.getEnv "USER" /> Text.pack
+ host <- HostName.getHostName /> Text.pack
+ Log.info ["bild", "analyze", str path]
+ contentLines <-
+ withFile abspath ReadMode <| \h ->
+ IO.hSetEncoding h IO.utf8_bom
+ >> Text.IO.hGetContents h
+ /> Text.lines
+ case ext of
+ -- basically we don't support building these
+ Namespace.Css -> pure Nothing
+ Namespace.Json -> pure Nothing
+ Namespace.Keys -> pure Nothing
+ Namespace.Md -> pure Nothing
+ Namespace.None -> pure Nothing
+ Namespace.Html -> pure Nothing
+ Namespace.Toml -> pure Nothing
+ Namespace.Py ->
+ contentLines
+ |> Meta.detectAll "#"
+ |> \Meta.Parsed {..} ->
+ detectPythonImports contentLines +> \srcs ->
+ Target
+ { builder = "python",
+ wrapper = Nothing,
+ compiler = CPython,
+ compilerFlags =
+ -- This doesn't really make sense for python, but I'll leave
+ -- it here for eventual --dev builds
+ [ "-c",
+ "\"import py_compile;import os;"
+ <> "py_compile.compile(file='"
+ <> str quapath
+ <> "', cfile=os.getenv('CODEROOT')+'/_/int/"
+ <> str quapath
+ <> "', doraise=True)\""
+ ],
+ sysdeps = psys,
+ langdeps = pdep,
+ outPath = outToPath pout,
+ out = pout,
+ packageSet = "python.packages",
+ mainModule = Namespace.toModule namespace,
+ rundeps = prun,
+ ..
+ }
+ |> Just
+ |> pure
+ Namespace.Sh -> pure Nothing
+ Namespace.C ->
+ Meta.detectAll "//" contentLines |> \Meta.Parsed {..} -> do
+ Target
+ { langdeps = pdep,
+ sysdeps = psys,
+ wrapper = Nothing,
+ compiler = Gcc,
+ builder = "c",
+ out = pout,
+ packageSet = "c.packages",
+ mainModule = Namespace.toModule namespace,
+ compilerFlags = case pout of
+ Meta.Bin o ->
+ ["-o", o, path] <> Set.toList parg |> map Text.pack
+ _ -> panic "can only bild C exes, not libs",
+ outPath = outToPath pout,
+ -- implement detectCImports, then I can fill this out
+ srcs = Set.empty,
+ rundeps = prun,
+ ..
+ }
+ |> Just
+ |> pure
+ Namespace.Hs ->
+ contentLines
+ |> Meta.detectAll "--"
+ |> \Meta.Parsed {..} ->
+ detectHaskellImports hmap contentLines +> \(langdeps, srcs) ->
+ Target
+ { builder = "haskell",
+ wrapper = Nothing,
+ compiler = Ghc,
+ packageSet = "haskell.packages",
+ mainModule = Namespace.toModule namespace,
+ compilerFlags =
+ [ "-Wall",
+ "-Werror",
+ "-haddock",
+ "-Winvalid-haddock",
+ "-threaded",
+ "-i$CODEROOT",
+ "-odir",
+ ".",
+ "-hidir",
+ ".",
+ "--make",
+ "$CODEROOT" </> quapath
+ ]
+ ++ case pout of
+ Meta.Bin o ->
+ [ "-main-is",
+ Namespace.toHaskellModule namespace,
+ "-o",
+ o
+ ]
+ _ -> []
+ |> map Text.pack,
+ sysdeps = Meta.detect (Meta.sys "--") contentLines,
+ outPath = outToPath pout,
+ rundeps = prun,
+ out = pout,
+ ..
+ }
+ |> Just
+ |> pure
+ Namespace.Lisp ->
+ Meta.detectOut (Meta.out ";;") contentLines |> \out -> do
+ langdeps <- detectLispImports contentLines
+ Just
+ </ pure
+ Target
+ { sysdeps = Set.empty,
+ wrapper = Nothing,
+ compiler = Sbcl,
+ packageSet = "lisp.sbclWith",
+ mainModule = Namespace.toModule namespace,
+ compilerFlags =
+ map
+ Text.pack
+ [ "--eval",
+ "(require :asdf)",
+ "--load",
+ quapath,
+ "--eval",
+ "(sb-ext:save-lisp-and-die #p\"" <> (root </> outToPath out) <> "\" :toplevel #'main :executable t)"
+ ],
+ builder = "base",
+ outPath = outToPath out,
+ -- add local src imports to detectLispImports, then i can fill this out
+ srcs = Set.empty,
+ rundeps = Set.empty,
+ ..
+ }
+ Namespace.Nix ->
+ (host == "lithium") ?: (Local user "lithium", Remote user "dev.simatime.com") |> \builder ->
+ Target
+ { langdeps = Set.empty,
+ wrapper = Nothing,
+ sysdeps = Set.empty,
+ compiler = NixBuild,
+ compilerFlags =
+ [ quapath,
+ "--out-link",
+ root </> nixdir </> Namespace.toPath namespace,
+ "--builders",
+ toNixFlag builder,
+ "--arg",
+ "bild",
+ str <| "import " <> root </> "Omni/Bild.nix {}"
+ ]
+ |> map Text.pack,
+ out = Meta.None,
+ outPath = outToPath Meta.None,
+ srcs = Set.empty,
+ packageSet = "",
+ mainModule = Namespace.toModule namespace,
+ builder = "base",
+ rundeps = Set.empty,
+ ..
+ }
+ |> Just
+ |> pure
+ Namespace.Scm ->
+ Meta.detectAll ";;" contentLines |> \Meta.Parsed {..} ->
+ Target
+ { langdeps = pdep,
+ sysdeps = psys,
+ compiler = Guile,
+ packageSet = "scheme.guilePackages",
+ mainModule = Namespace.toModule namespace,
+ compilerFlags =
+ [ "compile",
+ "--r7rs",
+ "--load-path=" ++ root,
+ "--output=" ++ root </> intdir </> replaceExtension quapath ".scm.go",
+ quapath
+ ]
+ |> map Text.pack,
+ builder = "base",
+ outPath = outToPath pout,
+ out = pout,
+ srcs = Set.empty, -- implement detectSchemeImports
+ -- TODO: wrapper should just be removed, instead rely on
+ -- upstream nixpkgs builders to make wrappers
+ wrapper =
+ (pout == Meta.None)
+ ?: ( Nothing,
+ [ "#!/usr/bin/env bash",
+ "guile -C \""
+ <> root
+ </> intdir
+ <> "\" -e main "
+ <> "-s "
+ <> Namespace.toPath namespace
+ <> " \"$@\""
+ ]
+ |> joinWith "\n"
+ |> Text.pack
+ |> Just
+ ),
+ rundeps = prun,
+ ..
+ }
+ |> Just
+ |> pure
+ Namespace.Rs ->
+ Meta.detectAll "//" contentLines |> \Meta.Parsed {..} ->
+ Target
+ { langdeps = pdep,
+ -- this packageSet doesn't actually exist because everyone in
+ -- nix just generates nix expressions for rust dependencies with
+ -- Cargo.lock, so I have to make it in order to use rust deps
+ packageSet = "rust.packages",
+ mainModule = Namespace.toModule namespace,
+ wrapper = Nothing,
+ sysdeps = psys <> Set.singleton "rustc",
+ out = pout,
+ compiler = Rustc,
+ compilerFlags = case pout of
+ Meta.Bin o ->
+ map
+ Text.pack
+ [ "$CODEROOT" </> path,
+ "-o",
+ o
+ ]
+ _ -> panic "can't build rust libs",
+ builder = "base",
+ outPath = outToPath pout,
+ -- implement detectRustImports
+ srcs = Set.empty,
+ rundeps = prun,
+ ..
+ }
+ |> Just
+ |> pure
+
+detectHaskellImports :: Analysis -> [Text] -> IO (Set Meta.Dep, Set FilePath)
+detectHaskellImports hmap contentLines =
+ Env.getEnv "CODEROOT" +> \root ->
+ contentLines
+ /> Text.unpack
+ /> Regex.match haskellImports
+ |> catMaybes
+ |> \imports ->
+ foldM ghcPkgFindModule Set.empty imports
+ +> \pkgs ->
+ filepaths imports
+ +> \files ->
+ findDeps root files
+ +> \deps ->
+ (pkgs <> deps, map (stripRoot root) files |> Set.fromList)
+ |> pure
+ where
+ filepaths :: [String] -> IO [FilePath]
+ filepaths imports =
+ imports
+ |> map Namespace.fromHaskellModule
+ |> map Namespace.toPath
+ |> traverse Dir.makeAbsolute
+ +> filterM Dir.doesFileExist
+ findDeps :: String -> [FilePath] -> IO (Set Meta.Dep)
+ findDeps root fps =
+ fps
+ |> traverse (pure <. Namespace.fromPath root)
+ /> catMaybes
+ -- this is still an inefficiency, because this recurses before the
+ -- hmap is updated by the fold, transitive imports will be
+ -- re-visited. you can see this with `TERM=dumb bild`. to fix this i
+ -- need shared state instead of a fold, or figure out how to do a
+ -- breadth-first search instead of depth-first.
+ +> foldM analyze (onlyHaskell hmap)
+ /> Map.elems
+ /> map langdeps
+ /> mconcat
+ onlyHaskell :: Analysis -> Analysis
+ onlyHaskell = Map.filterWithKey (\ns _ -> ext ns == Namespace.Hs)
+
+stripRoot :: FilePath -> FilePath -> FilePath
+stripRoot root f = fromMaybe f (List.stripPrefix (root <> "/") f)
+
+-- | Extract the package names of @(require :pkg)@ forms from the given
+-- source lines.
+detectLispImports :: [Text] -> IO (Set Meta.Dep)
+detectLispImports contentLines =
+  contentLines
+    |> map (Text.unpack .> Regex.match lispRequires)
+    |> catMaybes
+    |> Set.fromList
+    |> pure
+
+-- | Finds local imports. Does not recurse to find transitive imports like
+-- 'detectHaskellImports' does. Someday I will refactor these detection
+-- functions and have a common, well-performing, complete solution.
+-- Returns the root-relative paths of imported modules that exist on disk.
+detectPythonImports :: [Text] -> IO (Set FilePath)
+detectPythonImports contentLines =
+  contentLines
+    /> Text.unpack
+    /> Regex.match pythonImport
+    |> catMaybes
+    /> Namespace.fromPythonModule
+    /> Namespace.toPath
+    |> filterM Dir.doesPathExist
+    /> Set.fromList
+  where
+    -- only detects 'import x' because I don't like 'from'
+    pythonImport :: Regex.RE Char String
+    pythonImport =
+      Regex.string "import"
+        *> Regex.some (Regex.psym Char.isSpace)
+        *> Regex.many (Regex.psym isModuleChar)
+        <* Regex.many Regex.anySym
+
+-- | Unit tests for 'detectPythonImports'. These rely on Omni/Log.py
+-- existing in the working tree, since the detector filters by
+-- 'Dir.doesPathExist'.
+test_detectPythonImports :: Test.Tree
+test_detectPythonImports =
+  Test.group
+    "detectPythonImports"
+    [ Test.unit "matches import statements" <| do
+        set <- detectPythonImports ["import Omni.Log"]
+        Set.fromList ["Omni/Log.py"] @=? set,
+      Test.unit "matches import as statements" <| do
+        set <- detectPythonImports ["import Omni.Log as Log"]
+        Set.fromList ["Omni/Log.py"] @=? set
+    ]
+
+-- | Ask @ghc-pkg@ which package(s) expose module @m@ and add the package
+-- names to the accumulator. Reads the package database location from
+-- GHC_PACKAGE_PATH (set by the bild wrapper in Omni/Bild.nix).
+ghcPkgFindModule :: Set String -> String -> IO (Set String)
+ghcPkgFindModule acc m =
+  Env.getEnv "GHC_PACKAGE_PATH" +> \packageDb ->
+    Process.readProcess
+      "ghc-pkg"
+      ["--package-db", packageDb, "--names-only", "--simple-output", "find-module", m]
+      ""
+      /> String.lines
+      /> Set.fromList
+      /> Set.union acc
+
+-- | True when the exit code signals failure.
+isFailure :: Exit.ExitCode -> Bool
+isFailure exitCode = case exitCode of
+  Exit.ExitSuccess -> False
+  Exit.ExitFailure _ -> True
+
+-- | True when the exit code signals success.
+isSuccess :: Exit.ExitCode -> Bool
+isSuccess exitCode = case exitCode of
+  Exit.ExitSuccess -> True
+  Exit.ExitFailure _ -> False
+
+-- | Run a built target's own test suite by invoking the built binary with
+-- a "test" argument. Only implemented for Ghc targets; all other compilers
+-- log a warning and report failure.
+test :: Bool -> Target -> IO (Exit.ExitCode, ByteString)
+test loud Target {..} = case compiler of
+  Ghc -> do
+    root <- Env.getEnv "CODEROOT"
+    run
+      <| Proc
+        { loud = loud,
+          cmd = root </> outToPath out,
+          args = ["test"],
+          ns = namespace,
+          onFailure = Log.fail ["test", nschunk namespace] >> Log.br,
+          onSuccess = Log.pass ["test", nschunk namespace] >> Log.br
+        }
+  _ ->
+    Log.warn ["test", nschunk namespace, "unavailable"]
+      >> Log.br
+      >> pure (Exit.ExitFailure 1, mempty)
+
+-- | Build every target in the analysis, dispatching on its compiler, and
+-- return one exit code per target. When @andTest@ is set, Ghc binaries
+-- that build successfully also run their test suite via 'test'.
+build :: Bool -> Bool -> Int -> Int -> Analysis -> IO [Exit.ExitCode]
+build andTest loud jobs cpus analysis =
+  Env.getEnv "CODEROOT" +> \root ->
+    forM (Map.elems analysis) <| \target@Target {..} ->
+      fst </ case compiler of
+        CPython -> case out of
+          Meta.Bin _ ->
+            Log.info ["bild", "nix", "python", nschunk namespace]
+              >> nixBuild loud jobs cpus target
+          _ ->
+            Log.info ["bild", "nix", "python", nschunk namespace, "cannot build library"]
+              >> pure (Exit.ExitSuccess, mempty)
+        Gcc ->
+          Log.info ["bild", label, "gcc", nschunk namespace]
+            >> nixBuild loud jobs cpus target
+          where
+            label = case out of
+              Meta.Bin _ -> "bin"
+              _ -> "lib"
+        Ghc -> case out of
+          Meta.None -> pure (Exit.ExitSuccess, mempty)
+          Meta.Bin _ -> do
+            Log.info ["bild", "nix", user <> "@" <> host, nschunk namespace]
+            result <- nixBuild loud jobs cpus target
+            if andTest && (isSuccess <| fst result)
+              then test loud target
+              else pure result
+          Meta.Lib _ -> do
+            -- libraries build in-place with ghc, not nix
+            Log.info ["bild", "dev", "ghc-lib", nschunk namespace]
+            proc loud namespace (toNixFlag compiler) compilerFlags
+        Guile -> do
+          Log.info ["bild", "dev", "guile", nschunk namespace]
+          _ <- proc loud namespace (toNixFlag compiler) compilerFlags
+          -- guile targets may carry a wrapper script, written out executable
+          case wrapper of
+            Nothing -> pure (Exit.ExitSuccess, mempty)
+            Just content -> do
+              writeFile (root </> outToPath out) content
+              p <- Dir.getPermissions <| root </> outToPath out
+              Dir.setPermissions (root </> outToPath out) (Dir.setOwnerExecutable True p)
+              pure (Exit.ExitSuccess, mempty)
+        NixBuild -> do
+          Log.info ["bild", "nix", user <> "@" <> host, nschunk namespace]
+          proc loud namespace (toNixFlag compiler)
+            <| compilerFlags
+            ++ [ "--max-jobs",
+                 Text.pack <| str jobs,
+                 "--cores",
+                 Text.pack <| str cpus
+               ]
+        Copy -> do
+          Log.warn ["bild", "copy", "not implemented yet", nschunk namespace]
+          pure (Exit.ExitSuccess, mempty)
+        Rustc ->
+          Log.info ["bild", "dev", "rust", nschunk namespace]
+            >> nixBuild loud jobs cpus target
+        Sbcl -> do
+          Log.info ["bild", "dev", "lisp", nschunk namespace]
+          proc loud namespace (toNixFlag compiler) compilerFlags
+
+-- | Description of a subprocess for 'run'.
+data Proc = Proc
+  { loud :: Bool, -- ^ stream subprocess output when --loud is set
+    cmd :: String, -- ^ executable to invoke
+    args :: [String], -- ^ argv passed to 'cmd'
+    ns :: Namespace, -- ^ namespace used to prefix log output
+    onFailure :: IO (), -- ^ action run after a nonzero exit
+    onSuccess :: IO () -- ^ action run after a zero exit
+  }
+
+-- | Convert minutes to microseconds: 1 min = 60 * 1,000,000 us, which is
+-- the unit 'System.Timeout.timeout' expects.
+-- NOTE(review): the name says "Millis" but the multiplier 60_000_000 is
+-- minutes-to-MICROseconds (millis would be 60_000) — confirm the intended
+-- unit at the call site.
+toMillis :: (Num a) => a -> a
+toMillis mins = mins * 60_000_000
+
+-- | Run a subprocess, streaming output if --loud is set.
+-- The process is started in its own process group, then three things run
+-- concurrently: waiting for the exit code, draining stdout, and draining
+-- stderr (each drain either echoes directly via 'puts' or goes through
+-- 'logs'). On failure the captured stderr is printed before 'onFailure';
+-- in both cases the captured stdout is returned alongside the exit code.
+run :: Proc -> IO (Exit.ExitCode, ByteString)
+run Proc {..} = do
+  IO.hSetBuffering stdout IO.NoBuffering
+  loud ?| Log.info ["proc", unwords <| map str <| cmd : args]
+  Conduit.proc cmd args
+    |> (\proc_ -> proc_ {Process.create_group = True})
+    |> Conduit.streamingProcess
+    +> \(Conduit.UseProvidedHandle, stdout_, stderr_, hdl) ->
+      (,,)
+        </ Async.Concurrently (Conduit.waitForStreamingProcess hdl)
+        <*> Async.Concurrently (loud ?: (puts stdout_, logs ns stdout_))
+        <*> Async.Concurrently (loud ?: (puts stderr_, logs ns stderr_))
+        |> Async.runConcurrently
+        +> \case
+          (Exit.ExitFailure n, output, outerr) ->
+            Conduit.closeStreamingProcessHandle hdl
+              >> putStr outerr
+              >> onFailure
+              >> pure (Exit.ExitFailure n, output)
+          (Exit.ExitSuccess, output, _) ->
+            Conduit.closeStreamingProcessHandle hdl
+              >> onSuccess
+              >> pure (Exit.ExitSuccess, output)
+
+-- | Convenience wrapper around 'run' with the standard bild log messages
+-- on success and failure.
+proc ::
+  Bool ->
+  Namespace ->
+  String ->
+  [Text] ->
+  IO (Exit.ExitCode, ByteString)
+proc loud namespace cmd args =
+  run
+    Proc
+      { cmd = cmd,
+        args = map Text.unpack args,
+        loud = loud,
+        ns = namespace,
+        onSuccess = Log.good ["bild", nschunk namespace] >> Log.br,
+        onFailure = Log.fail ["bild", nschunk namespace] >> Log.br
+      }
+
+-- | Drain a conduit source, echoing each chunk to stdout as it arrives,
+-- and return the full accumulated output.
+puts ::
+  Conduit.ConduitT () ByteString (Conduit.ResourceT IO) () ->
+  IO ByteString
+puts src =
+  src
+    .| Conduit.iterM (liftIO <. putStr)
+    .| Conduit.foldC
+    |> Conduit.runConduitRes
+
+-- | Like 'puts' but logs the output via 'Omni.Log'.
+-- Each chunk has newlines stripped, is formatted as an info log line for
+-- the namespace, truncated to the terminal width (COLUMNS, defaulting to
+-- 79), and redrawn in place with a trailing carriage return.
+logs ::
+  Namespace ->
+  Conduit.ConduitT () ByteString (Conduit.ResourceT IO) () ->
+  IO ByteString
+logs ns src =
+  Env.lookupEnv "COLUMNS"
+    -- is there a better way to set a default?
+    /> maybe 79 (readMaybe .> fromMaybe 79)
+    +> \columns ->
+      src
+        .| Conduit.iterM
+          ( ByteString.filter (/= BSI.c2w '\n')
+              .> (\t -> Log.fmt ["info", "bild", nschunk ns, decodeUtf8 t])
+              .> Text.take (columns - 1)
+              .> (<> "…\r")
+              .> putStr
+          )
+        .| Conduit.foldC
+        |> Conduit.runConduitRes
+
+-- | Render a namespace as a path-shaped Text chunk for log output.
+nschunk :: Namespace -> Text
+nschunk ns = ns |> Namespace.toPath |> Text.pack
+
+-- | Matches a Haskell @import@ line and returns the module name, skipping
+-- an optional lowercase keyword (e.g. @qualified@) between "import" and
+-- the module. Anything after the module name (aliases, import lists) is
+-- consumed and discarded.
+haskellImports :: Regex.RE Char String
+haskellImports =
+  Regex.string "import"
+    *> Regex.some (Regex.psym Char.isSpace)
+    *> Regex.many (Regex.psym Char.isLower)
+    *> Regex.many (Regex.psym Char.isSpace)
+    *> Regex.some (Regex.psym isModuleChar)
+    <* Regex.many Regex.anySym
+
+-- | Characters permitted in a dotted module name: ASCII letters, ASCII
+-- digits, dot, and underscore.
+isModuleChar :: Char -> Bool
+isModuleChar c =
+  Char.isAsciiUpper c
+    || Char.isAsciiLower c
+    || Char.isDigit c
+    || c == '.'
+    || c == '_'
+
+-- | Matches on `(require :package)` forms and returns `package`. The `require`
+-- function is technically deprecated in Common Lisp, but no new spec has been
+-- published with a replacement, and I don't wanna use asdf, so this is what we
+-- use for Lisp imports. Leading quote/colon characters before the package
+-- name are stripped.
+lispRequires :: Regex.RE Char String
+lispRequires =
+  Regex.string "(require"
+    *> Regex.some (Regex.psym Char.isSpace)
+    *> Regex.many (Regex.psym isQuote)
+    *> Regex.many (Regex.psym isModuleChar)
+    <* Regex.many (Regex.psym (== ')'))
+  where
+    -- a package name may be written as 'pkg or :pkg
+    isQuote :: Char -> Bool
+    isQuote c = c `elem` ['\'', ':']
+
+-- | Build a target with nix in three subprocess steps: instantiate the
+-- derivation via Omni/Bild/Builder.nix, realise it into the nix output
+-- dir, then symlink the resulting binary into bindir. Aborts if
+-- instantiation produces no drv path.
+nixBuild :: Bool -> Int -> Int -> Target -> IO (Exit.ExitCode, ByteString)
+nixBuild loud maxJobs cores target@(Target {..}) =
+  Env.getEnv "CODEROOT" +> \root ->
+    instantiate root |> run +> \case
+      (_, "") -> panic "instantiate did not produce a drv"
+      (Exit.ExitSuccess, drv) ->
+        drv
+          |> str
+          |> chomp
+          |> str
+          |> realise
+          |> run
+          >> run symlink
+      x -> pure x
+  where
+    instantiate root =
+      Proc
+        { loud = loud,
+          ns = namespace,
+          cmd = "nix-instantiate",
+          -- Getting the args quoted correctly is harder than it should be. This
+          -- is tightly coupled with the code in the nix builder and there's no
+          -- way around that, methinks.
+          args =
+            [ ["--argstr", "analysisJSON", str <| Aeson.encode <| (Map.singleton namespace target :: Analysis)],
+              ["--arg", "bild", str <| "import " <> root </> "Omni/Bild.nix {}"],
+              [str <| root </> "Omni/Bild/Builder.nix"]
+            ]
+              |> mconcat
+              |> map Text.unpack,
+          onFailure = Log.fail ["bild", "instantiate", nschunk namespace] >> Log.br,
+          onSuccess = pure ()
+        }
+    -- realise the drv with a gc root under nixdir, respecting job/core caps
+    realise drv =
+      Proc
+        { loud = loud,
+          ns = namespace,
+          cmd = "nix-store",
+          args =
+            [ "--realise",
+              drv,
+              "--add-root",
+              nixdir </> outname out,
+              "--max-jobs",
+              str maxJobs,
+              "--cores",
+              str cores
+            ],
+          onFailure = Log.fail ["bild", "realise", nschunk namespace] >> Log.br,
+          onSuccess = Log.good ["bild", nschunk namespace] >> Log.br
+        }
+    -- expose the realised binary in bindir via a relative symlink
+    symlink =
+      Proc
+        { loud = loud,
+          ns = namespace,
+          cmd = "ln",
+          args =
+            [ "--relative",
+              "--force",
+              "--symbolic",
+              nixdir </> outname out </> "bin" </> outname out,
+              bindir </> outname out
+            ],
+          onFailure = Log.fail ["bild", "symlink", nschunk namespace] >> Log.br,
+          onSuccess = pure ()
+        }
diff --git a/Omni/Bild.nix b/Omni/Bild.nix
new file mode 100644
index 0000000..1a31e1e
--- /dev/null
+++ b/Omni/Bild.nix
@@ -0,0 +1,241 @@
+{ nixpkgs ? import ./Bild/Nixpkgs.nix }:
+
+let
+  constants = import ./Bild/Constants.nix;
+
+  # expose some attrs from stable, keep this minimal and simple
+  stable = let stable = nixpkgs.nixos-24_05;
+  in {
+    inherit (stable)
+      sources lib makeWrapper ccacheStdenv haskell sbcl python3 nixos mkShell
+      dockerTools pkgs;
+    stdenv = stable.ccacheStdenv;
+  };
+
+  # unstable channel, used for a few fast-moving tools in `pkgs` below
+  unstable = nixpkgs.nixos-unstable-small;
+
+  # get the .src attributes of all drvs in each pkgset in the `sources` list,
+  # and concat them with `:` into a Unix-style search path.
+  # makeSourcesPath :: [pkgset] -> str
+  makeSourcesPath = with stable;
+    sources:
+    lib.trivial.pipe sources [
+      (builtins.map lib.attrsets.attrValues)
+      lib.lists.flatten
+      (builtins.filter (pkg: pkg != null))
+      (builtins.map (pkg: if pkg ? src then pkg.src else pkg))
+      (lib.strings.concatStringsSep ":")
+    ];
+
+  # this is the main library definitions, recursive references can be made with
+  # `self.thing`, like in Python objects
+  self = {
+    # provided by .envrc
+    root = builtins.getEnv "CODEROOT";
+
+    inherit (stable) sources lib makeWrapper stdenv;
+
+    haskell = rec {
+      inherit (constants) ghcCompiler;
+
+      ghcVersion = ghcPackageSetFull.version;
+
+      # all available packages
+      deps = import ./Bild/Deps/Haskell.nix;
+      packages = self.lib.attrsets.getAttrs self.haskell.deps
+        stable.haskell.packages."${constants.ghcCompiler}";
+
+      # make a ghc with dependencies
+      ghcWith = stable.haskell.packages.${ghcCompiler}.ghcWithHoogle;
+
+      # ghc with all packages, used for generating bild's package database
+      ghcPackageSetFull = ghcWith (p: self.lib.attrsets.attrVals deps p);
+
+      # bild's dependencies, needs to be hand-written
+      ghcPackageSetBild = ghcWith (hpkgs:
+        with hpkgs; [
+          aeson
+          async
+          base
+          bytestring
+          conduit
+          conduit-extra
+          containers
+          directory
+          docopt
+          filepath
+          process
+          protolude
+          rainbow
+          regex-applicative
+          split
+          tasty
+          tasty-hunit
+          tasty-quickcheck
+          text
+          hostname
+          wai # can remove when removed from Omni.Log
+        ]);
+    };
+
+    lisp = { sbclWith = stable.sbcl.withPackages; };
+
+    python = {
+      packages = self.lib.attrsets.getAttrs (import ./Bild/Deps/Python.nix)
+        stable.python3.pkgs;
+      pythonWith = stable.python3.withPackages;
+      buildPythonApplication = stable.python3.pkgs.buildPythonApplication;
+    };
+
+    # c packages are just stable, filtered to just the list of deps i want
+    c.packages =
+      self.lib.attrsets.getAttrs (import ./Bild/Deps/C.nix) stable.pkgs;
+
+    # exposed packages for inclusion in builds
+    pkgs = with stable.pkgs; {
+      inherit bat bc cmark universal-ctags deadnix fd figlet fzf git
+        git-branchless gitlint groff guile hlint indent jq lolcat mypy nixfmt
+        ormolu pkg-config ripgrep rustc tree wemux;
+      # these come from unstable because they move faster than the channel
+      llama-cpp = unstable.llama-cpp;
+      llm = python3.withPackages (p: with p; [ p.llm-ollama ]);
+      ollama = unstable.ollama;
+      ruff = unstable.ruff;
+      shellcheck = unstable.shellcheck;
+    };
+
+    # a standard nix build for bild, for bootstrapping. this should be the only
+    # hand-written builder we need
+    bild = self.stdenv.mkDerivation {
+      name = "bild";
+      srcs = self.lib.fileset.toSource {
+        root = ../.;
+        fileset = self.lib.fileset.unions [
+          ../Alpha.hs
+          ../Omni/Bild.hs
+          ../Omni/Bild/Meta.hs
+          ../Omni/Cli.hs
+          ../Omni/Log.hs
+          ../Omni/Namespace.hs
+          ../Omni/Test.hs
+        ];
+      };
+      nativeBuildInputs = [ self.haskell.ghcPackageSetBild ];
+      buildInputs = [ self.makeWrapper ];
+      propagatedBuildInputs = with self.pkgs; [
+        pkg-config
+        git
+        # this is just to get access to ghc-pkg in bild
+        (self.haskell.ghcWith (_: [ ]))
+
+        # lisp deps, remove this when i implement nix builds for lisp
+        guile
+        (self.lisp.sbclWith
+          (p: with p; [ alexandria ])) # just enough to build Example.lisp
+      ];
+      strictDeps = true;
+      ghcVersion = self.haskell.ghcVersion;
+      # copy the full package database alongside the binary so bild can
+      # query it via GHC_PACKAGE_PATH at runtime
+      buildPhase = ''
+        mkdir -p $out/bin $out/lib/ghc-$ghcVersion
+        cp -r \
+          ${self.haskell.ghcPackageSetFull}/lib/ghc-$ghcVersion/package.conf.d \
+          $out/lib/ghc-$ghcVersion
+        ghc \
+          -threaded \
+          -Werror \
+          -Wall \
+          -Winvalid-haddock \
+          -haddock \
+          -i. \
+          --make Omni/Bild.hs \
+          -main-is Omni.Bild \
+          -o $out/bin/bild
+      '';
+      installPhase = ''
+        wrapProgram $out/bin/bild \
+          --prefix PATH : ${
+            self.lib.makeBinPath [
+              self.haskell.ghcPackageSetBild
+              self.pkgs.git
+            ]
+          } \
+          --set GHC_PACKAGE_PATH \
+            $out/lib/ghc-$ghcVersion/package.conf.d
+      '';
+    };
+
+    # wrapper around bild
+    runBildAnalyze = target:
+      self.stdenv.mkDerivation rec {
+        name = "bild-analysis";
+        src = ../.;
+        USER = "nixbld";
+        HOSTNAME = "nix-sandbox";
+        # we need to remove the $src root because bild expects paths relative to the
+        # working directory:
+        TARGET = "."
+          + self.lib.strings.removePrefix (toString src) (toString target);
+        buildPhase = ''
+          export CODEROOT=$(pwd)
+          mkdir $out
+          ${self.bild}/bin/bild --plan "$TARGET" 1> $out/analysis.json \
+            2> >(tee -a $out/stderr >&2)
+        '';
+        installPhase = "exit 0";
+      };
+
+    # gather data needed for compiling by analyzing the main module. returns the
+    # json object of the build
+    analyze = target:
+      builtins.readFile (self.runBildAnalyze target + "/analysis.json");
+
+    # this does a bild build for the given target, but entirely in nix. its kinda
+    # like IFD, but not as costly, i think
+    run = target:
+      import ./Bild/Builder.nix {
+        analysisJSON = self.analyze target;
+        bild = self;
+      };
+
+    # the main development environment
+    env = stable.mkShell {
+      name = "omnidev";
+      # this should just be dev tools
+      buildInputs = with self.pkgs; [
+        bat
+        bc
+        self.bild
+        universal-ctags
+        fd
+        figlet
+        fzf
+        git
+        git-branchless
+        gitlint
+        jq
+        lolcat
+        llm
+        ormolu
+        ripgrep
+        tree
+        wemux
+      ];
+      shellHook = ''
+        export GHC_PACKAGE_PATH=${self.bild}/lib/ghc-${self.haskell.ghcVersion}/package.conf.d
+        export ALL_SOURCES=${
+          makeSourcesPath [
+            self.python.packages
+            self.haskell.packages
+            self.c.packages
+            self.sources
+          ]
+        }
+      '';
+    };
+
+    # build an operating system. 'cfg' is the NixOS config
+    os = cfg: (stable.nixos (_args: cfg)).toplevel;
+
+    # build a docker image
+    image = stable.dockerTools.buildImage;
+  };
+in self
diff --git a/Omni/Bild/Builder.nix b/Omni/Bild/Builder.nix
new file mode 100644
index 0000000..a78f311
--- /dev/null
+++ b/Omni/Bild/Builder.nix
@@ -0,0 +1,168 @@
+/* This is the library of nix builders. Some rules to follow:
+   - Keep this code as minimal as possible. I'd rather write Haskell than Nix,
+   wouldn't you?
+   - Try to reuse as much upstream Nix as possible.
+*/
+{ analysisJSON, bild }:
+with bild;
+let
+  analysis = builtins.fromJSON analysisJSON;
+
+  # common bash functions for the builder
+  commonBash = builtins.toFile "common.bash" ''
+    # Check that a command succeeds, fail and log if not.
+    function check {
+      $@ || { echo "fail: $name: $3"; exit 1; }
+    }
+  '';
+
+  # build one Target (as produced by `bild --plan`) with the builder it names
+  build = _: target:
+    let
+      name = target.out;
+      root = builtins.getEnv "CODEROOT";
+      mainModule = target.mainModule;
+      compileLine = lib.strings.concatStringsSep " "
+        ([ target.compiler ] ++ target.compilerFlags);
+
+      allSources = target.srcs ++ [ target.quapath ];
+
+      isEmpty = x: x == null || x == [ ];
+
+      # names excluded from the source tree entirely
+      skip = [ "_" ".direnv" ];
+      filter = file: type:
+        if lib.lists.elem (builtins.baseNameOf file) skip then
+          false
+        # TODO: this means any new directory will cause a rebuild. this bad. i
+        # should recurse into the directory and match against the srcs. for now I
+        # just use preBuild to delete empty dirs
+        else if type == "directory" then
+          true
+        else if type == "regular" then
+          lib.trivial.pipe file [
+            (f: lib.strings.removePrefix "${root}/" f)
+            (f: lib.lists.elem f allSources)
+          ]
+        else
+          false;
+
+      # remove empty directories, leftover from the src filter
+      preBuild = "find . -type d -empty -delete";
+
+      src = lib.sources.cleanSourceWith {
+        inherit filter;
+        src = lib.sources.cleanSource root;
+      };
+
+      # language-level deps, looked up in the package set the target names
+      langdeps_ = if isEmpty target.langdeps then
+        [ ]
+      else
+        lib.attrsets.attrVals target.langdeps (lib.attrsets.getAttrFromPath
+          (lib.strings.splitString "." target.packageSet) bild);
+
+      sysdeps_ = if isEmpty target.sysdeps then
+        [ ]
+      else
+        lib.attrsets.attrVals target.sysdeps pkgs;
+
+      rundeps_ = if isEmpty target.rundeps then
+        [ ]
+      else
+        lib.attrsets.attrVals target.rundeps pkgs;
+
+      CODEROOT = ".";
+
+      builders = {
+        # generic builder: just run the compile line and install the output
+        base = stdenv.mkDerivation rec {
+          inherit name src CODEROOT preBuild;
+          buildInputs = langdeps_ ++ sysdeps_;
+          installPhase = "install -D ${name} $out/bin/${name}";
+          buildPhase = compileLine;
+        };
+
+        haskell = stdenv.mkDerivation rec {
+          inherit name src CODEROOT preBuild;
+          nativeBuildInputs = [ makeWrapper ];
+          buildInputs = sysdeps_ ++ [
+            (haskell.ghcWith (p: (lib.attrsets.attrVals target.langdeps p)))
+          ];
+          buildPhase = compileLine;
+          installPhase = ''
+            install -D ${name} $out/bin/${name}
+            wrapProgram $out/bin/${name} \
+              --prefix PATH : ${lib.makeBinPath rundeps_}
+          '';
+        };
+
+        # c builds get pkg-config flags appended for lang and sys deps
+        c = stdenv.mkDerivation rec {
+          inherit name src CODEROOT preBuild;
+          buildInputs = langdeps_ ++ sysdeps_;
+          installPhase = "install -D ${name} $out/bin/${name}";
+          buildPhase = lib.strings.concatStringsSep " " [
+            compileLine
+            (if isEmpty langdeps_ then
+              ""
+            else
+              "$(pkg-config --cflags ${
+                lib.strings.concatStringsSep " " target.langdeps
+              })")
+            (if isEmpty sysdeps_ then
+              ""
+            else
+              "$(pkg-config --libs ${
+                lib.strings.concatStringsSep " " target.sysdeps
+              })")
+          ];
+        };
+
+        python = python.buildPythonApplication rec {
+          inherit name src CODEROOT;
+          nativeBuildInputs = [ makeWrapper ];
+          propagatedBuildInputs = langdeps_ ++ sysdeps_ ++ rundeps_;
+          buildInputs = sysdeps_;
+          nativeCheckInputs = [ pkgs.ruff python.packages.mypy ];
+          checkPhase = ''
+            . ${commonBash}
+            cp ${../../pyproject.toml} ./pyproject.toml
+            check ruff format --exclude 'setup.py' --check .
+            check ruff check --exclude 'setup.py' --exclude '__init__.py' .
+            touch ./py.typed
+            check python -m mypy \
+              --explicit-package-bases \
+              --no-error-summary \
+              --exclude 'setup\.py$' \
+              .
+          '';
+          # NOTE(review): the standard hook attribute is `installCheckPhase`;
+          # confirm `installCheck` is actually picked up and run.
+          installCheck = ''
+            . ${commonBash}
+            check python -m ${mainModule} test
+          '';
+          preBuild = ''
+            # remove empty directories, leftover from the src filter
+            find . -type d -empty -delete
+            # initialize remaining dirs as python modules
+            find . -type d -exec touch {}/__init__.py \;
+            # generate a minimal setup.py
+            cat > setup.py << EOF
+            from setuptools import find_packages, setup
+            setup(
+                name="${name}",
+                entry_points={"console_scripts":["${name} = ${mainModule}:main"]},
+                version="0.0.0",
+                url="git://simatime.com/omni.git",
+                author="dev",
+                author_email="dev@simatime.com",
+                description="nil",
+                packages=find_packages(),
+                install_requires=[],
+            )
+            EOF
+          '';
+          pythonImportsCheck = [ mainModule ]; # sanity check
+        };
+      };
+    in builders.${target.builder};
+  # the bild caller gives us the Analysis type, which is a hashmap, but i need to
+  # return a single drv, so just take the first one for now. ideally i would only
+  # pass Target, one at a time, (perhaps parallelized in haskell land) and then i
+  # wouldn't need all of this let nesting
+in builtins.head (lib.attrsets.mapAttrsToList build analysis)
diff --git a/Omni/Bild/CcacheWrapper.nix b/Omni/Bild/CcacheWrapper.nix
new file mode 100644
index 0000000..78e5a08
--- /dev/null
+++ b/Omni/Bild/CcacheWrapper.nix
@@ -0,0 +1,57 @@
+self: super:
+
+# Nixpkgs overlay that routes expensive C/C++ builds through ccache.
+let
+  # this should come from config.programs.ccache.cacheDir but I can't figure out
+  # how to access that from a nixpkgs overlay, so just hardcode the default
+  ccacheDir = "/var/cache/ccache";
+
+  # https://github.com/NixOS/nixpkgs/pull/216363#issuecomment-1430356886
+  fixwebkit = pkg:
+    self.useCcacheStdenv (pkg.overrideAttrs (attrs: rec {
+      preConfigure = attrs.preConfigure + ''
+        # not sure which of these works so just do them both
+        export NUMBER_OF_PROCESSORS=$NIX_BUILD_CORES
+        ninjaFlagsArray+=("-l$NIX_BUILD_CORES")
+      '';
+    }));
+in {
+  # fail early with instructions if the shared cache dir is missing or
+  # not writable by the build user
+  ccacheWrapper = super.ccacheWrapper.override {
+    extraConfig = ''
+      export CCACHE_COMPRESS=1
+      export CCACHE_DIR="${ccacheDir}"
+      export CCACHE_UMASK=007
+      if [ ! -d "$CCACHE_DIR" ]
+      then
+        echo "====="
+        echo "Directory '$CCACHE_DIR' does not exist"
+        echo "Please create it with:"
+        echo "  sudo mkdir -m0770 '$CCACHE_DIR'"
+        echo "  sudo chown root:nixbld '$CCACHE_DIR'"
+        echo "====="
+        exit 1
+      fi
+      if [ ! -w "$CCACHE_DIR" ]
+      then
+        echo "====="
+        echo "Directory '$CCACHE_DIR' is not accessible for user $(whoami)"
+        echo "Please verify its access permissions"
+        echo "====="
+        exit 1
+      fi
+    '';
+  };
+
+  useCcacheStdenv = pkg: pkg.override { stdenv = super.ccacheStdenv; };
+
+  # NOTE(review): "cudann" — confirm attribute name; nixpkgs usually spells
+  # it "cudnn". An unknown attr here would fail at evaluation time.
+  cudann = self.useCcacheStdenv super.cudann;
+  llvm = self.useCcacheStdenv super.llvm;
+  magma = self.useCcacheStdenv super.magma;
+  nvcc = self.useCcacheStdenv super.nvcc;
+  onnx = self.useCcacheStdenv super.onnx;
+  onnxruntime = self.useCcacheStdenv super.onnxruntime;
+  webkit = fixwebkit super.webkit;
+  webkitgtk = fixwebkit super.webkitgtk;
+  webkitgtk_4_1 = fixwebkit super.webkitgtk_4_1;
+  webkitgtk_5_0 = fixwebkit super.webkitgtk_5_0;
+  webkitgtk_6_0 = fixwebkit super.webkitgtk_6_0;
+}
diff --git a/Omni/Bild/Constants.nix b/Omni/Bild/Constants.nix
new file mode 100644
index 0000000..20c992e
--- /dev/null
+++ b/Omni/Bild/Constants.nix
@@ -0,0 +1 @@
+{ ghcCompiler = "ghc948"; }
diff --git a/Omni/Bild/Deps.hs b/Omni/Bild/Deps.hs
new file mode 100644
index 0000000..c2fe53f
--- /dev/null
+++ b/Omni/Bild/Deps.hs
@@ -0,0 +1,694 @@
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TupleSections #-}
+{-# LANGUAGE ViewPatterns #-}
+
+-- | A specific-purpose dependency manager.
+--
+-- : out deps
+module Omni.Bild.Deps where
+
+import Alpha hiding (map, packageName, str, tshow)
+import Data.Aeson ((.=))
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.Key as K
+import qualified Data.Aeson.KeyMap as KM
+import qualified Data.ByteString as B
+import qualified Data.ByteString.Char8 as B8
+import qualified Data.HashMap.Strict as HMS
+import Data.HashMap.Strict.Extended
+import qualified Data.Text as T
+import Data.Text.Extended
+import GHC.Show
+import qualified Network.HTTP.Simple as HTTP
+import Niv.Cmd (Cmd, description, extraLogs, parseCmdShortcut, parsePackageSpec, updateCmd)
+import Niv.Git.Cmd
+import Niv.GitHub.Cmd
+import Niv.Local.Cmd
+import Niv.Logger
+import Niv.Sources
+import Niv.Update
+import qualified Options.Applicative as Opts
+import qualified Options.Applicative.Help.Pretty as Opts
+import qualified System.Directory as Dir
+import System.Environment (getEnv)
+import System.FilePath (takeDirectory, (</>))
+import UnliftIO
+import Prelude
+
+-- | The monad for this tool: plain IO plus a Reader carrying the
+-- 'FindSourcesJson' (which Sources.json to operate on).
+newtype NIO a = NIO {runNIO :: ReaderT FindSourcesJson IO a}
+  deriving (Functor, Applicative, Monad, MonadIO, MonadReader FindSourcesJson)
+
+-- | Needed by niv's update machinery, which runs in MonadUnliftIO.
+instance MonadUnliftIO NIO where
+  withRunInIO = wrappedWithRunInIO NIO runNIO
+
+-- | The sources file to operate on: hardcoded to
+-- $CODEROOT/Omni/Bild/Sources.json.
+-- NOTE(review): this deliberately bypasses the Reader environment (see the
+-- commented-out @ask@ below), so the @--sources-file/-s@ flag parsed in
+-- 'cli' currently has no effect — confirm that is intended.
+getFindSourcesJson :: NIO FindSourcesJson
+-- getFindSourcesJson = ask
+getFindSourcesJson = do
+  root <- li <| getEnv "CODEROOT"
+  pure <| AtPath <| root </> "Omni/Bild/Sources.json"
+
+-- | Short alias for lifting an IO action into any MonadIO.
+li :: (MonadIO io) => IO a -> io a
+li action = liftIO action
+
+-- | Entrypoint. @deps test@ is a no-op so CI can exercise the binary;
+-- anything else is handed to the CLI parser.
+main :: IO ()
+main = do
+  args <- getArgs
+  case args of
+    ["test"] -> pure ()
+    _ -> cli args
+
+-- | Parse argv, configure log colors, and run the selected subcommand in
+-- the Reader environment. An empty argv shows the help text instead of a
+-- parse error.
+cli :: [String] -> IO ()
+cli args = do
+  ((fsj, colors), nio) <-
+    pure args +> Opts.handleParseResult <. execParserPure' Opts.defaultPrefs opts
+  setColors colors
+  runReaderT (runNIO nio) fsj
+  where
+    -- empty argv -> show help rather than a bare failure
+    execParserPure' pprefs pinfo [] =
+      Opts.Failure
+        <| Opts.parserFailure pprefs pinfo (Opts.ShowHelpText Nothing) mempty
+    execParserPure' pprefs pinfo args_ = Opts.execParserPure pprefs pinfo args_
+    opts = Opts.info ((,) </ ((,) </ parseFindSourcesJson <*> parseColors) <*> (parseCommand <**> Opts.helper)) <| mconcat desc
+    desc =
+      [ Opts.fullDesc,
+        Opts.headerDoc
+          <| Just
+          <| "deps - specific-purpose dependency manager"
+      ]
+    parseFindSourcesJson =
+      AtPath
+        </ Opts.strOption
+          ( Opts.long "sources-file"
+              <> Opts.short 's'
+              <> Opts.metavar "FILE"
+              <> Opts.help "Use FILE instead of Omni/Bild/Sources.json"
+          )
+        <|> pure Auto
+    parseColors =
+      (\case True -> Never; False -> Always)
+        </ Opts.switch
+          ( Opts.long "no-colors"
+              <> Opts.help "Don't use colors in output"
+          )
+
+-- | Top-level subcommand dispatch: init, add, show, update, modify, drop.
+parseCommand :: Opts.Parser (NIO ())
+parseCommand =
+  [ Opts.command "init" parseCmdInit,
+    Opts.command "add" parseCmdAdd,
+    Opts.command "show" parseCmdShow,
+    Opts.command "update" parseCmdUpdate,
+    Opts.command "modify" parseCmdModify,
+    Opts.command "drop" parseCmdDrop
+  ]
+    |> mconcat
+    |> Opts.subparser
+
+-- | Positional PACKAGE argument, wrapped in the PackageName newtype.
+parsePackageName :: Opts.Parser PackageName
+parsePackageName =
+  Opts.argument Opts.str (Opts.metavar "PACKAGE") /> PackageName
+
+-- | PACKAGE argument plus the github-style package-spec options.
+parsePackage :: Opts.Parser (PackageName, PackageSpec)
+parsePackage = (,) </ parsePackageName <*> parsePackageSpec githubCmd
+
+-------------------------------------------------------------------------------
+-- INIT
+-------------------------------------------------------------------------------
+
+-- | Whether or not to fetch nixpkgs
+data FetchNixpkgs
+  = NoNixpkgs
+  | NixpkgsFast -- Pull latest known nixpkgs
+  | NixpkgsCustom T.Text Nixpkgs -- branch, nixpkgs
+  deriving (Show)
+
+-- | A nixpkgs repository on GitHub.
+data Nixpkgs = Nixpkgs T.Text T.Text -- owner, repo
+
+-- | Rendered as "owner/repo".
+instance Show Nixpkgs where
+  show (Nixpkgs o r) = T.unpack o <> "/" <> T.unpack r
+
+-- | Parser info for the @init@ subcommand.
+parseCmdInit :: Opts.ParserInfo (NIO ())
+parseCmdInit = Opts.info (cmdInit </ parseNixpkgs <**> Opts.helper) <| mconcat desc
+  where
+    desc =
+      [ Opts.fullDesc,
+        Opts.progDesc
+          "Initialize a Nix project. Existing files won't be modified."
+      ]
+
+-- | Which nixpkgs to import on @init@; defaults to 'NixpkgsFast' when no
+-- flag is given.
+parseNixpkgs :: Opts.Parser FetchNixpkgs
+parseNixpkgs = parseNixpkgsFast <|> parseNixpkgsLatest <|> parseNixpkgsCustom <|> parseNoNixpkgs <|> pure NixpkgsFast
+  where
+    parseNixpkgsFast =
+      Opts.flag'
+        NixpkgsFast
+        ( Opts.long "fast"
+            <> Opts.help "Use the latest nixpkgs cached at 'https://github.com/nmattia/niv/blob/master/data/nixpkgs.json'. This is the default."
+        )
+    parseNixpkgsLatest =
+      Opts.flag'
+        (NixpkgsCustom "master" (Nixpkgs "NixOS" "nixpkgs"))
+        ( Opts.long "latest"
+            <> Opts.help "Pull the latest unstable nixpkgs from NixOS/nixpkgs."
+        )
+    parseNixpkgsCustom =
+      flip NixpkgsCustom
+        </ Opts.option
+          customNixpkgsReader
+          ( Opts.long "nixpkgs"
+              <> Opts.showDefault
+              <> Opts.help "Use a custom nixpkgs repository from GitHub."
+              <> Opts.metavar "OWNER/REPO"
+          )
+        <*> Opts.strOption
+          ( Opts.long "nixpkgs-branch"
+              <> Opts.short 'b'
+              <> Opts.help "The nixpkgs branch when using --nixpkgs ...."
+              <> Opts.showDefault
+          )
+    parseNoNixpkgs =
+      Opts.flag'
+        NoNixpkgs
+        ( Opts.long "no-nixpkgs"
+            <> Opts.help "Don't add a nixpkgs entry to Sources.json."
+        )
+    -- accepts "owner/repo" exactly; anything else fails the reader
+    customNixpkgsReader =
+      Opts.maybeReader <| \(T.pack -> repo) -> case T.splitOn "/" repo of
+        [owner, reponame] -> Just (Nixpkgs owner reponame)
+        _ -> Nothing
+
+-- | The @init@ subcommand: write the default sources.nix / Sources.json
+-- files (creating or updating as appropriate), optionally import nixpkgs,
+-- and print a hint when a custom sources.json path is in use.
+cmdInit :: FetchNixpkgs -> NIO ()
+cmdInit nixpkgs = do
+  job "Initializing" <| do
+    fsj <- getFindSourcesJson
+    -- Writes all the default files
+    -- a path, a "create" function and an update function for each file.
+    forM_
+      [ ( pathNixSourcesNix,
+          (`createFile` initNixSourcesNixContent),
+          \path content -> do
+            if shouldUpdateNixSourcesNix content
+              then do
+                say "Updating sources.nix"
+                li <| B.writeFile path initNixSourcesNixContent
+              else say "Not updating sources.nix"
+        ),
+        ( pathNixSourcesJson fsj,
+          \path -> do
+            createFile path initNixSourcesJsonContent
+
+            -- Import nixpkgs, if necessary
+            initNixpkgs nixpkgs,
+          \path _content -> dontCreateFile path
+        )
+      ]
+      <| \(path, onCreate, onUpdate) -> do
+        exists <- li <| Dir.doesFileExist path
+        if exists then li (B.readFile path) +> onUpdate path else onCreate path
+    case fsj of
+      Auto -> pure ()
+      AtPath fp ->
+        tsay
+          <| T.unlines
+            [ T.unwords
+                [ tbold <| tblue "INFO:",
+                  "You are using a custom path for sources.json."
+                ],
+              "  You need to configure the sources.nix to use " <> tbold (T.pack fp) <> ":",
+              tbold "    import sources.nix { sourcesFile = PATH ; }; ",
+              T.unwords
+                [ "  where",
+                  tbold "PATH",
+                  "is the relative path from sources.nix to",
+                  tbold (T.pack fp) <> "."
+                ]
+            ]
+  where
+    -- Write @content@ to @path@, creating parent directories as needed.
+    createFile :: FilePath -> B.ByteString -> NIO ()
+    createFile path content =
+      li <| do
+        let dir = takeDirectory path
+        Dir.createDirectoryIfMissing True dir
+        say <| "Creating " <> path
+        B.writeFile path content
+    -- Log that an existing file is being left alone.
+    dontCreateFile :: FilePath -> NIO ()
+    dontCreateFile path = say <| "Not creating " <> path
+
+-- | Add the configured nixpkgs entry to the sources file: either skip it,
+-- fetch niv's cached pin over HTTP, or add a custom owner/repo/branch.
+initNixpkgs :: FetchNixpkgs -> NIO ()
+initNixpkgs nixpkgs =
+  case nixpkgs of
+    NoNixpkgs -> say "Not importing 'nixpkgs'."
+    NixpkgsFast -> do
+      say "Using known 'nixpkgs' ..."
+      packageSpec <- HTTP.getResponseBody </ HTTP.httpJSON "https://raw.githubusercontent.com/nmattia/niv/master/data/nixpkgs.json"
+      cmdAdd
+        githubCmd
+        (PackageName "nixpkgs")
+        (specToLockedAttrs packageSpec)
+      pure ()
+    NixpkgsCustom branch nixpkgs' -> do
+      say "Importing 'nixpkgs' ..."
+      let (owner, repo) = case nixpkgs' of
+            Nixpkgs o r -> (o, r)
+      cmdAdd
+        githubCmd
+        (PackageName "nixpkgs")
+        ( specToFreeAttrs
+            <| PackageSpec
+            <| KM.fromList
+              [ "owner" .= owner,
+                "repo" .= repo,
+                "branch" .= branch
+              ]
+        )
+
+-------------------------------------------------------------------------------
+-- ADD
+-------------------------------------------------------------------------------
+
+-- | Parser info for the @add@ subcommand: either a github shortcut
+-- (@owner/repo@) or an explicit @git@/@github@/@local@ subcommand.
+parseCmdAdd :: Opts.ParserInfo (NIO ())
+parseCmdAdd =
+  Opts.info
+    ((parseCommands <|> parseShortcuts) <**> Opts.helper)
+    <| description githubCmd
+  where
+    -- XXX: this should parse many shortcuts (github, git). Right now we only
+    -- parse GitHub because the git interface is still experimental. note to
+    -- implementer: it'll be tricky to have the correct arguments show up
+    -- without repeating "PACKAGE PACKAGE PACKAGE" for every package type.
+    parseShortcuts = parseShortcut githubCmd
+    parseShortcut cmd = uncurry (cmdAdd cmd) </ parseShortcutArgs cmd
+    parseCmd cmd = uncurry (cmdAdd cmd) </ parseCmdArgs cmd
+    parseCmdAddGit =
+      Opts.info (parseCmd gitCmd <**> Opts.helper) (description gitCmd)
+    parseCmdAddLocal =
+      Opts.info (parseCmd localCmd <**> Opts.helper) (description localCmd)
+    parseCmdAddGitHub =
+      Opts.info (parseCmd githubCmd <**> Opts.helper) (description githubCmd)
+    parseCommands =
+      Opts.subparser
+        ( Opts.hidden
+            <> Opts.commandGroup "Experimental commands:"
+            <> Opts.command "git" parseCmdAddGit
+            <> Opts.command "github" parseCmdAddGitHub
+            <> Opts.command "local" parseCmdAddLocal
+        )
+
+-- | only used in shortcuts (niv add foo/bar ...) because PACKAGE is NOT
+-- optional
+-- | only used in shortcuts (niv add foo/bar ...) because PACKAGE is NOT
+-- optional. The optional --name flag overrides the name derived from the
+-- shortcut.
+parseShortcutArgs :: Cmd -> Opts.Parser (PackageName, Attrs)
+parseShortcutArgs cmd = collapse </ parseNameAndShortcut <*> parsePackageSpec cmd
+  where
+    -- merge the CLI spec over the shortcut's base spec; explicit --name wins
+    collapse specAndName pspec = (pname, specToLockedAttrs <| pspec <> baseSpec)
+      where
+        (pname, baseSpec) = case specAndName of
+          ((_, spec), Just pname') -> (pname', PackageSpec spec)
+          ((pname', spec), Nothing) -> (pname', PackageSpec spec)
+    parseNameAndShortcut =
+      (,)
+        </ Opts.argument
+          (Opts.maybeReader (parseCmdShortcut cmd <. T.pack))
+          (Opts.metavar "PACKAGE")
+        <*> optName
+    optName =
+      Opts.optional
+        <| PackageName
+        </ Opts.strOption
+          ( Opts.long "name"
+              <> Opts.short 'n'
+              <> Opts.metavar "NAME"
+              <> Opts.help "Set the package name to <NAME>"
+          )
+
+-- | only used in command (niv add <cmd> ...) because PACKAGE is optional
+parseCmdArgs :: Cmd -> Opts.Parser (PackageName, Attrs)
+parseCmdArgs cmd = collapse </ parseNameAndShortcut <*> parsePackageSpec cmd
+ where
+ collapse specAndName pspec = (pname, specToLockedAttrs <| pspec <> baseSpec)
+ where
+ (pname, baseSpec) = case specAndName of
+ (Just (_, spec), Just pname') -> (pname', PackageSpec spec)
+ (Just (pname', spec), Nothing) -> (pname', PackageSpec spec)
+ (Nothing, Just pname') -> (pname', PackageSpec KM.empty)
+ (Nothing, Nothing) -> (PackageName "unnamed", PackageSpec KM.empty)
+ parseNameAndShortcut =
+ (,)
+ </ Opts.optional
+ ( Opts.argument
+ (Opts.maybeReader (parseCmdShortcut cmd <. T.pack))
+ (Opts.metavar "PACKAGE")
+ )
+ <*> optName
+ optName =
+ Opts.optional
+ <| PackageName
+ </ Opts.strOption
+ ( Opts.long "name"
+ <> Opts.short 'n'
+ <> Opts.metavar "NAME"
+ <> Opts.help "Set the package name to <NAME>"
+ )
+
+cmdAdd :: Cmd -> PackageName -> Attrs -> NIO ()
+cmdAdd cmd packageName attrs = do
+ job ("Adding package " <> T.unpack (unPackageName packageName)) <| do
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ when (HMS.member packageName sources)
+ <| li
+ <| abortCannotAddPackageExists packageName
+ eFinalSpec <- fmap attrsToSpec </ li (doUpdate attrs cmd)
+ case eFinalSpec of
+ Left e -> li (abortUpdateFailed [(packageName, e)])
+ Right finalSpec -> do
+ say <| "Writing new sources file"
+ li
+ <| setSources fsj
+ <| Sources
+ <| HMS.insert packageName finalSpec sources
+
+-------------------------------------------------------------------------------
+-- SHOW
+-------------------------------------------------------------------------------
+
+parseCmdShow :: Opts.ParserInfo (NIO ())
+parseCmdShow =
+ Opts.info
+ ((cmdShow </ Opts.optional parsePackageName) <**> Opts.helper)
+ <| Opts.progDesc "Show information about a dependency in human-readable format"
+
+cmdShow :: Maybe PackageName -> NIO ()
+cmdShow = \case
+ Just packageName -> do
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ case HMS.lookup packageName sources of
+ Just pspec -> showPackage packageName pspec
+ Nothing -> li <| abortCannotShowNoSuchPackage packageName
+ Nothing -> do
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ forWithKeyM_ sources <| showPackage
+
+showPackage :: (MonadIO io) => PackageName -> PackageSpec -> io ()
+showPackage (PackageName pname) (PackageSpec spec) = do
+ tsay <| tbold pname
+ forM_ (KM.toList spec) <| \(attrName, attrValValue) -> do
+ let attrValue = case attrValValue of
+ Aeson.String str -> str
+ _ -> tfaint "<barabajagal>"
+ tsay <| " " <> K.toText attrName <> ": " <> attrValue
+
+-------------------------------------------------------------------------------
+-- UPDATE
+-------------------------------------------------------------------------------
+
+parseCmdUpdate :: Opts.ParserInfo (NIO ())
+parseCmdUpdate =
+ Opts.info
+ ((cmdUpdate </ Opts.optional parsePackage) <**> Opts.helper)
+ <| mconcat desc
+ where
+ desc =
+ [ Opts.fullDesc,
+ Opts.progDesc "Update dependencies",
+ Opts.headerDoc
+ <| Just
+ <| Opts.nest 2
+ <| Opts.vcat
+ [ "Examples:",
+ Opts.fill 30 "deps update" Opts.<+> "# update all packages",
+ Opts.fill 30 "deps update nixpkgs" Opts.<+> "# update nixpkgs",
+ Opts.fill 30 "deps update my-package -v beta-0.2" Opts.<+> "# update my-package to version \"beta-0.2\""
+ ]
+ ]
+
+specToFreeAttrs :: PackageSpec -> Attrs
+specToFreeAttrs = KM.toHashMapText <. fmap (Free,) <. unPackageSpec
+
+specToLockedAttrs :: PackageSpec -> Attrs
+specToLockedAttrs = KM.toHashMapText <. fmap (Locked,) <. unPackageSpec
+
+cmdUpdate :: Maybe (PackageName, PackageSpec) -> NIO ()
+cmdUpdate = \case
+ Just (packageName, cliSpec) ->
+ job ("Update " <> T.unpack (unPackageName packageName)) <| do
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ eFinalSpec <- case HMS.lookup packageName sources of
+ Just defaultSpec -> do
+ -- lookup the "type" to find a Cmd to run, defaulting to legacy
+ -- github
+ let cmd = case KM.lookup "type" (unPackageSpec defaultSpec) of
+ Just "git" -> gitCmd
+ Just "local" -> localCmd
+ _ -> githubCmd
+ spec = specToLockedAttrs cliSpec <> specToFreeAttrs defaultSpec
+ fmap attrsToSpec </ li (doUpdate spec cmd)
+ Nothing -> li <| abortCannotUpdateNoSuchPackage packageName
+ case eFinalSpec of
+ Left e -> li <| abortUpdateFailed [(packageName, e)]
+ Right finalSpec ->
+ li
+ <| setSources fsj
+ <| Sources
+ <| HMS.insert packageName finalSpec sources
+ Nothing ->
+ job "Updating all packages" <| do
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ esources' <-
+ forWithKeyM sources
+ <| \packageName defaultSpec -> do
+ tsay <| "Package: " <> unPackageName packageName
+ let initialSpec = specToFreeAttrs defaultSpec
+ -- lookup the "type" to find a Cmd to run, defaulting to legacy
+ -- github
+ let cmd = case KM.lookup "type" (unPackageSpec defaultSpec) of
+ Just "git" -> gitCmd
+ Just "local" -> localCmd
+ _ -> githubCmd
+ fmap attrsToSpec </ li (doUpdate initialSpec cmd)
+ let (failed, sources') = partitionEithersHMS esources'
+ unless (HMS.null failed)
+ <| li
+ <| abortUpdateFailed (HMS.toList failed)
+ li <| setSources fsj <| Sources sources'
+
+-- | pretty much tryEvalUpdate but we might issue some warnings first
+doUpdate :: Attrs -> Cmd -> IO (Either SomeException Attrs)
+doUpdate attrs cmd = do
+ forM_ (extraLogs cmd attrs) <| tsay
+ tryEvalUpdate attrs (updateCmd cmd)
+
+partitionEithersHMS ::
+ (Eq k, Hashable k) =>
+ HMS.HashMap k (Either a b) ->
+ (HMS.HashMap k a, HMS.HashMap k b)
+partitionEithersHMS =
+ flip HMS.foldlWithKey' (HMS.empty, HMS.empty) <| \(ls, rs) k -> \case
+ Left l -> (HMS.insert k l ls, rs)
+ Right r -> (ls, HMS.insert k r rs)
+
+-------------------------------------------------------------------------------
+-- MODIFY
+-------------------------------------------------------------------------------
+
+parseCmdModify :: Opts.ParserInfo (NIO ())
+parseCmdModify =
+ Opts.info
+ ((cmdModify </ parsePackageName <*> optName <*> parsePackageSpec githubCmd) <**> Opts.helper)
+ <| mconcat desc
+ where
+ desc =
+ [ Opts.fullDesc,
+ Opts.progDesc "Modify dependency attributes without performing an update",
+ Opts.headerDoc
+ <| Just
+ <| Opts.vcat
+ [ "Examples:",
+ "",
+ " niv modify nixpkgs -v beta-0.2",
+ " niv modify nixpkgs -a branch=nixpkgs-unstable"
+ ]
+ ]
+ optName =
+ Opts.optional
+ <| PackageName
+ </ Opts.strOption
+ ( Opts.long "name"
+ <> Opts.short 'n'
+ <> Opts.metavar "NAME"
+ <> Opts.help "Set the package name to <NAME>"
+ )
+
+cmdModify :: PackageName -> Maybe PackageName -> PackageSpec -> NIO ()
+cmdModify packageName mNewName cliSpec = do
+ tsay <| "Modifying package: " <> unPackageName packageName
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ finalSpec <- case HMS.lookup packageName sources of
+ Just defaultSpec -> pure <| attrsToSpec (specToLockedAttrs cliSpec <> specToFreeAttrs defaultSpec)
+ Nothing -> li <| abortCannotModifyNoSuchPackage packageName
+ case mNewName of
+ Just newName -> do
+ when (HMS.member newName sources)
+ <| li
+ <| abortCannotAddPackageExists newName
+ li <| setSources fsj <| Sources <| HMS.insert newName finalSpec <| HMS.delete packageName sources
+ Nothing ->
+ li <| setSources fsj <| Sources <| HMS.insert packageName finalSpec sources
+
+-------------------------------------------------------------------------------
+-- DROP
+-------------------------------------------------------------------------------
+
+parseCmdDrop :: Opts.ParserInfo (NIO ())
+parseCmdDrop =
+ Opts.info
+ ( (cmdDrop </ parsePackageName <*> parseDropAttributes)
+ <**> Opts.helper
+ )
+ <| mconcat desc
+ where
+ desc =
+ [ Opts.fullDesc,
+ Opts.progDesc "Drop dependency",
+ Opts.headerDoc
+ <| Just
+ <| Opts.vcat
+ [ "Examples:",
+ "",
+ " niv drop jq",
+ " niv drop my-package version"
+ ]
+ ]
+ parseDropAttributes :: Opts.Parser [T.Text]
+ parseDropAttributes =
+ many
+ <| Opts.argument Opts.str (Opts.metavar "ATTRIBUTE")
+
+cmdDrop :: PackageName -> [T.Text] -> NIO ()
+cmdDrop packageName = \case
+ [] -> do
+ tsay <| "Dropping package: " <> unPackageName packageName
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ when (not <| HMS.member packageName sources)
+ <| li
+ <| abortCannotDropNoSuchPackage packageName
+ li
+ <| setSources fsj
+ <| Sources
+ <| HMS.delete packageName sources
+ attrs -> do
+ tsay <| "Dropping attributes: " <> T.intercalate " " attrs
+ tsay <| "In package: " <> unPackageName packageName
+ fsj <- getFindSourcesJson
+ sources <- unSources </ li (getSources fsj)
+ packageSpec <- case HMS.lookup packageName sources of
+ Nothing ->
+ li <| abortCannotAttributesDropNoSuchPackage packageName
+ Just (PackageSpec packageSpec) ->
+ pure
+ <| PackageSpec
+ <| KM.mapMaybeWithKey
+ (\k v -> if K.toText k `elem` attrs then Nothing else Just v)
+ packageSpec
+ li
+ <| setSources fsj
+ <| Sources
+ <| HMS.insert packageName packageSpec sources
+
+-------------------------------------------------------------------------------
+-- Files and their content
+-------------------------------------------------------------------------------
+
+-- | Checks if content is different from the default and if it does /not/ contain
+-- a comment line with @niv: no_update@
+shouldUpdateNixSourcesNix :: B.ByteString -> Bool
+shouldUpdateNixSourcesNix content =
+ content /= initNixSourcesNixContent
+ && not (any lineForbids (B8.lines content))
+ where
+ lineForbids :: B8.ByteString -> Bool
+ lineForbids str =
+ case B8.uncons (B8.dropWhile isSpace str) of
+ Just ('#', rest) -> case B8.stripPrefix "niv:" (B8.dropWhile isSpace rest) of
+ Just rest' -> case B8.stripPrefix "no_update" (B8.dropWhile isSpace rest') of
+ Just {} -> True
+ _ -> False
+ _ -> False
+ _ -> False
+
+-------------------------------------------------------------------------------
+-- Abort
+-------------------------------------------------------------------------------
+
+abortCannotAddPackageExists :: PackageName -> IO a
+abortCannotAddPackageExists (PackageName n) =
+ abort
+ <| T.unlines
+ [ "Cannot add package " <> n <> ".",
+ "The package already exists. Use",
+ " niv drop " <> n,
+ "and then re-add the package. Alternatively use",
+ " niv update " <> n <> " --attribute foo=bar",
+ "to update the package's attributes."
+ ]
+
+abortCannotUpdateNoSuchPackage :: PackageName -> IO a
+abortCannotUpdateNoSuchPackage (PackageName n) =
+ abort
+ <| T.unlines
+ [ "Cannot update package " <> n <> ".",
+ "The package doesn't exist. Use",
+ " niv add " <> n,
+ "to add the package."
+ ]
+
+abortCannotModifyNoSuchPackage :: PackageName -> IO a
+abortCannotModifyNoSuchPackage (PackageName n) =
+ abort
+ <| T.unlines
+ [ "Cannot modify package " <> n <> ".",
+ "The package doesn't exist. Use",
+ " niv add " <> n,
+ "to add the package."
+ ]
+
+abortCannotDropNoSuchPackage :: PackageName -> IO a
+abortCannotDropNoSuchPackage (PackageName n) =
+ abort
+ <| T.unlines
+ [ "Cannot drop package " <> n <> ".",
+ "The package doesn't exist."
+ ]
+
+abortCannotShowNoSuchPackage :: PackageName -> IO a
+abortCannotShowNoSuchPackage (PackageName n) =
+ abort
+ <| T.unlines
+ [ "Cannot show package " <> n <> ".",
+ "The package doesn't exist."
+ ]
+
+abortCannotAttributesDropNoSuchPackage :: PackageName -> IO a
+abortCannotAttributesDropNoSuchPackage (PackageName n) =
+ abort
+ <| T.unlines
+ [ "Cannot drop attributes of package " <> n <> ".",
+ "The package doesn't exist."
+ ]
+
+abortUpdateFailed :: [(PackageName, SomeException)] -> IO a
+abortUpdateFailed errs =
+ abort
+ <| T.unlines
+ <| ["One or more packages failed to update:"]
+ <> map
+ ( \(PackageName pname, e) ->
+ pname <> ": " <> tshow e
+ )
+ errs
diff --git a/Omni/Bild/Deps.nix b/Omni/Bild/Deps.nix
new file mode 100644
index 0000000..9ba0b31
--- /dev/null
+++ b/Omni/Bild/Deps.nix
@@ -0,0 +1,45 @@
+_self: super:
+
+{
+ # Needs upgrading for guile 3
+ # inspekt3d = super.callPackage ./Deps/inspekt3d.nix {};
+
+ guix = super.pkgs.stdenv.mkDerivation rec {
+ pname = "guix";
+ name = "${pname}-${version}";
+ version = super.sources.guix.version;
+ src = super.sources.guix;
+ buildInputs = with super.pkgs; [
+ guile
+ # guile-gcrypt
+ # guile-sql
+ # guile-zlib
+ # guile-lzlib
+ # guile-avahi
+ # guile-git
+ # guile-json
+ gnutls
+ gnumake
+ sqlite
+ libgcrypt
+ gcc
+ ];
+ };
+
+ llm = super.overrideSrc super.llm super.sources.llm;
+
+ nostr-rs-relay = super.callPackage ./Deps/nostr-rs-relay.nix { };
+
+ ollama = super.ollama.override { acceleration = "cuda"; };
+
+ # https://github.com/NixOS/nixpkgs/issues/317147#issuecomment-2147343125
+ radicale = super.radicale.overrideAttrs (_old: rec {
+ version = "3.2.0";
+ src = super.fetchFromGitHub {
+ owner = "Kozea";
+ repo = "Radicale";
+ rev = "v${version}";
+ hash = "sha256-RxC8VOfdTXJZiAroDHTKjJqGWu65Z5uyb4WK1LOqubQ=";
+ };
+ });
+}
diff --git a/Omni/Bild/Deps/C.nix b/Omni/Bild/Deps/C.nix
new file mode 100644
index 0000000..3f670cd
--- /dev/null
+++ b/Omni/Bild/Deps/C.nix
@@ -0,0 +1 @@
+[ "libsodium" ]
diff --git a/Omni/Bild/Deps/Haskell.nix b/Omni/Bild/Deps/Haskell.nix
new file mode 100644
index 0000000..04f3a74
--- /dev/null
+++ b/Omni/Bild/Deps/Haskell.nix
@@ -0,0 +1,72 @@
+# This is the global set of Haskell packages which gets deployed to Hoogle, and
+# is available for selecting.
+
+[
+ "MonadRandom"
+ "QuickCheck"
+ "SafeSemaphore"
+ "acid-state"
+ "aeson"
+ "async"
+ "base"
+ "bytestring"
+ "clay"
+ "cmark"
+ "cmark-lucid"
+ "conduit"
+ "conduit-extra"
+ "config-ini"
+ "containers"
+ "directory"
+ "docopt"
+ "envy"
+ "fast-logger"
+ "filepath"
+ "github"
+ "haskeline"
+ "hostname"
+ "http-types"
+ "ixset"
+ "katip"
+ "lucid"
+ "monad-logger"
+ "mtl"
+ "neat-interpolation"
+ "network-uri"
+ "niv"
+ "optparse-simple"
+ "parsec"
+ "process"
+ "protolude"
+ "quickcheck-instances"
+ "rainbow"
+ "random"
+ "regex-applicative"
+ "req"
+ "safecopy"
+ "saltine"
+ "servant"
+ "servant-auth"
+ "servant-auth-server"
+ "servant-lucid"
+ "servant-server"
+ "split"
+ "stm"
+ "tasty"
+ "tasty-hunit"
+ "tasty-quickcheck"
+ "text"
+ "time"
+ "transformers"
+ "unagi-chan"
+ "unix"
+ "unordered-containers"
+ "uuid"
+ "vector"
+ "wai"
+ "wai-app-static"
+ "wai-extra"
+ "wai-middleware-metrics"
+ "warp"
+ "x509"
+]
diff --git a/Omni/Bild/Deps/Python.nix b/Omni/Bild/Deps/Python.nix
new file mode 100644
index 0000000..b0b2465
--- /dev/null
+++ b/Omni/Bild/Deps/Python.nix
@@ -0,0 +1 @@
+[ "cryptography" "llm" "mypy" "nltk" "slixmpp" ]
diff --git a/Omni/Bild/Deps/accelerate.nix b/Omni/Bild/Deps/accelerate.nix
new file mode 100644
index 0000000..be1d2fd
--- /dev/null
+++ b/Omni/Bild/Deps/accelerate.nix
@@ -0,0 +1,16 @@
+{ fetchFromGitHub, buildPythonPackage, numpy, packaging, psutil, pyyaml, torch
+}:
+
+buildPythonPackage rec {
+ name = "accelerate";
+ version = "0.15.0";
+ propagatedBuildInputs = [ numpy packaging psutil pyyaml torch ];
+ doCheck = false;
+ src = fetchFromGitHub {
+ owner = "huggingface";
+ repo = "accelerate";
+ rev = "v${version}";
+ sha256 = "sha256-agfbOaa+Nm10HZkd2Y7zR3R37n+vLNsxCyxZax6O3Lo=";
+ };
+}
+
diff --git a/Omni/Bild/Deps/bitsandbytes.nix b/Omni/Bild/Deps/bitsandbytes.nix
new file mode 100644
index 0000000..eb32aac
--- /dev/null
+++ b/Omni/Bild/Deps/bitsandbytes.nix
@@ -0,0 +1,86 @@
+{ lib, buildPythonPackage, fetchFromGitHub, python, pythonOlder, pytestCheckHook
+, setuptools, torch, einops, lion-pytorch, scipy, symlinkJoin }:
+
+let
+ pname = "bitsandbytes";
+ version = "0.38.0";
+
+ inherit (torch) cudaPackages cudaSupport;
+ inherit (cudaPackages) cudaVersion;
+
+ # NOTE: torchvision doesn't use cudnn; torch does!
+ # For this reason it is not included.
+ cuda-common-redist = with cudaPackages; [
+ cuda_cccl # <thrust/*>
+ libcublas # cublas_v2.h
+ libcurand
+ libcusolver # cusolverDn.h
+ libcusparse # cusparse.h
+ ];
+
+ cuda-native-redist = symlinkJoin {
+ name = "cuda-native-redist-${cudaVersion}";
+ paths = with cudaPackages;
+ [
+ cuda_cudart # cuda_runtime.h cuda_runtime_api.h
+ cuda_nvcc
+ ] ++ cuda-common-redist;
+ };
+
+ cuda-redist = symlinkJoin {
+ name = "cuda-redist-${cudaVersion}";
+ paths = cuda-common-redist;
+ };
+
+in buildPythonPackage {
+ inherit pname version;
+ format = "pyproject";
+
+ disabled = pythonOlder "3.7";
+
+ src = fetchFromGitHub {
+ owner = "TimDettmers";
+ repo = pname;
+ rev = "refs/tags/${version}";
+ hash = "sha256-gGlbzTDvZNo4MhcYzLvWuB2ec7q+Qt5/LtTbJ0Rc+Kk=";
+ };
+
+ postPatch = ''
+ substituteInPlace Makefile --replace "/usr/bin/g++" "g++" --replace "lib64" "lib"
+ substituteInPlace bitsandbytes/cuda_setup/main.py \
+ --replace "binary_path = package_dir / binary_name" \
+ "binary_path = Path('$out/${python.sitePackages}/${pname}')/binary_name"
+ '' + lib.optionalString torch.cudaSupport ''
+ substituteInPlace bitsandbytes/cuda_setup/main.py \
+ --replace "/usr/local/cuda/lib64" "${cuda-native-redist}/lib"
+ '';
+
+ CUDA_HOME = "${cuda-native-redist}";
+
+ preBuild = if torch.cudaSupport then
+ with torch.cudaPackages;
+ let
+ cudaVersion = lib.concatStrings
+ (lib.splitVersion torch.cudaPackages.cudaMajorMinorVersion);
+ in "make CUDA_VERSION=${cudaVersion} cuda${cudaMajorVersion}x"
+ else
+ "make CUDA_VERSION=CPU cpuonly";
+
+ nativeBuildInputs = [ setuptools ]
+ ++ lib.optionals torch.cudaSupport [ cuda-native-redist ];
+ buildInputs = lib.optionals torch.cudaSupport [ cuda-redist ];
+
+ propagatedBuildInputs = [ torch ];
+
+ doCheck = false; # tests require CUDA and also GPU access
+ nativeCheckInputs = [ pytestCheckHook einops lion-pytorch scipy ];
+
+ pythonImportsCheck = [ "bitsandbytes" ];
+
+ meta = with lib; {
+ homepage = "https://github.com/TimDettmers/bitsandbytes";
+ description = "8-bit CUDA functions for PyTorch";
+ license = licenses.mit;
+ maintainers = with maintainers; [ bcdarwin ];
+ };
+}
diff --git a/Omni/Bild/Deps/guile-opengl.nix b/Omni/Bild/Deps/guile-opengl.nix
new file mode 100644
index 0000000..af01082
--- /dev/null
+++ b/Omni/Bild/Deps/guile-opengl.nix
@@ -0,0 +1,32 @@
+{ stdenv, lib, fetchurl, pkg-config, guile, libGL, libGLU, freeglut }:
+
+let
+ name = "guile-opengl-${version}";
+ version = "0.1.0";
+in stdenv.mkDerivation {
+ inherit name;
+
+ src = fetchurl {
+ url = "mirror://gnu/guile-opengl/${name}.tar.gz";
+ sha256 = "13qfx4xh8baryxqrv986l848ygd0piqwm6s2s90pxk9c0m9vklim";
+ };
+
+ patchPhase = ''
+ substituteInPlace glx/runtime.scm \
+ --replace '(dynamic-link "libGL")' '(dynamic-link "${libGL}/lib/libGL.so")'
+ substituteInPlace glu/runtime.scm \
+ --replace '(dynamic-link "libGLU")' '(dynamic-link "${libGLU}/lib/libGLU.so")'
+ substituteInPlace glut/runtime.scm \
+ --replace '(dynamic-link "libglut")' '(dynamic-link "${freeglut}/lib/libglut.so")'
+ '';
+
+ nativeBuildInputs = [ pkg-config guile libGL libGLU freeglut ];
+
+ meta = with lib; {
+ description = "Guile bindings for the OpenGL graphics API";
+ homepage = "https://www.gnu.org/software/guile-opengl/";
+ license = licenses.gpl3Plus;
+ maintainers = with maintainers; [ vyp ];
+ platforms = platforms.all;
+ };
+}
diff --git a/Omni/Bild/Deps/inspekt3d.nix b/Omni/Bild/Deps/inspekt3d.nix
new file mode 100644
index 0000000..3146350
--- /dev/null
+++ b/Omni/Bild/Deps/inspekt3d.nix
@@ -0,0 +1,30 @@
+{ stdenv, lib, autoreconfHook, pkg-config, guile, guile-opengl, mesa
+, glibcLocales, libfive, sources }:
+
+stdenv.mkDerivation {
+ name = "inspekt3d-unstable";
+
+ src = sources.inspekt3d;
+ version = "unstable-2018-10-17";
+
+ nativeBuildInputs = [ pkg-config autoreconfHook ];
+ buildInputs = [ guile glibcLocales mesa ];
+ propagatedBuildInputs = [ guile-opengl libfive ];
+
+ preBuild = ''
+ substituteInPlace inspekt3d/library.scm \
+ --replace '"libfive-guile"' '"${libfive}/lib/libfive-guile.so"' \
+ --replace '"libfive"' '"${libfive}/lib/libfive.so"'
+ '';
+
+ GUILE_AUTO_COMPILE = 0;
+ preConfigure = "./bootstrap";
+
+ meta = with lib; {
+ description = "Lightweight 3D viewer for Libfive written in Guile Scheme";
+ homepage = "https://sr.ht/~morgansmith/inspekt3d";
+ license = licenses.gpl3;
+ maintainers = with maintainers; [ bsima ];
+ platforms = platforms.all;
+ };
+}
diff --git a/Omni/Bild/Deps/interegular.nix b/Omni/Bild/Deps/interegular.nix
new file mode 100644
index 0000000..24065d8
--- /dev/null
+++ b/Omni/Bild/Deps/interegular.nix
@@ -0,0 +1,21 @@
+{ lib, sources, buildPythonPackage }:
+
+buildPythonPackage rec {
+ pname = "interegular";
+ version = sources.interegular.rev;
+ format = "setuptools";
+
+ src = sources.interegular;
+
+ propagatedBuildInputs = [ ];
+
+ doCheck = false; # no tests currently
+ pythonImportsCheck = [ "interegular" ];
+
+ meta = with lib; {
+    description = "Allows checking regexes for overlaps.";
+ homepage = "https://github.com/MegaIng/interegular";
+ license = licenses.mit;
+ maintainers = with maintainers; [ bsima ];
+ };
+}
diff --git a/Omni/Bild/Deps/lion-pytorch.nix b/Omni/Bild/Deps/lion-pytorch.nix
new file mode 100644
index 0000000..7b06e78
--- /dev/null
+++ b/Omni/Bild/Deps/lion-pytorch.nix
@@ -0,0 +1,27 @@
+{ lib, buildPythonPackage, pythonOlder, fetchFromGitHub, torch }:
+
+buildPythonPackage rec {
+ pname = "lion-pytorch";
+ version = "0.1.2";
+ format = "setuptools";
+ disabled = pythonOlder "3.6";
+
+ src = fetchFromGitHub {
+ owner = "lucidrains";
+ repo = "lion-pytorch";
+ rev = "refs/tags/${version}";
+ hash = "sha256-9hdpRJvCpv3PeC7f0IXpHt6i+e6LiT0QUl5jeDGelQE=";
+ };
+
+ propagatedBuildInputs = [ torch ];
+
+ pythonImportsCheck = [ "lion_pytorch" ];
+ doCheck = false; # no tests currently
+
+ meta = with lib; {
+ description = "Optimizer tuned by Google Brain using genetic algorithms";
+ homepage = "https://github.com/lucidrains/lion-pytorch";
+ license = licenses.mit;
+ maintainers = with maintainers; [ bcdarwin ];
+ };
+}
diff --git a/Omni/Bild/Deps/llm-ollama.nix b/Omni/Bild/Deps/llm-ollama.nix
new file mode 100644
index 0000000..e2b6a66
--- /dev/null
+++ b/Omni/Bild/Deps/llm-ollama.nix
@@ -0,0 +1,40 @@
+{ buildPythonPackage, fetchFromGitHub, lib, llm, ollama, pytestCheckHook
+, setuptools, pythonOlder, }:
+buildPythonPackage rec {
+ pname = "llm-ollama";
+ version = "0.3.0";
+ pyproject = true;
+
+ disabled = pythonOlder "3.8";
+
+ src = fetchFromGitHub {
+ owner = "taketwo";
+ repo = pname;
+ rev = "refs/tags/${version}";
+ hash = "sha256-Ar0Ux8BNGY0i764CEk7+48J6jnndlRIIMPZ9tFpXiy4=";
+ };
+
+ nativeBuildInputs = [ setuptools ];
+
+ buildInputs = [ llm ollama ];
+
+ propagatedBuildInputs = [ ollama ];
+
+ disabledTests = [
+ # wants to mkdir in the /homeless-shelter
+ "test_registered_models"
+ ];
+
+ nativeCheckInputs = [ pytestCheckHook ];
+
+ pythonImportsCheck = [ "llm_ollama" ];
+
+ meta = with lib; {
+ homepage = "https://github.com/taketwo/llm-ollama";
+ description =
+      "LLM plugin providing access to local Ollama models using HTTP API";
+ changelog = "https://github.com/taketwo/llm-ollama/releases/tag/${version}";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ bsima ];
+ };
+}
diff --git a/Omni/Bild/Deps/llm-sentence-transformers.nix b/Omni/Bild/Deps/llm-sentence-transformers.nix
new file mode 100644
index 0000000..4d63c83
--- /dev/null
+++ b/Omni/Bild/Deps/llm-sentence-transformers.nix
@@ -0,0 +1,42 @@
+{ buildPythonPackage, fetchFromGitHub, lib, llm, sentence-transformers
+, pytestCheckHook, setuptools, pythonOlder, }:
+buildPythonPackage rec {
+ pname = "llm-sentence-transformers";
+ version = "0.2";
+ pyproject = true;
+
+ disabled = pythonOlder "3.8";
+
+ src = fetchFromGitHub {
+ owner = "simonw";
+ repo = pname;
+ rev = "refs/tags/${version}";
+ hash = "sha256-1NlKPWekdVLrNkIMWXLCRWn54RlAEuEDWMCDnQHNkBc=";
+ };
+
+ nativeBuildInputs = [ setuptools ];
+
+ buildInputs = [ llm sentence-transformers ];
+
+ propagatedBuildInputs = [ sentence-transformers ];
+
+ # fails because of some pydantic warnings
+ doCheck = false;
+ disabledTests = [
+ # wants to mkdir in the /homeless-shelter
+ "test_sentence_transformers"
+ ];
+
+ nativeCheckInputs = [ pytestCheckHook ];
+
+ pythonImportsCheck = [ "llm_sentence_transformers" ];
+
+ meta = with lib; {
+ homepage = "https://github.com/taketwo/llm-sentence-transformers";
+ description = "LLM plugin for embeddings using sentence-transformers";
+ changelog =
+ "https://github.com/taketwo/llm-sentence-transformers/releases/tag/${version}";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ bsima ];
+ };
+}
diff --git a/Omni/Bild/Deps/nostr-rs-relay.nix b/Omni/Bild/Deps/nostr-rs-relay.nix
new file mode 100644
index 0000000..0eef13f
--- /dev/null
+++ b/Omni/Bild/Deps/nostr-rs-relay.nix
@@ -0,0 +1,19 @@
+{ fetchFromSourcehut, rustPlatform, pkg-config, openssl }:
+
+rustPlatform.buildRustPackage rec {
+ pname = "nostr-rs-relay";
+ version = "0.7.15";
+
+ src = fetchFromSourcehut {
+ owner = "~gheartsfield";
+ repo = pname;
+ rev = version;
+ sha256 = "sha256-aa1uFJcpQPMVzIWpkQ2MW6LIzTnhXNQc220scbzwJ5k=";
+ };
+
+ cargoSha256 = "sha256-3593pjc4A4NsEnE/ZYsR1vSMCvw2ZJue4EIY6cFa2WA=";
+
+ nativeBuildInputs = [ pkg-config openssl.dev ];
+
+ buildInputs = [ openssl.dev ];
+}
diff --git a/Omni/Bild/Deps/outlines.nix b/Omni/Bild/Deps/outlines.nix
new file mode 100644
index 0000000..29ef41b
--- /dev/null
+++ b/Omni/Bild/Deps/outlines.nix
@@ -0,0 +1,34 @@
+{ lib, sources, buildPythonPackage, interegular, jinja2, lark, numpy, perscache
+, pillow, pydantic, regex, scipy, tenacity, torch }:
+
+buildPythonPackage rec {
+ pname = "outlines";
+ version = sources.outlines.rev;
+ format = "pyproject";
+
+ src = sources.outlines;
+
+ propagatedBuildInputs = [
+ interegular
+ jinja2
+ lark
+ numpy
+ perscache
+ pillow
+ pydantic
+ regex
+ scipy
+ tenacity
+ torch
+ ];
+
+ doCheck = false; # no tests currently
+ pythonImportsCheck = [ "outlines" ];
+
+ meta = with lib; {
+ description = "Probabilistic Generative Model Programming";
+ homepage = "https://github.com/normal-computing/outlines";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ bsima ];
+ };
+}
diff --git a/Omni/Bild/Deps/perscache.nix b/Omni/Bild/Deps/perscache.nix
new file mode 100644
index 0000000..508a261
--- /dev/null
+++ b/Omni/Bild/Deps/perscache.nix
@@ -0,0 +1,25 @@
+{ lib, sources, buildPythonPackage, beartype, cloudpickle, icontract, pbr }:
+
+buildPythonPackage rec {
+ pname = "perscache";
+ version = sources.perscache.rev;
+
+ src = sources.perscache;
+
+ propagatedBuildInputs = [ beartype cloudpickle icontract pbr ];
+ PBR_VERSION = version;
+
+ doCheck = false; # no tests currently
+ pythonImportsCheck = [ "perscache" ];
+
+ meta = with lib; {
+ description = ''
+      An easy-to-use decorator for persistent memoization: like
+      `functools.lru_cache`, but results can be saved in any format to any
+ storage.
+ '';
+ homepage = "https://github.com/leshchenko1979/perscache";
+ license = licenses.mit;
+ maintainers = with maintainers; [ bsima ];
+ };
+}
diff --git a/Omni/Bild/Example.c b/Omni/Bild/Example.c
new file mode 100644
index 0000000..2f4bfd4
--- /dev/null
+++ b/Omni/Bild/Example.c
@@ -0,0 +1,15 @@
+// : out examplesodium.exe
+// : dep libsodium
+// : arg -lsodium
+#include <sodium.h>
+
+int
+main (void)
+{
+ if (sodium_init () < 0)
+ {
+ /* panic! the library couldn't be initialized; it is not safe to use */
+ }
+ printf ("Omni/Bild/Example.c: Hello world!\n");
+ return 0;
+}
diff --git a/Omni/Bild/Example.hs b/Omni/Bild/Example.hs
new file mode 100644
index 0000000..025391c
--- /dev/null
+++ b/Omni/Bild/Example.hs
@@ -0,0 +1,45 @@
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- : out example
+module Omni.Bild.Example where
+
+-- Both internal and external language dependencies are detected automatically
+-- by bild, for example here we import 'Crypto.Saltine' and list 'saltine' in
+-- 'Deps/Haskell.nix' to indicate that this is the package we want made
+-- available to bild, which will index the external package and record its
+-- modules for lookup.
+import Alpha
+import qualified Crypto.Saltine as Saltine
+import qualified Crypto.Saltine.Core.SecretBox as Saltine
+import qualified Omni.Cli as Cli
+import qualified Omni.Test as Test
+
+main :: IO ()
+main = Cli.main <| Cli.Plan help move test pure
+
+move :: Cli.Arguments -> IO ()
+move _ = putStrLn "Hello world"
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Bild.Example"
+ [ Test.unit "can use saltine package" <| do
+ Saltine.sodiumInit
+ k <- Saltine.newKey
+ n <- Saltine.newNonce
+ let msg = "foobar"
+ let encryptedMsg = Saltine.secretbox k n <| str "foobar"
+ Just msg Test.@=? str </ Saltine.secretboxOpen k n encryptedMsg
+ ]
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+example that tests basic haskell build
+
+Usage:
+ example
+ example test
+|]
diff --git a/Omni/Bild/Example.lisp b/Omni/Bild/Example.lisp
new file mode 100644
index 0000000..cdabe7c
--- /dev/null
+++ b/Omni/Bild/Example.lisp
@@ -0,0 +1,4 @@
+;; : out helloworld.exe
+(require 'alexandria)
+(defun main ()
+ (print "hello world"))
diff --git a/Omni/Bild/Example.py b/Omni/Bild/Example.py
new file mode 100644
index 0000000..e5766ba
--- /dev/null
+++ b/Omni/Bild/Example.py
@@ -0,0 +1,45 @@
+"""
+Test that bild can build Python stuff.
+
+Example Python file that also serves as a test case for bild.
+"""
+
+# : out example
+# : dep cryptography
+import cryptography.fernet
+import sys
+
+
+def cryptic_hello(name: str) -> str:
+ """
+ Encrypt and decrypt `name`.
+
+ Example taken from `cryptography` docs.
+
+ Raises:
+ ValueError: if decryption fails
+ """
+ key = cryptography.fernet.Fernet.generate_key()
+ f = cryptography.fernet.Fernet(key)
+ token = f.encrypt(hello(name).encode("utf-8"))
+ ret = f.decrypt(token).decode("utf-8")
+ if ret != hello(name):
+ msg = "en/decryption failed!"
+ raise ValueError(msg)
+ return ret
+
+
+def hello(name: str) -> str:
+ """Say hello."""
+ return f"Hello {name}"
+
+
+def main() -> None:
+ """Entrypoint."""
+ if "test" in sys.argv:
+ sys.stdout.write("testing success")
+ sys.stdout.write(cryptic_hello("world"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Omni/Bild/Example.rs b/Omni/Bild/Example.rs
new file mode 100644
index 0000000..ba98dda
--- /dev/null
+++ b/Omni/Bild/Example.rs
@@ -0,0 +1,4 @@
+// : out helloworld
+fn main() {
+ println!("Hello world!");
+}
diff --git a/Omni/Bild/Functions.nix b/Omni/Bild/Functions.nix
new file mode 100644
index 0000000..8b87f86
--- /dev/null
+++ b/Omni/Bild/Functions.nix
@@ -0,0 +1,33 @@
+_: super: {
+ # Given a generic `builder`, will generate an attrset for all the packages
+ # pinned by `deps` with `builder` applied to the package. This attrset can
+ # then be merged with the rest of the packages in the set as part of an
+ # overlay or overrides.
+ overridePinnedDeps = builder:
+ super.lib.genAttrs (builtins.attrNames super.sources) builder;
+
+ # Modifies a derivation with our source and version, keeping super build
+ # rules. This will fail if build steps have changed, or if no build
+ # rules are available upstream.
+ overrideSource = depName:
+ if super ? "${depName}" && super.${depName} ? overrideAttrs then
+ super.${depName}.overrideAttrs (attrs:
+ attrs // rec {
+ version =
+ super.sources.${depName}.version or super.sources.${depName}.rev;
+ src = super.sources.${depName};
+ })
+ else
+ null;
+
+ # Simply override the 'src' attr on a drv. This is meant to be a simpler
+ # alternative to 'overrideSource' above. In an overlay, use it like:
+ # mypkg = super.overrideSrc super.mypkg super.sources.mypkg;
+ overrideSrc = dep: src:
+ dep.overrideAttrs (attrs:
+ attrs // {
+ version = src.version or src.rev;
+ src = src;
+ });
+}
+
diff --git a/Omni/Bild/Haskell.nix b/Omni/Bild/Haskell.nix
new file mode 100644
index 0000000..c744848
--- /dev/null
+++ b/Omni/Bild/Haskell.nix
@@ -0,0 +1,36 @@
+_self: super:
+
+let
+ inherit (import ./Constants.nix) ghcCompiler;
+
+ buildCabal = sel: name: sel.callCabal2nix name super.sources.${name} { };
+
+in rec {
+
+ haskell = super.haskell // {
+ packages = super.haskell.packages // {
+ "${ghcCompiler}" = super.haskell.packages."${ghcCompiler}".override
+ (_old: {
+ overrides = with super.pkgs.haskell.lib;
+ sel: sup:
+ super.overridePinnedDeps (buildCabal sel) // {
+ ap-normalize = dontCheck sup.ap-normalize;
+ clay = doJailbreak sup.clay;
+ cmark = doJailbreak sup.cmark;
+ docopt = buildCabal sel "docopt";
+ linear-generics = doJailbreak sup.linear-generics;
+ req = doJailbreak sup.req;
+ servant-auth = doJailbreak sup.servant-auth;
+ servant-auth-server = dontCheck sup.servant-auth-server;
+ shellcheck = doJailbreak sup.shellcheck;
+ string-qq = doJailbreak sup.string-qq;
+ syb-with-class = doJailbreak sup.syb-with-class;
+ th-abstraction = doJailbreak sup.th-abstraction;
+ };
+ });
+ };
+ };
+
+ ormolu = super.haskellPackages.ormolu;
+
+}
diff --git a/Omni/Bild/Meta.hs b/Omni/Bild/Meta.hs
new file mode 100644
index 0000000..44bcff0
--- /dev/null
+++ b/Omni/Bild/Meta.hs
@@ -0,0 +1,108 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Small module for extracting metadata from the comments of modules.
+module Omni.Bild.Meta where
+
+import Alpha
+import qualified Data.Aeson as Aeson
+import qualified Data.Char as Char
+import qualified Data.Set as Set
+import qualified Data.Text as Text
+import qualified Text.Regex.Applicative as Regex
+
+-- | A third-party dependency. This gets mapped to some name in nixpkgs,
+-- prefixed by package set like @haskellPackages@ or
+-- @python3Packages@.
+type Dep = String
+
+-- | This is a system-level requirement, the string gets mapped to a name in
+-- nixpkgs at the top level, like @pkgs.thing@.
+type Sys = String
+
+-- | A run-time dependency. This is some executable that will be placed on
+-- @PATH@. This gets selected from @bild.pkgs@, so it must be exported there.
+type Run = String
+
+-- | An arbitrary compiler argument that may get added to the compilation
+-- command. Should be used sparingly, and not all builds will support this.
+type Arg = String
+
+data Out = Lib String | Bin String | None
+ deriving (Show, Eq)
+
+instance Aeson.ToJSON Out where
+ toJSON =
+ Aeson.String <. Text.pack <. \case
+ Bin a -> a
+ Lib a -> a
+ None -> ""
+
+data Parsed = Parsed
+ { pdep :: Set Dep,
+ parg :: Set Arg,
+ pout :: Out,
+ psys :: Set Sys,
+ prun :: Set Run
+ }
+
+detect :: (Ord a) => Regex.RE Char a -> [Text] -> Set a
+detect m cl =
+ cl
+ /> Text.unpack
+ /> Regex.match m
+ |> catMaybes
+ |> Set.fromList
+
+-- | 'Out' is always singular, so it gets a special function
+detectOut :: Regex.RE Char Out -> [Text] -> Out
+detectOut m cl =
+ cl
+ /> Text.unpack
+ /> Regex.match m
+ |> catMaybes
+ |> head
+ |> fromMaybe None
+
+detectAll :: [Char] -> [Text] -> Parsed
+detectAll m cl = Parsed {..}
+ where
+ pout = detectOut (out m <|> lib m) cl
+ detect_ re = detect (re m) cl
+ pdep = detect_ dep
+ psys = detect_ sys
+ parg = detect_ arg
+ prun = detect_ run
+
+dep :: [Char] -> Regex.RE Char Dep
+dep comment =
+ Regex.string (comment ++ " : dep ")
+ *> Regex.many (Regex.psym (not <. Char.isSpace))
+
+sys :: [Char] -> Regex.RE Char Sys
+sys comment =
+ Regex.string (comment ++ " : sys ")
+ *> Regex.many (Regex.psym (not <. Char.isSpace))
+
+out :: [Char] -> Regex.RE Char Out
+out comment =
+ Regex.string (comment ++ " : out ")
+ *> Regex.many (Regex.psym (/= ' '))
+ /> Bin
+
+lib :: [Char] -> Regex.RE Char Out
+lib comment =
+ Regex.string (comment ++ " : lib ")
+ *> Regex.many (Regex.psym (/= ' '))
+ /> Lib
+
+arg :: [Char] -> Regex.RE Char Arg
+arg comment =
+ Regex.string (comment ++ " : arg ")
+ *> Regex.many Regex.anySym
+
+run :: [Char] -> Regex.RE Char Run
+run comment =
+ Regex.string (comment ++ " : run ")
+ *> Regex.many Regex.anySym
diff --git a/Omni/Bild/Nixpkgs.nix b/Omni/Bild/Nixpkgs.nix
new file mode 100644
index 0000000..212e3f1
--- /dev/null
+++ b/Omni/Bild/Nixpkgs.nix
@@ -0,0 +1,43 @@
+let
+ sources = import ./Sources.nix { sourcesFile = ./Sources.json; };
+
+ config = {
+ allowAliases = true;
+ allowBroken = true;
+ allowUnfree = true;
+ checkMeta = true;
+ cudaSupport = true;
+ };
+
+ system = __currentSystem;
+
+ # override pinned deps with our sources, this must come before other
+ # package overlays, because of the 'null' from 'overrideSource'
+ depsOverlay = _: pkgs: pkgs.overridePinnedDeps pkgs.overrideSource;
+
+ overlays = [
+ (_: _: { inherit sources; })
+ (import ./CcacheWrapper.nix)
+ (import ./Functions.nix)
+ depsOverlay
+ (import ./Deps.nix)
+ (import ./Python.nix)
+ (import ./Haskell.nix)
+ ];
+
+ nixos-unstable-small =
+ import sources.nixos-unstable-small { inherit system config overlays; };
+
+in {
+ nixos-24_05 = import sources.nixos-24_05 {
+ inherit system config;
+ overlays = overlays ++ [
+ (_: _: {
+ # backport newer packages from unstable
+ unstable = nixos-unstable-small.pkgs;
+ })
+ ];
+ };
+
+ inherit nixos-unstable-small;
+}
diff --git a/Omni/Bild/Python.nix b/Omni/Bild/Python.nix
new file mode 100644
index 0000000..6e4f390
--- /dev/null
+++ b/Omni/Bild/Python.nix
@@ -0,0 +1,17 @@
+_self: super: {
+ python3 = super.python3.override {
+ packageOverrides = _pyself: pysuper:
+ with pysuper.pkgs.python3Packages;
+ let dontCheck = p: p.overridePythonAttrs (_: { doCheck = false; });
+ in {
+ interegular = callPackage ./Deps/interegular.nix { };
+ llm-ollama = callPackage ./Deps/llm-ollama.nix { };
+ llm-sentence-transformers =
+ callPackage ./Deps/llm-sentence-transformers.nix { };
+ mypy = dontCheck pysuper.mypy;
+ outlines = callPackage ./Deps/outlines.nix { };
+ perscache = callPackage ./Deps/perscache.nix { };
+ tokenizers = dontCheck pysuper.tokenizers;
+ };
+ };
+}
diff --git a/Omni/Bild/Sources.json b/Omni/Bild/Sources.json
new file mode 100644
index 0000000..2939283
--- /dev/null
+++ b/Omni/Bild/Sources.json
@@ -0,0 +1,198 @@
+{
+ "clay": {
+ "branch": "master",
+ "description": "A CSS preprocessor as embedded Haskell.",
+ "homepage": "",
+ "owner": "sebastiaanvisser",
+ "repo": "clay",
+ "rev": "dcc4fc6d8b55af4814bd3f9bbb6d32e2fa2751a8",
+ "sha256": "1dm71z1q7yaq0kl2yb0vr0lsbd8byq5qkdb2kvr26jq48nfq2xdc",
+ "type": "tarball",
+ "url": "https://github.com/sebastiaanvisser/clay/archive/dcc4fc6d8b55af4814bd3f9bbb6d32e2fa2751a8.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz",
+ "version": "cc7729b1b42a79e261091ff7835f7fc2a7ae3cee"
+ },
+ "docopt": {
+ "branch": "main",
+ "description": "A command-line interface description language and parser that will make you smile",
+ "homepage": "http://docopt.org/",
+ "owner": "docopt",
+ "repo": "docopt.hs",
+ "rev": "47516acafeae3e1fdc447716e6ea05c2b918ff3a",
+ "sha256": "07skrfhzx51yn4qvig3ps34qra9s5g6m4k2z42h9ys0ypyk2wf8w",
+ "type": "tarball",
+ "url": "https://github.com/docopt/docopt.hs/archive/47516acafeae3e1fdc447716e6ea05c2b918ff3a.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz",
+ "version": "0.7.0.8"
+ },
+ "ghc-exactprint": {
+ "branch": "master",
+ "description": "GHC version of haskell-src-exts exactPrint",
+ "homepage": null,
+ "owner": "alanz",
+ "repo": "ghc-exactprint",
+ "rev": "3e70715a756c46761a3a6a086a6be5dee4e60d22",
+ "sha256": "1mhmk1555n7qr25iwbm8kbjs24c9j0q01j4m2kmz6zh7r1gjayxs",
+ "type": "tarball",
+ "url": "https://github.com/alanz/ghc-exactprint/archive/3e70715a756c46761a3a6a086a6be5dee4e60d22.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz",
+ "version": "0.6.3.4"
+ },
+ "guix": {
+ "branch": "master",
+ "repo": "https://git.savannah.gnu.org/git/guix.git",
+ "rev": "a25e0518954b48753ff44ad116d0a6fb47dfb6cb",
+ "type": "git",
+ "version": "2021-06-14-unstable"
+ },
+ "inspekt3d": {
+ "branch": "master",
+ "type": "git",
+ "repo": "https://git.sr.ht/~morgansmith/inspekt3d",
+ "rev": "703f52ccbfedad2bf5240bf8183d1b573c9d54ef"
+ },
+ "interegular": {
+ "branch": "master",
+ "description": "Allows to check regexes for overlaps. Based on greenery by @qntm.",
+ "homepage": null,
+ "owner": "MegaIng",
+ "repo": "interegular",
+ "rev": "v0.2.1",
+ "sha256": "14f3jvnczq6qay2qp4rxchbdhkj00qs8kpacl0nrxgr0785km36k",
+ "type": "tarball",
+ "url": "https://github.com/MegaIng/interegular/archive/v0.2.1.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "llm": {
+ "branch": "main",
+ "description": "Access large language models from the command-line",
+ "homepage": "https://llm.datasette.io",
+ "owner": "simonw",
+ "repo": "llm",
+ "rev": "0.13.1",
+ "sha256": "0305xpmigk219i2n1slgpz3jwvpx5pdp5s8dkjz85w75xivakbin",
+ "type": "tarball",
+ "url": "https://github.com/simonw/llm/archive/0.13.1.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz",
+ "version": "0.13.1"
+ },
+ "niv": {
+ "branch": "master",
+ "description": "Easy dependency management for Nix projects",
+ "homepage": "https://github.com/nmattia/niv",
+ "owner": "nmattia",
+ "repo": "niv",
+ "rev": "e80fc8fae87cc91f449533fca6b9cadf8be69e6c",
+ "sha256": "024hnxvqk8z5n2n54rj05l91q38g9y8nwvrj46xml13kjmg4shb3",
+ "type": "tarball",
+ "url": "https://github.com/nmattia/niv/archive/e80fc8fae87cc91f449533fca6b9cadf8be69e6c.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "nixos-23_05": {
+ "branch": "nixos-23.05",
+ "description": "Nix Packages collection & NixOS",
+ "homepage": "",
+ "owner": "nixos",
+ "repo": "nixpkgs",
+ "rev": "70bdadeb94ffc8806c0570eb5c2695ad29f0e421",
+ "sha256": "05cbl1k193c9la9xhlz4y6y8ijpb2mkaqrab30zij6z4kqgclsrd",
+ "type": "tarball",
+ "url": "https://github.com/nixos/nixpkgs/archive/70bdadeb94ffc8806c0570eb5c2695ad29f0e421.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "nixos-23_11": {
+ "branch": "nixos-23.11",
+ "description": "Nix Packages collection & NixOS",
+ "homepage": "",
+ "owner": "nixos",
+ "repo": "nixpkgs",
+ "rev": "219951b495fc2eac67b1456824cc1ec1fd2ee659",
+ "sha256": "065jy7qivlbdqmbvd7r9h97b23f21axmc4r7sqmq2h0j82rmymxv",
+ "type": "tarball",
+ "url": "https://github.com/nixos/nixpkgs/archive/219951b495fc2eac67b1456824cc1ec1fd2ee659.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "nixos-24_05": {
+ "branch": "nixos-24.05",
+ "description": "Nix Packages collection & NixOS",
+ "homepage": "",
+ "owner": "nixos",
+ "repo": "nixpkgs",
+ "rev": "a9b86fc2290b69375c5542b622088eb6eca2a7c3",
+ "sha256": "1mssfzy1nsansjmp5ckyl8vbk32va3abchpg19ljyak0xblxnjs1",
+ "type": "tarball",
+ "url": "https://github.com/nixos/nixpkgs/archive/a9b86fc2290b69375c5542b622088eb6eca2a7c3.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "nixos-mailserver": {
+ "repo": "https://gitlab.com/simple-nixos-mailserver/nixos-mailserver",
+ "rev": "f535d8123c4761b2ed8138f3d202ea710a334a1d",
+ "sha256": "0csx2i8p7gbis0n5aqpm57z5f9cd8n9yabq04bg1h4mkfcf7mpl6",
+ "type": "tarball",
+ "url": "https://gitlab.com/simple-nixos-mailserver/nixos-mailserver/-/archive/f535d8123c4761b2ed8138f3d202ea710a334a1d/nixos-mailserver-f535d8123c4761b2ed8138f3d202ea710a334a1d.tar.gz",
+ "url_template": "https://gitlab.com/simple-nixos-mailserver/nixos-mailserver/-/archive/<rev>/nixos-mailserver-<rev>.tar.gz",
+ "version": "master"
+ },
+ "nixos-unstable-small": {
+ "branch": "nixos-unstable-small",
+ "description": "Nix Packages collection & NixOS",
+ "homepage": "",
+ "owner": "nixos",
+ "repo": "nixpkgs",
+ "rev": "a5e6a9e979367ee14f65d9c38119c30272f8455f",
+ "sha256": "08yfk81kpsizdzlbi8whpaarb0w0rw9aynlrvhn5gr5dfpv9hbsf",
+ "type": "tarball",
+ "url": "https://github.com/nixos/nixpkgs/archive/a5e6a9e979367ee14f65d9c38119c30272f8455f.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "outlines": {
+ "branch": "main",
+ "description": "Generative Model Programming",
+ "homepage": "https://normal-computing.github.io/outlines/",
+ "owner": "normal-computing",
+ "repo": "outlines",
+ "rev": "0.0.8",
+ "sha256": "1yvx5c5kplmr56nffqcb6ssjnmlikkaw32hxl6i4b607v3s0s6jv",
+ "type": "tarball",
+ "url": "https://github.com/normal-computing/outlines/archive/0.0.8.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "perscache": {
+ "branch": "master",
+ "description": "An easy to use decorator for persistent memoization: like `functools.lrucache`, but results can be saved in any format to any storage.",
+ "homepage": null,
+ "owner": "leshchenko1979",
+ "repo": "perscache",
+ "rev": "0.6.1",
+ "sha256": "0j2775pjll4vw1wmxkjhnb5z6z83x5lhg89abj2d8ivd17n4rhjf",
+ "type": "tarball",
+ "url": "https://github.com/leshchenko1979/perscache/archive/0.6.1.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "radicale": {
+ "branch": "master",
+ "description": "A simple CalDAV (calendar) and CardDAV (contact) server.",
+ "homepage": "https://radicale.org",
+ "owner": "kozea",
+ "repo": "radicale",
+ "rev": "d7ce2f0b98589400b0e1718cfd7bb29b7ebeaebe",
+ "sha256": "08himwwwikhnn4amqzgbbqq323xhfy7yf5vaqczkm2fw6h1s3skg",
+ "type": "tarball",
+ "url": "https://github.com/kozea/radicale/archive/d7ce2f0b98589400b0e1718cfd7bb29b7ebeaebe.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
+ },
+ "regex-applicative": {
+ "branch": "master",
+ "description": "Regex-based parsing with applicative interface",
+ "homepage": "",
+ "owner": "feuerbach",
+ "repo": "regex-applicative",
+ "rev": "449519c38e65753345e9a008362c011cb7a0a4d9",
+ "revision": "449519c38e65753345e9a008362c011cb7a0a4d9",
+ "sha256": "1vdrhsjzij5dm7rn10sic5dv9574yb0lyhzfv9psh7b08dsj8g1k",
+ "type": "tarball",
+ "url": "https://github.com/feuerbach/regex-applicative/archive/449519c38e65753345e9a008362c011cb7a0a4d9.tar.gz",
+ "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz",
+ "version": "0.3.4"
+ }
+}
diff --git a/Omni/Bild/Sources.nix b/Omni/Bild/Sources.nix
new file mode 100644
index 0000000..f7af81e
--- /dev/null
+++ b/Omni/Bild/Sources.nix
@@ -0,0 +1,207 @@
+# This file has been generated by Niv.
+
+let
+
+ #
+ # The fetchers. fetch_<type> fetches specs of type <type>.
+ #
+
+ fetch_file = pkgs: name: spec:
+ let name' = sanitizeName name + "-src";
+ in if spec.builtin or true then
+ builtins_fetchurl {
+ inherit (spec) url sha256;
+ name = name';
+ }
+ else
+ pkgs.fetchurl {
+ inherit (spec) url sha256;
+ name = name';
+ };
+
+ fetch_tarball = pkgs: name: spec:
+ let name' = sanitizeName name + "-src";
+ in if spec.builtin or true then
+ builtins_fetchTarball {
+ name = name';
+ inherit (spec) url sha256;
+ }
+ else
+ pkgs.fetchzip {
+ name = name';
+ inherit (spec) url sha256;
+ };
+
+ fetch_git = name: spec:
+ let
+ ref = if spec ? ref then
+ spec.ref
+ else if spec ? branch then
+ "refs/heads/${spec.branch}"
+ else if spec ? tag then
+ "refs/tags/${spec.tag}"
+ else
+ abort
+ "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
+ in builtins.fetchGit {
+ url = spec.repo;
+ inherit (spec) rev;
+ inherit ref;
+ };
+
+ fetch_local = spec: spec.path;
+
+ fetch_builtin-tarball = name:
+ throw ''
+ [${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
+ $ niv modify ${name} -a type=tarball -a builtin=true'';
+
+ fetch_builtin-url = name:
+ throw ''
+ [${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
+ $ niv modify ${name} -a type=file -a builtin=true'';
+
+ #
+ # Various helpers
+ #
+
+ # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
+ sanitizeName = name:
+ (concatMapStrings (s: if builtins.isList s then "-" else s)
+ (builtins.split "[^[:alnum:]+._?=-]+"
+ ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)));
+
+ # The set of packages used when specs are fetched using non-builtins.
+ mkPkgs = sources: system:
+ let
+ sourcesNixpkgs = import
+ (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {
+ inherit system;
+ };
+ hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
+ hasThisAsNixpkgsPath = <nixpkgs> == ./.;
+ in if builtins.hasAttr "nixpkgs" sources then
+ sourcesNixpkgs
+ else if hasNixpkgsPath && !hasThisAsNixpkgsPath then
+ import <nixpkgs> { }
+ else
+ abort ''
+ Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
+ add a package called "nixpkgs" to your sources.json.
+ '';
+
+ # The actual fetching function.
+ fetch = pkgs: name: spec:
+
+ if !builtins.hasAttr "type" spec then
+ abort "ERROR: niv spec ${name} does not have a 'type' attribute"
+ else if spec.type == "file" then
+ fetch_file pkgs name spec
+ else if spec.type == "tarball" then
+ fetch_tarball pkgs name spec
+ else if spec.type == "git" then
+ fetch_git name spec
+ else if spec.type == "local" then
+ fetch_local spec
+ else if spec.type == "builtin-tarball" then
+ fetch_builtin-tarball name
+ else if spec.type == "builtin-url" then
+ fetch_builtin-url name
+ else
+ abort
+ "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
+
+ # If the environment variable NIV_OVERRIDE_${name} is set, then use
+ # the path directly as opposed to the fetched source.
+ replace = name: drv:
+ let
+ saneName = stringAsChars
+ (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
+ ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
+ in if ersatz == "" then
+ drv
+ else
+ # this turns the string into an actual Nix path (for both absolute and
+ # relative paths)
+ if builtins.substring 0 1 ersatz == "/" then
+ /. + ersatz
+ else
+ /. + builtins.getEnv "PWD" + "/${ersatz}";
+
+ # Ports of functions for older nix versions
+
+ # a Nix version of mapAttrs if the built-in doesn't exist
+ mapAttrs = builtins.mapAttrs or (f: set:
+ with builtins;
+ listToAttrs (map (attr: {
+ name = attr;
+ value = f attr set.${attr};
+ }) (attrNames set)));
+
+ # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
+ range = first: last:
+ if first > last then
+ [ ]
+ else
+ builtins.genList (n: first + n) (last - first + 1);
+
+ # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
+ stringToCharacters = s:
+ map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
+
+ # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
+ stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
+ concatMapStrings = f: list: concatStrings (map f list);
+ concatStrings = builtins.concatStringsSep "";
+
+ # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
+ optionalAttrs = cond: as: if cond then as else { };
+
+ # fetchTarball version that is compatible between all the versions of Nix
+ # deadnix: skip
+ builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
+ let inherit (builtins) lessThan nixVersion fetchTarball;
+ in if lessThan nixVersion "1.12" then
+ fetchTarball
+ ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+ else
+ fetchTarball attrs;
+
+ # fetchurl version that is compatible between all the versions of Nix
+ # deadnix: skip
+ builtins_fetchurl = { url, name ? null, sha256 }@attrs:
+ let inherit (builtins) lessThan nixVersion fetchurl;
+ in if lessThan nixVersion "1.12" then
+ fetchurl
+ ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
+ else
+ fetchurl attrs;
+
+ # Create the final "sources" from the config
+ mkSources = config:
+ mapAttrs (name: spec:
+ if builtins.hasAttr "outPath" spec then
+ abort
+ "The values in sources.json should not have an 'outPath' attribute"
+ else
+ spec // { outPath = replace name (fetch config.pkgs name spec); })
+ config.sources;
+
+ # The "config" used by the fetchers
+ mkConfig = { sourcesFile ?
+ if builtins.pathExists ./sources.json then ./sources.json else null
+ , sources ? if isNull sourcesFile then
+ { }
+ else
+ builtins.fromJSON (builtins.readFile sourcesFile)
+ , system ? builtins.currentSystem, pkgs ? mkPkgs sources system }: rec {
+ # The sources, i.e. the attribute set of spec name to spec
+ inherit sources;
+
+ # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
+ inherit pkgs;
+ };
+
+in mkSources (mkConfig { }) // {
+ __functor = _: settings: mkSources (mkConfig settings);
+}
diff --git a/Omni/Bot.nix b/Omni/Bot.nix
new file mode 100644
index 0000000..934ad9c
--- /dev/null
+++ b/Omni/Bot.nix
@@ -0,0 +1,61 @@
+{ options, lib, config, pkgs, ... }:
+
+let cfg = config.services.omnibot;
+in {
+ options.services.omnibot = {
+ enable = lib.mkEnableOption "Enable the omnibot service";
+ package = lib.mkOption {
+ type = lib.types.package;
+ description = "omnibot package to use";
+ };
+ server = lib.mkOption {
+ type = lib.types.str;
+ description = "server address to connect to";
+ };
+ channel = lib.mkOption {
+ type = lib.types.str;
+ description = "channel to join";
+ };
+ dataDir = lib.mkOption {
+ type = lib.types.str;
+ description = "where omnibot will start its ii tree";
+ };
+ };
+ config = lib.mkIf cfg.enable {
+ systemd.services = {
+ "omnibot-${cfg.server}-${cfg.channel}-ii" = {
+ wantedBy = [ "multi-user.target" ];
+ path = [ pkgs.ii ];
+ script = ''
+ ${pkgs.ii}/bin/ii ${cfg.dataDir}/irc -s ${cfg.server} -p 6667 -n omnibot
+ '';
+ postStart = ''
+ echo "/join ${cfg.channel}" > ${cfg.dataDir}/irc/${cfg.server}/in
+ '';
+ description = ''
+ omnibot ii tree for ${cfg.server}/${cfg.channel}
+ '';
+
+ };
+ "omnibot-${cfg.server}-${cfg.channel}" = {
+ path = [ cfg.package ];
+ wantedBy = [ "multi-user.target" ];
+ after = [ "omnibot-${cfg.server}-${cfg.channel}-ii.service" ];
+ script = ''
+ tail -f ${cfg.dataDir}/irc/${cfg.server}/${cfg.channel}/out \
+ | ${cfg.package}/bin/omnibot \
+ | tee -a ${cfg.dataDir}/irc/${cfg.server}/${cfg.channel}/in
+ '';
+ description = ''
+ omnibot for ${cfg.server}/${cfg.channel}
+ '';
+ serviceConfig = {
+ KillSignal = "INT";
+ Type = "simple";
+ Restart = "on-abort";
+ RestartSec = "1";
+ };
+ };
+ };
+ };
+}
diff --git a/Omni/Bot.scm b/Omni/Bot.scm
new file mode 100755
index 0000000..c526507
--- /dev/null
+++ b/Omni/Bot.scm
@@ -0,0 +1,59 @@
+;; : out omnibot
+;;
+;; Usage with ii:
+;;
+;; tail -f \#omni/out | guile -L $CODEROOT -s Omni/Bot.scm
+;;
+(define-module (Omni Bot) #:export (main))
+
+(import (ice-9 rdelim))
+(import (ice-9 match))
+(import (ice-9 regex))
+(import (ice-9 receive))
+(import (bs core))
+(import (prefix (bs string) string.))
+
+(define (log msg)
+ (display msg (current-error-port)))
+
+(define (is-command? msg)
+ (string.prefix? msg "omnibot:"))
+
+(define (parse-line line)
+ (if (eof-object? line)
+ (exit)
+ (let ([matches (regexp-exec
+ (make-regexp "<(\\S*)>(.*)" regexp/extended)
+ (string-drop line 11))])
+ (if matches
+ `(user
+ ,(match:substring matches 1)
+ ,(string.lstrip (match:substring matches 2) #\space))
+ `(system ,(string-drop line 11))))))
+
+(define (dispatch user msg)
+ (let ([msg (-> msg
+ (string-drop (string-length "omnibot:"))
+ (string.lstrip #\space))])
+ (cond
+ ((equal? msg "hi")
+ (display (fmt "~a: well, hello!" user)))
+
+ (else
+ (display (fmt "command not understood: ~a" msg))))))
+
+(define (main args)
+ (while #t
+ (match (parse-line (read-line))
+ [('user user msg)
+ (if (is-command? msg)
+ (dispatch user msg)
+ (begin
+ (log (fmt "user: ~a " user))
+ (log (fmt "message: ~a" msg))))]
+
+ [('system msg)
+ (log (fmt "system: ~a" msg))])
+
+ (newline)
+ (force-output)))
diff --git a/Omni/Ci.sh b/Omni/Ci.sh
new file mode 100755
index 0000000..609e9aa
--- /dev/null
+++ b/Omni/Ci.sh
@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+#
+# A simple ci that saves its results in a git note, formatted according to
+# RFC-2822, more or less.
+#
+# To run this manually, exec the script. It will by default run the tests for
+# HEAD, whatever you currently have checked out.
+#
+# It would be cool to use a zero-knowledge proof mechanism here to prove that
+# so-and-so ran the tests, but I'll have to research how to do that.
+#
+# ensure we don't exit on bild failure, only on CI script error
+ set +e
+ set -u
+##
+ [[ -n $(git status -s) ]] && { echo fail: dirty worktree; exit 1; }
+##
+ at=$(date -R)
+ user=$(git config --get user.name)
+ mail=$(git config --get user.email)
+##
+ commit=$(git notes --ref=ci show HEAD || true)
+ if [[ -n "$commit" ]]
+ then
+ if grep -q "Lint-is: good" <<< "$commit"
+ then
+ exit 0
+ fi
+ if grep -q "Test-is: good" <<< "$commit"
+ then
+ exit 0
+ fi
+ fi
+##
+ runlint="$CODEROOT"/_/bin/lint
+ [[ ! -f "$runlint" ]] && bild "${BILD_ARGS:-""}" "${CODEROOT:?}"/Omni/Lint.hs
+ if "$runlint" "${CODEROOT:?}"/**/*
+ then
+ lint_result="good"
+ else
+ lint_result="fail"
+ fi
+##
+ if bild "${BILD_ARGS:-""}" --test "${CODEROOT:?}"/**/*
+ then
+ test_result="good"
+ else
+ test_result="fail"
+ fi
+##
+ read -r -d '' note <<EOF
+Lint-is: $lint_result
+Test-is: $test_result
+Test-by: $user <$mail>
+Test-at: $at
+EOF
+##
+ git notes --ref=ci append -m "$note"
+##
+# exit 1 if failure
+ [[ ! "$lint_result" == "fail" && ! "$test_result" == "fail" ]]
+##
diff --git a/Omni/Cli.hs b/Omni/Cli.hs
new file mode 100644
index 0000000..8ace2a7
--- /dev/null
+++ b/Omni/Cli.hs
@@ -0,0 +1,61 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+module Omni.Cli
+ ( Plan (..),
+ main,
+ Docopt.Docopt (..),
+ Docopt.Arguments,
+ Docopt.argument,
+ Docopt.docopt,
+ Docopt.getAllArgs,
+ Docopt.getArg,
+ Docopt.getArgWithDefault,
+ Docopt.longOption,
+ Docopt.shortOption,
+ Docopt.command,
+ has,
+ )
+where
+
+import Alpha
+import qualified Omni.Test as Test
+import qualified System.Console.Docopt as Docopt
+import qualified System.Environment as Environment
+
+-- | Plan is the main data structure that describes a CLI program. It's not the
+-- best name, but it works. This type is parameterized with `cfg` so you can
+-- load configuration from the environment and pass it into your Plan.
+data Plan cfg = Plan
+ { -- | Usage info, shows when given --help
+ help :: Docopt.Docopt,
+ -- | The main function takes arguments and produces effects. Maybe it should
+ -- also take `cfg` as an argument?
+ move :: Docopt.Arguments -> IO (),
+ -- | The test suite for the gram, invoked when 'test' is passed as the first
+ -- argument to the program
+ test :: Test.Tree,
+ -- | Function for cleaning up any files or resources, presumably on
+ -- shutdown. Can be just `pure` if you have nothing to tidy.
+ tidy :: cfg -> IO ()
+ }
+
+-- | The entrypoint for CLI programs, use this in your own `main`.
+main :: Plan cfg -> IO ()
+main Plan {..} =
+ Environment.getArgs
+ /> Docopt.parseArgs help
+ +> \case
+ Left err -> panic <| show err
+ Right args ->
+ if args `has` Docopt.command "test"
+ then Test.run test
+ else
+ if args `has` Docopt.longOption "help" || args `has` Docopt.shortOption 'h'
+ then Docopt.exitWithUsage help
+ else move args
+
+has :: Docopt.Arguments -> Docopt.Option -> Bool
+has = Docopt.isPresent
diff --git a/Omni/Cloud.nix b/Omni/Cloud.nix
new file mode 100644
index 0000000..08d1ba2
--- /dev/null
+++ b/Omni/Cloud.nix
@@ -0,0 +1,25 @@
+{ bild }:
+# Cloud infrastructure, always online. Mostly for messaging-related stuff.
+
+bild.os {
+ imports = [
+ ./OsBase.nix
+ ./Packages.nix
+ ./Users.nix
+ ./Cloud/Chat.nix
+ ./Cloud/Comms.nix
+ ./Cloud/Git.nix
+ ./Cloud/Hardware.nix
+ ./Cloud/Hub.nix
+ ./Cloud/Mail.nix
+ ./Cloud/Networking.nix
+ ./Cloud/Web.nix
+ ./Cloud/Znc.nix
+ "${bild.sources.nixos-mailserver}"
+ # ./Nixpert.nix
+ ];
+ networking.hostName = "simatime";
+ networking.domain = "simatime.com";
+ # the datacenter for this VM is in NYC
+ time.timeZone = "America/New_York";
+}
diff --git a/Omni/Cloud/Chat.nix b/Omni/Cloud/Chat.nix
new file mode 100644
index 0000000..7f86621
--- /dev/null
+++ b/Omni/Cloud/Chat.nix
@@ -0,0 +1,94 @@
+{ config, pkgs, ... }:
+#
+# a homeserver for matrix.org.
+#
+# this uses the config.networking.domain as the ACME host. be sure to add the
+# fqdn and element subdomains to security.acme.certs.<name>.extraDomainNames
+#
+# - nixos manual: https://nixos.org/nixos/manual/index.html#module-services-matrix
+#
+# to create new users:
+#
+# nix run nixpkgs.matrix-synapse
+# register_new_matrix_user -k <registration_shared_secret> http://localhost:<matrix_port>
+#
+let
+ fqdn = "matrix.${config.networking.domain}";
+ element = "chat.${config.networking.domain}";
+ matrix_port = 8448;
+in {
+ # matrix-synapse server. for what the settings mean, see:
+ # https://nixos.org/nixos/manual/index.html#module-services-matrix
+ #
+ services.matrix-synapse = {
+ enable = false;
+ settings.server_name = config.networking.domain;
+ #registration_shared_secret = "AkGRWSQLga3RoKRFnHhKoeCEIeZzu31y4TRzMRkMyRbBnETkVTSxilf24qySLzQn";
+ settings.listeners = [{
+ port = matrix_port;
+ bind_address = "::1";
+ type = "http";
+ tls = false;
+ x_forwarded = true;
+ resources = [{
+ names = [ "client" "federation" ];
+ compress = false;
+ }];
+ }];
+ };
+ # matrix needs a database
+ #
+ services.postgresql.enable = true;
+ # web proxy for the matrix server
+ #
+ services.nginx = {
+ enable = true;
+ recommendedTlsSettings = true;
+ recommendedOptimisation = true;
+ recommendedGzipSettings = true;
+ recommendedProxySettings = true;
+ virtualHosts = {
+ # route to matrix-synapse
+ "${config.networking.domain}" = {
+ locations."= /.well-known/matrix/server".extraConfig =
+ let server = { "m.server" = "${fqdn}:443"; };
+ in ''
+ add_header Content-Type application/json;
+ return 200 '${builtins.toJSON server}';
+ '';
+ locations."= /.well-known/matrix/client".extraConfig = let
+ client = {
+ "m.homeserver" = { "base_url" = "https://${fqdn}"; };
+ "m.identity_server" = { "base_url" = "https://vector.im"; };
+ };
+ in ''
+ add_header Content-Type application/json;
+ add_header Access-Control-Allow-Origin *;
+ return 200 '${builtins.toJSON client}';
+ '';
+ };
+ # reverse proxy for matrix client-server and server-server communication
+ "${fqdn}" = {
+ forceSSL = true;
+ useACMEHost = config.networking.domain;
+ locations."/".extraConfig = ''
+ return 404;
+ '';
+ locations."/_matrix" = {
+ proxyPass = "http://[::1]:${toString matrix_port}";
+ };
+ };
+ };
+ };
+ # matrix client, available at chat.simatime.com
+ #
+ # note that element and matrix-synapse must be on separate fqdn's to
+ # protect from XSS attacks:
+ # https://github.com/vector-im/element-web#important-security-note
+ #
+ services.nginx.virtualHosts."${element}" = {
+ useACMEHost = config.networking.domain;
+ forceSSL = true;
+ root = pkgs.element-web;
+ };
+}
diff --git a/Omni/Cloud/Comms.nix b/Omni/Cloud/Comms.nix
new file mode 100644
index 0000000..bf7a763
--- /dev/null
+++ b/Omni/Cloud/Comms.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+ imports = [ ./Comms/Xmpp.nix ./Comms/Mumble.nix ];
+}
diff --git a/Omni/Cloud/Comms/Coturn.nix b/Omni/Cloud/Comms/Coturn.nix
new file mode 100644
index 0000000..93093f0
--- /dev/null
+++ b/Omni/Cloud/Comms/Coturn.nix
@@ -0,0 +1,10 @@
+{ config, ... }:
+
+{
+ services.coturn = {
+ enable = true;
+ cert = "/var/lib/acme/${config.networking.domain}/fullchain.pem";
+ pkey = "/var/lib/acme/${config.networking.domain}/key.pem";
+ cli-ip = "127.0.0.1";
+ };
+}
diff --git a/Omni/Cloud/Comms/Jitsi.nix b/Omni/Cloud/Comms/Jitsi.nix
new file mode 100644
index 0000000..17aeced
--- /dev/null
+++ b/Omni/Cloud/Comms/Jitsi.nix
@@ -0,0 +1,17 @@
+{ config, ... }:
+
+{
+ services.jitsi-meet = {
+ enable = true;
+ config = {
+ enableWelcomePage = false;
+ defaultLanguage = "en";
+ };
+
+ prosody.enable = true;
+ nginx.enable = true;
+ jibri.enable = false;
+ jicofo.enable = false;
+ videobridge.enable = false;
+ };
+}
diff --git a/Omni/Cloud/Comms/Mumble.nix b/Omni/Cloud/Comms/Mumble.nix
new file mode 100644
index 0000000..66d21a5
--- /dev/null
+++ b/Omni/Cloud/Comms/Mumble.nix
@@ -0,0 +1,25 @@
+{ config, ... }:
+
+# mumble and related services
+let ports = import ../Ports.nix;
+in {
+ services.murmur = {
+ enable = true;
+ openFirewall = true;
+ environmentFile = "/var/lib/murmur/murmurd.env";
+ registerHostname = config.networking.domain;
+ registerName = config.networking.domain;
+ };
+
+ services.botamusique = {
+ enable = true;
+ settings = {
+ webinterface = {
+ enabled = true;
+ listening_addr = "127.0.0.1";
+ listening_port = ports.botamusique;
+ };
+ radio = { lofi = "https://live.hunter.fm/lofi_high"; };
+ };
+ };
+}
diff --git a/Omni/Cloud/Comms/Xmpp.nix b/Omni/Cloud/Comms/Xmpp.nix
new file mode 100644
index 0000000..ad8649b
--- /dev/null
+++ b/Omni/Cloud/Comms/Xmpp.nix
@@ -0,0 +1,210 @@
+{ config, pkgs, ... }:
+#
+# xmpp chat service
+#
+let
+ rootDomain = config.networking.domain; # simatime.com
+ ssl = {
+ cert = "/var/lib/acme/${rootDomain}/fullchain.pem";
+ key = "/var/lib/acme/${rootDomain}/key.pem";
+ };
+in {
+ networking.firewall.allowedTCPPorts = [
+ # https://prosody.im/doc/ports
+ 5000 # file transfer
+ 5222 # client connections
+ 5269 # server-to-server
+ 5280 # http
+ 5281 # https
+ 5347 # external components
+ 5582 # telnet console
+ ];
+
+ services.prosody = {
+ enable = true;
+ package =
+ pkgs.prosody.override { withCommunityModules = [ "conversejs" ]; };
+
+ # when i learn how to use security.acme better, and use separate certs, then i
+ # can fix this group
+ group = "nginx";
+ admins = [ "bsima@${rootDomain}" ];
+ allowRegistration = true;
+ inherit ssl;
+ uploadHttp = {
+ domain = "upload.${rootDomain}";
+ uploadExpireAfter = toString (60 * 60 * 24 * 30); # 30 days, as seconds
+ };
+
+ modules = {
+ announce = true;
+ blocklist = true;
+ bookmarks = true;
+ bosh = true;
+ carbons = true;
+ cloud_notify = true;
+ csi = true;
+ dialback = true;
+ disco = true;
+ groups = true;
+ http_files = false; # hm, look into this
+ motd = true;
+ pep = true;
+ ping = true;
+ private = true;
+ proxy65 = true;
+ register = true;
+ roster = true;
+ server_contact_info = true;
+ smacks = true;
+ vcard = true;
+ watchregistrations = true;
+ websocket = true;
+ welcome = true;
+ };
+
+ extraConfig = ''
+ conversejs_options = {
+ allow_registration = true;
+ bosh_service_url = "https://${rootDomain}/http-bind";
+ debug = true;
+ loglevel = "debug";
+ -- default_domain = "${rootDomain}";
+ -- domain_placeholder = "${rootDomain}";
+ -- jid = "${rootDomain}";
+ -- keepalive = true;
+ -- registration_domain = "${rootDomain}";
+ websocket_url = "wss://${rootDomain}/xmpp-websocket";
+ }
+
+ cross_domain_websocket = { "https://${rootDomain}", "https://anon.${rootDomain}" }
+ cross_domain_bosh = false; -- handle this with nginx
+ consider_bosh_secure = true;
+
+ -- this is a virtualhost that allows anonymous authentication. use this
+ -- for a public lobby. the nix module doesn't support 'authentication'
+ -- so i have to do this here.
+ VirtualHost "anon.${rootDomain}"
+ authentication = "anonymous"
+ ssl = {
+ cafile = "/etc/ssl/certs/ca-bundle.crt";
+ key = "${ssl.key}";
+ certificate = "${ssl.cert}";
+ };
+ '';
+
+ muc = [
+ {
+ domain = "conference.${rootDomain}";
+ maxHistoryMessages = 10000;
+ name = "Chat Rooms";
+ restrictRoomCreation = "admin";
+ roomDefaultHistoryLength = 20;
+ roomDefaultMembersOnly = true;
+ roomDefaultModerated = true;
+ roomDefaultPublic = false;
+ }
+ {
+ domain = "chat.${rootDomain}";
+ maxHistoryMessages = 10000;
+ name = "Chat Rooms";
+ restrictRoomCreation = false;
+ roomDefaultHistoryLength = 200;
+ roomDefaultMembersOnly = false;
+ roomDefaultModerated = false;
+ roomDefaultPublic = true;
+ roomDefaultPublicJids = true;
+ }
+ ];
+
+ virtualHosts = {
+ "${rootDomain}" = {
+ domain = "${rootDomain}";
+ enabled = true;
+ inherit ssl;
+ };
+ };
+ };
+
+ services.prosody-filer = { enable = true; };
+
+ services.nginx.virtualHosts."${rootDomain}".locations = {
+ "/http-bind" = {
+ proxyPass = "https://${rootDomain}:5281/http-bind";
+ extraConfig = ''
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_buffering off;
+ add_header Access-Control-Allow-Origin "*";
+ '';
+ };
+
+ "/xmpp-websocket" = {
+ proxyPass = "https://${rootDomain}:5281/xmpp-websocket";
+ extraConfig = ''
+ proxy_http_version 1.1;
+ proxy_buffering off;
+ proxy_set_header Host $host;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+ proxy_read_timeout 86400;
+ add_header Access-Control-Allow-Origin "*";
+ '';
+ };
+
+ "/chat" = {
+ proxyPass = "https://${rootDomain}:5281/conversejs";
+ extraConfig = ''
+ add_header Access-Control-Allow-Origin "*";
+ '';
+ };
+ };
+
+ services.nginx.virtualHosts."anon.${rootDomain}" = {
+ useACMEHost = "${rootDomain}";
+ forceSSL = true;
+ locations = {
+ "/http-bind" = {
+ proxyPass = "https://anon.${rootDomain}:5281/http-bind";
+ extraConfig = ''
+ proxy_set_header Host $host;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_buffering off;
+ if ($request_method ~* "(GET|POST)") {
+ add_header Access-Control-Allow-Origin "*";
+ }
+ if ($request_method = OPTIONS) {
+ add_header Access-Control-Allow-Origin "*";
+ add_header Access-Control-Allow-Methods "GET, POST, OPTIONS, HEAD";
+ add_header Access-Control-Allow-Headers "Authorization, Origin, X-Requested-With, Content-Type, Accept";
+ return 200;
+ }
+ '';
+ };
+ };
+ };
+
+ users.users.nginx.extraGroups = [ "prosody" ];
+
+ security.acme.certs.${rootDomain}.extraDomainNames = [
+ # these stopped working idk why
+ #"upload.${rootDomain}"
+ #"conference.${rootDomain}"
+ "anon.${rootDomain}"
+ "chat.${rootDomain}"
+ ];
+
+ #security.acme.certs.prosody = {
+ # domain = "${domain}";
+ # group = "prosody";
+ # dnsProvider = "rfc2136";
+ # #credentialsFile = config.secrets.files.dns_creds.path;
+ # postRun = "systemctl restart prosody";
+ # extraDomainNames = [
+ # domain
+ # "upload.${domain}"
+ # ];
+ #};
+}
diff --git a/Omni/Cloud/Git.nix b/Omni/Cloud/Git.nix
new file mode 100644
index 0000000..bc97d23
--- /dev/null
+++ b/Omni/Cloud/Git.nix
@@ -0,0 +1,119 @@
+{ lib, config, pkgs, ... }:
+
+let
+ inherit (config.networking) domain;
+ root = "/var/git";
+ ports = import ./Ports.nix;
+in {
+ services = {
+ cgit.cloud = {
+ enable = true;
+ nginx.location = "/git";
+ nginx.virtualHost = "/git"; # NOTE(review): virtualHost usually expects a vhost name, not a path — verify against the cgit module
+ scanPath = "/var/git/repositories";
+ settings = {
+ strict-export = "git-daemon-export-ok";
+ css = "/git/cgit.css";
+ logo = "/git/cgit.png";
+ root-title = "ben's git repos";
+ root-desc = "building";
+ enable-git-config = 1;
+ clone-url = lib.strings.concatStringsSep " " [
+ "https://$HTTP_HOST/git/$CGIT_REPO_URL"
+ "git://$HTTP_HOST/$CGIT_REPO_URL"
+ "git@$HTTP_HOST:$CGIT_REPO_URL"
+ ];
+ };
+ };
+ gitolite = {
+ enable = true;
+ enableGitAnnex = true;
+ dataDir = root;
+ user = "git";
+ group = "git";
+ # the umask is necessary to give the git group read permissions, otherwise
+ # git-daemon et al can't access the repos
+ extraGitoliteRc = ''
+ $RC{SITE_INFO} = 'a computer is a bicycle for the mind.';
+ $RC{UMASK} = 0027;
+ $RC{GIT_CONFIG_KEYS} = '.*';
+ '';
+ adminPubkey = lib.trivial.pipe ../Keys/Ben.pub [
+ builtins.readFile
+ (lib.strings.splitString "\n")
+ lib.lists.head
+ ];
+ # commonHooks = [ ./git-hooks ];
+ };
+ gitDaemon = {
+ enable = true;
+ basePath = "${root}/repositories";
+ listenAddress = domain;
+ user = "gitDaemon";
+ group = "gitDaemon";
+ };
+ gerrit = {
+ enable = false;
+ builtinPlugins = [
+ "commit-message-length-validator"
+ "delete-project"
+ "plugin-manager"
+ "singleusergroup"
+ "reviewnotes"
+ ];
+ jvmOpts = [
+ # https://stackoverflow.com/a/71817404
+ "--add-opens"
+ "java.base/java.lang=ALL-UNNAMED"
+ "--add-opens"
+ "java.base/java.util=ALL-UNNAMED"
+ ];
+ plugins = [
+ (pkgs.fetchurl {
+ url =
+ "https://github.com/davido/gerrit-oauth-provider/releases/download/v3.5.1/gerrit-oauth-provider.jar";
+ sha256 = "sha256-MS3ElMRUrBX4miiflepMETRK3SaASqpqO3nUn9kq3Gk=";
+ })
+ ];
+ listenAddress = "[::]:${toString ports.gerrit}";
+ serverId = "cc6cca15-2a7e-4946-89b9-67f5d6d996ae";
+ settings = {
+ auth.type = "OAUTH";
+ auth.gitBasicAuthPolicy = "HTTP";
+ download.command = [ "checkout" "cherry_pick" "pull" "format_patch" ];
+ gerrit.canonicalWebUrl = "https://gerrit.${domain}";
+ httpd.listenUrl =
+ "proxy-https://${config.services.gerrit.listenAddress}";
+ plugin.gerrit-oauth-provider-github-oauth = {
+ root-url = "https://github.com";
+ client-id = "e48084aa0eebe31a2b18";
+ };
+ sshd.advertisedAddress =
+ "gerrit.${domain}:${toString ports.gerrit-ssh}";
+ sshd.listenAddress = "[::]:${toString ports.gerrit-ssh}";
+ };
+ };
+ nginx.virtualHosts."gerrit.${domain}" = {
+ forceSSL = true;
+ useACMEHost = domain;
+ locations."/" = {
+ proxyPass = "http://localhost:${toString ports.gerrit}";
+ extraConfig = ''
+ proxy_set_header X-Forwarded-For $remote_addr;
+ '';
+ };
+ };
+ };
+ # need to specify that these users can access git files by being part of the
+ # git group
+ users.users = {
+ gitDaemon = {
+ group = "gitDaemon";
+ isSystemUser = true;
+ description = "Git daemon user";
+ extraGroups = [ "git" ];
+ };
+ "nginx".extraGroups = [ "git" ];
+ };
+ users.groups = { gitDaemon = { }; };
+}
diff --git a/Omni/Cloud/Gmnisrv.nix b/Omni/Cloud/Gmnisrv.nix
new file mode 100644
index 0000000..e2a66f6
--- /dev/null
+++ b/Omni/Cloud/Gmnisrv.nix
@@ -0,0 +1,40 @@
+{ lib, config, pkgs, ... }:
+
+let cfg = config.services.gmnisrv;
+in {
+ meta.maintainers = [ lib.maintainers.bsima ];
+ options.services.gmnisrv = {
+ enable = lib.mkEnableOption "Enable the gmnisrv service";
+ listen = lib.mkOption {
+ description = "Addresses and ports on which to listen.";
+ default = lib.mkDefault "0.0.0.0:1965 [::]:1965";
+ };
+ settings = lib.mkOption {
+ # type = cfgFormat.type;
+ description = ''
+ Configuration for gmnisrv. See gmnisrv.ini(5) for supported settings.
+ '';
+ default = {
+ ":tls" = { "store" = lib.mkDefault "${cfg.dataDir}/certs"; };
+ };
+ };
+ dataDir = lib.mkOption {
+ type = lib.types.str;
+ default = "/var/lib/gemini";
+ description = "Where gmnisrv should store certs and other data.";
+ };
+ };
+ config = lib.mkIf cfg.enable {
+ systemd.services.gmnisrv = {
+ description = "gmnisrv service";
+ wantedBy = [ "multi-user.target" ];
+ after = [ "network-online.target" ];
+ script = let
+ ini = lib.generators.toINIWithGlobalSection { } {
+ globalSection = { "listen" = cfg.listen; };
+ sections = cfg.settings;
+ };
+ in "${pkgs.gmnisrv}/bin/gmnisrv -C ${pkgs.writeText "gmnisrv.ini" ini}";
+ };
+ };
+}
diff --git a/Omni/Cloud/Grocy.nix b/Omni/Cloud/Grocy.nix
new file mode 100644
index 0000000..697c2f1
--- /dev/null
+++ b/Omni/Cloud/Grocy.nix
@@ -0,0 +1,17 @@
+{ ... }:
+
+{
+ services.grocy = {
+ enable = true;
+ hostName = "grocy.simatime.com";
+ nginx.enableSSL = false; # set in Web.nix
+ settings = {
+ calendar = {
+ firstDayOfWeek = 1;
+ showWeekNumber = true;
+ };
+ currency = "USD";
+ culture = "en";
+ };
+ };
+}
diff --git a/Omni/Cloud/Hardware.nix b/Omni/Cloud/Hardware.nix
new file mode 100644
index 0000000..8fdbd4e
--- /dev/null
+++ b/Omni/Cloud/Hardware.nix
@@ -0,0 +1,9 @@
+{ modulesPath, ... }: {
+ imports = [ (modulesPath + "/profiles/qemu-guest.nix") ];
+ boot.loader.grub.device = "/dev/vda";
+ boot.initrd.kernelModules = [ "nvme" ];
+ fileSystems."/" = {
+ device = "/dev/vda1";
+ fsType = "ext4";
+ };
+}
diff --git a/Omni/Cloud/Hub.nix b/Omni/Cloud/Hub.nix
new file mode 100644
index 0000000..39bbdd0
--- /dev/null
+++ b/Omni/Cloud/Hub.nix
@@ -0,0 +1,57 @@
+{ lib, pkgs, config, ... }:
+
+let
+ ssl = {
+ cert = "/var/lib/acme/${config.networking.domain}/fullchain.pem";
+ key = "/var/lib/acme/${config.networking.domain}/key.pem";
+ };
+ ports = import ./Ports.nix;
+ mkPlugin = path: settings: {
+ plugin = "${pkgs.uhub}/plugins/${path}";
+ inherit settings;
+ };
+ motdFile = pkgs.writeText "motd" ''
+ Meshheads write code.
+ '';
+ rulesFile = pkgs.writeText "rules" ''
+ 1. x
+ 2. (λx.M)
+ 3. (M N)
+ 4. Profit.
+ '';
+in {
+ networking.firewall.allowedUDPPorts = [ ports.meshhub ];
+ networking.firewall.allowedTCPPorts = [ ports.meshhub ];
+
+ services.uhub = {
+ "meshhub" = {
+ enable = true;
+ enableTLS = false;
+ settings = {
+ hub_name = "meshhub";
+ hub_description = "vaporware is dead. long live vaporware";
+ server_port = toString ports.meshhub;
+ server_bind_addr = "any";
+ max_users = 14; # we only have 14 users in meshheads chat
+ tls_private_key = ssl.key;
+ tls_certificate = ssl.cert;
+ tls_enable = "no";
+ tls_require = "no";
+ };
+ plugins = lib.attrsets.mapAttrsToList mkPlugin {
+ #"mod_logging.so" = {
+ # file = "/var/log/uhub/meshhub.log";
+ #};
+ "mod_welcome.so" = {
+ motd = "${motdFile}";
+ rules = "${rulesFile}";
+ };
+ "mod_chat_history.so" = {
+ history_max = "200";
+ history_default = "10";
+ history_connect = "5";
+ };
+ };
+ };
+ };
+}
diff --git a/Omni/Cloud/Mail.nix b/Omni/Cloud/Mail.nix
new file mode 100644
index 0000000..fa99cf3
--- /dev/null
+++ b/Omni/Cloud/Mail.nix
@@ -0,0 +1,54 @@
+{ ... }:
+
+/* Known issues:
+
+ - when the acme cert gets refreshed, you need to manually restart dovecot
+ - when restarting dovecot, it might hang, in that case do:
+ systemctl --job-mode=ignore-dependencies restart dovecot2 postfix
+*/
+
+{
+ mailserver = {
+ enable = true;
+ monitoring = {
+ enable = false;
+ alertAddress = "bsima@me.com";
+ };
+ fqdn = "simatime.com";
+ domains = [ "simatime.com" "bsima.me" ];
+ certificateScheme = 3; # let's encrypt
+ enableImap = true;
+ enablePop3 = true;
+ enableImapSsl = true;
+ enablePop3Ssl = true;
+ enableManageSieve = true;
+ virusScanning = false; # ur on ur own
+ localDnsResolver = true;
+
+ loginAccounts = {
+ "ben@simatime.com" = {
+ hashedPasswordFile = "/home/ben/hashed-mail-password";
+ aliases = [
+ # my default email
+ "ben@bsima.me"
+ # admin stuff
+ "postmaster@simatime.com"
+ "abuse@simatime.com"
+ ];
+ catchAll = [ "simatime.com" "bsima.me" ];
+ quota = "10G";
+ };
+ "dev@simatime.com" = {
+ hashedPasswordFile = "/home/ben/hashed-mail-password";
+ aliases = [ "dev@bsima.me" ];
+ quota = "10G";
+ };
+ "nick@simatime.com" = {
+ hashedPassword =
+ "$6$31P/Mg8k8Pezy1e$Fn1tDyssf.1EgxmLYFsQpSq6RP4wbEvP/UlBlXQhyKA9FnmFtJteXsbJM1naa8Kyylo8vZM9zmeoSthHS1slA1";
+ aliases = [ "nicolai@simatime.com" ];
+ quota = "1G";
+ };
+ };
+ };
+}
diff --git a/Omni/Cloud/Networking.nix b/Omni/Cloud/Networking.nix
new file mode 100644
index 0000000..1c1f832
--- /dev/null
+++ b/Omni/Cloud/Networking.nix
@@ -0,0 +1,48 @@
+{ lib, ... }: {
+ # This file was populated at runtime with the networking
+ # details gathered from the active system.
+ networking = {
+ nameservers = [ "8.8.8.8" ];
+ defaultGateway = "143.198.112.1";
+ defaultGateway6 = "2604:a880:400:d0::1";
+ dhcpcd.enable = false;
+ usePredictableInterfaceNames = lib.mkForce false;
+ interfaces = {
+ eth0 = {
+ ipv4.addresses = [
+ {
+ address = "143.198.118.179";
+ prefixLength = 20;
+ }
+ {
+ address = "10.10.0.7";
+ prefixLength = 16;
+ }
+ ];
+ ipv6.addresses = [
+ {
+ address = "2604:a880:400:d0::19f1:7001";
+ prefixLength = 64;
+ }
+ {
+ address = "fe80::a06e:26ff:fee1:941";
+ prefixLength = 64;
+ }
+ ];
+ ipv4.routes = [{
+ address = "143.198.112.1";
+ prefixLength = 32;
+ }];
+ ipv6.routes = [{
+ address = "2604:a880:400:d0::1";
+ prefixLength = 128;
+ }];
+ };
+
+ };
+ };
+ services.udev.extraRules = ''
+ ATTR{address}=="a2:6e:26:e1:09:41", NAME="eth0"
+ ATTR{address}=="f2:4e:52:1a:72:ef", NAME="eth1"
+ '';
+}
diff --git a/Omni/Cloud/NostrRelay.nix b/Omni/Cloud/NostrRelay.nix
new file mode 100644
index 0000000..0be8a6f
--- /dev/null
+++ b/Omni/Cloud/NostrRelay.nix
@@ -0,0 +1,39 @@
+{ config, pkgs, ... }:
+
+let
+ ports = import ./Ports.nix;
+ dataDir = "/var/lib/nostr-rs-relay";
+ # https://git.sr.ht/~gheartsfield/nostr-rs-relay/tree/master/config.toml
+ cfg = pkgs.writeText "config.toml" ''
+ [info]
+ name = "simatime"
+ relay_url = "wss://nostr.simatime.com"
+ description = "yet another nostr relay"
+
+ [database]
+ data_directory = "/var/lib/nostr-rs-relay"
+
+ [network]
+ address = "0.0.0.0"
+ port = ${toString ports.nostr-relay}
+ '';
+in {
+ config.systemd.services.nostr-relay = {
+ path = [ pkgs.nostr-rs-relay ];
+ wantedBy = [ "multi-user.target" ];
+ preStart = ''
+ mkdir -p ${dataDir}
+ cat "${cfg}" > ${dataDir}/config.toml
+ '';
+ script = "nostr-rs-relay --db ${dataDir}";
+ serviceConfig = {
+ Environment =
+ [ "RUST_LOG=info,nostr_rs_relay=info" "APP_DATA=${dataDir}" ];
+ WorkingDirectory = dataDir;
+ KillSignal = "INT";
+ Type = "simple";
+ Restart = "always";
+ RestartSec = "1";
+ };
+ };
+}
diff --git a/Omni/Cloud/Ports.nix b/Omni/Cloud/Ports.nix
new file mode 100644
index 0000000..5b8446c
--- /dev/null
+++ b/Omni/Cloud/Ports.nix
@@ -0,0 +1,46 @@
+{
+ bensIp = "24.140.205.252"; # not a port, but it's convenient
+ barrier = 24800;
+ bitcoind = 8333;
+ bitcoind-rpc = 8332;
+ botamusique = 8181;
+ dandel-rovbur = 8080;
+ delugeWeb = 8112;
+ dns = 53;
+ dragons = 8095;
+ et = 2022;
+ gemini = 1965;
+ gerrit = 8081; # NOTE(review): collides with sabten (8081); gerrit is disabled today, but reassign before enabling
+ gerrit-ssh = 2222;
+ git = 9418;
+ headscale = 8844;
+ hoogle = 8008;
+ http = 80;
+ httpdev = {
+ from = 8000;
+ to = 8099;
+ };
+ https = 443;
+ invidious = 8086;
+ jellyfin = 8096;
+ jupyter = 4000;
+ k3s = 6443;
+ libreddit = 8085;
+ meshhub = 1511;
+ mpd = 6600;
+ mpd-stream = 8097;
+ murmur = 64738;
+ nostr-relay = 8084;
+ radicale = 5232;
+ sabten = 8081;
+ ssh = 22;
+ stableDiffusion = 8501;
+ syncthing-gui = 8384;
+ tor = 144;
+ torrents = {
+ from = 3000;
+ to = 3099;
+ };
+ wireguard = 51820;
+ znc = 5000;
+}
diff --git a/Omni/Cloud/Web.nix b/Omni/Cloud/Web.nix
new file mode 100644
index 0000000..0e080f5
--- /dev/null
+++ b/Omni/Cloud/Web.nix
@@ -0,0 +1,303 @@
+{ config, ... }:
+
+let
+ rootDomain = config.networking.domain;
+ ports = import ./Ports.nix;
+in {
+ imports = [ ./Gmnisrv.nix ];
+ networking.firewall = {
+ allowedTCPPorts = [
+ ports.ssh
+ ports.git
+ ports.http
+ ports.https
+ ports.sabten
+ ports.gemini
+ ports.radicale
+ ports.znc
+ ports.gerrit-ssh
+ ];
+ };
+
+ services = {
+
+ libreddit = {
+ enable = true;
+ address = "127.0.0.1";
+ openFirewall = true;
+ port = ports.libreddit;
+ };
+
+ invidious = {
+ enable = true;
+ database.createLocally = true;
+ domain = "youtube.${rootDomain}";
+ nginx.enable = false; # do this myself, below
+ port = ports.invidious;
+ };
+
+ radicale = {
+ enable = true;
+ rights = {
+ # Allow reading root collection for authenticated users
+ root = {
+ user = ".+";
+ collection = "";
+ permissions = "R";
+ };
+ # Allow reading and writing principal collection (same as username)
+ principal = {
+ user = ".+";
+ collection = "{user}";
+ permissions = "RW";
+ };
+ # Allow reading and writing calendars and address books that are direct
+ # children of the principal collection
+ calendars = {
+ user = ".+";
+ collection = "{user}/[^/]+";
+ permissions = "rw";
+ };
+ # Allow any authenticated user to modify the public collection
+ public = {
+ user = ".*";
+ collection = "public/.*";
+ permissions = "rw";
+ };
+ };
+ settings = {
+ server = {
+ hosts = [
+ "0.0.0.0:${toString ports.radicale}"
+ "[::]:${toString ports.radicale}"
+ ];
+ };
+ auth = {
+ type = "htpasswd";
+ htpasswd_filename = "/etc/radicale/users";
+ htpasswd_encryption = "plain";
+ };
+ };
+ };
+
+ gmnisrv = {
+ enable = false;
+ listen = "0.0.0.0:${toString ports.gemini} [::]:${toString ports.gemini}";
+ settings = {
+ ":tls" = { store = "/var/lib/gmnisrv"; };
+ "bsima.me" = { "root" = "/var/web/ben"; };
+ "${rootDomain}" = {
+ "root" = "/var/web/simatime.com";
+ "cgi" = "on";
+ };
+ };
+ };
+
+ nginx = {
+ enable = true;
+ recommendedGzipSettings = true;
+ recommendedOptimisation = true;
+ recommendedProxySettings = true;
+ recommendedTlsSettings = true;
+ statusPage = true;
+
+ user = "nginx";
+ group = "nginx";
+
+ virtualHosts = {
+ ${rootDomain} = {
+ forceSSL = true;
+ enableACME = true;
+ locations = {
+ # the nginx/cgit module puts a '/' at the end of 'location', so we need to
+ # redirect '/git' to '/git/'
+ "/git".return = "301 https://$host/git/";
+ # nostr nip-5 verification
+ "/.well-known/nostr.json".return = "200 '${
+ builtins.toJSON {
+ names.bensima =
+ "2fa4b9ba71b6dab17c4723745bb7850dfdafcb6ae1a8642f76f9c64fa5f43436";
+ }
+ }'";
+ # disabled for nixpert test
+ "/" = {
+ root = "/var/web/simatime.com";
+ extraConfig = ''
+ autoindex on;
+ '';
+ };
+ # serve /~$USER paths
+ "~ ^/~(.+?)(/.*)?$" = {
+ alias = "/var/web/$1$2";
+ index = "index.html index.htm";
+ extraConfig = ''
+ autoindex on;
+ '';
+ };
+ };
+ };
+
+ "bsima.me" = {
+ locations."/" = {
+ root = "/var/web/ben";
+ index = "index.html index.htm";
+ extraConfig = ''
+ autoindex on;
+ '';
+ };
+ serverAliases = [ "www.bsima.me" ];
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+
+ "hoogle.${rootDomain}" = {
+ locations."/".proxyPass =
+ "http://${ports.bensIp}:${toString ports.hoogle}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+
+ "tv.${rootDomain}" = {
+ locations."/".proxyPass =
+ "http://${ports.bensIp}:${toString ports.jellyfin}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+
+ "cal.${rootDomain}" = {
+ locations."/".proxyPass =
+ "http://localhost:${toString ports.radicale}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ extraConfig = ''
+ proxy_set_header X-Script-Name /radicale;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header Host $host;
+ proxy_pass_header Authorization;
+ '';
+ };
+
+ "reddit.${rootDomain}" = {
+ locations."/".proxyPass =
+ "http://localhost:${toString ports.libreddit}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+ "www.reddit.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ globalRedirect = "reddit.${rootDomain}";
+ };
+ "old.reddit.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ globalRedirect = "reddit.${rootDomain}";
+ };
+
+ "youtube.${rootDomain}" = {
+ locations."/".proxyPass =
+ "http://localhost:${toString ports.invidious}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+ "www.youtube.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ globalRedirect = "youtube.${rootDomain}";
+ };
+ "m.youtube.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ globalRedirect = "youtube.${rootDomain}";
+ };
+
+ "dandel-rovbur.${rootDomain}" = {
+ locations."/".proxyPass =
+ "http://${ports.bensIp}:${toString ports.dandel-rovbur}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+
+ "sabten.${rootDomain}" = {
+ locations."/".proxyPass = "http://localhost:${toString ports.sabten}";
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ };
+
+ "sd.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ locations."/" = {
+ proxyPass =
+ "http://${ports.bensIp}:${toString ports.stableDiffusion}";
+ proxyWebsockets = true;
+ };
+ };
+
+ "music.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ locations."/".proxyPass =
+ "http://localhost:${toString ports.botamusique}";
+ };
+
+ "nostr.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ locations."/" = {
+ proxyPass = "http://localhost:${toString ports.nostr-relay}";
+ extraConfig = ''
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "Upgrade";
+ proxy_set_header Host $host;
+ '';
+ };
+ };
+
+ "notebook.${rootDomain}" = {
+ forceSSL = true;
+ useACMEHost = rootDomain;
+ locations = {
+ "/" = {
+ proxyPass = "http://${ports.bensIp}:${toString ports.jupyter}";
+ proxyWebsockets = true;
+ extraConfig = ''
+ proxy_buffering off;
+ proxy_read_timeout 86400;
+ '';
+ };
+ "~ /(api/kernels/[^/]+/channels|terminals/websocket)/" = {
+ proxyPass = "http://${ports.bensIp}:${toString ports.jupyter}";
+ proxyWebsockets = true;
+ };
+ };
+ };
+
+ };
+ };
+ };
+
+ # This must contain all of the other domains we host
+ security.acme.certs.${rootDomain}.extraDomainNames =
+ [ "bsima.me" "www.bsima.me" ] ++ map (sub: "${sub}.${rootDomain}") [
+ "music"
+ "tv"
+ "matrix"
+ "chat"
+ "hoogle"
+ "dandel-rovbur"
+ "sabten"
+ "cal"
+ "notebook"
+ "nostr"
+ "reddit"
+ "old.reddit"
+ "www.reddit"
+ "youtube"
+ "www.youtube"
+ "m.youtube"
+ "sd"
+ "gerrit"
+ ];
+}
diff --git a/Omni/Cloud/Znc.nix b/Omni/Cloud/Znc.nix
new file mode 100644
index 0000000..e68ebc7
--- /dev/null
+++ b/Omni/Cloud/Znc.nix
@@ -0,0 +1,76 @@
+/* N.B.: generate znc passwords with 'nix-shell -p znc --command "znc --makepass"'
+
+ - https://wiki.znc.in/Configuration
+*/
+
+{ pkgs, ... }:
+
+{
+ services = {
+ znc = {
+ enable = true;
+ openFirewall = true;
+ modulePackages = with pkgs.zncModules;
+ [
+ #backlog clientaway clientbuffer
+ #ignore
+ ];
+ useLegacyConfig = false;
+ config = {
+ LoadModule = [ "adminlog" ];
+ Motd = "welcome to znc.simatime.com";
+ User.bsima = {
+ Admin = true;
+ Nick = "bsima";
+ AltNick = "bsima1";
+ LoadModule = [ "chansaver" "controlpanel" "log" ];
+ Network = {
+ efnet = {
+ Server = "irc.efnet.info +6697";
+ LoadModule = [ "simple_away" ];
+ };
+ libera = {
+ Server = "irc.libera.chat +6697";
+ LoadModule = [ "simple_away" "nickserv" "sasl" ];
+ Nick = "bsima";
+ Chan = {
+ "#emacs" = { Detached = true; };
+ "#guile" = { };
+ "#guix" = { Detached = true; };
+ "#haskell" = { };
+ "#hledger" = { };
+ "#nixos" = { };
+ "#notmuch" = { Detached = true; };
+ "#org-mode" = { Detached = true; };
+ "#scheme" = { Detached = true; };
+ "#sr.ht" = { Detached = true; };
+ "#xmonad" = { Detached = true; };
+ };
+ };
+ oftc = {
+ Server = "irc.oftc.net +6697";
+ LoadModule = [ "simple_away" "nickserv" ];
+ Nick = "bsima";
+ Chan = { "#home-manager" = { }; };
+ };
+ zeronode = {
+ Server = "irc.zeronode.net +6697";
+ LoadModule = [ "simple_away" "nickserv" ];
+ Nick = "bsima";
+ Chan = { "#NoAgenda" = { }; };
+ };
+ #sorcery = {
+ # Server = "irc.sorcery.net +6697";
+ #};
+ };
+ Pass.password = {
+ Method = "sha256";
+ Hash =
+ "bead16d806e7bf5cbbc31d572b20f01e2b253eb60e2497ce465df56306becd02";
+ Salt = "/GhmBMc+E6b7qd8muFEe";
+ };
+ };
+ };
+ };
+ };
+}
diff --git a/Omni/Cloud/post-receive.sh b/Omni/Cloud/post-receive.sh
new file mode 100755
index 0000000..179fbd0
--- /dev/null
+++ b/Omni/Cloud/post-receive.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+#
+# creates an archive of a git repo on push
+#
+# unfortunately the nixos gitolite module does not copy the 'commonHooks'
+# properly, so we have to manually deploy this like so:
+#
+# scp Omni/Cloud/post-receive \
+# root@simatime.com:/srv/git/.gitolite/hooks/common/post-receive
+#
+# One time only:
+#
+# ssh root@simatime.com "sudo -u git gitolite setup -ho"
+#
+# Also on first-time setup, might need to manually check the permissions are
+# correct on $webroot/archive or wherever else.
+#
+set -euo pipefail
+while read -r _ newrev refname
+do
+ if [[ -e ./git-daemon-export-ok ]]
+ then
+ repo=$(basename "$PWD" | sed 's/.git//g')
+ branch=$(git rev-parse --symbolic --abbrev-ref "$refname")
+ webroot="/srv/www/simatime.com/"
+ outdir="$webroot/archive/$repo/$branch"
+ mkdir -p "$outdir"
+ echo " making: https://simatime.com/archive/$repo/$branch/$newrev.tar.gz"
+ git archive "$branch" --prefix "$repo-$branch/" --format tar \
+ | gzip > "$outdir/$newrev.tar.gz"
+ echo " making: https://simatime.com/archive/$repo/$branch/$newrev.sha256"
+ hash=$(nix-prefetch-url --unpack file://"$outdir"/"$newrev".tar.gz 2>/dev/null)
+ echo "$hash" > "$outdir/$newrev.sha256"
+ echo " commit: $newrev"
+ echo " sha256: $hash"
+ echo "in omni: deps update $repo --branch $branch --rev $newrev --attribute sha256=$hash"
+ chmod -R 755 "$webroot/archive"
+ fi
+done
diff --git a/Omni/Dev/Beryllium.nix b/Omni/Dev/Beryllium.nix
new file mode 100644
index 0000000..82374c3
--- /dev/null
+++ b/Omni/Dev/Beryllium.nix
@@ -0,0 +1,14 @@
+{ bild }:
+bild.os {
+ imports = [
+ ../OsBase.nix
+ ../Packages.nix
+ ../Users.nix
+ ./Beryllium/Configuration.nix
+ ./Beryllium/Hardware.nix
+ ./Beryllium/Ollama.nix
+ ./Vpn.nix
+ ];
+ networking.hostName = "beryllium";
+ networking.domain = "beryl.simatime.com";
+}
diff --git a/Omni/Dev/Beryllium/Configuration.nix b/Omni/Dev/Beryllium/Configuration.nix
new file mode 100644
index 0000000..16f4bca
--- /dev/null
+++ b/Omni/Dev/Beryllium/Configuration.nix
@@ -0,0 +1,115 @@
+{ pkgs, ... }:
+
+let ports = import ../../Cloud/Ports.nix;
+in {
+ imports = [ # Include the results of the hardware scan.
+ ./Hardware.nix
+ ];
+
+ # Bootloader.
+ boot.loader.systemd-boot.enable = true;
+ boot.loader.efi.canTouchEfiVariables = true;
+
+ boot.kernelModules = [ "v4l2loopback" ];
+ boot.extraModulePackages = [ pkgs.linuxPackages.v4l2loopback ];
+
+ # Enable networking
+ networking.networkmanager.enable = true;
+
+ # Set your time zone.
+ time.timeZone = "America/New_York";
+
+ # Select internationalisation properties.
+ i18n.defaultLocale = "en_US.UTF-8";
+
+ i18n.extraLocaleSettings = {
+ LC_ADDRESS = "en_US.UTF-8";
+ LC_IDENTIFICATION = "en_US.UTF-8";
+ LC_MEASUREMENT = "en_US.UTF-8";
+ LC_MONETARY = "en_US.UTF-8";
+ LC_NAME = "en_US.UTF-8";
+ LC_NUMERIC = "en_US.UTF-8";
+ LC_PAPER = "en_US.UTF-8";
+ LC_TELEPHONE = "en_US.UTF-8";
+ LC_TIME = "en_US.UTF-8";
+ };
+
+ # don't auto suspend-to-RAM
+ powerManagement.enable = true;
+
+ # Enable the X11 windowing system.
+ services.xserver.enable = true;
+
+ # Enable the KDE Plasma Desktop Environment.
+ services.xserver.displayManager.gdm.enable = true;
+ services.xserver.desktopManager.gnome.enable = true;
+
+ # Configure keymap in X11
+ services.xserver = {
+ layout = "us";
+ xkbVariant = "";
+ };
+
+ # Enable CUPS to print documents.
+ services.printing.enable = true;
+
+ # Enable sound with pipewire.
+ hardware.pulseaudio.enable = false;
+ security.rtkit.enable = true;
+ services.pipewire = {
+ enable = true;
+ alsa.enable = true;
+ alsa.support32Bit = true;
+ pulse.enable = true;
+ # If you want to use JACK applications, uncomment this
+ jack.enable = true;
+ wireplumber.enable = true;
+ };
+
+ hardware.opengl.enable = true;
+ hardware.opengl.driSupport32Bit = true;
+ services.xserver.videoDrivers = [ "nvidia" ];
+ hardware.nvidia.nvidiaPersistenced = true;
+ hardware.nvidia.modesetting.enable = true;
+ hardware.nvidia.powerManagement.enable = false;
+ hardware.nvidia.powerManagement.finegrained = false;
+ hardware.nvidia.open = true;
+ hardware.nvidia.nvidiaSettings = true;
+
+ hardware.keyboard.zsa.enable = true;
+
+ services.xserver.displayManager.autoLogin.enable = false;
+ services.xserver.displayManager.autoLogin.user = "ben";
+
+ services.clight.enable = true;
+ services.clight.temperature.day = 6500;
+ services.clight.temperature.night = 1800;
+ services.clight.settings.sunrise = "7:00";
+ services.clight.settings.sunset = "17:00";
+ location.latitude = 40.8;
+ location.longitude = -81.52;
+
+ services.eternal-terminal.enable = true;
+
+ environment.systemPackages = with pkgs; [
+ v4l-utils
+ linuxPackages.v4l2loopback
+ nvtop
+ keymapp
+ wally-cli
+ # vim # Do not forget to add an editor to edit configuration.nix! The Nano editor is also installed by default.
+ # wget
+ ];
+
+ systemd.services.NetworkManager-wait-online.enable = false;
+
+ networking.firewall.allowedTCPPorts = [ ports.barrier ];
+ # This value determines the NixOS release from which the default
+ # settings for stateful data, like file locations and database versions
+ # on your system were taken. It‘s perfectly fine and recommended to leave
+ # this value at the release version of the first install of this system.
+ # Before changing this value read the documentation for this option
+ # (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
+ system.stateVersion = "23.05"; # Did you read the comment?
+
+}
diff --git a/Omni/Dev/Beryllium/Hardware.nix b/Omni/Dev/Beryllium/Hardware.nix
new file mode 100644
index 0000000..ecf425c
--- /dev/null
+++ b/Omni/Dev/Beryllium/Hardware.nix
@@ -0,0 +1,38 @@
+# Do not modify this file! It was generated by ‘nixos-generate-config’
+# and may be overwritten by future invocations. Please make changes
+# to /etc/nixos/configuration.nix instead.
+{ config, lib, modulesPath, ... }:
+
+{
+ imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];
+
+ boot.initrd.availableKernelModules =
+ [ "xhci_pci" "ahci" "nvme" "usbhid" "usb_storage" "sd_mod" ];
+ boot.initrd.kernelModules = [ ];
+ boot.kernelModules = [ "kvm-amd" ];
+ boot.extraModulePackages = [ ];
+
+ fileSystems."/" = {
+ device = "/dev/disk/by-uuid/f96eaa16-d0e2-4230-aece-131ce7b630da";
+ fsType = "ext4";
+ };
+
+ fileSystems."/boot" = {
+ device = "/dev/disk/by-uuid/A34A-6527";
+ fsType = "vfat";
+ };
+
+ swapDevices = [ ];
+
+ # Enables DHCP on each ethernet and wireless interface. In case of scripted networking
+ # (the default) this is the recommended approach. When using systemd-networkd it's
+ # still possible to use this option, but it's recommended to use it in conjunction
+ # with explicit per-interface declarations with `networking.interfaces.<interface>.useDHCP`.
+ networking.useDHCP = lib.mkDefault true;
+ # networking.interfaces.enp97s0.useDHCP = lib.mkDefault true;
+ # networking.interfaces.enp99s0.useDHCP = lib.mkDefault true;
+
+ nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";
+ hardware.cpu.amd.updateMicrocode =
+ lib.mkDefault config.hardware.enableRedistributableFirmware;
+}
diff --git a/Omni/Dev/Beryllium/Ollama.nix b/Omni/Dev/Beryllium/Ollama.nix
new file mode 100644
index 0000000..35b4fe1
--- /dev/null
+++ b/Omni/Dev/Beryllium/Ollama.nix
@@ -0,0 +1,48 @@
+{ pkgs, ... }:
+/* Ollama API service
+
+ Don't put too much work into this, there's a much better and more complete
+ ollama service (with webui!) being built here:
+ https://github.com/NixOS/nixpkgs/pull/275448
+
+ If you want to spend time on it, spend time over there.
+*/
+let pkg = pkgs.unstable.ollama;
+in {
+
+ systemd.services.ollama = {
+ description = "ollama";
+ after = [ "network.target" ];
+ wantedBy = [ "multi-user.target" ];
+
+ environment = {
+ OLLAMA_HOST = "localhost:11434";
+ # Where to store LLM model files.
+ HOME = "%S/ollama";
+ OLLAMA_MODELS = "%S/ollama/models";
+ OLLAMA_DEBUG = "1";
+ };
+
+ serviceConfig = {
+ ExecStart = "${pkg}/bin/ollama serve";
+ User = "ollama";
+ Group = "ollama";
+ Type = "simple";
+ Restart = "on-failure";
+ RestartSec = 3;
+ # Persistent storage for model files, i.e. /var/lib/<StateDirectory>
+ StateDirectory = [ "ollama" ];
+ };
+ };
+
+ # for administration, make this available to users' PATH
+ environment.systemPackages = [ pkg ];
+
+ users.groups.ollama = { };
+
+ users.users.ollama = {
+ group = "ollama";
+ isSystemUser = true;
+ extraGroups = [ "render" "video" ];
+ };
+}
diff --git a/Omni/Dev/Dns.nix b/Omni/Dev/Dns.nix
new file mode 100644
index 0000000..baf79aa
--- /dev/null
+++ b/Omni/Dev/Dns.nix
@@ -0,0 +1,19 @@
+{ ... }:
+
+{
+ services.bind = {
+ enable = true;
+ forwarders = [ "8.8.8.8" "1.1.1.1" ];
+ cacheNetworks = [ "127.0.0.0/8" "192.168.0.0/24" ];
+ extraConfig = "";
+ extraOptions = ''
+ dnssec-validation auto;
+ '';
+ };
+
+ #networking.extraHosts = ''
+ # 192.168.0.1 router.home
+ # 192.168.0.196 lithium.home
+ #'';
+
+}
diff --git a/Omni/Dev/Guix.nix b/Omni/Dev/Guix.nix
new file mode 100644
index 0000000..0b261fb
--- /dev/null
+++ b/Omni/Dev/Guix.nix
@@ -0,0 +1,43 @@
+{ config, lib, pkgs, ... }:
+
+with lib;
+
+let
+
+ cfg = config.services.guix;
+
+in {
+
+ options.services.guix = {
+ enable = mkEnableOption "GNU Guix package manager";
+ };
+
+ config = mkIf cfg.enable {
+ systemd.services.guix-daemon = {
+ description = "Build daemon for GNU Guix";
+
+ wantedBy = [ "multi-user.target" ];
+
+ serviceConfig = {
+ Restart = "always";
+ ExecStart =
+ "${pkgs.guix}/bin/guix-daemon --build-users-group=guixbuild";
+ Environment = null;
+ RemainAfterExit = "yes";
+ StandardOutput = "syslog";
+ StandardError = "syslog";
+ TaskMax = "8192";
+ };
+ };
+  users = {
+    # genAttrs needs a list of *string* attr names; lib.lists.range yields
+    # ints (and "${n}" on an int is an eval error), so render with toString.
+    extraUsers = lib.genAttrs (map toString (lib.lists.range 1 10)) (n: {
+      name = "guixbuilder${n}";
+      isSystemUser = true;
+      extraGroups = [ "guixbuild" ];
+      group = "guixbuild";
+      description = "Guix build user ${n}";
+    });
+    extraGroups = { "guixbuild" = { }; };
+  };
+ };
+}
diff --git a/Omni/Dev/Hoogle.nix b/Omni/Dev/Hoogle.nix
new file mode 100644
index 0000000..213a31c
--- /dev/null
+++ b/Omni/Dev/Hoogle.nix
@@ -0,0 +1,81 @@
+{ config, lib, pkgs, ... }:
+
+with lib;
+
+let
+
+ cfg = config.services.my-hoogle;
+
+ hoogleEnv = pkgs.buildEnv {
+ name = "hoogle";
+ paths = [ (cfg.haskellPackages.ghcWithHoogle cfg.packages) ];
+ };
+
+in {
+
+ options.services.my-hoogle = {
+ enable = mkEnableOption "Haskell documentation server";
+
+ port = mkOption {
+ type = types.int;
+ default = 8080;
+ description = ''
+ Port number Hoogle will be listening to.
+ '';
+ };
+
+ packages = mkOption {
+ default = _hp: [ ];
+ defaultText = "hp: []";
+ example = "hp: with hp; [ text lens ]";
+ description = ''
+ The Haskell packages to generate documentation for.
+
+ The option value is a function that takes the package set specified in
+ the <varname>haskellPackages</varname> option as its sole parameter and
+ returns a list of packages.
+ '';
+ };
+
+ haskellPackages = mkOption {
+ description = "Which haskell package set to use.";
+ default = pkgs.haskellPackages;
+ defaultText = "pkgs.haskellPackages";
+ };
+
+ home = mkOption {
+ type = types.str;
+ description = "Url for hoogle logo";
+ default = "https://hoogle.haskell.org";
+ };
+
+ host = mkOption {
+ type = types.str;
+ description = "Set the host to bind on.";
+ default = "127.0.0.1";
+ };
+ };
+
+ config = mkIf cfg.enable {
+ systemd.services.hoogle = {
+ description = "Haskell documentation server";
+
+ wantedBy = [ "multi-user.target" ];
+
+ serviceConfig = {
+ Restart = "always";
+ ExecStart = "${hoogleEnv}/bin/hoogle server --local --port ${
+ toString cfg.port
+ } --home ${cfg.home} --host ${cfg.host}";
+
+ DynamicUser = true;
+
+ ProtectHome = true;
+
+ RuntimeDirectory = "hoogle";
+ WorkingDirectory = "%t/hoogle";
+ };
+ };
+ };
+
+}
diff --git a/Omni/Dev/Lithium.nix b/Omni/Dev/Lithium.nix
new file mode 100644
index 0000000..567f6e0
--- /dev/null
+++ b/Omni/Dev/Lithium.nix
@@ -0,0 +1,27 @@
+{ bild }:
+# Dev machine for work and building stuff.
+
+bild.os {
+ imports = [
+ ../OsBase.nix
+ ../Packages.nix
+ ../Users.nix
+ ./Lithium/Configuration.nix
+ ./Lithium/Hardware.nix
+ ./Hoogle.nix
+ ./Networking.nix
+ ./Dns.nix
+ ../../Biz/Dragons.nix
+ #./Guix.nix # I need to package a bunch of guile libs first
+ ./Vpn.nix
+ ];
+ networking.hostName = "lithium";
+ networking.domain = "dev.simatime.com";
+ services.dragons = {
+ enable = true;
+ port = 8095;
+ package = bild.run ../../Biz/Dragons.hs;
+ keep = "/var/dragons/keep";
+ depo = "/var/dragons/depo";
+ };
+}
diff --git a/Omni/Dev/Lithium/Configuration.nix b/Omni/Dev/Lithium/Configuration.nix
new file mode 100644
index 0000000..97b00c8
--- /dev/null
+++ b/Omni/Dev/Lithium/Configuration.nix
@@ -0,0 +1,217 @@
+{ lib, pkgs, ... }:
+
+let
+ ghcCompiler = (import ../../Bild/Constants.nix).ghcCompiler;
+ ports = import ../../Cloud/Ports.nix;
+in {
+ # Use the systemd-boot EFI boot loader.
+ boot.loader.systemd-boot.enable = true;
+ boot.loader.efi.canTouchEfiVariables = true;
+ boot.enableContainers = true;
+
+ powerManagement.enable = false;
+
+ time.timeZone = "America/New_York";
+
+ fonts.fonts = with pkgs; [
+ google-fonts
+ mononoki
+ source-code-pro
+ fantasque-sans-mono
+ hack-font
+ fira
+ fira-code
+ fira-code-symbols
+ ];
+
+ environment.systemPackages =
+ [ pkgs.nvtop pkgs.k3s pkgs.wemux pkgs.tmux pkgs.wireguard-tools ];
+
+ hardware = {
+ opengl.enable = true;
+ pulseaudio = {
+ enable = true;
+ extraConfig = ''
+ load-module module-loopback
+ '';
+ };
+ };
+
+ #hardware.nvidia.nvidiaPersistenced = true;
+
+ programs.bash.enableCompletion = true;
+ programs.command-not-found.enable = true;
+ programs.gnupg.agent.enable = true;
+ programs.gnupg.agent.enableSSHSupport = true;
+ programs.mosh.enable = true;
+
+ virtualisation.docker.enable = true;
+ virtualisation.docker.liveRestore = false;
+ virtualisation.libvirtd.enable = false;
+ virtualisation.virtualbox.host.enable = false;
+ virtualisation.virtualbox.host.headless = false;
+ virtualisation.virtualbox.host.addNetworkInterface = false;
+ virtualisation.virtualbox.guest.enable = false;
+
+ services.my-hoogle.enable = true;
+ services.my-hoogle.port = ports.hoogle;
+ services.my-hoogle.home = "//hoogle.simatime.com";
+ services.my-hoogle.packages = pkgset:
+ lib.attrsets.attrVals (import ../../Bild/Deps/Haskell.nix) pkgset;
+ services.my-hoogle.haskellPackages = pkgs.haskell.packages.${ghcCompiler};
+ services.my-hoogle.host = "0.0.0.0";
+
+ services.eternal-terminal.enable = true;
+
+ services.k3s.enable = false;
+ services.k3s.role = "server";
+
+ services.syncthing.enable = true;
+ services.syncthing.guiAddress = "127.0.0.1:${toString ports.syncthing-gui}";
+ services.syncthing.openDefaultPorts = true;
+ services.syncthing.systemService = true;
+
+ services.tor.enable = true;
+ services.tor.client.enable = true;
+ services.tor.relay.role = "bridge";
+ services.tor.settings.ORPort = ports.tor;
+ services.tor.settings.Nickname = "ydeee3q1cjo83tsuqcz";
+ services.tor.settings.AccountingMax = "10 GBytes";
+ services.tor.settings.AccountingStart = "month 1 1:00";
+ services.tor.settings.ContactInfo =
+ "ContactInfo pgp:66A6AD150399D970DCA4C4E6C8218B7D0BFDECCD ciissversion:2";
+
+ services.bitcoind.mainnet.enable = true;
+ services.bitcoind.mainnet.dataDir = "/mnt/campbell/bitcoind-mainnet/data";
+ services.bitcoind.mainnet.configFile =
+ "/mnt/campbell/bitcoind-mainnet/bitcoin.conf";
+ services.bitcoind.mainnet.prune = 10000;
+
+ services.pcscd.enable = true;
+ services.logind.lidSwitch = "ignore";
+ services.logind.extraConfig = "IdleAction=ignore";
+
+ services.deluge.enable = true;
+ services.deluge.openFilesLimit = 10240;
+ services.deluge.web.enable = true;
+
+ services.printing.enable = true;
+
+ services.murmur.enable = true;
+ services.murmur.registerName = "simatime";
+ services.murmur.password = "simatime";
+ services.murmur.port = ports.murmur;
+
+ services.xserver.enable = true;
+ services.xserver.autorun = true;
+ services.xserver.layout = "us";
+ services.xserver.xkbOptions = "caps:ctrl_modifier";
+ services.xserver.videoDrivers = [ "nvidia" ];
+ services.xserver.serverFlagsSection = ''
+ Option "BlankTime" "0"
+ Option "StandbyTime" "0"
+ Option "SuspendTime" "0"
+ Option "OffTime" "0"
+ '';
+ services.xserver.displayManager.sddm.enable = true;
+ services.xserver.displayManager.sddm.enableHidpi = true;
+ # Some of these have conflicting definitions, which might be the source of my
+ # problems with the display. Start here for debugging.
+ #services.xserver.displayManager.session.manage = "desktop";
+ #services.xserver.displayManager.session.name = "home-manager";
+ #services.xserver.displayManager.session.start = ''
+ # ${pkgs.runtimeShell} $HOME/.hm-xsession &
+ # waitPID=$!
+ #'';
+ #services.xserver.desktopManager.kodi.enable = false;
+ #services.xserver.desktopManager.plasma5.enable = false;
+ services.xserver.desktopManager.xterm.enable = true;
+ services.xserver.windowManager.xmonad.enable = true;
+ services.xserver.libinput.enable = true;
+ services.xserver.libinput.touchpad.tapping = true;
+ services.xserver.modules = [ pkgs.xf86_input_wacom ];
+ services.xserver.wacom.enable = true;
+
+ services.jupyter.enable = true;
+ services.jupyter.port = ports.jupyter;
+ services.jupyter.ip = "*";
+ users.users.jupyter.group = "jupyter";
+ users.groups.jupyter = { };
+ services.jupyter.password =
+ "'argon2:$argon2id$v=19$m=10240,t=10,p=8$nvQhgk+htbIYi961YYAf1w$ekpwiTT5L4+OAods0K7EDw'";
+ services.jupyter.kernels.python3 = let
+ env = (pkgs.python3.withPackages (p:
+ with p; [
+ ipykernel
+ pandas
+ scikitlearn
+ numpy
+ matplotlib
+ sympy
+ ipywidgets
+ ]));
+ in {
+ displayName = "py3";
+ argv = [
+ "${env.interpreter}"
+ "-m"
+ "ipykernel_launcher"
+ "-f"
+ "{connection_file}"
+ ];
+ language = "python";
+ # error: must be of type 'null or path'
+ #logo32 = "${env.sitePackages}/ipykernel/resources/logo-32x32.png";
+ #logo64 = "${env.sitePackages}/ipykernel/resources/logo-64x64.png";
+ };
+
+ # previously emby
+ services.jellyfin.enable = true;
+ services.jellyfin.user = "jellyfin";
+ services.jellyfin.group = "jellyfin";
+
+ services.minidlna.enable = true;
+ services.minidlna.settings.notify_interval = 60;
+ services.minidlna.settings.friendly_name = "Sima Media";
+ services.minidlna.settings.media_dir = [
+ "V,/mnt/campbell/ben/youtube"
+ "A,/mnt/campbell/ben/music"
+ "V,/mnt/campbell/ben/torrents/done"
+ ];
+
+ services.vnstat.enable = true;
+
+ documentation.enable = true;
+ documentation.dev.enable = true;
+ documentation.doc.enable = true;
+ documentation.info.enable = true;
+ documentation.man.enable = true;
+ documentation.nixos.enable = true;
+
+ nix.settings.auto-optimise-store = true;
+ nix.settings.cores = 0; # use all available cores
+ # Since this is the dev machine, we can turn these on at the expense
+ # of extra disk space.
+ nix.extraOptions = ''
+ keep-outputs = true
+ keep-derivations = true
+ '';
+ # 1 job * 2 cores = 2 maximum cores used at any one time
+ nix.settings.max-jobs = 1;
+ nix.sshServe.enable = true;
+ nix.sshServe.keys = lib.trivial.pipe ../../Keys/Ben.pub [
+ builtins.readFile
+ (lib.strings.splitString "\n")
+ (lib.filter (s: s != ""))
+ ];
+ nix.settings.trusted-users = [ "root" "ben" ];
+
+ # This value determines the NixOS release with which your system is to be
+ # compatible, in order to avoid breaking some software such as database
+ # servers. You should change this only after NixOS release notes say you
+ # should.
+ system.stateVersion = "20.09"; # Did you read the comment?
+
+ # TODO: is this still necessary? Check nixpkgs upstream
+ users.users.jupyter.isSystemUser = true;
+}
diff --git a/Omni/Dev/Lithium/Hardware.nix b/Omni/Dev/Lithium/Hardware.nix
new file mode 100644
index 0000000..54c07f5
--- /dev/null
+++ b/Omni/Dev/Lithium/Hardware.nix
@@ -0,0 +1,32 @@
+# Do not modify this file! It was generated by ‘nixos-generate-config’
+# and may be overwritten by future invocations. Please make changes
+# to /etc/nixos/configuration.nix instead.
+{ lib, modulesPath, ... }:
+
+{
+ imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];
+
+ boot.initrd.availableKernelModules =
+ [ "xhci_pci" "ahci" "usb_storage" "usbhid" "sd_mod" ];
+ boot.kernelModules = [ "kvm-intel" ];
+ boot.extraModulePackages = [ ];
+
+ fileSystems."/" = {
+ device = "/dev/disk/by-uuid/f08dd8f9-787c-4e2a-a0cc-7019edc2ce2b";
+ fsType = "ext4";
+ };
+
+ fileSystems."/boot" = {
+ device = "/dev/disk/by-uuid/C67C-D7B5";
+ fsType = "vfat";
+ };
+
+ fileSystems."/mnt/campbell" = {
+ device = "/dev/disk/by-uuid/037df3ae-4609-402c-ab1d-4593190d0ee7";
+ fsType = "ext4";
+ };
+
+ swapDevices = [ ];
+
+ powerManagement.cpuFreqGovernor = lib.mkDefault "powersave";
+}
diff --git a/Omni/Dev/Networking.nix b/Omni/Dev/Networking.nix
new file mode 100644
index 0000000..c89add7
--- /dev/null
+++ b/Omni/Dev/Networking.nix
@@ -0,0 +1,44 @@
+{ ... }:
+
+let ports = import ../Cloud/Ports.nix;
+in {
+ networking = {
+ nameservers = [ "1.1.1.1" ];
+ hostName = "lithium";
+ hosts = { "::1" = [ "localhost" "ipv6-localhost" "ipv6-loopback" ]; };
+
+ firewall = {
+ allowedTCPPorts = [
+ ports.bitcoind
+ ports.bitcoind-rpc
+ ports.delugeWeb
+ ports.et
+ ports.gemini
+ ports.git
+ ports.http
+ ports.https
+ ports.jellyfin
+ ports.jupyter
+ ports.k3s
+ ports.mpd
+ ports.mpd-stream
+ ports.murmur
+ ports.radicale
+ ports.sabten
+ ports.ssh
+ ports.stableDiffusion
+ ports.tor
+ ];
+ allowedTCPPortRanges = [ ports.torrents ports.httpdev ];
+ allowedUDPPorts = [ ports.dns ports.et ports.murmur ];
+ allowedUDPPortRanges = [ ports.torrents ];
+ };
+
+ # The global useDHCP flag is deprecated, therefore explicitly set to false here.
+ # Per-interface useDHCP will be mandatory in the future, so this generated config
+ # replicates the default behaviour.
+ useDHCP = false;
+ interfaces.enp2s0.useDHCP = true;
+ };
+
+}
diff --git a/Omni/Dev/Vpn.nix b/Omni/Dev/Vpn.nix
new file mode 100644
index 0000000..9b791b7
--- /dev/null
+++ b/Omni/Dev/Vpn.nix
@@ -0,0 +1,33 @@
+{ config, ... }:
+
+let
+ ports = import ../Cloud/Ports.nix;
+ domain = "headscale.simatime.com";
+in {
+ services.headscale = {
+ enable = true;
+ address = "0.0.0.0";
+ port = ports.headscale;
+ settings = { dns.base_domain = "simatime.com"; };
+ };
+
+ services.nginx.virtualHosts.${domain} = {
+ forceSSL = true;
+    enableACME = true;
+ locations."/" = {
+ proxyPass = "http://localhost:${toString ports.headscale}";
+ proxyWebsockets = true;
+ };
+ };
+
+ environment.systemPackages = [ config.services.headscale.package ];
+
+ services.tailscale.enable = true;
+
+ networking.firewall = {
+ checkReversePath = "loose";
+ trustedInterfaces = [ "tailscale0" ];
+ allowedUDPPorts = [ config.services.tailscale.port ];
+ };
+
+}
diff --git a/Omni/Id.hs b/Omni/Id.hs
new file mode 100644
index 0000000..59201d5
--- /dev/null
+++ b/Omni/Id.hs
@@ -0,0 +1,56 @@
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- Integer-based identifier.
+module Omni.Id
+ ( Id (..),
+ mk,
+ untag,
+ )
+where
+
+import Alpha
+import Data.Aeson (FromJSON (..), ToJSON (..))
+import Data.Binary (Binary)
+import Data.Data (Data)
+import Servant (FromHttpApiData (..), ToHttpApiData (..))
+
+newtype Id entity = Id Int
+ deriving (Eq, Ord, Show, Generic, Typeable, Data)
+
+mk :: proxy entity -> Int -> Id entity
+mk _ = Id
+
+untag :: Id entity -> Int
+untag (Id i) = i
+
+instance Hashable (Id entity)
+
+instance Binary (Id entity)
+
+instance Enum (Id entity) where
+ toEnum = mk (Proxy :: Proxy entity)
+ fromEnum = untag
+
+instance NFData (Id entity) where
+ rnf (Id s) = rnf s
+
+instance FromJSON (Id entity) where
+ parseJSON = fmap Id <. parseJSON
+
+instance ToJSON (Id entity) where
+ toJSON = toJSON <. untag
+
+-- this is just provided to satisfy Monoid, no reason to actually use it
+instance Semigroup (Id entity) where
+ a <> b = mk (Proxy :: Proxy entity) <| untag a + untag b
+
+instance Monoid (Id entity) where
+ mempty = mk (Proxy :: Proxy entity) 0
+
+instance FromHttpApiData (Id entity) where
+ parseUrlPiece p = mk (Proxy :: Proxy entity) </ parseUrlPiece p
+
+instance ToHttpApiData (Id entity) where
+ toUrlPiece p = untag p |> tshow
diff --git a/Omni/Ide/MakeTags.py b/Omni/Ide/MakeTags.py
new file mode 100755
index 0000000..add07c0
--- /dev/null
+++ b/Omni/Ide/MakeTags.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+"""
+Make tags for internal or external code.
+
+This should run fast, and be executable with just Python, meaning it does not
+require a build step.
+"""
+
+# : out maketags
+# : run universal-ctags
+import argparse
+import os
+import pathlib
+import subprocess
+import tarfile
+import zipfile
+
+
+def main() -> None:
+ """Run ctags on internal or external source code.
+
+ Raises:
+ ValueError: if CODEROOT is not set
+ ArgumentError: when explicit paths aren't provided
+ """
+ coderoot = os.environ.get("CODEROOT")
+ if coderoot is None:
+ msg = "CODEROOT not set"
+ raise ValueError(msg)
+ cabsrc = pathlib.Path(coderoot) / "_" / "src"
+ cli = argparse.ArgumentParser()
+ cli.add_argument(
+ "paths",
+ nargs="*",
+ default=".",
+ help="List of paths to run ctags on. Defaults to '.'",
+ )
+ cli.add_argument(
+ "-x",
+ "--external",
+ action="store_true",
+ help=" ".join([
+ "Use this when `paths` is a list of external packages,",
+ f"they will be extracted or linked into {cabsrc}",
+ ]),
+ )
+ args = cli.parse_args()
+ if args.external and args.paths == ".":
+ msg = "requires explicit paths"
+ raise argparse.ArgumentError(argument=args.external, message=msg)
+ if args.external:
+ extract_and_copy(cabsrc, args.paths)
+ ctags(["--recurse=yes"], cwd=cabsrc)
+ else:
+ ctags(["--exclude=*_/*", "--recurse=yes"], cwd=pathlib.Path(coderoot))
+
+
+def strip_nix_hash(path: str) -> str:
+ """Remove the /nix/store/ and hash prefix from a path."""
+ hash_len = 33
+ return path.removeprefix("/nix/store/")[hash_len:]
+
+
+def extract_and_copy(cabsrc: pathlib.Path, paths: list[str]) -> None:
+ """
+ Extract and copy or link sources.
+
+ Loop over `paths`, if the path is an archive, extract it into `cabsrc`. If
+ its a directory, just symlink the directory into `cabsrc`. Either way, we
+ end up with a directory full of source trees for running ctags on.
+ """
+ for path in paths:
+ outpath: pathlib.Path = cabsrc / strip_nix_hash(path)
+ if outpath.exists():
+ continue
+ if path.endswith(".zip"):
+ out = outpath.with_suffix("")
+ if out.exists():
+ continue
+ zipfile.ZipFile(path).extractall(out) # noqa: S202
+ elif path.endswith(".tar.gz"):
+ out = outpath.with_suffix("").with_suffix("")
+ if out.exists():
+ continue
+ with tarfile.open(path) as tarball:
+ tarball.extractall(out) # noqa: S202
+ elif pathlib.Path(path).is_dir():
+ outpath.symlink_to(path)
+
+
+def ctags(args: list[str], cwd: pathlib.Path = pathlib.Path()) -> None:
+ """Call `ctags` with `args` for both emacs and vim."""
+ os.chdir(cwd)
+ excludes = [
+ "--exclude=.mypy_cache",
+ "--exclude=.git",
+ "--exclude=.direnv",
+ "--exclude=.ruff_cache",
+ ]
+ subprocess.check_call(["ctags", *excludes, *args])
+ subprocess.check_call(["ctags", "-e", *excludes, *args])
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Omni/Ide/ftags.sh b/Omni/Ide/ftags.sh
new file mode 100755
index 0000000..b29d994
--- /dev/null
+++ b/Omni/Ide/ftags.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+#
+# search tags with fzf
+#
+ set -euo pipefail
+ tags=${CODEROOT:?}/tags
+ tag_search=$(
+ awk 'BEGIN { FS="\t" } !/^!/ {print toupper($4)"\t"$1"\t"$2"\t"$3}' "$tags" \
+ | cut -c1-80 \
+ | fzf-tmux \
+ --nth=1,2 \
+ --preview-window=down,border-none \
+ --bind="pgdn:preview-page-down" \
+ --bind="pgup:preview-page-up" \
+ --preview "rg --pretty --context 2 --fixed-strings --regexp {+2}"
+ )
+ ${EDITOR:-vim} \
+ "$(cut -f3 <<< "$tag_search")" \
+ -c "set nocst" \
+ -c "silent tag $(cut -f2 <<< "$tag_search")"
+##
diff --git a/Omni/Ide/hoog.sh b/Omni/Ide/hoog.sh
new file mode 100755
index 0000000..237eb78
--- /dev/null
+++ b/Omni/Ide/hoog.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+#
+# search hoogle with fzf
+#
+ set -euo pipefail
+ HOOG="hoogle search --count=200"
+ export FZF_DEFAULT_COMMAND="$HOOG $*"
+ result=$(fzf-tmux \
+ --preview-window=down,border-none \
+ --preview "hoogle search --link --info {+2}" \
+ --bind "change:reload:$HOOG {q} || true" \
+ --ansi \
+ | cut -d' ' -f 1,2 \
+ | sed -e 's/ /./g'
+ )
+ hoogle search --info "$result"
+##
diff --git a/Omni/Ide/hooks/commit-msg b/Omni/Ide/hooks/commit-msg
new file mode 100755
index 0000000..e07d1f4
--- /dev/null
+++ b/Omni/Ide/hooks/commit-msg
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+if ! gitlint --ignore-stdin --staged --msg-filename "$1" run-hook; then
+ backup="$CODEROOT"/.git/COMMIT_EDITMSG.backup
+ cp "$CODEROOT"/.git/COMMIT_EDITMSG "$backup"
+ echo "error: gitlint failed, saved your commit msg as $backup"
+ exit 1
+fi
diff --git a/Omni/Ide/hooks/post-applypatch b/Omni/Ide/hooks/post-applypatch
new file mode 100755
index 0000000..5071dc5
--- /dev/null
+++ b/Omni/Ide/hooks/post-applypatch
@@ -0,0 +1,6 @@
+#!/bin/sh
+## START BRANCHLESS CONFIG
+
+git branchless hook post-applypatch "$@"
+
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/hooks/post-checkout b/Omni/Ide/hooks/post-checkout
new file mode 100755
index 0000000..85541a2
--- /dev/null
+++ b/Omni/Ide/hooks/post-checkout
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+set -e
+function MakeTags {
+  "${CODEROOT:?}"/Omni/Ide/MakeTags.py "$@"
+}
+old=$1
+new=$2
+# filter out only the changed haskell files
+mapfile -t changed < <(git diff --diff-filter=d --name-only "$old" "$new" -- '*.hs')
+if [[ ! -r tags ]] || [[ ! -r TAGS ]]
+then
+ MakeTags "$CODEROOT"/**/*
+elif [[ ${#changed[@]} -gt 0 ]]
+then
+ MakeTags "${changed[@]}"
+fi
+## START BRANCHLESS CONFIG
+
+git branchless hook post-checkout "$@"
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/hooks/post-commit b/Omni/Ide/hooks/post-commit
new file mode 100755
index 0000000..cd1f195
--- /dev/null
+++ b/Omni/Ide/hooks/post-commit
@@ -0,0 +1,6 @@
+#!/bin/sh
+## START BRANCHLESS CONFIG
+
+git branchless hook post-commit "$@"
+
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/hooks/post-merge b/Omni/Ide/hooks/post-merge
new file mode 100755
index 0000000..fcfd314
--- /dev/null
+++ b/Omni/Ide/hooks/post-merge
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+"${CODEROOT:?}"/Omni/Ide/hooks/post-checkout 'HEAD@{1}' HEAD
+## START BRANCHLESS CONFIG
+
+git branchless hook post-merge "$@"
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/hooks/post-rewrite b/Omni/Ide/hooks/post-rewrite
new file mode 100755
index 0000000..8b3237a
--- /dev/null
+++ b/Omni/Ide/hooks/post-rewrite
@@ -0,0 +1,6 @@
+#!/bin/sh
+## START BRANCHLESS CONFIG
+
+git branchless hook post-rewrite "$@"
+
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/hooks/pre-auto-gc b/Omni/Ide/hooks/pre-auto-gc
new file mode 100755
index 0000000..c92a844
--- /dev/null
+++ b/Omni/Ide/hooks/pre-auto-gc
@@ -0,0 +1,6 @@
+#!/bin/sh
+## START BRANCHLESS CONFIG
+
+git branchless hook pre-auto-gc "$@"
+
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/hooks/pre-commit b/Omni/Ide/hooks/pre-commit
new file mode 100755
index 0000000..06f1716
--- /dev/null
+++ b/Omni/Ide/hooks/pre-commit
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+#
+# - prevent frozen code from being checked in
+# - guard against lint errors
+##
+ set -e
+ mapfile -t changed < <(git diff-index --cached --name-only HEAD)
+ for ns in "${changed[@]}"
+ do
+ version=$("${CODEROOT:?}"/Omni/Ide/version.sh "$ns")
+ if [[ $version -eq -1 ]]; then
+ echo "info: version: $ns: deleted"
+ elif [[ $version -lt 1 ]]; then
+ echo "fail: version: $ns: $version"
+ exit 1
+ else
+ echo "info: version: $ns: $version"
+ fi
+ done
+ lint "${changed[@]}"
+##
diff --git a/Omni/Ide/hooks/pre-push b/Omni/Ide/hooks/pre-push
new file mode 100755
index 0000000..00110bd
--- /dev/null
+++ b/Omni/Ide/hooks/pre-push
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+set -euo pipefail
+remote="$1"
+z40=0000000000000000000000000000000000000000
+IFS=" "
+while read local_ref local_sha remote_ref remote_sha
+do
+ if [ "$local_sha" = $z40 ]
+ then
+ # delete, do nothing
+ continue
+ elif [ "$remote_sha" = $z40 ]
+ then
+ # new branch, test all commits since ci was implemented
+ range="11d95581fb178a5d21e88dfd8030a61886cc2519..$local_sha"
+ else
+ range="$remote_sha..$local_sha"
+ fi
+done
+gitlint --commits "$range" lint
+git test run --command ci "$range"
+git push "$remote" refs/notes/ci --no-verify
diff --git a/Omni/Ide/hooks/reference-transaction b/Omni/Ide/hooks/reference-transaction
new file mode 100755
index 0000000..ea0cce6
--- /dev/null
+++ b/Omni/Ide/hooks/reference-transaction
@@ -0,0 +1,12 @@
+#!/bin/sh
+## START BRANCHLESS CONFIG
+
+# Avoid canceling the reference transaction in the case that `branchless` fails
+# for whatever reason.
+git branchless hook reference-transaction "$@" || (
+echo 'branchless: Failed to process reference transaction!'
+echo 'branchless: Some events (e.g. branch updates) may have been lost.'
+echo 'branchless: This is a bug. Please report it.'
+)
+
+## END BRANCHLESS CONFIG
diff --git a/Omni/Ide/ns.sh b/Omni/Ide/ns.sh
new file mode 100755
index 0000000..a56ed89
--- /dev/null
+++ b/Omni/Ide/ns.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+set -euo pipefail
+nss="fd --color=always --exclude=_ -t f . \"${CODEROOT:?}\" | sed \"s,${CODEROOT:?}/*,,g\""
+keybindings=$(cat <<EOF
+repl {}:enter
+repl --bash {}:alt+enter
+edit {} with $EDITOR:tab
+lint -f {}:alt+c
+bild {}:alt+space
+bild --test {}:alt+t
+exec {}:alt+e
+ship {}:ctrl+space
+create new namespace:alt+n
+change preview window:alt+0-6
+resize preview window:ctrl+/
+EOF
+)
+fzf_flags=(
+ --ansi
+ --bind "focus:transform-preview-label:echo {}"
+ --bind "?:change-preview(column -o ' -> ' -s':' -t <<< \"$keybindings\")"
+ --bind "alt-n:execute(touch {q})+reload($nss)"
+ --bind "alt-space:execute(bild {} ; read -p [fin])"
+ --bind "tab:execute($EDITOR {})"
+ --bind "alt-c:execute(lint -f {} ; read -p [fin])"
+ --bind "enter:execute(repl.sh {})"
+ --bind "alt-enter:execute(repl.sh --bash {})"
+ --bind "ctrl-space:execute(ship.sh {} ; read -p [fin])"
+ --bind "alt-t:execute(bild {} ; run.sh {} test ; read -p [fin])"
+ --bind "ctrl-/:change-preview-window(right,88|right,70%|hidden|)"
+ --bind "alt-0:change-preview(bat -p --color=always {})"
+ --bind "alt-1:change-preview(git log --color=always --date=relative --abbrev-commit --pretty=format:'%Cred%h%Creset %s / %an %Creset%C(yellow)%d%Creset%Cgreen(%cr)%Creset' -- {})"
+ --bind "alt-2:change-preview(git log --color=always {})"
+ --bind "alt-3:change-preview(git log --color=always -p {})"
+ --bind "alt-4:change-preview(git blame -c --date=short {})"
+ --bind "alt-5:change-preview(git log --pretty=short {} | git shortlog -nse)"
+ --bind "alt-6:change-preview(git log --pretty=short {} | git shortlog)"
+ --bind "backward-eof:abort"
+ --bind "pgup:preview-page-up"
+ --bind "pgdn:preview-page-down"
+ --header-first
+ --header="? for keybindings"
+ --border=top
+ --border-label="$(lolcat -f <<< "hack a namespace")"
+ --color=label:italic
+ --preview-window="bottom,80%"
+ --preview "bat -p --color=always {}"
+)
+sh -c "$nss" | fzf "${fzf_flags[@]}"
+
diff --git a/Omni/Ide/push.sh b/Omni/Ide/push.sh
new file mode 100755
index 0000000..43dff28
--- /dev/null
+++ b/Omni/Ide/push.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+# Eventually convert to haskell, see:
+# - https://github.com/awakesecurity/nix-deploy/blob/master/src/Main.hs
+# - http://www.haskellforall.com/2018/08/nixos-in-production.html
+prefix=${PWD/$CODEROOT}
+if [[ "$prefix" == "" ]]
+then
+ target="$1"
+else
+ target="$prefix.$1"
+fi
+what=$(realpath "${CODEROOT:?}/_/nix/$target")
+# hack: get the domain from the systemd service. there does not seem to be a way
+# to get it from nix-instantiate. (or, maybe i should put this in bild --plan?)
+where=$(rg --only-matching --replace '$2' --regexp '(domainname ")(.*)(")' \
+ "$what/etc/systemd/system/domainname.service")
+nix copy --to ssh://"$USER"@"$where" "$what"
+ssh "$USER"@"$where" sudo "$what"/bin/switch-to-configuration switch
+ssh "$USER"@"$where" sudo nix-env --profile /nix/var/nix/profiles/system --set "$what"
+echo "${GRN}good: push: $target${NC}"
diff --git a/Omni/Ide/repl.sh b/Omni/Ide/repl.sh
new file mode 100755
index 0000000..3b6a536
--- /dev/null
+++ b/Omni/Ide/repl.sh
@@ -0,0 +1,84 @@
#!/usr/bin/env bash
###
### a simple complement to bild which only deals with launching repls
###
### > repl [opts] <target..>
###
### Starts a repl/shell for one or more targets. (Currently, all targets must
### have the same extension for this to work.) Repls started with this script
### should bind to `localhost:$PORT`.
###
### Options:
###   --bash    start bash instead of the target language repl
help() {
  # self-documenting header: print the '###' lines above
  sed -rn 's/^### ?//;T;p' "$0"
}
if [[ $# == 0 ]] || [[ "$1" == "-h" ]]; then
  help
  exit 1
fi
##
  set -e
  CMD=
  if [[ "$1" == "--bash" ]]; then
    CMD="bash"
    shift
  fi
  # fix: keep targets as a real array so each target reaches bild as its own
  # argument (the old scalar joined all targets into one word)
  targets=("${@:?}")
  json=$(bild --plan "${targets[@]}")
  mapfile -t langdeps < <(jq --raw-output '.[].langdeps | select(length > 0) | join("\n")' <<< "$json")
  mapfile -t sysdeps < <(jq --raw-output '.[].sysdeps | select(length > 0) | join("\n")' <<< "$json")
  mapfile -t rundeps < <(jq --raw-output '.[].rundeps | select(length > 0) | join("\n")' <<< "$json")
  exts=$(jq --raw-output '.[].namespace.ext' <<< "$json" | sort | uniq)
  packageSet=$(jq --raw-output '.[].packageSet' <<< "$json")
  module=$(jq --raw-output '.[].mainModule' <<< "$json")
  BILD="(import ${CODEROOT:?}/Omni/Bild.nix {})"
  # build up nix-shell --packages flags from the plan's dependency lists
  declare -a flags=(--packages "$BILD.pkgs.pkg-config")
  for lib in "${sysdeps[@]}"; do
    flags+=(--packages "$BILD.pkgs.${lib}")
  done
  for lib in "${rundeps[@]}"; do
    flags+=(--packages "$BILD.pkgs.${lib}")
  done
  case $exts in
    C)
      flags+=(--packages "$BILD.pkgs.gcc")
      command="bash"
      ;;
    Hs)
      # fix: '[ -z ${var+PORT} ]' tested whether the unrelated variable 'var'
      # was set; test whether PORT itself is unset
      if [ -z "${PORT+x}" ]; then
        echo "warn: repl: ghci does not support binding to a port"
      fi
      flags+=(--packages "$BILD.haskell.ghcWith (h: with h; [${langdeps[*]}])")
      command=${CMD:-"ghci -i${CODEROOT:?} -ghci-script ${CODEROOT:?}/.ghci ${targets[*]}"}
      ;;
    Scm)
      for lib in "${langdeps[@]}"; do
        flags+=(--packages "$BILD.guile-${lib}")
      done
      flags+=(--packages "$BILD.guile")
      command=${CMD:-"guile -L ${CODEROOT:?} -C ${CODEROOT:?}/_/int --r7rs --listen=${PORT:-37146}"}
      ;;
    Lisp)
      flags+=(--packages "$BILD.$packageSet (p: with p; [asdf swank ${langdeps[*]}])")
      command=${CMD:-"sbcl --eval '(require :asdf)' --eval '(require :swank)' --eval '(swank:create-server :port ${PORT:-4005})' --load ${targets[*]}"}
      ;;
    Rs)
      flags+=(--packages "$BILD.pkgs.rustc")
      command=bash
      ;;
    Py)
      # always ship mypy into the python repl environment
      langdeps+=("mypy")
      flags+=(--packages "$BILD.python.pythonWith (p: with p; [${langdeps[*]}])")
      PYTHONPATH=$CODEROOT:$PYTHONPATH
      pycommand="python -i $CODEROOT/Omni/Repl.py $module ${targets[*]}"
      command=${CMD:-"$pycommand"}
      ;;
    *)
      echo "unsupported targets: ${targets[*]}"
      exit 1
      ;;
  esac
##
  nix-shell "${flags[@]}" --command "$command" --show-trace
##
diff --git a/Omni/Ide/run.sh b/Omni/Ide/run.sh
new file mode 100755
index 0000000..506aa92
--- /dev/null
+++ b/Omni/Ide/run.sh
@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# run.sh <target> [args..] - look up a target's output binary in the build
# plan and exec it with the remaining arguments
set -eu
ns="$1"
shift
# ask bild for the plan and pull this namespace's declared output name
binary="$(bild --plan "$ns" | jq --raw-output ".\"${ns}\".out")"
exec "${CODEROOT:?}/_/bin/${binary}" "$@"
diff --git a/Omni/Ide/ship.sh b/Omni/Ide/ship.sh
new file mode 100755
index 0000000..8783e9b
--- /dev/null
+++ b/Omni/Ide/ship.sh
@@ -0,0 +1,25 @@
#!/usr/bin/env bash
#
# ship <target>...
#
# lint, bild, test, and push one or more targets. if no targets are supplied,
# ship everything we know how to ship
##
  set -eu
  targets=("$@")
  # no arguments: interactively choose deployables from the bild output tree
  if (( ${#targets[@]} == 0 )); then
    mapfile -t targets < <(
      fd -t l . "$CODEROOT/_/nix/" \
        | sed "s,$CODEROOT/_/nix/,,g" \
        | fzf --multi --prompt="ship _/nix/" \
            --preview="file $CODEROOT/_/nix/{}" \
            --preview-window=bottom,wrap
    )
  fi
  # gate on lint and tests before anything gets deployed
  lint "${targets[@]}"
  bild --test "${targets[@]}"
  for thing in "${targets[@]}"; do
    push.sh "$thing"
  done
##
diff --git a/Omni/Ide/tips.sh b/Omni/Ide/tips.sh
new file mode 100755
index 0000000..453e464
--- /dev/null
+++ b/Omni/Ide/tips.sh
@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# print the omnidev banner and a quick reference of the dev commands
echo ""
figlet <<< "omnidev" | lolcat
echo ""
cat << 'EOF'
  bild     compile code
  repl.sh  start a repl
  deps     manage dependencies with niv
  tips.sh  show this message
  lint     auto-lint all changed files
  push.sh  send a namespace to the cloud
  ship.sh  lint, bild, and push one (or all) namespace(s)
EOF
echo ""
diff --git a/Omni/Ide/version.sh b/Omni/Ide/version.sh
new file mode 100755
index 0000000..60f9c91
--- /dev/null
+++ b/Omni/Ide/version.sh
@@ -0,0 +1,15 @@
#!/usr/bin/env bash
#
# version.sh <file> - simple implementation of kelvin versioning
#
# kelvin versioning counts DOWN: every commit that touches the file lowers
# the version, cooling it toward stability.
##
  set -eu
  ns=$1
  # fix: test the named variable, not the raw positional, for consistency
  if [[ -e "$ns" ]]; then
    # number of commits that ever touched this file (following renames)
    commits=$(git log --oneline --follow "$ns" | wc -l)
    # gold melts at 1337 kelvin, so we start with this
    # bc we are forging gold here
    # fix: plain shell arithmetic; no need to shell out to bc for integers
    echo $((1337 - commits))
  else
    echo -1 # signal that file doesn't exist
  fi
##
diff --git a/Omni/Keys/Ben.pub b/Omni/Keys/Ben.pub
new file mode 100644
index 0000000..d0e5b4a
--- /dev/null
+++ b/Omni/Keys/Ben.pub
@@ -0,0 +1,6 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDDhmSEbvX6LSk1ZO/whhAWpxwUxGPwbn7ZKVmxLcIilLdkd/vhFQKSYyMBW+21G3cMbwyFVsCyPbADoXcvV5OSIklxgitP77/2TAgkEPjyklJ4KD0QNDjpu+YGGIyVTgE9YPBhpwuUlxRhux15vN8xzAXq4f5/xpyBPekIdbEaEUZHrKN/z9g8cgw9ZMWSrchbsE3QlU8MJK78HO+v3TjH7Ip+LffWNuhckiYnzT8Duy47vgc1OYqtJaDMN/ufK7yeNILK81M1ybHGOlqYxSfV/RM7oD0P5w5YeTXMpRsOyn4YVzhWSQFrlf08XbwlZUNm6Pb8eNRjM+3YyFTcUU/S81xKwOPRNNhlPnxz+tUltCR3H/0Falu1pxJYT2qfuM9j9z9xA1bJEsSSZ1b2bsHw7ujpRmg0xsPUk7DXIQ1Kh92BFfmDoZWeqsMF1E7H8iuaVsN9k96BwbBfiB4stQqI3ycuHO9zbsa12y8AQusDbr9W8rl/vR0pKNrcNO32ojOzkblJGWgyNxDvTS4l69+qi6pMBONicUUMQnXEtJoasjpECzwlAHIYJMmFQUuloEafR8b0ZAaCw+I5SfsyYF4hHLYseHvMavxgLNZ6W4ZlaL9XmQ7ZGhh10ub4ceW61QvCzKD34yO1yl8PcmS8Fa7bZbGxkq36oCusGbD65AlY+w== ben@lithium
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDakRPWoxb//V+cQAS3Rl9zfZ+izmgESTdnkawOmwu8rlbcrKENNGyAVoPVr1jQh4toK3zUnItwel3hqAMHf+2b1jUcKcrPC3rKh9SD86vX8lJsOMmq9u/g+Fp3kkmRUps802F1/53M9dVGGuVVtQDgycutJqlWKpH5JEpiLWkkq2ExWOqv13WcHzXUwyl3Fh/ubzZunaov37k794o7hqVY/H+Je2mULmWFVO2Dms3sU+czvgxUDMKIwc2ebLNFgziIDBfL81Funrg5rPeYxv7ayUK6oyPNW5FJrx3Kd+2U1ikqxqNERmu8FYRO3CDmxbfx+EB9PVHg8LWwdrTLJbDj ben@helium
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDfWoLvvQPVAGGopBQpzJ5Ag50kEX0Sgl2shOE+39FccoMmQL1QKMpdGDom+PbY+4fupeGUAg186RbijjK6jTx4fLKpqstI1WWXRyjQ73LnFkskisqiJGZzYkO8Rck4sZqhMPYUmAhkQjRlWbgDn4RIjlCa3GIzBTqSgXNVB91e5d6/MMVChLhhX1IkfmDjrbRVgyuw9l8jiR82EQaSCqsdeQPTb4YvKjO5KxT98IEaO2/OrA1G+YJC04xAgfvA3QUgMixg/81c5swWsQhew2zt1mN3cqjVr6SwqO0ffKY4y11vMQZZZVnU810RSwPB1kIIYSsRAjibNhSbfE+IkG+VQBvkgaTHbm6otNB2g9UjqpdqEjsAJTQi/OC20L1ScVMWm4avPxbh3iJXkfFSiWC9ra0RAyWmItj9BJAFTw25xQdrhERH7jfr96KEFqzm5ULq2gKBM+/zXUEtXEQsQDqvmJ5zn80G/A87H6Wm+8McFGkAEBju8LUdY2k886RAOzf0PUp+M9MIt9GWWkpwI4DN4CHdhztmwpbONYvEno3AazOClf80t3DFzjAfX/gmxOlRYvlPL3PklAElFWzGXm79tY5hnk48cawDHiPLcxtgJKPxCN1157tBbi23x+zRBlUFZaxbyEqR5v0UA6u6lSeXxndi9Uwg1KKbPR4AAvWwHQ== bsima@bsima
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDapJC9zQFd6uTAXdY1U/cPEDmjs6QP1iBdkukOMIOSKSMoZ4lic4o5RNm1tYDytDMp+mRjGhd7LWczeTJ57RRhdxr/pjBw9A6UjxM+lUkQGiERz+hMe57Jz+TH2v1fE0JHP44z1dVIKKOpPqQA5X32sTs97XstAXSyr2kdgv15dGfbhdkDeN2KcJWdMZXhl4V8vMHMJdGKc+cIdN/SrTlcqkH8KW/cQM2LVubXWeCusEOEXt2P8SPZjZ0KNcAZsg68+RzLT4NE46J6FFwQQqr8xnBTbVEFSa56Af9fadp1n11kGEZlsHCJw1VUFj/1KJAZBDPzXofz02y6p4/tLght5xIOP8pzS14uEkvkKBshOychQ11mzWjjLTQ/I9B/xSsgl1XZMeKeMXJdOWb4Be8KkTPrrqSSOUzMR8bUAtg9dlzwaKbS66CRzy6Ma6uBXJiCnDo7yAkPsaK1PIFm4hPcgnD7nsEci/dQUovYAPrl96qU6UhNvvPe005UlYeRhRT/j7P/n2seBTLQ3PNF0UhPMP/MNc4Yx6wLcXCsMAlXgQ0AeZkHd2bPMXETJtk4vk87NOh3eUEkwgUCNjJlLZa8q0YTy/YNUA5+lIVCsmzxFzEAuMTfKju7ID23wBypuz0/w7niElVX+LA+8zlyeyFWWoouidi1fJq+fNpnAyYhdQ== ben@simatime
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC/9XcFrZVlzoLOi8QWDMvVWFWsQs6RcnTZhXT1Gv+PPRBVvCf09A2fCSEHxF+MHisHJqGvRvmCTBM74Tcn8PNoUzLD0wgyxB2zt3oT+MkjWGLifgatMiNRWF+nPU4Mhyzk7OhPvCkJ+NWXyA/cdptt7GrsY712sLC6rIW0lsHUXoiOy24+xkmBnSbxlCJQoKtMf7XqcFZ5YbZFdOiLkGdtXbLw2tRrAWGNnCs8MlNx6ZgJR9SSvaAZvu/7hfHE3srxYmfkhDX2lvXokRfQvOPQhNGiRrigVQBjrIKAKJGq3BnmuAO9BuuyFvL8Ue/9xn4MGJ/SYxHymES3AQ6/80xj cardno:18 086 142
+ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIG53aZeZWV/yA0P+CLA5BuBSJAbJbnMtFkIgxrYE+DpY Generated By Termius
diff --git a/Omni/Keys/Deploy.pub b/Omni/Keys/Deploy.pub
new file mode 100644
index 0000000..664a2d9
--- /dev/null
+++ b/Omni/Keys/Deploy.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDlLRbbXgwjF7IqObf4dZE/jj0HoT6xJR6bP/6ZrJz7NPCPIgY3GacOtBfkJp6KK0zKQdFmxNpcfb3zgpe/Ru7pkmSfI9IoWAU3aLPWK2G3tbLPmktGmF9C53OhyXgFtBGr2Q/+wSRKAfN/FrEEa2FuRBtvtcAMiwbQLbFCzlmWhE7swSBvg38ZSFrjhANsEhfNVCtsrtG16fkfrfmBFv4JIog1fEoMKmXg7rhMjpaas8+n52HMFXvjllePRpywK4wB20GOcOuDSdc3i3zs7NFuicGunEpW2S/byrHotSWHZ9VuUwPn3GJ6xorrGyvsRuPS2anhHTSBxYCqYdXg0BIYUn1x5Uhtzd8kIU06gSLsvuhqGCLNucnXAT1Zix7pSlO21be81SX4vwQEth+6Dkm6kja0ArHZL6wglF8Njd1fV9iOwvcS07clwa/2S8suFLwVrQXz16vfAfA2zi4/qeop5Sv9W4DIOZuIMPmbWZCoy7L6Fu4+x4prb8LCQNM5m4CP3HngCW8PpxtBbBJd0dcXVap1HgDTIt/CLH8ms52uX5k3bHuvzryOihSuwmi/cDZAJAmbgclM9klsZr4R/GAoAWhhGxXM2tLuiwZ2nLvCPlXbBazZpdM2aC3VIwnMwJrJFu2u9B6RSsz2ijbygecT98UmiMYK7Mk1y6GkvY+mDQ== ben@lithium
diff --git a/Omni/Keys/Dre.pub b/Omni/Keys/Dre.pub
new file mode 100644
index 0000000..ebacdf9
--- /dev/null
+++ b/Omni/Keys/Dre.pub
@@ -0,0 +1,2 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCZTvMnhYNDcc+Rs3/6XSPuCKb+zoBp39vKwNUS4X/26xWG9aYXaUWYuutxHPzqepd4Jk0TguRhdA1R9bLzQrRzCj9SF0WGaaOS5dkKIkE+Gf1nmjvfBRMMyVGbzCNUBIA3Tj0b6ZV+maGKvomZ+EvfOkjKJXlCBmj2uEfTAvVboCWgP9O8/wSeMowniHNQ3hXgljVb5cYKepiCiDPZA+80OdTkUQlISzM1oHkUGMy/dKHAs52lk+l45abxmZ7/SQ1LK3+nojRWSh3ZwqW5A8MjfmwqinxeLbSrC3MY640K3Ci9Qo3B/py3n+v3k9wcSJQCNn2FJ41FgX2e8EM438ziUKNxQIKLDHYj/eCa1MlOBmXFA7r6wo4ogFbN715LjBGbbCyHfIU4a4uBjR1mBLBBxmwy+M9oA+2yB82mh2PU61lUdsup60lWfPeghlqqyGdfYqo0D0XVNcDKcVPbHlDh6HG2XOjYdbDKIX7rsGlNSGnpnC1GOlEq5cDKcIyS7+M= andres@andres-XPS-15-9510
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCu25xihS8QrPWzsmrJWdAGp05G976pclLlBg9CEx7y0rCK7ngOKqYXzLwE/GVwBoolZc1txZ0RDmYlf7eS0+jTHvo031k31vhrg1BP+iMUErmj61dKmE4/oaHAYt31FMLAGS/W9acai4Vl4LVhiKFlYIHzQwdT6Q+dirxt+4rIKmrhqVerbF1e021Ql4drI8OsPGgmgrIUm5yiBapi0xk0ZGIr7Z0NObpncZqe/Dirl0F/KNKdk/XjUVUu2CeupdVnYl3+sd+y5lcgzzS+isK6Zt6LNGZQYPoyzEg0dgz7f7+UngaEqgoI31QqTG/Dnt6+mIXzmS3AQLT34aBRKpO+CSevCVVnXwMkHRWOGtYFZBgt7oExFrI7dSgocXmIDYOYeLYHutShBquybCXGTyxWMthw2mIBFuyKEA70nIL7VkwFlPoXx/1nRSlpgM7+LjUT4xcxgjH3qggyuhCEQm6JAOUFzXLB5rQ+PXpvFsnNiZODsxu2oMOfVxpIpgCafEM= andres@andres-XPS-15-9510
diff --git a/Omni/Keys/Nick.pub b/Omni/Keys/Nick.pub
new file mode 100644
index 0000000..4dc08fb
--- /dev/null
+++ b/Omni/Keys/Nick.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDfSOxXJTQADjROqbaiJtjbJaHTsBtuWNvQpDvXLigl9R27VqIn7dYk2STuWglwFyrvYfU1UmjgJcJ6J2KbXGTH5mhaC04MJ4aqmOR3Ynnq7nDzmtEtn1I+K7LmpFXsFXgOTzIlzggIWflGd1pPBwgWqMoPDcSqNQFPI/+rk1JOxk3e2Mq60VTp9WM9hs0AJQEyZ+wwZ0vyrj588kQb6jQUZ7qx1UZoDzPc57zREEZbQeU1Gd9FK2bCHlKOBHYlqIftSRBGGCpuo7zobhajR0xHO9RnF0NmeLbW85XhDus8vVgBg/BTDPxHEzm5jKiCkc+i3ia0Ff9mp2zgtSdXCp5jbVZ3AYfYLi1zbPWmaSdWqFx2ntOLwWR3/RHjw6+b4KmUQ4xtQHyXOijTBCH29i7VCo7l8WL+I2mSGJ7/Wtw7NFtMpVVs8/0iKt2t12FIefzvbZoWU7vbmuO7+gQI5l+F+JE6DLWOl04vT/V98WxiHA5rbCjTT/bubs4gTeCR9qNehaoM+apitpUP8HXygnxD7EJeK6JNkdub9TY663IkiKlpnWgeoDTNSP7JF/jkU0Nt8yoR2pTyxQqMFYa37/3WKjmSHk1TgxLEmlwHQFtIkTPn8PL+VLa4ACYuWUjxS4aMRpxo9eJUHdy0Y04yKxXN8BLw7FAhytm2pTXtT4zqaQ== nicksima@gmail.com
diff --git a/Omni/Lint.hs b/Omni/Lint.hs
new file mode 100644
index 0000000..45d1523
--- /dev/null
+++ b/Omni/Lint.hs
@@ -0,0 +1,310 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Global linter.
+--
+-- : out lint
+-- : run ormolu
+-- : run hlint
+-- : run ruff
+-- : run deadnix
+-- : run shellcheck
+-- : run indent
+-- : run nixfmt
+module Omni.Lint (main) where
+
+import Alpha
+import qualified Data.Aeson as Aeson
+import qualified Data.ByteString.Char8 as Char8
+import qualified Data.Map as Map
+import qualified Data.String as String
+import qualified Data.Text as Text
+import qualified Omni.Cli as Cli
+import qualified Omni.Log as Log
+import Omni.Namespace (Ext (..), Namespace (..))
+import qualified Omni.Namespace as Namespace
+import Omni.Test ((@=?))
+import qualified Omni.Test as Test
+import qualified System.Directory as Directory
+import qualified System.Environment as Environment
+import qualified System.Exit as Exit
+import qualified System.Process as Process
+
+main :: IO ()
+main = Cli.main <| Cli.Plan help move test pure
+
+move :: Cli.Arguments -> IO ()
+move args =
+ Environment.getEnv "CODEROOT" +> \root ->
+ case Cli.getAllArgs args (Cli.argument "file") of
+ [] ->
+ changedFiles
+ +> traverse Directory.makeAbsolute
+ /> map (Namespace.fromPath root)
+ /> catMaybes
+ /> Namespace.groupByExt
+ +> run mode
+ +> exit
+ files ->
+ files
+ |> filter (not <. Namespace.isCab)
+ |> traverse Directory.makeAbsolute
+ +> filterM Directory.doesFileExist
+ /> map (Namespace.fromPath root)
+ /> catMaybes
+ /> Namespace.groupByExt
+ +> run mode
+ +> exit
+ where
+ mode =
+ args
+ `Cli.has` Cli.longOption "fix"
+ ?: (Fix, Check)
+
+test :: Test.Tree
+test =
+ Test.group
+ "Omni.Lint"
+ [ Test.unit "haskell files return two Results" <| do
+ results <- run Check <| Map.singleton Hs <| [Namespace ["Omni", "Lint"] Hs]
+ length results @=? 2
+ ]
+
+help :: Cli.Docopt
+help =
+ [Cli.docopt|
+all your lint are belong to us
+
+Usage:
+ lint test
+ lint [options] [<file>...]
+
+Options:
+ -f, --fix Apply fixes automatically
+ -h, --help Print this info
+|]
+
+exit :: [Result] -> IO ()
+exit results = Exit.exitWith <| (n > 0) ?: (Exit.ExitFailure n, Exit.ExitSuccess)
+ where
+ n = length <| filter bad results
+ bad = \case
+ (Warn _) -> False
+ Done _ (Bad _) -> True
+ _ -> False
+
+printResult :: Result -> IO Result
+printResult r = case r of
+ Warn err ->
+ Log.warn ["lint", err]
+ >> Log.br
+ >> pure r
+ Done (Linter {..}) (Bad err) ->
+ Log.fail ["lint", exe]
+ >> Log.br
+ >> (err /= "")
+ ?| (putText <| Text.pack err)
+ >> pure r
+ Done (Linter {..}) Good ->
+ Log.good ["lint", exe]
+ >> Log.br
+ >> pure r
+ NoOp ext ->
+ Log.info ["lint", "noop", show ext]
+ >> pure r
+
-- | Files changed on this branch relative to the live branch: find the
-- merge-base of HEAD and origin/live, then list the names of files that
-- differ, excluding deleted files (--diff-filter=d).
changedFiles :: IO [FilePath]
changedFiles =
  git ["merge-base", "HEAD", "origin/live"]
    -- strip the trailing newline from the commit hash
    /> filter (/= '\n')
    +> (\mb -> git ["diff", "--name-only", "--diff-filter=d", mb])
    /> String.lines
  where
    -- run git with the given args, empty stdin, capturing stdout
    git args = Process.readProcess "git" args ""
+
+data Mode
+ = -- | Just check the files and return an exit code.
+ Check
+ | -- | Fix the files in place, return 0 if successful, otherwise return 1.
+ Fix
+
+data Linter = Linter
+ { exe :: Text,
+ -- | Arguments added when in the "check" mode
+ checkArgs :: [Text],
+ -- | Arguments added when in the "fix" mode
+ fixArgs :: Maybe [Text],
+ -- | An optional function to format the output of the linter as you want
+ -- it, perhaps decoding json or something
+ decoder :: Maybe (String -> String)
+ }
+
+ormolu :: Linter
+ormolu =
+ Linter
+ { exe = "ormolu",
+ checkArgs = ["--mode", "check", "--no-cabal"],
+ fixArgs = Just ["--mode", "inplace", "--no-cabal"],
+ decoder = Nothing
+ }
+
+hlint :: Linter
+hlint =
+ Linter
+ { exe = "hlint",
+ checkArgs = [],
+ -- needs apply-refact >0.9.1.0, which needs ghc >9
+ -- fixArgs = Just ["--refactor", "--refactor-options=-i"]
+ fixArgs = Nothing,
+ decoder = Nothing
+ }
+
+ruffFormat :: Linter
+ruffFormat =
+ Linter
+ { exe = "ruff",
+ checkArgs = ["format", "--check", "--silent"],
+ fixArgs = Just ["format", "--silent"],
+ decoder = Nothing
+ }
+
+ruff :: Linter
+ruff =
+ Linter
+ { exe = "ruff",
+ checkArgs = ["check"],
+ fixArgs = Just ["check", "--fix"],
+ decoder = Nothing
+ }
+
+data DeadnixOutput = DeadnixOutput
+ { file :: FilePath,
+ results :: [DeadnixResult]
+ }
+ deriving (Generic, Aeson.FromJSON)
+
+data DeadnixResult = DeadnixResult
+ { column :: Int,
+ endColumn :: Int,
+ line :: Int,
+ message :: String
+ }
+ deriving (Generic, Aeson.FromJSON)
+
+deadnix :: Linter
+deadnix =
+ Linter
+ { exe = "deadnix",
+ checkArgs = "--fail" : commonArgs,
+ fixArgs = Just <| "--edit" : commonArgs,
+ decoder = Just decodeDeadnixOutput
+ }
+ where
+ commonArgs =
+ [ -- "--no-underscore",
+ -- "--no-lambda-pattern-names",
+ "--output-format",
+ "json"
+ ]
+
-- | Reformat deadnix's JSON output into conventional
-- "file:line:column: message" lines, one per result. Panics when the JSON
-- cannot be decoded, since that indicates a deadnix/schema mismatch rather
-- than an ordinary lint failure.
decodeDeadnixOutput :: String -> String
decodeDeadnixOutput deadnixJson =
  deadnixJson |> Char8.pack |> Aeson.decodeStrict |> \case
    Nothing -> panic "could not decode deadnix output"
    Just o -> formatDeadnixOutput o
  where
    formatDeadnixOutput DeadnixOutput {..} =
      joinWith "\n" <| map formatDeadnixResult results
      where
        formatDeadnixResult DeadnixResult {..} =
          file <> ":" <> show line <> ":" <> show column <> ": " <> message
+
+nixfmt :: Linter
+nixfmt =
+ Linter
+ { exe = "nixfmt",
+ checkArgs = ["--check"],
+ fixArgs = Nothing,
+ decoder = Nothing
+ }
+
+shellcheck :: Linter
+shellcheck =
+ Linter
+ { exe = "shellcheck",
+ checkArgs = [],
+ fixArgs = Nothing,
+ decoder = Nothing
+ }
+
+indent :: Linter
+indent =
+ Linter
+ { exe = "indent",
+ checkArgs = [],
+ fixArgs = Nothing,
+ decoder = Nothing
+ }
+
+data Status = Good | Bad String
+ deriving (Show)
+
+data Result
+ = Done Linter Status
+ | Warn Text
+ | NoOp Namespace.Ext
+
+run :: Mode -> Map Namespace.Ext [Namespace] -> IO [Result]
+run mode nsmap = nsmap |> Map.assocs |> traverse (runOne mode) /> concat
+
+runOne :: Mode -> (Ext, [Namespace]) -> IO [Result]
+runOne mode (ext, ns's) = results +> traverse printResult
+ where
+ results :: IO [Result]
+ results =
+ -- i would run these with mapConcurrently, but the output gets mangled. to
+ -- do it right i need a queue for the results. someday.
+ sequence <| case ext of
+ Namespace.Hs ->
+ [ lint mode ormolu ns's,
+ lint mode hlint ns's
+ ]
+ Namespace.Py ->
+ [ lint mode ruffFormat ns's,
+ lint mode ruff ns's
+ ]
+ Namespace.Sh -> [lint mode shellcheck ns's]
+ Namespace.Nix -> [lint mode deadnix ns's, lint mode nixfmt ns's]
+ Namespace.C -> [lint mode indent ns's]
+ _ ->
+ ns's
+ |> map Namespace.toPath
+ |> joinWith ", "
+ |> str
+ |> ("no linter for " <>)
+ |> Warn
+ |> (pure :: Result -> IO Result)
+ |> (pure :: IO Result -> [IO Result])
+
+lint :: Mode -> Linter -> [Namespace] -> IO Result
+lint mode linter@Linter {..} ns's =
+ Log.info ["lint", exe, (tshow <| length ns's) <> " files"]
+ >> Process.readProcessWithExitCode (str exe) args ""
+ /> \case
+ (Exit.ExitSuccess, _, _) ->
+ Done linter Good
+ (Exit.ExitFailure _, msg, _) -> case decoder of
+ Nothing -> Done linter <| Bad msg
+ Just fmt -> Done linter <| Bad <| fmt msg
+ where
+ args = case (mode, fixArgs) of
+ (Fix, Just args_) ->
+ map str args_ ++ map Namespace.toPath ns's
+ (Fix, Nothing) ->
+ map Namespace.toPath ns's
+ (Check, _) ->
+ map str checkArgs ++ map Namespace.toPath ns's
diff --git a/Omni/Llamacpp.py b/Omni/Llamacpp.py
new file mode 100644
index 0000000..66b57d8
--- /dev/null
+++ b/Omni/Llamacpp.py
@@ -0,0 +1,43 @@
+"""
+Test that llama.cpp can build and exec in the omni repo.
+
+Note that this does not test if llama-cpp can actually execute any models. I
+(currently) use ollama for running and managing models, but I'd like to make
+sure llama-cpp still works in case I need/want to switch at some point.
+"""
+
+# : out llamacpp-test
+# : run llama-cpp
+
+import os
+import sys
+import unittest
+
+
+class TestLlamaCpp(unittest.TestCase):
+ """Test that llama.cpp is available."""
+
+ def test_in_path(self) -> None:
+ """Test that llama.cpp is in $PATH."""
+ self.assertIn("llama-cpp", os.environ.get("PATH", ""))
+
+
+def test() -> None:
+ """Run this module's test suite."""
+ suite = unittest.TestSuite()
+ suite.addTests(
+ unittest.defaultTestLoader.loadTestsFromTestCase(TestLlamaCpp),
+ )
+ unittest.TextTestRunner().run(suite)
+
+
+def main() -> None:
+ """Entrypoint."""
+ if sys.argv[1] == "test":
+ test()
+ else:
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Omni/Log.hs b/Omni/Log.hs
new file mode 100644
index 0000000..91fcb55
--- /dev/null
+++ b/Omni/Log.hs
@@ -0,0 +1,133 @@
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Log reporting interface
+--
+-- Some guidelines:
+--
+-- * don't allow `mark` in final code
+--
+-- * don't use `br` after `info`, unless verbose mode is requested (--loud flag in bild)
+--
+-- * always use `br` after `good`, `fail`, and `pass`
+--
+-- * often use `br` after `warn`, unless its really unimportant
+--
+-- * labels should be roughly hierarchical from general->specific
+module Omni.Log
+ ( Lvl (..),
+ good,
+ pass,
+ info,
+ warn,
+ fail,
+ wipe,
+
+ -- * Debugging
+ mark,
+
+ -- * Operators
+ (~&),
+ (~?),
+
+ -- * Wai Middleware
+ wai,
+
+ -- * Low-level
+ msg,
+ fmt,
+ br,
+ )
+where
+
+import Alpha hiding (pass)
+import qualified Data.Text as Text
+import qualified Network.Wai as Wai
+import Rainbow (chunk, fore, green, magenta, red, white, yellow)
+import qualified Rainbow
+import qualified System.Environment as Env
+import qualified System.IO as IO
+import System.IO.Unsafe (unsafePerformIO)
+
+data Lvl = Good | Pass | Info | Warn | Fail | Mark
+
+-- | Get the environment. This should probably return 'Omni.App.Area' instead of
+-- 'String', but I don't want to depend on everything in 'Omni.App', so some kind
+-- of refactor is needed.
+area :: IO String
+area =
+ Env.lookupEnv "AREA"
+ /> maybe "Test" identity
+
+msg :: Lvl -> [Text] -> IO ()
+msg lvl labels =
+ area +> \case
+ "Live" -> putDumb
+ _ ->
+ Env.getEnv "TERM" +> \case
+ "dumb" -> putDumb
+ _ -> Rainbow.hPutChunks IO.stderr [fore color <| clear <> chunk txt <> "\r"]
+ where
+ -- For systemd-journal, emacs *compilation* buffers, etc.
+ putDumb = putStr <| txt <> "\n"
+ txt = fmt (label : labels)
+ (color, label) = case lvl of
+ Good -> (green, "good")
+ Pass -> (green, "pass")
+ Info -> (white, "info")
+ Warn -> (yellow, "warn")
+ Fail -> (red, "fail")
+ Mark -> (magenta, "mark")
+ clear = "\ESC[2K"
+
+-- | Helper function for formatting outputs of labels.
+fmt :: [Text] -> Text
+fmt = Text.intercalate gap
+
+gap :: Text
+gap = ": "
+
+br :: IO ()
+br = Rainbow.hPutChunks stderr ["\n"] >> IO.hFlush stderr
+
+wipe :: IO ()
+wipe = hPutStr stderr ("\r" :: Text) >> IO.hFlush stderr
+
+good, pass, info, warn, fail :: [Text] -> IO ()
+good = msg Good
+pass = msg Pass
+info = msg Info
+warn = msg Warn
+fail = msg Fail
+
+-- | Like 'Debug.trace' but follows the patterns in this module
+mark :: (Show a) => Text -> a -> a
+mark label val =
+ unsafePerformIO <| do
+ msg Mark [label, tshow val]
+ br
+ pure val
+
+-- | Pipelined version of 'mark'.
+--
+-- @
+-- mark label val = val ~& label
+-- @
+(~&) :: (Show a) => a -> Text -> a
+val ~& label = mark label val
+
+-- | Conditional mark.
+(~?) :: (Show a) => a -> (a -> Bool) -> Text -> a
+(~?) val test label = if test val then mark label val else val
+
+wai :: Wai.Middleware
+wai app req sendResponse =
+ app req <| \res ->
+ info
+ [ str <| Wai.requestMethod req,
+ show <| Wai.remoteHost req,
+ str <| Wai.rawPathInfo req
+ ]
+ >> br
+ >> sendResponse res
diff --git a/Omni/Log.py b/Omni/Log.py
new file mode 100644
index 0000000..2fbd007
--- /dev/null
+++ b/Omni/Log.py
@@ -0,0 +1,35 @@
"""Setup logging like Omni/Log.hs."""

# noqa: builtin-attribute-shadowing
import logging
import typing


class LowerFormatter(logging.Formatter):
    """A logging formatter that formats logs how I like."""

    def format(self: "LowerFormatter", record: typing.Any) -> typing.Any:
        """Lowercase the level name, then defer to the stock formatter."""
        record.levelname = record.levelname.lower()
        # fix: super(logging.Formatter, self) skipped Formatter itself in the
        # MRO and resolved to object, which has no .format() -- it raised
        # AttributeError on first use. Plain super() calls Formatter.format.
        return super().format(record)


def setup() -> None:
    """Run this in your __main__ function."""
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(levelname)s: %(name)s: %(message)s",
    )
    # relabel the stock level names to short lowercase tags, like Log.hs
    logging.addLevelName(logging.DEBUG, "dbug")
    logging.addLevelName(logging.ERROR, "fail")
    logging.addLevelName(logging.INFO, "info")
    logger = logging.getLogger(__name__)
    formatter = LowerFormatter()
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)


if __name__ == "__main__":
    setup()
    logging.debug("i am doing testing")
diff --git a/Omni/Look.hs b/Omni/Look.hs
new file mode 100644
index 0000000..d75d056
--- /dev/null
+++ b/Omni/Look.hs
@@ -0,0 +1,187 @@
+{-# LANGUAGE OverloadedLists #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+-- | Library of styles
+--
+-- https://leerob.io/blog/how-stripe-designs-beautiful-websites
+module Omni.Look
+ ( -- | Base stylesheets
+ fuckingStyle,
+ -- | Clay.Media extensions
+ prefersLight,
+ whenLight,
+ prefersDark,
+ whenDark,
+ noColorPreference,
+ -- | Font
+ fontStack,
+ fontSmoothing,
+ -- | Clay.Text extensions
+ textDecorationThickness,
+ textDecorationWidth,
+ -- | Elements
+ hoverButton,
+ -- | Geometry
+ marginAll,
+ marginX,
+ marginY,
+ paddingAll,
+ paddingX,
+ paddingY,
+ -- | Border
+ borderRadiusAll,
+ -- | Grid
+ gridArea,
+ gridTemplateAreas,
+ gridTemplateRows,
+ columnGap,
+ rowGap,
+ -- | Alignment
+ justifySelf,
+ )
+where
+
+import Alpha
+import Clay
+import qualified Clay.Flexbox as Flexbox
+import qualified Clay.Property as Property
+import qualified Clay.Stylesheet as Stylesheet
+
+fontStack :: Css
+fontStack = do
+ -- i like adobe source pro, maybe use that instead of camphor
+ fontFamily ["Camphor", "Open Sans", "Segoe UI"] [sansSerif]
+ textRendering optimizeLegibility
+
+fontSmoothing :: Css
+fontSmoothing = do
+ Stylesheet.key "-webkit-font-smoothing" ("antialiased" :: Text)
+ Stylesheet.key "-moz-osx-font-smoothing" ("grayscale" :: Text)
+
+hoverButton :: Css
+hoverButton =
+ button # hover ? do
+ color "#7795f8"
+ transform <| translateY <| px (-1)
+ boxShadow
+ [ bsColor (rgba 50 50 93 0.1)
+ <| shadow
+ (px 7)
+ (px 14),
+ bsColor
+ (rgba 0 0 0 0.08)
+ <| shadow
+ (px 3)
+ (px 6)
+ ]
+
+prefersDark :: Stylesheet.Feature
+prefersDark =
+ Stylesheet.Feature "prefers-color-scheme" (Just (Clay.value ("dark" :: Text)))
+
+prefersLight :: Stylesheet.Feature
+prefersLight =
+ Stylesheet.Feature "prefers-color-scheme" (Just (Clay.value ("light" :: Text)))
+
+noColorPreference :: Stylesheet.Feature
+noColorPreference =
+ Stylesheet.Feature
+ "prefers-color-scheme"
+ (Just (Clay.value ("no-preference" :: Text)))
+
+whenDark :: Css -> Css
+whenDark = query Clay.all [prefersDark]
+
+whenLight :: Css -> Css
+whenLight = query Clay.all [prefersLight]
+
+-- | The stylesheet from <https://perfectmotherfuckingwebsite.com> ported to
+-- Clay, to be used as the base for other styles.
+--
+-- Differences from the original:
+-- - expects use of header/main/footer
+-- - has a sticky footer
+-- - wider, with a bit of body padding
+fuckingStyle :: Css
+fuckingStyle = do
+ "body" ? do
+ display flex
+ minHeight (vh 100)
+ flexDirection column
+ color "#444"
+ margin (px 0) 0 0 0
+ padding (em 0.5) (em 0.5) (em 0.5) (em 0.5)
+ fontSize (px 18)
+ lineHeight (em 1.5)
+ fontFamily
+ [ "Segoe UI",
+ "Roboto",
+ "Helvetica Neue",
+ "Arial",
+ "Noto Sans",
+ "Apple Color Emoji",
+ "Segoe UI Emoji",
+ "Segoe UI Symbol",
+ "Noto Color Emoji"
+ ]
+ [sansSerif]
+ "main" ? Flexbox.flex 1 0 auto
+ "main"
+ <> "header"
+ <> "footer" ? do
+ maxWidth (px 900)
+ width (pct 100)
+ margin (em 1) auto 1 auto
+ padding (em 0) 0 0 0
+ "h1" <> "h2" <> "h3" ? lineHeight (em 1.2)
+ query Clay.all [prefersDark] <| do
+ "body" ? do
+ color white
+ background ("#444" :: Color)
+ "a:link" ? color ("#5bf" :: Color)
+ "a:visited" ? color ("#ccf" :: Color)
+
+textDecorationThickness :: Size LengthUnit -> Css
+textDecorationThickness = Stylesheet.key "text-decoration-thickness"
+
+textDecorationWidth :: Size LengthUnit -> Css
+textDecorationWidth = Stylesheet.key "text-decoration-width"
+
+marginAll :: Size a -> Css
+marginAll x = margin x x x x
+
+marginX :: Size a -> Css
+marginX n = marginLeft n <> marginRight n
+
+marginY :: Size a -> Css
+marginY n = marginTop n <> marginBottom n
+
+paddingAll :: Size a -> Css
+paddingAll x = Clay.padding x x x x
+
+paddingX :: Size a -> Css
+paddingX n = paddingLeft n <> paddingRight n
+
+paddingY :: Size a -> Css
+paddingY n = paddingTop n <> paddingBottom n
+
+borderRadiusAll :: Size a -> Css
+borderRadiusAll x = Clay.borderRadius x x x x
+
+gridArea :: Text -> Css
+gridArea = Stylesheet.key "grid-area"
+
+gridTemplateAreas :: [Property.Literal] -> Css
+gridTemplateAreas = Stylesheet.key "grid-template-areas" <. noCommas
+
+gridTemplateRows :: [Property.Literal] -> Css
+gridTemplateRows = Stylesheet.key "grid-template-columns" <. noCommas
+
+columnGap :: Size a -> Css
+columnGap = Stylesheet.key "column-gap"
+
+rowGap :: Size a -> Css
+rowGap = Stylesheet.key "row-gap"
+
+justifySelf :: JustifyContentValue -> Css
+justifySelf = Stylesheet.key "justify-self"
diff --git a/Omni/Namespace.hs b/Omni/Namespace.hs
new file mode 100644
index 0000000..ef8cefd
--- /dev/null
+++ b/Omni/Namespace.hs
@@ -0,0 +1,170 @@
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+module Omni.Namespace
+ ( Namespace (..),
+ Ext (..),
+ fromPath,
+ toPath,
+ toModule,
+ fromHaskellContent,
+ fromHaskellModule,
+ toHaskellModule,
+ toSchemeModule,
+ fromPythonModule,
+ isCab,
+ groupByExt,
+ )
+where
+
+import Alpha
+import qualified Data.Aeson as Aeson
+import qualified Data.Aeson.Types as Aeson
+import qualified Data.Char as Char
+import qualified Data.List as List
+import qualified Data.List.Split as List
+import qualified Data.Map as Map
+import qualified Data.Text as Text
+import qualified Text.Regex.Applicative as Regex
+
+data Ext
+ = C
+ | Css
+ | Hs
+ | Html
+ | Json
+ | Keys
+ | Lisp
+ | Md
+ | Nix
+ | None
+ | Py
+ | Rs
+ | Scm
+ | Sh
+ | Toml
+ deriving (Eq, Show, Generic, Aeson.ToJSON, Ord)
+
+data Namespace = Namespace {path :: [String], ext :: Ext}
+ deriving (Eq, Show, Generic, Aeson.ToJSON, Ord)
+
+instance Aeson.ToJSONKey Namespace where
+ toJSONKey = Aeson.toJSONKeyText (Text.pack <. toPath)
+
+fromPath :: String -> String -> Maybe Namespace
+fromPath coderoot absPath =
+ List.stripPrefix coderoot absPath
+ +> List.stripPrefix "/"
+ +> Regex.match (Namespace </ rePath <* dot <*> reExt)
+
+toPath :: Namespace -> FilePath
+toPath (Namespace parts ext) =
+ joinWith "/" parts <> toExt ext
+
+toExt :: Ext -> String
+toExt =
+ ("." <>) <. \case
+ C -> "c"
+ Css -> "css"
+ Hs -> "hs"
+ Html -> "html"
+ Json -> "json"
+ Keys -> "pub"
+ Lisp -> "lisp"
+ Md -> "md"
+ Nix -> "nix"
+ None -> "none"
+ Py -> "py"
+ Rs -> "rs"
+ Scm -> "scm"
+ Sh -> "sh"
+ Toml -> "toml"
+
+fromHaskellContent :: String -> Maybe Namespace
+fromHaskellContent c = case Regex.findFirstInfix haskellModule c of
+ Nothing -> Nothing
+ Just (_, Namespace {..}, _) -> Just <| Namespace (filter (/= ".") path) ext
+ where
+ haskellModule =
+ Namespace
+ </ (Regex.string "\nmodule " *> Regex.many (name <|> dot))
+ <*> pure Hs
+
+toModule :: Namespace -> String
+toModule (Namespace parts Hs) = joinWith "." parts
+toModule (Namespace parts Py) = joinWith "." parts
+toModule (Namespace parts Scm) = "(" ++ joinWith " " parts ++ ")"
+toModule (Namespace parts Rs) = joinWith "::" parts
+toModule (Namespace parts C) = joinWith "/" parts <> ".c"
+toModule (Namespace parts Nix) = joinWith "/" parts <> ".nix"
+toModule (Namespace {..}) = panic <| "toModule not implemented for " <> show ext
+
+toHaskellModule :: Namespace -> String
+toHaskellModule = toModule
+
+fromHaskellModule :: String -> Namespace
+fromHaskellModule s = Namespace (List.splitOn "." s) Hs
+
+toSchemeModule :: Namespace -> String
+toSchemeModule = toModule
+
+fromPythonModule :: String -> Namespace
+fromPythonModule s = Namespace (List.splitOn "." s) Py
+
+dot :: Regex.RE Char String
+dot = Regex.some <| Regex.sym '.'
+
+name :: Regex.RE Char String
+name =
+ Regex.many (Regex.psym Char.isUpper)
+ <> ( Regex.many
+ <| Regex.psym
+ <| \c -> Char.isAlphaNum c || c == '_' || c == '-'
+ )
+
+rePath :: Regex.RE Char [String]
+rePath = Regex.many (name <* Regex.string "/" <|> name)
+
+reExt :: Regex.RE Char Ext
+reExt =
+ C
+ <$ Regex.string "c"
+ <|> Css
+ <$ Regex.string "css"
+ <|> Hs
+ <$ Regex.string "hs"
+ <|> Html
+ <$ Regex.string "html"
+ <|> Json
+ <$ Regex.string "json"
+ <|> Keys
+ <$ Regex.string "pub"
+ <|> Lisp
+ <$ Regex.string "lisp"
+ <|> Md
+ <$ Regex.string "md"
+ <|> Nix
+ <$ Regex.string "nix"
+ <|> Py
+ <$ Regex.string "py"
+ <|> Rs
+ <$ Regex.string "rs"
+ <|> Scm
+ <$ Regex.string "scm"
+ <|> Sh
+ <$ Regex.string "sh"
+ <|> Toml
+ <$ Regex.string "toml"
+
+-- | The cab dir is for temporary files and build outputs, not for source
+-- inputs.
+isCab :: FilePath -> Bool
+isCab ('_' : _) = True
+isCab fp = "/_/" `List.isInfixOf` fp
+
+-- | Group a list of Namespaces keyed by their extensions
+groupByExt :: [Namespace] -> Map Ext [Namespace]
+groupByExt ns's = Map.fromListWith (++) [(ext ns, [ns]) | ns <- ns's]
diff --git a/Omni/OsBase.nix b/Omni/OsBase.nix
new file mode 100644
index 0000000..3e69d8e
--- /dev/null
+++ b/Omni/OsBase.nix
@@ -0,0 +1,42 @@
# Baseline NixOS configuration shared by every host: fonts, ssh/mosh access,
# nix store maintenance, and basic hardening (fail2ban, clamav, acme).
{ pkgs, config, ... }:
let ports = import ./Cloud/Ports.nix;
in {
  boot.tmp.cleanOnBoot = true;
  # NOTE(review): 'fonts.fonts' was renamed to 'fonts.packages' in newer
  # nixpkgs releases — confirm against the pinned nixpkgs before bumping.
  fonts.fonts = with pkgs; [
    google-fonts
    mononoki
    source-code-pro
    fantasque-sans-mono
    hack-font
    fira
    fira-code
    fira-code-symbols
  ];
  networking.firewall.allowPing = true;
  nix.settings.substituters =
    [ "https://cache.nixos.org" ]; # "ssh://dev.simatime.com" ];
  # weekly store maintenance: collect garbage, then deduplicate
  nix.gc.automatic = true;
  nix.gc.dates = "Sunday 02:15";
  nix.optimise.automatic = true;
  nix.optimise.dates = [ "Sunday 02:30" ];
  # let sandboxed builds reach the shared ccache directory
  nix.settings.extra-sandbox-paths = [ config.programs.ccache.cacheDir ];
  nix.settings.trusted-users = [ "ben" ];
  programs.ccache.enable = true;
  programs.mosh.enable = true;
  programs.mosh.withUtempter = true;
  security.acme.defaults.email = "ben@bsima.me";
  security.acme.acceptTerms = true;
  security.sudo.wheelNeedsPassword = false;
  services.clamav.daemon.enable = true; # security
  services.clamav.updater.enable = true; # security
  services.fail2ban.enable = true; # security
  services.fail2ban.ignoreIP = [ ports.bensIp ]; # my home IP
  services.fail2ban.maxretry = 10;
  services.openssh.enable = true;
  services.openssh.openFirewall = true;
  services.openssh.settings.X11Forwarding = true;
  services.openssh.settings.PasswordAuthentication = false;
  services.openssh.settings.PermitRootLogin = "prohibit-password";
  system.autoUpgrade.enable = false; # 'true' breaks our nixpkgs pin
  zramSwap.enable = true;
}
diff --git a/Omni/Packages.nix b/Omni/Packages.nix
new file mode 100644
index 0000000..d04dfc3
--- /dev/null
+++ b/Omni/Packages.nix
@@ -0,0 +1,22 @@
{ pkgs, ... }:

# Base set of packages to be expected everywhere.

with pkgs;

{
  # everyday cli tools plus network diagnostics; deliberately minimal so
  # it stays cheap to include on every host
  environment.systemPackages = [
    file
    fd
    gitAndTools.gitMinimal
    htop
    openssl
    ranger
    inetutils
    traceroute
    vim
    vnstat
    wireguard-tools
    wget
  ];
}
diff --git a/Omni/Repl.py b/Omni/Repl.py
new file mode 100644
index 0000000..1cf2f65
--- /dev/null
+++ b/Omni/Repl.py
@@ -0,0 +1,265 @@
+"""
+Improve the standard Python REPL.
+
+This module attempts to emulate the workflow of ghci or lisp repls. It uses
+importlib to load a namespace from the provided path, typechecks it with mypy,
+and provides some tools for improving repl-driven development.
+
+This module is called in Omni/Ide/repl.sh like so:
+
+ python -i Omni/Repl.py NS PATH
+
+where NS is the dot-partitioned namespace of the main module, and PATH is the
+path to the same file. In the future this could be expanded to be a list of
+additional files to load.
+"""
+
+# : out python-improved-repl
+# : dep mypy
+import importlib
+import importlib.util
+import inspect
+import logging
+import mypy.api
+import Omni.Log as Log
+import os
+import pathlib
+import pydoc
+import string
+import subprocess
+import sys
+import textwrap
+import types
+import typing
+import unittest
+
+
class ReplError(Exception):
    """Raised for repl-specific failures, e.g. a module that cannot load."""
+
+
def use(ns: str, path: str) -> None:
    """
    Load or reload the module named 'ns' from 'path'.

    Like `use` in the Guile Scheme repl. The loaded module's names are
    injected into this module's globals so they are available at the prompt.

    Raises:
        ReplError: if module cannot be loaded
    """
    logging.info("loading %s from %s", ns, path)
    spec = importlib.util.spec_from_file_location(ns, path)
    if spec is None or spec.loader is None:
        msg = f"spec could not be loaded for {ns} at {path}"
        raise ReplError(msg)
    # If the module was loaded before, drop the names it previously injected
    # into globals(). The OLD module's dict must be consulted *before* the
    # new module executes: the fresh module dict only contains dunders at
    # this point, so the original scan both missed the stale names and could
    # delete this module's own '__name__'/'__doc__'.
    if ns in sys.modules:
        for name in list(sys.modules[ns].__dict__):
            if not name.startswith("__") and name in globals():
                del globals()[name]
        del sys.modules[ns]
    module = importlib.util.module_from_spec(spec)
    sys.modules[ns] = module
    spec.loader.exec_module(module)
    # inject every name (dunders included, matching prior behavior) into
    # this module's globals for use at the prompt
    names = list(module.__dict__)
    globals().update({k: getattr(module, k) for k in names})
+
+
def typecheck(path: str) -> None:
    """Run mypy over `path` and echo its report to stdout/stderr."""
    # this envvar is undocumented, but it forces colorized output
    # https://github.com/python/mypy/issues/13815
    os.environ["MYPY_FORCE_COLOR"] = "1"
    logging.info("typechecking %s", path)
    report, errors, _status = mypy.api.run([path])
    for stream, text in ((sys.stdout, report), (sys.stderr, errors)):
        stream.write(text)
        stream.flush()
+
+
def edit_file(ns: str, path: str, editor: str) -> None:
    """
    Edit and reload the given namespace and path.

    It is assumed ns and path go together. If `editor` returns something
    other than 0, or cannot be found at all, this function will not reload
    the ns.
    """
    try:
        proc = subprocess.run([editor, path], check=False)
    except FileNotFoundError:
        logging.exception("editor '%s' not found", editor)
        # bug fix: 'proc' is unbound past this point, so bail out instead
        # of crashing with NameError below
        return
    if proc.returncode == 0:
        use(ns, path)
        typecheck(path)
+
+
+class CustomRepl:
+ """Custom repl commands, heavily inspired by ghci."""
+
+ def __init__(self, ns: str, path: str, editor: str) -> None:
+ """Create the custom repl for given ns and path."""
+ self.ns = ns
+ self.path = path
+ self.editor = editor
+ self.default = sys.__excepthook__
+ self.isframe = inspect.isframe
+ self.stack = inspect.stack
+ self.stdout = sys.stdout
+ self.whitespace = string.whitespace
+
+ def setup(self) -> None:
+ """
+ Load the NS, setup hooks and prompt.
+
+ This basically does all the heavy lifting of customizing the Python
+ repl.
+ """
+ # load given namespace
+ use(self.ns, self.path)
+ typecheck(self.path)
+ # setup hooks and prompt
+ sys.excepthook = self.excepthook
+ pydoc.pager = lambda text: pydoc.pipepager(text, "more")
+ sys.ps1 = f"{self.ns}> "
+ sys.ps2 = f"{self.ns}| "
+
+ def help(self) -> str:
+ """Return help text."""
+ return textwrap.dedent(f"""
+ repl commands:
+ :e open {self.ns} in {self.editor}
+ :r reload {self.ns}
+ :t obj show the type of obj
+ obj? expands to 'help(obj)'
+ :? show this help
+ """)
+
+ def show_help(self) -> None:
+ """Print info about how to use this repl."""
+ sys.stdout.write(self.help())
+ sys.stdout.flush()
+
+ def excepthook(
+ self,
+ type_: type[BaseException],
+ value: BaseException,
+ traceback: types.TracebackType | None,
+ ) -> typing.Any:
+ """
+ Pre-process Python repl exceptions.
+
+ This is called on `sys.excepthook`, which runs when the repl doesn't
+ know how to handle some input. So, we inspect `value` and provide
+ alternate functionality, bottoming out at the default exception.
+
+ Raises:
+ ReplError: if caught exception is empty
+ """
+ # ruff: noqa: PLR0911
+ if not isinstance(value, SyntaxError):
+ return self.default(type_, value, traceback)
+ if value.text is None:
+ msg = f"value.text is None: {value}"
+ raise ReplError(msg)
+ stmt = value.text.rstrip()
+ if stmt == ":?":
+ self.show_help()
+ return None
+ if stmt.endswith("?"):
+ name = stmt.rstrip("?(" + self.whitespace)
+ self.get_help(name)
+ return None
+ if stmt == ":e":
+ self.edit()
+ return None
+ if stmt == ":r":
+ self.reload()
+ return None
+ if stmt.startswith(":t"):
+ var = stmt.split()[1]
+ self.get_type(var)
+ return None
+ return self.default(type_, value, traceback)
+
+ def get_type(self, name: str) -> typing.Any | None:
+ """Return the type of `name` to the caller."""
+ for record in self.stack():
+ frame = record[0]
+ if not self.isframe(frame):
+ continue
+ cmd = f"typing.reveal_type({name})"
+ return eval(cmd, frame.f_globals, frame.f_locals) # noqa: S307
+ return None
+
+ def get_help(self, name: str) -> typing.Any | None:
+ """Return the documentation for `name` to the caller."""
+ for record in self.stack():
+ frame = record[0]
+ if not self.isframe(frame):
+ continue
+ cmd = f"help({name})"
+ return eval(cmd, frame.f_globals, frame.f_locals) # noqa: S307
+ return None
+
+ def reload(self) -> None:
+ """Reload the current namespace."""
+ use(self.ns, self.path)
+ typecheck(self.path)
+
+ def edit(self) -> None:
+ """Edit the current namespace."""
+ edit_file(self.ns, self.path, self.editor)
+
+
class TestCustomRepl(unittest.TestCase):
    """Test the CustomRepl functionality."""

    def setUp(self) -> None:
        """Create a CustomRepl for testing, loading this very module."""
        ns = __name__
        # derive this module's file path from its dotted name; assumes the
        # process cwd is the repo root so the relative path resolves
        path = pathlib.Path(__name__.replace(".", "/"))
        path = path.with_suffix(".py")
        # 'true' is a no-op editor so tests never block on $EDITOR
        self.repl = CustomRepl(ns, str(path), "true")
        self.repl.setup()

    def tearDown(self) -> None:
        """Undo `self.setUp`, restoring the default excepthook."""
        sys.excepthook = self.repl.default
        del self.repl

    def test_help(self) -> None:
        """Help message should include the ns and path."""
        self.assertIn(self.repl.ns, self.repl.help())
+
+
def test() -> None:
    """Run this module's test suite."""
    loader = unittest.defaultTestLoader
    suite = unittest.TestSuite(loader.loadTestsFromTestCase(TestCustomRepl))
    unittest.TextTestRunner().run(suite)
+
+
def move() -> None:
    """Actual entrypoint: load argv NS/PATH and start the custom repl."""
    Log.setup()
    ns = sys.argv[1]
    path = sys.argv[2]
    # bug fix: the old fallback was the literal string "$EDITOR", which can
    # never resolve to an executable; fall back to vi instead
    editor = os.environ.get("EDITOR", "vi")
    repl = CustomRepl(ns, path, editor)
    repl.setup()
    repl.show_help()
+
+
def main() -> None:
    """Entrypoint, should be replaced by a `Omni.Cli.main`."""
    # guard: with no arguments at all, dispatching on sys.argv[1] would
    # raise IndexError before reaching move()'s own usage handling
    if len(sys.argv) > 1 and sys.argv[1] == "test":
        test()
    else:
        move()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Omni/Sentry.sh b/Omni/Sentry.sh
new file mode 100755
index 0000000..5c9e0ac
--- /dev/null
+++ b/Omni/Sentry.sh
@@ -0,0 +1,40 @@
#!/usr/bin/env bash
#
# monitors our infrastructure: forever, probe each url and print its http
# status in a traffic-light color, refreshing every two minutes
#
# color codes for use with printf
export RED='\033[0;31m'
export GRN='\033[0;32m'
export YEL='\033[0;33m'
export NC='\033[0m' # No Color

# the endpoints to watch; the list never changes, so build it once
urls=(
  http://que.run
  https://dragons.dev
  https://simatime.com
  https://tv.simatime.com
  https://bsima.me
  # https://herocomics.app
)

while true; do
  clear
  printf "%s sentry\n\n" "$(date +%Y.%m.%d..%H.%M)"
  for url in "${urls[@]}"; do
    # follow redirects, tolerate bad certs, report only the status code
    code=$(curl -L --max-time 10 --silent --show-error --insecure \
      --output /dev/null \
      --write-out "%{http_code}" "$url")
    case "$code" in
      2[0-9][0-9]) color=${GRN} ;;
      3[0-9][0-9]) color=${YEL} ;;
      4[0-9][0-9]) color=${YEL} ;;
      5[0-9][0-9]) color=${RED} ;;
      *) color=${RED} ;;
    esac
    printf "%b%s %s%b\n" "$color" "$code" "$url" "$NC"
  done
  sleep 120
done
##
diff --git a/Omni/Test.hs b/Omni/Test.hs
new file mode 100644
index 0000000..0cc00ac
--- /dev/null
+++ b/Omni/Test.hs
@@ -0,0 +1,110 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+
+module Omni.Test
+ ( Tree,
+ Description,
+ Assertion,
+ run,
+ group,
+ none,
+ unit,
+ prop,
+ with,
+ assertFailure,
+ (@=?),
+ (@?=),
+ (@?!=),
+ )
+where
+
+import Alpha hiding (group)
+import qualified Data.Text as Text
+import qualified Test.Tasty as Tasty
+import qualified Test.Tasty.HUnit as HUnit
+import qualified Test.Tasty.QuickCheck as QuickCheck
+import qualified Test.Tasty.Runners as Tasty
+
+type Tree = Tasty.TestTree
+
+type Description = Tasty.TestName
+
+type Assertion = HUnit.Assertion
+
-- | Run a test tree with tasty's default ingredients, exiting the process
-- with a success or failure code depending on the result.
run :: Tree -> IO ()
run tree = do
  Tasty.installSignalHandlers
  case Tasty.tryIngredients Tasty.defaultIngredients mempty tree of
    Nothing -> do
      hPutStrLn stderr <| Text.pack "no ingredients agreed to run"
      exitFailure
    Just act -> act +> \ok -> if ok then exitSuccess else exitFailure
+
-- | Group a list of test trees under a description ('Tasty.testGroup').
group :: Description -> [Tasty.TestTree] -> Tree
group = Tasty.testGroup

-- | A single HUnit test case.
unit :: Description -> Assertion -> Tree
unit = HUnit.testCase

-- | A QuickCheck property test.
prop :: (QuickCheck.Testable a) => Description -> a -> Tree
prop = QuickCheck.testProperty

-- | Fail the current test with the given message.
assertFailure :: String -> Assertion
assertFailure = HUnit.assertFailure
+
-- | Run a test group against a bracketed resource ('Tasty.withResource').
with ::
  -- | Startup
  IO a ->
  -- | Shutdown
  (a -> IO ()) ->
  -- | A test group where the first argument is a function that gets the resource
  (IO a -> Tree) ->
  Tree
with = Tasty.withResource
+
-- | Assert that two values are /not/ equal, failing with a descriptive
-- message when they are. (How is this not part of HUnit??)
assertNotEqual ::
  (Eq a, Show a, HasCallStack) =>
  -- | The message prefix
  String ->
  -- | The not-expected value
  a ->
  -- | The actual value
  a ->
  HUnit.Assertion
assertNotEqual preface notexpected actual =
  when (actual == notexpected) (HUnit.assertFailure msg)
  where
    prefix = if null preface then "" else preface ++ "\n"
    msg =
      prefix
        ++ "expected not: "
        ++ show notexpected
        ++ "\n but got: "
        ++ show actual
+
-- | @notExpected \@?!= actual@ — assert the two values are /not/ equal.
(@?!=) ::
  (Eq a, Show a, HasCallStack) =>
  -- | The not-expected value
  a ->
  -- | The actual value
  a ->
  HUnit.Assertion
expected @?!= actual = assertNotEqual "" expected actual

infixl 2 @?!=
+
-- | @expectedVal \@=? actualVal@ (expected on the left), delegating to HUnit.
(@=?) :: (Eq a, Show a) => a -> a -> HUnit.Assertion
a @=? b = a HUnit.@=? b

infixl 2 @=?

-- | @actualVal \@?= expectedVal@ (actual on the left), delegating to HUnit.
-- NOTE(review): fixity is infixr here but infixl for '@=?' above — looks
-- unintentional; confirm before relying on chained uses.
(@?=) :: (Eq a, Show a) => a -> a -> HUnit.Assertion
a @?= b = a HUnit.@?= b

infixr 2 @?=

-- | For usage in 'Omni.Cli.Plan' when you have no tests.
none :: Tree
none = group "none" []
diff --git a/Omni/Users.nix b/Omni/Users.nix
new file mode 100644
index 0000000..b214704
--- /dev/null
+++ b/Omni/Users.nix
@@ -0,0 +1,60 @@
{ config, lib, ... }:

# User accounts shared across hosts. Passwords are immutable
# (users.mutableUsers = false); access is via the ssh keys in ./Keys.

let
  # read a pubkey file into a list of non-empty key lines
  readKeys = k:
    lib.trivial.pipe k [
      builtins.readFile
      (lib.strings.splitString "\n")
      (lib.filter (s: s != ""))
    ];
in {
  users.groups = {
    # group for publishing web data
    "www-data" = { };
  };
  users.motd = ''

    welcome to the simatime network!
    your host is '${config.networking.hostName}'

  '';
  users.mutableUsers = false;
  users.users = {
    # bots
    #
    # unattended deploy user; wheel + passwordless sudo lets CI activate
    # new system generations
    deploy = {
      isNormalUser = true;
      home = "/home/deploy";
      openssh.authorizedKeys.keys = readKeys ./Keys/Deploy.pub;
      extraGroups = [ "wheel" ];
    };
    #
    # humans
    #
    root.openssh.authorizedKeys.keys = readKeys ./Keys/Ben.pub;

    ben = {
      description = "Ben Sima";
      isNormalUser = true;
      home = "/home/ben";
      openssh.authorizedKeys.keys = readKeys ./Keys/Ben.pub;
      extraGroups = [ "wheel" "docker" "bitcoind-mainnet" "git" ];
      hashedPassword =
        "$6$SGhdoRB6DhWe$elW8RQE1ebe8JKf1ALW8jGZTPCyn2rpq/0J8MV/A9y8qFMEhA.Z2eiexMgptohZAK5kcGOc6HIUgNzJqnDUvY.";
    };
    dre = {
      description = "Andres Ulloa";
      isNormalUser = true;
      home = "/home/dre";
      openssh.authorizedKeys.keys = readKeys ./Keys/Dre.pub;
      extraGroups = [ "wheel" "git" ];
    };
    nick = {
      description = "Nick Sima";
      isNormalUser = true;
      home = "/home/nick";
      openssh.authorizedKeys.keys = readKeys ./Keys/Nick.pub;
      extraGroups = [ "docker" "git" ];
    };
  };
}
diff --git a/Omni/Wipbase.c b/Omni/Wipbase.c
new file mode 100644
index 0000000..31996a2
--- /dev/null
+++ b/Omni/Wipbase.c
@@ -0,0 +1,185 @@
+// : out wip
+
+
+/*
+I have the following spec, I need you to write a C program that fills this spec:
+
+- this program is called 'wip' and will record 'updates'
+- an 'update' is a line of text with a timestamp
+- this program should use 'argp.h' for processing command line arguments
+- The command line interface has two subcommands, "submit" and "review"
+- submit command:
+ - takes a line of text and writes the timestamp and line of text separated by a tab to a file called 'wip.dat'. This represents one 'update'
+ - the timestamp format should be "%Y-%m-%d %H:%M:%S"
+- review command:
+ - reads the file 'wip.dat' and prints all the updates for the given week.
  - takes a flag '--period' for determining which week to report updates for
+ - '--period' takes an argument which can be either 'thisweek' or 'lastweek', defaulting to 'thisweek' if not given
+
+
+Here is some example usage:
+
+$ wip submit "This is my update"
+$ wip submit "This is a second update"
+$ wip review
+Updates for this week:
+ + This is my update
+ + This is a second update
+$ wip review -p thisweek
+Updates for this week:
+ + This is my update
+ + This is a second update
+$ wip review --period lastweek
+ + this was an update from last week
+*/
+
+
/* _GNU_SOURCE is required so <time.h> declares strptime(3) on glibc
   (argp.h is itself a GNU extension) */
#define _GNU_SOURCE
#include <argp.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
+
+#define WIP_FILENAME "wip.dat"
+#define TIMESTAMP_FORMAT "%Y-%m-%d %H:%M:%S"
+
const char *argp_program_version = "wip 1.0";
const char *argp_program_bug_address = "<your-email@example.com>";

static char doc[] = "A simple tool to record and review updates";

/* --period/-p selects the review window: "thisweek" or "lastweek" */
static struct argp_option options[] = {
  {"period", 'p', "PERIOD", 0,
   "Set the period for review (thisweek, lastweek)"},
  {0}
};

/* parsed command line: subcommand, optional update text, review period */
struct arguments
{
  char *command;
  char *update;
  char *period;
};
+
/* argp callback: stash --period, then record the first positional argument
   as the subcommand and the second as the update text.  Any further
   positional arguments are silently ignored. */
static error_t
parse_opt (int key, char *arg, struct argp_state *state)
{
  struct arguments *arguments = state->input;
  switch (key)
    {
    case 'p':
      arguments->period = arg;
      break;
    case ARGP_KEY_ARG:
      /* positional arguments arrive one at a time, in order */
      if (arguments->command == NULL)
        {
          arguments->command = arg;
        }
      else if (arguments->update == NULL)
        {
          arguments->update = arg;
        }
      break;
    default:
      return ARGP_ERR_UNKNOWN;
    }
  return 0;
}
+
+static struct argp argp = { options, parse_opt, "COMMAND [UPDATE]", doc };
+
+void
+submit_update (char *update)
+{
+ FILE *file = fopen (WIP_FILENAME, "a");
+ if (file == NULL)
+ {
+ printf ("Could not open %s\n", WIP_FILENAME);
+ return;
+ }
+
+ time_t now;
+ time (&now);
+ struct tm *timeinfo = localtime (&now);
+
+ char timestamp[20];
+ strftime (timestamp, sizeof (timestamp), TIMESTAMP_FORMAT, timeinfo);
+
+ fprintf (file, "%s\t%s\n", timestamp, update);
+
+ fclose (file);
+}
+
+void
+review_updates (char *period)
+{
+ FILE *file = fopen (WIP_FILENAME, "r");
+ if (file == NULL)
+ {
+ printf ("Could not open %s\n", WIP_FILENAME);
+ return;
+ }
+
+ time_t now;
+ time (&now);
+ struct tm *timeinfo = localtime (&now);
+ int current_week = timeinfo->tm_yday / 7;
+ if (strcmp (period, "lastweek") == 0)
+ {
+ current_week--;
+ }
+
+ char line[256];
+ while (fgets (line, sizeof (line), file))
+ {
+ struct tm timeinfo_line = { 0 };
+ strptime (line, TIMESTAMP_FORMAT, &timeinfo_line);
+ int line_week = timeinfo_line.tm_yday / 7;
+
+ if (line_week == current_week)
+ {
+ printf ("+ %s", strchr (line, '\t') + 1);
+ }
+ }
+
+ fclose (file);
+}
+
int
main (int argc, char **argv)
{
  /* defaults: no subcommand yet, review period is the current week */
  struct arguments arguments;
  arguments.command = NULL;
  arguments.update = NULL;
  arguments.period = "thisweek";

  argp_parse (&argp, argc, argv, 0, 0, &arguments);

  if (arguments.command == NULL)
    {
      printf ("No command provided. Expected 'submit' or 'review'.\n");
      return 1;
    }

  /* dispatch on the subcommand */
  if (strcmp (arguments.command, "submit") == 0)
    {
      if (arguments.update == NULL)
        {
          printf ("No update provided for 'submit' command.\n");
          return 1;
        }
      submit_update (arguments.update);
    }
  else if (strcmp (arguments.command, "review") == 0)
    {
      review_updates (arguments.period);
    }
  else
    {
      printf ("Unknown command '%s'. Expected 'submit' or 'review'.\n",
              arguments.command);
      return 1;
    }

  return 0;
}