Diffstat (limited to 'Biz')
-rw-r--r-- | Biz/Bild.hs                 |  81
-rw-r--r-- | Biz/Bild/ShellHook.sh       |  10
-rw-r--r-- | Biz/Dev.md                  |  10
-rw-r--r-- | Biz/Dev.nix                 |   4
-rw-r--r-- | Biz/Devalloc/Host.hs        | 122
-rw-r--r-- | Biz/Devalloc/Host.nix       |  46
-rw-r--r-- | Biz/Devalloc/Page/Home.hs   |  95
-rw-r--r-- | Biz/Devalloc/Page/Signup.hs |  46
-rwxr-xr-x | Biz/Devalloc/main.py        | 221
-rw-r--r-- | Biz/Devalloc/pitch.md       |  40
-rwxr-xr-x | Biz/Lint.py                 |   7
-rw-r--r-- | Biz/Que/Apidocs.md          |   3
-rwxr-xr-x | Biz/Que/Client.py           | 186
-rw-r--r-- | Biz/Que/Host.hs             | 253
-rw-r--r-- | Biz/Que/Host.nix            |  46
-rw-r--r-- | Biz/Que/Index.md            |  73
-rw-r--r-- | Biz/Que/Prod.nix            |  61
-rw-r--r-- | Biz/Que/Quescripts.md       |  50
-rw-r--r-- | Biz/Que/Site.hs             | 137
-rw-r--r-- | Biz/Que/Site.nix            |  61
-rw-r--r-- | Biz/Que/Style.css           | 136
-rw-r--r-- | Biz/Que/Tutorial.md         |  53
22 files changed, 1695 insertions, 46 deletions
diff --git a/Biz/Bild.hs b/Biz/Bild.hs index 2526395..81ab44f 100644 --- a/Biz/Bild.hs +++ b/Biz/Bild.hs @@ -133,13 +133,17 @@ main = Nothing -> Exit.die "usage: bild <target>" Just target -> analyze target >>= build -type Namespace = String +data Ext = Hs | Scm | Nix + deriving (Show) + +data Namespace = Namespace [String] Ext + deriving (Show) type Dep = String type Out = String -data Compiler = Ghc | Ghcjs | Guile | Nix +data Compiler = Ghc | Ghcjs | Guile | NixBuild deriving (Show) data Target = Target @@ -164,6 +168,12 @@ analyze s = do root <- Env.getEnv "BIZ_ROOT" cwd <- Dir.getCurrentDirectory let path = cwd </> s + namespace = + require "namespace" + <| path + |> reps root "" + |> List.stripPrefix "/" + >>= Regex.match metaNamespace case File.takeExtension path of ".hs" -> do content <- String.lines </ Prelude.readFile path @@ -171,25 +181,15 @@ analyze s = do let compiler = if ".js" `List.isSuffixOf` out then Ghcjs else Ghc return Target - { namespace = - require "namespace" - <| path - |> reps root "" - |> File.dropExtension - |> reps "/" "." - |> List.stripPrefix "." - >>= Regex.match metaNamespace, - deps = content /> Regex.match metaDep |> catMaybes, + { deps = content /> Regex.match metaDep |> catMaybes, builder = user <> "@localhost", .. } ".nix" -> return Target - { namespace = reps root "" path |> List.stripPrefix "/" |> require "namespace", - path = path, - deps = [], - compiler = Nix, + { deps = [], + compiler = NixBuild, out = "", builder = join @@ -198,17 +198,17 @@ analyze s = do "@dev.simatime.com?ssh-key=/home/", user, "/.ssh/id_rsa" - ] + ], + .. } ".scm" -> return Target - { namespace = reps root "" path |> List.stripPrefix "/" |> require "namespace", - path = path, - deps = [], + { deps = [], compiler = Guile, out = "", - builder = user <> "@localhost" + builder = user <> "@localhost", + .. 
} e -> panic <| "bild does not know this extension: " <> Text.pack e @@ -217,7 +217,7 @@ build target@Target {..} = do root <- Env.getEnv "BIZ_ROOT" case compiler of Ghc -> do - putText <| "bild: dev: ghc: " <> Text.pack namespace + putText <| "bild: dev: ghc: " <> nsToPath namespace let outDir = root </> "_/bild/dev/bin" Dir.createDirectoryIfMissing True outDir putText <| "bild: dev: local: " <> Text.pack builder @@ -232,12 +232,12 @@ build target@Target {..} = do "--make", path, "-main-is", - namespace, + nsToHaskellModule namespace, "-o", outDir </> out ] Ghcjs -> do - putText <| "bild: dev: ghcjs: " <> Text.pack namespace + putText <| "bild: dev: ghcjs: " <> nsToPath namespace let outDir = root </> "_/bild/dev/static" Dir.createDirectoryIfMissing True outDir putText <| "bild: dev: local: " <> Text.pack builder @@ -252,19 +252,17 @@ build target@Target {..} = do "--make", path, "-main-is", - namespace, + nsToHaskellModule namespace, "-o", outDir </> out ] Guile -> do - putText <| "bild: dev: guile: " <> Text.pack namespace - let outDir = root </> "_/bild/dev/bin" - Dir.createDirectoryIfMissing True outDir + putText <| "bild: dev: guile: " <> nsToPath namespace putText <| "bild: dev: local: " <> Text.pack builder putText "bild: guile TODO" putText <| show target - Nix -> do - putText <| "bild: nix: " <> Text.pack namespace + NixBuild -> do + putText <| "bild: nix: " <> nsToPath namespace let outDir = root </> "_/bild/nix" Dir.createDirectoryIfMissing True outDir putText <| "bild: nix: remote: " <> Text.pack builder @@ -272,7 +270,7 @@ build target@Target {..} = do "nix-build" [ path, "-o", - outDir </> namespace, + outDir </> (Text.unpack <| nsToPath namespace), -- Set default arguments to nix functions "--arg", "bild", @@ -290,10 +288,29 @@ build target@Target {..} = do builder ] +nsToHaskellModule :: Namespace -> String +nsToHaskellModule (Namespace parts Hs) = joinWith "." parts +nsToHaskellModule (Namespace _ ext) = + panic <| "can't convert " <> show ext <> " to a Haskell module" + +nsToPath :: Namespace -> Text +nsToPath (Namespace parts ext) = + Text.pack + <| joinWith "/" parts + <> "." + <> lowercase (show ext) + metaNamespace :: Regex.RE Char Namespace -metaNamespace = name <> Regex.many (Regex.sym '.') <> name +metaNamespace = Namespace </ path <* Regex.sym '.' <*> ext where - name = Regex.many (Regex.psym Char.isUpper) <> Regex.many (Regex.psym Char.isLower) + name = + Regex.many (Regex.psym Char.isUpper) + <> Regex.many (Regex.psym Char.isLower) + path = Regex.many (name <* Regex.string "/" <|> name) + ext = + Nix <$ Regex.string "nix" + <|> Hs <$ Regex.string "hs" + <|> Scm <$ Regex.string "scm" metaDep :: Regex.RE Char Dep metaDep = Regex.string "-- : dep " *> Regex.many (Regex.psym Char.isAlpha) diff --git a/Biz/Bild/ShellHook.sh b/Biz/Bild/ShellHook.sh index b79f1a6..65c3f10 100644 --- a/Biz/Bild/ShellHook.sh +++ b/Biz/Bild/ShellHook.sh @@ -101,22 +101,24 @@ function ship() { # Poor man's ci function ci() { + set -e lint stuff=( Biz/Pie.hs Biz/Pie.nix + Biz/Devalloc/Host.hs + Biz/Que/Site.hs + Biz/Que/Host.hs + Biz/Que/Prod.nix Biz/Cloud.nix Biz/Dev.nix - Que/Site.hs - Que/Host.hs - Que/Prod.nix Hero/Host.hs Hero/Node.hs Hero/Prod.nix ) for thing in ${stuff[@]} do - bild $thing + bild "$thing" done } @@ -30,9 +30,10 @@ Then run `help` to see the dev commands. # Repository organization The source tree maps to the module namespace, and roughly follows the -Haskell namespace hierarchy (although nothing is enforced). 
The main -'common' space is `Biz`, other namespaces should be related to their -application. +Haskell namespace hierarchy (although nothing is enforced). The root namespace +for all code that we own is `Biz`; proprietary applications, products, and +infrastructure lives under there. Stuff that can be open sourced or otherwise +externalized should be outside of `Biz`. Development aspects should be localized to their sub-namespaces as much as possible. Only after sufficient iteration such that interfaces are @@ -44,9 +45,6 @@ well-defined. Likewise, the functionality and purpose of a particular namespace should be small and well-defined. Following the unix principle of "do one thing and do it well" is advised. -For building the code, we use `nix` and basically copy the namespace -hierarchy into the main build file `./default.nix`. - Namespaces are always capitalized. I would prefer always lowercase, but `ghc` _really_ wants capitalized files, so we appeas `ghc`. In Scheme this actually translates quite well and helps distinguish between types diff --git a/Biz/Dev.nix b/Biz/Dev.nix index cdb3036..d9f2cb6 100644 --- a/Biz/Dev.nix +++ b/Biz/Dev.nix @@ -9,14 +9,14 @@ bild.os { ./Users.nix ./Dev/Configuration.nix ./Dev/Hardware.nix - ../Devalloc/Host.nix + ./Devalloc/Host.nix ]; networking.hostName = "lithium"; networking.domain = "dev.simatime.com"; services.devalloc-host = { enable = true; port = 8095; - package = bild.ghc ../Devalloc/Host.hs; + package = bild.ghc ./Devalloc/Host.hs; }; } diff --git a/Biz/Devalloc/Host.hs b/Biz/Devalloc/Host.hs new file mode 100644 index 0000000..5a9ff80 --- /dev/null +++ b/Biz/Devalloc/Host.hs @@ -0,0 +1,122 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +-- Developer allocation +-- +-- : out devalloc-host +-- : dep clay +-- : dep cmark +-- : sys cmark +-- : dep envy +-- : dep lucid +-- : dep miso +-- : dep protolude +-- : dep servant +-- : dep servant-server +-- : dep warp +module Biz.Devalloc.Host + ( main, + ) +where + +import Alpha +import Biz.App (CSS (..), HtmlApp (..)) +-- import qualified CMark as Cmark + +import qualified Biz.Devalloc.Page.Home as Home +import qualified Biz.Devalloc.Page.Signup as Signup +import qualified Biz.Look +import qualified Clay +import qualified Control.Exception as Exception +import qualified Lucid +import qualified Lucid.Base as Lucid +import Miso hiding (node) +import qualified Network.Wai as Wai +import qualified Network.Wai.Handler.Warp as Warp +import Network.Wai.Middleware.RequestLogger (logStdout) +import Servant +import qualified System.Envy as Envy + +main :: IO () +main = Exception.bracket startup shutdown run + where + startup = + Envy.decodeWithDefaults Envy.defConfig >>= \cfg -> do + -- pitchText <- readFile <| pitches cfg + -- let pitch = Cmark.commonmarkToHtml [] pitchText + putText "@" + putText "devalloc" + putText <| "port: " <> (show <| port cfg) + return (cfg, serve (Proxy @AllRoutes) <| serverHandlers) + shutdown :: (Config, Application) -> IO () + shutdown _ = pure () + run :: (Config, Wai.Application) -> IO () + run (cfg, app) = Warp.run (port cfg) (logStdout app) + +type HomeServer = ToServerRoutes Home.Path HtmlApp Home.Move + +type SignupServer = ToServerRoutes Signup.Path HtmlApp Signup.Move + +type AllRoutes = HomeServer :<|> SignupServer :<|> CssRoute + +type CssRoute = "css" :> "main.css" :> Get '[CSS] 
Text + +cssHandlers :: Server CssRoute +cssHandlers = return . toStrict <| Clay.render look + +instance Lucid.ToHtml a => Lucid.ToHtml (HtmlApp a) where + toHtmlRaw = Lucid.toHtml + toHtml (HtmlApp x) = + Lucid.doctypehtml_ <| do + Lucid.head_ <| do + Lucid.meta_ [Lucid.charset_ "utf-8"] + jsRef "/static/all.js" + jsRef "//unpkg.com/turbolinks@5.2.0/dist/turbolinks.js" + cssRef "/css/main.css" + Lucid.body_ (Lucid.toHtml x) + where + jsRef _href = + Lucid.with + (Lucid.script_ mempty) + [ Lucid.makeAttribute "src" _href, + Lucid.makeAttribute "async" mempty, + Lucid.makeAttribute "defer" mempty + ] + cssRef _href = + Lucid.with + (Lucid.link_ mempty) + [ Lucid.rel_ "stylesheet", + Lucid.type_ "text/css", + Lucid.href_ _href + ] + +data Config = Config + { port :: Warp.Port, + -- | A yaml file of pitches + pitches :: FilePath, + node :: FilePath + } + deriving (Generic, Show) + +instance Envy.DefConfig Config where + defConfig = + Config + { port = 3000, + pitches = "./Biz/Devalloc/pitch.md", + node = "_/bild/dev/Devalloc.Node/static" + } + +instance Envy.FromEnv Config + +serverHandlers :: Server AllRoutes +serverHandlers = Home.host :<|> Signup.host :<|> cssHandlers + +look :: Clay.Css +look = do + Biz.Look.fuckingStyle + "body" Clay.? Biz.Look.fontStack diff --git a/Biz/Devalloc/Host.nix b/Biz/Devalloc/Host.nix new file mode 100644 index 0000000..51aa85d --- /dev/null +++ b/Biz/Devalloc/Host.nix @@ -0,0 +1,46 @@ +{ options +, lib +, config +, pkgs +, modulesPath +}: + +let + cfg = config.services.devalloc-host; +in +{ + options.services.devalloc-host = { + enable = lib.mkEnableOption "Enable the devalloc-host service"; + port = lib.mkOption { + type = lib.types.int; + default = 3000; + description = '' + The port on which devalloc-host will listen for + incoming HTTP traffic. + ''; + }; + package = lib.mkOption { + type = lib.types.package; + description = "devalloc-host package to use"; + }; + }; + config = lib.mkIf cfg.enable { + systemd.services.devalloc-host = { + path = [ cfg.package ]; + wantedBy = [ "multi-user.target" ]; + script = '' + ${cfg.package}/bin/devalloc-host + ''; + description = '' + Devalloc.Host + ''; + serviceConfig = { + Environment = ["PORT=${toString cfg.port}"]; + KillSignal = "INT"; + Type = "simple"; + Restart = "on-abort"; + RestartSec = "1"; + }; + }; + }; +} diff --git a/Biz/Devalloc/Page/Home.hs b/Biz/Devalloc/Page/Home.hs new file mode 100644 index 0000000..9e90e70 --- /dev/null +++ b/Biz/Devalloc/Page/Home.hs @@ -0,0 +1,95 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} + +module Biz.Devalloc.Page.Home + ( Move (..), + Path, + Form (..), + path, + view, + host, + ) +where + +import Alpha +import Biz.App (HtmlApp (..)) +import Miso +import Miso.String +import Servant.API +import Servant.Links +import Servant.Server (Handler) + +data Move = NoMove + +type Path = View Move + +newtype Form = Form + { uri :: URI + } + +path :: URI +path = linkURI <| safeLink (Proxy :: Proxy Path) (Proxy :: Proxy Path) + +host :: Handler (HtmlApp (View Move)) +host = + Form {uri = path} + |> view + |> HtmlApp + |> pure + +signup :: View Move +signup = + p_ + [] + [ a_ + [href_ "mailto:ben@bsima.me?subject=Devalloc+signup"] + [text "Request access via email"] + ] + +view :: Form -> View Move +view _ = + div_ + [] + [ h1_ [] [text "Devalloc"], + p_ + [] + [ text + "Devalloc analyzes your codebase trends, finds patterns \ + \ in how your developers work, and protects against tech debt." 
+ ], + p_ [] [text "Just hook it up to your CI system - it will warn you when it finds a problem."], + signup, + h2_ [] [text "Identify blackholes in your codebase"], + p_ + [] + [ text + <| Miso.String.intercalate + " " + [ "What if none of your active employees have touched some part of the codebase?", + "This happens too often with legacy code, and then it turns into a huge source of tech debt.", + "Devalloc finds these \"blackholes\" and warns you about them so you can be proactive in eliminating tech debt." + ] + ], + h2_ + [] + [text "Protect against lost knowledge"], + p_ + [] + [text "Not everyone can know every part of a codebase. By finding pieces of code that only 1 or 2 people have touched, devalloc identifes siloed knowledge. This allows you to protect against the risk of this knowledge leaving the company if an employee leaves."], + h2_ + [] + [text "Don't just measure code coverage - also know your dev coverage"], + p_ + [] + [text "No matter how smart your employees are, if you are under- or over-utilizing your developers then you will never get optimal performance from your team."], + ul_ + [] + [ li_ [] [text "Find developer hot spots in your code: which pieces of code get continually rewritten, taking up valuable dev time?"], + li_ [] [text "Know how your devs work best: which ones have depth of knowledge, and which ones have breadth?"] + ], + p_ [] [text "(Paid only)"], + h2_ [] [text "See how your teams *actually* organize themselves with cluster analysis"], + p_ [] [text "Does your team feel splintered or not cohesive? Which developers work best together? Devalloc analyzes the collaboration patterns between devs and helps you form optimal pairings and teams based on shared code and mindspace."], + p_ [] [text "(Paid only)"], + signup + ] diff --git a/Biz/Devalloc/Page/Signup.hs b/Biz/Devalloc/Page/Signup.hs new file mode 100644 index 0000000..8debf53 --- /dev/null +++ b/Biz/Devalloc/Page/Signup.hs @@ -0,0 +1,46 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} + +module Biz.Devalloc.Page.Signup + ( Move (..), + Path, + Form (..), + path, + view, + host, + ) +where + +import Alpha +import Biz.App (HtmlApp (..)) +import Miso +import Miso.String +import Servant.API +import Servant.Links +import Servant.Server (Handler) + +data Move = NoMove + +type Path = View Move + +newtype Form = Form + { uri :: URI + } + +path :: URI +path = linkURI <| safeLink (Proxy :: Proxy Path) (Proxy :: Proxy Path) + +host :: Handler (HtmlApp (View Move)) +host = + Form {uri = path} + |> view + |> HtmlApp + |> pure + +view :: Form -> View Move +view _ = + div_ + [] + [ h1_ [] [text "Signup coming soon"], + p_ [] [a_ [href_ "/"] [text "Go back home"]] + ] diff --git a/Biz/Devalloc/main.py b/Biz/Devalloc/main.py new file mode 100755 index 0000000..bb10441 --- /dev/null +++ b/Biz/Devalloc/main.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python +""" +Analyze developer allocation across a codebase. +""" + +import argparse +import datetime +import logging +import os +import re +import subprocess +import sys + + +def find_user(line): + """Given 'Ben Sima <ben@bsima.me>', finds `Ben Sima'. Returns the first + matching string.""" + return re.findall(r"^[^<]*", line)[0].strip() + + +def authors_for(path, active_users): + """Return a dictionary of {author: commits} for given path. 
Usernames not in + the 'active_users' list will be filtered out.""" + raw = subprocess.check_output( + ["git", "shortlog", "--numbered", "--summary", "--email", "--", path] + ).decode("utf-8") + lines = [s for s in raw.split("\n") if s] + data = {} + for line in lines: + parts = line.strip().split("\t") + author = find_user(parts[1]) + commits = parts[0] + if author in active_users: + data[author] = commits + return data + + +def mailmap_users(): + """Returns users from the .mailmap file.""" + users = [] + with open(".mailmap") as file: + lines = file.readlines() + for line in lines: + users.append(find_user(line)) + return users + + +MAX_SCORE = 10 + + +def score(blackhole, liability, good, total): + "Calculate the score." + weights = { + "blackhole": 0.5, + "liability": 0.7, + } + return ( + MAX_SCORE + * ( + (blackhole * weights["blackhole"]) + + (liability * weights["liability"]) + + good + ) + / total + ) + + +def get_args(): + "Parse CLI arguments." + cli = argparse.ArgumentParser(description=__doc__) + cli.add_argument("repo", default=".", help="the git repo to run on", metavar="REPO") + cli.add_argument( + "-b", + "--blackholes", + action="store_true", + help="print the blackholes (files with one or zero active contributors)", + ) + cli.add_argument( + "-l", + "--liabilities", + action="store_true", + help="print the liabilities (files with < 3 active contributors)", + ) + cli.add_argument( + "-s", + "--stale", + action="store_true", + help="print stale files (haven't been touched in 6 months)", + ) + cli.add_argument( + "-i", "--ignored", nargs="+", default=[], help="patterns to ignore in paths", + ) + cli.add_argument( + "--active-users", + nargs="+", + default=[], + help="list of active user emails. if not provided, this is loaded from .mailmap", + ) + cli.add_argument( + "-v", + "--verbosity", + help="set the log level verbosity", + choices=["debug", "warning", "error"], + default="error", + ) + return cli.parse_args() + + +def guard_git(repo): + "Guard against non-git repos." + is_git = subprocess.run( + ["git", "rev-parse"], + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + check=False, + ).returncode + if is_git != 0: + sys.exit(f"error: not a git repository: {repo}") + + +def staleness(path, now): + "How long has it been since this file was touched?" + timestamp = datetime.datetime.strptime( + subprocess.check_output(["git", "log", "-n1", "--pretty=%aI", path]) + .decode("utf-8") + .strip(), + "%Y-%m-%dT%H:%M:%S%z", + ) + delta = now - timestamp + return delta.days + + +class Repo: + "Represents a repo and stats for the repo." + + def __init__(self, ignored_paths, active_users): + self.paths = [ + p + for p in subprocess.check_output(["git", "ls-files", "--no-deleted"]) + .decode("utf-8") + .split() + if not any(i in p for i in ignored_paths) + ] + logging.debug("collecting stats") + self.stats = {} + for path in self.paths: + self.stats[path] = authors_for(path, active_users) + self.blackholes = [path for path, authors in self.stats.items() if not authors] + self.liabilities = { + path: list(authors) + for path, authors in self.stats.items() + if 1 <= len(authors) < 3 + } + now = datetime.datetime.utcnow().astimezone() + self.stale = {} + for path, _ in self.stats.items(): + _staleness = staleness(path, now) + if _staleness > 180: + self.stale[path] = _staleness + + def print_blackholes(self, full): + "Print number of blackholes, or list of all blackholes." 
+ # note: file renames may result in false positives + n_blackhole = len(self.blackholes) + print(f"Blackholes: {n_blackhole}") + if full: + for path in self.blackholes: + print(f" {path}") + + def print_liabilities(self, full): + "Print number of liabilities, or list of all liabilities." + n_liabilities = len(self.liabilities) + print(f"Liabilities: {n_liabilities}") + if full: + for path, authors in self.liabilities.items(): + print(f" {path} ({', '.join(authors)})") + + def print_score(self): + "Print the overall score." + n_total = len(self.stats.keys()) + n_blackhole = len(self.blackholes) + n_liabilities = len(self.liabilities) + n_good = n_total - n_blackhole - n_liabilities + print("Total:", n_total) + print( + "Score: {:.2f}/{}".format( + score(n_blackhole, n_liabilities, n_good, n_total), MAX_SCORE + ) + ) + + def print_stale(self, full): + "Print stale files" + n_stale = len(self.stale) + print(f"Stale files: {n_stale}") + if full: + for path, days in self.stale.items(): + print(f" {path} ({days} days)") + + +if __name__ == "__main__": + ARGS = get_args() + logging.basicConfig(stream=sys.stderr, level=ARGS.verbosity.upper()) + + logging.debug("starting") + os.chdir(os.path.abspath(ARGS.repo)) + + guard_git(ARGS.repo) + + # if no active users provided, load from .mailmap + if ARGS.active_users == []: + if os.path.exists(".mailmap"): + ARGS.active_users = mailmap_users() + + # collect data + REPO = Repo(ARGS.ignored, ARGS.active_users) + + # print data + REPO.print_score() + REPO.print_blackholes(ARGS.blackholes) + REPO.print_liabilities(ARGS.liabilities) + REPO.print_stale(ARGS.stale) diff --git a/Biz/Devalloc/pitch.md b/Biz/Devalloc/pitch.md new file mode 100644 index 0000000..cfc0b23 --- /dev/null +++ b/Biz/Devalloc/pitch.md @@ -0,0 +1,40 @@ +# Devalloc + +Devalloc analyzes your codebase trends, finds patterns in how your developers +work, and protects against tech debt. + +Just hook it up to your CI system - it will warn you when it finds a problem. + +## Identify blackholes in your codebase + +What if none of your active employees have touched some part of the codebase? +This happens too often with legacy code, and then it turns into a huge source of +tech debt. Devalloc finds these "blackholes" and warns you about them so you +can be proactive in eliminating tech debt. + +## Protect against lost knowledge + +Not everyone can know every part of a codebase. By finding pieces of code +that only 1 or 2 people have touched, devalloc identifes siloed knowledge. This +allows you to protect against the risk of this knowledge leaving the company if +an employee leaves. + +## Don't just measure "code coverage" - also know your "dev coverage" + +No matter how smart your employees are, if you are under- or over-utilizing your +developers then you will never get optimal performance from your team. + +- Find developer "hot spots" in your code: which pieces of code get continually + rewritten, taking up valuable dev time? +- Know how your devs work best: which ones have depth of knowledge, and which + ones have breadth? + +(Paid only) + +## See how your teams *actually* organize themselves with cluster analysis + +Does your team feel splintered or not cohesive? Which developers work best +together? Devalloc analyzes the collaboration patterns between devs and helps +you form optimal pairings and teams based on shared code and mindspace. 
+ +(Paid only) diff --git a/Biz/Lint.py b/Biz/Lint.py index fccda57..fc035cb 100755 --- a/Biz/Lint.py +++ b/Biz/Lint.py @@ -26,7 +26,7 @@ def run(cmd, file): "ormolu": ["--mode", "check"], "hlint": [], "black": ["--quiet", "--check"], - "pylint": [], + "pylint": ["--disable=invalid-name"], } # pylint: disable=subprocess-run-check ret = subprocess.run([cmd, *args[cmd], file], stdout=subprocess.PIPE) @@ -69,6 +69,8 @@ def group_files(files, extensions): def guard_todos(files): + "Fail if TODO found in text" + global ERRORS # pylint: disable=global-statement for fname in files: with open(fname) as text: if "TODO" in text.read(): @@ -92,6 +94,7 @@ if __name__ == "__main__": run("hlint", hs) for py in FILES[".py"]: print(f"lint: {py}") - run("black", py) + # Broken in our nixpkgs + # run("black", py) run("pylint", py) sys.exit(ERRORS) diff --git a/Biz/Que/Apidocs.md b/Biz/Que/Apidocs.md new file mode 100644 index 0000000..f400889 --- /dev/null +++ b/Biz/Que/Apidocs.md @@ -0,0 +1,3 @@ +% que.run Api Docs + +coming soon diff --git a/Biz/Que/Client.py b/Biz/Que/Client.py new file mode 100755 index 0000000..1063eb8 --- /dev/null +++ b/Biz/Que/Client.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python3 +""" +simple client for que.run +""" + +import argparse +import configparser +import functools +import http.client +import logging +import os +import subprocess +import sys +import time +import urllib.parse +import urllib.request as request + +MAX_TIMEOUT = 99999999 # basically never timeout + + +def auth(args): + "Returns the auth key for the given ns from ~/.config/que.conf" + logging.debug("auth") + namespace = args.target.split("/")[0] + if namespace == "pub": + return None + conf_file = os.path.expanduser("~/.config/que.conf") + if not os.path.exists(conf_file): + sys.exit("you need a ~/.config/que.conf") + cfg = configparser.ConfigParser() + cfg.read(conf_file) + return cfg[namespace]["key"] + + +def autodecode(bytestring): + """Attempt to decode bytes `bs` into common codecs, preferably utf-8. If + no decoding is available, just return the raw bytes. + + For all available codecs, see: + <https://docs.python.org/3/library/codecs.html#standard-encodings> + + """ + logging.debug("autodecode") + codecs = ["utf-8", "ascii"] + for codec in codecs: + try: + return bytestring.decode(codec) + except UnicodeDecodeError: + pass + return bytestring + + +def retry(exception, tries=4, delay=3, backoff=2): + "Decorator for retrying an action." + + def decorator(func): + @functools.wraps(func) + def func_retry(*args, **kwargs): + mtries, mdelay = tries, delay + while mtries > 1: + try: + return func(*args, **kwargs) + except exception as ex: + logging.debug(ex) + logging.debug("retrying...") + time.sleep(mdelay) + mtries -= 1 + mdelay *= backoff + return func(*args, **kwargs) + + return func_retry + + return decorator + + +def send(args): + "Send a message to the que." + logging.debug("send") + key = auth(args) + data = args.infile + req = request.Request(f"{args.host}/{args.target}") + req.add_header("User-AgenT", "Que/Client") + if key: + req.add_header("Authorization", key) + if args.serve: + logging.debug("serve") + while not time.sleep(1): + request.urlopen(req, data=data, timeout=MAX_TIMEOUT) + + else: + request.urlopen(req, data=data, timeout=MAX_TIMEOUT) + + +def then(args, msg): + "Perform an action when passed `--then`." 
+ if args.then: + logging.debug("then") + subprocess.run( + args.then.format(msg=msg, que=args.target), check=False, shell=True, + ) + + +@retry(http.client.IncompleteRead, tries=10, delay=5, backoff=1) +@retry(http.client.RemoteDisconnected, tries=10, delay=2, backoff=2) +def recv(args): + "Receive a message from the que." + logging.debug("recv on: %s", args.target) + params = urllib.parse.urlencode({"poll": args.poll}) + req = request.Request(f"{args.host}/{args.target}?{params}") + req.add_header("User-Agent", "Que/Client") + key = auth(args) + if key: + req.add_header("Authorization", key) + with request.urlopen(req) as _req: + if args.poll: + logging.debug("poll") + while not time.sleep(1): + logging.debug("reading") + msg = autodecode(_req.readline()) + logging.debug("read") + print(msg, end="") + then(args, msg) + else: + msg = autodecode(_req.read()) + print(msg) + then(args, msg) + + +def get_args(): + "Command line parser" + cli = argparse.ArgumentParser(description=__doc__) + cli.add_argument("--debug", action="store_true", help="log to stderr") + cli.add_argument( + "--host", default="http://que.run", help="where que-server is running" + ) + cli.add_argument( + "--poll", default=False, action="store_true", help="stream data from the que" + ) + cli.add_argument( + "--then", + help=" ".join( + [ + "when polling, run this shell command after each response,", + "presumably for side effects," + r"replacing '{que}' with the target and '{msg}' with the body of the response", + ] + ), + ) + cli.add_argument( + "--serve", + default=False, + action="store_true", + help=" ".join( + [ + "when posting to the que, do so continuously in a loop.", + "this can be used for serving a webpage or other file continuously", + ] + ), + ) + cli.add_argument( + "target", help="namespace and path of the que, like 'ns/path/subpath'" + ) + cli.add_argument( + "infile", + nargs="?", + type=argparse.FileType("rb"), + help="data to put on the que. 
Use '-' for stdin, otherwise should be a readable file", + ) + return cli.parse_args() + + +if __name__ == "__main__": + ARGV = get_args() + if ARGV.debug: + logging.basicConfig( + format="%(asctime)s %(message)s", + level=logging.DEBUG, + datefmt="%Y.%m.%d..%H.%M.%S", + ) + try: + if ARGV.infile: + send(ARGV) + else: + recv(ARGV) + except KeyboardInterrupt: + sys.exit(0) diff --git a/Biz/Que/Host.hs b/Biz/Que/Host.hs new file mode 100644 index 0000000..4817fd6 --- /dev/null +++ b/Biz/Que/Host.hs @@ -0,0 +1,253 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE NoImplicitPrelude #-} + +-- | Interprocess communication +-- +-- Prior art: +-- - <https://github.com/jb55/httpipe> +-- - <https://patchbay.pub> +-- - <https://github.com/hargettp/courier> +-- - sorta: <https://ngrok.com/> and <https://localtunnel.github.io/www/> +-- +-- : out que-server +-- +-- : dep async +-- : dep envy +-- : dep protolude +-- : dep scotty +-- : dep stm +-- : dep unagi-chan +-- : dep unordered-containers +module Biz.Que.Host + ( main, + ) +where + +import Alpha hiding (gets, modify, poll) +import qualified Control.Concurrent.Go as Go +import qualified Control.Concurrent.STM as STM +import qualified Control.Exception as Exception +import Control.Monad.Reader (MonadTrans) +import qualified Data.ByteString.Builder.Extra as Builder +import qualified Data.ByteString.Lazy as BSL +import Data.HashMap.Lazy (HashMap) +import qualified Data.HashMap.Lazy as HashMap +import qualified Data.Text.Encoding as Encoding +import qualified Data.Text.Lazy as Text.Lazy +import qualified Data.Text.Lazy.IO as Text.Lazy.IO +import qualified Network.HTTP.Types.Status as Http +import qualified Network.Wai as Wai +import qualified Network.Wai.Handler.Warp as Warp +import Network.Wai.Middleware.RequestLogger + ( logStdout, + ) +import qualified System.Envy as Envy +import qualified Web.Scotty.Trans as Scotty +import qualified Prelude + +{-# ANN module ("HLint: ignore Reduce duplication" :: Prelude.String) #-} + +main :: IO () +main = Exception.bracket startup shutdown <| uncurry Warp.run + where + startup = + Envy.decodeWithDefaults Envy.defConfig >>= \c -> do + sync <- STM.newTVarIO initialAppState + let runActionToIO m = runReaderT (runApp m) sync + waiapp <- Scotty.scottyAppT runActionToIO <| routes c + putText "*" + putText "Que.Host" + putText <| "port: " <> (show <| quePort c) + putText <| "skey: " <> (show <| queSkey c) + return (quePort c, waiapp) + shutdown :: a -> IO a + shutdown = pure . 
identity + +newtype App a = App + { runApp :: ReaderT (STM.TVar AppState) IO a + } + deriving + ( Applicative, + Functor, + Monad, + MonadIO, + MonadReader + (STM.TVar AppState) + ) + +newtype AppState = AppState + { ques :: HashMap Namespace Quebase + } + +initialAppState :: AppState +initialAppState = AppState {ques = mempty} + +data Config = Config + { -- | QUE_PORT + quePort :: Warp.Port, + -- | QUE_SKEY + queSkey :: FilePath + } + deriving (Generic, Show) + +instance Envy.DefConfig Config where + defConfig = Config 3000 "/run/skey/que-admin" + +instance Envy.FromEnv Config + +routes :: Config -> Scotty.ScottyT Text.Lazy.Text App () +routes cfg = do + Scotty.middleware logStdout + let quepath = "^\\/([[:alnum:]_-]+)\\/([[:alnum:]._/-]*)$" + let namespace = "^\\/([[:alnum:]_-]+)\\/?$" -- matches '/ns' and '/ns/' but not '/ns/path' + + -- GET /index.html + Scotty.get (Scotty.literal "/index.html") <| Scotty.redirect "/_/index" + Scotty.get (Scotty.literal "/") <| Scotty.redirect "/_/index" + -- GET /_/dash + Scotty.get (Scotty.literal "/_/dash") <| do + authkey <- fromMaybe "" </ Scotty.header "Authorization" + adminkey <- liftIO <| lchomp </ Text.Lazy.IO.readFile (queSkey cfg) + if authkey == adminkey + then do + d <- app <| gets ques + Scotty.json d + else do + Scotty.status Http.methodNotAllowed405 + Scotty.text "not allowed" + -- Namespace management + Scotty.matchAny (Scotty.regex namespace) <| do + Scotty.status Http.notImplemented501 + Scotty.text "namespace management coming soon" + -- GET que + -- + -- Receive a value from a que. Blocks until a value is received, + -- then returns. If 'poll=true', then stream data from the Que to the + -- client. + Scotty.get (Scotty.regex quepath) <| do + (ns, qp) <- extract + guardNs ns ["pub", "_"] + app . modify <| upsertNamespace ns + q <- app <| que ns qp + poll <- Scotty.param "poll" !: (pure . const False) + if poll + then Scotty.stream <| streamQue q + else do + r <- liftIO <| Go.read q + Scotty.html <| fromStrict <| Encoding.decodeUtf8 r + -- POST que + -- + -- Put a value on a que. Returns immediately. + Scotty.post (Scotty.regex quepath) <| do + authkey <- fromMaybe "" </ Scotty.header "Authorization" + adminkey <- liftIO <| lchomp </ Text.Lazy.IO.readFile (queSkey cfg) + (ns, qp) <- extract + -- Only allow my IP or localhost to publish to '_' namespace + when ("_" == ns && authkey /= adminkey) + <| Scotty.status Http.methodNotAllowed405 + >> Scotty.text "not allowed: _ is a reserved namespace" + >> Scotty.finish + guardNs ns ["pub", "_"] + -- passed all auth checks + app . modify <| upsertNamespace ns + q <- app <| que ns qp + qdata <- Scotty.body + _ <- liftIO <| Go.write q <| BSL.toStrict qdata + return () + +-- | Given `guardNs ns whitelist`, if `ns` is not in the `whitelist` +-- list, return a 405 error. +guardNs :: Text.Lazy.Text -> [Text.Lazy.Text] -> Scotty.ActionT Text.Lazy.Text App () +guardNs ns whitelist = + when (not <| ns `elem` whitelist) <| do + Scotty.status Http.methodNotAllowed405 + Scotty.text + <| "not allowed: use 'pub' namespace or signup to protect '" + <> ns + <> "' at https://que.run" + Scotty.finish + +-- | recover from a scotty-thrown exception. +(!:) :: + -- | action that might throw + Scotty.ActionT Text.Lazy.Text App a -> + -- | a function providing a default response instead + (Text.Lazy.Text -> Scotty.ActionT Text.Lazy.Text App a) -> + Scotty.ActionT Text.Lazy.Text App a +(!:) = Scotty.rescue + +-- | Forever write the data from 'Que' to 'Wai.StreamingBody'. 
+streamQue :: Que -> Wai.StreamingBody +streamQue q write _ = loop q + where + loop c = + Go.read c + >>= (write . Builder.byteStringInsert) + >> loop c + +-- | Gets the thing from the Hashmap. Call's 'error' if key doesn't exist. +grab :: (Eq k, Hashable k) => k -> HashMap k v -> v +grab = flip (HashMap.!) + +-- | Inserts the namespace in 'AppState' if it doesn't exist. +upsertNamespace :: Namespace -> AppState -> AppState +upsertNamespace ns as = + if HashMap.member ns (ques as) + then as + else as {ques = HashMap.insert ns mempty (ques as)} + +-- | Inserts the que at the proper 'Namespace' and 'Quepath'. +insertQue :: Namespace -> Quepath -> Que -> AppState -> AppState +insertQue ns qp q as = as {ques = newQues} + where + newQues = HashMap.insert ns newQbase (ques as) + newQbase = HashMap.insert qp q <| grab ns <| ques as + +extract :: Scotty.ActionT Text.Lazy.Text App (Namespace, Quepath) +extract = do + ns <- Scotty.param "1" + path <- Scotty.param "2" + return (ns, path) + +-- | A synonym for 'lift' in order to be explicit about when we are +-- operating at the 'App' layer. +app :: MonadTrans t => App a -> t App a +app = lift + +-- | Get something from the app state +gets :: (AppState -> b) -> App b +gets f = ask >>= liftIO . STM.readTVarIO >>= return </ f + +-- | Apply a function to the app state +modify :: (AppState -> AppState) -> App () +modify f = ask >>= liftIO . atomically . flip STM.modifyTVar' f + +-- | housing for a set of que paths +type Namespace = Text.Lazy.Text + +-- | a que is just a channel of bytes +type Que = Go.Channel Message + +-- | any path can serve as an identifier for a que +type Quepath = Text + +-- | any opaque data +type Message = ByteString + +-- | a collection of ques +type Quebase = HashMap Quepath Que + +-- | Lookup or create a que +que :: Namespace -> Quepath -> App Que +que ns qp = do + _ques <- gets ques + let qbase = grab ns _ques + queExists = HashMap.member qp qbase + if queExists + then return <| grab qp qbase + else do + c <- liftIO <| Go.chan 1 + modify (insertQue ns qp c) + gets ques /> grab ns /> grab qp diff --git a/Biz/Que/Host.nix b/Biz/Que/Host.nix new file mode 100644 index 0000000..e326483 --- /dev/null +++ b/Biz/Que/Host.nix @@ -0,0 +1,46 @@ +{ options +, lib +, config +, pkgs +, modulesPath +}: + +let + cfg = config.services.que-server; +in +{ + options.services.que-server = { + enable = lib.mkEnableOption "Enable the que-server service"; + port = lib.mkOption { + type = lib.types.int; + default = 3000; + description = '' + The port on which que-server will listen for + incoming HTTP traffic. 
+ ''; + }; + package = lib.mkOption { + type = lib.types.package; + description = "que-server package to use"; + }; + }; + config = lib.mkIf cfg.enable { + systemd.services.que-server = { + path = [ cfg.package ]; + wantedBy = [ "multi-user.target" ]; + script = '' + ${cfg.package}/bin/que-server + ''; + description = '' + Que server + ''; + serviceConfig = { + Environment = ["QUE_PORT=${toString cfg.port}"]; + KillSignal = "INT"; + Type = "simple"; + Restart = "on-abort"; + RestartSec = "1"; + }; + }; + }; +} diff --git a/Biz/Que/Index.md b/Biz/Que/Index.md new file mode 100644 index 0000000..a9db12e --- /dev/null +++ b/Biz/Que/Index.md @@ -0,0 +1,73 @@ +% que.run + +que.run is the concurrent, async runtime in the cloud + + - runtime concurrency anywhere you have a network connection + - multilanguage communicating sequential processes + - add Go-like channels to any language + - connect your microservices together with the simplest possible + plumbing + - async programming as easy as running two terminal commands + +HTTP routes on `que.run` are Golang-like channels with a namespace and a +path. For example: `https://que.run/pub/path/subpath`. + +## Quickstart + +There is a simple script `que` that acts as a client you can use to +interact with the `que.run` service. + +Download it to somewhere on your `$PATH` and make it executable: + + curl https://que.run/_/client > ~/bin/que + chmod +x ~/bin/que + que --help + +The client requires a recent version of Python 3. + +## Powerup + +que.run is free for limited use, but the real power of an asynchronous, +concurrent runtime in the cloud is unlocked with some extra power-user +features. + +- Free + - security by obscurity + - all protocols and data formats supported + - bandwidth and message sizes limited + - concurrent connections limited + - request rate limited +- Power + - protect your data with private namespaces + - remove bandwidth and size limits + - private dashboard to see all of your active ques + - 99.999% uptime +- Pro + - add durability to your ques so messages are never lost + - powerful batch api + - incredible query api + - Linux FUSE filesystem integration +- Enterprise + - all of the Power & Pro features + - on-prem deployment + - advanced que performance monitoring + - SLA for support from que.run experts + +Email `ben@bsima.me` if you want to sign up for the Power, Pro, or +Enterprise packages. 
+ +## Quescripts + +We are collecting a repository of scripts that make awesome use of que: + +- remote desktop notifications +- two-way communication with your phone +- ephemeral, serverless chat rooms +- collaborative jukebox + +<a id="quescripts-btn" href="/_/quescripts">See the scripts</a> + +## Docs + +- [tutorial](/_/tutorial) +- [api docs](/_/apidocs) diff --git a/Biz/Que/Prod.nix b/Biz/Que/Prod.nix new file mode 100644 index 0000000..12da1eb --- /dev/null +++ b/Biz/Que/Prod.nix @@ -0,0 +1,61 @@ +{ bild, lib }: + +# The production server for que.run + +bild.os { + imports = [ + ../OsBase.nix + ../Packages.nix + ../Users.nix + ./Host.nix + ./Site.nix + ]; + networking.hostName = "prod-que"; + networking.domain = "que.run"; + services.que-server = { + enable = true; + port = 80; + package = bild.ghc ./Host.hs; + }; + boot.loader.grub.device = "/dev/vda"; + fileSystems."/" = { device = "/dev/vda1"; fsType = "ext4"; }; + swapDevices = [ + { device = "/swapfile"; } # 4GB + ]; + networking.firewall.allowedTCPPorts = [ 22 80 443 ]; + networking = { + nameservers = [ + "67.207.67.2" + "67.207.67.3" + ]; + defaultGateway = "157.245.224.1"; + defaultGateway6 = "2604:a880:2:d1::1"; + dhcpcd.enable = false; + usePredictableInterfaceNames = lib.mkForce true; + interfaces = { + eth0 = { + ipv4.addresses = [ + { address="157.245.236.44"; prefixLength=20; } + { address="10.46.0.5"; prefixLength=16; } + ]; + ipv6.addresses = [ + { address="2604:a880:2:d1::a2:5001"; prefixLength=64; } + { address="fe80::7892:a5ff:fec6:dbc3"; prefixLength=64; } + ]; + ipv4.routes = [ { address = "157.245.224.1"; prefixLength = 32; } ]; + ipv6.routes = [ { address = "2604:a880:2:d1::1"; prefixLength = 32; } ]; + }; + }; + }; + services = { + que-website = { + enable = true; + namespace = "_"; + package = bild.ghc ./Site.hs; + }; + + udev.extraRules = '' + ATTR{address}=="7a:92:a5:c6:db:c3", NAME="eth0" + ''; + }; +} diff --git a/Biz/Que/Quescripts.md b/Biz/Que/Quescripts.md new file mode 100644 index 0000000..77e7004 --- /dev/null +++ b/Biz/Que/Quescripts.md @@ -0,0 +1,50 @@ +% Quescripts + +## Remote desktop notifications + +Lets say we are running a job that takes a long time, maybe we are +compiling or running a large test suite. Instead of watching the +terminal until it completes, or flipping back to check on it every so +often, we can create a listener that displays a popup notification when +the job finishes. + +In one terminal run the listener: + + que pub/notify --then "notify-send '{que}' '{msg}'" + +In some other terminal run the job that takes forever: + + runtests ; echo "tests are done" | que pub/notify - + +When terminal 2 succeeds, terminal 1 will print "tests are done", then +call the `notify-send` command, which displays a notification toast in +Linux with title "`pub/notify`" and content "`tests are done`". + +Que paths are multi-producer and multi-consumer, so you can add as many +terminals as you want. + +On macOS you could use something like this (just watch your quotes): + + osascript -e "display notification \"{msg}\" with title \"{que}\"" + +in place of notify-send. + +## Ephemeral, serverless chat rooms + +coming soon + +## Collaborative jukebox + +It's surprisingly easy to make a collaborative jukebox. + +First start up a music player: + + que --poll pub/music --then "playsong '{msg}'" + +where `playsong` is a script that plays a file from data streaming to +`stdin`. For example [vlc](https://www.videolan.org/vlc/) does this when +you run it like `vlc -`. 
+ +Then, anyone can submit songs with: + + que pub/music song.mp3 diff --git a/Biz/Que/Site.hs b/Biz/Que/Site.hs new file mode 100644 index 0000000..99486a4 --- /dev/null +++ b/Biz/Que/Site.hs @@ -0,0 +1,137 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE NoImplicitPrelude #-} + +-- | spawns a few processes that serve the que.run website +-- +-- : out que-website +-- +-- : dep async +-- : dep config-ini +-- : dep process +-- : dep protolude +-- : dep req +module Biz.Que.Site + ( main, + ) +where + +import Alpha +import qualified Control.Concurrent.Async as Async +import qualified Data.ByteString.Char8 as BS +import qualified Data.Ini.Config as Config +import qualified Data.Text as Text +import Data.Text.Encoding (encodeUtf8) +import qualified Data.Text.IO as Text +import Network.HTTP.Req +import qualified System.Directory as Directory +import System.Environment as Environment +import qualified System.Exit as Exit +import System.FilePath ((</>)) +import qualified System.Process as Process + +main :: IO () +main = do + (src, ns) <- + Environment.getArgs >>= \case + [src] -> return (src, "_") -- default to _ ns which is special + [src, ns] -> return (src, Text.pack ns) + _ -> Exit.die "usage: que-website <srcdir> [namespace]" + mKey <- getKey ns + putText <| "serving " <> Text.pack src <> " at " <> ns + run mKey ns + <| Sources + { index = src </> "Index.md", + client = src </> "Client.py", + quescripts = src </> "Quescripts.md", + style = src </> "Style.css", + apidocs = src </> "Apidocs.md", + tutorial = src </> "Tutorial.md" + } + +getKey :: Namespace -> IO (Maybe Key) +getKey ns = do + home <- Directory.getHomeDirectory + let file = home </> ".config" </> "que.conf" + exists <- Directory.doesFileExist file + unless exists <| panic <| "not found: " <> Text.pack file + conf <- Text.readFile file + print (home </> ".config" </> "que.conf") + auth ns + |> Config.parseIniFile conf + |> either errorParsingConf identity + |> return + +errorParsingConf :: error +errorParsingConf = panic "could not parse ~/.config/que.conf" + +data Sources = Sources + { index :: FilePath, + quescripts :: FilePath, + client :: FilePath, + style :: FilePath, + tutorial :: FilePath, + apidocs :: FilePath + } + +type Namespace = Text + +type Key = Text + +auth :: Namespace -> Config.IniParser (Maybe Key) +auth "pub" = pure Nothing +auth ns = Config.sectionMb ns <| Config.field "key" + +run :: Maybe Key -> Text -> Sources -> IO () +run key ns Sources {..} = Async.runConcurrently actions |> void + where + actions = + traverse + Async.Concurrently + [ forever <| toHtml index >>= serve key ns "index", + forever <| toHtml quescripts >>= serve key ns "quescripts", + forever <| BS.readFile client >>= serve key ns "client", + forever <| toHtml tutorial >>= serve key ns "tutorial", + forever <| toHtml apidocs >>= serve key ns "apidocs" + ] + toHtml :: FilePath -> IO ByteString + toHtml md = + BS.pack + </ Process.readProcess + "pandoc" + [ "--include-in-header", + style, + "-i", + md, + "--from", + "markdown", + "--to", + "html" + ] + [] + +serve :: Maybe Key -> Namespace -> Text -> ByteString -> IO () +serve Nothing "pub" path content = + runReq defaultHttpConfig <| do + _ <- + req + POST + (http "que.run" /: "pub" /: path) + (ReqBodyBs content) + ignoreResponse + mempty + liftIO <| return () +serve Nothing p _ _ = panic <| "no auth key provided for ns: " <> p +serve (Just key) ns path content = + runReq defaultHttpConfig <| do + let options = + header 
"Authorization" (encodeUtf8 key) <> responseTimeout maxBound + _ <- + req + POST + (http "que.run" /: ns /: path) + (ReqBodyBs content) + ignoreResponse + options + liftIO <| return () diff --git a/Biz/Que/Site.nix b/Biz/Que/Site.nix new file mode 100644 index 0000000..ba2eeb2 --- /dev/null +++ b/Biz/Que/Site.nix @@ -0,0 +1,61 @@ +{ options +, lib +, config +, pkgs +, modulesPath +}: + + + +let + cfg = config.services.que-website; + static = pkgs.stdenv.mkDerivation { + src = ./.; + name = "que-website-static"; + installPhase = '' + mkdir -p $out + cp ${./Apidocs.md} $out/Apidocs.md + cp ${./Index.md} $out/Index.md + cp ${./Quescripts.md} $out/Quescripts.md + cp ${./Style.css} $out/Style.css + cp ${./Tutorial.md} $out/Tutorial.md + cp ${./Client.py} $out/Client.py + ''; + }; +in +{ + options.services.que-website = { + enable = lib.mkEnableOption "Enable the que-website service"; + namespace = lib.mkOption { + type = lib.types.str; + default = "_"; + description = '' + The que namespace on which que-website will broadcast. + ''; + }; + package = lib.mkOption { + type = lib.types.package; + description = "que-website package to use"; + }; + }; + config = lib.mkIf cfg.enable { + systemd.services.que-website = { + path = [ cfg.package pkgs.pandoc ]; + wantedBy = [ "multi-user.target" ]; + script = '' + ${cfg.package}/bin/que-website ${static} ${cfg.namespace} + ''; + description = '' + Que website server + ''; + serviceConfig = { + User = "root"; + Environment = "HOME=/root"; + KillSignal = "INT"; + Type = "simple"; + Restart = "on-abort"; + RestartSec = "1"; + }; + }; + }; +} diff --git a/Biz/Que/Style.css b/Biz/Que/Style.css new file mode 100644 index 0000000..f8d1ca4 --- /dev/null +++ b/Biz/Que/Style.css @@ -0,0 +1,136 @@ +<link href="https://fonts.googleapis.com/css2?family=Source+Code+Pro:ital,wght@0,400;0,700;1,400;1,700&family=Source+Sans+Pro:ital,wght@0,400;0,700;1,400;1,700&display=swap" rel="stylesheet"> +<style> +:root { + /* base (http://chriskempson.com/projects/base16/) */ + --base00: #181818; + --base01: #282828; + --base02: #383838; + --base03: #585858; + --base04: #b8b8b8; + --base05: #d8d8d8; + --base06: #e8e8e8; + --base07: #f8f8f8; + + /* highlights */ + --base08: #ab4642; + --base09: #dc9656; + --base0A: #f7ca88; + --base0B: #a1b56c; + --base0C: #86c1b9; + --base0D: #7cafc2; + --base0E: #ba8baf; + --base0F: #a16946; +} + +/* dark theme */ +@media ( prefers-color-scheme: dark ), + ( prefers-color-scheme: no-preference ) +{ + body + { color: var(--base05); + ; background: var(--base00) + } + + header, h1, h2, h3 + { color: var(--base0A) } + + a:link, a:visited + { color: var(--base0D) } + + a:hover + { color: var(--base0C) } + + pre + { background-color: var(--base01) } + + code + { color: var(--base0B) + } + + hr + { border: 0 + ; height: 1px + ; width: 100% + ; margin: 2rem + ; background-image: linear-gradient( + to right, + /* same as --base0A */ + rgba(186, 139, 175, 0), + rgba(186, 139, 175, 0.75), + rgba(186, 139, 175, 0)) + } +} + +/* light theme */ + +@media ( prefers-color-scheme: light) +{ + body + { background-color: var(--base07) + ; color: var(--base00) + } + + a:link, a:visited + { color: var(--base0D) } + + a:hover + { color: var(--base0C) } + + pre + { background-color: var(--base06) } + + code + { color: var(--base0B) } +} + +/* structure and layout */ + +body +{ max-width: 900px +; margin: 40px auto +; padding: 0 10px +; font: 18px/1.5 + "Source Sans Pro", + sans-serif, + "Apple Color Emoji", + "Segoe UI Emoji", + "Segoe UI Symbol", + "Noto Color Emoji" +; 
display: flex +; flex-direction: column +; align-items: auto +} + +header#title-block-header, +h1, +h2, +h3 +{ line-height: 1.2 +; align-self: center +; text-transform: lowercase +} + +pre +{ padding: .5rem } + +pre, code +{ overflow-x: scroll +; white-space: pre +; font-family: "Source Code Pro", monospace; +} + +#quescripts-btn +{ border-width: 2px +; border-style: solid +} + +#quescripts-btn +{ font-size: 1.2rem +; padding: 1rem +; text-decoration: none +; text-align: center +; display: block +; max-width: 400px +; margin: auto +} +</style> diff --git a/Biz/Que/Tutorial.md b/Biz/Que/Tutorial.md new file mode 100644 index 0000000..6542ad3 --- /dev/null +++ b/Biz/Que/Tutorial.md @@ -0,0 +1,53 @@ +% que.run Tutorial + +## Ques + +A que is a multi-consumer, multi-producer channel available anywhere you +have a network connection. If you are familiar with Go channels, they +are pretty much the same thing. Put some values in one end, and take +them out the other end at a different time, or in a different process. + +Ques are created dynamically for every HTTP request you make. Here we +use the `que` client to create a new que at the path `pub/new-que`: + + que pub/new-que + +The `que` client is useful, but you can use anything to make the HTTP +request, for example here's the same thing with curl: + + curl https://que.run/pub/new-que + +These requests will block until a value is placed on the other +end. Let's do that now. In a separate terminal: + + echo "hello world" | que pub/new-que - + +This tells the `que` client to read the value from `stdin` and then send +it to `example/new-que`. Or with curl: + + curl https://que.run/pub/new-que -d "hello world" + +This will succeed immediately and send the string "`hello world`" over +the channel, which will be received and printed by the listener in the +other terminal. + +You can have as many producers and consumers attached to a channel as +you want. + +## Namespaces + +Ques are organized into namespaces, identified by the first fragment of +the path. In the above commands we used `pub` as the namespace, which is +a special publically-writable namespace. The other special namespace is +`_` which is reserved for internal use only. You can't write to the `_` +namespace. + +To use other namespaces and add authentication/access controls, you can +[sign up for the Power package](/_/index). + +## Events + +Just reading and writing data isn't very exciting, so let's throw in +some events. We can very quickly put together a job processor. + + que pub/new-que --then "./worker.sh '{msg}'" |
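
The job-processor example that closes Tutorial.md invokes a `worker.sh` script that is not part of this commit, so the following is only a rough sketch of what such a handler could look like; the script name, the `pub/new-que-done` path, and the placeholder "work" step are assumptions, not code from the diff:

    #!/usr/bin/env sh
    # worker.sh -- hypothetical handler for the job processor shown in Tutorial.md.
    # The que client substitutes the received message for '{msg}', so the job
    # payload arrives here as the first argument.
    set -eu

    job="$1"
    echo "processing job: $job"

    # ... do the actual work for this job here ...

    # Report completion on another que, reading the message from stdin with '-',
    # mirroring the producer syntax used earlier in the tutorial.
    echo "done: $job" | que pub/new-que-done -

Without `--poll` the listening `que` command exits after handling one message; pairing it with `--poll`, as in the jukebox example in Quescripts.md, keeps the listener attached so the worker runs for every job submitted.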