Diffstat (limited to 'Biz')
-rwxr-xr-x  Biz/Bild.scm                   158
-rw-r--r--  Biz/Cloud/chat.nix             100
-rw-r--r--  Biz/Cloud/git.nix               17
-rw-r--r--  Biz/Cloud/hardware.nix           6
-rw-r--r--  Biz/Cloud/mail.nix              43
-rw-r--r--  Biz/Cloud/networking.nix        36
-rw-r--r--  Biz/Cloud/web.nix               41
-rw-r--r--  Biz/Cloud/znc.nix               66
-rw-r--r--  Biz/Dev/configuration.nix      205
-rw-r--r--  Biz/Dev/hardware.nix            34
-rw-r--r--  Biz/Ibb/Client.hs               38
-rw-r--r--  Biz/Ibb/Core.hs                143
-rw-r--r--  Biz/Ibb/Influencers.hs         407
-rw-r--r--  Biz/Ibb/Keep.hs                123
-rw-r--r--  Biz/Ibb/Look.hs                 40
-rw-r--r--  Biz/Ibb/Move.hs                 48
-rw-r--r--  Biz/Ibb/Server.hs              152
-rw-r--r--  Biz/Ibb/service.nix             42
-rw-r--r--  Biz/Language/Bs.hs              12
-rw-r--r--  Biz/Language/Bs/Cli.hs          52
-rw-r--r--  Biz/Language/Bs/Eval.hs        241
-rw-r--r--  Biz/Language/Bs/Expr.hs        154
-rw-r--r--  Biz/Language/Bs/Parser.hs      121
-rw-r--r--  Biz/Language/Bs/Primitives.hs  183
-rw-r--r--  Biz/Language/Bs/Repl.hs         33
-rw-r--r--  Biz/Language/Bs/Test.hs          2
-rw-r--r--  Biz/Serval.scm                 194
-rw-r--r--  Biz/buildOS.nix                 56
-rw-r--r--  Biz/fathom.nix                 109
-rw-r--r--  Biz/firefox.nix                 12
-rw-r--r--  Biz/idea/duree-pitch.org        80
-rw-r--r--  Biz/idea/flash.org              36
-rw-r--r--  Biz/keys/ben.pub                 1
-rw-r--r--  Biz/keys/deploy.pub              1
-rw-r--r--  Biz/keys/nick.pub                1
-rw-r--r--  Biz/packages.nix                18
-rw-r--r--  Biz/users.nix                   39
-rw-r--r--  Biz/vpnHosts.nix                37
38 files changed, 3081 insertions, 0 deletions
diff --git a/Biz/Bild.scm b/Biz/Bild.scm
new file mode 100755
index 0000000..4e1d490
--- /dev/null
+++ b/Biz/Bild.scm
@@ -0,0 +1,158 @@
+;;
+;; bild - a simple build tool
+;;
+;;; Notice:
+;;
+;; This is under active development. For now this is just a convenience wrapper
+;; around `nix build`. The below commentary describes how this tool *should*
+;; work.
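+;;
+;; Concretely, `bild <target>' today just shells out to nix, roughly:
+;;
+;;   nix build -f $(git rev-parse --show-toplevel)/default.nix <target>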
+;;
+;;; Commentary:
+;;
+;; Design constraints
+;;
+;; - only input is a namespace, no subcommands, no packages
+;; - no need to write specific build rules
+;; - one rule for hs, one for rs, one for scm, and so on
+;; - no need to distinguish between exe and lib, just have a single output
+;; - never concerned with deployment/packaging - leave that to another tool (scp? tar?)
+;;
+;; Features
+;;
+;; - namespace maps to filesystem
+;; - no need for `bild -l` for listing available targets. Use `ls` or `tree`
+;; - you build namespaces, not files/modules/packages/etc
+;; - namespace maps to language modules
+;; - build settings can be set in the file comments
+;; - pwd is always considered the source directory, no `src` vs `doc` etc.
+;; - build methods are automatically detected from file extensions
+;; - flags modify the way to interact with the build
+;; - -s = jump into a shell and/or repl
+;; - -p = turn on profiling
+;; - -t = limit build by type
+;; - -e = exclude some regex in the ns tree
+;; - -o = optimize level
+;;
+;; Example Commands
+;;
+;; bild [-rpt] <target..>
+;;
+;; The general scheme is to build the things described by the targets. A target
+;; is a namespace. You can list as many as you want, but you must list at least
+;; one. It could just be `.` for the current directory. Build outputs will go
+;; into the _bild directory in the root of the project.
+;;
+;; bild biz.web
+;;
+;; Or `bild biz/web`. This builds the file at ./biz/web.hs, which will
+;; translate to something like `ghc --make Biz.Web`.
+;;
+;; bild -r <target>
+;;
+;; Starts a repl/shell for the target (a dispatch sketch follows this list):
+;; - if target.hs, load ghci
+;; - if target.scm, load scheme repl
+;; - if target.clj, load a clojure repl
+;; - if target.nix, load nix-shell
+;; - and so on.
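+;;
+;; For example (a sketch; the helper name `repl-for' is hypothetical):
+;;
+;;   (define (repl-for ext)
+;;     ;; map a file extension to the command that starts its repl
+;;     (cond
+;;      ((string=? ext "hs")  "ghci")
+;;      ((string=? ext "scm") "guile")
+;;      ((string=? ext "clj") "clj")
+;;      ((string=? ext "nix") "nix-shell")
+;;      (else (error "no repl for extension:" ext))))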
+;;
+;; bild -p <target>
+;;
+;; build target with profiling (if available)
+;;
+;; bild -t nix target
+;;
+;; only build target.nix, not target.hs and so on (in the case of multiple
+;; targets with the same name but different extension).
+;;
+;; Here is an example integration with GHC. Given the following command-line
+;; invocation to compile the namespace 'com.simatime.bild' which depends on
+;; 'com.simatime.lib':
+;;
+;; ghc com/simatime/bild.hs -i com/simatime/lib.hs -o _bild/bild -v \
+;; -main-is Biz.Bild.main
+;;
+;; The general template of which is:
+;;
+;; ghc <source> -i <deps..> -o <target> -main-is <target>.main
+;;
+;; Some definitions:
+;;
+;; - <source> is some source file
+;; - <deps..> is the stack of dependencies
+;; - <target> is the target namespace, indicated by 'bild <target>'
+;;
+;; To fill out the build template, we can parse the file for known
+;; namespaces. The general recipe (sketched after the list) is:
+;;
+;; 1. Create a list of namespaces in my git repo. This can be cached, or I can
+;; integrate with git somehow.
+;; 2. Read the <source> file corresponding to <target>
+;; 3. Look for 'import <namespace>', where <namespace> is a namespace in the
+;; aforementioned cache.
+;; 4. If found, then save current build as a continuation and compile
+;; <namespace>. Result gets put on the dependency stack
+;; 5. When finished, return to building <target>
+;;
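+;; A minimal sketch of steps 2 and 3 (hypothetical, not wired into `main'
+;; yet), assuming the cached list holds namespace strings like
+;; "Biz.Ibb.Core":
+;;
+;;   (use-modules (ice-9 textual-ports))
+;;
+;;   (define (scan-imports path known-namespaces)
+;;     ;; return the known namespaces imported by the file at PATH
+;;     (let ((text (call-with-input-file path get-string-all)))
+;;       (filter (lambda (ns)
+;;                 (string-contains text (string-append "import " ns)))
+;;               known-namespaces)))
+;;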
+;; Once the build command template is filled out, we can create the nix expression.
+;;
+;; Questions
+;;
+;; - how to import (third-party) dependencies?
+;; 1 just don't have them...? yeah right
+;; 2 the target.nix could be the build description for target.hs
+;; 3 just use a default.nix for the com.whatever
+;; 4 have a deps.nix file
+;; 5 list them in the file with other settings. Starting with Haskell,
+;; have comments like `{-: PACKAGE base <5.0.0.0 :-}' or `-- : PACKAGE base <5.0.0.0'.
+;; Other languages could use `#:` for the special prefix, basically just
+;; a comment plus a colon (see the example after this list).
+;; - how to handle multiple output formats?
+;; - e.g. that ghcjs and ghc take the same input files...
+;; - say you have a .md file, you want to bild it to pdf, html, and more. What do?
+;; - i guess the nix file could return a set of drvs instead of a single drv
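+;;
+;; For option 5 above, the Haskell modules in this tree already carry
+;; comments in that style (see Biz/Ibb/Client.hs and Biz/Ibb/Server.hs
+;; below), e.g.:
+;;
+;;   -- | Front-end
+;;   --
+;;   -- : exe ibb.js
+;;   --
+;;   -- : dep miso
+;;   -- : dep servant
+;;
+;; bild would read these `-- :' lines to learn the output name and the
+;; third-party deps for a namespace.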
+;;
+;; TODO
+;; - stream output from 'nix build' subprocess
+;; - get rid of guile notes during execution
+;; - ns<->path macro
+;; - support list (scheme namespace) in ns<->path fns
+;;
+;;; Code:
+
+(define-module (Biz Bild)
+ #:use-module ((ice-9 popen) #:prefix popen/)
+ #:use-module ((ice-9 format) #:select (format))
+ #:use-module ((ice-9 rdelim) #:prefix rdelim/)
+ #:use-module ((Alpha Core) #:select (fmt))
+ #:use-module ((Alpha Shell) #:prefix sh/)
+ #:use-module ((Alpha String) #:prefix string/)
+ #:export (ns?
+ ns->path
+ path->ns
+ main))
+
+(define (main args)
+ (let* ((root (sh/exec "git rev-parse --show-toplevel"))
+ (target (cadr args))
+ (path (ns->path target)))
+ (display (fmt ":: bild ~a...\r" target))
+ (sh/exec (fmt "nix build -f ~a/default.nix ~a"
+ root target))
+ (display (fmt ":: bilt ~a" target))))
+
+(define ns? symbol?)
+
+(define (ns->path ns)
+ (let ((to-path (lambda (s) (string/replace s #\. #\/))))
+ (cond
+ ((symbol? ns) (to-path (symbol->string ns)))
+ ((string? ns) (to-path ns))
+ (else (error "ns should be a string or symbol")))))
+
+(define (path->ns path)
+ (let ((to-ns (lambda (s) (string/replace s #\/ #\.))))
+ (cond
+ ((symbol? path) (to-ns (symbol->string path)))
+ ((string? path) (to-ns path))
+ (else (error "path should be a string or symbol")))))
diff --git a/Biz/Cloud/chat.nix b/Biz/Cloud/chat.nix
new file mode 100644
index 0000000..e23b73e
--- /dev/null
+++ b/Biz/Cloud/chat.nix
@@ -0,0 +1,100 @@
+{ config, pkgs, ... }:
+#
+# a homeserver for matrix.org.
+#
+# - nixos manual: https://nixos.org/nixos/manual/index.html#module-services-matrix
+#
+# to create new users:
+#
+# nix run nixpkgs.matrix-synapse
+# register_new_matrix_user -k <registration_shared_secret> http://localhost:<matrix_port>
+#
+let
+ fqdn = "matrix.${config.networking.domain}";
+ riot = "chat.${config.networking.domain}";
+ matrix_port = 8448;
+in {
+ # matrix-synapse server. for what the settings mean, see:
+ # https://nixos.org/nixos/manual/index.html#module-services-matrix
+ #
+ services.matrix-synapse = {
+ enable = true;
+ server_name = config.networking.domain;
+ registration_shared_secret = "AkGRWSQLga3RoKRFnHhKoeCEIeZzu31y4TRzMRkMyRbBnETkVTSxilf24qySLzQn";
+ listeners = [
+ {
+ port = matrix_port;
+ bind_address = "::1";
+ type = "http";
+ tls = false;
+ x_forwarded = true;
+ resources = [
+ {
+ names = [ "client" "federation" ];
+ compress = false;
+ }
+ ];
+ }
+ ];
+ };
+ # matrix needs a database
+ #
+ services.postgresql.enable = true;
+ # web proxy for the matrix server
+ #
+ services.nginx = {
+ enable = true;
+ recommendedTlsSettings = true;
+ recommendedOptimisation = true;
+ recommendedGzipSettings = true;
+ recommendedProxySettings = true;
+ virtualHosts = {
+ # route to matrix-synapse
+ "${config.networking.domain}" = {
+ locations."= /.well-known/matrix/server".extraConfig =
+ let
+ server = { "m.server" = "${fqdn}:443"; };
+ in ''
+ add_header Content-Type application/json;
+ return 200 '${builtins.toJSON server}';
+ '';
+ locations."= /.well-known/matrix/client".extraConfig =
+ let
+ client = {
+ "m.homeserver" = { "base_url" = "https://${fqdn}"; } ;
+ "m.identity_server" = { "base_url" = "https://vector.im"; };
+ };
+ in ''
+ add_header Content-Type application/json;
+ add_header Access-Control-Allow-Origin *;
+ return 200 '${builtins.toJSON client}';
+ '';
+ };
+ # reverse proxy for matrix client-server and server-server communication
+ "${fqdn}" = {
+ enableACME = true;
+ forceSSL = true;
+ locations."/".extraConfig = ''
+ return 404;
+ '';
+ locations."/_matrix" = {
+ proxyPass = "http://[::1]:${toString matrix_port}";
+ };
+ };
+ };
+ };
+ # riot client, available at chat.simatime.com
+ #
+ # note that riot and matrix-synapse must be on separate fqdn's to
+ # protect from XSS attacks:
+ # https://github.com/vector-im/riot-web#important-security-note
+ #
+ services.nginx.virtualHosts."${riot}" = {
+ enableACME = true;
+ forceSSL = true;
+ serverAliases = [
+ "chat.${config.networking.domain}"
+ ];
+ root = pkgs.riot-web;
+ };
+}
diff --git a/Biz/Cloud/git.nix b/Biz/Cloud/git.nix
new file mode 100644
index 0000000..370f52a
--- /dev/null
+++ b/Biz/Cloud/git.nix
@@ -0,0 +1,17 @@
+{ pkgs, ... }:
+
+{
+ services = {
+ gitolite = {
+ enable = true;
+ enableGitAnnex = true;
+ dataDir = "/srv/git";
+ user = "git";
+ group = "git";
+ extraGitoliteRc = ''
+ $RC{SITE_INFO} = 'a computer is a bicycle for the mind.';
+ '';
+ adminPubkey = builtins.readFile ../keys/ben.pub;
+ };
+ };
+}
diff --git a/Biz/Cloud/hardware.nix b/Biz/Cloud/hardware.nix
new file mode 100644
index 0000000..8c88cb7
--- /dev/null
+++ b/Biz/Cloud/hardware.nix
@@ -0,0 +1,6 @@
+{ ... }:
+{
+ imports = [ <nixpkgs/nixos/modules/profiles/qemu-guest.nix> ];
+ boot.loader.grub.device = "/dev/vda";
+ fileSystems."/" = { device = "/dev/vda1"; fsType = "ext4"; };
+}
diff --git a/Biz/Cloud/mail.nix b/Biz/Cloud/mail.nix
new file mode 100644
index 0000000..81bddc2
--- /dev/null
+++ b/Biz/Cloud/mail.nix
@@ -0,0 +1,43 @@
+{ ... }:
+
+{
+ mailserver = {
+ enable = true;
+ monitoring = {
+ enable = false;
+ alertAddress = "bsima@me.com";
+ };
+ fqdn = "simatime.com";
+ domains = [ "simatime.com" "bsima.me" ];
+ certificateScheme = 3; # let's encrypt
+ enableImap = true;
+ enablePop3 = true;
+ enableImapSsl = true;
+ enablePop3Ssl = true;
+ enableManageSieve = true;
+ virusScanning = false; # ur on ur own
+ localDnsResolver = true;
+
+ loginAccounts = {
+ "ben@simatime.com" = {
+ hashedPassword = "$6$Xr180W0PqprtaFB0$9S/Ug1Yz11CaWO7UdVJxQLZWfRUE3/rarB0driXkXALugEeQDLIjG2STGQBLU23//JtK3Mz8Kwsvg1/Zo0vD2/";
+ aliases = [
+ # my default email
+ "ben@bsima.me"
+ # admin stuff
+ "postmaster@simatime.com"
+ "abuse@simatime.com"
+ ];
+ catchAll = [ "simatime.com" "bsima.me" ];
+ quota = "5G";
+ };
+ "nick@simatime.com" = {
+ hashedPassword = "$6$31P/Mg8k8Pezy1e$Fn1tDyssf.1EgxmLYFsQpSq6RP4wbEvP/UlBlXQhyKA9FnmFtJteXsbJM1naa8Kyylo8vZM9zmeoSthHS1slA1";
+ aliases = [
+ "nicolai@simatime.com"
+ ];
+ quota = "1G";
+ };
+ };
+ };
+}
diff --git a/Biz/Cloud/networking.nix b/Biz/Cloud/networking.nix
new file mode 100644
index 0000000..d943c13
--- /dev/null
+++ b/Biz/Cloud/networking.nix
@@ -0,0 +1,36 @@
+{ lib, config, ... }:
+
+{
+ networking = {
+
+ firewall = {
+ allowedTCPPorts = [ 22 80 443 ];
+ };
+
+  # The following was populated at runtime with the networking details
+  # gathered from the active system.
+ nameservers = [
+ "67.207.67.2"
+ "67.207.67.3"
+ ];
+ defaultGateway = "159.89.128.1";
+ defaultGateway6 = "2604:a880:2:d0::1";
+ dhcpcd.enable = false;
+ usePredictableInterfaceNames = lib.mkForce true;
+ interfaces = {
+ eth0 = {
+ ipv4.addresses = [
+ { address="159.89.128.69"; prefixLength=20; }
+ { address="10.46.0.6"; prefixLength=16; }
+ ];
+ ipv6.addresses = [
+ { address="2604:a880:2:d0::35:c001"; prefixLength = 64; }
+ { address="fe80::e899:c0ff:fe9c:e194"; prefixLength = 64; }
+ ];
+ };
+ };
+ };
+ services.udev.extraRules = ''
+ ATTR{address}=="ea:99:c0:9c:e1:94", NAME="eth0"
+ '';
+}
diff --git a/Biz/Cloud/web.nix b/Biz/Cloud/web.nix
new file mode 100644
index 0000000..22d7199
--- /dev/null
+++ b/Biz/Cloud/web.nix
@@ -0,0 +1,41 @@
+{ ... }:
+
+let
+ bensIp = "73.222.221.62";
+in
+{
+ services = {
+ nginx = {
+ enable = true;
+ recommendedGzipSettings = true;
+ recommendedOptimisation = true;
+ recommendedProxySettings = true;
+ recommendedTlsSettings = true;
+ virtualHosts = {
+ "bsima.me".root = "/home/ben/public_html/";
+ "www.bsima.me".root = "/home/ben/public_html/";
+ "simatime.com".locations."/".root = "/srv/www/";
+ "firefoxsync.simatime.com".locations."/".proxyPass = "http://localhost:5001";
+ "hero.simatime.com".locations."/".proxyPass = "http://${bensIp}:3001";
+ "tv.simatime.com".locations."/".proxyPass = "http://${bensIp}:8096"; # emby runs on port 8096
+ "deluge.simatime.com".locations."/".proxyPass = "http://${bensIp}:8112";
+
+ "notebook.simatime.com".locations = {
+ "/" = {
+ proxyPass = "http://${bensIp}:3099";
+ proxyWebsockets = true;
+ extraConfig = ''
+ proxy_buffering off;
+ proxy_read_timeout 86400;
+
+ '';
+ };
+ "/(api/kernels/[^/]+/channels|terminals/websocket)/" = {
+ proxyPass = "http://${bensIp}:3099";
+ proxyWebsockets = true;
+ };
+ };
+ };
+ };
+ };
+}
diff --git a/Biz/Cloud/znc.nix b/Biz/Cloud/znc.nix
new file mode 100644
index 0000000..9b1a28d
--- /dev/null
+++ b/Biz/Cloud/znc.nix
@@ -0,0 +1,66 @@
+/*
+
+N.B.: generate znc passwords with 'nix-shell -p znc --command "znc --makepass"'
+
+- https://wiki.znc.in/Configuration
+
+*/
+
+{ ... }:
+
+{
+ services = {
+ znc = {
+ enable = true;
+ mutable = false;
+ useLegacyConfig = false;
+ openFirewall = true;
+ config = {
+ LoadModule = [ "adminlog" ];
+ User.bsima = {
+ Admin = true;
+ Nick = "bsima";
+ AltNick = "bsima1";
+ LoadModule = [ "chansaver" "controlpanel" "log" ];
+ Network.freenode = {
+ Server = "chat.freenode.net +6697";
+ LoadModule = [ "simple_away" "nickserv" "sasl" ];
+ Chan = {
+ "#ai" = {};
+ "#biz" = { Modes = "+Sp"; };
+ "#bsima" = { Modes = "+Sp"; };
+ "##categorytheory" = { Detached = true; };
+ "#clojure" = { Detached = true; };
+ "#coq" = { Detached = true; };
+ "#emacs" = { Detached = true; };
+ "#guile" = { Detached = true; };
+ "#guix" = { Detached = true; };
+ "#haskell" = {};
+ "#haskell-miso" = { Detached = true; };
+ "#hledger" = {};
+ "#hnix" = { Detached = true; };
+ "#home-manager" = { Detached = true; };
+ "#ledger" = {};
+ "#nix-darwin" = { Detached = true; };
+ "#nixos" = {};
+ "#org-mode" = {};
+ "#scheme" = { Detached = true; };
+ "#servant" = { Detached = true; };
+ "#sr.ht" = { Detached = true; };
+ "#xmonad" = { Detached = true; };
+ };
+ };
+ Network.efnet = {
+ Server = "irc.efnet.info +6697";
+ LoadModule = [ "simple_away" ];
+ };
+ Pass.password = {
+ Method = "sha256";
+ Hash = "bead16d806e7bf5cbbc31d572b20f01e2b253eb60e2497ce465df56306becd02";
+ Salt = "/GhmBMc+E6b7qd8muFEe";
+ };
+ };
+ };
+ };
+ };
+}
diff --git a/Biz/Dev/configuration.nix b/Biz/Dev/configuration.nix
new file mode 100644
index 0000000..4a8839e
--- /dev/null
+++ b/Biz/Dev/configuration.nix
@@ -0,0 +1,205 @@
+{ config, lib, pkgs, ... }:
+
+let
+ murmurPort = 64738;
+ torrents = { from = 6000; to = 6999; };
+in {
+ networking = {
+ hosts = {
+ "::1" = [ "localhost" "ipv6-localhost" "ipv6-loopback" ];
+ };
+
+ firewall = {
+ allowedTCPPorts = [
+ 22 8000 8443 443 8080 8081 # standard ports
+ 500 10000 # no idea
+ 8096 # emby/jellyfin
+ 8112 # deluge
+ murmurPort
+ ];
+ allowedTCPPortRanges = [
+ { from = 3000; to = 3100; } # dev stuff
+ torrents
+ ];
+ allowedUDPPorts = [ murmurPort ];
+ allowedUDPPortRanges = [
+ torrents
+ ];
+ checkReversePath = false;
+ };
+
+ };
+
+ # Use the systemd-boot EFI boot loader.
+ boot.loader.systemd-boot.enable = true;
+ boot.loader.efi.canTouchEfiVariables = true;
+ boot.enableContainers = true;
+
+ boot.initrd.luks.devices = {
+ root = {
+ device = "/dev/disk/by-uuid/a0160f25-e0e3-4af0-8236-3e298eac957a";
+ preLVM = true;
+ };
+ };
+
+ powerManagement.enable = false;
+
+ time.timeZone = "America/Los_Angeles";
+
+ fonts.fonts = with pkgs; [
+ google-fonts mononoki source-code-pro fantasque-sans-mono hack-font
+ fira fira-code fira-code-symbols
+ ];
+
+ environment.systemPackages = [ pkgs.wemux pkgs.tmux ];
+
+ nixpkgs = {
+ config = {
+ allowUnfree = true;
+ allowBroken = true;
+ };
+ };
+
+ hardware = {
+ opengl.enable = true;
+ pulseaudio = {
+ enable = true;
+ extraConfig = ''
+ load-module module-loopback
+ '';
+ };
+ };
+
+ programs = {
+ bash.enableCompletion = true;
+ command-not-found.enable = true;
+ gnupg.agent = {
+ enable = true;
+ enableSSHSupport = true;
+ };
+ mosh.enable = true;
+ };
+
+ virtualisation = {
+ docker = {
+ enable = true;
+ liveRestore = false;
+ };
+ libvirtd.enable = true;
+ virtualbox = {
+ host = {
+ enable = false;
+ headless = false;
+ addNetworkInterface = false;
+ };
+ guest = {
+ enable = false;
+ x11 = false;
+ };
+ };
+ };
+
+ services = {
+ pcscd.enable = true;
+ logind = {
+ lidSwitch = "ignore";
+ extraConfig = "IdleAction=ignore";
+ };
+
+ deluge = {
+ enable = true;
+ openFilesLimit = 10240;
+ web.enable = true;
+ };
+
+ printing.enable = true;
+
+ murmur = {
+ enable = true;
+ registerName = "simatime";
+ password = "simatime";
+ port = murmurPort;
+ };
+
+ xserver = {
+ enable = true;
+ layout = "us";
+ xkbOptions = "caps:ctrl_modifier";
+ displayManager.sddm.enable = true;
+ desktopManager = {
+ kodi.enable = true;
+ plasma5.enable = true;
+ xterm.enable = true;
+ };
+ };
+
+ jupyter = {
+ enable = false;
+ port = 3099;
+ ip = "*";
+ password = "'sha1:4b14a407cabe:fbab8e5400f3f4f3ffbdb00e996190d6a84bf51e'";
+ kernels = {
+ python3 = let
+ env = (pkgs.python3.withPackages (p: with p; [
+ ipykernel pandas scikitlearn numpy matplotlib sympy ipywidgets
+ ]));
+ in {
+ displayName = "py3";
+ argv = [
+ "${env.interpreter}"
+ "-m"
+ "ipykernel_launcher"
+ "-f"
+ "{connection_file}"
+ ];
+ language = "python";
+ #logo32 = "${env.sitePackages}/lib/python3.6/site-packages/ipykernel/resources/logo-32x32.png";
+ #logo64 = "${env.sitePackages}/lib/python3.6/site-packages/ipykernel/resources/logo-64x64.png";
+ };
+ };
+ };
+
+ jellyfin = { # previously emby
+ enable = true;
+ user = "jellyfin";
+ group = "jellyfin";
+ };
+
+ vnstat.enable = true;
+
+ postgresql = {
+ enable = true;
+ package = pkgs.postgresql_10;
+ authentication = ''
+ local all pprjam md5
+ local all pprjam_test md5
+ '';
+ enableTCPIP = true;
+ };
+ redis = {
+ enable = true;
+ };
+ };
+
+ documentation = {
+ enable = true;
+ dev.enable = true;
+ doc.enable = true;
+ info.enable = true;
+ man.enable = true;
+ nixos.enable = true;
+ };
+
+ # Since this is the dev machine, we can turn these on at the expense of extra
+ # disk space.
+ nix.extraOptions = ''
+ keep-outputs = true
+ keep-derivations = true
+ '';
+
+ # This value determines the NixOS release with which your system is to be
+ # compatible, in order to avoid breaking some software such as database
+ # servers. You should change this only after NixOS release notes say you
+ # should.
+ system.stateVersion = "17.09"; # Did you read the comment?
+}
diff --git a/Biz/Dev/hardware.nix b/Biz/Dev/hardware.nix
new file mode 100644
index 0000000..fc0e7a0
--- /dev/null
+++ b/Biz/Dev/hardware.nix
@@ -0,0 +1,34 @@
+# Do not modify this file! It was generated by ‘nixos-generate-config’
+# and may be overwritten by future invocations. Please make changes
+# to /etc/nixos/configuration.nix instead.
+{ config, lib, pkgs, ... }:
+
+{
+ imports =
+ [ <nixpkgs/nixos/modules/installer/scan/not-detected.nix>
+ ];
+
+ boot.initrd.availableKernelModules = [ "xhci_pci" "ahci" "usbhid" "sd_mod" ];
+ boot.kernelModules = [ "kvm-intel" ];
+ boot.extraModulePackages = [ ];
+
+ fileSystems."/" =
+ { device = "/dev/disk/by-uuid/0d8b0e52-10de-4af2-bcd9-b36278352e77";
+ fsType = "ext4";
+ };
+
+ fileSystems."/boot" =
+ { device = "/dev/disk/by-uuid/9B89-85C7";
+ fsType = "vfat";
+ };
+
+ fileSystems."/mnt/lake" =
+ { device = "/dev/disk/by-uuid/037df3ae-4609-402c-ab1d-4593190d0ee7";
+ fsType = "ext4";
+ };
+
+ swapDevices = [ ];
+
+ nix.maxJobs = lib.mkDefault 4;
+ powerManagement.cpuFreqGovernor = "powersave";
+}
diff --git a/Biz/Ibb/Client.hs b/Biz/Ibb/Client.hs
new file mode 100644
index 0000000..e40ff36
--- /dev/null
+++ b/Biz/Ibb/Client.hs
@@ -0,0 +1,38 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | Front-end
+--
+-- : exe ibb.js
+--
+-- : dep clay
+-- : dep miso
+-- : dep protolude
+-- : dep servant
+-- : dep text
+-- : dep aeson
+-- : dep containers
+-- : dep ghcjs-base
+module Biz.Ibb.Client where
+
+import Alpha
+import Biz.Ibb.Core ( Action(..)
+ , see
+ , init
+ )
+import Biz.Ibb.Move ( move )
+import Miso ( App(..)
+ , defaultEvents
+ , miso
+ )
+
+main :: IO ()
+main = miso $ \u -> App { model = init u, .. }
+ where
+ initialAction = FetchPeople
+ update = move
+ view = see
+ events = defaultEvents
+ subs = []
+ mountPoint = Nothing
diff --git a/Biz/Ibb/Core.hs b/Biz/Ibb/Core.hs
new file mode 100644
index 0000000..fb82ff0
--- /dev/null
+++ b/Biz/Ibb/Core.hs
@@ -0,0 +1,143 @@
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+-- | Main app logic
+module Biz.Ibb.Core where
+
+import Alpha
+import Network.RemoteData
+import Data.Aeson hiding ( Success )
+import Data.Data ( Data
+ , Typeable
+ )
+import Data.Text ( Text )
+import GHC.Generics ( Generic )
+import Miso
+import Miso.String
+import Servant.API
+import Servant.Links
+
+-- * entity data types
+
+data Person = Person
+ { _name :: Text
+ -- ^ Their full name.
+ , _pic :: Text
+ -- ^ A link to their picture.
+ , _twitter :: Text
+ -- ^ Their twitter handle, without the `@` prefix.
+ , _website :: Text
+ -- ^ Their main website, fully formed: `https://example.com`
+ , _books :: [Book]
+ -- ^ A short list of the books they recommend.
+ , _blurb :: Text
+ -- ^ A short "about" section, like you would see on the jacket flap of a book.
+ } deriving (Generic, Show, Eq, Typeable, Data, Ord)
+
+instance FromJSON Person
+instance ToJSON Person
+
+data Book = Book
+ { _title :: Text
+ , _author :: Text
+ , _amznref :: Text
+ -- ^ Amazon REF number, for creating affiliate links.
+ } deriving (Generic, Show, Eq, Typeable, Data, Ord)
+
+instance FromJSON Book
+instance ToJSON Book
+
+-- * app data types
+
+type AppRoutes = Home
+
+type Home = View Action
+
+data Model = Model
+ { uri :: URI
+ , people :: WebData [Person]
+ } deriving (Show, Eq)
+
+type WebData a = RemoteData MisoString a
+
+init :: URI -> Model
+init u = Model u Loading
+
+data Action
+ = Nop
+ | ChangeRoute URI
+ | HandleRoute URI
+ | FetchPeople
+ | SetPeople (WebData [Person])
+ deriving (Show, Eq)
+
+home :: Model -> View Action
+home m = see m
+
+handlers :: Model -> View Action
+handlers = home
+
+notfound :: View Action
+notfound = div_ [] [text "404"]
+
+goHome :: URI
+goHome = linkURI $ safeLink (Proxy :: Proxy AppRoutes) (Proxy :: Proxy Home)
+
+see :: Model -> View Action
+see m = div_
+ [class_ "container mt-5"]
+ [ div_
+ [class_ "jumbotron"]
+ [ h1_ [class_ "display-4"] [text "Influenced by books"]
+ , p_ [class_ "lead"]
+ [text "Influential people and the books that made them."]
+ , p_
+ [class_ "lead"]
+ [ a_
+ [href_ "http://eepurl.com/ghBFjv"]
+ [ text
+ "Get new book recommendations from the world's influencers in your email."
+ ]
+ ]
+ ]
+ , div_ [class_ "card-columns"] $ case people m of
+ NotAsked -> [text "Initializing..."]
+ Loading -> [text "Loading..."]
+ Failure err -> [text err]
+ Success ps -> seePerson </ ps
+ ]
+
+seePerson :: Person -> View Action
+seePerson person = div_
+ [class_ "card"]
+ [ div_ [class_ "card-img"]
+ [img_ [class_ "card-img img-fluid", src_ $ ms $ _pic person]]
+ , div_
+ [class_ "card-body"]
+ [ h4_ [class_ "card-title"] [text $ ms $ _name person]
+ , h6_
+ []
+ [ a_
+ [ class_ "fab fa-twitter"
+ , href_ $ "https://twitter.com/" <> (ms $ _twitter person)
+ ]
+ []
+ , a_ [class_ "fas fa-globe", href_ $ ms $ _website person] []
+ ]
+ , p_ [class_ "card-text"]
+ [text $ ms $ _blurb person, ul_ [] $ seeBook </ _books person]
+ ]
+ ]
+
+seeBook :: Book -> View Action
+seeBook book = li_
+ []
+ [ a_
+ [ class_ "text-dark"
+ , href_ $ "https://www.amazon.com/dp/" <> (ms $ _amznref book)
+ ]
+ [text $ ms $ _title book]
+ ]
diff --git a/Biz/Ibb/Influencers.hs b/Biz/Ibb/Influencers.hs
new file mode 100644
index 0000000..c31e962
--- /dev/null
+++ b/Biz/Ibb/Influencers.hs
@@ -0,0 +1,407 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+
+module Biz.Ibb.Influencers where
+
+import Biz.Ibb.Core
+
+allPeople :: [Person]
+allPeople =
+ [ Person { _name = "Joe Rogan"
+ , _pic = "https://pbs.twimg.com/profile_images/552307347851210752/vrXDcTFC_400x400.jpeg"
+ , _twitter = "joerogan"
+ , _blurb = "Stand up comic/mixed martial arts fanatic/psychedelic adventurer Host of The Joe Rogan Experience"
+ , _website = "http://joerogan.com"
+ , _books = [ Book {_title = "Food of the Gods"
+ , _author = "Terence McKenna"
+ , _amznref = "0553371304"
+ }
+ , Book { _title = "The War of Art"
+ , _author ="Steven Pressfield"
+ , _amznref ="B007A4SDCG"
+ }
+ ]
+ }
+ , Person { _name = "Beyoncé"
+ , _pic = "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTxT84sF19lxdnSiblIXAp-Y4wAigpQn8sZ2GtAerIR_ApiiEJfFQ"
+ , _twitter = "Beyonce"
+ , _blurb = "American singer, songwriter, actress, record producer and dancer"
+ , _website = "http://beyonce.com"
+ , _books = [ Book { _title = "What Will It Take To Make A Woman President?"
+ , _author = "Marianne Schnall"
+ , _amznref = "B00E257Y7G"}
+ ]
+ }
+  , Person { _name = "Barack Obama"
+ , _pic = "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQeLzftR36p0hYI-EKNa5fm7CYDuN-vyz23_R48ocqa8X1nPr6C"
+ , _twitter = "BarackObama"
+ , _blurb = "Dad, husband, President, citizen. 44th POTUS"
+ , _website = "http://barackobama.com"
+ , _books = [ Book { _title = "An American Marriage"
+ , _author = "Tayari Jones"
+ , _amznref = "B01NCUXEFR"}
+ , Book { _title = "Americanah"
+ , _author = "Chimamanda Ngozi Adichie"
+ , _amznref = "B00A9ET4MC"}
+ ]
+ }
+  , Person { _name = "Warren Buffett"
+ , _pic = "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQQbmnUykS6zqgzaf44tsq1RAsnHe6H7fapEoSqUwAoJGSFKbAPSw"
+ , _twitter = "WarrenBuffett"
+ , _blurb = "Chairman and CEO of Berkshire Hathaway"
+ , _website = "http://berkshirehathaway.com"
+ , _books = [ Book { _title = "The Intelligent Investor"
+ , _author = "Benjamin Graham"
+ , _amznref = "B000FC12C8"}
+ , Book { _title = "Security Analysis"
+ , _author = "Benjamin Graham"
+ , _amznref = "B0037JO5J8"}
+ ]
+ }
+ , Person { _name = "Bill Gates"
+ , _pic = "https://pbs.twimg.com/profile_images/988775660163252226/XpgonN0X_400x400.jpg"
+ , _twitter = "BillGates"
+ , _blurb = "Sharing things I'm learning through my foundation work and other interests. Founder of Microsoft and Bill & Melinda Gates Foundation"
+ , _website = "https://www.gatesnotes.com"
+ , _books = [ Book { _title = "Leonardo da Vinci"
+ , _author = "Walter Isaacson"
+ , _amznref = "1501139169"
+ }
+ , Book { _title = "Educated"
+                    , _author = "Tara Westover"
+ , _amznref = "B072BLVM83"
+ }
+ ]
+ }
+ , Person { _name = "Stephen King"
+ , _pic = "https://pbs.twimg.com/profile_images/378800000836981162/b683f7509ec792c3e481ead332940cdc_400x400.jpeg"
+ , _twitter = "StephenKing"
+ , _blurb = "World renowned Author"
+ , _website = "https://stephenking.com/"
+ , _books = [ Book { _title = "Red Moon"
+ , _author = "Benjamin Percy"
+ , _amznref = "B008TU2592"
+ }
+ , Book { _title = "The Marauders"
+ , _author = "Tom Cooper"
+ , _amznref = "B00MKZBVTM"
+ }
+ ]
+ }
+ , Person { _name = "Tobi Lütke"
+ , _pic = "https://pbs.twimg.com/profile_images/551403375141457920/28EOlhnM_400x400.jpeg"
+ , _twitter = "tobi"
+ , _blurb = "Shopify CEO by day, Dad in the evening, hacker at night. - Rails Core alumni; Author of ActiveMerchant, Liquid. Comprehensivist"
+ , _website = "https://www.shopify.com"
+ , _books = [ Book { _title = "Influence"
+ , _author ="Robert B. Cialdini"
+ , _amznref = "006124189X"
+ }
+ , Book { _title = "High Output Management"
+ , _author ="Andrew S. Grove"
+ , _amznref = "B015VACHOK"
+ }
+ ]
+ }
+ , Person { _name = "Susan Cain"
+ , _pic = "https://pbs.twimg.com/profile_images/1474290079/SusanCain5smaller-1_400x400.jpg"
+ , _twitter = "susancain"
+ , _blurb = "Bestselling author, award-winning speaker, http://Quietrev.com curator. Lover of bittersweet music & bittersweet chocolate, in equal measure."
+ , _website = "https://www.quietrev.com"
+ , _books = [ Book { _title = "Bird by Bird"
+ , _author ="Anne Lamott"
+ , _amznref = "0385480016"
+ }
+ , Book { _title = "Waking Up"
+ , _author ="Sam Harris"
+ , _amznref = "1451636024"
+ }
+ ]
+ }
+ , Person { _name = "Oprah Winfrey"
+ , _pic = "https://pbs.twimg.com/profile_images/1013835283698049025/q5ZN4yv3_400x400.jpg"
+ , _twitter = "Oprah"
+           , _blurb = "Oprah Winfrey is an American media executive, actress, talk show host, television producer and philanthropist"
+ , _website = "http://www.oprah.com/index.html"
+ , _books = [ Book { _title = "A New Earth"
+ , _author ="Eckhart Tolle"
+ , _amznref = "B000PC0S5K"
+ }
+ , Book { _title = "The Poisonwood Bible"
+ , _author ="Barbara Kingsolver"
+ , _amznref = "B000QTE9WU"
+ }
+ ]
+ }
+ , Person { _name = "Patrick Collison"
+ , _pic = "https://pbs.twimg.com/profile_images/825622525342199809/_iAaSUQf_400x400.jpg"
+ , _twitter = "patrickc"
+ , _blurb = "Fallibilist, optimist. Stripe CEO"
+ , _website = "https://patrickcollison.com"
+ , _books = [ Book { _title = "How Judges Think"
+ , _author ="Richard A. Posner"
+ , _amznref = "0674048067"
+ }
+ , Book { _title = "Programmers at Work"
+ , _author ="Susan Lammers"
+ , _amznref = "1556152116"
+ }
+ ]
+ }
+ , Person { _name = "Luis Von Ahn"
+ , _pic = "https://pbs.twimg.com/profile_images/1020343581087678464/NIXD5MdC_400x400.jpg"
+ , _twitter = "LuisvonAhn"
+ , _blurb = "CEO & co-founder of duolingo. Invented reCAPTCHA. Computer science professor at Carnegie Mellon. Proud Guatemalan"
+ , _website = "https://www.duolingo.com/"
+ , _books = [ Book { _title = "Zero to One"
+ , _author ="Peter Thiel"
+ , _amznref = "B00J6YBOFQ"
+ }
+ , Book { _title = "The Hard Thing About Hard Things"
+ , _author ="Ben Horowitz"
+ , _amznref = "B00DQ845EA"
+ }
+ ]
+ }
+ , Person { _name = "Bryan Johnson"
+ , _pic = "https://pbs.twimg.com/profile_images/1055165076372475904/vNp60sSl_400x400.jpg"
+ , _twitter = "bryan_johnson"
+ , _blurb = "Founder of Kernel, OS Fund and Braintree. Trying to go where there is no destination"
+ , _website = "https://bryanjohnson.co"
+ , _books = [ Book { _title = "A Good Man"
+ , _author ="Mark Shriver"
+ , _amznref = "B007CLBH0M"
+ }
+ , Book { _title = "Shackleton"
+ , _author ="Nick Bertozzi"
+ , _amznref = "1596434511"
+ }
+ ]
+ }
+ , Person { _name = "Peter Thiel"
+ , _pic = "https://pbs.twimg.com/profile_images/1121220551/Peter_Thiel_400x400.jpg"
+ , _twitter = "peterthiel"
+ , _blurb = "Technology entrepreneur, investor, philanthropist."
+ , _website = "http://zerotoonebook.com"
+ , _books = [ Book { _title = "Deceit, Desire, and the Novel"
+ , _author ="René Girard"
+ , _amznref = "0801818303"
+ }
+ , Book { _title = "Violence and the Sacred"
+ , _author ="René Girard"
+ , _amznref = "0801822181"
+ }
+ ]
+ }
+  , Person { _name = "Tim Ferriss"
+ , _pic = "https://pbs.twimg.com/profile_images/49918572/half-face-ice_400x400.jpg"
+ , _twitter = "tferriss"
+ , _blurb = "Author of 5 #1 NYT/WSJ bestsellers, investor (FB, Uber, Twitter, 50+ more: http://angel.co/tim ), host of The Tim Ferriss Show podcast (300M+ downloads)"
+ , _website = "http://tim.blog"
+ , _books = [ Book { _title = "10% Happier"
+ , _author ="Dan Harris"
+ , _amznref = "0062265431"
+ }
+ , Book { _title = "A Guide to the Good Life"
+ , _author ="William Irvine"
+ , _amznref = "B0040JHNQG"
+ }
+ ]
+ }
+ , Person { _name = "Allen Walton"
+ , _pic = "https://pbs.twimg.com/profile_images/1038905908678545409/yUbF9Ruc_400x400.jpg"
+ , _twitter = "allenthird"
+ , _blurb = "Created http://SpyGuy.com and blogs about stuff at http://AllenWalton.com . All things ecommerce, personal dev, and Simpsons."
+ , _website = "https://www.allenwalton.com"
+ , _books = [ Book { _title = "4 Hour Work Week"
+                              , _author ="Tim Ferriss"
+ , _amznref = "B002WE46UW"
+ }
+ , Book { _title = "Choose Yourself"
+ , _author ="James Altucher"
+ , _amznref = "B00CO8D3G4"
+ }
+ ]
+ }
+ , Person { _name = "Peter Mallouk"
+ , _pic = "https://pbs.twimg.com/profile_images/713172266968715264/KsyDYghf_400x400.jpg"
+ , _twitter = "PeterMallouk"
+ , _blurb = "President of Creative Planning. Author “5 Mistakes Every Investor Makes & How to Avoid Them”. Radically moderate."
+ , _website = "https://creativeplanning.com"
+ , _books = [ Book { _title = "Awareness"
+ , _author ="Anthony de Mello SJ"
+ , _amznref = "B005GFBP6W"
+ }
+ , Book { _title = "The Prophet"
+ , _author ="Kahlil Gibran"
+ , _amznref = "B07NDJ3LMW"
+ }
+ ]
+ }
+ , Person { _name = "Adam Robinson"
+ , _pic = "https://pbs.twimg.com/profile_images/822708907051077632/y5KyboMV_400x400.jpg"
+ , _twitter = "IAmAdamRobinson"
+ , _blurb = "Entrepreneur. Systems builder. Wizard. Shaman of global financial markets. Manifester. Didact. Do-gooder. Alchemist. Aphorist. Seeker. Embracer of possibility."
+ , _website = "http://robinsonglobalstrategies.com"
+ , _books = [ Book { _title = "Wishcraft"
+ , _author ="Barbara Sher"
+ , _amznref = "0345465180"
+ }
+ , Book { _title = "You Can Be a Stock Market Genius"
+ , _author ="Joel Greenblatt"
+ , _amznref = "0684832135"
+ }
+ ]
+ }
+ , Person { _name = "Andrew Weil"
+ , _pic = "https://pbs.twimg.com/profile_images/987461787422359553/mpoZAmPH_400x400.jpg"
+ , _twitter = "DrWeil"
+ , _blurb = "A world-renowned leader and pioneer in the field of integrative medicine, a healing oriented approach to health care which encompasses body, mind, and spirit."
+ , _website = "https://www.drweil.com"
+ , _books = [ Book { _title = "The Way Of Life According To Lao Tzu"
+                   , _author = "Witter Bynner"
+ , _amznref = "0399512985"
+ }
+ , Book { _title = "The Psychology of Romantic Love"
+ , _author ="Nathaniel Branden"
+ , _amznref = "B0012RMVJI"
+ }
+ ]
+ }
+ , Person { _name = "Hubert Joly"
+ , _pic = "https://scontent-ort2-2.xx.fbcdn.net/v/t1.0-1/c1.0.193.193a/38444401_2156120597936470_9028564067043770368_n.jpg?_nc_cat=104&_nc_ht=scontent-ort2-2.xx&oh=162142edb167f389a5b585a329e4993a&oe=5CE1D578"
+ , _twitter = "BBYCEO"
+ , _blurb = "CEO of Best Buy"
+ , _website = "https://www.bestbuy.com"
+ , _books = [ Book { _title = "Who Says Elephants Can't Dance"
+                   , _author = "Louis V. Gerstner"
+ , _amznref = "0060523808"
+ }
+ , Book { _title = "Onward"
+ , _author ="Howard Schultz"
+ , _amznref = "1609613821"
+ }
+ ]
+ }
+ , Person { _name = "Esther Perel"
+ , _pic = "https://pbs.twimg.com/profile_images/1091062675151319040/MzxCcgdU_400x400.jpg"
+ , _twitter = "EstherPerel"
+ , _blurb = "Exploring modern relationships. Author of MatingInCaptivity and TheStateOfAffairsBook. Podcast: WhereShouldWeBegin. Psychotherapist in NYC."
+ , _website = "https://www.estherperel.com"
+ , _books = [ Book { _title = "Crime And Punishment"
+ , _author = "Fyodor Dostoyevsky"
+ , _amznref = "B07NL94DFD"
+ }
+ , Book { _title = "If This Is a Man and The Truce"
+ , _author ="Primo Levi"
+ , _amznref = "0349100136"
+ }
+ ]
+ }
+ , Person { _name ="Neil deGrasse Tyson"
+ , _pic = "https://pbs.twimg.com/profile_images/74188698/NeilTysonOriginsA-Crop_400x400.jpg"
+ , _twitter = "neiltyson"
+           , _blurb = "Astrophysicist. The fifth head since 1935 of the world-renowned Hayden Planetarium in New York City and the first occupant of its Frederick P. Rose Directorship. Research associate of the Department of Astrophysics at the American Museum of Natural History."
+ , _website = "https://www.haydenplanetarium.org/tyson/"
+ , _books = [ Book { _title = "The Prince"
+ , _author = "Machiavelli"
+ , _amznref = "B07ND3CM16"
+ }
+ , Book { _title = "The Art of War"
+ , _author ="Sun Tzu"
+ , _amznref = "1545211957"
+ }
+ ]
+ }
+ , Person { _name = "Mark Cuban"
+ , _pic = "https://pbs.twimg.com/profile_images/1422637130/mccigartrophy_400x400.jpg"
+ , _twitter = "mcuban"
+ , _blurb = "Owner of Dallas Mavericks, Shark on ABC’s Shark Tank, chairman and CEO of AXS tv, and investor in an ever-growing portfolio of businesses"
+ , _website = "http://markcubancompanies.com/"
+ , _books = [ Book { _title = "The Fountainhead"
+                   , _author = "Ayn Rand"
+ , _amznref = "0452273331"
+ }
+ , Book { _title = "The Gospel of Wealth "
+ , _author ="Andrew Carnegie"
+ , _amznref = "1409942171"
+ }
+ ]
+ }
+ , Person { _name = "Robert Herjavec"
+ , _pic = "https://pbs.twimg.com/profile_images/608643660876423170/DgxUW3eZ_400x400.jpg"
+ , _twitter = "robertherjavec"
+ , _blurb = "Dad, Husband, Founder & CEO of global cybersecurity firm HerjavecGroup, Shark on ABC’s Shark Tank, Former Dragon, Author"
+ , _website = "https://www.robertherjavec.com/"
+ , _books = [ Book { _title = "Why I Run"
+ , _author = "Mark Sutcliffe"
+ , _amznref = "B007OC9P3A"
+ }
+ , Book { _title = "Swim with the Sharks Without Being Eaten Alive"
+ , _author ="Harvey B. Mackay"
+ , _amznref = "006074281X"
+ }
+ ]
+ }
+ , Person { _name = "Caterina Fake"
+ , _pic = "https://pbs.twimg.com/profile_images/378800000509318185/d968d62d1bc39f2c82d3fa44db478525_400x400.jpeg"
+ , _twitter = "Caterina"
+ , _blurb = "Host, Should this Exist? Investor, Yes VC. Cofounder: Flickr, Hunch, Sesat School. Etsy. Sundance. Homeschooling, film, literature. Dogs."
+ , _website = "https://caterina.net"
+ , _books = [ Book { _title = "Growth of the Soil"
+ , _author = "Knut Hamsun"
+ , _amznref = "0343181967"
+ }
+ , Book { _title = "The Thousand Autumns of Jacob de Zoet"
+ , _author ="David Mitchell"
+ , _amznref = "0812976363"
+ }
+ ]
+ }
+ , Person { _name = "Daymond John"
+ , _pic = "https://pbs.twimg.com/profile_images/1048022980863954944/eZvGANn0_400x400.jpg"
+ , _twitter = "TheSharkDaymond"
+ , _blurb = "CEO of FUBU, Shark on ABC’s Shark Tank, Author."
+ , _website = "https://daymondjohn.com/"
+ , _books = [ Book { _title = "Think and Grow Rich"
+ , _author = "Napoleon Hill"
+ , _amznref = "1585424331"
+ }
+ , Book { _title = "How to Win Friends & Influence People"
+ , _author ="Dale Carnegie"
+ , _amznref = "0671027034"
+ }
+ ]
+ }
+ , Person { _name = "Kevin O'Leary"
+ , _pic = "https://pbs.twimg.com/profile_images/1067383195597889536/cP6tNEt0_400x400.jpg"
+ , _twitter = "kevinolearytv"
+ , _blurb = "Chairman O'Shares ETFs, 4 time Emmy Award winning Shark Tank's Mr. Wonderful, bestselling author, CNBC contributor, wine maker, guitar dude and photographer."
+ , _website = "http://askmrwonderful.eone.libsynpro.com/"
+ , _books = [ Book { _title = "Competitive Advantage"
+ , _author = "Michael Porter"
+ , _amznref = "0684841460"
+ }
+ , Book { _title = "Secrets of Closing the Sale"
+ , _author ="Zig Ziglar"
+ , _amznref = "0425081028"
+ }
+ ]
+ }
+ , Person { _name = "Alex Rodriguez"
+ , _pic = "https://pbs.twimg.com/profile_images/796405335388848128/LbvsjCA3_400x400.jpg"
+ , _twitter = "AROD"
+ , _blurb = "3-time MVP • 14-time All Star • World Series Champ • CEO of @_ARodCorp• @FoxSports Commentator/Analyst • Special Advisor to the Yankees, @ABCSharkTank and ESPN"
+ , _website = "http://www.arodcorp.com/"
+ , _books = [ Book { _title = "Blitzscaling"
+ , _author = "Reid Hoffman"
+ , _amznref = "1524761419"
+ }
+ , Book { _title = "Measure What Matters"
+ , _author ="John Doerr"
+ , _amznref = "0525536221"
+ }
+ ]
+ }
+ ]
diff --git a/Biz/Ibb/Keep.hs b/Biz/Ibb/Keep.hs
new file mode 100644
index 0000000..ad6dc5c
--- /dev/null
+++ b/Biz/Ibb/Keep.hs
@@ -0,0 +1,123 @@
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE TypeFamilies #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+-- | Keep is a database built on Data.Acid.
+--
+-- If this proves useful, maybe we could make it a more general thing. Like
+-- `Biz.Keep`. I could wrap all the safecopy stuff in my own template haskell
+-- like `$(keep ''MyType)`.
+--
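+-- Typical use today, as in Biz.Ibb.Server below:
+--
+-- > keep <- Keep.openLocal "_keep/"
+-- > people <- Acid.query' keep $ Keep.GetPeople 20
+--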
+module Biz.Ibb.Keep where
+
+import Biz.Ibb.Core (Person(..), Book(..))
+import qualified Biz.Ibb.Influencers as Influencers
+import Control.Monad.Reader (ask)
+import Control.Monad.State (get, put)
+import Data.Acid (Update, makeAcidic)
+import qualified Data.Acid as Acid
+import Data.Data (Data, Typeable)
+import Data.IxSet (Indexable(..), IxSet, ixFun, ixSet)
+import qualified Data.IxSet as IxSet
+import Data.SafeCopy
+import Data.Text (Text)
+import qualified Data.Text as Text
+
+-- * Keep
+
+-- | Main database. Need to think of a better name for this.
+data IbbKeep = IbbKeep
+ { _people :: IxSet Person
+ }
+ deriving (Data, Typeable)
+
+$(deriveSafeCopy 0 'base ''IbbKeep)
+
+-- * Index @Person@
+
+$(deriveSafeCopy 0 'base ''Person)
+
+newtype PersonName =
+ PersonName Text deriving (Eq, Ord, Data, Typeable)
+
+newtype PersonBlurb =
+ PersonBlurb Text deriving (Eq, Ord, Data, Typeable)
+
+instance Indexable Person where
+ empty = ixSet
+ [ ixFun $ \p -> [ PersonName $ _name p ]
+ , ixFun $ \p -> [ _pic p ]
+ , ixFun $ \p -> [ _twitter p ]
+ , ixFun $ \p -> [ _website p ]
+ , ixFun $ \p -> [ _books p ]
+ , ixFun $ \p -> [ PersonBlurb $ _blurb p ]
+ ]
+
+-- | updates the `IbbKeep` with a new `Person`
+newPerson :: Text -> Text -> Update IbbKeep Person
+newPerson name blurb = do
+ k <- get
+ put $ k { _people = IxSet.insert p (_people k)
+ }
+ return p
+ where
+ p = Person
+ { _name = name
+ , _pic = Text.empty
+ , _twitter = Text.empty
+ , _website = Text.empty
+ , _books = []
+ , _blurb = blurb
+ }
+
+getPeople :: Int -> Acid.Query IbbKeep [Person]
+getPeople n = do
+ keep <- ask
+ return $ take n $ IxSet.toList $ _people keep
+
+-- * Index @Book@
+
+$(deriveSafeCopy 0 'base ''Book)
+
+newtype BookTitle =
+ BookTitle Text deriving (Eq, Ord, Data, Typeable)
+
+newtype BookAuthor =
+ BookAuthor Text deriving (Eq, Ord, Data, Typeable)
+
+instance Indexable Book where
+ empty = ixSet
+ [ ixFun $ \b -> [ BookTitle $ _title b ]
+ , ixFun $ \b -> [ BookAuthor $ _author b ]
+ , ixFun $ \b -> [ _amznref b ]
+ ]
+
+-- | updates the `IbbKeep` with a new `Book`
+--newBook :: Text -> Text -> Text -> Update IbbKeep Book
+--newBook title author amznref = do
+-- ibbKeep <- get
+-- put $ ibbKeep { _books = IxSet.insert b (_books ibbKeep)
+-- , _people = _people ibbKeep
+-- }
+-- return b
+-- where
+-- b = Book { _title = title
+-- , _author = author
+-- , _amznref = amznref
+-- }
+
+-- * Opening the keep
+
+-- defines @NewPerson@ for us.
+$(makeAcidic ''IbbKeep ['newPerson, 'getPeople])
+
+initialIbbKeep :: IbbKeep
+initialIbbKeep = IbbKeep
+ { _people = IxSet.fromList Influencers.allPeople
+ }
+
+openLocal :: String -> IO (Acid.AcidState IbbKeep)
+openLocal dir =
+ Acid.openLocalStateFrom dir initialIbbKeep
diff --git a/Biz/Ibb/Look.hs b/Biz/Ibb/Look.hs
new file mode 100644
index 0000000..5f7ca6b
--- /dev/null
+++ b/Biz/Ibb/Look.hs
@@ -0,0 +1,40 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- | The look and feel of Ibb
+module Biz.Ibb.Look where
+
+import Alpha hiding ( Selector )
+import Clay
+import qualified Clay.Flexbox as Flexbox
+import qualified Clay.Media as Media
+import qualified Clay.Render as Clay
+import qualified Clay.Stylesheet as Stylesheet
+
+main :: Css
+main = do
+ "html" <> "body" ? do
+ width (pct 100)
+ display flex
+ flexDirection column
+ alignItems center
+ alignContent center
+ justifyContent center
+ ".container" ? do
+ maxWidth (px 900)
+ display flex
+ justifyContent center
+ flexDirection column
+ fontFamily ["GillSans", "Calibri", "Trebuchet"] [sansSerif]
+ headings ? do
+ fontFamily
+ [ "Palatino"
+ , "Palatino Linotype"
+ , "Hoefler Text"
+ , "Times New Roman"
+ , "Times"
+ ]
+ [serif]
+
+headings :: Selector
+headings = h1 <> h2 <> h3 <> h4 <> h5 <> h6
diff --git a/Biz/Ibb/Move.hs b/Biz/Ibb/Move.hs
new file mode 100644
index 0000000..1e635ac
--- /dev/null
+++ b/Biz/Ibb/Move.hs
@@ -0,0 +1,48 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+-- | App update logic
+module Biz.Ibb.Move
+ ( move
+ -- * Server interactions
+ , fetchPeople
+ )
+where
+
+import Alpha
+import Biz.Ibb.Core as Core
+import Network.RemoteData
+import Data.Aeson
+import JavaScript.Web.XMLHttpRequest ( Request(..)
+ , Method(GET)
+ , RequestData(NoData)
+ , contents
+ , xhrByteString
+ )
+import Miso
+import Miso.String
+
+move :: Action -> Model -> Effect Action Model
+move Nop m = noEff m
+move (HandleRoute u) m = m { uri = u } <# pure Nop
+move (ChangeRoute u) m = m <# do
+ pushURI u >> pure Nop
+move FetchPeople m = m <# (SetPeople </ fetchPeople)
+move (SetPeople ps) m = noEff m { people = ps }
+
+fetchPeople :: IO (WebData [Core.Person])
+fetchPeople = do
+ mjson <- contents </ xhrByteString req
+ case mjson of
+ Nothing -> pure $ Failure "could not read from server"
+ Just a ->
+ pure $ fromEither $ either (Left . ms) pure $ eitherDecodeStrict a
+ where
+ req = Request { reqMethod = GET
+ -- FIXME: can replace this hardcoding with a function?
+ , reqURI = "/api/people"
+ , reqLogin = Nothing
+ , reqHeaders = []
+ , reqWithCredentials = False
+ , reqData = NoData
+ }
diff --git a/Biz/Ibb/Server.hs b/Biz/Ibb/Server.hs
new file mode 100644
index 0000000..9f1ac5f
--- /dev/null
+++ b/Biz/Ibb/Server.hs
@@ -0,0 +1,152 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeApplications #-}
+{-# LANGUAGE TypeOperators #-}
+
+-- | Server
+--
+-- : exe ibb
+--
+-- : dep clay
+-- : dep miso
+-- : dep protolude
+-- : dep servant
+-- : dep text
+-- : dep MonadRandom
+-- : dep acid-state
+-- : dep bytestring
+-- : dep ixset
+-- : dep random
+-- : dep safecopy
+-- : dep scotty
+-- : dep servant-server
+-- : dep text
+module Biz.Ibb.Server where
+
+import Alpha
+import qualified Clay
+import Biz.Ibb.Core
+import qualified Biz.Ibb.Keep as Keep
+import qualified Biz.Ibb.Look as Look
+import Network.RemoteData
+import Data.Acid ( AcidState )
+import qualified Data.Acid.Abstract as Acid
+import Data.Maybe ( fromMaybe )
+import qualified Data.Text.Lazy as Lazy
+import qualified Data.Text.Lazy.Encoding as Lazy
+import qualified Lucid as L
+import Lucid.Base
+import Miso
+import Network.HTTP.Media ( (//)
+ , (/:)
+ )
+import Network.HTTP.Types
+import Network.Wai
+import Network.Wai.Application.Static
+import Network.Wai.Handler.Warp
+import Network.Wai.Middleware.Gzip
+import Network.Wai.Middleware.RequestLogger
+import Servant
+import System.Environment ( lookupEnv )
+
+main :: IO ()
+main = do
+ say "rise: ibb"
+ staticDir <- fromMaybe "static" <$> lookupEnv "STATIC_DIR" :: IO [Char]
+ port <- read <$> fromMaybe "3000" <$> lookupEnv "PORT" :: IO Int
+ keep <- Keep.openLocal "_keep/"
+ say "port: 3000"
+ run port $ logStdout $ compress $ app staticDir $ keep
+ where compress = gzip def { gzipFiles = GzipCompress }
+
+newtype HtmlPage a = HtmlPage a
+ deriving (Show, Eq)
+
+instance L.ToHtml a => L.ToHtml (HtmlPage a) where
+ toHtmlRaw = L.toHtml
+ toHtml (HtmlPage x) = L.doctypehtml_ $ do
+ L.head_ $ do
+ L.meta_ [L.charset_ "utf-8"]
+ jsRef "/static/ibb.js"
+ cssRef "/css/main.css"
+ L.body_ $ do
+ page
+ where
+ page = L.toHtml x
+ jsRef href = L.with
+ (L.script_ mempty)
+ [ makeAttribute "src" href
+ , makeAttribute "type" "text/javascript"
+ , makeAttribute "async" mempty
+ , makeAttribute "defer" mempty
+ ]
+ cssRef href = L.with
+ (L.link_ mempty)
+ [L.rel_ "stylesheet", L.type_ "text/css", L.href_ href]
+
+type ServerRoutes = ToServerRoutes AppRoutes HtmlPage Action
+
+handle404 :: Application
+handle404 _ respond =
+ respond
+ $ responseLBS status404 [("Content-Type", "text/html")]
+ $ renderBS
+ $ toHtml
+ $ HtmlPage
+ $ notfound
+
+newtype CSS = CSS
+ { unCSS :: Text
+ }
+
+instance MimeRender CSS Text where
+ mimeRender _ = Lazy.encodeUtf8 . Lazy.fromStrict
+
+instance Accept CSS where
+ contentType _ = "text" // "css" /: ("charset", "utf-8")
+
+type CssRoute = "css" :> "main.css" :> Get '[CSS] Text
+
+type Routes
+ = "static"
+ :>
+ Raw
+ :<|>
+ CssRoute
+ :<|>
+ ServerRoutes
+ :<|>
+ "api"
+ :>
+ ApiRoutes
+ :<|>
+ Raw
+
+cssHandlers :: Server CssRoute
+cssHandlers = return . Lazy.toStrict . Clay.render $ Look.main
+
+app :: [Char] -> AcidState Keep.IbbKeep -> Application
+app staticDir keep =
+ serve (Proxy @Routes)
+ $ static
+ :<|> cssHandlers
+ :<|> serverHandlers
+ :<|> apiHandlers keep
+ :<|> Tagged handle404
+ where static = serveDirectoryWith (defaultWebAppSettings $ staticDir)
+
+type ApiRoutes = "people" :> Get '[JSON] [Person]
+
+serverHandlers :: Server ServerRoutes
+serverHandlers = homeHandler
+ where
+ send f u = pure $ HtmlPage $ f Model { uri = u, people = NotAsked }
+ homeHandler = send home goHome
+
+-- | for now we just have one api endpoint, which returns all the people
+apiHandlers :: AcidState Keep.IbbKeep -> Server ApiRoutes
+apiHandlers keep = Acid.query' keep $ Keep.GetPeople 20
diff --git a/Biz/Ibb/service.nix b/Biz/Ibb/service.nix
new file mode 100644
index 0000000..f9d0f36
--- /dev/null
+++ b/Biz/Ibb/service.nix
@@ -0,0 +1,42 @@
+{ options
+, lib
+, config
+, pkgs
+, modulesPath
+}:
+
+let
+ cfg = config.services.ibb;
+in
+{
+ options.services.ibb = {
+ enable = lib.mkEnableOption "Enable the IBB service";
+ port = lib.mkOption {
+ type = lib.types.string;
+ default = "3000";
+ description = ''
+ The port on which IBB will listen for
+ incoming HTTP traffic.
+ '';
+ };
+ };
+ config = lib.mkIf cfg.enable {
+ systemd.services.ibb = {
+ path = with pkgs; [ ibb bash ];
+ wantedBy = [ "multi-user.target" ];
+ script = ''
+ PORT=${cfg.port} ./bin/ibb
+ '';
+ description = ''
+ Influenced By Books website
+ '';
+ serviceConfig = {
+ WorkingDirectory = pkgs.ibb;
+ KillSignal = "INT";
+ Type = "simple";
+ Restart = "on-abort";
+ RestartSec = "10";
+ };
+ };
+ };
+}
diff --git a/Biz/Language/Bs.hs b/Biz/Language/Bs.hs
new file mode 100644
index 0000000..a810706
--- /dev/null
+++ b/Biz/Language/Bs.hs
@@ -0,0 +1,12 @@
+-- https://github.com/write-you-a-scheme-v2/scheme
+-- https://github.com/justinethier/husk-scheme
+module Language.Bs
+ ( module X
+ ) where
+
+import Language.Bs.Cli as X
+import Language.Bs.Eval as X
+import Language.Bs.Expr as X
+import Language.Bs.Parser as X
+import Language.Bs.Primitives as X
+import Language.Bs.Repl as X
diff --git a/Biz/Language/Bs/Cli.hs b/Biz/Language/Bs/Cli.hs
new file mode 100644
index 0000000..4c48c86
--- /dev/null
+++ b/Biz/Language/Bs/Cli.hs
@@ -0,0 +1,52 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+module Language.Bs.Cli (
+ run
+) where
+
+import Data.String
+import Data.Text.IO as TIO
+import Language.Bs.Eval -- evalFile :: T.Text -> IO ()
+import Language.Bs.Repl -- Repl.mainLoop :: IO ()
+import Options.Applicative
+import Protolude
+import System.Directory
+
+-- SOURCES
+--http://book.realworldhaskell.org/read/io.html
+-- https://github.com/pcapriotti/optparse-applicative
+-- https://hackage.haskell.org/package/optparse-applicative
+
+runScript :: FilePath -> IO ()
+runScript fname = do
+ exists <- doesFileExist fname
+ if exists
+ then TIO.readFile fname >>= evalFile fname
+ else TIO.putStrLn "File does not exist."
+
+data LineOpts = UseReplLineOpts | RunScriptLineOpts String
+
+parseLineOpts :: Parser LineOpts
+parseLineOpts = runScriptOpt <|> runReplOpt
+ where
+ runScriptOpt =
+ RunScriptLineOpts <$> strOption (long "script"
+ <> short 's'
+ <> metavar "SCRIPT"
+ <> help "File containing the script you want to run")
+ runReplOpt =
+ UseReplLineOpts <$ flag' () (long "repl"
+ <> short 'r'
+                                 <> help "Run as interactive read/evaluate/print/loop")
+
+schemeEntryPoint :: LineOpts -> IO ()
+schemeEntryPoint UseReplLineOpts = mainLoop --repl
+schemeEntryPoint (RunScriptLineOpts script) = runScript script
+
+run :: IO ()
+run = execParser opts >>= schemeEntryPoint
+ where
+ opts = info (helper <*> parseLineOpts)
+ ( fullDesc
+ <> header "Executable binary for Write You A Scheme v2.0"
+ <> progDesc "contains an entry point for both running scripts and repl" )
diff --git a/Biz/Language/Bs/Eval.hs b/Biz/Language/Bs/Eval.hs
new file mode 100644
index 0000000..290170b
--- /dev/null
+++ b/Biz/Language/Bs/Eval.hs
@@ -0,0 +1,241 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+module Language.Bs.Eval (
+ evalText
+, evalFile
+, runParseTest
+, safeExec
+, runASTinEnv
+, basicEnv
+, fileToEvalForm
+, textToEvalForm
+, getFileContents
+) where
+
+import Control.Exception
+import Control.Monad.Reader
+import qualified Data.Map as Map
+import Data.String
+import qualified Data.Text as T
+import qualified Data.Text.IO as TIO
+import Language.Bs.Expr
+import Language.Bs.Parser
+import Language.Bs.Primitives
+import Protolude
+import System.Directory
+
+funcEnv :: Map.Map T.Text Expr
+funcEnv = Map.fromList $ primEnv
+ <> [ ("read" , IFun $ IFunc $ unop readFn)
+ , ("parse", IFun $ IFunc $ unop parseFn)
+ , ("eval", IFun $ IFunc $ unop eval)
+ , ("show", IFun $ IFunc $ unop (return . Tape . ppexpr))
+ ]
+
+basicEnv :: Env
+basicEnv = Env Map.empty funcEnv
+
+readFn :: Expr -> Eval Expr
+readFn (Tape txt) = lineToEvalForm txt
+readFn val = throw $ TypeMismatch "read expects string, instead got:" val
+
+parseFn :: Expr -> Eval Expr
+parseFn (Tape txt) = either (throw . ParseError . show) return $ readExpr txt
+parseFn val = throw $ TypeMismatch "parse expects string, instead got:" val
+
+safeExec :: IO a -> IO (Either String a)
+safeExec m = do
+ result <- Control.Exception.try m
+ case result of
+ Left (eTop :: SomeException) ->
+ case fromException eTop of
+ Just (enclosed :: LispError) ->
+ return $ Left (show enclosed)
+ Nothing ->
+ return $ Left (show eTop)
+ Right val ->
+ return $ Right val
+
+runASTinEnv :: Env -> Eval b -> IO b
+runASTinEnv code action = runReaderT (unEval action) code
+
+lineToEvalForm :: T.Text -> Eval Expr
+lineToEvalForm input = either (throw . ParseError . show ) eval $ readExpr input
+
+evalFile :: FilePath -> T.Text -> IO () -- program file
+evalFile filePath fileExpr = (runASTinEnv basicEnv $ fileToEvalForm filePath fileExpr) >>= print
+
+fileToEvalForm :: FilePath -> T.Text -> Eval Expr
+fileToEvalForm filePath input = either (throw . ParseError . show ) evalBody $ readExprFile filePath input
+
+runParseTest :: T.Text -> T.Text -- for view AST
+runParseTest input = either (T.pack . show) (T.pack . show) $ readExpr input
+
+getFileContents :: FilePath -> IO T.Text
+getFileContents fname = do
+ exists <- doesFileExist fname
+ if exists then TIO.readFile fname else return "File does not exist."
+
+textToEvalForm :: T.Text -> Eval Expr
+textToEvalForm input = either (throw . ParseError . show ) evalBody $ readExpr input
+
+evalText :: T.Text -> IO () --REPL
+evalText textExpr = do
+ res <- runASTinEnv basicEnv $ textToEvalForm textExpr
+ print res
+
+getVar :: Expr -> Eval Expr
+getVar (Atom atom) = do
+ Env{..} <- ask
+ case Map.lookup atom (Map.union fenv env) of -- lookup, but prefer functions
+ Just x -> return x
+ Nothing -> throw $ UnboundVar atom
+getVar n = throw $ TypeMismatch "failure to get variable: " n
+
+ensureAtom :: Expr -> Eval Expr
+ensureAtom n@(Atom _) = return n
+ensureAtom n@(List _) = throw $ TypeMismatch "got list" n
+ensureAtom n = throw $ TypeMismatch "expected an atomic value" n
+
+extractVar :: Expr -> T.Text
+extractVar (Atom atom) = atom
+extractVar n = throw $ TypeMismatch "expected an atomic value" n
+
+getEven :: [t] -> [t]
+getEven [] = []
+getEven (x:xs) = x : getOdd xs
+
+getOdd :: [t] -> [t]
+getOdd [] = []
+getOdd (_:xs) = getEven xs
+
+applyFunc :: Expr -> [Expr] -> [Expr] -> Eval Expr
+applyFunc expr params args = bindArgsEval params args expr
+
+bindArgsEval :: [Expr] -> [Expr] -> Expr -> Eval Expr
+bindArgsEval params args expr = do
+ Env{..} <- ask
+ let newVars = zipWith (\a b -> (extractVar a,b)) params args
+ let (newEnv, newFenv) = Map.partition (not . isFunc) $ Map.fromList newVars
+ local (const $ Env (newEnv <> env) (newFenv <> fenv)) $ eval expr
+
+isFunc :: Expr -> Bool
+isFunc (List ((Atom "lambda"):_)) = True
+isFunc _ = False
+
+eval :: Expr -> Eval Expr
+eval (List [Atom "dumpEnv", x]) = do
+ Env{..} <- ask
+ liftIO $ print $ toList env
+ liftIO $ print $ toList fenv
+ eval x
+
+eval (Numb i) = return $ Numb i
+eval (Tape s) = return $ Tape s
+eval (Bool b) = return $ Bool b
+eval (List []) = return Nil
+eval Nil = return Nil
+eval n@(Atom _) = getVar n
+
+eval (List [Atom "showSF", rest]) = return . Tape . T.pack $ show rest
+eval (List ((:) (Atom "showSF") rest)) = return . Tape . T.pack . show $ List rest
+
+eval (List [Atom "quote", val]) = return val
+
+eval (List [Atom "if", pred_, then_, else_]) = do
+ ifRes <- eval pred_
+ case ifRes of
+ (Bool True) -> eval then_
+ (Bool False) -> eval else_
+ _ ->
+ throw $ BadSpecialForm "if's first arg must eval into a boolean"
+eval (List ( (:) (Atom "if") _)) =
+ throw $ BadSpecialForm "(if <bool> <s-expr> <s-expr>)"
+
+eval (List [Atom "begin", rest]) = evalBody rest
+eval (List ((:) (Atom "begin") rest )) = evalBody $ List rest
+
+-- top-level define
+-- TODO: how to make this eval correctly?
+eval (List [Atom "define", List (name:args), body]) = do
+ Env{..} <- ask
+ _ <- eval body
+ bindArgsEval (name:args) [body] name
+
+eval (List [Atom "define", name, body]) = do
+ Env{..} <- ask
+ _ <- eval body
+ bindArgsEval [name] [body] name
+
+eval (List [Atom "let", List pairs, expr]) = do
+ Env{..} <- ask
+ atoms <- mapM ensureAtom $ getEven pairs
+ vals <- mapM eval $ getOdd pairs
+ bindArgsEval atoms vals expr
+
+eval (List (Atom "let":_) ) =
+ throw $ BadSpecialForm "let function expects list of parameters and S-Expression body\n(let <pairs> <s-expr>)"
+
+
+eval (List [Atom "lambda", List params, expr]) = do
+ ctx <- ask
+ return $ Func (IFunc $ applyFunc expr params) ctx
+eval (List (Atom "lambda":_) ) = throw $ BadSpecialForm "lambda function expects list of parameters and S-Expression body\n(lambda <params> <s-expr>)"
+
+
+-- needed to get cadr, etc to work
+eval (List [Atom "cdr", List [Atom "quote", List (_:xs)]]) =
+ return $ List xs
+eval (List [Atom "cdr", arg@(List (x:xs))]) =
+ case x of
+ -- proxy for if the list can be evaluated
+ Atom _ -> do
+ val <- eval arg
+ eval $ List [Atom "cdr", val]
+ _ -> return $ List xs
+
+
+eval (List [Atom "car", List [Atom "quote", List (x:_)]]) =
+ return $ x
+eval (List [Atom "car", arg@(List (x:_))]) =
+ case x of
+ Atom _ -> do
+ val <- eval arg
+ eval $ List [Atom "car", val]
+ _ -> return $ x
+
+
+eval (List ((:) x xs)) = do
+ Env{..} <- ask
+ funVar <- eval x
+ xVal <- mapM eval xs
+ case funVar of
+ (IFun (IFunc internalFn)) ->
+ internalFn xVal
+
+ (Func (IFunc definedFn) (Env benv _)) ->
+ local (const $ Env benv fenv) $ definedFn xVal
+
+ _ ->
+ throw $ NotFunction funVar
+
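+-- updateEnv stores function values (primitives and lambdas) in the function
+-- environment 'fenv' and everything else in the variable environment 'env'.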
+updateEnv :: T.Text -> Expr -> Env -> Env
+updateEnv var e@(IFun _) Env{..} = Env env $ Map.insert var e fenv
+updateEnv var e@(Func _ _) Env{..} = Env env $ Map.insert var e fenv
+updateEnv var e Env{..} = Env (Map.insert var e env) fenv
+
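+-- evalBody threads top-level 'define's through the environment so the
+-- remaining expressions in the body can see the new bindings.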
+evalBody :: Expr -> Eval Expr
+evalBody (List [List ((:) (Atom "define") [Atom var, defExpr]), rest]) = do
+ evalVal <- eval defExpr
+ ctx <- ask
+ local (const $ updateEnv var evalVal ctx) $ eval rest
+
+evalBody (List ((:) (List ((:) (Atom "define") [Atom var, defExpr])) rest)) = do
+ evalVal <- eval defExpr
+ ctx <- ask
+ local (const $ updateEnv var evalVal ctx) $ evalBody $ List rest
+
+evalBody x = eval x
diff --git a/Biz/Language/Bs/Expr.hs b/Biz/Language/Bs/Expr.hs
new file mode 100644
index 0000000..a39c7b6
--- /dev/null
+++ b/Biz/Language/Bs/Expr.hs
@@ -0,0 +1,154 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE DeriveDataTypeable #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+module Language.Bs.Expr where
+
+import Data.String (String)
+import Data.Text (Text)
+import qualified Data.Text as T
+import GHC.Show
+import Protolude hiding (show)
+import qualified Text.PrettyPrint.Leijen.Text as PP
+import Text.PrettyPrint.Leijen.Text hiding ((<$>))
+
+type Ctx = Map Text Expr
+data Env = Env { env :: Ctx, fenv :: Ctx }
+ deriving (Eq)
+
+newtype Eval a = Eval { unEval :: ReaderT Env IO a }
+ deriving (Monad, Functor, Applicative, MonadReader Env, MonadIO)
+
+data IFunc = IFunc { fn :: [Expr] -> Eval Expr }
+ deriving (Typeable)
+
+instance Eq IFunc where
+ (==) _ _ = False
+
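+-- | The core syntax tree. 'Tape' holds string literals, 'IFun' wraps
+-- primitive (internal) functions, and 'Func' pairs a user-defined lambda
+-- with the environment it closed over.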
+data Expr
+ = Atom Text
+ | List [Expr]
+ | Numb Integer
+ | Tape Text
+ | IFun IFunc -- TODO: call this Kern
+ | Func IFunc Env
+ | Bool Bool
+ | Nil
+ deriving (Typeable, Eq)
+
+instance Show Expr where
+ show = T.unpack . ppexpr
+
+data LispErrorType
+ = NumArgs Integer [Expr]
+ | LengthOfList Text Int
+ | ExpectedList Text
+ | ParseError String
+ | TypeMismatch Text Expr
+ | BadSpecialForm Text
+ | NotFunction Expr
+ | UnboundVar Text
+ | Default Expr
+ | ReadFileError Text
+ deriving (Typeable)
+
+data LispError = LispError Expr LispErrorType
+
+instance Show LispErrorType where
+ show = T.unpack . ppexpr
+
+instance Show LispError where
+ show = T.unpack . ppexpr
+
+instance Exception LispErrorType
+instance Exception LispError
+
+ppexpr :: Pretty a => a -> Text
+ppexpr x = PP.displayTStrict (PP.renderPretty 1.0 70 (pretty x))
+
+--prettyList :: [Doc] -> Doc
+--prettyList = encloseSep lparen rparen PP.space
+
+instance Pretty Expr where
+ pretty v =
+ case v of
+ Atom a ->
+ textStrict a
+
+ List ls ->
+ prettyList $ fmap pretty ls
+
+ Numb n ->
+ integer n
+
+ Tape t ->
+ textStrict "\"" <> textStrict t <> textStrict "\""
+
+ IFun _ ->
+ textStrict "<internal function>"
+
+ Func _ _ ->
+ textStrict "<lambda function>"
+
+ Bool True ->
+ textStrict "#t"
+
+ Bool False ->
+ textStrict "#f"
+
+ Nil ->
+ textStrict "'()"
+
+instance Pretty LispErrorType where
+ pretty err = case err of
+ NumArgs i args ->
+ textStrict "number of arguments"
+ <$$> textStrict "expected"
+ <+> textStrict (T.pack $ show i)
+ <$$> textStrict "received"
+ <+> textStrict (T.pack $ show $ length args)
+
+
+ LengthOfList txt i ->
+ textStrict "length of list in:"
+ <+> textStrict txt
+ <$$> textStrict "length:"
+ <+> textStrict (T.pack $ show i)
+
+ ParseError txt ->
+ textStrict "cannot parse expr:"
+ <+> textStrict (T.pack txt)
+
+ TypeMismatch txt expr ->
+ textStrict "type mismatch:"
+ <$$> textStrict txt
+ <$$> pretty expr
+
+ BadSpecialForm txt ->
+ textStrict "bad special form:"
+ <$$> textStrict txt
+
+ NotFunction expr ->
+ textStrict "not a function"
+ <$$> pretty expr
+
+ UnboundVar txt ->
+ textStrict "unbound variable:"
+ <$$> textStrict txt
+
+ Default _ ->
+ textStrict "default error"
+
+ ReadFileError txt ->
+ textStrict "error reading file:"
+ <$$> textStrict txt
+
+ ExpectedList txt ->
+ textStrict "expected list:"
+ <$$> textStrict txt
+
+instance Pretty LispError where
+ pretty (LispError expr typ) =
+ textStrict "error evaluating:"
+ <$$> indent 4 (pretty expr)
+ <$$> pretty typ
diff --git a/Biz/Language/Bs/Parser.hs b/Biz/Language/Bs/Parser.hs
new file mode 100644
index 0000000..3044a60
--- /dev/null
+++ b/Biz/Language/Bs/Parser.hs
@@ -0,0 +1,121 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+module Language.Bs.Parser (
+ readExpr
+, readExprFile
+) where
+
+import Control.Monad (fail)
+import Control.Monad (mzero)
+import Data.Char (digitToInt)
+import Data.Functor.Identity (Identity)
+import Data.String
+import qualified Data.Text as T
+import Language.Bs.Expr
+import Protolude hiding ((<|>), try)
+import Text.Parsec
+import qualified Text.Parsec.Language as Lang
+import Text.Parsec.Text
+import qualified Text.Parsec.Token as Tok
+
+lexer :: Tok.GenTokenParser T.Text () Identity
+lexer = Tok.makeTokenParser style
+
+style :: Tok.GenLanguageDef T.Text () Identity
+style = Lang.emptyDef {
+ Tok.commentStart = "#|"
+ , Tok.commentEnd = "|#"
+ , Tok.commentLine = ";"
+ , Tok.opStart = mzero
+ , Tok.opLetter = mzero
+ , Tok.identStart = letter <|> oneOf "!$%&*/:<=>?^_~"
+ , Tok.identLetter = digit <|> letter <|> oneOf "!$%&*/:<=>?^_~+-.@"
+ }
+
+parens :: Parser a -> Parser a
+parens = Tok.parens lexer
+
+whitespace :: Parser ()
+whitespace = Tok.whiteSpace lexer
+
+lexeme :: Parser a -> Parser a
+lexeme = Tok.lexeme lexer
+
+quoted :: Parser a -> Parser a
+quoted p = try (char '\'') *> p
+
+identifier :: Parser T.Text
+identifier = T.pack <$> (Tok.identifier lexer <|> specialIdentifier) <?> "identifier"
+ where
+ specialIdentifier :: Parser String
+ specialIdentifier = lexeme $ try $
+ string "-" <|> string "+" <|> string "..."
+
+-- | The @Radix@ type consists of a base integer (e.g. @10@) and a parser for
+-- digits in that base (e.g. @digit@).
+type Radix = (Integer, Parser Char)
+
+-- | Parse an integer, given a radix as output by @radix@.
+-- Copied from Text.Parsec.Token
+numberWithRadix :: Radix -> Parser Integer
+numberWithRadix (base, baseDigit) = do
+ digits <- many1 baseDigit
+ let n = foldl (\x d -> base*x + toInteger (digitToInt d)) 0 digits
+ seq n (return n)
+
+decimal :: Parser Integer
+decimal = Tok.decimal lexer
+
+-- | Parse a sign, return either @id@ or @negate@ based on the sign parsed.
+-- Copied from Text.Parsec.Token
+sign :: Parser (Integer -> Integer)
+sign = char '-' *> return negate
+ <|> char '+' *> return identity
+ <|> return identity
+
+intRadix :: Radix -> Parser Integer
+intRadix r = sign <*> numberWithRadix r
+
+textLiteral :: Parser T.Text
+textLiteral = T.pack <$> Tok.stringLiteral lexer
+
+nil :: Parser ()
+nil = try ((char '\'') *> string "()") *> return () <?> "nil"
+
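+-- | Parse hash literals: #t, #f, and radix-prefixed numbers such as #b1010,
+-- #o17, #d42, or #xff. Exactness, vectors, and chars are not supported.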
+hashVal :: Parser Expr
+hashVal = lexeme $ char '#'
+ *> (char 't' *> return (Bool True)
+ <|> char 'f' *> return (Bool False)
+ <|> char 'b' *> (Numb <$> intRadix (2, oneOf "01"))
+ <|> char 'o' *> (Numb <$> intRadix (8, octDigit))
+ <|> char 'd' *> (Numb <$> intRadix (10, digit))
+ <|> char 'x' *> (Numb <$> intRadix (16, hexDigit))
+ <|> oneOf "ei" *> fail "Unsupported: exactness"
+ <|> char '(' *> fail "Unsupported: vector"
+ <|> char '\\' *> fail "Unsupported: char")
+
+
+lispVal :: Parser Expr
+lispVal = hashVal
+ <|> Nil <$ nil
+ <|> Numb <$> try (sign <*> decimal)
+ <|> Atom <$> identifier
+ <|> Tape <$> textLiteral
+ <|> _Quote <$> quoted lispVal
+ <|> List <$> parens manyExpr
+
+manyExpr :: Parser [Expr]
+manyExpr = lispVal `sepBy` whitespace
+
+_Quote :: Expr -> Expr
+_Quote x = List [Atom "quote", x]
+
+contents :: Parser a -> ParsecT T.Text () Identity a
+contents p = whitespace *> lexeme p <* eof
+
+readExpr :: T.Text -> Either ParseError Expr
+readExpr = parse (contents lispVal) "<stdin>"
+
+readExprFile :: SourceName -> T.Text -> Either ParseError Expr
+readExprFile = parse (contents (List <$> manyExpr))
diff --git a/Biz/Language/Bs/Primitives.hs b/Biz/Language/Bs/Primitives.hs
new file mode 100644
index 0000000..c074c59
--- /dev/null
+++ b/Biz/Language/Bs/Primitives.hs
@@ -0,0 +1,183 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+-- | bs primitives
+--
+-- I would like to reduce the number of primitives in the language to some
+-- minimal set, like the SKI combinators or Nock instructions. I'm not sure
+-- what that minimal set is. The idea is to move primitives from here into
+-- core.scm over time.
+module Language.Bs.Primitives where
+
+import Control.Exception
+import Control.Monad.Except
+import Data.Text as T
+import Data.Text.IO as TIO
+import Language.Bs.Expr
+import Network.HTTP
+import Protolude
+import System.Directory
+import System.IO
+
+type Prim = [(T.Text, Expr)]
+type Unary = Expr -> Eval Expr
+type Binary = Expr -> Expr -> Eval Expr
+
+mkF :: ([Expr] -> Eval Expr) -> Expr
+mkF = IFun . IFunc
+
+primEnv :: Prim
+primEnv = [
+ ("+" , mkF $ binopFold (numOp (+)) (Numb 0) )
+ , ("*" , mkF $ binopFold (numOp (*)) (Numb 1) )
+ , ("string-append", mkF $ binopFold (strOp (<>)) (Tape "") )
+ , ("-" , mkF $ binop $ numOp (-))
+ , ("<" , mkF $ binop $ numCmp (<))
+ , ("<=" , mkF $ binop $ numCmp (<=))
+ , (">" , mkF $ binop $ numCmp (>))
+ , (">=" , mkF $ binop $ numCmp (>=))
+ , ("==" , mkF $ binop $ numCmp (==))
+ , ("even?" , mkF $ unop $ numBool even)
+ , ("odd?" , mkF $ unop $ numBool odd)
+ , ("neg?" , mkF $ unop $ numBool (< 0))
+ , ("pos?" , mkF $ unop $ numBool (> 0))
+ , ("eq?" , mkF $ binop eqCmd )
+ , ("null?" , mkF $ unop (eqCmd Nil) )
+ , ("bl-eq?" , mkF $ binop $ eqOp (==))
+ , ("and" , mkF $ binopFold (eqOp (&&)) (Bool True))
+ , ("or" , mkF $ binopFold (eqOp (||)) (Bool False))
+ , ("not" , mkF $ unop $ notOp)
+ , ("cons" , mkF $ Language.Bs.Primitives.cons)
+ , ("cdr" , mkF $ Language.Bs.Primitives.cdr)
+ , ("car" , mkF $ Language.Bs.Primitives.car)
+ , ("quote" , mkF $ quote)
+ , ("file?" , mkF $ unop fileExists)
+ , ("slurp" , mkF $ unop slurp)
+ , ("wslurp" , mkF $ unop wSlurp)
+ , ("put" , mkF $ binop put_)
+ ]
+
+unop :: Unary -> [Expr] -> Eval Expr
+unop op [x] = op x
+unop _ args = throw $ NumArgs 1 args
+
+binop :: Binary -> [Expr] -> Eval Expr
+binop op [x,y] = op x y
+binop _ args = throw $ NumArgs 2 args
+
+fileExists :: Expr -> Eval Expr
+fileExists (Tape txt) = Bool <$> liftIO (doesFileExist $ T.unpack txt)
+fileExists val = throw $ TypeMismatch "read expects string, instead got: " val
+
+slurp :: Expr -> Eval Expr
+slurp (Tape txt) = liftIO $ wFileSlurp txt
+slurp val = throw $ TypeMismatch "read expects string, instead got: " val
+
+wFileSlurp :: T.Text -> IO Expr
+wFileSlurp fileName = withFile (T.unpack fileName) ReadMode go
+ where go = readTextFile fileName
+
+openURL :: T.Text -> IO Expr
+openURL x = do
+ req <- simpleHTTP (getRequest $ T.unpack x)
+ body <- getResponseBody req
+ return $ Tape $ T.pack body
+
+wSlurp :: Expr -> Eval Expr
+wSlurp (Tape txt) = liftIO $ openURL txt
+wSlurp val = throw $ TypeMismatch "wSlurp expects a string, instead got: " val
+
+readTextFile :: T.Text -> Handle -> IO Expr
+readTextFile fileName h = do
+ exists <- doesFileExist $ T.unpack fileName
+ if exists
+ then (TIO.hGetContents h) >>= (return . Tape)
+ else throw $ ReadFileError $ T.concat [" file does not exist: ", fileName]
+
+put_ :: Expr -> Expr -> Eval Expr
+put_ (Tape file) (Tape msg) = liftIO $ wFilePut file msg
+put_ (Tape _) val = throw $ TypeMismatch "put expects string in the second argument (try using show), instead got: " val
+put_ val _ = throw $ TypeMismatch "put expects string, instead got: " val
+
+wFilePut :: T.Text -> T.Text -> IO Expr
+wFilePut fileName msg = withFile (T.unpack fileName) WriteMode go
+ where go = putTextFile fileName msg
+
+putTextFile :: T.Text -> T.Text -> Handle -> IO Expr
+putTextFile fileName msg h = do
+ canWrite <- hIsWritable h
+ if canWrite
+ then (TIO.hPutStr h msg) >> (return $ Tape msg)
+ else throw $ ReadFileError $ T.concat [" file does not exist: ", fileName]
+
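+-- binopFold folds a binary op over the argument list, seeding the fold with
+-- the identity element 'farg' unless exactly two arguments are given.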
+binopFold :: Binary -> Expr -> [Expr] -> Eval Expr
+binopFold op farg args = case args of
+ []-> throw $ NumArgs 2 args
+ [a,b] -> op a b
+ _ -> foldM op farg args
+
+numBool :: (Integer -> Bool) -> Expr -> Eval Expr
+numBool op (Numb x) = return $ Bool $ op x
+numBool _ x = throw $ TypeMismatch "numeric op " x
+
+numOp :: (Integer -> Integer -> Integer) -> Expr -> Expr -> Eval Expr
+numOp op (Numb x) (Numb y) = return $ Numb $ op x y
+numOp _ Nil (Numb y) = return $ Numb y
+numOp _ (Numb x) Nil = return $ Numb x
+numOp _ x (Numb _) = throw $ TypeMismatch "numeric op" x
+numOp _ (Numb _) y = throw $ TypeMismatch "numeric op" y
+numOp _ x _ = throw $ TypeMismatch "numeric op" x
+
+strOp :: (T.Text -> T.Text -> T.Text) -> Expr -> Expr -> Eval Expr
+strOp op (Tape x) (Tape y) = return $ Tape $ op x y
+strOp _ Nil (Tape y) = return $ Tape y
+strOp _ (Tape x) Nil = return $ Tape x
+strOp _ x (Tape _) = throw $ TypeMismatch "string op" x
+strOp _ (Tape _) y = throw $ TypeMismatch "string op" y
+strOp _ x _ = throw $ TypeMismatch "string op" x
+
+eqOp :: (Bool -> Bool -> Bool) -> Expr -> Expr -> Eval Expr
+eqOp op (Bool x) (Bool y) = return $ Bool $ op x y
+eqOp _ x (Bool _) = throw $ TypeMismatch "bool op" x
+eqOp _ (Bool _) y = throw $ TypeMismatch "bool op" y
+eqOp _ x _ = throw $ TypeMismatch "bool op" x
+
+numCmp :: (Integer -> Integer -> Bool) -> Expr -> Expr -> Eval Expr
+numCmp op (Numb x) (Numb y) = return . Bool $ op x y
+numCmp _ x (Numb _) = throw $ TypeMismatch "numeric op" x
+numCmp _ (Numb _) y = throw $ TypeMismatch "numeric op" y
+numCmp _ x _ = throw $ TypeMismatch "numeric op" x
+
+notOp :: Expr -> Eval Expr
+notOp (Bool True) = return $ Bool False
+notOp (Bool False) = return $ Bool True
+notOp x = throw $ TypeMismatch " not expects Bool" x
+
+eqCmd :: Expr -> Expr -> Eval Expr
+eqCmd (Atom x) (Atom y) = return . Bool $ x == y
+eqCmd (Numb x) (Numb y) = return . Bool $ x == y
+eqCmd (Tape x) (Tape y) = return . Bool $ x == y
+eqCmd (Bool x) (Bool y) = return . Bool $ x == y
+eqCmd Nil Nil = return $ Bool True
+eqCmd _ _ = return $ Bool False
+
+cons :: [Expr] -> Eval Expr
+cons [x,(List ys)] = return $ List $ x:ys
+cons [x,y] = return $ List [x,y]
+cons _ = throw $ ExpectedList "cons, in second argument"
+
+car :: [Expr] -> Eval Expr
+car [List [] ] = return Nil
+car [List (x:_)] = return x
+car [] = return Nil
+car _ = throw $ ExpectedList "car"
+
+cdr :: [Expr] -> Eval Expr
+cdr [List (_:xs)] = return $ List xs
+cdr [List []] = return Nil
+cdr [] = return Nil
+cdr _ = throw $ ExpectedList "cdr"
+
+quote :: [Expr] -> Eval Expr
+quote [List xs] = return $ List $ Atom "quote" : xs
+quote [expr] = return $ List $ Atom "quote" : [expr]
+quote args = throw $ NumArgs 1 args
diff --git a/Biz/Language/Bs/Repl.hs b/Biz/Language/Bs/Repl.hs
new file mode 100644
index 0000000..64ffaa2
--- /dev/null
+++ b/Biz/Language/Bs/Repl.hs
@@ -0,0 +1,33 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+module Language.Bs.Repl (
+mainLoop
+) where
+
+import Control.Monad.Trans
+import Data.String
+import Data.Text as T
+import Language.Bs.Eval
+import Protolude
+import System.Console.Haskeline
+
+type Repl a = InputT IO a
+
+mainLoop :: IO ()
+mainLoop = runInputT defaultSettings repl
+
+repl :: Repl ()
+repl = do
+ minput <- getInputLine "bs> "
+ case minput of
+ Nothing -> outputStrLn "bye."
+ Just input -> (liftIO $ process input) >> repl
+ --Just input -> (liftIO $ processToAST input) >> repl
+
+process :: String -> IO ()
+process str = do
+ res <- safeExec $ evalText $ T.pack str
+ either putStrLn return res
+
+processToAST :: String -> IO ()
+processToAST str = print $ runParseTest $ T.pack str
diff --git a/Biz/Language/Bs/Test.hs b/Biz/Language/Bs/Test.hs
new file mode 100644
index 0000000..4a40036
--- /dev/null
+++ b/Biz/Language/Bs/Test.hs
@@ -0,0 +1,2 @@
+-- TODO
+module Language.Bs.Test where
diff --git a/Biz/Serval.scm b/Biz/Serval.scm
new file mode 100644
index 0000000..87cc238
--- /dev/null
+++ b/Biz/Serval.scm
@@ -0,0 +1,194 @@
+;;
+;; Serval - fast container management
+;;
+;; `Container management' simply refers to tracking the configuration
+;; for individual containers and their running state.
+;;
+;; Serval stores container configuration in a directory, which forms the
+;; database. Each container is associated with a `.kit' file, which is a
+;; serialized s-expr of a `@Kit' record type.
+;;
+;; Runtime state is offloaded to systemd, and certain commands simply
+;; reach out to `systemctl' and `machinectl' for this functionality.
+;;
+;; Serval does not concern itself with deployment. For that, use `nix copy'.
+;;
+;; Currently Serval only supports a single physical machine: if we want
+;; to cluster containers across machines, we must find a way to store
+;; and reason about the host in addition to the container. This might
+;; mean absorbing some functionality that systemd currently performs for
+;; us.
+;;
+;; FILES
+;;
+;; /var/lib/serval/<kit-name>.kit - kit state (serialized s-expr)
+;; /var/lib/serval/<kit-name>/ - root directory for the kit
+;; /nix/var/nix/profiles/per-kit/<kit-name> - symlink to cfg in /nix/store
+;;
+;; TODO
+;; - save-kit function (write kit to /var/lib/serval/<name>.kit)
+;; - profiles in /nix/var/nix/profiles/per-kit
+;; - each of the below commented functions for state manipulation
+;;
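+;; As an example, a serialized kit written to /var/lib/serval/web.kit would
+;; be an association list whose keys follow the `@Kit' record below. The
+;; concrete values here are only illustrative, not real hosts:
+;;
+;;   ((name . "web")
+;;    (nix-path . "/nix/store/...-web")
+;;    (system-path . "/nix/var/nix/profiles/per-kit/web")
+;;    (host-address . "159.89.128.69")
+;;    (host-port . "2222")
+;;    (local-address . "10.233.0.1")
+;;    (auto-start . #f))
+;;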
+(define-module (Biz Serval)
+ #:use-module ((ice-9 getopt-long))
+ #:use-module ((ice-9 match)
+ #:select (match))
+ #:use-module ((srfi srfi-9)
+ #:select (define-record-type))
+ #:use-module ((Alpha Core)
+ #:select (second rest fmt prn first comment nil))
+ #:use-module ((Alpha Test)
+ #:select (testing))
+ #:use-module ((Alpha Shell) #:prefix Shell.)
+ #:export (main))
+
+(define *data-dir* "/var/lib/serval")
+(define *nix-profiles-dir* "/nix/var/nix/profiles")
+
+;; TODO: I would really like a better command line parser...
+;; getopt-long sucks
+(define (main args)
+  ;; pop the first arg if it's the executable
+ (let* ([args (if (equal? (first args) "Biz/Serval.scm")
+ (rest args)
+ args)]
+ [cmd (first args)])
+ (match cmd
+ ["new" (new-kit! args)]
+ ["del" (del-kit! args)]
+ ["start" (start-kit! args)]
+ ["stop" (stop-kit! args)]
+ ["scale" (prn "TODO: scale running kits")]
+ ["ssh" (run-in-kit! args)]
+ ["info" (prn "TODO: show kit")]
+ ["ls" ("TODO: list available kits")]
+ [else (prn "help")])))
+
+(define-record-type @Kit
+ (Kit name nix-path system-path host-address
+ host-port local-address auto-start)
+ kit?
+ ;; a unique name for this kit
+ (name kit-name)
+ ;; location in the nix store
+ (nix-path get-nix-path set-nix-path!)
+ ;; this is like /etc/nixos/conf.nix in NixOS proper. At
+ ;; initialization, this is just `/var/lib/serval/$kit'. Afterwards,
+ ;; it's `/nix/var/nix/profiles/per-kit/$kit'.
+ (system-path get-system-path set-system-path!)
+ ;; host IP
+ (host-address get-host-address set-host-address!)
+ ;; host port
+ (host-port get-host-port set-host-port!)
+ ;; the private IP
+ (local-address get-local-address set-local-address!)
+ ;; should this kit start when the host starts?
+ (auto-start get-auto-start set-auto-start!))
+
+(define-syntax for
+ (syntax-rules ()
+ ((_ a b) (map b a))
+ ((_ a ... b) (map b a ...))))
+
+(define (zip a b)
+ "Combine a and b into a single list of pairs."
+ ;; TODO: zip-list, zip-with, in Core
+ (apply map cons (list a b)))
+
+(define (serialize kit)
+ "Turns a kit into an association list."
+ (let* ((fields (record-type-fields @Kit))
+ (values (for fields
+ (lambda (field) ((record-accessor @Kit field) kit)))))
+ (zip fields values)))
+
+(define (deserialize alist)
+ "Creates a @Kit from an association list."
+ (apply Kit (map rest alist)))
+
+(define (save-kit! kit)
+ (call-with-output-file (fmt "~a/~a.kit" *data-dir* (kit-name kit))
+ (lambda (a) (write (serialize kit) a))))
+
+(define (load-kit! kit-name)
+ (call-with-input-file (fmt "~a/~a.kit" *data-dir* kit-name)
+ (lambda (a) (deserialize (read a)))))
+
+;; TODO
+(define (find-available-address)
+ "10.233.0.1")
+
+;; top-level commands, each take an argstr
+
+(define (setup!)
+ "Initial setup, only need to run once."
+ (Shell.exec (fmt "mkdir -p ~a" *nix-profiles-dir*))
+ (Shell.exec (fmt "mkdir -p ~a" *data-dir*)))
+
+(define (new-kit! args)
+ "Creates a new kit:
+1. first arg is name
+2. second arg is nix-path
+3. rest args parsed by getopt-long
+
+TODO: ensure kit-name is unique
+"
+ (let* ([name (first args)]
+ [nix-path (second args)]
+ [option-spec '((auto-start (single-char #\a) (value #f)))]
+ [options (getopt-long args option-spec)]
+ [auto-start (option-ref options 'auto-start #f)]
+ [local-address (find-available-address)]
+ [kit (Kit name nix-path "fixme-system-path" "fixme-host-address"
+ "fixme-host-port" local-address auto-start)])
+ (save-kit! kit)
+ (prn ;; Shell.exec
+ (fmt "nix-env -p ~a/per-kit/system --set ~a"
+ *nix-profiles-dir* (get-system-path kit)))
+ kit))
+
+(define (del-kit! args)
+ (let ([name (first args)])
+ (Shell.exec (fmt "rm ~a/~a" *data-dir* name))))
+
+(define (list-kits)
+ (Shell.exec (fmt "ls ~a" *data-dir*)))
+
+(define (update-kit! args)
+ ;; TODO: load kit and update with new config file
+ (let ([kit nil])
+ (Shell.exec
+ (fmt "nix-env -p ~a/system -I nixos-config=~a -f <nixpkgs/nixos> --set -A system"
+ *nix-profiles-dir*
+      (get-system-path kit)))))
+
+(define (run-in-kit! args)
+ (let ([kit nil])
+ (Shell.exec
+ (fmt "systemd-run --machine ~a --pty --quiet -- ~{~a~}"
+ (kit-name kit) args))))
+
+(define (is-kit-running? kit)
+ (Shell.exec
+ (fmt "systemctl show kit@~a" (kit-name kit))))
+
+(define (start-kit! kit)
+ (Shell.exec
+ (fmt "systemctl start kit@~a" (kit-name kit))))
+
+(define (stop-kit! kit)
+ (let* ([force-stop #f]
+ [cmd (if force-stop
+ (fmt "machinectl terminate ~a" (kit-name kit))
+ (fmt "systemctl stop kit@~a" (kit-name kit)))])
+ (Shell.exec cmd)))
+
+(define (restart-kit! kit)
+ (stop-kit! kit)
+ (start-kit! kit))
+
+(define (get-leader kit)
+ "Return the PID of the init process of the kit."
+ (Shell.exec
+ (fmt "machinectl show ~a -p Leader" (kit-name kit))))
diff --git a/Biz/buildOS.nix b/Biz/buildOS.nix
new file mode 100644
index 0000000..9e6c2f2
--- /dev/null
+++ b/Biz/buildOS.nix
@@ -0,0 +1,56 @@
+nixos:
+{ ipAddress ? null
+, enableVpn ? false
+, vpnConnectTo ? ""
+, vpnRsaPrivateKeyFile ? null
+, vpnEd25519PrivateKeyFile ? null
+, deps ? {} # an attrset overlayed to pkgs
+, configuration # see: configuration.nix(5)
+}:
+# assert enableVpn -> builtins.isString ipAddress;
+# assert enableVpn -> builtins.isString vpnRsaPrivateKeyFile;
+# assert enableVpn -> builtins.isString vpnEd25519PrivateKeyFile;
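+#
+# A hypothetical usage sketch (the nixos argument and the configuration path
+# are assumptions, not wiring that exists in this file):
+#
+#   import ./buildOS.nix (import <nixpkgs/nixos>) {
+#     configuration = import ./configuration.nix;
+#     deps = { };
+#   }
+#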
+let
+ vpnExtraConfig = if enableVpn then ''
+ ConnectTo = ${vpnConnectTo}
+ Ed25519PrivateKeyFile = "${vpnEd25519PrivateKeyFile}"
+ PrivateKeyFile = "${vpnRsaPrivateKeyFile}"
+ '' else "";
+ overlay = self: super: deps;
+ defaults = {
+ boot.cleanTmpDir = true;
+ #networking.interfaces.simatime-vpn = [{ ipv4.address = ipAddress; }];
+ networking.firewall.allowPing = true;
+ nix.binaryCaches = [ "https://cache.nixos.org" ];
+ nix.gc.automatic = true;
+ nix.gc.dates = "Sunday 02:15";
+ nix.maxJobs = 1; # "auto";
+ nix.optimise.automatic = true;
+ nix.optimise.dates = [ "Sunday 02:30" ];
+ nixpkgs.overlays = [ overlay ];
+ programs.mosh.enable = true;
+ programs.mosh.withUtempter = true;
+ security.acme.email = "ben@bsima.me";
+ security.acme.acceptTerms = true;
+ security.sudo.wheelNeedsPassword = false;
+ services.clamav.daemon.enable = true; # security
+ services.clamav.updater.enable = true; # security
+ services.fail2ban.enable = true; # security
+ services.openssh.enable = true;
+ services.openssh.openFirewall = true;
+ services.openssh.forwardX11 = true;
+ services.openssh.passwordAuthentication = false;
+ #services.tinc.networks.simatime-vpn.extraConfig = vpnExtraConfig;
+ #services.tinc.networks.simatime-vpn.debugLevel = 3;
+ #services.tinc.networks.simatime-vpn.interfaceType = "tap";
+ #services.tinc.networks.simatime-vpn.hosts = import ./vpnHosts.nix;
+ system.autoUpgrade.enable = false; # 'true' breaks our nixpkgs pin
+ };
+ os = nixos {
+ system = "x86_64-linux";
+    configuration = { imports = [ defaults configuration ]; };
+ };
+in {
+ system = os.system;
+ vm = os.vm;
+}
diff --git a/Biz/fathom.nix b/Biz/fathom.nix
new file mode 100644
index 0000000..40e8b0b
--- /dev/null
+++ b/Biz/fathom.nix
@@ -0,0 +1,109 @@
+{ options
+, lib
+, config
+, pkgs
+, modulesPath
+}:
+
+with lib;
+
+let
+  cfg = config.services.fathom;
+  fathom = pkgs.stdenv.mkDerivation rec {
+ name = "fathom-v${version}";
+ version = "1.2.1";
+ src = builtins.fetchurl {
+ url = "https://github.com/usefathom/fathom/releases/download/v${version}/fathom_${version}_linux_amd64.tar.gz";
+ sha256 = "0sfpxh2xrvz992k0ynib57zzpcr0ikga60552i14m13wppw836nh";
+ };
+ sourceRoot = ".";
+ dontBuild = true;
+ installPhase = ''
+ mkdir -p $out/bin
+ cp fathom $out/bin
+ cp LICENSE $out
+ cp README.md $out
+ '';
+ };
+in {
+ options.services.fathom = {
+ enable = lib.mkEnableOption "Enable the Fathom Analytics service";
+
+ port = mkOption {
+ type = types.string;
+ default = "3000";
+ description = ''
+ The port on which Fathom will listen for
+ incoming HTTP traffic.
+ '';
+ };
+
+ gzip = mkOption {
+ type = types.bool;
+ default = true;
+ description = "Whether or not to enable gzip compression.";
+ };
+
+ debug = mkOption {
+ type = types.bool;
+ default = false;
+ description = "Whether or not to enable debug mode.";
+ };
+
+ dataDir = mkOption {
+ type = types.path;
+ default = "/var/lib/fathom";
+ description = "Fathom data directory";
+ };
+ };
+
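+  # A hypothetical host configuration using the options above might contain:
+  #
+  #   services.fathom.enable = true;
+  #   services.fathom.port = "3000";
+  #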
+ config = mkIf cfg.enable {
+ systemd.services.fathom = {
+ wantedBy = [ "multi-user.target" ];
+ after = [ "network.target" ];
+
+ environment = {
+ FATHOM_SERVER_ADDR = cfg.port;
+ FATHOM_GZIP = builtins.toString cfg.gzip;
+ FATHOM_DEBUG = builtins.toString cfg.debug;
+ FATHOM_DATABASE_DRIVER = "sqlite3";
+ FATHOM_DATABASE_NAME = "${cfg.dataDir}/fathom.db";
+ FATHOM_SECRET = "random-secret-string";
+ };
+ preStart = ''
+ echo "[fathom] creating ${cfg.dataDir}"
+ mkdir -p ${cfg.dataDir}
+ chown -R fathom:fathom ${cfg.dataDir}
+ echo "[fathom]" creating ${cfg.dataDir}/.env
+ env | grep "^FATHOM" > ${cfg.dataDir}/.env
+ '';
+ description = ''
+ Fathom Analytics
+ '';
+
+ serviceConfig = {
+ Type = "simple";
+ User = "fathom";
+ Group = "fathom";
+ ExecStart = "${pkgs.fathom}/bin/fathom server";
+ KillSignal = "INT";
+ WorkingDirectory = cfg.dataDir;
+ Restart = "on-failure";
+ RestartSec = "10";
+ PermissionsStartOnly = "true";
+ };
+ };
+
+    environment.systemPackages = [ fathom ];
+
+ users = {
+ groups = { fathom = {}; };
+ users.fathom = {
+ description = "Fathom daemon user";
+ home = cfg.dataDir;
+ group = "fathom";
+ };
+ };
+ };
+}
diff --git a/Biz/firefox.nix b/Biz/firefox.nix
new file mode 100644
index 0000000..12316fb
--- /dev/null
+++ b/Biz/firefox.nix
@@ -0,0 +1,12 @@
+{ ... }:
+
+{
+ services = {
+ firefox.syncserver = {
+ enable = true;
+ allowNewUsers = true;
+ listen.port = 5001;
+ publicUri = "http://firefoxsync.simatime.com";
+ };
+ };
+}
diff --git a/Biz/idea/duree-pitch.org b/Biz/idea/duree-pitch.org
new file mode 100644
index 0000000..d4d9d6f
--- /dev/null
+++ b/Biz/idea/duree-pitch.org
@@ -0,0 +1,80 @@
+#+TITLE: Duree: automated universal database
+#+SUBTITLE: seeking pre-seed funding
+#+AUTHOR: Ben Sima <ben@bsima.me>
+#+EMAIL: ben@bsima.me
+#+OPTIONS: H:1 num:nil toc:nil
+#+LATEX_CLASS: article
+#+LATEX_CLASS_OPTIONS:
+#+LATEX_HEADER:
+#+LATEX_HEADER_EXTRA:
+#+LATEX_COMPILER: pdflatex
+#+DATE: \today
+#+startup: beamer
+#+LaTeX_CLASS: beamer
+#+LaTeX_CLASS_OPTIONS: [presentation,smaller]
+Start with this:
+ - https://news.ycombinator.com/item?id=14605
+ - https://news.ycombinator.com/item?id=14754
+Then build AI layers on top.
+* Problem
+Developers spend too much time managing database schemas. Every database
+migration is a risk to the business because of the high possibility of data
+corruption. If the data is modeled incorrectly at the beginning, it requires a
+lot of work (months of developer time) to gut the system and re-architect it.
+* Solution
+- Using machine learning and AI, we automatically detect the schema of your data.
+- Data can be dumped into a noSQL database without the developer thinking much
+ about structure, then we infer the structure automatically.
+- We can also generate a library of queries and provide an auto-generated client
+  in the chosen language of our users.
+* Existing solutions
+- Libraries like alembic and migra (Python) make data migrations easier, but
+ don't help you make queries or properly model data.
+- ORMs help with queries but don't give you much insight into the deep structure
+ of your data (you still have to do manual joins) and don't help you properly
+ model data.
+- GraphQL is the closest competitor, but requires manually writing types and
+ knowing about the deep structure of your data. We automate both.
+
+* Unsolved problems
+- Unsure whether to build this on top of existing noSQL databases, or to develop
+ our own data store. Could re-use an existing [[https://en.wikipedia.org/wiki/Category:Database_engines][database engine]] to provide an
+ end-to-end database solution.
+* Key metrics
+- How much time do developers spend dealing with database migrations? What does
+ this cost the business? We can decrease this, decreasing costs.
+- How costly are failed data migrations and backups? We reduce this risk.
+* Unique value proposition
+We can automate the backend data mangling for 90% of software applications.
+* Unfair advantage
+- I have domain expertise, having worked on similar schemaless database problems
+ before.
+- First-mover advantage in this space. Everyone else is focused on making
+ database migrations easier, we want to make them obsolete.
+* Channels
+- Cold calling MongoDB et al. users.
+* Customer segments
+- *Early adopters:* users of MongoDB and GraphQL who want to spend time writing
+  application code, not managing database schemas. The MVP would be to generate
+  the GraphQL code from their Mongo database automatically.
+- Will expand support to other databases one by one. The tech could be used on
+ any database... or we expand by offering our own data store.
+* Cost structure
+** Fixed costs
+ - Initial development will take about 3 months (~$30k)
+ - Each new database support will take a month or two of development.
+** Variable costs
+ - Initial analysis will be compute-heavy.
+  - Following analyses can be computationally cheap by building off of the
+ existing model.
+ - Customer acquisition could be expensive, will likely hire a small sales
+ team.
+* Revenue streams
+- $100 per month per database analyzed
+ - our hosted service connects to their database directly
+  - includes client libraries via GraphQL
+ - may increase this if it turns out we save companies a lot more than $100/mo,
+ which is likely
+- enterprise licenses available for on-prem
+ - allows them to have complete control over their database access
+ - necessary for HIPAA/PCI compliance
diff --git a/Biz/idea/flash.org b/Biz/idea/flash.org
new file mode 100644
index 0000000..1c392f0
--- /dev/null
+++ b/Biz/idea/flash.org
@@ -0,0 +1,36 @@
+#+title: Flash
+#+description: a system for quickly testing business ideas
+
+- Each marketing iteration for a product requires some gear. A "gear" pack is just a yaml
+ file with all data for a single flash test. It will include ad content,
+  pricing info, links to necessary images, and so on; a sketch is given at the
+  end of this file.
+ - even better: store these in a database? Depends on how often we need to edit them...
+- Data gets marshalled into a bunch of templates, one for each sales pipeline in
+ the /Traction/ book by Gabriel Weinberg (7 pipelines total)
+- Each sales pipeline will have a number of integrations, we'll need at least
+ one for each pipeline before going to production. E.g.:
+ - google adwords
+ - facebook ads
+ - email lists (sendgrid)
+ - simple marketing website
+ - producthunt
+ - etc
+- Pipelines will need to capture metrics on a pre-set schedule.
+ - Above integrations must also pull performance numbers from Adwords etc APIs.
+ - Will need some kind of scheduled job queue or robot background worker to handle this.
+ - A simple dashboard might also be useful, not sure.
+- Metrics determine the performance of a pipeline. After the defined trial
+ duration, some pipelines will be dropped. The high-performing pipelines we
+ double-down on.
+- Metrics to watch:
+ - conversion rate
+ - usage time - minutes spent on site/app
+ - money spent per customer
+ - see baremetrics for more ideas
+- This can eventually be integrated into a larger product design platform (what Sam
+ Altman calls a "product improvement engine" in his playbook - PIE?).
+ - metric improvement can be plotted on a relative scale
+ - "If you improve your product 5% every week, it will really compound." - Sam
+ - PIE will differ from Flash in that Flash is only for the early stages of a
+ product - sell it before you build it. PIE will operate on existing products
+ to make them better.
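+
+A hypothetical gear pack sketch -- the field names here are illustrative
+guesses based on the list above, not a fixed schema:
+
+#+begin_src yaml
+# one flash test for one product
+product: widget
+ad_copy: "Try Widget today"
+price_usd: 29
+images:
+  - https://example.com/widget-hero.png
+pipelines:              # which of the 7 Traction channels to run
+  - adwords
+  - facebook-ads
+  - sendgrid-email
+trial_duration_days: 14
+#+end_src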
diff --git a/Biz/keys/ben.pub b/Biz/keys/ben.pub
new file mode 100644
index 0000000..c661508
--- /dev/null
+++ b/Biz/keys/ben.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDDhmSEbvX6LSk1ZO/whhAWpxwUxGPwbn7ZKVmxLcIilLdkd/vhFQKSYyMBW+21G3cMbwyFVsCyPbADoXcvV5OSIklxgitP77/2TAgkEPjyklJ4KD0QNDjpu+YGGIyVTgE9YPBhpwuUlxRhux15vN8xzAXq4f5/xpyBPekIdbEaEUZHrKN/z9g8cgw9ZMWSrchbsE3QlU8MJK78HO+v3TjH7Ip+LffWNuhckiYnzT8Duy47vgc1OYqtJaDMN/ufK7yeNILK81M1ybHGOlqYxSfV/RM7oD0P5w5YeTXMpRsOyn4YVzhWSQFrlf08XbwlZUNm6Pb8eNRjM+3YyFTcUU/S81xKwOPRNNhlPnxz+tUltCR3H/0Falu1pxJYT2qfuM9j9z9xA1bJEsSSZ1b2bsHw7ujpRmg0xsPUk7DXIQ1Kh92BFfmDoZWeqsMF1E7H8iuaVsN9k96BwbBfiB4stQqI3ycuHO9zbsa12y8AQusDbr9W8rl/vR0pKNrcNO32ojOzkblJGWgyNxDvTS4l69+qi6pMBONicUUMQnXEtJoasjpECzwlAHIYJMmFQUuloEafR8b0ZAaCw+I5SfsyYF4hHLYseHvMavxgLNZ6W4ZlaL9XmQ7ZGhh10ub4ceW61QvCzKD34yO1yl8PcmS8Fa7bZbGxkq36oCusGbD65AlY+w== ben@lithium
diff --git a/Biz/keys/deploy.pub b/Biz/keys/deploy.pub
new file mode 100644
index 0000000..664a2d9
--- /dev/null
+++ b/Biz/keys/deploy.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDlLRbbXgwjF7IqObf4dZE/jj0HoT6xJR6bP/6ZrJz7NPCPIgY3GacOtBfkJp6KK0zKQdFmxNpcfb3zgpe/Ru7pkmSfI9IoWAU3aLPWK2G3tbLPmktGmF9C53OhyXgFtBGr2Q/+wSRKAfN/FrEEa2FuRBtvtcAMiwbQLbFCzlmWhE7swSBvg38ZSFrjhANsEhfNVCtsrtG16fkfrfmBFv4JIog1fEoMKmXg7rhMjpaas8+n52HMFXvjllePRpywK4wB20GOcOuDSdc3i3zs7NFuicGunEpW2S/byrHotSWHZ9VuUwPn3GJ6xorrGyvsRuPS2anhHTSBxYCqYdXg0BIYUn1x5Uhtzd8kIU06gSLsvuhqGCLNucnXAT1Zix7pSlO21be81SX4vwQEth+6Dkm6kja0ArHZL6wglF8Njd1fV9iOwvcS07clwa/2S8suFLwVrQXz16vfAfA2zi4/qeop5Sv9W4DIOZuIMPmbWZCoy7L6Fu4+x4prb8LCQNM5m4CP3HngCW8PpxtBbBJd0dcXVap1HgDTIt/CLH8ms52uX5k3bHuvzryOihSuwmi/cDZAJAmbgclM9klsZr4R/GAoAWhhGxXM2tLuiwZ2nLvCPlXbBazZpdM2aC3VIwnMwJrJFu2u9B6RSsz2ijbygecT98UmiMYK7Mk1y6GkvY+mDQ== ben@lithium
diff --git a/Biz/keys/nick.pub b/Biz/keys/nick.pub
new file mode 100644
index 0000000..4dc08fb
--- /dev/null
+++ b/Biz/keys/nick.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDfSOxXJTQADjROqbaiJtjbJaHTsBtuWNvQpDvXLigl9R27VqIn7dYk2STuWglwFyrvYfU1UmjgJcJ6J2KbXGTH5mhaC04MJ4aqmOR3Ynnq7nDzmtEtn1I+K7LmpFXsFXgOTzIlzggIWflGd1pPBwgWqMoPDcSqNQFPI/+rk1JOxk3e2Mq60VTp9WM9hs0AJQEyZ+wwZ0vyrj588kQb6jQUZ7qx1UZoDzPc57zREEZbQeU1Gd9FK2bCHlKOBHYlqIftSRBGGCpuo7zobhajR0xHO9RnF0NmeLbW85XhDus8vVgBg/BTDPxHEzm5jKiCkc+i3ia0Ff9mp2zgtSdXCp5jbVZ3AYfYLi1zbPWmaSdWqFx2ntOLwWR3/RHjw6+b4KmUQ4xtQHyXOijTBCH29i7VCo7l8WL+I2mSGJ7/Wtw7NFtMpVVs8/0iKt2t12FIefzvbZoWU7vbmuO7+gQI5l+F+JE6DLWOl04vT/V98WxiHA5rbCjTT/bubs4gTeCR9qNehaoM+apitpUP8HXygnxD7EJeK6JNkdub9TY663IkiKlpnWgeoDTNSP7JF/jkU0Nt8yoR2pTyxQqMFYa37/3WKjmSHk1TgxLEmlwHQFtIkTPn8PL+VLa4ACYuWUjxS4aMRpxo9eJUHdy0Y04yKxXN8BLw7FAhytm2pTXtT4zqaQ== nicksima@gmail.com
diff --git a/Biz/packages.nix b/Biz/packages.nix
new file mode 100644
index 0000000..4ffbbf8
--- /dev/null
+++ b/Biz/packages.nix
@@ -0,0 +1,18 @@
+{ pkgs, ... }:
+
+with pkgs;
+
+{
+ environment.systemPackages = [
+ file
+ gitAndTools.gitFull
+ htop
+ python3
+ ranger
+ telnet
+ tinc_pre
+ traceroute
+ vnstat
+ wget
+ ];
+}
diff --git a/Biz/users.nix b/Biz/users.nix
new file mode 100644
index 0000000..b52043e
--- /dev/null
+++ b/Biz/users.nix
@@ -0,0 +1,39 @@
+{ config, ... }:
+
+{
+ users.motd = ''
+
+ welcome to the simatime network!
+ your host is '${config.networking.hostName}'
+
+ '';
+ users.mutableUsers = false;
+  users.users = {
+  #
+ # bots
+ #
+ deploy = {
+ isNormalUser = true;
+ home = "/home/deploy";
+ openssh.authorizedKeys.keyFiles = [ ./keys/deploy.pub ];
+ extraGroups = [ "wheel" ];
+ };
+ #
+ # humans
+ #
+ root.openssh.authorizedKeys.keyFiles = [ ./keys/ben.pub ];
+ ben = {
+ description = "Ben Sima";
+ isNormalUser = true;
+ home = "/home/ben";
+ openssh.authorizedKeys.keyFiles = [ ./keys/ben.pub ];
+ extraGroups = [ "wheel" "networkmanager" "docker" ];
+ };
+ nick = {
+ description = "Nick Sima";
+ isNormalUser = true;
+ home = "/home/nick";
+ openssh.authorizedKeys.keyFiles = [ ./keys/nick.pub ];
+ extraGroups = [ "docker" ];
+ };
+ };
+}
diff --git a/Biz/vpnHosts.nix b/Biz/vpnHosts.nix
new file mode 100644
index 0000000..1a66e92
--- /dev/null
+++ b/Biz/vpnHosts.nix
@@ -0,0 +1,37 @@
+let
+ mkVpnPeer = { address, subnet, ed25519PublicKey, rsaPublicKey }: ''
+ Address = ${address}
+ Subnet = ${subnet}
+ Ed25519PublicKey = ${ed25519PublicKey}
+ ${rsaPublicKey}
+ '';
+in {
+ "com.simatime" = mkVpnPeer {
+ address = "159.89.128.69";
+ subnet = "10.1.1.25";
+ ed25519PublicKey = "TODO";
+ rsaPublicKey = ''
+ TODO
+ '';
+ };
+ "com.simatime.dev" = mkVpnPeer {
+ address = "69.181.254.154";
+ subnet = "10.1.1.21";
+ ed25519PublicKey = "s5/rbuM7WaYqaZH0BP4/mYefrl3uWfaT+Ew4gmSsh8F";
+ rsaPublicKey = ''
+ -----BEGIN RSA PUBLIC KEY-----
+ MIICCgKCAgEAydQHK4jUQnp4ZSqIB/fjfLxILqy/IHR6DPiUp/HustFDOaLKSVM8
+ 75fVtBybiEkUmXLU3Bg8WX9zR+llTf3za1B13w+uJpcR4FS/LhAN/wgHCdgHUb4W
+ D7YZzGUnLhPAu3Ivnu5QZ6vzigqtbPCIFfwGDW2RGjq3iJMag1sM/xBOZrSn+zsZ
+ azCEP/snY30UE5ggrxJSMpZXSpS9u266nTblo8gTwfjdzrC93gmNNIxdHpeYGb0O
+ VGdaMmExq5Ny4flG2qtWA0u8nDscg7bEVIYfPjZr1G2FT5A0Ma4kteu6TeYpQEd9
+ 0if3lRb48iMwh1VBfXBps9Heexz0HjG6EAku2B1mEL5orjmC3jJK0DpuXnwVN5pz
+ B+UrFnqbFykeHxZD5RdAB1tcuHZlJ/mQyZRQMJtkifFLdj4iBBK+si05GpodGhIz
+ iXkMYRIOja9/4EyukDdU2i2yEOmgif6DhIh4awss1b2Crtxs2bg6/xi2Hy63IQEy
+ u8LxuiPGA69NsaFZz49SXXJw11KQt5g7WE0jweYXmT3VO6yZlktGdJjzXyhaw7ma
+ G9VgHvxh+K/mDZ2SXwDcINzwYwZxxqcxcmA4o8glCKQyVHIT5hlo7QkSzK4P+GgN
+ Js+sRDreM6Rha2zcOaJWZ5IO2Xva6AZZ29oO5m4V/CYPCuMAzXwV2GMCAwEAAQ==
+ -----END RSA PUBLIC KEY-----
+ '';
+ };
+}