From 19f5402bec9f6346463b83536cc22d7f4525bc18 Mon Sep 17 00:00:00 2001
From: Ben Sima
Date: Mon, 26 Oct 2020 17:52:56 -0400
Subject: Capitalize rest of files

---
 Biz/Bild/Repl.nix        |   1 -
 Biz/Bild/ShellHook.sh    |   6 +--
 Biz/Cloud/git.nix        |   2 +-
 Biz/fathom.nix           | 109 -----------------------------------------------
 Biz/firefox.nix          |  12 ------
 Biz/idea/duree-pitch.org |  80 ----------------------------------
 Biz/idea/flash.org       |  36 ----------------
 Biz/users.nix            |   8 ++--
 Que/Host.hs              |   2 +-
 Que/Site.hs              |  16 +++----
 Que/Site.nix             |  12 +++---
 default.nix              |  34 +++++++--------
 12 files changed, 39 insertions(+), 279 deletions(-)
 delete mode 100644 Biz/Bild/Repl.nix
 delete mode 100644 Biz/fathom.nix
 delete mode 100644 Biz/firefox.nix
 delete mode 100644 Biz/idea/duree-pitch.org
 delete mode 100644 Biz/idea/flash.org

diff --git a/Biz/Bild/Repl.nix b/Biz/Bild/Repl.nix
deleted file mode 100644
index 94edf3a..0000000
--- a/Biz/Bild/Repl.nix
+++ /dev/null
@@ -1 +0,0 @@
-{ nixpkgs = import ./.; }
diff --git a/Biz/Bild/ShellHook.sh b/Biz/Bild/ShellHook.sh
index 75a0842..89751d3 100644
--- a/Biz/Bild/ShellHook.sh
+++ b/Biz/Bild/ShellHook.sh
@@ -45,9 +45,7 @@ function hero() {
   fi
 }
 
-function lint() {
-  alias lint=$BIZ_ROOT/Biz/lint.py
-}
+alias lint=$BIZ_ROOT/Biz/Lint.py
 
 function pie() {
   runghc Biz.Pie $@
@@ -74,7 +72,7 @@ function push() {
 # TODO: convert to haskell
 function ship() {
   set -ex
-  $BIZ_ROOT/Biz/lint.py
+  $BIZ_ROOT/Biz/Lint.py
   stuff=(${1})
   if [[ ${#stuff[@]} -eq 0 ]]
   then
diff --git a/Biz/Cloud/git.nix b/Biz/Cloud/git.nix
index 370f52a..6104ced 100644
--- a/Biz/Cloud/git.nix
+++ b/Biz/Cloud/git.nix
@@ -11,7 +11,7 @@
       extraGitoliteRc = ''
         $RC{SITE_INFO} = 'a computer is a bicycle for the mind.';
       '';
-      adminPubkey = builtins.readFile ../keys/ben.pub;
+      adminPubkey = builtins.readFile ../Keys/Ben.pub;
     };
   };
 }
diff --git a/Biz/fathom.nix b/Biz/fathom.nix
deleted file mode 100644
index 40e8b0b..0000000
--- a/Biz/fathom.nix
+++ /dev/null
@@ -1,109 +0,0 @@
-{ options
-, lib
-, config
-, pkgs
-, modulesPath
-, stdenv
-}:
-
-with lib;
-
-let
-  cfg = config.services.fathom
-  pkgs.fathom = stdenv.mkDerivation rec {
-    name = "fathom-v${version}";
-    version = "1.2.1";
-    src = builtins.fetchurl {
-      url = "https://github.com/usefathom/fathom/releases/download/v${version}/fathom_${version}_linux_amd64.tar.gz";
-      sha256 = "0sfpxh2xrvz992k0ynib57zzpcr0ikga60552i14m13wppw836nh";
-    };
-    sourceRoot = ".";
-    dontBuild = true;
-    installPhase = ''
-      mkdir -p $out/bin
-      cp fathom $out/bin
-      cp LICENSE $out
-      cp README.md $out
-    '';
-  };
-in {
-  options.services.fathom = {
-    enable = lib.mkEnableOption "Enable the Fathom Analytics service";
-
-    port = mkOption {
-      type = types.string;
-      default = "3000";
-      description = ''
-        The port on which Fathom will listen for
-        incoming HTTP traffic.
-      '';
-    };
-
-    gzip = mkOption {
-      type = types.bool;
-      default = true;
-      description = "Whether or not to enable gzip compression.";
-    };
-
-    debug = mkOption {
-      type = types.bool;
-      default = false;
-      description = "Whether or not to enable debug mode.";
-    };
-
-    dataDir = mkOption {
-      type = types.path;
-      default = "/var/lib/fathom";
-      description = "Fathom data directory";
-    };
-  };
-
-  config = mkIf cfg.enable {
-    systemd.services.fathom = {
-      wantedBy = [ "multi-user.target" ];
-      after = [ "network.target" ];
-
-      environment = {
-        FATHOM_SERVER_ADDR = cfg.port;
-        FATHOM_GZIP = builtins.toString cfg.gzip;
-        FATHOM_DEBUG = builtins.toString cfg.debug;
-        FATHOM_DATABASE_DRIVER = "sqlite3";
-        FATHOM_DATABASE_NAME = "${cfg.dataDir}/fathom.db";
-        FATHOM_SECRET = "random-secret-string";
-      };
-      preStart = ''
-        echo "[fathom] creating ${cfg.dataDir}"
-        mkdir -p ${cfg.dataDir}
-        chown -R fathom:fathom ${cfg.dataDir}
-        echo "[fathom]" creating ${cfg.dataDir}/.env
-        env | grep "^FATHOM" > ${cfg.dataDir}/.env
-      '';
-      description = ''
-        Fathom Analytics
-      '';
-
-      serviceConfig = {
-        Type = "simple";
-        User = "fathom";
-        Group = "fathom";
-        ExecStart = "${pkgs.fathom}/bin/fathom server";
-        KillSignal = "INT";
-        WorkingDirectory = cfg.dataDir;
-        Restart = "on-failure";
-        RestartSec = "10";
-        PermissionsStartOnly = "true";
-      };
-    };
-
-    environment.systemPackages = [ pkgs.fathom ];
-
-    users = {
-      groups = { fathom = {}; };
-      users.fathom = {
-        description = "Fathom daemon user";
-        home = cfg.dataDir;
-        group = "fathom";
-      };
-    };
-  };
-}
diff --git a/Biz/firefox.nix b/Biz/firefox.nix
deleted file mode 100644
index 12316fb..0000000
--- a/Biz/firefox.nix
+++ /dev/null
@@ -1,12 +0,0 @@
-{ ... }:
-
-{
-  services = {
-    firefox.syncserver = {
-      enable = true;
-      allowNewUsers = true;
-      listen.port = 5001;
-      publicUri = "http://firefoxsync.simatime.com";
-    };
-  };
-}
diff --git a/Biz/idea/duree-pitch.org b/Biz/idea/duree-pitch.org
deleted file mode 100644
index d4d9d6f..0000000
--- a/Biz/idea/duree-pitch.org
+++ /dev/null
@@ -1,80 +0,0 @@
-#+TITLE: Duree: automated universal database
-#+SUBTITLE: seeking pre-seed funding
-#+AUTHOR: Ben Sima
-#+EMAIL: ben@bsima.me
-#+OPTIONS: H:1 num:nil toc:nil
-#+LATEX_CLASS: article
-#+LATEX_CLASS_OPTIONS:
-#+LATEX_HEADER:
-#+LATEX_HEADER_EXTRA:
-#+LATEX_COMPILER: pdflatex
-#+DATE: \today
-#+startup: beamer
-#+LaTeX_CLASS: beamer
-#+LaTeX_CLASS_OPTIONS: [presentation,smaller]
-Start with this:
-  - https://news.ycombinator.com/item?id=14605
-  - https://news.ycombinator.com/item?id=14754
-Then build AI layers on top.
-* Problem
-Developers spend too much time managing database schemas. Every database
-migration is a risk to the business because of the high possibility of data
-corruption. If the data is modeled incorrectly at the beginning, it requires a
-lot of work (months of developer time) to gut the system and re-architect it.
-* Solution
-- Using machine learning and AI, we automatically detect the schema of your data.
-- Data can be dumped into a noSQL database withouth the developer thinking much
-  about structure, then we infer the structure automatically.
-- We can also generate a library of queries and provide an auto-generated client
-  in the choosen language of our users.
-* Existing solutions
-- Libraries like alembic and migra (Python) make data migrations easier, but
-  don't help you make queries or properly model data.
-- ORMs help with queries but don't give you much insight into the deep structure
-  of your data (you still have to do manual joins) and don't help you properly
-  model data.
-- Graph QL is the closest competitor, but requires manually writing types and
-  knowing about the deep structure of your data. We automate both.
-
-* Unsolved problems
-- Unsure whether to build this on top of existing noSQL databases, or to develop
-  our own data store. Could re-use an existing [[https://en.wikipedia.org/wiki/Category:Database_engines][database engine]] to provide an
-  end-to-end database solution.
-* Key metrics
-- How much time do developers spend dealing with database migrations? What does
-  this cost the business? We can decrease this, decreasing costs.
-- How costly are failed data migrations and backups? We reduce this risk.
-* Unique value proposition
-We can automate the backend data mangling for 90% of software applications.
-* Unfair advantage
-- I have domain expertise, having worked on similar schemaless database problems
-  before.
-- First-mover advantage in this space. Everyone else is focused on making
-  database migrations easier, we want to make them obsolete.
-* Channels
-- Cold calling mongoDB et al users.
-* Customer segments
-- *Early adopters:* users of mongoDB and graphQL who want to spend time writing
-  application code, not managing database schemas. The MVP would be to generate
-  the Graph QL code from their Mongo database automatically.
-- Will expand support to other databases one by one. The tech could be used on
-  any database... or we expand by offering our own data store.
-* Cost structure
-** Fixed costs
-   - Initial development will take about 3 months (~$30k)
-   - Each new database support will take a month or two of development.
-** Variable costs
-   - Initial analysis will be compute-heavy.
-   - Following analyses can be computationally cheap by buildiing off of the
-     existing model.
-   - Customer acquisition could be expensive, will likely hire a small sales
-     team.
-* Revenue streams
-- $100 per month per database analyzed
-  - our hosted service connects to their database directly
-  - includes client libraries via graphQL
-  - may increase this if it turns out we save companies a lot more than $100/mo,
-    which is likely
-- enterprise licenses available for on-prem
-  - allows them to have complete control over their database access
-  - necessary for HIPAA/PCI compliance
diff --git a/Biz/idea/flash.org b/Biz/idea/flash.org
deleted file mode 100644
index 1c392f0..0000000
--- a/Biz/idea/flash.org
+++ /dev/null
@@ -1,36 +0,0 @@
-#+title: Flash
-#+description: a system for quickly testing business ideas
-
-- Each marketing iteration for a product requires some gear. A "gear" pack is just a yaml
-  file with all data for a single flash test. It will include ad content,
-  pricing info, links to necessary images, and so on.
-  - even better: store these in a database? Depends on how often we need to edit them...
-- Data gets marshalled into a bunch of templates, one for each sales pipeline in
-  the /Traction/ book by Gabriel Weinberg (7 pipelines total)
-- Each sales pipeline will have a number of integrations, we'll need at least
-  one for each pipeline before going to production. E.g.:
-  - google adwords
-  - facebook ads
-  - email lists (sendgrid)
-  - simple marketing website
-  - producthunt
-  - etc
-- Pipelines will need to capture metrics on a pre-set schedule.
-  - Above integrations must also pull performance numbers from Adwords etc APIs.
-  - Will need some kind of scheduled job queue or robot background worker to handle this.
-  - A simple dashboard might also be useful, not sure.
-- Metrics determine the performance of a pipeline. After the defined trial
-  duration, some pipelines will be dropped. The high-performing pipelines we
-  double-down on.
-- Metrics to watch:
-  - conversion rate
-  - usage time - minutes spent on site/app
-  - money spent per customer
-  - see baremetrics for more ideas
-- This can eventually be integrated to a larger product design platform (what Sam
-  Altman calls a "product improvement engine" in his playbook - PIE?).
-  - metric improvement can be plotted on a relative scale
-  - "If you improve your product 5% every week, it will really compound." - Sam
-  - PIE will differ from Flash in that Flash is only for the early stages of a
-    product - sell it before you build it. PIE will operate on existing products
-    to make them better.
diff --git a/Biz/users.nix b/Biz/users.nix
index b52043e..c7c4041 100644
--- a/Biz/users.nix
+++ b/Biz/users.nix
@@ -14,25 +14,25 @@
     deploy = {
       isNormalUser = true;
       home = "/home/deploy";
-      openssh.authorizedKeys.keyFiles = [ ./keys/deploy.pub ];
+      openssh.authorizedKeys.keyFiles = [ ./Keys/Deploy.pub ];
       extraGroups = [ "wheel" ];
     };
     #
     # humans
     #
-    root.openssh.authorizedKeys.keyFiles = [ ./keys/ben.pub ];
+    root.openssh.authorizedKeys.keyFiles = [ ./Keys/Ben.pub ];
     ben = {
       description = "Ben Sima";
       isNormalUser = true;
       home = "/home/ben";
-      openssh.authorizedKeys.keyFiles = [ ./keys/ben.pub ];
+      openssh.authorizedKeys.keyFiles = [ ./Keys/Ben.pub ];
       extraGroups = [ "wheel" "networkmanager" "docker" ];
     };
     nick = {
       description = "Nick Sima";
       isNormalUser = true;
       home = "/home/nick";
-      openssh.authorizedKeys.keyFiles = [ ./keys/nick.pub ];
+      openssh.authorizedKeys.keyFiles = [ ./Keys/Nick.pub ];
       extraGroups = [ "docker" ];
     };
   };
diff --git a/Que/Host.hs b/Que/Host.hs
index 5b51dba..3303709 100644
--- a/Que/Host.hs
+++ b/Que/Host.hs
@@ -60,7 +60,7 @@ main = Exception.bracket startup shutdown <| uncurry Warp.run
       putText "Que.Host"
       putText <| "port: " <> (show <| quePort c)
       putText <| "skey: " <> (show <| queSkey c)
-      return (port c, waiapp)
+      return (quePort c, waiapp)
     shutdown :: a -> IO a
     shutdown = pure . identity
 
diff --git a/Que/Site.hs b/Que/Site.hs
index 2b35956..794dd04 100644
--- a/Que/Site.hs
+++ b/Que/Site.hs
@@ -39,14 +39,14 @@ main = do
     _ -> Exit.die "usage: que-website [namespace]"
   mKey <- getKey ns
   putText <| "serving " <> Text.pack src <> " at " <> ns
-  run mKey ns <|
-    Sources
-      { index = src "index.md",
-        client = src "client.py",
-        quescripts = src "quescripts.md",
-        style = src "style.css",
-        apidocs = src "apidocs.md",
-        tutorial = src "tutorial.md"
+  run mKey ns
+    <| Sources
+      { index = src "Index.md",
+        client = src "Client.py",
+        quescripts = src "Quescripts.md",
+        style = src "Style.css",
+        apidocs = src "Apidocs.md",
+        tutorial = src "Tutorial.md"
       }
 
 getKey :: Namespace -> IO (Maybe Key)
diff --git a/Que/Site.nix b/Que/Site.nix
index 6a24d9d..685b3a6 100644
--- a/Que/Site.nix
+++ b/Que/Site.nix
@@ -12,12 +12,12 @@ let
     name = "que-website-static";
     installPhase = ''
       mkdir -p $out
-      cp ${./apidocs.md} $out/apidocs.md
-      cp ${./index.md} $out/index.md
-      cp ${./quescripts.md} $out/quescripts.md
-      cp ${./style.css} $out/style.css
-      cp ${./tutorial.md} $out/tutorial.md
-      cp ${./client.py} $out/client.py
+      cp ${./Apidocs.md} $out/Apidocs.md
+      cp ${./Index.md} $out/Index.md
+      cp ${./Quescripts.md} $out/Quescripts.md
+      cp ${./Style.css} $out/Style.css
+      cp ${./Tutorial.md} $out/Tutorial.md
+      cp ${./Client.py} $out/Client.py
     '';
   };
 in
diff --git a/default.nix b/default.nix
index 2915990..8f2a382 100644
--- a/default.nix
+++ b/default.nix
@@ -11,15 +11,15 @@ in rec {
   #
   Biz.Cloud = build.os {
     imports = [
-      ./Biz/packages.nix
-      ./Biz/users.nix
-      ./Biz/Cloud/chat.nix
-      ./Biz/Cloud/git.nix
-      ./Biz/Cloud/hardware.nix
-      ./Biz/Cloud/mail.nix
-      ./Biz/Cloud/networking.nix
-      ./Biz/Cloud/web.nix
-      ./Biz/Cloud/znc.nix
+      ./Biz/Packages.nix
+      ./Biz/Users.nix
+      ./Biz/Cloud/Chat.nix
+      ./Biz/Cloud/Git.nix
+      ./Biz/Cloud/Hardware.nix
+      ./Biz/Cloud/Mail.nix
+      ./Biz/Cloud/Networking.nix
+      ./Biz/Cloud/Web.nix
+      ./Biz/Cloud/Znc.nix
       nixos-mailserver
     ];
     networking.hostName = "simatime";
@@ -29,10 +29,10 @@
   #
   Biz.Dev = build.os {
     imports = [
-      ./Biz/packages.nix
-      ./Biz/users.nix
-      ./Biz/Dev/configuration.nix
-      ./Biz/Dev/hardware.nix
+      ./Biz/Packages.nix
+      ./Biz/Users.nix
+      ./Biz/Dev/Configuration.nix
+      ./Biz/Dev/Hardware.nix
     ];
     networking.hostName = "lithium";
     networking.domain = "dev.simatime.com";
@@ -41,8 +41,8 @@
   #
   Que.Prod = build.os {
     imports = [
-      ./Biz/packages.nix
-      ./Biz/users.nix
+      ./Biz/Packages.nix
+      ./Biz/Users.nix
       ./Que/Host.nix
       ./Que/Site.nix
       ./Que/Prod.nix
@@ -63,8 +63,8 @@
   # Production server for herocomics.app
   Hero.Prod = build.os {
     imports = [
-      ./Biz/packages.nix
-      ./Biz/users.nix
+      ./Biz/Packages.nix
+      ./Biz/Users.nix
       ./Hero/Service.nix
       ./Hero/Prod.nix
     ];
--
cgit v1.2.3