Merge pull request #127 from jcpsantiago/using-sqlite
use SQLite instead of Postgres
jcpsantiago authored May 9, 2024
2 parents cafc642 + 05097cf commit ce8f09e
Showing 15 changed files with 119 additions and 103 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -13,3 +13,4 @@ pom.xml.asc
/target/

## ---------------------------------------------------------
thearqivist_db
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -37,6 +37,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
* [#88](https://github.com/jcpsantiago/thearqivist/issues/88) Automatically join public channels
* dev: update hack locally details for starting a REPL
* [#63](https://github.com/jcpsantiago/thearqivist/issues/63) Enable recurrent jobs: new `jobs` table, rebased migrations, new modals
* Switch to SQLite, dropping the requirements for Hikari and Postgres

## 0.1.0 - 2023-04-20

7 changes: 6 additions & 1 deletion README.md
@@ -21,7 +21,12 @@ To run The Arqivist yourself you need

* admin access to a Confluence account
* admin access to a Slack workspace
* a running instance of The Arqivist + a Postgres database (see data model at [dbdiagram.io](https://dbdiagram.io/d/6551f3787d8bbd6465102527)
* a running instance of The Arqivist

## Data persistence

The Arqivist uses SQLite as the backend database.
See data model at [dbdiagram.io](https://dbdiagram.io/d/6551f3787d8bbd6465102527)

## Installing in Confluence

3 changes: 1 addition & 2 deletions deps.edn
@@ -20,9 +20,8 @@

;; Persistence
com.github.seancorfield/next.jdbc {:mvn/version "1.3.909"}
hikari-cp/hikari-cp {:mvn/version "3.0.1"}
migratus/migratus {:mvn/version "1.5.6"}
org.postgresql/postgresql {:mvn/version "42.7.1"}
org.xerial/sqlite-jdbc {:mvn/version "3.45.3.0"}

;; HTTP server
http-kit/http-kit {:mvn/version "2.7.0"}
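This dependency swap is the heart of the change: HikariCP and the Postgres driver are dropped, and the xerial SQLite JDBC driver takes their place. Because SQLite runs in-process against a single file, next.jdbc can hand out a plain datasource and no connection pool is required. A minimal REPL sketch of the new stack, not part of the diff (the thearqivist_db filename matches the .gitignore entry above):

;; Illustrative sketch, not part of this commit.
(require '[next.jdbc :as jdbc])

(def ds (jdbc/get-datasource {:dbtype "sqlite" :dbname "thearqivist_db"}))

(jdbc/execute-one! ds ["select sqlite_version() as version"])
;; => e.g. {:version "3.45.3"}, the engine bundled with the 3.45.3.0 driver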
86 changes: 43 additions & 43 deletions resources/migrations/20231113185356-init-tables.up.sql
@@ -1,54 +1,54 @@
CREATE TABLE
IF NOT EXISTS atlassian_tenants (
id serial primary key,
key varchar(255),
tenant_name varchar(255),
account_id varchar(255),
client_key varchar(255),
shared_secret varchar(255),
base_url varchar(255),
base_url_short varchar(255),
display_url varchar(255),
product_type varchar(255),
description varchar(255),
service_entitlement_number varchar(255),
oauth_client_id varchar(255),
valid_license boolean,
is_evaluation boolean,
created_at timestamp default current_timestamp
);
id integer primary key autoincrement,
key text,
tenant_name text,
account_id text,
client_key text,
shared_secret text,
base_url text,
base_url_short text,
display_url text,
product_type text,
description text,
service_entitlement_number text,
oauth_client_id text,
valid_license integer,
is_evaluation integer,
created_at integer default (unixepoch('now'))
) STRICT;

--;;
CREATE TABLE
IF NOT EXISTS slack_teams (
id serial primary key,
atlassian_tenant_id int REFERENCES atlassian_tenants ON DELETE CASCADE,
app_id varchar(255),
external_team_id varchar(255),
team_name varchar(255),
registering_user varchar(255),
scopes varchar(255),
access_token varchar(255),
bot_user_id varchar(255),
created_at timestamp default current_timestamp
);
id integer primary key autoincrement,
atlassian_tenant_id integer REFERENCES atlassian_tenants ON DELETE CASCADE,
app_id text,
external_team_id text,
team_name text,
registering_user text,
scopes text,
access_token text,
bot_user_id text,
created_at integer default (unixepoch('now'))
) STRICT;

--;;
CREATE TABLE
IF NOT EXISTS jobs (
id serial primary key,
slack_team_id int NOT NULL REFERENCES slack_teams ON DELETE CASCADE,
slack_channel_id varchar(255) NOT NULL,
owner_slack_user_id varchar(255) NOT NULL,
timezone varchar(255) NOT NULL,
frequency varchar(255) NOT NULL,
target varchar(255) NOT NULL,
target_url varchar(255),
last_slack_conversation_datetime timestamp,
last_slack_conversation_ts varchar(255),
due_date timestamp,
n_runs int,
updated_at timestamp,
created_at timestamp default current_timestamp,
id integer primary key autoincrement,
slack_team_id integer NOT NULL REFERENCES slack_teams ON DELETE CASCADE,
slack_channel_id text NOT NULL,
owner_slack_user_id text NOT NULL,
timezone text NOT NULL,
frequency text NOT NULL,
target text NOT NULL,
target_url text,
last_slack_conversation_datetime integer,
last_slack_conversation_ts text,
due_date integer,
n_runs integer,
updated_at integer,
created_at integer default (unixepoch('now')),
CHECK (frequency in ('once', 'daily', 'weekly'))
);
) STRICT;
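
Two SQLite-specific details in the rewritten migration are worth calling out. STRICT tables (SQLite 3.37+) enforce the declared column types instead of SQLite's usual flexible typing, which is why boolean columns become integer (stored as 0/1). Timestamps become integer epoch seconds, defaulted with unixepoch('now'). A hedged REPL sketch of the effect, reusing the ds datasource from the earlier sketch:

;; Illustrative sketch, not part of this commit: in a STRICT table the
;; declared types are enforced and created_at defaults to integer seconds.
(require '[next.jdbc :as jdbc])

(jdbc/execute-one!
 ds ["insert into atlassian_tenants (key, valid_license) values (?, ?)"
     "my-tenant" 1])  ;; booleans are written as 0/1 integers

(jdbc/execute-one!
 ds ["select key, valid_license, created_at from atlassian_tenants"])
;; => created_at comes back as a plain long, e.g. 1715241600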
1 change: 0 additions & 1 deletion src/jcpsantiago/arqivist/api/confluence/pages.clj
@@ -298,4 +298,3 @@
[:a {:href "https://arqivist.app", :target "_blank"}
"The Arqivist"]
" is made with ♥ in Berlin."]]]]]])))

9 changes: 3 additions & 6 deletions src/jcpsantiago/arqivist/api/slack/handlers.clj
@@ -7,19 +7,16 @@
[clojure.spec.alpha :as spec]
[clojure.string :refer [trim]]
[com.brunobonacci.mulog :as mulog]
[jcpsantiago.arqivist.api.slack.ui-blocks :as ui]
[jcpsantiago.arqivist.api.slack.pages :as pages]
[jcpsantiago.arqivist.api.slack.specs :as specs]
[jcpsantiago.arqivist.api.slack.ui-blocks :as ui]
[jcpsantiago.arqivist.api.slack.utils :as utils]
[jcpsantiago.arqivist.messages :as messages]
[jcpsantiago.arqivist.utils :as core-utils]
[jcpsantiago.arqivist.specs :as core-specs]
[jcpsantiago.arqivist.utils :as core-utils]
[jsonista.core :as json]
[next.jdbc.sql :as sql]
;; needed because PostgreSQL can't translate java datetime into SQL timestamp
;; https://cljdoc.org/d/com.github.seancorfield/next.jdbc/1.3.894/api/next.jdbc.date-time
[next.jdbc.date-time]
[ring.util.response :refer [bad-request response content-type]]))
[ring.util.response :refer [bad-request content-type response]]))

;;
;; ------------------------------------------------------
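Besides alphabetising the requires, the notable deletion here is next.jdbc.date-time, which was only needed to coerce java.time values into Postgres timestamps. With timestamps stored as integer epoch seconds they are plain longs and round-trip through next.jdbc with no extension namespace. A hedged sketch (the ds datasource and the job id are assumptions):

;; Illustrative sketch, not part of this commit: epoch seconds are plain
;; longs, so no date-time coercion namespace is required.
(require '[next.jdbc.sql :as sql])

(sql/update! ds :jobs
             {:updated_at (quot (System/currentTimeMillis) 1000)}
             {:id 1})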
2 changes: 1 addition & 1 deletion src/jcpsantiago/arqivist/api/slack/specs.clj
@@ -187,7 +187,7 @@
(spec/def :slack_teams/scopes string?)
(spec/def :slack_teams/access_token string?)
(spec/def :slack_teams/bot_user_id string?)
(spec/def :slack_teams/created_at inst?)
(spec/def :slack_teams/created_at int?)
(spec/def :slack_teams/atlassian_tenant_id pos-int?)

(spec/def ::team-attributes
33 changes: 21 additions & 12 deletions src/jcpsantiago/arqivist/api/slack/ui_blocks.clj
@@ -124,28 +124,37 @@
"
[job]
(let [{:keys [:jobs/owner_slack_user_id :jobs/frequency :jobs/due_date
:jobs/created_at :jobs/last_slack_conversation_datetime]} job
created_at_ts (to-seconds-from-epoch created_at)
last-slack-conversation-ts (to-seconds-from-epoch last_slack_conversation_datetime)
due_date_ts (to-seconds-from-epoch due_date)]
:jobs/created_at :jobs/last_slack_conversation_datetime
:jobs/timezone]} job
due_date_tz (when due_date
(java-time/local-date-time
(java-time/instant (* 1000 due_date))
"UTC"))
last_slack_conversation_tz (java-time/local-date-time
(java-time/instant (* 1000 last_slack_conversation_datetime))
timezone)]
;; NOTE: Slack does not allow more than 10 fields per block
;; each "row" here would be two fields so we can have a max of 5 k-v pairs
(two-column-section
[["*Owner*: " (str "<@" owner_slack_user_id ">")]
["*Created at*: " (slack-nice-datetime created_at_ts "{date_num}" created_at)]
["*Created at*: " (slack-nice-datetime created_at "{date_num}" created_at)]
["*Frequency*:" (str "`" frequency "`")]
["*Next archival at*:" (slack-nice-datetime due_date_ts "{date_num} 12:00 AM" due_date)]
["*Archived until*:" (slack-nice-datetime last-slack-conversation-ts "{date_num} {time}" last_slack_conversation_datetime)]])))
["*Next archival at*:" (if due_date_tz
(slack-nice-datetime due_date "{date_num}" due_date_tz)
"Not scheduled")]
["*Archived until*:" (slack-nice-datetime last_slack_conversation_datetime "{date_num} {time}" last_slack_conversation_tz)]])))

(defn exists-once-modal
"
Modal informing the user the current channel has already been saved once
"
[request existing-job]
(let [{{{:keys [team_domain channel_name channel_id user_id user_name]} :form} :parameters} request
{:keys [:jobs/last_slack_conversation_datetime
{:keys [:jobs/last_slack_conversation_datetime :jobs/timezone
:jobs/slack_channel_id :jobs/target_url]} existing-job
last-slack-conversation-ts (quot (java-time/to-millis-from-epoch last_slack_conversation_datetime) 1000)]
last_slack_conversation_tz (java-time/local-date-time
(java-time/instant (* 1000 last_slack_conversation_datetime))
timezone)]

{:type "modal"
:callback_id "exists-once-confirmation"
@@ -178,9 +187,9 @@
:text (str
"If you would you like to setup a *recurrent archival* instead of a one time manual job, "
"select another frequency, otherwise please select `once` again. I'll "
"create a new archive with messages since "
(str "<!date^" last-slack-conversation-ts "^{date_num} {time}|" last_slack_conversation_datetime "> ")
"without overwriting the previous archive.")}}
"append new messages since "
(str "<!date^" last_slack_conversation_datetime "^{date_num} {time}|" last_slack_conversation_tz "> ")
"to the archive.")}}

{:type "divider"}
{:type "input"
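The pattern introduced throughout this file: Slack's <!date^...> token accepts epoch seconds directly, so the stored integers pass straight through, while the human-readable fallback is produced by shifting those seconds into the job's own timezone. A hedged sketch of the conversion the diff performs inline, assuming the java-time.api namespace of clojure.java-time:

;; Illustrative sketch, not part of this commit: stored epoch seconds ->
;; LocalDateTime in the job's timezone, for Slack's fallback text.
(require '[java-time.api :as java-time])

(defn epoch->local-date-time
  [epoch-seconds timezone]
  (java-time/local-date-time
   (java-time/instant (* 1000 epoch-seconds)) ;; instant takes epoch millis
   timezone))

(epoch->local-date-time 1715241600 "Europe/Berlin")
;; => a java.time.LocalDateTime such as 2024-05-09T10:00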
2 changes: 1 addition & 1 deletion src/jcpsantiago/arqivist/core.clj
@@ -25,7 +25,7 @@
(mulog/set-global-context!
;; TODO: get the version from a file or config, issue #23
{:app-name "The Arqivist"
:version "2023-12-18.1"
:version "2024-05-09.1"
:service-profile (System/getenv "ARQIVIST_SERVICE_PROFILE")})
(mulog/log ::application-starup :arguments args)
(if team
17 changes: 9 additions & 8 deletions src/jcpsantiago/arqivist/messages.clj
@@ -46,8 +46,8 @@
[frequency]
(case frequency
"once" nil
"daily" (java-time/+ (java-time/local-date-time) (java-time/days 1))
"weekly" (java-time/+ (java-time/local-date-time) (java-time/weeks 1))))
"daily" (java-time/+ (java-time/instant) (java-time/days 1))
"weekly" (java-time/+ (java-time/instant) (java-time/weeks 1))))

;; TODO:
;; * clean up in case it's a first-time run and creating the archive fails
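
due-date now yields a java.time.Instant, or nil for one-off jobs, instead of a LocalDateTime; the caller further down converts it to epoch seconds before storage. A short sketch of the round-trip, not part of the diff:

;; Illustrative sketch, not part of this commit: recurring jobs get an
;; Instant one day or week out; `once` jobs get nil and store NULL.
(when-let [due (due-date "daily")]
  (.getEpochSecond due))
;; => integer seconds roughly 86400 from now

(due-date "once")
;; => nil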
@@ -81,19 +81,20 @@
last-ts (->> messages (sort-by :ts) last :ts)
last-datetime (-> last-ts
(string/replace #"\..+" "")
(Long/parseLong)
(java.time.Instant/ofEpochSecond))
(Long/parseLong))
n-runs (if (nil? (:jobs/n_runs job))
1
(inc (:jobs/n_runs job)))
job-due-date (due-date frequency)
updates {:jobs/target_url (:archive-url archival-response)
:jobs/frequency frequency
;; FIXME: n_runs can't be null, inc explodes, check beforehand
:jobs/n_runs n-runs
:jobs/last_slack_conversation_ts last-ts
:jobs/last_slack_conversation_datetime last-datetime
:jobs/due_date (due-date frequency)
:jobs/updated_at (java-time/local-date-time)}]
:jobs/due_date (when job-due-date
(.getEpochSecond job-due-date))
:jobs/updated_at (core-utils/unix-epoch)}]

(sql/update!
(:db-connection system)
@@ -141,7 +142,8 @@
atlassian_tenant_id (get-in request [:slack-team-attributes :slack_teams/atlassian_tenant_id])
confluence-tenant-attributes (sql/get-by-id db-connection :atlassian_tenants atlassian_tenant_id)
db-io-result (db-fn system job)
job (merge job (select-keys db-io-result [:jobs/id]))]
;; NOTE: SQLite returns :last_insert_rowid() as the key, which is invalid clj, so get the value directly
job (merge job {:jobs/id (first (vals db-io-result))})]

(mulog/log ::start-job-db-io
;; FIXME: get the name of the function used as a string
@@ -166,4 +168,3 @@
:local-time (java.time.LocalDateTime/now))
(let [{:keys [channel_id user_id]} (-> request :parameters :form :payload :view :private_metadata read-string)]
(core-utils/ephemeral-error-message! user_id channel_id (:slack-connection request)))))))
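
The NOTE about :last_insert_rowid() merits an example: next.jdbc surfaces SQLite's generated key under a column label containing parentheses, which cannot be written as a literal Clojure keyword, hence the positional (first (vals ...)). A hedged sketch, assuming the ds datasource from earlier:

;; Illustrative sketch, not part of this commit: the generated-key map from
;; a SQLite insert has a keyword that is not readable as literal Clojure.
(require '[next.jdbc.sql :as sql])

(def db-io-result (sql/insert! ds :atlassian_tenants {:key "my-tenant"}))
;; prints as {:last_insert_rowid() 1}

(first (vals db-io-result))
;; => 1, the id of the new row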

9 changes: 4 additions & 5 deletions src/jcpsantiago/arqivist/specs.clj
@@ -13,18 +13,17 @@
(spec/def :jobs/frequency #{"once" "daily" "weekly"})
(spec/def :jobs/target #{"confluence"})
(spec/def :jobs/target_url string?)
(spec/def :jobs/last_slack_conversation_datetime inst?)
(spec/def :jobs/last_slack_conversation_datetime int?)
(spec/def :jobs/last_slack_conversation_ts string?)
;; NOTE: `once` jobs won't have a due date
(spec/def :jobs/due_date (spec/nilable inst?))
(spec/def :jobs/due_date (spec/nilable int?))
(spec/def :jobs/n_runs int?)
(spec/def :jobs/updated_at inst?)
(spec/def :jobs/created_at inst?)
(spec/def :jobs/updated_at (spec/nilable int?))
(spec/def :jobs/created_at int?)

(spec/def ::job
(spec/keys
:req [:jobs/slack_team_id :jobs/slack_channel_id :jobs/owner_slack_user_id :jobs/timezone
:jobs/frequency :jobs/target]
:opt [:jobs/id :jobs/last_slack_conversation_ts :jobs/due_date :jobs/n_runs :jobs/updated_at :jobs/created_at
:jobs/target_url :jobs/last_slack_conversation_datetime]))
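
With the timestamp specs switched from inst? to int?, a job map hydrated from SQLite rows validates directly; updated_at is nilable because it stays unset until the job first runs. A quick sketch against the specs above (assuming this namespace is loaded):

;; Illustrative sketch, not part of this commit.
(require '[clojure.spec.alpha :as spec])

(spec/valid? :jobs/created_at 1715241600)          ;; => true, epoch seconds
(spec/valid? :jobs/created_at #inst "2024-05-09")  ;; => false, no longer inst?
(spec/valid? :jobs/due_date nil)                   ;; => true, `once` jobs
(spec/valid? :jobs/updated_at nil)                 ;; => true, not yet run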

38 changes: 18 additions & 20 deletions src/jcpsantiago/arqivist/system.clj
@@ -4,12 +4,12 @@
(:require
[com.brunobonacci.mulog :as mulog]
[donut.system :as donut]
[hikari-cp.core :as hikari]
[org.httpkit.client :as http-client]
[org.httpkit.server :as http-server]
[jcpsantiago.arqivist.router :as router]
[jsonista.core :as jsonista]
[migratus.core :as migratus]))
[migratus.core :as migratus]
[next.jdbc :as jdbc]))

;; TODO: move into system utils namespace?
(defn ngrok-tunnel-url
@@ -48,7 +48,7 @@
(try
(migratus/migrate creds)
(catch
org.postgresql.util.PSQLException e
Exception e
(mulog/log ::connecting-db-failed
:error-message (ex-message e)
:error-data (ex-data e)))))
@@ -58,21 +58,27 @@
:db {:datasource (donut/ref [:db :db-connection])}}}})

(def db-connection
"Database connection component.
Uses HikariCP to create and manage a connection pool."
"Database connection component."
#::donut{:start (fn create-db-connection
[{{:keys [options]} ::donut/config}]
[{{:keys [db-spec]} ::donut/config}]
(mulog/log ::creating-db-connection
:db-spec db-spec
:local-time (java.time.LocalDateTime/now))
(hikari/make-datasource options))
(let [db-connection (jdbc/get-datasource db-spec)]
;; NOTE: https://kerkour.com/sqlite-for-servers
(jdbc/execute! db-connection ["PRAGMA journal_mode = WAL;"])
(jdbc/execute! db-connection ["PRAGMA synchronous = NORMAL;"])
(jdbc/execute! db-connection ["PRAGMA cache_size = 1000000000;"])
(jdbc/execute! db-connection ["PRAGMA foreign_keys = true;"])
(jdbc/execute! db-connection ["PRAGMA temp_store = memory;"])
db-connection))

:stop (fn closing-db-connection
[{::donut/keys [instance]}]
[{::donut/keys [_instance]}]
(mulog/log ::closing-db-connection
:local-time (java.time.LocalDateTime/now))
(hikari/close-datasource instance))
:local-time (java.time.LocalDateTime/now)))

:config {:options (donut/ref [:env :datasource-options])}})
:config {:db-spec {:dbtype "sqlite" :dbname "thearqivist_db"}}})

(def http-server
"Webserver component using http-kit"
@@ -129,15 +135,7 @@

:port (parse-long (or (System/getenv "PORT")
(System/getenv "ARQIVIST_PORT")
"8989"))

:datasource-options {;; NOTE: No idea what each of these actually do, should learn :D
:maximum-pool-size 5
:minimum-idle 2
:idle-timeout 12000
:max-lifetime 300000
:jdbc-url (or (System/getenv "DATABASE_URL")
"jdbc:postgresql://localhost/arqivist?user=arqivist&password=arqivist")}}
"8989"))}

;; Event logger
:event-log {:publisher event-logger}
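The PRAGMA block follows the "SQLite for servers" recommendations linked in the NOTE: WAL lets readers proceed alongside a single writer, synchronous = NORMAL is considered safe under WAL, and foreign-key enforcement is off by default in SQLite so it must be enabled explicitly. One hedged caveat: journal_mode is persisted in the database file itself, while the other pragmas are per-connection settings. A sketch for reading a setting back:

;; Illustrative sketch, not part of this commit: a PRAGMA without an
;; assignment reads the current value; WAL sticks because it is stored
;; in the database file.
(require '[next.jdbc :as jdbc])

(jdbc/execute-one! ds ["PRAGMA journal_mode;"])
;; => e.g. {:journal_mode "wal"}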