feat: add base database management

Alexander Navarro 2025-05-07 14:36:26 -04:00
parent 1d5a517395
commit 1e3c235b78
11 changed files with 132 additions and 20 deletions

.idea/dataSources.xml (generated, new file, +12)

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
    <data-source source="LOCAL" name="db.sql" uuid="63174beb-6dc0-40c0-9b07-b6f1fa6a2b72">
      <driver-ref>sqlite.xerial</driver-ref>
      <synchronize>true</synchronize>
      <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
      <jdbc-url>jdbc:sqlite:$USER_HOME$/.local/share/readwise-bulk-upload/db.sql</jdbc-url>
      <working-dir>$ProjectFileDir$</working-dir>
    </data-source>
  </component>
</project>

.idea/sqlDataSources.xml (generated, new file, +41)

@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="DdlMappings">
    <mapping uuid="eedfdaf6-31da-4977-9d85-3305b34ce4b6" name="DDL Mapping">
      <data-sources db="63174beb-6dc0-40c0-9b07-b6f1fa6a2b72" ddl="3e504b62-07f8-4764-93df-2dc331b571e1" />
      <scope>
        <node negative="1">
          <node kind="schema" qname="main" />
          <node kind="database" qname="@" />
        </node>
      </scope>
    </mapping>
  </component>
  <component name="SqlDataSourceStorage">
    <option name="dataSources">
      <list>
        <State>
          <option name="id" value="3e504b62-07f8-4764-93df-2dc331b571e1" />
          <option name="name" value="Migrations" />
          <option name="dbmsName" value="SQLITE" />
          <option name="urls">
            <array>
              <option value="file://$PROJECT_DIR$/migrations" />
            </array>
          </option>
          <option name="outLayout" value="File per object with order.groovy" />
          <option name="scriptOptions">
            <map>
              <entry key="UseCompactDef" value="1" />
            </map>
          </option>
          <option name="scopes">
            <map>
              <entry key="$PROJECT_DIR$/migrations" value="" />
            </map>
          </option>
        </State>
      </list>
    </option>
  </component>
</project>

.idea/sqldialects.xml (generated, new file, +8)

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SqlDialectMappings">
    <file url="file://$PROJECT_DIR$/migrations" dialect="SQLite" />
    <file url="file://$PROJECT_DIR$/migrations/002_statuses.sql" dialect="SQLite" />
    <file url="PROJECT" dialect="SQLite" />
  </component>
</project>

Cargo.toml

@@ -7,7 +7,7 @@ edition = "2024"
 thiserror = "2.0.12"
 directories = "6.0.0"
 tokio = { version = "1.45.0", features = ["default", "rt", "rt-multi-thread", "macros"] }
-sqlx = { version = "0.8", features = [ "runtime-tokio", "sqlite", "chrono" ] }
+sqlx = { version = "0.8", features = [ "runtime-tokio", "sqlite", "chrono", "migrate" ] }
 clap = { version = "4.5.37", features = ["derive"] }
 serde = { version = "1.0.219", features = ["derive"] }
 chrono = {version = "0.4.41", features = ["serde"]}
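
The new `migrate` feature gates both the `sqlx::migrate!` macro used in the `get_database` change at the bottom of this commit and the runtime `sqlx::migrate::Migrator` API. A minimal sketch of the runtime alternative, in case embedding the migrations at compile time is not wanted (the helper name is illustrative, not part of the commit):

use std::path::Path;
use sqlx::SqlitePool;
use sqlx::migrate::{MigrateError, Migrator};

// Runtime alternative to sqlx::migrate!: read ./migrations from disk at
// startup instead of embedding the SQL files into the binary.
async fn run_migrations(pool: &SqlitePool) -> Result<(), MigrateError> {
    let migrator = Migrator::new(Path::new("./migrations")).await?;
    migrator.run(pool).await
}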

migrations/0001_jobs.sql (new file, +20)

@@ -0,0 +1,20 @@
create table jobs
(
    id integer
        constraint jobs_pk
            primary key autoincrement,
    task_id integer not null
        constraint jobs_tasks_id_fk
            references tasks,
    status_id integer not null
        constraint jobs_statuses_id_fk
            references statuses,
    output TEXT,
    started_at TEXT default CURRENT_TIMESTAMP not null,
    finished_at TEXT,
    "order" integer not null
);

create index jobs_task_id_order_index
    on jobs (task_id asc, "order" desc);
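
The (task_id asc, "order" desc) index makes "latest job for a task" lookups cheap. A minimal sketch of such a query, assuming a pool from get_database (the helper itself is hypothetical, not part of this commit):

use sqlx::{Row, SqlitePool};

// Hypothetical helper: fetch the output of the most recent job for a task,
// walking jobs_task_id_order_index instead of scanning the table.
async fn latest_job_output(pool: &SqlitePool, task_id: i64) -> sqlx::Result<Option<String>> {
    let row = sqlx::query(r#"select output from jobs where task_id = ?1 order by "order" desc limit 1"#)
        .bind(task_id)
        .fetch_optional(pool)
        .await?;
    Ok(row.and_then(|r| r.get::<Option<String>, _>("output")))
}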

migrations/002_statuses.sql (new file, +13)

@@ -0,0 +1,13 @@
create table statuses
(
    id integer
        constraint task_statuses_pk
            primary key autoincrement,
    name TEXT not null
);

insert into statuses (id, name)
values (1, 'Pending'),
       (2, 'In progress'),
       (3, 'Completed'),
       (4, 'Failed');
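
Since the status rows are seeded with fixed ids, the Rust side can mirror them as constants; a small sketch (this enum is illustrative, not part of the commit):

// Illustrative mirror of the seeded rows; the discriminants must stay in
// sync with the insert above.
#[derive(Clone, Copy, Debug)]
#[repr(i64)]
enum Status {
    Pending = 1,
    InProgress = 2,
    Completed = 3,
    Failed = 4,
}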

migrations/0003_tasks.sql (new file, +19)

@@ -0,0 +1,19 @@
create table tasks
(
    id integer not null
        constraint tasks_pk
            primary key autoincrement,
    payload_key ANY not null
        constraint tasks_payload_key
            unique,
    payload TEXT not null,
    status_id integer not null
        constraint tasks_task_statuses_id_fk
            references statuses,
    created_at TEXT default CURRENT_TIMESTAMP not null,
    updated_at TEXT
);

create unique index tasks_payload_key_uindex
    on tasks (payload_key);
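
The unique payload_key is what makes re-running an upload idempotent: inserting the same key twice can simply do nothing. A sketch of an enqueue helper under that assumption (the function is hypothetical; status_id 1 is the 'Pending' row seeded above):

use sqlx::SqlitePool;

// Hypothetical enqueue helper: the UNIQUE constraint on payload_key lets a
// repeated import skip rows it has already queued.
async fn enqueue_task(pool: &SqlitePool, payload_key: &str, payload: &str) -> sqlx::Result<()> {
    sqlx::query(
        "insert into tasks (payload_key, payload, status_id) values (?1, ?2, 1)
         on conflict (payload_key) do nothing",
    )
    .bind(payload_key)
    .bind(payload)
    .execute(pool)
    .await?;
    Ok(())
}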

@@ -13,7 +13,10 @@ pub enum Error {
     #[error(transparent)]
     Sqlx(#[from] sqlx::Error),
+    #[error(transparent)]
+    Migration(#[from] sqlx::migrate::MigrateError),
     #[error(transparent)]
     Io(#[from] tokio::io::Error),

src/main.rs

@@ -1,7 +1,7 @@
 use std::fs::File;
 use clap::Parser;
 use readwise_bulk_upload::config::Args;
-use readwise_bulk_upload::readwise::Document;
+use readwise_bulk_upload::readwise::DocumentPayload;
 use readwise_bulk_upload::sql::get_database;
 use readwise_bulk_upload::{Error, Result};

@@ -15,7 +15,7 @@ async fn main() -> Result<()> {
         args.path().display()
     )))?;
-    let documents: Vec<Document> = serde_json::from_reader(file)?;
+    let documents: Vec<DocumentPayload> = serde_json::from_reader(file)?;
     let db = get_database().await?;

src/readwise.rs

@@ -1,29 +1,23 @@
-use serde::{de, Deserialize, Deserializer};
 use chrono::{DateTime, Local};
+use serde::{Deserialize, Deserializer, de};
 use serde_json::Value;

 #[derive(Deserialize)]
-pub struct Document {
+pub struct DocumentPayload {
-    #[serde(deserialize_with = "str_to_int")]
-    id: u64,
-    title: String ,
-    note: Option<String>,
-    excerpt: Option<String>,
+    title: String,
+    summary: Option<String>,
     url: String,
-    folder: String,
     #[serde(deserialize_with = "single_or_vec")]
     tags: Vec<String>,
-    created: DateTime<Local>,
-    cover: Option<String>,
-    #[serde(deserialize_with = "str_to_bool")]
-    favorite: bool
+    published_date: DateTime<Local>,
+    location: String,
 }

 fn str_to_int<'de, D: Deserializer<'de>>(deserializer: D) -> Result<u64, D::Error> {
     Ok(match Value::deserialize(deserializer)? {
         Value::String(s) => s.parse().map_err(de::Error::custom)?,
         Value::Number(num) => num.as_u64().ok_or(de::Error::custom("Invalid number"))?,
-        _ => return Err(de::Error::custom("wrong type"))
+        _ => return Err(de::Error::custom("wrong type")),
     })
 }

@@ -31,15 +25,15 @@ fn str_to_bool<'de, D: Deserializer<'de>>(deserializer: D) -> Result<bool, D::Error> {
     Ok(match Value::deserialize(deserializer)? {
         Value::String(s) => s.parse().map_err(de::Error::custom)?,
         Value::Bool(b) => b,
-        _ => return Err(de::Error::custom("wrong type"))
+        _ => return Err(de::Error::custom("wrong type")),
     })
 }

 fn single_or_vec<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<String>, D::Error> {
     Ok(match Value::deserialize(deserializer)? {
-        Value::String(s) => vec!(s.parse().map_err(de::Error::custom)?),
+        Value::String(s) => vec![s.parse().map_err(de::Error::custom)?],
         Value::Array(arr) => arr.into_iter().map(|a| a.to_string()).collect(),
         Value::Null => Vec::new(),
-        _ => return Err(de::Error::custom("wrong type"))
+        _ => return Err(de::Error::custom("wrong type")),
     })
 }
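
For reference, a made-up input that parses into the new shape: `tags` exercises the string branch of single_or_vec, and published_date must be an RFC 3339 timestamp for chrono's DateTime<Local> deserializer. Note that the Value::Array arm above converts elements with Value::to_string, which keeps the JSON quotes around string elements; matching Value::String per element would strip them, if that is unintended.

use readwise_bulk_upload::readwise::DocumentPayload;

fn main() -> serde_json::Result<()> {
    // Field values are invented; only the shape matters here.
    let json = r#"[{
        "title": "Example article",
        "summary": null,
        "url": "https://example.com/post",
        "tags": "rust",
        "published_date": "2025-05-07T14:36:26-04:00",
        "location": "archive"
    }]"#;
    let documents: Vec<DocumentPayload> = serde_json::from_str(json)?;
    assert_eq!(documents.len(), 1);
    Ok(())
}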

src/sql.rs

@@ -18,6 +18,8 @@ pub async fn get_database() -> crate::Result<SqlitePool> {
         .journal_mode(SqliteJournalMode::Wal);
     let pool = SqlitePool::connect_with(opts).await?;
+    sqlx::migrate!("./migrations").run(&pool).await?;
     Ok(pool)
 }
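
sqlx::migrate! embeds the ./migrations directory into the binary at compile time and records applied versions in an _sqlx_migrations table, so running it on every startup is idempotent; the new Migration variant in the Error enum is what lets the ? here propagate MigrateError. A minimal smoke-test sketch under those assumptions:

use readwise_bulk_upload::sql::get_database;

// Sketch: after get_database, the schema from ./migrations should exist
// and the statuses seed rows should be present.
#[tokio::main]
async fn main() -> readwise_bulk_upload::Result<()> {
    let db = get_database().await?;
    let statuses: i64 = sqlx::query_scalar("select count(*) from statuses")
        .fetch_one(&db)
        .await?;
    assert_eq!(statuses, 4); // Pending, In progress, Completed, Failed
    Ok(())
}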