initial commit

This commit is contained in:
yggverse 2026-03-19 04:56:49 +02:00
parent 7821fbb237
commit 8539bffeb9
7 changed files with 1280 additions and 0 deletions

1
.gitignore vendored Normal file
View file

@ -0,0 +1 @@
/target

1002
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

17
Cargo.toml Normal file
View file

@ -0,0 +1,17 @@
[package]
name = "flarumdown"
version = "0.1.0"
edition = "2024"
license = "MIT"
readme = "README.md"
description = "Flarum is down - read as Markdown"
keywords = ["archive", "conversion", "backup", "static", "dump"]
categories = ["command-line-utilities", "parsing", "text-processing", "value-formatting"]
repository = "https://github.com/YGGverse/flarumdown"
[dependencies]
anyhow = "1.0.102"
chrono = "0.4.44"
clap = { version = "4.6.0", features = ["derive"] }
html-to-markdown-rs = "2.28.2"
rusqlite = { version = "0.39.0", features = ["chrono"]}

View file

@ -1,2 +1,8 @@
# flarumdown
Flarum is down - read as Markdown
CLI tool for Flarum v2 that exports the public database into Markdown format.
> [!IMPORTANT]
> In development

19
src/config.rs Normal file
View file

@ -0,0 +1,19 @@
use clap::Parser;
use std::path::PathBuf;
// Command-line arguments, parsed by clap's derive macro.
// NOTE: the `///` doc comments below are emitted verbatim as the
// generated `--help` text, so they are deliberately left untouched.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
pub struct Config {
    /// Path to database
    #[arg(short, long)]
    pub source: PathBuf,
    /// Path to export markdown
    #[arg(short, long)]
    pub target: PathBuf,
    /// Collect discussions with given tag slug only
    /// * keep empty to export all
    #[arg(short, long)]
    pub filter_tag: Vec<String>,
}

104
src/database.rs Normal file
View file

@ -0,0 +1,104 @@
use chrono::{DateTime, Utc};
use rusqlite::{Connection, Error};
use std::path::PathBuf;
// Minimal projection of a row from Flarum's `users` table,
// used to resolve post authors by id.
pub struct User {
    pub id: i64,
    pub username: String,
}
// Minimal projection of a row from Flarum's `tags` table.
// Only the slug is needed for `--filter-tag` matching.
pub struct Tag {
    pub id: i64,
    //pub name: String,
    pub slug: String,
}
// Discussion header row; its posts are fetched separately
// via `Database::posts`.
pub struct Discussion {
    pub id: i64,
    pub user_id: i64,       // discussion starter's user id
    pub first_post_id: i64, // id of the opening post
    pub created_at: DateTime<Utc>,
    pub title: String,
    //pub slug: String,
}
// Single post row within a discussion.
pub struct Post {
    pub id: i64,
    pub user_id: i64,
    pub created_at: DateTime<Utc>,
    pub edited_at: Option<DateTime<Utc>>, // None when the post was never edited
    pub content: String, // stored markup; converted to Markdown downstream
}
pub struct Database(Connection);
impl Database {
    /// Open the Flarum SQLite database at `path`.
    pub fn connect(path: PathBuf) -> Result<Self, Error> {
        Ok(Self(Connection::open(path)?))
    }

    /// Load every user (id + username), used to resolve post authors.
    pub fn users(&mut self) -> Result<Vec<User>, Error> {
        self.0
            .prepare("SELECT `id`, `username` FROM `users`")?
            .query_map([], |row| {
                Ok(User {
                    id: row.get(0)?,
                    username: row.get(1)?,
                })
            })?
            .collect()
    }

    /// Load every tag (id + slug).
    pub fn tags(&mut self) -> Result<Vec<Tag>, Error> {
        // `name` was previously selected but never read; fetch only the
        // columns that are actually used.
        self.0
            .prepare("SELECT `id`, `slug` FROM `tags`")?
            .query_map([], |row| {
                Ok(Tag {
                    id: row.get(0)?,
                    slug: row.get(1)?,
                })
            })?
            .collect()
    }

    /// Load all publicly visible discussions:
    /// not private, approved, and not hidden by moderation.
    pub fn discussions(&mut self) -> Result<Vec<Discussion>, Error> {
        // `slug` was previously selected but never read; fetch only the
        // columns that are actually used.
        self.0
            .prepare(
                "SELECT `id`, `user_id`, `first_post_id`, `created_at`, `title`
                 FROM `discussions`
                 WHERE `is_private` <> 1 AND `is_approved` <> 0 AND `hidden_at` IS NULL",
            )?
            .query_map([], |row| {
                Ok(Discussion {
                    id: row.get(0)?,
                    user_id: row.get(1)?,
                    first_post_id: row.get(2)?,
                    created_at: row.get(3)?,
                    title: row.get(4)?,
                })
            })?
            .collect()
    }

    /// Tag ids attached to the given discussion
    /// (via the `discussion_tag` many-to-many join table).
    pub fn discussion_tag_ids(&mut self, discussion_id: i64) -> Result<Vec<i64>, Error> {
        self.0
            .prepare("SELECT `tag_id` FROM `discussion_tag` WHERE `discussion_id` = ?")?
            .query_map([discussion_id], |row| row.get(0))?
            .collect()
    }

    /// Publicly visible posts of a discussion, in display (`number`) order.
    pub fn posts(&mut self, discussion_id: i64) -> Result<Vec<Post>, Error> {
        self.0
            .prepare(
                "SELECT `id`, `user_id`, `created_at`, `edited_at`, `content`
                 FROM `posts`
                 WHERE `discussion_id` = ? AND `is_private` <> 1 AND `is_approved` <> 0 AND `hidden_at` IS NULL
                 ORDER BY `number` ASC",
            )?
            .query_map([discussion_id], |row| {
                Ok(Post {
                    id: row.get(0)?,
                    user_id: row.get(1)?,
                    created_at: row.get(2)?,
                    edited_at: row.get(3)?,
                    content: row.get(4)?,
                })
            })?
            .collect()
    }
}

131
src/main.rs Normal file
View file

@ -0,0 +1,131 @@
mod config;
mod database;
use anyhow::Result;
use chrono::{DateTime, Utc};
use clap::Parser;
use config::Config;
use database::Database;
use html_to_markdown_rs::convert;
use std::{
collections::HashMap,
fs::{File, create_dir_all, remove_dir_all},
io::Write,
path::PathBuf,
};
// Per-user data needed at render time; keyed by user id in a HashMap,
// so the id itself is not stored here.
pub struct User {
    pub username: String,
}
// In-memory copy of a post, decoupled from the database layer's type.
pub struct Post {
    pub id: i64,
    pub user_id: i64,
    pub created_at: DateTime<Utc>,
    pub edited_at: Option<DateTime<Utc>>, // None when the post was never edited
    pub content: String, // converted to Markdown at render time
}
// A discussion together with its visible posts,
// ready to be rendered into one Markdown file.
pub struct Discussion {
    pub id: i64,
    pub created_at: DateTime<Utc>,
    pub title: String,
    pub posts: Vec<Post>,
}
fn main() -> Result<()> {
let config = Config::parse();
if config.target.exists() {
remove_dir_all(&config.target)?
}
create_dir_all(&config.target)?;
let mut db = Database::connect(config.source)?;
let mut users = HashMap::new();
for user in db.users()? {
assert!(
users
.insert(
user.id,
User {
username: user.username,
}
)
.is_none()
)
}
let mut tags = HashMap::new();
for tag in db.tags()? {
if !config.filter_tag.is_empty() && !config.filter_tag.contains(&tag.slug) {
continue;
}
assert!(tags.insert(tag.id, tag.slug).is_none())
}
let mut discussions = Vec::new();
for discussion in db.discussions()? {
if !db
.discussion_tag_ids(discussion.id)?
.iter()
.any(|id| tags.contains_key(id))
{
continue;
}
assert!(users.contains_key(&discussion.user_id));
let mut posts = Vec::new();
for post in db.posts(discussion.id)? {
posts.push(Post {
id: post.id,
user_id: post.user_id,
created_at: post.created_at,
edited_at: post.edited_at,
content: post.content,
})
}
assert_eq!(discussion.first_post_id, posts.first().unwrap().id);
discussions.push(Discussion {
id: discussion.id,
created_at: discussion.created_at,
title: discussion.title,
posts,
})
}
for discussion in discussions {
let mut file = File::create_new({
let mut path = PathBuf::from(&config.target);
path.push(format!("{}.md", discussion.id));
path
})?;
file.write_all(
{
let mut page = Vec::new();
page.push(format!("# {}", discussion.title));
page.push({
let mut content = Vec::new();
for post in discussion.posts {
content.push(format!(
"@{} / {}{}",
users.get(&post.user_id).unwrap().username,
post.created_at,
post.edited_at
.map(|edited_at| format!(" / {}", edited_at))
.unwrap_or_default()
));
content.push("---".into());
content.push(convert(&post.content, None)?)
}
content.join("\n")
});
page.join("\n")
}
.as_bytes(),
)?
}
Ok(())
}