Compare commits

...

6 Commits

SHA1        Message  Date
05a0b32d9b  refactor md parser, TODO: parse_str  2025-11-14 02:28:22 +02:00
            Checks: Test the running changes / Test (push) failing after 44s
ddd6d072f9  added worspace bacon.toml  2025-11-14 02:21:17 +02:00
9d12ad5050  testing on dev push, half-baked deployment workflow  2025-11-14 02:19:16 +02:00
4a7f5eabe9  added lock files  2025-11-13 14:30:42 +02:00
            Checks: Test the running changes / Test (push) successful in 39s
29688e07e4  test on master push  2025-11-10 12:07:09 +02:00
            Checks: Test the running changes / Test (push) successful in 38s
c10ad0f6a6  added test workflow  2025-11-10 00:15:26 +02:00
17 changed files with 690 additions and 45 deletions


@@ -0,0 +1,22 @@
name: Test the running changes
on:
  push:
    branches: [ "dev" ]
  pull_request:
    branches: [ "master" ]
env:
  CARGO_TERM_COLOR: always
jobs:
  test:
    name: Test
    runs-on: rust-latest
    steps:
      - run: apt-get update -y && apt-get install nodejs -y
      - uses: actions/checkout@v4
      - run: cargo build --verbose
      - run: cargo clippy -- -D warnings
      - run: cargo test --verbose


@@ -0,0 +1,32 @@
name: Build and deploy Docker image
on:
  push:
    branches:
      - master
env:
  CARGO_TERM_COLOR: always
jobs:
  deploy:
    name: Deploy
    runs-on: rust-latest
    steps:
      - run: apt-get update -y && apt-get install nodejs -y
      - uses: actions/checkout@v4
      - run: cargo build --release
      - uses: docker/setup-buildx-action@v3
        with:
          config-inline: |
            [registry."gitea-http.apps.svc.cluster.local:3000"]
              http = true
              insecure = true
      - uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: "gitea-http.apps.svc.cluster.local:3000".....

.gitignore (vendored): 2 changed lines

@@ -3,5 +3,3 @@ web/
 target/
 result
-Cargo.lock
-flake.lock

Cargo.lock (generated, new file): 22 lines

@@ -0,0 +1,22 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "cracked_md"
version = "0.1.0"
dependencies = [
"fstools",
]
[[package]]
name = "fstools"
version = "0.1.0"
[[package]]
name = "stdsrv"
version = "0.1.0"
dependencies = [
"cracked_md",
"fstools",
]

bacon.toml (new file): 129 lines

@@ -0,0 +1,129 @@
# This is a configuration file for the bacon tool
#
# Complete help on configuration: https://dystroy.org/bacon/config/
#
# You may check the current default at
# https://github.com/Canop/bacon/blob/main/defaults/default-bacon.toml
default_job = "check"
env.CARGO_TERM_COLOR = "always"
[jobs.check]
command = ["cargo", "check"]
need_stdout = false
[jobs.check-all]
command = ["cargo", "check", "--all-targets"]
need_stdout = false
# Run clippy on the default target
[jobs.clippy]
command = ["cargo", "clippy"]
need_stdout = false
# Run clippy on all targets
# To disable some lints, you may change the job this way:
# [jobs.clippy-all]
# command = [
# "cargo", "clippy",
# "--all-targets",
# "--",
# "-A", "clippy::bool_to_int_with_if",
# "-A", "clippy::collapsible_if",
# "-A", "clippy::derive_partial_eq_without_eq",
# ]
# need_stdout = false
[jobs.clippy-all]
command = ["cargo", "clippy", "--all-targets"]
need_stdout = false
# Run clippy in pedantic mode
# The 'dismiss' feature may come handy
[jobs.pedantic]
command = [
"cargo", "clippy",
"--",
"-W", "clippy::pedantic",
]
need_stdout = false
# This job lets you run
# - all tests: bacon test
# - a specific test: bacon test -- config::test_default_files
# - the tests of a package: bacon test -- -- -p config
[jobs.test]
command = [
"cargo", "nextest", "run",
"--hide-progress-bar", "--failure-output", "final"
]
need_stdout = true
analyzer = "nextest"
[jobs.nextest]
command = [
"cargo", "nextest", "run",
"--hide-progress-bar", "--failure-output", "final"
]
need_stdout = true
analyzer = "nextest"
[jobs.doc]
command = ["cargo", "doc", "--no-deps"]
need_stdout = false
# If the doc compiles, then it opens in your browser and bacon switches
# to the previous job
[jobs.doc-open]
command = ["cargo", "doc", "--no-deps", "--open"]
need_stdout = false
on_success = "back" # so that we don't open the browser at each change
# You can run your application and have the result displayed in bacon,
# if it makes sense for this crate.
[jobs.run]
command = [
"cargo", "run",
# put launch parameters for your program behind a `--` separator
]
need_stdout = true
allow_warnings = true
background = true
# Run your long-running application (eg server) and have the result displayed in bacon.
# For programs that never stop (eg a server), `background` is set to false
# to have the cargo run output immediately displayed instead of waiting for
# program's end.
# 'on_change_strategy' is set to `kill_then_restart` to have your program restart
# on every change (an alternative would be to use the 'F5' key manually in bacon).
# If you often use this job, it makes sense to override the 'r' key by adding
# a binding `r = job:run-long` at the end of this file .
# A custom kill command such as the one suggested below is frequently needed to kill
# long running programs (uncomment it if you need it)
[jobs.run-long]
command = [
"cargo", "run",
# put launch parameters for your program behind a `--` separator
]
need_stdout = true
allow_warnings = true
background = false
on_change_strategy = "kill_then_restart"
# kill = ["pkill", "-TERM", "-P"]
# This parameterized job runs the example of your choice, as soon
# as the code compiles.
# Call it as
# bacon ex -- my-example
[jobs.ex]
command = ["cargo", "run", "--example"]
need_stdout = true
allow_warnings = true
# You may define here keybindings that would be specific to
# a project, for example a shortcut to launch a specific job.
# Shortcuts to internal functions (scrolling, toggling, etc.)
# should go in your personal global prefs.toml file instead.
[keybindings]
# alt-m = "job:my-job"
c = "job:clippy-all" # comment this to have 'c' run clippy on only the default target
p = "job:pedantic"

cracked_md/Cargo.lock (generated, new file): 14 lines

@@ -0,0 +1,14 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "cracked_md"
version = "0.1.0"
dependencies = [
"fstools",
]
[[package]]
name = "fstools"
version = "0.1.0"


@@ -171,8 +171,11 @@ mod convert_md_to_html_test {
#[test]
fn single_header() {
let md = "# Header 1";
-let html = parse(md).to_html();
+let ast = match parse(md) {
+Ok(a) => a,
+Err(e) => panic!("{}", e),
+};
+let html = ast.to_html();
assert_eq!(
html,
@@ -180,11 +183,22 @@ mod convert_md_to_html_test {
);
}
#[test]
fn single_header_wrong_format() {
let md = "#Whoops";
let ast = parse(md);
assert!(ast.is_err());
}
#[test]
fn nested_bold_headers_and_nested_code_paragraph() {
let md = "# *Bold* header 1\n## Header 2\nrun `sudo rm -rf /` on your computer";
-let html = parse(md).to_html();
+let ast = match parse(md) {
+Ok(a) => a,
+Err(e) => panic!("{}", e),
+};
+let html = ast.to_html();
assert_eq!(
html,
@@ -192,3 +206,21 @@ mod convert_md_to_html_test {
);
}
}
#[cfg(test)]
mod parse_real_md {
use std::fs;
use crate::parser::parse;
#[test]
fn go() {
let file = "./test.md";
let md = fs::read_to_string(file).expect("reading ./test.md failed");
let _ast = match parse(&md).map_err(|e| e.set_file(file.into())) {
Ok(a) => a,
Err(e) => panic!("{}", e),
};
}
}
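The new error type is exercised indirectly by parse_real_md through set_file. A small illustrative test of the Display format defined in lib.rs (a hypothetical test name and assertion, not part of this diff; it assumes MdParseError is imported):

#[test]
fn error_message_includes_file_and_line() {
    // from_line + set_file compose the location info that the Display impl prints
    let e = MdParseError::from_line(2, "<space> after #", "no <space>").set_file("./test.md".into());
    assert_eq!(
        e.to_string(),
        "Parse error in './test.md' on line 2: expected '<space> after #', got 'no <space>'"
    );
}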


@@ -1,4 +1,4 @@
-#![deny(dead_code, unused_imports)]
+#![deny(unused_imports)]
use fstools::crawl_fs;
use parser::parse;
@@ -11,9 +11,91 @@ use std::{
use to_html::ToHtml;
pub mod ast;
mod parse_trait;
pub mod parser;
pub mod to_html;
#[derive(Debug)]
pub struct MdParseError {
file: Option<PathBuf>,
line: Option<usize>,
//col: Option<usize>,
expected: String,
got: String,
}
impl MdParseError {
pub fn new(expected: impl ToString, got: impl ToString) -> Self {
Self {
file: None,
line: None,
//col: None,
expected: expected.to_string(),
got: got.to_string(),
}
}
pub fn from_line(line: usize, expected: impl ToString, got: impl ToString) -> Self {
Self {
file: None,
line: Some(line),
//col: None,
expected: expected.to_string(),
got: got.to_string(),
}
}
/*
pub fn from_col(col: usize, expected: impl ToString, got: impl ToString) -> Self {
Self {
file: None,
line: None,
col: Some(col),
expected: expected.to_string(),
got: got.to_string(),
}
}
*/
pub fn set_line(self, line: usize) -> Self {
Self {
file: self.file,
line: Some(line),
//col: self.col,
expected: self.expected,
got: self.got,
}
}
pub fn set_file(self, file: PathBuf) -> Self {
Self {
file: Some(file),
line: self.line,
//col: self.col,
expected: self.expected,
got: self.got,
}
}
}
impl Display for MdParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// no error message :/
let file = self.file.clone().unwrap_or("<unknown>".into());
write!(
f,
"Parse error in '{}' on line {}: expected '{}', got '{}'",
file.display(),
self.line.unwrap_or(0),
//self.col.unwrap_or(0),
self.expected,
self.got
)
}
}
impl std::error::Error for MdParseError {}
#[derive(Debug)]
pub enum Error {
OutDirIsNotEmpty,
@@ -24,6 +106,7 @@ pub enum Error {
FileWrite,
FileCreate,
DirCreate,
Parse(MdParseError),
}
impl Display for Error {
@@ -32,6 +115,12 @@ impl Display for Error {
}
}
impl From<MdParseError> for Error {
fn from(value: MdParseError) -> Self {
Error::Parse(value)
}
}
impl std::error::Error for Error {}
type Result<T> = std::result::Result<T, crate::Error>;
@@ -44,7 +133,7 @@ pub fn generate(indir: &PathBuf, outdir: &PathBuf, force: bool) -> Result<()> {
// read and parse md file
let content = fs::read_to_string(&fullpath).map_err(|_e| Error::FileRead)?;
-let html = parse(&content).to_html();
+let html = parse(&content)?.to_html();
// write html data to file
let mut newpath = outdir.to_owned();
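Taken together, the new plumbing lets a markdown parse failure flow into the crate's Error type through the From impl and the ? operator. A minimal sketch of that flow (render is a hypothetical helper, not part of the diff; it assumes parse, ToHtml, and the crate's Result alias are in scope as in lib.rs):

fn render(md: &str) -> Result<String> {
    // MdParseError converts into Error::Parse via the new From impl when `?` propagates it
    Ok(parse(md)?.to_html())
}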


@@ -0,0 +1,111 @@
/*
use crate::MdParseError;
pub type Pattern<T> = Vec<PatternToken<T>>;
pub enum PatternToken<T> {
Once(T),
Optional(T),
AtLeastOnce(T),
NTimes(T),
}
/// panics: on invalid pattern
pub fn char_pattern(s: &str) -> Pattern<char> {
let mut s_chars = s.chars().peekable();
let mut pat: Pattern<char> = Vec::new();
while let Some(token) = s_chars.next() {
pat.push(if let Some(&next) = s_chars.peek() {
match next {
'?' => {
s_chars.next().unwrap();
PatternToken::Optional(token)
}
'+' => {
s_chars.next().unwrap();
PatternToken::AtLeastOnce(token)
}
'*' => {
s_chars.next().unwrap();
PatternToken::NTimes(token)
}
_ => PatternToken::Once(token),
}
} else {
PatternToken::Once(token)
});
}
pat
}
pub trait ParsePattern: Iterator + Clone {
fn parse<T>(&mut self, expect: Pattern<T>) -> Result<Vec<Self::Item>, MdParseError>
where
T: PartialEq<<Self as Iterator>::Item>,
{
let mut consumed = Vec::new();
let mut cloned = self.clone();
for pat_token in expect {
match pat_token {
PatternToken::Once(c) => {
if !cloned.next().map(|v| c == v).unwrap_or(false) {
return None;
}
}
PatternToken::Optional(c) => if cloned.peek().map(|v| c == *v).unwrap_or(false) {},
}
}
*self = cloned;
Some(consumed)
}
}
*/
pub trait Parse: Iterator {
fn follows(&mut self, token: char) -> bool;
fn parse_token(&mut self, token: char) -> bool {
if self.follows(token) {
let _ = self.next();
true
} else {
false
}
}
fn parse_str(&mut self, _tokens: &str) -> bool {
todo!()
}
}
impl Parse for std::iter::Peekable<std::str::Chars<'_>> {
fn follows(&mut self, token: char) -> bool {
self.peek().map(|c| c == &token).unwrap_or(false)
}
}
impl Parse for std::iter::Peekable<std::iter::Enumerate<std::str::Chars<'_>>> {
    fn follows(&mut self, token: char) -> bool {
        // peek on the cursor directly; wrapping `self` in another Peekable here
        // would pull a character out of the underlying iterator and lose it
        self.peek().map(|&(_i, c)| c == token).unwrap_or(false)
    }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn chars_parse_follows_double() {
let mut c = "abc".chars().peekable();
assert!(c.follows('a'));
assert!(c.follows('a'));
}
}
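parse_str is still a todo!() here, matching the TODO in the commit message. A minimal sketch of how it could behave for the Peekable<Chars> cursor, assuming a clone-probe-commit strategy in the spirit of the commented-out ParsePattern draft above (try_parse_str is a hypothetical free function, not part of the diff):

fn try_parse_str(chars: &mut std::iter::Peekable<std::str::Chars<'_>>, tokens: &str) -> bool {
    // probe on a cheap clone of the cursor so a failed match consumes nothing
    let mut probe = chars.clone();
    if tokens.chars().all(|t| probe.next() == Some(t)) {
        *chars = probe; // commit: advance past the matched prefix
        true
    } else {
        false
    }
}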


@@ -3,14 +3,15 @@ mod inline;
use block::parse_blocks;
-use crate::ast::Document;
+use crate::{MdParseError, ast::Document};
-pub fn parse(s: &str) -> Document {
-    Document {
-        blocks: parse_blocks(s),
-    }
+pub fn parse(s: &str) -> Result<Document, MdParseError> {
+    Ok(Document {
+        blocks: parse_blocks(s)?,
+    })
 }
+/*
#[cfg(test)]
mod test {
use crate::ast::*;
@@ -145,3 +146,4 @@ mod test {
);
}
}
+// */


@@ -1,22 +1,106 @@
-use crate::ast::Block;
+use super::inline::parse_inlines;
+use crate::{MdParseError, ast::Block};
-pub fn parse_blocks(input: &str) -> Vec<Block> {
+use crate::parse_trait::Parse;
+pub fn parse_blocks(input: &str) -> Result<Vec<Block>, MdParseError> {
let mut blocks = Vec::new();
let mut lines = input.lines().enumerate().peekable();
while let Some((i, line)) = lines.next() {
let mut line_chars = line.chars().peekable();
// header
let mut heading_level = 0;
while line_chars.parse_token('#') {
if heading_level < 6 {
heading_level += 1;
}
}
if heading_level > 0 {
if !line_chars.parse_token(' ') {
Err(MdParseError::from_line(
i + 1,
"<space> after #",
"no <space>",
))?;
}
let line_content: String = line_chars.collect();
blocks.push(Block::Heading {
level: heading_level,
content: parse_inlines(&line_content)?,
});
continue;
}
// quote TODO
/*
if line_chars.parse_str("> ") {
let content: String = line_chars.collect();
let quote_blocks = parse_blocks(&content).map_err(|e| e.set_line(i + 1))?;
blocks.push(Block::Quote(quote_blocks));
continue;
}
*/
// code
if line_chars.parse_str("```") {
let lang_line: String = line_chars.collect();
let lang = if lang_line.is_empty() {
None
} else {
Some(lang_line)
};
let mut code = String::new();
for (j, line) in lines.by_ref() {
let mut code_line_chars = line.chars().peekable();
// code block end
if code_line_chars.parse_str("```") {
let remaining: String = code_line_chars.collect();
if remaining.is_empty() {
blocks.push(Block::Code {
language: lang,
content: code,
});
break;
} else {
Err(MdParseError::from_line(
j + 1,
"```",
format!("```{}", remaining),
))?;
}
} else {
code.push_str(line);
code.push('\n');
}
}
Err(MdParseError::from_line(i + 1, "a terminating '```'", ""))?;
}
// lists TODO
}
Ok(blocks)
}
/*
pub fn parse_blocks(input: &str) -> Result<Vec<Block>, MdParseError> {
let mut blocks = Vec::new();
let mut lines = input.lines().peekable();
let mut lines = input.lines().enumerate().peekable();
while let Some(line) = lines.next() {
while let Some((i, line)) = lines.next() {
if line.starts_with("#") {
let level = line.chars().take_while(|&c| c == '#').count() as u8;
let text = line[level as usize..].trim();
blocks.push(Block::Heading {
level,
content: parse_inlines(text),
content: parse_inlines(text).map_err(|e| e.set_line(i + 1))?,
});
} else if let Some(quote_body) = line.strip_prefix(">") {
let quote_blocks = parse_blocks(quote_body);
let quote_blocks = parse_blocks(quote_body).map_err(|e| e.set_line(i + 1))?;
blocks.push(Block::Quote(quote_blocks));
} else if line.starts_with("```") {
let lang_line = line.strip_prefix("```").unwrap().to_string();
@@ -26,8 +110,16 @@ pub fn parse_blocks(input: &str) -> Vec<Block> {
Some(lang_line)
};
let mut code = String::new();
while lines.peek().is_some() && !lines.peek().unwrap().starts_with("```") {
code.push_str(&format!("{}\n", lines.next().unwrap()));
while lines.peek().is_some()
&& !lines
.peek()
.ok_or(MdParseError::from_line(i + 1, "a line", ""))?
.1
.starts_with("```")
{
if let Some((_i, l)) = lines.next() {
code.push_str(&format!("{}\n", l));
}
}
lines.next();
blocks.push(Block::Code {
@@ -37,9 +129,12 @@ pub fn parse_blocks(input: &str) -> Vec<Block> {
} else if line.trim().is_empty() {
continue;
} else {
blocks.push(Block::Paragraph(parse_inlines(line)));
blocks.push(Block::Paragraph(
parse_inlines(line).map_err(|e| e.set_line(i + 1))?,
));
}
}
blocks
Ok(blocks)
}
*/
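A small usage sketch of the new block parser above (a hypothetical test, not part of the diff; it assumes parse_blocks and Block are importable here): heading markers are consumed one by one with the level clamped at 6, and a missing space after the run of '#' is reported as a parse error.

#[test]
fn heading_level_is_clamped_and_space_is_required() {
    // seven '#' characters still produce a level-6 heading
    let blocks = parse_blocks("####### Too deep").expect("should parse");
    assert!(matches!(blocks[0], Block::Heading { level: 6, .. }));
    // '#' without a following space is rejected, as in the '#Whoops' test above
    assert!(parse_blocks("#Whoops").is_err());
}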


@@ -1,61 +1,65 @@
-use crate::ast::Inline;
+use crate::{MdParseError, ast::Inline};

-pub fn parse_inlines(input: &str) -> Vec<Inline> {
+pub fn parse_inlines(input: &str) -> Result<Vec<Inline>, MdParseError> {
     let mut inlines = Vec::new();
     let mut chars = input.chars().peekable();
     while let Some(c) = chars.next() {
         match c {
             '*' => {
-                let inner = collect_until(&mut chars, '*');
-                inlines.push(Inline::Bold(parse_inlines(&inner)));
+                let inner = collect_until(&mut chars, '*')?;
+                inlines.push(Inline::Bold(parse_inlines(&inner)?));
             }
             '_' => {
-                let inner = collect_until(&mut chars, '_');
-                inlines.push(Inline::Italic(parse_inlines(&inner)));
+                let inner = collect_until(&mut chars, '_')?;
+                inlines.push(Inline::Italic(parse_inlines(&inner)?));
             }
             '`' => {
-                let code = collect_until(&mut chars, '`');
+                let code = collect_until(&mut chars, '`')?;
                 inlines.push(Inline::Code(code));
             }
             '[' => {
-                let text = collect_until(&mut chars, ']');
-                if chars.next() == Some('(') {
-                    let href = collect_until(&mut chars, ')');
+                let text = collect_until(&mut chars, ']')?;
+                if let Some('(') = chars.next() {
+                    let href = collect_until(&mut chars, ')')?;
                     inlines.push(Inline::Link {
-                        text: parse_inlines(&text),
+                        text: parse_inlines(&text)?,
                         href,
                     });
+                } else {
+                    Err(MdParseError::new(
+                        "(<href>)",
+                        chars.next().unwrap_or_default(),
+                    ))?;
                 }
             }
             _ => {
                 let mut text = String::new();
                 text.push(c);
-                while let Some(&nc) = chars.peek() {
+                while let Some(nc) = chars.next() {
                     if matches!(nc, '*' | '_' | '`' | '[') {
                         break;
                     }
-                    text.push(chars.next().unwrap());
+                    text.push(nc);
                 }
                 inlines.push(Inline::Text(text));
             }
         }
     }
-    inlines
+    Ok(inlines)
 }

 fn collect_until<I: Iterator<Item = char>>(
     chars: &mut std::iter::Peekable<I>,
     end: char,
-) -> String {
+) -> Result<String, MdParseError> {
     let mut s = String::new();
-    while let Some(&c) = chars.peek() {
+    while let Some(c) = chars.next() {
         if c == end {
-            chars.next();
-            break;
+            return Ok(s);
         }
-        s.push(chars.next().unwrap());
+        s.push(c);
     }
-    s
+    Err(MdParseError::new(end, ""))
 }
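A small usage sketch of the now-fallible inline parser (a hypothetical test, not part of the diff; it assumes parse_inlines is in scope): an unterminated delimiter and a link bracket without a following (href) both surface as an MdParseError instead of being silently accepted.

#[test]
fn inline_parse_errors_surface() {
    assert!(parse_inlines("`code` and plain text").is_ok());
    // collect_until hits end of input without the closing backtick
    assert!(parse_inlines("`unterminated").is_err());
    // ']' is not followed by '(', so the new else branch errors
    assert!(parse_inlines("[label] with no href").is_err());
}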


@@ -1,4 +1,3 @@
pub trait ToHtml {
fn to_html(self) -> String;
}

cracked_md/test.md (new file): 7 lines

@@ -0,0 +1,7 @@
# Header *1kkkkkkkkkkkkkkkkkkkkkk*
this is some code: `abc
```code
oiajwefoijao089uaoisdjfoijasdfoijasdofij

flake.lock (generated, new file): 60 lines

@@ -0,0 +1,60 @@
{
"nodes": {
"flake-utlis": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1762596750,
"narHash": "sha256-rXXuz51Bq7DHBlfIjN7jO8Bu3du5TV+3DSADBX7/9YQ=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b6a8526db03f735b89dd5ff348f53f752e7ddc8e",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-unstable",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utlis": "flake-utlis",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

fstools/Cargo.lock (generated, new file): 7 lines

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "fstools"
version = "0.1.0"

stdsrv/Cargo.lock (generated, new file): 22 lines

@@ -0,0 +1,22 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "cracked_md"
version = "0.1.0"
dependencies = [
"fstools",
]
[[package]]
name = "fstools"
version = "0.1.0"
[[package]]
name = "stdsrv"
version = "0.1.0"
dependencies = [
"cracked_md",
"fstools",
]