Compare commits

13 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 94a6dcbba6 |  |
|  | d76d1afd9d |  |
|  | 7c895c8191 |  |
|  | 500ab56be0 |  |
|  | 91ff9a4caf |  |
|  | a3158ba034 |  |
|  | bf8f64b904 |  |
|  | 26803724c6 |  |
|  | ec6f32f0c1 |  |
|  | b70693edce |  |
|  | 0fe4b4132d |  |
|  | a26dfbd290 |  |
|  | 91a3dc4546 |  |

5 .gitignore (vendored)
@@ -16,3 +16,8 @@ target/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
+
+# ignorable files in migrations/
+**/__pycache__
+migrations/env/
+migrations/test_db.sqlite

7 Cargo.lock (generated, new file)
@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "outline-rs"
version = "0.1.0"

9 Cargo.toml (new file)
@@ -0,0 +1,9 @@
[package]
name = "outline-rs"
version = "0.1.0"
edition = "2024"

[dependencies]

[lib]

16 README.md
@@ -1,2 +1,16 @@
-# outline-rs
+# Outline-rs
 
+lightweight relational database-backed personal knowledge management outliner
+
+*Very* under construction!
+
+## Motivation
+
+Two PKM apps I've tried use Electron frontends, and I want a library that can
+back different frontends depending on the environment. I have been
+using a low-power ARM laptop recently and I can't be running a whole browser
+engine just to take notes.
+Also, containing all note data in a database will allow for extending with
+sync servers as a first-class experience rather than an afterthought.
+
+Using this as a project to learn rust as I go.

149 migrations/alembic.ini (new file)
@@ -0,0 +1,149 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .


# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the tzdata library which can be installed by adding
# `alembic[tz]` to the pip requirements.
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = sqlite+pysqlite:///test_db.sqlite


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1 migrations/alembic/README (new file)
@@ -0,0 +1 @@
Generic single-database configuration.

78 migrations/alembic/env.py (new file)
@@ -0,0 +1,78 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

28 migrations/alembic/script.py.mako (new file)
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}

48 migrations/alembic/versions/… (new file)
@@ -0,0 +1,48 @@
"""initial outline-rs schema

Revision ID: 040713502ba4
Revises:
Create Date: 2026-03-30 10:47:36.255978

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa



# revision identifiers, used by Alembic.
revision: str = '040713502ba4'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""

    # block table holds individual bullet points in the outline
    op.create_table(
        "blocks",
        sa.Column("id", sa.INTEGER, primary_key=True),
        sa.Column("first_child_id", sa.INTEGER, sa.schema.ForeignKey("blocks.id")),
        sa.Column("next_sibling_id", sa.INTEGER, sa.schema.ForeignKey("blocks.id")),
        sa.Column("page_id", sa.INTEGER, sa.schema.ForeignKey("pages.id"), index=True),
        sa.Column("content", sa.types.UnicodeText)
    )

    op.create_table(
        "pages",
        sa.Column("id", sa.INTEGER, primary_key=True),
        sa.Column("root_block_id", sa.INTEGER, sa.schema.ForeignKey("blocks.id")),
        sa.Column("title", sa.types.UnicodeText)
    )
    pass


def downgrade() -> None:
    """Downgrade schema."""
    op.drop_table("blocks")
    op.drop_table("pages")
    pass

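The first_child_id and next_sibling_id columns store the outline as a left-child, right-sibling tree: each block points to at most one child and one following sibling, and pages.root_block_id points at the block the outline hangs from. As an illustration only (hypothetical ids and content, not part of this diff), a three-bullet outline would map onto the blocks table roughly like this:

```rust
// Hypothetical rows for the blocks table above, encoding:
//   - Bullet A       (id 1)
//     - Bullet A.1   (id 2)
//   - Bullet B       (id 3)
// Columns: (id, first_child_id, next_sibling_id, page_id, content)
fn example_rows() -> Vec<(i64, Option<i64>, Option<i64>, i64, &'static str)> {
    vec![
        (1, Some(2), Some(3), 1, "Bullet A"),   // first child is A.1, next sibling is B
        (2, None, None, 1, "Bullet A.1"),       // leaf: no child, no sibling
        (3, None, None, 1, "Bullet B"),         // last top-level bullet
    ]
}
```
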
2 migrations/requirements.txt (new file)
@@ -0,0 +1,2 @@
alembic

111 rustseq/Cargo.lock (generated, deleted)
@@ -1,111 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "bitflags"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"

[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"

[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"

[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"

[[package]]
name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
dependencies = [
 "foldhash",
]

[[package]]
name = "hashlink"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
 "hashbrown",
]

[[package]]
name = "libsqlite3-sys"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbb8270bb4060bd76c6e96f20c52d80620f1d82a3470885694e41e0f81ef6fe7"
dependencies = [
 "pkg-config",
 "vcpkg",
]

[[package]]
name = "pkg-config"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"

[[package]]
name = "rusqlite"
version = "0.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37e34486da88d8e051c7c0e23c3f15fd806ea8546260aa2fec247e97242ec143"
dependencies = [
 "bitflags",
 "fallible-iterator",
 "fallible-streaming-iterator",
 "hashlink",
 "libsqlite3-sys",
 "smallvec",
]

[[package]]
name = "rustseq"
version = "0.1.0"
dependencies = [
 "rusqlite",
 "streaming-iterator",
 "tree_iterators_rs",
]

[[package]]
name = "smallvec"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"

[[package]]
name = "streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520"

[[package]]
name = "tree_iterators_rs"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93243e2901d558d73622302dfb013dc698e291a707a423435cd5feb8805b966f"
dependencies = [
 "streaming-iterator",
]

[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

9 rustseq/Cargo.toml (deleted)
@@ -1,9 +0,0 @@
[package]
name = "rustseq"
version = "0.1.0"
edition = "2024"

[dependencies]
rusqlite = "0.34.0"
streaming-iterator = "0.1.9"
tree_iterators_rs = "3.1.0"

140 rustseq/src/db.rs (deleted)
@@ -1,140 +0,0 @@
use std::result;

use rusqlite;

pub struct DB {
    connection: rusqlite::Connection
}

impl DB {
    pub fn connect(path: &str) -> Self {
        let connection = rusqlite::Connection::open(path).expect("failed to open DB");
        DB {
            connection: connection
        }
    }

    pub fn create_tables(&self) {
        self.connection.execute_batch(
            "CREATE TABLE blocks (
                id INTEGER NOT NULL UNIQUE,
                content TEXT,
                parent INTEGER,
                next_sibling INTEGER,
                page INTEGER,
                PRIMARY KEY(id AUTOINCREMENT),
                FOREIGN KEY(next_sibling) REFERENCES blocks(id) ON DELETE SET NULL,
                FOREIGN KEY(parent) REFERENCES blocks(id) ON DELETE SET NULL
                FOREIGN KEY(page) REFERENCES pages(id) ON DELETE SET NULL);

            CREATE TABLE pages (
                id INTEGER NOT NULL UNIQUE,
                title TEXT,
                first_block INTEGER,
                PRIMARY KEY(id AUTOINCREMENT));
        ").expect("failed to create table");
    }

    /// insert a new block into the database, and get back its ID
    pub fn insert_block(&mut self, new_block: &mut BlockRow) -> Result<i64, rusqlite::Error> {

        let transaction = self.connection.transaction()?;

        transaction.execute("INSERT INTO blocks (content, parent, next_sibling, page) VALUES (?1, ?2, ?3, ?4);",
            (&new_block.content, &new_block.parent_id, &new_block.sibling_id, &new_block.page_id))?;

        new_block.id = Some(transaction.last_insert_rowid());
        transaction.commit()?;

        Ok(new_block.id.expect("no block ID"))
    }

    pub fn update_block(&mut self, updated_block: &BlockRow) -> Result<(), rusqlite::Error> {
        let transaction = self.connection.transaction()?;

        transaction.execute("UPDATE blocks SET content=?1, parent=?2, next_sibling=?3, page=?4 WHERE id=?5",
            (&updated_block.content, updated_block.parent_id, updated_block.sibling_id, updated_block.page_id, updated_block.id))?;

        transaction.commit()?;

        return Ok(());
    }

    /// insert a new page into the database, and get back its ID
    pub fn insert_page(&mut self, new_page: &mut PageRow) -> Result<i64, rusqlite::Error> {
        let transaction = self.connection.transaction()?;

        transaction.execute("INSERT INTO pages (title, first_block) VALUES (?1, ?2);",
            (&new_page.title, &new_page.root_block_id))?;

        new_page.id = Some(transaction.last_insert_rowid());

        transaction.commit()?;
        Ok(new_page.id.expect("no page ID"))
    }

    pub fn update_page(&mut self, updated_page: &PageRow) -> Result<(), rusqlite::Error> {
        let transaction = self.connection.transaction()?;

        transaction.execute("UPDATE blocks SET title=?1, first_block=?2 WHERE id=?3",
            (&updated_page.title, &updated_page.root_block_id))?;

        transaction.commit()?;

        return Ok(());
    }

    pub fn get_page_blocks(&mut self, page: &PageRow) -> Result<Vec<BlockRow>, rusqlite::Error> {
        let transaction = self.connection.transaction()?;

        let mut statement = transaction.prepare("SELECT * FROM blocks WHERE page=?1")?;

        let rows = statement.query_map((page.id.unwrap(),), |row| {
            Ok(BlockRow{
                id: row.get(0)?,
                content: row.get(1)?,
                parent_id: row.get(2)?,
                sibling_id: row.get(3)?,
                page_id: row.get(4)?
            })
        })?;

        let mut row_block_vector = Vec::new();

        for row in rows {
            row_block_vector.push(row.unwrap());
        }

        return Ok(row_block_vector);

    }
}

/// a BlockRow is a direct translation of a block row from the
/// database. It will be used to create a tree of Block objects
/// in memory.
#[derive(Debug)]
pub struct BlockRow {
    pub id: Option<i64>,
    pub content: String,

    pub parent_id: Option<i64>,
    pub sibling_id: Option<i64>,

    pub page_id: Option<i64>
}

impl BlockRow {
    pub fn new(content: &str, parent_id: Option<i64>, sibling_id: Option<i64>, page_id: Option<i64>) -> Self {
        BlockRow { id: None, content: String::from(content), parent_id: parent_id, sibling_id: sibling_id, page_id: page_id }
    }
}

#[derive(Debug)]
pub struct PageRow {
    pub id: Option<i64>,
    pub title: String,

    pub root_block_id: Option<i64>
}

68 rustseq/src/main.rs (deleted)
@@ -1,68 +0,0 @@
mod page;
mod db;

use std::fs;

use db::{DB, PageRow, BlockRow};
use page::Page;

const TEST_DB_PATH: &str = "test_db.sqlite";

fn init_test_db() -> DB {
    // delete the existing test db
    fs::remove_file(TEST_DB_PATH).unwrap_or_default();

    // connect to the DB which creates the file
    let database = DB::connect(TEST_DB_PATH);

    // run SQL to create the tables defined for rustseq
    database.create_tables();

    return database;
}

fn main() {
    let mut database = init_test_db();

    // put a page in the database
    let mut page_row = PageRow{id: None, title: String::from("test page"), root_block_id: None};
    database.insert_page(&mut page_row).unwrap();

    // put some blocks in the database
    let mut block1 = BlockRow::new("Hello World", None, None, page_row.id);
    let mut block2 = BlockRow::new("This is a child block", None, None, page_row.id);
    let mut block3 = BlockRow::new("this is a sibling", None, None, page_row.id);
    let mut block4 = BlockRow::new("this is a sub-sibling", None, None, page_row.id);


    database.insert_block(&mut block1).unwrap();
    database.insert_block(&mut block2).unwrap();
    database.insert_block(&mut block3).unwrap();
    database.insert_block(&mut block4).unwrap();

    block2.parent_id = block1.id;
    block3.parent_id = block1.id;
    block4.parent_id = block3.id;

    //   1
    //  / \
    // 2   3
    //    /
    //   4

    database.update_block(&block2).unwrap();
    database.update_block(&block3).unwrap();
    database.update_block(&block4).unwrap();

    // get the page's blocks from the database
    let page_blocks = database.get_page_blocks(&page_row).unwrap();

    println!("{:#?}", page_blocks);

    // put those blocks into the Page tree structure
    let mut internal_page = Page::new(page_row, page_blocks);
    internal_page.build_tree();
    // internal_page.set_root_block(internal_page.get_block_data_ref()[0].id);
    internal_page.print_tree();
}

244 rustseq/src/page.rs (deleted)
@@ -1,244 +0,0 @@
use std::{collections::HashMap};


use tree_iterators_rs::prelude::*;
use streaming_iterator::StreamingIteratorMut;

use crate::db::{BlockRow, PageRow};

/// A page is represented by a page object from the
/// database, and a tree of block objects with the data
/// for each node contained in the block_data `HashMap`
#[derive(Debug)]
pub struct Page {
    page_data: PageRow,
    block_data: HashMap<i64, BlockRow>,
    block_tree: Tree<i64>,
}

impl Page {
    pub fn new(page_row: PageRow, block_data: Vec<BlockRow>) -> Self {

        let mut block_map = HashMap::new();

        for block in block_data.into_iter() {
            block_map.insert(block.id.unwrap(), block);
        }

        Page {
            page_data: page_row,
            block_data: block_map,
            block_tree: Tree {
                value: 0,
                children: Vec::new()
            }
        }
    }

    /// create a tree of `Tree<usize>` nodes representing the blocks of the page.
    /// Each node contains the ID of a block and a vector of child `Tree<usize>`
    /// nodes.
    pub fn build_tree(&mut self) {

        let leaf_block_ids = self.get_leaves();

        // key is tree parent ID, value is the subtree itself
        let mut leaves: Vec<Tree<i64>> = Vec::new();

        // each leaf block becomes a Tree node owned by the leaves vector
        for block_id in leaf_block_ids {
            leaves.push(
                Tree {
                    value: block_id,
                    children: Vec::new()
                }
            );
        }

        // vector of complete subtrees
        let mut subtrees: Vec<Tree<i64>> = Vec::new();

        // store which subtree each block's node is in
        // key: block id, value: subtree vector index
        let mut known_nodes: HashMap<i64, usize> = HashMap::new();

        // OK, now try for each leaf getting to the root before going to another leaf
        // subtrees from `subtrees` will be joined into new subtrees here
        // attach subtrees to their parents up until they reach the root

        // Take the leaf from the vector
        for (subtree_index, leaf) in leaves.into_iter().enumerate() {
            // go from the leaf until reaching the root or until reaching an already visited node
            let mut current_node_opt: Option<Tree<i64>> = Some(leaf);

            while let Some(ref current_node) = current_node_opt {
                // build the subtree up until reaching the root or a known node

                // store the current node value in the known_nodes map
                known_nodes.insert(current_node.value, subtree_index);

                // get the block data of the current node
                let current_node_block_data = self.block_data.get(&current_node.value).expect("the current node must refer to a block in block_data");

                // the parent ID is a value, and if not it's a child of the root node
                let parent_id_opt = current_node_block_data.parent_id;

                match parent_id_opt {
                    Some(parent_id) => {
                        // we are not yet at the root node
                        // is the parent node a known node?
                        if let Some(known_node_subtree_index) = known_nodes.get(&parent_id) {
                            // iterate the indicated subtree to find the node, append the current node
                            // to its children and break
                            let known_node_subtree = subtrees.get_mut(*known_node_subtree_index).expect("if it's a known node, the subtree must exist in subtrees");
                            let mut known_node_iter = known_node_subtree.dfs_preorder_iter_mut().attach_context();

                            while let Some(context) = known_node_iter.next_mut() {

                                // the context ancestors field includes the current node's value
                                let current_node_value = context.ancestors().last().unwrap();
                                if **current_node_value != parent_id { continue; }

                                let children = context.children_mut();
                                children.push(current_node.clone());
                                current_node_opt = None;
                                break;
                            }

                        } else {
                            // create the parent node, update current_node and continue
                            let parent_node = Tree {
                                value: parent_id,
                                children: vec![current_node.clone()]
                            };

                            current_node_opt = Some(parent_node);
                        }
                    }
                    None => {
                        // the current node is a child of the root node
                        // store this subtree in subtrees
                        subtrees.push(current_node.clone());
                        break;
                    }

                }
            }
        }
        //make all final subtrees the children of a single 0-value node
        let mut root_node = Tree{value: 0, children: Vec::with_capacity(subtrees.len())};
        for subtree in subtrees.into_iter() {
            root_node.children.push(subtree);
        }
        let unsorted_tree = root_node;
        // self.block_tree = self.sort_children(unsorted_tree);
        self.block_tree = unsorted_tree;
    }

    /// traverse the tree depth-first and print the value of each node along the way
    pub fn print_tree(&self) {
        let nodes: Vec<&i64> = self.block_tree.dfs_preorder_iter().collect();
        println!("{nodes:?}");
    }

    /// find the leaf nodes for the tree.
    /// these are the blocks which no other block calls parent
    fn get_leaves(&self) -> Vec<i64> {
        // hash map containing child block ID vectors for each parent ID
        let mut blocks_by_parent: HashMap<i64, Vec<i64>> = HashMap::new();

        for (block_id, block) in self.block_data.iter() {
            let parent_id = block.parent_id.unwrap_or(0);
            if let None = blocks_by_parent.get_mut(&parent_id) {
                blocks_by_parent.insert(parent_id, Vec::new());
            }
            blocks_by_parent.get_mut(&parent_id).unwrap().push(*block_id);
        }

        let mut leaf_block_ids = Vec::new();

        for (block_id, _block) in self.block_data.iter() {
            // check that no block has this block as a parent
            if let None = blocks_by_parent.get(block_id) {
                leaf_block_ids.push(*block_id)
            }
        }

        return leaf_block_ids;
    }

    /// iterate through the built tree and sort child nodes by their
    /// next_sibling fields
    /// return a properly sorted tree
    fn sort_children(&self, mut built_tree: Tree<i64>) -> Tree<i64> {
        let mut tree_iter = built_tree.dfs_preorder_iter_mut().attach_context();

        // for each node in the tree, sort its children
        while let Some(node) = tree_iter.next_mut() {
            let children = node.children_mut();
            let mut sorted_children = Vec::new();
            // find the child with no next sibling
            let last_sibling = children.iter().find(|node| {
                self.block_data.get(&node.value).unwrap().sibling_id.is_none()
            });
            sorted_children.push(last_sibling);
        }

        Tree{value: 0, children:Vec::new()}
    }

}

#[cfg(test)]
mod tests {
    use super::*;


    fn setup_tree() -> Tree<i64> {
        Tree{value: 1, children: vec![
            Tree{value: 2, children: vec![]},
            Tree{value: 3, children: vec![
                Tree{value: 4, children: vec![]}
            ]}
        ]}
    }

    fn setup_blocks() -> Vec<BlockRow> {
        vec![
            BlockRow{id: Some(1), content: String::from("I'm block 1"), parent_id: None, sibling_id: None, page_id: None},
            BlockRow{id: Some(2), content: String::from("I'm block 2"), parent_id: Some(1), sibling_id: Some(3), page_id: None},
            BlockRow{id: Some(3), content: String::from("I'm block 3"), parent_id: Some(1), sibling_id: None, page_id: None},
            BlockRow{id: Some(4), content: String::from("I'm block 4"), parent_id: Some(3), sibling_id: None, page_id: None},
            BlockRow{id: Some(5), content: String::from("I'm block 5"), parent_id: None, sibling_id: Some(1), page_id: None}
        ]
    }

    fn setup_page() -> Page {
        Page::new(PageRow{id:None, title:String::from(""), root_block_id: None}, setup_blocks())
    }

    fn iterate_tree(page: &Page) -> Vec<i64> {
        let tree = page.block_tree.clone();
        tree.dfs_preorder().collect()
    }

    #[test]
    fn all_nodes_in_tree() {
        let mut page = setup_page();
        page.build_tree();

        // check the length of the iterated tree is equal to the number of blocks plus one for the root node
        let iterated_tree = iterate_tree(&page);
        assert_eq!(page.block_data.len() + 1, iterated_tree.len())
    }

    #[test]
    fn tree_siblings_sorted() {
        let mut page = setup_page();
        page.build_tree();

        let iterated_tree = iterate_tree(&page);
        assert_eq!(vec![0, 5, 1, 2, 3, 4], iterated_tree);
    }
}

163 src/blocktree.rs (new file)
@@ -0,0 +1,163 @@
use std::collections::HashMap;

#[derive(Debug, Clone)]
pub struct NoteBlock {
    // IDs are int64 as this is the datatype of rowids in sqlite
    id: i128,
    pub first_child_id: Option<i128>,
    pub next_sibling_id: Option<i128>,

    content: String
}

impl NoteBlock {
    pub fn new(id: i128, first_child_id: Option<i128>, next_sibling_id: Option<i128>, content: String) -> Self {
        Self {id, first_child_id, next_sibling_id, content}
    }
}

#[derive(Debug)]
pub struct BlockTreeNode {
    // a tree of nodes, where each node refers to its block by id.
    // the id is used as the key in a page's block table.
    block_id: i128,
    block_level: Option<usize>,
    first_child_node: Option<Box<BlockTreeNode>>,
    next_sibling_node: Option<Box<BlockTreeNode>>
}

impl BlockTreeNode {
    fn new(block_id: i128) -> Self {
        BlockTreeNode {
            block_id,
            block_level: None,
            first_child_node: None,
            next_sibling_node: None
        }
    }
}

pub struct BlockTreeIterator<'a> {
    node_stack: Vec<&'a BlockTreeNode>
}

impl<'a> BlockTreeIterator<'a> {
    pub fn new(root_node: &'a BlockTreeNode) -> Self {
        Self {
            node_stack: vec![root_node]
        }
    }
}

impl<'a> Iterator for BlockTreeIterator<'a> {
    type Item = i128;
    fn next(&mut self) -> Option<Self::Item> {

        // stack is empty, we are done otherwise continue with current_node
        let Some(current_node) = self.node_stack.pop() else {
            return None;
        };

        // if the current node has a next sibling, stack it.
        if let Some(next_sibling_node) = &current_node.next_sibling_node {
            self.node_stack.push(next_sibling_node);
        }

        // if the current node has a first child, stack it.
        if let Some(first_child_node) = &current_node.first_child_node {
            self.node_stack.push(first_child_node);
        }

        // yield a clone of the block pointed to by current_node
        Some(current_node.block_id)
    }
}

#[derive(Debug)]
pub struct NotePage {
    title: String,
    id: i128,
    pub block_tree_root: BlockTreeNode,

    block_table: HashMap<i128, NoteBlock>
}

impl NotePage {
    pub fn new(title: String, id: i128, root_block: NoteBlock) -> Self {
        let mut new_page = Self {
            title,
            id,
            block_tree_root: BlockTreeNode::new(root_block.id),
            block_table: HashMap::new()
        };
        new_page.block_tree_root.block_level = Some(0);
        new_page.block_table.insert(root_block.id, root_block);
        new_page
    }

    pub fn insert(self: &mut Self, block: NoteBlock) {
        self.block_table.insert(block.id, block);
    }

    pub fn build_tree(self: &mut Self) {
        let mut node_stack = Vec::new();
        node_stack.push(&mut self.block_tree_root);
        while !node_stack.is_empty() {
            println!("{:?}", node_stack);
            let current_node = node_stack.pop().unwrap();
            let current_block = self.block_table.get(&current_node.block_id).unwrap();
            if let Some(next_sibling_id) = current_block.next_sibling_id {
                let mut sibling_node = BlockTreeNode::new(next_sibling_id);
                sibling_node.block_level = current_node.block_level;
                current_node.next_sibling_node = Some(Box::new(sibling_node));
                node_stack.push(current_node.next_sibling_node.as_mut().unwrap());
            }
            if let Some(first_child_id) = current_block.first_child_id {
                let mut child_node = BlockTreeNode::new(first_child_id);
                child_node.block_level = Some(current_node.block_level.unwrap() + 1);
                current_node.first_child_node = Some(Box::new(child_node));
                node_stack.push(current_node.first_child_node.as_mut().unwrap());
            }
        }
    }
}

struct NotePageIterator<'a> {
    page: &'a NotePage,
    node_stack: Vec<&'a BlockTreeNode>
}

impl<'a> NotePageIterator<'a> {
    fn new(page: &'a NotePage) -> Self {
        Self {
            page: page,
            node_stack: vec![&page.block_tree_root]
        }
    }
}

impl<'a> Iterator for NotePageIterator<'a> {
    type Item = NoteBlock;
    fn next(&mut self) -> Option<Self::Item> {

        // stack is empty, we are done otherwise continue with current_node
        let Some(current_node) = self.node_stack.pop() else {
            return None;
        };

        // if the current node has a next sibling, stack it.
        if let Some(next_sibling_node) = &current_node.next_sibling_node {
            self.node_stack.push(next_sibling_node);
        }

        // if the current node has a first child, stack it.
        if let Some(first_child_node) = &current_node.first_child_node {
            self.node_stack.push(first_child_node);
        }

        // yield a clone of the block pointed to by current_node
        Some(self.page.block_table.get(&current_node.block_id).unwrap().clone())
    }

}

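BlockTreeIterator pops a node, pushes its next sibling and then its first child, and yields the node's block id; because the child is pushed last it is popped first, so ids come out in outline (preorder) order. A standalone sketch of that ordering using plain index arrays, a hypothetical helper rather than the crate's private types:

```rust
// Sketch only: preorder over a first-child/next-sibling encoding, mirroring
// the stack discipline of BlockTreeIterator (sibling pushed before child).
fn preorder(first_child: &[Option<usize>], next_sibling: &[Option<usize>]) -> Vec<usize> {
    let mut order = Vec::new();
    let mut stack = vec![0usize]; // start at the root block's index
    while let Some(id) = stack.pop() {
        if let Some(sib) = next_sibling[id] {
            stack.push(sib); // sibling goes on first...
        }
        if let Some(child) = first_child[id] {
            stack.push(child); // ...so the child comes off the stack first
        }
        order.push(id);
    }
    order
}

fn main() {
    // Same shape as the page built in src/main.rs below:
    // 0 is the root, 1 then 2 are its children, 2 has child 3 and sibling 4, 4 has child 5.
    let first_child = [Some(1), None, Some(3), None, Some(5), None];
    let next_sibling = [None, Some(2), Some(4), None, None, None];
    assert_eq!(preorder(&first_child, &next_sibling), vec![0, 1, 2, 3, 4, 5]);
}
```
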
1 src/lib.rs (new file)
@@ -0,0 +1 @@
pub mod blocktree;

44 src/main.rs (new file)
@@ -0,0 +1,44 @@
use std::collections::HashMap;

use outline_rs::blocktree::{NoteBlock, NotePage, BlockTreeIterator};

fn main() {
    let root_block = NoteBlock::new(0,Some(1),None,String::from("hello"));

    let mut block_map = HashMap::new();
    for idx in 1..=5 {
        let new_block = NoteBlock::new(idx,None, None, String::from("I'm block {idx}"));
        block_map.insert(idx,new_block);
    }

    let mut page = NotePage::new(String::from("page 1"), 0, root_block);

    // take each new block and give it some children/siblings
    let mut block1 = block_map.remove(&1).unwrap();
    block1.next_sibling_id = Some(2);
    page.insert(block1);

    let mut block2 = block_map.remove(&2).unwrap();
    block2.first_child_id = Some(3);
    block2.next_sibling_id = Some(4);
    page.insert(block2);

    let mut block3 = block_map.remove(&3).unwrap();
    page.insert(block3);

    let mut block4 = block_map.remove(&4).unwrap();
    block4.first_child_id = Some(5);
    page.insert(block4);

    let mut block5 = block_map.remove(&5).unwrap();
    page.insert(block5);

    page.build_tree();

    println!("{:?}", page);
    // println!("{:?}", page.block_tree_root.first_child_node)
    let iter = BlockTreeIterator::new(&page.block_tree_root);
    for id in iter {
        println!("{}", id);
    }
}