Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

34 changes: 33 additions & 1 deletion crates/notedeck_columns/src/decks.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
use std::collections::{hash_map::ValuesMut, HashMap};

use enostr::Pubkey;
use nostrdb::Transaction;
use nostrdb::{Note, NoteBuilder, Transaction};
use notedeck::AppContext;
use tokenator::{NoteTokenWriter, TokenBuffer, TokenWriter};
use tracing::{error, info};

use crate::{
Expand Down Expand Up @@ -301,6 +302,37 @@ impl Deck {
self.name = changes.name;
self.icon = changes.icon;
}

/// Serialize this deck into a signed nostr [`Note`].
///
/// The note carries a `name` tag and an `icon` tag for the deck itself,
/// plus one `col` tag per exportable column (see
/// `Route::exportable_to_note`), produced by token-serializing the
/// column's root route.
///
/// # Panics
///
/// Panics if `NoteBuilder::build` fails after signing, which indicates a
/// broken invariant in note construction rather than a recoverable error.
pub fn to_note(&self, seckey: &[u8; 32]) -> Note {
    let mut builder = NoteBuilder::new()
        .start_tag()
        .tag_str("name")
        .tag_str(&self.name)
        .start_tag()
        .tag_str("icon")
        .tag_str(self.icon.to_string().as_str());

    // Single pass: serialize each exportable column's root route straight
    // into the builder. The original two-pass version collected writers
    // into a Vec only to immediately unwrap the ToNote variant it had
    // just constructed.
    for column in self.columns.columns() {
        if let Some(route) = column.router().first().filter(|r| r.exportable_to_note()) {
            let mut writer = TokenWriter::new(TokenBuffer::ToNote(NoteTokenWriter::default()));
            route.serialize_tokens(&mut writer);
            if let TokenBuffer::ToNote(buf) = &writer.buf {
                builder = buf.to_builder(builder);
            }
        }
    }

    builder
        .sign(seckey)
        .build()
        .expect("failed to build Deck note")
}
}

pub fn demo_decks(
Expand Down
35 changes: 35 additions & 0 deletions crates/notedeck_columns/src/route.rs
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,37 @@ impl Route {
Route::EditProfile(_) => ColumnTitle::simple("Edit Profile"),
}
}

/// Whether this route can be serialized into a deck note.
///
/// Only routes whose state can be reconstructed from serialized tokens
/// qualify; transient UI routes (composing, settings dialogs, etc.) do
/// not. Kept exhaustive on purpose so adding a `Route` or `TimelineKind`
/// variant forces a decision here.
pub fn exportable_to_note(&self) -> bool {
    match self {
        Route::Timeline(kind) => match kind {
            TimelineKind::List(ListKind::Contact(_))
            | TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(_)))
            | TimelineKind::Notifications(_)
            | TimelineKind::Profile(_)
            | TimelineKind::Universe
            | TimelineKind::Hashtag(_) => true,
            TimelineKind::Thread(_) | TimelineKind::Generic(_) => false,
        },
        Route::Accounts(_)
        | Route::Reply(_)
        | Route::Quote(_)
        | Route::Relays
        | Route::ComposeNote
        | Route::AddColumn(_)
        | Route::EditProfile(_)
        | Route::Support
        | Route::NewDeck
        | Route::EditDeck(_) => false,
    }
}
}

// TODO: add this to egui-nav so we don't have to deal with returning
Expand Down Expand Up @@ -309,6 +340,10 @@ impl<R: Clone> Router<R> {
pub fn routes(&self) -> &Vec<R> {
&self.routes
}

/// The root route of this router's navigation stack, if any.
pub fn first(&self) -> Option<&R> {
    self.routes.iter().next()
}
}

impl fmt::Display for Route {
Expand Down
1 change: 1 addition & 0 deletions crates/tokenator/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@ description = "A simple library for parsing and serializing string tokens"

[dependencies]
hex = { workspace = true }
nostrdb = { workspace = true }
94 changes: 86 additions & 8 deletions crates/tokenator/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,18 +25,21 @@ pub enum ParseError<'a> {
}

pub struct TokenWriter {
delim: &'static str,
tokens_written: usize,
buf: Vec<u8>,
pub buf: TokenBuffer,
}

impl Default for TokenWriter {
fn default() -> Self {
Self::new(":")
}
/// Destination for tokens emitted by a [`TokenWriter`]: either a flat
/// delimited string or a list of strings destined for a nostr note tag.
pub enum TokenBuffer {
    ToString(StrTokenWriter),
    ToNote(NoteTokenWriter),
}

impl TokenWriter {
/// Accumulates tokens into a single byte buffer, separated by `delim`.
pub struct StrTokenWriter {
    // Separator placed between consecutive tokens.
    pub delim: &'static str,
    // Count of tokens written so far.
    pub tokens_written: usize,
    // Serialized token bytes.
    pub buf: Vec<u8>,
}

impl StrTokenWriter {
pub fn new(delim: &'static str) -> Self {
let buf = vec![];
let tokens_written = 0;
Expand Down Expand Up @@ -66,6 +69,81 @@ impl TokenWriter {
}
}

/// Default separator between serialized tokens.
const DELIM: &str = ":";

/// Accumulates tokens for export into a nostr note's `col` tag
/// (see `to_builder`); one owned `String` per token.
pub struct NoteTokenWriter {
    // Only used by the `Display` impl to join tokens into one string.
    pub delim: &'static str,
    // Tokens written so far, in order.
    pub buf: Vec<String>,
}

impl Default for NoteTokenWriter {
fn default() -> Self {
Self {
delim: DELIM,
buf: Default::default(),
}
}
}

impl Default for StrTokenWriter {
fn default() -> Self {
Self::new(DELIM)
}
}

impl Default for TokenWriter {
fn default() -> Self {
Self::new(TokenBuffer::ToString(StrTokenWriter::default()))
}
}

impl TokenWriter {
pub fn new(buf: TokenBuffer) -> Self {
Self { buf }
}

pub fn write_token(&mut self, token: &str) {
match &mut self.buf {
TokenBuffer::ToString(string_token_writer) => string_token_writer.write_token(token),
TokenBuffer::ToNote(note_token_writer) => note_token_writer.write_token(token),
}
}

pub fn str(&self) -> String {
match &self.buf {
TokenBuffer::ToString(string_token_writer) => string_token_writer.str().to_owned(),
TokenBuffer::ToNote(note_token_writer) => note_token_writer.to_string(),
}
}
}

impl NoteTokenWriter {
    /// Store one token as an owned string.
    pub fn write_token(&mut self, token: &str) {
        self.buf.push(token.to_string());
    }

    /// Append the collected tokens to `builder` as a single `col` tag.
    /// A writer with no tokens leaves the builder untouched.
    pub fn to_builder<'a>(
        &self,
        builder: nostrdb::NoteBuilder<'a>,
    ) -> nostrdb::NoteBuilder<'a> {
        if self.buf.is_empty() {
            return builder;
        }

        self.buf
            .iter()
            .fold(builder.start_tag().tag_str("col"), |b, token| {
                b.tag_str(token)
            })
    }
}

impl std::fmt::Display for NoteTokenWriter {
    /// Joins the buffered tokens with the writer's delimiter.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.buf.join(self.delim))
    }
}

#[derive(Clone)]
pub struct TokenParser<'a> {
tokens: &'a [&'a str],
Expand Down
Loading