Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion apps/obsidian/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
"@types/node": "^20",
"@types/react": "catalog:obsidian",
"@types/react-dom": "catalog:obsidian",
"@types/mime-types": "3.0.1",
"autoprefixer": "^10.4.21",
"builtin-modules": "3.3.0",
"dotenv": "^16.4.5",
Expand All @@ -41,10 +42,11 @@
"@repo/utils": "workspace:*",
"@supabase/supabase-js": "catalog:",
"date-fns": "^4.1.0",
"mime-types": "^3.0.1",
"nanoid": "^4.0.2",
"react": "catalog:obsidian",
"react-dom": "catalog:obsidian",
"tailwindcss-animate": "^1.0.7",
"tldraw": "3.14.2"
}
}
}
97 changes: 83 additions & 14 deletions apps/obsidian/src/utils/publishNode.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import type { FrontMatterCache, TFile } from "obsidian";
import type { default as DiscourseGraphPlugin } from "~/index";
import { getLoggedInClient, getSupabaseContext } from "./supabaseContext";
import { addFile } from "@repo/database/lib/files";
import mime from "mime-types";

export const publishNode = async ({
plugin,
Expand All @@ -26,21 +28,88 @@ export const publishNode = async ({
if (!myGroup) throw new Error("Cannot get group");
const existingPublish =
(frontmatter.publishedToGroups as undefined | string[]) || [];
if (existingPublish.includes(myGroup)) return; // already published
const publishResponse = await client.from("ResourceAccess").insert({
/* eslint-disable @typescript-eslint/naming-convention */
account_uid: myGroup,
source_local_id: nodeId,
space_id: spaceId,
/* eslint-enable @typescript-eslint/naming-convention */
});
const idResponse = await client
.from("Content")
.select("last_modified")
.eq("source_local_id", nodeId)
.eq("space_id", spaceId)
.eq("variant", "full")
.maybeSingle();
if (idResponse.error || !idResponse.data) {
throw idResponse.error || new Error("no data while fetching node");
}
const lastModifiedDb = new Date(
idResponse.data.last_modified + "Z",
).getTime();
const embeds = plugin.app.metadataCache.getFileCache(file)?.embeds ?? [];
const attachments = embeds
.map(({ link }) => {
const attachment = plugin.app.metadataCache.getFirstLinkpathDest(
link,
file.path,
);
if (attachment === null) {
console.warn("Could not find file for " + link);
}
return attachment;
})
.filter((a) => !!a);
const lastModified = Math.max(
file.stat.mtime,
...attachments.map((a) => a.stat.mtime),
);

if (existingPublish.includes(myGroup) && lastModified <= lastModifiedDb)
return; // already published
const publishResponse = await client.from("ResourceAccess").upsert(
{
/* eslint-disable @typescript-eslint/naming-convention */
account_uid: myGroup,
source_local_id: nodeId,
space_id: spaceId,
/* eslint-enable @typescript-eslint/naming-convention */
},
{ ignoreDuplicates: true },
);
if (publishResponse.error && publishResponse.error.code !== "23505")
// 23505 is duplicate key, which counts as a success.
throw publishResponse.error;
await plugin.app.fileManager.processFrontMatter(
file,
(fm: Record<string, unknown>) => {
fm.publishedToGroups = [...existingPublish, myGroup];
},
);

const existingFiles: string[] = [];
for (const attachment of attachments) {
const mimetype = mime.lookup(attachment.path) || "application/octet-stream";
if (mimetype.startsWith("text/")) continue;
existingFiles.push(attachment.path);
const content = await plugin.app.vault.readBinary(attachment);
await addFile({
client,
spaceId,
sourceLocalId: nodeId,
fname: attachment.path,
mimetype,
created: new Date(attachment.stat.ctime),
lastModified: new Date(attachment.stat.mtime),
content,
});
}
let cleanupCommand = client
.from("FileReference")
.delete()
.eq("space_id", spaceId)
.eq("source_local_id", nodeId);
if (existingFiles.length)
cleanupCommand = cleanupCommand.notIn("filepath", [
...new Set(existingFiles),
]);
const cleanupResult = await cleanupCommand;
// do not fail on cleanup
if (cleanupResult.error) console.error(cleanupResult.error);

if (!existingPublish.includes(myGroup))
await plugin.app.fileManager.processFrontMatter(
file,
(fm: Record<string, unknown>) => {
fm.publishedToGroups = [...existingPublish, myGroup];
},
);
};
1 change: 1 addition & 0 deletions packages/database/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
"@repo/utils": "workspace:*",
"@supabase/auth-js": "catalog:",
"@supabase/functions-js": "catalog:",
"@supabase/storage-js": "catalog:",
"@supabase/supabase-js": "catalog:",
"tslib": "2.5.1"
},
Expand Down
94 changes: 94 additions & 0 deletions packages/database/src/dbTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -511,6 +511,70 @@ export type Database = {
},
]
}
file_gc: {
Row: {
filehash: string
}
Insert: {
filehash: string
}
Update: {
filehash?: string
}
Relationships: []
}
FileReference: {
Row: {
created: string
filehash: string
filepath: string
last_modified: string
source_local_id: string
space_id: number
variant: Database["public"]["Enums"]["ContentVariant"] | null
}
Insert: {
created: string
filehash: string
filepath: string
last_modified: string
source_local_id: string
space_id: number
variant?: Database["public"]["Enums"]["ContentVariant"] | null
}
Update: {
created?: string
filehash?: string
filepath?: string
last_modified?: string
source_local_id?: string
space_id?: number
variant?: Database["public"]["Enums"]["ContentVariant"] | null
}
Relationships: [
{
foreignKeyName: "FileReference_content_fkey"
columns: ["space_id", "source_local_id", "variant"]
isOneToOne: false
referencedRelation: "Content"
referencedColumns: ["space_id", "source_local_id", "variant"]
},
{
foreignKeyName: "FileReference_content_fkey"
columns: ["space_id", "source_local_id", "variant"]
isOneToOne: false
referencedRelation: "my_contents"
referencedColumns: ["space_id", "source_local_id", "variant"]
},
{
foreignKeyName: "FileReference_content_fkey"
columns: ["space_id", "source_local_id", "variant"]
isOneToOne: false
referencedRelation: "my_contents_with_embedding_openai_text_embedding_3_small_1536"
referencedColumns: ["space_id", "source_local_id", "variant"]
},
]
}
group_membership: {
Row: {
admin: boolean | null
Expand Down Expand Up @@ -1153,6 +1217,33 @@ export type Database = {
},
]
}
my_file_references: {
Row: {
created: string | null
filehash: string | null
filepath: string | null
last_modified: string | null
source_local_id: string | null
space_id: number | null
}
Insert: {
created?: string | null
filehash?: string | null
filepath?: string | null
last_modified?: string | null
source_local_id?: string | null
space_id?: number | null
}
Update: {
created?: string | null
filehash?: string | null
filepath?: string | null
last_modified?: string | null
source_local_id?: string | null
space_id?: number | null
}
Relationships: []
}
my_spaces: {
Row: {
id: number | null
Expand Down Expand Up @@ -1434,6 +1525,8 @@ export type Database = {
Returns: undefined
}
extract_references: { Args: { refs: Json }; Returns: number[] }
file_access: { Args: { hashvalue: string }; Returns: boolean }
file_exists: { Args: { hashvalue: string }; Returns: boolean }
generic_entity_access: {
Args: {
target_id: number
Expand Down Expand Up @@ -1890,3 +1983,4 @@ export const Constants = {
},
},
} as const

56 changes: 56 additions & 0 deletions packages/database/src/lib/files.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import type { DGSupabaseClient } from "./client";

const ASSETS_BUCKET_NAME = "assets";

/** Hex-encoded SHA-256 digest of the given bytes. */
const sha256Hex = async (content: ArrayBuffer): Promise<string> => {
  // digest accepts any BufferSource, so no Uint8Array wrap is needed.
  const hashBuffer = await crypto.subtle.digest("SHA-256", content);
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, "0"))
    .join("");
};

/**
 * Upload a file's content to the shared assets bucket (content-addressed by
 * its SHA-256 hash) and record a FileReference row tying it to a node in a
 * space.
 *
 * The storage object is only uploaded when no object with the same hash
 * already exists (deduplication via the `file_exists` RPC). The FileReference
 * row is inserted, or updated in place when a row already exists for the same
 * (space_id, source_local_id, filepath) key.
 *
 * @throws the underlying Supabase error when the RPC, the upload (other than
 *   a 409 duplicate), the insert, or the fallback update fails.
 */
export const addFile = async ({
  client,
  spaceId,
  sourceLocalId,
  fname,
  mimetype,
  created,
  lastModified,
  content,
}: {
  client: DGSupabaseClient;
  spaceId: number;
  sourceLocalId: string;
  fname: string;
  mimetype: string;
  created: Date;
  lastModified: Date;
  content: ArrayBuffer;
}): Promise<void> => {
  // This assumes the content fits in memory.
  const hashvalue = await sha256Hex(content);
  const lookForDup = await client.rpc("file_exists", { hashvalue });
  if (lookForDup.error) throw lookForDup.error;
  if (!lookForDup.data) {
    // we should use upsert here for sync issues, but we get obscure rls errors.
    const uploadResult = await client.storage
      .from(ASSETS_BUCKET_NAME)
      .upload(hashvalue, content, { contentType: mimetype });
    // A 409 means a concurrent writer already uploaded the same hash; the
    // content is identical by construction, so treat it as success.
    const statusCode = (
      uploadResult.error as { statusCode?: string | number } | null
    )?.statusCode;
    if (uploadResult.error && String(statusCode) !== "409")
      throw uploadResult.error;
  }
  // not doing an upsert because it does not update on conflict
  const frefResult = await client.from("FileReference").insert({
    /* eslint-disable @typescript-eslint/naming-convention */
    space_id: spaceId,
    source_local_id: sourceLocalId,
    last_modified: lastModified.toISOString(),
    /* eslint-enable @typescript-eslint/naming-convention */
    filepath: fname,
    filehash: hashvalue,
    created: created.toISOString(),
  });
  if (frefResult.error) {
    if (frefResult.error.code !== "23505") throw frefResult.error;
    // 23505 = unique violation: a reference already exists for this
    // (space, node, filepath); refresh its hash and timestamps instead.
    const updateResult = await client
      .from("FileReference")
      .update({
        // eslint-disable-next-line @typescript-eslint/naming-convention
        last_modified: lastModified.toISOString(),
        filehash: hashvalue,
        created: created.toISOString(),
      })
      .eq("source_local_id", sourceLocalId)
      .eq("space_id", spaceId)
      .eq("filepath", fname);
    if (updateResult.error) throw updateResult.error;
  }
};
1 change: 1 addition & 0 deletions packages/database/supabase/config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ schema_paths = [
'./schemas/account.sql',
'./schemas/content.sql',
'./schemas/embedding.sql',
'./schemas/assets.sql',
'./schemas/concept.sql',
'./schemas/contributor.sql',
'./schemas/sync.sql',
Expand Down
Loading