Skip to content

Commit 44bb595

Browse files
committed
Improve error handling
1 parent be5e8bf commit 44bb595

File tree

1 file changed

+70
-42
lines changed

1 file changed

+70
-42
lines changed

main.ts

+70-42
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ import {
1515
type Vault,
1616
parseLinktext,
1717
resolveSubpath,
18+
type HeadingCache,
1819
} from "obsidian";
1920

2021
import OpenAI from "openai";
@@ -118,8 +119,9 @@ function initMessages(
118119
* @param markdownFile
119120
* @param vault Used to read main file and embedded files
120121
* @param metadataCache Access file caches (for parsing) and link resolution
121-
* @param debug
122-
* @returns
122+
* @param debug If true, will print debug output to console
123+
* @returns List of content parts, ready for concatenation into OpenAI-style request
124+
* @throws Error if conversion could not take place. This should not happen in normal operation.
123125
*/
124126
async function convertRangeToContentParts(
125127
startOffset: number | null,
@@ -131,10 +133,10 @@ async function convertRangeToContentParts(
131133
): Promise<OpenAI.Chat.Completions.ChatCompletionContentPart[]> {
132134
const cache = metadataCache.getFileCache(markdownFile);
133135
if (!cache) {
134-
console.error(
135-
`convertRangeToContentParts() could not find cache for ${markdownFile.path}`
136-
);
137-
return [];
136+
const errMsg = `convertRangeToContentParts() could not find cache for ${markdownFile.path}`;
137+
console.error(errMsg);
138+
// if we can't find the cache, there is something seriously wrong, so we interrupt processing completely
139+
throw new Error(errMsg);
138140
}
139141
const embeds = cache?.embeds || [];
140142

@@ -290,6 +292,12 @@ async function convertRangeToContentParts(
290292
return contentParts;
291293
}
292294

295+
interface IThreadMessages {
296+
messages: OpenAI.ChatCompletionMessageParam[];
297+
heading: HeadingCache;
298+
rangeEnd: EditorPosition;
299+
}
300+
293301
// find current cursor position, determine its heading path, then convert that path into messages
294302
// app needed for: metadataCache, vault
295303
// editor needed for: getCursor, getLine, lastLine, getRange, etc.
@@ -299,9 +307,11 @@ async function convertCurrentThreadToMessages(
299307
app: App,
300308
editor: Editor,
301309
debug = false
302-
) {
310+
): Promise<IThreadMessages> {
303311
const cache = app.metadataCache.getFileCache(markdownFile);
304-
if (!cache) return null;
312+
if (!cache)
313+
throw new Error(`Could not find cache for ${markdownFile.path}`);
314+
305315
const headings = cache.headings || [];
306316

307317
// find heading containing the cursor, and then the path of containing headings up the tree
@@ -330,7 +340,8 @@ async function convertCurrentThreadToMessages(
330340
}
331341
}
332342

333-
if (!currentHeading) return null;
343+
if (!currentHeading)
344+
throw new Error(`No headings to work with in ${markdownFile.path}`);
334345

335346
const messages = initMessages(systemMessage);
336347

@@ -376,10 +387,10 @@ async function convertCurrentThreadToMessages(
376387
} else {
377388
// this is a user message, so we do multi-part / ContentPart[]
378389

379-
const embeds = cache.embeds || [];
380390
const startOffset = heading.position.end.offset + 1;
381391
const endOffset = rangeEndOffset;
382392

393+
// raised exceptions will propagate to convertCurrentThreadToMessages()'s caller and be shown as a notice
383394
const contentParts = await convertRangeToContentParts(
384395
startOffset,
385396
endOffset,
@@ -388,7 +399,6 @@ async function convertCurrentThreadToMessages(
388399
app.metadataCache,
389400
debug
390401
);
391-
392402
messages.push({
393403
role: role,
394404
content: contentParts,
@@ -397,8 +407,9 @@ async function convertCurrentThreadToMessages(
397407
}
398408

399409
if (!heading) {
400-
console.error("Unexpected that we have no last heading here.");
401-
return null;
410+
const errMsg = "Really unexpected that we have no last heading here.";
411+
console.error(errMsg);
412+
throw new Error(errMsg);
402413
}
403414

404415
return { messages, heading, rangeEnd };
@@ -447,15 +458,17 @@ export default class AIChatAsMDPlugin extends Plugin {
447458
return;
448459
}
449460

450-
const mhe = await convertCurrentThreadToMessages(
451-
markdownFile,
452-
systemPrompt,
453-
this.app,
454-
editor,
455-
this.settings.debug
456-
);
457-
if (!mhe) {
458-
new Notice("No headings found");
461+
let mhe: IThreadMessages;
462+
try {
463+
mhe = await convertCurrentThreadToMessages(
464+
markdownFile,
465+
systemPrompt,
466+
this.app,
467+
editor,
468+
this.settings.debug
469+
);
470+
} catch (e) {
471+
new Notice(`Error converting thread to messages: ${e}`);
459472
return;
460473
}
461474

@@ -633,19 +646,27 @@ export default class AIChatAsMDPlugin extends Plugin {
633646
this.app.vault.getFileByPath(systemPromptFilename);
634647
if (!systemPromptFile) {
635648
new Notice(
636-
`AI Chat as MD could not read system prompt file "${systemPromptFilename}". Please fix its path in the plugin settings or in this file's frontmatter.`
649+
`AI Chat as MD could not read system prompt file "${systemPromptFilename}". Please check its path in the plugin settings or in this file's frontmatter.`
637650
);
638651
return null;
639652
}
640653

641-
const sysContentParts = await convertRangeToContentParts(
642-
null,
643-
null,
644-
systemPromptFile,
645-
this.app.vault,
646-
this.app.metadataCache,
647-
this.settings.debug
648-
);
654+
let sysContentParts: OpenAI.Chat.Completions.ChatCompletionContentPart[];
655+
try {
656+
sysContentParts = await convertRangeToContentParts(
657+
null,
658+
null,
659+
systemPromptFile,
660+
this.app.vault,
661+
this.app.metadataCache,
662+
this.settings.debug
663+
);
664+
} catch (e) {
665+
new Notice(
666+
`Error parsing system prompt file "${systemPromptFilename}": ${e}`
667+
);
668+
return null;
669+
}
649670

650671
// concatenate all of the "text" members
651672
// effectively throwing out type == "image"
@@ -694,17 +715,24 @@ export default class AIChatAsMDPlugin extends Plugin {
694715
return;
695716
}
696717
const messages = initMessages(systemPrompt);
697-
messages.push({
698-
role: "user",
699-
content: await convertRangeToContentParts(
700-
selStartOffset,
701-
selEndOffset,
702-
markdownFile,
703-
this.app.vault,
704-
this.app.metadataCache,
705-
this.settings.debug
706-
),
707-
});
718+
try {
719+
messages.push({
720+
role: "user",
721+
content: await convertRangeToContentParts(
722+
selStartOffset,
723+
selEndOffset,
724+
markdownFile,
725+
this.app.vault,
726+
this.app.metadataCache,
727+
this.settings.debug
728+
),
729+
});
730+
} catch (e) {
731+
new Notice(
732+
`Error converting selection to OpenAI-style messages: ${e}`
733+
);
734+
return;
735+
}
708736

709737
if (this.settings.debug) {
710738
console.log("About to send to AI:", messages);

0 commit comments

Comments
 (0)