Remove results #5972

Merged · 1 commit · Jan 16, 2025
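
This PR carries no description; the diff itself tells the story: the AI clients' `evaluate` methods drop the `Result<string, Error>` wrapper from `$lib/ai/result` and return plain strings, with failures thrown as exceptions and call sites switched from `isFailure` checks to `try`/`finally`. A minimal sketch of the before/after calling convention (the `Result` shape below is inferred from the removed imports, not copied from the codebase):

```ts
// Inferred shape of the removed wrapper; the real $lib/ai/result may differ.
type Result<T, E> = { ok: true; value: T } | { ok: false; failure: E };
const isFailure = <T, E>(r: Result<T, E>): r is { ok: false; failure: E } => !r.ok;

// Before: callers branch on an explicit failure value.
async function callerBefore(evaluate: () => Promise<Result<string, Error>>) {
  const result = await evaluate();
  if (isFailure(result)) {
    console.error(result.failure);
    return;
  }
  console.log(result.value);
}

// After: the promise rejects on failure, so callers use try/catch or try/finally.
async function callerAfter(evaluate: () => Promise<string>) {
  try {
    console.log(await evaluate());
  } catch (e) {
    console.error(e);
  }
}
```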
60 changes: 24 additions & 36 deletions apps/desktop/src/components/CommitMessageInput.svelte
@@ -3,7 +3,6 @@
   import ContextMenuSection from '$components/ContextMenuSection.svelte';
   import DropDownButton from '$components/DropDownButton.svelte';
   import { PromptService } from '$lib/ai/promptService';
-  import { isFailure } from '$lib/ai/result';
   import { AIService, type DiffInput } from '$lib/ai/service';
   import { BranchStack } from '$lib/branches/branch';
   import { SelectedOwnership } from '$lib/branches/ownership';
@@ -101,46 +100,35 @@
   }

   async function generateCommitMessage() {
-    const diffInput = await getDiffInput();
-
     aiLoading = true;

-    const prompt = promptService.selectedCommitPrompt(project.id);
-
-    let firstToken = true;
-
-    const generatedMessageResult = await aiService.summarizeCommit({
-      diffInput,
-      useEmojiStyle: $commitGenerationUseEmojis,
-      useBriefStyle: $commitGenerationExtraConcise,
-      commitTemplate: prompt,
-      branchName: $stack.series[0]?.name,
-      onToken: (t) => {
-        if (firstToken) {
-          commitMessage = '';
-          firstToken = false;
-        }
-        commitMessage += t;
-      }
-    });
-
-    if (isFailure(generatedMessageResult)) {
-      showError('Failed to generate commit message', generatedMessageResult.failure);
-      aiLoading = false;
-      return;
-    }
-
-    const generatedMessage = generatedMessageResult.value;
-
-    if (generatedMessage) {
-      commitMessage = generatedMessage;
-    } else {
-      showError('Failed to generate commit message', 'Prompt returned no response');
-    }
-
-    aiLoading = false;
+    try {
+      const diffInput = await getDiffInput();
+
+      const prompt = promptService.selectedCommitPrompt(project.id);
+
+      let firstToken = true;
+
+      const output = await aiService.summarizeCommit({
+        diffInput,
+        useEmojiStyle: $commitGenerationUseEmojis,
+        useBriefStyle: $commitGenerationExtraConcise,
+        commitTemplate: prompt,
+        branchName: $stack.series[0]?.name,
+        onToken: (t) => {
+          if (firstToken) {
+            commitMessage = '';
+            firstToken = false;
+          }
+          commitMessage += t;
+        }
+      });
+
+      if (output) {
+        commitMessage = output;
+      }
+    } finally {
+      aiLoading = false;
+    }
   }

   async function runMessageHook() {
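
The `try`/`finally` shape above guarantees the `aiLoading` flag is cleared on success, on an early throw from `getDiffInput()`, and on a rejected `summarizeCommit` alike. A reduced sketch of that control flow (names are illustrative, not from the codebase):

```ts
let loading = false;

// Hypothetical runner mirroring the pattern in generateCommitMessage().
async function withSpinner(run: () => Promise<string | undefined>) {
  loading = true;
  try {
    const message = await run(); // may reject; finally still runs
    if (message) {
      console.log('generated:', message);
    }
  } finally {
    loading = false; // single reset point for every exit path
  }
}
```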
42 changes: 20 additions & 22 deletions apps/desktop/src/components/PrDetailsModal.svelte
@@ -14,7 +14,6 @@
   import DropDownButton from '$components/DropDownButton.svelte';
   import Markdown from '$components/Markdown.svelte';
   import ScrollableContainer from '$components/ScrollableContainer.svelte';
-  import { isFailure } from '$lib/ai/result';
   import { AIService } from '$lib/ai/service';
   import { BaseBranch } from '$lib/baseBranch/baseBranch';
   import { BranchStack } from '$lib/branches/branch';
@@ -257,31 +256,30 @@

     let firstToken = true;

-    const descriptionResult = await aiService?.describePR({
-      title: prTitle.value,
-      body: prBody.value,
-      directive: aiDescriptionDirective,
-      commitMessages: commits.map((c) => c.description),
-      prBodyTemplate: templateBody,
-      onToken: (token) => {
-        if (firstToken) {
-          prBody.reset();
-          firstToken = false;
-        }
-        prBody.append(token);
-      }
-    });
-
-    if (isFailure(descriptionResult)) {
-      showError('Failed to generate commit message', descriptionResult.failure);
-      aiIsLoading = false;
-      return;
-    }
-
-    prBody.set(descriptionResult.value);
-    aiIsLoading = false;
-    aiDescriptionDirective = undefined;
-    await tick();
+    try {
+      const description = await aiService?.describePR({
+        title: prTitle.value,
+        body: prBody.value,
+        directive: aiDescriptionDirective,
+        commitMessages: commits.map((c) => c.description),
+        prBodyTemplate: templateBody,
+        onToken: (token) => {
+          if (firstToken) {
+            prBody.reset();
+            firstToken = false;
+          }
+          prBody.append(token);
+        }
+      });
+
+      if (description) {
+        prBody.set(description);
+      }
+    } finally {
+      aiIsLoading = false;
+      aiDescriptionDirective = undefined;
+      await tick();
+    }
   }

   function handleModalKeydown(e: KeyboardEvent) {
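
As in the commit-message component, the result is applied twice: `onToken` streams partial text into the body while generation runs, then `prBody.set(description)` overwrites it with the canonical final string. A minimal sketch of that streaming contract (assumed from the `onToken` option in the diff, with hypothetical data):

```ts
// Hypothetical generator demonstrating the onToken + final-value contract.
async function describe(options?: { onToken?: (t: string) => void }): Promise<string> {
  const tokens = ['Adds ', 'PR ', 'description'];
  const buffer: string[] = [];
  for (const t of tokens) {
    options?.onToken?.(t); // incremental UI update while streaming
    buffer.push(t);
  }
  return buffer.join(''); // canonical final value, applied via prBody.set()
}
```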
12 changes: 1 addition & 11 deletions apps/desktop/src/components/SeriesHeader.svelte
@@ -10,7 +10,6 @@
   import PullRequestCard from '$components/PullRequestCard.svelte';
   import SeriesHeaderContextMenu from '$components/SeriesHeaderContextMenu.svelte';
   import { PromptService } from '$lib/ai/promptService';
-  import { isFailure } from '$lib/ai/result';
   import { AIService } from '$lib/ai/service';
   import { BaseBranch } from '$lib/baseBranch/baseBranch';
   import { BranchStack } from '$lib/branches/branch';
@@ -27,7 +26,6 @@
   import { getForge } from '$lib/forge/interface/forge';
   import { getForgeListingService } from '$lib/forge/interface/forgeListingService';
   import { getForgePrService } from '$lib/forge/interface/forgePrService';
-  import { showError } from '$lib/notifications/toasts';
   import { Project } from '$lib/project/project';
   import { openExternalUrl } from '$lib/utils/url';
   import { getContext, getContextStore } from '@gitbutler/shared/context';
@@ -222,19 +220,11 @@
     let hunks = (await Promise.all(hunk_promises)).flat();

     const prompt = promptService.selectedBranchPrompt(project.id);
-    const messageResult = await aiService.summarizeBranch({
+    const message = await aiService.summarizeBranch({
       hunks,
       branchTemplate: prompt
     });

-    if (isFailure(messageResult)) {
-      showError('Failed to generate branch name', messageResult.failure);
-
-      return;
-    }
-
-    const message = messageResult.value;
-
     if (message && message !== branch.name) {
       branchController.updateSeriesName(stack.id, branch.name, message);
     }
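
Unlike the two components above, this call site gains no local `try`/`catch`; with the `showError` import gone, a rejected `summarizeBranch` now propagates out of the enclosing function to whatever caller or global handler exists (not visible in this diff). A caller wanting the old toast behavior back might wrap the call like this (hypothetical helper, not from the codebase):

```ts
// Hypothetical wrapper restoring showError-style reporting at a higher level.
async function reporting<T>(op: () => Promise<T>, message: string): Promise<T | undefined> {
  try {
    return await op();
  } catch (e) {
    console.error(message, e); // stand-in for a toast such as showError(message, e)
    return undefined;
  }
}
```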
40 changes: 16 additions & 24 deletions apps/desktop/src/lib/ai/anthropicClient.ts
@@ -4,13 +4,9 @@ import {
   SHORT_DEFAULT_BRANCH_TEMPLATE,
   SHORT_DEFAULT_PR_TEMPLATE
 } from '$lib/ai/prompts';
-import { andThenAsync, wrapAsync } from '$lib/ai/result';
-import { ok, type Result } from '$lib/ai/result';
 import { type AIEvalOptions } from '$lib/ai/types';
 import { type AIClient, type AnthropicModelName, type Prompt } from '$lib/ai/types';
 import Anthropic from '@anthropic-ai/sdk';
-import type { RawMessageStreamEvent } from '@anthropic-ai/sdk/resources/messages.mjs';
-import type { Stream } from '@anthropic-ai/sdk/streaming.mjs';

 const DEFAULT_MAX_TOKENS = 1024;

@@ -32,28 +28,24 @@ export class AnthropicAIClient implements AIClient {
     });
   }

-  async evaluate(prompt: Prompt, options?: AIEvalOptions): Promise<Result<string, Error>> {
-    const responseResult = await wrapAsync<Stream<RawMessageStreamEvent>, Error>(async () => {
-      const [messages, system] = splitPromptMessages(prompt);
-      return await this.client.messages.create({
-        max_tokens: options?.maxTokens ?? DEFAULT_MAX_TOKENS,
-        system,
-        messages,
-        model: this.modelName,
-        stream: true
-      });
-    });
-
-    return await andThenAsync(responseResult, async (response) => {
-      const buffer: string[] = [];
-      for await (const event of response) {
-        if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
-          const token = event.delta.text;
-          options?.onToken?.(token);
-          buffer.push(token);
-        }
-      }
-      return ok(buffer.join(''));
-    });
+  async evaluate(prompt: Prompt, options?: AIEvalOptions): Promise<string> {
+    const [messages, system] = splitPromptMessages(prompt);
+    const response = await this.client.messages.create({
+      max_tokens: options?.maxTokens ?? DEFAULT_MAX_TOKENS,
+      system,
+      messages,
+      model: this.modelName,
+      stream: true
+    });
+
+    const buffer: string[] = [];
+    for await (const event of response) {
+      if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
+        const token = event.delta.text;
+        options?.onToken?.(token);
+        buffer.push(token);
+      }
+    }
+    return buffer.join('');
   }
 }
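
With `wrapAsync` removed, errors thrown by the Anthropic SDK (for instance `Anthropic.APIError` on non-2xx responses, if I recall the SDK's behavior correctly) now reject `evaluate` directly. A hedged usage sketch; the prompt literal and option shape are simplified:

```ts
import Anthropic from '@anthropic-ai/sdk';

// Hypothetical caller of the reworked evaluate().
async function summarize(client: {
  evaluate(prompt: unknown, options?: { onToken?: (t: string) => void }): Promise<string>;
}) {
  try {
    return await client.evaluate([{ role: 'user', content: 'Summarize this diff' }], {
      onToken: (t) => process.stdout.write(t)
    });
  } catch (e) {
    if (e instanceof Anthropic.APIError) {
      console.error('Anthropic request failed:', e.status, e.message);
    }
    throw e; // let the UI layer decide how to surface it
  }
}
```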
44 changes: 19 additions & 25 deletions apps/desktop/src/lib/ai/butlerClient.ts
@@ -4,7 +4,6 @@ import {
   SHORT_DEFAULT_COMMIT_TEMPLATE,
   SHORT_DEFAULT_PR_TEMPLATE
 } from '$lib/ai/prompts';
-import { andThenAsync, ok, wrapAsync, type Result } from '$lib/ai/result';
 import { ModelKind, type AIClient, type AIEvalOptions, type Prompt } from '$lib/ai/types';
 import { stringStreamGenerator } from '$lib/utils/promise';
 import type { HttpClient } from '@gitbutler/shared/network/httpClient';
@@ -34,33 +33,28 @@
     private modelKind: ModelKind
   ) {}

-  async evaluate(prompt: Prompt, options?: AIEvalOptions): Promise<Result<string, Error>> {
+  async evaluate(prompt: Prompt, options?: AIEvalOptions): Promise<string> {
     const [messages, system] = splitPromptMessagesIfNecessary(this.modelKind, prompt);
-    const response = await wrapAsync<Response, Error>(
-      async () =>
-        await this.cloud.postRaw('ai/stream', {
-          body: {
-            messages,
-            system,
-            max_tokens: 3600,
-            model_kind: this.modelKind
-          }
-        })
-    );
-
-    return await andThenAsync(response, async (r) => {
-      const reader = r.body?.getReader();
-      if (!reader) {
-        return ok('');
-      }
-
-      const buffer: string[] = [];
-      for await (const chunk of stringStreamGenerator(reader)) {
-        options?.onToken?.(chunk);
-        buffer.push(chunk);
-      }
-
-      return ok(buffer.join(''));
-    });
+    const response = await this.cloud.postRaw('ai/stream', {
+      body: {
+        messages,
+        system,
+        max_tokens: 3600,
+        model_kind: this.modelKind
+      }
+    });
+
+    const reader = response.body?.getReader();
+    if (!reader) {
+      return '';
+    }
+
+    const buffer: string[] = [];
+    for await (const chunk of stringStreamGenerator(reader)) {
+      options?.onToken?.(chunk);
+      buffer.push(chunk);
+    }
+
+    return buffer.join('');
   }
 }
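
`stringStreamGenerator` (imported from `$lib/utils/promise`) is not shown in this diff. One plausible implementation, assuming it adapts a `ReadableStreamDefaultReader<Uint8Array>` into decoded string chunks:

```ts
// Sketch only; the real helper in $lib/utils/promise may differ.
async function* stringStreamGenerator(
  reader: ReadableStreamDefaultReader<Uint8Array>
): AsyncGenerator<string> {
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    yield decoder.decode(value, { stream: true }); // handle multi-byte chars split across chunks
  }
}
```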
51 changes: 21 additions & 30 deletions apps/desktop/src/lib/ai/ollamaClient.ts
@@ -3,7 +3,6 @@ import {
   LONG_DEFAULT_COMMIT_TEMPLATE,
   SHORT_DEFAULT_PR_TEMPLATE
 } from '$lib/ai/prompts';
-import { andThen, buildFailureFromAny, ok, wrap, wrapAsync, type Result } from '$lib/ai/result';
 import { MessageRole, type PromptMessage, type AIClient, type Prompt } from '$lib/ai/types';
 import { isNonEmptyObject } from '@gitbutler/ui/utils/typeguards';
 import { fetch } from '@tauri-apps/plugin-http';
@@ -87,22 +86,18 @@
     private modelName: string
   ) {}

-  async evaluate(prompt: Prompt): Promise<Result<string, Error>> {
+  async evaluate(prompt: Prompt): Promise<string> {
     const messages = this.formatPrompt(prompt);

-    const responseResult = await this.chat(messages);
+    const response = await this.chat(messages);

-    return andThen(responseResult, (response) => {
-      const rawResponseResult = wrap<unknown, Error>(() => JSON.parse(response.message.content));
+    const rawResponse = JSON.parse(response.message.content);

-      return andThen(rawResponseResult, (rawResponse) => {
-        if (!isOllamaChatMessageFormat(rawResponse)) {
-          return buildFailureFromAny('Invalid response: ' + response.message.content);
-        }
+    if (!isOllamaChatMessageFormat(rawResponse)) {
+      throw new Error('Invalid response: ' + response.message.content);
+    }

-        return ok(rawResponse.result);
-      });
-    });
+    return rawResponse.result;
   }

   /**
@@ -137,19 +132,17 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
    * @param request - The OllamaChatRequest object containing the request details.
    * @returns A Promise that resolves to the Response object.
    */
-  private async fetchChat(request: OllamaChatRequest): Promise<Result<any, Error>> {
+  private async fetchChat(request: OllamaChatRequest): Promise<unknown> {
     const url = new URL(OllamaAPEndpoint.Chat, this.endpoint);
     const body = JSON.stringify(request);
-    return await wrapAsync(
-      async () =>
-        await fetch(url.toString(), {
-          method: 'POST',
-          headers: {
-            'Content-Type': 'application/json'
-          },
-          body
-        }).then(async (response) => await response.json())
-    );
+
+    return await fetch(url.toString(), {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json'
+      },
+      body
+    }).then(async (response) => await response.json());
   }

   /**
@@ -162,7 +155,7 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
   private async chat(
     messages: Prompt,
     options?: OllamaRequestOptions
-  ): Promise<Result<OllamaChatResponse, Error>> {
+  ): Promise<OllamaChatResponse> {
     const result = await this.fetchChat({
       model: this.modelName,
       stream: false,
@@ -171,12 +164,10 @@ ${JSON.stringify(OLLAMA_CHAT_MESSAGE_FORMAT_SCHEMA, null, 2)}`
       format: 'json'
     });

-    return andThen(result, (result) => {
-      if (!isOllamaChatResponse(result)) {
-        return buildFailureFromAny('Invalid response\n' + JSON.stringify(result.data));
-      }
+    if (!isOllamaChatResponse(result)) {
+      throw new Error('Invalid response\n' + JSON.stringify(result));
+    }

-      return ok(result);
-    });
+    return result;
   }
 }
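
After this change the Ollama path has two throw sites: `JSON.parse` on malformed model output (a `SyntaxError`) and the explicit `Error` thrown when the `isOllamaChatMessageFormat` or `isOllamaChatResponse` guards fail. A caller sees both as ordinary promise rejections (hypothetical, simplified caller shown):

```ts
// Hypothetical caller of OllamaClient#evaluate after the refactor.
async function suggestBranchName(client: { evaluate(prompt: string): Promise<string> }) {
  try {
    return await client.evaluate('Suggest a branch name for these changes');
  } catch (e) {
    // SyntaxError (bad JSON) and Error (schema mismatch) both land here.
    console.error('Ollama evaluation failed:', e);
    return undefined;
  }
}
```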