
Commit

fix: fixed ask for API key on first start, accept and auto-detect anthropic keys
ErikBjare committed Aug 7, 2024
1 parent 8c20800 commit 8b33013
Showing 3 changed files with 44 additions and 33 deletions.
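
The behavioral core of the commit is in gptme/init.py below: a pasted key is routed to the right provider by its prefix, since Anthropic keys start with "sk-ant-". A minimal standalone sketch of that check (the detect_provider helper is illustrative only; the commit inlines the same logic in ask_for_api_key()):

def detect_provider(api_key: str) -> tuple[str, str]:
    # Anthropic keys carry an "sk-ant-" prefix; anything else is treated as OpenAI.
    if api_key.startswith("sk-ant-"):
        return "anthropic", "ANTHROPIC_API_KEY"
    return "openai", "OPENAI_API_KEY"

assert detect_provider("sk-ant-api03-xxxx") == ("anthropic", "ANTHROPIC_API_KEY")
assert detect_provider("sk-xxxx") == ("openai", "OPENAI_API_KEY")
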
41 changes: 37 additions & 4 deletions gptme/init.py
@@ -4,7 +4,7 @@
 
 from dotenv import load_dotenv
 
-from .config import load_config
+from .config import config_path, load_config, set_config_value
 from .dirs import get_readline_history_file
 from .llm import get_recommended_model, init_llm
 from .models import set_default_model
@@ -29,8 +29,11 @@ def init(provider: str | None, model: str | None, interactive: bool):
     load_dotenv()
 
     config = load_config()
+
+    # get from config
     if not provider:
         provider = config.get_env("PROVIDER")
+
     if not provider:
         # auto-detect depending on if OPENAI_API_KEY or ANTHROPIC_API_KEY is set
         if config.get_env("OPENAI_API_KEY"):
@@ -39,11 +42,16 @@ def init(provider: str | None, model: str | None, interactive: bool):
         elif config.get_env("ANTHROPIC_API_KEY"):
             print("Found Anthropic API key, using Anthropic provider")
             provider = "anthropic"
-        else:
-            raise ValueError("No API key found, couldn't auto-detect provider")
+        # ask user for API key
+        elif interactive:
+            provider, _ = ask_for_api_key()
+
+    # fail
+    if not provider:
+        raise ValueError("No API key found, couldn't auto-detect provider")
 
     # set up API_KEY and API_BASE, needs to be done before loading history to avoid saving API_KEY
-    init_llm(provider, interactive)
+    init_llm(provider)
 
     if not model:
         model = config.get_env("MODEL") or get_recommended_model()
@@ -91,3 +99,28 @@ def _load_readline_history() -> None:
            readline.add_history(line)
 
    atexit.register(readline.write_history_file, history_file)
+
+
+def ask_for_api_key():
+    """Interactively ask user for API key"""
+    print("No API key set for OpenAI or Anthropic.")
+    print(
+        """You can get one at:
+- OpenAI: https://platform.openai.com/account/api-keys
+- Anthropic: https://console.anthropic.com/settings/keys
+"""
+    )
+    api_key = input("Your OpenAI or Anthropic API key: ").strip()
+
+    if api_key.startswith("sk-ant-"):
+        provider = "anthropic"
+        env_var = "ANTHROPIC_API_KEY"
+    else:
+        provider = "openai"
+        env_var = "OPENAI_API_KEY"
+
+    # TODO: test API key
+    # Save to config
+    set_config_value(f"env.{env_var}", api_key)
+    print(f"API key saved to config at {config_path}")
+    return provider, api_key
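
Taken together, the init() changes mean a first interactive run with no key configured now prompts the user instead of raising. A rough sketch of the new decision order (names match the diff above; the call below is illustrative, assuming no PROVIDER, OPENAI_API_KEY, or ANTHROPIC_API_KEY is set anywhere):

# Illustrative first-run behavior, not code from the commit:
from gptme.init import init

init(provider=None, model=None, interactive=True)
# 1. no provider passed or found in config
# 2. no OPENAI_API_KEY / ANTHROPIC_API_KEY to auto-detect from
# 3. interactive, so ask_for_api_key() prompts, detects the provider from the
#    key prefix, and saves it to the config file
# A non-interactive run with no key still raises ValueError.
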
32 changes: 5 additions & 27 deletions gptme/llm.py
@@ -1,13 +1,13 @@
 import logging
 import shutil
 import sys
-from typing import Generator, Iterator, Tuple
+from collections.abc import Generator, Iterator
 
 from anthropic import Anthropic
 from openai import AzureOpenAI, OpenAI
 from rich import print
 
-from .config import config_path, get_config, set_config_value
+from .config import get_config
 from .constants import PROMPT_ASSISTANT
 from .message import Message
 from .models import MODELS
@@ -25,23 +25,14 @@
 anthropic_client: Anthropic | None = None
 
 
-def init_llm(llm: str, interactive: bool):
+def init_llm(llm: str):
     global oai_client, anthropic_client
 
     # set up API_KEY (if openai) and API_BASE (if local)
     config = get_config()
 
-    # TODO: use llm/model from config if specified and not passed as args
     if llm == "openai":
-        if api_key := config.get_env("OPENAI_API_KEY", None):
-            pass
-        elif interactive:
-            api_key = ask_for_api_key()
-            # recursively call init_llm to start over with init
-            return init_llm(llm, interactive)
-        else:
-            print("Error: OPENAI_API_KEY not set in env or config, see README.")
-            sys.exit(1)
+        api_key = config.get_env_required("OPENAI_API_KEY")
         oai_client = OpenAI(api_key=api_key)
     elif llm == "azure":
         api_key = config.get_env_required("AZURE_OPENAI_API_KEY")
@@ -69,19 +60,6 @@ def init_llm(llm: str, interactive: bool):
     assert oai_client or anthropic_client
 
 
-def ask_for_api_key():
-    """Interactively ask user for API key"""
-    print("No API key set for OpenAI.")
-    print("You can get one at https://platform.openai.com/account/api-keys\n")
-    api_key = input("Your OpenAI API key: ").strip()
-
-    # TODO: test API key
-    # Save to config
-    set_config_value("env.OPENAI_API_KEY", api_key)
-    print(f"API key saved to config at {config_path}")
-    return api_key
-
-
 def reply(messages: list[Message], model: str, stream: bool = False) -> Message:
     if stream:
         return _reply_stream(messages, model)
@@ -137,7 +115,7 @@ def _chat_complete(messages: list[Message], model: str) -> str:
 
 def _transform_system_messages_anthropic(
     messages: list[Message],
-) -> Tuple[list[Message], str]:
+) -> tuple[list[Message], str]:
     # transform system messages into system kwarg for anthropic
     # for first system message, transform it into a system kwarg
     assert messages[0].role == "system"
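
With the interactive fallback removed from llm.py, a missing key now surfaces through config.get_env_required() instead of a prompt followed by sys.exit(1). The helper itself isn't part of this diff; a plausible sketch, assuming it mirrors get_env() but fails loudly when the value is absent:

# Assumed shape of Config.get_env_required (not shown in this commit):
def get_env_required(self, name: str) -> str:
    value = self.get_env(name, None)
    if value is None:
        raise KeyError(f"{name} not set in env or config, see README")
    return value
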
4 changes: 2 additions & 2 deletions gptme/models.py
@@ -1,6 +1,6 @@
 import logging
 from dataclasses import dataclass
-from typing import Optional, TypedDict
+from typing import TypedDict
 
 from typing_extensions import NotRequired
 
@@ -12,7 +12,7 @@ class ModelMeta:
     provider: str
     model: str
     context: int
-    max_output: Optional[int] = None
+    max_output: int | None = None
 
     # price in USD per 1M tokens
     # if price is not set, it is assumed to be 0
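
The remaining models.py and llm.py edits are typing modernizations with no runtime behavior change: Optional[X] becomes X | None (PEP 604), typing.Tuple becomes the builtin tuple (PEP 585), and Generator/Iterator move to collections.abc. For example, on Python 3.10+ these two fields are equivalent:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Example:
    old_style: Optional[int] = None  # pre-3.10 spelling, needs the typing import
    new_style: int | None = None     # PEP 604 union syntax used in this commit
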
