Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Integrate moonshot models to camel #1526

Merged
merged 11 commits into from
Feb 4, 2025
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .env
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@
# InternLM API (https://internlm.intern-ai.org.cn/api/tokens)
# INTERNLM_API_KEY="Fill your API key here"

# Moonshot API (https://platform.moonshot.cn/)
# MOONSHOT_API_KEY="Fill your API key here"

# JINA API (https://jina.ai/)
# JINA_API_KEY="Fill your API key here"

Expand Down
3 changes: 3 additions & 0 deletions camel/configs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from .internlm_config import INTERNLM_API_PARAMS, InternLMConfig
from .litellm_config import LITELLM_API_PARAMS, LiteLLMConfig
from .mistral_config import MISTRAL_API_PARAMS, MistralConfig
from .moonshot_config import MOONSHOT_API_PARAMS, MoonshotConfig
from .nvidia_config import NVIDIA_API_PARAMS, NvidiaConfig
from .ollama_config import OLLAMA_API_PARAMS, OllamaConfig
from .openai_config import OPENAI_API_PARAMS, ChatGPTConfig
Expand Down Expand Up @@ -79,4 +80,6 @@
'DEEPSEEK_API_PARAMS',
'InternLMConfig',
'INTERNLM_API_PARAMS',
'MoonshotConfig',
"MOONSHOT_API_PARAMS",
]
45 changes: 45 additions & 0 deletions camel/configs/moonshot_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========

from typing import Optional

from camel.configs.base_config import BaseConfig


class MoonshotConfig(BaseConfig):
    r"""Configuration options for chat completion requests made through the
    Moonshot API. See the official API reference for more details:
    https://platform.moonshot.cn/docs/api-reference

    Args:
        stream (bool, optional): If :obj:`True`, partial results are
            streamed back as they are produced. (default: :obj:`False`)
        temperature (float, optional): Sampling temperature; smaller values
            yield more focused and deterministic output.
            (default: :obj:`0.3`)
        max_tokens (int, optional): Upper bound on the number of tokens the
            model may generate. (default: :obj:`None`)
        tools (list, optional): Tools the model may invoke for function
            calling; each entry is a dictionary describing the tool's type,
            function name, description, and parameters.
            (default: :obj:`None`)
    """

    stream: bool = False
    temperature: float = 0.3
    max_tokens: Optional[int] = None
    tools: Optional[list] = None


# Names of all parameters accepted by the Moonshot API, derived from the
# config model so the set stays in sync with MoonshotConfig's fields.
# (set(...) over .keys() replaces the redundant set comprehension.)
MOONSHOT_API_PARAMS = set(MoonshotConfig.model_fields.keys())
2 changes: 2 additions & 0 deletions camel/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from .mistral_model import MistralModel
from .model_factory import ModelFactory
from .model_manager import ModelManager, ModelProcessingError
from .moonshot_model import MoonshotModel
from .nemotron_model import NemotronModel
from .nvidia_model import NvidiaModel
from .ollama_model import OllamaModel
Expand Down Expand Up @@ -70,4 +71,5 @@
'DeepSeekModel',
'FishAudioModel',
'InternLMModel',
'MoonshotModel',
]
3 changes: 3 additions & 0 deletions camel/models/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
from camel.models.internlm_model import InternLMModel
from camel.models.litellm_model import LiteLLMModel
from camel.models.mistral_model import MistralModel
from camel.models.moonshot_model import MoonshotModel
from camel.models.nvidia_model import NvidiaModel
from camel.models.ollama_model import OllamaModel
from camel.models.openai_compatible_model import OpenAICompatibleModel
Expand Down Expand Up @@ -127,6 +128,8 @@ def create(
model_class = DeepSeekModel
elif model_platform.is_internlm and model_type.is_internlm:
model_class = InternLMModel
elif model_platform.is_moonshot and model_type.is_moonshot:
model_class = MoonshotModel
elif model_type == ModelType.STUB:
model_class = StubModel

Expand Down
138 changes: 138 additions & 0 deletions camel/models/moonshot_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========

import os
from typing import Any, Dict, List, Optional, Union

from openai import OpenAI, Stream

from camel.configs import MOONSHOT_API_PARAMS, MoonshotConfig
from camel.messages import OpenAIMessage
from camel.models import BaseModelBackend
from camel.types import (
ChatCompletion,
ChatCompletionChunk,
ModelType,
)
from camel.utils import (
BaseTokenCounter,
OpenAITokenCounter,
api_keys_required,
)


class MoonshotModel(BaseModelBackend):
    r"""Moonshot API in a unified BaseModelBackend interface.

    Args:
        model_type (Union[ModelType, str]): Model for which a backend is
            created, one of Moonshot series.
        model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
            that will be fed into :obj:`openai.ChatCompletion.create()`. If
            :obj:`None`, :obj:`MoonshotConfig().as_dict()` will be used.
            (default: :obj:`None`)
        api_key (Optional[str], optional): The API key for authenticating with
            the Moonshot service. If not provided, the ``MOONSHOT_API_KEY``
            environment variable is used. (default: :obj:`None`)
        url (Optional[str], optional): The url to the Moonshot service. If not
            provided, the ``MOONSHOT_API_BASE_URL`` environment variable is
            used, falling back to the public endpoint.
            (default: :obj:`https://api.moonshot.cn/v1`)
        token_counter (Optional[BaseTokenCounter], optional): Token counter to
            use for the model. If not provided, :obj:`OpenAITokenCounter(
            ModelType.GPT_4O_MINI)` will be used.
            (default: :obj:`None`)
    """

    @api_keys_required([("api_key", "MOONSHOT_API_KEY")])
    def __init__(
        self,
        model_type: Union[ModelType, str],
        model_config_dict: Optional[Dict[str, Any]] = None,
        api_key: Optional[str] = None,
        url: Optional[str] = None,
        token_counter: Optional[BaseTokenCounter] = None,
    ) -> None:
        if model_config_dict is None:
            model_config_dict = MoonshotConfig().as_dict()
        api_key = api_key or os.environ.get("MOONSHOT_API_KEY")
        # Allow pointing at a private/proxy deployment via env var.
        url = url or os.environ.get(
            "MOONSHOT_API_BASE_URL",
            "https://api.moonshot.cn/v1",
        )
        super().__init__(
            model_type, model_config_dict, api_key, url, token_counter
        )
        # Moonshot exposes an OpenAI-compatible endpoint, so the OpenAI SDK
        # client is reused with a custom base_url.
        self._client = OpenAI(
            api_key=self._api_key,
            timeout=180,
            max_retries=3,
            base_url=self._url,
        )

    def run(
        self,
        messages: List[OpenAIMessage],
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Runs inference of Moonshot chat completion.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat history
                in OpenAI API format.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                `ChatCompletion` in the non-stream mode, or
                `Stream[ChatCompletionChunk]` in the stream mode.
        """
        response = self._client.chat.completions.create(
            messages=messages,
            model=self.model_type,
            **self.model_config_dict,
        )
        return response

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            OpenAITokenCounter: The token counter following the model's
                tokenization style.
        """
        if not self._token_counter:
            # NOTE(review): Moonshot has no public tokenizer here; GPT-4o-mini
            # tokenization is used as an approximation — confirm adequacy.
            self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
        return self._token_counter

    def check_model_config(self):
        r"""Check whether the model configuration contains any
        unexpected arguments to Moonshot API.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected arguments to Moonshot API.
        """
        for param in self.model_config_dict:
            if param not in MOONSHOT_API_PARAMS:
                raise ValueError(
                    f"Unexpected argument `{param}` is "
                    "input into Moonshot model backend."
                )

    @property
    def stream(self) -> bool:
        r"""Returns whether the model is in stream mode, which sends partial
        results each time.

        Returns:
            bool: Whether the model is in stream mode.
        """
        return self.model_config_dict.get('stream', False)
22 changes: 22 additions & 0 deletions camel/types/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,11 @@ class ModelType(UnifiedModelType, Enum):
INTERNLM2_5_LATEST = "internlm2.5-latest"
INTERNLM2_PRO_CHAT = "internlm2-pro-chat"

# Moonshot models
MOONSHOT_V1_8K = "moonshot-v1-8k"
MOONSHOT_V1_32K = "moonshot-v1-32k"
MOONSHOT_V1_128K = "moonshot-v1-128k"

def __str__(self):
return self.value

Expand Down Expand Up @@ -422,6 +427,14 @@ def is_internlm(self) -> bool:
ModelType.INTERNLM2_PRO_CHAT,
}

    @property
    def is_moonshot(self) -> bool:
        r"""Returns whether this type of models is served by Moonshot."""
        return self in {
            ModelType.MOONSHOT_V1_8K,
            ModelType.MOONSHOT_V1_32K,
            ModelType.MOONSHOT_V1_128K,
        }

@property
def is_sglang(self) -> bool:
return self in {
Expand Down Expand Up @@ -469,6 +482,7 @@ def token_limit(self) -> int:
ModelType.QWEN_VL_PLUS,
ModelType.NVIDIA_LLAMA3_70B,
ModelType.TOGETHER_MISTRAL_7B,
ModelType.MOONSHOT_V1_8K,
}:
return 8_192
elif self in {
Expand Down Expand Up @@ -502,6 +516,7 @@ def token_limit(self) -> int:
ModelType.INTERNLM2_PRO_CHAT,
ModelType.TOGETHER_MIXTRAL_8_7B,
ModelType.SGLANG_MISTRAL_7B,
ModelType.MOONSHOT_V1_32K,
}:
return 32_768
elif self in {
Expand Down Expand Up @@ -546,6 +561,7 @@ def token_limit(self) -> int:
ModelType.SGLANG_LLAMA_3_1_405B,
ModelType.SGLANG_LLAMA_3_2_1B,
ModelType.SGLANG_MIXTRAL_NEMO,
ModelType.MOONSHOT_V1_128K,
}:
return 128_000
elif self in {
Expand Down Expand Up @@ -767,6 +783,7 @@ class ModelPlatformType(Enum):
DEEPSEEK = "deepseek"
SGLANG = "sglang"
INTERNLM = "internlm"
MOONSHOT = "moonshot"

@property
def is_openai(self) -> bool:
Expand Down Expand Up @@ -874,6 +891,11 @@ def is_internlm(self) -> bool:
r"""Returns whether this platform is InternLM."""
return self is ModelPlatformType.INTERNLM

    @property
    def is_moonshot(self) -> bool:
        r"""Returns whether this platform is Moonshot."""
        return self is ModelPlatformType.MOONSHOT


class AudioModelType(Enum):
TTS_1 = "tts-1"
Expand Down
5 changes: 5 additions & 0 deletions camel/types/unified_model_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,11 @@ def is_internlm(self) -> bool:
r"""Returns whether the model is a InternLM model."""
return True

    @property
    def is_moonshot(self) -> bool:
        r"""Returns whether the model is a Moonshot model."""
        return True

@property
def support_native_structured_output(self) -> bool:
r"""Returns whether the model supports native structured output."""
Expand Down
3 changes: 3 additions & 0 deletions docs/key_modules/models.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ The following table lists currently supported model platforms by CAMEL.
| Mistral AI | open-mixtral-8x7b | N |
| Mistral AI | open-mixtral-8x22b | N |
| Mistral AI | open-codestral-mamba | N |
| Moonshot | moonshot-v1-8k | N |
| Moonshot | moonshot-v1-32k | N |
| Moonshot | moonshot-v1-128k | N |
| Anthropic | claude-3-5-sonnet-latest | Y |
| Anthropic | claude-3-5-haiku-latest | N |
| Anthropic | claude-3-haiku-20240307 | Y |
Expand Down
46 changes: 46 additions & 0 deletions examples/models/moonshot_model_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========

from camel.agents import ChatAgent
from camel.configs import MoonshotConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

# Build a Moonshot backend via the model factory.
model = ModelFactory.create(
    model_platform=ModelPlatformType.MOONSHOT,
    model_type=ModelType.MOONSHOT_V1_8K,
    model_config_dict=MoonshotConfig(temperature=0.2).as_dict(),
)

# System prompt for the assistant.
sys_msg = "You are a helpful assistant."

# Create the chat agent backed by the Moonshot model.
agent = ChatAgent(system_message=sys_msg, model=model)

user_msg = """Say hi to CAMEL AI, one open-source community
dedicated to the study of autonomous and communicative agents."""

# Run one step of the conversation and print the reply.
response = agent.step(user_msg)
print(response.msgs[0].content)

'''
===============================================================================
Hi CAMEL AI! It's great to hear about your open-source community dedicated to
the study of autonomous and communicative agents. I'm here to help and support
you in any way I can. If you have any questions or need assistance with your
research, feel free to ask!
===============================================================================
'''
Loading
Loading