Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: ChatAgent support api key as parameter #551

Merged
merged 9 commits into from
May 26, 2024
8 changes: 6 additions & 2 deletions camel/agents/chat_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,9 @@ class ChatAgent(BaseAgent):
responses. (default :obj:`ModelType.GPT_3_5_TURBO`)
model_config (BaseConfig, optional): Configuration options for the
LLM model. (default: :obj:`None`)
api_key (str, optional): The API key for authenticating with the
LLM service. Only OpenAI and Anthropic models are supported
(default: :obj:`None`)
memory (AgentMemory, optional): The agent memory for managing chat
messages. If `None`, a :obj:`ChatHistoryMemory` will be used.
(default: :obj:`None`)
Expand All @@ -108,6 +111,7 @@ def __init__(
system_message: BaseMessage,
model_type: Optional[ModelType] = None,
model_config: Optional[BaseConfig] = None,
api_key: Optional[str] = None,
memory: Optional[AgentMemory] = None,
message_window_size: Optional[int] = None,
token_limit: Optional[int] = None,
Expand All @@ -133,9 +137,9 @@ def __init__(
self.func_dict[func.get_function_name()] = func.func

self.model_config = model_config or ChatGPTConfig()

self._api_key = api_key
self.model_backend: BaseModelBackend = ModelFactory.create(
self.model_type, self.model_config.__dict__
self.model_type, self.model_config.__dict__, self._api_key
)
self.model_token_limit = token_limit or self.model_backend.token_limit
context_creator = ScoreBasedContextCreator(
Expand Down
9 changes: 7 additions & 2 deletions camel/embeddings/openai_embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from typing import Any, List
import os
from typing import Any, List, Optional

from openai import OpenAI

Expand All @@ -26,6 +27,8 @@ class OpenAIEmbedding(BaseEmbedding[str]):
Args:
model (OpenAiEmbeddingModel, optional): The model type to be used for
generating embeddings. (default: :obj:`ModelType.ADA_2`)
api_key (Optional[str]): The API key for authenticating with the
OpenAI service. (default: :obj:`None`)

Raises:
RuntimeError: If an unsupported model type is specified.
Expand All @@ -34,12 +37,14 @@ class OpenAIEmbedding(BaseEmbedding[str]):
def __init__(
self,
model_type: EmbeddingModelType = EmbeddingModelType.ADA_2,
api_key: Optional[str] = None,
) -> None:
if not model_type.is_openai:
raise ValueError("Invalid OpenAI embedding model type.")
self.model_type = model_type
self.output_dim = model_type.output_dim
self.client = OpenAI()
self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
self.client = OpenAI(timeout=60, max_retries=3, api_key=self._api_key)

@api_key_required
def embed_list(
Expand Down
18 changes: 16 additions & 2 deletions camel/models/anthropic_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,24 @@ class AnthropicModel(BaseModelBackend):
r"""Anthropic API in a unified BaseModelBackend interface."""

def __init__(
self, model_type: ModelType, model_config_dict: Dict[str, Any]
self,
model_type: ModelType,
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
) -> None:
r"""Constructor for Anthropic backend.

Args:
model_type (ModelType): Model for which a backend is created,
one of CLAUDE_* series.
model_config_dict (Dict[str, Any]): A dictionary that will
be fed into Anthropic.messages.create().
api_key (Optional[str]): The API key for authenticating with the
Anthropic service. (default: :obj:`None`)
"""
super().__init__(model_type, model_config_dict)
self.client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
self._api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")
self.client = Anthropic(api_key=self._api_key)
self._token_counter: Optional[BaseTokenCounter] = None

def _convert_response_from_anthropic_to_openai(self, response):
Expand Down
10 changes: 8 additions & 2 deletions camel/models/base_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Union
from typing import Any, Dict, List, Optional, Union

from openai import Stream

Expand All @@ -27,17 +27,23 @@ class BaseModelBackend(ABC):
"""

def __init__(
self, model_type: ModelType, model_config_dict: Dict[str, Any]
self,
model_type: ModelType,
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
) -> None:
r"""Constructor for the model backend.

Args:
model_type (ModelType): Model for which a backend is created.
model_config_dict (Dict[str, Any]): A config dictionary.
api_key (Optional[str]): The API key for authenticating with the
LLM service.
"""
self.model_type = model_type

self.model_config_dict = model_config_dict
self._api_key = api_key
self.check_model_config()

@property
Expand Down
10 changes: 7 additions & 3 deletions camel/models/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from typing import Any, Dict
from typing import Any, Dict, Optional

from camel.models.anthropic_model import AnthropicModel
from camel.models.base_model import BaseModelBackend
Expand All @@ -30,14 +30,18 @@ class ModelFactory:

@staticmethod
def create(
model_type: ModelType, model_config_dict: Dict
model_type: ModelType,
model_config_dict: Dict,
api_key: Optional[str] = None,
) -> BaseModelBackend:
r"""Creates an instance of `BaseModelBackend` of the specified type.

Args:
model_type (ModelType): Model for which a backend is created.
model_config_dict (Dict): A dictionary that will be fed into
the backend constructor.
api_key (Optional[str]): The API key for authenticating with the
LLM service.

Raises:
ValueError: If there is no backend for the model.
Expand All @@ -57,5 +61,5 @@ def create(
else:
raise ValueError(f"Unknown model type `{model_type}` is input")

inst = model_class(model_type, model_config_dict)
inst = model_class(model_type, model_config_dict, api_key)
return inst
12 changes: 10 additions & 2 deletions camel/models/openai_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,10 @@ class OpenAIModel(BaseModelBackend):
r"""OpenAI API in a unified BaseModelBackend interface."""

def __init__(
self, model_type: ModelType, model_config_dict: Dict[str, Any]
self,
model_type: ModelType,
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
) -> None:
r"""Constructor for OpenAI backend.

Expand All @@ -36,10 +39,15 @@ def __init__(
one of GPT_* series.
model_config_dict (Dict[str, Any]): A dictionary that will
be fed into openai.ChatCompletion.create().
api_key (Optional[str]): The API key for authenticating with the
OpenAI service. (default: :obj:`None`)
"""
super().__init__(model_type, model_config_dict)
url = os.environ.get('OPENAI_API_BASE_URL', None)
self._client = OpenAI(timeout=60, max_retries=3, base_url=url)
self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
self._client = OpenAI(
timeout=60, max_retries=3, base_url=url, api_key=self._api_key
)
self._token_counter: Optional[BaseTokenCounter] = None

@property
Expand Down
6 changes: 5 additions & 1 deletion camel/models/stub_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,11 +50,15 @@ class StubModel(BaseModelBackend):
model_type = ModelType.STUB

def __init__(
self, model_type: ModelType, model_config_dict: Dict[str, Any]
self,
model_type: ModelType,
model_config_dict: Dict[str, Any],
api_key: Optional[str] = None,
) -> None:
r"""All arguments are unused for the dummy model."""
super().__init__(model_type, model_config_dict)
self._token_counter: Optional[BaseTokenCounter] = None
self._api_key = api_key

@property
def token_counter(self) -> BaseTokenCounter:
Expand Down
9 changes: 4 additions & 5 deletions camel/utils/commons.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,7 @@ def get_lazy_imported_types_module():


def api_key_required(func: F) -> F:
r"""Decorator that checks if the OpenAI API key is available in the
environment variables.
r"""Decorator that checks if the API key is available, either as an
environment variable or passed directly.

Args:
func (callable): The function to be wrapped.
Expand All @@ -68,14 +67,14 @@ def api_key_required(func: F) -> F:
callable: The decorated function.

Raises:
ValueError: If the OpenAI API key is not found in the environment
variables.
ValueError: If the API key is not found, either as an environment
variable or directly passed.
"""

@wraps(func)
def wrapper(self, *args, **kwargs):
if self.model_type.is_openai:
if 'OPENAI_API_KEY' not in os.environ:
if not self._api_key and 'OPENAI_API_KEY' not in os.environ:
raise ValueError('OpenAI API key not found.')
return func(self, *args, **kwargs)
elif self.model_type.is_anthropic:
Expand Down