Skip to content

Commit

Permalink
fix: resolve yapf formatting problems.
Browse files Browse the repository at this point in the history
  • Loading branch information
L4zyy committed Apr 4, 2024
1 parent 2a1c025 commit 35a974b
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 57 deletions.
25 changes: 9 additions & 16 deletions camel/models/azure_openai_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,26 +49,21 @@ def __init__(
super().__init__(model_type, model_config_dict)

self.model_type = backend_config_dict.get(
"model_type", os.environ.get("AZURE_MODEL_TYPE", None)
)
"model_type", os.environ.get("AZURE_MODEL_TYPE", None))
self.deployment_name = backend_config_dict.get(
"deployment_name", os.environ.get("AZURE_DEPLOYMENT_NAME", None)
)
"deployment_name", os.environ.get("AZURE_DEPLOYMENT_NAME", None))
self.azure_endpoint = backend_config_dict.get(
"azure_endpoint", os.environ.get("AZURE_ENDPOINT", None)
)
"azure_endpoint", os.environ.get("AZURE_ENDPOINT", None))
self.api_version = backend_config_dict.get(
"api_version",
os.environ.get("AZURE_API_VERSION", "2023-10-01-preview"),
)

assert model_type is not None, "Azure model type is not provided."
assert (
self.deployment_name is not None
), "Azure model deployment name is not provided."
assert (
self.azure_endpoint is not None
), "Azure endpoint is not provided."
assert (self.deployment_name
is not None), "Azure model deployment name is not provided."
assert (self.azure_endpoint
is not None), "Azure endpoint is not provided."

if isinstance(self.model_type, str):
self.model_type = ModelType[self.model_type.upper()]
Expand Down Expand Up @@ -126,10 +121,8 @@ def check_model_config(self):
"""
for param in self.model_config_dict:
if param not in OPENAI_API_PARAMS_WITH_FUNCTIONS:
raise ValueError(
f"Unexpected argument `{param}` is "
"input into OpenAI model backend."
)
raise ValueError(f"Unexpected argument `{param}` is "
"input into OpenAI model backend.")

@property
def stream(self) -> bool:
Expand Down
5 changes: 2 additions & 3 deletions camel/models/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,8 @@ def create(
raise ValueError(f"Unknown model type `{model_type}` is input")

if backend_config_dict:
inst = model_class(
model_type, model_config_dict, backend_config_dict
)
inst = model_class(model_type, model_config_dict,
backend_config_dict)
else:
inst = model_class(model_type, model_config_dict)

Expand Down
46 changes: 16 additions & 30 deletions examples/azure/role_playing_with_azure_openai_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,18 +30,15 @@ def main(model_type=None, chat_turn_limit=50) -> None:
)

print(
Fore.GREEN
+ f"AI Assistant sys message:\n{role_play_session.assistant_sys_msg}\n"
)
print(
Fore.BLUE + f"AI User sys message:\n{role_play_session.user_sys_msg}\n"
)
Fore.GREEN +
f"AI Assistant sys message:\n{role_play_session.assistant_sys_msg}\n")
print(Fore.BLUE +
f"AI User sys message:\n{role_play_session.user_sys_msg}\n")

print(Fore.YELLOW + f"Original task prompt:\n{task_prompt}\n")
print(
Fore.CYAN
+ f"Specified task prompt:\n{role_play_session.specified_task_prompt}\n"
)
Fore.CYAN +
f"Specified task prompt:\n{role_play_session.specified_task_prompt}\n")
print(Fore.RED + f"Final task prompt:\n{role_play_session.task_prompt}\n")

n = 0
Expand All @@ -51,31 +48,20 @@ def main(model_type=None, chat_turn_limit=50) -> None:
assistant_response, user_response = role_play_session.step(input_msg)

if assistant_response.terminated:
print(
Fore.GREEN
+ (
"AI Assistant terminated. Reason: "
f"{assistant_response.info['termination_reasons']}."
)
)
print(Fore.GREEN +
("AI Assistant terminated. Reason: "
f"{assistant_response.info['termination_reasons']}."))
break
if user_response.terminated:
print(
Fore.GREEN
+ (
"AI User terminated. "
f"Reason: {user_response.info['termination_reasons']}."
)
)
print(Fore.GREEN +
("AI User terminated. "
f"Reason: {user_response.info['termination_reasons']}."))
break

print_text_animated(
Fore.BLUE + f"AI User:\n\n{user_response.msg.content}\n"
)
print_text_animated(
Fore.GREEN + "AI Assistant:\n\n"
f"{assistant_response.msg.content}\n"
)
print_text_animated(Fore.BLUE +
f"AI User:\n\n{user_response.msg.content}\n")
print_text_animated(Fore.GREEN + "AI Assistant:\n\n"
f"{assistant_response.msg.content}\n")

if "CAMEL_TASK_DONE" in user_response.msg.content:
break
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ def main(model_type=None, chat_turn_limit=50, model_path=" ",
task_prompt = "Develop a trading bot for the stock market"

agent_kwargs = {
role: dict(
role:
dict(
model_type=model_type,
model_config=OpenSourceConfig(
model_path=model_path,
Expand Down
10 changes: 3 additions & 7 deletions test/models/test_azure_openai_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,12 +48,8 @@ def test_azure_openai_model_unexpected_argument():
model_config_dict = model_config.__dict__

with pytest.raises(
ValueError,
match=re.escape(
(
"Unexpected argument `model_path` is "
"input into OpenAI model backend."
)
),
ValueError,
match=re.escape(("Unexpected argument `model_path` is "
"input into OpenAI model backend.")),
):
_ = AzureOpenAIModel(model_type, model_config_dict)

0 comments on commit 35a974b

Please sign in to comment.