Python: Fix OpenAI/API plugins based on samples path changes. Black formatting for notebooks. #6167

Closed
wants to merge 3 commits into from
@@ -17,7 +17,7 @@
async def add_secret_to_key_vault(kernel: Kernel, plugin: KernelPlugin):
"""Adds a secret to the Azure Key Vault."""
result = await kernel.invoke(
functions=plugin["SetSecret"],
function=plugin["SetSecret"],
path_params={"secret-name": "Foo"},
query_params={"api-version": "7.0"},
request_body={"value": "Bar", "enabled": True},
@@ -30,9 +30,10 @@ async def add_secret_to_key_vault(kernel: Kernel, plugin: KernelPlugin):
async def get_secret_from_key_vault(kernel: Kernel, plugin: KernelPlugin):
"""Gets a secret from the Azure Key Vault."""
result = await kernel.invoke(
functions=plugin["GetSecret"],
path_params={"secret-name ": "Foo"},
function=plugin["GetSecret"],
path_params={"secret-name": "Foo"},
query_params={"api-version": "7.0"},
request_body={},
headers={},
)

@@ -136,7 +137,7 @@ async def main():
kernel = Kernel()

openai_spec_file = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "resources", "open_ai_plugins", "akv-openai.json"
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "open_ai_plugins", "akv-openai.json"
)
with open(openai_spec_file, "r") as file:
openai_spec = file.read()
@@ -155,6 +156,7 @@ async def main():
)

await add_secret_to_key_vault(kernel, plugin)
await get_secret_from_key_vault(kernel, plugin)


if __name__ == "__main__":
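The extra os.path.dirname call in the hunk above accounts for the sample now living one directory deeper than the resources folder. A minimal sketch of the difference, using a hypothetical script path (the real file name is not shown in this diff):

import os

# Hypothetical location of the sample script; only the directory depth matters here.
script = "/repo/python/samples/concepts/plugins/openai_plugin_sample.py"

one_up = os.path.dirname(os.path.realpath(script))
two_up = os.path.dirname(os.path.dirname(os.path.realpath(script)))
print(one_up)  # /repo/python/samples/concepts/plugins  (old code looked for resources/ here)
print(two_up)  # /repo/python/samples/concepts           (resources/ actually lives here)

spec = os.path.join(two_up, "resources", "open_ai_plugins", "akv-openai.json")
print(spec)    # /repo/python/samples/concepts/resources/open_ai_plugins/akv-openai.json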
4 changes: 1 addition & 3 deletions python/samples/concepts/plugins/openapi/openapi_client.py
@@ -8,9 +8,7 @@ async def main():
"""Client"""
kernel = sk.Kernel()

openapi_plugin = kernel.import_plugin_from_openapi(
plugin_name="openApiPlugin", openapi_document_path="./openapi.yaml"
)
openapi_plugin = kernel.add_plugin_from_openapi(plugin_name="openApiPlugin", openapi_document_path="./openapi.yaml")

arguments = {
"request_body": '{"input": "hello world"}',
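For reference, a minimal end-to-end sketch of the renamed API as this sample now uses it. The "helloWorld" operation name is a hypothetical placeholder; the real operation id comes from openapi.yaml, and the request_body argument follows the pattern already shown in this file.

import asyncio
import semantic_kernel as sk

async def main():
    kernel = sk.Kernel()
    # add_plugin_from_openapi replaces the older import_plugin_from_openapi name.
    plugin = kernel.add_plugin_from_openapi(
        plugin_name="openApiPlugin", openapi_document_path="./openapi.yaml"
    )
    # "helloWorld" is a hypothetical operationId defined in openapi.yaml.
    result = await kernel.invoke(
        function=plugin["helloWorld"],
        request_body='{"input": "hello world"}',
    )
    print(result)

if __name__ == "__main__":
    asyncio.run(main())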
@@ -12,7 +12,7 @@
},
"api": {
"type": "openapi",
"url": "file:///./python/samples/kernel-syntax-examples/resources/open_ai_plugins/akv-openapi.yaml"
"url": "file:///./python/samples/concepts/resources/open_ai_plugins/akv-openapi.yaml"
},
"logo_url": "",
"contact_email": "",
22 changes: 11 additions & 11 deletions python/samples/getting_started/03-prompt-function-inline.ipynb
@@ -61,7 +61,7 @@
"from services import Service\n",
"\n",
"# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
"selectedService = Service.OpenAI"
"selectedService = Service.AzureOpenAI"
]
},
{
@@ -77,24 +77,24 @@
"\n",
"service_id = None\n",
"if selectedService == Service.OpenAI:\n",
" from semantic_kernel.connectors.ai.open_ai import OpenAITextCompletion\n",
" from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion\n",
" from semantic_kernel.utils.settings import openai_settings_from_dot_env\n",
"\n",
" api_key, org_id = openai_settings_from_dot_env()\n",
" service_id = \"oai_text_completion\"\n",
" service_id = \"oai_chat_completion\"\n",
" kernel.add_service(\n",
" OpenAITextCompletion(\n",
" OpenAIChatCompletion(\n",
" service_id=service_id, ai_model_id=\"gpt-3.5-turbo-instruct\", api_key=api_key, org_id=org_id\n",
" ),\n",
" )\n",
"elif selectedService == Service.AzureOpenAI:\n",
" from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion\n",
" from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n",
" from semantic_kernel.utils.settings import azure_openai_settings_from_dot_env\n",
"\n",
" deployment, api_key, endpoint = azure_openai_settings_from_dot_env()\n",
" service_id = \"aoai_text_completion\"\n",
" service_id = \"aoai_chat_completion\"\n",
" kernel.add_service(\n",
" AzureTextCompletion(service_id=service_id, deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n",
" AzureChatCompletion(service_id=service_id, deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n",
" )"
]
},
@@ -116,7 +116,7 @@
"metadata": {},
"outputs": [],
"source": [
"from semantic_kernel.connectors.ai.open_ai import OpenAITextPromptExecutionSettings\n",
"from semantic_kernel.connectors.ai.open_ai import OpenAIChatPromptExecutionSettings\n",
"from semantic_kernel.prompt_template import PromptTemplateConfig, InputVariable\n",
"\n",
"\n",
@@ -125,14 +125,14 @@
"\"\"\"\n",
"\n",
"if selectedService == Service.OpenAI:\n",
" execution_settings = OpenAITextPromptExecutionSettings(\n",
" execution_settings = OpenAIChatPromptExecutionSettings(\n",
" service_id=service_id,\n",
" ai_model_id=\"gpt-3.5-turbo-instruct\",\n",
" max_tokens=2000,\n",
" temperature=0.7,\n",
" )\n",
"elif selectedService == Service.AzureOpenAI:\n",
" execution_settings = OpenAITextPromptExecutionSettings(\n",
" execution_settings = OpenAIChatPromptExecutionSettings(\n",
" service_id=service_id,\n",
" ai_model_id=deployment,\n",
" max_tokens=2000,\n",
@@ -344,7 +344,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.3"
"version": "3.11.9"
}
},
"nbformat": 4,
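Downstream of the cells changed above, the notebook feeds these execution settings into a prompt function. A rough sketch of that flow under the chat-completion services this PR switches to; kernel.add_function, PromptTemplateConfig, and the placeholder credentials below follow the notebook's usual pattern but are not part of this diff, so treat the exact signatures as assumptions.

from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.open_ai import (
    AzureChatCompletion,
    OpenAIChatPromptExecutionSettings,
)
from semantic_kernel.prompt_template import InputVariable, PromptTemplateConfig

kernel = Kernel()
service_id = "aoai_chat_completion"
# Placeholder Azure OpenAI credentials; the notebook reads these from a .env file.
kernel.add_service(
    AzureChatCompletion(
        service_id=service_id,
        deployment_name="<deployment>",
        endpoint="<endpoint>",
        api_key="<api-key>",
    )
)

execution_settings = OpenAIChatPromptExecutionSettings(
    service_id=service_id, max_tokens=2000, temperature=0.7
)
prompt_template_config = PromptTemplateConfig(
    template="{{$input}}\n\nSummarize the content above.",
    input_variables=[InputVariable(name="input", description="The user input")],
    execution_settings=execution_settings,
)
summarize = kernel.add_function(
    function_name="summarize",
    plugin_name="SummarizePlugin",
    prompt_template_config=prompt_template_config,
)
# In a notebook cell: result = await kernel.invoke(summarize, input="<text to summarize>")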
@@ -218,6 +218,7 @@
"from semantic_kernel.functions import KernelFunction\n",
"from semantic_kernel.prompt_template import PromptTemplateConfig\n",
"\n",
"\n",
"async def setup_chat_with_memory(\n",
" kernel: Kernel,\n",
" service_id: str,\n",
@@ -183,7 +183,7 @@
"source": [
"if selectedService == Service.AzureOpenAI:\n",
" prompt = \"provide me a list of possible meanings for the acronym 'ORLD'\"\n",
" \n",
"\n",
" results = await azure_text_service.complete(prompt=prompt, settings=oai_text_prompt_execution_settings)\n",
" i = 1\n",
" for result in results:\n",
@@ -226,7 +226,7 @@
"source": [
"if selectedService == Service.HuggingFace:\n",
" prompt = \"The purpose of a rubber duck is\"\n",
" \n",
"\n",
" results = await hf_text_service.complete(prompt=prompt, prompt_execution_settings=hf_prompt_execution_settings)\n",
" print(\"\".join(results))"
]
@@ -342,23 +342,33 @@ def _create_function_from_operation(
name=operation.id,
)
async def run_openapi_operation(
path_params: Annotated[dict | str | None, "A dictionary of path parameters"] = None,
query_params: Annotated[dict | str | None, "A dictionary of query parameters"] = None,
headers: Annotated[dict | str | None, "A dictionary of headers"] = None,
request_body: Annotated[dict | str | None, "A dictionary of the request body"] = None,
path_params: Annotated[
dict | str | None, "A dictionary of path parameters. If not specified return empty string."
] = None,
query_params: Annotated[
dict | str | None, "A dictionary of query parameters. If not specified return empty string."
] = None,
headers: Annotated[dict | str | None, "A dictionary of headers. If not specified return empty string."] = None,
request_body: Annotated[
dict | str | None, "A dictionary of the request body. If not specified return empty string."
] = None,
) -> str:
def parse_params(param):
if param == "" or param is None:
return {}
if isinstance(param, str):
try:
return json.loads(param)
except json.JSONDecodeError:
raise ValueError(f"Invalid JSON string: {param}")
return param

response = await runner.run_operation(
operation,
path_params=(
json.loads(path_params) if isinstance(path_params, str) else path_params if path_params else None
),
query_params=(
json.loads(query_params) if isinstance(query_params, str) else query_params if query_params else None
),
headers=json.loads(headers) if isinstance(headers, str) else headers if headers else None,
request_body=(
json.loads(request_body) if isinstance(request_body, str) else request_body if request_body else None
),
path_params=parse_params(path_params),
query_params=parse_params(query_params),
headers=parse_params(headers),
request_body=parse_params(request_body),
)
return response

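A small standalone sketch of what the new parse_params normalization buys, using only the behavior visible in the hunk above. Note that empty or missing parameters now become {} rather than None, which is what the replaced inline json.loads expressions fell back to.

import json

def parse_params(param):
    # Mirrors the helper added above: empty/None -> {}, JSON strings -> dict,
    # anything else (already a dict) passes through unchanged.
    if param == "" or param is None:
        return {}
    if isinstance(param, str):
        try:
            return json.loads(param)
        except json.JSONDecodeError:
            raise ValueError(f"Invalid JSON string: {param}")
    return param

print(parse_params(None))                      # {}
print(parse_params(""))                        # {}
print(parse_params('{"secret-name": "Foo"}'))  # {'secret-name': 'Foo'}
print(parse_params({"api-version": "7.0"}))    # {'api-version': '7.0'}
# parse_params("not json") raises ValueError: Invalid JSON string: not json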