Skip to content

Commit

Permalink
[HF][fix] Use ParameterizedModelParser instead of ModelParser
Browse files Browse the repository at this point in the history
Fixing this error:
```
Exception: HuggingFaceImage2TextTransformer.run() got an unexpected keyword argument 'callback_manager'
```

This seems like a pretty brittle bug. If we use `ModelParser` instead of `ParameterizedModelParser`, we get a bug because `callback_manager` is not a supported kwarg for the `run_inference()` method. This gets filtered out by the `ParameterizedModelParser.run()` method (https://github.com/lastmile-ai/aiconfig/blob/0ceb17636bae2b5416e7b415cdc87fd71b5ba3b0/python/src/aiconfig/default_parsers/parameterized_model_parser.py#L48-L54) but these kwargs are not accepted by the `ModelParser.run()` method (https://github.com/lastmile-ai/aiconfig/blob/0ceb17636bae2b5416e7b415cdc87fd71b5ba3b0/python/src/aiconfig/model_parser.py#L62-L69)

## Test Plan

Before

After
  • Loading branch information
Rossdan Craig [email protected] committed Jan 11, 2024
1 parent cdbd431 commit 31a762f
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,17 @@
import torch
from transformers import pipeline, Pipeline
from aiconfig_extension_hugging_face.local_inference.util import get_hf_model
from aiconfig import ModelParser, InferenceOptions

from aiconfig.callback import CallbackEvent
from aiconfig.default_parsers.parameterized_model_parser import ParameterizedModelParser
from aiconfig.model_parser import InferenceOptions
from aiconfig.schema import Prompt, Output, ExecuteResult, Attachment

if TYPE_CHECKING:
from aiconfig import AIConfigRuntime


class HuggingFaceAutomaticSpeechRecognitionTransformer(ModelParser):
class HuggingFaceAutomaticSpeechRecognitionTransformer(ParameterizedModelParser):
"""
Model Parser for HuggingFace ASR (Automatic Speech Recognition) models.
"""
Expand Down Expand Up @@ -85,7 +87,7 @@ async def deserialize(
await aiconfig.callback_manager.run_callbacks(CallbackEvent("on_deserialize_complete", __name__, {"output": completion_data}))
return completion_data

async def run(self, prompt: Prompt, aiconfig: "AIConfigRuntime", options: InferenceOptions, parameters: Dict[str, Any]) -> list[Output]:
async def run_inference(self, prompt: Prompt, aiconfig: "AIConfigRuntime", options: InferenceOptions, parameters: Dict[str, Any]) -> List[Output]:
await aiconfig.callback_manager.run_callbacks(
CallbackEvent(
"on_run_start",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,9 @@

from aiconfig_extension_hugging_face.local_inference.util import get_hf_model

from aiconfig import ModelParser, InferenceOptions
from aiconfig.callback import CallbackEvent
from aiconfig.default_parsers.parameterized_model_parser import ParameterizedModelParser
from aiconfig.model_parser import InferenceOptions
from aiconfig.schema import (
Attachment,
ExecuteResult,
Expand All @@ -24,7 +25,7 @@
from aiconfig import AIConfigRuntime


class HuggingFaceImage2TextTransformer(ModelParser):
class HuggingFaceImage2TextTransformer(ParameterizedModelParser):
def __init__(self):
"""
Returns:
Expand Down Expand Up @@ -117,7 +118,7 @@ async def deserialize(
await aiconfig.callback_manager.run_callbacks(CallbackEvent("on_deserialize_complete", __name__, {"output": completion_params}))
return completion_params

async def run(self, prompt: Prompt, aiconfig: "AIConfigRuntime", options: InferenceOptions, parameters: Dict[str, Any]) -> list[Output]:
async def run_inference(self, prompt: Prompt, aiconfig: "AIConfigRuntime", options: InferenceOptions, parameters: Dict[str, Any]) -> List[Output]:
await aiconfig.callback_manager.run_callbacks(
CallbackEvent(
"on_run_start",
Expand Down

0 comments on commit 31a762f

Please sign in to comment.