Add name customization
rizerphe committed Jul 3, 2023
1 parent e6e507d commit 2879b4d
Showing 9 changed files with 170 additions and 9 deletions.
6 changes: 5 additions & 1 deletion docs/conversation.md
@@ -38,6 +38,8 @@ def my_awesome_function(...):
...

@conversation.add_function(
name="my_really_amazing_function",
description="The most amazing function of them all",
save_return=True,
serialize=False,
remove_call=False,
@@ -51,8 +53,10 @@ conversation.remove_function(my_awesome_function)
conversation.remove_function("my_amazing_function")
```

The arguments passed to `add_function` are the same as those an [OpenAIFunction](openai_functions.OpenAIFunction) inherently has:
All of the keyword arguments passed to `add_function` are optional; most of them are the same as those an [OpenAIFunction](openai_functions.OpenAIFunction) inherently has:

- `name` - overrides the function name sent to the AI; defaults to the function's own name
- `description` - overrides the function description sent to the AI; defaults to the short description from the function's docstring
- `save_return` - whether to send the return value of the function back to the AI; some functions - mainly those that don't return anything - don't need to do this
- `serialize` - whether to serialize the function's return value before sending the result back to the AI; OpenAI expects the function result to be a string, so if this is False, the function itself should return a string. If it is True, JSON serialization is used, so the return value needs to be JSON-serializable
- `remove_call` - whether to remove the function call message itself; when combining this with `save_return=False`, be careful to avoid infinite loops: the function should then become unavailable (for example, disappear from the schema), and it's your responsibility to make sure that happens
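For context, a minimal usage sketch of `add_function` with the new overrides, based on the options documented above. It assumes `Conversation` is importable from the package root and uses a placeholder function body; an OpenAI API key is expected to be configured as usual.

```python
from openai_functions import Conversation  # assumed import path

conversation = Conversation()

@conversation.add_function(
    name="my_really_amazing_function",  # overrides the Python function name
    description="The most amazing function of them all",  # overrides the docstring summary
    save_return=True,
    serialize=True,
)
def my_awesome_function(location: str) -> dict:
    """Placeholder body; this summary would be used if no description were given."""
    return {"location": location, "weather": "sunny"}

# The function object itself can still be used to unregister it later:
conversation.remove_function(my_awesome_function)
```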
5 changes: 5 additions & 0 deletions docs/nlp_interface.md
@@ -30,6 +30,8 @@ def get_current_weather(location: str):
...

@nlp(
name="set_current_weather",
description="Set the current weather for a given location",
system_prompt="You're an AI capable of changing the weather.",
model="gpt-4-0613"
)
@@ -39,6 +41,9 @@ def set_current_weather(location: str, description: str):

The parameters it takes are:

- `name` - the name of the function sent to the AI, defaulting to the function name itself
- `description` - the description of the function sent to the AI; defaults to the short description from the function's docstring
- `save_return` - whether to send the return value of the function back to the AI; some functions - mainly those that don't return anything - don't need to do this
- `system_prompt` - if provided, when asking the AI, the conversation will start with this system prompt, letting you modify the behavior of the model
- `model` - the model to use; currently (July 1st 2023) only `gpt-3.5-turbo-0613`, `gpt-3.5-turbo-16k-0613` and `gpt-4-0613` are supported

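For context, a minimal sketch of the `nlp` decorator combining the parameters listed above, including the new `name` and `description`. The import path is assumed, the function body is a placeholder, and how the wrapped function is then invoked is covered elsewhere in the documentation.

```python
from openai_functions import nlp  # assumed import path

@nlp(
    name="set_current_weather",
    description="Set the current weather for a given location",
    system_prompt="You're an AI capable of changing the weather.",
    model="gpt-4-0613",
)
def set_current_weather(location: str, description: str) -> str:
    """Placeholder implementation of a weather setter."""
    return f"The weather in {location} is now: {description}"
```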
4 changes: 4 additions & 0 deletions docs/skills.md
@@ -10,6 +10,8 @@ def get_current_weather(location: str) -> dict:
...

@skill.add_function(
name="set_weather_but_for_real",
description="Set the weather or something",
save_return=True,
serialize=False,
remove_call=False,
@@ -23,6 +25,8 @@ schema = skill.functions_schema

The parameters here are:

- `name` - the name of the function sent to the AI, defaulting to the function name itself
- `description` - the description of the function sent to the AI; defaults to the short description from the function's docstring
- `save_return` - whether to send the return value of the function back to the AI; some functions - mainly those that don't return anything - don't need to do this
- `serialize` - whether to serialize the function's return value before sending the result back to the AI; OpenAI expects the function result to be a string, so if this is False, the function itself should return a string. If it is True, JSON serialization is used, so the return value needs to be JSON-serializable
- `remove_call` - whether to remove the function call message itself; when combining this with `save_return=False`, be careful to avoid infinite loops: the function should then become unavailable (for example, disappear from the schema), and it's your responsibility to make sure that happens
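To make the effect of `name` and `description` concrete, here is a rough sketch of the schema a skill would expose after registering a function with both overrides. It assumes the skill is a `BasicFunctionSet` (the set implementation visible elsewhere in this commit); the skills documentation may construct it differently, and the exact parameter schema depends on the argument parsers.

```python
from openai_functions.functions.basic_set import BasicFunctionSet  # module path seen in this commit

skill = BasicFunctionSet()

@skill.add_function(
    name="set_weather_but_for_real",
    description="Set the weather or something",
)
def set_current_weather(location: str) -> str:
    """Placeholder weather setter."""
    return f"Weather set for {location}."

print(skill.functions_schema)
# Roughly:
# [{
#     "name": "set_weather_but_for_real",
#     "description": "Set the weather or something",
#     "parameters": {
#         "type": "object",
#         "properties": {"location": {"type": "string"}},
#         "required": ["location"],
#     },
# }]
```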
13 changes: 13 additions & 0 deletions openai_functions/conversation.py
@@ -293,6 +293,8 @@ def add_function(
self,
function: Callable[..., JsonType],
*,
name: str | None = None,
description: str | None = None,
save_return: bool = True,
serialize: bool = True,
remove_call: bool = False,
@@ -304,6 +306,8 @@ def add_function(
def add_function(
self,
*,
name: str | None = None,
description: str | None = None,
save_return: bool = True,
serialize: bool = True,
remove_call: bool = False,
@@ -315,6 +319,8 @@ def add_function(
self,
function: OpenAIFunction | Callable[..., JsonType] | None = None,
*,
name: str | None = None,
description: str | None = None,
save_return: bool = True,
serialize: bool = True,
remove_call: bool = False,
@@ -327,6 +333,9 @@ def add_function(
Args:
function (OpenAIFunction | Callable[..., JsonType]): The function to add
name (str): The name of the function. Defaults to the function's name.
description (str): The description of the function. Defaults to getting
the short description from the function's docstring.
save_return (bool): Whether to send the return value of this function back
to the AI. Defaults to True.
serialize (bool): Whether to serialize the return value of this function.
@@ -342,13 +351,17 @@ def add_function(
"""
if function is None:
return self.skills.add_function(
name=name,
description=description,
save_return=save_return,
serialize=serialize,
remove_call=remove_call,
interpret_as_response=interpret_as_response,
)
return self.skills.add_function(
function,
name=name,
description=description,
save_return=save_return,
serialize=serialize,
remove_call=remove_call,
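The first overload above also allows passing an existing callable directly, without using `add_function` as a decorator. A hedged sketch of that call form, with placeholder names and an assumed import path:

```python
import datetime

from openai_functions import Conversation  # assumed import path


def get_time() -> str:
    """Return the current time as ISO text (placeholder)."""
    return datetime.datetime.now().isoformat()


conversation = Conversation()

# The non-decorator form: pass the callable directly and override its
# advertised name and description.
conversation.add_function(
    get_time,
    name="current_time",
    description="Get the current time",
)
```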
14 changes: 14 additions & 0 deletions openai_functions/functions/sets.py
@@ -59,6 +59,8 @@ def add_function(
self,
function: Callable[..., JsonType],
*,
name: str | None = None,
description: str | None = None,
save_return: bool = True,
serialize: bool = True,
remove_call: bool = False,
@@ -70,6 +72,8 @@ def add_function(
def add_function(
self,
*,
name: str | None = None,
description: str | None = None,
save_return: bool = True,
serialize: bool = True,
remove_call: bool = False,
@@ -81,6 +85,8 @@ def add_function(
self,
function: OpenAIFunction | Callable[..., JsonType] | None = None,
*,
name: str | None = None,
description: str | None = None,
save_return: bool = True,
serialize: bool = True,
remove_call: bool = False,
@@ -93,6 +99,9 @@ def add_function(
Args:
function (OpenAIFunction | Callable[..., JsonType]): The function
name (str): The name of the function. Defaults to the function's name.
description (str): The description of the function. Defaults to getting
the short description from the function's docstring.
save_return (bool): Whether to send the return value of this
function to the AI. Defaults to True.
serialize (bool): Whether to serialize the return value of this
@@ -117,14 +126,19 @@ def add_function(
WrapperConfig(
None, save_return, serialize, remove_call, interpret_as_response
),
name=name,
description=description,
)
)
return function

return partial(
self.add_function,
name=name,
description=description,
save_return=save_return,
serialize=serialize,
remove_call=remove_call,
interpret_as_response=interpret_as_response,
)

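The implementation above relies on a common Python idiom: when `add_function` is called with keyword arguments only, it returns a `functools.partial` of itself, which then acts as the decorator and receives the function on the second call. A stripped-down, self-contained illustration of that idiom (not the library's actual code):

```python
from __future__ import annotations

from functools import partial
from typing import Callable, overload


class Registry:
    """A toy function registry using the same decorator-factory idiom."""

    def __init__(self) -> None:
        self.functions: dict[str, Callable[..., object]] = {}

    @overload
    def add_function(
        self, function: Callable[..., object], *, name: str | None = None
    ) -> Callable[..., object]: ...

    @overload
    def add_function(
        self, *, name: str | None = None
    ) -> Callable[[Callable[..., object]], Callable[..., object]]: ...

    def add_function(self, function=None, *, name=None):
        if function is None:
            # Keyword-only call: hand back a partial that will receive
            # the function when used as a decorator.
            return partial(self.add_function, name=name)
        self.functions[name or function.__name__] = function
        return function


registry = Registry()


@registry.add_function(name="renamed")
def greet() -> str:
    return "hello"


assert "renamed" in registry.functions
```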
79 changes: 79 additions & 0 deletions openai_functions/functions/togglable_set.py
@@ -0,0 +1,79 @@
"""A function set disabled by default that exposes a function to enable it."""
from __future__ import annotations
from typing import TYPE_CHECKING

from .basic_set import BasicFunctionSet
from .exceptions import FunctionNotFoundError
from .functions import FunctionResult
from .functions import OpenAIFunction

if TYPE_CHECKING:
from ..json_type import JsonType
from ..openai_types import FunctionCall


class TogglableSet(BasicFunctionSet):
"""A function set that is disabled by default and can be enabled by the AI.
Args:
enable_function_name (str): The name of the function to enable the set
enable_function_description (str, optional): The description of the enable
function. By default no description is provided.
functions (list[OpenAIFunction], optional): The functions in the set.
"""

def __init__(
self,
enable_function_name: str,
enable_function_description: str | None = None,
functions: list[OpenAIFunction] | None = None,
) -> None:
super().__init__(functions)
self.enabled = False
self.enable_function_name = enable_function_name
self.enable_function_description = enable_function_description

def enable(self) -> None:
"""Enable the function set."""
self.enabled = True

@property
def _enable_function_schema(self) -> dict[str, JsonType]:
"""Get the schema for the enable function."""
schema: dict[str, JsonType] = {
"name": self.enable_function_name,
"parameters": {},
}
if self.enable_function_description:
schema["description"] = self.enable_function_description
return schema

@property
def functions_schema(self) -> list[JsonType]:
"""Get the functions schema, in the format OpenAI expects
Returns:
JsonType: The schema of all the available functions
"""
if self.enabled:
return super().functions_schema
return [self._enable_function_schema]

def run_function(self, input_data: FunctionCall) -> FunctionResult:
"""Run the function, enabling the set if the enable function is called.
Args:
input_data (FunctionCall): The function call
Returns:
FunctionResult: The function output
Raises:
FunctionNotFoundError: If the function is not found
"""
if not self.enabled:
if input_data["name"] == self.enable_function_name:
self.enable()
return FunctionResult(self.enable_function_name, None, True)
raise FunctionNotFoundError(f"Function {input_data['name']} not found")
return super().run_function(input_data)
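Since `TogglableSet` is new in this commit, here is a hedged usage sketch of the intended flow: the model initially sees only the enable function, calls it, and only then sees the real tools. It assumes `add_function` is inherited from the basic set and that a `FunctionCall` can be passed as a plain dict.

```python
from openai_functions.functions.togglable_set import TogglableSet  # path from this commit

# A set of tools the model cannot see until it explicitly enables them.
tools = TogglableSet(
    enable_function_name="enable_weather_tools",
    enable_function_description="Enable the weather tools",
)


@tools.add_function  # assumed to be inherited from the basic function set
def get_current_weather(location: str) -> str:
    """Get the current weather for a location (placeholder)."""
    return f"It is sunny in {location}."


# Before enabling, only the enable function is advertised:
print(tools.functions_schema)
# -> [{"name": "enable_weather_tools", "parameters": {},
#      "description": "Enable the weather tools"}]

# When the model calls the enable function, the set switches itself on
# and the real functions become visible on the next schema request:
tools.run_function({"name": "enable_weather_tools", "arguments": "{}"})
print(tools.functions_schema)  # now includes get_current_weather
```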
14 changes: 11 additions & 3 deletions openai_functions/functions/wrapper.py
@@ -55,15 +55,21 @@ def __init__(
self,
func: Callable[..., Any],
config: WrapperConfig | None = None,
name: str | None = None,
description: str | None = None,
) -> None:
"""Initialize a FunctionWrapper
Args:
func (Callable[..., JsonType]): The function to wrap
config (WrapperConfig | None, optional): The configuration for the wrapper.
name (str | None): The name override for the function.
description (str | None): The description override for the function.
"""
self.func = func
self.config = config or WrapperConfig()
self._name = name
self._description = description

@property
def parsers(self) -> list[Type[ArgSchemaParser]]:
@@ -185,7 +191,7 @@ def name(self) -> str:
Returns:
str: The name
"""
return self.func.__name__
return self._name or self.func.__name__

@property
def schema(self) -> dict[str, JsonType]:
@@ -202,8 +208,10 @@ def schema(self) -> dict[str, JsonType]:
"required": self.required_arguments,
},
}
if self.parsed_docs.short_description:
schema["description"] = self.parsed_docs.short_description
if self._description or self.parsed_docs.short_description:
schema["description"] = (
self._description or self.parsed_docs.short_description
)
return schema

def parse_argument(self, argument: inspect.Parameter) -> ArgSchemaParser:
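A small sketch of how the new `name` and `description` overrides behave on the wrapper itself, using the constructor signature shown above; the import path is taken from this commit, and the exact schema contents depend on the docstring parser.

```python
from openai_functions.functions.wrapper import FunctionWrapper  # path from this commit


def get_current_weather(location: str) -> str:
    """Get the current weather for a location."""
    return f"It is sunny in {location}."


wrapper = FunctionWrapper(
    get_current_weather,
    name="fetch_weather",                        # overrides get_current_weather.__name__
    description="Fetch the weather for a city",  # overrides the docstring summary
)

print(wrapper.name)                   # "fetch_weather"
print(wrapper.schema["description"])  # the override takes precedence over the docstring
```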