
Commit ee2e8ba

Added pydocs and minor fixes
1 parent 0524290 commit ee2e8ba

3 files changed (+212 −35 lines)

azure/functions/decorators/function_app.py

Lines changed: 185 additions & 30 deletions
@@ -1450,16 +1450,40 @@ def assistant_skill_trigger(self,
                                  Union[DataType, str]] = None,
                              **kwargs: Any) -> Callable[..., Any]:
         """
-        TODO: PYDocs
-        """
+        Assistants build on top of the chat functionality to provide
+        assistants with custom skills defined as functions. This internally
+        uses the function calling feature of OpenAI's GPT models to select
+        which functions to invoke and when.
+        Ref: https://platform.openai.com/docs/guides/function-calling
+
+        You can define functions that can be triggered by assistants by
+        using the `assistantSkillTrigger` trigger binding. These functions
+        are invoked by the extension when an assistant signals that it
+        would like to invoke a function in response to a user prompt.
+
+        The name of the function, the description provided by the trigger,
+        and the parameter name are all hints that the underlying language
+        model uses to determine when and how to invoke an assistant
+        function.
+
+        :param arg_name: The name of the variable that represents the
+        trigger object in function code.
+        :param task_description: The description of the assistant function,
+        which is provided to the model.
+        :param data_type: Defines how Functions runtime should treat the
+        parameter value.
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
+        """
 
         @self._configure_function_builder
         def wrap(fb):
             def decorator():
                 fb.add_trigger(
                     trigger=AssistantSkillTrigger(
                         name=arg_name,
-                        task_description=task_description,
+                        function_description=task_description,
                         data_type=parse_singular_param_to_enum(data_type,
                                                                DataType),
                         **kwargs))
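
Example (not part of this commit): a minimal sketch of a skill function wired up with the decorator above. The function name, the `arg_name` value, and the returned payload are illustrative; `task_description` follows the decorator signature shown in this hunk and may differ in released versions.

    import json
    import azure.functions as func

    app = func.FunctionApp()

    # Hypothetical skill the assistant may call; the model supplies the
    # argument value when it decides to invoke the function.
    @app.function_name("AddTodo")
    @app.assistant_skill_trigger(arg_name="taskDescription",
                                 task_description="Create a new todo task")
    def add_todo(taskDescription: str) -> str:
        return json.dumps({"status": "created", "task": taskDescription})
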
@@ -2576,8 +2600,6 @@ def dapr_state_output(self,
 
         :param arg_name: The name of the variable that represents DaprState
         output object in function code.
-        :param arg_name: The name of the variable that represents DaprState
-        input object in function code.
         :param state_store: State store containing the state for keys.
         :param key: The name of the key.
         :param dapr_address: Dapr address, it is optional field, by default
@@ -2631,8 +2653,6 @@ def dapr_invoke_output(self,
 
         :param arg_name: The name of the variable that represents DaprState
         output object in function code.
-        :param arg_name: The name of the variable that represents DaprState
-        input object in function code.
         :param app_id: The dapr app name to invoke.
         :param method_name: The method name of the app to invoke.
         :param http_verb: The http verb of the app to invoke.
@@ -2687,8 +2707,6 @@ def dapr_publish_output(self,
 
         :param arg_name: The name of the variable that represents DaprState
         output object in function code.
-        :param arg_name: The name of the variable that represents DaprState
-        input object in function code.
         :param pub_sub_name: The pub/sub name to publish to.
         :param topic: The name of the topic to publish to.
         :param dapr_address: Dapr address, it is optional field, by default
@@ -2742,8 +2760,6 @@ def dapr_binding_output(self,
 
         :param arg_name: The name of the variable that represents DaprState
         output object in function code.
-        :param arg_name: The name of the variable that represents DaprState
-        input object in function code.
         :param binding_name: The configured name of the binding.
         :param operation: The configured operation.
         :param dapr_address: Dapr address, it is optional field, by default
@@ -2777,17 +2793,49 @@ def decorator():
     def text_completion_input(self,
                               arg_name: str,
                               prompt: str,
-                              model: Optional[
-                                  OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
+                              model: Optional[OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
                               temperature: Optional[str] = "0.5",
                               top_p: Optional[str] = None,
                               max_tokens: Optional[str] = "100",
-                              data_type: Optional[
-                                  Union[DataType, str]] = None,
+                              data_type: Optional[Union[DataType, str]] = None,
                               **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The textCompletion input binding can be used to invoke the
+        OpenAI Chat Completions API and return the results to the function.
+
+        Ref: https://platform.openai.com/docs/guides/text-generation/chat-completions-vs-completions  # NoQA
+
+        The examples below define "who is" HTTP-triggered functions with a
+        hardcoded `"who is {name}?"` prompt, where `{name}` is substituted
+        with the value in the HTTP request path. The OpenAI input binding
+        invokes the OpenAI GPT endpoint to surface the answer to the prompt
+        to the function, which then returns the result text as the response
+        content.
+
+        :param arg_name: The name of the variable that represents the text
+        completion input object in function code.
+        :param prompt: The prompt to generate completions for, encoded as a
+        string.
+        :param model: The ID of the model to use.
+        :param temperature: The sampling temperature to use, between 0 and
+        2. Higher values like 0.8 will make the output more random, while
+        lower values like 0.2 will make it more focused and deterministic.
+        :param top_p: An alternative to sampling with temperature, called
+        nucleus sampling, where the model considers the results of the
+        tokens with top_p probability mass. So 0.1 means only the tokens
+        comprising the top 10% probability mass are considered. It's
+        generally recommended to use this or temperature, but not both.
+        :param max_tokens: The maximum number of tokens to generate in the
+        completion. The token count of your prompt plus max_tokens cannot
+        exceed the model's context length. Most models have a context
+        length of 2048 tokens (except for the newest models, which support
+        4096).
+        :param data_type: Defines how Functions runtime should treat the
+        parameter value.
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
         @self._configure_function_builder
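
Example (not part of this commit): a sketch of the "who is" pattern the docstring refers to. The route and the handling of the binding payload are assumptions; the binding result is returned to the caller unmodified here.

    import azure.functions as func

    app = func.FunctionApp()

    @app.route(route="whois/{name}", methods=["GET"])
    @app.text_completion_input(arg_name="response",
                               prompt="Who is {name}?",
                               max_tokens="100")
    def whois(req: func.HttpRequest, response: str) -> func.HttpResponse:
        # `response` carries the completion produced by the binding as a
        # JSON string.
        return func.HttpResponse(response, mimetype="application/json")
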
@@ -2816,7 +2864,17 @@ def assistant_create_output(self, arg_name: str,
                                 **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The assistantCreate output binding creates a new assistant with a
+        specified system prompt.
+
+        :param arg_name: The name of the variable that represents the
+        assistant create output object in function code.
+        :param data_type: Defines how Functions runtime should treat the
+        parameter value.
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
         @self._configure_function_builder
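
Example (not part of this commit): a hedged sketch of how the assistantCreate output binding might be used. The route, system instructions, and output payload shape are assumptions.

    import json
    import azure.functions as func

    app = func.FunctionApp()

    @app.route(route="assistants/{assistantId}", methods=["PUT"])
    @app.assistant_create_output(arg_name="requests")
    def create_assistant(req: func.HttpRequest,
                         requests: func.Out[str]) -> func.HttpResponse:
        assistant_id = req.route_params["assistantId"]
        # Payload identifying the assistant and its system instructions
        # (assumed shape).
        requests.set(json.dumps({
            "id": assistant_id,
            "instructions": "You are a helpful chatbot."
        }))
        return func.HttpResponse(json.dumps({"assistantId": assistant_id}),
                                 mimetype="application/json")
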
@@ -2843,7 +2901,21 @@ def assistant_query_input(self,
                               **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The assistantQuery input binding fetches the assistant history and
+        passes it to the function.
+
+        :param arg_name: The name of the variable that represents the
+        assistant query input object in function code.
+        :param timestamp_utc: The timestamp of the earliest message in the
+        chat history to fetch. The timestamp should be in ISO 8601 format,
+        for example 2023-08-01T00:00:00Z.
+        :param id: The ID of the assistant to query.
+        :param data_type: Defines how Functions runtime should treat the
+        parameter value.
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
         @self._configure_function_builder
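
Example (not part of this commit): a sketch of the assistantQuery input binding described above. The route, the `{assistantId}` binding expression, and the hardcoded timestamp are illustrative assumptions.

    import azure.functions as func

    app = func.FunctionApp()

    @app.route(route="assistants/{assistantId}", methods=["GET"])
    @app.assistant_query_input(arg_name="state",
                               id="{assistantId}",
                               timestamp_utc="2023-08-01T00:00:00Z")
    def get_chat_state(req: func.HttpRequest, state: str) -> func.HttpResponse:
        # `state` is the assistant's chat history serialized as JSON.
        return func.HttpResponse(state, mimetype="application/json")
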
@@ -2866,12 +2938,27 @@ def decorator():
     def assistant_post_input(self, arg_name: str,
                              id: str,
                              user_message: str,
+                             model: Optional[str] = None,
                              data_type: Optional[
                                  Union[DataType, str]] = None,
                              **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The assistantPost output binding sends a message to the assistant
+        and saves the response in its internal state.
+
+        :param arg_name: The name of the variable that represents the
+        assistant post output object in function code.
+        :param id: The ID of the assistant to update.
+        :param user_message: The user message that the user has entered for
+        the assistant to respond to.
+        :param model: The OpenAI chat model to use.
+        :param data_type: Defines how Functions runtime should treat the
+        parameter value.
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
         @self._configure_function_builder
@@ -2882,6 +2969,7 @@ def decorator():
                         name=arg_name,
                         id=id,
                         user_message=user_message,
+                        model=model,
                         data_type=parse_singular_param_to_enum(data_type,
                                                                DataType),
                         **kwargs))
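
Example (not part of this commit): a sketch of the assistantPost output binding using the `model` parameter added in this commit. The route and the `{Query.message}` binding expression are assumptions.

    import azure.functions as func

    app = func.FunctionApp()

    @app.route(route="assistants/{assistantId}", methods=["POST"])
    @app.assistant_post_input(arg_name="state",
                              id="{assistantId}",
                              user_message="{Query.message}",
                              model="gpt-3.5-turbo")
    def post_user_message(req: func.HttpRequest,
                          state: str) -> func.HttpResponse:
        # The binding posts the message and returns the updated assistant
        # state, including the model's reply.
        return func.HttpResponse(state, mimetype="application/json")
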
@@ -2903,7 +2991,27 @@ def embeddings_input(self,
                              **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The embeddings input decorator creates embeddings which will be
+        used to measure the relatedness of text strings.
+
+        Ref: https://platform.openai.com/docs/guides/embeddings
+
+        :param arg_name: The name of the variable that represents the
+        embeddings input object in function code.
+        :param input: The input source containing the data to generate
+        embeddings for.
+        :param input_type: The type of the input.
+        :param model: The ID of the model to use.
+        :param max_chunk_length: The maximum number of characters to chunk
+        the input into. Default value: 8 * 1024
+        :param max_overlap: The maximum number of characters to overlap
+        between chunks. Default value: 128
+        :param data_type: Defines how Functions runtime should treat the
+        parameter value.
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
         @self._configure_function_builder
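
Example (not part of this commit): a sketch of the embeddings input binding. The `InputType` import path, the `InputType.RawText` member, and the `{rawText}` binding expression are assumptions.

    import azure.functions as func
    from azure.functions.decorators.openai import InputType  # assumed path

    app = func.FunctionApp()

    @app.route(route="embeddings", methods=["POST"])
    @app.embeddings_input(arg_name="embeddings",
                          input="{rawText}",
                          input_type=InputType.RawText,
                          model="text-embedding-ada-002")
    def generate_embeddings(req: func.HttpRequest,
                            embeddings: str) -> func.HttpResponse:
        # `embeddings` is a JSON description of the generated embeddings.
        return func.HttpResponse(embeddings, mimetype="application/json")
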
@@ -2931,19 +3039,41 @@ def semantic_search_input(self,
                               connection_name: str,
                               collection: str,
                               query: Optional[str] = None,
-                              embeddings_model: Optional[
-                                  OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
-                              chat_model: Optional[
-                                  OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
-                              system_prompt: Optional[
-                                  str] = semantic_search_system_prompt,
+                              embeddings_model: Optional[OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
+                              chat_model: Optional[OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
+                              system_prompt: Optional[str] = semantic_search_system_prompt,  # NoQA
                               max_knowledge_count: Optional[int] = 1,
                               data_type: Optional[
                                   Union[DataType, str]] = None,
                               **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The semanticSearch input binding performs a semantic search over
+        documents previously stored in a vector database and uses a chat
+        model to answer the query from the retrieved content.
+
+        Ref: https://platform.openai.com/docs/guides/embeddings
+
+        :param arg_name: The name of the variable that represents the
+        semantic search input object in function code.
+        :param connection_name: The name of an app setting or environment
+        variable which contains a connection string value.
+        :param collection: The name of the collection or table to search or
+        store.
+        :param query: The semantic query text to use for searching.
+        :param embeddings_model: The ID of the model to use for embeddings.
+        The default value is "text-embedding-ada-002".
+        :param chat_model: The name of the Large Language Model to invoke
+        for chat responses. The default value is "gpt-3.5-turbo".
+        :param system_prompt: Optional. The system prompt to use for
+        prompting the large language model.
+        :param max_knowledge_count: Optional. The number of knowledge items
+        to inject into the SystemPrompt. Default value: 1
+        :param data_type: Optional. Defines how Functions runtime should
+        treat the parameter value. Default value: None
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
         @self._configure_function_builder
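
Example (not part of this commit): a sketch of the semanticSearch input binding. The connection setting name, collection name, and `{question}` binding expression are placeholders, not values from this diff.

    import azure.functions as func

    app = func.FunctionApp()

    @app.route(route="ask", methods=["POST"])
    @app.semantic_search_input(arg_name="result",
                               connection_name="AISearchEndpoint",
                               collection="openai-index",
                               query="{question}",
                               max_knowledge_count=3)
    def ask(req: func.HttpRequest, result: str) -> func.HttpResponse:
        # `result` contains the chat response grounded in the documents
        # retrieved from the vector store.
        return func.HttpResponse(result, mimetype="application/json")
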
@@ -2974,16 +3104,41 @@ def embeddings_store_output(self,
                                 input_type: InputType,
                                 connection_name: str,
                                 collection: str,
-                                model: Optional[
-                                    OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
+                                model: Optional[OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
                                 max_chunk_length: Optional[int] = 8 * 1024,
                                 max_overlap: Optional[int] = 128,
                                 data_type: Optional[
                                     Union[DataType, str]] = None,
                                 **kwargs) \
             -> Callable[..., Any]:
         """
-        TODO: pydocs
+        The embeddingsStore output binding writes embeddings to a vector
+        store. The supported list of embeddings stores is extensible, and
+        more can be added by authoring a specially crafted NuGet package.
+        Visit the folder for the specific vector store for usage
+        information:
+
+        - Azure AI Search
+        - Azure Data Explorer
+        - Azure Cosmos DB using MongoDB
+
+        :param arg_name: The name of the variable that represents the
+        embeddings store output object in function code.
+        :param input: The input to generate embeddings for.
+        :param input_type: The type of the input.
+        :param connection_name: The name of an app setting or environment
+        variable which contains a connection string value.
+        :param collection: The collection or table to store the embeddings
+        into.
+        :param model: The ID of the model to use.
+        :param max_chunk_length: The maximum number of characters to chunk
+        the input into.
+        :param max_overlap: The maximum number of characters to overlap
+        between chunks.
+        :param data_type: Optional. Defines how Functions runtime should
+        treat the parameter value. Default value: None
+        :param kwargs: Keyword arguments for specifying additional binding
+        fields to include in the binding json.
+
+        :return: Decorator function.
         """
 
        @self._configure_function_builder
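
Example (not part of this commit): a sketch of ingesting text into a vector store with the embeddingsStore output binding. The setting names, the output payload shape, the `InputType` import path, and the `{rawText}` binding expression are assumptions.

    import json
    import azure.functions as func
    from azure.functions.decorators.openai import InputType  # assumed path

    app = func.FunctionApp()

    @app.route(route="ingest", methods=["POST"])
    @app.embeddings_store_output(arg_name="requests",
                                 input="{rawText}",
                                 input_type=InputType.RawText,
                                 connection_name="AISearchEndpoint",
                                 collection="openai-index")
    def ingest_text(req: func.HttpRequest,
                    requests: func.Out[str]) -> func.HttpResponse:
        body = req.get_json()
        # Title under which the chunked embeddings are stored (assumed shape).
        requests.set(json.dumps({"title": body.get("title", "untitled")}))
        return func.HttpResponse(json.dumps({"status": "success"}),
                                 mimetype="application/json")
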

azure/functions/decorators/openai.py

Lines changed: 4 additions & 2 deletions
@@ -31,10 +31,10 @@ def get_binding_name() -> str:
 
     def __init__(self,
                  name: str,
-                 task_description: str,
+                 function_description: str,
                  data_type: Optional[DataType] = None,
                  **kwargs):
-        self.task_description = task_description
+        self.function_description = function_description
         super().__init__(name=name, data_type=data_type)
 
 
@@ -159,11 +159,13 @@ def get_binding_name():
     def __init__(self, name: str,
                  id: str,
                  user_message: str,
+                 model: Optional[str] = None,
                  data_type: Optional[DataType] = None,
                  **kwargs):
         self.name = name
         self.id = id
         self.user_message = user_message
+        self.model = model
         super().__init__(name=name, data_type=data_type)
 
 