Skip to content

fix: OpenAI decorators: Optional aiConnectionName param added #283

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
May 16, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 50 additions & 0 deletions azure/functions/decorators/function_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -3217,6 +3217,7 @@ def decorator():
def text_completion_input(self,
arg_name: str,
prompt: str,
ai_connection_name: Optional[str] = "",
chat_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultChatModel,
Expand All @@ -3243,6 +3244,14 @@ def text_completion_input(self,
:param arg_name: The name of binding parameter in the function code.
:param prompt: The prompt to generate completions for, encoded as a
string.
:param ai_connection_name: The name of the configuration section for
AI service connectivity settings. For Azure OpenAI: If specified, looks
for "Endpoint" and/or "Key" values in this configuration section. If
not specified or the section doesn't exist, falls back to environment
variables: AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY. For
user-assigned managed identity authentication, this property is
required. For OpenAI service (non-Azure), set the OPENAI_API_KEY
environment variable.
:param model: @deprecated. Use chat_model instead. The model parameter
is unused and will be removed in future versions.
:param chat_model: The deployment name or model name of OpenAI Chat
Expand Down Expand Up @@ -3277,6 +3286,7 @@ def decorator():
binding=TextCompletionInput(
name=arg_name,
prompt=prompt,
ai_connection_name=ai_connection_name,
chat_model=chat_model,
temperature=temperature,
top_p=top_p,
Expand Down Expand Up @@ -3378,6 +3388,7 @@ def decorator():
def assistant_post_input(self, arg_name: str,
id: str,
user_message: str,
ai_connection_name: Optional[str] = "",
chat_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultChatModel,
Expand All @@ -3399,6 +3410,14 @@ def assistant_post_input(self, arg_name: str,
:param id: The ID of the assistant to update.
:param user_message: The user message that user has entered for
assistant to respond to.
:param ai_connection_name: The name of the configuration section for
AI service connectivity settings. For Azure OpenAI: If specified, looks
for "Endpoint" and/or "Key" values in this configuration section. If
not specified or the section doesn't exist, falls back to environment
variables: AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY. For
user-assigned managed identity authentication, this property is
required. For OpenAI service (non-Azure), set the OPENAI_API_KEY
environment variable.
:param model: @deprecated. Use chat_model instead. The model parameter
is unused and will be removed in future versions.
:param chat_model: The deployment name or model name of OpenAI Chat
Expand Down Expand Up @@ -3439,6 +3458,7 @@ def decorator():
name=arg_name,
id=id,
user_message=user_message,
ai_connection_name=ai_connection_name,
chat_model=chat_model,
chat_storage_connection_setting=chat_storage_connection_setting, # noqa: E501
collection_name=collection_name,
Expand All @@ -3459,6 +3479,7 @@ def embeddings_input(self,
arg_name: str,
input: str,
input_type: InputType,
ai_connection_name: Optional[str] = "",
embeddings_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultEmbeddingsModel,
Expand All @@ -3478,6 +3499,14 @@ def embeddings_input(self,
:param input: The input source containing the data to generate
embeddings for.
:param input_type: The type of the input.
:param ai_connection_name: The name of the configuration section for
AI service connectivity settings. For Azure OpenAI: If specified, looks
for "Endpoint" and/or "Key" values in this configuration section. If
not specified or the section doesn't exist, falls back to environment
variables: AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY. For
user-assigned managed identity authentication, this property is
required. For OpenAI service (non-Azure), set the OPENAI_API_KEY
environment variable.
:param model: @deprecated. Use embeddings_model instead. The model
parameter is unused and will be removed in future versions.
:param embeddings_model: The deployment name or model name for OpenAI
Expand All @@ -3502,6 +3531,7 @@ def decorator():
name=arg_name,
input=input,
input_type=input_type,
ai_connection_name=ai_connection_name,
embeddings_model=embeddings_model,
max_chunk_length=max_chunk_length,
max_overlap=max_overlap,
Expand All @@ -3519,6 +3549,7 @@ def semantic_search_input(self,
search_connection_name: str,
collection: str,
query: Optional[str] = None,
ai_connection_name: Optional[str] = "",
embeddings_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultEmbeddingsModel,
Expand Down Expand Up @@ -3552,6 +3583,14 @@ def semantic_search_input(self,
:param collection: The name of the collection or table to search or
store.
:param query: The semantic query text to use for searching.
:param ai_connection_name: The name of the configuration section for
AI service connectivity settings. For Azure OpenAI: If specified, looks
for "Endpoint" and/or "Key" values in this configuration section. If
not specified or the section doesn't exist, falls back to environment
variables: AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY. For
user-assigned managed identity authentication, this property is
required. For OpenAI service (non-Azure), set the OPENAI_API_KEY
environment variable.
:param embeddings_model: The deployment name or model name for OpenAI
Embeddings. The default value is "text-embedding-ada-002".
:param chat_model: The deployment name or model name of OpenAI Chat
Expand Down Expand Up @@ -3592,6 +3631,7 @@ def decorator():
search_connection_name=search_connection_name,
collection=collection,
query=query,
ai_connection_name=ai_connection_name,
embeddings_model=embeddings_model,
chat_model=chat_model,
system_prompt=system_prompt,
Expand All @@ -3615,6 +3655,7 @@ def embeddings_store_output(self,
input_type: InputType,
store_connection_name: str,
collection: str,
ai_connection_name: Optional[str] = "",
embeddings_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultEmbeddingsModel,
Expand All @@ -3640,6 +3681,14 @@ def embeddings_store_output(self,
:param store_connection_name: The name of an app setting or environment
variable which contains a vector store connection setting value
:param collection: The collection or table to search.
:param ai_connection_name: The name of the configuration section for
AI service connectivity settings. For Azure OpenAI: If specified, looks
for "Endpoint" and/or "Key" values in this configuration section. If
not specified or the section doesn't exist, falls back to environment
variables: AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY. For
user-assigned managed identity authentication, this property is
required. For OpenAI service (non-Azure), set the OPENAI_API_KEY
environment variable.
:param model: @deprecated. Use embeddings_model instead. The model
parameter is unused and will be removed in future versions.
:param embeddings_model: The deployment name or model name for OpenAI
Expand All @@ -3666,6 +3715,7 @@ def decorator():
input_type=input_type,
store_connection_name=store_connection_name,
collection=collection,
ai_connection_name=ai_connection_name,
embeddings_model=embeddings_model,
max_chunk_length=max_chunk_length,
max_overlap=max_overlap,
Expand Down
10 changes: 10 additions & 0 deletions azure/functions/decorators/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ def get_binding_name() -> str:
def __init__(self,
name: str,
prompt: str,
ai_connection_name: Optional[str] = "",
chat_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultChatModel,
Expand All @@ -61,6 +62,7 @@ def __init__(self,
data_type: Optional[DataType] = None,
**kwargs):
self.prompt = prompt
self.ai_connection_name = ai_connection_name
self.chat_model = chat_model
self.temperature = temperature
self.top_p = top_p
Expand Down Expand Up @@ -100,6 +102,7 @@ def __init__(self,
name: str,
input: str,
input_type: InputType,
ai_connection_name: Optional[str] = "",
embeddings_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultEmbeddingsModel,
Expand All @@ -110,6 +113,7 @@ def __init__(self,
self.name = name
self.input = input
self.input_type = input_type
self.ai_connection_name = ai_connection_name
self.embeddings_model = embeddings_model
self.max_chunk_length = max_chunk_length
self.max_overlap = max_overlap
Expand Down Expand Up @@ -144,6 +148,7 @@ def __init__(self,
search_connection_name: str,
collection: str,
query: Optional[str] = None,
ai_connection_name: Optional[str] = "",
embeddings_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultEmbeddingsModel,
Expand All @@ -162,6 +167,7 @@ def __init__(self,
self.search_connection_name = search_connection_name
self.collection = collection
self.query = query
self.ai_connection_name = ai_connection_name
self.embeddings_model = embeddings_model
self.chat_model = chat_model
self.system_prompt = system_prompt
Expand All @@ -182,6 +188,7 @@ def get_binding_name():
def __init__(self, name: str,
id: str,
user_message: str,
ai_connection_name: Optional[str] = "",
chat_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultChatModel,
Expand All @@ -196,6 +203,7 @@ def __init__(self, name: str,
self.name = name
self.id = id
self.user_message = user_message
self.ai_connection_name = ai_connection_name
self.chat_model = chat_model
self.chat_storage_connection_setting = chat_storage_connection_setting
self.collection_name = collection_name
Expand All @@ -218,6 +226,7 @@ def __init__(self,
input_type: InputType,
store_connection_name: str,
collection: str,
ai_connection_name: Optional[str] = "",
embeddings_model: Optional
[Union[str, OpenAIModels]]
= OpenAIModels.DefaultEmbeddingsModel,
Expand All @@ -230,6 +239,7 @@ def __init__(self,
self.input_type = input_type
self.store_connection_name = store_connection_name
self.collection = collection
self.ai_connection_name = ai_connection_name
self.embeddings_model = embeddings_model
self.max_chunk_length = max_chunk_length
self.max_overlap = max_overlap
Expand Down
20 changes: 20 additions & 0 deletions tests/decorators/test_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ def test_text_completion_input_valid_creation(self):
is_reasoning_model=False,
data_type=DataType.UNDEFINED,
chat_model=OpenAIModels.DefaultChatModel,
ai_connection_name="test_connection",
dummy_field="dummy",
)
self.assertEqual(input.get_binding_name(), "textCompletion")
Expand All @@ -66,6 +67,7 @@ def test_text_completion_input_valid_creation(self):
"direction": BindingDirection.IN,
"chatModel": OpenAIModels.DefaultChatModel,
"isReasoningModel": False,
"aiConnectionName": "test_connection",
},
)

Expand All @@ -78,6 +80,7 @@ def test_text_completion_input_with_string_chat_model(self):
is_reasoning_model=True,
data_type=DataType.UNDEFINED,
chat_model="gpt-4o",
ai_connection_name="test_connection",
dummy_field="dummy",
)
self.assertEqual(input.get_binding_name(), "textCompletion")
Expand All @@ -94,6 +97,7 @@ def test_text_completion_input_with_string_chat_model(self):
"direction": BindingDirection.IN,
"chatModel": "gpt-4o",
"isReasoningModel": True,
"aiConnectionName": "test_connection",
},
)

Expand Down Expand Up @@ -133,6 +137,7 @@ def test_embeddings_input_valid_creation(self):
embeddings_model="test_model",
max_overlap=1,
max_chunk_length=1,
ai_connection_name="test_connection",
dummy_field="dummy",
)
self.assertEqual(input.get_binding_name(), "embeddings")
Expand All @@ -148,6 +153,7 @@ def test_embeddings_input_valid_creation(self):
"maxOverlap": 1,
"maxChunkLength": 1,
"direction": BindingDirection.IN,
"aiConnectionName": "test_connection",
"dummyField": "dummy",
},
)
Expand All @@ -161,6 +167,7 @@ def test_embeddings_input_with_enum_embeddings_model(self):
embeddings_model=OpenAIModels.DefaultEmbeddingsModel,
max_overlap=1,
max_chunk_length=1,
ai_connection_name="test_connection",
dummy_field="dummy",
)
self.assertEqual(input.get_binding_name(), "embeddings")
Expand All @@ -176,6 +183,7 @@ def test_embeddings_input_with_enum_embeddings_model(self):
"maxOverlap": 1,
"maxChunkLength": 1,
"direction": BindingDirection.IN,
"aiConnectionName": "test_connection",
"dummyField": "dummy",
},
)
Expand Down Expand Up @@ -206,6 +214,7 @@ def test_assistant_post_input_valid_creation(self):
temperature="1",
max_tokens="1",
is_reasoning_model=False,
ai_connection_name="test_connection",
data_type=DataType.UNDEFINED,
dummy_field="dummy",
)
Expand All @@ -222,6 +231,7 @@ def test_assistant_post_input_valid_creation(self):
"temperature": "1",
"maxTokens": "1",
"isReasoningModel": False,
"aiConnectionName": "test_connection",
"dataType": DataType.UNDEFINED,
"direction": BindingDirection.IN,
"dummyField": "dummy",
Expand All @@ -240,6 +250,7 @@ def test_assistant_post_input_with_enum_chat_model(self):
temperature="1",
max_tokens="1",
is_reasoning_model=False,
ai_connection_name="test_connection",
data_type=DataType.UNDEFINED,
dummy_field="dummy",
)
Expand All @@ -256,6 +267,7 @@ def test_assistant_post_input_with_enum_chat_model(self):
"temperature": "1",
"maxTokens": "1",
"isReasoningModel": False,
"aiConnectionName": "test_connection",
"dataType": DataType.UNDEFINED,
"direction": BindingDirection.IN,
"dummyField": "dummy",
Expand All @@ -277,6 +289,7 @@ def test_semantic_search_input_valid_creation(self):
temperature="1",
max_tokens="1",
is_reasoning_model=False,
ai_connection_name="test_connection",
dummy_field="dummy_field",
)
self.assertEqual(input.get_binding_name(), "semanticSearch")
Expand All @@ -297,6 +310,7 @@ def test_semantic_search_input_valid_creation(self):
"temperature": "1",
"maxTokens": "1",
"isReasoningModel": False,
"aiConnectionName": "test_connection",
"query": "test_query",
},
)
Expand All @@ -315,6 +329,7 @@ def test_semantic_search_input_with_string_models(self):
temperature="1",
max_tokens="1",
is_reasoning_model=True,
ai_connection_name="test_connection",
dummy_field="dummy_field",
)
self.assertEqual(input.get_binding_name(), "semanticSearch")
Expand All @@ -335,6 +350,7 @@ def test_semantic_search_input_with_string_models(self):
"temperature": "1",
"maxTokens": "1",
"isReasoningModel": True,
"aiConnectionName": "test_connection",
"query": "test_query",
},
)
Expand All @@ -350,6 +366,7 @@ def test_embeddings_store_output_valid_creation(self):
max_chunk_length=1,
collection="test_collection",
embeddings_model=OpenAIModels.DefaultEmbeddingsModel, # noqa: E501
ai_connection_name="test_connection",
dummy_field="dummy_field",
)
self.assertEqual(output.get_binding_name(), "embeddingsStore")
Expand All @@ -368,6 +385,7 @@ def test_embeddings_store_output_valid_creation(self):
"maxOverlap": 1,
"maxChunkLength": 1,
"type": "embeddingsStore",
"aiConnectionName": "test_connection",
},
)

Expand All @@ -382,6 +400,7 @@ def test_embeddings_store_output_with_string_embeddings_model(self):
max_chunk_length=1,
collection="test_collection",
embeddings_model="text-embedding-3-small",
ai_connection_name="test_connection",
dummy_field="dummy_field",
)
self.assertEqual(output.get_binding_name(), "embeddingsStore")
Expand All @@ -400,5 +419,6 @@ def test_embeddings_store_output_with_string_embeddings_model(self):
"maxOverlap": 1,
"maxChunkLength": 1,
"type": "embeddingsStore",
"aiConnectionName": "test_connection",
},
)
Loading