fix: Remove Temperature from OpenAI Integration (#6023)

Author: Michael Genson
Date: 2025-08-25 01:36:15 -05:00 (committed by GitHub)
Parent: 01d3d5d325
Commit: 323a8100db
3 changed files with 4 additions and 8 deletions


@@ -398,7 +398,7 @@ class AppSettings(AppLoggingSettings):
     Sending database data may increase accuracy in certain requests,
     but will incur additional API costs
     """
-    OPENAI_REQUEST_TIMEOUT: int = 60
+    OPENAI_REQUEST_TIMEOUT: int = 300
     """
     The number of seconds to wait for an OpenAI request to complete before cancelling the request
     """


@@ -135,9 +135,7 @@ class OpenAIService(BaseService):
             )
         return "\n".join(content_parts)
 
-    async def _get_raw_response(
-        self, prompt: str, content: list[dict], temperature=0.2, force_json_response=True
-    ) -> ChatCompletion:
+    async def _get_raw_response(self, prompt: str, content: list[dict], force_json_response=True) -> ChatCompletion:
         client = self.get_client()
         return await client.chat.completions.create(
             messages=[
@@ -151,7 +149,6 @@ class OpenAIService(BaseService):
                 },
             ],
             model=self.model,
-            temperature=temperature,
             response_format={"type": "json_object"} if force_json_response else NOT_GIVEN,
         )
 
@@ -161,7 +158,6 @@ class OpenAIService(BaseService):
         message: str,
         *,
         images: list[OpenAIImageBase] | None = None,
-        temperature=0.2,
         force_json_response=True,
     ) -> str | None:
         """Send data to OpenAI and return the response message content"""
@@ -174,7 +170,7 @@ class OpenAIService(BaseService):
         for image in images or []:
             user_messages.append(image.build_message())
 
-        response = await self._get_raw_response(prompt, user_messages, temperature, force_json_response)
+        response = await self._get_raw_response(prompt, user_messages, force_json_response)
         if not response.choices:
             return None
         return response.choices[0].message.content
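
With the temperature argument gone from both _get_raw_response and get_response, requests now use the OpenAI API's default sampling temperature, and any caller that previously passed temperature= has to drop that keyword. The snippet below is a minimal caller sketch, assuming get_response takes a system prompt followed by the user message as the hunks above suggest; the summarize function and its prompt text are illustrative only:

    # Hypothetical caller after this change: sampling temperature can no longer be
    # tuned per request, so only prompt, message, images, and force_json_response
    # remain as inputs to OpenAIService.get_response.
    async def summarize(service: OpenAIService, text: str) -> str | None:
        prompt = "Return a JSON summary of the following text."  # illustrative prompt
        return await service.get_response(prompt, text, force_json_response=True)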