From 9f52ee87e0951a56875713d4c202a0e88b792511 Mon Sep 17 00:00:00 2001 From: ishaansehgal99 Date: Thu, 10 Oct 2024 18:04:51 -0700 Subject: [PATCH] fix: No need to parse the inference result --- ragengine/inference/custom_inference.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/ragengine/inference/custom_inference.py b/ragengine/inference/custom_inference.py index 5e49e04b6..bdfb0a61d 100644 --- a/ragengine/inference/custom_inference.py +++ b/ragengine/inference/custom_inference.py @@ -3,7 +3,7 @@ from llama_index.llms.openai import OpenAI from llama_index.core.llms.callbacks import llm_completion_callback import requests -from config import INFERENCE_URL, INFERENCE_ACCESS_SECRET, RESPONSE_FIELD +from config import INFERENCE_URL, INFERENCE_ACCESS_SECRET #, RESPONSE_FIELD class CustomInference(CustomLLM): params: dict = {} @@ -44,9 +44,8 @@ def _custom_api_complete(self, prompt: str, **kwargs: Any) -> CompletionResponse response_data = response.json() # Dynamically extract the field from the response based on the specified response_field - completion_text = response_data.get(RESPONSE_FIELD, "No response field found") - - return CompletionResponse(text=completion_text) + # completion_text = response_data.get(RESPONSE_FIELD, "No response field found") # not necessary for now + return CompletionResponse(text=str(response_data)) @property def metadata(self) -> LLMMetadata: