Skip to content

Commit

Permalink
fix: Slight fix — no need to parse inference result
Browse files Browse the repository at this point in the history
  • Loading branch information
ishaansehgal99 committed Oct 11, 2024
1 parent 83ab9a3 commit 9f52ee8
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions ragengine/inference/custom_inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from llama_index.llms.openai import OpenAI
from llama_index.core.llms.callbacks import llm_completion_callback
import requests
from config import INFERENCE_URL, INFERENCE_ACCESS_SECRET, RESPONSE_FIELD
from config import INFERENCE_URL, INFERENCE_ACCESS_SECRET #, RESPONSE_FIELD

class CustomInference(CustomLLM):
params: dict = {}
Expand Down Expand Up @@ -44,9 +44,8 @@ def _custom_api_complete(self, prompt: str, **kwargs: Any) -> CompletionResponse
response_data = response.json()

# Dynamically extract the field from the response based on the specified response_field
completion_text = response_data.get(RESPONSE_FIELD, "No response field found")

return CompletionResponse(text=completion_text)
# completion_text = response_data.get(RESPONSE_FIELD, "No response field found") # not necessary for now
return CompletionResponse(text=str(response_data))

@property
def metadata(self) -> LLMMetadata:
Expand Down

0 comments on commit 9f52ee8

Please sign in to comment.