I have been trying to programmatically run a Conversation Bot created in Dialogflow using Python.
In this process I am trying to override the default summarization prompt used by the data-store-based bot to answer the question. It seems like the information in the query param is being ignored; any help would be appreciated.
import time
import uuid
from google.cloud.dialogflowcx_v3beta1 import (
SessionsClient,
DetectIntentRequest,
TextInput,
QueryInput,
AgentsClient,
QueryParameters,
GenerativeSettings,
LlmModelSettings,
)
from google.api_core.client_options import ClientOptions
from google.protobuf import json_format
# Wall-clock timestamp taken at module import, marking the start of the run.
# NOTE(review): not referenced anywhere in this chunk — presumably used to
# report elapsed time later in the file; verify.
start_pipeline = time.time()
def extract_search_snippets(query_result):
    """Pull the search and cited snippets out of a query result.

    Reads ``query_result.data_store_connection_signals`` and returns the
    pair ``(search_snippets, cited_snippets)`` unchanged.
    """
    signals = query_result.data_store_connection_signals
    return signals.search_snippets, signals.cited_snippets
def extract_safety_and_grounding_sginals(query_result):
    """Return the grounding and safety signals, each wrapped in a one-element list.

    NOTE(review): the name contains a typo ("sginals"); it is kept as-is
    because the call site in this file uses this exact spelling.
    """
    signals = query_result.data_store_connection_signals
    grounding = [signals.grounding_signals]
    safety = [signals.safety_signals]
    return grounding, safety
# Custom summarization prompt meant to override the data-store agent's default.
# $sources / $conversation / $original-query are the placeholder names the
# Dialogflow CX summarization prompt template substitutes at runtime.
# Fix: the original used curly quotes (“”") — a paste artifact that is not
# valid Python — restored to a real triple-quoted string literal.
custom_prompt = """
Please reply to this query in not more than 1 sentence.
Sources:
$sources
$conversation
Human: $original-query
AI:
"""
def init():
    """Build a detect-intent helper bound to a specific Dialogflow CX agent.

    Returns:
        A ``(get_response_text, agent_path)`` tuple. ``project_id`` and
        ``agent_id`` are intentionally blank placeholders — fill them in
        before calling the returned helper.

    Fixes applied: the original paste had curly quotes (``“”``) in the
    config assignments (a SyntaxError) and all indentation stripped; the
    nesting is reconstructed from the trailing ``return (get_response_text,
    agent_path)``, which shows both were defined inside ``init``.
    """
    project_id = ""
    location_id = "global"
    agent_id = ""
    agent_path = f"projects/{project_id}/locations/{location_id}/agents/{agent_id}"

    def get_response_text(agent: str, session_id: str, texts: list[str], language_code: str):
        """Send each text to the agent and collect replies plus data-store signals.

        Args:
            agent: full agent resource path ("projects/.../agents/...").
            session_id: session identifier appended to the agent path.
            texts: user utterances, sent one detect-intent call each.
            language_code: BCP-47 language code for the query input.

        Returns:
            ``(joined_response_text, search_snippets, cited_snippets,
            grounding_signals, safety_signals)``.
        """
        agent_components = AgentsClient.parse_agent_path(agent)
        # Renamed from the original's ``location_id`` to avoid shadowing the
        # enclosing init() variable of the same name.
        agent_location = agent_components["location"]
        # Regional agents must hit a region-specific endpoint; "global" uses
        # the default endpoint (client_options stays None).
        client_options = None
        if agent_location != "global":
            api_endpoint = f"{agent_location}-dialogflow.googleapis.com:443"
            client_options = ClientOptions(api_endpoint=api_endpoint)
        session_client = SessionsClient(client_options=client_options)
        session_path = f"{agent}/sessions/{session_id}"

        response_messages = []
        search_snippets = []
        cited_snippets = []
        grounding_signal = []
        safety_signal = []
        for text in texts:
            query_input = QueryInput(text=TextInput(text=text), language_code=language_code)
            query_params = None
            if custom_prompt:
                llm_settings = LlmModelSettings(prompt_text=custom_prompt)
                # NOTE(review): QueryParameters.parameters is the *session
                # parameters* Struct (free-form key/values for the session),
                # not the agent's generative settings — this is the most
                # likely reason the custom prompt is being ignored. Prompt
                # overrides are agent-level configuration
                # (GenerativeSettings.llm_model_settings, applied via
                # AgentsClient.update_generative_settings) — TODO confirm
                # against the v3beta1 API reference.
                query_params = QueryParameters(
                    parameters={"llmModelSettings": json_format.MessageToDict(llm_settings._pb)}
                )
            request = DetectIntentRequest(
                session=session_path, query_input=query_input, query_params=query_params
            )
            response = session_client.detect_intent(request=request)
            response_messages += [
                " ".join(msg.text.text)
                for msg in response.query_result.response_messages
                if msg.text
            ]
            ss, cs = extract_search_snippets(response.query_result)
            gs, sfs = extract_safety_and_grounding_sginals(response.query_result)
            search_snippets += ss
            cited_snippets += cs
            grounding_signal += gs
            safety_signal += sfs
        return (
            " ".join(response_messages),
            search_snippets,
            cited_snippets,
            grounding_signal,
            safety_signal,
        )

    return (
        get_response_text,
        agent_path,
    )