# process_message
#
# in ai-ml/llm-serving-gemma/gradio/app/app.py [0:0]


def process_message(message, history):
    """Format *message* plus the chat *history* into one aggregated prompt.

    Templates come from the USER_PROMPT / SYSTEM_PROMPT environment
    variables; the literal substring "prompt" inside each template is
    replaced with the actual text of the turn. When a variable is unset
    the bare text is used as-is (template defaults to "prompt"), so the
    message is never silently dropped.

    Args:
        message: The new user message (str).
        history: Previous turns as (user_text, assistant_text) pairs
            (Gradio chat history; assistant_text may be None for a turn
            that has no answer yet — TODO confirm against the caller).

    Returns:
        str: The formatted history followed by the formatted new message.
    """
    # Default to the bare placeholder so "prompt".replace("prompt", text)
    # yields text unchanged. Previously an unset variable left an empty
    # template and the user's message was lost entirely.
    user_prompt_format = os.environ.get("USER_PROMPT", "prompt")
    system_prompt_format = os.environ.get("SYSTEM_PROMPT", "prompt")

    # Debug trace of the incoming history (kept from the original).
    print("* History: " + str(history))

    parts = []
    for item in history:
        parts.append(user_prompt_format.replace("prompt", item[0]))
        # Guard against a None assistant reply (unanswered turn) — the
        # original would raise TypeError inside str.replace.
        parts.append(system_prompt_format.replace("prompt", item[1] or ""))

    # Append the new message and close out the aggregated prompt.
    parts.append(user_prompt_format.replace("prompt", message))
    return "".join(parts)