onNewChunk: async (value, accumulator)

in src/widgets/AIChat/components/Chat/index.tsx [123:217]

Streaming callback run once per chunk of an in-progress AI response: it clears the
request-loading state, merges thought updates and generated text into the last
conversation-history entry, and finalizes that entry when the final chunk arrives.


    onNewChunk: async (value, accumulator) => {
      // A chunk has arrived, so the initial "waiting for response" state can be cleared.
      setIsRequestLoading(false);

      // Work on a copy so the caller's accumulator is never mutated directly.
      const _accumulator = [...accumulator];

      if (value.last) {
        // Final chunk: replace the streamed text with the complete generated
        // message and mark the response as finished.
        setConversation((prev) => {
          const updatedHistory = [...prev.conversationHistory];
          const lastHistoryItem = updatedHistory[updatedHistory.length - 1];
          lastHistoryItem.response.message = value.generated;
          lastHistoryItem.response.inProgress = false;

          const newConversation = {
            ...prev,
            conversationHistory: updatedHistory,
          };

          // Persist the finished conversation.
          updateConversation(newConversation);

          return newConversation;
        });
        return _accumulator;
      }

      if (value.thought) {
        // Intermediate "thought" update: merge it into the thoughts list of the
        // last conversation-history entry.
        const thought = value.thought;

        setConversation((prev) => {
          const historyLength = prev.conversationHistory.length;
          const lastHistoryItem = prev.conversationHistory[historyLength - 1];
          if (!lastHistoryItem.response.thoughts) lastHistoryItem.response.thoughts = [];

          const alreadyExistingStateThought = lastHistoryItem.response.thoughts.find(
            (t) => t.id_ === thought.id_
          );

          if (alreadyExistingStateThought) {
            // Known thought: append the new text and carry over its progress flag.
            alreadyExistingStateThought.in_progress = thought.in_progress;
            alreadyExistingStateThought.message += thought.message;

            // Re-format the accumulated thought text; the result is attached to
            // the thought once the promise resolves.
            formatAiMessage(alreadyExistingStateThought.message).then(
              (res) => (alreadyExistingStateThought.processedChunks = res)
            );
          } else if (thought.message.trim() !== '') {
            // New, non-empty thought: add it to the list.
            lastHistoryItem.response.thoughts = [...lastHistoryItem.response.thoughts, thought];
          }

          // Return a new history array reference so React re-renders.
          return {
            ...prev,
            conversationHistory: [...prev.conversationHistory],
          };
        });
      } else {
        const generatedText = value.generated_chunk;

        // Ignore whitespace-only chunks.
        if (generatedText.trim() !== '') {
          _accumulator.push(generatedText);
        }

        // Everything received so far, including the chunk just pushed, so the
        // rendered stream does not lag one chunk behind.
        let chatStream = _accumulator.join('');

        // If the stream is currently inside an unterminated ``` block, append a
        // closing fence so the partial markdown still renders cleanly.
        if ((chatStream.match(/```/g) || []).length % 2 === 1) {
          chatStream += '```';
        }

        // Convert the raw markdown stream into renderable chunks.
        const formattedChunks = await formatAiMessage(chatStream);

        setConversation((prev) => {
          const updatedHistory = [...prev.conversationHistory];
          const lastHistoryItem = updatedHistory[updatedHistory.length - 1];

          // Mark the response as still streaming while chunks keep arriving.
          if (!lastHistoryItem.response.inProgress) {
            lastHistoryItem.response.inProgress = true;
          }

          // Replace the rendered chunks with the freshly formatted stream.
          lastHistoryItem.response.processedChunks = [...formattedChunks];

          return {
            ...prev,
            conversationHistory: updatedHistory,
          };
        });
      }

      return _accumulator;
    },
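
For orientation, a minimal sketch of the data this handler appears to consume and of a driver loop that could feed it. The `StreamChunk`/`StreamThought` shapes are inferred from the fields read above, and `consumeStream` with its newline-delimited JSON framing is an illustrative assumption, not the actual streaming client in this repo.

    // Assumed shape of a streamed chunk, inferred from the fields the handler
    // reads (value.last, value.generated, value.generated_chunk, value.thought).
    interface StreamThought {
      id_: string;
      in_progress: boolean;
      message: string;
      processedChunks?: unknown[];
    }

    interface StreamChunk {
      last?: boolean;
      generated?: string;        // full message, only present on the last chunk
      generated_chunk?: string;  // incremental text for intermediate chunks
      thought?: StreamThought;   // reasoning/thought update
    }

    // Illustrative driver: reads a streaming response, parses each line as one
    // JSON-encoded chunk (NDJSON framing is an assumption), and threads the
    // accumulator returned by onNewChunk back into the next call.
    async function consumeStream(
      response: Response,
      onNewChunk: (value: StreamChunk, accumulator: string[]) => Promise<string[]>
    ): Promise<void> {
      const reader = response.body!.getReader();
      const decoder = new TextDecoder();
      let accumulator: string[] = [];
      let buffered = '';

      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffered += decoder.decode(value, { stream: true });

        // Keep the last partial line buffered until more bytes arrive.
        const lines = buffered.split('\n');
        buffered = lines.pop() ?? '';
        for (const line of lines) {
          if (!line.trim()) continue;
          accumulator = await onNewChunk(JSON.parse(line) as StreamChunk, accumulator);
        }
      }
    }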