feat: Allow Connection of ChatGenerator to AnswerBuilder #7897

Open: wants to merge 5 commits into base: main (showing changes from 3 commits)
12 changes: 9 additions & 3 deletions haystack/components/builders/answer_builder.py
@@ -3,9 +3,10 @@
 # SPDX-License-Identifier: Apache-2.0

 import re
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union

 from haystack import Document, GeneratedAnswer, component, logging
+from haystack.dataclasses.chat_message import ChatMessage

 logger = logging.getLogger(__name__)

@@ -56,7 +57,7 @@ def __init__(self, pattern: Optional[str] = None, reference_pattern: Optional[st
     def run(
         self,
         query: str,
-        replies: List[str],
+        replies: Union[List[str], List[ChatMessage]],
         meta: Optional[List[Dict[str, Any]]] = None,
         documents: Optional[List[Document]] = None,
         pattern: Optional[str] = None,
@@ -68,7 +69,7 @@ def run(
         :param query:
             The query used in the prompts for the Generator.
         :param replies:
-            The output of the Generator.
+            The output of the Generator. Can be a list of strings or a list of ChatMessage objects.
         :param meta:
             The metadata returned by the Generator. If not specified, the generated answer will contain no metadata.
         :param documents:
@@ -104,6 +105,11 @@ def run(
         pattern = pattern or self.pattern
         reference_pattern = reference_pattern or self.reference_pattern

+        # Extract content from ChatMessage objects if replies is a list of ChatMessages
+        if isinstance(replies[0], ChatMessage):
+            meta = [msg.meta for msg in replies]
+            replies = [msg.content for msg in replies]
+
         all_answers = []
         for reply, metadata in zip(replies, meta):
             referenced_docs = []
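For context, a minimal usage sketch of the new code path (not part of the diff): calling run() directly with ChatMessage replies, constructed the same way as in the tests further down. The query, reply content, and meta values here are illustrative.

from haystack.components.builders.answer_builder import AnswerBuilder
from haystack.dataclasses.chat_message import ChatMessage, ChatRole

builder = AnswerBuilder(pattern=r"Answer: (.*)")

# A typical ChatGenerator reply carrying model metadata.
replies = [
    ChatMessage(
        content="Answer: Paris",
        role=ChatRole.ASSISTANT,
        name=None,
        meta={"model": "gpt-3.5-turbo-0613", "finish_reason": "stop"},
    )
]

result = builder.run(query="What is the capital of France?", replies=replies)
answer = result["answers"][0]
print(answer.data)  # "Paris", extracted via the pattern
print(answer.meta)  # taken from the ChatMessage, per the new branch above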
4 changes: 4 additions & 0 deletions (new release note file)
@@ -0,0 +1,4 @@
+---
+enhancements:
+  - |
+    You can now directly connect a ChatGenerator to an AnswerBuilder
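For illustration, a sketch of the pipeline connection this enables, assuming the stock OpenAIChatGenerator (requires an OpenAI API key in the environment) and the standard Pipeline API; the model choice and query are illustrative.

from haystack import Pipeline
from haystack.components.builders import AnswerBuilder
from haystack.components.generators.chat import OpenAIChatGenerator
from haystack.dataclasses import ChatMessage

pipeline = Pipeline()
pipeline.add_component("llm", OpenAIChatGenerator(model="gpt-3.5-turbo"))
pipeline.add_component("answer_builder", AnswerBuilder())

# Before this PR, "llm.replies" (List[ChatMessage]) could not be connected to
# "answer_builder.replies" (List[str]); run() now accepts both reply types.
pipeline.connect("llm.replies", "answer_builder.replies")

query = "What is the capital of France?"
result = pipeline.run(
    {
        "llm": {"messages": [ChatMessage.from_user(query)]},
        "answer_builder": {"query": query},
    }
)
print(result["answer_builder"]["answers"])  # a list with one GeneratedAnswer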
124 changes: 123 additions & 1 deletion test/components/builders/test_answer_builder.py
@@ -5,8 +5,9 @@

 import pytest

-from haystack import GeneratedAnswer, Document
+from haystack import Document, GeneratedAnswer
 from haystack.components.builders.answer_builder import AnswerBuilder
+from haystack.dataclasses.chat_message import ChatMessage, ChatRole


 class TestAnswerBuilder:
@@ -150,3 +151,124 @@ def test_run_with_reference_pattern_set_at_runtime(self):
         assert len(answers[0].documents) == 2
         assert answers[0].documents[0].content == "test doc 2"
         assert answers[0].documents[1].content == "test doc 3"
+
+    def test_run_with_chat_message_replies_without_pattern(self):
+        component = AnswerBuilder()
+        replies = [
+            ChatMessage(
+                content="Answer: AnswerString",
+                role=ChatRole.ASSISTANT,
+                name=None,
+                meta={
+                    "model": "gpt-3.5-turbo-0613",
+                    "index": 0,
+                    "finish_reason": "stop",
+                    "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+                },
+            )
+        ]
+        output = component.run(query="test query", replies=replies, meta=[{}])
+        answers = output["answers"]
+        assert len(answers) == 1
+        assert answers[0].data == "Answer: AnswerString"
+        assert answers[0].meta == {
+            "model": "gpt-3.5-turbo-0613",
+            "index": 0,
+            "finish_reason": "stop",
+            "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+        }
+        assert answers[0].query == "test query"
+        assert answers[0].documents == []
+        assert isinstance(answers[0], GeneratedAnswer)
+
+    def test_run_with_chat_message_replies_with_pattern(self):
+        component = AnswerBuilder(pattern=r"Answer: (.*)")
+        replies = [
+            ChatMessage(
+                content="Answer: AnswerString",
+                role=ChatRole.ASSISTANT,
+                name=None,
+                meta={
+                    "model": "gpt-3.5-turbo-0613",
+                    "index": 0,
+                    "finish_reason": "stop",
+                    "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+                },
+            )
+        ]
+        output = component.run(query="test query", replies=replies, meta=[{}])
+        answers = output["answers"]
+        assert len(answers) == 1
+        assert answers[0].data == "AnswerString"
+        assert answers[0].meta == {
+            "model": "gpt-3.5-turbo-0613",
+            "index": 0,
+            "finish_reason": "stop",
+            "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+        }
+        assert answers[0].query == "test query"
+        assert answers[0].documents == []
+        assert isinstance(answers[0], GeneratedAnswer)
+
+    def test_run_with_chat_message_replies_with_documents(self):
+        component = AnswerBuilder(reference_pattern="\\[(\\d+)\\]")
+        replies = [
+            ChatMessage(
+                content="Answer: AnswerString[2]",
+                role=ChatRole.ASSISTANT,
+                name=None,
+                meta={
+                    "model": "gpt-3.5-turbo-0613",
+                    "index": 0,
+                    "finish_reason": "stop",
+                    "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+                },
+            )
+        ]
+        output = component.run(
+            query="test query",
+            replies=replies,
+            meta=[{}],
+            documents=[Document(content="test doc 1"), Document(content="test doc 2")],
+        )
+        answers = output["answers"]
+        assert len(answers) == 1
+        assert answers[0].data == "Answer: AnswerString[2]"
+        assert answers[0].meta == {
+            "model": "gpt-3.5-turbo-0613",
+            "index": 0,
+            "finish_reason": "stop",
+            "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+        }
+        assert answers[0].query == "test query"
+        assert len(answers[0].documents) == 1
+        assert answers[0].documents[0].content == "test doc 2"
+
+    def test_run_with_chat_message_replies_with_pattern_set_at_runtime(self):
+        component = AnswerBuilder(pattern="unused pattern")
+        replies = [
+            ChatMessage(
+                content="Answer: AnswerString",
+                role=ChatRole.ASSISTANT,
+                name=None,
+                meta={
+                    "model": "gpt-3.5-turbo-0613",
+                    "index": 0,
+                    "finish_reason": "stop",
+                    "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+                },
+            )
+        ]
+        output = component.run(query="test query", replies=replies, meta=[{}], pattern=r"Answer: (.*)")
+        answers = output["answers"]
+        assert len(answers) == 1
+        assert answers[0].data == "AnswerString"
+        assert answers[0].meta == {
+            "model": "gpt-3.5-turbo-0613",
+            "index": 0,
+            "finish_reason": "stop",
+            "usage": {"prompt_tokens": 32, "completion_tokens": 153, "total_tokens": 185},
+        }
+        assert answers[0].query == "test query"
+        assert answers[0].documents == []
+        assert isinstance(answers[0], GeneratedAnswer)