# Conversational QA

This framework is trained on the [CoQA](https://stanfordnlp.github.io/coqa/) dataset.
## Install

```bash
pip install conversation-qa
```
## Example

```python
from conversation_qa import QA, Dialogue

qa = QA("fractalego/conversation-qa")

# Seed the dialogue with a previous question/answer pair.
dialogue = Dialogue()
dialogue.add_dialogue_pair("Where was the cat?", "The fence.")

# Ask a follow-up question that refers back to the dialogue history.
text = "A white cat is on the fence."
query = "What color is it?"
qa.get_answer(text, dialogue.get_text(), query)
```
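
For multi-turn conversations, each answer can be appended to the dialogue before the next question is asked. The sketch below assumes `get_answer` returns the answer as a string; the follow-up question `next_query` is only illustrative.

```python
# Sketch of a multi-turn loop; assumes get_answer returns the answer string.
answer = qa.get_answer(text, dialogue.get_text(), query)
dialogue.add_dialogue_pair(query, answer)

# The next question can now resolve references against the longer history.
next_query = "Where is the cat sitting?"
qa.get_answer(text, dialogue.get_text(), next_query)
```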
## Use with transformers

The repository can also be used through the `transformers` pipeline API:

```python
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("text-generation", model="fractalego/conversation-qa")
```
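
Alternatively, the tokenizer and model can be loaded directly with the Auto classes:

```python
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("fractalego/conversation-qa")
model = AutoModelForCausalLM.from_pretrained("fractalego/conversation-qa")
```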