Spaces: Runtime error
import os
os.environ["TRANSFORMERS_CACHE"] = "/tmp/transformers_cache"

import gradio as gr
import transformers
import torch
from transformers import AutoTokenizer

# Falcon-180B chat model: gated on the Hub (the license must be accepted) and
# very large, so loading it needs several hundred GB of GPU memory.
model = "tiiuae/falcon-180b-chat"

tokenizer = AutoTokenizer.from_pretrained(model)
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)
def generate_headlines(topic):
    # Ask the model for up to five short, news-style headlines about the topic.
    sequences = pipeline(
        f"Create at most 5 headlines that highlight {topic}. The headlines should be concise, attention-grabbing, and suitable for use in a news video.",
        max_length=200,
        do_sample=True,
        top_k=10,
        num_return_sequences=5,
        eos_token_id=tokenizer.eos_token_id,
        return_full_text=False,  # return only the generated text, not the prompt
    )
    headlines = [seq["generated_text"].strip() for seq in sequences]
    return "\n".join(headlines)
iface = gr.Interface(
    fn=generate_headlines,
    # gr.inputs.Textbox is the removed pre-3.0 API; current Gradio uses gr.Textbox.
    inputs=gr.Textbox(placeholder="Enter the topic"),
    outputs="text",
    examples=[["Climate Change"], ["AI Innovations"], ["Space Exploration"]],
)
iface.launch()
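A minimal sketch of the same generation call pointed at a much smaller checkpoint. Falcon-180B needs several hundred GB of GPU memory in bfloat16, far beyond standard Space hardware, so running out of memory is one plausible (though unconfirmed) cause of the runtime error above; tiiuae/falcon-7b-instruct is used here purely as an illustrative substitute, not as part of the original Space.

import transformers
import torch

# Assumption: any small instruction-tuned model would do; falcon-7b-instruct is
# just an example that fits on a single mid-range GPU.
small_model = "tiiuae/falcon-7b-instruct"

pipe = transformers.pipeline(
    "text-generation",
    model=small_model,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

result = pipe(
    "Create at most 5 headlines that highlight Climate Change. The headlines should be concise, attention-grabbing, and suitable for use in a news video.",
    max_new_tokens=120,
    do_sample=True,
    top_k=10,
    return_full_text=False,
)
print(result[0]["generated_text"])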