Spaces:
Sleeping
Sleeping
import streamlit as st
import transformers
# NOTE(review): the old `st.cache(hash_funcs=...)` attempt was abandoned because
# tokenizers are unhashable; without any caching the model was re-loaded on every
# Streamlit rerun. `st.cache_resource` is the supported API for exactly this case
# (singleton, unhashable resources such as models) — confirm the deployed
# Streamlit version is >= 1.18, which introduced it.
@st.cache_resource
def load_stuff():
    """Load and cache the distilgpt2 causal-LM and its tokenizer.

    Returns:
        tuple: (model, tokenizer) — a ``transformers`` AutoModelForCausalLM
        and AutoTokenizer, loaded once per server process and reused across
        Streamlit reruns.
    """
    model = transformers.AutoModelForCausalLM.from_pretrained("distilgpt2")
    tokenizer = transformers.AutoTokenizer.from_pretrained("distilgpt2")
    return model, tokenizer
| st.image("./img.jpg") | |
| model, tokenizer = load_stuff() | |
| user_inputed_text = st.text_input("Insert text") | |
| if len(user_inputed_text) == 0: | |
| outputs_text = "no text provided. write some text, meatbag" | |
| else: | |
| outputs = model.generate( | |
| **tokenizer([user_inputed_text], return_tensors='pt'), | |
| max_new_tokens=50, do_sample=True, | |
| ) | |
| outputs_text = tokenizer.decode(outputs[0]) | |
| st.text_area(label='output', value=outputs_text) | |