# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
tokenizer = AutoTokenizer.from_pretrained("BreadAi/StoryPy")
model = AutoModelForCausalLM.from_pretrained("BreadAi/StoryPy")
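
# Example usage (not from the original card): a minimal generation sketch
# using the standard generate() API. The prompt text and max_new_tokens
# value are illustrative assumptions, not settings from the model card.
inputs = tokenizer("Once upon a time", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))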
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("text-generation", model="BreadAi/StoryPy")
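
# Example usage (not from the original card): calling the pipeline on a
# sample prompt. The prompt and max_new_tokens value are assumptions.
result = pipe("Once upon a time", max_new_tokens=50)
print(result[0]["generated_text"])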