import streamlit as st
import streamlit.components.v1 as components
from transformers import pipeline
from diffusers import StableDiffusionPipeline
from datasets import load_dataset
from peft import PeftConfig
from accelerate import Accelerator
from optimum.onnxruntime import ORTModelForSequenceClassification
import torch
import time
# Cache resource-intensive models
@st.cache_resource
def load_diffuser_model():
    return StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")
# Sidebar navigation
st.sidebar.title("🤗 Libraries Demo")
st.sidebar.markdown("Explore text 📝, images 🖼️, and model ops 🔗 with Hugging Face and Arcee!")
page = st.sidebar.selectbox(
    "Choose a Section",
    [
        "🏠 Home",
        "🔄 Workflow",
        "📝 Transformers",
        "🖼️ Diffusers",
        "📊 Datasets",
        "⚙️ PEFT",
        "🚀 Accelerate",
        "⚡ Optimum",
        "📚 DistillKit",
        "🔗 MergeKit",
        "❄️ Spectrum"
    ],
    help="Select a library to explore!"
)
# Mermaid graph for DistillKit, MergeKit, and Spectrum workflows
mermaid_code = """
graph TD
    subgraph DistillKit
        A1[Load Teacher Model] --> B1[Load Student Model]
        B1 --> C1[Configure Distillation]
        C1 --> D1[Perform Distillation]
        D1 --> E1[Evaluate Model]
    end
    subgraph MergeKit
        A2[Select Models] --> B2[Choose Merge Method]
        B2 --> C2[Set Parameters]
        C2 --> D2[Merge Models]
        D2 --> E2[Save Merged Model]
    end
    subgraph Spectrum
        A3[Load Model] --> B3[Analyze Layers]
        B3 --> C3[Generate Config]
        C3 --> D3[Apply Freezing]
        D3 --> E3[Train/Evaluate Model]
    end
"""
# Home Page
if page == "🏠 Home":
    st.title("Hugging Face & Arcee Libraries Demo 🌟")
    st.markdown("""
Welcome to an interactive demo of powerful libraries for text, image, and model processing!
- **📝 Text**: Analyze or generate text with Transformers.
- **🖼️ Images**: Create visuals with Diffusers.
- **🔗 Models**: Distill, merge, and optimize with Arcee's DistillKit, MergeKit, and Spectrum.

Navigate via the sidebar to explore each library!
""")
# Workflow Page with Mermaid Graph
elif page == "🔄 Workflow":
    st.header("🔄 Workflows: DistillKit, MergeKit, Spectrum")
    st.markdown("See how inputs flow to outputs in Arcee's libraries with this Mermaid graph:")
    # Render via Mermaid's startOnLoad auto-detection of elements with class="mermaid";
    # this works across Mermaid versions (newer releases dropped the callback form of
    # mermaid.render used in the older pattern).
    components.html(f"""
    <pre class="mermaid">
{mermaid_code}
    </pre>
    <script src="https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js"></script>
    <script>
        mermaid.initialize({{ startOnLoad: true }});
    </script>
    """, height=600)
# Transformers Section
elif page == "📝 Transformers":
    st.header("📝 Transformers")
    st.markdown("Process text with pre-trained models.")
    task = st.selectbox("Task", ["Sentiment Analysis", "Text Generation"])
    text = st.text_area("Input Text", "")
    if st.button("Run") and text:
        with st.spinner("Processing..."):
            if task == "Sentiment Analysis":
                result = pipeline("sentiment-analysis")(text)
                st.write(f"Result: {result[0]['label']} (Score: {result[0]['score']:.2f})")
            else:
                result = pipeline("text-generation")(text, max_length=50)[0]['generated_text']
                st.write(f"Generated: {result}")
# Diffusers Section
elif page == "🖼️ Diffusers":
    st.header("🖼️ Diffusers")
    st.markdown("Generate images from text.")
    prompt = st.text_input("Prompt", "A futuristic city")
    if st.button("Generate"):
        with st.spinner("Generating..."):
            pipe = load_diffuser_model()
            image = pipe(prompt).images[0]
            st.image(image, caption=prompt)
# Datasets Section
elif page == "📊 Datasets":
    st.header("📊 Datasets")
    st.markdown("Load and explore datasets.")
    dataset = st.selectbox("Dataset", ["imdb", "squad"])
    if st.button("Load"):
        data = load_dataset(dataset, split="train[:5]")
        st.write(data)
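        # A table view is usually easier to scan than the Dataset repr; converting
        # a 5-row split with to_pandas() is cheap.
        st.dataframe(data.to_pandas())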
# PEFT Section
elif page == "⚙️ PEFT":
    st.header("⚙️ PEFT")
    st.markdown("Parameter-efficient fine-tuning.")
    text = st.text_area("Text", "")
    if st.button("Classify") and text:
        st.write("Simulated PEFT classification: Positive")
# Accelerate Section
elif page == "🚀 Accelerate":
    st.header("🚀 Accelerate")
    st.markdown("Optimize across devices.")
    text = st.text_area("Text", "")
    if st.button("Analyze") and text:
        accelerator = Accelerator()
        result = pipeline("sentiment-analysis")(text)
        st.write(f"Result: {result[0]['label']} (Score: {result[0]['score']:.2f})")
# Optimum Section
elif page == "⚡ Optimum":
    st.header("⚡ Optimum")
    st.markdown("Hardware-accelerated inference.")
    text = st.text_area("Text", "")
    if st.button("Classify") and text:
        st.write("Simulated Optimum result: Positive")
# DistillKit Section
elif page == "📚 DistillKit":
    st.header("📚 DistillKit: Model Distillation")
    st.markdown("Distill large models into smaller, efficient ones. Here are the five main steps:")
    # 1. Load teacher model
    teacher = st.selectbox("Teacher Model", ["arcee-ai/Arcee-Spark", "bert-base-uncased"])
    st.write(f"1. Loaded teacher: {teacher}")
    # 2. Load student model
    student = st.selectbox("Student Model", ["Qwen/Qwen2-1.5B", "distilbert-base-uncased"])
    st.write(f"2. Loaded student: {student}")
    # 3. Configure distillation
    temp = st.slider("Temperature", 1.0, 5.0, 2.0)
    alpha = st.slider("Alpha", 0.0, 1.0, 0.5)
    st.write(f"3. Config: Temp={temp}, Alpha={alpha}")
    # 4. Perform distillation (simulated)
    if st.button("Distill"):
        with st.spinner("Distilling..."):
            time.sleep(2)
            st.success("4. Distillation complete!")
        # 5. Evaluate distilled model
        st.write("5. Evaluating...")
        metrics = {"accuracy": 0.85, "loss": 0.12}
        st.write(f"Metrics: {metrics}")
st.markdown("""
**How It Works:**
DistillKit compresses a teacher model into a student model using distillation techniques.
```python
config = {"teacher": "arcee-ai/Arcee-Spark", "student": "Qwen/Qwen2-1.5B", "temp": 2.0, "alpha": 0.5}