# A Streamlit playground for training a small Keras MLP on 2-D toy datasets.
import streamlit as st
import numpy as np
import mlxtend
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons, make_circles, make_blobs
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense,InputLayer
from tensorflow.keras.optimizers import SGD
from tensorflow.keras import regularizers
from mlxtend.plotting import plot_decision_regions
# Page background styling via injected CSS (unsafe_allow_html is required
# for raw <style> markup).
st.markdown("""<style>
.stApp {background-image: url("https://tse3.mm.bing.net/th/id/OIP.bI6tp0W5MMPsDMUDlitlJQHaE8?pid=Api&P=0&h=180");
background-size: cover;
background-position: center;
background-repeat: no-repeat;}</style>""",unsafe_allow_html=True)
st.write("## A Neural Network Playground")

# Sidebar: dataset generation settings.
st.sidebar.write("### Dataset Settings")
n_samples = st.sidebar.number_input("No.of Samples",min_value=300,max_value=2000,step = 100,value=500)
dataset_name = st.sidebar.selectbox("Dataset", ["Blobs","Moons", "Circles"])
test_ratio = st.sidebar.slider("Test Ratio", 0.1, 0.5, 0.2, 0.05)
noise = st.sidebar.slider("Noise", 0.0, 0.5, 0.1, 0.01)
# Upper bound for the batch-size slider: the number of training samples left
# after the test split. Renamed from `all`, which shadowed the builtin all().
max_train_samples = int(n_samples - (n_samples * test_ratio))
batch_size = st.sidebar.slider("Batch Size", 40, max_train_samples)
def generate_dataset(name, noise, test_ratio, samples=None):
    """Create a 2-class, 2-D toy dataset and return its train/test split.

    Parameters
    ----------
    name : str
        One of "Moons" or "Circles"; any other value falls back to "Blobs".
    noise : float
        Noise level for make_moons/make_circles (not applied to blobs,
        which use a fixed cluster_std instead).
    test_ratio : float
        Fraction of samples held out for the test set.
    samples : int, optional
        Total number of samples. Defaults to the module-level sidebar
        value ``n_samples`` so existing callers are unchanged.

    Returns
    -------
    tuple
        (x_train, x_test, y_train, y_test), split deterministically
        (random_state=42 throughout, for reproducible reruns).
    """
    if samples is None:
        samples = n_samples  # fall back to the global sidebar setting
    if name == "Moons":
        x, y = make_moons(n_samples=samples, noise=noise, random_state=42)
    elif name == "Circles":
        x, y = make_circles(n_samples=samples, noise=noise, random_state=42)
    else:
        x, y = make_blobs(n_samples=samples, centers=2, random_state=42, cluster_std=1.5)
    return train_test_split(x, y, test_size=test_ratio, random_state=42)
# Build the train/test split from the sidebar settings and standardize the
# features — the scaler is fit on the training set only, then applied to the
# test set, to avoid test-set leakage.
x_train, x_test, y_train, y_test = generate_dataset(dataset_name, noise, test_ratio)
std = StandardScaler()
x_train = std.fit_transform(x_train)
x_test = std.transform(x_test)

# Training hyperparameter controls, laid out in five columns across the page.
col1, col2, col3, col4, col5 = st.columns(5)
epochs = col1.number_input("Epochs",min_value=100,max_value=500,step=10)
lr = col2.selectbox("Learning Rate", [0.1,0.0001,0.01,0.03,1,3,10])
activation = col3.selectbox("Activation", ["sigmoid", "tanh", "relu"])
regularization = col4.selectbox("Regularization", ["None", "L1", "L2"])
reg_rate = col5.selectbox("Reg Rate",[0,0.001,0.01,0.03,0.1,1,3,10])
st.write("### Hidden Layers")
# Persist the hidden-layer configuration across Streamlit reruns: each list
# entry is the neuron count for one hidden layer. Start with one 2-neuron layer.
if "layers" not in st.session_state:
    st.session_state.layers = [2]
colA, colB = st.columns(2)
if colA.button("➕ Add Layer"):
    st.session_state.layers.append(2)
# Removal is guarded so at least one hidden layer always remains.
if colB.button("➖ Remove Layer") and len(st.session_state.layers) > 1:
    st.session_state.layers.pop()
cola , colb = st.columns(2)
with colb:
    # One slider per hidden layer; the chosen widths are collected into
    # layer_neurons, which the training section below reads.
    layer_neurons = []
    st.write("###### Configure Neurons per Layer")
    for i, neurons in enumerate(st.session_state.layers):
        n = st.slider(f"Layer {i+1} Neurons", 1, 20, neurons, 1)
        layer_neurons.append(n)
with cola:
    # Preview of the (standardized) training data before any model is fit.
    fig, ax = plt.subplots(figsize=(4,3))
    sns.scatterplot(x=x_train[:,0], y=x_train[:,1], hue=y_train, s=15)
    ax.set_title("Before Training")
    st.pyplot(fig)
if st.button("🚀Start Training"):
    # Map the regularization choice to a Keras kernel regularizer
    # (reg stays None when regularization is disabled).
    if regularization == "L1":
        reg = regularizers.l1(reg_rate)
    elif regularization == "L2":
        reg = regularizers.l2(reg_rate)
    else:
        reg = None

    # Assemble the MLP: 2-D input, the configured hidden stack, and a single
    # sigmoid unit for binary classification.
    model = Sequential()
    model.add(InputLayer(input_shape=(2,)))
    for n in layer_neurons:  # was layer_neurons[0:] — redundant full-copy slice
        model.add(Dense(n, activation=activation, kernel_regularizer=reg))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer=SGD(learning_rate=lr), loss="binary_crossentropy", metrics=["accuracy"])
    hist = model.fit(x_train, y_train, epochs=epochs,
                     batch_size=batch_size, verbose=0,
                     validation_data=(x_test, y_test))

    col1, col2 = st.columns(2)
    # Decision-boundary plot drawn on the current (fig1) axes.
    # NOTE(review): plot_decision_regions expects a scikit-learn-style
    # predict() returning class labels, while Keras predict() returns
    # probabilities of shape (n, 1) — confirm the rendered regions are correct.
    fig1, ax1 = plt.subplots(figsize=(4,3))
    plot_decision_regions(X=x_train, y=y_train, clf=model)
    plt.title("Decision Boundary")
    plt.legend()
    col1.pyplot(fig1)

    # Training vs. validation loss curves over the epochs.
    fig2, ax2 = plt.subplots(figsize=(4,3))
    ax2.plot(hist.history["loss"], label="Train Loss")
    ax2.plot(hist.history["val_loss"], label="Val Loss")
    ax2.legend()
    ax2.set_title("Loss vs Epochs")
    col2.pyplot(fig2)

    # Final-epoch losses as a quick numeric summary.
    st.write(f"##### Training Loss: {hist.history['loss'][-1]:.3f}")
    st.write(f"##### Validation Loss: {hist.history['val_loss'][-1]:.3f}")