import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Input
from tensorflow.keras.regularizers import l2
from tensorflow.keras.callbacks import EarlyStopping
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import gradio as gr
import matplotlib
matplotlib.use("Agg")  # non-interactive backend, avoids GUI/thread issues when plotting inside the Gradio callback
import matplotlib.pyplot as plt
# Dataset: 30 mixes covering 5 w/c ratios (including 0.48), 2 fly-ash contents, and 3 curing ages (7, 28, 56 days)
w_c_ratio = [0.36, 0.40, 0.44, 0.48, 0.52] * 6
curing_days = [7] * 10 + [28] * 10 + [56] * 10
fly_ash_content = [22] * 5 + [33] * 5 + [22] * 5 + [33] * 5 + [22] * 5 + [33] * 5
compressive_strength = [
    22.82, 20.17, 17.46, 18.37, 12.79,  # 7 days, 22% fly ash
    18.68, 16.78, 15.15, 12.84, 10.20,  # 7 days, 33% fly ash
    30.36, 28.78, 27.01, 26.24, 22.50,  # 28 days, 22% fly ash
    29.19, 26.67, 24.62, 19.79, 15.47,  # 28 days, 33% fly ash
    32.68, 30.31, 28.51, 27.41, 24.07,  # 56 days, 22% fly ash
    32.13, 29.33, 27.20, 21.37, 16.58   # 56 days, 33% fly ash
]
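# Engineered features below: 1/(w/c) and (w/c)^2 add nonlinearity in the water-cement ratio,
# while log(curing days) reflects the slowing rate of strength gain with age.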
data = {
    "w/c_ratio": w_c_ratio,
    "inverse_wc_ratio": [1/x for x in w_c_ratio],           # engineered feature: 1/(w/c)
    "wc_ratio_squared": [x**2 for x in w_c_ratio],          # engineered feature: (w/c)^2
    "log_curing_days": [np.log(x) for x in curing_days],    # engineered feature: log of curing age
    "curing_days": curing_days,
    "fly_ash_content": fly_ash_content,
    "compressive_strength": compressive_strength
}
df = pd.DataFrame(data)
# Define input (X) and output (y)
X = df[['w/c_ratio', 'inverse_wc_ratio', 'wc_ratio_squared', 'log_curing_days', 'curing_days', 'fly_ash_content']]
y = df['compressive_strength']
# Standardize input features
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)
y_mean = y.mean()
y_std = y.std()
y_scaled = (y - y_mean) / y_std # Scale target variable
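# Standardizing the target keeps the MSE loss well scaled; every prediction must be
# mapped back to MPa with (prediction * y_std) + y_mean.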
# Split data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X_scaled, y_scaled, test_size=0.2, random_state=42)
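# With 30 samples in total, a 0.2 test fraction leaves 24 rows for training and 6 for validation.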
# Define optimized ANN model
model = Sequential([
    Input(shape=(X_train.shape[1],)),
    Dense(256, activation=tf.keras.activations.swish, kernel_regularizer=l2(0.003)),
    Dropout(0.05),
    Dense(128, activation=tf.keras.activations.swish, kernel_regularizer=l2(0.005)),
    Dropout(0.1),
    Dense(64, activation=tf.keras.activations.swish, kernel_regularizer=l2(0.005)),
    Dense(32, activation=tf.keras.activations.swish, kernel_regularizer=l2(0.005)),
    Dense(16, activation=tf.keras.activations.swish, kernel_regularizer=l2(0.005)),
    Dense(1, activation='linear')
])
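# Architecture: five swish hidden layers tapering 256 -> 16, L2 regularization on every Dense layer,
# and light dropout after the two widest layers.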
# Compile the model
model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.0002),
              loss='mse',        # mean squared error loss for the regression target
              metrics=['mae'])
# Train the model with early stopping
early_stopping = EarlyStopping(monitor='val_loss', patience=50, restore_best_weights=True)
model.fit(X_train, y_train, epochs=5000, batch_size=32, validation_data=(X_test, y_test), verbose=0, callbacks=[early_stopping])
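# Optional sanity check (not part of the original app): report the held-out error in MPa
# by undoing the target standardization on the MAE metric.
# _, test_mae = model.evaluate(X_test, y_test, verbose=0)
# print(f"Test MAE: {test_mae * y_std:.2f} MPa")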
# Save and reload model using new Keras format
model.save("ann_model.keras")
model = keras.models.load_model("ann_model.keras")
# Function for prediction
def predict_strength(wc_ratio, curing_days, fly_ash):
    # Build the same engineered features used during training
    input_data = np.array([[wc_ratio, 1/wc_ratio, wc_ratio**2, np.log(curing_days), curing_days, fly_ash]])
    input_scaled = scaler.transform(input_data)
    prediction_scaled = model.predict(input_scaled, verbose=0)[0][0]
    prediction = (prediction_scaled * y_std) + y_mean  # undo target standardization

    # Generate strength trend graph up to 56 days
    curing_days_range = np.linspace(7, 56, 10)
    predicted_values = [
        model.predict(
            scaler.transform([[wc_ratio, 1/wc_ratio, wc_ratio**2, np.log(d), d, fly_ash]]),
            verbose=0
        )[0][0]
        for d in curing_days_range
    ]
    predicted_values = [(p * y_std) + y_mean for p in predicted_values]  # undo target standardization

    plt.figure(figsize=(7, 5))
    plt.plot(curing_days_range, predicted_values, marker='o', linestyle='-', color='blue', label="Predicted Compressive Strength")
    plt.xlabel("Curing Period (Days)")
    plt.ylabel("Predicted Compressive Strength (MPa)")
    plt.title("Strength Prediction Trend")
    plt.grid(True, linestyle="--", alpha=0.7)
    plt.legend()
    plt.savefig("prediction_plot.png")
    plt.close()

    return float(prediction), "prediction_plot.png"
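# Example call (illustration only; the Gradio UI below is the intended entry point):
# strength_mpa, plot_path = predict_strength(0.44, 28, 22)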
# Gradio UI
demo = gr.Blocks()
with demo:
    gr.Markdown("# Compressive Strength Predictor")
    with gr.Row():
        wc_input = gr.Number(label="Enter w/c ratio")
        days_input = gr.Number(label="Enter curing days")
        fly_ash_dropdown = gr.Dropdown([22, 33], label="Fly Ash % in PPC", value=22)
    predict_button = gr.Button("Predict")
    output_strength = gr.Number(label="Predicted Compressive Strength (MPa)")
    output_graph = gr.Image()
    predict_button.click(predict_strength, inputs=[wc_input, days_input, fly_ash_dropdown], outputs=[output_strength, output_graph])

demo.launch(share=True)