import dash
from dash import dcc, html
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
from sklearn.ensemble import IsolationForest
from sklearn.preprocessing import StandardScaler
import plotly.graph_objs as go
# Initialize the Dash app
app = dash.Dash(__name__)
server = app.server
# Generate mock data
def generate_mock_data(n_samples=100):
    """Build a DataFrame of synthetic security metrics, one row per minute."""
    current_time = datetime.now()
    timestamps = [current_time - timedelta(minutes=i) for i in range(n_samples)]
    data = pd.DataFrame({
        'timestamp': timestamps,
        'network_traffic': np.random.normal(1000, 200, n_samples),
        'failed_logins': np.random.poisson(5, n_samples),
        'suspicious_ips': np.random.poisson(2, n_samples),
        'data_exfiltration': np.random.normal(50, 10, n_samples)
    })
    return data
# Detect anomalies
def detect_anomalies(df):
    """Flag anomalous rows with an Isolation Forest; returns a boolean mask."""
    isolation_forest = IsolationForest(contamination=0.1, random_state=42)  # expect ~10% outliers
    scaler = StandardScaler()
    features = ['network_traffic', 'failed_logins', 'suspicious_ips', 'data_exfiltration']
    X = df[features]
    X_scaled = scaler.fit_transform(X)
    # fit_predict labels outliers as -1 and inliers as 1
    return isolation_forest.fit_predict(X_scaled) == -1
# Generate data and detect anomalies
df = generate_mock_data()
anomalies = detect_anomalies(df)
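# Optional sanity check (uncomment to inspect the flagged rows before the
# dashboard renders); `anomalies` is a boolean mask aligned with `df`:
# print(f"Flagged {anomalies.sum()} of {len(df)} samples as anomalous")
# print(df.loc[anomalies, ['timestamp', 'network_traffic']].head())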
# Create figures
def create_network_traffic_figure():
    """Line chart of network traffic with detected anomalies overlaid as markers."""
    fig = go.Figure()
    # Normal traffic
    fig.add_trace(go.Scatter(
        x=df[~anomalies]['timestamp'],
        y=df[~anomalies]['network_traffic'],
        name='Normal Traffic',
        mode='lines',
        line=dict(color='blue')
    ))
    # Anomalies
    fig.add_trace(go.Scatter(
        x=df[anomalies]['timestamp'],
        y=df[anomalies]['network_traffic'],
        name='Anomalies',
        mode='markers',
        marker=dict(color='red', size=10)
    ))
    fig.update_layout(
        title='Network Traffic with Anomaly Detection',
        xaxis_title='Time',
        yaxis_title='Network Traffic (bytes)',
        template='plotly_white'
    )
    return fig
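# The same overlay pattern could be reused for the other collected metrics.
# A minimal sketch for failed logins (illustrative only; not wired into the
# layout below):
def create_failed_logins_figure():
    fig = go.Figure()
    fig.add_trace(go.Scatter(
        x=df[~anomalies]['timestamp'],
        y=df[~anomalies]['failed_logins'],
        name='Failed Logins',
        mode='lines',
        line=dict(color='blue')
    ))
    fig.add_trace(go.Scatter(
        x=df[anomalies]['timestamp'],
        y=df[anomalies]['failed_logins'],
        name='Anomalies',
        mode='markers',
        marker=dict(color='red', size=10)
    ))
    fig.update_layout(
        title='Failed Logins with Anomaly Detection',
        xaxis_title='Time',
        yaxis_title='Failed Logins per Minute',
        template='plotly_white'
    )
    return fig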
# Create metrics
def generate_metrics():
    """Summary statistics shown in the Key Metrics table."""
    return {
        'Total Anomalies': int(sum(anomalies)),
        'Average Network Traffic': f"{float(df['network_traffic'].mean()):.2f}",
        'Max Failed Logins': int(df['failed_logins'].max()),
    }
# Define the app layout
app.layout = html.Div([
    html.H1("AI-Enhanced Cybersecurity Dashboard",
            style={'textAlign': 'center', 'padding': '20px'}),

    # Metrics Section
    html.Div([
        html.H2("Key Metrics", style={'textAlign': 'center'}),
        html.Div([
            html.Table(
                [html.Tr([html.Th(k), html.Td(v)]) for k, v in generate_metrics().items()],
                # Dash style dicts use camelCase CSS property names
                style={'margin': 'auto', 'borderCollapse': 'collapse'}
            )
        ])
    ], style={'padding': '20px'}),

    # Network Traffic Graph
    html.Div([
        html.H2("Network Traffic Analysis", style={'textAlign': 'center'}),
        dcc.Graph(figure=create_network_traffic_figure())
    ], style={'padding': '20px'})
])
# Add CSS
app.index_string = '''
<!DOCTYPE html>
<html>
    <head>
        {%metas%}
        <title>Cybersecurity Dashboard</title>
        {%favicon%}
        {%css%}
        <style>
            body {
                font-family: Arial, sans-serif;
                margin: 0;
                background-color: #f0f2f5;
            }
            table {
                width: 100%;
                max-width: 600px;
            }
            th, td {
                padding: 12px;
                text-align: left;
                border-bottom: 1px solid #ddd;
            }
            th {
                background-color: #f8f9fa;
            }
        </style>
    </head>
    <body>
        {%app_entry%}
        <footer>
            {%config%}
            {%scripts%}
            {%renderer%}
        </footer>
    </body>
</html>
'''
if __name__ == '__main__':
    # On newer Dash versions, use app.run(debug=True) instead
    app.run_server(debug=True)
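# To try it locally: run this file with Python and open http://127.0.0.1:8050,
# Dash's default host and port.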