import tensorflow as tf
import tensorflow_io as tfio
import gradio as gr

# Load the pre-trained model and define the output labels
model = tf.keras.models.load_model('capuchin_bird_audio.h5')
class_names = ['This is not a Capuchin bird', 'This is a Capuchin bird']
# Preprocess an audio file into the spectrogram input the model expects
def test_preprocess_1(file_path):
    # Decode the WAV file into a mono float tensor
    file_contents = tf.io.read_file(file_path)
    wav, sample_rate = tf.audio.decode_wav(file_contents, desired_channels=1)
    wav = tf.squeeze(wav, axis=-1)
    # Resample to 16 kHz
    sample_rate = tf.cast(sample_rate, dtype=tf.int64)
    wav = tfio.audio.resample(wav, rate_in=sample_rate, rate_out=16000)
    # Trim or zero-pad to exactly 48,000 samples (3 seconds at 16 kHz)
    wav = wav[:48000]
    zero_padding = tf.zeros([48000] - tf.shape(wav), dtype=tf.float32)
    wav = tf.concat([zero_padding, wav], 0)
    # Short-time Fourier transform -> magnitude spectrogram
    spectrogram = tf.signal.stft(wav, frame_length=320, frame_step=32)
    spectrogram = tf.abs(spectrogram)
    # Add channel and batch dimensions: (1, time, freq, 1)
    spectrogram = tf.expand_dims(spectrogram, axis=2)
    spectrogram = tf.expand_dims(spectrogram, axis=0)
    return spectrogram
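
# Optional sanity check (hypothetical file name, not part of the app): for a clip
# padded/trimmed to 3 s at 16 kHz, the STFT above should yield a batch of shape
# (1, 1491, 257, 1).
# print(test_preprocess_1('example_capuchin_clip.wav').shape)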

# Run the model on an uploaded audio file and map the score to a label
def predict_audio(file_path):
    input_data = test_preprocess_1(file_path)
    prediction = model.predict(input_data)

    # The model outputs a single sigmoid score; threshold it at 0.5
    if prediction[0][0] > 0.5:
        result = class_names[1]
    else:
        result = class_names[0]

    return result
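
# Example direct call outside Gradio (hypothetical path; any WAV file works):
# print(predict_audio('recordings/sample_clip.wav'))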


# Gradio Interface
iface = gr.Interface(
    fn=predict_audio,
    title='Capuchin Bird Classification',
    description='Upload an audio file to classify whether it is a Capuchin bird or not.',
    inputs=gr.Audio(sources=['upload'], label="Input Audio", type="filepath"),
    outputs='text',
)

# Launch the interface; share=True also creates a temporary public link
iface.launch(share=True)
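
# To run the app (assuming this file is saved as app.py and capuchin_bird_audio.h5
# sits in the same directory):
#   python app.py
# Gradio serves the interface on http://127.0.0.1:7860 by default.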