chore: import tensorflow and keras
parent aafdde18d1
commit 91bd13ee1d
@@ -4,13 +4,11 @@ import pandas as pd
 from pathlib import Path
 import numpy as np
 from pandas.core.frame import DataFrame
-from math import floor
 import sys
 sys.path.insert(0, '/Users/Markus/Prosjekter git/Slovakia 2021/python_speech_features/python_speech_features')
 from python_speech_features.python_speech_features import mfcc
 import json
-#import librosa
-#from Present_data import get_data

 # Global variables for MFCC
 MFCC_STEPSIZE = 0.5     # Seconds
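For reference, the MFCC global kept as context above is typically passed straight into the mfcc function imported at the top of the file. A minimal sketch, assuming the pip-installed python_speech_features import path (the repo uses a local checkout via sys.path), an assumed 16 kHz sample rate, a placeholder signal, and that MFCC_STEPSIZE maps to the winstep argument; none of these specifics come from the commit itself:

import numpy as np
from python_speech_features import mfcc    # pip package path; the repo imports a local copy

MFCC_STEPSIZE = 0.5        # seconds, same value as the global in the diff above
SAMPLE_RATE = 16000        # Hz, assumed for illustration

signal = np.zeros(2 * SAMPLE_RATE)          # placeholder two-second signal
features = mfcc(signal,
                samplerate=SAMPLE_RATE,
                winstep=MFCC_STEPSIZE,      # hop between analysis windows, in seconds
                numcep=13)
print(features.shape)                       # (number of frames, 13)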
Neural_Network_Analysis.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+import json
+import numpy as np
+from sklearn.model_selection import train_test_split
+import tensorflow as tf
+from tensorflow import keras
+from pathlib import Path
+
+# path to json file that stores MFCCs and genre labels for each processed segment
+DATA_PATH = str(Path.cwd() / "mfcc_data.json")
+
+
+def load_data(data_path):
+
+    with open(data_path, "r") as fp:
+        data = json.load(fp)
+
+    # convert lists to numpy arrays
+    X = np.array(data["mfcc"])
+    y = np.array(data["labels"])
+
+    print("Data successfully loaded!")
+
+    return X, y
+
+
+if __name__ == "__main__":
+
+    # load data
+    X, y = load_data(DATA_PATH)
+
+    # create train/test split
+    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
+
+    # build network topology
+    model = keras.Sequential([
+
+        # input layer
+        keras.layers.Flatten(input_shape=(X.shape[1], X.shape[2])),
+
+        # 1st dense layer
+        keras.layers.Dense(512, activation='relu'),
+
+        # 2nd dense layer
+        keras.layers.Dense(256, activation='relu'),
+
+        # 3rd dense layer
+        keras.layers.Dense(64, activation='relu'),
+
+        # output layer
+        keras.layers.Dense(10, activation='softmax')
+    ])
+
+    # compile model
+    optimiser = keras.optimizers.Adam(learning_rate=0.0001)
+    model.compile(optimizer=optimiser,
+                  loss='sparse_categorical_crossentropy',
+                  metrics=['accuracy'])
+
+    model.summary()
+
+    # train model
+    history = model.fit(X_train, y_train, validation_data=(X_test, y_test), batch_size=32, epochs=50)
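The new script assumes mfcc_data.json already exists in the working directory and holds two parallel lists under the keys "mfcc" and "labels" (the key names come from load_data above; the array shapes below are assumptions, not taken from the commit). A minimal sketch of producing such a file so the script can run end to end:

import json
import numpy as np

# Hypothetical data: 100 segments, each 9 MFCC frames of 13 coefficients,
# with one integer class label (0-9) per segment to match the 10-unit output layer.
mfccs = np.random.rand(100, 9, 13)
labels = np.random.randint(0, 10, size=100)

data = {
    "mfcc": mfccs.tolist(),      # read into X by load_data
    "labels": labels.tolist(),   # read into y by load_data
}

with open("mfcc_data.json", "w") as fp:
    json.dump(data, fp)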