Tuple index out of range - training an audio model

Problem description

I am trying to analyze audio files and train a system on the extracted features, but when I fit the model I get the error "tuple index out of range". I have noted the shape of every array I use in a comment next to the corresponding print statement. Can you help me understand how to define the dimensions when building the model?

Let me know if you need more details.

import glob
import numpy as np
import pandas as pd
import random
import librosa
import librosa.display
import os
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer
from tensorflow.keras.layers import LSTM, Dense, Dropout, Flatten
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint

X, sample_rate = librosa.load(r'C:\Users\Sumanth\Desktop\voice\Speaker-275-3.wav', res_type='kaiser_fast')
print(X.shape) # Shape is (439238,)

#extracting the MFCC feature from Audio signal
mfccs = librosa.feature.mfcc(y=X, sr=sample_rate, n_mfcc=40)
print(mfccs.shape) # Shape is (40, 858)

#manually assigning the label as 275
z = np.asarray(275)

#Validation data
val_x, sample_rate = librosa.load(r'C:\Users\Sumanth\Desktop\voice\Speaker-275-2.wav', res_type='kaiser_fast')
print(val_x.shape) # Shape is (292826,)

val_y=np.asarray(275)

#Building the model
model = Sequential()
model.add(Dense(256, input_shape=(858,),activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(275,activation='softmax'))
model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
#training our model
model.fit(mfccs, z, epochs=5, validation_data=(val_x, val_y))

---------------------- Error ----------------------


IndexError                                Traceback (most recent call last)
<ipython-input-31-adaf98404d0e> in <module>
     40 model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
     41 #training our model
---> 42 model.fit(mfccs, z, epochs=5, validation_data=(val_x, val_y))
     43 
     44 

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
    726         max_queue_size=max_queue_size,
    727         workers=workers,
--> 728         use_multiprocessing=use_multiprocessing)
    729 
    730   def evaluate(self,

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_v2.py in fit(self, model, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, **kwargs)
    222           validation_data=validation_data,
    223           validation_steps=validation_steps,
--> 224           distribution_strategy=strategy)
    225 
    226       total_samples = _get_total_number_of_samples(training_data_adapter)

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _process_training_inputs(model, x, y, batch_size, epochs, sample_weights, class_weights, steps_per_epoch, validation_split, validation_data, validation_steps, shuffle, distribution_strategy, max_queue_size, workers, use_multiprocessing)
    545         max_queue_size=max_queue_size,
    546         workers=workers,
--> 547         use_multiprocessing=use_multiprocessing)
    548     val_adapter = None
    549     if validation_data:

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _process_inputs(model, x, y, batch_size, epochs, sample_weights, class_weights, shuffle, steps, distribution_strategy, max_queue_size, workers, use_multiprocessing)
    592         batch_size=batch_size,
    593         check_steps=False,
--> 594         steps=steps)
    595   adapter = adapter_cls(
    596       x,

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, batch_size, check_steps, steps_name, steps, validation_split, shuffle, extract_tensors_from_dataset)
   2532       # Check that all arrays have the same length.
   2533       if not self._distribution_strategy:
-> 2534         training_utils.check_array_lengths(x, y, sample_weights)
   2535         if self._is_graph_network and not self.run_eagerly:
   2536           # Additional checks to avoid users mistakenly using improper loss fns.

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_utils.py in check_array_lengths(inputs, targets, weights)
    661 
    662   set_x = set_of_lengths(inputs)
--> 663   set_y = set_of_lengths(targets)
    664   set_w = set_of_lengths(weights)
    665   if len(set_x) > 1:

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_utils.py in set_of_lengths(x)
    656       return set([
    657           y.shape[0]
--> 658           for y in x
    659           if y is not None and not is_tensor_or_composite_tensor(y)
    660       ])

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_utils.py in <listcomp>(.0)
    657           y.shape[0]
    658           for y in x
--> 659           if y is not None and not is_tensor_or_composite_tensor(y)
    660       ])
    661 

IndexError: tuple index out of range
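
The final frame of the traceback reads `y.shape[0]` for each target array. The failing lookup can be reproduced outside Keras with a plain NumPy array; the snippet below is only an illustration of why a zero-dimensional label triggers the error:

import numpy as np

z = np.asarray(275)          # zero-dimensional array
print(z.shape)               # () -- an empty shape tuple
try:
    z.shape[0]               # the same lookup Keras performs in set_of_lengths
except IndexError as err:
    print(err)               # tuple index out of range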

Tags: python-3.x, tensorflow, machine-learning, keras, audio-analysis

Solution
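
The IndexError comes from the label array: `z = np.asarray(275)` is zero-dimensional, so when Keras checks that inputs and targets have the same number of samples it calls `y.shape[0]` on an empty shape tuple. The features have a related problem: with `input_shape=(858,)`, the (40, 858) MFCC matrix is treated as 40 samples of 858 features each, yet only one label is supplied, and a 275-unit softmax expects class indices in the range 0..274. Below is a minimal sketch of one way to restructure the data, assuming each recording is collapsed into a single 40-dimensional MFCC vector and each speaker is mapped to an integer class index; the file lists and the label value 274 are placeholders, not values taken from the question:

import numpy as np
import librosa
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.models import Sequential

def extract_features(path, n_mfcc=40):
    # Load the file and average the MFCC matrix over time so every
    # recording becomes one fixed-length vector of shape (n_mfcc,).
    y, sr = librosa.load(path, res_type='kaiser_fast')
    mfccs = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=n_mfcc)  # (n_mfcc, frames)
    return np.mean(mfccs, axis=1)                            # (n_mfcc,)

# Placeholder file lists and labels -- replace with your own recordings
# and map each speaker to an index in 0..num_classes-1.
train_files  = [r'C:\Users\Sumanth\Desktop\voice\Speaker-275-3.wav']
train_labels = [274]
val_files    = [r'C:\Users\Sumanth\Desktop\voice\Speaker-275-2.wav']
val_labels   = [274]

# Stack the per-file vectors so the first axis is the sample axis Keras expects.
X_train = np.stack([extract_features(f) for f in train_files])  # (n_samples, 40)
y_train = np.asarray(train_labels)                              # (n_samples,)
X_val   = np.stack([extract_features(f) for f in val_files])
y_val   = np.asarray(val_labels)

num_classes = 275

model = Sequential([
    Dense(256, input_shape=(40,), activation='relu'),
    Dropout(0.5),
    Dense(256, activation='relu'),
    Dropout(0.5),
    Dense(num_classes, activation='softmax'),
])

# sparse_categorical_crossentropy accepts integer class labels directly,
# so the targets do not have to be one-hot encoded.
model.compile(loss='sparse_categorical_crossentropy',
              metrics=['accuracy'], optimizer='adam')

model.fit(X_train, y_train, epochs=5, validation_data=(X_val, y_val))

With a full dataset you would build the lists from every recording of every speaker (for example with glob). Keeping categorical_crossentropy also works if the integer labels are one-hot encoded first, e.g. with the LabelBinarizer already imported in the question.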

