How to use KerasClassifier with OneVsRestClassifier?

Problem description

I am trying to use KerasClassifier with OneVsRestClassifier to split a multi-class classification problem into separate binary classification sub-problems.
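
To make the intent concrete, this is the kind of splitting I mean (a toy sketch with LogisticRegression standing in for the Keras model, purely to illustrate the one-vs-rest wrapping; the toy variable names are mine):

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.multiclass import OneVsRestClassifier

X_toy = np.random.rand(100, 8)             # 100 samples, 8 features
y_toy = np.random.randint(0, 3, size=100)  # 3 classes

ovr = OneVsRestClassifier(LogisticRegression())
ovr.fit(X_toy, y_toy)
print(len(ovr.estimators_))                # 3 -> one binary classifier fitted per class

I want the same structure, but with a KerasClassifier as the wrapped estimator and a RandomizedSearchCV on top.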

Here is the code I am using:

import keras as k
import scipy as sp
import scipy.stats  # 'import scipy' alone does not load the stats submodule used below
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.multiclass import OneVsRestClassifier
from sklearn.model_selection import RandomizedSearchCV

def build_keras_model(loss = 'binary_crossentropy', metrics = ['accuracy'], optimiser = 'adam', 
                  learning_rate = 0.001, n_neurons = 30, n_layers = 1, n_classes = 2,
                  l1_reg = 0.001, l2_reg = 0.001, batch_norm = False, dropout = None, 
                  input_shape = (8,)):

    model = k.models.Sequential()

    model.add(k.layers.Dense(n_neurons, 
                             input_shape = input_shape,
                             kernel_regularizer = k.regularizers.l1_l2(l1 = l1_reg, l2 = l2_reg),
                             activation = 'relu'))
    if batch_norm is True:
        model.add(k.layers.BatchNormalization())
    if dropout is not None:
        model.add(k.layers.Dropout(dropout))

    # add any extra hidden layers beyond the first
    for _ in range(n_layers - 1):
        model.add(k.layers.Dense(n_neurons,
                                 kernel_regularizer = k.regularizers.l1_l2(l1 = l1_reg, l2 = l2_reg),
                                 activation = 'relu'))
        if batch_norm is True:
            model.add(k.layers.BatchNormalization())
        if dropout is not None:
            model.add(k.layers.Dropout(dropout))

    model.add(k.layers.Dense(1, activation = 'sigmoid'))

    if optimiser == 'adam':
        koptimiser = k.optimizers.Adam(lr = learning_rate)
    elif optimiser == 'adamax':
        koptimiser = k.optimizers.Adamax(lr = learning_rate)
    elif optimiser == 'nadam':
        koptimiser = k.optimizers.Nadam(lr = learning_rate)
    else:
        # fail fast so koptimiser is never left undefined
        raise ValueError('Unknown optimiser type: {}'.format(optimiser))

    model.compile(optimizer = koptimiser, loss = loss, metrics = metrics)

    model.summary()

    return model

#==============================================================

parameters = {
    'estimator__optimiser': ['adam', 'adamax', 'nadam'],
    'estimator__learning_rate': sp.stats.uniform(0.0005, 0.0015),
    'estimator__epochs': sp.stats.randint(10, 50),
    'estimator__n_neurons': sp.stats.randint(20, 61),
    'estimator__n_layers': sp.stats.randint(1, 3),
    'estimator__n_classes': [2],
    'estimator__batch_size': sp.stats.randint(1, 11),
    'estimator__l1_reg': sp.stats.reciprocal(1e-3, 1e1),
    'estimator__l2_reg': sp.stats.reciprocal(1e-3, 1e1),
    'estimator__dropout': [None],
    'estimator__metrics': [['accuracy']],
    'estimator__loss': ['binary_crossentropy'],
    'estimator__input_shape': [(X_train.shape[1],)]
}


keras_model = OneVsRestClassifier(KerasClassifier(build_fn = build_keras_model, 
                                                  class_weight='balanced',
                                                  verbose = 1))

clf = RandomizedSearchCV(keras_model, 
                         parameters, 
                         n_iter = 1, 
                         scoring = 'balanced_accuracy', 
                         n_jobs = 1, 
                         cv = 5, 
                         random_state = 100)


clf.fit(X_train, y_train)

model = clf.best_estimator_
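
The estimator__ prefixes in the parameters dictionary are meant to route each setting through OneVsRestClassifier down to the wrapped KerasClassifier, i.e. I expect something like the following to hold (an illustrative check, not part of the script above):

keras_model.set_params(estimator__l1_reg = 0.01)        # forwarded to the inner KerasClassifier
print(keras_model.get_params()['estimator__l1_reg'])    # expected to print 0.01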

This code fails with the following error:

/opt/conda/lib/python3.6/site-packages/sklearn/base.py in clone(estimator, safe)
     71             raise RuntimeError('Cannot clone object %s, as the constructor '
     72                                'either does not set or modifies parameter %s' %
---> 73                                (estimator, name))
     74     return new_object
     75

RuntimeError: Cannot clone object , as the constructor either does not set or modifies parameter l1_reg
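
As far as I can tell, the error is raised by a sanity check inside sklearn.base.clone. A simplified sketch of that check, based on my reading of the scikit-learn source (clone_check below is only an illustrative stand-in for the real clone function):

import copy

def clone_check(estimator):
    klass = estimator.__class__
    params = estimator.get_params(deep=False)           # KerasClassifier hands back a copy of its sk_params here
    params = {name: copy.deepcopy(value) for name, value in params.items()}
    new_object = klass(**params)                        # rebuild the estimator from those params
    params_set = new_object.get_params(deep=False)      # read them back out of the rebuilt estimator
    for name in params:
        if params[name] is not params_set[name]:        # identity check, not an equality check
            raise RuntimeError('Cannot clone object %s, as the constructor '
                               'either does not set or modifies parameter %s'
                               % (estimator, name))
    return new_object

So the clone only succeeds if every parameter survives the construct-and-read-back round trip as the very same object. My guess is that the way KerasClassifier stores the build_fn keyword arguments (get_params returns a copy of them rather than the originals) makes this identity check fail for l1_reg once OneVsRestClassifier tries to clone the estimator inside the search, but I don't see how to work around it.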

Tags: python, keras, scikit-learn

Solution

