
python - Keras: weighted binary cross-entropy implementation


I'm new to Keras (and to ML in general), and I'm trying to train a binary classifier. I'm using weighted binary cross-entropy as the loss function, but I'm not sure how to test whether my implementation is correct.

Is this an accurate implementation of weighted binary cross-entropy? And how could I test whether it is?

def weighted_binary_crossentropy(self, y_true, y_pred):
    logloss = -(y_true * K.log(y_pred) * self.weights[0] + \
                (1 - y_true) * K.log(1 - y_pred) * self.weights[1])

    return K.mean(logloss, axis=-1)
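
As written, the formula matches the standard per-class weighting of binary cross-entropy: weights[0] scales the positive-class (y_true = 1) term and weights[1] the negative-class term. Note that, unlike Keras's built-in loss, it never clips y_pred away from 0 and 1, so K.log can produce -inf (the numerically stable version in the answer below handles this). One quick test is to evaluate it on a tiny batch and compare against the same numbers computed by hand. Below is a minimal sketch of such a check (my own, not from the original post), with a standalone copy of the function and made-up weights:

import numpy as np
from keras import backend as K

weights = [2.0, 1.0]  # hypothetical [positive-class, negative-class] weights

def weighted_bce(y_true, y_pred):
    # standalone copy of the formula above, with `weights` as a plain list
    logloss = -(y_true * K.log(y_pred) * weights[0] +
                (1 - y_true) * K.log(1 - y_pred) * weights[1])
    return K.mean(logloss, axis=-1)

y_true = K.variable(np.array([[1., 0.]]))
y_pred = K.variable(np.array([[0.9, 0.2]]))

# by hand: mean(2 * -log(0.9), 1 * -log(0.8)) = mean(0.2107, 0.2231) ≈ 0.2169
print(K.eval(weighted_bce(y_true, y_pred)))  # ~[0.2169]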

Best Answer

On top of the true-vs-pred loss, the Keras train and val losses include the regularization losses. Below is a simple testing scheme, along with working implementations of binary_crossentropy and of the l1/l2 weight (not 'activity') losses.
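
Concretely (my paraphrase, assuming all sample weights are nonzero), the number model.evaluate reports is

    total_loss = mean(sample_weight * BCE(y_true, y_pred))
                 + sum over regularized tensors w of (l1 * sum(|w|) + l2 * sum(w**2))

so a from-scratch reimplementation has to reproduce both terms before it can match Keras's output.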

UPDATE: a more complete implementation of the weights loss.


Working implementation (numerically stable version):

import numpy as np
from keras import backend as K  # only used for K.epsilon()

def binary_crossentropy(y_true, y_pred, sample_weight=1):
    if len(y_pred.shape) == 1:
        y_pred = np.atleast_2d(y_pred).T
    # clip predictions away from 0 and 1, as Keras does internally
    y_pred = [max(min(pred[0], 1 - K.epsilon()), K.epsilon()) for pred in y_pred]
    y_true, y_pred, sample_weight = force_2d_shape([y_true, y_pred, sample_weight])

    logits = np.log(y_pred) - np.log(1 - y_pred)  # sigmoid inverse
    neg_abs_logits = -np.abs(logits)
    relu_logits = (logits > 0) * logits

    # numerically stable BCE: max(x, 0) - x*z + log(1 + exp(-|x|))
    loss_vec = relu_logits - logits * y_true + np.log(1 + np.exp(neg_abs_logits))
    return np.mean(sample_weight * loss_vec)

def force_2d_shape(arr_list):
    # promote any non-2D array to a column vector
    for arr_idx, arr in enumerate(arr_list):
        if len(np.array(arr).shape) != 2:
            arr_list[arr_idx] = np.atleast_2d(arr).T
    return arr_list
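
The rearranged loss_vec is the same numerically stable form TensorFlow uses in tf.nn.sigmoid_cross_entropy_with_logits: with logits x = log(p / (1 - p)) and labels z,

    -z*log(p) - (1 - z)*log(1 - p)  =  max(x, 0) - x*z + log(1 + exp(-|x|))

which never feeds a large positive argument to exp(), so it cannot overflow.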
def l1l2_weight_loss(model):
    l1l2_loss = 0
    for layer in model.layers:
        if 'layer' in layer.__dict__ or 'cell' in layer.__dict__:
            l1l2_loss += _l1l2_rnn_loss(layer)
            continue

        if 'kernel_regularizer' in layer.__dict__ or \
           'bias_regularizer'   in layer.__dict__:
            l1l2_lambda_k, l1l2_lambda_b = [0, 0], [0, 0]  # defaults
            if layer.__dict__['kernel_regularizer'] is not None:
                l1l2_lambda_k = list(layer.kernel_regularizer.__dict__.values())
            if layer.__dict__['bias_regularizer'] is not None:
                l1l2_lambda_b = list(layer.bias_regularizer.__dict__.values())

            if any([(_lambda != 0) for _lambda in (l1l2_lambda_k + l1l2_lambda_b)]):
                W = layer.get_weights()

                for idx, _lambda in enumerate(l1l2_lambda_k + l1l2_lambda_b):
                    if _lambda != 0:
                        _pow = 2 ** (idx % 2)  # 1 if idx is even (l1), 2 if odd (l2)
                        l1l2_loss += _lambda * np.sum(np.abs(W[idx // 2]) ** _pow)
    return l1l2_loss
def _l1l2_rnn_loss(layer):
    l1l2_loss = 0
    if 'backward_layer' in layer.__dict__:
        bidirectional = True
        _layer = layer.layer
    else:
        _layer = layer
        bidirectional = False
    ldict = _layer.cell.__dict__

    if 'kernel_regularizer'    in ldict or \
       'recurrent_regularizer' in ldict or \
       'bias_regularizer'      in ldict:
        l1l2_lambda_k, l1l2_lambda_r, l1l2_lambda_b = [0, 0], [0, 0], [0, 0]
        if ldict['kernel_regularizer'] is not None:
            l1l2_lambda_k = list(_layer.kernel_regularizer.__dict__.values())
        if ldict['recurrent_regularizer'] is not None:
            l1l2_lambda_r = list(_layer.recurrent_regularizer.__dict__.values())
        if ldict['bias_regularizer'] is not None:
            l1l2_lambda_b = list(_layer.bias_regularizer.__dict__.values())

        all_lambda = l1l2_lambda_k + l1l2_lambda_r + l1l2_lambda_b
        if any([(_lambda != 0) for _lambda in all_lambda]):
            W = layer.get_weights()
            idx_incr = len(W) // 2  # accounts for 'use_bias'

            for idx, _lambda in enumerate(all_lambda):
                if _lambda != 0:
                    _pow = 2 ** (idx % 2)  # 1 if idx is even (l1), 2 if odd (l2)
                    l1l2_loss += _lambda * np.sum(np.abs(W[idx // 2]) ** _pow)
                    if bidirectional:
                        l1l2_loss += _lambda * np.sum(
                            np.abs(W[idx // 2 + idx_incr]) ** _pow)
    return l1l2_loss
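
If you want to cross-check l1l2_weight_loss against the framework itself, Keras collects every regularization penalty as a tensor in model.losses. A minimal sketch of that comparison (my addition, and version-dependent: it assumes a graph-mode backend where K.eval can evaluate these tensors directly):

from keras import backend as K

def keras_reg_loss(model):
    # sum the regularizer loss tensors Keras tracks internally
    return sum(K.eval(loss_tensor) for loss_tensor in model.losses)

# with the model from the test below:
# assert np.isclose(keras_reg_loss(model), l1l2_weight_loss(model))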

Testing the implementation:

from keras.layers import Input, Dense, LSTM, GRU, Bidirectional
from keras.models import Model
from keras.regularizers import l1, l2, l1_l2
import numpy as np

ipt = Input(shape=(1200, 16))
x   = LSTM(60, activation='relu', return_sequences=True,
           recurrent_regularizer=l2(1e-3))(ipt)
x   = Bidirectional(GRU(60, activation='relu', bias_regularizer=l1(1e-4)))(x)
out = Dense(1, activation='sigmoid', kernel_regularizer=l1_l2(2e-4))(x)
model = Model(ipt, out)

model.compile(loss='binary_crossentropy', optimizer='adam')
X = np.random.rand(10, 1200, 16)  # (batch_size, timesteps, input_dim)
Y = np.random.randint(0, 2, (10, 1))
class_weights  = {'0': 1, '1': 6}
sample_weights = np.array([class_weights[str(label[0])] for label in Y])

keras_loss   = model.evaluate(X, Y, sample_weight=sample_weights)
custom_loss  = binary_crossentropy(Y, model.predict(X), sample_weights)
custom_loss += l1l2_weight_loss(model)

print('%.6f' % keras_loss  + ' -- keras_loss')
print('%.6f' % custom_loss + ' -- custom_loss')
0.763822 -- keras_loss
0.763822 -- custom_loss
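
A side note tying this back to the question: constructing sample_weights from a per-class dict by hand, as above, mirrors (as I understand Keras's handling) what the class_weight argument to fit does internally, so for training (not evaluation) you could equivalently write:

model.fit(X, Y, epochs=1, class_weight={0: 1, 1: 6})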

Regarding python - Keras: weighted binary cross-entropy implementation, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/50925072/
