assert len(operator.inputs) == 1
assert operator.specific_config["stateful"] is False
assert operator.specific_config["go_backwards"] is False

x = operator.inputs[0]

# Weights are resolved by TensorFlow-style variable name ("kernel:0", ...).
w_input = converter.create_constant_variable(operator, "kernel:0", OrderCN)
w_hidden = converter.create_constant_variable(operator, "recurrent_kernel:0", OrderCN)
if operator.specific_config["use_bias"]:
    b = converter.create_constant_variable(operator, "bias:0", OrderC)
else:
    b = None

# Layer attributes are read out of the string-keyed specific_config dict.
lstm_opr = LSTM(None, operator.specific_config["use_bias"], operator.specific_config["return_sequences"],
                use_initial_c=False, use_initial_h=False,
                activation=operator.specific_config["activation"],
                recurrent_activation=operator.specific_config["recurrent_activation"])
y, _ = lstm_opr(x, w_input, w_hidden, b)

operator.outputs = [y]
After Change
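The rewritten handler reads attributes directly from the Keras layer object k_op (kernel, recurrent_kernel, bias, use_bias, return_sequences, and the activations) instead of looking them up by string in specific_config, and it wires inputs and outputs through the converter's get_variable/set_variable calls rather than mutating operator.inputs and operator.outputs by hand.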
assert k_op.stateful is False, "[KerasConverter] Currently, LSTM.stateful is not supported"
assert k_op.go_backwards is False, "[KerasConverter] Currently, LSTM.go_backwards is not supported"

x = converter.get_variable(converter.get_input_tensor(k_op)[0])

# Weights now come straight from the Keras layer object's attributes.
w_input = converter.convert_to_constant_variable(k_op.kernel, OrderCN)
w_hidden = converter.convert_to_constant_variable(k_op.recurrent_kernel, OrderCN)
if k_op.use_bias:
    b = converter.convert_to_constant_variable(k_op.bias, OrderC)
else:
    b = None

# As in the old code, the call returns a pair; only the output y is kept.
y, _ = LSTM(None, k_op.use_bias, k_op.return_sequences,
            use_initial_c=False, use_initial_h=False,
            activation=k_op.activation,
            recurrent_activation=k_op.recurrent_activation)(x, w_input, w_hidden, b)

converter.set_variable(converter.get_output_tensor(k_op)[0], y)
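For context, this body would typically sit inside a conversion function registered on the converter (WebDNN uses a @KerasConverter.register_handler("LSTM") decorator for this), and it gets exercised whenever a Keras model containing an LSTM layer is converted. A minimal usage sketch, assuming WebDNN's documented Keras frontend API; the toy model shapes are illustrative:

import keras
from webdnn.frontend.keras import KerasConverter

# A toy model whose LSTM layer is routed through the handler above:
# 10 timesteps of 32 features, a 64-unit LSTM, then a softmax classifier.
model = keras.models.Sequential([
    keras.layers.LSTM(64, input_shape=(10, 32)),
    keras.layers.Dense(5, activation="softmax"),
])

# Convert the Keras model into a WebDNN computation graph.
graph = KerasConverter(batch_size=1).convert(model)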