[remove fluid.layers.cross_entropy] remove unit tests (part 2) #48913

Merged
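This PR continues migrating the unit tests off the removed `fluid.layers.cross_entropy` API. Every call site is rewritten to `paddle.nn.functional.cross_entropy` with `reduction='none'` and `use_softmax=False`, which preserves the old operator's semantics: it consumed already-softmaxed probabilities and returned an unreduced per-sample loss, leaving any `paddle.mean` or `paddle.sum` aggregation to the caller. A minimal sketch of the equivalence (illustrative values, assuming Paddle 2.x dygraph; not part of this diff):

```python
import numpy as np
import paddle

# Probabilities as the old API expected them: already softmax-normalized.
probs = paddle.to_tensor([[0.1, 0.9], [0.8, 0.2]], dtype='float32')
labels = paddle.to_tensor([1, 0], dtype='int64')

# use_softmax=False skips the built-in softmax; reduction='none' keeps the
# per-sample losses, matching the removed operator's output.
loss = paddle.nn.functional.cross_entropy(
    probs, labels, reduction='none', use_softmax=False
)

# Each entry is -log(probability assigned to the true class).
np.testing.assert_allclose(loss.numpy(), -np.log([0.9, 0.8]), rtol=1e-5)
```

Keeping the reduction at the call sites matters because the tests aggregate differently (most take `paddle.mean(cost)`, one pipeline test takes `paddle.sum(cost)`); the new API's defaults (softmax plus mean reduction) would silently change those numerics.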
11 changes: 9 additions & 2 deletions python/paddle/fluid/tests/unittests/test_adam_op.py
@@ -806,7 +806,12 @@ def _test(
input=fc_1, size=2, param_attr=weight_attr2, act='softmax'
)

-cost = fluid.layers.cross_entropy(input=prediction, label=label)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction,
+    label=label,
+    reduction='none',
+    use_softmax=False,
+)
loss = paddle.mean(cost)
beta1_init = 0.9
beta2_init = 0.999
@@ -966,7 +971,9 @@ def test_adam_exception(self):
fc_1 = fluid.layers.fc(input=z, size=128)
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')

-cost = fluid.layers.cross_entropy(input=prediction, label=label)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
loss = paddle.mean(cost)
adam = fluid.optimizer.Adam(use_global_beta_pow=True)
adam.minimize(loss)
@@ -57,7 +57,9 @@ def convolutional_neural_network(use_py_reader):
)

prediction = fluid.layers.fc(input=conv_pool_2, size=10, act='softmax')
-loss = fluid.layers.cross_entropy(input=prediction, label=label)
+loss = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
avg_loss = paddle.mean(loss)
acc = paddle.static.accuracy(input=prediction, label=label)
i = fluid.layers.zeros(shape=[1], dtype='int64')
4 changes: 3 additions & 1 deletion python/paddle/fluid/tests/unittests/test_compiled_program.py
@@ -107,7 +107,9 @@ def build_simple_model(self):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
prediction = fluid.layers.fc(input=img, size=10, act='softmax')
-loss = fluid.layers.cross_entropy(input=prediction, label=label)
+loss = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
avg_loss = paddle.mean(loss)

def compile_program_not_compiled(self):
9 changes: 7 additions & 2 deletions python/paddle/fluid/tests/unittests/test_cross_entropy_op.py
@@ -17,6 +17,7 @@
import numpy as np
from op_test import OpTest, randomize_probability

+import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard
@@ -419,7 +420,9 @@ def test_Variable():
lab1 = fluid.create_lod_tensor(
np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace()
)
-fluid.layers.cross_entropy(x1, lab1)
+paddle.nn.functional.cross_entropy(
+    x1, lab1, reduction='none', use_softmax=False
+)

self.assertRaises(TypeError, test_Variable)

@@ -432,7 +435,9 @@ def test_dtype():
lab2 = fluid.layers.data(
name='lab2', shape=[3, 4, 5, 6], dtype="int32"
)
-fluid.layers.cross_entropy(x2, lab2)
+paddle.nn.functional.cross_entropy(
+    x2, lab2, reduction='none', use_softmax=False
+)

self.assertRaises(TypeError, test_dtype)

@@ -67,7 +67,12 @@ def simple_fc_net(places, use_legacy_py_reader, use_double_buffer):
hidden, size=CLASS_NUM, act='softmax'
)
loss = paddle.mean(
-    fluid.layers.cross_entropy(input=predict_label, label=label)
+    paddle.nn.functional.cross_entropy(
+        input=predict_label,
+        label=label,
+        reduction='none',
+        use_softmax=False,
+    )
)

optimizer = fluid.optimizer.Adam()
16 changes: 12 additions & 4 deletions python/paddle/fluid/tests/unittests/test_desc_clone.py
@@ -73,7 +73,9 @@ def get_model(batch_size):

# Train program
predict = cnn_model(images)
-cost = fluid.layers.cross_entropy(input=predict, label=label)
+cost = paddle.nn.functional.cross_entropy(
+    input=predict, label=label, reduction='none', use_softmax=False
+)
avg_cost = paddle.mean(x=cost)

# Evaluator
@@ -188,9 +190,11 @@ def test_clone_with_stop_gradient(self):
hidden1 = fluid.layers.fc(input=img, size=200, act='relu')
hidden1.stop_gradient = True
hidden2 = fluid.layers.dropout(hidden1, dropout_prob=0.5)
-loss = fluid.layers.cross_entropy(
+loss = paddle.nn.functional.cross_entropy(
input=fluid.layers.fc(hidden2, size=10, act='softmax'),
label=fluid.layers.data(name='label', shape=[1], dtype='int64'),
+    reduction='none',
+    use_softmax=False,
)
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=False)
@@ -226,9 +230,11 @@ def false_fn():

hidden2 = fluid.layers.cond(cond, true_fn, false_fn)

-loss = fluid.layers.cross_entropy(
+loss = paddle.nn.functional.cross_entropy(
input=fluid.layers.fc(hidden2, size=10, act='softmax'),
label=fluid.layers.data(name='label', shape=[1], dtype='int64'),
+    reduction='none',
+    use_softmax=False,
)
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=False)
@@ -266,9 +272,11 @@ def false_fn():
return hidden2

hidden2 = fluid.layers.cond(cond, true_fn, false_fn)
-loss = fluid.layers.cross_entropy(
+loss = paddle.nn.functional.cross_entropy(
input=fluid.layers.fc(hidden2, size=10, act='softmax'),
label=fluid.layers.data(name='label', shape=[1], dtype='int64'),
+    reduction='none',
+    use_softmax=False,
)
avg_loss = paddle.mean(loss)
test_program = train_program.clone(for_test=False)
@@ -52,8 +52,8 @@ def test_a_sync_optimizer1(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

@@ -68,8 +68,8 @@ def test_a_sync_optimizer3(self):
fc_1 = paddle.fluid.layers.fc(input=x_embedding, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

@@ -56,8 +56,8 @@ def test_a_sync_optimizer2(self):
fc_1 = paddle.fluid.layers.fc(input=emb, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)
os.environ["FLAGS_LAUNCH_BARRIER"] = "0"
@@ -51,8 +51,8 @@ def test_a_sync_optimizer_trainer(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

@@ -84,8 +84,8 @@ def test_a_sync_optimizer_pserver(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

@@ -150,7 +150,9 @@ def embedding_layer(input):

with fluid.device_guard("gpu"):
labels = fluid.layers.cast(inputs[-1], dtype="int64")
-cost = fluid.layers.cross_entropy(input=predict, label=labels)
+cost = paddle.nn.functional.cross_entropy(
+    input=predict, label=labels, reduction='none', use_softmax=False
+)
avg_cost = paddle.sum(cost)

return avg_cost
@@ -112,7 +112,9 @@ def forward(self, inputs, label):
x = paddle.reshape(x, shape=[-1, self.pool_2_shape])
cost = self._linear(x)
cost = paddle.nn.functional.softmax(cost)
-loss = fluid.layers.cross_entropy(cost, label)
+loss = paddle.nn.functional.cross_entropy(
+    cost, label, reduction='none', use_softmax=False
+)
avg_loss = paddle.mean(loss)
return avg_loss

@@ -138,7 +138,9 @@ def test_mnist_forward_float32(self):
label.stop_gradient = True

cost = mnist(img)
-loss = fluid.layers.cross_entropy(cost, label)
+loss = paddle.nn.functional.cross_entropy(
+    cost, label, reduction='none', use_softmax=False
+)
avg_loss = paddle.mean(loss)

dy_out = avg_loss.numpy()
@@ -167,7 +169,9 @@ def test_mnist_forward_float32(self):
)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
cost = mnist(img)
-loss = fluid.layers.cross_entropy(cost, label)
+loss = paddle.nn.functional.cross_entropy(
+    cost, label, reduction='none', use_softmax=False
+)
avg_loss = paddle.mean(loss)

# initialize params and fetch them
@@ -45,7 +45,9 @@ def simple_fc_net():
),
)
prediction = fluid.layers.fc(hidden, size=10, act='softmax')
-loss = fluid.layers.cross_entropy(input=prediction, label=label)
+loss = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
loss = paddle.mean(loss)
optimizer = fluid.optimizer.Adam(learning_rate=1e-3)
optimizer.minimize(loss)
@@ -43,7 +43,9 @@ def gru_net(
gru_max_tanh = paddle.tanh(gru_max)
fc1 = fluid.layers.fc(input=gru_max_tanh, size=hid_dim2, act='tanh')
prediction = fluid.layers.fc(input=fc1, size=class_dim, act='softmax')
-cost = fluid.layers.cross_entropy(input=prediction, label=label)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
avg_cost = paddle.mean(x=cost)
return avg_cost

@@ -45,7 +45,9 @@ def lstm_net(
lstm_max_tanh = paddle.tanh(lstm_max)
fc1 = fluid.layers.fc(input=lstm_max_tanh, size=hid_dim2, act='tanh')
prediction = fluid.layers.fc(input=fc1, size=class_dim, act='softmax')
-cost = fluid.layers.cross_entropy(input=prediction, label=label)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
avg_cost = paddle.mean(x=cost)
return avg_cost

@@ -67,7 +67,12 @@ def _simple_fc_net(self, in_size, label_size, class_num, hidden_sizes):

predict_label = fluid.layers.fc(hidden, size=class_num, act='softmax')
loss = paddle.mean(
-    fluid.layers.cross_entropy(input=predict_label, label=label)
+    paddle.nn.functional.cross_entropy(
+        input=predict_label,
+        label=label,
+        reduction='none',
+        use_softmax=False,
+    )
)

optimizer = fluid.optimizer.Adam()
4 changes: 3 additions & 1 deletion python/paddle/fluid/tests/unittests/test_fetch_unmerged.py
@@ -46,7 +46,9 @@ def conv_net(self, img, label):
)
hidden = fluid.layers.fc(input=conv_pool_2, size=32, act='relu')
prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
-loss = fluid.layers.cross_entropy(input=prediction, label=label)
+loss = paddle.nn.functional.cross_entropy(
+    input=prediction, label=label, reduction='none', use_softmax=False
+)
avg_loss = paddle.mean(loss)
return avg_loss, prediction

4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/unittests/test_fleet_auto.py
@@ -40,8 +40,8 @@ def test_distributed_strategy_auto(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

7 changes: 6 additions & 1 deletion python/paddle/fluid/tests/unittests/test_fleet_base.py
@@ -203,7 +203,12 @@ def test_single_error():

fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction,
+    label=input_y,
+    reduction='none',
+    use_softmax=False,
+)
avg_cost = paddle.mean(x=cost)
fleet.init(is_collective=True)

4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/unittests/test_fleet_base_2.py
@@ -54,8 +54,8 @@ def test_ps_minimize(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

8 changes: 4 additions & 4 deletions python/paddle/fluid/tests/unittests/test_fleet_base_3.py
@@ -40,8 +40,8 @@ def test_collective_minimize(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

@@ -71,8 +71,8 @@ def test_fleet_get_applied_optimizer(self):
fc_1 = paddle.fluid.layers.fc(input=input_x, size=64, act='tanh')
fc_2 = paddle.fluid.layers.fc(input=fc_1, size=64, act='tanh')
prediction = paddle.fluid.layers.fc(input=[fc_2], size=2, act='softmax')
-cost = paddle.fluid.layers.cross_entropy(
-    input=prediction, label=input_y
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
)
avg_cost = paddle.mean(x=cost)

8 changes: 6 additions & 2 deletions python/paddle/fluid/tests/unittests/test_fleet_base_single.py
@@ -85,7 +85,9 @@ def test_single_run_collective_minimize(self):

fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
+)
avg_cost = paddle.mean(x=cost)

fleet.init(is_collective=True)
@@ -124,7 +126,9 @@ def test_single_run_ps_minimize(self):

fc_1 = fluid.layers.fc(input=input_x, size=64, act='tanh')
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
-cost = fluid.layers.cross_entropy(input=prediction, label=input_y)
+cost = paddle.nn.functional.cross_entropy(
+    input=prediction, label=input_y, reduction='none', use_softmax=False
+)
avg_cost = paddle.mean(x=cost)

fleet.init()
4 changes: 3 additions & 1 deletion python/paddle/fluid/tests/unittests/test_fuse_bn_act_pass.py
@@ -53,7 +53,9 @@ def build_program(self, main_program, startup_program, use_cuda, seed=1):
input=hidden3, act='relu', data_layout='NHWC'
)
prediction = fluid.layers.fc(input=hidden4, size=10, act='softmax')
-loss = fluid.layers.cross_entropy(input=prediction, label=y)
+loss = paddle.nn.functional.cross_entropy(
+    input=prediction, label=y, reduction='none', use_softmax=False
+)
loss = paddle.mean(loss)
sgd = fluid.optimizer.SGD(learning_rate=0.001)
if use_cuda: