Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add parameter in histogram op and add histogram_bin_edges op #56771

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/op_compat.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -1370,7 +1370,7 @@

- op : histogram
inputs :
input : X
{input : X, weight : Weight}
outputs :
out : Out

Expand Down
3 changes: 2 additions & 1 deletion paddle/phi/api/yaml/ops.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -1144,12 +1144,13 @@
backward : heaviside_grad

- op : histogram
args : (Tensor input, int64_t bins = 100, int min = 0, int max = 0)
args : (Tensor input, Tensor weight, int64_t bins = 100, int min = 0, int max = 0, bool density = false)
output : Tensor(out)
infer_meta :
func : HistogramInferMeta
kernel :
func : histogram
optional : weight

- op : huber_loss
args : (Tensor input, Tensor label, float delta)
Expand Down
54 changes: 54 additions & 0 deletions paddle/phi/infermeta/binary.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1711,6 +1711,60 @@ void GridSampleBaseInferMeta(const MetaTensor& x,
out->share_lod(x);
}

// Infers the output meta (shape / dtype) of the histogram op.
//
// input:   tensor whose values are binned.
// weight:  optional per-element weights; when present it must match the
//          shape and dtype of `input`.
// bins:    number of histogram buckets; must be >= 1.
// min/max: bucket range; `max` must be >= `min` (both equal to 0 means
//          the range is derived from the data at kernel time).
// density: when true the output is a FLOAT32 normalized density,
//          otherwise counts / summed weights in the input dtype.
void HistogramInferMeta(const MetaTensor& input,
                        const MetaTensor& weight,
                        int64_t bins,
                        int min,
                        int max,
                        bool density,
                        MetaTensor* out) {
  auto input_dim = input.dims();
  if (weight) {
    auto weight_dim = weight.dims();
    // Weights are applied element-wise, so shape and dtype must match input.
    PADDLE_ENFORCE_EQ(
        weight_dim,
        input_dim,
        phi::errors::InvalidArgument(
            "The 'shape' of Input(Weight) must be equal to the 'shape' of "
            "Input(X). But received: the 'shape' of Input(Weight) is [%s], "
            "the 'shape' of Input(X) is [%s].",
            weight_dim,
            input_dim));
    PADDLE_ENFORCE_EQ(
        input.dtype() == weight.dtype(),
        true,
        phi::errors::InvalidArgument(
            "The 'dtype' of Input(Weight) must be equal to the 'dtype' of "
            "Input(X). But received: the 'dtype' of Input(Weight) is [%s], "
            "the 'dtype' of Input(X) is [%s].",
            weight.dtype(),
            input.dtype()));
  }
  PADDLE_ENFORCE_GE(bins,
                    1,
                    phi::errors::InvalidArgument(
                        "The bins should be greater than or equal to 1. "
                        "But received nbins is %d.",
                        bins));
  PADDLE_ENFORCE_GE(
      max,
      min,
      phi::errors::InvalidArgument("max must be larger or equal to min. "
                                   "But received max is %d, min is %d.",
                                   max,
                                   min));

  // The histogram always has exactly `bins` entries.
  out->set_dims({bins});
  out->share_lod(input);
  // Density mode normalizes the counts, so the result is floating point
  // regardless of the input dtype; otherwise the output keeps the input
  // dtype (counts or summed weights).
  if (density) {
    out->set_dtype(DataType::FLOAT32);
  } else {
    out->set_dtype(input.dtype());
  }
}

void HuberLossInferMeta(const MetaTensor& input,
const MetaTensor& label,
float delta,
Expand Down
8 changes: 8 additions & 0 deletions paddle/phi/infermeta/binary.h
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,14 @@ void GridSampleBaseInferMeta(const MetaTensor& x,
MetaTensor* out,
MetaConfig config = MetaConfig());

// Infers the output meta for the histogram op: a 1-D tensor of `bins`
// elements. `weight` is optional (may be uninitialized); when `density`
// is true the output dtype is FLOAT32, otherwise it follows `input`.
void HistogramInferMeta(const MetaTensor& input,
const MetaTensor& weight,
int64_t bins,
int min,
int max,
bool density,
MetaTensor* out);

void HuberLossInferMeta(const MetaTensor& input_meta,
const MetaTensor& label_meta,
float delta,
Expand Down
21 changes: 0 additions & 21 deletions paddle/phi/infermeta/unary.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1754,27 +1754,6 @@ void GumbelSoftmaxInferMeta(const MetaTensor& x,
UnchangedInferMetaCheckAxis(x, axis, out);
}

// Sets the inferred meta of the histogram output: a 1-D INT64 tensor
// with exactly `bins` elements, sharing LoD with the input.
void HistogramInferMeta(
    const MetaTensor& input, int64_t bins, int min, int max, MetaTensor* out) {
  // A histogram needs at least one bucket.
  PADDLE_ENFORCE_GE(
      bins,
      1,
      phi::errors::InvalidArgument("The bins should be greater than or "
                                   "equal to 1.But received nbins is %d",
                                   bins));
  // The bucket range must be non-empty (or the 0/0 "auto" sentinel).
  PADDLE_ENFORCE_GE(max,
                    min,
                    phi::errors::InvalidArgument(
                        "max must be larger or equal to min.But received "
                        "max is %d, min is %d",
                        max,
                        min));

  out->share_lod(input);
  out->set_dims({bins});
  out->set_dtype(DataType::INT64);
}

void IdentityLossInferMeta(const MetaTensor& x,
int reduction,
MetaTensor* out) {
Expand Down
2 changes: 0 additions & 2 deletions paddle/phi/infermeta/unary.h
Original file line number Diff line number Diff line change
Expand Up @@ -266,8 +266,6 @@ void GumbelSoftmaxInferMeta(const MetaTensor& x,
bool hard,
int axis,
MetaTensor* out);
void HistogramInferMeta(
const MetaTensor& input, int64_t bins, int min, int max, MetaTensor* out);

void IdentityLossInferMeta(const MetaTensor& x, int reduction, MetaTensor* out);

Expand Down
59 changes: 47 additions & 12 deletions paddle/phi/kernels/cpu/histogram_kernel.cc
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
Expand All @@ -23,9 +23,11 @@ namespace phi {
template <typename T, typename Context>
void HistogramKernel(const Context& dev_ctx,
const DenseTensor& input,
const paddle::optional<DenseTensor>& weight,
int64_t bins,
int min,
int max,
bool density,
DenseTensor* output) {
auto& nbins = bins;
auto& minval = min;
Expand All @@ -34,11 +36,11 @@ void HistogramKernel(const Context& dev_ctx,
const T* input_data = input.data<T>();
auto input_numel = input.numel();

int64_t* out_data = dev_ctx.template Alloc<int64_t>(output);
phi::funcs::SetConstant<Context, int64_t>()(
dev_ctx, output, static_cast<int64_t>(0));

if (input_data == nullptr) return;
if (input_data == nullptr) {
dev_ctx.template Alloc<T>(output);
phi::funcs::SetConstant<Context, T>()(dev_ctx, output, static_cast<T>(0));
return;
}

T output_min = static_cast<T>(minval);
T output_max = static_cast<T>(maxval);
Expand Down Expand Up @@ -67,11 +69,44 @@ void HistogramKernel(const Context& dev_ctx,
maxval,
minval));

for (int64_t i = 0; i < input_numel; i++) {
if (input_data[i] >= output_min && input_data[i] <= output_max) {
const int64_t bin = (int64_t)((input_data[i] - output_min) * nbins /
(output_max - output_min));
out_data[std::min(bin, nbins - 1)] += 1;
bool has_weight = weight.is_initialized();
auto weight_data =
(weight.get_ptr() == nullptr ? nullptr : weight.get_ptr()->data<T>());

// compute output
if (density) {
T total = static_cast<T>(0);
for (int64_t i = 0; i < input_numel; i++) {
if (input_data[i] >= output_min && input_data[i] <= output_max) {
total +=
has_weight ? static_cast<T>(weight_data[i]) : static_cast<T>(1);
}
}
float* out_data = dev_ctx.template Alloc<float>(output);
phi::funcs::SetConstant<Context, float>()(
dev_ctx, output, static_cast<float>(0));

const float interval_len =
static_cast<float>(output_max - output_min) / nbins;
for (int64_t i = 0; i < input_numel; i++) {
if (input_data[i] >= output_min && input_data[i] <= output_max) {
const int64_t bin = (int64_t)((input_data[i] - output_min) * nbins /
(output_max - output_min));
T weight_idx = weight_data == nullptr ? 1 : weight_data[i];
out_data[std::min(bin, nbins - 1)] +=
(static_cast<float>(weight_idx) / total) / interval_len;
}
}
} else {
T* out_data = dev_ctx.template Alloc<T>(output);
phi::funcs::SetConstant<Context, T>()(dev_ctx, output, static_cast<T>(0));
for (int64_t i = 0; i < input_numel; i++) {
if (input_data[i] >= output_min && input_data[i] <= output_max) {
const int64_t bin = (int64_t)((input_data[i] - output_min) * nbins /
(output_max - output_min));
T weight_idx = weight_data == nullptr ? 1 : weight_data[i];
out_data[std::min(bin, nbins - 1)] += weight_idx;
}
}
}
}
Expand All @@ -86,5 +121,5 @@ PD_REGISTER_KERNEL(histogram,
double,
int,
int64_t) {
kernel->OutputAt(0).SetDataType(paddle::DataType::INT64);
kernel->OutputAt(0).SetDataType(phi::DataType::UNDEFINED);
}
Loading