【Error Message No. 34】 paddle/phi* (#62861)
* fix paddle/phi*

* fix

* fix
jinyouzhi authored Mar 20, 2024
1 parent 6925c9d commit 09e91bc
Showing 1 changed file with 56 additions and 11 deletions.
67 changes: 56 additions & 11 deletions paddle/phi/kernels/fusion/cutlass/fused_conv2d_add_act_kernel.cu
@@ -51,19 +51,53 @@ void FusedConv2dAddActKernel(const Context& ctx,
auto in_dims = x.dims();
auto filter_dims = filter.dims();
auto out_dims = output->dims();
-  CHECK_EQ(in_dims.size() == 4UL, true);
-  CHECK_EQ(filter_dims.size() == 4UL, true);
-  CHECK_EQ(strides.size() == 2UL, true);
-  CHECK_EQ(dilations.size() == 2UL, true);
+  PADDLE_ENFORCE_EQ(
+      in_dims.size(),
+      4UL,
+      phi::errors::InvalidArgument(
+          "The input tensor X's dimensions should be 4, but got %d.",
+          in_dims.size()));
+  PADDLE_ENFORCE_EQ(
+      filter_dims.size(),
+      4UL,
+      phi::errors::InvalidArgument(
+          "The input tensor filter's dimensions must be 4, but got %d.",
+          filter_dims.size()));
+  PADDLE_ENFORCE_EQ(
+      strides.size(),
+      2UL,
+      phi::errors::InvalidArgument("The size of strides must be 2, but got %d.",
+                                   strides.size()));
+  PADDLE_ENFORCE_EQ(
+      dilations.size(),
+      2UL,
+      phi::errors::InvalidArgument(
+          "The size of dilations must be 2, but got %d.", dilations.size()));

-  CHECK_EQ(padding_algorithm == "EXPLICIT", true);
-  CHECK_EQ(data_format == "NHWC", true);
+  PADDLE_ENFORCE_EQ(padding_algorithm,
+                    "EXPLICIT",
+                    phi::errors::InvalidArgument(
+                        "The padding_algorithm must be EXPLICIT, but got %s.",
+                        padding_algorithm));
+  PADDLE_ENFORCE_EQ(
+      data_format,
+      "NHWC",
+      phi::errors::InvalidArgument("The data_format must be NHWC, but got %s.",
+                                   data_format));
const int batch = in_dims[0];
const int ic = in_dims[3];
const int ih = in_dims[1];
const int iw = in_dims[2];

-  CHECK_EQ(ic == groups * filter_dims[3], true);
+  PADDLE_ENFORCE_EQ(
+      ic,
+      groups * filter_dims[3],
+      phi::errors::InvalidArgument(
+          "The last dimension of X (%d) must be equal to "
+          "groups (%d) multiply the last dimension of filter (%d).",
+          ic,
+          groups,
+          filter_dims[3]));
int pad_h0 = 0;
int pad_h1 = 0;
int pad_w0 = 0;
@@ -94,7 +128,11 @@ void FusedConv2dAddActKernel(const Context& ctx,
const int kh = filter_dims[1];
const int kw = filter_dims[2];

-  CHECK_EQ(out_dims.size() == 4UL, true);
+  PADDLE_ENFORCE_EQ(
+      out_dims.size(),
+      4UL,
+      phi::errors::InvalidArgument(
+          "The output's dimensions must be 4, but got %d.", out_dims.size()));
const int oh = out_dims[1];
const int ow = out_dims[2];

@@ -161,7 +199,8 @@ void FusedConv2dAddActKernel(const Context& ctx,

void* dlhandler = phi::dynload::GetCutlassConv2dHandle();
func conv_func = NULL;
-  CHECK_EQ(dlhandler == NULL, false);
+  PADDLE_ENFORCE_NOT_NULL(
+      dlhandler, phi::errors::NotFound("Fail to get CutlassConv2d handler."));

// conv2d_depthwise
if (groups == ic && ic == oc) {
@@ -173,7 +212,10 @@ void FusedConv2dAddActKernel(const Context& ctx,
params.workspace = tmp_ptr->ptr();
// cutlass conv2d_depthwise not support residual
if (residual) {
-      CHECK_EQ(residual->data<T>() == nullptr, true);
+      PADDLE_ENFORCE_EQ(residual->data<T>(),
+                        nullptr,
+                        phi::errors::InvalidArgument(
+                            "The pointer of residual's data must be null."));
}
if (activation == "relu") {
conv_func = (func)(dlsym(dlhandler, "Conv2dDepthwiseBiasRelu"));
@@ -194,7 +236,10 @@ void FusedConv2dAddActKernel(const Context& ctx,
}

// below: fused_conv2d_add_act && groups == 1
-  CHECK_EQ(groups == 1, true);
+  PADDLE_ENFORCE_EQ(groups,
+                    1,
+                    phi::errors::InvalidArgument(
+                        "The groups must be 1, but got %d.", groups));
if (residual) {
if (activation == "relu") {
params.residual = reinterpret_cast<const void*>(residual->data<T>());
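
As context for readers unfamiliar with the macros, below is a minimal standalone sketch of the pattern this commit applies: replacing a bare CHECK_EQ(cond, true) with an equality check that reports which argument is wrong and what value was received. ENFORCE_EQ here is a hypothetical stand-in written only for this sketch; the real kernel uses Paddle's PADDLE_ENFORCE_EQ / PADDLE_ENFORCE_NOT_NULL together with phi::errors::InvalidArgument / phi::errors::NotFound, exactly as the diff above shows.

#include <cstdio>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-in for PADDLE_ENFORCE_EQ: on mismatch, raise an
// exception carrying a descriptive message instead of failing a bare
// CHECK_EQ(cond, true) with no explanation of which argument was at fault.
#define ENFORCE_EQ(lhs, rhs, msg)        \
  do {                                   \
    if ((lhs) != (rhs)) {                \
      throw std::invalid_argument(msg);  \
    }                                    \
  } while (0)

int main() {
  std::vector<int> strides = {1, 1, 1};  // deliberately wrong: size 3, not 2
  try {
    ENFORCE_EQ(strides.size(),
               2UL,
               "The size of strides must be 2, but got " +
                   std::to_string(strides.size()) + ".");
  } catch (const std::invalid_argument& e) {
    std::printf("caught: %s\n", e.what());  // informative, recoverable error
  }
  return 0;
}

The practical difference is that a failed check now surfaces a descriptive error naming the offending argument and its actual value, rather than a bare CHECK failure.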