
Commit 463f831

Merge branch 'develop' into clang-tidy/modernize-raw-string-literal

2 parents: 2bbf981 + 7426676

140 files changed: +2549 -1283 lines
(Large commit: only a subset of the 140 changed files is shown below.)

.clang-tidy (+1 -1)

@@ -174,7 +174,7 @@ Checks: '
 modernize-make-unique,
 -modernize-pass-by-value,
 modernize-raw-string-literal,
--modernize-redundant-void-arg,
+modernize-redundant-void-arg,
 -modernize-replace-auto-ptr,
 -modernize-replace-random-shuffle,
 -modernize-shrink-to-fit,
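
Note: inside the quoted Checks list of a .clang-tidy file, a leading "-" disables a check, so the hunk above turns modernize-redundant-void-arg on (the doubled minus on the removed line is the diff marker plus the disabling prefix). What the newly enabled check rewrites, on a hypothetical declaration:

  int add(void);  // before: redundant C-style void parameter list
  int add();      // after the modernize-redundant-void-arg fix-it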

paddle/cinn/backends/ir_schedule_test.cc (+2 -2)

@@ -177,7 +177,7 @@ void TestSplitThrow() {
   std::vector<Expr> vec_ast{ast_expr};
   ir::ModuleExpr mod_expr(vec_ast);
   ir::IRSchedule ir_sch(
-      mod_expr, -1, false, ir::ScheduleErrorMessageLevel::kGeneral);
+      mod_expr, -1, false, utils::ErrorMessageLevel::kGeneral);
   auto fused = ir_sch.Fuse("B", {0, 1});
   // statement that cause the exception
   auto splited = ir_sch.Split(fused, {-1, -1});
@@ -196,7 +196,7 @@ void TestSplitThrow() {
   auto source_code = codegen.Compile(module, CodeGenC::OutputKind::CImpl);
 }
 TEST(IrSchedule, split_throw) {
-  ASSERT_THROW(TestSplitThrow(), ir::enforce::EnforceNotMet);
+  ASSERT_THROW(TestSplitThrow(), utils::enforce::EnforceNotMet);
 }

 TEST(IrSchedule, reorder1) {
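
Note: per the IRSchedule constructor signature updated in paddle/cinn/ir/schedule/ir_schedule.h later in this commit, the call in the first hunk reads, with its arguments labeled:

  ir::IRSchedule ir_sch(mod_expr,
                        /*rand_seed=*/-1,
                        /*debug_flag=*/false,
                        utils::ErrorMessageLevel::kGeneral);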

paddle/cinn/frontend/paddle_model_to_program.cc (-6)

@@ -398,12 +398,6 @@ void PaddleModelToProgram::AddOpMapper_relu6() {
   CHECK_EQ(op_desc.Output("Out").size(), 1UL);
   auto out_name = op_desc.Output("Out").front();

-  absl::flat_hash_map<std::string, hlir::framework::NodeAttr::attr_t> attrs;
-  CHECK(op_desc.HasAttr("threshold"));
-  CHECK_EQ(op_desc.GetAttr<float>("threshold"), 6.0f)
-      << "Threshold of Relu6 is not 6! To be implemented.";
-  attrs["threshold"] = op_desc.GetAttr<float>("threshold");
-
   auto x = GetVar(TransValidVarName(x_name));
   auto out = net_builder_->Relu6(x);
   AddVar(TransValidVarName(out_name), out);
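
Note: the deleted block was effectively dead code. The attrs map was built and filled but never consumed, and net_builder_->Relu6(x) is called without it. The conversion-time check that Relu6's threshold equals 6 disappears along with it, presumably because the builder's default already assumes that value.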

paddle/cinn/hlir/framework/new_ir_compiler.h (+38 -15)

@@ -13,6 +13,7 @@
 // limitations under the License.

 #pragma once
+#include <absl/types/variant.h>
 #include <memory>
 #include <unordered_map>
 #include "paddle/cinn/common/context.h"
@@ -30,9 +31,15 @@ namespace cinn {
 namespace hlir {
 namespace framework {

-// TODO(Aurelius): Need add name mapping logic in REGISTER_CINN_OP
-// macros or attempt to unify Op name with Paddle and CINN.
-static const std::unordered_map<std::string, std::string> OP_NAMES = {
+struct CompatibleInfo {
+  static constexpr char* kInputPrefix = "input_";
+  static constexpr char* kOutputPrefix = "output_";
+  // TODO(Aurelius): Need add name mapping logic in REGISTER_CINN_OP
+  // macros or attempt to unify Op name with Paddle and CINN.
+  static const std::unordered_map<std::string, std::string> OP_NAMES;
+};
+
+const std::unordered_map<std::string, std::string> CompatibleInfo::OP_NAMES = {
     {"pd.full", "fill_constant"}, {"pd.matmul", "matmul"}};

 // TODO(Aurelius84): Need abstract this logic to implement Proxy for
@@ -70,18 +77,32 @@ class NewIRCompiler final {
     compiler_->Build(build_module, "");

     auto instructions = BuildInstructions(groups);
+
+    // TODO(Aurelius84): Instantiate all tensors on compile-time, which is
+    // controlled by 'options.with_instantiate_variables' in GraphCompiler.
+    // Moreover, it's better to implement InsertBufferHandlers() logic
+    // to automatically insert Malloc and Free instructions.
+    for (auto& name : scope_->var_names()) {
+      std::string var_name({name.data(), name.size()});
+      VLOG(4) << "Instantiate " << var_name << " on compile-time";
+      auto* var = scope_->Var<Tensor>(var_name);
+      auto& tensor = absl::get<Tensor>(*var);
+      tensor->mutable_data(target_, tensor->type());
+    }
     return std::make_unique<Program>(scope_, std::move(instructions));
   }

   std::vector<ir::LoweredFunc> GetOpFunc(const ::ir::Operation& op, int idx) {
     std::vector<ir::Tensor> inputs;
     std::vector<common::CINNValue> cinn_inputs;
-    VLOG(4) << "GetOpFunc for op: " << op.name();
+    auto op_name = op.name();
+    VLOG(4) << "GetOpFunc for op: " << op_name;
     // step 1: Deal with Oprands
     for (int i = 0; i < op.num_operands(); ++i) {
       auto in_value = op.operand(i);
       // TODO(Aurelius84): For now, use addr as name but it's not wise.
-      std::string input_id = std::to_string(std::hash<::ir::Value>()(in_value));
+      std::string input_id = CompatibleInfo::kInputPrefix +
+                             std::to_string(std::hash<::ir::Value>()(in_value));
       // NOTE(Aurelius84): whether need to support other Type?
       auto type_info =
           in_value.type().dyn_cast<paddle::dialect::DenseTensorType>();
@@ -100,8 +121,7 @@ class NewIRCompiler final {
       cinn_inputs.push_back(common::CINNValue(temp));
     }
     for (auto out_name : OpGetOutputNames(op)) {
-      cinn_inputs.push_back(
-          common::CINNValue(op.name().substr(3) + "_" + out_name));
+      cinn_inputs.push_back(common::CINNValue(out_name));
     }

     VLOG(4) << "inputs.size(): " << inputs.size();
@@ -124,14 +144,14 @@ class NewIRCompiler final {
     {
       VLOG(4) << "op.attributes():" << op.attributes().size();
       auto attrs = utils::ConvertAttributes(op.attributes());
-      node_attrs.node_name = OP_NAMES.at(op.name());
+      node_attrs.node_name = CompatibleInfo::OP_NAMES.at(op_name);
       node_attrs.attr_store = std::move(attrs);
     }
     auto& strategy = Operator::GetAttrs<StrategyFunction>("CINNStrategy");
     // NOTE(Aurelius84): Do we need replace all hlir::framework Operator with
     // ::ir::Program ?
     const hlir::framework::Operator* cinn_op =
-        Operator::Get(OP_NAMES.at(op.name()));
+        Operator::Get(CompatibleInfo::OP_NAMES.at(op_name));
     auto impl = OpStrategy::SelectImpl(
         strategy[cinn_op](node_attrs, inputs, out_types, out_shapes, target_));
     common::CINNValuePack C =
@@ -223,7 +243,8 @@ class NewIRCompiler final {
     std::unordered_set<std::string> repeat;
     for (int i = 0; i < op.num_operands(); ++i) {
       auto value = op.operand(i);
-      std::string name = std::to_string(std::hash<::ir::Value>()(value));
+      std::string name = CompatibleInfo::kInputPrefix +
+                         std::to_string(std::hash<::ir::Value>()(value));
       if (repeat.count(name)) {
         continue;
       }
@@ -237,7 +258,8 @@ class NewIRCompiler final {
     std::vector<std::string> names;
     for (int i = 0; i < op.num_results(); ++i) {
       auto value = op.result(i);
-      std::string name = std::to_string(std::hash<::ir::Value>()(value));
+      std::string name = CompatibleInfo::kOutputPrefix +
+                         std::to_string(std::hash<::ir::Value>()(value));
       names.push_back(std::move(name));
     }
     return names;
@@ -257,11 +279,12 @@ std::shared_ptr<Scope> BuildScope(const Target& target,
   std::unordered_set<::ir::Value> visited;
   auto scope = std::make_shared<Scope>();

-  auto create_var = [&](::ir::Value value) {
+  auto create_var = [&](const std::string& name_prefix, ::ir::Value value) {
     if (visited.count(value) > 0) return;
     visited.emplace(value);

-    std::string name = std::to_string(std::hash<::ir::Value>()(value));
+    std::string name =
+        name_prefix + std::to_string(std::hash<::ir::Value>()(value));
     auto type_info = value.type().dyn_cast<paddle::dialect::DenseTensorType>();
     auto* var = scope->Var<Tensor>(name);
     auto& tensor = absl::get<Tensor>(*var);
@@ -279,12 +302,12 @@ std::shared_ptr<Scope> BuildScope(const Target& target,
     // visit OpOprands
     for (auto i = 0; i < (*it)->num_operands(); ++i) {
       auto in_value = (*it)->operand(i);
-      create_var(in_value);
+      create_var(CompatibleInfo::kInputPrefix, in_value);
     }

     for (auto i = 0; i < (*it)->num_results(); ++i) {
       auto out_value = (*it)->result(i);
-      create_var(out_value);
+      create_var(CompatibleInfo::kOutputPrefix, out_value);
     }
   }
   return scope;
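
Note: the common thread in these hunks is the prefixed naming scheme. Scope variable names derived from hashing an ::ir::Value now carry CompatibleInfo's input_/output_ prefix, so operand and result variables no longer share the bare hash string. A small illustration (the hash value is made up):

  ::ir::Value v = op.operand(0);
  size_t h = std::hash<::ir::Value>()(v);                 // e.g. 140245
  std::string before = std::to_string(h);                 // "140245"
  std::string after =
      CompatibleInfo::kInputPrefix + std::to_string(h);   // "input_140245"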

paddle/cinn/ir/schedule/ir_schedule.cc (+12 -14)

@@ -41,7 +41,7 @@
 #include "paddle/cinn/optim/replace_var_with_expr.h"
 #include "paddle/cinn/utils/string.h"

-DECLARE_int32(cinn_schedule_error_message_level);
+DECLARE_int32(cinn_error_message_level);

 namespace cinn {
 namespace ir {
@@ -54,12 +54,11 @@ class ScheduleImpl {
   ScheduleImpl() = default;
   explicit ScheduleImpl(const ModuleExpr& module_expr,
                         bool debug_flag = false,
-                        ScheduleErrorMessageLevel err_msg_level =
-                            ScheduleErrorMessageLevel::kGeneral)
+                        utils::ErrorMessageLevel err_msg_level =
+                            utils::ErrorMessageLevel::kGeneral)
       : module_expr_(module_expr), debug_flag_(debug_flag) {
-    err_msg_level_ = static_cast<ScheduleErrorMessageLevel>(
-        FLAGS_cinn_schedule_error_message_level ||
-        static_cast<int>(err_msg_level));
+    err_msg_level_ = static_cast<utils::ErrorMessageLevel>(
+        FLAGS_cinn_error_message_level || static_cast<int>(err_msg_level));
   }
   explicit ScheduleImpl(ModuleExpr&& module_expr)
       : module_expr_(std::move(module_expr)) {}
@@ -138,8 +137,7 @@ class ScheduleImpl {

   ModuleExpr module_expr_;
   bool debug_flag_{false};
-  ScheduleErrorMessageLevel err_msg_level_ =
-      ScheduleErrorMessageLevel::kGeneral;
+  utils::ErrorMessageLevel err_msg_level_ = utils::ErrorMessageLevel::kGeneral;
 };

 /** \brief A macro that guards the beginning of each implementation of schedule
@@ -152,10 +150,10 @@ class ScheduleImpl {
  * @param err_msg_level A ScheduleErrorMessageLevel enum, level of error message
  * printing
  */
-#define CINN_IR_SCHEDULE_END(primitive, err_msg_level)                    \
-  }                                                                       \
-  catch (const IRScheduleErrorHandler& err_hanlder) {                     \
-    CINN_THROW(err_hanlder.FormatErrorMessage(primitive, err_msg_level)); \
+#define CINN_IR_SCHEDULE_END(err_msg_level)                    \
+  }                                                            \
+  catch (const utils::ErrorHandler& err_hanlder) {             \
+    CINN_THROW(err_hanlder.FormatErrorMessage(err_msg_level)); \
   }

 std::vector<Expr> ScheduleImpl::Split(const Expr& loop,
@@ -177,7 +175,7 @@ std::vector<Expr> ScheduleImpl::Split(const Expr& loop,
   std::vector<int> processed_factors;
   CINN_IR_SCHEDULE_BEGIN();
   processed_factors = ValidateFactors(factors, tot_extent, this->module_expr_);
-  CINN_IR_SCHEDULE_END("split", this->err_msg_level_);
+  CINN_IR_SCHEDULE_END(this->err_msg_level_);
   int prod_size = std::accumulate(processed_factors.begin(),
                                   processed_factors.end(),
                                   1,
@@ -2316,7 +2314,7 @@ IRSchedule::IRSchedule() {}
 IRSchedule::IRSchedule(const ModuleExpr& module_expr,
                        utils::LinearRandomEngine::StateType rand_seed,
                        bool debug_flag,
-                       ScheduleErrorMessageLevel err_msg_level) {
+                       utils::ErrorMessageLevel err_msg_level) {
   impl_ =
       std::make_unique<ScheduleImpl>(module_expr, debug_flag, err_msg_level);
   this->InitSeed(rand_seed);
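
Note: assuming CINN_IR_SCHEDULE_BEGIN() opens a try-block (its definition is not part of this diff), the guarded prologue of Split now expands to roughly the following; the primitive name no longer travels through the macro because the handler carries it itself (see ir_schedule_error.cc below):

  try {
    processed_factors =
        ValidateFactors(factors, tot_extent, this->module_expr_);
  }
  catch (const utils::ErrorHandler& err_hanlder) {  // spelling as in the macro
    CINN_THROW(err_hanlder.FormatErrorMessage(this->err_msg_level_));
  }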

paddle/cinn/ir/schedule/ir_schedule.h (+3 -16)

@@ -24,25 +24,12 @@
 #include "paddle/cinn/ir/schedule/schedule_desc.h"
 #include "paddle/cinn/ir/tensor.h"
 #include "paddle/cinn/ir/utils/ir_mutator.h"
+#include "paddle/cinn/utils/error.h"
 #include "paddle/cinn/utils/random_engine.h"

 namespace cinn {
 namespace ir {

-/**
- * \brief Indicates the level of printing error message in the current Schedule
- */
-enum class ScheduleErrorMessageLevel : int32_t {
-  /** \brief Print an error message in short mode.
-   * Short mode shows which and where the schedule error happens*/
-  kGeneral = 0,
-  /** \brief Print an error message in detailed mode.
-   * Detailed mode shows which and where the schedule error happens, and the
-   * schedule input parameters.
-   */
-  kDetailed = 1,
-};
-
 /**
  * A struct representing a module that contains Expr. This struct is only used
  * in Schedule process.
@@ -85,8 +72,8 @@ class IRSchedule {
   explicit IRSchedule(const ModuleExpr& modexpr,
                       utils::LinearRandomEngine::StateType rand_seed = -1,
                       bool debug_flag = false,
-                      ScheduleErrorMessageLevel err_msg_level =
-                          ScheduleErrorMessageLevel::kGeneral);
+                      utils::ErrorMessageLevel err_msg_level =
+                          utils::ErrorMessageLevel::kGeneral);
   IRSchedule(ir::ModuleExpr&& mod_expr,
              ScheduleDesc&& trace,
              utils::LinearRandomEngine::StateType rand_seed = -1);
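
Note: the removed enum is not re-added in any file shown in this commit. Given the new paddle/cinn/utils/error.h include and the utils::ErrorMessageLevel references throughout, it presumably now lives in that header in roughly this form (a sketch mirroring the deleted definition, not the actual file contents):

  namespace cinn {
  namespace utils {
  //! Level of error-message printing (formerly ir::ScheduleErrorMessageLevel).
  enum class ErrorMessageLevel : int32_t {
    kGeneral = 0,   // short mode: which error happened and where
    kDetailed = 1,  // detailed mode: also prints the schedule's input params
  };
  }  // namespace utils
  }  // namespace cinn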

paddle/cinn/ir/schedule/ir_schedule_error.cc (+5 -40)

@@ -20,7 +20,11 @@ namespace cinn {
 namespace ir {

 std::string IRScheduleErrorHandler::GeneralErrorMessage() const {
-  return this->err_msg_;
+  std::ostringstream os;
+  os << "[IRScheduleError] An error occurred in the scheduel primitive < "
+     << this->primitive_ << " >. " << std::endl;
+  os << this->err_msg_;
+  return os.str();
 }

 std::string IRScheduleErrorHandler::DetailedErrorMessage() const {
@@ -31,44 +35,5 @@ std::string IRScheduleErrorHandler::DetailedErrorMessage() const {
   return os.str();
 }

-std::string IRScheduleErrorHandler::FormatErrorMessage(
-    const std::string& primitive,
-    const ScheduleErrorMessageLevel& err_msg_level) const {
-  std::ostringstream os;
-  std::string err_msg = err_msg_level == ScheduleErrorMessageLevel::kDetailed
-                            ? DetailedErrorMessage()
-                            : GeneralErrorMessage();
-
-  os << "[IRScheduleError] An error occurred in the scheduel primitive <"
-     << primitive << ">. " << std::endl;
-  os << "[Error info] " << err_msg;
-  return os.str();
-}
-
-std::string NegativeFactorErrorMessage(const int64_t& factor,
-                                       const size_t& idx) {
-  std::ostringstream os;
-  os << "The params in factors of Split should be positive. However, the "
-        "factor at position "
-     << idx << " is " << factor << std::endl;
-  return os.str();
-}
-
-std::string InferFactorErrorMessage() {
-  std::ostringstream os;
-  os << "The params in factors of Split should not be less than -1 or have "
-        "more than one -1!"
-     << std::endl;
-  return os.str();
-}
-
-std::string FactorProductErrorMessage() {
-  std::ostringstream os;
-  os << "In Split, the factors' product should be not larger than or equal "
-        "to original loop's extent!"
-     << std::endl;
-  return os.str();
-}
-
 } // namespace ir
 } // namespace cinn
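
Note: with GeneralErrorMessage now printing the primitive name from a primitive_ member, FormatErrorMessage loses its parameters and moves to the utils::ErrorHandler base that CINN_IR_SCHEDULE_END catches. Its new home is not among the files shown; a plausible shape, inferred from the deleted body and the new call site:

  // Sketch only: assumed to live alongside paddle/cinn/utils/error.h.
  std::string ErrorHandler::FormatErrorMessage(
      const ErrorMessageLevel& err_msg_level) const {
    return err_msg_level == ErrorMessageLevel::kDetailed
               ? DetailedErrorMessage()
               : GeneralErrorMessage();
  }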
