Feature/support output layout (#972)
* Add postprocess for "NHWC", "NCHW" and index layouts: "0,2,3,1" ...
* Support outputLayout
* Support layout by dims: "0,2,3,1", "0,4,1,3,2", "1,0"...
---------

Co-authored-by: yanghaoqi <yanghaoqi_intern@canaan-creative.com>
Co-authored-by: curioyang <curioyang@users.noreply.github.com>
3 people authored Jun 20, 2023
1 parent 3b6577c commit 243ca46
Showing 11 changed files with 496 additions and 80 deletions.
2 changes: 0 additions & 2 deletions python/nncase/__init__.py
@@ -242,8 +242,6 @@ def __process_compile_options(self, compile_options: CompileOptions) -> ClCompil
self._compile_options.preprocess = compile_options.preprocess
self._compile_options.input_layout = compile_options.input_layout
self._compile_options.output_layout = compile_options.output_layout
if compile_options.output_layout != "":
raise NotImplementedError("Setting output layout is currently not supported.")
if compile_options.input_type == "uint8":
self._compile_options.input_type = _nncase.InputType.Uint8
elif compile_options.input_type == "int8":
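With the NotImplementedError removed above, output_layout is simply forwarded to the compiler like the other preprocess options. A hedged usage sketch of the Python API (attribute and target names follow the public nncase docs; adjust to your installed version):

import nncase

compile_options = nncase.CompileOptions()
compile_options.target = "cpu"            # assumption: any supported target name works here
compile_options.preprocess = True         # postprocess is only inserted when preprocess is enabled
compile_options.input_layout = "NHWC"
compile_options.output_layout = "NHWC"    # previously rejected, now forwarded to the compiler
compiler = nncase.Compiler(compile_options)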
12 changes: 11 additions & 1 deletion src/Nncase.Compiler/Compiler.cs
@@ -52,7 +52,7 @@ public async Task<IRModule> ImportModuleAsync(Stream content)

await RunPassAsync(pmg => BroadcastOutputNamesAfterImportPass(pmg), "BroadcastOutputNamesAfterImport");
await RunPassAsync(pmg => pmg.Add<ShapeInferPass>(), "ShapeInferAfterImport");
await RunPassAsync(pmg => pmg.Add<AddPreProcess>(), "AddPreProcessAfterImport");
await RunPassAsync(pmg => AddPreAndPostProcess(pmg), "AddPreAndPostProcessAfterImport");

var inferSucc = CompilerServices.InferenceType(module.Entry!);
if (!inferSucc)
@@ -73,6 +73,16 @@ public void BroadcastOutputNamesAfterImportPass(IPassManager passManager)
});
}

public void AddPreAndPostProcess(IPassManager passManager)
{
passManager.Add<AddPreProcess>();
passManager.Add<AddPostProcess>();
passManager.AddWithName<DataflowPass>("FoldNopBinary").Configure(p =>
{
p.Add<Passes.Rules.Neutral.FoldNopBinary>();
});
}

public void TargetIndependentPass(IPassManager passManager)
{
var quantMode = _compileSession.CompileOptions.QuantizeOptions.ModelQuantMode;
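The extra FoldNopBinary step after the pre/post passes is not explained in the diff; my reading is that it cleans up identity binaries the inserted preprocess can produce, e.g. normalization with mean 0 and std 1. A small numpy illustration of that no-op case:

import numpy as np

x = np.random.rand(1, 3, 4, 4).astype(np.float32)
mean = np.zeros((1, 3, 1, 1), dtype=np.float32)   # (x - 0) / 1 is an identity chain,
std = np.ones((1, 3, 1, 1), dtype=np.float32)     # i.e. two foldable "nop" binaries
assert np.array_equal((x - mean) / std, x)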
65 changes: 65 additions & 0 deletions src/Nncase.Passes/Rules/Neutral/AddPostProcess.cs
@@ -0,0 +1,65 @@
// Copyright (c) Canaan Inc. All rights reserved.
// Licensed under the Apache license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Nncase.IR;
using Nncase.IR.Imaging;
using Nncase.IR.Math;
using Nncase.Passes;
using Nncase.PatternMatch;
using OrtKISharp;
using static Nncase.IR.F.Math;
using static Nncase.IR.F.NN;
using static Nncase.IR.F.Tensors;
using static Nncase.IR.TypePatternUtility;
using static Nncase.PatternMatch.F.Math;
using static Nncase.PatternMatch.Utility;
using Pad = Nncase.IR.NN.Pad;

namespace Nncase.Passes.Rules.Neutral;

/// <summary>
/// Add postprocess to the model.
/// </summary>
[RuleGenerator]
public sealed class AddPostProcess : ModulePass
{
/// <summary>
/// Postprocess: support outputLayout.
/// </summary>
/// <param name="module"> The graph. </param>
/// <param name="options"> RunPassContext. </param>
/// <returns> Return a new graph with postprocess. </returns>
protected override Task<IRModule> RunCoreAsync(IRModule module, RunPassContext options)
{
var preProcess = CompileSession.CompileOptions.PreProcess;
var modelLayout = CompileSession.CompileOptions.ModelLayout;
var outputLayout = CompileSession.CompileOptions.OutputLayout;

var entry = (IR.Function)module.Entry!;

if (preProcess && modelLayout != outputLayout && outputLayout != string.Empty)
{
var newOutput = outputLayout switch
{
"NHWC" when modelLayout == "NCHW" => Transpose(entry.Body, new[] { 0, 2, 3, 1 }),
"NCHW" when modelLayout == "NHWC" => Transpose(entry.Body, new[] { 0, 3, 1, 2 }),
_ => Transpose(
entry.Body,
Array.ConvertAll(
outputLayout.Replace(" ", string.Empty, StringComparison.OrdinalIgnoreCase).Split(","),
int.Parse)),
};
var newEntry = entry.With(body: newOutput);
module.Remove(entry);
module.Add(newEntry);
module.Entry = newEntry;
}

return Task.FromResult(module);
}
}
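The switch above covers the named NHWC/NCHW cases and otherwise parses the layout string as an explicit axis permutation (the "0,2,3,1", "0,4,1,3,2", "1,0" forms from the commit message). A minimal numpy sketch of the same mapping, using a hypothetical helper name:

import numpy as np

def apply_output_layout(output: np.ndarray, model_layout: str, output_layout: str) -> np.ndarray:
    """Hypothetical helper: permute a model output into the requested layout."""
    if output_layout == "NHWC" and model_layout == "NCHW":
        perm = [0, 2, 3, 1]
    elif output_layout == "NCHW" and model_layout == "NHWC":
        perm = [0, 3, 1, 2]
    else:  # e.g. "0,2,3,1", "0,4,1,3,2", "1,0"
        perm = [int(axis) for axis in output_layout.replace(" ", "").split(",")]
    return np.transpose(output, perm)

# An NCHW output of shape (1, 3, 224, 224) requested as "0,2,3,1" comes back as (1, 224, 224, 3).
print(apply_output_layout(np.zeros((1, 3, 224, 224)), "NCHW", "0,2,3,1").shape)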
163 changes: 106 additions & 57 deletions src/Nncase.Passes/Rules/Neutral/AddPreProcess.cs
@@ -6,6 +6,7 @@
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Toolkit.HighPerformance;
using Nncase.IR;
using Nncase.IR.Imaging;
using Nncase.IR.Math;
@@ -28,6 +29,12 @@ namespace Nncase.Passes.Rules.Neutral;
[RuleGenerator]
public sealed class AddPreProcess : ModulePass
{
/// <summary>
/// Main func for AddPreProcess.
/// </summary>
/// <param name="module"> The graph. </param>
/// <param name="options"> RunPassContext. </param>
/// <returns> Return a new graph with preprocess. </returns>
protected override Task<IRModule> RunCoreAsync(IRModule module, RunPassContext options)
{
var preProcess = CompileSession.CompileOptions.PreProcess;
@@ -56,35 +63,58 @@ protected override Task<IRModule> RunCoreAsync(IRModule module, RunPassContext o
Expr newInput = a;
var oldShape = input.CheckedShape;

int n, c, h, w;
if (inputLayout == "NHWC")
{
(n, h, w, c) = (inputShape[0], inputShape[1], inputShape[2], inputShape[3]);
}
else
{
(n, c, h, w) = (inputShape[0], inputShape[1], inputShape[2], inputShape[3]);
}

// Convert new input to NCHW
if (inputLayout == "NHWC")
var newInputPerm = Array.Empty<int>();
if (inputLayout != string.Empty)
{
newInput = Transpose(newInput, new int[4] { 0, 3, 1, 2 });
if (inputLayout != "NHWC" && inputLayout != "NCHW")
{
newInputPerm = Array.ConvertAll(
inputLayout.Replace(" ", string.Empty, StringComparison.OrdinalIgnoreCase).Split(","),
int.Parse);
}

newInput = inputLayout switch
{
"NHWC" => Transpose(newInput, new[] { 0, 3, 1, 2 }),
"NCHW" => Transpose(newInput, new[] { 0, 1, 2, 3 }),
_ => Transpose(newInput, newInputPerm),
};
}

// SwapRB
if (swapRB && c != 1)
int n = 0, c = 0, h = 0, w = 0;
if (inputShape.Length == 4)
{
var axes = new int[4] { 0, 1, 2, 3 };
var strides = new int[4] { 1, 1, 1, 1 };
newInput = Concat(
new IR.Tuple(new[] { Slice(newInput, new int[4] { 0, 2, 0, 0 }, new int[4] { n, 3, h, w }, axes, strides),
Slice(newInput, new int[4] { 0, 1, 0, 0 }, new int[4] { n, 2, h, w }, axes, strides),
Slice(newInput, new int[4] { 0, 0, 0, 0 }, new int[4] { n, 1, h, w }, axes, strides), }),
1);

// TODO: fix slice neg strides shape inference
// newInput = Slice(newInput, new int[] {n, c, h, w },new[] { 0, 0, 0, 0 }, axes, strides);
if (inputLayout == "NHWC")
{
(n, h, w, c) = (inputShape[0], inputShape[1], inputShape[2], inputShape[3]);
}
else if (inputLayout == "NCHW")
{
(n, c, h, w) = (inputShape[0], inputShape[1], inputShape[2], inputShape[3]);
}
else
{
(n, c, h, w) = (inputShape[newInputPerm[0]], inputShape[newInputPerm[1]], inputShape[newInputPerm[2]], inputShape[newInputPerm[3]]);
}

// SwapRB
if (swapRB && c != 1)
{
var axes = new int[4] { 0, 1, 2, 3 };
var strides = new int[4] { 1, 1, 1, 1 };
newInput = Concat(
new IR.Tuple(new[]
{
Slice(newInput, new int[4] { 0, 2, 0, 0 }, new int[4] { n, 3, h, w }, axes, strides),
Slice(newInput, new int[4] { 0, 1, 0, 0 }, new int[4] { n, 2, h, w }, axes, strides),
Slice(newInput, new int[4] { 0, 0, 0, 0 }, new int[4] { n, 1, h, w }, axes, strides),
}),
1);

// TODO: fix slice neg strides shape inference
// newInput = Slice(newInput, new int[] {n, c, h, w },new[] { 0, 0, 0, 0 }, axes, strides);
}
}

// Dequantize to float
@@ -95,54 +125,73 @@ protected override Task<IRModule> RunCoreAsync(IRModule module, RunPassContext o
}

// Letterbox
int modelH, modelW;

if (modelLayout != "NCHW")
{
(modelH, modelW) = (oldShape[1].FixedValue, oldShape[2].FixedValue);
}
else
if (inputShape.Length == 4)
{
(modelH, modelW) = (oldShape[2].FixedValue, oldShape[3].FixedValue);
}

if (modelH != h || modelW != w)
{
var ratio = Math.Min(modelH / (float)h, modelW / (float)w);

var pads = Tensor.From<int>(new[] { 0, 0, 0, 0, 0, 0, 0, 0 }, new Shape(new[] { 4, 2 }));

var resizeH = Math.Round(h * ratio);
var resizeW = Math.Round(w * ratio);

var padH = modelH - resizeH;
var padW = modelW - resizeW;
var resizeShape = new int[] { n, c, (int)resizeH, (int)resizeW };

pads[2, 0] = (int)Math.Round((padH / 2) - 0.1);
pads[2, 1] = (int)padH - (int)Math.Round((padH / 2) - 0.1);
pads[3, 0] = (int)Math.Round((padW / 2) - 0.1);
pads[3, 1] = (int)padW - (int)Math.Round((padW / 2) - 0.1);

newInput = IR.F.NN.Pad(IR.F.Imaging.ResizeImage(ImageResizeMode.Bilinear, newInput, float.NaN, resizeShape, ImageResizeTransformationMode.HalfPixel), pads, PadMode.Constant, letterBoxValue);
int modelH, modelW;

if (modelLayout != "NCHW")
{
(modelH, modelW) = (oldShape[1].FixedValue, oldShape[2].FixedValue);
}
else
{
(modelH, modelW) = (oldShape[2].FixedValue, oldShape[3].FixedValue);
}

if (modelH != h || modelW != w)
{
var ratio = Math.Min(modelH / (float)h, modelW / (float)w);

var pads = Tensor.From<int>(new[] { 0, 0, 0, 0, 0, 0, 0, 0 }, new Shape(new[] { 4, 2 }));

var resizeH = Math.Round(h * ratio);
var resizeW = Math.Round(w * ratio);

var padH = modelH - resizeH;
var padW = modelW - resizeW;
var resizeShape = new int[] { n, c, (int)resizeH, (int)resizeW };

pads[2, 0] = (int)Math.Round((padH / 2) - 0.1);
pads[2, 1] = (int)padH - (int)Math.Round((padH / 2) - 0.1);
pads[3, 0] = (int)Math.Round((padW / 2) - 0.1);
pads[3, 1] = (int)padW - (int)Math.Round((padW / 2) - 0.1);

newInput = IR.F.NN.Pad(
IR.F.Imaging.ResizeImage(
ImageResizeMode.Bilinear,
newInput,
float.NaN,
resizeShape,
ImageResizeTransformationMode.HalfPixel),
pads,
PadMode.Constant,
letterBoxValue);
}
}

// Normalization
if (mean.Length != 0)
{
newInput = (newInput - Tensor.From(mean, new[] { 1, mean.Length, 1, 1 })) / Tensor.From(std, new[] { 1, std.Length, 1, 1 });
newInput = mean.Length switch
{
3 when inputShape.Length == 4 => (newInput - Tensor.From(mean, new[] { 1, mean.Length, 1, 1 })) /
Tensor.From(std, new[] { 1, std.Length, 1, 1 }),
_ => (newInput - Tensor.From(new float[] { mean[0] }, new[] { 1 })) /
Tensor.From(new float[] { std[0] }, new[] { 1 }),
};

// newInput = Binary(BinaryOp.Div, Binary(BinaryOp.Sub, newInput, Tensor.From(mean, new []{1,3,1,1})), Const.FromTensor(std) );
}

// Convert to model layout
if (modelLayout == "NHWC")
if (modelLayout == "NHWC" && inputShape.Length == 4)
{
newInput = Transpose(newInput, new[] { 0, 2, 3, 1 });
}

var y = new Passes.Mutators.Substitutor(expr => object.ReferenceEquals(expr, input) ? newInput : null).Rewrite(entry.Body);
var x = new Passes.Mutators.Substitutor(expr => object.ReferenceEquals(expr, input) ? a : null).Rewrite(entry);
new Passes.Mutators.Substitutor(expr => object.ReferenceEquals(expr, input) ? newInput : null).Rewrite(
entry.Body);
new Passes.Mutators.Substitutor(expr => object.ReferenceEquals(expr, input) ? a : null).Rewrite(entry);
}

return Task.FromResult(module);
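For the letterbox branch above, a worked sketch of the resize/pad arithmetic may help; the values are illustrative and the rounding mirrors the "- 0.1" trick in the C# code (which I take to bias an odd pad toward the bottom/right):

def letterbox_geometry(h, w, model_h, model_w):
    """Compute the resized HxW and the 4x2 NCHW pad values used by the Pad above."""
    ratio = min(model_h / h, model_w / w)
    resize_h = int(round(h * ratio))
    resize_w = int(round(w * ratio))
    pad_h = model_h - resize_h
    pad_w = model_w - resize_w
    top = int(round(pad_h / 2 - 0.1))
    left = int(round(pad_w / 2 - 0.1))
    return resize_h, resize_w, [[0, 0], [0, 0], [top, pad_h - top], [left, pad_w - left]]

# A 360x640 input into a 224x224 model: ratio 0.35, resize 126x224, vertical pad split 49/49.
print(letterbox_geometry(360, 640, 224, 224))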
13 changes: 11 additions & 2 deletions tests/evaluator.py
@@ -38,7 +38,7 @@ def generate_evaluates(self, cfg, case_dir: str,
evaluator = self.compiler.create_evaluator(3)
self.set_inputs(evaluator, preprocess)
evaluator.run()
eval_output_paths = self.dump_outputs(eval_dir, evaluator)
eval_output_paths = self.dump_outputs(eval_dir, preprocess, evaluator)
return eval_output_paths

def set_inputs(self, evaluator, preprocess):
@@ -47,10 +47,19 @@ def set_inputs(self, evaluator, preprocess):
self.transform_input((i['data']), preprocess['input_type'], "infer")[0])
evaluator.set_input_tensor(idx, input_tensor)

def dump_outputs(self, eval_dir, evaluator):
def dump_outputs(self, eval_dir, preprocess, evaluator):
eval_output_paths = []
for i in range(evaluator.outputs_size):
result = evaluator.get_output_tensor(i).to_numpy()
if preprocess['preprocess']:
if(preprocess['output_layout'] == 'NHWC' and self.model_type in ['caffe', 'onnx']):
result = np.transpose(result, [0, 3, 1, 2])
elif (preprocess['output_layout'] == 'NCHW' and self.model_type in ['tflite']):
result = np.transpose(result, [0, 2, 3, 1])
elif preprocess['output_layout'] not in ["NCHW", "NHWC"]:
tmp_perm = [int(idx) for idx in preprocess['output_layout'].split(",")]
result = np.transpose(
result, preprocess_utils.get_source_transpose_index(tmp_perm))
os.makedirs(eval_dir, exist_ok=True)
eval_output_paths.append((
os.path.join(eval_dir, f'nncase_result_{i}.bin'),
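The custom-layout branch relies on preprocess_utils.get_source_transpose_index, which I read as returning the inverse permutation so the dumped result is compared in the model's native layout. A minimal equivalent under that assumption:

import numpy as np

def inverse_perm(perm):
    """Inverse of an axis permutation: maps a transposed tensor back to its source layout."""
    inv = [0] * len(perm)
    for dst_axis, src_axis in enumerate(perm):
        inv[src_axis] = dst_axis
    return inv

perm = [0, 2, 3, 1]                # NCHW -> NHWC
x = np.zeros((1, 3, 8, 8))
assert np.transpose(np.transpose(x, perm), inverse_perm(perm)).shape == x.shape
print(inverse_perm(perm))          # [0, 3, 1, 2]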