forked from jiuyuan/InfiniTensor
fix concat&pooling test code
This commit is contained in:
parent 39484e0cc4
commit 70950e3fbb
env.sh
@@ -47,6 +47,7 @@ export KUNLUN_HOME=/usr/local/xpu
# ├── tools
# ├── version
# └── XTDK
export ASCEND_HOME=/usr/local/Ascend/ascend-toolkit/6.3
#export ASCEND_HOME=/usr/local/Ascend/ascend-toolkit/6.3
export ASCEND_HOME=/usr/local/Ascend/ascend-toolkit/latest
source /usr/local/Ascend/ascend-toolkit/set_env.sh
source /usr/local/Ascend/toolbox/set_env.sh
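
The Ascend tests below assume this toolkit environment is in place. As a side note, a minimal, hypothetical guard like the following can fail fast when env.sh has not been sourced; it is not part of this commit, and only the ASCEND_HOME variable comes from the lines above.

// Hypothetical guard, not part of this commit; only ASCEND_HOME comes from env.sh above.
#include <cstdio>
#include <cstdlib>

int main() {
    // env.sh exports ASCEND_HOME; bail out early if it is missing.
    const char *ascendHome = std::getenv("ASCEND_HOME");
    if (ascendHome == nullptr) {
        std::fprintf(stderr, "ASCEND_HOME is not set; source env.sh first.\n");
        return 1;
    }
    std::printf("Using Ascend toolkit at %s\n", ascendHome);
    return 0;
}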

@@ -25,16 +25,27 @@ void testConcat(const std::function<void(void *, size_t, DataType)> &generator,
    inputCpu2->dataMalloc();
    inputCpu2->setData(generator);

    // GPU
    // NPU
    Graph npuGraph = make_ref<GraphObj>(npuRuntime);
    auto inputNpu1 = npuGraph->cloneTensor(inputCpu1);
    auto inputNpu2 = npuGraph->cloneTensor(inputCpu2);
    auto npuOp =
        npuGraph->addOp<T>(TensorVec{inputNpu1, inputNpu2}, nullptr, 2);
    npuGraph->dataMalloc();
    inputNpu1->setData(generator);
    inputNpu2->setData(generator);
    npuRuntime->run(npuGraph);
    auto outputNpu = npuOp->getOutput();
    auto outputNpu2Cpu = outputNpu->clone(cpuRuntime);

    /********************************************************/
    auto inputTest1 = inputNpu1->clone(cpuRuntime);
    auto inputTest2 = inputNpu2->clone(cpuRuntime);
    inputTest1->printData();
    inputTest2->printData();

    /********************************************************/

    // Check
    inputCpu1->print();
    inputCpu1->printData();
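
To show how the NPU-side fragments above fit together, here is a hedged sketch of the flow as one helper. The function name, header paths, and the ConcatObj operator are assumptions drawn from the fragments; the point is the order: build the graph, allocate with dataMalloc, then write generator data, then run.

// Sketch only: helper name, header paths, and ConcatObj are assumptions.
#include <functional>
#include "core/graph.h"
#include "core/runtime.h"
#include "operators/concat.h"

namespace infini {

void runConcatOnNpu(Runtime npuRuntime, Runtime cpuRuntime, Tensor inputCpu1,
                    Tensor inputCpu2,
                    const std::function<void(void *, size_t, DataType)> &generator) {
    // NPU graph: clone the host inputs and concatenate them along axis 2.
    Graph npuGraph = make_ref<GraphObj>(npuRuntime);
    auto inputNpu1 = npuGraph->cloneTensor(inputCpu1);
    auto inputNpu2 = npuGraph->cloneTensor(inputCpu2);
    auto npuOp =
        npuGraph->addOp<ConcatObj>(TensorVec{inputNpu1, inputNpu2}, nullptr, 2);
    // Allocate device buffers first, then fill the inputs from the generator.
    npuGraph->dataMalloc();
    inputNpu1->setData(generator);
    inputNpu2->setData(generator);
    npuRuntime->run(npuGraph);
    // Copy the result back to the CPU runtime for printing or comparison.
    auto outputNpu2Cpu = npuOp->getOutput()->clone(cpuRuntime);
    outputNpu2Cpu->printData();
}

} // namespace infini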

@@ -44,8 +44,7 @@ void testMatmul(const std::function<void(void *, size_t, DataType)> &generatorA,
    inputCpu2->setData(generatorB);
    cpuRuntime->run(cpuGraph);
    auto outputCpu = cpuOp->getOutput();
    outputCpu->print();
    outputNpu2Cpu->print();

    // Check
    EXPECT_TRUE(outputCpu->equalData(outputNpu2Cpu));
}
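
For clarity, the tail of the testMatmul helper shown here mixes debug output with the actual assertion; the same lines with added comments (the comments are editorial, not the file's):

cpuRuntime->run(cpuGraph);        // compute the CPU reference result
auto outputCpu = cpuOp->getOutput();
outputCpu->print();               // debug dump of the CPU reference
outputNpu2Cpu->print();           // debug dump of the NPU result copied to host
// The test passes or fails on this element-wise comparison alone.
EXPECT_TRUE(outputCpu->equalData(outputNpu2Cpu));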

@@ -27,6 +27,7 @@ void testPooling(const std::function<void(void *, size_t, DataType)> &generator,
    auto npuOp =
        npuGraph->addOp<T>(inputNpu, nullptr, 3, 3, 1, 1, 1, 1, 2, 2, 0);
    npuGraph->dataMalloc();
    inputNpu->setData(generator);
    npuRuntime->run(npuGraph);
    auto outputNpu = npuOp->getOutput();
    auto outputNpu2Cpu = outputNpu->clone(cpuRuntime);

@@ -36,7 +37,7 @@ void testPooling(const std::function<void(void *, size_t, DataType)> &generator,
}

TEST(cnnl_Pooling, run) {
    //testPooling<MaxPoolObj>(IncrementalGenerator(), Shape{1, 1, 5, 5});
    // testPooling<MaxPoolObj>(IncrementalGenerator(), Shape{1, 1, 5, 5});
    testPooling<AvgPoolObj>(IncrementalGenerator(), Shape{1, 1, 5, 5});
}
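
The positional integers in the pooling addOp call encode the window geometry. A hedged reading, assuming the parameter order kernel, dilation, padding, stride, ceil mode used by similar InfiniTensor pooling tests (the order is an assumption, not stated in this hunk):

// Sketch only: the parameter meanings below are assumptions from similar tests.
auto npuOp = npuGraph->addOp<AvgPoolObj>(inputNpu, nullptr,
                                         3, 3,  // kernel height, kernel width
                                         1, 1,  // dilation height, dilation width
                                         1, 1,  // padding height, padding width
                                         2, 2,  // stride height, stride width
                                         0);    // ceil mode off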

@@ -50,13 +50,13 @@ TEST(ascend_Unary, run) {
    testUnary<CosObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    testUnary<ACosObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    testUnary<ATanObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    //testUnary<CeilObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    //testUnary<FloorObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    //testUnary<ExpObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    // testUnary<CeilObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    // testUnary<FloorObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    // testUnary<ExpObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    testUnary<NegObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    //testUnary<ReciprocalObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    // testUnary<ReciprocalObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    testUnary<SqrtObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    //testUnary<RoundObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
    // testUnary<RoundObj>(IncrementalGenerator(), Shape{1, 2, 2, 3});
}

} // namespace infini
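
Each testUnary<...> call above instantiates one templated helper with a different operator class. A hedged sketch of such a helper, modelled on the concat flow earlier but with a single input and no extra operator arguments (name, signature, and header paths are assumptions):

// Sketch only: helper name, signature, and header paths are assumptions.
#include <functional>
#include "core/graph.h"
#include "core/runtime.h"

namespace infini {

template <class T>
void runUnaryOnNpu(Runtime npuRuntime, Runtime cpuRuntime, Tensor inputCpu,
                   const std::function<void(void *, size_t, DataType)> &generator) {
    Graph npuGraph = make_ref<GraphObj>(npuRuntime);
    auto inputNpu = npuGraph->cloneTensor(inputCpu);
    // Unary operators take a single input; the output tensor is inferred.
    auto npuOp = npuGraph->addOp<T>(inputNpu, nullptr);
    npuGraph->dataMalloc();
    inputNpu->setData(generator); // fill after allocation, as in the other tests
    npuRuntime->run(npuGraph);
    auto outputNpu2Cpu = npuOp->getOutput()->clone(cpuRuntime);
    outputNpu2Cpu->printData();
}

} // namespace infini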