forked from jiuyuan/InfiniTensor
add fp16 support to silu cuda op
parent 936797b960
commit 0f1c04d864
@@ -315,6 +315,8 @@ void unary_kernel(const Operator &_op) {
     } else if (op->getOpType() == OpType::Silu) {
         if (_op->getDType() == DataType::Float32) {
             silu_kernel<float>((float *)inputData, (float *)outputData, num);
+        } else if (_op->getDType() == DataType::Float16) {
+            silu_kernel<half>((half *)inputData, (half *)outputData, num);
         } else {
             IT_TODO_HALT();
         }
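For context, a minimal sketch of what the templated silu_kernel being dispatched to might look like. The kernel body is not part of this diff, so the launch configuration and the internal kernel name below are assumptions for illustration only. SiLU computes x * sigmoid(x); a common choice is to do the arithmetic in float even for half inputs, since expf on float is more accurate and the conversion cost is negligible:

// Hypothetical sketch, not the repository's actual kernel.
#include <cuda_fp16.h>

template <typename T>
__global__ void _silu_kernel(const T *input, T *output, size_t n) {
    size_t i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        // Promote to float for the math, convert back to T on store.
        float x = static_cast<float>(input[i]);
        output[i] = static_cast<T>(x / (1.0f + expf(-x)));
    }
}

template <typename T>
void silu_kernel(const T *input, T *output, size_t num) {
    constexpr int blockSize = 256;  // assumed launch config
    int gridSize = static_cast<int>((num + blockSize - 1) / blockSize);
    _silu_kernel<T><<<gridSize, blockSize>>>(input, output, num);
}

Instantiating the template with half (as the added Float16 branch does) works without a separate fp16 code path because the float promotion inside the kernel handles both precisions uniformly.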