Compare commits

...

3 Commits

Author SHA1 Message Date
Liyan Zheng e991b3261b Add: clone for operators 2022-11-19 20:26:41 +08:00
Liyan Zheng f133f00478 [Intermediate state] Add: Graph ctor for OpVec 2022-11-19 20:26:33 +08:00
Liyan Zheng e549f21867 Add: tensor fuid 2022-11-19 20:26:20 +08:00
26 changed files with 170 additions and 5 deletions

View File

@@ -14,9 +14,11 @@ class GraphObj : public Object {
public:
GraphObj(Runtime runtime) : runtime(runtime){};
GraphObj(Runtime runtime, OpVec ops_in);
string toString() const override;
Tensor addTensor(Shape dim, DataType dtype = DataType::Float32);
Tensor addTensor(const Tensor &tensor);
Tensor cloneTensor(const Tensor &tensor) {
auto ret = addTensor(tensor->getDims(), tensor->getDType());
ret->dataMalloc();
@@ -45,8 +47,14 @@ class GraphObj : public Object {
}
const TensorVec &getTensors() const { return tensors; }
- const TensorVec &getInputs() const { return inputs; }
- const TensorVec &getOutputs() const { return outputs; }
+ const TensorVec &getInputs() const {
+     IT_TODO_HALT();
+     return inputs;
+ }
+ const TensorVec &getOutputs() const {
+     IT_TODO_HALT();
+     return outputs;
+ }
const OpVec &getOperators() const { return ops; }
OpVec getComputeOps() const;
// TensorVec &getInputs();
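
Note: the new addTensor(const Tensor &) overload registers an already-constructed tensor in the graph, while cloneTensor builds a fresh tensor of the same shape and dtype and allocates its data. A minimal usage sketch, assuming a runtime handle as in the tests at the end of this diff:

    // Register a metadata-only clone of i0: it keeps i0's FUID but gets a
    // fresh GUID and no data blob (see TensorObj::clone() further down).
    Graph g = make_ref<GraphObj>(runtime);
    Tensor i0 = g->addTensor({1, 2, 3}, DataType::UInt32);
    Tensor i1 = g->addTensor(i0->clone());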

View File

@@ -27,6 +27,27 @@ class Guid {
operator GuidBaseType() const { return guid; }
};
class Fuid {
private:
GuidBaseType fuid;
private:
GuidBaseType generateFuid() {
static GuidBaseType guidCnt = 0;
return ++guidCnt;
}
public:
Fuid() { fuid = generateFuid(); }
Fuid(const Guid &rhs) { fuid = generateFuid(); }
Fuid &operator=(const Guid &rhs) {
fuid = generateFuid();
return *this;
}
operator GuidBaseType() const { return fuid; }
};
class Object {
protected:
Guid guid;
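
Note: Fuid(const Guid &) and operator=(const Guid &) are converting operations rather than the copy constructor and copy assignment; the compiler-generated Fuid(const Fuid &) still copies the stored fuid, which is what lets a cloned tensor keep its FUID (exercised by test_tensor_id below). A minimal sketch of the resulting semantics:

    Fuid f1;              // fresh id from generateFuid()
    Fuid f2(f1);          // implicit copy constructor: f2 shares f1's id
    GuidBaseType v = f2;  // implicit conversion; v equals GuidBaseType(f1)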

View File

@@ -197,6 +197,9 @@ class OperatorObj : public Object {
virtual int numInputs() const = 0;
virtual int numOutputs() const = 0;
Operator cloneAndResetConnections(const TensorVec &newInputs,
const TensorVec &newOutputs);
protected:
optional<vector<Shape>> inferShape() const;
vector<DataType> inferDataType() const;
@@ -213,8 +216,17 @@
* and output shapes.
*/
virtual vector<int> getWorkloadVector() const { IT_TODO_HALT(); }
virtual Operator clone() const {
IT_TODO_HALT();
return nullptr;
}
};
#define OP_CLONE(OpObj) \
virtual Operator clone() const override { \
return infini::make_ref<OpObj>(*this); \
}
} // namespace infini
namespace std {
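
Note: OP_CLONE spares each concrete operator from hand-writing the clone() override; the clone copy-constructs the derived object through make_ref, so attributes are copied while the tensor connections still point at the originals until cloneAndResetConnections rewires them. For example, OP_CLONE(MatmulObj) expands to:

    virtual Operator clone() const override {
        return infini::make_ref<MatmulObj>(*this);
    }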

View File

@@ -10,6 +10,8 @@ using Shape = vector<ShapeElem>;
class TensorObj : public TensorBaseObj {
private:
Shape shape;
Fuid fuid; // Tensors cloned from a common tensor share the same FUID;
// tensors built by the ordinary constructors get a fresh FUID.
public:
TensorObj(const Shape &shape, DataType dtype, Runtime runtime);
@@ -25,6 +27,7 @@ class TensorObj : public TensorBaseObj {
using TensorBaseObj::getData;
VType getData(const Shape &pos) const;
void dataMalloc();
GuidBaseType getFuid() const { return fuid; }
void load(std::string file_path);
void save(std::string file_path);
@@ -51,7 +54,15 @@ class TensorObj : public TensorBaseObj {
}
generator(data->getPtr<void *>(), size(), dtype);
}
- Tensor clone(Runtime runtime) {
+ Tensor clone() const {
+     auto obj = make_ref<TensorObj>(*this);
+     obj->freeData();
+     obj->inputOf.clear();
+     obj->outputOf.reset();
+     return obj;
+ }
+ Tensor clone(Runtime runtime) const {
+     // TODO: use copy constructor
auto obj = make_ref<TensorObj>(shape, dtype, runtime);
obj->dataMalloc();
obj->copyData(this);
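
Note: the two overloads clone at different depths. clone() copy-constructs the tensor (keeping its FUID through Fuid's implicit copy), then frees the data blob and clears the graph connections; clone(Runtime) allocates a fresh buffer on the target runtime and deep-copies the data, and, until the TODO above is resolved, it goes through the shape/dtype constructor and therefore gets a new FUID. A short sketch (otherRuntime is illustrative):

    Tensor shallow = t->clone();          // same FUID, no data, no connections
    Tensor deep = t->clone(otherRuntime); // new buffer on otherRuntime, data copied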

View File

@@ -33,6 +33,7 @@ class TensorBaseObj : public Object {
data = blob;
}
Blob getDataBlob() const { return data; }
void freeData() { data = nullptr; }
template <typename T> T getRawDataPtr() const {
static_assert(std::is_pointer_v<T>,
"Raw data pointer has a type of pointer");

View File

@@ -26,6 +26,7 @@ class G2BMMObj : public OperatorObj {
G2BMMObj(GraphObj *graph, Tensor A, Tensor B, Tensor C, const int width,
const int dilation, Tensor bias = nullptr,
ActType act = ActType::None);
OP_CLONE(G2BMMObj);
std::string toString() const override;
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -24,6 +24,7 @@ class GBMMObj : public OperatorObj {
*/
GBMMObj(GraphObj *graph, Tensor A, Tensor B, Tensor C, const int dilation,
Tensor bias = nullptr, ActType act = ActType::None);
OP_CLONE(GBMMObj);
std::string toString() const override;
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -10,6 +10,7 @@ class BatchNormObj : public OperatorObj {
BatchNormObj(GraphObj *graph, Tensor input, Tensor output, Tensor mean,
Tensor var, Tensor scale, Tensor bias, float momentum = 0.9,
float eps = 1e-5, bool training = false);
OP_CLONE(BatchNormObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
std::string toString() const override;

View File

@@ -77,6 +77,7 @@ class ConvObj : public ConvBaseObj {
PaddingMode mode = PaddingMode::Same, int sh = 1, int sw = 1,
int dh = 1, int dw = 1, Tensor bias = nullptr,
ActType act = ActType::None);
OP_CLONE(ConvObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
ActType getAct() const { return act; }
@@ -104,6 +105,7 @@ class ConvTransposed2dObj : public ConvBaseObj {
int sh = 1, int sw = 1, int dh = 1, int dw = 1,
int oph = 0, int opw = 0, int group = 1,
Tensor bias = nullptr, ActType act = ActType::None);
OP_CLONE(ConvTransposed2dObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
ActType getAct() const { return act; }

View File

@@ -23,6 +23,7 @@ class ElementWiseObj : public OperatorObj {
prefix##Obj(GraphObj *graph, Tensor input0, Tensor input1, \
Tensor output) \
: ElementWiseObj(type, graph, input0, input1, output) {} \
OP_CLONE(prefix##Obj); \
};
DEFINE_ELEMENT_WISE_OBJ(Add, OpType::Add)

View File

@@ -8,6 +8,7 @@ class ExtendObj : public OperatorObj {
public:
ExtendObj(GraphObj *graph, Tensor input, Tensor output, int dim,
int num = 1);
OP_CLONE(ExtendObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
std::string toString() const override;

View File

@@ -9,6 +9,7 @@ class GatherObj : public OperatorObj {
public:
GatherObj(GraphObj *graph, Tensor input, Tensor index, Tensor output,
int axis);
OP_CLONE(GatherObj);
std::string toString() const override;
int numInputs() const override { return 2; }
int numOutputs() const override { return 1; }

View File

@@ -29,6 +29,7 @@ class MatmulObj : public OperatorObj {
MatmulObj(GraphObj *graph, Tensor A, Tensor B, Tensor C,
bool transA = false, bool transB = false, Tensor bias = nullptr,
ActType act = ActType::None);
OP_CLONE(MatmulObj);
std::string toString() const override;
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -17,6 +17,7 @@ class MemBoundObj : public OperatorObj {
const TensorVec &output,
const std::vector<nnet::Tensor> &nnetInputs, nnet::Expr expr,
double exec_time, std::string hint = {});
OP_CLONE(MemBoundObj);
std::string toString() const override;
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -10,6 +10,7 @@ class PadObj : public OperatorObj {
// Pad along the specified axes; if axis is empty, pad along all axes.
PadObj(GraphObj *graph, Tensor input, Tensor output,
const vector<int> &pads, const optional<const vector<int>> &axis);
OP_CLONE(PadObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
std::string toString() const override;

View File

@@ -14,6 +14,7 @@ class PoolingObj : public OperatorObj {
public:
PoolingObj(GraphObj *graph, OpType optype, Tensor input, Tensor output,
int kh, int kw, int dh, int dw, int ph, int pw, int sh, int sw);
OP_CLONE(PoolingObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
std::string toString() const override;

View File

@@ -10,6 +10,7 @@ class ReduceMeanObj : public OperatorObj {
ReduceMeanObj(GraphObj *graph, Tensor input, Tensor output,
const optional<const vector<int>> &axis,
bool keepDims = true);
OP_CLONE(ReduceMeanObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
std::string toString() const override;

View File

@@ -8,6 +8,7 @@ class ReshapeObj : public OperatorObj {
public:
ReshapeObj(GraphObj *graph, Tensor input, Tensor output, const Shape &dims);
OP_CLONE(ReshapeObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -51,6 +51,7 @@ class ResizeObj : public OperatorObj {
GraphObj *graph, Tensor input, Tensor output,
const std::optional<vector<int>> &axes, Tensor scales, ECoeffMode mode,
ECoordinateTransMode coordTransMode = ECoordinateTransMode::halfPixel);
OP_CLONE(ResizeObj);
vector<DataType> inferDataType(const TensorVec &inputs) const override;
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -10,6 +10,7 @@ class SliceObj : public OperatorObj {
const vector<int> &starts, const vector<int> &ends,
const optional<vector<int>> &axis,
const optional<vector<int>> &steps);
OP_CLONE(SliceObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;
std::string toString() const override;

View File

@@ -10,6 +10,7 @@ class SplitObj : public OperatorObj {
int dim, int num);
SplitObj(GraphObj *graph, Tensor input, std::optional<TensorVec> outputs,
int dim, const vector<int> &ratio);
OP_CLONE(SplitObj);
optional<vector<Shape>> inferShape(const TensorVec &inputs) const override;

View File

@@ -21,6 +21,7 @@ class UnaryObj : public OperatorObj {
public: \
prefix##Obj(GraphObj *graph, Tensor input, Tensor output) \
: UnaryObj(type, graph, input, output) {} \
OP_CLONE(prefix##Obj); \
};
DEFINE_UNARY_OBJ(Relu, OpType::Relu)

View File

@@ -1,7 +1,32 @@
#include "core/graph.h"
#include <queue>
namespace infini {
GraphObj::GraphObj(Runtime runtime, OpVec ops_in) : runtime(runtime) {
map<GuidBaseType, Tensor> tensorPool;
// Clone tensors
for (const auto &op : ops_in) {
for (const auto &t : op->getInputs())
if (tensorPool.find(t->getFuid()) == tensorPool.end())
tensorPool[t->getFuid()] = t->clone();
for (const auto &t : op->getOutputs())
if (tensorPool.find(t->getFuid()) == tensorPool.end())
tensorPool[t->getFuid()] = t->clone();
}
for (const auto &[_, t] : tensorPool)
addTensor(t);
// Clone operators and add connections
for (const auto &op : ops_in) {
TensorVec inputs, outputs;
for (const auto &t : op->getInputs())
inputs.emplace_back(tensorPool.at(t->getFuid()));
for (const auto &t : op->getOutputs())
outputs.emplace_back(tensorPool.at(t->getFuid()));
addOperatorAndConnect(op->cloneAndResetConnections(inputs, outputs));
}
}
void GraphObj::addOperatorAndConnect(const Operator &op) {
ops.push_back(op);
for (auto &input : op->getInputs()) {
@@ -53,6 +78,11 @@ Tensor GraphObj::addTensor(Shape dim, DataType dtype) {
return tensor;
}
Tensor GraphObj::addTensor(const Tensor &tensor) {
tensors.emplace_back(tensor);
return tensor;
}
OpVec GraphObj::getComputeOps() const {
OpVec opList;
for (auto op : ops)
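
Note: the new OpVec constructor rebuilds the graph in two passes: it first clones every distinct tensor exactly once, keyed by FUID in tensorPool (so a tensor and its clones collapse into a single tensor), then clones each operator and rewires it onto the pooled tensors via cloneAndResetConnections. A minimal sketch, mirroring test_OpVec_ctor below:

    // Rebuild a graph from another graph's operator list; tensors that share
    // a FUID (e.g. o0 and o1 in the test) are merged into one tensor in g2.
    Graph g2 = make_ref<GraphObj>(runtime, g->getOperators());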

View File

@@ -93,4 +93,15 @@ vector<DataType> OperatorObj::inferDataType() const {
return inferDataType(inputs);
}
Operator OperatorObj::cloneAndResetConnections(const TensorVec &newInputs,
const TensorVec &newOutputs) {
Operator op = clone();
op->inputs = newInputs;
op->outputs = newOutputs;
op->predecessors.clear();
op->successors.clear();
IT_ASSERT(op->checkValid(nullptr));
return op;
}
} // namespace infini
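
Note: cloneAndResetConnections yields a detached copy: the clone keeps the operator's attributes, its inputs and outputs are replaced by the caller-supplied tensors, and the predecessor/successor lists are cleared. The checkValid(nullptr) assertion then re-runs shape and dtype inference against the new tensors; passing a null graph presumably means no output tensors are created as a side effect. A hypothetical call site:

    // Rewire a cloned op onto replacement tensors, as the OpVec ctor above does.
    Operator op2 = op->cloneAndResetConnections({newInA, newInB}, {newOut});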

View File

@@ -14,8 +14,9 @@ VType TensorObj::getData(const Shape &pos) const {
}
string TensorObj::toString() const {
- string ret = "Tensor " + std::to_string(guid) + ", shape " +
-              vecToString(shape) + ", dtype " + dtype.toString();
+ string ret = "Tensor " + std::to_string(guid) + ", Fuid " +
+              std::to_string(fuid) + ", shape " + vecToString(shape) +
+              ", dtype " + dtype.toString();
vector<GuidBaseType> inputOfGuid;
for (const auto &op : inputOf)
inputOfGuid.emplace_back(op.lock()->getGuid());

View File

@@ -2,6 +2,7 @@
#include "core/graph.h"
#include "core/runtime.h"
#include "operators/matmul.h"
#include "operators/unary.h"
#include "test.h"
namespace infini {
@@ -57,4 +58,55 @@ TEST(Graph, perf_engine) {
EXPECT_TRUE(matmul->getOutput()->equalData(ans));
}
TEST(Graph, test_tensor_id) {
Runtime runtime = CpuRuntimeObj::getInstance();
Graph g = make_ref<GraphObj>(runtime);
Tensor i0 = g->addTensor({1, 2, 3}, DataType::UInt32);
Tensor w0 = g->addTensor({1, 3, 4}, DataType::UInt32);
Tensor o0 = g->addTensor({1, 2, 4}, DataType::UInt32);
g->dataMalloc();
i0->copyData(vector<uint32_t>{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
w0->copyData(vector<uint32_t>{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
auto i1 = g->addTensor(i0->clone());
auto matmul = g->addOpWithOutputs<MatmulObj>(i0, w0, o0);
g->print();
EXPECT_NE(i0->getGuid(), i1->getGuid());
EXPECT_EQ(i0->getFuid(), i1->getFuid());
EXPECT_NE(i0->getDataBlob(), nullptr);
EXPECT_EQ(i1->getDataBlob(), nullptr);
}
TEST(Graph, test_OpVec_ctor) {
Runtime runtime = CpuRuntimeObj::getInstance();
Graph g = make_ref<GraphObj>(runtime);
Tensor i0 = g->addTensor({1, 2, 3}, DataType::UInt32);
Tensor w0 = g->addTensor({1, 3, 4}, DataType::UInt32);
Tensor o0 = g->addTensor({1, 2, 4}, DataType::UInt32);
g->dataMalloc();
i0->copyData(vector<uint32_t>{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
w0->copyData(vector<uint32_t>{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
auto o1 = g->addTensor(o0->clone());
auto matmul = g->addOpWithOutputs<MatmulObj>(i0, w0, o0);
g->addOp<ReluObj>(o1, nullptr);
g->print();
puts("=========");
OpVec ops = g->getOperators();
Graph g2 = make_ref<GraphObj>(runtime, ops);
g2->print();
// Check that the two tensors sharing the same FUID (o0, o1) are merged into one tensor in g2
EXPECT_EQ(g2->getTensors().size(), 4u);
EXPECT_EQ(g2->getOperators().size(), 2u);
map<pair<int, int>, int> inputOutput2Cnt = {
{{1, 0}, 2}, {{1, 1}, 1}, {{0, 1}, 1}};
for (auto t : g2->getTensors()) {
pair<int, int> key = {t->getInputOf().size(),
t->getOutputOf() != nullptr};
EXPECT_GE(inputOutput2Cnt[key], 0);
inputOutput2Cnt[key]--;
}
for (auto [u, v] : inputOutput2Cnt) {
EXPECT_EQ(v, 0);
}
}
} // namespace infini