forked from jiuyuan/InfiniTensor
Compare commits
merge into: p01753428:master
p01753428:master
p01753428:add_leaky_relu
p01753428:fix_test_off
p01753428:cuda-attention
p01753428:ascend
p01753428:dev-leakyrelu
p01753428:instance_norm
p01753428:cuda-transpose
p01753428:kvcache_backup
p01753428:kvcache_attention_fp16
p01753428:kunlun_temp
p01753428:dist/graph
p01753428:dist_bench
p01753428:bang-softmax
p01753428:dropout
p01753428:update_pybind11
p01753428:support_fp16
p01753428:add_paddle_model
p01753428:point2point
p01753428:NNET_231111_from_master
p01753428:NNET_231111
p01753428:allocator_memPool
p01753428:test_codegen
p01753428:change_path
p01753428:xpu_allreduce
p01753428:dev-dynamic-graph
p01753428:dev-dynamic-graph-allocator
p01753428:dump/init
p01753428:gpt
p01753428:nnet_e2e_for_merge
p01753428:gencode
p01753428:optimization-pass
p01753428:dev-memory
p01753428:conv_half
p01753428:benchmark_conv
p01753428:benchmark_softmax
p01753428:benchmark
p01753428:dcj/for_multiple_datatype
p01753428:Conv_NHWC
p01753428:NNET_bias
p01753428:NNET_bias_0630
p01753428:constroy/doc_on_ares
p01753428:pure_engine
p01753428:v0630
p01753428:NNET_e2e
p01753428:update_doc
p01753428:model_test
p01753428:TC_revision
p01753428:NNET_gcn
p01753428:NNET_op_test
p01753428:NNET_OpSearch
p01753428:NNET_gcn_fuse
p01753428:fsrcnn-conv-bias-act-fuse
p01753428:NNET_e2e_for_merge
p01753428:NNET_eliminateOP
p01753428:NNET_anyOp
p01753428:NNET_transpose
p01753428:cpu_backend2
p01753428:NNET_e2e_fix
p01753428:NNET_GAN
p01753428:test_onnx
p01753428:activation
p01753428:ddp
p01753428:search_engine
p01753428:power-fusion
p01753428:op_timer
p01753428:graph-onnx
p01753428:graphFactory
p01753428:case-fsrcnn
p01753428:testAccuracy
p01753428:train_wanghailu_1010
p01753428:broadcast_wanghailu_0916
p01753428:test-models
jiuyuan:test-models
pull from: p01753428:master
These branches are equal. There is no need to create a pull request.