9 changes: 9 additions & 0 deletions csrc/build_aclnn.sh
@@ -0,0 +1,9 @@
#!/bin/bash

# build custom ops
cd custom_ops/
bash build.sh custom_ops -cascend910_93

# install custom ops
./build_out/custom_ops/run/CANN_ascend910_93_ubuntu_aarch64.run --install-path=/usr/local/Ascend/ascend-toolkit/latest/opp/
source /usr/local/Ascend/ascend-toolkit/latest/opp/vendors/customize/bin/set_env.bash
Contributor (critical)

The source command on this line will only affect the environment of the script's execution shell. When this script is executed, it runs in a sub-shell, and any environment variables set within it are lost when the script finishes. If the intention is to modify the environment of the calling shell, this script should be sourced (e.g., source csrc/build_aclnn.sh) rather than executed. The #!/bin/bash shebang is misleading if the script is meant to be sourced. This can lead to silent failures in the environment setup.
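A minimal guard along these lines, illustrative only and not part of the PR, would turn that silent failure into an explicit error by refusing to run unless the file is sourced; it relies on the standard Bash comparison of BASH_SOURCE[0] against $0:

# Hypothetical guard for the top of csrc/build_aclnn.sh: when the file is executed,
# BASH_SOURCE[0] equals $0, so abort with a hint; when the file is sourced, the check
# passes and the later `source .../set_env.bash` persists in the caller's shell.
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
    echo "Run 'source csrc/build_aclnn.sh' instead of executing it" >&2
    exit 1
fi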

73 changes: 73 additions & 0 deletions csrc/custom_ops/build.sh
@@ -0,0 +1,73 @@
#!/bin/bash
SCRIPT_PATH=$(cd "$(dirname "$0")" && pwd)/$(basename "$0")
export ROOT_PATH=$(dirname "$SCRIPT_PATH")
echo ROOT_PATH: $ROOT_PATH
if [ ! -d "./build_out" ]; then
mkdir build_out
fi
export SRC_PATH="${ROOT_PATH}"
export BUILD_OUT_PATH="${ROOT_PATH}/build_out"
export SCRIPTS_PATH="${ROOT_PATH}/scripts"

export BUILD_TYPE="Release"
MODULE_NAME="all"
MODULE_BUILD_ARG=""
IS_MODULE_EXIST=0

function PrintHelp() {
echo "
./build.sh [module name] <opt>...
If there are no parameters, all modules are compiled in default mode
module list: [custom_ops]

opt:
-d: Enable debug
"
}

function ProcessArg() {
while getopts "dh" opt; do
case $opt in
d)
export BUILD_TYPE="Debug"
;;
h)
PrintHelp
exit 0
;;
esac
done
shift $(($OPTIND-1))
}

function IsModuleName() {
if [ -z "$1" ]; then
return 1
fi

if [[ $1 == -* ]]; then
return 1
else
return 0
fi
}

if IsModuleName $@; then
MODULE_NAME=$1
shift
else
ProcessArg $@
fi

if [[ "$MODULE_NAME" == "all" || "$MODULE_NAME" == "custom_ops" ]]; then
IS_MODULE_EXIST=1
echo "./scripts/build.sh $@"
./scripts/build.sh $@
if [ $? -ne 0 ]; then
exit 1
fi
fi

if [ $IS_MODULE_EXIST -eq 0 ]; then
echo "module not exist"
fi
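
For orientation, a few invocation forms this wrapper supports, inferred from its PrintHelp text and from how csrc/build_aclnn.sh calls it; note that when a module name is given, the remaining arguments are forwarded verbatim to ./scripts/build.sh rather than parsed here:

# Build all modules with the default Release configuration
bash build.sh

# Build all modules with -d parsed by this wrapper (exports BUILD_TYPE=Debug)
bash build.sh -d

# Build only the custom_ops module; trailing arguments such as -cascend910_93
# (used by csrc/build_aclnn.sh) are passed straight through to ./scripts/build.sh
bash build.sh custom_ops -cascend910_93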
40 changes: 40 additions & 0 deletions csrc/custom_ops/kernels/AddCustom.json
@@ -0,0 +1,40 @@
[
{
"op": "AddCustom",
"language": "cpp",
"input_desc": [
{
"name": "x",
"param_type": "required",
"format": [
"ND"
],
"type": [
"float16"
]
},
{
"name": "y",
"param_type": "required",
"format": [
"ND"
],
"type": [
"float16"
]
}
],
"output_desc": [
{
"name": "z",
"param_type": "required",
"format": [
"ND"
],
"type": [
"float16"
]
}
]
}
]
@@ -0,0 +1,82 @@
/*
* Copyright (c) Huawei Technologies Co., Ltd. 2025-2025. All rights reserved.
* Description: DispatchGmmCombineDecode operator definition file
* Author: WANG Qiankun
* Create: 2025-07-19
* Note:
* History: 2025-07-19 create DispatchGmmCombineDecode operator definition file
*/
#include "register/op_def_registry.h"

namespace ops {
class DispatchGmmCombineDecode : public OpDef
{
public:
explicit DispatchGmmCombineDecode(const char *name) : OpDef(name)
{
this->Input("x")
.ParamType(REQUIRED)
.DataType({ge::DT_BF16, ge::DT_FLOAT16})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Input("expert_ids")
.ParamType(REQUIRED)
.DataType({ge::DT_INT32, ge::DT_INT32})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Input("gmm1_permuted_weight")
.ParamType(REQUIRED)
.DataType({ge::DT_INT8, ge::DT_INT8})
.Format({ge::FORMAT_FRACTAL_NZ, ge::FORMAT_FRACTAL_NZ})
.UnknownShapeFormat({ge::FORMAT_FRACTAL_NZ, ge::FORMAT_FRACTAL_NZ});
this->Input("gmm1_permuted_weight_scale")
.ParamType(REQUIRED)
.DataType({ge::DT_FLOAT, ge::DT_FLOAT})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Input("gmm2_weight")
.ParamType(REQUIRED)
.DataType({ge::DT_INT8, ge::DT_INT8})
.Format({ge::FORMAT_FRACTAL_NZ, ge::FORMAT_FRACTAL_NZ})
.UnknownShapeFormat({ge::FORMAT_FRACTAL_NZ, ge::FORMAT_FRACTAL_NZ});
this->Input("gmm2_weight_scale")
.ParamType(REQUIRED)
.DataType({ge::DT_FLOAT, ge::DT_FLOAT})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Input("expert_smooth_scales")
.ParamType(OPTIONAL)
.DataType({ge::DT_FLOAT, ge::DT_FLOAT})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Input("expert_scales")
.ParamType(OPTIONAL)
.DataType({ge::DT_FLOAT, ge::DT_FLOAT})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Output("output")
.ParamType(REQUIRED)
.DataType({ge::DT_BF16, ge::DT_FLOAT16})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Output("ep_recv_count")
.ParamType(REQUIRED)
.DataType({ge::DT_INT32, ge::DT_INT32})
.Format({ge::FORMAT_ND, ge::FORMAT_ND})
.UnknownShapeFormat({ge::FORMAT_ND, ge::FORMAT_ND});
this->Attr("group_ep").String();
this->Attr("ep_rank_size").Int();
this->Attr("ep_rank_id").Int();
this->Attr("moe_expert_num").Int();
this->Attr("share_expert_num").Int();
this->Attr("share_expert_rank_num").Int();
this->Attr("quant_mode").Int();
this->Attr("global_bs").Int();

this->MC2().HcclGroup({"group_ep"});
this->AICore().AddConfig("ascend910_93");
}
};

OP_ADD(DispatchGmmCombineDecode);
} // namespace ops
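
Reading the registration above under the usual OpDef convention (stated here as an assumption, not something spelled out in the PR), each index position across the DataType/Format lists describes one supported input/output combination: the op advertises two variants, bf16 x with bf16 output and fp16 x with fp16 output, while in both variants expert_ids and ep_recv_count are int32, the weights are int8 in FRACTAL_NZ, and the scales are float32 in ND.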
@@ -0,0 +1,93 @@
/*
* Copyright (c) Huawei Technologies Co., Ltd. 2025-2025. All rights reserved.
* Description: DispatchGmmCombineDecode infer shape and data type implementation file
* Author: Guo Ren
* Create: 2025-07-22
* Note:
* History: 2025-07-13 create DispatchGmmCombineDecode infer function file
*/

#include <cstdint>
#include "error_log.h"
#include "graph/utils/type_utils.h"
#include "register/op_def_registry.h"

namespace ge {
constexpr uint32_t EXPAND_X_INDEX = 0;
constexpr uint32_t EXPERT_IDS_INDEX = 1;
constexpr uint32_t OUTPUT_X_INDEX = 0;
constexpr uint32_t OUTPUT_REC_COUNT_INDEX = 1;

constexpr uint32_t ATTR_GROUP_EP_INDEX = 0;
constexpr uint32_t ATTR_EP_RANK_SIZE_INDEX = 1;
constexpr uint32_t ATTR_EP_RANK_ID_INDEX = 2;
constexpr uint32_t ATTR_MOE_EXPERT_NUM_INDEX = 3;
constexpr uint32_t ATTR_SHARE_EXPERT_NUM_INDEX = 4;
constexpr uint32_t ATTR_SHARE_EXPERT_RANK_NUM_INDEX = 5;
constexpr uint32_t ATTR_QUANT_MODE_INDEX = 6;
constexpr uint32_t ATTR_GLOBAL_BS_INDEX = 7;

static ge::graphStatus InferShape(gert::InferShapeContext *context)
{
const char *nodeName = context->GetNodeName();
// infer output shape
const gert::Shape *expandXShape = context->GetInputShape(EXPAND_X_INDEX);
const gert::Shape *expertIdsShape = context->GetInputShape(EXPERT_IDS_INDEX);
gert::Shape *expandXOutShape = context->GetOutputShape(OUTPUT_X_INDEX);
gert::Shape *recvCountOutShape = context->GetOutputShape(OUTPUT_REC_COUNT_INDEX);
if (expandXShape == nullptr || expertIdsShape == nullptr || expandXOutShape == nullptr ||
recvCountOutShape == nullptr) {
return GRAPH_FAILED;
}
if (expandXShape->GetDimNum() < 2 || expertIdsShape->GetDimNum() < 1) {
return GRAPH_FAILED;
}

int bs = expertIdsShape->GetDim(0);
int h = expandXShape->GetDim(1);

expandXOutShape->SetDimNum(expandXShape->GetDimNum());
expandXOutShape->SetDim(0, bs);
expandXOutShape->SetDim(1, h);

// infer recvCount shape
auto attrs = context->GetAttrs();
OP_TILING_CHECK(attrs == nullptr, OP_LOGE(nodeName, "attrs is nullptr."), return ge::GRAPH_FAILED);

auto epRankSizePtr = attrs->GetAttrPointer<int64_t>(ATTR_EP_RANK_SIZE_INDEX);
auto epRankIdPtr = attrs->GetAttrPointer<int64_t>(ATTR_EP_RANK_ID_INDEX);
auto moeExpertNumPtr = attrs->GetAttrPointer<int64_t>(ATTR_MOE_EXPERT_NUM_INDEX);
auto sharedExpertRankNumPtr = attrs->GetAttrPointer<int64_t>(ATTR_SHARE_EXPERT_RANK_NUM_INDEX);

OP_TILING_CHECK(epRankIdPtr == nullptr, OP_LOGE(nodeName, "epRankIdPtr is nullptr."), return ge::GRAPH_FAILED);
OP_TILING_CHECK(moeExpertNumPtr == nullptr, OP_LOGE(nodeName, "moeExpertNumPtr is nullptr."),
return ge::GRAPH_FAILED);
OP_TILING_CHECK(epRankSizePtr == nullptr, OP_LOGE(nodeName, "epRankSizePtr is nullptr."), return ge::GRAPH_FAILED);
OP_TILING_CHECK(sharedExpertRankNumPtr == nullptr, OP_LOGE(nodeName, "sharedExpertRankNumPtr is nullptr."),
return ge::GRAPH_FAILED);
uint32_t epRankSize = static_cast<uint32_t>(*epRankSizePtr);
uint32_t moeExpertNum = static_cast<uint32_t>(*moeExpertNumPtr);
uint32_t epRankId = static_cast<uint32_t>(*epRankIdPtr);
uint32_t sharedExpertRankNum = static_cast<uint32_t>(*sharedExpertRankNumPtr);

recvCountOutShape->SetDimNum(1);
bool isShareExpert = (epRankId < sharedExpertRankNum);
if (isShareExpert) {
recvCountOutShape->SetDim(0, epRankSize);
} else {
recvCountOutShape->SetDim(0, epRankSize * (moeExpertNum / (epRankSize - sharedExpertRankNum)));
}

return GRAPH_SUCCESS;
}

static ge::graphStatus InferDataType(gert::InferDataTypeContext *context)
{
const auto expandXDataType = context->GetInputDataType(EXPAND_X_INDEX);
context->SetOutputDataType(OUTPUT_X_INDEX, expandXDataType);
context->SetOutputDataType(OUTPUT_REC_COUNT_INDEX, ge::DT_INT32);
return ge::GRAPH_SUCCESS;
}

IMPL_OP(DispatchGmmCombineDecode).InferShape(InferShape).InferDataType(InferDataType);
} // namespace ge
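
To make the ep_recv_count branch concrete with hypothetical numbers: a shared-expert rank (ep_rank_id < share_expert_rank_num) gets a vector of length ep_rank_size, one slot per peer rank. For a MoE-expert rank with, say, ep_rank_size = 8, share_expert_rank_num = 2 and moe_expert_num = 12, the 8 - 2 = 6 MoE ranks host 12 / 6 = 2 experts each, so the output has 8 * 2 = 16 entries, presumably one counter per (source rank, local expert) pair. The divisor ep_rank_size - share_expert_rank_num comes straight from the code above; the interpretation of the individual entries is an inference, not stated in the PR.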