Coverage for hyper_parallel / core / shard / ops / parallel_ops_register.py: 100%
5 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-03-01 07:33 +0800
« prev ^ index » next coverage.py v7.13.1, created at 2026-03-01 07:33 +0800
1# Copyright 2025 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15"""
16Parallel layout decorator
17"""
18_DISTRIBUTED_OPS = {}
20def register_distributed_op(op_name, op_class):
21 """
22 Register a distributed operator implementation.
24 Args:
25 op_name (str): Name of the operator
26 op_class (class): Distributed operator implementation class
27 """
28 _DISTRIBUTED_OPS[op_name] = op_class
def get_distributed_op(op_name):
    """
    Look up the distributed operator implementation registered under a name.

    Args:
        op_name (str): Name of the operator.

    Returns:
        object: The registered implementation class, or ``None`` if no
        implementation was registered under ``op_name``.
    """
    # dict.get already defaults to None for missing keys.
    return _DISTRIBUTED_OPS.get(op_name)