# Copyright 2025 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
15""" 

16Element-wise distributed operator implementation. 

17""" 

18 

19from .parallel_ops import DistributedOp 

20 

21 

22class TupleElementWiseDistributedOp(DistributedOp): 

23 """ 

24 Distributed implementation for tuple element-wise operators. 

25 

26 Inherits from DistributedOp and provides element-wise specific implementations. 

27 """ 

    def infer_layout(self, layouts, extra_args=None):
        """
        Infer output layouts for element-wise operations.

        For element-wise operations, all inputs should have the same layout,
        and the output will have the same layout.

        Args:
            layouts: Layouts of the input tensors.
            extra_args: Extra arguments, unused here. Default: None.

        Returns:
            tuple: Layouts for the output tensors, identical to the input layouts.

        Raises:
            ValueError: If the input layouts are not compatible.
        """
        if not layouts:
            return None

        # All inputs of an element-wise operator must share the same layout;
        # reject mismatched layouts instead of silently propagating them.
        if any(layout != layouts[0] for layout in layouts[1:]):
            raise ValueError(
                f"Element-wise operator expects identical input layouts, got {layouts}."
            )
        return layouts
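

# Usage sketch (illustrative only, not part of the library API): plain tuples
# stand in for the real layout objects, and `object.__new__` sidesteps the
# `DistributedOp` constructor, whose signature is not shown in this module.
#
#     op = object.__new__(TupleElementWiseDistributedOp)
#     same = (("dp", "mp"), ("dp", "mp"))
#     op.infer_layout(same)                # -> (("dp", "mp"), ("dp", "mp"))
#     op.infer_layout((("dp",), ("mp",)))  # raises ValueError (mismatch)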