Commit 087777e8 authored by saad.aswad's avatar saad.aswad

[P2] Implement manual Reduce logic

parent 7a70f960
......@@ -2,7 +2,7 @@
from mpi4py import MPI
import numpy as np
def _combine(op, a, b):
    """Apply a predefined MPI reduction op to two numpy arrays elementwise.

    mpi4py's predefined Op objects are not Python callables on arrays, so we
    translate the common ones to their numpy equivalents explicitly.

    Raises
    ------
    ValueError
        If `op` is not one of MPI.SUM, MPI.PROD, MPI.MAX, MPI.MIN.
    """
    if op == MPI.SUM:
        return a + b
    if op == MPI.PROD:
        return a * b
    if op == MPI.MAX:
        return np.maximum(a, b)
    if op == MPI.MIN:
        return np.minimum(a, b)
    raise ValueError(f"unsupported reduction op: {op}")


def manual_reduce(sendbuf, recvbuf, op=MPI.SUM, root=0, comm=MPI.COMM_WORLD):
    """Implementation of MPI_Reduce using tree-based communication.

    Topology: a binary tree over *relative* ranks, rooted at `root`.
    Relative rank r has children 2r+1 and 2r+2 and parent (r-1)//2;
    relative rank 0 corresponds to the real rank `root`.  Each rank
    receives partial results from its children (if any), folds them into
    its local data with `op`, and forwards the accumulator to its parent.
    The root finally copies the accumulator into `recvbuf`.

    Parameters
    ----------
    sendbuf : numpy.ndarray
        Local contribution; assumed same shape/dtype on every rank.
    recvbuf : numpy.ndarray or None
        On `root`, a writable array of the same shape/dtype that receives
        the reduced result; ignored (may be None) on all other ranks.
    op : MPI.Op
        Reduction operation; MPI.SUM (default), MPI.PROD, MPI.MAX and
        MPI.MIN are supported.
    root : int
        Rank on which the result is deposited.
    comm : MPI.Comm
        Communicator to reduce over.

    Raises
    ------
    ValueError
        If `op` is not a supported predefined operation.
    """
    rank = comm.Get_rank()
    size = comm.Get_size()

    # Work in relative ranks so the tree is rooted at `root` rather than at
    # rank 0; without this remapping, a non-zero root breaks (rank 0 would
    # compute parent (0-1)//2 == -1 and Send to an invalid destination).
    rrank = (rank - root) % size

    # Local accumulator, starting from this rank's own data.  Copy so we
    # never mutate the caller's sendbuf.
    acc = np.copy(sendbuf)

    # Receive and fold in each existing child's partial result.  Count and
    # datatype are implied by sendbuf's shape/dtype (assumed uniform).
    for child in (2 * rrank + 1, 2 * rrank + 2):
        if child < size:
            temp_recv = np.empty_like(sendbuf)
            comm.Recv(temp_recv, source=(child + root) % size, tag=111)
            acc = _combine(op, acc, temp_recv)

    if rrank == 0:
        # We are the tree root, i.e. the real rank `root`: deposit result.
        if recvbuf is not None:  # was `null` — NameError in the original
            recvbuf[:] = acc[:]
    else:
        # Interior/leaf node: forward the partial result to our parent.
        parent = (rrank - 1) // 2
        comm.Send(acc, dest=(parent + root) % size, tag=111)
def main():
    """Cross-check manual_reduce against the built-in MPI Reduce."""
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    # Each rank contributes a small random integer vector.
    n = 10
    local_vec = np.random.randint(0, 10, n).astype(np.int32)
    # print(f"Rank {rank}: {local_vec}")

    # Reference result via the library's own Reduce (root only).
    std_result = np.zeros(n, dtype=np.int32) if rank == 0 else None
    comm.Reduce(local_vec, std_result, op=MPI.SUM, root=0)

    # Result via our tree-based implementation (root only).
    manual_result = np.zeros(n, dtype=np.int32) if rank == 0 else None
    manual_reduce(local_vec, manual_result, op=MPI.SUM, root=0, comm=comm)

    if rank != 0:
        return
    if np.array_equal(std_result, manual_result):
        print("Rank 0: Manual Tree Reduce SUCCESS")
        print(f"Result: {manual_result[:5]}...")
    else:
        print("Rank 0: Manual Tree Reduce FAILURE")
        print(f"Std: {std_result}")
        print(f"Man: {manual_result}")
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment