Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Sign in
Toggle navigation
MPI_HW
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
hasan.bahjat
MPI_HW
Commits
d59d2cbd
Commit
d59d2cbd
authored
Nov 18, 2024
by
hasan.bahjat
💬
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Add Second Question Solution
parent
1822eacc
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
165 additions
and
0 deletions
+165
-0
AverageCalc.c
AverageCalc.c
+165
-0
No files found.
AverageCalc.c
0 → 100644
View file @
d59d2cbd
#include <mpi.h>
#include <iostream>
#include <vector>
#include <numeric>
#include <cstdlib>
#include <ctime>
// Standard-library names are used unqualified throughout this file;
// the using-directive is kept at file scope to match the rest of it.
using namespace std;

// ---- Constants and global configuration ----

// Number of elements in the generated array.
const int MATRIX_SIZE = 1000;

// Exclusive upper bound for the generated random values ([0, P-1]).
const int P = 100;
// Function to initialize the numbers.
//
// Fills `generatedNumbers` with MATRIX_SIZE pseudo-random integers in the
// range [0, P-1].  Seeds the C PRNG from the wall clock, so successive
// runs produce different sequences.
//
// Fix: the original drew values with `rand() % 100` — a magic number that
// silently duplicated P while the comment promised [0, P-1].  Using the
// named constant means changing P actually changes the generated range.
// (Name `intialize` [sic] kept unchanged so the caller in main still links.)
void intialize(vector<int>& generatedNumbers) {
    // Resize the vector to the full problem size.
    generatedNumbers.resize(MATRIX_SIZE);

    // Seed once per call; time(0) has one-second resolution.
    srand(time(0));

    for (int i = 0; i < MATRIX_SIZE; i++) {
        // Generate random integers between 0 and P - 1.
        generatedNumbers[i] = rand() % P;
    }
}
// Function to calculate the local sum.
//
// Returns the sum of all elements in `numbers`; an empty vector sums to 0.
//
// Improvements: the vector is now taken by const reference (the original
// took a mutable reference it never wrote through), and the hand-rolled
// loop is replaced by std::accumulate from the already-included <numeric>.
// Non-const callers bind to the const reference unchanged.
int myAccumulater(const std::vector<int>& numbers) {
    return std::accumulate(numbers.begin(), numbers.end(), 0);
}
// Program entry point.
//
// MPI flow:
//   1. Rank 0 fills a MATRIX_SIZE-element array with random values and
//      sends each other rank a contiguous slice (tag 0); ranks below
//      `remainder` get one extra element so the whole array is covered.
//   2. Every rank (master included) sums its slice and computes an
//      integer local average.
//   3. Rank 0 collects the local averages (tag 1) and prints the average
//      of the averages as the global result.
//
// Fix vs. the original: the master's send offset started at 0, so worker 1
// received the very elements the master kept for itself via chunk.assign,
// and the last chunk_size elements of the array were processed by nobody.
// The offset now starts past the master's own slice, which matches the
// startIndex/endIndex ranges the processes report.
int main(int argc, char** argv) {
    MPI_Init(&argc, &argv);

    int rank, size;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);

    vector<int> generatedNumbers;

    // Base slice size; the first `remainder` ranks take one extra element.
    int chunk_size = MATRIX_SIZE / size;
    int remainder = MATRIX_SIZE % size;
    if (rank < remainder) {
        chunk_size += 1; // Add extra element for processes handling remainder
    }

    vector<int> chunk(chunk_size);

    if (rank == 0) {
        // Initialize the numbers.
        intialize(generatedNumbers);

        printf("[Master @ #] Root process initialized matrix\n[Master @ #] (first 10 elements): ");
        // Guard against MATRIX_SIZE < 10 so the preview never reads out of range.
        for (int i = 0; i < 10 && i < MATRIX_SIZE; i++) {
            printf("%d ", generatedNumbers[i]);
        }
        printf("\n[Master @ #]\n");

        // Divide the work into slices, one slice per worker process.
        // Start the offset past the master's own slice (rank 0 keeps the
        // first chunk_size elements); starting at 0 would hand worker 1 a
        // duplicate of the master's data and leave the array tail unsent.
        int offset = chunk_size;

        for (int i = 1; i < size; i++) {
            // Slice size for worker i (one extra while i < remainder).
            int send_size = MATRIX_SIZE / size + (i < remainder ? 1 : 0);

            // Send the slice to worker i.
            MPI_Send(generatedNumbers.data() + offset, // data
                     send_size,                        // element count
                     MPI_INT,                          // element type
                     i, 0,                             // dest rank, tag
                     MPI_COMM_WORLD);                  // communicator

            // Advance past the slice just sent.
            offset += send_size;
        }

        // The master also works on its own slice (everyone should work!).
        chunk.assign(generatedNumbers.begin(), generatedNumbers.begin() + chunk_size);
    } else {
        // Worker process: receive this rank's slice from the master.
        MPI_Recv(chunk.data(), chunk_size, MPI_INT, 0, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
    }

    // Report which global indices this rank is responsible for.
    int startIndex = (rank < remainder)
                         ? rank * (MATRIX_SIZE / size + 1)
                         : rank * (MATRIX_SIZE / size) + remainder;
    int endIndex = startIndex + chunk_size - 1;
    printf("[Worker @ %d] I'm responsible for indices [ %d- %d ]\n", rank, startIndex, endIndex);

    // Local sum and (integer) local average of this rank's chunk.
    int localSum = myAccumulater(chunk);
    // Guard: when size > MATRIX_SIZE some ranks own zero elements; the
    // original divided by zero in that configuration.
    int localAverage = (chunk_size > 0) ? localSum / chunk_size : 0;
    printf("[Worker @ %d] Process %d calculated local average: %d\n", rank, rank, localAverage);

    if (rank == 0) {
        int globalAverage = 0;

        // Collect every rank's local average (tag 1); slot 0 is the master's.
        vector<int> localAverages(size);
        localAverages[0] = localAverage;
        for (int i = 1; i < size; i++) {
            MPI_Recv(&localAverages[i], 1, MPI_INT, i, 1, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
        }

        // Average of the local averages.  NOTE(review): this equals the true
        // global average only when every chunk has the same size; with a
        // nonzero remainder it is the approximation the assignment uses.
        globalAverage = myAccumulater(localAverages) / size;
        printf("[Master @ #] Global average calculated from all local averages: %d\n", globalAverage);
    } else {
        // Worker: ship the local average back to the master (tag 1).
        MPI_Send(&localAverage, 1, MPI_INT, 0, 1, MPI_COMM_WORLD);
    }

    MPI_Finalize();
    return 0;
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment