
COSC330/530 Parallel and Distributed Computing

Lecture 15 - Communication Structures in MPI

Dr. Mitchell Welch


Reading


Summary


Collective Communication


Collective Communication


Collective Communication


Collective Communication


Alt text


Collective Communication


Collective Communication

#include "mpi.h"
int MPI_Gather (void *sndbuf,  int sndcnt, MPI_Datatype sndtyp, 
                void *rcvbuf,  int rcvcnt, MPI_Datatype rcvtyp, 
                int root, MPI_Comm comm )

Collective Communication


Collective Communication


Alt text


Collective Communication


Collective Communication


Collective Communication

#include "mpi.h"
int MPI_Scatter(void *sndbuf, int sndcnt, MPI_Datatype sndtyp, 
                void *rcvbuf, int rcvcnt, MPI_Datatype rcvtyp, 
                int root, MPI_Comm comm )

Collective Communication


Collective Communication


Alt text


Collective Communication


Collective Communication


Matrix Multiplication (v1)


Matrix Multiplication (v1)


Matrix Multiplication (v1)


Tree-Structured Communication


Tree-Structured Communication


Tree-Structured Communication


Alt text


Tree-Structured Communication


Tree-Structured Communication

#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>

/* Upper bound (exclusive) on each process's random contribution. */
const int MAX_CONTRIB = 10;

/* Tree-structured global reduction; the returned total is valid only on rank 0. */
int Global_sum(int my_contrib, int my_rank, int p, MPI_Comm comm);

/*
 * Driver: every process draws a pseudo-random contribution, all processes
 * cooperate in Global_sum, and rank 0 prints the reduced total.
 */
int main(int argc, char* argv[]) {
   MPI_Comm comm;
   int      comm_sz;   /* number of processes in the communicator */
   int      rank;      /* this process's rank */
   int      contrib;   /* this process's contribution to the sum */
   int      total;     /* reduction result (meaningful on rank 0 only) */

   MPI_Init(&argc, &argv);
   comm = MPI_COMM_WORLD;
   MPI_Comm_size(comm, &comm_sz);
   MPI_Comm_rank(comm, &rank);

   /* Seed per-rank so each process draws a different value. */
   srandom(rank+1);
   contrib = random() % MAX_CONTRIB;
   printf("Proc %d > my_contrib = %d\n", rank, contrib);

   total = Global_sum(contrib, rank, comm_sz, comm);

   if (rank == 0)
      printf("Proc %d > global sum = %d\n", rank, total);

   MPI_Finalize();
   return 0;
}  /* main */


Tree-Structured Communication


/*
 * Tree-structured global sum over communicator comm.
 * Each pairing stage is selected by XOR-ing the rank with a moving bitmask:
 * the lower rank of a pair receives and keeps accumulating, the higher rank
 * sends its partial sum and drops out.
 * The returned value is the complete sum only on rank 0.
 * NOTE(review): the XOR pairing assumes p is a power of two — confirm.
 */
int Global_sum(int my_contrib, int my_rank, int p, MPI_Comm comm) {
   int sum = my_contrib;   /* running partial sum */
   int recv_val;           /* partial sum received from a partner */
   int partner;            /* rank paired with this one at the current stage */
   int sent = 0;           /* set once this rank has passed its sum up the tree */
   unsigned mask = 1;      /* bit that identifies the current stage's partner */
   MPI_Status status;

   while (!sent && mask < p) {
      partner = my_rank ^ mask;
      if (my_rank < partner) {
         /* Lower rank of the pair: receive, accumulate, move to next stage. */
         MPI_Recv(&recv_val, 1, MPI_INT, partner, 0, comm, &status);
         sum += recv_val;
         mask <<= 1;
      } else {
         /* Higher rank of the pair: hand the partial sum up and stop. */
         MPI_Send(&sum, 1, MPI_INT, partner, 0, comm);
         sent = 1;
      }
   }

   return sum;   /* Valid only on 0 */
}  /* Global_sum */

Tree-Structured Communication


X Y X^Y
0 0 0
0 1 1
1 0 1
1 1 0



Tree-Structured Communication

r   binary   r^001   r^010   r^100
0 000 001 010 100
1 001 000 x x
2 010 011 000 x
3 011 010 x x
4 100 101 110 000
5 101 100 x x
6 110 111 100 x
7 111 110 x x

Tree-Structured Communication

[cosc330@bourbaki examples] $ mpirun -np 8 treeGlobalSum
Proc 3 > my_contrib = 1
Proc 4 > my_contrib = 5
Proc 5 > my_contrib = 1
Proc 6 > my_contrib = 7
Proc 7 > my_contrib = 6
Proc 0 > my_contrib = 3
Proc 0 > global sum = 29
Proc 1 > my_contrib = 0
Proc 2 > my_contrib = 6
[cosc330@bourbaki examples] $ 


Tree-Structured Communication


Butterfly Communication


Butterfly Communication


Alt text


Butterfly Communication


Butterfly Communication


/*
 * Butterfly (recursive-doubling) global sum over communicator comm.
 * At each of the log2(p) stages every rank swaps its partial sum with the
 * partner whose rank differs in exactly one bit, so after the final stage
 * EVERY rank holds the complete total (unlike the tree version).
 * NOTE(review): the XOR pairing assumes p is a power of two — confirm.
 */
int Global_sum(int my_contrib, int my_rank, int p, MPI_Comm comm) {
   int sum = my_contrib;   /* running partial sum */
   int partner_val;        /* partial sum received from the stage partner */
   int partner;            /* rank paired with this one at the current stage */
   unsigned mask;          /* bit that identifies the current stage's partner */

   for (mask = 1; mask < p; mask <<= 1) {
      partner = my_rank ^ mask;
      /* Simultaneous exchange of partial sums with the stage partner. */
      MPI_Sendrecv(&sum, 1, MPI_INT, partner, 0,
                   &partner_val, 1, MPI_INT, partner, 0,
                   comm, MPI_STATUS_IGNORE);
      sum += partner_val;
   }

   return sum;
}  /* Global_sum */

Butterfly Communication


[cosc330@bourbaki examples] $ mpirun -np 4 butterflyGlobalSum
Process Values:
Proc 0 > 3
Proc 1 > 3
Proc 2 > 10
Proc 3 > 6
Process Totals:
Proc 0 > 22
Proc 1 > 22
Proc 2 > 22
Proc 3 > 22

Butterfly Communication

#include <mpi.h>
int MPI_Sendrecv(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
    int dest, int sendtag, void *recvbuf, int recvcount,
    MPI_Datatype recvtype, int source, int recvtag,
    MPI_Comm comm, MPI_Status *status)


Butterfly Communication


Butterfly Communication


Summary


Reading