test2
5 years ago
7 changed files with 160 additions and 1 deletions
@ -1,2 +1,93 @@

# examples-mpi2
# MPI & OpenJDK!

Instances of Ubuntu Linux with [MPICH](https://www.mpich.org) -- a portable implementation of the Message Passing Interface (MPI) standard. Designed for MPI program development and deployment.
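
A quick way to confirm the MPICH toolchain inside a running instance is to query its standard binaries (a minimal sketch; exact output varies by image version):

```bash
# Query the MPICH toolchain (standard MPICH binaries; versions will vary).
mpichversion        # prints the installed MPICH release
mpicc --version     # the compiler wrapper reports the underlying gcc
mpirun --version    # Hydra, the process manager that launches MPI jobs
```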

## LabInstance mpi2

![alt text](images/swarmlab-network.png "")

[inspired by NLKNguyen](https://github.com/NLKNguyen/alpine-mpich)

## Quickstart

This is a quickstart guide on how to use this *LabInstance to deploy MPI programs*.

### HowTo use it

- On step "*1. Select Git Repo*" [see here](http://docs.swarmlab.io/SwarmLab-HowTos/HowTo-create-lab.adoc.html) choose "*examples-mpi*"
- On step "*2. Select swarm service*" choose "*ondemand_mpi2_master*"
- On step "*3. Save Lab Instance*" put the name you want
- Save it.

You are ready to run your fresh *LabInstance* [see here](http://docs.swarmlab.io/SwarmLab-HowTos/HowTo-create-lab.adoc.html#_run_instance)

### Default Configuration

- Working Directory

> /ubuntu

- Default user for MPI

> ubuntu

- Built-in Web Server

---
**INFO**

*Lab_Instance Name* = The name you give [here](http://docs.swarmlab.io/SwarmLab-HowTos/HowTo-create-lab.adoc.html#_step_3_save_lab_instance)

You can also find it [here](http://docs.swarmlab.io/SwarmLab-HowTos/index.adoc.html#_main_lab_intance_interface)

Usage:

> touch /var/www/html/[name]/file
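
For example, to make a result file reachable over HTTP (a sketch: `mylab` is a hypothetical stand-in for the Lab_Instance name you chose, and the file name is arbitrary):

```bash
# "mylab" is a hypothetical Lab_Instance name; replace with your own.
cp /tmp/output.log /var/www/html/mylab/output.log
```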

---

- The host file that contains the addresses of connected workers can be created with:

> get_hosts > /ubuntu/hosts
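
A minimal sanity check of the generated file, assuming one worker address per line and that the workers answer ping:

```bash
# Count the listed workers and verify each one is reachable
# (illustrative check; assumes one address per line).
echo "$(wc -l < /ubuntu/hosts) worker(s) listed"
while read -r host; do
    ping -c 1 -W 1 "$host" > /dev/null && echo "ok: $host" || echo "unreachable: $host"
done < /ubuntu/hosts
```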

- Compile:

> mpicc -o /ubuntu/mpi_hello_world examples/mpi_hello_world.c
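
`mpicc` is a thin wrapper around the system C compiler, so ordinary compiler flags pass straight through; with MPICH, `-show` prints the underlying command without running it:

```bash
# Inspect what mpicc actually invokes (MPICH supports -show):
mpicc -show
# Extra flags are forwarded to the underlying compiler (optional):
mpicc -Wall -O2 -o /ubuntu/mpi_hello_world examples/mpi_hello_world.c
```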

- Run the MPI hello world program:

> sudo -u mpi mpirun -n 10 -f /ubuntu/hosts /ubuntu/mpi_hello_world
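
The `-n` count is independent of the number of hosts: Hydra (MPICH's launcher) spreads the ranks across the hosts in the file. Two illustrative variations:

```bash
# Fewer ranks than the example above:
sudo -u mpi mpirun -n 4 -f /ubuntu/hosts /ubuntu/mpi_hello_world
# Cap how many ranks land on each node with Hydra's -ppn option:
sudo -u mpi mpirun -n 10 -ppn 2 -f /ubuntu/hosts /ubuntu/mpi_hello_world
```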

---
**NOTE:** copy hello world to all clients

```bash
# scp hello world to all clients; sudo -u mpi so scp can connect
# without a password (keys are in /home/mpi/.ssh)
while read -r line; do
    sudo -u mpi scp /ubuntu/mpi_hello_world ubuntu@"$line":/ubuntu/mpi_hello_world
done < /ubuntu/hosts
```
---

---
**MORE INFO**

See the swarmlab and examples directories.

---
@ -0,0 +1,13 @@

all: config install log

config:
	/bin/bash ./runbefore

install:
	/bin/bash ./run

log:
	/bin/bash ./runafter
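
Presumably driven from the repository root: `make` walks the three phases in order, and each target can also be run on its own:

```bash
make            # config -> install -> log (runbefore, run, runafter)
make config     # only regenerate /ubuntu/hosts via ./runbefore
make install    # only compile, distribute and launch via ./run
```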
@ -0,0 +1,30 @@

#include <mpi.h>
#include <stdio.h>

int main(int argc, char** argv) {
    // Initialize the MPI environment. The two arguments to MPI_Init are not
    // currently used by MPI implementations, but are there in case future
    // implementations might need the arguments.
    MPI_Init(NULL, NULL);

    // Get the number of processes
    int world_size;
    MPI_Comm_size(MPI_COMM_WORLD, &world_size);

    // Get the rank of the process
    int world_rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &world_rank);

    // Get the name of the processor
    char processor_name[MPI_MAX_PROCESSOR_NAME];
    int name_len;
    MPI_Get_processor_name(processor_name, &name_len);

    // Print off a hello world message
    printf("Hello world from processor %s, rank %d out of %d processors\n",
           processor_name, world_rank, world_size);

    // Finalize the MPI environment. No more MPI calls can be made after this
    MPI_Finalize();
}
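
Given the `printf` above, each rank emits one line; with `-n 2` the output looks roughly like the following (hypothetical hostnames, and line order varies between runs):

```bash
$ mpirun -n 2 -f /ubuntu/hosts /ubuntu/mpi_hello_world
Hello world from processor worker1, rank 0 out of 2 processors
Hello world from processor worker2, rank 1 out of 2 processors
```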
@ -0,0 +1,17 @@

#!/bin/bash

# compile
mpicc -o /ubuntu/mpi_hello_world ./mpi_hello_world.c

# scp hello world to all clients. sudo -u mpi for auto scp connect; see /home/mpi/.ssh
while read -r line; do
    scp /ubuntu/mpi_hello_world ubuntu@"$line":/ubuntu/mpi_hello_world
done < /ubuntu/hosts

# cd to work dir
cd /ubuntu

# run it!
mpirun -n 10 -f /ubuntu/hosts /ubuntu/mpi_hello_world 2>&1 | tee /tmp/output.log
@ -0,0 +1,4 @@

#!/bin/bash

echo "do more jobs"
#cat /tmp/output.log
@ -0,0 +1,4 @@

#!/bin/bash

get_hosts > /ubuntu/hosts