Package PyFoam :: Package Execution :: Module ParallelExecution
[hide private]
[frames] | [no frames]

Source Code for Module PyFoam.Execution.ParallelExecution

  1  #  ICE Revision: $Id: /local/openfoam/Python/PyFoam/PyFoam/Execution/ParallelExecution.py 6510 2010-04-21T16:13:50.349113Z bgschaid  $  
  2  """Things that are needed for convenient parallel Execution""" 
  3   
  4  from PyFoam.Basics.Utilities import Utilities 
  5  from PyFoam.FoamInformation import foamMPI 
  6  from PyFoam.Error import error,warning,debug 
  7  from PyFoam import configuration as config 
  8   
  9  from os import path,environ,system 
 10  from string import strip 
 11  import commands 
 12   
class LAMMachine(Utilities):
    """Wrapper class for starting and stopping a LAM-Machine.

    For the OpenMPI implementations no machine is actually booted; the
    object then only records the machine-file and the process count that
    are needed to build a correct mpirun command line."""

    def __init__(self, machines=None, nr=None):
        """@param machines: Name of the file with the machine information
        @param nr: Number of processes"""

        Utilities.__init__(self)

        # make sure that no stale machine is considered running
        self.stop()

        if machines == "":
            machines = None

        if machines is None and foamMPI() == "LAM":
            error("Machinefile must be specified for LAM")

        if machines is None and nr is None:
            error("Either machinefile or Nr of CPUs must be specified for MPI type", foamMPI())

        self.mFile = machines   # path of the machine-file (or None)
        self.procNr = nr        # requested number of processes (or None)

        self.boot()
        if not self.machineOK():
            error("Error: LAM was not started")

    def machineOK(self):
        """Check whether the LAM machine was properly booted.
        @return: True if the machine is usable"""
        if self.running:
            # a properly booted LAM machine must report at least one CPU
            if foamMPI() == "LAM":
                if self.cpuNr() <= 0:
                    self.running = False

        return self.running

    def stop(self):
        """Stops a LAM-machine (if one is running)"""
        self.running = False
        if foamMPI() == "LAM":
            self.execute("lamhalt -v")

    def boot(self):
        """Boots a LAM-machine using the machine-file.
        For OpenMPI nothing needs to be done except marking the machine as running."""
        if foamMPI() == "LAM":
            warning("LAM is untested. Any Feedback most welcome")
            self.execute("lamboot -s -v " + self.mFile)
            self.running = True
        elif foamMPI() == "OPENMPI" or foamMPI() == "SYSTEMOPENMPI":
            self.running = True
        else:
            error(" Unknown or missing MPI-Implementation: " + foamMPI())

    def cpuNr(self):
        """Determine the number of CPUs available for the parallel run.
        @return: the number of processes (-1 if a LAM machine is not running)"""
        if foamMPI() == "LAM":
            if self.running:
                # 'lamnodes' reports lines of the form 'nX:<cpus>:...'
                lines = self.execute("lamnodes")
                nr = 0
                for l in lines:
                    tmp = l.split(':')
                    if len(tmp) > 1:
                        nr += int(tmp[1])
                return nr
            else:
                return -1
        elif foamMPI() == "OPENMPI" or foamMPI() == "SYSTEMOPENMPI":
            if self.mFile:
                # count the hosts listed in the machine-file (one host per line)
                f = open(self.mFile)
                try:
                    hosts = [line.strip() for line in f.readlines()]
                finally:
                    f.close()
                nr = 0
                for m in hosts:
                    tmp = m.split()
                    if len(tmp) == 1:
                        nr += 1
                    elif len(tmp) == 0:
                        pass  # blank lines are silently ignored
                    else:
                        error("Machinefile not valid (I think): more than one element in one line:" + str(tmp) + "\nPropably you wrote a line in the form 'node1 cpu=2', but I only understand 'node1\\nnode1'")

                if self.procNr is None:
                    return nr
                else:
                    # never use more processes than explicitly requested
                    return min(nr, self.procNr)
            elif self.procNr:
                return self.procNr
            else:
                error("Can't determine Nr of CPUs without machinefile")

    def buildMPIrun(self, argv, expandApplication=True):
        """Builds a list with a working mpirun command (for that MPI-Implementation)
        @param argv: the original arguments that are to be wrapped
        @param expandApplication: Expand the application name to its full path using 'which'
        @return: list with the correct mpirun-command"""

        nr = str(self.cpuNr())
        mpirun = [config().get("MPI", "run_" + foamMPI(), default="mpirun")]

        # NOTE(review): eval() on configuration values - safe only as long
        # as the configuration files are trusted
        mpirun += eval(config().get("MPI", "options_" + foamMPI() + "_pre", default="[]"))

        if foamMPI() == "LAM":
            mpirun += ["-np", nr]
        elif foamMPI() == "OPENMPI" or foamMPI() == "SYSTEMOPENMPI":
            nr = []
            if self.procNr is not None:
                nr = ["-np", str(self.procNr)]
            machine = []
            if self.mFile is not None:
                machine = ["-machinefile", self.mFile]
            mpirun += nr + machine
        else:
            error(" Unknown or missing MPI-Implementation for mpirun: " + foamMPI())

        mpirun += eval(config().get("MPI", "options_" + foamMPI() + "_post", default="[]"))

        progname = argv[0]
        if expandApplication:
            # expand the application to its full path; fall back to the
            # plain name if 'which' finds no match
            stat, progname = commands.getstatusoutput('which ' + progname)
            if stat:
                progname = argv[0]
                warning("which can not find a match for", progname, ". Hoping for the best")

        # argv is assumed to be (application, case-root, case-name, options...)
        mpirun += [progname] + argv[1:3] + ["-parallel"] + argv[3:]

        if config().getdebug("ParallelExecution"):
            debug("MPI:", foamMPI())
            debug("Arguments:", mpirun)
            system("which mpirun")
            system("which rsh")
            debug("Environment", environ)
            for a in mpirun:
                if a in environ:
                    debug("Transfering variable", a, "with value", environ[a])

        return mpirun

    def writeMetis(self, sDir):
        """Write the parameter-File for a metis decomposition
        @param sDir: Solution directory
        @type sDir: PyFoam.RunDictionary.SolutionDirectory"""

        params = "method metis;\n"

        self.writeDecomposition(sDir, params)

    def writeSimple(self, sDir, direction):
        """Write the parameter-File for a simple decomposition
        @param sDir: Solution directory
        @type sDir: PyFoam.RunDictionary.SolutionDirectory
        @param direction: direction in which to decompose (0=x, 1=y, 2=z)"""

        params = "method simple;\n"
        params += "\nsimpleCoeffs\n{\n\t n \t ("
        # all CPUs go into the chosen direction, the other directions get 1
        params += " ".join([str(self.cpuNr()) if direction == i else "1"
                            for i in range(3)])
        params += ");\n\t delta \t 0.001;\n}\n"

        self.writeDecomposition(sDir, params)

    def writeDecomposition(self, sDir, par):
        """Write parameter file for a decomposition
        @param par: Parameters specific for that kind of decomposition
        @type par: str
        @param sDir: Solution directory
        @type sDir: PyFoam.RunDictionary.SolutionDirectory"""

        f = open(path.join(sDir.systemDir(), "decomposeParDict"), "w")
        try:
            self.writeDictionaryHeader(f)
            f.write("// * * * * * * * * * //\n\n")
            f.write("numberOfSubdomains " + str(self.cpuNr()) + ";\n\n")
            f.write(par)
            f.write("\n\n// * * * * * * * * * //")
        finally:
            f.close()