Package PyFoam :: Package RunDictionary :: Module ParsedParameterFile
[hide private]
[frames] | [no frames]

Source Code for Module PyFoam.RunDictionary.ParsedParameterFile

  1  #  ICE Revision: $Id: /local/openfoam/Python/PyFoam/PyFoam/RunDictionary/ParsedParameterFile.py 5389 2009-08-16T18:20:31.169405Z bgschaid  $  
  2  """Parameter file is read into memory and modified there""" 
  3   
  4  from FileBasis import FileBasisBackup 
  5  from PyFoam.Basics.PlyParser import PlyParser 
  6  from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator 
  7   
  8  from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList 
  9   
 10  from PyFoam.Error import error 
 11   
 12  from os import path 
 13  from copy import deepcopy 
 14   
class ParsedParameterFile(FileBasisBackup):
    """An OpenFOAM parameter file whose complete contents are held in
    memory as a nested structure, may be manipulated there and finally
    written back to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 createZipped=True):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: Lists longer than that length are not parsed
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: Do not read the file during construction
        """

        # remember the header/body switches before the base class is set up
        self.noHeader = noHeader
        self.noBody = noBody

        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)

        # options that are handed through to the parser in parse()
        self.debug = debug
        self.boundaryDict = boundaryDict
        self.listDict = listDict
        self.listDictWithHeader = listDictWithHeader
        self.listLengthUnparsed = listLengthUnparsed
        self.doMacros = doMacroExpansion

        # filled by readFile()/parse()
        self.header = None
        self.content = None

        if not dontRead:
            self.readFile()

    def parse(self, content):
        """Constructs a representation of the file
        @param content: the file contents as one string"""
        prsr = FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              doMacroExpansion=self.doMacros)

        self.header = prsr.getHeader()
        self.content = prsr.getData()
        return self.content

    # dictionary-like access is delegated to the parsed content

    def __contains__(self, key):
        return key in self.content

    def __getitem__(self, key):
        return self.content[key]

    def __setitem__(self, key, value):
        self.content[key] = value

    def __delitem__(self, key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        return iter(self.content)

    def __str__(self):
        """Generates a string from the contents in memory
        Used to be called makeString"""

        banner = "// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"
        gen = FoamFileGenerator(self.content, header=self.header)

        return banner + gen.makeString(firstLevel=True)
class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        # never read the file: this class starts from an empty content
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        # default the 'object' header entry to the file name
        if objectName is None:
            objectName = path.basename(name)

        self.content = {}
        self.header = {"version": "2.0",
                       "format": "ascii",
                       "class": className,
                       "object": objectName}
class Enumerate(object):
    """Simple enumeration helper: each name in *names* becomes an
    attribute whose value is its position in the list"""

    def __init__(self, names):
        for idx, nm in enumerate(names):
            setattr(self, nm, idx)

# the input-modes known to the #inputMode-directive of OpenFOAM-dictionaries
inputModes = Enumerate(["merge", "error", "warn", "protect", "overwrite", "default"])
# NOTE(review): this is a PLY (lex/yacc) parser.  The docstrings of the
# t_* methods are the token regexes and the docstrings of the p_* methods
# are the grammar rules -- they are SEMANTIC and must never be edited as
# if they were documentation.
class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of directories and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header"""

        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines

        # decorations (comments/newlines) are only buffered while this is True
        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        # counts how many alternative start symbols were requested;
        # more than one is a usage error
        startCnt=0

        # stack of DictProxy objects; the innermost (last) one receives
        # the entries that are currently being parsed
        self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        #sys.setrecursionlimit(50000)
        #print sys.getrecursionlimit()

        # counter used by p_dictline to generate unique keys for macro-lines
        self.emptyCnt=0

        self.header,self.data=self.parse(content)

    # dictionary-like access is delegated to the parsed data

    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __setitem__(self,key,value):
        self.data[key]=value

    def __delitem__(self,key):
        del self.data[key]

    def __iter__(self):
        for key in self.data:
            yield key

##    def __len__(self):
##        if self.data==None:
##            return 0
##        else:
##            return len(self.data)

    def resetDecoration(self):
        # discard any buffered comments/newlines
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def addCommentToDecorations(self,text):
        if self.preserveComments:
            self.addToDecoration(text)

    def addNewlinesToDecorations(self,text):
        if self.preserveNewLines:
            self.addToDecoration(text)

    def getDecoration(self):
        # return the buffered decoration (without a trailing newline)
        # and reset the buffer
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        # directory of the parsed file; used to resolve #include-paths
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """ Get the data structure"""
        return self.data

    def getHeader(self):
        """ Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print "------"
        print c[max(0,ind-100):max(0,ind-1)]
        print "------"
        print ">",c[ind-1],"<"
        print "------"
        print c[min(len(c),ind):min(len(c),ind+100)]
        print "------"

    def parserError(self,text,c,ind):
        """Prints the error message of the parser and exit"""
        print "PARSER ERROR:",text
        print "On index",ind
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    def condenseAllPreFixLists(self,orig):
        """Checks whether this list is a list that consists only of prefix-Lists"""
        # a "prefix list" is the OpenFOAM form "N ( item1 ... itemN )";
        # if orig alternates between a count and a list of exactly that
        # length, the counts are dropped and only the lists are kept
        isAllPreList=False
        if (len(orig) % 2)==0:
            isAllPreList=True
            for i in range(0,len(orig),2):
                if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
                    if len(orig[i+1])!=orig[i]:
                        isAllPreList=False
                        break
                else:
                    isAllPreList=False
                    break

        if isAllPreList:
            return orig[1::2]
        else:
            return orig

    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
        )

    # keywords that get their own token type instead of NAME
    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
        }

    # 'unparsed' is an exclusive lexer state used to skip over very long
    # lists (see p_prelist_seen) without tokenizing their contents
    states = (
        ('unparsed', 'exclusive'),
        )

    def t_unparsed_left(self,t):
        r'\('
        t.lexer.level+=1
        # print "left",t.lexer.level,

    def t_unparsed_right(self,t):
        r'\)'
        t.lexer.level-=1
        # print "right",t.lexer.level,
        if t.lexer.level < 0 :
            # closing paren of the skipped list reached: hand back the raw
            # text as one UNPARSEDCHUNK token and return to normal lexing
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            # print t.value
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_NAME(self,t):
        r'[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'NAME')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it propably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    # NOTE(review): method name is misspelled ('SUBSTITUITION' is not in the
    # tokens tuple); the type is set explicitly below, so the token emitted
    # is SUBSTITUTION -- confirm the PLY version in use accepts the rule name
    def t_SUBSTITUITION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it propably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    # Define a rule so we can track line numbers
    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        # peek at the next line: if it contains a '=' that is not preceded
        # by a comment or a quote, the whole line is treated as a chemical
        # REACTION token instead of being tokenized normally
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t
#        self.addNewlinesToDecorations(t.value)

    # C or C++ comment (ignore)
    def t_ccode_comment(self,t):
        r'(/\*(.|\n)*?\*/)|(//.*)'
        t.lexer.lineno += t.value.count('\n')
        self.addCommentToDecorations(t.value)
        pass

    # Error handling rule
    def t_error(self,t):
        raise PyFoamParserError("Illegal character '%s'" % t.value[0])
        # t.lexer.skip(1)   # the old days when illegal characters were accepted

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        # stop reading: jump the lexer to the end of the input once the
        # header has been consumed (used by the noBody start symbol)
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        #        p[0] = ( p[1] , dict(zip(p[2][::2],p[2][1::2])) )
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            # the directive was expanded, so it is written back as a comment
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            # read the included file and merge its entries into the
            # currently parsed dictionary
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,noHeader=True)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_inputMode(self,p):
        '''inputMode : INPUTMODE ERROR
                     | INPUTMODE WARN
                     | INPUTMODE PROTECT
                     | INPUTMODE DEFAULT
                     | INPUTMODE MERGE
                     | INPUTMODE OVERWRITE'''
        p[0] = p[1] + " " + p[2]
        self.inputMode=getattr(inputModes,p[2])

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    # NOTE(review): this deliberately extends the 'integer' grammar rule to
    # also accept floating point constants
    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_enter_dict(self,p):
        '''enter_dict :'''
        # push a fresh dictionary that collects the entries of the
        # sub-dictionary that is about to be parsed
        self.dictStack.append(DictProxy())

    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            p[0][p[2][0]]=p[2][1]
            p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            # first line of the body: use the dictionary on top of the stack
            p[0]=self.dictStack[-1]

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_list(self,p):
        '''list : '(' itemlist ')' '''
        p[0] = self.condenseAllPreFixLists(p[2])
        # a list of 3/6/9 numbers is promoted to Vector/SymmTensor/Tensor
        if len(p[2])==3 or len(p[2])==9 or len(p[2])==6:
            isVector=True
            for i in p[2]:
                try:
                    float(i)
                except:
                    isVector=False
            if isVector:
                if len(p[2])==3:
                    p[0]=apply(Vector,p[2])
                elif len(p[2])==9:
                    p[0]=apply(Tensor,p[2])
                else:
                    p[0]=apply(SymmTensor,p[2])

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        # if the announced list length reaches the threshold, switch the
        # lexer to the 'unparsed' state so the list body is skipped
        if self.listLengthUnparsed!=None:
            #            print "Hepp"
            if int(p[-1])>=self.listLengthUnparsed:
                #                print "Ho",p.lexer.lexpos,p.lexer.lexdata[p.lexer.lexpos-1:p.lexer.lexpos+2],p[1],len(p[1])
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

#            t=p.lexer.token()

##            print t.type
##            return t
#        p[0] = None

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            # resolve $var against the outermost dictionary
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if nm in self.dictStack[0]:
                p[0]=deepcopy(self.dictStack[0][nm])
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | dictkey dictionary'''
        if len(p)==4 and type(p[2])==list:
            # remove the prefix from long lists (if present)
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)>=3:
            p[0] = ( p[1] , p[2] )
        else:
            # a macro-line has no key; invent a unique numeric one
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        # the short five-entry form is padded with zeros
        if len(result)==5:
            result+=[0,0]

        p[0]=apply(Dimension,result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])

    def p_fieldvalue_uniform(self,p):
        '''fieldvalue : UNIFORM number
                      | UNIFORM vector
                      | UNIFORM tensor
                      | UNIFORM symmtensor'''
        p[0] = Field(p[2])

    def p_fieldvalue_nonuniform(self,p):
        '''fieldvalue : NONUNIFORM NAME list
                      | NONUNIFORM NAME prelist'''
        p[0] = Field(p[3],name=p[2])

    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            ##            print type(p[1][-1])
            ##            if type(p[1][-1])==int and type(p[2])==tuple:
            ##                print "Hepp",p[2]
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

    def p_empty(self,p):
        'empty :'
        pass

    def p_error(self,p):
        raise PyFoamParserError("Syntax error at token", p) # .type, p.lineno
        # Just discard the token and tell the parser it's okay.
        # self.yacc.errok()
class PyFoamParserError(Exception):
    """Signals an error during lexing or parsing of an OpenFOAM-file.

    Derives from Exception (the original was a plain old-style class):
    raising a non-Exception instance is deprecated in Python 2.6+ and
    illegal in Python 3, so this makes 'raise PyFoamParserError(...)'
    portable without changing the reported information.

    @ivar descr: short description of the problem
    @ivar data: the offending lexer token (or None)"""

    def __init__(self,descr,data=None):
        Exception.__init__(self,descr)
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            # truncate very long token values to keep the message readable
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result

    def __repr__(self):
        return str(self)
class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self, content, debug=False):
        """@param content: the string to be parsed
        @param debug: output debug information during parsing"""

        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=True,
                                boundaryDict=False)

    def __str__(self):
        # regenerate the dictionary text from the parsed data
        return str(FoamFileGenerator(self.data))
class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only a OpenFOAM polyMesh-boundaries file"""

    def __init__(self,name,backup=False,debug=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file"""

        ParsedParameterFile.__init__(self,name,backup=backup,debug=debug,boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file: the alternating
        name/dictionary entries of the boundary list are folded into a
        plain dictionary keyed by patch name"""
        temp=ParsedParameterFile.parse(self,content)
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"

        # Sort the patches by their startFace so they appear in mesh order.
        # Uses items()+sorted(key=...) instead of the original
        # iteritems()/sort(cmp-function), which were Python-2-only;
        # the resulting order is identical
        temp=sorted(self.content.items(),
                    key=lambda item:int(item[1]["startFace"]))

        # flatten back into the alternating name/dictionary list form
        temp2=[]
        for patchName,patchDict in temp:
            temp2.append(patchName)
            temp2.append(patchDict)

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string
class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file.

    The body is skipped entirely; item access is delegated to the parsed
    FoamFile-header dictionary"""

    def __init__(self, name):
        """@param name: the file whose header should be read"""
        ParsedParameterFile.__init__(self, name, backup=False, noBody=True)

    def __getitem__(self, key):
        return self.header[key]

    def __contains__(self, key):
        return key in self.header

    def __len__(self):
        return len(self.header)