"""Parameter file is read into memory and modified there"""

from FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList

from PyFoam.Error import error

from os import path
from copy import deepcopy

class ParsedParameterFile(FileBasisBackup):
    """Parameter file whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 createZipped=True):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: lists longer than this length are not parsed
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: do not read the file during construction
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        self.header=None
        self.content=None

        if not dontRead:
            self.readFile()
    def parse(self,content):
        """Constructs a representation of the file"""
        parser=FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              doMacroExpansion=self.doMacros)

        self.content=parser.getData()
        self.header=parser.getHeader()
        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory.
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,header=self.header)
        string+=generator.makeString(firstLevel=True)

        return string

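# Example (sketch): a typical round trip with ParsedParameterFile, assuming an
# OpenFOAM case with a system/controlDict.  writeFile() is inherited from the
# FileBasisBackup base class and writes the (possibly modified) content back.
#
#   ctrl=ParsedParameterFile("system/controlDict",backup=True)
#   ctrl["endTime"]=1000
#   ctrl.writeFile()
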
class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used only to write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        if objectName==None:
            objectName=path.basename(name)

        self.content={}
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}

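# Example (sketch): creating a dictionary file from scratch.  The header is
# pre-filled in the constructor; entries go in through the normal dictionary
# interface and writeFile() (from FileBasisBackup) puts the file on disk.
#
#   out=WriteParameterFile("constant/transportProperties")
#   out["transportModel"]="Newtonian"
#   out.writeFile()
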
class Enumerate(object):
    def __init__(self, names):
        for number, name in enumerate(names):
            setattr(self, name, number)

inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])

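# Enumerate simply maps the given names to consecutive integers, e.g.
# inputModes.merge is 0 and inputModes.default is 5.  FoamFileParser keeps the
# currently active "#inputMode" of the parsed file in this form.
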
class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None):
        """@param content: the string to be parsed
        @param fName: name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header"""

        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines

        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        startCnt=0

        self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        self.emptyCnt=0

        self.header,self.data=self.parse(content)

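    # Example (sketch): using the parser directly on a string.  With
    # noHeader=True only a dictionary body is expected; entries are then
    # available through getData() or item access:
    #
    #   parser=FoamFileParser("a 1;\nb (0 1 2);",noHeader=True)
    #   parser["a"]        # -> 1
    #   parser.getData()   # the complete dictionary
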
    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __iter__(self):
        for key in self.data:
            yield key

    def resetDecoration(self):
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """Get the data structure"""
        return self.data

    def getHeader(self):
        """Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print "------"
        print c[max(0,ind-100):max(0,ind-1)]
        print "------"
        print ">",c[ind-1],"<"
        print "------"
        print c[min(len(c),ind):min(len(c),ind+100)]
        print "------"

    def parserError(self,text,c="",ind=0):
        """Prints the error message of the parser and raises an exception"""
        print "PARSER ERROR:",text
        print "On index",ind
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    def condenseAllPreFixLists(self,orig):
        """Checks whether this list is a list that consists only of prefixed lists"""
        isAllPreList=False
        if (len(orig) % 2)==0:
            isAllPreList=True
            for i in range(0,len(orig),2):
                if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
                    if len(orig[i+1])!=orig[i]:
                        isAllPreList=False
                        break
                else:
                    isAllPreList=False
                    break

        if isAllPreList:
            return orig[1::2]
        else:
            return orig

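    # Example of what condenseAllPreFixLists does: a list that alternates
    # between a length and a sub-list of exactly that length loses the length
    # prefixes; everything else is returned unchanged.
    #
    #   [2, [0, 1], 3, [4, 5, 6]]  ->  [[0, 1], [4, 5, 6]]
    #   [2, [0, 1], 3]             ->  unchanged (odd length)
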
    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
    )

    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
    )

    def t_unparsed_left(self,t):
        r'\('
        t.lexer.level+=1

    def t_unparsed_right(self,t):
        r'\)'
        t.lexer.level-=1

        if t.lexer.level < 0:
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]

            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_SUBSTITUTION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # the trailing ')' belongs to an enclosing group, not the name
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                # only treat the '=' as a reaction if it is not preceded by a
                # comment or a quote on the same line
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,noHeader=True)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

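    # Sketch of what the macro handling above does when doMacroExpansion is
    # set: an "#include" directive is resolved immediately (the included file
    # is parsed without a header and its entries are merged into the current
    # dictionary) and a "$var" substitution is replaced by a deep copy of the
    # matching top-level entry (see p_substitution below).  Without macro
    # expansion the directives are kept verbatim in the parsed data.
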
    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            p[0][p[2][0]]=p[2][1]
            p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=self.dictStack[-1]

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.listLengthUnparsed!=None:
            if int(p[-1])>=self.listLengthUnparsed:
                # switch the lexer into the 'unparsed' state so that the long
                # list is collected as a single chunk instead of being parsed
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if nm in self.dictStack[0]:
                p[0]=deepcopy(self.dictStack[0][nm])
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | dictkey dictionary'''
        if len(p)==4 and type(p[2])==list:
            # collapse prefixed sub-lists (an integer followed by a list of
            # exactly that length) into the bare lists
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)>=3:
            p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=apply(Dimension,result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])

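    # These rules turn the bracketed literals of OpenFOAM dictionaries into
    # typed objects: a '[...]' group with seven numbers (or five, padded with
    # two zeros) becomes a Dimension, a '(...)' group with three numbers a
    # Vector, with nine a Tensor and with six a SymmTensor, e.g.
    #
    #   dimensions      [0 2 -1 0 0 0 0];
    #   value           (1 0 0);
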
    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

class PyFoamParserError(Exception):
    def __init__(self,descr,data=None):
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result

class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,content,debug=False):
        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=True)

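# Example (sketch): FoamStringParser is the shorthand for parsing dictionary
# content that is already in memory:
#
#   p=FoamStringParser("nFaces 100;\nstartFace 200;")
#   p["nFaces"]   # -> 100
#   p.getData()   # the complete dictionary
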
class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundary file"""

    def __init__(self,name,backup=False,debug=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     debug=debug,
                                     boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in self.content.iteritems():
            temp.append((k,v))

        temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string

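# Example (sketch): the boundary file of a polyMesh is a prefixed list, but
# this class exposes it as a dictionary keyed by patch name; when written back
# the patches are ordered by their startFace.
#
#   bnd=ParsedBoundaryDict("constant/polyMesh/boundary")
#   bnd["inlet"]["type"]    # e.g. 'patch'
#   bnd.writeFile()         # writeFile() comes from the FileBasisBackup base
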
class ParsedFileHeader(ParsedParameterFile):
    """Only parses the header of a file"""

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)
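
# Example (sketch): when only the FoamFile header matters (for instance to
# check which field class a file contains) the body can be skipped via the
# noBody switch; the header-only class above then gives dictionary-style
# access to the header entries.
#
#   hdr=ParsedParameterFile("0/U",noBody=True).header
#   hdr["class"]    # -> 'volVectorField' for a velocity field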
866