/**
 * Copyright (c) 2008-2012, http://www.snakeyaml.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.yaml.snakeyaml.parser;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.yaml.snakeyaml.DumperOptions.Version;
import org.yaml.snakeyaml.error.Mark;
import org.yaml.snakeyaml.error.YAMLException;
import org.yaml.snakeyaml.events.AliasEvent;
import org.yaml.snakeyaml.events.DocumentEndEvent;
import org.yaml.snakeyaml.events.DocumentStartEvent;
import org.yaml.snakeyaml.events.Event;
import org.yaml.snakeyaml.events.ImplicitTuple;
import org.yaml.snakeyaml.events.MappingEndEvent;
import org.yaml.snakeyaml.events.MappingStartEvent;
import org.yaml.snakeyaml.events.ScalarEvent;
import org.yaml.snakeyaml.events.SequenceEndEvent;
import org.yaml.snakeyaml.events.SequenceStartEvent;
import org.yaml.snakeyaml.events.StreamEndEvent;
import org.yaml.snakeyaml.events.StreamStartEvent;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.reader.StreamReader;
import org.yaml.snakeyaml.scanner.Scanner;
import org.yaml.snakeyaml.scanner.ScannerImpl;
import org.yaml.snakeyaml.tokens.AliasToken;
import org.yaml.snakeyaml.tokens.AnchorToken;
import org.yaml.snakeyaml.tokens.BlockEntryToken;
import org.yaml.snakeyaml.tokens.DirectiveToken;
import org.yaml.snakeyaml.tokens.ScalarToken;
import org.yaml.snakeyaml.tokens.StreamEndToken;
import org.yaml.snakeyaml.tokens.StreamStartToken;
import org.yaml.snakeyaml.tokens.TagToken;
import org.yaml.snakeyaml.tokens.TagTuple;
import org.yaml.snakeyaml.tokens.Token;
import org.yaml.snakeyaml.util.ArrayStack;

/**
 * <pre>
 * # The following YAML grammar is LL(1) and is parsed by a recursive descent parser.
 * stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
 * implicit_document ::= block_node DOCUMENT-END*
 * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
 * block_node_or_indentless_sequence ::=
 *                       ALIAS
 *                       | properties (block_content | indentless_block_sequence)?
 *                       | block_content
 *                       | indentless_block_sequence
 * block_node        ::= ALIAS
 *                       | properties block_content?
 *                       | block_content
 * flow_node         ::= ALIAS
 *                       | properties flow_content?
 *                       | flow_content
 * properties        ::= TAG ANCHOR? | ANCHOR TAG?
 * block_content     ::= block_collection | flow_collection | SCALAR
 * flow_content      ::= flow_collection | SCALAR
 * block_collection  ::= block_sequence | block_mapping
 * flow_collection   ::= flow_sequence | flow_mapping
 * block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
 * indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
 * block_mapping     ::= BLOCK-MAPPING-START
 *                       ((KEY block_node_or_indentless_sequence?)?
 *                       (VALUE block_node_or_indentless_sequence?)?)*
 *                       BLOCK-END
 * flow_sequence     ::= FLOW-SEQUENCE-START
 *                       (flow_sequence_entry FLOW-ENTRY)*
 *                       flow_sequence_entry?
 *                       FLOW-SEQUENCE-END
 * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
 * flow_mapping      ::= FLOW-MAPPING-START
 *                       (flow_mapping_entry FLOW-ENTRY)*
 *                       flow_mapping_entry?
 *                       FLOW-MAPPING-END
 * flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
 * FIRST sets:
 * stream: { STREAM-START }
 * explicit_document: { DIRECTIVE DOCUMENT-START }
 * implicit_document: FIRST(block_node)
 * block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
 * flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
 * block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
 * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * block_sequence: { BLOCK-SEQUENCE-START }
 * block_mapping: { BLOCK-MAPPING-START }
 * block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
 * indentless_sequence: { ENTRY }
 * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
 * flow_sequence: { FLOW-SEQUENCE-START }
 * flow_mapping: { FLOW-MAPPING-START }
 * flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
 * flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
 * </pre>
 *
 * Since writing a recursive descent parser is a straightforward task, we do
 * not give many comments here.
 */
public final class ParserImpl implements Parser {
    private static final Map<String, String> DEFAULT_TAGS = new HashMap<String, String>();
    static {
        DEFAULT_TAGS.put("!", "!");
        DEFAULT_TAGS.put("!!", Tag.PREFIX);
    }

    private final Scanner scanner;
    private Event currentEvent;
    private final ArrayStack<Production> states;
    private final ArrayStack<Mark> marks;
    private Production state;
    private VersionTagsTuple directives;

    public ParserImpl(StreamReader reader) {
        this.scanner = new ScannerImpl(reader);
        currentEvent = null;
        directives = new VersionTagsTuple(null, new HashMap<String, String>(DEFAULT_TAGS));
        states = new ArrayStack<Production>(100);
        marks = new ArrayStack<Mark>(10);
        state = new ParseStreamStart();
    }

    /**
     * Check the type of the next event.
     */
    public boolean checkEvent(Event.ID choices) {
        peekEvent();
        if (currentEvent != null) {
            if (currentEvent.is(choices)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Get the next event without consuming it.
     */
    public Event peekEvent() {
        if (currentEvent == null) {
            if (state != null) {
                currentEvent = state.produce();
            }
        }
        return currentEvent;
    }

    /**
     * Get the next event and proceed further.
     */
    public Event getEvent() {
        peekEvent();
        Event value = currentEvent;
        currentEvent = null;
        return value;
    }
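
    /*
     * Usage sketch (illustrative only, not part of the original source): a
     * caller typically drives the parser through the three methods above,
     * peeking to decide and pulling events until the stream ends. The inline
     * YAML string is an arbitrary example.
     *
     *   Parser parser = new ParserImpl(new StreamReader("a: 1\nb: [x, y]\n"));
     *   while (!parser.checkEvent(Event.ID.StreamEnd)) {
     *       Event event = parser.getEvent();
     *       System.out.println(event);
     *   }
     *   parser.getEvent(); // consume the trailing StreamEndEvent as well
     */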

    /**
     * <pre>
     * stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
     * implicit_document ::= block_node DOCUMENT-END*
     * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
     * </pre>
     */
    private class ParseStreamStart implements Production {
        public Event produce() {
            // Parse the stream start.
            StreamStartToken token = (StreamStartToken) scanner.getToken();
            Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark());
            // Prepare the next state.
            state = new ParseImplicitDocumentStart();
            return event;
        }
    }

    private class ParseImplicitDocumentStart implements Production {
        public Event produce() {
            // Parse an implicit document.
            if (!scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart, Token.ID.StreamEnd)) {
                directives = new VersionTagsTuple(null, DEFAULT_TAGS);
                Token token = scanner.peekToken();
                Mark startMark = token.getStartMark();
                Mark endMark = startMark;
                Event event = new DocumentStartEvent(startMark, endMark, false, null, null);
                // Prepare the next state.
                states.push(new ParseDocumentEnd());
                state = new ParseBlockNode();
                return event;
            } else {
                Production p = new ParseDocumentStart();
                return p.produce();
            }
        }
    }

    private class ParseDocumentStart implements Production {
        public Event produce() {
            // Parse any extra document end indicators.
            while (scanner.checkToken(Token.ID.DocumentEnd)) {
                scanner.getToken();
            }
            // Parse an explicit document.
            Event event;
            if (!scanner.checkToken(Token.ID.StreamEnd)) {
                Token token = scanner.peekToken();
                Mark startMark = token.getStartMark();
                VersionTagsTuple tuple = processDirectives();
                if (!scanner.checkToken(Token.ID.DocumentStart)) {
                    throw new ParserException(null, null, "expected '<document start>', but found "
                            + scanner.peekToken().getTokenId(), scanner.peekToken().getStartMark());
                }
                token = scanner.getToken();
                Mark endMark = token.getEndMark();
                event = new DocumentStartEvent(startMark, endMark, true, tuple.getVersion(),
                        tuple.getTags());
                states.push(new ParseDocumentEnd());
                state = new ParseDocumentContent();
            } else {
                // Parse the end of the stream.
                StreamEndToken token = (StreamEndToken) scanner.getToken();
                event = new StreamEndEvent(token.getStartMark(), token.getEndMark());
                if (!states.isEmpty()) {
                    throw new YAMLException("Unexpected end of stream. States left: " + states);
                }
                if (!marks.isEmpty()) {
                    throw new YAMLException("Unexpected end of stream. Marks left: " + marks);
                }
                state = null;
            }
            return event;
        }
    }

    private class ParseDocumentEnd implements Production {
        public Event produce() {
            // Parse the document end.
            Token token = scanner.peekToken();
            Mark startMark = token.getStartMark();
            Mark endMark = startMark;
            boolean explicit = false;
            if (scanner.checkToken(Token.ID.DocumentEnd)) {
                token = scanner.getToken();
                endMark = token.getEndMark();
                explicit = true;
            }
            Event event = new DocumentEndEvent(startMark, endMark, explicit);
            // Prepare the next state.
            state = new ParseDocumentStart();
            return event;
        }
    }

    private class ParseDocumentContent implements Production {
        public Event produce() {
            Event event;
            if (scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart,
                    Token.ID.DocumentEnd, Token.ID.StreamEnd)) {
                event = processEmptyScalar(scanner.peekToken().getStartMark());
                state = states.pop();
                return event;
            } else {
                Production p = new ParseBlockNode();
                return p.produce();
            }
        }
    }

    @SuppressWarnings("unchecked")
    private VersionTagsTuple processDirectives() {
        Version yamlVersion = null;
        HashMap<String, String> tagHandles = new HashMap<String, String>();
        while (scanner.checkToken(Token.ID.Directive)) {
            @SuppressWarnings("rawtypes")
            DirectiveToken token = (DirectiveToken) scanner.getToken();
            if (token.getName().equals("YAML")) {
                if (yamlVersion != null) {
                    throw new ParserException(null, null, "found duplicate YAML directive",
                            token.getStartMark());
                }
                List<Integer> value = (List<Integer>) token.getValue();
                Integer major = value.get(0);
                if (major != 1) {
                    throw new ParserException(null, null,
                            "found incompatible YAML document (version 1.* is required)",
                            token.getStartMark());
                }
                Integer minor = value.get(1);
                switch (minor) {
                case 0:
                    yamlVersion = Version.V1_0;
                    break;

                default:
                    yamlVersion = Version.V1_1;
                    break;
                }
            } else if (token.getName().equals("TAG")) {
                List<String> value = (List<String>) token.getValue();
                String handle = value.get(0);
                String prefix = value.get(1);
                if (tagHandles.containsKey(handle)) {
                    throw new ParserException(null, null, "duplicate tag handle " + handle,
                            token.getStartMark());
                }
                tagHandles.put(handle, prefix);
            }
        }
        if (yamlVersion != null || !tagHandles.isEmpty()) {
            // directives in the document found - drop the previous
            for (String key : DEFAULT_TAGS.keySet()) {
                // do not overwrite re-defined tags
                if (!tagHandles.containsKey(key)) {
                    tagHandles.put(key, DEFAULT_TAGS.get(key));
                }
            }
            directives = new VersionTagsTuple(yamlVersion, tagHandles);
        }
        return directives;
    }
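
    /*
     * Worked example (illustrative): given the document prologue
     *
     *   %YAML 1.1
     *   %TAG !e! tag:example.com,2000:
     *   ---
     *
     * processDirectives() yields Version.V1_1 and a tag map of
     * {"!e!" -> "tag:example.com,2000:", "!" -> "!", "!!" -> "tag:yaml.org,2002:"};
     * the two default handles are re-added because the %TAG directive above
     * does not redefine them.
     */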

    /**
     * <pre>
     *  block_node_or_indentless_sequence ::= ALIAS
     *                | properties (block_content | indentless_block_sequence)?
     *                | block_content
     *                | indentless_block_sequence
     *  block_node    ::= ALIAS
     *                    | properties block_content?
     *                    | block_content
     *  flow_node     ::= ALIAS
     *                    | properties flow_content?
     *                    | flow_content
     *  properties    ::= TAG ANCHOR? | ANCHOR TAG?
     *  block_content     ::= block_collection | flow_collection | SCALAR
     *  flow_content      ::= flow_collection | SCALAR
     *  block_collection  ::= block_sequence | block_mapping
     *  flow_collection   ::= flow_sequence | flow_mapping
     * </pre>
     */

    private class ParseBlockNode implements Production {
        public Event produce() {
            return parseNode(true, false);
        }
    }

    private Event parseFlowNode() {
        return parseNode(false, false);
    }

    private Event parseBlockNodeOrIndentlessSequence() {
        return parseNode(true, true);
    }

    private Event parseNode(boolean block, boolean indentlessSequence) {
        Event event;
        Mark startMark = null;
        Mark endMark = null;
        Mark tagMark = null;
        if (scanner.checkToken(Token.ID.Alias)) {
            AliasToken token = (AliasToken) scanner.getToken();
            event = new AliasEvent(token.getValue(), token.getStartMark(), token.getEndMark());
            state = states.pop();
        } else {
            String anchor = null;
            TagTuple tagTokenTag = null;
            if (scanner.checkToken(Token.ID.Anchor)) {
                AnchorToken token = (AnchorToken) scanner.getToken();
                startMark = token.getStartMark();
                endMark = token.getEndMark();
                anchor = token.getValue();
                if (scanner.checkToken(Token.ID.Tag)) {
                    TagToken tagToken = (TagToken) scanner.getToken();
                    tagMark = tagToken.getStartMark();
                    endMark = tagToken.getEndMark();
                    tagTokenTag = tagToken.getValue();
                }
            } else if (scanner.checkToken(Token.ID.Tag)) {
                TagToken tagToken = (TagToken) scanner.getToken();
                startMark = tagToken.getStartMark();
                tagMark = startMark;
                endMark = tagToken.getEndMark();
                tagTokenTag = tagToken.getValue();
                if (scanner.checkToken(Token.ID.Anchor)) {
                    AnchorToken token = (AnchorToken) scanner.getToken();
                    endMark = token.getEndMark();
                    anchor = token.getValue();
                }
            }
            String tag = null;
            if (tagTokenTag != null) {
                String handle = tagTokenTag.getHandle();
                String suffix = tagTokenTag.getSuffix();
                if (handle != null) {
                    if (!directives.getTags().containsKey(handle)) {
                        throw new ParserException("while parsing a node", startMark,
                                "found undefined tag handle " + handle, tagMark);
                    }
                    tag = directives.getTags().get(handle) + suffix;
                } else {
                    tag = suffix;
                }
            }
            if (startMark == null) {
                startMark = scanner.peekToken().getStartMark();
                endMark = startMark;
            }
            event = null;
            boolean implicit = (tag == null || tag.equals("!"));
            if (indentlessSequence && scanner.checkToken(Token.ID.BlockEntry)) {
                endMark = scanner.peekToken().getEndMark();
                event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                        Boolean.FALSE);
                state = new ParseIndentlessSequenceEntry();
            } else {
                if (scanner.checkToken(Token.ID.Scalar)) {
                    ScalarToken token = (ScalarToken) scanner.getToken();
                    endMark = token.getEndMark();
                    ImplicitTuple implicitValues;
                    if ((token.getPlain() && tag == null) || "!".equals(tag)) {
                        implicitValues = new ImplicitTuple(true, false);
                    } else if (tag == null) {
                        implicitValues = new ImplicitTuple(false, true);
                    } else {
                        implicitValues = new ImplicitTuple(false, false);
                    }
                    event = new ScalarEvent(anchor, tag, implicitValues, token.getValue(),
                            startMark, endMark, token.getStyle());
                    state = states.pop();
                } else if (scanner.checkToken(Token.ID.FlowSequenceStart)) {
                    endMark = scanner.peekToken().getEndMark();
                    event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.TRUE);
                    state = new ParseFlowSequenceFirstEntry();
                } else if (scanner.checkToken(Token.ID.FlowMappingStart)) {
                    endMark = scanner.peekToken().getEndMark();
                    event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.TRUE);
                    state = new ParseFlowMappingFirstKey();
                } else if (block && scanner.checkToken(Token.ID.BlockSequenceStart)) {
                    endMark = scanner.peekToken().getStartMark();
                    event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.FALSE);
                    state = new ParseBlockSequenceFirstEntry();
                } else if (block && scanner.checkToken(Token.ID.BlockMappingStart)) {
                    endMark = scanner.peekToken().getStartMark();
                    event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.FALSE);
                    state = new ParseBlockMappingFirstKey();
                } else if (anchor != null || tag != null) {
                    // Empty scalars are allowed even if a tag or an anchor is
                    // specified.
                    event = new ScalarEvent(anchor, tag, new ImplicitTuple(implicit, false), "",
                            startMark, endMark, (char) 0);
                    state = states.pop();
                } else {
                    String node;
                    if (block) {
                        node = "block";
                    } else {
                        node = "flow";
                    }
                    Token token = scanner.peekToken();
                    throw new ParserException("while parsing a " + node + " node", startMark,
                            "expected the node content, but found " + token.getTokenId(),
                            token.getStartMark());
                }
            }
        }
        return event;
    }
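
    /*
     * Tag resolution example (illustrative): with the default handles above,
     * a node written as "!!str foo" carries handle "!!" and suffix "str", so
     * parseNode() resolves it to "tag:yaml.org,2002:str". If the document
     * declared "%TAG !e! tag:example.com,2000:", a node tagged "!e!item"
     * would resolve to "tag:example.com,2000:item"; an undeclared handle
     * raises a ParserException instead.
     */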

    // block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    // BLOCK-END

    private class ParseBlockSequenceFirstEntry implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseBlockSequenceEntry().produce();
        }
    }

    private class ParseBlockSequenceEntry implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.BlockEntry)) {
                BlockEntryToken token = (BlockEntryToken) scanner.getToken();
                if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.BlockEnd)) {
                    states.push(new ParseBlockSequenceEntry());
                    return new ParseBlockNode().produce();
                } else {
                    state = new ParseBlockSequenceEntry();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            if (!scanner.checkToken(Token.ID.BlockEnd)) {
                Token token = scanner.peekToken();
                throw new ParserException("while parsing a block collection", marks.pop(),
                        "expected <block end>, but found " + token.getTokenId(),
                        token.getStartMark());
            }
            Token token = scanner.getToken();
            Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    // indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    private class ParseIndentlessSequenceEntry implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.BlockEntry)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value,
                        Token.ID.BlockEnd)) {
                    states.push(new ParseIndentlessSequenceEntry());
                    return new ParseBlockNode().produce();
                } else {
                    state = new ParseIndentlessSequenceEntry();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            Token token = scanner.peekToken();
            Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            return event;
        }
    }

    private class ParseBlockMappingFirstKey implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseBlockMappingKey().produce();
        }
    }

    private class ParseBlockMappingKey implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Key)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
                    states.push(new ParseBlockMappingValue());
                    return parseBlockNodeOrIndentlessSequence();
                } else {
                    state = new ParseBlockMappingValue();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            if (!scanner.checkToken(Token.ID.BlockEnd)) {
                Token token = scanner.peekToken();
                throw new ParserException("while parsing a block mapping", marks.pop(),
                        "expected <block end>, but found " + token.getTokenId(),
                        token.getStartMark());
            }
            Token token = scanner.getToken();
            Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    private class ParseBlockMappingValue implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Value)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
                    states.push(new ParseBlockMappingKey());
                    return parseBlockNodeOrIndentlessSequence();
                } else {
                    state = new ParseBlockMappingKey();
                    return processEmptyScalar(token.getEndMark());
                }
            }
            state = new ParseBlockMappingKey();
            Token token = scanner.peekToken();
            return processEmptyScalar(token.getStartMark());
        }
    }
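
    /*
     * Event-stream example (illustrative): for the block mapping
     *
     *   key:
     *   - a
     *   - b
     *
     * the value is an indentless sequence, so the productions above emit
     * MappingStart, Scalar("key"), SequenceStart, Scalar("a"), Scalar("b"),
     * SequenceEnd, MappingEnd (wrapped in the usual document/stream events).
     */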

    /**
     * <pre>
     * flow_sequence     ::= FLOW-SEQUENCE-START
     *                       (flow_sequence_entry FLOW-ENTRY)*
     *                       flow_sequence_entry?
     *                       FLOW-SEQUENCE-END
     * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
     * Note that while the production rules for flow_sequence_entry and
     * flow_mapping_entry are identical, their interpretations differ.
     * For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
     * generates an inline mapping (set syntax); see the event-stream example
     * after ParseFlowSequenceEntryMappingEnd below.
     * </pre>
     */
    private class ParseFlowSequenceFirstEntry implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseFlowSequenceEntry(true).produce();
        }
    }

    private class ParseFlowSequenceEntry implements Production {
        private boolean first = false;

        public ParseFlowSequenceEntry(boolean first) {
            this.first = first;
        }

        public Event produce() {
            if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
                if (!first) {
                    if (scanner.checkToken(Token.ID.FlowEntry)) {
                        scanner.getToken();
                    } else {
                        Token token = scanner.peekToken();
                        throw new ParserException("while parsing a flow sequence", marks.pop(),
                                "expected ',' or ']', but got " + token.getTokenId(),
                                token.getStartMark());
                    }
                }
                if (scanner.checkToken(Token.ID.Key)) {
                    Token token = scanner.peekToken();
                    Event event = new MappingStartEvent(null, null, true, token.getStartMark(),
                            token.getEndMark(), Boolean.TRUE);
                    state = new ParseFlowSequenceEntryMappingKey();
                    return event;
                } else if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
                    states.push(new ParseFlowSequenceEntry(false));
                    return parseFlowNode();
                }
            }
            Token token = scanner.getToken();
            Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    private class ParseFlowSequenceEntryMappingKey implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            if (!scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
                states.push(new ParseFlowSequenceEntryMappingValue());
                return parseFlowNode();
            } else {
                state = new ParseFlowSequenceEntryMappingValue();
                return processEmptyScalar(token.getEndMark());
            }
        }
    }

    private class ParseFlowSequenceEntryMappingValue implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Value)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
                    states.push(new ParseFlowSequenceEntryMappingEnd());
                    return parseFlowNode();
                } else {
                    state = new ParseFlowSequenceEntryMappingEnd();
                    return processEmptyScalar(token.getEndMark());
                }
            } else {
                state = new ParseFlowSequenceEntryMappingEnd();
                Token token = scanner.peekToken();
                return processEmptyScalar(token.getStartMark());
            }
        }
    }

    private class ParseFlowSequenceEntryMappingEnd implements Production {
        public Event produce() {
            state = new ParseFlowSequenceEntry(false);
            Token token = scanner.peekToken();
            return new MappingEndEvent(token.getStartMark(), token.getEndMark());
        }
    }
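
    /*
     * Event-stream example (illustrative) for the inline mapping mentioned in
     * the flow_sequence comment above: the flow sequence
     *
     *   [one, two: three]
     *
     * produces SequenceStart, Scalar("one"), MappingStart, Scalar("two"),
     * Scalar("three"), MappingEnd, SequenceEnd, because the KEY token inside
     * the sequence starts a single-pair mapping handled by the three
     * ParseFlowSequenceEntryMapping* productions above.
     */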

    /**
     * <pre>
     *   flow_mapping  ::= FLOW-MAPPING-START
     *          (flow_mapping_entry FLOW-ENTRY)*
     *          flow_mapping_entry?
     *          FLOW-MAPPING-END
     *   flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
     * </pre>
     */
    private class ParseFlowMappingFirstKey implements Production {
        public Event produce() {
            Token token = scanner.getToken();
            marks.push(token.getStartMark());
            return new ParseFlowMappingKey(true).produce();
        }
    }

    private class ParseFlowMappingKey implements Production {
        private boolean first = false;

        public ParseFlowMappingKey(boolean first) {
            this.first = first;
        }

        public Event produce() {
            if (!scanner.checkToken(Token.ID.FlowMappingEnd)) {
                if (!first) {
                    if (scanner.checkToken(Token.ID.FlowEntry)) {
                        scanner.getToken();
                    } else {
                        Token token = scanner.peekToken();
                        throw new ParserException("while parsing a flow mapping", marks.pop(),
                                "expected ',' or '}', but got " + token.getTokenId(),
                                token.getStartMark());
                    }
                }
                if (scanner.checkToken(Token.ID.Key)) {
                    Token token = scanner.getToken();
                    if (!scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry,
                            Token.ID.FlowMappingEnd)) {
                        states.push(new ParseFlowMappingValue());
                        return parseFlowNode();
                    } else {
                        state = new ParseFlowMappingValue();
                        return processEmptyScalar(token.getEndMark());
                    }
                } else if (!scanner.checkToken(Token.ID.FlowMappingEnd)) {
                    states.push(new ParseFlowMappingEmptyValue());
                    return parseFlowNode();
                }
            }
            Token token = scanner.getToken();
            Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
            state = states.pop();
            marks.pop();
            return event;
        }
    }

    private class ParseFlowMappingValue implements Production {
        public Event produce() {
            if (scanner.checkToken(Token.ID.Value)) {
                Token token = scanner.getToken();
                if (!scanner.checkToken(Token.ID.FlowEntry, Token.ID.FlowMappingEnd)) {
                    states.push(new ParseFlowMappingKey(false));
                    return parseFlowNode();
                } else {
                    state = new ParseFlowMappingKey(false);
                    return processEmptyScalar(token.getEndMark());
                }
            } else {
                state = new ParseFlowMappingKey(false);
                Token token = scanner.peekToken();
                return processEmptyScalar(token.getStartMark());
            }
        }
    }

    private class ParseFlowMappingEmptyValue implements Production {
        public Event produce() {
            state = new ParseFlowMappingKey(false);
            return processEmptyScalar(scanner.peekToken().getStartMark());
        }
    }

    /**
     * <pre>
     * block_mapping     ::= BLOCK-MAPPING-START
     *           ((KEY block_node_or_indentless_sequence?)?
     *           (VALUE block_node_or_indentless_sequence?)?)*
     *           BLOCK-END
     * </pre>
     */
    private Event processEmptyScalar(Mark mark) {
        return new ScalarEvent(null, null, new ImplicitTuple(true, false), "", mark, mark, (char) 0);
    }
}