1   /**
2    * Copyright (c) 2008-2011, http://www.snakeyaml.org
3    *
4    * Licensed under the Apache License, Version 2.0 (the "License");
5    * you may not use this file except in compliance with the License.
6    * You may obtain a copy of the License at
7    *
8    *     http://www.apache.org/licenses/LICENSE-2.0
9    *
10   * Unless required by applicable law or agreed to in writing, software
11   * distributed under the License is distributed on an "AS IS" BASIS,
12   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13   * See the License for the specific language governing permissions and
14   * limitations under the License.
15   */
16  
17  package org.yaml.snakeyaml.parser;
18  
19  import java.util.ArrayList;
20  import java.util.HashMap;
21  import java.util.List;
22  import java.util.Map;
23  
24  import org.yaml.snakeyaml.error.Mark;
25  import org.yaml.snakeyaml.error.YAMLException;
26  import org.yaml.snakeyaml.events.AliasEvent;
27  import org.yaml.snakeyaml.events.DocumentEndEvent;
28  import org.yaml.snakeyaml.events.DocumentStartEvent;
29  import org.yaml.snakeyaml.events.Event;
30  import org.yaml.snakeyaml.events.ImplicitTuple;
31  import org.yaml.snakeyaml.events.MappingEndEvent;
32  import org.yaml.snakeyaml.events.MappingStartEvent;
33  import org.yaml.snakeyaml.events.ScalarEvent;
34  import org.yaml.snakeyaml.events.SequenceEndEvent;
35  import org.yaml.snakeyaml.events.SequenceStartEvent;
36  import org.yaml.snakeyaml.events.StreamEndEvent;
37  import org.yaml.snakeyaml.events.StreamStartEvent;
38  import org.yaml.snakeyaml.nodes.Tag;
39  import org.yaml.snakeyaml.reader.StreamReader;
40  import org.yaml.snakeyaml.scanner.Scanner;
41  import org.yaml.snakeyaml.scanner.ScannerImpl;
42  import org.yaml.snakeyaml.tokens.AliasToken;
43  import org.yaml.snakeyaml.tokens.AnchorToken;
44  import org.yaml.snakeyaml.tokens.BlockEntryToken;
45  import org.yaml.snakeyaml.tokens.DirectiveToken;
46  import org.yaml.snakeyaml.tokens.ScalarToken;
47  import org.yaml.snakeyaml.tokens.StreamEndToken;
48  import org.yaml.snakeyaml.tokens.StreamStartToken;
49  import org.yaml.snakeyaml.tokens.TagToken;
50  import org.yaml.snakeyaml.tokens.TagTuple;
51  import org.yaml.snakeyaml.tokens.Token;
52  import org.yaml.snakeyaml.util.ArrayStack;
53  
54  /**
55   * <pre>
56   * # The following YAML grammar is LL(1) and is parsed by a recursive
57   * # descent parser.
58   * stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
59   * implicit_document ::= block_node DOCUMENT-END*
60   * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
61   * block_node_or_indentless_sequence ::=
62   *                       ALIAS
63   *                       | properties (block_content | indentless_block_sequence)?
64   *                       | block_content
65   *                       | indentless_block_sequence
66   * block_node        ::= ALIAS
67   *                       | properties block_content?
68   *                       | block_content
69   * flow_node         ::= ALIAS
70   *                       | properties flow_content?
71   *                       | flow_content
72   * properties        ::= TAG ANCHOR? | ANCHOR TAG?
73   * block_content     ::= block_collection | flow_collection | SCALAR
74   * flow_content      ::= flow_collection | SCALAR
75   * block_collection  ::= block_sequence | block_mapping
76   * flow_collection   ::= flow_sequence | flow_mapping
77   * block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
78   * indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
79   * block_mapping     ::= BLOCK-MAPPING-START
80   *                       ((KEY block_node_or_indentless_sequence?)?
81   *                       (VALUE block_node_or_indentless_sequence?)?)*
82   *                       BLOCK-END
83   * flow_sequence     ::= FLOW-SEQUENCE-START
84   *                       (flow_sequence_entry FLOW-ENTRY)*
85   *                       flow_sequence_entry?
86   *                       FLOW-SEQUENCE-END
87   * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
88   * flow_mapping      ::= FLOW-MAPPING-START
89   *                       (flow_mapping_entry FLOW-ENTRY)*
90   *                       flow_mapping_entry?
91   *                       FLOW-MAPPING-END
92   * flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
93   * FIRST sets:
94   * stream: { STREAM-START }
95   * explicit_document: { DIRECTIVE DOCUMENT-START }
96   * implicit_document: FIRST(block_node)
97   * block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
98   * flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
99   * block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
100  * flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
101  * block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
102  * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
103  * block_sequence: { BLOCK-SEQUENCE-START }
104  * block_mapping: { BLOCK-MAPPING-START }
105  * block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
106  * indentless_sequence: { BLOCK-ENTRY }
107  * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
108  * flow_sequence: { FLOW-SEQUENCE-START }
109  * flow_mapping: { FLOW-MAPPING-START }
110  * flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
111  * flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
112  * </pre>
113  * 
114  * Since writing a recursive descent parser for the grammar above is a
115  * straightforward task, the productions below carry only brief comments.
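 * <p>
 * A minimal usage sketch (hypothetical caller code; the literal input and the
 * event handling shown here are only illustrative):
 *
 * <pre>
 * Parser parser = new ParserImpl(new StreamReader("a: 1"));
 * while (!parser.checkEvent(Event.ID.StreamEnd)) {
 *     Event event = parser.getEvent();
 *     // react to StreamStartEvent, DocumentStartEvent, MappingStartEvent, ...
 * }
 * parser.getEvent(); // consume the final StreamEndEvent
 * </pre>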
116  */
117 public final class ParserImpl implements Parser {
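    // Default tag handles: the primary handle "!" resolves to itself and the
    // secondary handle "!!" resolves to Tag.PREFIX ("tag:yaml.org,2002:").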
118     private static final Map<String, String> DEFAULT_TAGS = new HashMap<String, String>();
119     static {
120         DEFAULT_TAGS.put("!", "!");
121         DEFAULT_TAGS.put("!!", Tag.PREFIX);
122     }
123 
124     private final Scanner scanner;
125     private Event currentEvent;
126     private List<Integer> yamlVersion;
127     private Map<String, String> tagHandles;
128     private final ArrayStack<Production> states;
129     private final ArrayStack<Mark> marks;
130     private Production state;
131 
132     public ParserImpl(StreamReader reader) {
133         this.scanner = new ScannerImpl(reader);
134         currentEvent = null;
135         yamlVersion = null;
136         tagHandles = new HashMap<String, String>();
137         states = new ArrayStack<Production>(100);
138         marks = new ArrayStack<Mark>(10);
139         state = new ParseStreamStart();
140     }
141 
142     /**
143      * Check whether the next event is of the given type.
144      */
145     public boolean checkEvent(Event.ID choices) {
146         peekEvent();
147         if (currentEvent != null) {
148             if (currentEvent.is(choices)) {
149                 return true;
150             }
151         }
152         return false;
153     }
154 
155     /**
156      * Return the next event without consuming it.
157      */
158     public Event peekEvent() {
159         if (currentEvent == null) {
160             if (state != null) {
161                 currentEvent = state.produce();
162             }
163         }
164         return currentEvent;
165     }
166 
167     /**
168      * Return the next event and remove it from the stream.
169      */
170     public Event getEvent() {
171         peekEvent();
172         Event value = currentEvent;
173         currentEvent = null;
174         return value;
175     }
176 
177     /**
178      * <pre>
179      * stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
180      * implicit_document ::= block_node DOCUMENT-END*
181      * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
182      * </pre>
183      */
184     private class ParseStreamStart implements Production {
185         public Event produce() {
186             // Parse the stream start.
187             StreamStartToken token = (StreamStartToken) scanner.getToken();
188             Event event = new StreamStartEvent(token.getStartMark(), token.getEndMark());
189             // Prepare the next state.
190             state = new ParseImplicitDocumentStart();
191             return event;
192         }
193     }
194 
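    // A document is implicit when it is introduced by neither directives nor an
    // explicit "---" marker; a stream that begins directly with e.g. "key: value"
    // is a single implicit document.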
195     private class ParseImplicitDocumentStart implements Production {
196         public Event produce() {
197             // Parse an implicit document.
198             if (!scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart, Token.ID.StreamEnd)) {
199                 tagHandles = DEFAULT_TAGS;
200                 Token token = scanner.peekToken();
201                 Mark startMark = token.getStartMark();
202                 Mark endMark = startMark;
203                 Event event = new DocumentStartEvent(startMark, endMark, false, null, null);
204                 // Prepare the next state.
205                 states.push(new ParseDocumentEnd());
206                 state = new ParseBlockNode();
207                 return event;
208             } else {
209                 Production p = new ParseDocumentStart();
210                 return p.produce();
211             }
212         }
213     }
214 
215     private class ParseDocumentStart implements Production {
216         @SuppressWarnings("unchecked")
217         public Event produce() {
218             // Parse any extra document end indicators.
219             while (scanner.checkToken(Token.ID.DocumentEnd)) {
220                 scanner.getToken();
221             }
222             // Parse an explicit document.
223             Event event;
224             if (!scanner.checkToken(Token.ID.StreamEnd)) {
225                 Token token = scanner.peekToken();
226                 Mark startMark = token.getStartMark();
227                 List<Object> version_tags = processDirectives();
228                 List<Object> version = (List<Object>) version_tags.get(0);
229                 Map<String, String> tags = (Map<String, String>) version_tags.get(1);
230                 if (!scanner.checkToken(Token.ID.DocumentStart)) {
231                     throw new ParserException(null, null, "expected '<document start>', but found "
232                             + scanner.peekToken().getTokenId(), scanner.peekToken().getStartMark());
233                 }
234                 token = scanner.getToken();
235                 Mark endMark = token.getEndMark();
236                 Integer[] versionInteger;
237                 if (version != null) {
238                     versionInteger = new Integer[2];
239                     versionInteger = version.toArray(versionInteger);
240                 } else {
241                     versionInteger = null;
242                 }
243                 event = new DocumentStartEvent(startMark, endMark, true, versionInteger, tags);
244                 states.push(new ParseDocumentEnd());
245                 state = new ParseDocumentContent();
246             } else {
247                 // Parse the end of the stream.
248                 StreamEndToken token = (StreamEndToken) scanner.getToken();
249                 event = new StreamEndEvent(token.getStartMark(), token.getEndMark());
250                 if (!states.isEmpty()) {
251                     throw new YAMLException("Unexpected end of stream. States left: " + states);
252                 }
253                 if (!marks.isEmpty()) {
254                     throw new YAMLException("Unexpected end of stream. Marks left: " + marks);
255                 }
256                 state = null;
257             }
258             return event;
259         }
260     }
261 
262     private class ParseDocumentEnd implements Production {
263         public Event produce() {
264             // Parse the document end.
265             Token token = scanner.peekToken();
266             Mark startMark = token.getStartMark();
267             Mark endMark = startMark;
268             boolean explicit = false;
269             if (scanner.checkToken(Token.ID.DocumentEnd)) {
270                 token = scanner.getToken();
271                 endMark = token.getEndMark();
272                 explicit = true;
273             }
274             Event event = new DocumentEndEvent(startMark, endMark, explicit);
275             // Prepare the next state.
276             state = new ParseDocumentStart();
277             return event;
278         }
279     }
280 
281     private class ParseDocumentContent implements Production {
282         public Event produce() {
283             Event event;
284             if (scanner.checkToken(Token.ID.Directive, Token.ID.DocumentStart,
285                     Token.ID.DocumentEnd, Token.ID.StreamEnd)) {
286                 event = processEmptyScalar(scanner.peekToken().getStartMark());
287                 state = states.pop();
288                 return event;
289             } else {
290                 Production p = new ParseBlockNode();
291                 return p.produce();
292             }
293         }
294     }
295 
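    // Collects the directives that may precede an explicit document, e.g.:
    //   %YAML 1.1
    //   %TAG !e! tag:example.com,2000:app/
    //   ---
    // The %YAML directive fixes the document version; each %TAG directive registers a
    // handle/prefix pair in tagHandles. Returns a two-element list: the version (or
    // null) and a copy of the tag handle map.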
296     @SuppressWarnings("unchecked")
297     private List<Object> processDirectives() {
298         yamlVersion = null;
299         tagHandles = new HashMap<String, String>();
300         while (scanner.checkToken(Token.ID.Directive)) {
301             @SuppressWarnings("rawtypes")
302             DirectiveToken token = (DirectiveToken) scanner.getToken();
303             if (token.getName().equals("YAML")) {
304                 if (yamlVersion != null) {
305                     throw new ParserException(null, null, "found duplicate YAML directive",
306                             token.getStartMark());
307                 }
308                 List<Integer> value = (List<Integer>) token.getValue();
309                 Integer major = value.get(0);
310                 if (major != 1) {
311                     throw new ParserException(null, null,
312                             "found incompatible YAML document (version 1.* is required)",
313                             token.getStartMark());
314                 }
315                 yamlVersion = (List<Integer>) token.getValue();
316             } else if (token.getName().equals("TAG")) {
317                 List<String> value = (List<String>) token.getValue();
318                 String handle = value.get(0);
319                 String prefix = value.get(1);
320                 if (tagHandles.containsKey(handle)) {
321                     throw new ParserException(null, null, "duplicate tag handle " + handle,
322                             token.getStartMark());
323                 }
324                 tagHandles.put(handle, prefix);
325             }
326         }
327         List<Object> value = new ArrayList<Object>(2);
328         value.add(yamlVersion);
329         if (!tagHandles.isEmpty()) {
330             value.add(new HashMap<String, String>(tagHandles));
331         } else {
332             value.add(new HashMap<String, String>());
333         }
334         for (String key : DEFAULT_TAGS.keySet()) {
335             if (!tagHandles.containsKey(key)) {
336                 tagHandles.put(key, DEFAULT_TAGS.get(key));
337             }
338         }
339         return value;
340     }
341 
342     /**
343      * <pre>
344      *  block_node_or_indentless_sequence ::= ALIAS
345      *                | properties (block_content | indentless_block_sequence)?
346      *                | block_content
347      *                | indentless_block_sequence
348      *  block_node    ::= ALIAS
349      *                    | properties block_content?
350      *                    | block_content
351      *  flow_node     ::= ALIAS
352      *                    | properties flow_content?
353      *                    | flow_content
354      *  properties    ::= TAG ANCHOR? | ANCHOR TAG?
355      *  block_content     ::= block_collection | flow_collection | SCALAR
356      *  flow_content      ::= flow_collection | SCALAR
357      *  block_collection  ::= block_sequence | block_mapping
358      *  flow_collection   ::= flow_sequence | flow_mapping
359      * </pre>
360      */
361 
362     private class ParseBlockNode implements Production {
363         public Event produce() {
364             return parseNode(true, false);
365         }
366     }
367 
368     private Event parseFlowNode() {
369         return parseNode(false, false);
370     }
371 
372     private Event parseBlockNodeOrIndentlessSequence() {
373         return parseNode(true, true);
374     }
375 
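    // Node properties (anchor and tag) may appear in either order before the node
    // content, e.g. "&a !!str text" or "!!str &a text". A tag with a handle such as
    // "!!" is resolved through tagHandles ("!!str" -> "tag:yaml.org,2002:str"); a tag
    // without a handle is used verbatim.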
376     private Event parseNode(boolean block, boolean indentlessSequence) {
377         Event event;
378         Mark startMark = null;
379         Mark endMark = null;
380         Mark tagMark = null;
381         if (scanner.checkToken(Token.ID.Alias)) {
382             AliasToken token = (AliasToken) scanner.getToken();
383             event = new AliasEvent(token.getValue(), token.getStartMark(), token.getEndMark());
384             state = states.pop();
385         } else {
386             String anchor = null;
387             TagTuple tagTokenTag = null;
388             if (scanner.checkToken(Token.ID.Anchor)) {
389                 AnchorToken token = (AnchorToken) scanner.getToken();
390                 startMark = token.getStartMark();
391                 endMark = token.getEndMark();
392                 anchor = token.getValue();
393                 if (scanner.checkToken(Token.ID.Tag)) {
394                     TagToken tagToken = (TagToken) scanner.getToken();
395                     tagMark = tagToken.getStartMark();
396                     endMark = tagToken.getEndMark();
397                     tagTokenTag = tagToken.getValue();
398                 }
399             } else if (scanner.checkToken(Token.ID.Tag)) {
400                 TagToken tagToken = (TagToken) scanner.getToken();
401                 startMark = tagToken.getStartMark();
402                 tagMark = startMark;
403                 endMark = tagToken.getEndMark();
404                 tagTokenTag = tagToken.getValue();
405                 if (scanner.checkToken(Token.ID.Anchor)) {
406                     AnchorToken token = (AnchorToken) scanner.getToken();
407                     endMark = token.getEndMark();
408                     anchor = token.getValue();
409                 }
410             }
411             String tag = null;
412             if (tagTokenTag != null) {
413                 String handle = tagTokenTag.getHandle();
414                 String suffix = tagTokenTag.getSuffix();
415                 if (handle != null) {
416                     if (!tagHandles.containsKey(handle)) {
417                         throw new ParserException("while parsing a node", startMark,
418                                 "found undefined tag handle " + handle, tagMark);
419                     }
420                     tag = tagHandles.get(handle) + suffix;
421                 } else {
422                     tag = suffix;
423                 }
424             }
425             if (startMark == null) {
426                 startMark = scanner.peekToken().getStartMark();
427                 endMark = startMark;
428             }
429             event = null;
430             boolean implicit = (tag == null || tag.equals("!"));
431             if (indentlessSequence && scanner.checkToken(Token.ID.BlockEntry)) {
432                 endMark = scanner.peekToken().getEndMark();
433                 event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
434                         Boolean.FALSE);
435                 state = new ParseIndentlessSequenceEntry();
436             } else {
437                 if (scanner.checkToken(Token.ID.Scalar)) {
438                     ScalarToken token = (ScalarToken) scanner.getToken();
439                     endMark = token.getEndMark();
440                     ImplicitTuple implicitValues;
441                     if ((token.getPlain() && tag == null) || "!".equals(tag)) {
442                         implicitValues = new ImplicitTuple(true, false);
443                     } else if (tag == null) {
444                         implicitValues = new ImplicitTuple(false, true);
445                     } else {
446                         implicitValues = new ImplicitTuple(false, false);
447                     }
448                     event = new ScalarEvent(anchor, tag, implicitValues, token.getValue(),
449                             startMark, endMark, token.getStyle());
450                     state = states.pop();
451                 } else if (scanner.checkToken(Token.ID.FlowSequenceStart)) {
452                     endMark = scanner.peekToken().getEndMark();
453                     event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
454                             Boolean.TRUE);
455                     state = new ParseFlowSequenceFirstEntry();
456                 } else if (scanner.checkToken(Token.ID.FlowMappingStart)) {
457                     endMark = scanner.peekToken().getEndMark();
458                     event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
459                             Boolean.TRUE);
460                     state = new ParseFlowMappingFirstKey();
461                 } else if (block && scanner.checkToken(Token.ID.BlockSequenceStart)) {
462                     endMark = scanner.peekToken().getStartMark();
463                     event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
464                             Boolean.FALSE);
465                     state = new ParseBlockSequenceFirstEntry();
466                 } else if (block && scanner.checkToken(Token.ID.BlockMappingStart)) {
467                     endMark = scanner.peekToken().getStartMark();
468                     event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
469                             Boolean.FALSE);
470                     state = new ParseBlockMappingFirstKey();
471                 } else if (anchor != null || tag != null) {
472                     // Empty scalars are allowed even if a tag or an anchor is
473                     // specified.
474                     event = new ScalarEvent(anchor, tag, new ImplicitTuple(implicit, false), "",
475                             startMark, endMark, (char) 0);
476                     state = states.pop();
477                 } else {
478                     String node;
479                     if (block) {
480                         node = "block";
481                     } else {
482                         node = "flow";
483                     }
484                     Token token = scanner.peekToken();
485                     throw new ParserException("while parsing a " + node + " node", startMark,
486                             "expected the node content, but found " + token.getTokenId(),
487                             token.getStartMark());
488                 }
489             }
490         }
491         return event;
492     }
493 
494     // block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
495     // BLOCK-END
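    // For example:
    //   - one
    //   - two
    // Each "-" is a BLOCK-ENTRY token; the surrounding BLOCK-SEQUENCE-START and
    // BLOCK-END tokens are derived by the scanner from the indentation.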
496 
497     private class ParseBlockSequenceFirstEntry implements Production {
498         public Event produce() {
499             Token token = scanner.getToken();
500             marks.push(token.getStartMark());
501             return new ParseBlockSequenceEntry().produce();
502         }
503     }
504 
505     private class ParseBlockSequenceEntry implements Production {
506         public Event produce() {
507             if (scanner.checkToken(Token.ID.BlockEntry)) {
508                 BlockEntryToken token = (BlockEntryToken) scanner.getToken();
509                 if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.BlockEnd)) {
510                     states.push(new ParseBlockSequenceEntry());
511                     return new ParseBlockNode().produce();
512                 } else {
513                     state = new ParseBlockSequenceEntry();
514                     return processEmptyScalar(token.getEndMark());
515                 }
516             }
517             if (!scanner.checkToken(Token.ID.BlockEnd)) {
518                 Token token = scanner.peekToken();
519                 throw new ParserException("while parsing a block collection", marks.pop(),
520                         "expected <block end>, but found " + token.getTokenId(),
521                         token.getStartMark());
522             }
523             Token token = scanner.getToken();
524             Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
525             state = states.pop();
526             marks.pop();
527             return event;
528         }
529     }
530 
531     // indentless_sequence ::= (BLOCK-ENTRY block_node?)+
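    // An indentless sequence is a block sequence that serves as the value of a mapping
    // key without additional indentation, e.g.:
    //   key:
    //   - one
    //   - two
    // The scanner emits no BLOCK-SEQUENCE-START/BLOCK-END tokens for it, hence the
    // separate production.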
532 
533     private class ParseIndentlessSequenceEntry implements Production {
534         public Event produce() {
535             if (scanner.checkToken(Token.ID.BlockEntry)) {
536                 Token token = scanner.getToken();
537                 if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value,
538                         Token.ID.BlockEnd)) {
539                     states.push(new ParseIndentlessSequenceEntry());
540                     return new ParseBlockNode().produce();
541                 } else {
542                     state = new ParseIndentlessSequenceEntry();
543                     return processEmptyScalar(token.getEndMark());
544                 }
545             }
546             Token token = scanner.peekToken();
547             Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
548             state = states.pop();
549             return event;
550         }
551     }
552 
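    // block_mapping ::= BLOCK-MAPPING-START
    //                   ((KEY block_node_or_indentless_sequence?)?
    //                   (VALUE block_node_or_indentless_sequence?)?)*
    //                   BLOCK-END
    // For example:
    //   first: value
    //   second: value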
553     private class ParseBlockMappingFirstKey implements Production {
554         public Event produce() {
555             Token token = scanner.getToken();
556             marks.push(token.getStartMark());
557             return new ParseBlockMappingKey().produce();
558         }
559     }
560 
561     private class ParseBlockMappingKey implements Production {
562         public Event produce() {
563             if (scanner.checkToken(Token.ID.Key)) {
564                 Token token = scanner.getToken();
565                 if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
566                     states.push(new ParseBlockMappingValue());
567                     return parseBlockNodeOrIndentlessSequence();
568                 } else {
569                     state = new ParseBlockMappingValue();
570                     return processEmptyScalar(token.getEndMark());
571                 }
572             }
573             if (!scanner.checkToken(Token.ID.BlockEnd)) {
574                 Token token = scanner.peekToken();
575                 throw new ParserException("while parsing a block mapping", marks.pop(),
576                         "expected <block end>, but found " + token.getTokenId(),
577                         token.getStartMark());
578             }
579             Token token = scanner.getToken();
580             Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
581             state = states.pop();
582             marks.pop();
583             return event;
584         }
585     }
586 
587     private class ParseBlockMappingValue implements Production {
588         public Event produce() {
589             if (scanner.checkToken(Token.ID.Value)) {
590                 Token token = scanner.getToken();
591                 if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
592                     states.push(new ParseBlockMappingKey());
593                     return parseBlockNodeOrIndentlessSequence();
594                 } else {
595                     state = new ParseBlockMappingKey();
596                     return processEmptyScalar(token.getEndMark());
597                 }
598             }
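            // There is no VALUE token for the previous key (for example an explicit
            // "? key" entry with no ":" line): emit an empty scalar as its value.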
599             state = new ParseBlockMappingKey();
600             Token token = scanner.peekToken();
601             return processEmptyScalar(token.getStartMark());
602         }
603     }
604 
605     /**
606      * <pre>
607      * flow_sequence     ::= FLOW-SEQUENCE-START
608      *                       (flow_sequence_entry FLOW-ENTRY)*
609      *                       flow_sequence_entry?
610      *                       FLOW-SEQUENCE-END
611      * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
612      * Note that the productions for flow_sequence_entry and flow_mapping_entry
613      * are identical, but they are interpreted differently. For
614      * `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
615      * generates an inline mapping (set syntax).
616      * </pre>
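     * For example, <code>[ one, two: three ]</code> is parsed as a sequence whose
     * second entry is a single-pair mapping.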
617      */
618     private class ParseFlowSequenceFirstEntry implements Production {
619         public Event produce() {
620             Token token = scanner.getToken();
621             marks.push(token.getStartMark());
622             return new ParseFlowSequenceEntry(true).produce();
623         }
624     }
625 
626     private class ParseFlowSequenceEntry implements Production {
627         private boolean first = false;
628 
629         public ParseFlowSequenceEntry(boolean first) {
630             this.first = first;
631         }
632 
633         public Event produce() {
634             if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
635                 if (!first) {
636                     if (scanner.checkToken(Token.ID.FlowEntry)) {
637                         scanner.getToken();
638                     } else {
639                         Token token = scanner.peekToken();
640                         throw new ParserException("while parsing a flow sequence", marks.pop(),
641                                 "expected ',' or ']', but got " + token.getTokenId(),
642                                 token.getStartMark());
643                     }
644                 }
645                 if (scanner.checkToken(Token.ID.Key)) {
646                     Token token = scanner.peekToken();
647                     Event event = new MappingStartEvent(null, null, true, token.getStartMark(),
648                             token.getEndMark(), Boolean.TRUE);
649                     state = new ParseFlowSequenceEntryMappingKey();
650                     return event;
651                 } else if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
652                     states.push(new ParseFlowSequenceEntry(false));
653                     return parseFlowNode();
654                 }
655             }
656             Token token = scanner.getToken();
657             Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
658             state = states.pop();
659             marks.pop();
660             return event;
661         }
662     }
663 
664     private class ParseFlowSequenceEntryMappingKey implements Production {
665         public Event produce() {
666             Token token = scanner.getToken();
667             if (!scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
668                 states.push(new ParseFlowSequenceEntryMappingValue());
669                 return parseFlowNode();
670             } else {
671                 state = new ParseFlowSequenceEntryMappingValue();
672                 return processEmptyScalar(token.getEndMark());
673             }
674         }
675     }
676 
677     private class ParseFlowSequenceEntryMappingValue implements Production {
678         public Event produce() {
679             if (scanner.checkToken(Token.ID.Value)) {
680                 Token token = scanner.getToken();
681                 if (!scanner.checkToken(Token.ID.FlowEntry, Token.ID.FlowSequenceEnd)) {
682                     states.push(new ParseFlowSequenceEntryMappingEnd());
683                     return parseFlowNode();
684                 } else {
685                     state = new ParseFlowSequenceEntryMappingEnd();
686                     return processEmptyScalar(token.getEndMark());
687                 }
688             } else {
689                 state = new ParseFlowSequenceEntryMappingEnd();
690                 Token token = scanner.peekToken();
691                 return processEmptyScalar(token.getStartMark());
692             }
693         }
694     }
695 
696     private class ParseFlowSequenceEntryMappingEnd implements Production {
697         public Event produce() {
698             state = new ParseFlowSequenceEntry(false);
699             Token token = scanner.peekToken();
700             return new MappingEndEvent(token.getStartMark(), token.getEndMark());
701         }
702     }
703 
704     /**
705      * <pre>
706      *   flow_mapping  ::= FLOW-MAPPING-START
707      *          (flow_mapping_entry FLOW-ENTRY)*
708      *          flow_mapping_entry?
709      *          FLOW-MAPPING-END
710      *   flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
711      * </pre>
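     * For example, in <code>{ a: 1, b }</code> the entry <code>b</code> has no value;
     * an empty scalar is produced for it (see ParseFlowMappingEmptyValue).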
712      */
713     private class ParseFlowMappingFirstKey implements Production {
714         public Event produce() {
715             Token token = scanner.getToken();
716             marks.push(token.getStartMark());
717             return new ParseFlowMappingKey(true).produce();
718         }
719     }
720 
721     private class ParseFlowMappingKey implements Production {
722         private boolean first = false;
723 
724         public ParseFlowMappingKey(boolean first) {
725             this.first = first;
726         }
727 
728         public Event produce() {
729             if (!scanner.checkToken(Token.ID.FlowMappingEnd)) {
730                 if (!first) {
731                     if (scanner.checkToken(Token.ID.FlowEntry)) {
732                         scanner.getToken();
733                     } else {
734                         Token token = scanner.peekToken();
735                         throw new ParserException("while parsing a flow mapping", marks.pop(),
736                                 "expected ',' or '}', but got " + token.getTokenId(),
737                                 token.getStartMark());
738                     }
739                 }
740                 if (scanner.checkToken(Token.ID.Key)) {
741                     Token token = scanner.getToken();
742                     if (!scanner.checkToken(Token.ID.Value, Token.ID.FlowEntry,
743                             Token.ID.FlowMappingEnd)) {
744                         states.push(new ParseFlowMappingValue());
745                         return parseFlowNode();
746                     } else {
747                         state = new ParseFlowMappingValue();
748                         return processEmptyScalar(token.getEndMark());
749                     }
750                 } else if (!scanner.checkToken(Token.ID.FlowMappingEnd)) {
751                     states.push(new ParseFlowMappingEmptyValue());
752                     return parseFlowNode();
753                 }
754             }
755             Token token = scanner.getToken();
756             Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
757             state = states.pop();
758             marks.pop();
759             return event;
760         }
761     }
762 
763     private class ParseFlowMappingValue implements Production {
764         public Event produce() {
765             if (scanner.checkToken(Token.ID.Value)) {
766                 Token token = scanner.getToken();
767                 if (!scanner.checkToken(Token.ID.FlowEntry, Token.ID.FlowMappingEnd)) {
768                     states.push(new ParseFlowMappingKey(false));
769                     return parseFlowNode();
770                 } else {
771                     state = new ParseFlowMappingKey(false);
772                     return processEmptyScalar(token.getEndMark());
773                 }
774             } else {
775                 state = new ParseFlowMappingKey(false);
776                 Token token = scanner.peekToken();
777                 return processEmptyScalar(token.getStartMark());
778             }
779         }
780     }
781 
782     private class ParseFlowMappingEmptyValue implements Production {
783         public Event produce() {
784             state = new ParseFlowMappingKey(false);
785             return processEmptyScalar(scanner.peekToken().getStartMark());
786         }
787     }
788 
789     /**
790      * Produce an empty scalar event. Empty scalars stand in for nodes that the
791      * grammar allows to be omitted, for example the missing value of a block
792      * mapping key or a missing entry in a flow collection.
793      * <p>
794      * The event carries no anchor and no tag, uses the plain style, and has the
795      * same mark for its start and end positions.
796      */
797     private Event processEmptyScalar(Mark mark) {
798         return new ScalarEvent(null, null, new ImplicitTuple(true, false), "", mark, mark, (char) 0);
799     }
800 }