Просмотр исходного кода

Support decoding and encoding comments.

Gustavo Niemeyer 7 лет назад
Родитель
Commit
a1ff040dd5
7 измененных файлов с 1467 добавлено и 71 удалено
  1. 50 19
      decode.go
  2. 267 22
      emitterc.go
  3. 34 14
      encode.go
  4. 821 0
      node_test.go
  5. 71 1
      parserc.go
  6. 194 15
      scannerc.go
  7. 30 0
      yamlh.go

+ 50 - 19
decode.go

@@ -33,16 +33,19 @@ const (
 )
 
 type Node struct {
-	Kind     NodeKind
-	Style    Style
-	Line     int
-	Column   int
-	Tag      string
-	Value    string
+	Kind   NodeKind
+	Style  Style
+	Line   int
+	Column int
+	Tag    string
+	Value  string
 	// TODO Alias should probably be the string, and then perhaps have a hidden cache?
 	Alias    *Node // Resolved alias for alias nodes.
 	Anchors  map[string]*Node
 	Children []*Node
+	Header   string
+	Inline   string
+	Footer   string
 }
 
 func (n *Node) implicit() bool {
@@ -59,11 +62,24 @@ func (n *Node) ShortTag() string {
 
 func (n *Node) LongTag() string {
 	if n.Tag == "" || n.Tag == "!" {
-		if n.Style&(SingleQuotedStyle|DoubleQuotedStyle) != 0 {
-			return yaml_STR_TAG
+		switch n.Kind {
+		case MappingNode:
+			return yaml_MAP_TAG
+		case SequenceNode:
+			return yaml_SEQ_TAG
+		case AliasNode:
+			if n.Alias != nil {
+				return n.Alias.LongTag()
+			}
+		case ScalarNode:
+			if n.Style&(SingleQuotedStyle|DoubleQuotedStyle) != 0 {
+				return yaml_STR_TAG
+			}
+			tag, _ := resolve("", n.Value)
+			return tag
 		}
-		tag, _ := resolve("", n.Value)
-		return tag
+		return ""
+
 	} else if strings.HasPrefix(n.Tag, "!!") {
 		return longTagPrefix + n.Tag[2:]
 	}
@@ -211,14 +227,18 @@ func (p *parser) parse() *Node {
 func (p *parser) node(kind NodeKind) *Node {
 	return &Node{
 		Kind:   kind,
-		Line:   p.event.start_mark.line,
-		Column: p.event.start_mark.column,
+		Line:   p.event.start_mark.line + 1,
+		Column: p.event.start_mark.column + 1,
+		Header: string(p.event.header_comment),
+		Inline: string(p.event.inline_comment),
+		Footer: string(p.event.footer_comment),
 	}
 }
 
-func (p *parser) parseChild(parent *Node) {
+func (p *parser) parseChild(parent *Node) *Node {
 	child := p.parse()
 	parent.Children = append(parent.Children, child)
+	return child
 }
 
 func (p *parser) document() *Node {
@@ -227,6 +247,9 @@ func (p *parser) document() *Node {
 	p.doc = n
 	p.expect(yaml_DOCUMENT_START_EVENT)
 	p.parseChild(n)
+	if p.peek() == yaml_DOCUMENT_END_EVENT {
+		n.Footer = string(p.event.footer_comment)
+	}
 	p.expect(yaml_DOCUMENT_END_EVENT)
 	return n
 }
@@ -273,6 +296,8 @@ func (p *parser) sequence() *Node {
 	for p.peek() != yaml_SEQUENCE_END_EVENT {
 		p.parseChild(n)
 	}
+	n.Inline = string(p.event.inline_comment)
+	n.Footer = string(p.event.footer_comment)
 	p.expect(yaml_SEQUENCE_END_EVENT)
 	return n
 }
@@ -286,9 +311,15 @@ func (p *parser) mapping() *Node {
 	p.anchor(n, p.event.anchor)
 	p.expect(yaml_MAPPING_START_EVENT)
 	for p.peek() != yaml_MAPPING_END_EVENT {
-		p.parseChild(n)
-		p.parseChild(n)
+		k := p.parseChild(n)
+		v := p.parseChild(n)
+		if v.Footer != "" {
+			k.Footer = v.Footer
+			v.Footer = ""
+		}
 	}
+	n.Inline = string(p.event.inline_comment)
+	n.Footer = string(p.event.footer_comment)
 	p.expect(yaml_MAPPING_END_EVENT)
 	return n
 }
@@ -332,7 +363,7 @@ func (d *decoder) terror(n *Node, tag string, out reflect.Value) {
 			value = " `" + value + "`"
 		}
 	}
-	d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line+1, shortTag(tag), value, out.Type()))
+	d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line, shortTag(tag), value, out.Type()))
 }
 
 func (d *decoder) callUnmarshaler(n *Node, u Unmarshaler) (good bool) {
@@ -713,7 +744,7 @@ func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) {
 
 func (d *decoder) setMapIndex(n *Node, out, k, v reflect.Value) {
 	if d.strict && out.MapIndex(k) != zeroValue {
-		d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.Line+1, k.Interface()))
+		d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.Line, k.Interface()))
 		return
 	}
 	out.SetMapIndex(k, v)
@@ -782,7 +813,7 @@ func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) {
 		if info, ok := sinfo.FieldsMap[name.String()]; ok {
 			if d.strict {
 				if doneFields[info.Id] {
-					d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line+1, name.String(), out.Type()))
+					d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line, name.String(), out.Type()))
 					continue
 				}
 				doneFields[info.Id] = true
@@ -802,7 +833,7 @@ func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) {
 			d.unmarshal(n.Children[i+1], value)
 			d.setMapIndex(n.Children[i+1], inlineMap, name, value)
 		} else if d.strict {
-			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line+1, name.String(), out.Type()))
+			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line, name.String(), out.Type()))
 		}
 	}
 	return true

+ 267 - 22
emitterc.go

@@ -43,8 +43,13 @@ func put_break(emitter *yaml_emitter_t) bool {
 	default:
 		panic("unknown line break setting")
 	}
+	if emitter.column == 0 {
+		emitter.space_above = true
+	}
 	emitter.column = 0
 	emitter.line++
+	// [Go] Do this here and below and drop from everywhere else (see commented lines).
+	emitter.indention = true
 	return true
 }
 
@@ -97,8 +102,13 @@ func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool {
 		if !write(emitter, s, i) {
 			return false
 		}
+		if emitter.column == 0 {
+			emitter.space_above = true
+		}
 		emitter.column = 0
 		emitter.line++
+		// [Go] Do this here and above and drop from everywhere else (see commented lines).
+		emitter.indention = true
 	}
 	return true
 }
@@ -228,16 +238,22 @@ func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bo
 		return yaml_emitter_emit_document_end(emitter, event)
 
 	case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE:
-		return yaml_emitter_emit_flow_sequence_item(emitter, event, true)
+		return yaml_emitter_emit_flow_sequence_item(emitter, event, true, false)
+
+	case yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE:
+		return yaml_emitter_emit_flow_sequence_item(emitter, event, false, true)
 
 	case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE:
-		return yaml_emitter_emit_flow_sequence_item(emitter, event, false)
+		return yaml_emitter_emit_flow_sequence_item(emitter, event, false, false)
 
 	case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE:
-		return yaml_emitter_emit_flow_mapping_key(emitter, event, true)
+		return yaml_emitter_emit_flow_mapping_key(emitter, event, true, false)
+
+	case yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE:
+		return yaml_emitter_emit_flow_mapping_key(emitter, event, false, true)
 
 	case yaml_EMIT_FLOW_MAPPING_KEY_STATE:
-		return yaml_emitter_emit_flow_mapping_key(emitter, event, false)
+		return yaml_emitter_emit_flow_mapping_key(emitter, event, false, false)
 
 	case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE:
 		return yaml_emitter_emit_flow_mapping_value(emitter, event, true)
@@ -298,6 +314,7 @@ func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t
 	emitter.column = 0
 	emitter.whitespace = true
 	emitter.indention = true
+	emitter.space_above = true
 
 	if emitter.encoding != yaml_UTF8_ENCODING {
 		if !yaml_emitter_write_bom(emitter) {
@@ -399,6 +416,15 @@ func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event
 			}
 		}
 
+		if len(emitter.header_comment) > 0 {
+			if !yaml_emitter_process_header_comment(emitter) {
+				return false
+			}
+			if !put_break(emitter) {
+				return false
+			}
+		}
+
 		emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE
 		return true
 	}
@@ -425,7 +451,20 @@ func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event
 // Expect the root node.
 func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool {
 	emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE)
-	return yaml_emitter_emit_node(emitter, event, true, false, false, false)
+
+	if !yaml_emitter_process_header_comment(emitter) {
+		return false
+	}
+	if !yaml_emitter_emit_node(emitter, event, true, false, false, false) {
+		return false
+	}
+	if !yaml_emitter_process_inline_comment(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_footer_comment(emitter) {
+		return false
+	}
+	return true
 }
 
 // Expect DOCUMENT-END.
@@ -436,6 +475,14 @@ func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t
 	if !yaml_emitter_write_indent(emitter) {
 		return false
 	}
+	if len(emitter.footer_comment) > 0 {
+		if !put_break(emitter) {
+			return false
+		}
+		if !yaml_emitter_process_footer_comment(emitter) {
+			return false
+		}
+	}
 	if !event.implicit {
 		// [Go] Allocate the slice elsewhere.
 		if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
@@ -454,7 +501,7 @@ func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t
 }
 
 // Expect a flow item node.
-func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool {
 	if first {
 		if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) {
 			return false
@@ -480,29 +527,62 @@ func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_e
 		if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) {
 			return false
 		}
+		if !yaml_emitter_process_inline_comment(emitter) {
+			return false
+		}
+		if !yaml_emitter_process_footer_comment(emitter) {
+			return false
+		}
 		emitter.state = emitter.states[len(emitter.states)-1]
 		emitter.states = emitter.states[:len(emitter.states)-1]
 
 		return true
 	}
 
-	if !first {
+	if !first && !trail {
 		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
 			return false
 		}
 	}
 
+	if !yaml_emitter_process_header_comment(emitter) {
+		return false
+	}
+	if emitter.column == 0 {
+		if !yaml_emitter_write_indent(emitter) {
+			return false
+		}
+	}
+
 	if emitter.canonical || emitter.column > emitter.best_width {
 		if !yaml_emitter_write_indent(emitter) {
 			return false
 		}
 	}
-	emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
-	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
+	if len(emitter.inline_comment) > 0 || len(emitter.footer_comment) > 0 {
+		emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE)
+	} else {
+		emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
+	}
+	if !yaml_emitter_emit_node(emitter, event, false, true, false, false) {
+		return false
+	}
+	if len(emitter.inline_comment) > 0 || len(emitter.footer_comment) > 0 {
+		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
+			return false
+		}
+	}
+	if !yaml_emitter_process_inline_comment(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_footer_comment(emitter) {
+		return false
+	}
+	return true
 }
 
 // Expect a flow key node.
-func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
+func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool {
 	if first {
 		if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) {
 			return false
@@ -528,16 +608,32 @@ func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_eve
 		if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) {
 			return false
 		}
+		if !yaml_emitter_process_inline_comment(emitter) {
+			return false
+		}
+		if !yaml_emitter_process_footer_comment(emitter) {
+			return false
+		}
 		emitter.state = emitter.states[len(emitter.states)-1]
 		emitter.states = emitter.states[:len(emitter.states)-1]
 		return true
 	}
 
-	if !first {
+	if !first && !trail {
 		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
 			return false
 		}
 	}
+
+	if !yaml_emitter_process_header_comment(emitter) {
+		return false
+	}
+	if emitter.column == 0 {
+		if !yaml_emitter_write_indent(emitter) {
+			return false
+		}
+	}
+
 	if emitter.canonical || emitter.column > emitter.best_width {
 		if !yaml_emitter_write_indent(emitter) {
 			return false
@@ -571,8 +667,26 @@ func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_e
 			return false
 		}
 	}
-	emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
-	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
+	if len(emitter.inline_comment) > 0 || len(emitter.footer_comment) > 0 {
+		emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE)
+	} else {
+		emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
+	}
+	if !yaml_emitter_emit_node(emitter, event, false, false, true, false) {
+		return false
+	}
+	if len(emitter.inline_comment) > 0 || len(emitter.footer_comment) > 0 {
+		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
+			return false
+		}
+	}
+	if !yaml_emitter_process_inline_comment(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_footer_comment(emitter) {
+		return false
+	}
+	return true
 }
 
 // Expect a block item node.
@@ -589,6 +703,9 @@ func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_
 		emitter.states = emitter.states[:len(emitter.states)-1]
 		return true
 	}
+	if !yaml_emitter_process_header_comment(emitter) {
+		return false
+	}
 	if !yaml_emitter_write_indent(emitter) {
 		return false
 	}
@@ -596,7 +713,16 @@ func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_
 		return false
 	}
 	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE)
-	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
+	if !yaml_emitter_emit_node(emitter, event, false, true, false, false) {
+		return false
+	}
+	if !yaml_emitter_process_inline_comment(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_footer_comment(emitter) {
+		return false
+	}
+	return true
 }
 
 // Expect a block key node.
@@ -613,6 +739,9 @@ func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_ev
 		emitter.states = emitter.states[:len(emitter.states)-1]
 		return true
 	}
+	if !yaml_emitter_process_header_comment(emitter) {
+		return false
+	}
 	if !yaml_emitter_write_indent(emitter) {
 		return false
 	}
@@ -642,7 +771,16 @@ func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_
 		}
 	}
 	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE)
-	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
+	if !yaml_emitter_emit_node(emitter, event, false, false, true, false) {
+		return false
+	}
+	if !yaml_emitter_process_inline_comment(emitter) {
+		return false
+	}
+	if !yaml_emitter_process_footer_comment(emitter) {
+		return false
+	}
+	return true
 }
 
 // Expect a node.
@@ -908,6 +1046,68 @@ func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool {
 	panic("unknown scalar style")
 }
 
+// Write a header comment.
+func yaml_emitter_process_header_comment(emitter *yaml_emitter_t) bool {
+	if len(emitter.header_comment) == 0 {
+		return true
+	}
+	space_above := emitter.space_above
+	if !emitter.indention {
+		if !put_break(emitter) {
+			return false
+		}
+	}
+	if !space_above &&
+		emitter.state != yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE &&
+		emitter.state != yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE &&
+		emitter.state != yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE &&
+		emitter.state != yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE {
+		if !put_break(emitter) {
+			return false
+		}
+	}
+	if !yaml_emitter_write_indent(emitter) {
+		return false
+	}
+	if !yaml_emitter_write_comment(emitter, emitter.header_comment) {
+		return false
+	}
+	emitter.header_comment = emitter.header_comment[:0]
+	return true
+}
+
+// Write an inline comment.
+func yaml_emitter_process_inline_comment(emitter *yaml_emitter_t) bool {
+	if len(emitter.inline_comment) == 0 {
+		return true
+	}
+	if !emitter.whitespace {
+		if !put(emitter, ' ') {
+			return false
+		}
+	}
+	if !yaml_emitter_write_comment(emitter, emitter.inline_comment) {
+		return false
+	}
+	emitter.inline_comment = emitter.inline_comment[:0]
+	return true
+}
+
+// Write a footer comment.
+func yaml_emitter_process_footer_comment(emitter *yaml_emitter_t) bool {
+	if len(emitter.footer_comment) == 0 {
+		return true
+	}
+	if !yaml_emitter_write_indent(emitter) {
+		return false
+	}
+	if !yaml_emitter_write_comment(emitter, emitter.footer_comment) {
+		return false
+	}
+	emitter.footer_comment = emitter.footer_comment[:0]
+	return true
+}
+
 // Check if a %YAML directive is valid.
 func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool {
 	if version_directive.major != 1 || version_directive.minor != 1 {
@@ -1137,6 +1337,16 @@ func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bo
 	emitter.tag_data.suffix = nil
 	emitter.scalar_data.value = nil
 
+	if len(event.header_comment) > 0 {
+		emitter.header_comment = event.header_comment
+	}
+	if len(event.inline_comment) > 0 {
+		emitter.inline_comment = event.inline_comment
+	}
+	if len(event.footer_comment) > 0 {
+		emitter.footer_comment = event.footer_comment
+	}
+
 	switch event.typ {
 	case yaml_ALIAS_EVENT:
 		if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) {
@@ -1214,7 +1424,8 @@ func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool {
 		}
 	}
 	emitter.whitespace = true
-	emitter.indention = true
+	//emitter.indention = true
+	emitter.space_above = false
 	return true
 }
 
@@ -1341,7 +1552,7 @@ func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allo
 			if !write_break(emitter, value, &i) {
 				return false
 			}
-			emitter.indention = true
+			//emitter.indention = true
 			breaks = true
 		} else {
 			if breaks {
@@ -1397,7 +1608,7 @@ func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []by
 			if !write_break(emitter, value, &i) {
 				return false
 			}
-			emitter.indention = true
+			//emitter.indention = true
 			breaks = true
 		} else {
 			if breaks {
@@ -1599,7 +1810,7 @@ func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bo
 	if !put_break(emitter) {
 		return false
 	}
-	emitter.indention = true
+	//emitter.indention = true
 	emitter.whitespace = true
 	breaks := true
 	for i := 0; i < len(value); {
@@ -1607,7 +1818,7 @@ func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bo
 			if !write_break(emitter, value, &i) {
 				return false
 			}
-			emitter.indention = true
+			//emitter.indention = true
 			breaks = true
 		} else {
 			if breaks {
@@ -1637,7 +1848,7 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo
 	if !put_break(emitter) {
 		return false
 	}
-	emitter.indention = true
+	//emitter.indention = true
 	emitter.whitespace = true
 
 	breaks := true
@@ -1658,7 +1869,7 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo
 			if !write_break(emitter, value, &i) {
 				return false
 			}
-			emitter.indention = true
+			//emitter.indention = true
 			breaks = true
 		} else {
 			if breaks {
@@ -1683,3 +1894,37 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo
 	}
 	return true
 }
+
+func yaml_emitter_write_comment(emitter *yaml_emitter_t, comment []byte) bool {
+	// [Go] TODO Emit "# " when necessary.
+	breaks := false
+	for i := 0; i < len(comment); {
+		if is_space(comment, i) {
+			if !write(emitter, comment, &i) {
+				return false
+			}
+		} else if is_break(comment, i) {
+			if !write_break(emitter, comment, &i) {
+				return false
+			}
+			//emitter.indention = true
+			breaks = true
+		} else {
+			if breaks && !yaml_emitter_write_indent(emitter) {
+				return false
+			}
+			if !write(emitter, comment, &i) {
+				return false
+			}
+			emitter.indention = false
+			breaks = false
+		}
+	}
+	if !breaks && !put_break(emitter) {
+		return false
+	}
+
+	emitter.whitespace = true
+	//emitter.indention = true
+	return true
+}

+ 34 - 14
encode.go

@@ -75,11 +75,15 @@ func (e *encoder) must(ok bool) {
 
 func (e *encoder) marshalDoc(tag string, in reflect.Value) {
 	e.init()
-	yaml_document_start_event_initialize(&e.event, nil, nil, true)
-	e.emit()
-	e.marshal(tag, in)
-	yaml_document_end_event_initialize(&e.event, true)
-	e.emit()
+	if node, ok := in.Interface().(*Node); ok && node.Kind == DocumentNode {
+		e.nodev(in)
+	} else {
+		yaml_document_start_event_initialize(&e.event, nil, nil, true)
+		e.emit()
+		e.marshal(tag, in)
+		yaml_document_end_event_initialize(&e.event, true)
+		e.emit()
+	}
 }
 
 func (e *encoder) marshal(tag string, in reflect.Value) {
@@ -294,7 +298,7 @@ func (e *encoder) stringv(tag string, in reflect.Value) {
 	default:
 		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
 	}
-	e.emitScalar(s, "", tag, style)
+	e.emitScalar(s, "", tag, style, nil, nil, nil)
 }
 
 func (e *encoder) boolv(tag string, in reflect.Value) {
@@ -304,23 +308,23 @@ func (e *encoder) boolv(tag string, in reflect.Value) {
 	} else {
 		s = "false"
 	}
-	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
 }
 
 func (e *encoder) intv(tag string, in reflect.Value) {
 	s := strconv.FormatInt(in.Int(), 10)
-	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
 }
 
 func (e *encoder) uintv(tag string, in reflect.Value) {
 	s := strconv.FormatUint(in.Uint(), 10)
-	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
 }
 
 func (e *encoder) timev(tag string, in reflect.Value) {
 	t := in.Interface().(time.Time)
 	s := t.Format(time.RFC3339Nano)
-	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
 }
 
 func (e *encoder) floatv(tag string, in reflect.Value) {
@@ -339,16 +343,20 @@ func (e *encoder) floatv(tag string, in reflect.Value) {
 	case "NaN":
 		s = ".nan"
 	}
-	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
 }
 
 func (e *encoder) nilv() {
-	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
+	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE, nil, nil, nil)
 }
 
-func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
+func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t, header, inline, footer []byte) {
+	// TODO Kill this function. Replace all initialize calls by their underlining Go literals.
 	implicit := tag == ""
 	e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
+	e.event.header_comment = header
+	e.event.inline_comment = inline
+	e.event.footer_comment = footer
 	e.emit()
 }
 
@@ -359,9 +367,15 @@ func (e *encoder) nodev(in reflect.Value) {
 func (e *encoder) node(node *Node) {
 	switch node.Kind {
 	case DocumentNode:
+		yaml_document_start_event_initialize(&e.event, nil, nil, true)
+		e.event.header_comment = []byte(node.Header)
+		e.emit()
 		for _, node := range node.Children {
 			e.node(node)
 		}
+		yaml_document_end_event_initialize(&e.event, true)
+		e.event.footer_comment = []byte(node.Footer)
+		e.emit()
 
 	case SequenceNode:
 		style := yaml_BLOCK_SEQUENCE_STYLE
@@ -369,11 +383,14 @@ func (e *encoder) node(node *Node) {
 			style = yaml_FLOW_SEQUENCE_STYLE
 		}
 		e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(node.Tag), node.implicit(), style))
+		e.event.header_comment = []byte(node.Header)
 		e.emit()
 		for _, node := range node.Children {
 			e.node(node)
 		}
 		e.must(yaml_sequence_end_event_initialize(&e.event))
+		e.event.inline_comment = []byte(node.Inline)
+		e.event.footer_comment = []byte(node.Footer)
 		e.emit()
 
 	case MappingNode:
@@ -382,6 +399,7 @@ func (e *encoder) node(node *Node) {
 			style = yaml_FLOW_MAPPING_STYLE
 		}
 		yaml_mapping_start_event_initialize(&e.event, nil, []byte(node.Tag), node.implicit(), style)
+		e.event.header_comment = []byte(node.Header)
 		e.emit()
 
 		for i := 0; i+1 < len(node.Children); i += 2 {
@@ -390,6 +408,8 @@ func (e *encoder) node(node *Node) {
 		}
 
 		yaml_mapping_end_event_initialize(&e.event)
+		e.event.inline_comment = []byte(node.Inline)
+		e.event.footer_comment = []byte(node.Footer)
 		e.emit()
 
 	case ScalarNode, AliasNode:
@@ -408,7 +428,7 @@ func (e *encoder) node(node *Node) {
 		if style == yaml_PLAIN_SCALAR_STYLE && strings.Contains(node.Value, "\n") {
 			style = yaml_LITERAL_SCALAR_STYLE
 		}
-		e.emitScalar(node.Value, "", node.Tag, style)
+		e.emitScalar(node.Value, "", node.Tag, style, []byte(node.Header), []byte(node.Inline), []byte(node.Footer))
 
 		// TODO Check if binaries are being decoded into node.Value or not.
 		//switch {

+ 821 - 0
node_test.go

@@ -0,0 +1,821 @@
+package yaml_test
+
+import (
+	"os"
+
+	. "gopkg.in/check.v1"
+	"gopkg.in/niemeyer/ynext.v3"
+)
+
+var nodeTests = []struct {
+	yaml string
+	tag  string
+	node yaml.Node
+}{
+	{
+		"null\n",
+		"!!null",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "null",
+				Line:   1,
+				Column: 1,
+				Tag:    "",
+			}},
+		},
+	}, {
+		"foo\n",
+		"!!str",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "foo",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"\"foo\"\n",
+		"!!str",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Style:  yaml.DoubleQuotedStyle,
+				Value:  "foo",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"'foo'\n",
+		"!!str",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Style:  yaml.SingleQuotedStyle,
+				Value:  "foo",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"''\n",
+		"!!str",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Style:  yaml.SingleQuotedStyle,
+				Value:  "",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"|\n  foo\n  bar\n",
+		"!!str",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Style:  yaml.LiteralStyle,
+				Value:  "foo\nbar\n",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"true\n",
+		"!!bool",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "true",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"-10\n",
+		"!!int",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "-10",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"4294967296\n",
+		"!!int",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "4294967296",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"0.1000\n",
+		"!!float",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "0.1000",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"-.inf\n",
+		"!!float",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "-.inf",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		".nan\n",
+		"!!float",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  ".nan",
+				Line:   1,
+				Column: 1,
+			}},
+		},
+	}, {
+		"{}\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Style:  yaml.FlowStyle,
+				Value:  "",
+				Line:   1,
+				Column: 1,
+				Tag:    "",
+			}},
+		},
+	}, {
+		"a: b c\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Value:  "",
+				Line:   1,
+				Column: 1,
+				Tag:    "",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Value:  "a",
+					Line:   1,
+					Column: 1,
+				}, {
+					Kind:   yaml.ScalarNode,
+					Value:  "b c",
+					Line:   1,
+					Column: 4,
+				}},
+			}},
+		},
+	}, {
+		"a:\n  b: c\n  d: e\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Line:   1,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Value:  "a",
+					Line:   1,
+					Column: 1,
+				}, {
+					Kind:   yaml.MappingNode,
+					Line:   2,
+					Column: 3,
+					Children: []*yaml.Node{{
+						Kind:   yaml.ScalarNode,
+						Value:  "b",
+						Line:   2,
+						Column: 3,
+					}, {
+						Kind:   yaml.ScalarNode,
+						Value:  "c",
+						Line:   2,
+						Column: 6,
+					}, {
+						Kind:   yaml.ScalarNode,
+						Value:  "d",
+						Line:   3,
+						Column: 3,
+					}, {
+						Kind:   yaml.ScalarNode,
+						Value:  "e",
+						Line:   3,
+						Column: 6,
+					}},
+				}},
+			}},
+		},
+	}, {
+		"- a\n- b\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Value:  "",
+				Line:   1,
+				Column: 1,
+				Tag:    "",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Value:  "a",
+					Line:   1,
+					Column: 3,
+				}, {
+					Kind:   yaml.ScalarNode,
+					Value:  "b",
+					Line:   2,
+					Column: 3,
+				}},
+			}},
+		},
+	}, {
+		"- a\n- - b\n  - c\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Line:   1,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Value:  "a",
+					Line:   1,
+					Column: 3,
+				}, {
+					Kind:   yaml.SequenceNode,
+					Line:   2,
+					Column: 3,
+					Children: []*yaml.Node{{
+						Kind:   yaml.ScalarNode,
+						Value:  "b",
+						Line:   2,
+						Column: 5,
+					}, {
+						Kind:   yaml.ScalarNode,
+						Value:  "c",
+						Line:   3,
+						Column: 5,
+					}},
+				}},
+			}},
+		},
+	}, {
+		"[a, b]\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Style:  yaml.FlowStyle,
+				Value:  "",
+				Line:   1,
+				Column: 1,
+				Tag:    "",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Value:  "a",
+					Line:   1,
+					Column: 2,
+				}, {
+					Kind:   yaml.ScalarNode,
+					Value:  "b",
+					Line:   1,
+					Column: 5,
+				}},
+			}},
+		},
+	}, {
+		"- a\n- [b, c]\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    1,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Line:   1,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Value:  "a",
+					Line:   1,
+					Column: 3,
+				}, {
+					Kind:   yaml.SequenceNode,
+					Style:  yaml.FlowStyle,
+					Line:   2,
+					Column: 3,
+					Children: []*yaml.Node{{
+						Kind:   yaml.ScalarNode,
+						Value:  "b",
+						Line:   2,
+						Column: 4,
+					}, {
+						Kind:   yaml.ScalarNode,
+						Value:  "c",
+						Line:   2,
+						Column: 7,
+					}},
+				}},
+			}},
+		},
+	}, {
+		"# One\n# Two\ntrue # Three\n# Four\n# Five\n",
+		"!!bool",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    3,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "true",
+				Line:   3,
+				Column: 1,
+				Header: "# One\n# Two",
+				Inline: "# Three",
+				Footer: "# Four\n# Five",
+			}},
+		},
+	}, {
+		"# DH1\n\n# DH2\n\n# H1\n# H2\ntrue # I\n# F1\n# F2\n\n# DF1\n\n# DF2\n",
+		"!!bool",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    7,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1\n\n# DH2",
+			Footer:  "# DF1\n\n# DF2",
+			Children: []*yaml.Node{{
+				Kind:   yaml.ScalarNode,
+				Value:  "true",
+				Line:   7,
+				Column: 1,
+				Header: "# H1\n# H2",
+				Inline: "# I",
+				Footer: "# F1\n# F2",
+			}},
+		},
+	}, {
+		"# DH1\n\n# DH2\n\n# HA1\n# HA2\nka: va # IA\n# FA1\n# FA2\n\n# HB1\n# HB2\nkb: vb # IB\n# FB1\n# FB2\n\n# DF1\n\n# DF2\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    7,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1\n\n# DH2",
+			Footer:  "# DF1\n\n# DF2",
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Line:   7,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   7,
+					Column: 1,
+					Value:  "ka",
+					Header: "# HA1\n# HA2",
+					Footer: "# FA1\n# FA2",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   7,
+					Column: 5,
+					Value:  "va",
+					Inline: "# IA",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   13,
+					Column: 1,
+					Value:  "kb",
+					Header: "# HB1\n# HB2",
+					Footer: "# FB1\n# FB2",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   13,
+					Column: 5,
+					Value:  "vb",
+					Inline: "# IB",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n# DH2\n\n# HA1\n# HA2\n- la # IA\n# FA1\n# FA2\n\n# HB1\n# HB2\n- lb # IB\n# FB1\n# FB2\n\n# DF1\n\n# DF2\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    7,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1\n\n# DH2",
+			Footer:  "# DF1\n\n# DF2",
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Line:   7,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   7,
+					Column: 3,
+					Value:  "la",
+					Header: "# HA1\n# HA2",
+					Inline: "# IA",
+					Footer: "# FA1\n# FA2",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   13,
+					Column: 3,
+					Value:  "lb",
+					Header: "# HB1\n# HB2",
+					Inline: "# IB",
+					Footer: "# FB1\n# FB2",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n- la # IA\n\n# HB1\n- lb\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    3,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1",
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Line:   3,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   3,
+					Column: 3,
+					Value:  "la",
+					Inline: "# IA",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 3,
+					Value:  "lb",
+					Header: "# HB1",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n# HA1\nka:\n  # HB1\n  kb:\n  # HC1\n  # HC2\n  - lc # IC\n  # FC1\n  # FC2\n\n  # HD1\n  - ld # ID\n  # FD1\n\n# DF1\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    4,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1",
+			Footer:  "# DF1",
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Line:   4,
+				Column: 1,
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   4,
+					Column: 1,
+					Value:  "ka",
+					Header: "# HA1",
+				}, {
+					Kind:   yaml.MappingNode,
+					Line:   6,
+					Column: 3,
+					Children: []*yaml.Node{{
+						Kind:   yaml.ScalarNode,
+						Line:   6,
+						Column: 3,
+						Value:  "kb",
+						Header: "# HB1",
+					}, {
+						Kind:   yaml.SequenceNode,
+						Line:   9,
+						Column: 3,
+						Children: []*yaml.Node{{
+							Kind:   yaml.ScalarNode,
+							Line:   9,
+							Column: 5,
+							Value:  "lc",
+							Header: "# HC1\n# HC2",
+							Inline: "# IC",
+							Footer: "# FC1\n# FC2",
+						}, {
+							Kind:   yaml.ScalarNode,
+							Line:   14,
+							Column: 5,
+							Value:  "ld",
+							Header: "# HD1",
+
+							Inline: "# ID",
+							Footer: "# FD1",
+						}},
+					}},
+				}},
+			}},
+		},
+	}, {
+		"# H1\n[la, lb] # I\n# F1\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    2,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Style:  yaml.FlowStyle,
+				Line:   2,
+				Column: 1,
+				Header: "# H1",
+				Inline: "# I",
+				Footer: "# F1",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   2,
+					Column: 2,
+					Value:  "la",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   2,
+					Column: 6,
+					Value:  "lb",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n# SH1\n[\n  # HA1\n  la, # IA\n  # FA1\n\n  # HB1\n  lb, # IB\n  # FB1\n]\n# SF1\n\n# DF1\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    4,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1",
+			Footer:  "# DF1",
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Style:  yaml.FlowStyle,
+				Line:   4,
+				Column: 1,
+				Header: "# SH1",
+				Footer: "# SF1",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 3,
+					Value:  "la",
+					Header: "# HA1",
+					Inline: "# IA",
+					Footer: "# FA1",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   10,
+					Column: 3,
+					Value:  "lb",
+					Header: "# HB1",
+					Inline: "# IB",
+					Footer: "# FB1",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n# SH1\n[\n  # HA1\n  la,\n  # FA1\n\n  # HB1\n  lb,\n  # FB1\n]\n# SF1\n\n# DF1\n",
+		"!!seq",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    4,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1",
+			Footer:  "# DF1",
+			Children: []*yaml.Node{{
+				Kind:   yaml.SequenceNode,
+				Style:  yaml.FlowStyle,
+				Line:   4,
+				Column: 1,
+				Header: "# SH1",
+				Footer: "# SF1",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 3,
+					Value:  "la",
+					Header: "# HA1",
+					Footer: "# FA1",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   10,
+					Column: 3,
+					Value:  "lb",
+					Header: "# HB1",
+					Footer: "# FB1",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n# MH1\n{\n  # HA1\n  ka: va, # IA\n  # FA1\n\n  # HB1\n  kb: vb, # IB\n  # FB1\n}\n# MF1\n\n# DF1\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    4,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1",
+			Footer:  "# DF1",
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Style:  yaml.FlowStyle,
+				Line:   4,
+				Column: 1,
+				Header: "# MH1",
+				Footer: "# MF1",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 3,
+					Value:  "ka",
+					Header: "# HA1",
+					Footer: "# FA1",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 7,
+					Value:  "va",
+					Inline: "# IA",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   10,
+					Column: 3,
+					Value:  "kb",
+					Header: "# HB1",
+					Footer: "# FB1",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   10,
+					Column: 7,
+					Value:  "vb",
+					Inline: "# IB",
+				}},
+			}},
+		},
+	}, {
+		"# DH1\n\n# MH1\n{\n  # HA1\n  ka: va,\n  # FA1\n\n  # HB1\n  kb: vb,\n  # FB1\n}\n# MF1\n\n# DF1\n",
+		"!!map",
+		yaml.Node{
+			Kind:    yaml.DocumentNode,
+			Line:    4,
+			Column:  1,
+			Anchors: map[string]*yaml.Node{},
+			Header:  "# DH1",
+			Footer:  "# DF1",
+			Children: []*yaml.Node{{
+				Kind:   yaml.MappingNode,
+				Style:  yaml.FlowStyle,
+				Line:   4,
+				Column: 1,
+				Header: "# MH1",
+				Footer: "# MF1",
+				Children: []*yaml.Node{{
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 3,
+					Value:  "ka",
+					Header: "# HA1",
+					Footer: "# FA1",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   6,
+					Column: 7,
+					Value:  "va",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   10,
+					Column: 3,
+					Value:  "kb",
+					Header: "# HB1",
+					Footer: "# FB1",
+				}, {
+					Kind:   yaml.ScalarNode,
+					Line:   10,
+					Column: 7,
+					Value:  "vb",
+				}},
+			}},
+		},
+	},
+}
+
+// TestNodeRoundtrip decodes every nodeTests entry into a yaml.Node tree,
+// verifies the tree (kinds, positions, and comment attachments) matches the
+// expected node, then re-encodes it and asserts the output reproduces the
+// original YAML text byte-for-byte, comments included.
+func (s *S) TestNodeRoundtrip(c *C) {
+	// Pin the timezone so any timestamp resolution is deterministic.
+	// The deferred call restores the caller's TZ (the argument is
+	// evaluated now, before the Setenv below overwrites it).
+	defer os.Setenv("TZ", os.Getenv("TZ"))
+	os.Setenv("TZ", "UTC")
+	for i, item := range nodeTests {
+		c.Logf("test %d: %q", i, item.yaml)
+		// Decode: the node tree must match the expected structure exactly.
+		var node yaml.Node
+		err := yaml.Unmarshal([]byte(item.yaml), &node)
+		c.Assert(err, IsNil)
+		c.Assert(node, DeepEquals, item.node)
+		// Encode: marshalling the tree back must reproduce the input.
+		data, err := yaml.Marshal(&node)
+		c.Assert(err, IsNil)
+		c.Assert(string(data), Equals, item.yaml)
+		// The document's single child (when present) must resolve to the
+		// expected short tag (e.g. "!!map", "!!seq", "!!bool").
+		if len(node.Children) > 0 {
+			c.Assert(node.Children[0].ShortTag(), Equals, item.tag)
+		}
+	}
+}

+ 71 - 1
parserc.go

@@ -45,11 +45,42 @@ import (
 // Peek the next token in the token queue.
 func peek_token(parser *yaml_parser_t) *yaml_token_t {
 	if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
-		return &parser.tokens[parser.tokens_head]
+		token := &parser.tokens[parser.tokens_head]
+		yaml_parser_unfold_comments(parser, token)
+		return token
 	}
 	return nil
 }
 
+// yaml_parser_unfold_comments walks through the comments queue and joins all
+// comments behind the position of the provided token into the respective
+// top-level comment slices in the parser.
+//
+// Queue entries are consumed in order while their "after" mark lies at or
+// before the token's start; multiple entries of the same kind are joined
+// with a newline separator. Consumed entries are zeroed (releasing their
+// byte slices) and comments_head is advanced past them.
+func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) {
+	for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].after.index {
+		comment := &parser.comments[parser.comments_head]
+		// Fold the header part into the parser's pending header comment.
+		if len(comment.header) > 0 {
+			if len(parser.header_comment) > 0 {
+				parser.header_comment = append(parser.header_comment, '\n')
+			}
+			parser.header_comment = append(parser.header_comment, comment.header...)
+		}
+		// Fold the footer part into the parser's pending footer comment.
+		if len(comment.footer) > 0 {
+			if len(parser.footer_comment) > 0 {
+				parser.footer_comment = append(parser.footer_comment, '\n')
+			}
+			parser.footer_comment = append(parser.footer_comment, comment.footer...)
+		}
+		// Fold the inline part into the parser's pending inline comment.
+		if len(comment.inline) > 0 {
+			if len(parser.inline_comment) > 0 {
+				parser.inline_comment = append(parser.inline_comment, '\n')
+			}
+			parser.inline_comment = append(parser.inline_comment, comment.inline...)
+		}
+		// Clear the consumed entry so its slices can be collected, and
+		// move the queue head past it.
+		*comment = yaml_comment_t{}
+		parser.comments_head++
+	}
+}
+
 // Remove the next token from the queue (must be called after peek_token).
 func skip_token(parser *yaml_parser_t) {
 	parser.token_available = false
@@ -224,10 +255,32 @@ func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t
 		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
 		parser.state = yaml_PARSE_BLOCK_NODE_STATE
 
+		var header_comment []byte
+		if len(parser.header_comment) > 0 {
+			// [Go] Scan the header comment backwards, and if an empty line is found, break
+			//      the header so the part before the last empty line goes into the
+			//      document header, while the bottom of it goes into a follow up event.
+			for i := len(parser.header_comment)-1; i > 0; i-- {
+				if parser.header_comment[i] == '\n' {
+					if i == len(parser.header_comment)-1 {
+						header_comment = parser.header_comment[:i]
+						parser.header_comment = parser.header_comment[i+1:]
+						break
+					} else if parser.header_comment[i-1] == '\n' {
+						header_comment = parser.header_comment[:i-1]
+						parser.header_comment = parser.header_comment[i+1:]
+						break
+					}
+				}
+			}
+		}
+
 		*event = yaml_event_t{
 			typ:        yaml_DOCUMENT_START_EVENT,
 			start_mark: token.start_mark,
 			end_mark:   token.end_mark,
+
+			header_comment: header_comment,
 		}
 
 	} else if token.typ != yaml_STREAM_END_TOKEN {
@@ -326,10 +379,22 @@ func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t)
 		start_mark: start_mark,
 		end_mark:   end_mark,
 		implicit:   implicit,
+
+		footer_comment: parser.header_comment,
 	}
+	parser.header_comment = nil
 	return true
 }
 
+// yaml_parser_set_event_comments transfers the comments accumulated in the
+// parser (header, inline, and footer) onto the given event, and resets the
+// parser's fields so the same comments are not attached to a later event.
+func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) {
+	event.header_comment = parser.header_comment
+	event.inline_comment = parser.inline_comment
+	event.footer_comment = parser.footer_comment
+	parser.header_comment = nil
+	parser.inline_comment = nil
+	parser.footer_comment = nil
+}
+
 // Parse the productions:
 // block_node_or_indentless_sequence    ::=
 //                          ALIAS
@@ -486,6 +551,7 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
 			quoted_implicit: quoted_implicit,
 			style:           yaml_style_t(token.style),
 		}
+		yaml_parser_set_event_comments(parser, event)
 		skip_token(parser)
 		return true
 	}
@@ -502,6 +568,7 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
 			implicit:   implicit,
 			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
 		}
+		yaml_parser_set_event_comments(parser, event)
 		return true
 	}
 	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
@@ -516,6 +583,7 @@ func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, i
 			implicit:   implicit,
 			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
 		}
+		yaml_parser_set_event_comments(parser, event)
 		return true
 	}
 	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
@@ -820,6 +888,7 @@ func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_ev
 		start_mark: token.start_mark,
 		end_mark:   token.end_mark,
 	}
+	yaml_parser_set_event_comments(parser, event)
 
 	skip_token(parser)
 	return true
@@ -959,6 +1028,7 @@ func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event
 		start_mark: token.start_mark,
 		end_mark:   token.end_mark,
 	}
+	yaml_parser_set_event_comments(parser, event)
 	skip_token(parser)
 	return true
 }

+ 194 - 15
scannerc.go

@@ -629,8 +629,11 @@ func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
 		// Check if we really need to fetch more tokens.
 		need_more_tokens := false
 
-		if parser.tokens_head == len(parser.tokens) {
-			// Queue is empty.
+		// [Go] When parsing flow items, force the queue to have at least
+		// two items so that comments after commas may be associated
+		// with the value being parsed before them.
+		if parser.tokens_head == len(parser.tokens) || parser.flow_level > 0 && parser.tokens_head >= len(parser.tokens)-1 {
+			// Queue is empty or has just one element inside a flow context.
 			need_more_tokens = true
 		} else {
 			// Check if any potential simple key may occupy the head position.
@@ -662,7 +665,7 @@ func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
 }
 
 // The dispatcher for token fetchers.
-func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
+func yaml_parser_fetch_next_token(parser *yaml_parser_t) (ok bool) {
 	// Ensure that the buffer is initialized.
 	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
 		return false
@@ -717,6 +720,25 @@ func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
 		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN)
 	}
 
+	comment_mark := parser.mark
+	if parser.flow_level > 0 && buf[pos] == ',' && len(parser.tokens) > 0 {
+		// Associate any following comments with the prior token.
+		comment_mark = parser.tokens[len(parser.tokens)-1].start_mark
+	}
+	defer func() {
+		if !ok {
+			return
+		}
+		if !yaml_parser_scan_inline_comment(parser, comment_mark) {
+			ok = false
+			return
+		}
+		if !yaml_parser_scan_footer_comment(parser, comment_mark) {
+			ok = false
+			return
+		}
+	}()
+
 	// Is it the flow sequence start indicator?
 	if buf[pos] == '[' {
 		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN)
@@ -810,7 +832,7 @@ func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
 	// if it is followed by a non-space character.
 	//
 	// The last rule is more restrictive than the specification requires.
-	// [Go] Make this logic more reasonable.
+	// [Go] TODO Make this logic more reasonable.
 	//switch parser.buffer[parser.buffer_pos] {
 	//case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`':
 	//}
@@ -1097,6 +1119,7 @@ func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_
 
 // Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
 func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+
 	// The indicators '[' and '{' may start a simple key.
 	if !yaml_parser_save_simple_key(parser) {
 		return false
@@ -1455,11 +1478,8 @@ func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {
 
 		// Eat a comment until a line break.
 		if parser.buffer[parser.buffer_pos] == '#' {
-			for !is_breakz(parser.buffer, parser.buffer_pos) {
-				skip(parser)
-				if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
-					return false
-				}
+			if !yaml_parser_scan_header_comment(parser, parser.mark) {
+				return false
 			}
 		}
 
@@ -1557,6 +1577,10 @@ func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool
 	}
 
 	if parser.buffer[parser.buffer_pos] == '#' {
+		// [Go] Discard this inline comment for the time being.
+		//if !yaml_parser_scan_inline_comment(parser, start_mark) {
+		//	return false
+		//}
 		for !is_breakz(parser.buffer, parser.buffer_pos) {
 			skip(parser)
 			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
@@ -1972,7 +1996,7 @@ func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte
 	//      '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&',
 	//      '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']',
 	//      '%'.
-	// [Go] Convert this into more reasonable logic.
+	// [Go] TODO Convert this into more reasonable logic.
 	for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' ||
 		parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' ||
 		parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' ||
@@ -2127,11 +2151,9 @@ func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, l
 		}
 	}
 	if parser.buffer[parser.buffer_pos] == '#' {
-		for !is_breakz(parser.buffer, parser.buffer_pos) {
-			skip(parser)
-			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
-				return false
-			}
+		// [Go] TODO This logic is untested and maybe broken.
+		if !yaml_parser_scan_inline_comment(parser, start_mark) {
+			return false
 		}
 	}
 
@@ -2694,3 +2716,160 @@ func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) b
 	}
 	return true
 }
+
+
+// yaml_parser_scan_inline_comment scans a comment that trails content on the
+// current line (e.g. "value # comment") and queues it as the inline comment
+// of a new yaml_comment_t entry anchored at the given "after" mark.
+//
+// The scan is non-destructive until a '#' is actually found: blanks are only
+// peeked at, and the buffer position is advanced only once a comment line has
+// been fully captured. Returns false only on a buffer-update failure.
+func yaml_parser_scan_inline_comment(parser *yaml_parser_t, after yaml_mark_t) bool {
+	// At column 0 there is no content on this line for a comment to trail,
+	// so by definition no inline comment can follow here.
+	if parser.mark.column == 0 {
+		return true
+	}
+
+	parser.comments = append(parser.comments, yaml_comment_t{after: after})
+	comment := &parser.comments[len(parser.comments)-1].inline
+
+	// Peek over at most 512 bytes of blanks looking for a '#'.
+	// NOTE(review): an inline comment preceded by a longer run of blanks is
+	// silently dropped — confirm this bound is acceptable.
+	for peek := 0; peek < 512; peek++ {
+		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
+			break
+		}
+		if is_blank(parser.buffer, parser.buffer_pos+peek) {
+			continue
+		}
+		if parser.buffer[parser.buffer_pos+peek] == '#' {
+			// Join with a newline if this entry already holds inline text.
+			if len(*comment) > 0 {
+				*comment = append(*comment, '\n')
+			}
+			// Capture the comment text up to (not including) the line break.
+			for !is_breakz(parser.buffer, parser.buffer_pos+peek) {
+				*comment = append(*comment, parser.buffer[parser.buffer_pos+peek])
+				peek++
+				if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
+					return false
+				}
+			}
+
+			// Skip until after the consumed comment line.
+			until := parser.buffer_pos + peek
+			for parser.buffer_pos < until {
+				if is_break(parser.buffer, parser.buffer_pos) {
+					//break // Leave the break in the buffer so calling this function twice is safe.
+					if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+						return false
+					}
+					skip_line(parser)
+				} else {
+					skip(parser)
+				}
+			}
+		}
+		// First non-blank was either the '#' (now consumed) or ordinary
+		// content; either way the inline scan is done.
+		break
+	}
+	return true
+}
+
+// yaml_parser_scan_header_comment scans a run of full-line comments (and the
+// blank lines between them) that precede upcoming content, queueing them as
+// the header comment of a new yaml_comment_t entry anchored at "after".
+//
+// Comment lines are joined with '\n'; a blank line between comment groups
+// contributes a single extra '\n'. Scanning stops at the first non-blank,
+// non-break, non-'#' byte, which is left in the buffer for the caller.
+// Returns false only on a buffer-update failure.
+func yaml_parser_scan_header_comment(parser *yaml_parser_t, after yaml_mark_t) bool {
+	parser.comments = append(parser.comments, yaml_comment_t{after: after})
+	comment := &parser.comments[len(parser.comments)-1].header
+	// breaks tracks whether we are inside a run of line breaks, so a blank
+	// gap of any height is recorded as exactly one '\n'.
+	breaks := false
+	// peek is reset to 0 after each consumed comment line, so 512 bounds
+	// only the blanks/breaks scanned between consecutive comment lines.
+	for peek := 0; peek < 512; peek++ {
+		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
+			break
+		}
+		if parser.buffer[parser.buffer_pos+peek] == 0 {
+			break
+		}
+		if is_blank(parser.buffer, parser.buffer_pos+peek) {
+			continue
+		}
+		if is_break(parser.buffer, parser.buffer_pos+peek) {
+			// Record a blank-line separator once per run of breaks.
+			if !breaks {
+				*comment = append(*comment, '\n')
+			}
+			breaks = true
+		} else if parser.buffer[parser.buffer_pos+peek] == '#' {
+			if len(*comment) > 0 {
+				*comment = append(*comment, '\n')
+			}
+			breaks = false
+			// Capture the comment text up to the line break.
+			for !is_breakz(parser.buffer, parser.buffer_pos+peek) {
+				*comment = append(*comment, parser.buffer[parser.buffer_pos+peek])
+				peek++
+				if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
+					return false
+				}
+			}
+
+			// Skip until after the consumed comment line.
+			until := parser.buffer_pos + peek
+			for parser.buffer_pos < until {
+				if is_break(parser.buffer, parser.buffer_pos) {
+					if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+						return false
+					}
+					skip_line(parser)
+				} else {
+					skip(parser)
+				}
+			}
+			// Restart the peek window from the new buffer position to look
+			// for further comment lines.
+			peek = 0
+		} else {
+			// Reached real content; leave it for the token scanner.
+			break
+		}
+	}
+	return true
+}
+
+// yaml_parser_scan_footer_comment scans full-line comments that follow the
+// token anchored at "after", queueing them as that entry's footer comment.
+//
+// The scan ends at a blank line after comment text (the usual footer
+// terminator), at a closing flow indicator (']'/'}'), or at end of input.
+// If ordinary content is found first, the captured text is discarded and
+// left in the buffer so a later header scan can claim it for the next node.
+// Returns false only on a buffer-update failure.
+func yaml_parser_scan_footer_comment(parser *yaml_parser_t, after yaml_mark_t) bool {
+	parser.comments = append(parser.comments, yaml_comment_t{after: after})
+	comment := &parser.comments[len(parser.comments)-1].footer
+	// Remember the starting state so the abort path below can undo the
+	// capture without touching the queue entry itself.
+	original := *comment
+	breaks := false
+	// NOTE(review): peek is never reset here, so the entire footer
+	// (comments plus intervening blanks) must fit in this 32768-byte
+	// lookahead window — confirm the bound is acceptable.
+	peek := 0
+	for ; peek < 32768; peek++ {
+		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
+			break
+		}
+		c := parser.buffer[parser.buffer_pos+peek]
+		if c == 0 {
+			break
+		}
+		if is_blank(parser.buffer, parser.buffer_pos+peek) {
+			continue
+		}
+		if is_break(parser.buffer, parser.buffer_pos+peek) {
+			// A second consecutive break (an empty line) terminates the
+			// footer; everything past it belongs to the next node.
+			if breaks {
+				break
+			}
+			breaks = true
+		} else if c == '#' {
+			if len(*comment) > 0 {
+				*comment = append(*comment, '\n')
+			}
+			// Capture the comment text up to the line break.
+			for !is_breakz(parser.buffer, parser.buffer_pos+peek) {
+				*comment = append(*comment, parser.buffer[parser.buffer_pos+peek])
+				peek++
+				if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
+					return false
+				}
+			}
+			breaks = true
+		} else if c == ']' || c == '}' {
+			// A closing flow indicator ends the footer so the comment stays
+			// attached to the last entry of the flow collection.
+			break
+		} else {
+			// Abort and allow that next line to have the comment as its header.
+			*comment = original
+			return true
+		}
+	}
+
+	// Skip until after the consumed comment lines.
+	until := parser.buffer_pos + peek
+	for parser.buffer_pos < until {
+		if is_break(parser.buffer, parser.buffer_pos) {
+			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+				return false
+			}
+			skip_line(parser)
+		} else {
+			skip(parser)
+		}
+	}
+	return true
+}

+ 30 - 0
yamlh.go

@@ -279,6 +279,11 @@ type yaml_event_t struct {
 	// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
 	tag_directives []yaml_tag_directive_t
 
+	// The comments
+	header_comment []byte
+	inline_comment []byte
+	footer_comment []byte
+
 	// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
 	anchor []byte
 
@@ -562,6 +567,15 @@ type yaml_parser_t struct {
 	offset int         // The offset of the current position (in bytes).
 	mark   yaml_mark_t // The mark of the current position.
 
+	// Comments
+
+	header_comment []byte // The current header comments
+	inline_comment []byte // The current inline comments
+	footer_comment []byte // The current footer comments
+
+	comments      []yaml_comment_t // The folded comments for all parsed tokens
+	comments_head int
+
 	// Scanner stuff
 
 	stream_start_produced bool // Have we started to scan the input stream?
@@ -594,6 +608,13 @@ type yaml_parser_t struct {
 	document *yaml_document_t // The currently parsed document.
 }
 
+// yaml_comment_t holds the comments scanned around a single token position.
+// Entries are queued in parser.comments and later folded into the parser's
+// pending comment slices by yaml_parser_unfold_comments.
+type yaml_comment_t struct {
+	after  yaml_mark_t // Position of the token these comments attach to.
+	header []byte // Full-line comments preceding the token.
+	inline []byte // Comment trailing the token on the same line.
+	footer []byte // Full-line comments following the token.
+}
+
 // Emitter Definitions
 
 // The prototype of a write handler.
@@ -624,8 +645,10 @@ const (
 	yaml_EMIT_DOCUMENT_CONTENT_STATE           // Expect the content of a document.
 	yaml_EMIT_DOCUMENT_END_STATE               // Expect DOCUMENT-END.
 	yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE   // Expect the first item of a flow sequence.
+	yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE   // Expect the next item of a flow sequence, with the comma already written out
 	yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE         // Expect an item of a flow sequence.
 	yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE     // Expect the first key of a flow mapping.
+	yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE     // Expect the next key of a flow mapping, with the comma already written out
 	yaml_EMIT_FLOW_MAPPING_KEY_STATE           // Expect a key of a flow mapping.
 	yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE  // Expect a value for a simple key of a flow mapping.
 	yaml_EMIT_FLOW_MAPPING_VALUE_STATE         // Expect a value of a flow mapping.
@@ -697,6 +720,8 @@ type yaml_emitter_t struct {
 	indention  bool // If the last character was an indentation character (' ', '-', '?', ':')?
 	open_ended bool // If an explicit document end is required?
 
+	space_above bool // If there's an empty line right above?
+
 	// Anchor analysis.
 	anchor_data struct {
 		anchor []byte // The anchor value.
@@ -720,6 +745,11 @@ type yaml_emitter_t struct {
 		style                 yaml_scalar_style_t // The output style.
 	}
 
+	// Comments
+	header_comment []byte
+	inline_comment []byte
+	footer_comment []byte
+
 	// Dumper stuff
 
 	opened bool // If the stream was already opened?