Path: vendor/gopkg.in/yaml.v3/parserc.go
//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

import (
	"bytes"
)

// The parser implements the following grammar:
//
// stream               ::= STREAM-START implicit_document? explicit_document* STREAM-END
// implicit_document    ::= block_node DOCUMENT-END*
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
// block_node_or_indentless_sequence    ::=
//                          ALIAS
//                          | properties (block_content | indentless_block_sequence)?
//                          | block_content
//                          | indentless_block_sequence
// block_node           ::= ALIAS
//                          | properties block_content?
//                          | block_content
// flow_node            ::= ALIAS
//                          | properties flow_content?
//                          | flow_content
// properties           ::= TAG ANCHOR? | ANCHOR TAG?
// block_content        ::= block_collection | flow_collection | SCALAR
// flow_content         ::= flow_collection | SCALAR
// block_collection     ::= block_sequence | block_mapping
// flow_collection      ::= flow_sequence | flow_mapping
// block_sequence       ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
// indentless_sequence  ::= (BLOCK-ENTRY block_node?)+
// block_mapping        ::= BLOCK-MAPPING_START
//                          ((KEY block_node_or_indentless_sequence?)?
//                          (VALUE block_node_or_indentless_sequence?)?)*
//                          BLOCK-END
// flow_sequence        ::= FLOW-SEQUENCE-START
//                          (flow_sequence_entry FLOW-ENTRY)*
//                          flow_sequence_entry?
//                          FLOW-SEQUENCE-END
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
// flow_mapping         ::= FLOW-MAPPING-START
//                          (flow_mapping_entry FLOW-ENTRY)*
//                          flow_mapping_entry?
//                          FLOW-MAPPING-END
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
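//
// For example, a small document such as
//
//	key: value
//	list:
//	  - a
//	  - b
//
// is reported by this parser as the event sequence STREAM-START,
// DOCUMENT-START, MAPPING-START, SCALAR("key"), SCALAR("value"),
// SCALAR("list"), SEQUENCE-START, SCALAR("a"), SCALAR("b"), SEQUENCE-END,
// MAPPING-END, DOCUMENT-END, STREAM-END. This is only an illustrative sketch
// of the grammar above, not an exhaustive trace of the token stream.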

// Peek the next token in the token queue.
func peek_token(parser *yaml_parser_t) *yaml_token_t {
	if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
		token := &parser.tokens[parser.tokens_head]
		yaml_parser_unfold_comments(parser, token)
		return token
	}
	return nil
}

// yaml_parser_unfold_comments walks through the comments queue and joins all
// comments behind the position of the provided token into the respective
// top-level comment slices in the parser.
func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) {
	for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].token_mark.index {
		comment := &parser.comments[parser.comments_head]
		if len(comment.head) > 0 {
			if token.typ == yaml_BLOCK_END_TOKEN {
				// No heads on ends, so keep comment.head for a follow up token.
				break
			}
			if len(parser.head_comment) > 0 {
				parser.head_comment = append(parser.head_comment, '\n')
			}
			parser.head_comment = append(parser.head_comment, comment.head...)
		}
		if len(comment.foot) > 0 {
			if len(parser.foot_comment) > 0 {
				parser.foot_comment = append(parser.foot_comment, '\n')
			}
			parser.foot_comment = append(parser.foot_comment, comment.foot...)
		}
		if len(comment.line) > 0 {
			if len(parser.line_comment) > 0 {
				parser.line_comment = append(parser.line_comment, '\n')
			}
			parser.line_comment = append(parser.line_comment, comment.line...)
		}
		*comment = yaml_comment_t{}
		parser.comments_head++
	}
}

// Remove the next token from the queue (must be called after peek_token).
func skip_token(parser *yaml_parser_t) {
	parser.token_available = false
	parser.tokens_parsed++
	parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
	parser.tokens_head++
}

// Get the next event.
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
	// Erase the event object.
	*event = yaml_event_t{}

	// No events after the end of the stream or error.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
		return true
	}

	// Generate the next event.
	return yaml_parser_state_machine(parser, event)
}

// Set parser error.
func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}

func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.context = context
	parser.context_mark = context_mark
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}

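// The state machine below emits one event per call to yaml_parser_parse. A
// minimal sketch of how a caller might drive it, assuming a parser prepared
// with yaml_parser_initialize and yaml_parser_set_input_string (both defined
// elsewhere in this package):
//
//	var parser yaml_parser_t
//	var event yaml_event_t
//	yaml_parser_initialize(&parser)
//	yaml_parser_set_input_string(&parser, []byte("a: 1"))
//	for {
//		if !yaml_parser_parse(&parser, &event) {
//			break // parser.problem describes the error
//		}
//		done := event.typ == yaml_STREAM_END_EVENT
//		yaml_event_delete(&event)
//		if done {
//			break
//		}
//	}
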
// State dispatcher.
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
	//trace("yaml_parser_state_machine", "state:", parser.state.String())

	switch parser.state {
	case yaml_PARSE_STREAM_START_STATE:
		return yaml_parser_parse_stream_start(parser, event)

	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, true)

	case yaml_PARSE_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, false)

	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
		return yaml_parser_parse_document_content(parser, event)

	case yaml_PARSE_DOCUMENT_END_STATE:
		return yaml_parser_parse_document_end(parser, event)

	case yaml_PARSE_BLOCK_NODE_STATE:
		return yaml_parser_parse_node(parser, event, true, false)

	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
		return yaml_parser_parse_node(parser, event, true, true)

	case yaml_PARSE_FLOW_NODE_STATE:
		return yaml_parser_parse_node(parser, event, false, false)

	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, true)

	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, false)

	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_indentless_sequence_entry(parser, event)

	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, true)

	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, false)

	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
		return yaml_parser_parse_block_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, true)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, false)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)

	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, true)

	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, true)

	default:
		panic("invalid parser state")
	}
}

// Parse the production:
// stream   ::= STREAM-START implicit_document? explicit_document* STREAM-END
//              ************
func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_STREAM_START_TOKEN {
		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
	}
	parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_STREAM_START_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
		encoding:   token.encoding,
	}
	skip_token(parser)
	return true
}

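// An implicit document starts directly with its content, while an explicit
// document is introduced by "---" and optionally closed by "...". For
// example, the stream below carries one implicit and one explicit document:
//
//	a: 1
//	---
//	b: 2
//	...
//
// Both forms are handled by yaml_parser_parse_document_start below.
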
// Parse the productions:
// implicit_document    ::= block_node DOCUMENT-END*
//                          *
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                          *************************
func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// Parse extra document end indicators.
	if !implicit {
		for token.typ == yaml_DOCUMENT_END_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
		token.typ != yaml_DOCUMENT_START_TOKEN &&
		token.typ != yaml_STREAM_END_TOKEN {
		// Parse an implicit document.
		if !yaml_parser_process_directives(parser, nil, nil) {
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_BLOCK_NODE_STATE

		var head_comment []byte
		if len(parser.head_comment) > 0 {
			// [Go] Scan the header comment backwards, and if an empty line is found, break
			//      the header so the part before the last empty line goes into the
			//      document header, while the bottom of it goes into a follow up event.
			for i := len(parser.head_comment) - 1; i > 0; i-- {
				if parser.head_comment[i] == '\n' {
					if i == len(parser.head_comment)-1 {
						head_comment = parser.head_comment[:i]
						parser.head_comment = parser.head_comment[i+1:]
						break
					} else if parser.head_comment[i-1] == '\n' {
						head_comment = parser.head_comment[:i-1]
						parser.head_comment = parser.head_comment[i+1:]
						break
					}
				}
			}
		}

		*event = yaml_event_t{
			typ:        yaml_DOCUMENT_START_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,

			head_comment: head_comment,
		}

	} else if token.typ != yaml_STREAM_END_TOKEN {
		// Parse an explicit document.
		var version_directive *yaml_version_directive_t
		var tag_directives []yaml_tag_directive_t
		start_mark := token.start_mark
		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
			return false
		}
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_DOCUMENT_START_TOKEN {
			yaml_parser_set_parser_error(parser,
				"did not find expected <document start>", token.start_mark)
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
		end_mark := token.end_mark

		*event = yaml_event_t{
			typ:               yaml_DOCUMENT_START_EVENT,
			start_mark:        start_mark,
			end_mark:          end_mark,
			version_directive: version_directive,
			tag_directives:    tag_directives,
			implicit:          false,
		}
		skip_token(parser)

	} else {
		// Parse the stream end.
		parser.state = yaml_PARSE_END_STATE
		*event = yaml_event_t{
			typ:        yaml_STREAM_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		skip_token(parser)
	}

	return true
}

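// Directives, when present, must precede an explicit document start marker.
// For example:
//
//	%YAML 1.1
//	%TAG !e! tag:example.com,2000:app/
//	---
//	!e!widget {name: door}
//
// yaml_parser_process_directives (near the end of this file) consumes the
// %YAML and %TAG lines before the DOCUMENT-START token is handled in
// yaml_parser_parse_document_start above.
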
// Parse the productions:
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                                                    ***********
//
func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
		token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
		token.typ == yaml_DOCUMENT_START_TOKEN ||
		token.typ == yaml_DOCUMENT_END_TOKEN ||
		token.typ == yaml_STREAM_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		return yaml_parser_process_empty_scalar(parser, event,
			token.start_mark)
	}
	return yaml_parser_parse_node(parser, event, true, false)
}

// Parse the productions:
// implicit_document    ::= block_node DOCUMENT-END*
//                                     *************
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//
func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	implicit := true
	if token.typ == yaml_DOCUMENT_END_TOKEN {
		end_mark = token.end_mark
		skip_token(parser)
		implicit = false
	}

	parser.tag_directives = parser.tag_directives[:0]

	parser.state = yaml_PARSE_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_DOCUMENT_END_EVENT,
		start_mark: start_mark,
		end_mark:   end_mark,
		implicit:   implicit,
	}
	yaml_parser_set_event_comments(parser, event)
	if len(event.head_comment) > 0 && len(event.foot_comment) == 0 {
		event.foot_comment = event.head_comment
		event.head_comment = nil
	}
	return true
}

func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) {
	event.head_comment = parser.head_comment
	event.line_comment = parser.line_comment
	event.foot_comment = parser.foot_comment
	parser.head_comment = nil
	parser.line_comment = nil
	parser.foot_comment = nil
	parser.tail_comment = nil
	parser.stem_comment = nil
}

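// Node properties attach a tag and/or an anchor to the node that follows, in
// either order. For example, in
//
//	base: &id001 !!map {a: 1}
//	copy: *id001
//
// the value of "base" carries an ANCHOR and a TAG token before its content,
// while the value of "copy" is a single ALIAS token. The "!!" handle resolves
// against the default %TAG directives declared near the end of this file, so
// !!map becomes tag:yaml.org,2002:map.
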
// Parse the productions:
// block_node_or_indentless_sequence    ::=
//                          ALIAS
//                          *****
//                          | properties (block_content | indentless_block_sequence)?
//                            **********  *
//                          | block_content | indentless_block_sequence
//                            *
// block_node           ::= ALIAS
//                          *****
//                          | properties block_content?
//                            ********** *
//                          | block_content
//                            *
// flow_node            ::= ALIAS
//                          *****
//                          | properties flow_content?
//                            ********** *
//                          | flow_content
//                            *
// properties           ::= TAG ANCHOR? | ANCHOR TAG?
//                          *************************
// block_content        ::= block_collection | flow_collection | SCALAR
//                          ******
// flow_content         ::= flow_collection | SCALAR
//                          ******
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
	//defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_ALIAS_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		*event = yaml_event_t{
			typ:        yaml_ALIAS_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
			anchor:     token.value,
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	var tag_token bool
	var tag_handle, tag_suffix, anchor []byte
	var tag_mark yaml_mark_t
	if token.typ == yaml_ANCHOR_TOKEN {
		anchor = token.value
		start_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_TAG_TOKEN {
			tag_token = true
			tag_handle = token.value
			tag_suffix = token.suffix
			tag_mark = token.start_mark
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	} else if token.typ == yaml_TAG_TOKEN {
		tag_token = true
		tag_handle = token.value
		tag_suffix = token.suffix
		start_mark = token.start_mark
		tag_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_ANCHOR_TOKEN {
			anchor = token.value
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	var tag []byte
	if tag_token {
		if len(tag_handle) == 0 {
			tag = tag_suffix
			tag_suffix = nil
		} else {
			for i := range parser.tag_directives {
				if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
					tag = append([]byte(nil), parser.tag_directives[i].prefix...)
					tag = append(tag, tag_suffix...)
					break
				}
			}
			if len(tag) == 0 {
				yaml_parser_set_parser_error_context(parser,
					"while parsing a node", start_mark,
					"found undefined tag handle", tag_mark)
				return false
			}
		}
	}

	implicit := len(tag) == 0
	if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_SCALAR_TOKEN {
		var plain_implicit, quoted_implicit bool
		end_mark = token.end_mark
		if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
			plain_implicit = true
		} else if len(tag) == 0 {
			quoted_implicit = true
		}
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			value:           token.value,
			implicit:        plain_implicit,
			quoted_implicit: quoted_implicit,
			style:           yaml_style_t(token.style),
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}
	if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
		// [Go] Some of the events below can be merged as they differ only on style.
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
		}
		yaml_parser_set_event_comments(parser, event)
		return true
	}
	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
		}
		yaml_parser_set_event_comments(parser, event)
		return true
	}
	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		if parser.stem_comment != nil {
			event.head_comment = parser.stem_comment
			parser.stem_comment = nil
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
		}
		if parser.stem_comment != nil {
			event.head_comment = parser.stem_comment
			parser.stem_comment = nil
		}
		return true
	}
	if len(anchor) > 0 || len(tag) > 0 {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			implicit:        implicit,
			quoted_implicit: false,
			style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
		}
		return true
	}

	context := "while parsing a flow node"
	if block {
		context = "while parsing a block node"
	}
	yaml_parser_set_parser_error_context(parser, context, start_mark,
		"did not find expected node content", token.start_mark)
	return false
}

// Parse the productions:
// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
//                    ********************  *********** *             *********
//
func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		prior_head_len := len(parser.head_comment)
		skip_token(parser)
		yaml_parser_split_stem_comment(parser, prior_head_len)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		} else {
			parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	}
	if token.typ == yaml_BLOCK_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]

		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}

		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block collection", context_mark,
		"did not find expected '-' indicator", token.start_mark)
}

// Parse the productions:
// indentless_sequence  ::= (BLOCK-ENTRY block_node?)+
//                           *********** *
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		prior_head_len := len(parser.head_comment)
		skip_token(parser)
		yaml_parser_split_stem_comment(parser, prior_head_len)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
			token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		}
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
	}
	return true
}

// Split stem comment from head comment.
//
// When a sequence or map is found under a sequence entry, the former head comment
// is assigned to the underlying sequence or map as a whole, not the individual
// sequence or map entry as would be expected otherwise. To handle this case the
// previous head comment is moved aside as the stem comment.
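//
// For example, with
//
//	items:
//	  # about the first item
//	  - name: a
//	    size: 1
//
// the head comment collected ahead of the "-" entry precedes a nested block
// mapping, which is the situation this function splits out;
// yaml_parser_parse_node then consumes parser.stem_comment when it emits the
// start event of that nested collection.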
func yaml_parser_split_stem_comment(parser *yaml_parser_t, stem_len int) {
	if stem_len == 0 {
		return
	}

	token := peek_token(parser)
	if token == nil || token.typ != yaml_BLOCK_SEQUENCE_START_TOKEN && token.typ != yaml_BLOCK_MAPPING_START_TOKEN {
		return
	}

	parser.stem_comment = parser.head_comment[:stem_len]
	if len(parser.head_comment) == stem_len {
		parser.head_comment = nil
	} else {
		// Copy suffix to prevent very strange bugs if someone ever appends
		// further bytes to the prefix in the stem_comment slice above.
		parser.head_comment = append([]byte(nil), parser.head_comment[stem_len+1:]...)
	}
}

// Parse the productions:
// block_mapping        ::= BLOCK-MAPPING_START
//                          *******************
//                          ((KEY block_node_or_indentless_sequence?)?
//                            *** *
//                          (VALUE block_node_or_indentless_sequence?)?)*
//
//                          BLOCK-END
//                          *********
//
func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// [Go] A tail comment was left from the prior mapping value processed. Emit an event
	//      as it needs to be processed with that value and not the following key.
	if len(parser.tail_comment) > 0 {
		*event = yaml_event_t{
			typ:          yaml_TAIL_COMMENT_EVENT,
			start_mark:   token.start_mark,
			end_mark:     token.end_mark,
			foot_comment: parser.tail_comment,
		}
		parser.tail_comment = nil
		return true
	}

	if token.typ == yaml_KEY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		} else {
			parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	} else if token.typ == yaml_BLOCK_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]
		*event = yaml_event_t{
			typ:        yaml_MAPPING_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block mapping", context_mark,
		"did not find expected key", token.start_mark)
}

// Parse the productions:
// block_mapping        ::= BLOCK-MAPPING_START
//
//                          ((KEY block_node_or_indentless_sequence?)?
//
//                          (VALUE block_node_or_indentless_sequence?)?)*
//                           ***** *
//                          BLOCK-END
//
//
func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		}
		parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Parse the productions:
// flow_sequence        ::= FLOW-SEQUENCE-START
//                          *******************
//                          (flow_sequence_entry FLOW-ENTRY)*
//                           *                   **********
//                          flow_sequence_entry?
//                          *
//                          FLOW-SEQUENCE-END
//                          *****************
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                          *
//
func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow sequence", context_mark,
					"did not find expected ',' or ']'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
			*event = yaml_event_t{
				typ:        yaml_MAPPING_START_EVENT,
				start_mark: token.start_mark,
				end_mark:   token.end_mark,
				implicit:   true,
				style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
			}
			skip_token(parser)
			return true
		} else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	yaml_parser_set_event_comments(parser, event)

	skip_token(parser)
	return true
}

//
// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                      *** *
//
func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_VALUE_TOKEN &&
		token.typ != yaml_FLOW_ENTRY_TOKEN &&
		token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
		return yaml_parser_parse_node(parser, event, false, false)
	}
	mark := token.end_mark
	skip_token(parser)
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
	return yaml_parser_process_empty_scalar(parser, event, mark)
}

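// A bare "key: value" pair inside a flow sequence is an implicit single-pair
// mapping. For example,
//
//	[one, two: three, four]
//
// produces SCALAR("one"), then MAPPING-START, SCALAR("two"), SCALAR("three"),
// MAPPING-END, then SCALAR("four") within the surrounding sequence; the
// helper states handled above and below take care of that inner mapping.
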
// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                      ***** *
//
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token := peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                                      *
//
func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be end_mark?
	}
	return true
}

// Parse the productions:
// flow_mapping         ::= FLOW-MAPPING-START
//                          ******************
//                          (flow_mapping_entry FLOW-ENTRY)*
//                           *                  **********
//                          flow_mapping_entry?
//                          ******************
//                          FLOW-MAPPING-END
//                          ****************
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                          *           *** *
//
func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow mapping", context_mark,
					"did not find expected ',' or '}'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
			if token.typ != yaml_VALUE_TOKEN &&
				token.typ != yaml_FLOW_ENTRY_TOKEN &&
				token.typ != yaml_FLOW_MAPPING_END_TOKEN {
				parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
				return yaml_parser_parse_node(parser, event, false, false)
			} else {
				parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
				return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
			}
		} else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	yaml_parser_set_event_comments(parser, event)
	skip_token(parser)
	return true
}

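// Missing values in a flow mapping are reported as empty scalars. For
// example,
//
//	{a: 1, b, c: }
//
// yields SCALAR("a")/SCALAR("1"), SCALAR("b") paired with an empty scalar,
// and SCALAR("c") paired with another empty scalar before the MAPPING-END
// event.
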
// Parse the productions:
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                          *                           ***** *
//
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if empty {
		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Generate an empty scalar event.
func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
	*event = yaml_event_t{
		typ:        yaml_SCALAR_EVENT,
		start_mark: mark,
		end_mark:   mark,
		value:      nil, // Empty
		implicit:   true,
		style:      yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
	}
	return true
}

var default_tag_directives = []yaml_tag_directive_t{
	{[]byte("!"), []byte("!")},
	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
}

// Parse directives.
func yaml_parser_process_directives(parser *yaml_parser_t,
	version_directive_ref **yaml_version_directive_t,
	tag_directives_ref *[]yaml_tag_directive_t) bool {

	var version_directive *yaml_version_directive_t
	var tag_directives []yaml_tag_directive_t

	token := peek_token(parser)
	if token == nil {
		return false
	}

	for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
		if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
			if version_directive != nil {
				yaml_parser_set_parser_error(parser,
					"found duplicate %YAML directive", token.start_mark)
				return false
			}
			if token.major != 1 || token.minor != 1 {
				yaml_parser_set_parser_error(parser,
					"found incompatible YAML document", token.start_mark)
				return false
			}
			version_directive = &yaml_version_directive_t{
				major: token.major,
				minor: token.minor,
			}
		} else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
			value := yaml_tag_directive_t{
				handle: token.value,
				prefix: token.prefix,
			}
			if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
				return false
			}
			tag_directives = append(tag_directives, value)
		}

		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
	}

	for i := range default_tag_directives {
		if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
			return false
		}
	}

	if version_directive_ref != nil {
		*version_directive_ref = version_directive
	}
	if tag_directives_ref != nil {
		*tag_directives_ref = tag_directives
	}
	return true
}

// Append a tag directive to the directives stack.
func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
	for i := range parser.tag_directives {
		if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
			if allow_duplicates {
				return true
			}
			return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
		}
	}

	// [Go] I suspect the copy is unnecessary. This was likely done
	// because there was no way to track ownership of the data.
	value_copy := yaml_tag_directive_t{
		handle: make([]byte, len(value.handle)),
		prefix: make([]byte, len(value.prefix)),
	}
	copy(value_copy.handle, value.handle)
	copy(value_copy.prefix, value.prefix)
	parser.tag_directives = append(parser.tag_directives, value_copy)
	return true
}