Path: blob/main/vendor/go.yaml.in/yaml/v3/parserc.go
//
// Copyright (c) 2011-2019 Canonical Ltd
// Copyright (c) 2006-2010 Kirill Simonov
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package yaml

import (
	"bytes"
)

// The parser implements the following grammar:
//
// stream               ::= STREAM-START implicit_document? explicit_document* STREAM-END
// implicit_document    ::= block_node DOCUMENT-END*
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
// block_node_or_indentless_sequence    ::=
//                          ALIAS
//                          | properties (block_content | indentless_block_sequence)?
//                          | block_content
//                          | indentless_block_sequence
// block_node           ::= ALIAS
//                          | properties block_content?
//                          | block_content
// flow_node            ::= ALIAS
//                          | properties flow_content?
//                          | flow_content
// properties           ::= TAG ANCHOR? | ANCHOR TAG?
// block_content        ::= block_collection | flow_collection | SCALAR
// flow_content         ::= flow_collection | SCALAR
// block_collection     ::= block_sequence | block_mapping
// flow_collection      ::= flow_sequence | flow_mapping
// block_sequence       ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
// indentless_sequence  ::= (BLOCK-ENTRY block_node?)+
// block_mapping        ::= BLOCK-MAPPING_START
//                          ((KEY block_node_or_indentless_sequence?)?
//                          (VALUE block_node_or_indentless_sequence?)?)*
//                          BLOCK-END
// flow_sequence        ::= FLOW-SEQUENCE-START
//                          (flow_sequence_entry FLOW-ENTRY)*
//                          flow_sequence_entry?
//                          FLOW-SEQUENCE-END
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
// flow_mapping         ::= FLOW-MAPPING-START
//                          (flow_mapping_entry FLOW-ENTRY)*
//                          flow_mapping_entry?
//                          FLOW-MAPPING-END
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
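
// As an informal illustration of the grammar above, a single-document stream
// such as
//
//	a: 1
//
// is parsed into roughly this event sequence:
//
//	STREAM-START
//	DOCUMENT-START (implicit)
//	MAPPING-START
//	SCALAR "a", SCALAR "1"
//	MAPPING-END
//	DOCUMENT-END (implicit)
//	STREAM-END
//
// The upper-case names in the productions refer to the yaml_*_TOKEN values
// produced by the scanner, and each state handler below fills in one event
// per successful call.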

// Peek the next token in the token queue.
func peek_token(parser *yaml_parser_t) *yaml_token_t {
	if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
		token := &parser.tokens[parser.tokens_head]
		yaml_parser_unfold_comments(parser, token)
		return token
	}
	return nil
}

// yaml_parser_unfold_comments walks through the comments queue and joins all
// comments behind the position of the provided token into the respective
// top-level comment slices in the parser.
func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) {
	for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].token_mark.index {
		comment := &parser.comments[parser.comments_head]
		if len(comment.head) > 0 {
			if token.typ == yaml_BLOCK_END_TOKEN {
				// No heads on ends, so keep comment.head for a follow up token.
				break
			}
			if len(parser.head_comment) > 0 {
				parser.head_comment = append(parser.head_comment, '\n')
			}
			parser.head_comment = append(parser.head_comment, comment.head...)
		}
		if len(comment.foot) > 0 {
			if len(parser.foot_comment) > 0 {
				parser.foot_comment = append(parser.foot_comment, '\n')
			}
			parser.foot_comment = append(parser.foot_comment, comment.foot...)
		}
		if len(comment.line) > 0 {
			if len(parser.line_comment) > 0 {
				parser.line_comment = append(parser.line_comment, '\n')
			}
			parser.line_comment = append(parser.line_comment, comment.line...)
		}
		*comment = yaml_comment_t{}
		parser.comments_head++
	}
}

// Remove the next token from the queue (must be called after peek_token).
func skip_token(parser *yaml_parser_t) {
	parser.token_available = false
	parser.tokens_parsed++
	parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
	parser.tokens_head++
}

// Get the next event.
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
	// Erase the event object.
	*event = yaml_event_t{}

	// No events after the end of the stream or error.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
		return true
	}

	// Generate the next event.
	return yaml_parser_state_machine(parser, event)
}

// Set parser error.
func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}

func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.context = context
	parser.context_mark = context_mark
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}

// State dispatcher.
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
	//trace("yaml_parser_state_machine", "state:", parser.state.String())

	switch parser.state {
	case yaml_PARSE_STREAM_START_STATE:
		return yaml_parser_parse_stream_start(parser, event)

	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, true)

	case yaml_PARSE_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, false)

	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
		return yaml_parser_parse_document_content(parser, event)

	case yaml_PARSE_DOCUMENT_END_STATE:
		return yaml_parser_parse_document_end(parser, event)

	case yaml_PARSE_BLOCK_NODE_STATE:
		return yaml_parser_parse_node(parser, event, true, false)

	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
		return yaml_parser_parse_node(parser, event, true, true)

	case yaml_PARSE_FLOW_NODE_STATE:
		return yaml_parser_parse_node(parser, event, false, false)

	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, true)

	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, false)

	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_indentless_sequence_entry(parser, event)

	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, true)

	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, false)

	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
		return yaml_parser_parse_block_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, true)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, false)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)

	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, true)

	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, true)

	default:
		panic("invalid parser state")
	}
}

// Parse the production:
// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
//
//	************
func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_STREAM_START_TOKEN {
		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
	}
	parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_STREAM_START_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
		encoding:   token.encoding,
	}
	skip_token(parser)
	return true
}
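
// A note on document forms (illustrative): an explicit document is introduced
// by "---" and may be preceded by %YAML/%TAG directives, while an implicit
// document starts directly with its root node:
//
//	foo: bar        # implicit document
//
//	---             # explicit document
//	foo: bar
//
// yaml_parser_parse_document_start below handles both forms, as well as the
// bare end of the stream.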

// Parse the productions:
// implicit_document ::= block_node DOCUMENT-END*
//
//	*
//
// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//
//	*************************
func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// Parse extra document end indicators.
	if !implicit {
		for token.typ == yaml_DOCUMENT_END_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
		token.typ != yaml_DOCUMENT_START_TOKEN &&
		token.typ != yaml_STREAM_END_TOKEN {
		// Parse an implicit document.
		if !yaml_parser_process_directives(parser, nil, nil) {
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_BLOCK_NODE_STATE

		var head_comment []byte
		if len(parser.head_comment) > 0 {
			// [Go] Scan the header comment backwards, and if an empty line is found, break
			// the header so the part before the last empty line goes into the
			// document header, while the bottom of it goes into a follow up event.
			for i := len(parser.head_comment) - 1; i > 0; i-- {
				if parser.head_comment[i] == '\n' {
					if i == len(parser.head_comment)-1 {
						head_comment = parser.head_comment[:i]
						parser.head_comment = parser.head_comment[i+1:]
						break
					} else if parser.head_comment[i-1] == '\n' {
						head_comment = parser.head_comment[:i-1]
						parser.head_comment = parser.head_comment[i+1:]
						break
					}
				}
			}
		}

		*event = yaml_event_t{
			typ:        yaml_DOCUMENT_START_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,

			head_comment: head_comment,
		}

	} else if token.typ != yaml_STREAM_END_TOKEN {
		// Parse an explicit document.
		var version_directive *yaml_version_directive_t
		var tag_directives []yaml_tag_directive_t
		start_mark := token.start_mark
		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
			return false
		}
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_DOCUMENT_START_TOKEN {
			yaml_parser_set_parser_error(parser,
				"did not find expected <document start>", token.start_mark)
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
		end_mark := token.end_mark

		*event = yaml_event_t{
			typ:               yaml_DOCUMENT_START_EVENT,
			start_mark:        start_mark,
			end_mark:          end_mark,
			version_directive: version_directive,
			tag_directives:    tag_directives,
			implicit:          false,
		}
		skip_token(parser)

	} else {
		// Parse the stream end.
		parser.state = yaml_PARSE_END_STATE
		*event = yaml_event_t{
			typ:        yaml_STREAM_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		skip_token(parser)
	}

	return true
}
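
// Note that a document may have no content at all. For example, the stream
//
//	---
//	...
//
// contains a single document whose content is missing; in that case
// yaml_parser_parse_document_content below emits an empty (null) scalar via
// yaml_parser_process_empty_scalar instead of descending into a node.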

// Parse the productions:
// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//
//	***********
func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
		token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
		token.typ == yaml_DOCUMENT_START_TOKEN ||
		token.typ == yaml_DOCUMENT_END_TOKEN ||
		token.typ == yaml_STREAM_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		return yaml_parser_process_empty_scalar(parser, event,
			token.start_mark)
	}
	return yaml_parser_parse_node(parser, event, true, false)
}

// Parse the productions:
// implicit_document ::= block_node DOCUMENT-END*
//
//	*************
//
// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	implicit := true
	if token.typ == yaml_DOCUMENT_END_TOKEN {
		end_mark = token.end_mark
		skip_token(parser)
		implicit = false
	}

	parser.tag_directives = parser.tag_directives[:0]

	parser.state = yaml_PARSE_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_DOCUMENT_END_EVENT,
		start_mark: start_mark,
		end_mark:   end_mark,
		implicit:   implicit,
	}
	yaml_parser_set_event_comments(parser, event)
	if len(event.head_comment) > 0 && len(event.foot_comment) == 0 {
		event.foot_comment = event.head_comment
		event.head_comment = nil
	}
	return true
}

func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) {
	event.head_comment = parser.head_comment
	event.line_comment = parser.line_comment
	event.foot_comment = parser.foot_comment
	parser.head_comment = nil
	parser.line_comment = nil
	parser.foot_comment = nil
	parser.tail_comment = nil
	parser.stem_comment = nil
}
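
// Node properties may appear in either order before the node's content, for
// example
//
//	first:  &anchor !!str value
//	second: !!str &anchor2 value
//
// which is why yaml_parser_parse_node below accepts ANCHOR followed by an
// optional TAG as well as TAG followed by an optional ANCHOR.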

// Parse the productions:
// block_node_or_indentless_sequence ::=
//
//	ALIAS
//	*****
//	| properties (block_content | indentless_block_sequence)?
//	**********  *
//	| block_content | indentless_block_sequence
//	*
//
// block_node ::= ALIAS
//
//	*****
//	| properties block_content?
//	********** *
//	| block_content
//	*
//
// flow_node ::= ALIAS
//
//	*****
//	| properties flow_content?
//	********** *
//	| flow_content
//	*
//
// properties ::= TAG ANCHOR? | ANCHOR TAG?
//
//	*************************
//
// block_content ::= block_collection | flow_collection | SCALAR
//
//	******
//
// flow_content ::= flow_collection | SCALAR
//
//	******
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
	//defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_ALIAS_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		*event = yaml_event_t{
			typ:        yaml_ALIAS_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
			anchor:     token.value,
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	var tag_token bool
	var tag_handle, tag_suffix, anchor []byte
	var tag_mark yaml_mark_t
	if token.typ == yaml_ANCHOR_TOKEN {
		anchor = token.value
		start_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_TAG_TOKEN {
			tag_token = true
			tag_handle = token.value
			tag_suffix = token.suffix
			tag_mark = token.start_mark
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	} else if token.typ == yaml_TAG_TOKEN {
		tag_token = true
		tag_handle = token.value
		tag_suffix = token.suffix
		start_mark = token.start_mark
		tag_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_ANCHOR_TOKEN {
			anchor = token.value
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	var tag []byte
	if tag_token {
		if len(tag_handle) == 0 {
			tag = tag_suffix
			tag_suffix = nil
		} else {
			for i := range parser.tag_directives {
				if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
					tag = append([]byte(nil), parser.tag_directives[i].prefix...)
					tag = append(tag, tag_suffix...)
					break
				}
			}
			if len(tag) == 0 {
				yaml_parser_set_parser_error_context(parser,
					"while parsing a node", start_mark,
					"found undefined tag handle", tag_mark)
				return false
			}
		}
	}

	implicit := len(tag) == 0
	if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_SCALAR_TOKEN {
		var plain_implicit, quoted_implicit bool
		end_mark = token.end_mark
		if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
			plain_implicit = true
		} else if len(tag) == 0 {
			quoted_implicit = true
		}
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			value:           token.value,
			implicit:        plain_implicit,
			quoted_implicit: quoted_implicit,
			style:           yaml_style_t(token.style),
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}
	if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
		// [Go] Some of the events below can be merged as they differ only on style.
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
		}
		yaml_parser_set_event_comments(parser, event)
		return true
	}
	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
		}
		yaml_parser_set_event_comments(parser, event)
		return true
	}
	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		if parser.stem_comment != nil {
			event.head_comment = parser.stem_comment
			parser.stem_comment = nil
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
		}
		if parser.stem_comment != nil {
			event.head_comment = parser.stem_comment
			parser.stem_comment = nil
		}
		return true
	}
	if len(anchor) > 0 || len(tag) > 0 {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			implicit:        implicit,
			quoted_implicit: false,
			style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
		}
		return true
	}

	context := "while parsing a flow node"
	if block {
		context = "while parsing a block node"
	}
	yaml_parser_set_parser_error_context(parser, context, start_mark,
		"did not find expected node content", token.start_mark)
	return false
}

// Parse the productions:
// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
//
//	******************** *********** * *********
func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		prior_head_len := len(parser.head_comment)
		skip_token(parser)
		yaml_parser_split_stem_comment(parser, prior_head_len)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		} else {
			parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	}
	if token.typ == yaml_BLOCK_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]

		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}

		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block collection", context_mark,
		"did not find expected '-' indicator", token.start_mark)
}

// Parse the productions:
// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
//
//	*********** *
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		prior_head_len := len(parser.head_comment)
		skip_token(parser)
		yaml_parser_split_stem_comment(parser, prior_head_len)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
			token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		}
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
	}
	return true
}

// Split stem comment from head comment.
//
// When a sequence or map is found under a sequence entry, the former head comment
// is assigned to the underlying sequence or map as a whole, not the individual
// sequence or map entry as would be expected otherwise. To handle this case the
// previous head comment is moved aside as the stem comment.
func yaml_parser_split_stem_comment(parser *yaml_parser_t, stem_len int) {
	if stem_len == 0 {
		return
	}

	token := peek_token(parser)
	if token == nil || token.typ != yaml_BLOCK_SEQUENCE_START_TOKEN && token.typ != yaml_BLOCK_MAPPING_START_TOKEN {
		return
	}

	parser.stem_comment = parser.head_comment[:stem_len]
	if len(parser.head_comment) == stem_len {
		parser.head_comment = nil
	} else {
		// Copy suffix to prevent very strange bugs if someone ever appends
		// further bytes to the prefix in the stem_comment slice above.
		parser.head_comment = append([]byte(nil), parser.head_comment[stem_len+1:]...)
	}
}

// Parse the productions:
// block_mapping ::= BLOCK-MAPPING_START
//
//	*******************
//	((KEY block_node_or_indentless_sequence?)?
//	*** *
//	(VALUE block_node_or_indentless_sequence?)?)*
//
//	BLOCK-END
//	*********
func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// [Go] A tail comment was left from the prior mapping value processed. Emit an event
	// as it needs to be processed with that value and not the following key.
	if len(parser.tail_comment) > 0 {
		*event = yaml_event_t{
			typ:          yaml_TAIL_COMMENT_EVENT,
			start_mark:   token.start_mark,
			end_mark:     token.end_mark,
			foot_comment: parser.tail_comment,
		}
		parser.tail_comment = nil
		return true
	}

	if token.typ == yaml_KEY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		} else {
			parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
			return yaml_parser_process_empty_scalar(parser, event, mark)
		}
	} else if token.typ == yaml_BLOCK_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		parser.marks = parser.marks[:len(parser.marks)-1]
		*event = yaml_event_t{
			typ:        yaml_MAPPING_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		yaml_parser_set_event_comments(parser, event)
		skip_token(parser)
		return true
	}

	context_mark := parser.marks[len(parser.marks)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	return yaml_parser_set_parser_error_context(parser,
		"while parsing a block mapping", context_mark,
		"did not find expected key", token.start_mark)
}

// Parse the productions:
// block_mapping ::= BLOCK-MAPPING_START
//
//	((KEY block_node_or_indentless_sequence?)?
//
//	(VALUE block_node_or_indentless_sequence?)?)*
//	***** *
//	BLOCK-END
func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		}
		parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Parse the productions:
// flow_sequence ::= FLOW-SEQUENCE-START
//
//	*******************
//	(flow_sequence_entry FLOW-ENTRY)*
//	* **********
//	flow_sequence_entry?
//	*
//	FLOW-SEQUENCE-END
//	*****************
//
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//
//	*
func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		if token == nil {
			return false
		}
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow sequence", context_mark,
					"did not find expected ',' or ']'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
			*event = yaml_event_t{
				typ:        yaml_MAPPING_START_EVENT,
				start_mark: token.start_mark,
				end_mark:   token.end_mark,
				implicit:   true,
				style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
			}
			skip_token(parser)
			return true
		} else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	yaml_parser_set_event_comments(parser, event)

	skip_token(parser)
	return true
}

// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//
//	*** *
func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_VALUE_TOKEN &&
		token.typ != yaml_FLOW_ENTRY_TOKEN &&
		token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
		return yaml_parser_parse_node(parser, event, false, false)
	}
	mark := token.end_mark
	skip_token(parser)
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
	return yaml_parser_process_empty_scalar(parser, event, mark)
}
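
// A KEY token inside a flow sequence introduces a single-pair mapping, e.g.
//
//	[one: two, three]
//
// is a sequence whose first entry is the mapping {one: two}. The
// yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_* states handled here and below emit
// that implicit mapping's key, value, and end events.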

// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//
//	***** *
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token := peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Parse the productions:
// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//
//	*
func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be end_mark?
	}
	return true
}

// Parse the productions:
// flow_mapping ::= FLOW-MAPPING-START
//
//	******************
//	(flow_mapping_entry FLOW-ENTRY)*
//	* **********
//	flow_mapping_entry?
//	******************
//	FLOW-MAPPING-END
//	****************
//
// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//	- *** *
func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
	if first {
		token := peek_token(parser)
		parser.marks = append(parser.marks, token.start_mark)
		skip_token(parser)
	}

	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
		if !first {
			if token.typ == yaml_FLOW_ENTRY_TOKEN {
				skip_token(parser)
				token = peek_token(parser)
				if token == nil {
					return false
				}
			} else {
				context_mark := parser.marks[len(parser.marks)-1]
				parser.marks = parser.marks[:len(parser.marks)-1]
				return yaml_parser_set_parser_error_context(parser,
					"while parsing a flow mapping", context_mark,
					"did not find expected ',' or '}'", token.start_mark)
			}
		}

		if token.typ == yaml_KEY_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
			if token.typ != yaml_VALUE_TOKEN &&
				token.typ != yaml_FLOW_ENTRY_TOKEN &&
				token.typ != yaml_FLOW_MAPPING_END_TOKEN {
				parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
				return yaml_parser_parse_node(parser, event, false, false)
			} else {
				parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
				return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
			}
		} else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}

	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]
	parser.marks = parser.marks[:len(parser.marks)-1]
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
	}
	yaml_parser_set_event_comments(parser, event)
	skip_token(parser)
	return true
}

// Parse the productions:
// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//	- ***** *
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if empty {
		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}

// Generate an empty scalar event.
func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
	*event = yaml_event_t{
		typ:        yaml_SCALAR_EVENT,
		start_mark: mark,
		end_mark:   mark,
		value:      nil, // Empty
		implicit:   true,
		style:      yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
	}
	return true
}

var default_tag_directives = []yaml_tag_directive_t{
	{[]byte("!"), []byte("!")},
	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
}

// Parse directives.
func yaml_parser_process_directives(parser *yaml_parser_t,
	version_directive_ref **yaml_version_directive_t,
	tag_directives_ref *[]yaml_tag_directive_t) bool {

	var version_directive *yaml_version_directive_t
	var tag_directives []yaml_tag_directive_t

	token := peek_token(parser)
	if token == nil {
		return false
	}

	for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
		if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
			if version_directive != nil {
				yaml_parser_set_parser_error(parser,
					"found duplicate %YAML directive", token.start_mark)
				return false
			}
			if token.major != 1 || token.minor != 1 {
				yaml_parser_set_parser_error(parser,
					"found incompatible YAML document", token.start_mark)
				return false
			}
			version_directive = &yaml_version_directive_t{
				major: token.major,
				minor: token.minor,
			}
		} else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
			value := yaml_tag_directive_t{
				handle: token.value,
				prefix: token.prefix,
			}
			if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
				return false
			}
			tag_directives = append(tag_directives, value)
		}

		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
	}

	for i := range default_tag_directives {
		if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
			return false
		}
	}

	if version_directive_ref != nil {
		*version_directive_ref = version_directive
	}
	if tag_directives_ref != nil {
		*tag_directives_ref = tag_directives
	}
	return true
}
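
// For reference, a %TAG directive associates a handle with a prefix, and the
// handle is later expanded by yaml_parser_parse_node when resolving tags:
//
//	%TAG !e! tag:example.com,2000:app/
//	---
//	- !e!foo bar
//
// Here !e!foo resolves to tag:example.com,2000:app/foo. The
// default_tag_directives above supply the standard "!" and "!!" handles when
// a document does not redefine them.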

// Append a tag directive to the directives stack.
func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
	for i := range parser.tag_directives {
		if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
			if allow_duplicates {
				return true
			}
			return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
		}
	}

	// [Go] I suspect the copy is unnecessary. This was likely done
	// because there was no way to track ownership of the data.
	value_copy := yaml_tag_directive_t{
		handle: make([]byte, len(value.handle)),
		prefix: make([]byte, len(value.prefix)),
	}
	copy(value_copy.handle, value.handle)
	copy(value_copy.prefix, value.prefix)
	parser.tag_directives = append(parser.tag_directives, value_copy)
	return true
}