parent f669ef6bec
commit 6636cf1bea
14 changed files with 67 additions and 68 deletions
@@ -50,7 +50,7 @@ func TestRootMappingFsDirnames(t *testing.T) {
 	fif, err := rfs.Stat(filepath.Join("cf2", testfile))
 	assert.NoError(err)
 	assert.Equal("myfile.txt", fif.Name())
-	assert.Equal("f2t/myfile.txt", fif.(RealFilenameInfo).RealFilename())
+	assert.Equal(filepath.FromSlash("f2t/myfile.txt"), fif.(RealFilenameInfo).RealFilename())

 	root, err := rfs.Open(filepathSeparator)
 	assert.NoError(err)
@@ -191,7 +191,7 @@ func TestSiteBuildErrors(t *testing.T) {
 			},
 			assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
 				assert.Error(err)
-				assert.Contains(err.Error(), `"content/mytoml.md": render of "page" failed: execute of template failed: panic in Execute`)
+				assert.Contains(err.Error(), `execute of template failed: panic in Execute`)
 			},
 		},
 	}
@@ -631,12 +631,10 @@ func assertShouldNotBuild(t *testing.T, sites *HugoSites) {
 		for _, p := range s.rawAllPages {
 			// No HTML when not processed
 			require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("</")), p.BaseFileName()+": "+string(p.workContent))
-			// TODO(bep) 2errors
-			/*
-				require.Equal(t, p.shouldBuild(), p.content() != "", fmt.Sprintf("%v:%v", p.content(), p.shouldBuild()))
-
-				require.Equal(t, p.shouldBuild(), p.content() != "", p.BaseFileName())
-			*/
+			require.Equal(t, p.shouldBuild(), p.content() != "", fmt.Sprintf("%v:%v", p.content(), p.shouldBuild()))
+
+			require.Equal(t, p.shouldBuild(), p.content() != "", p.BaseFileName())

 		}
 	}
@@ -1685,9 +1685,13 @@ func (p *Page) shouldRenderTo(f output.Format) bool {
 	return found
 }

+// RawContent returns the un-rendered source content without
+// any leading front matter.
 func (p *Page) RawContent() string {
-	// TODO(bep) 2errors
-	return string(p.source.parsed.Input())
+	if p.source.posMainContent == -1 {
+		return ""
+	}
+	return string(p.source.parsed.Input()[p.source.posMainContent:])
 }

 func (p *Page) FullFilePath() string {
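RawContent no longer returns the whole parsed input; it slices at the recorded end of the front matter. A minimal runnable sketch of the new slicing rule, with the byte offset hard-coded where Hugo's parser would record it (rawContent here is an illustrative stand-in, not Hugo's API):

package main

import "fmt"

// rawContent mirrors the new RawContent logic: -1 means "no main
// content position recorded", otherwise slice the source there.
func rawContent(input []byte, posMainContent int) string {
	if posMainContent == -1 {
		return ""
	}
	return string(input[posMainContent:])
}

func main() {
	page := []byte("---\ntitle: Raw\n---\n\n**Raw**")
	// Byte 19 is where the main content starts in this example; in Hugo
	// the parser records this as posMainContent (see the hunks below).
	fmt.Println(rawContent(page, 19)) // prints **Raw** after a leading newline
}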
@@ -46,11 +46,11 @@ type rawPageContent struct {
 	hasSummaryDivider bool

 	// The AST of the parsed page. Contains information about:
-	// shortcBackup3odes, front matter, summary indicators.
-	// TODO(bep) 2errors add this to a new rawPagecContent struct
-	// with frontMatterItem (pos) etc.
-	// * RawContent, RawContentWithoutFrontMatter
+	// shortcodes, front matter, summary indicators.
 	parsed pageparser.Result
+
+	// Returns the position in bytes after any front matter.
+	posMainContent int
 }

 // TODO(bep) lazy consolidate
@@ -58,6 +58,7 @@ func (p *Page) mapContent() error {
 	p.shortcodeState = newShortcodeHandler(p)
 	s := p.shortcodeState
 	p.renderable = true
+	p.source.posMainContent = -1

 	result := bp.GetBuffer()
 	defer bp.PutBuffer(result)
@@ -81,8 +82,8 @@ Loop:
 		case it.Type == pageparser.TypeIgnore:
 		case it.Type == pageparser.TypeHTMLComment:
 			// Ignore. This is only a leading Front matter comment.
-		case it.Type == pageparser.TypeHTMLDocument:
-			// This is HTML only. No shortcode, front matter etc.
+		case it.Type == pageparser.TypeHTMLStart:
+			// This is HTML without front matter. It can still have shortcodes.
 			p.renderable = false
 			result.Write(it.Val)
 		case it.IsFrontMatter():
@@ -99,12 +100,17 @@ Loop:
 				return err
 			}

+			next := iter.Peek()
+			if !next.IsDone() {
+				p.source.posMainContent = next.Pos
+			}
+
 			if !p.shouldBuild() {
 				// Nothing more to do.
 				return nil
 			}

-		case it.Type == pageparser.TypeLeadSummaryDivider, it.Type == pageparser.TypeSummaryDividerOrg:
+		case it.Type == pageparser.TypeLeadSummaryDivider:
 			result.Write(internalSummaryDivider)
 			p.source.hasSummaryDivider = true
 			// Need to determine if the page is truncated.
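The front matter case now records where the main content begins: once the front matter item is handled, Peek inspects the next item without consuming it, and its byte position becomes posMainContent. A toy stand-in for the item stream, assuming only the Peek/IsDone/Pos shape visible in this hunk (names and types here are illustrative, not pageparser's actual API):

package main

import "fmt"

type item struct {
	typ string
	pos int // byte offset into the page source
}

func (it item) isDone() bool { return it.typ == "EOF" }

type iterator struct {
	items []item
	i     int
}

func (it *iterator) next() item { it.i++; return it.items[it.i-1] }
func (it *iterator) peek() item { return it.items[it.i] }

func main() {
	iter := &iterator{items: []item{
		{"FrontMatterYAML", 0},
		{"Text", 19}, // first item after the front matter
		{"EOF", 27},
	}}
	posMainContent := -1 // the default set in mapContent
	if fm := iter.next(); fm.typ == "FrontMatterYAML" {
		if next := iter.peek(); !next.isDone() {
			posMainContent = next.pos
		}
	}
	fmt.Println(posMainContent) // 19
}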
@@ -172,7 +178,6 @@ func (p *Page) parse(reader io.Reader) error {
 		parsed: parseResult,
 	}

-	// TODO(bep) 2errors
 	p.lang = p.File.Lang()

 	if p.s != nil && p.s.owner != nil {
@@ -767,8 +767,7 @@ Simple Page With Some Date`
 }

 // Issue #2601
-// TODO(bep) 2errors
-func _TestPageRawContent(t *testing.T) {
+func TestPageRawContent(t *testing.T) {
 	t.Parallel()
 	cfg, fs := newTestCfg()
@@ -784,7 +783,7 @@ title: Raw
 	require.Len(t, s.RegularPages, 1)
 	p := s.RegularPages[0]

-	require.Contains(t, p.RawContent(), "**Raw**")
+	require.Equal(t, p.RawContent(), "**Raw**")

 }
@@ -1042,8 +1041,7 @@ func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) {
 	testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithAllCJKRunes)
 }

-// TODO(bep) 2errors
-func _TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
+func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
 	t.Parallel()
 	settings := map[string]interface{}{"hasCJKLanguage": true}
@@ -1056,8 +1054,7 @@ func _TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
 	testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithAllCJKRunes)
 }

-// TODO(bep) 2errors
-func _TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
+func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
 	t.Parallel()
 	settings := map[string]interface{}{"hasCJKLanguage": true}
@@ -1164,7 +1161,6 @@ func TestShouldRenderContent(t *testing.T) {
 		render bool
 	}{
 		{contentNoFrontmatter, true},
-		// TODO(bep) 2errors {invalidFrontmatterShortDelim, true},
 		{renderNoFrontmatter, false},
 		{contentWithCommentedFrontmatter, true},
 		{contentWithCommentedTextFrontmatter, true},
@@ -134,8 +134,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa

 		if shouldRender {
 			if err := pageOutput.renderResources(); err != nil {
-				// TODO(bep) 2errors
-				s.Log.ERROR.Printf("Failed to render resources for page %q: %s", page, err)
+				s.SendError(page.errorf(err, "failed to render page resources"))
 				continue
 			}
 		}
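The new line replaces an ad-hoc ERROR log with a wrapped error handed to the site's error collector. A rough sketch of the wrap-then-send shape using github.com/pkg/errors (imported as _errors elsewhere in this commit); errorfForPage is a hypothetical stand-in for Page.errorf, whose real signature this diff does not show:

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

// errorfForPage decorates the root cause with page identity while
// keeping the original error in the chain, mimicking the call shape
// s.SendError(page.errorf(err, "failed to render page resources")).
func errorfForPage(pagePath string, err error, msg string) error {
	return errors.Wrapf(err, "%s: %s", pagePath, msg)
}

func main() {
	root := fmt.Errorf("execute of template failed: panic in Execute")
	err := errorfForPage("content/mytoml.md", root, "failed to render page resources")
	fmt.Println(err)
	// content/mytoml.md: failed to render page resources: execute of
	// template failed: panic in Execute
}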
@@ -147,7 +146,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
 		} else {
 			layouts, err = s.layouts(pageOutput)
 			if err != nil {
-				s.Log.ERROR.Printf("Failed to resolve layout output %q for page %q: %s", outFormat.Name, page, err)
+				s.Log.ERROR.Printf("Failed to resolve layout for output %q for page %q: %s", outFormat.Name, page, err)
 				continue
 			}
 		}
@@ -451,8 +451,7 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {

 }

-// TODO(bep) 2errors
-func _TestSkipRender(t *testing.T) {
+func TestSkipRender(t *testing.T) {
 	t.Parallel()
 	sources := [][2]string{
 		{filepath.FromSlash("sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*"},
@@ -103,10 +103,9 @@ const (
 	tEOF

 	// page items
-	TypeHTMLDocument       // document starting with < as first non-whitespace
+	TypeHTMLStart          // document starting with < as first non-whitespace
 	TypeHTMLComment        // We ignore leading comments
-	TypeLeadSummaryDivider // <!--more-->
-	TypeSummaryDividerOrg  // # more
+	TypeLeadSummaryDivider // <!--more-->, # more
 	TypeFrontMatterYAML
 	TypeFrontMatterTOML
 	TypeFrontMatterJSON
@@ -48,6 +48,8 @@ type pageLexer struct {
 	start int // item start position
 	width int // width of last element

+	// The summary divider to look for.
+	summaryDivider []byte
 	// Set when we have parsed any summary divider
 	summaryDividerChecked bool
@@ -69,7 +71,6 @@ func (l *pageLexer) Input() []byte {

 // note: the input position here is normally 0 (start), but
 // can be set if position of first shortcode is known
-// TODO(bep) 2errors byte
 func newPageLexer(input []byte, inputPosition int, stateStart stateFunc) *pageLexer {
 	lexer := &pageLexer{
 		input: input,
@@ -117,7 +118,7 @@ var (
 	delimTOML        = []byte("+++")
 	delimYAML        = []byte("---")
 	delimOrg         = []byte("#+")
-	htmlCOmmentStart = []byte("<!--")
+	htmlCommentStart = []byte("<!--")
 	htmlCOmmentEnd   = []byte("-->")
 )
@@ -195,17 +196,18 @@ func (l *pageLexer) consumeCRLF() bool {

 func lexMainSection(l *pageLexer) stateFunc {
 	// Fast forward as far as possible.
-	var l1, l2, l3 int
-	if !l.summaryDividerChecked {
-		// TODO(bep) 2errors make the summary divider per type
-		l1 = l.index(summaryDivider)
-		l2 = l.index(summaryDividerOrg)
-		if l1 == -1 && l2 == -1 {
+	var l1, l2 int
+	if !l.summaryDividerChecked && l.summaryDivider != nil {
+		l1 = l.index(l.summaryDivider)
+		if l1 == -1 {
 			l.summaryDividerChecked = true
 		}
 	}
-	l3 = l.index(leftDelimSc)
-	skip := minPositiveIndex(l1, l2, l3)
+
+	l2 = l.index(leftDelimSc)
+	skip := minPositiveIndex(l1, l2)

 	if skip > 0 {
 		l.pos += skip
 	}
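The fast-forward now considers at most two candidate offsets (the lexer's own summary divider and the shortcode left delimiter) and jumps to the nearest one that was found. The diff only calls minPositiveIndex; here is a plausible implementation, under the assumption that it returns the smallest positive index or -1 when no candidate qualifies:

// minPositiveIndex returns the smallest index > 0 among the candidates,
// or -1 if there is none. Inferred from the call sites above, not
// copied from Hugo's source.
func minPositiveIndex(indices ...int) int {
	min := -1
	for _, j := range indices {
		if j <= 0 {
			continue
		}
		if min == -1 || j < min {
			min = j
		}
	}
	return min
}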
@@ -225,23 +227,14 @@ func lexMainSection(l *pageLexer) stateFunc {
 		return lexShortcodeLeftDelim
 	}

-	if !l.summaryDividerChecked {
-		if l.hasPrefix(summaryDivider) {
+	if !l.summaryDividerChecked && l.summaryDivider != nil {
+		if l.hasPrefix(l.summaryDivider) {
 			if l.pos > l.start {
 				l.emit(tText)
 			}
 			l.summaryDividerChecked = true
-			l.pos += len(summaryDivider)
-			//l.consumeCRLF()
+			l.pos += len(l.summaryDivider)
 			l.emit(TypeLeadSummaryDivider)
-		} else if l.hasPrefix(summaryDividerOrg) {
-			if l.pos > l.start {
-				l.emit(tText)
-			}
-			l.summaryDividerChecked = true
-			l.pos += len(summaryDividerOrg)
-			//l.consumeCRLF()
-			l.emit(TypeSummaryDividerOrg)
 		}
 	}
@@ -261,6 +254,8 @@ func (l *pageLexer) isShortCodeStart() bool {
 }

 func lexIntroSection(l *pageLexer) stateFunc {
+	l.summaryDivider = summaryDivider
+
LOOP:
 	for {
 		r := l.next()
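With the divider now a lexer field, each intro state installs the one that applies: lexIntroSection picks the regular divider here, and lexFrontMatterOrgMode (two hunks down) switches to the Org variant. The literals below are inferred from the token comments and tests in this commit; the declarations are a sketch of what pagelexer.go presumably holds:

var (
	summaryDivider    = []byte("<!--more-->") // regular pages
	summaryDividerOrg = []byte("# more")      // Org mode pages
)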
@@ -283,7 +278,7 @@ LOOP:
 		// No front matter.
 		if r == '<' {
 			l.backup()
-			if l.hasPrefix(htmlCOmmentStart) {
+			if l.hasPrefix(htmlCommentStart) {
 				right := l.index(htmlCOmmentEnd)
 				if right == -1 {
 					return l.errorf("starting HTML comment with no end")
@@ -291,10 +286,14 @@ LOOP:
 				l.pos += right + len(htmlCOmmentEnd)
 				l.emit(TypeHTMLComment)
 			} else {
-				// Not need to look further. Hugo treats this as plain HTML,
-				// no front matter, no shortcodes, no nothing.
-				l.pos = len(l.input)
-				l.emit(TypeHTMLDocument)
+				if l.pos > l.start {
+					l.emit(tText)
+				}
+				l.next()
+				// This is the start of a plain HTML document with no
+				// front matter. I still can contain shortcodes, so we
+				// have to keep looking.
+				l.emit(TypeHTMLStart)
 			}
 		}
 		break LOOP
@@ -365,10 +364,11 @@ func lexFrontMatterOrgMode(l *pageLexer) stateFunc {
 	#+DESCRIPTION: Just another golang parser for org content!
 	*/

+	l.summaryDivider = summaryDividerOrg
+
 	l.backup()

 	if !l.hasPrefix(delimOrg) {
-		// TODO(bep) consider error
 		return lexMainSection
 	}
@@ -48,7 +48,7 @@ func Parse(r io.Reader) (Result, error) {
 }

 func parseMainSection(input []byte, from int) Result {
-	lexer := newPageLexer(input, from, lexMainSection) // TODO(bep) 2errors
+	lexer := newPageLexer(input, from, lexMainSection)
 	lexer.run()
 	return lexer
 }
@@ -38,7 +38,7 @@ var (
 	tstFrontMatterJSON   = nti(TypeFrontMatterJSON, tstJSON+"\r\n")
 	tstSomeText          = nti(tText, "\nSome text.\n")
 	tstSummaryDivider    = nti(TypeLeadSummaryDivider, "<!--more-->")
-	tstSummaryDividerOrg = nti(TypeSummaryDividerOrg, "# more")
+	tstHtmlStart         = nti(TypeHTMLStart, "<")

 	tstORG = `
 #+TITLE: T1
@@ -54,8 +54,8 @@ var crLfReplacer = strings.NewReplacer("\r", "#", "\n", "$")
 var frontMatterTests = []lexerTest{
 	{"empty", "", []Item{tstEOF}},
 	{"Byte order mark", "\ufeff\nSome text.\n", []Item{nti(TypeIgnore, "\ufeff"), tstSomeText, tstEOF}},
-	{"HTML Document", ` <html> `, []Item{nti(TypeHTMLDocument, " <html> "), tstEOF}},
-	{"HTML Document 2", `<html><h1>Hugo Rocks</h1></html>`, []Item{nti(TypeHTMLDocument, "<html><h1>Hugo Rocks</h1></html>"), tstEOF}},
+	{"HTML Document", ` <html> `, []Item{nti(tText, " "), tstHtmlStart, nti(tText, "html> "), tstEOF}},
+	{"HTML Document with shortcode", `<html>{{< sc1 >}}</html>`, []Item{tstHtmlStart, nti(tText, "html>"), tstLeftNoMD, tstSC1, tstRightNoMD, nti(tText, "</html>"), tstEOF}},
 	{"No front matter", "\nSome text.\n", []Item{tstSomeText, tstEOF}},
 	{"YAML front matter", "---\nfoo: \"bar\"\n---\n\nSome text.\n", []Item{tstFrontMatterYAML, tstSomeText, tstEOF}},
 	{"YAML empty front matter", "---\n---\n\nSome text.\n", []Item{nti(TypeFrontMatterYAML, ""), tstSomeText, tstEOF}},
@@ -65,7 +65,7 @@ var frontMatterTests = []lexerTest{
 	{"TOML front matter", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n", []Item{tstFrontMatterTOML, tstSomeText, tstEOF}},
 	{"JSON front matter", tstJSON + "\r\n\nSome text.\n", []Item{tstFrontMatterJSON, tstSomeText, tstEOF}},
 	{"ORG front matter", tstORG + "\nSome text.\n", []Item{tstFrontMatterORG, tstSomeText, tstEOF}},
-	{"Summary divider ORG", tstORG + "\nSome text.\n# more\nSome text.\n", []Item{tstFrontMatterORG, tstSomeText, tstSummaryDividerOrg, tstSomeText, tstEOF}},
+	{"Summary divider ORG", tstORG + "\nSome text.\n# more\nSome text.\n", []Item{tstFrontMatterORG, tstSomeText, nti(TypeLeadSummaryDivider, "# more"), tstSomeText, tstEOF}},
 	{"Summary divider", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->\nSome text.\n", []Item{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, tstSomeText, tstEOF}},
 }
@@ -120,7 +120,6 @@ func (ns *Namespace) GetJSON(urlParts ...string) (v interface{}, err error) {

 		if err != nil {
 			return nil, _errors.Wrapf(err, "failed to get getJSON resource %q", url)
-			return nil, nil
 		}
 		return
 	}
@@ -179,7 +179,7 @@ func (t *TemplateAdapter) addFileContext(name string, inerr error) error {
 		}
 		return false
 	}
-	// TODO(bep) 2errors text vs HTML
+
 	fe, ok := herrors.WithFileContext(ferr, realFilename, f, lineMatcher)
 	if ok || !hasMaster {
 		return fe