161 changed files with 17436 additions and 6745 deletions
@ -1,88 +0,0 @@
|
||||
// Copyright 2019 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package integrations |
||||
|
||||
import ( |
||||
"bytes" |
||||
"image" |
||||
"image/png" |
||||
"io" |
||||
"mime/multipart" |
||||
"net/http" |
||||
"testing" |
||||
|
||||
"code.gitea.io/gitea/modules/test" |
||||
"github.com/stretchr/testify/assert" |
||||
) |
||||
|
||||
func generateImg() bytes.Buffer { |
||||
// Generate image
|
||||
myImage := image.NewRGBA(image.Rect(0, 0, 32, 32)) |
||||
var buff bytes.Buffer |
||||
png.Encode(&buff, myImage) |
||||
return buff |
||||
} |
||||
|
||||
func createAttachment(t *testing.T, session *TestSession, repoURL, filename string, buff bytes.Buffer, expectedStatus int) string { |
||||
body := &bytes.Buffer{} |
||||
|
||||
//Setup multi-part
|
||||
writer := multipart.NewWriter(body) |
||||
part, err := writer.CreateFormFile("file", filename) |
||||
assert.NoError(t, err) |
||||
_, err = io.Copy(part, &buff) |
||||
assert.NoError(t, err) |
||||
err = writer.Close() |
||||
assert.NoError(t, err) |
||||
|
||||
csrf := GetCSRF(t, session, repoURL) |
||||
|
||||
req := NewRequestWithBody(t, "POST", "/attachments", body) |
||||
req.Header.Add("X-Csrf-Token", csrf) |
||||
req.Header.Add("Content-Type", writer.FormDataContentType()) |
||||
resp := session.MakeRequest(t, req, expectedStatus) |
||||
|
||||
if expectedStatus != http.StatusOK { |
||||
return "" |
||||
} |
||||
var obj map[string]string |
||||
DecodeJSON(t, resp, &obj) |
||||
return obj["uuid"] |
||||
} |
||||
|
||||
func TestCreateAnonymousAttachment(t *testing.T) { |
||||
prepareTestEnv(t) |
||||
session := emptyTestSession(t) |
||||
createAttachment(t, session, "user2/repo1", "image.png", generateImg(), http.StatusFound) |
||||
} |
||||
|
||||
func TestCreateIssueAttachement(t *testing.T) { |
||||
prepareTestEnv(t) |
||||
const repoURL = "user2/repo1" |
||||
session := loginUser(t, "user2") |
||||
uuid := createAttachment(t, session, repoURL, "image.png", generateImg(), http.StatusOK) |
||||
|
||||
req := NewRequest(t, "GET", repoURL+"/issues/new") |
||||
resp := session.MakeRequest(t, req, http.StatusOK) |
||||
htmlDoc := NewHTMLParser(t, resp.Body) |
||||
|
||||
link, exists := htmlDoc.doc.Find("form").Attr("action") |
||||
assert.True(t, exists, "The template has changed") |
||||
|
||||
postData := map[string]string{ |
||||
"_csrf": htmlDoc.GetCSRF(), |
||||
"title": "New Issue With Attachement", |
||||
"content": "some content", |
||||
"files[0]": uuid, |
||||
} |
||||
|
||||
req = NewRequestWithValues(t, "POST", link, postData) |
||||
resp = session.MakeRequest(t, req, http.StatusFound) |
||||
test.RedirectURL(resp) // check that redirect URL exists
|
||||
|
||||
//Validate that attachement is available
|
||||
req = NewRequest(t, "GET", "/attachments/"+uuid) |
||||
session.MakeRequest(t, req, http.StatusOK) |
||||
} |
@ -0,0 +1,137 @@
|
||||
// Copyright 2019 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package integrations |
||||
|
||||
import ( |
||||
"bytes" |
||||
"image" |
||||
"image/png" |
||||
"io" |
||||
"io/ioutil" |
||||
"mime/multipart" |
||||
"net/http" |
||||
"os" |
||||
"path" |
||||
"testing" |
||||
|
||||
"code.gitea.io/gitea/models" |
||||
"code.gitea.io/gitea/modules/test" |
||||
|
||||
"github.com/stretchr/testify/assert" |
||||
) |
||||
|
||||
func generateImg() bytes.Buffer { |
||||
// Generate image
|
||||
myImage := image.NewRGBA(image.Rect(0, 0, 32, 32)) |
||||
var buff bytes.Buffer |
||||
png.Encode(&buff, myImage) |
||||
return buff |
||||
} |
||||
|
||||
func createAttachment(t *testing.T, session *TestSession, repoURL, filename string, buff bytes.Buffer, expectedStatus int) string { |
||||
body := &bytes.Buffer{} |
||||
|
||||
//Setup multi-part
|
||||
writer := multipart.NewWriter(body) |
||||
part, err := writer.CreateFormFile("file", filename) |
||||
assert.NoError(t, err) |
||||
_, err = io.Copy(part, &buff) |
||||
assert.NoError(t, err) |
||||
err = writer.Close() |
||||
assert.NoError(t, err) |
||||
|
||||
csrf := GetCSRF(t, session, repoURL) |
||||
|
||||
req := NewRequestWithBody(t, "POST", "/attachments", body) |
||||
req.Header.Add("X-Csrf-Token", csrf) |
||||
req.Header.Add("Content-Type", writer.FormDataContentType()) |
||||
resp := session.MakeRequest(t, req, expectedStatus) |
||||
|
||||
if expectedStatus != http.StatusOK { |
||||
return "" |
||||
} |
||||
var obj map[string]string |
||||
DecodeJSON(t, resp, &obj) |
||||
return obj["uuid"] |
||||
} |
||||
|
||||
func TestCreateAnonymousAttachment(t *testing.T) { |
||||
prepareTestEnv(t) |
||||
session := emptyTestSession(t) |
||||
createAttachment(t, session, "user2/repo1", "image.png", generateImg(), http.StatusFound) |
||||
} |
||||
|
||||
func TestCreateIssueAttachment(t *testing.T) { |
||||
prepareTestEnv(t) |
||||
const repoURL = "user2/repo1" |
||||
session := loginUser(t, "user2") |
||||
uuid := createAttachment(t, session, repoURL, "image.png", generateImg(), http.StatusOK) |
||||
|
||||
req := NewRequest(t, "GET", repoURL+"/issues/new") |
||||
resp := session.MakeRequest(t, req, http.StatusOK) |
||||
htmlDoc := NewHTMLParser(t, resp.Body) |
||||
|
||||
link, exists := htmlDoc.doc.Find("form").Attr("action") |
||||
assert.True(t, exists, "The template has changed") |
||||
|
||||
postData := map[string]string{ |
||||
"_csrf": htmlDoc.GetCSRF(), |
||||
"title": "New Issue With Attachment", |
||||
"content": "some content", |
||||
"files": uuid, |
||||
} |
||||
|
||||
req = NewRequestWithValues(t, "POST", link, postData) |
||||
resp = session.MakeRequest(t, req, http.StatusFound) |
||||
test.RedirectURL(resp) // check that redirect URL exists
|
||||
|
||||
//Validate that attachment is available
|
||||
req = NewRequest(t, "GET", "/attachments/"+uuid) |
||||
session.MakeRequest(t, req, http.StatusOK) |
||||
} |
||||
|
||||
func TestGetAttachment(t *testing.T) { |
||||
prepareTestEnv(t) |
||||
adminSession := loginUser(t, "user1") |
||||
user2Session := loginUser(t, "user2") |
||||
user8Session := loginUser(t, "user8") |
||||
emptySession := emptyTestSession(t) |
||||
testCases := []struct { |
||||
name string |
||||
uuid string |
||||
createFile bool |
||||
session *TestSession |
||||
want int |
||||
}{ |
||||
{"LinkedIssueUUID", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", true, user2Session, http.StatusOK}, |
||||
{"LinkedCommentUUID", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a17", true, user2Session, http.StatusOK}, |
||||
{"linked_release_uuid", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a19", true, user2Session, http.StatusOK}, |
||||
{"NotExistingUUID", "b0eebc99-9c0b-4ef8-bb6d-6bb9bd380a18", false, user2Session, http.StatusNotFound}, |
||||
{"FileMissing", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a18", false, user2Session, http.StatusInternalServerError}, |
||||
{"NotLinked", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a20", true, user2Session, http.StatusNotFound}, |
||||
{"NotLinkedAccessibleByUploader", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a20", true, user8Session, http.StatusOK}, |
||||
{"PublicByNonLogged", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11", true, emptySession, http.StatusOK}, |
||||
{"PrivateByNonLogged", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12", true, emptySession, http.StatusNotFound}, |
||||
{"PrivateAccessibleByAdmin", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12", true, adminSession, http.StatusOK}, |
||||
{"PrivateAccessibleByUser", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12", true, user2Session, http.StatusOK}, |
||||
{"RepoNotAccessibleByUser", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12", true, user8Session, http.StatusNotFound}, |
||||
{"OrgNotAccessibleByUser", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a21", true, user8Session, http.StatusNotFound}, |
||||
} |
||||
for _, tc := range testCases { |
||||
t.Run(tc.name, func(t *testing.T) { |
||||
//Write empty file to be available for response
|
||||
if tc.createFile { |
||||
localPath := models.AttachmentLocalPath(tc.uuid) |
||||
err := os.MkdirAll(path.Dir(localPath), os.ModePerm) |
||||
assert.NoError(t, err) |
||||
err = ioutil.WriteFile(localPath, []byte("hello world"), 0644) |
||||
assert.NoError(t, err) |
||||
} |
||||
//Actual test
|
||||
req := NewRequest(t, "GET", "/attachments/"+tc.uuid) |
||||
tc.session.MakeRequest(t, req, tc.want) |
||||
}) |
||||
} |
||||
} |
@ -0,0 +1,17 @@
|
||||
// Copyright 2020 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package migrations |
||||
|
||||
import ( |
||||
"xorm.io/xorm" |
||||
) |
||||
|
||||
func addBlockOnRejectedReviews(x *xorm.Engine) error { |
||||
type ProtectedBranch struct { |
||||
BlockOnRejectedReviews bool `xorm:"NOT NULL DEFAULT false"` |
||||
} |
||||
|
||||
return x.Sync2(new(ProtectedBranch)) |
||||
} |
@ -0,0 +1,507 @@
|
||||
// Copyright 2019 Yusuke Inuzuka
|
||||
// Copyright 2019 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Most of what follows is a subtly changed version of github.com/yuin/goldmark/extension/footnote.go
|
||||
|
||||
package common |
||||
|
||||
import ( |
||||
"bytes" |
||||
"fmt" |
||||
"os" |
||||
"strconv" |
||||
"unicode" |
||||
|
||||
"github.com/yuin/goldmark" |
||||
"github.com/yuin/goldmark/ast" |
||||
"github.com/yuin/goldmark/parser" |
||||
"github.com/yuin/goldmark/renderer" |
||||
"github.com/yuin/goldmark/renderer/html" |
||||
"github.com/yuin/goldmark/text" |
||||
"github.com/yuin/goldmark/util" |
||||
) |
||||
|
||||
// CleanValue will clean a value to make it safe to be an id
|
||||
// This function is quite different from the original goldmark function
|
||||
// and more closely matches the output from the shurcooL sanitizer
|
||||
// In particular Unicode letters and numbers are a lot more than a-zA-Z0-9...
|
||||
func CleanValue(value []byte) []byte { |
||||
value = bytes.TrimSpace(value) |
||||
rs := bytes.Runes(value) |
||||
result := make([]rune, 0, len(rs)) |
||||
needsDash := false |
||||
for _, r := range rs { |
||||
switch { |
||||
case unicode.IsLetter(r) || unicode.IsNumber(r): |
||||
if needsDash && len(result) > 0 { |
||||
result = append(result, '-') |
||||
} |
||||
needsDash = false |
||||
result = append(result, unicode.ToLower(r)) |
||||
default: |
||||
needsDash = true |
||||
} |
||||
} |
||||
return []byte(string(result)) |
||||
} |
||||
|
||||
// Most of what follows is a subtly changed version of github.com/yuin/goldmark/extension/footnote.go
|
||||
|
||||
// A FootnoteLink struct represents a link to a footnote of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type FootnoteLink struct { |
||||
ast.BaseInline |
||||
Index int |
||||
Name []byte |
||||
} |
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *FootnoteLink) Dump(source []byte, level int) { |
||||
m := map[string]string{} |
||||
m["Index"] = fmt.Sprintf("%v", n.Index) |
||||
m["Name"] = fmt.Sprintf("%v", n.Name) |
||||
ast.DumpHelper(n, source, level, m, nil) |
||||
} |
||||
|
||||
// KindFootnoteLink is a NodeKind of the FootnoteLink node.
|
||||
var KindFootnoteLink = ast.NewNodeKind("GiteaFootnoteLink") |
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FootnoteLink) Kind() ast.NodeKind { |
||||
return KindFootnoteLink |
||||
} |
||||
|
||||
// NewFootnoteLink returns a new FootnoteLink node.
|
||||
func NewFootnoteLink(index int, name []byte) *FootnoteLink { |
||||
return &FootnoteLink{ |
||||
Index: index, |
||||
Name: name, |
||||
} |
||||
} |
||||
|
||||
// A FootnoteBackLink struct represents a link to a footnote of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type FootnoteBackLink struct { |
||||
ast.BaseInline |
||||
Index int |
||||
Name []byte |
||||
} |
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *FootnoteBackLink) Dump(source []byte, level int) { |
||||
m := map[string]string{} |
||||
m["Index"] = fmt.Sprintf("%v", n.Index) |
||||
m["Name"] = fmt.Sprintf("%v", n.Name) |
||||
ast.DumpHelper(n, source, level, m, nil) |
||||
} |
||||
|
||||
// KindFootnoteBackLink is a NodeKind of the FootnoteBackLink node.
|
||||
var KindFootnoteBackLink = ast.NewNodeKind("GiteaFootnoteBackLink") |
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FootnoteBackLink) Kind() ast.NodeKind { |
||||
return KindFootnoteBackLink |
||||
} |
||||
|
||||
// NewFootnoteBackLink returns a new FootnoteBackLink node.
|
||||
func NewFootnoteBackLink(index int, name []byte) *FootnoteBackLink { |
||||
return &FootnoteBackLink{ |
||||
Index: index, |
||||
Name: name, |
||||
} |
||||
} |
||||
|
||||
// A Footnote struct represents a footnote of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type Footnote struct { |
||||
ast.BaseBlock |
||||
Ref []byte |
||||
Index int |
||||
Name []byte |
||||
} |
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *Footnote) Dump(source []byte, level int) { |
||||
m := map[string]string{} |
||||
m["Index"] = fmt.Sprintf("%v", n.Index) |
||||
m["Ref"] = fmt.Sprintf("%s", n.Ref) |
||||
m["Name"] = fmt.Sprintf("%v", n.Name) |
||||
ast.DumpHelper(n, source, level, m, nil) |
||||
} |
||||
|
||||
// KindFootnote is a NodeKind of the Footnote node.
|
||||
var KindFootnote = ast.NewNodeKind("GiteaFootnote") |
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *Footnote) Kind() ast.NodeKind { |
||||
return KindFootnote |
||||
} |
||||
|
||||
// NewFootnote returns a new Footnote node.
|
||||
func NewFootnote(ref []byte) *Footnote { |
||||
return &Footnote{ |
||||
Ref: ref, |
||||
Index: -1, |
||||
Name: ref, |
||||
} |
||||
} |
||||
|
||||
// A FootnoteList struct represents footnotes of Markdown
|
||||
// (PHP Markdown Extra) text.
|
||||
type FootnoteList struct { |
||||
ast.BaseBlock |
||||
Count int |
||||
} |
||||
|
||||
// Dump implements Node.Dump.
|
||||
func (n *FootnoteList) Dump(source []byte, level int) { |
||||
m := map[string]string{} |
||||
m["Count"] = fmt.Sprintf("%v", n.Count) |
||||
ast.DumpHelper(n, source, level, m, nil) |
||||
} |
||||
|
||||
// KindFootnoteList is a NodeKind of the FootnoteList node.
|
||||
var KindFootnoteList = ast.NewNodeKind("GiteaFootnoteList") |
||||
|
||||
// Kind implements Node.Kind.
|
||||
func (n *FootnoteList) Kind() ast.NodeKind { |
||||
return KindFootnoteList |
||||
} |
||||
|
||||
// NewFootnoteList returns a new FootnoteList node.
|
||||
func NewFootnoteList() *FootnoteList { |
||||
return &FootnoteList{ |
||||
Count: 0, |
||||
} |
||||
} |
||||
|
||||
var footnoteListKey = parser.NewContextKey() |
||||
|
||||
type footnoteBlockParser struct { |
||||
} |
||||
|
||||
var defaultFootnoteBlockParser = &footnoteBlockParser{} |
||||
|
||||
// NewFootnoteBlockParser returns a new parser.BlockParser that can parse
|
||||
// footnotes of the Markdown(PHP Markdown Extra) text.
|
||||
func NewFootnoteBlockParser() parser.BlockParser { |
||||
return defaultFootnoteBlockParser |
||||
} |
||||
|
||||
func (b *footnoteBlockParser) Trigger() []byte { |
||||
return []byte{'['} |
||||
} |
||||
|
||||
func (b *footnoteBlockParser) Open(parent ast.Node, reader text.Reader, pc parser.Context) (ast.Node, parser.State) { |
||||
line, segment := reader.PeekLine() |
||||
pos := pc.BlockOffset() |
||||
if pos < 0 || line[pos] != '[' { |
||||
return nil, parser.NoChildren |
||||
} |
||||
pos++ |
||||
if pos > len(line)-1 || line[pos] != '^' { |
||||
return nil, parser.NoChildren |
||||
} |
||||
open := pos + 1 |
||||
closes := 0 |
||||
closure := util.FindClosure(line[pos+1:], '[', ']', false, false) |
||||
closes = pos + 1 + closure |
||||
next := closes + 1 |
||||
if closure > -1 { |
||||
if next >= len(line) || line[next] != ':' { |
||||
return nil, parser.NoChildren |
||||
} |
||||
} else { |
||||
return nil, parser.NoChildren |
||||
} |
||||
padding := segment.Padding |
||||
label := reader.Value(text.NewSegment(segment.Start+open-padding, segment.Start+closes-padding)) |
||||
if util.IsBlank(label) { |
||||
return nil, parser.NoChildren |
||||
} |
||||
item := NewFootnote(label) |
||||
|
||||
pos = next + 1 - padding |
||||
if pos >= len(line) { |
||||
reader.Advance(pos) |
||||
return item, parser.NoChildren |
||||
} |
||||
reader.AdvanceAndSetPadding(pos, padding) |
||||
return item, parser.HasChildren |
||||
} |
||||
|
||||
func (b *footnoteBlockParser) Continue(node ast.Node, reader text.Reader, pc parser.Context) parser.State { |
||||
line, _ := reader.PeekLine() |
||||
if util.IsBlank(line) { |
||||
return parser.Continue | parser.HasChildren |
||||
} |
||||
childpos, padding := util.IndentPosition(line, reader.LineOffset(), 4) |
||||
if childpos < 0 { |
||||
return parser.Close |
||||
} |
||||
reader.AdvanceAndSetPadding(childpos, padding) |
||||
return parser.Continue | parser.HasChildren |
||||
} |
||||
|
||||
func (b *footnoteBlockParser) Close(node ast.Node, reader text.Reader, pc parser.Context) { |
||||
var list *FootnoteList |
||||
if tlist := pc.Get(footnoteListKey); tlist != nil { |
||||
list = tlist.(*FootnoteList) |
||||
} else { |
||||
list = NewFootnoteList() |
||||
pc.Set(footnoteListKey, list) |
||||
node.Parent().InsertBefore(node.Parent(), node, list) |
||||
} |
||||
node.Parent().RemoveChild(node.Parent(), node) |
||||
list.AppendChild(list, node) |
||||
} |
||||
|
||||
func (b *footnoteBlockParser) CanInterruptParagraph() bool { |
||||
return true |
||||
} |
||||
|
||||
func (b *footnoteBlockParser) CanAcceptIndentedLine() bool { |
||||
return false |
||||
} |
||||
|
||||
type footnoteParser struct { |
||||
} |
||||
|
||||
var defaultFootnoteParser = &footnoteParser{} |
||||
|
||||
// NewFootnoteParser returns a new parser.InlineParser that can parse
|
||||
// footnote links of the Markdown(PHP Markdown Extra) text.
|
||||
func NewFootnoteParser() parser.InlineParser { |
||||
return defaultFootnoteParser |
||||
} |
||||
|
||||
func (s *footnoteParser) Trigger() []byte { |
||||
// footnote syntax probably conflict with the image syntax.
|
||||
// So we need trigger this parser with '!'.
|
||||
return []byte{'!', '['} |
||||
} |
||||
|
||||
func (s *footnoteParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node { |
||||
line, segment := block.PeekLine() |
||||
pos := 1 |
||||
if len(line) > 0 && line[0] == '!' { |
||||
pos++ |
||||
} |
||||
if pos >= len(line) || line[pos] != '^' { |
||||
return nil |
||||
} |
||||
pos++ |
||||
if pos >= len(line) { |
||||
return nil |
||||
} |
||||
open := pos |
||||
closure := util.FindClosure(line[pos:], '[', ']', false, false) |
||||
if closure < 0 { |
||||
return nil |
||||
} |
||||
closes := pos + closure |
||||
value := block.Value(text.NewSegment(segment.Start+open, segment.Start+closes)) |
||||
block.Advance(closes + 1) |
||||
|
||||
var list *FootnoteList |
||||
if tlist := pc.Get(footnoteListKey); tlist != nil { |
||||
list = tlist.(*FootnoteList) |
||||
} |
||||
if list == nil { |
||||
return nil |
||||
} |
||||
index := 0 |
||||
name := []byte{} |
||||
for def := list.FirstChild(); def != nil; def = def.NextSibling() { |
||||
d := def.(*Footnote) |
||||
if bytes.Equal(d.Ref, value) { |
||||
if d.Index < 0 { |
||||
list.Count++ |
||||
d.Index = list.Count |
||||
val := CleanValue(d.Name) |
||||
if len(val) == 0 { |
||||
val = []byte(strconv.Itoa(d.Index)) |
||||
} |
||||
d.Name = pc.IDs().Generate(val, KindFootnote) |
||||
} |
||||
index = d.Index |
||||
name = d.Name |
||||
break |
||||
} |
||||
} |
||||
if index == 0 { |
||||
return nil |
||||
} |
||||
|
||||
return NewFootnoteLink(index, name) |
||||
} |
||||
|
||||
type footnoteASTTransformer struct { |
||||
} |
||||
|
||||
var defaultFootnoteASTTransformer = &footnoteASTTransformer{} |
||||
|
||||
// NewFootnoteASTTransformer returns a new parser.ASTTransformer that
|
||||
// insert a footnote list to the last of the document.
|
||||
func NewFootnoteASTTransformer() parser.ASTTransformer { |
||||
return defaultFootnoteASTTransformer |
||||
} |
||||
|
||||
func (a *footnoteASTTransformer) Transform(node *ast.Document, reader text.Reader, pc parser.Context) { |
||||
var list *FootnoteList |
||||
if tlist := pc.Get(footnoteListKey); tlist != nil { |
||||
list = tlist.(*FootnoteList) |
||||
} else { |
||||
return |
||||
} |
||||
pc.Set(footnoteListKey, nil) |
||||
for footnote := list.FirstChild(); footnote != nil; { |
||||
var container ast.Node = footnote |
||||
next := footnote.NextSibling() |
||||
if fc := container.LastChild(); fc != nil && ast.IsParagraph(fc) { |
||||
container = fc |
||||
} |
||||
footnoteNode := footnote.(*Footnote) |
||||
index := footnoteNode.Index |
||||
name := footnoteNode.Name |
||||
if index < 0 { |
||||
list.RemoveChild(list, footnote) |
||||
} else { |
||||
container.AppendChild(container, NewFootnoteBackLink(index, name)) |
||||
} |
||||
footnote = next |
||||
} |
||||
list.SortChildren(func(n1, n2 ast.Node) int { |
||||
if n1.(*Footnote).Index < n2.(*Footnote).Index { |
||||
return -1 |
||||
} |
||||
return 1 |
||||
}) |
||||
if list.Count <= 0 { |
||||
list.Parent().RemoveChild(list.Parent(), list) |
||||
return |
||||
} |
||||
|
||||
node.AppendChild(node, list) |
||||
} |
||||
|
||||
// FootnoteHTMLRenderer is a renderer.NodeRenderer implementation that
|
||||
// renders FootnoteLink nodes.
|
||||
type FootnoteHTMLRenderer struct { |
||||
html.Config |
||||
} |
||||
|
||||
// NewFootnoteHTMLRenderer returns a new FootnoteHTMLRenderer.
|
||||
func NewFootnoteHTMLRenderer(opts ...html.Option) renderer.NodeRenderer { |
||||
r := &FootnoteHTMLRenderer{ |
||||
Config: html.NewConfig(), |
||||
} |
||||
for _, opt := range opts { |
||||
opt.SetHTMLOption(&r.Config) |
||||
} |
||||
return r |
||||
} |
||||
|
||||
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
|
||||
func (r *FootnoteHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) { |
||||
reg.Register(KindFootnoteLink, r.renderFootnoteLink) |
||||
reg.Register(KindFootnoteBackLink, r.renderFootnoteBackLink) |
||||
reg.Register(KindFootnote, r.renderFootnote) |
||||
reg.Register(KindFootnoteList, r.renderFootnoteList) |
||||
} |
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { |
||||
if entering { |
||||
n := node.(*FootnoteLink) |
||||
n.Dump(source, 0) |
||||
is := strconv.Itoa(n.Index) |
||||
_, _ = w.WriteString(`<sup id="fnref:`) |
||||
_, _ = w.Write(n.Name) |
||||
_, _ = w.WriteString(`"><a href="#fn:`) |
||||
_, _ = w.Write(n.Name) |
||||
_, _ = w.WriteString(`" class="footnote-ref" role="doc-noteref">`) |
||||
_, _ = w.WriteString(is) |
||||
_, _ = w.WriteString(`</a></sup>`) |
||||
} |
||||
return ast.WalkContinue, nil |
||||
} |
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnoteBackLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { |
||||
if entering { |
||||
n := node.(*FootnoteBackLink) |
||||
fmt.Fprintf(os.Stdout, "source:\n%s\n", string(n.Text(source))) |
||||
_, _ = w.WriteString(` <a href="#fnref:`) |
||||
_, _ = w.Write(n.Name) |
||||
_, _ = w.WriteString(`" class="footnote-backref" role="doc-backlink">`) |
||||
_, _ = w.WriteString("↩︎") |
||||
_, _ = w.WriteString(`</a>`) |
||||
} |
||||
return ast.WalkContinue, nil |
||||
} |
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnote(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { |
||||
n := node.(*Footnote) |
||||
if entering { |
||||
fmt.Fprintf(os.Stdout, "source:\n%s\n", string(n.Text(source))) |
||||
_, _ = w.WriteString(`<li id="fn:`) |
||||
_, _ = w.Write(n.Name) |
||||
_, _ = w.WriteString(`" role="doc-endnote"`) |
||||
if node.Attributes() != nil { |
||||
html.RenderAttributes(w, node, html.ListItemAttributeFilter) |
||||
} |
||||
_, _ = w.WriteString(">\n") |
||||
} else { |
||||
_, _ = w.WriteString("</li>\n") |
||||
} |
||||
return ast.WalkContinue, nil |
||||
} |
||||
|
||||
func (r *FootnoteHTMLRenderer) renderFootnoteList(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { |
||||
tag := "div" |
||||
if entering { |
||||
_, _ = w.WriteString("<") |
||||
_, _ = w.WriteString(tag) |
||||
_, _ = w.WriteString(` class="footnotes" role="doc-endnotes"`) |
||||
if node.Attributes() != nil { |
||||
html.RenderAttributes(w, node, html.GlobalAttributeFilter) |
||||
} |
||||
_ = w.WriteByte('>') |
||||
if r.Config.XHTML { |
||||
_, _ = w.WriteString("\n<hr />\n") |
||||
} else { |
||||
_, _ = w.WriteString("\n<hr>\n") |
||||
} |
||||
_, _ = w.WriteString("<ol>\n") |
||||
} else { |
||||
_, _ = w.WriteString("</ol>\n") |
||||
_, _ = w.WriteString("</") |
||||
_, _ = w.WriteString(tag) |
||||
_, _ = w.WriteString(">\n") |
||||
} |
||||
return ast.WalkContinue, nil |
||||
} |
||||
|
||||
type footnoteExtension struct{} |
||||
|
||||
// FootnoteExtension represents the Gitea Footnote
|
||||
var FootnoteExtension = &footnoteExtension{} |
||||
|
||||
// Extend extends the markdown converter with the Gitea Footnote parser
|
||||
func (e *footnoteExtension) Extend(m goldmark.Markdown) { |
||||
m.Parser().AddOptions( |
||||
parser.WithBlockParsers( |
||||
util.Prioritized(NewFootnoteBlockParser(), 999), |
||||
), |
||||
parser.WithInlineParsers( |
||||
util.Prioritized(NewFootnoteParser(), 101), |
||||
), |
||||
parser.WithASTTransformers( |
||||
util.Prioritized(NewFootnoteASTTransformer(), 999), |
||||
), |
||||
) |
||||
m.Renderer().AddOptions(renderer.WithNodeRenderers( |
||||
util.Prioritized(NewFootnoteHTMLRenderer(), 500), |
||||
)) |
||||
} |
@ -0,0 +1,19 @@
|
||||
// Copyright 2019 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package common |
||||
|
||||
import ( |
||||
"mvdan.cc/xurls/v2" |
||||
) |
||||
|
||||
var ( |
||||
// NOTE: All below regex matching do not perform any extra validation.
|
||||
// Thus a link is produced even if the linked entity does not exist.
|
||||
// While fast, this is also incorrect and lead to false positives.
|
||||
// TODO: fix invalid linking issue
|
||||
|
||||
// LinkRegex is a regexp matching a valid link
|
||||
LinkRegex, _ = xurls.StrictMatchingScheme("https?://") |
||||
) |
@ -0,0 +1,156 @@
|
||||
// Copyright 2019 Yusuke Inuzuka
|
||||
// Copyright 2019 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Most of this file is a subtly changed version of github.com/yuin/goldmark/extension/linkify.go
|
||||
|
||||
package common |
||||
|
||||
import ( |
||||
"bytes" |
||||
"regexp" |
||||
|
||||
"github.com/yuin/goldmark" |
||||
"github.com/yuin/goldmark/ast" |
||||
"github.com/yuin/goldmark/parser" |
||||
"github.com/yuin/goldmark/text" |
||||
"github.com/yuin/goldmark/util" |
||||
) |
||||
|
||||
var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}((?:/|[#?])[-a-zA-Z0-9@:%_\+.~#!?&//=\(\);,'">\^{}\[\]` + "`" + `]*)?`) |
||||
|
||||
type linkifyParser struct { |
||||
} |
||||
|
||||
var defaultLinkifyParser = &linkifyParser{} |
||||
|
||||
// NewLinkifyParser return a new InlineParser can parse
|
||||
// text that seems like a URL.
|
||||
func NewLinkifyParser() parser.InlineParser { |
||||
return defaultLinkifyParser |
||||
} |
||||
|
||||
func (s *linkifyParser) Trigger() []byte { |
||||
// ' ' indicates any white spaces and a line head
|
||||
return []byte{' ', '*', '_', '~', '('} |
||||
} |
||||
|
||||
var protoHTTP = []byte("http:") |
||||
var protoHTTPS = []byte("https:") |
||||
var protoFTP = []byte("ftp:") |
||||
var domainWWW = []byte("www.") |
||||
|
||||
func (s *linkifyParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node { |
||||
if pc.IsInLinkLabel() { |
||||
return nil |
||||
} |
||||
line, segment := block.PeekLine() |
||||
consumes := 0 |
||||
start := segment.Start |
||||
c := line[0] |
||||
// advance if current position is not a line head.
|
||||
if c == ' ' || c == '*' || c == '_' || c == '~' || c == '(' { |
||||
consumes++ |
||||
start++ |
||||
line = line[1:] |
||||
} |
||||
|
||||
var m []int |
||||
var protocol []byte |
||||
var typ ast.AutoLinkType = ast.AutoLinkURL |
||||
if bytes.HasPrefix(line, protoHTTP) || bytes.HasPrefix(line, protoHTTPS) || bytes.HasPrefix(line, protoFTP) { |
||||
m = LinkRegex.FindSubmatchIndex(line) |
||||
} |
||||
if m == nil && bytes.HasPrefix(line, domainWWW) { |
||||
m = wwwURLRegxp.FindSubmatchIndex(line) |
||||
protocol = []byte("http") |
||||
} |
||||
if m != nil { |
||||
lastChar := line[m[1]-1] |
||||
if lastChar == '.' { |
||||
m[1]-- |
||||
} else if lastChar == ')' { |
||||
closing := 0 |
||||
for i := m[1] - 1; i >= m[0]; i-- { |
||||
if line[i] == ')' { |
||||
closing++ |
||||
} else if line[i] == '(' { |
||||
closing-- |
||||
} |
||||
} |
||||
if closing > 0 { |
||||
m[1] -= closing |
||||
} |
||||
} else if lastChar == ';' { |
||||
i := m[1] - 2 |
||||
for ; i >= m[0]; i-- { |
||||
if util.IsAlphaNumeric(line[i]) { |
||||
continue |
||||
} |
||||
break |
||||
} |
||||
if i != m[1]-2 { |
||||
if line[i] == '&' { |
||||
m[1] -= m[1] - i |
||||
} |
||||
} |
||||
} |
||||
} |
||||
if m == nil { |
||||
if len(line) > 0 && util.IsPunct(line[0]) { |
||||
return nil |
||||
} |
||||
typ = ast.AutoLinkEmail |
||||
stop := util.FindEmailIndex(line) |
||||
if stop < 0 { |
||||
return nil |
||||
} |
||||
at := bytes.IndexByte(line, '@') |
||||
m = []int{0, stop, at, stop - 1} |
||||
if m == nil || bytes.IndexByte(line[m[2]:m[3]], '.') < 0 { |
||||
return nil |
||||
} |
||||
lastChar := line[m[1]-1] |
||||
if lastChar == '.' { |
||||
m[1]-- |
||||
} |
||||
if m[1] < len(line) { |
||||
nextChar := line[m[1]] |
||||
if nextChar == '-' || nextChar == '_' { |
||||
return nil |
||||
} |
||||
} |
||||
} |
||||
if m == nil { |
||||
return nil |
||||
} |
||||
if consumes != 0 { |
||||
s := segment.WithStop(segment.Start + 1) |
||||
ast.MergeOrAppendTextSegment(parent, s) |
||||
} |
||||
consumes += m[1] |
||||
block.Advance(consumes) |
||||
n := ast.NewTextSegment(text.NewSegment(start, start+m[1])) |
||||
link := ast.NewAutoLink(typ, n) |
||||
link.Protocol = protocol |
||||
return link |
||||
} |
||||
|
||||
func (s *linkifyParser) CloseBlock(parent ast.Node, pc parser.Context) { |
||||
// nothing to do
|
||||
} |
||||
|
||||
type linkify struct { |
||||
} |
||||
|
||||
// Linkify is an extension that allow you to parse text that seems like a URL.
|
||||
var Linkify = &linkify{} |
||||
|
||||
func (e *linkify) Extend(m goldmark.Markdown) { |
||||
m.Parser().AddOptions( |
||||
parser.WithInlineParsers( |
||||
util.Prioritized(NewLinkifyParser(), 999), |
||||
), |
||||
) |
||||
} |
@ -0,0 +1,178 @@
|
||||
// Copyright 2019 The Gitea Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package markdown |
||||
|
||||
import ( |
||||
"bytes" |
||||
"fmt" |
||||
"strings" |
||||
|
||||
"code.gitea.io/gitea/modules/markup" |
||||
"code.gitea.io/gitea/modules/markup/common" |
||||
giteautil "code.gitea.io/gitea/modules/util" |
||||
|
||||
"github.com/yuin/goldmark/ast" |
||||
east "github.com/yuin/goldmark/extension/ast" |
||||
"github.com/yuin/goldmark/parser" |
||||
"github.com/yuin/goldmark/renderer" |
||||
"github.com/yuin/goldmark/renderer/html" |
||||
"github.com/yuin/goldmark/text" |
||||
"github.com/yuin/goldmark/util" |
||||
) |
||||
|
||||
var byteMailto = []byte("mailto:") |
||||
|
||||
// GiteaASTTransformer is a default transformer of the goldmark tree.
// It rewrites relative image and link destinations in the parsed document
// to Gitea URLs (see Transform).
type GiteaASTTransformer struct{}
||||
|
||||
// Transform transforms the given AST tree.
//
// It walks the whole document and rewrites two node kinds:
//
//   - *ast.Image: a non-absolute src is resolved against the URL prefix
//     stored in the parser context (with "/src/" switched to "/media/" so
//     the raw file is served, and "wiki/raw" inserted for wiki pages), and
//     the image is wrapped in a link to itself unless its parent already
//     is a link.
//   - *ast.Link: a relative href (not absolute, not a "#" fragment, not
//     "mailto:") is resolved against the same URL prefix, with "wiki"
//     inserted for wiki pages.
func (g *GiteaASTTransformer) Transform(node *ast.Document, reader text.Reader, pc parser.Context) {
	_ = ast.Walk(node, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
		if !entering {
			return ast.WalkContinue, nil
		}

		switch v := n.(type) {
		case *ast.Image:
			// Images need two things:
			//
			// 1. Their src needs to munged to be a real value
			// 2. If they're not wrapped with a link they need a link wrapper

			// Check if the destination is a real link
			link := v.Destination
			if len(link) > 0 && !markup.IsLink(link) {
				prefix := pc.Get(urlPrefixKey).(string)
				if pc.Get(isWikiKey).(bool) {
					prefix = giteautil.URLJoin(prefix, "wiki", "raw")
				}
				// Serve the raw media file rather than the rendered page.
				prefix = strings.Replace(prefix, "/src/", "/media/", 1)

				lnk := string(link)
				lnk = giteautil.URLJoin(prefix, lnk)
				// Spaces are not valid in URLs; encode them as '+'.
				lnk = strings.Replace(lnk, " ", "+", -1)
				link = []byte(lnk)
			}
			v.Destination = link

			parent := n.Parent()
			// Create a link around image only if parent is not already a link
			if _, ok := parent.(*ast.Link); !ok && parent != nil {
				wrap := ast.NewLink()
				wrap.Destination = link
				wrap.Title = v.Title
				parent.ReplaceChild(parent, n, wrap)
				wrap.AppendChild(wrap, n)
			}
		case *ast.Link:
			// Links need their href to munged to be a real value
			link := v.Destination
			if len(link) > 0 && !markup.IsLink(link) &&
				link[0] != '#' && !bytes.HasPrefix(link, byteMailto) {
				// special case: this is not a link, a hash link or a mailto:, so it's a
				// relative URL
				lnk := string(link)
				if pc.Get(isWikiKey).(bool) {
					lnk = giteautil.URLJoin("wiki", lnk)
				}
				link = []byte(giteautil.URLJoin(pc.Get(urlPrefixKey).(string), lnk))
			}
			v.Destination = link
		}
		return ast.WalkContinue, nil
	})
}
||||
|
||||
// prefixedIDs records every element id handed out so far so that all
// generated ids ("user-content-…") are unique within a single render.
type prefixedIDs struct {
	// values is the set of ids already used (value is always true).
	values map[string]bool
}
||||
|
||||
// Generate generates a new element id.
|
||||
func (p *prefixedIDs) Generate(value []byte, kind ast.NodeKind) []byte { |
||||
dft := []byte("id") |
||||
if kind == ast.KindHeading { |
||||
dft = []byte("heading") |
||||
} |
||||
return p.GenerateWithDefault(value, dft) |
||||
} |
||||
|
||||
// Generate generates a new element id.
|
||||
func (p *prefixedIDs) GenerateWithDefault(value []byte, dft []byte) []byte { |
||||
result := common.CleanValue(value) |
||||
if len(result) == 0 { |
||||
result = dft |
||||
} |
||||
if !bytes.HasPrefix(result, []byte("user-content-")) { |
||||
result = append([]byte("user-content-"), result...) |
||||
} |
||||
if _, ok := p.values[util.BytesToReadOnlyString(result)]; !ok { |
||||
p.values[util.BytesToReadOnlyString(result)] = true |
||||
return result |
||||
} |
||||
for i := 1; ; i++ { |
||||
newResult := fmt.Sprintf("%s-%d", result, i) |
||||
if _, ok := p.values[newResult]; !ok { |
||||
p.values[newResult] = true |
||||
return []byte(newResult) |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Put puts a given element id to the used ids table, so later Generate
// calls will not hand it out again.
func (p *prefixedIDs) Put(value []byte) {
	p.values[util.BytesToReadOnlyString(value)] = true
}
||||
|
||||
func newPrefixedIDs() *prefixedIDs { |
||||
return &prefixedIDs{ |
||||
values: map[string]bool{}, |
||||
} |
||||
} |
||||
|
||||
// NewTaskCheckBoxHTMLRenderer creates a TaskCheckBoxHTMLRenderer to render tasklists
|
||||
// in the gitea form.
|
||||
func NewTaskCheckBoxHTMLRenderer(opts ...html.Option) renderer.NodeRenderer { |
||||
r := &TaskCheckBoxHTMLRenderer{ |
||||
Config: html.NewConfig(), |
||||
} |
||||
for _, opt := range opts { |
||||
opt.SetHTMLOption(&r.Config) |
||||
} |
||||
return r |
||||
} |
||||
|
||||
// TaskCheckBoxHTMLRenderer is a renderer.NodeRenderer implementation that
// renders checkboxes in list items.
// Overrides the default goldmark one to present the gitea format
// (a Fomantic-UI styled, disabled checkbox).
type TaskCheckBoxHTMLRenderer struct {
	html.Config
}
||||
|
||||
// RegisterFuncs implements renderer.NodeRenderer.RegisterFuncs.
// It registers renderTaskCheckBox for goldmark's TaskCheckBox node kind.
func (r *TaskCheckBoxHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
	reg.Register(east.KindTaskCheckBox, r.renderTaskCheckBox)
}
||||
|
||||
func (r *TaskCheckBoxHTMLRenderer) renderTaskCheckBox(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { |
||||
if !entering { |
||||
return ast.WalkContinue, nil |
||||
} |
||||
n := node.(*east.TaskCheckBox) |
||||
|
||||
end := ">" |
||||
if r.XHTML { |
||||
end = " />" |
||||
} |
||||
var err error |
||||
if n.IsChecked { |
||||
_, err = w.WriteString(`<span class="ui fitted disabled checkbox"><input type="checkbox" disabled="disabled"` + end + `<label` + end + `</span>`) |
||||
} else { |
||||
_, err = w.WriteString(`<span class="ui checked fitted disabled checkbox"><input type="checkbox" checked="" disabled="disabled"` + end + `<label` + end + `</span>`) |
||||
} |
||||
if err != nil { |
||||
return ast.WalkStop, err |
||||
} |
||||
return ast.WalkContinue, nil |
||||
} |
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,17 +1,21 @@
|
||||
<!DOCTYPE html> |
||||
<html> |
||||
<head> |
||||
<style> |
||||
.footer { font-size:small; color:#666;} |
||||
</style> |
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> |
||||
<title>{{.Subject}}</title> |
||||
</head> |
||||
|
||||
<body> |
||||
<p>@{{.Doer.Name}} assigned you to the {{if .IsPull}}pull request{{else}}issue{{end}} <a href="{{.Link}}">#{{.Issue.Index}}</a> in repository {{.Repo}}.</p> |
||||
<p> |
||||
--- |
||||
<br> |
||||
<a href="{{.Link}}">View it on {{AppName}}</a>. |
||||
</p> |
||||
|
||||
<div class="footer"> |
||||
<p> |
||||
--- |
||||
<br> |
||||
<a href="{{.Link}}">View it on {{AppName}}</a>. |
||||
</p> |
||||
</div> |
||||
</body> |
||||
</html> |
||||
|
@ -1,16 +1,21 @@
|
||||
<!DOCTYPE html> |
||||
<html> |
||||
<head> |
||||
<style> |
||||
.footer { font-size:small; color:#666;} |
||||
</style> |
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> |
||||
<title>{{.Subject}}</title> |
||||
</head> |
||||
|
||||
<body> |
||||
<p>You have been added as a collaborator of repository: <code>{{.RepoName}}</code></p> |
||||
<p> |
||||
--- |
||||
<br> |
||||
<a href="{{.Link}}">View it on {{AppName}}</a>. |
||||
</p> |
||||
<div class="footer"> |
||||
<p> |
||||
--- |
||||
<br> |
||||
<a href="{{.Link}}">View it on {{AppName}}</a>. |
||||
</p> |
||||
</div> |
||||
</body> |
||||
</html> |
||||
|
@ -1,8 +0,0 @@
|
||||
*.out |
||||
*.swp |
||||
*.8 |
||||
*.6 |
||||
_obj |
||||
_test* |
||||
markdown |
||||
tags |
@ -1,17 +0,0 @@
|
||||
sudo: false |
||||
language: go |
||||
go: |
||||
- "1.10.x" |
||||
- "1.11.x" |
||||
- tip |
||||
matrix: |
||||
fast_finish: true |
||||
allow_failures: |
||||
- go: tip |
||||
install: |
||||
- # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). |
||||
script: |
||||
- go get -t -v ./... |
||||
- diff -u <(echo -n) <(gofmt -d -s .) |
||||
- go tool vet . |
||||
- go test -v ./... |
@ -1,29 +0,0 @@
|
||||
Blackfriday is distributed under the Simplified BSD License: |
||||
|
||||
> Copyright © 2011 Russ Ross |
||||
> All rights reserved. |
||||
> |
||||
> Redistribution and use in source and binary forms, with or without |
||||
> modification, are permitted provided that the following conditions |
||||
> are met: |
||||
> |
||||
> 1. Redistributions of source code must retain the above copyright |
||||
> notice, this list of conditions and the following disclaimer. |
||||
> |
||||
> 2. Redistributions in binary form must reproduce the above |
||||
> copyright notice, this list of conditions and the following |
||||
> disclaimer in the documentation and/or other materials provided with |
||||
> the distribution. |
||||
> |
||||
> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS |
||||
> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE |
||||
> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, |
||||
> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, |
||||
> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
||||
> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER |
||||
> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
||||
> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN |
||||
> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
||||
> POSSIBILITY OF SUCH DAMAGE. |
@ -1,291 +0,0 @@
|
||||
Blackfriday [](https://travis-ci.org/russross/blackfriday) |
||||
=========== |
||||
|
||||
Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It |
||||
is paranoid about its input (so you can safely feed it user-supplied |
||||
data), it is fast, it supports common extensions (tables, smart |
||||
punctuation substitutions, etc.), and it is safe for all utf-8 |
||||
(unicode) input. |
||||
|
||||
HTML output is currently supported, along with Smartypants |
||||
extensions. |
||||
|
||||
It started as a translation from C of [Sundown][3]. |
||||
|
||||
|
||||
Installation |
||||
------------ |
||||
|
||||
Blackfriday is compatible with any modern Go release. With Go 1.7 and git |
||||
installed: |
||||
|
||||
go get gopkg.in/russross/blackfriday.v2 |
||||
|
||||
will download, compile, and install the package into your `$GOPATH` |
||||
directory hierarchy. Alternatively, you can achieve the same if you |
||||
import it into a project: |
||||
|
||||
import "gopkg.in/russross/blackfriday.v2" |
||||
|
||||
and `go get` without parameters. |
||||
|
||||
|
||||
Versions |
||||
-------- |
||||
|
||||
Currently maintained and recommended version of Blackfriday is `v2`. It's being |
||||
developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the |
||||
documentation is available at |
||||
https://godoc.org/gopkg.in/russross/blackfriday.v2. |
||||
|
||||
It is `go get`-able via [gopkg.in][6] at `gopkg.in/russross/blackfriday.v2`, |
||||
but we highly recommend using package management tool like [dep][7] or |
||||
[Glide][8] and make use of semantic versioning. With package management you |
||||
should import `github.com/russross/blackfriday` and specify that you're using |
||||
version 2.0.0. |
||||
|
||||
Version 2 offers a number of improvements over v1: |
||||
|
||||
* Cleaned up API |
||||
* A separate call to [`Parse`][4], which produces an abstract syntax tree for |
||||
the document |
||||
* Latest bug fixes |
||||
* Flexibility to easily add your own rendering extensions |
||||
|
||||
Potential drawbacks: |
||||
|
||||
* Our benchmarks show v2 to be slightly slower than v1. Currently in the |
||||
ballpark of around 15%. |
||||
* API breakage. If you can't afford modifying your code to adhere to the new API |
||||
and don't care too much about the new features, v2 is probably not for you. |
||||
* Several bug fixes are trailing behind and still need to be forward-ported to |
||||
v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for |
||||
tracking. |
||||
|
||||
Usage |
||||
----- |
||||
|
||||
For the most sensible markdown processing, it is as simple as getting your input |
||||
into a byte slice and calling: |
||||
|
||||
```go |
||||
output := blackfriday.Run(input) |
||||
``` |
||||
|
||||
Your input will be parsed and the output rendered with a set of most popular |
||||
extensions enabled. If you want the most basic feature set, corresponding with |
||||
the bare Markdown specification, use: |
||||
|
||||
```go |
||||
output := blackfriday.Run(input, blackfriday.WithNoExtensions()) |
||||
``` |
||||
|
||||
### Sanitize untrusted content |
||||
|
||||
Blackfriday itself does nothing to protect against malicious content. If you are |
||||
dealing with user-supplied markdown, we recommend running Blackfriday's output |
||||
through HTML sanitizer such as [Bluemonday][5]. |
||||
|
||||
Here's an example of simple usage of Blackfriday together with Bluemonday: |
||||
|
||||
```go |
||||
import ( |
||||
"github.com/microcosm-cc/bluemonday" |
||||
"github.com/russross/blackfriday" |
||||
) |
||||
|
||||
// ... |
||||
unsafe := blackfriday.Run(input) |
||||
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) |
||||
``` |
||||
|
||||
### Custom options |
||||
|
||||
If you want to customize the set of options, use `blackfriday.WithExtensions`, |
||||
`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`. |
||||
|
||||
You can also check out `blackfriday-tool` for a more complete example |
||||
of how to use it. Download and install it using: |
||||
|
||||
go get github.com/russross/blackfriday-tool |
||||
|
||||
This is a simple command-line tool that allows you to process a |
||||
markdown file using a standalone program. You can also browse the |
||||
source directly on github if you are just looking for some example |
||||
code: |
||||
|
||||
* <http://github.com/russross/blackfriday-tool> |
||||
|
||||
Note that if you have not already done so, installing |
||||
`blackfriday-tool` will be sufficient to download and install |
||||
blackfriday in addition to the tool itself. The tool binary will be |
||||
installed in `$GOPATH/bin`. This is a statically-linked binary that |
||||
can be copied to wherever you need it without worrying about |
||||
dependencies and library versions. |
||||
|
||||
|
||||
Features |
||||
-------- |
||||
|
||||
All features of Sundown are supported, including: |
||||
|
||||
* **Compatibility**. The Markdown v1.0.3 test suite passes with |
||||
the `--tidy` option. Without `--tidy`, the differences are |
||||
mostly in whitespace and entity escaping, where blackfriday is |
||||
more consistent and cleaner. |
||||
|
||||
* **Common extensions**, including table support, fenced code |
||||
blocks, autolinks, strikethroughs, non-strict emphasis, etc. |
||||
|
||||
* **Safety**. Blackfriday is paranoid when parsing, making it safe |
||||
to feed untrusted user input without fear of bad things |
||||
happening. The test suite stress tests this and there are no |
||||
known inputs that make it crash. If you find one, please let me |
||||
know and send me the input that does it. |
||||
|
||||
NOTE: "safety" in this context means *runtime safety only*. In order to |
||||
protect yourself against JavaScript injection in untrusted content, see |
||||
[this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). |
||||
|
||||
* **Fast processing**. It is fast enough to render on-demand in |
||||
most web applications without having to cache the output. |
||||
|
||||
* **Thread safety**. You can run multiple parsers in different |
||||
goroutines without ill effect. There is no dependence on global |
||||
shared state. |
||||
|
||||
* **Minimal dependencies**. Blackfriday only depends on standard |
||||
library packages in Go. The source code is pretty |
||||
self-contained, so it is easy to add to any project, including |
||||
Google App Engine projects. |
||||
|
||||
* **Standards compliant**. Output successfully validates using the |
||||
W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. |
||||
|
||||
|
||||
Extensions |
||||
---------- |
||||
|
||||
In addition to the standard markdown syntax, this package |
||||
implements the following extensions: |
||||
|
||||
* **Intra-word emphasis suppression**. The `_` character is |
||||
commonly used inside words when discussing code, so having |
||||
markdown interpret it as an emphasis command is usually the |
||||
wrong thing. Blackfriday lets you treat all emphasis markers as |
||||
normal characters when they occur inside a word. |
||||
|
||||
* **Tables**. Tables can be created by drawing them in the input |
||||
using a simple syntax: |
||||
|
||||
``` |
||||
Name | Age |
||||
--------|------ |
||||
Bob | 27 |
||||
Alice | 23 |
||||
``` |
||||
|
||||
* **Fenced code blocks**. In addition to the normal 4-space |
||||
indentation to mark code blocks, you can explicitly mark them |
||||
and supply a language (to make syntax highlighting simple). Just |
||||
mark it like this: |
||||
|
||||
```go |
||||
func getTrue() bool { |
||||
return true |
||||
} |
||||
``` |
||||
|
||||
You can use 3 or more backticks to mark the beginning of the |
||||
block, and the same number to mark the end of the block. |
||||
|
||||
* **Definition lists**. A simple definition list is made of a single-line |
||||
term followed by a colon and the definition for that term. |
||||
|
||||
Cat |
||||
: Fluffy animal everyone likes |
||||
|
||||
Internet |
||||
: Vector of transmission for pictures of cats |
||||
|
||||
Terms must be separated from the previous definition by a blank line. |
||||
|
||||
* **Footnotes**. A marker in the text that will become a superscript number; |
||||
a footnote definition that will be placed in a list of footnotes at the |
||||
end of the document. A footnote looks like this: |
||||
|
||||
This is a footnote.[^1] |
||||
|
||||
[^1]: the footnote text. |
||||
|
||||
* **Autolinking**. Blackfriday can find URLs that have not been |
||||
explicitly marked as links and turn them into links. |
||||
|
||||
* **Strikethrough**. Use two tildes (`~~`) to mark text that |
||||
should be crossed out. |
||||
|
||||
* **Hard line breaks**. With this extension enabled newlines in the input |
||||
translate into line breaks in the output. This extension is off by default. |
||||
|
||||
* **Smart quotes**. Smartypants-style punctuation substitution is |
||||
supported, turning normal double- and single-quote marks into |
||||
curly quotes, etc. |
||||
|
||||
* **LaTeX-style dash parsing** is an additional option, where `--` |
||||
is translated into `–`, and `---` is translated into |
||||
`—`. This differs from most smartypants processors, which |
||||
turn a single hyphen into an ndash and a double hyphen into an |
||||
mdash. |
||||
|
||||
* **Smart fractions**, where anything that looks like a fraction |
||||
is translated into suitable HTML (instead of just a few special |
||||
cases like most smartypant processors). For example, `4/5` |
||||
becomes `<sup>4</sup>⁄<sub>5</sub>`, which renders as |
||||
<sup>4</sup>⁄<sub>5</sub>. |
||||
|
||||
|
||||
Other renderers |
||||
--------------- |
||||
|
||||
Blackfriday is structured to allow alternative rendering engines. Here |
||||
are a few of note: |
||||
|
||||
* [github_flavored_markdown](https://godoc.org/github.com/shurcooL/github_flavored_markdown): |
||||
provides a GitHub Flavored Markdown renderer with fenced code block |
||||
highlighting, clickable heading anchor links. |
||||
|
||||
It's not customizable, and its goal is to produce HTML output |
||||
equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), |
||||
except the rendering is performed locally. |
||||
|
||||
* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, |
||||
but for markdown. |
||||
|
||||
* [LaTeX output](https://github.com/Ambrevar/Blackfriday-LaTeX): |
||||
renders output as LaTeX. |
||||
|
||||
* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer. |
||||
|
||||
|
||||
Todo |
||||
---- |
||||
|
||||
* More unit testing |
||||
* Improve unicode support. It does not understand all unicode |
||||
rules (about what constitutes a letter, a punctuation symbol, |
||||
etc.), so it may fail to detect word boundaries correctly in |
||||
some instances. It is safe on all utf-8 input. |
||||
|
||||
|
||||
License |
||||
------- |
||||
|
||||
[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) |
||||
|
||||
|
||||
[1]: https://daringfireball.net/projects/markdown/ "Markdown" |
||||
[2]: https://golang.org/ "Go Language" |
||||
[3]: https://github.com/vmg/sundown "Sundown" |
||||
[4]: https://godoc.org/gopkg.in/russross/blackfriday.v2#Parse "Parse func" |
||||
[5]: https://github.com/microcosm-cc/bluemonday "Bluemonday" |
||||
[6]: https://labix.org/gopkg.in "gopkg.in" |
File diff suppressed because it is too large
Load Diff
@ -1,18 +0,0 @@
|
||||
// Package blackfriday is a markdown processor.
|
||||
//
|
||||
// It translates plain text with simple formatting rules into an AST, which can
|
||||
// then be further processed to HTML (provided by Blackfriday itself) or other
|
||||
// formats (provided by the community).
|
||||
//
|
||||
// The simplest way to invoke Blackfriday is to call the Run function. It will
|
||||
// take a text input and produce a text output in HTML (or other format).
|
||||
//
|
||||
// A slightly more sophisticated way to use Blackfriday is to create a Markdown
|
||||
// processor and to call Parse, which returns a syntax tree for the input
|
||||
// document. You can leverage Blackfriday's parsing for content extraction from
|
||||
// markdown documents. You can assign a custom renderer and set various options
|
||||
// to the Markdown processor.
|
||||
//
|
||||
// If you're interested in calling Blackfriday from command line, see
|
||||
// https://github.com/russross/blackfriday-tool.
|
||||
package blackfriday |
@ -1,34 +0,0 @@
|
||||
package blackfriday |
||||
|
||||
import ( |
||||
"html" |
||||
"io" |
||||
) |
||||
|
||||
// htmlEscaper maps each byte that must be escaped in HTML text to its
// entity; every other entry is nil (no escaping needed).
//
// FIX: the entity strings had been HTML-unescaped by a rendering step
// (e.g. '&' mapped to "&" instead of "&amp;"), which would make escapeHTML
// a no-op and leave output vulnerable to markup injection. Restored the
// actual entities.
var htmlEscaper = [256][]byte{
	'&': []byte("&amp;"),
	'<': []byte("&lt;"),
	'>': []byte("&gt;"),
	'"': []byte("&quot;"),
}
||||
|
||||
func escapeHTML(w io.Writer, s []byte) { |
||||
var start, end int |
||||
for end < len(s) { |
||||
escSeq := htmlEscaper[s[end]] |
||||
if escSeq != nil { |
||||
w.Write(s[start:end]) |
||||
w.Write(escSeq) |
||||
start = end + 1 |
||||
} |
||||
end++ |
||||
} |
||||
if start < len(s) && end <= len(s) { |
||||
w.Write(s[start:end]) |
||||
} |
||||
} |
||||
|
||||
// escLink writes text to w escaped for HTML, first unescaping any entities
// already present so pre-escaped input is not double-escaped.
func escLink(w io.Writer, text []byte) {
	unesc := html.UnescapeString(string(text))
	escapeHTML(w, []byte(unesc))
}
@ -1 +0,0 @@
|
||||
module github.com/russross/blackfriday/v2 |
@ -1,949 +0,0 @@
|
||||
//
|
||||
// Blackfriday Markdown Processor
|
||||
// Available at http://github.com/russross/blackfriday
|
||||
//
|
||||
// Copyright © 2011 Russ Ross <russ@russross.com>.
|
||||
// Distributed under the Simplified BSD License.
|
||||
// See README.md for details.
|
||||
//
|
||||
|
||||
//
|
||||
//
|
||||
// HTML rendering backend
|
||||
//
|
||||
//
|
||||
|
||||
package blackfriday |
||||
|
||||
import ( |
||||
"bytes" |
||||
"fmt" |
||||
"io" |
||||
"regexp" |
||||
"strings" |
||||
) |
||||
|
||||
// HTMLFlags control optional behavior of HTML renderer.
|
||||
type HTMLFlags int |
||||
|
||||
// HTML renderer configuration options.
|
||||
const ( |
||||
HTMLFlagsNone HTMLFlags = 0 |
||||
SkipHTML HTMLFlags = 1 << iota // Skip preformatted HTML blocks
|
||||
SkipImages // Skip embedded images
|
||||
SkipLinks // Skip all links
|
||||
Safelink // Only link to trusted protocols
|
||||
NofollowLinks // Only link with rel="nofollow"
|
||||
NoreferrerLinks // Only link with rel="noreferrer"
|
||||
NoopenerLinks // Only link with rel="noopener"
|
||||
HrefTargetBlank // Add a blank target
|
||||
CompletePage // Generate a complete HTML page
|
||||
UseXHTML // Generate XHTML output instead of HTML
|
||||
FootnoteReturnLinks // Generate a link at the end of a footnote to return to the source
|
||||
Smartypants // Enable smart punctuation substitutions
|
||||
SmartypantsFractions // Enable smart fractions (with Smartypants)
|
||||
SmartypantsDashes // Enable smart dashes (with Smartypants)
|
||||
SmartypantsLatexDashes // Enable LaTeX-style dashes (with Smartypants)
|
||||
SmartypantsAngledQuotes // Enable angled double quotes (with Smartypants) for double quotes rendering
|
||||
SmartypantsQuotesNBSP // Enable « French guillemets » (with Smartypants)
|
||||
TOC // Generate a table of contents
|
||||
) |
||||
|
||||
var ( |
||||
htmlTagRe = regexp.MustCompile("(?i)^" + htmlTag) |
||||
) |
||||
|
||||
const ( |
||||
htmlTag = "(?:" + openTag + "|" + closeTag + "|" + htmlComment + "|" + |
||||
processingInstruction + "|" + declaration + "|" + cdata + ")" |
||||
closeTag = "</" + tagName + "\\s*[>]" |
||||
openTag = "<" + tagName + attribute + "*" + "\\s*/?>" |
||||
attribute = "(?:" + "\\s+" + attributeName + attributeValueSpec + "?)" |
||||
attributeValue = "(?:" + unquotedValue + "|" + singleQuotedValue + "|" + doubleQuotedValue + ")" |
||||
attributeValueSpec = "(?:" + "\\s*=" + "\\s*" + attributeValue + ")" |
||||
attributeName = "[a-zA-Z_:][a-zA-Z0-9:._-]*" |
||||
cdata = "<!\\[CDATA\\[[\\s\\S]*?\\]\\]>" |
||||
declaration = "<![A-Z]+" + "\\s+[^>]*>" |
||||
doubleQuotedValue = "\"[^\"]*\"" |
||||
htmlComment = "<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->" |
||||
processingInstruction = "[<][?].*?[?][>]" |
||||
singleQuotedValue = "'[^']*'" |
||||
tagName = "[A-Za-z][A-Za-z0-9-]*" |
||||
unquotedValue = "[^\"'=<>`\\x00-\\x20]+" |
||||
) |
||||
|
||||
// HTMLRendererParameters is a collection of supplementary parameters tweaking
// the behavior of various parts of HTML renderer.
type HTMLRendererParameters struct {
	// Prepend this text to each relative URL.
	AbsolutePrefix string
	// Add this text to each footnote anchor, to ensure uniqueness.
	FootnoteAnchorPrefix string
	// Show this text inside the <a> tag for a footnote return link, if the
	// HTML_FOOTNOTE_RETURN_LINKS flag is enabled. If blank, the string
	// <sup>[return]</sup> is used.
	FootnoteReturnLinkContents string
	// If set, add this text to the front of each Heading ID, to ensure
	// uniqueness.
	HeadingIDPrefix string
	// If set, add this text to the back of each Heading ID, to ensure uniqueness.
	HeadingIDSuffix string
	// Increase heading levels: if the offset is 1, <h1> becomes <h2> etc.
	// Negative offset is also valid.
	// Resulting levels are clipped between 1 and 6.
	HeadingLevelOffset int

	Title string // Document title (used if CompletePage is set)
	CSS   string // Optional CSS file URL (used if CompletePage is set)
	Icon  string // Optional icon file URL (used if CompletePage is set)

	Flags HTMLFlags // Flags allow customizing this renderer's behavior
}
||||
|
||||
// HTMLRenderer is a type that implements the Renderer interface for HTML output.
//
// Do not create this directly, instead use the NewHTMLRenderer function.
type HTMLRenderer struct {
	HTMLRendererParameters

	closeTag string // how to end singleton tags: either " />" or ">"

	// Track heading IDs to prevent ID collision in a single generation.
	headingIDs map[string]int

	// NOTE(review): set to 1 after each tag write — appears to record
	// whether anything was emitted recently; confirm against callers.
	lastOutputLen int
	// NOTE(review): presumably suppresses tag output while non-zero
	// (e.g. during TOC generation); confirm against callers.
	disableTags int

	// sr performs Smartypants punctuation substitution.
	sr *SPRenderer
}
||||
|
||||
const ( |
||||
xhtmlClose = " />" |
||||
htmlClose = ">" |
||||
) |
||||
|
||||
// NewHTMLRenderer creates and configures an HTMLRenderer object, which
|
||||
// satisfies the Renderer interface.
|
||||
func NewHTMLRenderer(params HTMLRendererParameters) *HTMLRenderer { |
||||
// configure the rendering engine
|
||||
closeTag := htmlClose |
||||
if params.Flags&UseXHTML != 0 { |
||||
closeTag = xhtmlClose |
||||
} |
||||
|
||||
if params.FootnoteReturnLinkContents == "" { |
||||
params.FootnoteReturnLinkContents = `<sup>[return]</sup>` |
||||
} |
||||
|
||||
return &HTMLRenderer{ |
||||
HTMLRendererParameters: params, |
||||
|
||||
closeTag: closeTag, |
||||
headingIDs: make(map[string]int), |
||||
|
||||
sr: NewSmartypantsRenderer(params.Flags), |
||||
} |
||||
} |
||||
|
||||
func isHTMLTag(tag []byte, tagname string) bool { |
||||
found, _ := findHTMLTagPos(tag, tagname) |
||||
return found |
||||
} |
||||
|
||||
// skipUntilCharIgnoreQuotes looks for char in html starting at start, but
// ignores occurrences inside any kind of quotes ('', "", “) since those
// might be JavaScript. It returns the index of the first unquoted match,
// or start when no such match exists.
func skipUntilCharIgnoreQuotes(html []byte, start int, char byte) int {
	var inSingle, inDouble, inGrave bool
	for i := start; i < len(html); i++ {
		c := html[i]
		switch {
		case c == char && !inSingle && !inDouble && !inGrave:
			return i
		case c == '\'':
			inSingle = !inSingle
		case c == '"':
			inDouble = !inDouble
		case c == '`':
			inGrave = !inGrave
		}
	}
	return start
}
||||
|
||||
// findHTMLTagPos reports whether tag is an HTML tag (opening or closing)
// named tagname, and if so returns the index of the tag's closing '>'.
// On any mismatch it returns (false, -1).
func findHTMLTagPos(tag []byte, tagname string) (bool, int) {
	i := 0
	if i < len(tag) && tag[0] != '<' {
		return false, -1
	}
	i++
	i = skipSpace(tag, i)

	// A leading '/' marks a closing tag; both forms match.
	if i < len(tag) && tag[i] == '/' {
		i++
	}

	i = skipSpace(tag, i)
	j := 0
	// Compare the tag name byte-by-byte, case-insensitively
	// (tagname is expected to be lowercase).
	for ; i < len(tag); i, j = i+1, j+1 {
		if j >= len(tagname) {
			break
		}

		if strings.ToLower(string(tag[i]))[0] != tagname[j] {
			return false, -1
		}
	}

	// Ran off the end before reaching the tag body/terminator.
	if i == len(tag) {
		return false, -1
	}

	// Find the closing '>', skipping quoted attribute values.
	rightAngle := skipUntilCharIgnoreQuotes(tag, i, '>')
	if rightAngle >= i {
		return true, rightAngle
	}

	return false, -1
}
||||
|
||||
func skipSpace(tag []byte, i int) int { |
||||
for i < len(tag) && isspace(tag[i]) { |
||||
i++ |
||||
} |
||||
return i |
||||
} |
||||
|
||||
// isRelativeLink reports whether link is a relative URL: a "#" fragment, an
// absolute path ("/…" but not a protocol-relative "//…"), or an explicit
// current/parent-directory path ("./…", "../…").
func isRelativeLink(link []byte) (yes bool) {
	// FIX: guard against empty destinations (e.g. "[text]()"), which would
	// otherwise panic on the link[0] access below.
	if len(link) == 0 {
		return false
	}

	// a tag begin with '#'
	if link[0] == '#' {
		return true
	}

	// link begin with '/' but not '//', the second maybe a protocol relative link
	if len(link) >= 2 && link[0] == '/' && link[1] != '/' {
		return true
	}

	// only the root '/'
	if len(link) == 1 && link[0] == '/' {
		return true
	}

	// current directory : begin with "./"
	if bytes.HasPrefix(link, []byte("./")) {
		return true
	}

	// parent directory : begin with "../"
	if bytes.HasPrefix(link, []byte("../")) {
		return true
	}

	return false
}
||||
|
||||
// ensureUniqueHeadingID returns id, de-duplicated against every heading id
// this renderer has handed out so far: a second "foo" becomes "foo-1", a
// third "foo-2", and so on. The chosen id is recorded in r.headingIDs so
// later calls see it.
func (r *HTMLRenderer) ensureUniqueHeadingID(id string) string {
	// While the candidate id is already taken, try "<id>-<count+1>".
	for count, found := r.headingIDs[id]; found; count, found = r.headingIDs[id] {
		tmp := fmt.Sprintf("%s-%d", id, count+1)

		if _, tmpFound := r.headingIDs[tmp]; !tmpFound {
			// Free slot: bump the base id's collision counter and use the
			// suffixed form.
			r.headingIDs[id] = count + 1
			id = tmp
		} else {
			// The suffixed form is taken too; extend the base and re-check.
			id = id + "-1"
		}
	}

	// Record a brand-new id with a zero collision counter.
	if _, found := r.headingIDs[id]; !found {
		r.headingIDs[id] = 0
	}

	return id
}
||||
|
||||
func (r *HTMLRenderer) addAbsPrefix(link []byte) []byte { |
||||
if r.AbsolutePrefix != "" && isRelativeLink(link) && link[0] != '.' { |
||||
newDest := r.AbsolutePrefix |
||||
if link[0] != '/' { |
||||
newDest += "/" |
||||
} |
||||
newDest += string(link) |
||||
return []byte(newDest) |
||||
} |
||||
return link |
||||
} |
||||
|
||||
func appendLinkAttrs(attrs []string, flags HTMLFlags, link []byte) []string { |
||||
if isRelativeLink(link) { |
||||
return attrs |
||||
} |
||||
val := []string{} |
||||
if flags&NofollowLinks != 0 { |
||||
val = append(val, "nofollow") |
||||
} |
||||
if flags&NoreferrerLinks != 0 { |
||||
val = append(val, "noreferrer") |
||||
} |
||||
if flags&NoopenerLinks != 0 { |
||||
val = append(val, "noopener") |
||||
} |
||||
if flags&HrefTargetBlank != 0 { |
||||
attrs = append(attrs, "target=\"_blank\"") |
||||
} |
||||
if len(val) == 0 { |
||||
return attrs |
||||
} |
||||
attr := fmt.Sprintf("rel=%q", strings.Join(val, " ")) |
||||
return append(attrs, attr) |
||||
} |
||||
|
||||
// isMailto reports whether link uses the mailto: scheme.
func isMailto(link []byte) bool {
	const scheme = "mailto:"
	return len(link) >= len(scheme) && string(link[:len(scheme)]) == scheme
}
||||
|
||||
func needSkipLink(flags HTMLFlags, dest []byte) bool { |
||||
if flags&SkipLinks != 0 { |
||||
return true |
||||
} |
||||
return flags&Safelink != 0 && !isSafeLink(dest) && !isMailto(dest) |
||||
} |
||||
|
||||
func isSmartypantable(node *Node) bool { |
||||
pt := node.Parent.Type |
||||
return pt != Link && pt != CodeBlock && pt != Code |
||||
} |
||||
|
||||
// appendLanguageAttr adds a class="language-..." attribute derived from
// the first whitespace-delimited word of the code-fence info string.
// An empty info string adds nothing.
func appendLanguageAttr(attrs []string, info []byte) []string {
	if len(info) == 0 {
		return attrs
	}
	lang := info
	if cut := bytes.IndexAny(info, "\t "); cut >= 0 {
		lang = info[:cut]
	}
	return append(attrs, fmt.Sprintf("class=\"language-%s\"", lang))
}
||||
|
||||
func (r *HTMLRenderer) tag(w io.Writer, name []byte, attrs []string) { |
||||
w.Write(name) |
||||
if len(attrs) > 0 { |
||||
w.Write(spaceBytes) |
||||
w.Write([]byte(strings.Join(attrs, " "))) |
||||
} |
||||
w.Write(gtBytes) |
||||
r.lastOutputLen = 1 |
||||
} |
||||
|
||||
func footnoteRef(prefix string, node *Node) []byte { |
||||
urlFrag := prefix + string(slugify(node.Destination)) |
||||
anchor := fmt.Sprintf(`<a href="#fn:%s">%d</a>`, urlFrag, node.NoteID) |
||||
return []byte(fmt.Sprintf(`<sup class="footnote-ref" id="fnref:%s">%s</sup>`, urlFrag, anchor)) |
||||
} |
||||
|
||||
// footnoteItem builds the opening <li> for the footnote whose anchor
// name is prefix+slug.
func footnoteItem(prefix string, slug []byte) []byte {
	var b bytes.Buffer
	b.WriteString(`<li id="fn:`)
	b.WriteString(prefix)
	b.Write(slug)
	b.WriteString(`">`)
	return b.Bytes()
}
||||
|
||||
// footnoteReturnLink builds the back-link appended to a footnote body
// that returns the reader to the in-text reference at prefix+slug. The
// leading space separates it from the footnote text.
func footnoteReturnLink(prefix, returnLink string, slug []byte) []byte {
	return []byte(` <a class="footnote-return" href="#fnref:` +
		prefix + string(slug) + `">` + returnLink + `</a>`)
}
||||
|
||||
func itemOpenCR(node *Node) bool { |
||||
if node.Prev == nil { |
||||
return false |
||||
} |
||||
ld := node.Parent.ListData |
||||
return !ld.Tight && ld.ListFlags&ListTypeDefinition == 0 |
||||
} |
||||
|
||||
func skipParagraphTags(node *Node) bool { |
||||
grandparent := node.Parent.Parent |
||||
if grandparent == nil || grandparent.Type != List { |
||||
return false |
||||
} |
||||
tightOrTerm := grandparent.Tight || node.Parent.ListFlags&ListTypeTerm != 0 |
||||
return grandparent.Type == List && tightOrTerm |
||||
} |
||||
|
||||
func cellAlignment(align CellAlignFlags) string { |
||||
switch align { |
||||
case TableAlignmentLeft: |
||||
return "left" |
||||
case TableAlignmentRight: |
||||
return "right" |
||||
case TableAlignmentCenter: |
||||
return "center" |
||||
default: |
||||
return "" |
||||
} |
||||
} |
||||
|
||||
func (r *HTMLRenderer) out(w io.Writer, text []byte) { |
||||
if r.disableTags > 0 { |
||||
w.Write(htmlTagRe.ReplaceAll(text, []byte{})) |
||||
} else { |
||||
w.Write(text) |
||||
} |
||||
r.lastOutputLen = len(text) |
||||
} |
||||
|
||||
func (r *HTMLRenderer) cr(w io.Writer) { |
||||
if r.lastOutputLen > 0 { |
||||
r.out(w, nlBytes) |
||||
} |
||||
} |
||||
|
||||
// Single-byte fragments used by the low-level writers to avoid a fresh
// allocation per call.
var (
	nlBytes    = []byte{'\n'}
	gtBytes    = []byte{'>'}
	spaceBytes = []byte{' '}
)
||||
|
||||
// Pre-built HTML tag fragments. Tags that may carry attributes (a, td,
// th, h1..h6) are stored without the trailing '>' so tag() can append
// attributes before closing them; XHTML variants exist for the void
// elements br and hr.
var (
	brTag              = []byte("<br>")
	brXHTMLTag         = []byte("<br />")
	emTag              = []byte("<em>")
	emCloseTag         = []byte("</em>")
	strongTag          = []byte("<strong>")
	strongCloseTag     = []byte("</strong>")
	delTag             = []byte("<del>")
	delCloseTag        = []byte("</del>")
	ttTag              = []byte("<tt>")
	ttCloseTag         = []byte("</tt>")
	aTag               = []byte("<a")
	aCloseTag          = []byte("</a>")
	preTag             = []byte("<pre>")
	preCloseTag        = []byte("</pre>")
	codeTag            = []byte("<code>")
	codeCloseTag       = []byte("</code>")
	pTag               = []byte("<p>")
	pCloseTag          = []byte("</p>")
	blockquoteTag      = []byte("<blockquote>")
	blockquoteCloseTag = []byte("</blockquote>")
	hrTag              = []byte("<hr>")
	hrXHTMLTag         = []byte("<hr />")
	ulTag              = []byte("<ul>")
	ulCloseTag         = []byte("</ul>")
	olTag              = []byte("<ol>")
	olCloseTag         = []byte("</ol>")
	dlTag              = []byte("<dl>")
	dlCloseTag         = []byte("</dl>")
	liTag              = []byte("<li>")
	liCloseTag         = []byte("</li>")
	ddTag              = []byte("<dd>")
	ddCloseTag         = []byte("</dd>")
	dtTag              = []byte("<dt>")
	dtCloseTag         = []byte("</dt>")
	tableTag           = []byte("<table>")
	tableCloseTag      = []byte("</table>")
	tdTag              = []byte("<td")
	tdCloseTag         = []byte("</td>")
	thTag              = []byte("<th")
	thCloseTag         = []byte("</th>")
	theadTag           = []byte("<thead>")
	theadCloseTag      = []byte("</thead>")
	tbodyTag           = []byte("<tbody>")
	tbodyCloseTag      = []byte("</tbody>")
	trTag              = []byte("<tr>")
	trCloseTag         = []byte("</tr>")
	h1Tag              = []byte("<h1")
	h1CloseTag         = []byte("</h1>")
	h2Tag              = []byte("<h2")
	h2CloseTag         = []byte("</h2>")
	h3Tag              = []byte("<h3")
	h3CloseTag         = []byte("</h3>")
	h4Tag              = []byte("<h4")
	h4CloseTag         = []byte("</h4>")
	h5Tag              = []byte("<h5")
	h5CloseTag         = []byte("</h5>")
	h6Tag              = []byte("<h6")
	h6CloseTag         = []byte("</h6>")

	// Wrapper markup around the footnotes list at the end of a document.
	footnotesDivBytes      = []byte("\n<div class=\"footnotes\">\n\n")
	footnotesCloseDivBytes = []byte("\n</div>\n")
)
||||
|
||||
func headingTagsFromLevel(level int) ([]byte, []byte) { |
||||
if level <= 1 { |
||||
return h1Tag, h1CloseTag |
||||
} |
||||
switch level { |
||||
case 2: |
||||
return h2Tag, h2CloseTag |
||||
case 3: |
||||
return h3Tag, h3CloseTag |
||||
case 4: |
||||
return h4Tag, h4CloseTag |
||||
case 5: |
||||
return h5Tag, h5CloseTag |
||||
} |
||||
return h6Tag, h6CloseTag |
||||
} |
||||
|
||||
func (r *HTMLRenderer) outHRTag(w io.Writer) { |
||||
if r.Flags&UseXHTML == 0 { |
||||
r.out(w, hrTag) |
||||
} else { |
||||
r.out(w, hrXHTMLTag) |
||||
} |
||||
} |
||||
|
||||
// RenderNode is a default renderer of a single node of a syntax tree. For
// block nodes it will be called twice: first time with entering=true, second
// time with entering=false, so that it could know when it's working on an open
// tag and when on close. It writes the result to w.
//
// The return value is a way to tell the calling walker to adjust its walk
// pattern: e.g. it can terminate the traversal by returning Terminate. Or it
// can ask the walker to skip a subtree of this node by returning SkipChildren.
// The typical behavior is to return GoToNext, which asks for the usual
// traversal to the next node.
func (r *HTMLRenderer) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus {
	attrs := []string{}
	switch node.Type {
	case Text:
		// Escape the literal text; optionally run it through the
		// smartypants processor for smart punctuation.
		if r.Flags&Smartypants != 0 {
			var tmp bytes.Buffer
			escapeHTML(&tmp, node.Literal)
			r.sr.Process(w, tmp.Bytes())
		} else {
			if node.Parent.Type == Link {
				// Text inside a link gets link-style escaping.
				escLink(w, node.Literal)
			} else {
				escapeHTML(w, node.Literal)
			}
		}
	case Softbreak:
		r.cr(w)
		// TODO: make it configurable via out(renderer.softbreak)
	case Hardbreak:
		if r.Flags&UseXHTML == 0 {
			r.out(w, brTag)
		} else {
			r.out(w, brXHTMLTag)
		}
		r.cr(w)
	case Emph:
		if entering {
			r.out(w, emTag)
		} else {
			r.out(w, emCloseTag)
		}
	case Strong:
		if entering {
			r.out(w, strongTag)
		} else {
			r.out(w, strongCloseTag)
		}
	case Del:
		if entering {
			r.out(w, delTag)
		} else {
			r.out(w, delCloseTag)
		}
	case HTMLSpan:
		// Inline HTML is passed through verbatim unless suppressed.
		if r.Flags&SkipHTML != 0 {
			break
		}
		r.out(w, node.Literal)
	case Link:
		// mark it but don't link it if it is not a safe link: no smartypants
		dest := node.LinkData.Destination
		if needSkipLink(r.Flags, dest) {
			// Render skipped links as <tt> text instead of anchors.
			if entering {
				r.out(w, ttTag)
			} else {
				r.out(w, ttCloseTag)
			}
		} else {
			if entering {
				dest = r.addAbsPrefix(dest)
				var hrefBuf bytes.Buffer
				hrefBuf.WriteString("href=\"")
				escLink(&hrefBuf, dest)
				hrefBuf.WriteByte('"')
				attrs = append(attrs, hrefBuf.String())
				if node.NoteID != 0 {
					// Footnote references render as <sup> links instead.
					r.out(w, footnoteRef(r.FootnoteAnchorPrefix, node))
					break
				}
				attrs = appendLinkAttrs(attrs, r.Flags, dest)
				if len(node.LinkData.Title) > 0 {
					var titleBuff bytes.Buffer
					titleBuff.WriteString("title=\"")
					escapeHTML(&titleBuff, node.LinkData.Title)
					titleBuff.WriteByte('"')
					attrs = append(attrs, titleBuff.String())
				}
				r.tag(w, aTag, attrs)
			} else {
				if node.NoteID != 0 {
					break
				}
				r.out(w, aCloseTag)
			}
		}
	case Image:
		if r.Flags&SkipImages != 0 {
			return SkipChildren
		}
		if entering {
			dest := node.LinkData.Destination
			dest = r.addAbsPrefix(dest)
			if r.disableTags == 0 {
				//if options.safe && potentiallyUnsafe(dest) {
				//out(w, `<img src="" alt="`)
				//} else {
				r.out(w, []byte(`<img src="`))
				escLink(w, dest)
				r.out(w, []byte(`" alt="`))
				//}
			}
			// Suppress tag output while walking the children so they
			// contribute only plain text to the alt attribute.
			r.disableTags++
		} else {
			r.disableTags--
			if r.disableTags == 0 {
				if node.LinkData.Title != nil {
					r.out(w, []byte(`" title="`))
					escapeHTML(w, node.LinkData.Title)
				}
				r.out(w, []byte(`" />`))
			}
		}
	case Code:
		r.out(w, codeTag)
		escapeHTML(w, node.Literal)
		r.out(w, codeCloseTag)
	case Document:
		break
	case Paragraph:
		if skipParagraphTags(node) {
			break
		}
		if entering {
			// TODO: untangle the rules about when the newlines need
			// to be added and when not.
			if node.Prev != nil {
				switch node.Prev.Type {
				case HTMLBlock, List, Paragraph, Heading, CodeBlock, BlockQuote, HorizontalRule:
					r.cr(w)
				}
			}
			if node.Parent.Type == BlockQuote && node.Prev == nil {
				r.cr(w)
			}
			r.out(w, pTag)
		} else {
			r.out(w, pCloseTag)
			if !(node.Parent.Type == Item && node.Next == nil) {
				r.cr(w)
			}
		}
	case BlockQuote:
		if entering {
			r.cr(w)
			r.out(w, blockquoteTag)
		} else {
			r.out(w, blockquoteCloseTag)
			r.cr(w)
		}
	case HTMLBlock:
		if r.Flags&SkipHTML != 0 {
			break
		}
		r.cr(w)
		r.out(w, node.Literal)
		r.cr(w)
	case Heading:
		headingLevel := r.HTMLRendererParameters.HeadingLevelOffset + node.Level
		openTag, closeTag := headingTagsFromLevel(headingLevel)
		if entering {
			if node.IsTitleblock {
				attrs = append(attrs, `class="title"`)
			}
			if node.HeadingID != "" {
				// Apply renderer-configured prefix/suffix around the
				// (de-duplicated) heading id.
				id := r.ensureUniqueHeadingID(node.HeadingID)
				if r.HeadingIDPrefix != "" {
					id = r.HeadingIDPrefix + id
				}
				if r.HeadingIDSuffix != "" {
					id = id + r.HeadingIDSuffix
				}
				attrs = append(attrs, fmt.Sprintf(`id="%s"`, id))
			}
			r.cr(w)
			r.tag(w, openTag, attrs)
		} else {
			r.out(w, closeTag)
			if !(node.Parent.Type == Item && node.Next == nil) {
				r.cr(w)
			}
		}
	case HorizontalRule:
		r.cr(w)
		r.outHRTag(w)
		r.cr(w)
	case List:
		// Pick ul/ol/dl according to the list flags.
		openTag := ulTag
		closeTag := ulCloseTag
		if node.ListFlags&ListTypeOrdered != 0 {
			openTag = olTag
			closeTag = olCloseTag
		}
		if node.ListFlags&ListTypeDefinition != 0 {
			openTag = dlTag
			closeTag = dlCloseTag
		}
		if entering {
			if node.IsFootnotesList {
				r.out(w, footnotesDivBytes)
				r.outHRTag(w)
				r.cr(w)
			}
			r.cr(w)
			if node.Parent.Type == Item && node.Parent.Parent.Tight {
				r.cr(w)
			}
			// Strip the trailing '>' so tag() can append attributes.
			r.tag(w, openTag[:len(openTag)-1], attrs)
			r.cr(w)
		} else {
			r.out(w, closeTag)
			//cr(w)
			//if node.parent.Type != Item {
			//	cr(w)
			//}
			if node.Parent.Type == Item && node.Next != nil {
				r.cr(w)
			}
			if node.Parent.Type == Document || node.Parent.Type == BlockQuote {
				r.cr(w)
			}
			if node.IsFootnotesList {
				r.out(w, footnotesCloseDivBytes)
			}
		}
	case Item:
		// Pick li/dd/dt according to the list flags.
		openTag := liTag
		closeTag := liCloseTag
		if node.ListFlags&ListTypeDefinition != 0 {
			openTag = ddTag
			closeTag = ddCloseTag
		}
		if node.ListFlags&ListTypeTerm != 0 {
			openTag = dtTag
			closeTag = dtCloseTag
		}
		if entering {
			if itemOpenCR(node) {
				r.cr(w)
			}
			if node.ListData.RefLink != nil {
				// Footnote list items carry their own id-bearing <li>.
				slug := slugify(node.ListData.RefLink)
				r.out(w, footnoteItem(r.FootnoteAnchorPrefix, slug))
				break
			}
			r.out(w, openTag)
		} else {
			if node.ListData.RefLink != nil {
				slug := slugify(node.ListData.RefLink)
				if r.Flags&FootnoteReturnLinks != 0 {
					r.out(w, footnoteReturnLink(r.FootnoteAnchorPrefix, r.FootnoteReturnLinkContents, slug))
				}
			}
			r.out(w, closeTag)
			r.cr(w)
		}
	case CodeBlock:
		attrs = appendLanguageAttr(attrs, node.Info)
		r.cr(w)
		r.out(w, preTag)
		// Strip the trailing '>' so tag() can append the language class.
		r.tag(w, codeTag[:len(codeTag)-1], attrs)
		escapeHTML(w, node.Literal)
		r.out(w, codeCloseTag)
		r.out(w, preCloseTag)
		if node.Parent.Type != Item {
			r.cr(w)
		}
	case Table:
		if entering {
			r.cr(w)
			r.out(w, tableTag)
		} else {
			r.out(w, tableCloseTag)
			r.cr(w)
		}
	case TableCell:
		openTag := tdTag
		closeTag := tdCloseTag
		if node.IsHeader {
			openTag = thTag
			closeTag = thCloseTag
		}
		if entering {
			align := cellAlignment(node.Align)
			if align != "" {
				attrs = append(attrs, fmt.Sprintf(`align="%s"`, align))
			}
			if node.Prev == nil {
				r.cr(w)
			}
			r.tag(w, openTag, attrs)
		} else {
			r.out(w, closeTag)
			r.cr(w)
		}
	case TableHead:
		if entering {
			r.cr(w)
			r.out(w, theadTag)
		} else {
			r.out(w, theadCloseTag)
			r.cr(w)
		}
	case TableBody:
		if entering {
			r.cr(w)
			r.out(w, tbodyTag)
			// XXX: this is to adhere to a rather silly test. Should fix test.
			if node.FirstChild == nil {
				r.cr(w)
			}
		} else {
			r.out(w, tbodyCloseTag)
			r.cr(w)
		}
	case TableRow:
		if entering {
			r.cr(w)
			r.out(w, trTag)
		} else {
			r.out(w, trCloseTag)
			r.cr(w)
		}
	default:
		panic("Unknown node type " + node.Type.String())
	}
	return GoToNext
}
||||
|
||||
// RenderHeader writes HTML document preamble and TOC if requested.
|
||||
func (r *HTMLRenderer) RenderHeader(w io.Writer, ast *Node) { |
||||
r.writeDocumentHeader(w) |
||||
if r.Flags&TOC != 0 { |
||||
r.writeTOC(w, ast) |
||||
} |
||||
} |
||||
|
||||
// RenderFooter writes HTML document footer.
|
||||
func (r *HTMLRenderer) RenderFooter(w io.Writer, ast *Node) { |
||||
if r.Flags&CompletePage == 0 { |
||||
return |
||||
} |
||||
io.WriteString(w, "\n</body>\n</html>\n") |
||||
} |
||||
|
||||
// writeDocumentHeader emits the doctype, <html>, and <head> preamble
// for a complete page (title, generator meta, charset, and optional CSS
// and icon links). It is a no-op unless the CompletePage flag is set.
func (r *HTMLRenderer) writeDocumentHeader(w io.Writer) {
	if r.Flags&CompletePage == 0 {
		return
	}
	// In XHTML mode, void elements get the self-closing " /" ending.
	ending := ""
	if r.Flags&UseXHTML != 0 {
		io.WriteString(w, "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" ")
		io.WriteString(w, "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n")
		io.WriteString(w, "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n")
		ending = " /"
	} else {
		io.WriteString(w, "<!DOCTYPE html>\n")
		io.WriteString(w, "<html>\n")
	}
	io.WriteString(w, "<head>\n")
	io.WriteString(w, "  <title>")
	// The title gets the same escaping/smartypants treatment as body text.
	if r.Flags&Smartypants != 0 {
		r.sr.Process(w, []byte(r.Title))
	} else {
		escapeHTML(w, []byte(r.Title))
	}
	io.WriteString(w, "</title>\n")
	io.WriteString(w, "  <meta name=\"GENERATOR\" content=\"Blackfriday Markdown Processor v")
	io.WriteString(w, Version)
	io.WriteString(w, "\"")
	io.WriteString(w, ending)
	io.WriteString(w, ">\n")
	io.WriteString(w, "  <meta charset=\"utf-8\"")
	io.WriteString(w, ending)
	io.WriteString(w, ">\n")
	if r.CSS != "" {
		io.WriteString(w, "  <link rel=\"stylesheet\" type=\"text/css\" href=\"")
		escapeHTML(w, []byte(r.CSS))
		io.WriteString(w, "\"")
		io.WriteString(w, ending)
		io.WriteString(w, ">\n")
	}
	if r.Icon != "" {
		io.WriteString(w, "  <link rel=\"icon\" type=\"image/x-icon\" href=\"")
		escapeHTML(w, []byte(r.Icon))
		io.WriteString(w, "\"")
		io.WriteString(w, ending)
		io.WriteString(w, ">\n")
	}
	io.WriteString(w, "</head>\n")
	io.WriteString(w, "<body>\n\n")
}
||||
|
||||
// writeTOC renders a nested <ul> table of contents from the headings in
// ast, assigning each heading an id of the form "toc_N" (which mutates
// the tree) and linking to it. The result is wrapped in <nav> and
// written to w; nothing is written if the document has no headings.
func (r *HTMLRenderer) writeTOC(w io.Writer, ast *Node) {
	buf := bytes.Buffer{}

	inHeading := false
	// tocLevel tracks the nesting depth of currently-open <ul> lists.
	tocLevel := 0
	headingCount := 0

	ast.Walk(func(node *Node, entering bool) WalkStatus {
		if node.Type == Heading && !node.HeadingData.IsTitleblock {
			inHeading = entering
			if entering {
				node.HeadingID = fmt.Sprintf("toc_%d", headingCount)
				if node.Level == tocLevel {
					// Sibling heading: close the previous item, open a new one.
					buf.WriteString("</li>\n\n<li>")
				} else if node.Level < tocLevel {
					// Shallower heading: unwind nested lists first.
					for node.Level < tocLevel {
						tocLevel--
						buf.WriteString("</li>\n</ul>")
					}
					buf.WriteString("</li>\n\n<li>")
				} else {
					// Deeper heading: open nested lists down to its level.
					for node.Level > tocLevel {
						tocLevel++
						buf.WriteString("\n<ul>\n<li>")
					}
				}

				fmt.Fprintf(&buf, `<a href="#toc_%d">`, headingCount)
				headingCount++
			} else {
				buf.WriteString("</a>")
			}
			return GoToNext
		}

		// Render heading contents (the link text) through the normal
		// node renderer; skip everything outside headings.
		if inHeading {
			return r.RenderNode(&buf, node, entering)
		}

		return GoToNext
	})

	// Close any lists still open when the walk ends.
	for ; tocLevel > 0; tocLevel-- {
		buf.WriteString("</li>\n</ul>")
	}

	if buf.Len() > 0 {
		io.WriteString(w, "<nav>\n")
		w.Write(buf.Bytes())
		io.WriteString(w, "\n\n</nav>\n")
	}
	r.lastOutputLen = buf.Len()
}
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue