Mirror of https://codeberg.org/forgejo/forgejo.git, synced 2024-11-25 13:52:43 +01:00
Fix data URI scramble (#16098)
* Removed unused method.
* No prefix for data uris.
* Added test to prevent regressions.
parent 0909695204
commit 21cde5c439
4 changed files with 23 additions and 19 deletions
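The "no prefix for data uris" part of the fix comes down to a single guard on the img src value: only relative sources get the rendering prefix, while absolute links and data: URIs pass through untouched. Below is a minimal standalone sketch of that idea; resolveImgSrc is a hypothetical helper and the example values are made up, and the real code in the first hunk uses isLinkStr and util.URLJoin rather than plain string handling.

package main

import (
	"fmt"
	"strings"
)

// resolveImgSrc sketches the guard added by this commit: only relative image
// sources get the render prefix; absolute URLs and data: URIs are returned
// untouched. (Hypothetical helper for illustration, not Gitea's actual code.)
func resolveImgSrc(prefix, src string) string {
	if src == "" ||
		strings.HasPrefix(src, "http://") ||
		strings.HasPrefix(src, "https://") ||
		strings.HasPrefix(src, "data:image/") {
		return src
	}
	// Naive join, purely for illustration; the real code rewrites "/src/" to
	// "/media/" in the prefix and then joins with util.URLJoin.
	return strings.TrimRight(prefix, "/") + "/" + strings.TrimLeft(src, "/")
}

func main() {
	prefix := "https://example.com/user/repo"
	fmt.Println(resolveImgSrc(prefix, "docs/logo.png"))              // prefixed
	fmt.Println(resolveImgSrc(prefix, "data:image/png;base64,i//V")) // passed through unchanged
}

Before this guard, a data URI was pushed through the same URL-join path as any other relative source, which is what scrambled the embedded payload; the new TestIssue16020 regression test in the second hunk now locks in the pass-through behaviour.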
@@ -364,24 +364,19 @@ func visitNode(ctx *RenderContext, procs []processor, node *html.Node, visitText
 		}
 	case html.ElementNode:
 		if node.Data == "img" {
-			attrs := node.Attr
-			for idx, attr := range attrs {
+			for _, attr := range node.Attr {
 				if attr.Key != "src" {
 					continue
 				}
-				link := []byte(attr.Val)
-				if len(link) > 0 && !IsLink(link) {
+				if len(attr.Val) > 0 && !isLinkStr(attr.Val) && !strings.HasPrefix(attr.Val, "data:image/") {
 					prefix := ctx.URLPrefix
 					if ctx.IsWiki {
 						prefix = util.URLJoin(prefix, "wiki", "raw")
 					}
 					prefix = strings.Replace(prefix, "/src/", "/media/", 1)
 
-					lnk := string(link)
-					lnk = util.URLJoin(prefix, lnk)
-					link = []byte(lnk)
+					attr.Val = util.URLJoin(prefix, attr.Val)
 				}
-				node.Attr[idx].Val = string(link)
 			}
 		} else if node.Data == "a" {
 			visitText = false
@@ -444,3 +444,23 @@ func Test_ParseClusterFuzz(t *testing.T) {
 	assert.NoError(t, err)
 	assert.NotContains(t, res.String(), "<html")
 }
+
+func TestIssue16020(t *testing.T) {
+	setting.AppURL = AppURL
+	setting.AppSubURL = AppSubURL
+
+	var localMetas = map[string]string{
+		"user": "go-gitea",
+		"repo": "gitea",
+	}
+
+	data := `<img src="data:image/png;base64,i//V"/>`
+
+	var res strings.Builder
+	err := PostProcess(&RenderContext{
+		URLPrefix: "https://example.com",
+		Metas:     localMetas,
+	}, strings.NewReader(data), &res)
+	assert.NoError(t, err)
+	assert.Equal(t, data, res.String())
+}
@@ -131,13 +131,3 @@ func SanitizeReader(r io.Reader) *bytes.Buffer {
 	NewSanitizer()
 	return sanitizer.policy.SanitizeReader(r)
 }
-
-// SanitizeBytes takes a []byte slice that contains a HTML fragment or document and applies policy whitelist.
-func SanitizeBytes(b []byte) []byte {
-	if len(b) == 0 {
-		// nothing to sanitize
-		return b
-	}
-	NewSanitizer()
-	return sanitizer.policy.SanitizeBytes(b)
-}
@@ -49,7 +49,6 @@ func Test_Sanitizer(t *testing.T) {
 
 	for i := 0; i < len(testCases); i += 2 {
 		assert.Equal(t, testCases[i+1], Sanitize(testCases[i]))
-		assert.Equal(t, testCases[i+1], string(SanitizeBytes([]byte(testCases[i]))))
 	}
 }
 