Mirror of https://github.com/go-gitea/gitea.git (synced 2025-07-03 00:01:21 -04:00)

Compare commits: 18 commits (99d6934f39 ... 3c86ce7d8a)

3c86ce7d8a
97fc87af89
6fe5c4c4d9
58bc9cb107
08bf2d64fd
fd19585760
7a13e5a58a
6326db333f
dd1fd89185
1d4ad5aa2b
35f0b5a3ec
90f96c301e
6596b92140
f3364ec57f
8dbf13b1cb
a94e472788
09bb19ad01
176962c03e
@@ -91,6 +91,7 @@ module.exports = {
    plugins: ['@vitest/eslint-plugin'],
    globals: vitestPlugin.environments.env.globals,
    rules: {
      'github/unescaped-html-literal': [0],
      '@vitest/consistent-test-filename': [0],
      '@vitest/consistent-test-it': [0],
      '@vitest/expect-expect': [0],
@@ -423,7 +424,7 @@ module.exports = {
    'github/no-useless-passive': [2],
    'github/prefer-observers': [2],
    'github/require-passive-events': [2],
    'github/unescaped-html-literal': [0],
    'github/unescaped-html-literal': [2],
    'grouped-accessor-pairs': [2],
    'guard-for-in': [0],
    'id-blacklist': [0],
@@ -6,17 +6,17 @@ package fileicon

import "code.gitea.io/gitea/modules/git"

type EntryInfo struct {
	FullName      string
	BaseName      string
	EntryMode     git.EntryMode
	SymlinkToMode git.EntryMode
	IsOpen        bool
}

func EntryInfoFromGitTreeEntry(gitEntry *git.TreeEntry) *EntryInfo {
	ret := &EntryInfo{FullName: gitEntry.Name(), EntryMode: gitEntry.Mode()}
func EntryInfoFromGitTreeEntry(commit *git.Commit, fullPath string, gitEntry *git.TreeEntry) *EntryInfo {
	ret := &EntryInfo{BaseName: gitEntry.Name(), EntryMode: gitEntry.Mode()}
	if gitEntry.IsLink() {
		if te, err := gitEntry.FollowLink(); err == nil && te.IsDir() {
			ret.SymlinkToMode = te.Mode()
		if res, err := git.EntryFollowLink(commit, fullPath, gitEntry); err == nil && res.TargetEntry.IsDir() {
			ret.SymlinkToMode = res.TargetEntry.Mode()
		}
	}
	return ret
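EntryInfoFromGitTreeEntry now takes the commit and the entry's full path so that symlinked directories can be resolved repo-wide through the new git.EntryFollowLink helper. A minimal caller sketch, assuming the standard module import paths and a caller-side treePath variable for the directory the entry lives in:

package example

import (
	"path"

	"code.gitea.io/gitea/modules/fileicon" // assumed import path for the fileicon package shown above
	"code.gitea.io/gitea/modules/git"
)

// iconNameFor resolves the material icon name for one tree entry; commit and the
// entry's full path are required so symlinks to directories can be followed.
func iconNameFor(commit *git.Commit, treePath string, entry *git.TreeEntry) string {
	info := fileicon.EntryInfoFromGitTreeEntry(commit, path.Join(treePath, entry.Name()), entry)
	return fileicon.DefaultMaterialIconProvider().FindIconName(info)
}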
@@ -5,7 +5,6 @@ package fileicon

import (
	"html/template"
	"path"
	"strings"
	"sync"

@@ -134,7 +133,7 @@ func (m *MaterialIconProvider) FindIconName(entry *EntryInfo) string {
		return "folder-git"
	}

	fileNameLower := strings.ToLower(path.Base(entry.FullName))
	fileNameLower := strings.ToLower(entry.BaseName)
	if entry.EntryMode.IsDir() {
		if s, ok := m.rules.FolderNames[fileNameLower]; ok {
			return s
@@ -20,8 +20,8 @@ func TestMain(m *testing.M) {
func TestFindIconName(t *testing.T) {
	unittest.PrepareTestEnv(t)
	p := fileicon.DefaultMaterialIconProvider()
	assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.php", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.PHP", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "javascript", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.js", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "visualstudio", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.vba", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.php", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.PHP", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "javascript", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.js", EntryMode: git.EntryModeBlob}))
	assert.Equal(t, "visualstudio", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.vba", EntryMode: git.EntryModeBlob}))
}
@@ -22,17 +22,22 @@ func (b *Blob) Name() string {
	return b.name
}

// GetBlobContent Gets the limited content of the blob as raw text
func (b *Blob) GetBlobContent(limit int64) (string, error) {
// GetBlobBytes Gets the limited content of the blob
func (b *Blob) GetBlobBytes(limit int64) ([]byte, error) {
	if limit <= 0 {
		return "", nil
		return nil, nil
	}
	dataRc, err := b.DataAsync()
	if err != nil {
		return "", err
		return nil, err
	}
	defer dataRc.Close()
	buf, err := util.ReadWithLimit(dataRc, int(limit))
	return util.ReadWithLimit(dataRc, int(limit))
}

// GetBlobContent Gets the limited content of the blob as raw text
func (b *Blob) GetBlobContent(limit int64) (string, error) {
	buf, err := b.GetBlobBytes(limit)
	return string(buf), err
}

@@ -99,11 +104,9 @@ loop:

// GuessContentType guesses the content type of the blob.
func (b *Blob) GuessContentType() (typesniffer.SniffedType, error) {
	r, err := b.DataAsync()
	buf, err := b.GetBlobBytes(typesniffer.SniffContentSize)
	if err != nil {
		return typesniffer.SniffedType{}, err
	}
	defer r.Close()

	return typesniffer.DetectContentTypeFromReader(r)
	return typesniffer.DetectContentType(buf), nil
}
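Both GetBlobContent and GuessContentType are now routed through the new GetBlobBytes helper, so callers get a bounded read without touching DataAsync themselves. A sketch of a hypothetical caller (not part of the change) that previews a blob using only the APIs shown above:

package example

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/typesniffer"
)

// previewBlob prints a short, type-aware preview without reading the whole object.
func previewBlob(blob *git.Blob) error {
	st, err := blob.GuessContentType() // reads at most typesniffer.SniffContentSize bytes internally
	if err != nil {
		return err
	}
	if !st.IsText() {
		fmt.Printf("%s: binary (%d bytes)\n", blob.Name(), blob.Size())
		return nil
	}
	// GetBlobContent delegates to GetBlobBytes, so this read is capped as well.
	preview, err := blob.GetBlobContent(typesniffer.SniffContentSize)
	if err != nil {
		return err
	}
	fmt.Printf("%s: %q\n", blob.Name(), preview)
	return nil
}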
@@ -20,7 +20,8 @@ import (

// Commit represents a git commit.
type Commit struct {
	Tree
	Tree // FIXME: bad design, this field can be nil if the commit is from "last commit cache"

	ID        ObjectID // The ID of this commit object
	Author    *Signature
	Committer *Signature
@@ -32,22 +32,6 @@ func (err ErrNotExist) Unwrap() error {
	return util.ErrNotExist
}

// ErrSymlinkUnresolved entry.FollowLink error
type ErrSymlinkUnresolved struct {
	Name    string
	Message string
}

func (err ErrSymlinkUnresolved) Error() string {
	return fmt.Sprintf("%s: %s", err.Name, err.Message)
}

// IsErrSymlinkUnresolved if some error is ErrSymlinkUnresolved
func IsErrSymlinkUnresolved(err error) bool {
	_, ok := err.(ErrSymlinkUnresolved)
	return ok
}

// ErrBranchNotExist represents a "BranchNotExist" kind of error.
type ErrBranchNotExist struct {
	Name string
@@ -11,7 +11,7 @@ import (
)

// GetTreeEntryByPath get the tree entries according the sub dir
func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) {
func (t *Tree) GetTreeEntryByPath(relpath string) (_ *TreeEntry, err error) {
	if len(relpath) == 0 {
		return &TreeEntry{
			ptree: t,
@@ -21,27 +21,25 @@ func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) {
		}, nil
	}

	// FIXME: This should probably use git cat-file --batch to be a bit more efficient
	relpath = path.Clean(relpath)
	parts := strings.Split(relpath, "/")
	var err error

	tree := t
	for i, name := range parts {
		if i == len(parts)-1 {
			entries, err := tree.ListEntries()
			if err != nil {
				return nil, err
			}
			for _, v := range entries {
				if v.Name() == name {
					return v, nil
				}
			}
		} else {
			tree, err = tree.SubTree(name)
			if err != nil {
				return nil, err
			}
	for _, name := range parts[:len(parts)-1] {
		tree, err = tree.SubTree(name)
		if err != nil {
			return nil, err
		}
	}

	name := parts[len(parts)-1]
	entries, err := tree.ListEntries()
	if err != nil {
		return nil, err
	}
	for _, v := range entries {
		if v.Name() == name {
			return v, nil
		}
	}
	return nil, ErrNotExist{"", relpath}
@@ -5,7 +5,7 @@
package git

import (
	"io"
	"path"
	"sort"
	"strings"

@@ -24,77 +24,57 @@ func (te *TreeEntry) Type() string {
	}
}

// FollowLink returns the entry pointed to by a symlink
func (te *TreeEntry) FollowLink() (*TreeEntry, error) {
	if !te.IsLink() {
		return nil, ErrSymlinkUnresolved{te.Name(), "not a symlink"}
	}

	// read the link
	r, err := te.Blob().DataAsync()
	if err != nil {
		return nil, err
	}
	closed := false
	defer func() {
		if !closed {
			_ = r.Close()
		}
	}()
	buf := make([]byte, te.Size())
	_, err = io.ReadFull(r, buf)
	if err != nil {
		return nil, err
	}
	_ = r.Close()
	closed = true

	lnk := string(buf)
	t := te.ptree

	// traverse up directories
	for ; t != nil && strings.HasPrefix(lnk, "../"); lnk = lnk[3:] {
		t = t.ptree
	}

	if t == nil {
		return nil, ErrSymlinkUnresolved{te.Name(), "points outside of repo"}
	}

	target, err := t.GetTreeEntryByPath(lnk)
	if err != nil {
		if IsErrNotExist(err) {
			return nil, ErrSymlinkUnresolved{te.Name(), "broken link"}
		}
		return nil, err
	}
	return target, nil
type EntryFollowResult struct {
	SymlinkContent string
	TargetFullPath string
	TargetEntry    *TreeEntry
}

// FollowLinks returns the entry ultimately pointed to by a symlink
func (te *TreeEntry) FollowLinks(optLimit ...int) (*TreeEntry, error) {
func EntryFollowLink(commit *Commit, fullPath string, te *TreeEntry) (*EntryFollowResult, error) {
	if !te.IsLink() {
		return nil, ErrSymlinkUnresolved{te.Name(), "not a symlink"}
		return nil, util.ErrorWrap(util.ErrUnprocessableContent, "%q is not a symlink", fullPath)
	}

	// git's filename max length is 4096, hopefully a link won't be longer than multiple of that
	const maxSymlinkSize = 20 * 4096
	if te.Blob().Size() > maxSymlinkSize {
		return nil, util.ErrorWrap(util.ErrUnprocessableContent, "%q content exceeds symlink limit", fullPath)
	}

	link, err := te.Blob().GetBlobContent(maxSymlinkSize)
	if err != nil {
		return nil, err
	}
	if strings.HasPrefix(link, "/") {
		// It's said that absolute path will be stored as is in Git
		return &EntryFollowResult{SymlinkContent: link}, util.ErrorWrap(util.ErrUnprocessableContent, "%q is an absolute symlink", fullPath)
	}

	targetFullPath := path.Join(path.Dir(fullPath), link)
	targetEntry, err := commit.GetTreeEntryByPath(targetFullPath)
	if err != nil {
		return &EntryFollowResult{SymlinkContent: link}, err
	}
	return &EntryFollowResult{SymlinkContent: link, TargetFullPath: targetFullPath, TargetEntry: targetEntry}, nil
}

func EntryFollowLinks(commit *Commit, firstFullPath string, firstTreeEntry *TreeEntry, optLimit ...int) (res *EntryFollowResult, err error) {
	limit := util.OptionalArg(optLimit, 10)
	entry := te
	treeEntry, fullPath := firstTreeEntry, firstFullPath
	for range limit {
		if !entry.IsLink() {
		res, err = EntryFollowLink(commit, fullPath, treeEntry)
		if err != nil {
			return res, err
		}
		treeEntry, fullPath = res.TargetEntry, res.TargetFullPath
		if !treeEntry.IsLink() {
			break
		}
		next, err := entry.FollowLink()
		if err != nil {
			return nil, err
		}
		if next.ID == entry.ID {
			return nil, ErrSymlinkUnresolved{entry.Name(), "recursive link"}
		}
		entry = next
	}
	if entry.IsLink() {
		return nil, ErrSymlinkUnresolved{te.Name(), "too many levels of symbolic links"}
	if treeEntry.IsLink() {
		return res, util.ErrorWrap(util.ErrUnprocessableContent, "%q has too many links", firstFullPath)
	}
	return entry, nil
	return res, nil
}

// returns the Tree pointed to by this TreeEntry, or nil if this is not a tree
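The TreeEntry.FollowLink/FollowLinks methods are replaced by the free functions EntryFollowLink and EntryFollowLinks, which resolve paths against the commit root instead of walking parent trees. A hedged caller sketch, built only from the signatures shown above (the function itself is hypothetical):

package example

import (
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

// resolveSymlink follows a symlink chain (the default limit is 10 hops) starting
// from fullPath in the given commit and reports where it ends up.
func resolveSymlink(commit *git.Commit, fullPath string) error {
	entry, err := commit.GetTreeEntryByPath(fullPath)
	if err != nil {
		return err
	}
	if !entry.IsLink() {
		return fmt.Errorf("%s is not a symlink", fullPath)
	}
	res, err := git.EntryFollowLinks(commit, fullPath, entry)
	if err != nil {
		// broken links surface util.ErrNotExist; absolute, oversized, or cyclic
		// links surface util.ErrUnprocessableContent (see the tests below)
		return err
	}
	fmt.Printf("%s -> %s (%s)\n", fullPath, res.TargetFullPath, res.TargetEntry.ID.String())
	return nil
}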
modules/git/tree_entry_common_test.go (new file, 76 lines)
@@ -0,0 +1,76 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package git

import (
	"testing"

	"code.gitea.io/gitea/modules/util"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestFollowLink(t *testing.T) {
	r, err := openRepositoryWithDefaultContext("tests/repos/repo1_bare")
	require.NoError(t, err)
	defer r.Close()

	commit, err := r.GetCommit("37991dec2c8e592043f47155ce4808d4580f9123")
	require.NoError(t, err)

	// get the symlink
	{
		lnkFullPath := "foo/bar/link_to_hello"
		lnk, err := commit.Tree.GetTreeEntryByPath("foo/bar/link_to_hello")
		require.NoError(t, err)
		assert.True(t, lnk.IsLink())

		// should be able to dereference to target
		res, err := EntryFollowLink(commit, lnkFullPath, lnk)
		require.NoError(t, err)
		assert.Equal(t, "hello", res.TargetEntry.Name())
		assert.Equal(t, "foo/nar/hello", res.TargetFullPath)
		assert.False(t, res.TargetEntry.IsLink())
		assert.Equal(t, "b14df6442ea5a1b382985a6549b85d435376c351", res.TargetEntry.ID.String())
	}

	{
		// should error when called on a normal file
		entry, err := commit.Tree.GetTreeEntryByPath("file1.txt")
		require.NoError(t, err)
		res, err := EntryFollowLink(commit, "file1.txt", entry)
		assert.ErrorIs(t, err, util.ErrUnprocessableContent)
		assert.Nil(t, res)
	}

	{
		// should error for broken links
		entry, err := commit.Tree.GetTreeEntryByPath("foo/broken_link")
		require.NoError(t, err)
		assert.True(t, entry.IsLink())
		res, err := EntryFollowLink(commit, "foo/broken_link", entry)
		assert.ErrorIs(t, err, util.ErrNotExist)
		assert.Equal(t, "nar/broken_link", res.SymlinkContent)
	}

	{
		// should error for external links
		entry, err := commit.Tree.GetTreeEntryByPath("foo/outside_repo")
		require.NoError(t, err)
		assert.True(t, entry.IsLink())
		res, err := EntryFollowLink(commit, "foo/outside_repo", entry)
		assert.ErrorIs(t, err, util.ErrNotExist)
		assert.Equal(t, "../../outside_repo", res.SymlinkContent)
	}

	{
		// testing fix for short link bug
		entry, err := commit.Tree.GetTreeEntryByPath("foo/link_short")
		require.NoError(t, err)
		res, err := EntryFollowLink(commit, "foo/link_short", entry)
		assert.ErrorIs(t, err, util.ErrNotExist)
		assert.Equal(t, "a", res.SymlinkContent)
	}
}
@@ -19,16 +19,12 @@ type TreeEntry struct {
	gogitTreeEntry *object.TreeEntry
	ptree          *Tree

	size     int64
	sized    bool
	fullName string
	size  int64
	sized bool
}

// Name returns the name of the entry
func (te *TreeEntry) Name() string {
	if te.fullName != "" {
		return te.fullName
	}
	return te.gogitTreeEntry.Name
}

@@ -55,7 +51,7 @@ func (te *TreeEntry) Size() int64 {
	return te.size
}

// IsSubModule if the entry is a sub module
// IsSubModule if the entry is a submodule
func (te *TreeEntry) IsSubModule() bool {
	return te.gogitTreeEntry.Mode == filemode.Submodule
}
@@ -15,7 +15,7 @@ type EntryMode int
// one of these.
const (
	// EntryModeNoEntry is possible if the file was added or removed in a commit. In the case of
	// added the base commit will not have the file in its tree so a mode of 0o000000 is used.
	// when adding the base commit doesn't have the file in its tree, a mode of 0o000000 is used.
	EntryModeNoEntry EntryMode = 0o000000

	EntryModeBlob EntryMode = 0o100644
@@ -30,7 +30,7 @@ func (e EntryMode) String() string {
	return strconv.FormatInt(int64(e), 8)
}

// IsSubModule if the entry is a sub module
// IsSubModule if the entry is a submodule
func (e EntryMode) IsSubModule() bool {
	return e == EntryModeCommit
}
@@ -57,7 +57,7 @@ func (te *TreeEntry) Size() int64 {
	return te.size
}

// IsSubModule if the entry is a sub module
// IsSubModule if the entry is a submodule
func (te *TreeEntry) IsSubModule() bool {
	return te.entryMode.IsSubModule()
}
@@ -53,50 +53,3 @@ func TestEntriesCustomSort(t *testing.T) {
	assert.Equal(t, "bcd", entries[6].Name())
	assert.Equal(t, "abc", entries[7].Name())
}

func TestFollowLink(t *testing.T) {
	r, err := openRepositoryWithDefaultContext("tests/repos/repo1_bare")
	assert.NoError(t, err)
	defer r.Close()

	commit, err := r.GetCommit("37991dec2c8e592043f47155ce4808d4580f9123")
	assert.NoError(t, err)

	// get the symlink
	lnk, err := commit.Tree.GetTreeEntryByPath("foo/bar/link_to_hello")
	assert.NoError(t, err)
	assert.True(t, lnk.IsLink())

	// should be able to dereference to target
	target, err := lnk.FollowLink()
	assert.NoError(t, err)
	assert.Equal(t, "hello", target.Name())
	assert.False(t, target.IsLink())
	assert.Equal(t, "b14df6442ea5a1b382985a6549b85d435376c351", target.ID.String())

	// should error when called on normal file
	target, err = commit.Tree.GetTreeEntryByPath("file1.txt")
	assert.NoError(t, err)
	_, err = target.FollowLink()
	assert.EqualError(t, err, "file1.txt: not a symlink")

	// should error for broken links
	target, err = commit.Tree.GetTreeEntryByPath("foo/broken_link")
	assert.NoError(t, err)
	assert.True(t, target.IsLink())
	_, err = target.FollowLink()
	assert.EqualError(t, err, "broken_link: broken link")

	// should error for external links
	target, err = commit.Tree.GetTreeEntryByPath("foo/outside_repo")
	assert.NoError(t, err)
	assert.True(t, target.IsLink())
	_, err = target.FollowLink()
	assert.EqualError(t, err, "outside_repo: points outside of repo")

	// testing fix for short link bug
	target, err = commit.Tree.GetTreeEntryByPath("foo/link_short")
	assert.NoError(t, err)
	_, err = target.FollowLink()
	assert.EqualError(t, err, "link_short: broken link")
}
@@ -69,7 +69,7 @@ func (t *Tree) ListEntriesRecursiveWithSize() (Entries, error) {
	seen := map[plumbing.Hash]bool{}
	walker := object.NewTreeWalker(t.gogitTree, true, seen)
	for {
		fullName, entry, err := walker.Next()
		_, entry, err := walker.Next()
		if err == io.EOF {
			break
		}
@@ -84,7 +84,6 @@ func (t *Tree) ListEntriesRecursiveWithSize() (Entries, error) {
			ID:             ParseGogitHash(entry.Hash),
			gogitTreeEntry: &entry,
			ptree:          t,
			fullName:       fullName,
		}
		entries = append(entries, convertedEntry)
	}
@@ -6,13 +6,14 @@ package console
import (
	"bytes"
	"io"
	"path"
	"unicode/utf8"

	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/typesniffer"
	"code.gitea.io/gitea/modules/util"

	trend "github.com/buildkite/terminal-to-html/v3"
	"github.com/go-enry/go-enry/v2"
)

func init() {
@@ -22,6 +23,8 @@ func init() {
// Renderer implements markup.Renderer
type Renderer struct{}

var _ markup.RendererContentDetector = (*Renderer)(nil)

// Name implements markup.Renderer
func (Renderer) Name() string {
	return "console"
@@ -40,15 +43,36 @@ func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
}

// CanRender implements markup.RendererContentDetector
func (Renderer) CanRender(filename string, input io.Reader) bool {
	buf, err := io.ReadAll(input)
	if err != nil {
func (Renderer) CanRender(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) bool {
	if !sniffedType.IsTextPlain() {
		return false
	}
	if enry.GetLanguage(path.Base(filename), buf) != enry.OtherLanguage {

	s := util.UnsafeBytesToString(prefetchBuf)
	rs := []rune(s)
	cnt := 0
	firstErrPos := -1
	isCtrlSep := func(p int) bool {
		return p < len(rs) && (rs[p] == ';' || rs[p] == 'm')
	}
	for i, c := range rs {
		if c == 0 {
			return false
		}
		if c == '\x1b' {
			match := i+1 < len(rs) && rs[i+1] == '['
			if match && (isCtrlSep(i+2) || isCtrlSep(i+3) || isCtrlSep(i+4) || isCtrlSep(i+5)) {
				cnt++
			}
		}
		if c == utf8.RuneError && firstErrPos == -1 {
			firstErrPos = i
		}
	}
	if firstErrPos != -1 && firstErrPos != len(rs)-1 {
		return false
	}
	return bytes.ContainsRune(buf, '\x1b')
	return cnt >= 2 // only render it as console output if there are at least two escape sequences
}

// Render renders terminal colors to HTML with all specific handling stuff.
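The new CanRender no longer drains an io.Reader or consults go-enry: it requires a text/plain sniff, rejects NUL bytes and invalid UTF-8 that is not at the very end of the buffer, and only accepts content containing at least two ANSI escape sequences. A hedged usage sketch of the new calling convention (hypothetical wrapper; the console package import path is assumed from the module layout):

package example

import (
	"code.gitea.io/gitea/modules/markup/console" // assumed import path for the renderer above
	"code.gitea.io/gitea/modules/typesniffer"
)

// looksLikeConsoleOutput sniffs the prefetched bytes once and lets the detector
// decide from that same buffer, instead of handing it a reader to consume.
func looksLikeConsoleOutput(data []byte) bool {
	st := typesniffer.DetectContentType(data)
	return console.Renderer{}.CanRender("output.log", st, data)
}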
@@ -8,23 +8,39 @@ import (
	"testing"

	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/typesniffer"

	"github.com/stretchr/testify/assert"
)

func TestRenderConsole(t *testing.T) {
	var render Renderer
	kases := map[string]string{
		"\x1b[37m\x1b[40mnpm\x1b[0m \x1b[0m\x1b[32minfo\x1b[0m \x1b[0m\x1b[35mit worked if it ends with\x1b[0m ok": "<span class=\"term-fg37 term-bg40\">npm</span> <span class=\"term-fg32\">info</span> <span class=\"term-fg35\">it worked if it ends with</span> ok",
	cases := []struct {
		input    string
		expected string
	}{
		{"\x1b[37m\x1b[40mnpm\x1b[0m \x1b[0m\x1b[32minfo\x1b[0m \x1b[0m\x1b[35mit worked if it ends with\x1b[0m ok", `<span class="term-fg37 term-bg40">npm</span> <span class="term-fg32">info</span> <span class="term-fg35">it worked if it ends with</span> ok`},
		{"\x1b[1;2m \x1b[123m 啊", `<span class="term-fg2"> 啊</span>`},
		{"\x1b[1;2m \x1b[123m \xef", `<span class="term-fg2"> <20></span>`},
		{"\x1b[1;2m \x1b[123m \xef \xef", ``},
		{"\x1b[12", ``},
		{"\x1b[1", ``},
		{"\x1b[FOO\x1b[", ``},
		{"\x1b[mFOO\x1b[m", `FOO`},
	}

	for k, v := range kases {
	var render Renderer
	for i, c := range cases {
		var buf strings.Builder
		canRender := render.CanRender("test", strings.NewReader(k))
		assert.True(t, canRender)
		st := typesniffer.DetectContentType([]byte(c.input))
		canRender := render.CanRender("test", st, []byte(c.input))
		if c.expected == "" {
			assert.False(t, canRender, "case %d: expected not to render", i)
			continue
		}

		err := render.Render(markup.NewRenderContext(t.Context()), strings.NewReader(k), &buf)
		assert.True(t, canRender)
		err := render.Render(markup.NewRenderContext(t.Context()), strings.NewReader(c.input), &buf)
		assert.NoError(t, err)
		assert.Equal(t, v, buf.String())
		assert.Equal(t, c.expected, buf.String())
	}
}
@@ -4,12 +4,12 @@
package markup

import (
	"bytes"
	"io"
	"path"
	"strings"

	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/typesniffer"
)

// Renderer defines an interface for rendering markup file to HTML
@@ -37,7 +37,7 @@ type ExternalRenderer interface {
// RendererContentDetector detects if the content can be rendered
// by specified renderer
type RendererContentDetector interface {
	CanRender(filename string, input io.Reader) bool
	CanRender(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) bool
}

var (
@@ -60,13 +60,9 @@ func GetRendererByFileName(filename string) Renderer {
}

// DetectRendererType detects the markup type of the content
func DetectRendererType(filename string, input io.Reader) string {
	buf, err := io.ReadAll(input)
	if err != nil {
		return ""
	}
func DetectRendererType(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) string {
	for _, renderer := range renderers {
		if detector, ok := renderer.(RendererContentDetector); ok && detector.CanRender(filename, bytes.NewReader(buf)) {
		if detector, ok := renderer.(RendererContentDetector); ok && detector.CanRender(filename, sniffedType, prefetchBuf) {
			return renderer.Name()
		}
	}
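With the interface change, callers prefetch a small buffer once, sniff its type, and pass both to every detector, instead of each detector reading (and exhausting) an io.Reader. A minimal sketch of the new calling convention, using only the functions shown in this diff (the wrapper itself is hypothetical):

package example

import (
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/typesniffer"
)

// detectMarkup sniffs the prefetched bytes once and reuses them for all detectors.
func detectMarkup(filename string, prefetchBuf []byte) string {
	st := typesniffer.DetectContentType(prefetchBuf)
	return markup.DetectRendererType(filename, st, prefetchBuf)
}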
@@ -116,15 +116,17 @@ type ContentsExtResponse struct {

// ContentsResponse contains information about a repo's entry's (dir, file, symlink, submodule) metadata and content
type ContentsResponse struct {
	Name          string `json:"name"`
	Path          string `json:"path"`
	SHA           string `json:"sha"`
	LastCommitSHA string `json:"last_commit_sha"`
	Name string `json:"name"`
	Path string `json:"path"`
	SHA  string `json:"sha"`

	LastCommitSHA *string `json:"last_commit_sha,omitempty"`
	// swagger:strfmt date-time
	LastCommitterDate time.Time `json:"last_committer_date"`
	LastCommitterDate *time.Time `json:"last_committer_date,omitempty"`
	// swagger:strfmt date-time
	LastAuthorDate    time.Time `json:"last_author_date"`
	LastCommitMessage string    `json:"last_commit_message"`
	LastAuthorDate    *time.Time `json:"last_author_date,omitempty"`
	LastCommitMessage *string    `json:"last_commit_message,omitempty"`

	// `type` will be `file`, `dir`, `symlink`, or `submodule`
	Type string `json:"type"`
	Size int64  `json:"size"`
@@ -142,8 +144,8 @@ type ContentsResponse struct {
	SubmoduleGitURL *string            `json:"submodule_git_url"`
	Links           *FileLinksResponse `json:"_links"`

	LfsOid  *string `json:"lfs_oid"`
	LfsSize *int64  `json:"lfs_size"`
	LfsOid  *string `json:"lfs_oid,omitempty"`
	LfsSize *int64  `json:"lfs_size,omitempty"`
}

// FileCommitResponse contains information generated from a Git commit for a repo's file.
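Switching the last-commit fields to pointer types with omitempty means unset metadata is dropped from the JSON payload instead of being serialized as empty strings or zero timestamps. A toy illustration of that marshaling behaviour (not the actual API type, just the same struct-tag pattern):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// a *time.Time with omitempty disappears when nil, whereas a plain time.Time
// would always serialize, e.g. as "0001-01-01T00:00:00Z".
type contents struct {
	Name              string     `json:"name"`
	LastCommitSHA     *string    `json:"last_commit_sha,omitempty"`
	LastCommitterDate *time.Time `json:"last_committer_date,omitempty"`
}

func main() {
	sha := "3c86ce7d8a"
	now := time.Now()
	withMeta, _ := json.Marshal(contents{Name: "a.txt", LastCommitSHA: &sha, LastCommitterDate: &now})
	withoutMeta, _ := json.Marshal(contents{Name: "a.txt"})
	fmt.Println(string(withMeta))
	fmt.Println(string(withoutMeta)) // {"name":"a.txt"} — the unset fields are omitted entirely
}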
@@ -6,18 +6,14 @@ package typesniffer
import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
	"net/http"
	"regexp"
	"slices"
	"strings"

	"code.gitea.io/gitea/modules/util"
	"sync"
)

// Use at most this many bytes to determine Content Type.
const sniffLen = 1024
const SniffContentSize = 1024

const (
	MimeTypeImageSvg = "image/svg+xml"
@@ -26,22 +22,30 @@ const (
	MimeTypeApplicationOctetStream = "application/octet-stream"
)

var (
	svgComment       = regexp.MustCompile(`(?s)<!--.*?-->`)
	svgTagRegex      = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
	svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
)
var globalVars = sync.OnceValue(func() (ret struct {
	svgComment, svgTagRegex, svgTagInXMLRegex *regexp.Regexp
},
) {
	ret.svgComment = regexp.MustCompile(`(?s)<!--.*?-->`)
	ret.svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
	ret.svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
	return ret
})

// SniffedType contains information about a blobs type.
// SniffedType contains information about a blob's type.
type SniffedType struct {
	contentType string
}

// IsText etects if content format is plain text.
// IsText detects if the content format is text family, including text/plain, text/html, text/css, etc.
func (ct SniffedType) IsText() bool {
	return strings.Contains(ct.contentType, "text/")
}

func (ct SniffedType) IsTextPlain() bool {
	return strings.Contains(ct.contentType, "text/plain")
}

// IsImage detects if data is an image format
func (ct SniffedType) IsImage() bool {
	return strings.Contains(ct.contentType, "image/")
@@ -57,12 +61,12 @@ func (ct SniffedType) IsPDF() bool {
	return strings.Contains(ct.contentType, "application/pdf")
}

// IsVideo detects if data is an video format
// IsVideo detects if data is a video format
func (ct SniffedType) IsVideo() bool {
	return strings.Contains(ct.contentType, "video/")
}

// IsAudio detects if data is an video format
// IsAudio detects if data is a video format
func (ct SniffedType) IsAudio() bool {
	return strings.Contains(ct.contentType, "audio/")
}
@@ -103,33 +107,34 @@ func detectFileTypeBox(data []byte) (brands []string, found bool) {
	return brands, true
}

// DetectContentType extends http.DetectContentType with more content types. Defaults to text/unknown if input is empty.
// DetectContentType extends http.DetectContentType with more content types. Defaults to text/plain if input is empty.
func DetectContentType(data []byte) SniffedType {
	if len(data) == 0 {
		return SniffedType{"text/unknown"}
		return SniffedType{"text/plain"}
	}

	ct := http.DetectContentType(data)

	if len(data) > sniffLen {
		data = data[:sniffLen]
	if len(data) > SniffContentSize {
		data = data[:SniffContentSize]
	}

	vars := globalVars()
	// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
	detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
	detectByXML := strings.Contains(ct, "text/xml")
	if detectByHTML || detectByXML {
		dataProcessed := svgComment.ReplaceAll(data, nil)
		dataProcessed := vars.svgComment.ReplaceAll(data, nil)
		dataProcessed = bytes.TrimSpace(dataProcessed)
		if detectByHTML && svgTagRegex.Match(dataProcessed) ||
			detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
		if detectByHTML && vars.svgTagRegex.Match(dataProcessed) ||
			detectByXML && vars.svgTagInXMLRegex.Match(dataProcessed) {
			ct = MimeTypeImageSvg
		}
	}

	if strings.HasPrefix(ct, "audio/") && bytes.HasPrefix(data, []byte("ID3")) {
		// The MP3 detection is quite inaccurate, any content with "ID3" prefix will result in "audio/mpeg".
		// So remove the "ID3" prefix and detect again, if result is text, then it must be text content.
		// So remove the "ID3" prefix and detect again, then if the result is "text", it must be text content.
		// This works especially because audio files contain many unprintable/invalid characters like `0x00`
		ct2 := http.DetectContentType(data[3:])
		if strings.HasPrefix(ct2, "text/") {
@@ -155,15 +160,3 @@ func DetectContentType(data []byte) SniffedType {

	}
	return SniffedType{ct}
}

// DetectContentTypeFromReader guesses the content type contained in the reader.
func DetectContentTypeFromReader(r io.Reader) (SniffedType, error) {
	buf := make([]byte, sniffLen)
	n, err := util.ReadAtMost(r, buf)
	if err != nil {
		return SniffedType{}, fmt.Errorf("DetectContentTypeFromReader io error: %w", err)
	}
	buf = buf[:n]

	return DetectContentType(buf), nil
}
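The package-level regexp variables move into a struct that is lazily compiled exactly once via sync.OnceValue, which is the pattern the globalVars function above uses. A small self-contained sketch of that pattern (toy regex, not the actual typesniffer patterns; requires Go 1.21+):

package main

import (
	"fmt"
	"regexp"
	"sync"
)

// lazy, one-time initialization of a group of compiled regexps,
// the same shape as the typesniffer change above.
var patterns = sync.OnceValue(func() (ret struct{ svgTag *regexp.Regexp }) {
	ret.svgTag = regexp.MustCompile(`(?si)\A\s*<svg\b`)
	return ret
})

func main() {
	fmt.Println(patterns().svgTag.MatchString("<svg></svg>")) // true; compiled exactly once
}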
@@ -4,7 +4,6 @@
package typesniffer

import (
	"bytes"
	"encoding/base64"
	"encoding/hex"
	"strings"
@@ -17,7 +16,7 @@ func TestDetectContentTypeLongerThanSniffLen(t *testing.T) {
	// Pre-condition: Shorter than sniffLen detects SVG.
	assert.Equal(t, "image/svg+xml", DetectContentType([]byte(`<!-- Comment --><svg></svg>`)).contentType)
	// Longer than sniffLen detects something else.
	assert.NotEqual(t, "image/svg+xml", DetectContentType([]byte(`<!-- `+strings.Repeat("x", sniffLen)+` --><svg></svg>`)).contentType)
	assert.NotEqual(t, "image/svg+xml", DetectContentType([]byte(`<!-- `+strings.Repeat("x", SniffContentSize)+` --><svg></svg>`)).contentType)
}

func TestIsTextFile(t *testing.T) {
@@ -116,22 +115,13 @@ func TestIsAudio(t *testing.T) {
	assert.True(t, DetectContentType([]byte("ID3Toy\n====\t* hi 🌞, ..."+"🌛"[0:2])).IsText()) // test ID3 tag with incomplete UTF8 char
}

func TestDetectContentTypeFromReader(t *testing.T) {
	mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl")
	st, err := DetectContentTypeFromReader(bytes.NewReader(mp3))
	assert.NoError(t, err)
	assert.True(t, st.IsAudio())
}

func TestDetectContentTypeOgg(t *testing.T) {
	oggAudio, _ := hex.DecodeString("4f67675300020000000000000000352f0000000000007dc39163011e01766f72626973000000000244ac0000000000000071020000000000b8014f6767530000")
	st, err := DetectContentTypeFromReader(bytes.NewReader(oggAudio))
	assert.NoError(t, err)
	st := DetectContentType(oggAudio)
	assert.True(t, st.IsAudio())

	oggVideo, _ := hex.DecodeString("4f676753000200000000000000007d9747ef000000009b59daf3012a807468656f7261030201001e00110001e000010e00020000001e00000001000001000001")
	st, err = DetectContentTypeFromReader(bytes.NewReader(oggVideo))
	assert.NoError(t, err)
	st = DetectContentType(oggVideo)
	assert.True(t, st.IsVideo())
}
@@ -17,8 +17,8 @@ var (
	ErrNotExist     = errors.New("resource does not exist") // also implies HTTP 404
	ErrAlreadyExist = errors.New("resource already exists") // also implies HTTP 409

	// ErrUnprocessableContent implies HTTP 422, syntax of the request content was correct,
	// but server was unable to process the contained instructions
	// ErrUnprocessableContent implies HTTP 422, the syntax of the request content is correct,
	// but the server is unable to process the contained instructions
	ErrUnprocessableContent = errors.New("unprocessable content")
)
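The new symlink helpers report failures by wrapping these sentinels with util.ErrorWrap, so callers can classify them with errors.Is, exactly as the tests above do with assert.ErrorIs. A minimal sketch of that pattern (hypothetical demo function, using only calls that appear in this diff):

package example

import (
	"errors"
	"fmt"

	"code.gitea.io/gitea/modules/util"
)

// wrapped errors keep the sentinel visible to errors.Is: broken links surface
// ErrNotExist, while absolute/oversized/cyclic links surface ErrUnprocessableContent.
func demo() {
	err := util.ErrorWrap(util.ErrUnprocessableContent, "%q is an absolute symlink", "/etc/passwd")
	fmt.Println(errors.Is(err, util.ErrUnprocessableContent)) // true
}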
@@ -2769,6 +2769,8 @@ branch.new_branch_from = Create new branch from "%s"
branch.renamed = Branch %s was renamed to %s.
branch.rename_default_or_protected_branch_error = Only admins can rename default or protected branches.
branch.rename_protected_branch_failed = This branch is protected by glob-based protection rules.
branch.commits_divergence_from = Commits divergence: %[1]d behind and %[2]d ahead of %[3]s
branch.commits_no_divergence = The same as branch %[1]s

tag.create_tag = Create tag %s
tag.create_tag_operation = Create tag
@@ -2782,6 +2784,7 @@ topic.done = Done
topic.count_prompt = You cannot select more than 25 topics
topic.format_prompt = Topics must start with a letter or number, can include dashes ('-') and dots ('.'), can be up to 35 characters long. Letters must be lowercase.

find_file.follow_symlink= Follow this symlink to where it is pointing at
find_file.go_to_file = Go to file
find_file.no_matching = No matching file found
@@ -1969,6 +1969,7 @@ pulls.cmd_instruction_checkout_title=Basculer
pulls.cmd_instruction_checkout_desc=Depuis votre dépôt, basculer sur une nouvelle branche et tester des modifications.
pulls.cmd_instruction_merge_title=Fusionner
pulls.cmd_instruction_merge_desc=Fusionner les modifications et mettre à jour sur Gitea.
pulls.cmd_instruction_merge_warning=Attention : cette opération ne peut pas fusionner la demande d’ajout car la « détection automatique de fusion manuelle » n’a pas été activée
pulls.clear_merge_message=Effacer le message de fusion
pulls.clear_merge_message_hint=Effacer le message de fusion ne supprimera que le message de la révision, mais pas les pieds de révision générés tels que "Co-Authored-By:".

@@ -2768,6 +2769,8 @@ branch.new_branch_from=`Créer une nouvelle branche à partir de "%s"`
branch.renamed=La branche %s à été renommée en %s.
branch.rename_default_or_protected_branch_error=Seuls les administrateurs peuvent renommer les branches par défaut ou protégées.
branch.rename_protected_branch_failed=Cette branche est protégée par des règles de protection basées sur des globs.
branch.commits_divergence_from=Divergence de révisions : %[1]d en retard et %[2]d en avance sur %[3]s
branch.commits_no_divergence=Identique à la branche %[1]s

tag.create_tag=Créer l'étiquette %s
tag.create_tag_operation=Créer une étiquette
@@ -1969,6 +1969,7 @@ pulls.cmd_instruction_checkout_title=Seiceáil
pulls.cmd_instruction_checkout_desc=Ó stór tionscadail, seiceáil brainse nua agus déan tástáil ar na hathruithe.
pulls.cmd_instruction_merge_title=Cumaisc
pulls.cmd_instruction_merge_desc=Cumaisc na hathruithe agus nuashonrú ar Gitea.
pulls.cmd_instruction_merge_warning=Rabhadh: Ní féidir iarratas tarraingthe cumaisc a dhéanamh leis an oibríocht seo mar nach bhfuil "autodetect manual merge" cumasaithe.
pulls.clear_merge_message=Glan an teachtaireacht chumaisc
pulls.clear_merge_message_hint=Má imrítear an teachtaireacht chumaisc ní bhainfear ach ábhar na teachtaireachta tiomanta agus coimeádfar leantóirí git ginte ar nós "Co-Authored-By …".

@@ -2768,6 +2769,8 @@ branch.new_branch_from=`Cruthaigh brainse nua ó "%s"`
branch.renamed=Ainmníodh brainse %s go %s.
branch.rename_default_or_protected_branch_error=Ní féidir ach le riarthóirí brainsí réamhshocraithe nó cosanta a athainmniú.
branch.rename_protected_branch_failed=Tá an brainse seo faoi chosaint ag rialacha cosanta domhanda.
branch.commits_divergence_from=Déanann sé dialltacht a thiomnú: %[1]d taobh thiar agus %[2]d chun tosaigh ar %[3]s
branch.commits_no_divergence=Mar an gcéanna le brainse %[1]s

tag.create_tag=Cruthaigh clib %s
tag.create_tag_operation=Cruthaigh clib
@@ -2781,6 +2784,7 @@ topic.done=Déanta
topic.count_prompt=Ní féidir leat níos mó ná 25 topaicí a roghnú
topic.format_prompt=Ní mór do thopaicí tosú le litir nó uimhir, is féidir daiseanna ('-') agus poncanna ('.') a áireamh, a bheith suas le 35 carachtar ar fad. Ní mór litreacha a bheith i litreacha beaga.

find_file.follow_symlink=Lean an nasc siombalach seo go dtí an áit a bhfuil sé ag pointeáil air
find_file.go_to_file=Téigh go dtí an comhad
find_file.no_matching=Níl aon chomhad meaitseála le fáil
@@ -1562,8 +1562,8 @@ issues.filter_project=Planeamento
issues.filter_project_all=Todos os planeamentos
issues.filter_project_none=Nenhum planeamento
issues.filter_assignee=Encarregado
issues.filter_assignee_no_assignee=Não atribuído
issues.filter_assignee_any_assignee=Atribuído a qualquer pessoa
issues.filter_assignee_no_assignee=Não atribuída
issues.filter_assignee_any_assignee=Atribuída a alguém
issues.filter_poster=Autor(a)
issues.filter_user_placeholder=Procurar utilizadores
issues.filter_user_no_select=Todos os utilizadores
@@ -1969,6 +1969,7 @@ pulls.cmd_instruction_checkout_title=Checkout
pulls.cmd_instruction_checkout_desc=A partir do seu repositório, crie um novo ramo e teste nele as modificações.
pulls.cmd_instruction_merge_title=Integrar
pulls.cmd_instruction_merge_desc=Integrar as modificações e enviar para o Gitea.
pulls.cmd_instruction_merge_warning=Aviso: Esta operação não pode executar pedidos de integração porque a opção "auto-identificar integração manual" não está habilitada.
pulls.clear_merge_message=Apagar mensagem de integração
pulls.clear_merge_message_hint=Apagar a mensagem de integração apenas remove o conteúdo da mensagem de cometimento e mantém os rodapés do git, tais como "Co-Autorado-Por …".

@@ -2768,6 +2769,8 @@ branch.new_branch_from=`Criar um novo ramo a partir do ramo "%s"`
branch.renamed=O ramo %s foi renomeado para %s.
branch.rename_default_or_protected_branch_error=Só os administradores é que podem renomear o ramo principal ou ramos protegidos.
branch.rename_protected_branch_failed=Este ramo está protegido por regras de salvaguarda baseadas em padrões glob.
branch.commits_divergence_from=Divergência nos cometimentos: %[1]d atrás e %[2]d à frente de %[3]s
branch.commits_no_divergence=Idêntico ao ramo %[1]s

tag.create_tag=Criar etiqueta %s
tag.create_tag_operation=Criar etiqueta
@@ -2781,6 +2784,7 @@ topic.done=Concluído
topic.count_prompt=Não pode escolher mais do que 25 tópicos
topic.format_prompt=Os tópicos devem começar com uma letra ou um número, podem incluir traços ('-') ou pontos ('.') e podem ter até 35 caracteres. As letras têm que ser minúsculas.

find_file.follow_symlink=Seguir esta ligação simbólica para onde ela está apontando
find_file.go_to_file=Ir para o ficheiro
find_file.no_matching=Não foi encontrado qualquer ficheiro correspondente
@ -420,8 +420,9 @@ remember_me=记住此设备
|
||||
remember_me.compromised=登录令牌不再有效,因为它可能表明帐户已被破坏。请检查您的帐户是否有异常活动。
|
||||
forgot_password_title=忘记密码
|
||||
forgot_password=忘记密码?
|
||||
need_account=需要一个帐户?
|
||||
sign_up_now=还没账号?马上注册。
|
||||
need_account=需要一个帐户?
|
||||
sign_up_tip=您正在系统中注册第一个帐户,它拥有管理员权限。请仔细记住您的用户名和密码。 如果您忘记了用户名或密码,请参阅 Gitea 文档以恢复账户。
|
||||
sign_up_now=立即注册。
|
||||
sign_up_successful=帐户创建成功。欢迎!
|
||||
confirmation_mail_sent_prompt_ex=一封新的确认邮件已经发送到 <b>%s</b>。请在下一个 %s 中检查您的收件箱以完成注册流程。 如果您的注册邮箱地址不正确,您可以重新登录并更改它。
|
||||
must_change_password=更新您的密码
|
||||
@ -485,7 +486,7 @@ sspi_auth_failed=SSPI 认证失败
|
||||
password_pwned=此密码出现在 <a target="_blank" rel="noopener noreferrer" href="%s">被盗密码</a> 列表上并且曾经被公开。 请使用另一个密码再试一次。
|
||||
password_pwned_err=无法完成对 HaveIBeenPwned 的请求
|
||||
last_admin=您不能删除最后一个管理员。必须至少保留一个管理员。
|
||||
signin_passkey=使用密钥登录
|
||||
signin_passkey=使用通行密钥登录
|
||||
back_to_sign_in=返回登录页面
|
||||
|
||||
[mail]
|
||||
@ -518,7 +519,7 @@ register_success=注册成功
|
||||
issue_assigned.pull=@%[1]s 已将仓库 %[3]s 中的合并请求 %[2]s 指派给您
|
||||
issue_assigned.issue=@%[1]s 已将仓库 %[3]s 中的工单 %[2]s 指派给您
|
||||
|
||||
issue.x_mentioned_you=<b>@%s</b> 提到了您:
|
||||
issue.x_mentioned_you=<b>@%s</b> 提及了您:
|
||||
issue.action.force_push=<b>%[1]s</b> 强制从 %[3]s 推送 <b>%[2]s</b> 至 [4]s。
|
||||
issue.action.push_1=<b>@%[1]s</b> 推送了 %[3]d 个提交到 %[2]s
|
||||
issue.action.push_n=<b>@%[1]s</b> 推送了 %[3]d 个提交到 %[2]s
|
||||
@ -838,7 +839,7 @@ ssh_desc=这些 SSH 公钥已经关联到您的账号。相应的私钥拥有完
|
||||
principal_desc=这些 SSH 证书规则已关联到您的账号将允许完全访问您所有仓库。
|
||||
gpg_desc=这些 GPG 公钥已经关联到您的账号。请妥善保管您的私钥因为他们将被用于认证提交。
|
||||
ssh_helper=<strong>需要帮助?</strong> 请查看有关 <a href="%s">如何生成 SSH 密钥</a> 或 <a href="%s">常见 SSH 问题</a> 寻找答案。
|
||||
gpg_helper=<strong>需要帮助吗?</strong>看一看 GitHub <a href="%s">关于 GPG</a> 的指导。
|
||||
gpg_helper=<strong>需要帮助?</strong>看一看 GitHub <a href="%s">关于 GPG</a> 的指导。
|
||||
add_new_key=增加 SSH 密钥
|
||||
add_new_gpg_key=添加的 GPG 密钥
|
||||
key_content_ssh_placeholder=以 'ssh-ed25519'、 'ssh-rsa'、 'ecdsa-sha2-nistp256'、'ecdsa-sha2-nistp384'、'ecdsa-sha2-nistp521'、 'sk-ecdsa-sha2-nistp256@openssh.com' 或 'sk-ssh-ed25519@openssh.com' 开头
|
||||
@ -1016,10 +1017,10 @@ delete_account_title=删除当前帐户
|
||||
delete_account_desc=确实要永久删除此用户帐户吗?
|
||||
|
||||
email_notifications.enable=启用邮件通知
|
||||
email_notifications.onmention=只在被提到时邮件通知
|
||||
email_notifications.onmention=仅被提及时通知
|
||||
email_notifications.disable=停用邮件通知
|
||||
email_notifications.submit=邮件通知设置
|
||||
email_notifications.andyourown=和您自己的通知
|
||||
email_notifications.submit=设置邮件通知
|
||||
email_notifications.andyourown=仅与您相关的通知
|
||||
|
||||
visibility=用户可见性
|
||||
visibility.public=公开
|
||||
@ -1061,6 +1062,7 @@ fork_no_valid_owners=这个代码仓库无法被派生,因为没有有效的
|
||||
fork.blocked_user=无法克隆仓库,因为您被仓库所有者屏蔽。
|
||||
use_template=使用此模板
|
||||
open_with_editor=用 %s 打开
|
||||
|
||||
download_zip=下载 ZIP
|
||||
download_tar=下载 TAR.GZ
|
||||
download_bundle=下载 BUNDLE
|
||||
@ -1070,12 +1072,12 @@ repo_desc=描述
|
||||
repo_desc_helper=输入简要描述 (可选)
|
||||
repo_no_desc=无详细信息
|
||||
repo_lang=语言
|
||||
repo_gitignore_helper=选择 .gitignore 模板。
|
||||
repo_gitignore_helper=选择 .gitignore 模板
|
||||
repo_gitignore_helper_desc=从常见语言的模板列表中选择忽略跟踪的文件。默认情况下,由开发或构建工具生成的特殊文件都包含在 .gitignore 中。
|
||||
issue_labels=工单标签
|
||||
issue_labels_helper=选择一个工单标签集
|
||||
license=授权许可
|
||||
license_helper=选择授权许可文件。
|
||||
license_helper=选择授权许可文件
|
||||
license_helper_desc=许可证说明了其他人可以和不可以用您的代码做什么。不确定哪一个适合您的项目?见 <a target="_blank" rel="noopener noreferrer" href="%s">选择一个许可证</a>
|
||||
multiple_licenses=多许可证
|
||||
object_format=对象格式
|
||||
@ -1228,6 +1230,7 @@ migrate.migrating_issues=迁移工单
|
||||
migrate.migrating_pulls=迁移合并请求
|
||||
migrate.cancel_migrating_title=取消迁移
|
||||
migrate.cancel_migrating_confirm=您想要取消此次迁移吗?
|
||||
migration_status=迁移状态
|
||||
|
||||
mirror_from=镜像自地址
|
||||
forked_from=派生自
|
||||
@ -1353,6 +1356,7 @@ editor.update=更新 %s
|
||||
editor.delete=删除 %s
|
||||
editor.patch=应用补丁
|
||||
editor.patching=打补丁:
|
||||
editor.fail_to_apply_patch=无法应用补丁
|
||||
editor.new_patch=新补丁
|
||||
editor.commit_message_desc=添加一个可选的扩展描述...
|
||||
editor.signoff_desc=在提交日志消息末尾添加签署人信息。
|
||||
@ -1372,6 +1376,7 @@ editor.branch_already_exists=此仓库已存在名为「%s」的分支。
|
||||
editor.directory_is_a_file=目录名「%s」已作为文件名在此仓库中存在。
|
||||
editor.file_is_a_symlink=`「%s」是一个符号链接,无法在 Web 编辑器中编辑`
|
||||
editor.filename_is_a_directory=文件名「%s」已作为目录名在此仓库中存在。
|
||||
editor.file_modifying_no_longer_exists=正在修改的文件「%s」已不存在于此仓库。
|
||||
editor.file_changed_while_editing=文件内容在您进行编辑时已经发生变动。<a target="_blank" rel="noopener noreferrer" href="%s">单击此处</a> 查看变动的具体内容,或者 <strong>再次提交</strong> 覆盖已发生的变动。
|
||||
editor.file_already_exists=此仓库已经存在名为「%s」的文件。
|
||||
editor.commit_id_not_matching=提交 ID 与您开始编辑时的 ID 不匹配。请提交到补丁分支然后合并。
|
||||
@ -1392,7 +1397,15 @@ editor.user_no_push_to_branch=用户不能推送到分支
|
||||
editor.require_signed_commit=分支需要签名提交
|
||||
editor.cherry_pick=拣选提交 %s 到:
|
||||
editor.revert=将 %s 还原到:
|
||||
editor.failed_to_commit=提交更改失败。
|
||||
editor.failed_to_commit_summary=错误信息:
|
||||
|
||||
editor.fork_create=派生仓库发起请求变更
|
||||
editor.fork_create_description=您不能直接编辑此仓库。您可以从此仓库派生,进行编辑并创建一个拉取请求。
|
||||
editor.fork_edit_description=您不能直接编辑此仓库。 更改将写入您的派生仓库 <b>%s</b>,以便您可以创建一个拉取请求。
|
||||
editor.fork_not_editable=你已经派生了这个仓库,但是你的分叉是不可编辑的。
|
||||
editor.fork_failed_to_push_branch=推送分支 %s 到仓库失败。
|
||||
editor.fork_branch_exists=分支 "%s" 已存在于您的派生仓库中,请选择一个新的分支名称。
|
||||
|
||||
commits.desc=浏览代码修改历史
|
||||
commits.commits=次代码提交
|
||||
@ -1714,6 +1727,8 @@ issues.remove_time_estimate_at=删除预估时间 %s
|
||||
issues.time_estimate_invalid=预计时间格式无效
|
||||
issues.start_tracking_history=`开始工作 %s`
|
||||
issues.tracker_auto_close=当此工单关闭时,自动停止计时器
|
||||
issues.stopwatch_already_stopped=此工单的计时器已经停止
|
||||
issues.stopwatch_already_created=此工单的计时器已经存在
|
||||
issues.tracking_already_started=`您已经开始对 <a href="%s">另一个工单</a> 进行时间跟踪!`
|
||||
issues.stop_tracking=停止计时器
|
||||
issues.stop_tracking_history=工作 <b>%[1]s</b> 于 %[2]s 停止
|
||||
@ -1955,6 +1970,7 @@ pulls.cmd_instruction_checkout_title=检出
|
||||
pulls.cmd_instruction_checkout_desc=从您的仓库中检出一个新的分支并测试变更。
|
||||
pulls.cmd_instruction_merge_title=合并
|
||||
pulls.cmd_instruction_merge_desc=合并变更并更新到 Gitea 上
|
||||
pulls.cmd_instruction_merge_warning=警告:此操作不能合并该合并请求,因为「自动检测手动合并」未启用
|
||||
pulls.clear_merge_message=清除合并信息
|
||||
pulls.clear_merge_message_hint=清除合并消息只会删除提交消息内容,并保留生成的 Git 附加内容,如「Co-Authored-By…」。
|
||||
|
||||
@ -2150,6 +2166,7 @@ settings.collaboration.write=可写权限
|
||||
settings.collaboration.read=可读权限
|
||||
settings.collaboration.owner=所有者
|
||||
settings.collaboration.undefined=未定义
|
||||
settings.collaboration.per_unit=单元权限
|
||||
settings.hooks=Web 钩子
|
||||
settings.githooks=管理 Git 钩子
|
||||
settings.basic_settings=基本设置
|
||||
@ -2368,6 +2385,7 @@ settings.event_repository=仓库
|
||||
settings.event_repository_desc=创建或删除仓库
|
||||
settings.event_header_issue=工单事件
|
||||
settings.event_issues=工单
|
||||
settings.event_issues_desc=工单已打开、已关闭、已重新打开或已编辑。
|
||||
settings.event_issue_assign=工单已指派
|
||||
settings.event_issue_assign_desc=工单已指派或取消指派。
|
||||
settings.event_issue_label=工单增删标签
|
||||
@ -2378,6 +2396,7 @@ settings.event_issue_comment=工单评论
|
||||
settings.event_issue_comment_desc=工单评论已创建、编辑或删除。
|
||||
settings.event_header_pull_request=合并请求事件
|
||||
settings.event_pull_request=合并请求
|
||||
settings.event_pull_request_desc=合并请求已打开、关闭、重新打开或编辑。
|
||||
settings.event_pull_request_assign=合并请求已指派
|
||||
settings.event_pull_request_assign_desc=合并请求已指派或取消指派。
|
||||
settings.event_pull_request_label=合并请求增删标签
|
||||
@ -2395,6 +2414,8 @@ settings.event_pull_request_review_request_desc=合并请求评审已请求或
|
||||
settings.event_pull_request_approvals=合并请求批准
|
||||
settings.event_pull_request_merge=合并请求合并
|
||||
settings.event_header_workflow=工作流程事件
|
||||
settings.event_workflow_run=工作流运行
|
||||
settings.event_workflow_run_desc=Gitea 工作流队列中、等待中、正在进行或已完成的任务。
|
||||
settings.event_workflow_job=工作流任务
|
||||
settings.event_workflow_job_desc=Gitea 工作流队列中、等待中、正在进行或已完成的任务。
|
||||
settings.event_package=软件包
|
||||
@ -2773,7 +2794,7 @@ error.broken_git_hook=此仓库的 Git 钩子似乎已损坏。 请按照 <a tar
|
||||
[graphs]
|
||||
component_loading=正在加载 %s...
|
||||
component_loading_failed=无法加载 %s
|
||||
component_loading_info=这可能需要一点…
|
||||
component_loading_info=这可能需要一点时间…
|
||||
component_failed_to_load=意外的错误发生了。
|
||||
code_frequency.what=代码频率
|
||||
contributors.what=贡献
|
||||
@ -2802,6 +2823,7 @@ team_permission_desc=权限
|
||||
team_unit_desc=允许访问仓库单元
|
||||
team_unit_disabled=(已禁用)
|
||||
|
||||
form.name_been_taken=组织名称「%s」已经被占用。
|
||||
form.name_reserved=组织名称「%s」是保留的。
|
||||
form.name_pattern_not_allowed=组织名中不允许使用「%s」格式。
|
||||
form.create_org_not_allowed=此账号禁止创建组织
|
||||
@ -2824,12 +2846,27 @@ settings.visibility.private_shortname=私有
|
||||
settings.update_settings=更新组织设置
|
||||
settings.update_setting_success=组织设置已更新。
|
||||
|
||||
settings.rename=修改组织名称
|
||||
settings.rename_desc=更改组织名称同时会更改组织的 URL 地址并释放旧的名称。
|
||||
settings.rename_success=组织 %[1]s 已成功重命名为 %[2]s。
|
||||
settings.rename_no_change=组织名称没有变化。
|
||||
settings.rename_new_org_name=新组织名称
|
||||
settings.rename_failed=由于内部错误,重命名组织失败
|
||||
settings.rename_notices_1=此操作 <strong>无法</strong> 被回滚。
|
||||
settings.rename_notices_2=在被人使用前,旧名称将会被重定向。
|
||||
|
||||
settings.update_avatar_success=组织头像已经更新。
|
||||
settings.delete=删除组织
|
||||
settings.delete_account=删除当前组织
|
||||
settings.delete_prompt=删除操作会永久清除该组织的信息,并且 <strong>不可恢复</strong>!
|
||||
settings.delete_prompt=删除操作会永久清除该组织的信息,并且 <strong>无法</strong> 恢复!
|
||||
settings.name_confirm=输入组织名称以确认:
|
||||
settings.delete_notices_1=此操作 <strong>无法</strong> 被回滚。
|
||||
settings.delete_notices_2=此操作将永久删除 <strong>%s</strong> 的所有<strong>仓库</strong>,包括 Git 数据、 工单、评论、百科和协作者的操作权限。
|
||||
settings.delete_notices_3=此操作将永久删除 <strong>%s</strong> 的所有 <strong>软件包</strong>。
|
||||
settings.delete_notices_4=此操作将永久删除 <strong>%s</strong> 的所有 <strong>项目</strong>。
|
||||
settings.confirm_delete_account=确认删除组织
|
||||
settings.delete_failed=由于内部错误,删除组织失败
|
||||
settings.delete_successful=组织 <b>%s</b> 已成功删除。
|
||||
settings.hooks_desc=在此处添加的 Web 钩子将会应用到该组织下的 <strong>所有仓库</strong>。
|
||||
|
||||
settings.labels_desc=添加能够被该组织下的 <strong>所有仓库</strong> 的工单使用的标签。
|
||||
@ -3720,8 +3757,8 @@ none=还没有密钥。
|
||||
; These keys are also for "edit secret", the keys are kept as-is to avoid unnecessary re-translation
|
||||
creation.description=组织描述
|
||||
creation.name_placeholder=不区分大小写,仅限字母数字或下划线且不能以 GITEA_ 或 GITHUB_ 开头
|
||||
creation.value_placeholder=输入任何内容,开头和结尾的空白将会被忽略。
|
||||
creation.description_placeholder=输入简短描述(可选)。
|
||||
creation.value_placeholder=输入任何内容,开头和结尾的空白将会被忽略
|
||||
creation.description_placeholder=输入简短描述(可选)
|
||||
|
||||
save_success=密钥「%s」保存成功。
|
||||
save_failed=密钥保存失败。
|
||||
@ -3806,6 +3843,7 @@ runs.no_runs=工作流尚未运行过。
|
||||
runs.empty_commit_message=(空白的提交消息)
|
||||
runs.expire_log_message=旧的日志已清除。
|
||||
runs.delete=删除工作流运行
|
||||
runs.cancel=取消工作流运行
|
||||
runs.delete.description=您确定要永久删除此工作流运行吗?此操作无法撤消。
|
||||
runs.not_done=此工作流运行尚未完成。
|
||||
runs.view_workflow_file=查看工作流文件
|
||||
|
package-lock.json (generated, 64 lines)
@ -28,7 +28,6 @@
|
||||
"dropzone": "6.0.0-beta.2",
|
||||
"easymde": "2.20.0",
|
||||
"esbuild-loader": "4.3.0",
|
||||
"escape-goat": "4.0.0",
|
||||
"fast-glob": "3.3.3",
|
||||
"htmx.org": "2.0.6",
|
||||
"idiomorph": "0.7.3",
|
||||
@ -40,6 +39,7 @@
|
||||
"minimatch": "10.0.2",
|
||||
"monaco-editor": "0.52.2",
|
||||
"monaco-editor-webpack-plugin": "7.1.0",
|
||||
"online-3d-viewer": "0.16.0",
|
||||
"pdfobject": "2.3.1",
|
||||
"perfect-debounce": "1.0.0",
|
||||
"postcss": "8.5.5",
|
||||
@ -2026,6 +2026,16 @@
|
||||
"vue": "^3.2.29"
|
||||
}
|
||||
},
|
||||
"node_modules/@simonwep/pickr": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmmirror.com/@simonwep/pickr/-/pickr-1.9.0.tgz",
|
||||
"integrity": "sha512-oEYvv15PyfZzjoAzvXYt3UyNGwzsrpFxLaZKzkOSd0WYBVwLd19iJerePDONxC1iF6+DpcswPdLIM2KzCJuYFg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"core-js": "3.32.2",
|
||||
"nanopop": "2.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@stoplight/better-ajv-errors": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@stoplight/better-ajv-errors/-/better-ajv-errors-1.0.3.tgz",
|
||||
@ -5337,6 +5347,17 @@
|
||||
"integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/core-js": {
|
||||
"version": "3.32.2",
|
||||
"resolved": "https://registry.npmmirror.com/core-js/-/core-js-3.32.2.tgz",
|
||||
"integrity": "sha512-pxXSw1mYZPDGvTQqEc5vgIb83jGQKFGYWY76z4a7weZXUolw3G+OvpZqSRcfYOoOVUQJYEPsWeQK8pKEnUtWxQ==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/core-js"
|
||||
}
|
||||
},
|
||||
"node_modules/core-js-compat": {
|
||||
"version": "3.43.0",
|
||||
"resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.43.0.tgz",
|
||||
@ -6541,18 +6562,6 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-goat": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz",
|
||||
"integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-string-regexp": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
|
||||
@ -7721,6 +7730,12 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/fflate": {
|
||||
"version": "0.8.2",
|
||||
"resolved": "https://registry.npmmirror.com/fflate/-/fflate-0.8.2.tgz",
|
||||
"integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/file-entry-cache": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
|
||||
@ -10285,6 +10300,12 @@
|
||||
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/nanopop": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmmirror.com/nanopop/-/nanopop-2.3.0.tgz",
|
||||
"integrity": "sha512-fzN+T2K7/Ah25XU02MJkPZ5q4Tj5FpjmIYq4rvoHX4yb16HzFdCO6JxFFn5Y/oBhQ8no8fUZavnyIv9/+xkBBw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/napi-postinstall": {
|
||||
"version": "0.2.4",
|
||||
"resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.2.4.tgz",
|
||||
@ -10525,6 +10546,17 @@
|
||||
"wrappy": "1"
|
||||
}
|
||||
},
|
||||
"node_modules/online-3d-viewer": {
|
||||
"version": "0.16.0",
|
||||
"resolved": "https://registry.npmmirror.com/online-3d-viewer/-/online-3d-viewer-0.16.0.tgz",
|
||||
"integrity": "sha512-Mcmo41TM3K+svlMDRH8ySKSY2e8s7Sssdb5U9LV3gkFKVWGGuS304Vk5gqxopAJbE72DpsC67Ve3YNtcAuROwQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@simonwep/pickr": "1.9.0",
|
||||
"fflate": "0.8.2",
|
||||
"three": "0.176.0"
|
||||
}
|
||||
},
|
||||
"node_modules/optionator": {
|
||||
"version": "0.9.4",
|
||||
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
|
||||
@ -13193,6 +13225,12 @@
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/three": {
|
||||
"version": "0.176.0",
|
||||
"resolved": "https://registry.npmmirror.com/three/-/three-0.176.0.tgz",
|
||||
"integrity": "sha512-PWRKYWQo23ojf9oZSlRGH8K09q7nRSWx6LY/HF/UUrMdYgN9i1e2OwJYHoQjwc6HF/4lvvYLC5YC1X8UJL2ZpA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/throttle-debounce": {
|
||||
"version": "5.0.2",
|
||||
"resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-5.0.2.tgz",
|
||||
|
@ -27,7 +27,6 @@
|
||||
"dropzone": "6.0.0-beta.2",
|
||||
"easymde": "2.20.0",
|
||||
"esbuild-loader": "4.3.0",
|
||||
"escape-goat": "4.0.0",
|
||||
"fast-glob": "3.3.3",
|
||||
"htmx.org": "2.0.6",
|
||||
"idiomorph": "0.7.3",
|
||||
@ -39,6 +38,7 @@
|
||||
"minimatch": "10.0.2",
|
||||
"monaco-editor": "0.52.2",
|
||||
"monaco-editor-webpack-plugin": "7.1.0",
|
||||
"online-3d-viewer": "0.16.0",
|
||||
"pdfobject": "2.3.1",
|
||||
"perfect-debounce": "1.0.0",
|
||||
"postcss": "8.5.5",
@ -467,7 +467,9 @@ func CommonRoutes() *web.Router {
g.MatchPath("HEAD", "/<group:*>/repodata/<filename>", rpm.CheckRepositoryFileExistence)
g.MatchPath("GET", "/<group:*>/repodata/<filename>", rpm.GetRepositoryFile)
g.MatchPath("PUT", "/<group:*>/upload", reqPackageAccess(perm.AccessModeWrite), rpm.UploadPackageFile)
// this URL pattern is only used internally in the RPM index, it is generated by us, the filename part is not really used (can be anything)
g.MatchPath("HEAD,GET", "/<group:*>/package/<name>/<version>/<architecture>", rpm.DownloadPackageFile)
g.MatchPath("HEAD,GET", "/<group:*>/package/<name>/<version>/<architecture>/<filename>", rpm.DownloadPackageFile)
g.MatchPath("DELETE", "/<group:*>/package/<name>/<version>/<architecture>", reqPackageAccess(perm.AccessModeWrite), rpm.DeletePackageFile)
}, reqPackageAccess(perm.AccessModeRead))
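Both download routes above resolve to the same handler; the trailing filename segment is accepted but ignored, as the comment notes. A small sketch of how such URLs could be built (the /api/packages/{owner}/rpm prefix is an assumption about where this group is mounted, not taken from this diff):

package main

import "fmt"

func main() {
	const base = "/api/packages/user2/rpm" // assumed mount point of the route group
	group, name, version, arch := "el9", "hello", "1.0-1", "x86_64"

	withoutFile := fmt.Sprintf("%s/%s/package/%s/%s/%s", base, group, name, version, arch)
	withFile := fmt.Sprintf("%s/%s-%s.%s.rpm", withoutFile, name, version, arch)

	fmt.Println(withoutFile) // served by rpm.DownloadPackageFile
	fmt.Println(withFile)    // same handler; the filename part can be anything
}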
@ -812,7 +812,8 @@ func GetContentsExt(ctx *context.APIContext) {
|
||||
// required: true
|
||||
// - name: filepath
|
||||
// in: path
|
||||
// description: path of the dir, file, symlink or submodule in the repo
|
||||
// description: path of the dir, file, symlink or submodule in the repo. Swagger requires path parameter to be "required",
|
||||
// you can leave it empty or pass a single dot (".") to get the root directory.
|
||||
// type: string
|
||||
// required: true
|
||||
// - name: ref
|
||||
@ -823,7 +824,8 @@ func GetContentsExt(ctx *context.APIContext) {
|
||||
// - name: includes
|
||||
// in: query
|
||||
// description: By default this API's response only contains file's metadata. Use comma-separated "includes" options to retrieve more fields.
|
||||
// Option "file_content" will try to retrieve the file content, option "lfs_metadata" will try to retrieve LFS metadata.
|
||||
// Option "file_content" will try to retrieve the file content, "lfs_metadata" will try to retrieve LFS metadata,
|
||||
// "commit_metadata" will try to retrieve commit metadata, and "commit_message" will try to retrieve commit message.
|
||||
// type: string
|
||||
// required: false
|
||||
// responses:
|
||||
@ -832,6 +834,9 @@ func GetContentsExt(ctx *context.APIContext) {
|
||||
// "404":
|
||||
// "$ref": "#/responses/notFound"
|
||||
|
||||
if treePath := ctx.PathParam("*"); treePath == "." || treePath == "/" {
|
||||
ctx.SetPathParam("*", "") // workaround for swagger, it requires path parameter to be "required", but we need to list root directory
|
||||
}
|
||||
opts := files_service.GetContentsOrListOptions{TreePath: ctx.PathParam("*")}
|
||||
for includeOpt := range strings.SplitSeq(ctx.FormString("includes"), ",") {
|
||||
if includeOpt == "" {
|
||||
@ -842,6 +847,10 @@ func GetContentsExt(ctx *context.APIContext) {
|
||||
opts.IncludeSingleFileContent = true
|
||||
case "lfs_metadata":
|
||||
opts.IncludeLfsMetadata = true
|
||||
case "commit_metadata":
|
||||
opts.IncludeCommitMetadata = true
|
||||
case "commit_message":
|
||||
opts.IncludeCommitMessage = true
|
||||
default:
|
||||
ctx.APIError(http.StatusBadRequest, fmt.Sprintf("unknown include option %q", includeOpt))
|
||||
return
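For context, a minimal sketch of how a client might call the extended contents endpoint with the new "includes" options; the host, token, and the exact mount point (/api/v1/repos/{owner}/{repo}/contents-ext/{filepath}) are assumptions here, not taken from this diff:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// assumed base URL and repository; adjust to your instance
	base := "https://gitea.example.com/api/v1/repos/user2/repo1/contents-ext/"
	filePath := "README.md"

	// ask for file content and commit metadata in one round trip;
	// an unknown option is rejected with "400 Bad Request"
	q := url.Values{}
	q.Set("includes", "file_content,commit_metadata")

	req, err := http.NewRequest(http.MethodGet, base+url.PathEscape(filePath)+"?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token YOUR_TOKEN") // assumed auth scheme

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}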
|
||||
@ -883,7 +892,11 @@ func GetContents(ctx *context.APIContext) {
|
||||
// "$ref": "#/responses/ContentsResponse"
|
||||
// "404":
|
||||
// "$ref": "#/responses/notFound"
|
||||
ret := getRepoContents(ctx, files_service.GetContentsOrListOptions{TreePath: ctx.PathParam("*"), IncludeSingleFileContent: true})
|
||||
ret := getRepoContents(ctx, files_service.GetContentsOrListOptions{
|
||||
TreePath: ctx.PathParam("*"),
|
||||
IncludeSingleFileContent: true,
|
||||
IncludeCommitMetadata: true,
|
||||
})
|
||||
if ctx.Written() {
|
||||
return
|
||||
}
@ -244,7 +244,7 @@ func editFileOpenExisting(ctx *context.Context) (prefetch []byte, dataRc io.Read
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
if fInfo.isLFSFile {
|
||||
if fInfo.isLFSFile() {
|
||||
lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
|
||||
if err != nil {
|
||||
_ = dataRc.Close()
|
||||
@ -298,7 +298,7 @@ func EditFile(ctx *context.Context) {
|
||||
ctx.Data["FileSize"] = fInfo.fileSize
|
||||
|
||||
// Only some file types are editable online as text.
|
||||
if fInfo.isLFSFile {
|
||||
if fInfo.isLFSFile() {
|
||||
ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
|
||||
} else if !fInfo.st.IsRepresentableAsText() {
|
||||
ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
@ -443,6 +443,10 @@ func ViewPullMergeBox(ctx *context.Context) {
|
||||
preparePullViewPullInfo(ctx, issue)
|
||||
preparePullViewReviewAndMerge(ctx, issue)
|
||||
ctx.Data["PullMergeBoxReloading"] = issue.PullRequest.IsChecking()
|
||||
|
||||
// TODO: it should use a dedicated struct to render the pull merge box, to make sure all data is prepared correctly
|
||||
ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
|
||||
ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
|
||||
ctx.HTML(http.StatusOK, tplPullMergeBox)
|
||||
}
@ -267,8 +267,10 @@ func LFSFileGet(ctx *context.Context) {
|
||||
buf = buf[:n]
|
||||
|
||||
st := typesniffer.DetectContentType(buf)
|
||||
// FIXME: IsPlainText is never set here, but the template uses it
|
||||
ctx.Data["IsTextFile"] = st.IsText()
|
||||
ctx.Data["FileSize"] = meta.Size
|
||||
// FIXME: the last field is the URL-base64-encoded filename, it should not be "direct"
|
||||
ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s/%s.git/info/lfs/objects/%s/%s", setting.AppURL, url.PathEscape(ctx.Repo.Repository.OwnerName), url.PathEscape(ctx.Repo.Repository.Name), url.PathEscape(meta.Oid), "direct")
|
||||
switch {
|
||||
case st.IsRepresentableAsText():
|
||||
@ -309,8 +311,6 @@ func LFSFileGet(ctx *context.Context) {
|
||||
}
|
||||
ctx.Data["LineNums"] = gotemplate.HTML(output.String())
|
||||
|
||||
case st.IsPDF():
|
||||
ctx.Data["IsPDFFile"] = true
|
||||
case st.IsVideo():
|
||||
ctx.Data["IsVideoFile"] = true
|
||||
case st.IsAudio():
@ -6,6 +6,7 @@ package repo
|
||||
import (
|
||||
"html/template"
|
||||
"net/http"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
pull_model "code.gitea.io/gitea/models/pull"
|
||||
@ -111,7 +112,7 @@ func transformDiffTreeForWeb(renderedIconPool *fileicon.RenderedIconPool, diffTr
|
||||
item := &WebDiffFileItem{FullName: file.HeadPath, DiffStatus: file.Status}
|
||||
item.IsViewed = filesViewedState[item.FullName] == pull_model.Viewed
|
||||
item.NameHash = git.HashFilePathForWebUI(item.FullName)
|
||||
item.FileIcon = fileicon.RenderEntryIconHTML(renderedIconPool, &fileicon.EntryInfo{FullName: file.HeadPath, EntryMode: file.HeadMode})
|
||||
item.FileIcon = fileicon.RenderEntryIconHTML(renderedIconPool, &fileicon.EntryInfo{BaseName: path.Base(file.HeadPath), EntryMode: file.HeadMode})
|
||||
|
||||
switch file.HeadMode {
|
||||
case git.EntryModeTree:
@ -12,6 +12,7 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@ -59,60 +60,63 @@ const (
|
||||
)
|
||||
|
||||
type fileInfo struct {
|
||||
isTextFile bool
|
||||
isLFSFile bool
|
||||
fileSize int64
|
||||
lfsMeta *lfs.Pointer
|
||||
st typesniffer.SniffedType
|
||||
fileSize int64
|
||||
lfsMeta *lfs.Pointer
|
||||
st typesniffer.SniffedType
|
||||
}
|
||||
|
||||
func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) ([]byte, io.ReadCloser, *fileInfo, error) {
|
||||
dataRc, err := blob.DataAsync()
|
||||
func (fi *fileInfo) isLFSFile() bool {
|
||||
return fi.lfsMeta != nil && fi.lfsMeta.Oid != ""
|
||||
}
|
||||
|
||||
func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) (buf []byte, dataRc io.ReadCloser, fi *fileInfo, err error) {
|
||||
dataRc, err = blob.DataAsync()
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
|
||||
buf := make([]byte, 1024)
|
||||
const prefetchSize = lfs.MetaFileMaxSize
|
||||
|
||||
buf = make([]byte, prefetchSize)
|
||||
n, _ := util.ReadAtMost(dataRc, buf)
|
||||
buf = buf[:n]
|
||||
|
||||
st := typesniffer.DetectContentType(buf)
|
||||
isTextFile := st.IsText()
|
||||
fi = &fileInfo{fileSize: blob.Size(), st: typesniffer.DetectContentType(buf)}
|
||||
|
||||
// FIXME: what happens when README file is an image?
|
||||
if !isTextFile || !setting.LFS.StartServer {
|
||||
return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
|
||||
if !fi.st.IsText() || !setting.LFS.StartServer {
|
||||
return buf, dataRc, fi, nil
|
||||
}
|
||||
|
||||
pointer, _ := lfs.ReadPointerFromBuffer(buf)
|
||||
if !pointer.IsValid() { // fallback to plain file
|
||||
return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
|
||||
if !pointer.IsValid() { // fallback to a plain file
|
||||
return buf, dataRc, fi, nil
|
||||
}
|
||||
|
||||
meta, err := git_model.GetLFSMetaObjectByOid(ctx, repoID, pointer.Oid)
|
||||
if err != nil { // fallback to plain file
|
||||
if err != nil { // fallback to a plain file
|
||||
log.Warn("Unable to access LFS pointer %s in repo %d: %v", pointer.Oid, repoID, err)
|
||||
return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
|
||||
return buf, dataRc, fi, nil
|
||||
}
|
||||
|
||||
dataRc.Close()
|
||||
|
||||
// close the old dataRc and open the real LFS target
|
||||
_ = dataRc.Close()
|
||||
dataRc, err = lfs.ReadMetaObject(pointer)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
|
||||
buf = make([]byte, 1024)
|
||||
buf = make([]byte, prefetchSize)
|
||||
n, err = util.ReadAtMost(dataRc, buf)
|
||||
if err != nil {
|
||||
dataRc.Close()
|
||||
return nil, nil, nil, err
|
||||
_ = dataRc.Close()
|
||||
return nil, nil, fi, err
|
||||
}
|
||||
buf = buf[:n]
|
||||
|
||||
st = typesniffer.DetectContentType(buf)
|
||||
|
||||
return buf, dataRc, &fileInfo{st.IsText(), true, meta.Size, &meta.Pointer, st}, nil
|
||||
fi.st = typesniffer.DetectContentType(buf)
|
||||
fi.fileSize = blob.Size()
|
||||
fi.lfsMeta = &meta.Pointer
|
||||
return buf, dataRc, fi, nil
|
||||
}
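As a rough illustration of the prefetch logic above: a Git LFS pointer is a small text blob, so reading only the first few hundred bytes is enough to decide whether the blob is a pointer that should be resolved to the real object. A standalone sketch (the pointer format is from the LFS spec; the size limit constant here is an assumption, not Gitea's lfs.MetaFileMaxSize):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// a plausible upper bound for a pointer file; the real code uses lfs.MetaFileMaxSize
const pointerMaxSize = 1024

type pointer struct {
	Oid  string
	Size int64
}

// parsePointer loosely mimics "is this prefix a valid LFS pointer?"
func parsePointer(prefix []byte) (pointer, bool) {
	var p pointer
	text := string(prefix)
	if !strings.HasPrefix(text, "version https://git-lfs.github.com/spec/v1") {
		return p, false
	}
	for _, line := range strings.Split(text, "\n") {
		if oid, ok := strings.CutPrefix(line, "oid sha256:"); ok {
			p.Oid = strings.TrimSpace(oid)
		}
		if sz, ok := strings.CutPrefix(line, "size "); ok {
			p.Size, _ = strconv.ParseInt(strings.TrimSpace(sz), 10, 64)
		}
	}
	return p, p.Oid != "" && p.Size > 0
}

func main() {
	blobPrefix := []byte("version https://git-lfs.github.com/spec/v1\noid sha256:4d7a2146ee5f5a0f2f0a3e8a6c1f3f3b9d2b8d0c4a1e9f1b2c3d4e5f60718293\nsize 12345\n")
	n := len(blobPrefix)
	if n > pointerMaxSize {
		n = pointerMaxSize
	}
	if p, ok := parsePointer(blobPrefix[:n]); ok {
		fmt.Printf("LFS pointer: oid=%s size=%d\n", p.Oid, p.Size)
	} else {
		fmt.Println("plain blob, serve as-is")
	}
}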
|
||||
|
||||
func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
|
||||
@ -257,7 +261,9 @@ func prepareDirectoryFileIcons(ctx *context.Context, files []git.CommitInfo) {
|
||||
renderedIconPool := fileicon.NewRenderedIconPool()
|
||||
fileIcons := map[string]template.HTML{}
|
||||
for _, f := range files {
|
||||
fileIcons[f.Entry.Name()] = fileicon.RenderEntryIconHTML(renderedIconPool, fileicon.EntryInfoFromGitTreeEntry(f.Entry))
|
||||
fullPath := path.Join(ctx.Repo.TreePath, f.Entry.Name())
|
||||
entryInfo := fileicon.EntryInfoFromGitTreeEntry(ctx.Repo.Commit, fullPath, f.Entry)
|
||||
fileIcons[f.Entry.Name()] = fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo)
|
||||
}
|
||||
fileIcons[".."] = fileicon.RenderEntryIconHTML(renderedIconPool, fileicon.EntryInfoFolder())
|
||||
ctx.Data["FileIcons"] = fileIcons
@ -23,6 +23,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/markup"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/typesniffer"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/services/context"
|
||||
issue_service "code.gitea.io/gitea/services/issue"
|
||||
@ -40,7 +41,128 @@ func prepareLatestCommitInfo(ctx *context.Context) bool {
|
||||
return loadLatestCommitData(ctx, commit)
|
||||
}
|
||||
|
||||
func prepareToRenderFile(ctx *context.Context, entry *git.TreeEntry) {
|
||||
func prepareFileViewLfsAttrs(ctx *context.Context) (*attribute.Attributes, bool) {
|
||||
attrsMap, err := attribute.CheckAttributes(ctx, ctx.Repo.GitRepo, ctx.Repo.CommitID, attribute.CheckAttributeOpts{
|
||||
Filenames: []string{ctx.Repo.TreePath},
|
||||
Attributes: []string{attribute.LinguistGenerated, attribute.LinguistVendored, attribute.LinguistLanguage, attribute.GitlabLanguage},
|
||||
})
|
||||
if err != nil {
|
||||
ctx.ServerError("attribute.CheckAttributes", err)
|
||||
return nil, false
|
||||
}
|
||||
attrs := attrsMap[ctx.Repo.TreePath]
|
||||
if attrs == nil {
|
||||
// this case shouldn't happen, just in case.
|
||||
setting.PanicInDevOrTesting("no attributes found for %s", ctx.Repo.TreePath)
|
||||
attrs = attribute.NewAttributes()
|
||||
}
|
||||
ctx.Data["IsVendored"], ctx.Data["IsGenerated"] = attrs.GetVendored().Value(), attrs.GetGenerated().Value()
|
||||
return attrs, true
|
||||
}
|
||||
|
||||
func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte, utf8Reader io.Reader) bool {
|
||||
markupType := markup.DetectMarkupTypeByFileName(filename)
|
||||
if markupType == "" {
|
||||
markupType = markup.DetectRendererType(filename, sniffedType, prefetchBuf)
|
||||
}
|
||||
if markupType == "" {
|
||||
return false
|
||||
}
|
||||
|
||||
ctx.Data["HasSourceRenderedToggle"] = true
|
||||
|
||||
if ctx.FormString("display") == "source" {
|
||||
return false
|
||||
}
|
||||
|
||||
ctx.Data["MarkupType"] = markupType
|
||||
metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx)
|
||||
metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL()
|
||||
rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
|
||||
CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
|
||||
CurrentTreePath: path.Dir(ctx.Repo.TreePath),
|
||||
}).
|
||||
WithMarkupType(markupType).
|
||||
WithRelativePath(ctx.Repo.TreePath).
|
||||
WithMetas(metas)
|
||||
|
||||
var err error
|
||||
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, utf8Reader)
|
||||
if err != nil {
|
||||
ctx.ServerError("Render", err)
|
||||
return true
|
||||
}
|
||||
// to prevent iframe from loading third-party url
|
||||
ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
|
||||
return true
|
||||
}
|
||||
|
||||
func handleFileViewRenderSource(ctx *context.Context, filename string, attrs *attribute.Attributes, fInfo *fileInfo, utf8Reader io.Reader) bool {
|
||||
if ctx.FormString("display") == "rendered" || !fInfo.st.IsRepresentableAsText() {
|
||||
return false
|
||||
}
|
||||
|
||||
if !fInfo.st.IsText() {
|
||||
if ctx.FormString("display") == "" {
|
||||
// not text but representable as text, e.g. SVG
|
||||
// since no "display" parameter is specified, let the other renderers handle it
|
||||
return false
|
||||
}
|
||||
ctx.Data["HasSourceRenderedToggle"] = true
|
||||
}
|
||||
|
||||
buf, _ := io.ReadAll(utf8Reader)
|
||||
// The Open Group Base Specification: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html
|
||||
// empty: 0 lines; "a": 1 incomplete-line; "a\n": 1 line; "a\nb": 1 line, 1 incomplete-line;
|
||||
// Gitea uses the definition (like most modern editors):
|
||||
// empty: 0 lines; "a": 1 line; "a\n": 2 lines; "a\nb": 2 lines;
|
||||
// When rendering, the last empty line is not rendered in UI, while the line-number is still counted, to tell users that the file contains a trailing EOL.
|
||||
// To make the UI more consistent, it could use an icon mark to indicate that there is no trailing EOL, and show line-number as the rendered lines.
|
||||
// This NumLines is only used for the display on the UI: "xxx lines"
|
||||
if len(buf) == 0 {
|
||||
ctx.Data["NumLines"] = 0
|
||||
} else {
|
||||
ctx.Data["NumLines"] = bytes.Count(buf, []byte{'\n'}) + 1
|
||||
}
|
||||
|
||||
language := attrs.GetLanguage().Value()
|
||||
fileContent, lexerName, err := highlight.File(filename, language, buf)
|
||||
ctx.Data["LexerName"] = lexerName
|
||||
if err != nil {
|
||||
log.Error("highlight.File failed, fallback to plain text: %v", err)
|
||||
fileContent = highlight.PlainText(buf)
|
||||
}
|
||||
status := &charset.EscapeStatus{}
|
||||
statuses := make([]*charset.EscapeStatus, len(fileContent))
|
||||
for i, line := range fileContent {
|
||||
statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale)
|
||||
status = status.Or(statuses[i])
|
||||
}
|
||||
ctx.Data["EscapeStatus"] = status
|
||||
ctx.Data["FileContent"] = fileContent
|
||||
ctx.Data["LineEscapeStatus"] = statuses
|
||||
return true
|
||||
}
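A small illustration of the line-counting convention described in the comment above (empty input is 0 lines, otherwise count '\n' and add one), independent of the handler code:

package main

import (
	"bytes"
	"fmt"
)

// numLines follows the "editor" definition used above:
// "" -> 0, "a" -> 1, "a\n" -> 2, "a\nb" -> 2
func numLines(buf []byte) int {
	if len(buf) == 0 {
		return 0
	}
	return bytes.Count(buf, []byte{'\n'}) + 1
}

func main() {
	for _, s := range []string{"", "a", "a\n", "a\nb"} {
		fmt.Printf("%q -> %d\n", s, numLines([]byte(s)))
	}
}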
|
||||
|
||||
func handleFileViewRenderImage(ctx *context.Context, fInfo *fileInfo, prefetchBuf []byte) bool {
|
||||
if !fInfo.st.IsImage() {
|
||||
return false
|
||||
}
|
||||
if fInfo.st.IsSvgImage() && !setting.UI.SVG.Enabled {
|
||||
return false
|
||||
}
|
||||
if fInfo.st.IsSvgImage() {
|
||||
ctx.Data["HasSourceRenderedToggle"] = true
|
||||
} else {
|
||||
img, _, err := image.DecodeConfig(bytes.NewReader(prefetchBuf))
|
||||
if err == nil { // ignore the error for the formats that are not supported by image.DecodeConfig
|
||||
ctx.Data["ImageSize"] = fmt.Sprintf("%dx%dpx", img.Width, img.Height)
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
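Worth noting for the DecodeConfig call above: image.DecodeConfig only recognizes formats whose decoders are registered (usually via blank imports), which is why the error is simply ignored for anything else. A hedged standalone example:

package main

import (
	"bytes"
	"fmt"
	"image"
	"image/png"

	// registering decoders is what makes DecodeConfig recognize a format
	_ "image/gif"
	_ "image/jpeg"
)

func main() {
	// build a tiny 2x3 PNG in memory so the example is self-contained
	var buf bytes.Buffer
	if err := png.Encode(&buf, image.NewRGBA(image.Rect(0, 0, 2, 3))); err != nil {
		panic(err)
	}

	cfg, format, err := image.DecodeConfig(bytes.NewReader(buf.Bytes()))
	if err != nil {
		fmt.Println("unsupported or corrupt image, skip the size info:", err)
		return
	}
	fmt.Printf("%s %dx%dpx\n", format, cfg.Width, cfg.Height) // png 2x3px
}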
|
||||
|
||||
func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {
|
||||
ctx.Data["IsViewFile"] = true
|
||||
ctx.Data["HideRepoInfo"] = true
|
||||
|
||||
@ -86,11 +208,8 @@ func prepareToRenderFile(ctx *context.Context, entry *git.TreeEntry) {
|
||||
}
|
||||
}
|
||||
|
||||
isDisplayingSource := ctx.FormString("display") == "source"
|
||||
isDisplayingRendered := !isDisplayingSource
|
||||
|
||||
// Don't call any other repository functions that depend on git.Repository until dataRc is closed, to
|
||||
// avoid create unnecessary temporary cat file.
|
||||
// avoid creating an unnecessary temporary cat file.
|
||||
buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, blob)
|
||||
if err != nil {
|
||||
ctx.ServerError("getFileReader", err)
|
||||
@ -98,207 +217,62 @@ func prepareToRenderFile(ctx *context.Context, entry *git.TreeEntry) {
|
||||
}
|
||||
defer dataRc.Close()
|
||||
|
||||
if fInfo.isLFSFile {
|
||||
if fInfo.isLFSFile() {
|
||||
ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/media/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
|
||||
}
|
||||
|
||||
isRepresentableAsText := fInfo.st.IsRepresentableAsText()
|
||||
if !isRepresentableAsText {
|
||||
// If we can't show plain text, always try to render.
|
||||
isDisplayingSource = false
|
||||
isDisplayingRendered = true
|
||||
if !prepareFileViewEditorButtons(ctx) {
|
||||
return
|
||||
}
|
||||
ctx.Data["IsLFSFile"] = fInfo.isLFSFile
|
||||
|
||||
ctx.Data["IsLFSFile"] = fInfo.isLFSFile()
|
||||
ctx.Data["FileSize"] = fInfo.fileSize
|
||||
ctx.Data["IsTextFile"] = fInfo.isTextFile
|
||||
ctx.Data["IsRepresentableAsText"] = isRepresentableAsText
|
||||
ctx.Data["IsDisplayingSource"] = isDisplayingSource
|
||||
ctx.Data["IsDisplayingRendered"] = isDisplayingRendered
|
||||
ctx.Data["IsRepresentableAsText"] = fInfo.st.IsRepresentableAsText()
|
||||
ctx.Data["IsExecutable"] = entry.IsExecutable()
|
||||
ctx.Data["CanCopyContent"] = fInfo.st.IsRepresentableAsText() || fInfo.st.IsImage()
|
||||
|
||||
isTextSource := fInfo.isTextFile || isDisplayingSource
|
||||
ctx.Data["IsTextSource"] = isTextSource
|
||||
if isTextSource {
|
||||
ctx.Data["CanCopyContent"] = true
|
||||
}
|
||||
|
||||
// Check LFS Lock
|
||||
lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
|
||||
ctx.Data["LFSLock"] = lfsLock
|
||||
if err != nil {
|
||||
ctx.ServerError("GetTreePathLock", err)
|
||||
attrs, ok := prepareFileViewLfsAttrs(ctx)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if lfsLock != nil {
|
||||
u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetTreePathLock", err)
|
||||
return
|
||||
}
|
||||
ctx.Data["LFSLockOwner"] = u.Name
|
||||
ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
|
||||
ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
|
||||
}
|
||||
|
||||
// read all needed attributes which will be used later
|
||||
// there should be no performance difference between reading 2 or 4 attributes here
|
||||
attrsMap, err := attribute.CheckAttributes(ctx, ctx.Repo.GitRepo, ctx.Repo.CommitID, attribute.CheckAttributeOpts{
|
||||
Filenames: []string{ctx.Repo.TreePath},
|
||||
Attributes: []string{attribute.LinguistGenerated, attribute.LinguistVendored, attribute.LinguistLanguage, attribute.GitlabLanguage},
|
||||
})
|
||||
if err != nil {
|
||||
ctx.ServerError("attribute.CheckAttributes", err)
|
||||
return
|
||||
}
|
||||
attrs := attrsMap[ctx.Repo.TreePath]
|
||||
if attrs == nil {
|
||||
// this case shouldn't happen, just in case.
|
||||
setting.PanicInDevOrTesting("no attributes found for %s", ctx.Repo.TreePath)
|
||||
attrs = attribute.NewAttributes()
|
||||
}
|
||||
// TODO: in the future maybe we need more accurate flags, for example:
|
||||
// * IsRepresentableAsText: some files are text, some are not
|
||||
// * IsRenderableXxx: some files are rendered by backend "markup" engine, some are rendered by frontend (pdf, 3d)
|
||||
// * DefaultViewMode: when there is no "display" query parameter, which view mode should be used by default, source or rendered
|
||||
|
||||
utf8Reader := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
|
||||
switch {
|
||||
case isRepresentableAsText:
|
||||
if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
|
||||
ctx.Data["IsFileTooLarge"] = true
|
||||
break
|
||||
}
|
||||
|
||||
if fInfo.st.IsSvgImage() {
|
||||
ctx.Data["IsImageFile"] = true
|
||||
ctx.Data["CanCopyContent"] = true
|
||||
ctx.Data["HasSourceRenderedToggle"] = true
|
||||
}
|
||||
|
||||
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
|
||||
|
||||
shouldRenderSource := ctx.FormString("display") == "source"
|
||||
readmeExist := util.IsReadmeFileName(blob.Name())
|
||||
ctx.Data["ReadmeExist"] = readmeExist
|
||||
|
||||
markupType := markup.DetectMarkupTypeByFileName(blob.Name())
|
||||
if markupType == "" {
|
||||
markupType = markup.DetectRendererType(blob.Name(), bytes.NewReader(buf))
|
||||
}
|
||||
if markupType != "" {
|
||||
ctx.Data["HasSourceRenderedToggle"] = true
|
||||
}
|
||||
if markupType != "" && !shouldRenderSource {
|
||||
ctx.Data["IsMarkup"] = true
|
||||
ctx.Data["MarkupType"] = markupType
|
||||
metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx)
|
||||
metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL()
|
||||
rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
|
||||
CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
|
||||
CurrentTreePath: path.Dir(ctx.Repo.TreePath),
|
||||
}).
|
||||
WithMarkupType(markupType).
|
||||
WithRelativePath(ctx.Repo.TreePath).
|
||||
WithMetas(metas)
|
||||
|
||||
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
|
||||
if err != nil {
|
||||
ctx.ServerError("Render", err)
|
||||
return
|
||||
}
|
||||
// to prevent iframe load third-party url
|
||||
ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
|
||||
} else {
|
||||
buf, _ := io.ReadAll(rd)
|
||||
|
||||
// The Open Group Base Specification: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html
|
||||
// empty: 0 lines; "a": 1 incomplete-line; "a\n": 1 line; "a\nb": 1 line, 1 incomplete-line;
|
||||
// Gitea uses the definition (like most modern editors):
|
||||
// empty: 0 lines; "a": 1 line; "a\n": 2 lines; "a\nb": 2 lines;
|
||||
// When rendering, the last empty line is not rendered in UI, while the line-number is still counted, to tell users that the file contains a trailing EOL.
|
||||
// To make the UI more consistent, it could use an icon mark to indicate that there is no trailing EOL, and show line-number as the rendered lines.
|
||||
// This NumLines is only used for the display on the UI: "xxx lines"
|
||||
if len(buf) == 0 {
|
||||
ctx.Data["NumLines"] = 0
|
||||
} else {
|
||||
ctx.Data["NumLines"] = bytes.Count(buf, []byte{'\n'}) + 1
|
||||
}
|
||||
|
||||
language := attrs.GetLanguage().Value()
|
||||
fileContent, lexerName, err := highlight.File(blob.Name(), language, buf)
|
||||
ctx.Data["LexerName"] = lexerName
|
||||
if err != nil {
|
||||
log.Error("highlight.File failed, fallback to plain text: %v", err)
|
||||
fileContent = highlight.PlainText(buf)
|
||||
}
|
||||
status := &charset.EscapeStatus{}
|
||||
statuses := make([]*charset.EscapeStatus, len(fileContent))
|
||||
for i, line := range fileContent {
|
||||
statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale)
|
||||
status = status.Or(statuses[i])
|
||||
}
|
||||
ctx.Data["EscapeStatus"] = status
|
||||
ctx.Data["FileContent"] = fileContent
|
||||
ctx.Data["LineEscapeStatus"] = statuses
|
||||
}
|
||||
|
||||
case fInfo.st.IsPDF():
|
||||
ctx.Data["IsPDFFile"] = true
|
||||
case fInfo.fileSize >= setting.UI.MaxDisplayFileSize:
|
||||
ctx.Data["IsFileTooLarge"] = true
|
||||
case handleFileViewRenderMarkup(ctx, entry.Name(), fInfo.st, buf, utf8Reader):
|
||||
// it also sets ctx.Data["FileContent"] and more
|
||||
ctx.Data["IsMarkup"] = true
|
||||
case handleFileViewRenderSource(ctx, entry.Name(), attrs, fInfo, utf8Reader):
|
||||
// it also sets ctx.Data["FileContent"] and more
|
||||
ctx.Data["IsDisplayingSource"] = true
|
||||
case handleFileViewRenderImage(ctx, fInfo, buf):
|
||||
ctx.Data["IsImageFile"] = true
|
||||
case fInfo.st.IsVideo():
|
||||
ctx.Data["IsVideoFile"] = true
|
||||
case fInfo.st.IsAudio():
|
||||
ctx.Data["IsAudioFile"] = true
|
||||
case fInfo.st.IsImage() && (setting.UI.SVG.Enabled || !fInfo.st.IsSvgImage()):
|
||||
ctx.Data["IsImageFile"] = true
|
||||
ctx.Data["CanCopyContent"] = true
|
||||
default:
|
||||
if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
|
||||
ctx.Data["IsFileTooLarge"] = true
|
||||
break
|
||||
}
|
||||
|
||||
// TODO: this logic duplicates with "isRepresentableAsText=true", it is not the same as "LFSFileGet" in "lfs.go"
|
||||
// It is used by "external renders", markupRender will execute external programs to get rendered content.
|
||||
if markupType := markup.DetectMarkupTypeByFileName(blob.Name()); markupType != "" {
|
||||
rd := io.MultiReader(bytes.NewReader(buf), dataRc)
|
||||
ctx.Data["IsMarkup"] = true
|
||||
ctx.Data["MarkupType"] = markupType
|
||||
|
||||
rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
|
||||
CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
|
||||
CurrentTreePath: path.Dir(ctx.Repo.TreePath),
|
||||
}).
|
||||
WithMarkupType(markupType).
|
||||
WithRelativePath(ctx.Repo.TreePath)
|
||||
|
||||
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
|
||||
if err != nil {
|
||||
ctx.ServerError("Render", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
// unable to render anything, show the "view raw" or let frontend handle it
|
||||
}
|
||||
|
||||
ctx.Data["IsVendored"], ctx.Data["IsGenerated"] = attrs.GetVendored().Value(), attrs.GetGenerated().Value()
|
||||
|
||||
if fInfo.st.IsImage() && !fInfo.st.IsSvgImage() {
|
||||
img, _, err := image.DecodeConfig(bytes.NewReader(buf))
|
||||
if err == nil {
|
||||
// There are image formats Go can't decode
|
||||
// Instead of throwing an error in that case, we show the size only when we can decode
|
||||
ctx.Data["ImageSize"] = fmt.Sprintf("%dx%dpx", img.Width, img.Height)
|
||||
}
|
||||
}
|
||||
|
||||
prepareToRenderButtons(ctx, lfsLock)
|
||||
}
|
||||
|
||||
func prepareToRenderButtons(ctx *context.Context, lfsLock *git_model.LFSLock) {
|
||||
func prepareFileViewEditorButtons(ctx *context.Context) bool {
|
||||
// archived or mirror repository, the buttons should not be shown
|
||||
if !ctx.Repo.Repository.CanEnableEditor() {
|
||||
return
|
||||
return true
|
||||
}
|
||||
|
||||
// The buttons should not be shown if it's not a branch
|
||||
if !ctx.Repo.RefFullName.IsBranch() {
|
||||
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
|
||||
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
|
||||
return
|
||||
return true
|
||||
}
|
||||
|
||||
if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
|
||||
@ -306,7 +280,24 @@ func prepareToRenderButtons(ctx *context.Context, lfsLock *git_model.LFSLock) {
|
||||
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
|
||||
ctx.Data["CanDeleteFile"] = true
|
||||
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
|
||||
return
|
||||
return true
|
||||
}
|
||||
|
||||
lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
|
||||
ctx.Data["LFSLock"] = lfsLock
|
||||
if err != nil {
|
||||
ctx.ServerError("GetTreePathLock", err)
|
||||
return false
|
||||
}
|
||||
if lfsLock != nil {
|
||||
u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
|
||||
if err != nil {
|
||||
ctx.ServerError("GetTreePathLock", err)
|
||||
return false
|
||||
}
|
||||
ctx.Data["LFSLockOwner"] = u.Name
|
||||
ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
|
||||
ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
|
||||
}
|
||||
|
||||
// it's an LFS file and the user is not the owner of the lock
|
||||
@ -315,4 +306,5 @@ func prepareToRenderButtons(ctx *context.Context, lfsLock *git_model.LFSLock) {
|
||||
ctx.Data["EditFileTooltip"] = util.Iif(isLFSLocked, ctx.Tr("repo.editor.this_file_locked"), ctx.Tr("repo.editor.edit_this_file"))
|
||||
ctx.Data["CanDeleteFile"] = !isLFSLocked
|
||||
ctx.Data["DeleteFileTooltip"] = util.Iif(isLFSLocked, ctx.Tr("repo.editor.this_file_locked"), ctx.Tr("repo.editor.delete_this_file"))
|
||||
return true
|
||||
}
@ -143,7 +143,7 @@ func prepareToRenderDirectory(ctx *context.Context) {
|
||||
ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefFullName.ShortName())
|
||||
}
|
||||
|
||||
subfolder, readmeFile, err := findReadmeFileInEntries(ctx, entries, true)
|
||||
subfolder, readmeFile, err := findReadmeFileInEntries(ctx, ctx.Repo.TreePath, entries, true)
|
||||
if err != nil {
|
||||
ctx.ServerError("findReadmeFileInEntries", err)
|
||||
return
|
||||
@ -339,7 +339,7 @@ func prepareToRenderDirOrFile(entry *git.TreeEntry) func(ctx *context.Context) {
|
||||
if entry.IsDir() {
|
||||
prepareToRenderDirectory(ctx)
|
||||
} else {
|
||||
prepareToRenderFile(ctx, entry)
|
||||
prepareFileView(ctx, entry)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -377,8 +377,8 @@ func prepareHomeTreeSideBarSwitch(ctx *context.Context) {
|
||||
|
||||
func redirectSrcToRaw(ctx *context.Context) bool {
|
||||
// GitHub redirects a tree path with "?raw=1" to the raw path
|
||||
// It is useful to embed some raw contents into markdown files,
|
||||
// then viewing the markdown in "src" path could embed the raw content correctly.
|
||||
// It is useful to embed some raw contents into Markdown files,
|
||||
// then viewing the Markdown in "src" path could embed the raw content correctly.
|
||||
if ctx.Repo.TreePath != "" && ctx.FormBool("raw") {
|
||||
ctx.Redirect(ctx.Repo.RepoLink + "/raw/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath))
|
||||
return true
|
||||
@ -386,6 +386,20 @@ func redirectSrcToRaw(ctx *context.Context) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func redirectFollowSymlink(ctx *context.Context, treePathEntry *git.TreeEntry) bool {
|
||||
if ctx.Repo.TreePath == "" || !ctx.FormBool("follow_symlink") {
|
||||
return false
|
||||
}
|
||||
if treePathEntry.IsLink() {
|
||||
if res, err := git.EntryFollowLinks(ctx.Repo.Commit, ctx.Repo.TreePath, treePathEntry); err == nil {
|
||||
redirect := ctx.Repo.RepoLink + "/src/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(res.TargetFullPath) + "?" + ctx.Req.URL.RawQuery
|
||||
ctx.Redirect(redirect)
|
||||
return true
|
||||
} // else: don't handle the links we cannot resolve, so ignore the error
|
||||
}
|
||||
return false
|
||||
}
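To sketch what following a symlink entry against a commit tree involves (EntryFollowLinks above resolves the link target and returns its full path): the link target is a path relative to the link's directory, so resolution is roughly a path join plus a lookup, with a hop limit to avoid cycles. A simplified, self-contained sketch over an in-memory map (the real implementation works against the Git tree of ctx.Repo.Commit):

package main

import (
	"errors"
	"fmt"
	"path"
)

type entry struct {
	isLink bool
	target string // for links: relative target path
}

// tree maps full paths to entries; it stands in for a Git commit tree here
var tree = map[string]entry{
	"docs/README.md": {},
	"README.md":      {isLink: true, target: "docs/README.md"},
}

func followLinks(fullPath string, maxHops int) (string, error) {
	for i := 0; i < maxHops; i++ {
		e, ok := tree[fullPath]
		if !ok {
			return "", errors.New("broken link: " + fullPath)
		}
		if !e.isLink {
			return fullPath, nil
		}
		// link targets are relative to the directory containing the link
		fullPath = path.Join(path.Dir(fullPath), e.target)
	}
	return "", errors.New("too many link hops")
}

func main() {
	resolved, err := followLinks("README.md", 5)
	fmt.Println(resolved, err) // docs/README.md <nil>
}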
|
||||
|
||||
// Home render repository home page
|
||||
func Home(ctx *context.Context) {
|
||||
if handleRepoHomeFeed(ctx) {
|
||||
@ -394,6 +408,7 @@ func Home(ctx *context.Context) {
|
||||
if redirectSrcToRaw(ctx) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check whether the repo is viewable: not in migration, and the code unit should be enabled
|
||||
// Ideally the "feed" logic should be after this, but old code did so, so keep it as-is.
|
||||
checkHomeCodeViewable(ctx)
|
||||
@ -424,6 +439,10 @@ func Home(ctx *context.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
if redirectFollowSymlink(ctx, entry) {
|
||||
return
|
||||
}
|
||||
|
||||
// prepare the tree path
|
||||
var treeNames, paths []string
|
||||
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.RefTypeNameSubURL()
|
@ -32,15 +32,7 @@ import (
|
||||
// entries == ctx.Repo.Commit.SubTree(ctx.Repo.TreePath).ListEntries()
|
||||
//
|
||||
// FIXME: There has to be a more efficient way of doing this
|
||||
func findReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, tryWellKnownDirs bool) (string, *git.TreeEntry, error) {
|
||||
// Create a list of extensions in priority order
|
||||
// 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
|
||||
// 2. Txt files - e.g. README.txt
|
||||
// 3. No extension - e.g. README
|
||||
exts := append(localizedExtensions(".md", ctx.Locale.Language()), ".txt", "") // sorted by priority
|
||||
extCount := len(exts)
|
||||
readmeFiles := make([]*git.TreeEntry, extCount+1)
|
||||
|
||||
func findReadmeFileInEntries(ctx *context.Context, parentDir string, entries []*git.TreeEntry, tryWellKnownDirs bool) (string, *git.TreeEntry, error) {
|
||||
docsEntries := make([]*git.TreeEntry, 3) // (one of docs/, .gitea/ or .github/)
|
||||
for _, entry := range entries {
|
||||
if tryWellKnownDirs && entry.IsDir() {
|
||||
@ -62,16 +54,23 @@ func findReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, try
|
||||
docsEntries[2] = entry
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Create a list of extensions in priority order
|
||||
// 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
|
||||
// 2. Txt files - e.g. README.txt
|
||||
// 3. No extension - e.g. README
|
||||
exts := append(localizedExtensions(".md", ctx.Locale.Language()), ".txt", "") // sorted by priority
|
||||
extCount := len(exts)
|
||||
readmeFiles := make([]*git.TreeEntry, extCount+1)
|
||||
for _, entry := range entries {
|
||||
if i, ok := util.IsReadmeFileExtension(entry.Name(), exts...); ok {
|
||||
log.Debug("Potential readme file: %s", entry.Name())
|
||||
fullPath := path.Join(parentDir, entry.Name())
|
||||
if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].Name(), entry.Blob().Name()) {
|
||||
if entry.IsLink() {
|
||||
target, err := entry.FollowLinks()
|
||||
if err != nil && !git.IsErrSymlinkUnresolved(err) {
|
||||
return "", nil, err
|
||||
} else if target != nil && (target.IsExecutable() || target.IsRegular()) {
|
||||
res, err := git.EntryFollowLinks(ctx.Repo.Commit, fullPath, entry)
|
||||
if err == nil && (res.TargetEntry.IsExecutable() || res.TargetEntry.IsRegular()) {
|
||||
readmeFiles[i] = entry
|
||||
}
|
||||
} else {
|
||||
@ -80,6 +79,7 @@ func findReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, try
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var readmeFile *git.TreeEntry
|
||||
for _, f := range readmeFiles {
|
||||
if f != nil {
|
||||
@ -103,7 +103,7 @@ func findReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, try
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
subfolder, readmeFile, err := findReadmeFileInEntries(ctx, childEntries, false)
|
||||
subfolder, readmeFile, err := findReadmeFileInEntries(ctx, parentDir, childEntries, false)
|
||||
if err != nil && !git.IsErrNotExist(err) {
|
||||
return "", nil, err
|
||||
}
|
||||
@ -139,46 +139,52 @@ func localizedExtensions(ext, languageCode string) (localizedExts []string) {
|
||||
}
|
||||
|
||||
func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFile *git.TreeEntry) {
|
||||
target := readmeFile
|
||||
if readmeFile != nil && readmeFile.IsLink() {
|
||||
target, _ = readmeFile.FollowLinks()
|
||||
}
|
||||
if target == nil {
|
||||
// if findReadmeFile() failed and/or gave us a broken symlink (which it shouldn't)
|
||||
// simply skip rendering the README
|
||||
if readmeFile == nil {
|
||||
return
|
||||
}
|
||||
|
||||
readmeFullPath := path.Join(ctx.Repo.TreePath, subfolder, readmeFile.Name())
|
||||
readmeTargetEntry := readmeFile
|
||||
if readmeFile.IsLink() {
|
||||
if res, err := git.EntryFollowLinks(ctx.Repo.Commit, readmeFullPath, readmeFile); err == nil {
|
||||
readmeTargetEntry = res.TargetEntry
|
||||
} else {
|
||||
readmeTargetEntry = nil // if we cannot resolve the symlink, we cannot render the readme, ignore the error
|
||||
}
|
||||
}
|
||||
if readmeTargetEntry == nil {
|
||||
return // if no valid README entry found, skip rendering the README
|
||||
}
|
||||
|
||||
ctx.Data["RawFileLink"] = ""
|
||||
ctx.Data["ReadmeInList"] = path.Join(subfolder, readmeFile.Name()) // the relative path to the readme file to the current tree path
|
||||
ctx.Data["ReadmeExist"] = true
|
||||
ctx.Data["FileIsSymlink"] = readmeFile.IsLink()
|
||||
|
||||
buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, target.Blob())
|
||||
buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, readmeTargetEntry.Blob())
|
||||
if err != nil {
|
||||
ctx.ServerError("getFileReader", err)
|
||||
return
|
||||
}
|
||||
defer dataRc.Close()
|
||||
|
||||
ctx.Data["FileIsText"] = fInfo.isTextFile
|
||||
ctx.Data["FileTreePath"] = path.Join(ctx.Repo.TreePath, subfolder, readmeFile.Name())
|
||||
ctx.Data["FileIsText"] = fInfo.st.IsText()
|
||||
ctx.Data["FileTreePath"] = readmeFullPath
|
||||
ctx.Data["FileSize"] = fInfo.fileSize
|
||||
ctx.Data["IsLFSFile"] = fInfo.isLFSFile
|
||||
ctx.Data["IsLFSFile"] = fInfo.isLFSFile()
|
||||
|
||||
if fInfo.isLFSFile {
|
||||
if fInfo.isLFSFile() {
|
||||
filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.Name()))
|
||||
ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.Link(), url.PathEscape(fInfo.lfsMeta.Oid), url.PathEscape(filenameBase64))
|
||||
}
|
||||
|
||||
if !fInfo.isTextFile {
|
||||
if !fInfo.st.IsText() {
|
||||
return
|
||||
}
|
||||
|
||||
if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
|
||||
// Pretend that this is a normal text file to display 'This file is too large to be shown'
|
||||
ctx.Data["IsFileTooLarge"] = true
|
||||
ctx.Data["IsTextFile"] = true
|
||||
return
|
||||
}
|
||||
|
||||
@ -190,10 +196,10 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil
|
||||
|
||||
rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
|
||||
CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
|
||||
CurrentTreePath: path.Join(ctx.Repo.TreePath, subfolder),
|
||||
CurrentTreePath: path.Dir(readmeFullPath),
|
||||
}).
|
||||
WithMarkupType(markupType).
|
||||
WithRelativePath(path.Join(ctx.Repo.TreePath, subfolder, readmeFile.Name())) // ctx.Repo.TreePath is the directory not the Readme so we must append the Readme filename (and path).
|
||||
WithRelativePath(readmeFullPath)
|
||||
|
||||
ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
|
||||
if err != nil {
|
||||
@ -212,7 +218,7 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil
|
||||
ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlHTML(template.HTML(contentEscaped), ctx.Locale)
|
||||
}
|
||||
|
||||
if !fInfo.isLFSFile && ctx.Repo.Repository.CanEnableEditor() {
|
||||
if !fInfo.isLFSFile() && ctx.Repo.Repository.CanEnableEditor() {
|
||||
ctx.Data["CanEditReadmeFile"] = true
|
||||
}
|
||||
}
@ -203,9 +203,6 @@ func ViewPackageVersion(ctx *context.Context) {
|
||||
}
|
||||
ctx.Data["PackageRegistryHost"] = registryHostURL.Host
|
||||
|
||||
var pvs []*packages_model.PackageVersion
|
||||
pvsTotal := int64(0)
|
||||
|
||||
switch pd.Package.Type {
|
||||
case packages_model.TypeAlpine:
|
||||
branches := make(container.Set[string])
|
||||
@ -296,12 +293,16 @@ func ViewPackageVersion(ctx *context.Context) {
|
||||
}
|
||||
}
|
||||
ctx.Data["ContainerImageMetadata"] = imageMetadata
|
||||
}
|
||||
var pvs []*packages_model.PackageVersion
|
||||
var pvsTotal int64
|
||||
if pd.Package.Type == packages_model.TypeContainer {
|
||||
pvs, pvsTotal, err = container_model.SearchImageTags(ctx, &container_model.ImageTagsSearchOptions{
|
||||
Paginator: db.NewAbsoluteListOptions(0, 5),
|
||||
PackageID: pd.Package.ID,
|
||||
IsTagged: true,
|
||||
})
|
||||
default:
|
||||
} else {
|
||||
pvs, pvsTotal, err = packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
|
||||
Paginator: db.NewAbsoluteListOptions(0, 5),
|
||||
PackageID: pd.Package.ID,
|
||||
@ -312,7 +313,6 @@ func ViewPackageVersion(ctx *context.Context) {
|
||||
ctx.ServerError("", err)
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Data["LatestVersions"] = pvs
|
||||
ctx.Data["TotalVersionCount"] = pvsTotal
@ -39,6 +39,8 @@ type GetContentsOrListOptions struct {
TreePath string
IncludeSingleFileContent bool // include the file's content when the tree path is a file
IncludeLfsMetadata bool
IncludeCommitMetadata bool
IncludeCommitMessage bool
}
|
||||
|
||||
// GetContentsOrList gets the metadata of a file's contents (*ContentsResponse) if treePath not a tree
|
||||
@ -132,40 +134,46 @@ func getFileContentsByEntryInternal(_ context.Context, repo *repo_model.Reposito
|
||||
}
|
||||
selfURLString := selfURL.String()
|
||||
|
||||
err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(refCommit.InputRef, refType != git.RefTypeCommit), repo.FullName(), refCommit.CommitID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
lastCommit, err := refCommit.Commit.GetCommitByPath(opts.TreePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// All content types have these fields populated
|
||||
contentsResponse := &api.ContentsResponse{
|
||||
Name: entry.Name(),
|
||||
Path: opts.TreePath,
|
||||
SHA: entry.ID.String(),
|
||||
LastCommitSHA: lastCommit.ID.String(),
|
||||
Size: entry.Size(),
|
||||
URL: &selfURLString,
|
||||
Name: entry.Name(),
|
||||
Path: opts.TreePath,
|
||||
SHA: entry.ID.String(),
|
||||
Size: entry.Size(),
|
||||
URL: &selfURLString,
|
||||
Links: &api.FileLinksResponse{
|
||||
Self: &selfURLString,
|
||||
},
|
||||
}
|
||||
|
||||
// GitHub doesn't have these fields in the response, but we could follow other similar APIs to name them
|
||||
// https://docs.github.com/en/rest/commits/commits?apiVersion=2022-11-28#list-commits
|
||||
if lastCommit.Committer != nil {
|
||||
contentsResponse.LastCommitterDate = lastCommit.Committer.When
|
||||
}
|
||||
if lastCommit.Author != nil {
|
||||
contentsResponse.LastAuthorDate = lastCommit.Author.When
|
||||
}
|
||||
contentsResponse.LastCommitMessage = lastCommit.Message()
|
||||
if opts.IncludeCommitMetadata || opts.IncludeCommitMessage {
|
||||
err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(refCommit.InputRef, refType != git.RefTypeCommit), repo.FullName(), refCommit.CommitID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Now populate the rest of the ContentsResponse based on entry type
|
||||
lastCommit, err := refCommit.Commit.GetCommitByPath(opts.TreePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if opts.IncludeCommitMetadata {
|
||||
contentsResponse.LastCommitSHA = util.ToPointer(lastCommit.ID.String())
|
||||
// GitHub doesn't have these fields in the response, but we could follow other similar APIs to name them
|
||||
// https://docs.github.com/en/rest/commits/commits?apiVersion=2022-11-28#list-commits
|
||||
if lastCommit.Committer != nil {
|
||||
contentsResponse.LastCommitterDate = util.ToPointer(lastCommit.Committer.When)
|
||||
}
|
||||
if lastCommit.Author != nil {
|
||||
contentsResponse.LastAuthorDate = util.ToPointer(lastCommit.Author.When)
|
||||
}
|
||||
}
|
||||
if opts.IncludeCommitMessage {
|
||||
contentsResponse.LastCommitMessage = util.ToPointer(lastCommit.Message())
|
||||
}
|
||||
}
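The switch to util.ToPointer here is what makes the commit fields truly optional in the response: a nil pointer can be omitted from the JSON when the caller did not ask for commit metadata. A small standalone illustration (field names are illustrative, not the exact API struct):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type contents struct {
	Name              string     `json:"name"`
	LastCommitSHA     *string    `json:"last_commit_sha,omitempty"`
	LastCommitterDate *time.Time `json:"last_committer_date,omitempty"`
}

func toPointer[T any](v T) *T { return &v }

func main() {
	// without commit metadata: pointer fields stay nil and are omitted
	plain, _ := json.Marshal(contents{Name: "README.md"})
	fmt.Println(string(plain)) // {"name":"README.md"}

	// with commit metadata requested
	when := time.Date(2017, time.March, 19, 16, 47, 59, 0, time.UTC)
	full, _ := json.Marshal(contents{
		Name:              "README.md",
		LastCommitSHA:     toPointer("65f1bf27bc3bf70f64657658635e66094edbcb4d"),
		LastCommitterDate: &when,
	})
	fmt.Println(string(full))
}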
|
||||
|
||||
// Now populate the rest of the ContentsResponse based on the entry type
|
||||
if entry.IsRegular() || entry.IsExecutable() {
|
||||
contentsResponse.Type = string(ContentTypeRegular)
|
||||
// if it is listing the repo root dir, don't waste system resources on reading content
@ -5,57 +5,21 @@ package files
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/routers/api/v1/utils"
|
||||
"code.gitea.io/gitea/services/contexttest"
|
||||
|
||||
_ "code.gitea.io/gitea/models/actions"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
unittest.MainTest(m)
|
||||
}
|
||||
|
||||
func getExpectedReadmeContentsResponse() *api.ContentsResponse {
|
||||
treePath := "README.md"
|
||||
sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f"
|
||||
encoding := "base64"
|
||||
content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x"
|
||||
selfURL := "https://try.gitea.io/api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master"
|
||||
htmlURL := "https://try.gitea.io/user2/repo1/src/branch/master/" + treePath
|
||||
gitURL := "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/" + sha
|
||||
downloadURL := "https://try.gitea.io/user2/repo1/raw/branch/master/" + treePath
|
||||
return &api.ContentsResponse{
|
||||
Name: treePath,
|
||||
Path: treePath,
|
||||
SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
|
||||
LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
|
||||
LastCommitterDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
|
||||
LastAuthorDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
|
||||
LastCommitMessage: "Initial commit\n",
|
||||
Type: "file",
|
||||
Size: 30,
|
||||
Encoding: &encoding,
|
||||
Content: &content,
|
||||
URL: &selfURL,
|
||||
HTMLURL: &htmlURL,
|
||||
GitURL: &gitURL,
|
||||
DownloadURL: &downloadURL,
|
||||
Links: &api.FileLinksResponse{
|
||||
Self: &selfURL,
|
||||
GitURL: &gitURL,
|
||||
HTMLURL: &htmlURL,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetContents(t *testing.T) {
|
||||
unittest.PrepareTestEnv(t)
|
||||
ctx, _ := contexttest.MockContext(t, "user2/repo1")
|
||||
@ -64,45 +28,8 @@ func TestGetContents(t *testing.T) {
|
||||
contexttest.LoadRepoCommit(t, ctx)
|
||||
contexttest.LoadUser(t, ctx, 2)
|
||||
contexttest.LoadGitRepo(t, ctx)
|
||||
defer ctx.Repo.GitRepo.Close()
|
||||
repo, gitRepo := ctx.Repo.Repository, ctx.Repo.GitRepo
|
||||
refCommit, err := utils.ResolveRefCommit(ctx, ctx.Repo.Repository, ctx.Repo.Repository.DefaultBranch)
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Run("GetContentsOrList(README.md)-MetaOnly", func(t *testing.T) {
|
||||
expectedContentsResponse := getExpectedReadmeContentsResponse()
|
||||
expectedContentsResponse.Encoding = nil // because will be in a list, doesn't have encoding and content
|
||||
expectedContentsResponse.Content = nil
|
||||
extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "README.md", IncludeSingleFileContent: false})
|
||||
assert.Equal(t, expectedContentsResponse, extResp.FileContents)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("GetContentsOrList(README.md)", func(t *testing.T) {
|
||||
expectedContentsResponse := getExpectedReadmeContentsResponse()
|
||||
extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "README.md", IncludeSingleFileContent: true})
|
||||
assert.Equal(t, expectedContentsResponse, extResp.FileContents)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("GetContentsOrList(RootDir)", func(t *testing.T) {
|
||||
readmeContentsResponse := getExpectedReadmeContentsResponse()
|
||||
readmeContentsResponse.Encoding = nil // because will be in a list, doesn't have encoding and content
|
||||
readmeContentsResponse.Content = nil
|
||||
expectedContentsListResponse := []*api.ContentsResponse{readmeContentsResponse}
|
||||
// even if IncludeFileContent is true, it has no effect for directory listing
|
||||
extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "", IncludeSingleFileContent: true})
|
||||
assert.Equal(t, expectedContentsListResponse, extResp.DirContents)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("GetContentsOrList(NoSuchTreePath)", func(t *testing.T) {
|
||||
extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "no-such/file.md"})
|
||||
assert.Error(t, err)
|
||||
assert.EqualError(t, err, "object does not exist [id: , rel_path: no-such]")
|
||||
assert.Nil(t, extResp.DirContents)
|
||||
assert.Nil(t, extResp.FileContents)
|
||||
})
|
||||
// GetContentsOrList's behavior is fully tested in integration tests, so we don't need to test it here.
|
||||
|
||||
t.Run("GetBlobBySHA", func(t *testing.T) {
|
||||
sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d"
|
@ -22,7 +22,12 @@ import (
|
||||
func GetContentsListFromTreePaths(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, treePaths []string) (files []*api.ContentsResponse) {
|
||||
var size int64
|
||||
for _, treePath := range treePaths {
|
||||
fileContents, _ := GetFileContents(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: treePath, IncludeSingleFileContent: true}) // ok if fails, then will be nil
|
||||
// it's OK if this fails; the result will simply be nil
|
||||
fileContents, _ := GetFileContents(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{
|
||||
TreePath: treePath,
|
||||
IncludeSingleFileContent: true,
|
||||
IncludeCommitMetadata: true,
|
||||
})
|
||||
if fileContents != nil && fileContents.Content != nil && *fileContents.Content != "" {
|
||||
// if content isn't empty (e.g., due to the single blob being too large), add file size to response size
|
||||
size += int64(len(*fileContents.Content))
@ -161,7 +161,7 @@ func newTreeViewNodeFromEntry(ctx context.Context, renderedIconPool *fileicon.Re
|
||||
FullPath: path.Join(parentDir, entry.Name()),
|
||||
}
|
||||
|
||||
entryInfo := fileicon.EntryInfoFromGitTreeEntry(entry)
|
||||
entryInfo := fileicon.EntryInfoFromGitTreeEntry(commit, node.FullPath, entry)
|
||||
node.EntryIcon = fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo)
|
||||
if entryInfo.EntryMode.IsDir() {
|
||||
entryInfo.IsOpen = true
@ -29,7 +29,7 @@ export default {
|
||||
important: true, // the frameworks are mixed together, so tailwind needs to override other framework's styles
|
||||
content: [
|
||||
isProduction && '!./templates/devtest/**/*',
|
||||
isProduction && '!./web_src/js/standalone/devtest.js',
|
||||
isProduction && '!./web_src/js/standalone/devtest.ts',
|
||||
'!./templates/swagger/v1_json.tmpl',
|
||||
'!./templates/user/auth/oidc_wellknown.tmpl',
|
||||
'!**/*_test.go',
@ -82,6 +82,8 @@
|
||||
</table>
|
||||
{{end}}{{/* end if .IsFileTooLarge */}}
|
||||
<div class="code-line-menu tippy-target">
|
||||
{{/*FIXME: the "HasSourceRenderedToggle" is never set on the blame page; it should mean "whether the file is renderable".
If the file is renderable, then it must have the "display=source" parameter to make sure the file view page shows the source code, so that line numbers work. */}}
|
||||
{{if $.Permission.CanRead ctx.Consts.RepoUnitTypeIssues}}
|
||||
<a class="item ref-in-new-issue" role="menuitem" data-url-issue-new="{{.RepoLink}}/issues/new" data-url-param-body-link="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}" rel="nofollow noindex">{{ctx.Locale.Tr "repo.issues.context.reference_issue"}}</a>
|
||||
{{end}}
@ -107,8 +107,14 @@
|
||||
{{end}}
|
||||
</td>
|
||||
<td class="two wide ui">
|
||||
{{if and (not .DBBranch.IsDeleted) $.DefaultBranchBranch}}
|
||||
<div class="commit-divergence">
|
||||
{{if and (not .DBBranch.IsDeleted) $.DefaultBranchBranch}}
|
||||
{{$tooltipDivergence := ""}}
|
||||
{{if or .CommitsBehind .CommitsAhead}}
|
||||
{{$tooltipDivergence = ctx.Locale.Tr "repo.branch.commits_divergence_from" .CommitsBehind .CommitsAhead $.DefaultBranchBranch.DBBranch.Name}}
|
||||
{{else}}
|
||||
{{$tooltipDivergence = ctx.Locale.Tr "repo.branch.commits_no_divergence" $.DefaultBranchBranch.DBBranch.Name}}
|
||||
{{end}}
|
||||
<div class="commit-divergence" data-tooltip-content="{{$tooltipDivergence}}">
|
||||
<div class="bar-group">
|
||||
<div class="count count-behind">{{.CommitsBehind}}</div>
|
||||
{{/* old code could produce 0/0.0 = NaN and output an invalid "width: NaNpx"; it still works and doesn't cause any problem. */}}
|
||||
@ -119,7 +125,7 @@
|
||||
<div class="bar bar-ahead" style="width: {{Eval 100 "*" .CommitsAhead "/" "(" .CommitsBehind "+" .CommitsAhead "+" 0.0 ")"}}%"></div>
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
{{end}}
|
||||
</td>
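The width expression above follows this shape; a hedged Go sketch of the same computation (the +0.0 term in the template forces float math, mirrored here by converting before dividing):

package main

import "fmt"

// aheadWidthPercent mirrors: 100 * CommitsAhead / (CommitsBehind + CommitsAhead + 0.0)
func aheadWidthPercent(behind, ahead int) float64 {
	total := float64(behind) + float64(ahead)
	if total == 0 {
		return 0 // avoid the 0/0 = NaN case mentioned in the comment above
	}
	return 100 * float64(ahead) / total
}

func main() {
	fmt.Println(aheadWidthPercent(3, 1)) // 25
	fmt.Println(aheadWidthPercent(0, 0)) // 0 instead of NaN
}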
|
||||
<td class="two wide tw-text-right">
|
||||
{{if not .LatestPullRequest}}
@ -5,7 +5,7 @@
|
||||
{{range $i, $v := .TreeNames}}
|
||||
<div class="breadcrumb-divider">/</div>
|
||||
{{if eq $i $l}}
|
||||
<input id="file-name" maxlength="255" value="{{$v}}" placeholder="{{ctx.Locale.Tr (Iif $.PageIsUpload "repo.editor.add_subdir" "repo.editor.name_your_file")}}" data-editorconfig="{{$.EditorconfigJson}}" required autofocus>
|
||||
<input id="file-name" maxlength="255" value="{{$v}}" placeholder="{{ctx.Locale.Tr (Iif $.PageIsUpload "repo.editor.add_subdir" "repo.editor.name_your_file")}}" data-editorconfig="{{$.EditorconfigJson}}" {{Iif $.PageIsUpload "" "required"}} autofocus>
|
||||
<span data-tooltip-content="{{ctx.Locale.Tr "repo.editor.filename_help"}}">{{svg "octicon-info"}}</span>
|
||||
{{else}}
|
||||
<span class="section"><a href="{{$.BranchLink}}/{{index $.TreePaths $i | PathEscapeSegments}}">{{$v}}</a></span>
@ -22,7 +22,7 @@
<span class="label-filter-exclude-info">{{ctx.Locale.Tr "repo.issues.filter_label_exclude"}}</span>
<div class="divider"></div>
<a class="item label-filter-query-default" href="{{QueryBuild $queryLink "labels" NIL}}">{{ctx.Locale.Tr "repo.issues.filter_label_no_select"}}</a>
<a class="item label-filter-query-not-set" href="{{QueryBuild $queryLink "labels" 0}}">{{ctx.Locale.Tr "repo.issues.filter_label_select_no_label"}}</a>
<a class="item label-filter-query-not-set" href="{{QueryBuild $queryLink "labels" "0"}}">{{ctx.Locale.Tr "repo.issues.filter_label_select_no_label"}}</a>
{{/* The logic here is not the same as the label selector in the issue sidebar.
The one in the issue sidebar renders "repo labels | divider | org labels".
Maybe the logic should be updated to be consistent. */}}

@ -15,7 +15,7 @@
<input type="text" placeholder="{{ctx.Locale.Tr "repo.issues.filter_milestone"}}">
</div>
<div class="divider"></div>
<a class="{{if not $.MilestoneID}}active selected {{end}}item" href="{{QueryBuild $queryLink "milestone" 0}}">{{ctx.Locale.Tr "repo.issues.filter_milestone_all"}}</a>
<a class="{{if not $.MilestoneID}}active selected {{end}}item" href="{{QueryBuild $queryLink "milestone" NIL}}">{{ctx.Locale.Tr "repo.issues.filter_milestone_all"}}</a>
<a class="{{if $.MilestoneID}}{{if eq $.MilestoneID -1}}active selected {{end}}{{end}}item" href="{{QueryBuild $queryLink "milestone" -1}}">{{ctx.Locale.Tr "repo.issues.filter_milestone_none"}}</a>
{{if .OpenMilestones}}
<div class="divider"></div>

@ -1,5 +1,5 @@
{{if and (or .HasIssuesOrPullsWritePermission .IsIssuePoster) (not .HasMerged) (not .Issue.IsClosed) (not .IsPullWorkInProgress)}}
<a class="toggle-wip tw-block tw-mt-2" data-title="{{.Issue.Title}}" data-wip-prefix="{{index .PullRequestWorkInProgressPrefixes 0}}" data-update-url="{{.Issue.Link}}/title">
<a data-global-init="initPullRequestWipToggle" data-title="{{.Issue.Title}}" data-wip-prefix="{{index .PullRequestWorkInProgressPrefixes 0}}" data-update-url="{{.Issue.Link}}/title">
{{ctx.Locale.Tr "repo.pulls.still_in_progress"}} {{ctx.Locale.Tr "repo.pulls.add_prefix" (index .PullRequestWorkInProgressPrefixes 0)}}
</a>
{{end}}

@ -95,7 +95,7 @@
{{ctx.Locale.Tr "repo.pulls.cannot_merge_work_in_progress"}}
</div>
{{if or .HasIssuesOrPullsWritePermission .IsIssuePoster}}
<button class="ui compact button toggle-wip" data-title="{{.Issue.Title}}" data-wip-prefix="{{.WorkInProgressPrefix}}" data-update-url="{{.Issue.Link}}/title">
<button class="ui compact button" data-global-init="initPullRequestWipToggle" data-title="{{.Issue.Title}}" data-wip-prefix="{{.WorkInProgressPrefix}}" data-update-url="{{.Issue.Link}}/title">
{{ctx.Locale.Tr "repo.pulls.remove_prefix" .WorkInProgressPrefix}}
</button>
{{end}}

@ -30,8 +30,6 @@
<audio controls src="{{$.RawFileLink}}">
<strong>{{ctx.Locale.Tr "repo.audio_not_supported_in_browser"}}</strong>
</audio>
{{else if .IsPDFFile}}
<div class="pdf-content is-loading" data-global-init="initPdfViewer" data-src="{{$.RawFileLink}}" data-fallback-button-text="{{ctx.Locale.Tr "diff.view_file"}}"></div>
{{else}}
<a href="{{$.RawFileLink}}" rel="nofollow" class="tw-p-4">{{ctx.Locale.Tr "repo.file_view_raw"}}</a>
{{end}}

@ -1,4 +1,6 @@
<div {{if .ReadmeInList}}id="readme" {{end}}class="{{TabSizeClass .Editorconfig .FileTreePath}} non-diff-file-content">
<div {{if .ReadmeInList}}id="readme"{{end}} class="{{TabSizeClass .Editorconfig .FileTreePath}} non-diff-file-content"
data-global-init="initRepoFileView" data-raw-file-link="{{.RawFileLink}}">

{{- if .FileError}}
<div class="ui error message">
<div class="text left tw-whitespace-pre">{{.FileError}}</div>
@ -32,13 +34,14 @@
{{template "repo/file_info" .}}
{{end}}
</div>
<div class="file-header-right file-actions tw-flex tw-items-center tw-flex-wrap">
{{if .HasSourceRenderedToggle}}
<div class="ui compact icon buttons">
<a href="?display=source" class="ui mini basic button {{if .IsDisplayingSource}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_source"}}">{{svg "octicon-code" 15}}</a>
<a href="{{$.Link}}" class="ui mini basic button {{if .IsDisplayingRendered}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_rendered"}}">{{svg "octicon-file" 15}}</a>
</div>
{{end}}
<div class="file-header-right file-actions flex-text-block tw-flex-wrap">
{{/* this component is also controlled by the frontend render plugins */}}
<div class="ui compact icon buttons file-view-toggle-buttons {{Iif .HasSourceRenderedToggle "" "tw-hidden"}}">
{{if .IsRepresentableAsText}}
<a href="?display=source" class="ui mini basic button file-view-toggle-source {{if .IsDisplayingSource}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_source"}}">{{svg "octicon-code" 15}}</a>
{{end}}
<a href="?display=rendered" class="ui mini basic button file-view-toggle-rendered {{if not .IsDisplayingSource}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_rendered"}}">{{svg "octicon-file" 15}}</a>
</div>
{{if not .ReadmeInList}}
<div class="ui buttons tw-mr-1">
<a class="ui mini basic button" href="{{$.RawFileLink}}">{{ctx.Locale.Tr "repo.file_raw"}}</a>
@ -55,7 +58,10 @@
{{end}}
</div>
<a download class="btn-octicon" data-tooltip-content="{{ctx.Locale.Tr "repo.download_file"}}" href="{{$.RawFileLink}}">{{svg "octicon-download"}}</a>
<a class="btn-octicon {{if not .CanCopyContent}} disabled{{end}}" data-global-click="onCopyContentButtonClick" {{if or .IsImageFile (and .HasSourceRenderedToggle (not .IsDisplayingSource))}} data-link="{{$.RawFileLink}}"{{end}} data-tooltip-content="{{if .CanCopyContent}}{{ctx.Locale.Tr "copy_content"}}{{else}}{{ctx.Locale.Tr "copy_type_unsupported"}}{{end}}">{{svg "octicon-copy"}}</a>
<a class="btn-octicon {{if not .CanCopyContent}}disabled{{end}}" data-global-click="onCopyContentButtonClick"
{{if not .IsDisplayingSource}}data-raw-file-link="{{$.RawFileLink}}"{{end}}
data-tooltip-content="{{if .CanCopyContent}}{{ctx.Locale.Tr "copy_content"}}{{else}}{{ctx.Locale.Tr "copy_type_unsupported"}}{{end}}"
>{{svg "octicon-copy"}}</a>
{{if .EnableFeed}}
<a class="btn-octicon" href="{{$.RepoLink}}/rss/{{$.RefTypeNameSubURL}}/{{PathEscapeSegments .TreePath}}" data-tooltip-content="{{ctx.Locale.Tr "rss_feed"}}">
{{svg "octicon-rss"}}
@ -82,20 +88,36 @@
|
||||
{{end}}
|
||||
</div>
|
||||
</h4>
|
||||
|
||||
<div class="ui bottom attached table unstackable segment">
|
||||
{{if not (or .IsMarkup .IsRenderedHTML)}}
|
||||
{{template "repo/unicode_escape_prompt" dict "EscapeStatus" .EscapeStatus "root" $}}
|
||||
{{if not .IsMarkup}}
|
||||
{{template "repo/unicode_escape_prompt" dict "EscapeStatus" .EscapeStatus}}
|
||||
{{end}}
|
||||
<div class="file-view{{if .IsMarkup}} markup {{.MarkupType}}{{else if .IsPlainText}} plain-text{{else if .IsTextSource}} code-view{{end}}">
|
||||
<div class="file-view {{if .IsMarkup}}markup {{.MarkupType}}{{else if .IsPlainText}}plain-text{{else if .IsDisplayingSource}}code-view{{end}}">
|
||||
{{if .IsFileTooLarge}}
|
||||
{{template "shared/filetoolarge" dict "RawFileLink" .RawFileLink}}
|
||||
{{else if not .FileSize}}
|
||||
{{template "shared/fileisempty"}}
|
||||
{{else if .IsMarkup}}
|
||||
{{if .FileContent}}{{.FileContent}}{{end}}
|
||||
{{.FileContent}}
|
||||
{{else if .IsPlainText}}
|
||||
<pre>{{if .FileContent}}{{.FileContent}}{{end}}</pre>
|
||||
{{else if not .IsTextSource}}
|
||||
{{else if .FileContent}}
|
||||
<table>
|
||||
<tbody>
|
||||
{{range $idx, $code := .FileContent}}
|
||||
{{$line := Eval $idx "+" 1}}
|
||||
<tr>
|
||||
<td id="L{{$line}}" class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
|
||||
{{if $.EscapeStatus.Escaped}}
|
||||
<td class="lines-escape">{{if (index $.LineEscapeStatus $idx).Escaped}}<button class="toggle-escape-button btn interact-bg" title="{{if (index $.LineEscapeStatus $idx).HasInvisible}}{{ctx.Locale.Tr "repo.invisible_runes_line"}} {{end}}{{if (index $.LineEscapeStatus $idx).HasAmbiguous}}{{ctx.Locale.Tr "repo.ambiguous_runes_line"}}{{end}}"></button>{{end}}</td>
|
||||
{{end}}
|
||||
<td rel="L{{$line}}" class="lines-code chroma"><code class="code-inner">{{$code}}</code></td>
|
||||
</tr>
|
||||
{{end}}
|
||||
</tbody>
|
||||
</table>
|
||||
{{else}}
|
||||
<div class="view-raw">
|
||||
{{if .IsImageFile}}
|
||||
<img alt="{{$.RawFileLink}}" src="{{$.RawFileLink}}">
|
||||
@ -107,35 +129,23 @@
|
||||
<audio controls src="{{$.RawFileLink}}">
|
||||
<strong>{{ctx.Locale.Tr "repo.audio_not_supported_in_browser"}}</strong>
|
||||
</audio>
|
||||
{{else if .IsPDFFile}}
|
||||
<div class="pdf-content is-loading" data-global-init="initPdfViewer" data-src="{{$.RawFileLink}}" data-fallback-button-text="{{ctx.Locale.Tr "repo.diff.view_file"}}"></div>
|
||||
{{else}}
|
||||
<a href="{{$.RawFileLink}}" rel="nofollow" class="tw-p-4">{{ctx.Locale.Tr "repo.file_view_raw"}}</a>
|
||||
<div class="file-view-render-container">
|
||||
<div class="file-view-raw-prompt tw-p-4">
|
||||
<a href="{{$.RawFileLink}}" rel="nofollow">{{ctx.Locale.Tr "repo.file_view_raw"}}</a>
|
||||
</div>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
{{else if .FileSize}}
|
||||
<table>
|
||||
<tbody>
|
||||
{{range $idx, $code := .FileContent}}
|
||||
{{$line := Eval $idx "+" 1}}
|
||||
<tr>
|
||||
<td id="L{{$line}}" class="lines-num"><span id="L{{$line}}" data-line-number="{{$line}}"></span></td>
|
||||
{{if $.EscapeStatus.Escaped}}
|
||||
<td class="lines-escape">{{if (index $.LineEscapeStatus $idx).Escaped}}<button class="toggle-escape-button btn interact-bg" title="{{if (index $.LineEscapeStatus $idx).HasInvisible}}{{ctx.Locale.Tr "repo.invisible_runes_line"}} {{end}}{{if (index $.LineEscapeStatus $idx).HasAmbiguous}}{{ctx.Locale.Tr "repo.ambiguous_runes_line"}}{{end}}"></button>{{end}}</td>
|
||||
{{end}}
|
||||
<td rel="L{{$line}}" class="lines-code chroma"><code class="code-inner">{{$code}}</code></td>
|
||||
</tr>
|
||||
{{end}}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="code-line-menu tippy-target">
|
||||
{{if $.Permission.CanRead ctx.Consts.RepoUnitTypeIssues}}
|
||||
<a class="item ref-in-new-issue" role="menuitem" data-url-issue-new="{{.RepoLink}}/issues/new" data-url-param-body-link="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}" rel="nofollow noindex">{{ctx.Locale.Tr "repo.issues.context.reference_issue"}}</a>
|
||||
{{end}}
|
||||
<a class="item view_git_blame" role="menuitem" href="{{.Repository.Link}}/blame/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}">{{ctx.Locale.Tr "repo.view_git_blame"}}</a>
|
||||
<a class="item copy-line-permalink" role="menuitem" data-url="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}">{{ctx.Locale.Tr "repo.file_copy_permalink"}}</a>
|
||||
</div>
|
||||
{{end}}
|
||||
</div>
|
||||
|
||||
<div class="code-line-menu tippy-target">
|
||||
{{if $.Permission.CanRead ctx.Consts.RepoUnitTypeIssues}}
|
||||
<a class="item ref-in-new-issue" role="menuitem" data-url-issue-new="{{.RepoLink}}/issues/new" data-url-param-body-link="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}" rel="nofollow noindex">{{ctx.Locale.Tr "repo.issues.context.reference_issue"}}</a>
|
||||
{{end}}
|
||||
<a class="item view_git_blame" role="menuitem" href="{{.Repository.Link}}/blame/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}">{{ctx.Locale.Tr "repo.view_git_blame"}}</a>
|
||||
<a class="item copy-line-permalink" role="menuitem" data-url="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}">{{ctx.Locale.Tr "repo.file_copy_permalink"}}</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -41,6 +41,9 @@
|
||||
</a>
|
||||
{{else}}
|
||||
<a class="entry-name" href="{{$.TreeLink}}/{{PathEscapeSegments $entry.Name}}" title="{{$entry.Name}}">{{$entry.Name}}</a>
|
||||
{{if $entry.IsLink}}
|
||||
<a class="entry-symbol-link flex-text-inline" data-tooltip-content title="{{ctx.Locale.Tr "repo.find_file.follow_symlink"}}" href="{{$.TreeLink}}/{{PathEscapeSegments $entry.Name}}?follow_symlink=1">{{svg "octicon-link" 12}}</a>
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
</div>
|
||||
|
4  templates/swagger/v1_json.tmpl  (generated)
@ -7547,7 +7547,7 @@
},
{
"type": "string",
"description": "path of the dir, file, symlink or submodule in the repo",
"description": "path of the dir, file, symlink or submodule in the repo. Swagger requires path parameter to be \"required\", you can leave it empty or pass a single dot (\".\") to get the root directory.",
"name": "filepath",
"in": "path",
"required": true
@ -7560,7 +7560,7 @@
},
{
"type": "string",
"description": "By default this API's response only contains file's metadata. Use comma-separated \"includes\" options to retrieve more fields. Option \"file_content\" will try to retrieve the file content, option \"lfs_metadata\" will try to retrieve LFS metadata.",
"description": "By default this API's response only contains file's metadata. Use comma-separated \"includes\" options to retrieve more fields. Option \"file_content\" will try to retrieve the file content, \"lfs_metadata\" will try to retrieve LFS metadata, \"commit_metadata\" will try to retrieve commit metadata, and \"commit_message\" will try to retrieve commit message.",
"name": "includes",
"in": "query"
}
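The `includes` options described above are exercised by the `contents-ext` API tests later in this diff. As a rough illustration only, a client request for file content plus commit metadata might look like the sketch below; the `GITEA_URL`/`GITEA_TOKEN` values are placeholders and the exact response field names are not confirmed by this diff.

```ts
// Hedged sketch: request extended file contents with commit metadata from the contents-ext API.
const GITEA_URL = 'https://gitea.example.com'; // placeholder instance URL (assumption)
const GITEA_TOKEN = 'xxxxxxxx';                // placeholder API token (assumption)

async function fetchFileWithCommitInfo(owner: string, repo: string, filepath: string, ref: string) {
  const escapedPath = filepath.split('/').map(encodeURIComponent).join('/');
  const url = `${GITEA_URL}/api/v1/repos/${owner}/${repo}/contents-ext/${escapedPath}` +
    `?ref=${encodeURIComponent(ref)}&includes=file_content,commit_metadata,commit_message`;
  const resp = await fetch(url, {headers: {Authorization: `token ${GITEA_TOKEN}`}});
  if (!resp.ok) throw new Error(`unexpected status: ${resp.status}`);
  // expected to carry the base64 file content plus last-commit metadata and message
  return resp.json();
}
```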
@ -157,9 +157,14 @@ gpgkey=%sapi/packages/%s/rpm/repository.key`,
|
||||
t.Run("Download", func(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
// download the package without the file name
|
||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture))
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
assert.Equal(t, content, resp.Body.Bytes())
|
||||
|
||||
// download the package with a file name (it can be anything)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s/any-file-name", groupURL, packageName, packageVersion, packageArchitecture))
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
assert.Equal(t, content, resp.Body.Bytes())
|
||||
})
|
||||
|
||||
@ -447,7 +452,8 @@ gpgkey=%sapi/packages/%s/rpm/repository.key`,
|
||||
pub, err := openpgp.ReadArmoredKeyRing(gpgResp.Body)
|
||||
require.NoError(t, err)
|
||||
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture))
|
||||
rpmFileName := fmt.Sprintf("%s-%s.%s.rpm", packageName, packageVersion, packageArchitecture)
|
||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture, rpmFileName))
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
_, sigs, err := rpmutils.Verify(resp.Body, pub)
|
||||
|
@ -19,6 +19,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/services/context"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
@ -52,13 +53,13 @@ func getCreateFileOptions() api.CreateFileOptions {
|
||||
func normalizeFileContentResponseCommitTime(c *api.ContentsResponse) {
|
||||
// the decoded JSON response may contain a different timezone from the one parsed from the git commit,
// so we need to normalize the times to UTC to make "assert.Equal" pass
|
||||
c.LastCommitterDate = c.LastCommitterDate.UTC()
|
||||
c.LastAuthorDate = c.LastAuthorDate.UTC()
|
||||
c.LastCommitterDate = util.ToPointer(c.LastCommitterDate.UTC())
|
||||
c.LastAuthorDate = util.ToPointer(c.LastAuthorDate.UTC())
|
||||
}
|
||||
|
||||
type apiFileResponseInfo struct {
|
||||
repoFullName, commitID, treePath, lastCommitSHA, lastCommitMessage string
|
||||
lastCommitterWhen, lastAuthorWhen time.Time
|
||||
repoFullName, commitID, treePath, lastCommitSHA string
|
||||
lastCommitterWhen, lastAuthorWhen time.Time
|
||||
}
|
||||
|
||||
func getExpectedFileResponseForCreate(info apiFileResponseInfo) *api.FileResponse {
|
||||
@ -74,10 +75,9 @@ func getExpectedFileResponseForCreate(info apiFileResponseInfo) *api.FileRespons
|
||||
Name: path.Base(info.treePath),
|
||||
Path: info.treePath,
|
||||
SHA: sha,
|
||||
LastCommitSHA: info.lastCommitSHA,
|
||||
LastCommitterDate: info.lastCommitterWhen,
|
||||
LastAuthorDate: info.lastAuthorWhen,
|
||||
LastCommitMessage: info.lastCommitMessage,
|
||||
LastCommitSHA: util.ToPointer(info.lastCommitSHA),
|
||||
LastCommitterDate: util.ToPointer(info.lastCommitterWhen),
|
||||
LastAuthorDate: util.ToPointer(info.lastAuthorWhen),
|
||||
Size: 16,
|
||||
Type: "file",
|
||||
Encoding: &encoding,
|
||||
@ -194,7 +194,6 @@ func TestAPICreateFile(t *testing.T) {
|
||||
lastCommitSHA: lastCommit.ID.String(),
|
||||
lastCommitterWhen: lastCommit.Committer.When,
|
||||
lastAuthorWhen: lastCommit.Author.When,
|
||||
lastCommitMessage: lastCommit.Message(),
|
||||
})
|
||||
var fileResponse api.FileResponse
|
||||
DecodeJSON(t, resp, &fileResponse)
|
||||
|
@ -18,6 +18,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
"code.gitea.io/gitea/services/context"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
@ -60,9 +61,9 @@ func getExpectedFileResponseForUpdate(info apiFileResponseInfo) *api.FileRespons
|
||||
Name: path.Base(info.treePath),
|
||||
Path: info.treePath,
|
||||
SHA: sha,
|
||||
LastCommitSHA: info.lastCommitSHA,
|
||||
LastCommitterDate: info.lastCommitterWhen,
|
||||
LastAuthorDate: info.lastAuthorWhen,
|
||||
LastCommitSHA: util.ToPointer(info.lastCommitSHA),
|
||||
LastCommitterDate: util.ToPointer(info.lastCommitterWhen),
|
||||
LastAuthorDate: util.ToPointer(info.lastAuthorWhen),
|
||||
Type: "file",
|
||||
Size: 20,
|
||||
Encoding: &encoding,
|
||||
|
@ -18,6 +18,7 @@ import (
|
||||
"code.gitea.io/gitea/modules/gitrepo"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
api "code.gitea.io/gitea/modules/structs"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
repo_service "code.gitea.io/gitea/services/repository"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
@ -35,10 +36,9 @@ func getExpectedContentsListResponseForContents(ref, refType, lastCommitSHA stri
|
||||
Name: path.Base(treePath),
|
||||
Path: treePath,
|
||||
SHA: sha,
|
||||
LastCommitSHA: lastCommitSHA,
|
||||
LastCommitterDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
|
||||
LastAuthorDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
|
||||
LastCommitMessage: "Initial commit",
|
||||
LastCommitSHA: util.ToPointer(lastCommitSHA),
|
||||
LastCommitterDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
|
||||
LastAuthorDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
|
||||
Type: "file",
|
||||
Size: 30,
|
||||
URL: &selfURL,
|
||||
@ -66,7 +66,6 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) // public repo
|
||||
repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) // public repo
|
||||
repo16 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 16}) // private repo
|
||||
treePath := "" // root dir
|
||||
|
||||
// Get user2's token
|
||||
session := loginUser(t, user2.Name)
|
||||
@ -95,7 +94,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
// ref is default ref
|
||||
ref := repo1.DefaultBranch
|
||||
refType := "branch"
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents?ref=%s", user2.Name, repo1.Name, ref)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
var contentsListResponse []*api.ContentsResponse
|
||||
DecodeJSON(t, resp, &contentsListResponse)
|
||||
@ -107,7 +106,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
|
||||
// No ref
|
||||
refType = "branch"
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s", user2.Name, repo1.Name, treePath)
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", user2.Name, repo1.Name)
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
DecodeJSON(t, resp, &contentsListResponse)
|
||||
assert.NotNil(t, contentsListResponse)
|
||||
@ -118,7 +117,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
// ref is the branch we created above in setup
|
||||
ref = newBranch
|
||||
refType = "branch"
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents?ref=%s", user2.Name, repo1.Name, ref)
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
DecodeJSON(t, resp, &contentsListResponse)
|
||||
assert.NotNil(t, contentsListResponse)
|
||||
@ -132,7 +131,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
// ref is the new tag we created above in setup
|
||||
ref = newTag
|
||||
refType = "tag"
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/?ref=%s", user2.Name, repo1.Name, ref)
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
DecodeJSON(t, resp, &contentsListResponse)
|
||||
assert.NotNil(t, contentsListResponse)
|
||||
@ -146,7 +145,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
// ref is a commit
|
||||
ref = commitID
|
||||
refType = "commit"
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/?ref=%s", user2.Name, repo1.Name, ref)
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
DecodeJSON(t, resp, &contentsListResponse)
|
||||
assert.NotNil(t, contentsListResponse)
|
||||
@ -155,21 +154,21 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
|
||||
|
||||
// Test getting the contents of a file with a bad ref
|
||||
ref = "badref"
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/?ref=%s", user2.Name, repo1.Name, ref)
|
||||
MakeRequest(t, req, http.StatusNotFound)
|
||||
|
||||
// Test accessing private ref with user token that does not have access - should fail
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s", user2.Name, repo16.Name, treePath).
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", user2.Name, repo16.Name).
|
||||
AddTokenAuth(token4)
|
||||
MakeRequest(t, req, http.StatusNotFound)
|
||||
|
||||
// Test access private ref of owner of token
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/readme.md", user2.Name, repo16.Name).
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", user2.Name, repo16.Name).
|
||||
AddTokenAuth(token2)
|
||||
MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
// Test access to org3's private repo file by owner user2
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s", org3.Name, repo3.Name, treePath).
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", org3.Name, repo3.Name).
|
||||
AddTokenAuth(token2)
|
||||
MakeRequest(t, req, http.StatusOK)
|
||||
}
|
||||
|
@ -35,10 +35,9 @@ func getExpectedContentsResponseForContents(ref, refType, lastCommitSHA string)
|
||||
Name: treePath,
|
||||
Path: treePath,
|
||||
SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
|
||||
LastCommitSHA: lastCommitSHA,
|
||||
LastCommitterDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
|
||||
LastAuthorDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
|
||||
LastCommitMessage: "Initial commit",
|
||||
LastCommitSHA: util.ToPointer(lastCommitSHA),
|
||||
LastCommitterDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
|
||||
LastAuthorDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
|
||||
Type: "file",
|
||||
Size: 30,
|
||||
Encoding: util.ToPointer("base64"),
|
||||
@ -98,11 +97,16 @@ func testAPIGetContents(t *testing.T, u *url.URL) {
|
||||
require.NoError(t, err)
|
||||
/*** END SETUP ***/
|
||||
|
||||
// not found
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/no-such/file.md", user2.Name, repo1.Name)
|
||||
resp := MakeRequest(t, req, http.StatusNotFound)
|
||||
assert.Contains(t, resp.Body.String(), "object does not exist [id: , rel_path: no-such]")
|
||||
|
||||
// ref is default ref
|
||||
ref := repo1.DefaultBranch
|
||||
refType := "branch"
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
var contentsResponse api.ContentsResponse
|
||||
DecodeJSON(t, resp, &contentsResponse)
|
||||
lastCommit, _ := gitRepo.GetCommitByPath("README.md")
|
||||
@ -117,7 +121,7 @@ func testAPIGetContents(t *testing.T, u *url.URL) {
|
||||
expectedContentsResponse = getExpectedContentsResponseForContents(repo1.DefaultBranch, refType, lastCommit.ID.String())
|
||||
assert.Equal(t, *expectedContentsResponse, contentsResponse)
|
||||
|
||||
// ref is the branch we created above in setup
|
||||
// ref is the branch we created above in setup
|
||||
ref = newBranch
|
||||
refType = "branch"
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
|
||||
@ -207,14 +211,30 @@ func testAPIGetContentsExt(t *testing.T) {
|
||||
session := loginUser(t, "user2")
|
||||
token2 := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
|
||||
t.Run("DirContents", func(t *testing.T) {
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs?ref=sub-home-md-img-check")
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext?ref=sub-home-md-img-check")
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
var contentsResponse api.ContentsExtResponse
|
||||
DecodeJSON(t, resp, &contentsResponse)
|
||||
assert.Nil(t, contentsResponse.FileContents)
|
||||
assert.NotNil(t, contentsResponse.DirContents)
|
||||
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/.?ref=sub-home-md-img-check")
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
contentsResponse = api.ContentsExtResponse{}
|
||||
DecodeJSON(t, resp, &contentsResponse)
|
||||
assert.Nil(t, contentsResponse.FileContents)
|
||||
assert.NotNil(t, contentsResponse.DirContents)
|
||||
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs?ref=sub-home-md-img-check")
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
contentsResponse = api.ContentsExtResponse{}
|
||||
DecodeJSON(t, resp, &contentsResponse)
|
||||
assert.Nil(t, contentsResponse.FileContents)
|
||||
assert.Equal(t, "README.md", contentsResponse.DirContents[0].Name)
|
||||
assert.Nil(t, contentsResponse.DirContents[0].Encoding)
|
||||
assert.Nil(t, contentsResponse.DirContents[0].Content)
|
||||
assert.Nil(t, contentsResponse.DirContents[0].LastCommitSHA)
|
||||
assert.Nil(t, contentsResponse.DirContents[0].LastCommitMessage)
|
||||
|
||||
// "includes=file_content" shouldn't affect directory listing
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs?ref=sub-home-md-img-check&includes=file_content")
|
||||
@ -241,7 +261,7 @@ func testAPIGetContentsExt(t *testing.T) {
|
||||
assert.Equal(t, util.ToPointer("0b8d8b5f15046343fd32f451df93acc2bdd9e6373be478b968e4cad6b6647351"), respFile.LfsOid)
|
||||
})
|
||||
t.Run("FileContents", func(t *testing.T) {
|
||||
// by default, no file content is returned
|
||||
// by default, no file content or commit info is returned
|
||||
req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs/README.md?ref=sub-home-md-img-check")
|
||||
resp := MakeRequest(t, req, http.StatusOK)
|
||||
var contentsResponse api.ContentsExtResponse
|
||||
@ -250,9 +270,11 @@ func testAPIGetContentsExt(t *testing.T) {
|
||||
assert.Equal(t, "README.md", contentsResponse.FileContents.Name)
|
||||
assert.Nil(t, contentsResponse.FileContents.Encoding)
|
||||
assert.Nil(t, contentsResponse.FileContents.Content)
|
||||
assert.Nil(t, contentsResponse.FileContents.LastCommitSHA)
|
||||
assert.Nil(t, contentsResponse.FileContents.LastCommitMessage)
|
||||
|
||||
// file content is only returned when `includes=file_content`
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs/README.md?ref=sub-home-md-img-check&includes=file_content")
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs/README.md?ref=sub-home-md-img-check&includes=file_content,commit_metadata,commit_message")
|
||||
resp = MakeRequest(t, req, http.StatusOK)
|
||||
contentsResponse = api.ContentsExtResponse{}
|
||||
DecodeJSON(t, resp, &contentsResponse)
|
||||
@ -260,6 +282,8 @@ func testAPIGetContentsExt(t *testing.T) {
|
||||
assert.Equal(t, "README.md", contentsResponse.FileContents.Name)
|
||||
assert.NotNil(t, contentsResponse.FileContents.Encoding)
|
||||
assert.NotNil(t, contentsResponse.FileContents.Content)
|
||||
assert.Equal(t, "4649299398e4d39a5c09eb4f534df6f1e1eb87cc", *contentsResponse.FileContents.LastCommitSHA)
|
||||
assert.Equal(t, "Test how READMEs render images when found in a subfolder\n", *contentsResponse.FileContents.LastCommitMessage)
|
||||
|
||||
req = NewRequestf(t, "GET", "/api/v1/repos/user2/lfs/contents-ext/jpeg.jpg?includes=file_content").AddTokenAuth(token2)
|
||||
resp = session.MakeRequest(t, req, http.StatusOK)
|
||||
@ -271,6 +295,8 @@ func testAPIGetContentsExt(t *testing.T) {
|
||||
assert.Equal(t, "jpeg.jpg", respFile.Name)
|
||||
assert.NotNil(t, respFile.Encoding)
|
||||
assert.NotNil(t, respFile.Content)
|
||||
assert.Nil(t, contentsResponse.FileContents.LastCommitSHA)
|
||||
assert.Nil(t, contentsResponse.FileContents.LastCommitMessage)
|
||||
assert.Equal(t, util.ToPointer(int64(107)), respFile.LfsSize)
|
||||
assert.Equal(t, util.ToPointer("0b8d8b5f15046343fd32f451df93acc2bdd9e6373be478b968e4cad6b6647351"), respFile.LfsOid)
|
||||
})
|
||||
|
@ -68,14 +68,15 @@ func TestLFSRender(t *testing.T) {
|
||||
req := NewRequest(t, "GET", "/user2/lfs/src/branch/master/crypt.bin")
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
doc := NewHTMLParser(t, resp.Body).doc
|
||||
doc := NewHTMLParser(t, resp.Body)
|
||||
|
||||
fileInfo := doc.Find("div.file-info-entry").First().Text()
|
||||
assert.Contains(t, fileInfo, "LFS")
|
||||
|
||||
rawLink, exists := doc.Find("div.file-view > div.view-raw > a").Attr("href")
|
||||
assert.True(t, exists, "Download link should render instead of content because this is a binary file")
|
||||
assert.Equal(t, "/user2/lfs/media/branch/master/crypt.bin", rawLink, "The download link should use the proper /media link because it's in LFS")
|
||||
// find new file view container
|
||||
fileViewContainer := doc.Find("[data-global-init=initRepoFileView]")
|
||||
assert.Equal(t, "/user2/lfs/media/branch/master/crypt.bin", fileViewContainer.AttrOr("data-raw-file-link", ""))
|
||||
AssertHTMLElement(t, doc, ".view-raw > .file-view-render-container > .file-view-raw-prompt", 1)
|
||||
})
|
||||
|
||||
// check that a directory with a README file shows its text
|
||||
|
@ -27,6 +27,7 @@ import (
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestRepoView(t *testing.T) {
|
||||
@ -41,6 +42,7 @@ func TestRepoView(t *testing.T) {
|
||||
t.Run("BlameFileInRepo", testBlameFileInRepo)
|
||||
t.Run("ViewRepoDirectory", testViewRepoDirectory)
|
||||
t.Run("ViewRepoDirectoryReadme", testViewRepoDirectoryReadme)
|
||||
t.Run("ViewRepoSymlink", testViewRepoSymlink)
|
||||
t.Run("MarkDownReadmeImage", testMarkDownReadmeImage)
|
||||
t.Run("MarkDownReadmeImageSubfolder", testMarkDownReadmeImageSubfolder)
|
||||
t.Run("GeneratedSourceLink", testGeneratedSourceLink)
|
||||
@ -412,6 +414,21 @@ func testViewRepoDirectoryReadme(t *testing.T) {
|
||||
missing("symlink-loop", "/user2/readme-test/src/branch/symlink-loop/")
|
||||
}
|
||||
|
||||
func testViewRepoSymlink(t *testing.T) {
|
||||
session := loginUser(t, "user2")
|
||||
req := NewRequest(t, "GET", "/user2/readme-test/src/branch/symlink")
|
||||
resp := session.MakeRequest(t, req, http.StatusOK)
|
||||
|
||||
htmlDoc := NewHTMLParser(t, resp.Body)
|
||||
AssertHTMLElement(t, htmlDoc, ".entry-symbol-link", true)
|
||||
followSymbolLinkHref := htmlDoc.Find(".entry-symbol-link").AttrOr("href", "")
|
||||
require.Equal(t, "/user2/readme-test/src/branch/symlink/README.md?follow_symlink=1", followSymbolLinkHref)
|
||||
|
||||
req = NewRequest(t, "GET", followSymbolLinkHref)
|
||||
resp = session.MakeRequest(t, req, http.StatusSeeOther)
|
||||
assert.Equal(t, "/user2/readme-test/src/branch/symlink/some/other/path/awefulcake.txt?follow_symlink=1", resp.Header().Get("Location"))
|
||||
}
|
||||
|
||||
func testMarkDownReadmeImage(t *testing.T) {
|
||||
defer tests.PrintCurrentTest(t)()
|
||||
|
||||
|
@ -155,10 +155,9 @@ func getExpectedFileResponseForRepoFilesCreate(commitID string, lastCommit *git.
|
||||
Name: path.Base(treePath),
|
||||
Path: treePath,
|
||||
SHA: "103ff9234cefeee5ec5361d22b49fbb04d385885",
|
||||
LastCommitSHA: lastCommit.ID.String(),
|
||||
LastCommitterDate: lastCommit.Committer.When,
|
||||
LastAuthorDate: lastCommit.Author.When,
|
||||
LastCommitMessage: "Creates new/file.txt\n",
|
||||
LastCommitSHA: util.ToPointer(lastCommit.ID.String()),
|
||||
LastCommitterDate: util.ToPointer(lastCommit.Committer.When),
|
||||
LastAuthorDate: util.ToPointer(lastCommit.Author.When),
|
||||
Type: "file",
|
||||
Size: 18,
|
||||
Encoding: &encoding,
|
||||
@ -226,10 +225,9 @@ func getExpectedFileResponseForRepoFilesUpdate(commitID, filename, lastCommitSHA
|
||||
Name: filename,
|
||||
Path: filename,
|
||||
SHA: "dbf8d00e022e05b7e5cf7e535de857de57925647",
|
||||
LastCommitSHA: lastCommitSHA,
|
||||
LastCommitterDate: lastCommitterWhen,
|
||||
LastAuthorDate: lastAuthorWhen,
|
||||
LastCommitMessage: "Updates README.md\n",
|
||||
LastCommitSHA: util.ToPointer(lastCommitSHA),
|
||||
LastCommitterDate: util.ToPointer(lastCommitterWhen),
|
||||
LastAuthorDate: util.ToPointer(lastAuthorWhen),
|
||||
Type: "file",
|
||||
Size: 43,
|
||||
Encoding: &encoding,
|
||||
@ -330,19 +328,18 @@ func getExpectedFileResponseForRepoFilesUpdateRename(commitID, lastCommitSHA str
|
||||
downloadURL := setting.AppURL + "user2/lfs/raw/branch/master/" + detail.filename
|
||||
// don't set time-related fields because the times may differ within one operation
|
||||
responses = append(responses, &api.ContentsResponse{
|
||||
Name: detail.filename,
|
||||
Path: detail.filename,
|
||||
SHA: detail.sha,
|
||||
LastCommitSHA: lastCommitSHA,
|
||||
LastCommitMessage: "Rename files\n",
|
||||
Type: "file",
|
||||
Size: detail.size,
|
||||
Encoding: util.ToPointer("base64"),
|
||||
Content: &detail.content,
|
||||
URL: &selfURL,
|
||||
HTMLURL: &htmlURL,
|
||||
GitURL: &gitURL,
|
||||
DownloadURL: &downloadURL,
|
||||
Name: detail.filename,
|
||||
Path: detail.filename,
|
||||
SHA: detail.sha,
|
||||
LastCommitSHA: util.ToPointer(lastCommitSHA),
|
||||
Type: "file",
|
||||
Size: detail.size,
|
||||
Encoding: util.ToPointer("base64"),
|
||||
Content: &detail.content,
|
||||
URL: &selfURL,
|
||||
HTMLURL: &htmlURL,
|
||||
GitURL: &gitURL,
|
||||
DownloadURL: &downloadURL,
|
||||
Links: &api.FileLinksResponse{
|
||||
Self: &selfURL,
|
||||
GitURL: &gitURL,
|
||||
@ -540,7 +537,7 @@ func TestChangeRepoFilesForUpdateWithFileRename(t *testing.T) {
|
||||
lastCommit, _ := commit.GetCommitByPath(opts.Files[0].TreePath)
|
||||
expectedFileResponse := getExpectedFileResponseForRepoFilesUpdateRename(commit.ID.String(), lastCommit.ID.String())
|
||||
for _, file := range filesResponse.Files {
|
||||
file.LastCommitterDate, file.LastAuthorDate = time.Time{}, time.Time{} // there might be different time in one operation, so we ignore them
|
||||
file.LastCommitterDate, file.LastAuthorDate = nil, nil // there might be different time in one operation, so we ignore them
|
||||
}
|
||||
assert.Len(t, filesResponse.Files, 4)
|
||||
assert.Equal(t, expectedFileResponse.Files, filesResponse.Files)
|
||||
|
@ -52,8 +52,7 @@ form.single-button-form.is-loading .button {
}

.markup pre.is-loading,
.editor-loading.is-loading,
.pdf-content.is-loading {
.editor-loading.is-loading {
height: var(--height-loading);
}

@ -183,42 +183,6 @@ td .commit-summary {
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.view-raw {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.view-raw > * {
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.view-raw audio,
|
||||
.view-raw video,
|
||||
.view-raw img {
|
||||
margin: 1rem 0;
|
||||
border-radius: 0;
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
.view-raw img[src$=".svg" i] {
|
||||
max-height: 600px !important;
|
||||
max-width: 600px !important;
|
||||
}
|
||||
|
||||
.pdf-content {
|
||||
width: 100%;
|
||||
height: 600px;
|
||||
border: none !important;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.pdf-content .pdf-fallback-button {
|
||||
margin: 50px auto;
|
||||
}
|
||||
|
||||
.repository.file.list .non-diff-file-content .plain-text {
|
||||
padding: 1em 2em;
|
||||
}
|
||||
@ -241,10 +205,6 @@ td .commit-summary {
|
||||
padding: 0 !important;
|
||||
}
|
||||
|
||||
.non-diff-file-content .pdfobject {
|
||||
border-radius: 0 0 var(--border-radius) var(--border-radius);
|
||||
}
|
||||
|
||||
.repo-editor-header {
|
||||
width: 100%;
|
||||
}
|
||||
|
@ -60,3 +60,33 @@
|
||||
.file-view.code-view .ui.button.code-line-button:hover {
|
||||
background: var(--color-secondary);
|
||||
}
|
||||
|
||||
.view-raw {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.view-raw > * {
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.view-raw audio,
|
||||
.view-raw video,
|
||||
.view-raw img {
|
||||
margin: 1rem;
|
||||
border-radius: 0;
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
.view-raw img[src$=".svg" i] {
|
||||
max-height: 600px !important;
|
||||
max-width: 600px !important;
|
||||
}
|
||||
|
||||
.file-view-render-container {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.file-view-render-container :last-child {
|
||||
border-radius: 0 0 var(--border-radius) var(--border-radius); /* to match the "ui segment" bottom radius */
|
||||
}
|
||||
|
@ -2,6 +2,7 @@
|
||||
// to make sure the error handler always works, we should never import `window.config`, because
|
||||
// a user's custom template may break it.
|
||||
import type {Intent} from './types.ts';
|
||||
import {html} from './utils/html.ts';
|
||||
|
||||
// This sets up the URL prefix used in webpack's chunk loading.
|
||||
// This file must be imported before any lazy-loading is being attempted.
|
||||
@ -23,7 +24,7 @@ export function showGlobalErrorMessage(msg: string, msgType: Intent = 'error') {
|
||||
let msgDiv = msgContainer.querySelector<HTMLDivElement>(`.js-global-error[data-global-error-msg-compact="${msgCompact}"]`);
|
||||
if (!msgDiv) {
|
||||
const el = document.createElement('div');
|
||||
el.innerHTML = `<div class="ui container js-global-error tw-my-[--page-spacing]"><div class="ui ${msgType} message tw-text-center tw-whitespace-pre-line"></div></div>`;
|
||||
el.innerHTML = html`<div class="ui container js-global-error tw-my-[--page-spacing]"><div class="ui ${msgType} message tw-text-center tw-whitespace-pre-line"></div></div>`;
|
||||
msgDiv = el.childNodes[0] as HTMLDivElement;
|
||||
}
|
||||
// merge duplicated messages into "the message (count)" format
|
||||
|
@ -2,6 +2,7 @@ import {reactive} from 'vue';
|
||||
import {GET} from '../modules/fetch.ts';
|
||||
import {pathEscapeSegments} from '../utils/url.ts';
|
||||
import {createElementFromHTML} from '../utils/dom.ts';
|
||||
import {html} from '../utils/html.ts';
|
||||
|
||||
export function createViewFileTreeStore(props: { repoLink: string, treePath: string, currentRefNameSubURL: string}) {
|
||||
const store = reactive({
|
||||
@ -16,7 +17,7 @@ export function createViewFileTreeStore(props: { repoLink: string, treePath: str
|
||||
if (!document.querySelector(`.global-svg-icon-pool #${svgId}`)) poolSvgs.push(svgContent);
|
||||
}
|
||||
if (poolSvgs.length) {
|
||||
const svgContainer = createElementFromHTML('<div class="global-svg-icon-pool tw-hidden"></div>');
|
||||
const svgContainer = createElementFromHTML(html`<div class="global-svg-icon-pool tw-hidden"></div>`);
|
||||
svgContainer.innerHTML = poolSvgs.join('');
|
||||
document.body.append(svgContainer);
|
||||
}
|
||||
|
@ -43,13 +43,16 @@ export function initGlobalDeleteButton(): void {
|
||||
|
||||
fomanticQuery(modal).modal({
|
||||
closable: false,
|
||||
onApprove: async () => {
|
||||
onApprove: () => {
|
||||
// if `data-type="form"` exists, then submit the form by the selector provided by `data-form="..."`
|
||||
if (btn.getAttribute('data-type') === 'form') {
|
||||
const formSelector = btn.getAttribute('data-form');
|
||||
const form = document.querySelector<HTMLFormElement>(formSelector);
|
||||
if (!form) throw new Error(`no form named ${formSelector} found`);
|
||||
modal.classList.add('is-loading'); // the form is not in the modal, so also add loading indicator to the modal
|
||||
form.classList.add('is-loading');
|
||||
form.submit();
|
||||
return false; // prevent modal from closing automatically
|
||||
}
|
||||
|
||||
// prepare an AJAX form by data attributes
|
||||
@ -62,12 +65,15 @@ export function initGlobalDeleteButton(): void {
|
||||
postData.append('id', value);
|
||||
}
|
||||
}
|
||||
|
||||
const response = await POST(btn.getAttribute('data-url'), {data: postData});
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
window.location.href = data.redirect;
|
||||
}
|
||||
(async () => {
|
||||
const response = await POST(btn.getAttribute('data-url'), {data: postData});
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
window.location.href = data.redirect;
|
||||
}
|
||||
})();
|
||||
modal.classList.add('is-loading'); // the request is in progress, so also add loading indicator to the modal
|
||||
return false; // prevent modal from closing automatically
|
||||
},
|
||||
}).modal('show');
|
||||
});
|
||||
@ -158,13 +164,7 @@ function onShowModalClick(el: HTMLElement, e: MouseEvent) {
|
||||
}
|
||||
}
|
||||
|
||||
fomanticQuery(elModal).modal('setting', {
|
||||
onApprove: () => {
|
||||
// "form-fetch-action" can handle network errors gracefully,
|
||||
// so keep the modal dialog open so that users can re-submit the form if anything goes wrong.
|
||||
if (elModal.querySelector('.form-fetch-action')) return false;
|
||||
},
|
||||
}).modal('show');
|
||||
fomanticQuery(elModal).modal('show');
|
||||
}
|
||||
|
||||
export function initGlobalButtons(): void {
|
||||
|
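A note on the `onApprove` changes in the hunks above: Fomantic's modal only stays open when the handler synchronously returns `false`, while an `async` handler returns a Promise (which is truthy), so the modal would close before the request finishes. The diff therefore switches to a synchronous handler that fires the request from a detached async IIFE. A simplified sketch of the pattern, not the actual Gitea code, with a placeholder endpoint:

```ts
// Keep a Fomantic modal open while an asynchronous action runs.
function onApprove(): false {
  (async () => {
    const resp = await fetch('/placeholder/endpoint', {method: 'POST'}); // placeholder URL (assumption)
    if (resp.ok) window.location.reload();
  })();
  return false; // the synchronous `false` is what prevents the modal from closing automatically
}
```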
@ -1,5 +1,5 @@
|
||||
import {svg} from '../../svg.ts';
|
||||
import {htmlEscape} from 'escape-goat';
|
||||
import {html, htmlRaw} from '../../utils/html.ts';
|
||||
import {createElementFromHTML} from '../../utils/dom.ts';
|
||||
import {fomanticQuery} from '../../modules/fomantic/base.ts';
|
||||
|
||||
@ -12,17 +12,17 @@ type ConfirmModalOptions = {
|
||||
}
|
||||
|
||||
export function createConfirmModal({header = '', content = '', confirmButtonColor = 'primary'}:ConfirmModalOptions = {}): HTMLElement {
|
||||
const headerHtml = header ? `<div class="header">${htmlEscape(header)}</div>` : '';
|
||||
return createElementFromHTML(`
|
||||
<div class="ui g-modal-confirm modal">
|
||||
${headerHtml}
|
||||
<div class="content">${htmlEscape(content)}</div>
|
||||
<div class="actions">
|
||||
<button class="ui cancel button">${svg('octicon-x')} ${htmlEscape(i18n.modal_cancel)}</button>
|
||||
<button class="ui ${confirmButtonColor} ok button">${svg('octicon-check')} ${htmlEscape(i18n.modal_confirm)}</button>
|
||||
</div>
|
||||
</div>
|
||||
`);
|
||||
const headerHtml = header ? html`<div class="header">${header}</div>` : '';
|
||||
return createElementFromHTML(html`
|
||||
<div class="ui g-modal-confirm modal">
|
||||
${htmlRaw(headerHtml)}
|
||||
<div class="content">${content}</div>
|
||||
<div class="actions">
|
||||
<button class="ui cancel button">${htmlRaw(svg('octicon-x'))} ${i18n.modal_cancel}</button>
|
||||
<button class="ui ${confirmButtonColor} ok button">${htmlRaw(svg('octicon-check'))} ${i18n.modal_confirm}</button>
|
||||
</div>
|
||||
</div>
|
||||
`.trim());
|
||||
}
|
||||
|
||||
export function confirmModal(modal: HTMLElement | ConfirmModalOptions): Promise<boolean> {
|
||||
|
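Several hunks above and below replace `htmlEscape` string concatenation with an `html` tagged template (and `htmlRaw`) imported from `web_src/js/utils/html.ts`, which is not itself part of this diff. A minimal sketch of what such a helper might look like, assuming interpolations are escaped by default and `htmlRaw` opts out; the names and behaviour are inferred from usage, not the actual implementation:

```ts
// Hypothetical reconstruction of an escaping `html` tagged template, similar in spirit to utils/html.ts.
class RawHtml {
  constructor(public readonly value: string) {}
}

export function htmlRaw(value: string): RawHtml {
  return new RawHtml(value); // mark pre-rendered HTML (e.g. svg() output) as safe to embed
}

export function htmlEscape(s: string): string {
  return s.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;').replace(/'/g, '&#39;');
}

export function html(strings: TemplateStringsArray, ...values: Array<string | number | RawHtml>): string {
  let out = strings[0];
  for (const [i, v] of values.entries()) {
    out += v instanceof RawHtml ? v.value : htmlEscape(String(v));
    out += strings[i + 1];
  }
  return out;
}
```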
@ -114,7 +114,7 @@ async function handleUploadFiles(editor: CodeMirrorEditor | TextareaEditor, drop
|
||||
|
||||
export function removeAttachmentLinksFromMarkdown(text: string, fileUuid: string) {
|
||||
text = text.replace(new RegExp(`!?\\[([^\\]]+)\\]\\(/?attachments/${fileUuid}\\)`, 'g'), '');
|
||||
text = text.replace(new RegExp(`<img[^>]+src="/?attachments/${fileUuid}"[^>]*>`, 'g'), '');
|
||||
text = text.replace(new RegExp(`[<]img[^>]+src="/?attachments/${fileUuid}"[^>]*>`, 'g'), '');
|
||||
return text;
|
||||
}
|
||||
|
||||
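For clarity, here is what the two regular expressions above strip in practice; a usage sketch of the function shown in this hunk, with a placeholder uuid:

```ts
// Both the markdown image reference and the <img> tag pointing at the attachment are removed.
const uuid = '123e4567';
let text = 'before ![screenshot](/attachments/123e4567) middle <img width="200" src="attachments/123e4567"> end';
text = removeAttachmentLinksFromMarkdown(text, uuid);
// text is now: 'before  middle  end'
```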
|
@ -72,6 +72,7 @@ export function initCompLabelEdit(pageSelector: string) {
|
||||
return false;
|
||||
}
|
||||
submitFormFetchAction(form);
|
||||
return false;
|
||||
},
|
||||
}).modal('show');
|
||||
};
|
||||
|
@ -1,4 +1,4 @@
|
||||
import {htmlEscape} from 'escape-goat';
|
||||
import {htmlEscape} from '../../utils/html.ts';
|
||||
import {fomanticQuery} from '../../modules/fomantic/base.ts';
|
||||
|
||||
const {appSubUrl} = window.config;
|
||||
|
@ -9,17 +9,17 @@ const {i18n} = window.config;
|
||||
export function initCopyContent() {
|
||||
registerGlobalEventFunc('click', 'onCopyContentButtonClick', async (btn: HTMLElement) => {
|
||||
if (btn.classList.contains('disabled') || btn.classList.contains('is-loading')) return;
|
||||
let content;
|
||||
let isRasterImage = false;
|
||||
const link = btn.getAttribute('data-link');
|
||||
const rawFileLink = btn.getAttribute('data-raw-file-link');
|
||||
|
||||
// when data-link is present, we perform a fetch. this is either because
|
||||
// the text to copy is not in the DOM, or it is an image which should be
|
||||
let content, isRasterImage = false;
|
||||
|
||||
// when "data-raw-link" is present, we perform a fetch. this is either because
|
||||
// the text to copy is not in the DOM, or it is an image that should be
|
||||
// fetched to copy in full resolution
|
||||
if (link) {
|
||||
if (rawFileLink) {
|
||||
btn.classList.add('is-loading', 'loading-icon-2px');
|
||||
try {
|
||||
const res = await GET(link, {credentials: 'include', redirect: 'follow'});
|
||||
const res = await GET(rawFileLink, {credentials: 'include', redirect: 'follow'});
|
||||
const contentType = res.headers.get('content-type');
|
||||
|
||||
if (contentType.startsWith('image/') && !contentType.startsWith('image/svg')) {
|
||||
|
@ -1,5 +1,5 @@
|
||||
import {svg} from '../svg.ts';
|
||||
import {htmlEscape} from 'escape-goat';
|
||||
import {html} from '../utils/html.ts';
|
||||
import {clippie} from 'clippie';
|
||||
import {showTemporaryTooltip} from '../modules/tippy.ts';
|
||||
import {GET, POST} from '../modules/fetch.ts';
|
||||
@ -33,14 +33,14 @@ export function generateMarkdownLinkForAttachment(file: Partial<CustomDropzoneFi
|
||||
// Scale down images from HiDPI monitors. This uses the <img> tag because it's the only
|
||||
// method to change image size in Markdown that is supported by all implementations.
|
||||
// Make the image link relative to the repo path, then the final URL is "/sub-path/owner/repo/attachments/{uuid}"
|
||||
fileMarkdown = `<img width="${Math.round(width / dppx)}" alt="${htmlEscape(file.name)}" src="attachments/${htmlEscape(file.uuid)}">`;
|
||||
fileMarkdown = html`<img width="${Math.round(width / dppx)}" alt="${file.name}" src="attachments/${file.uuid}">`;
|
||||
} else {
|
||||
// Markdown always renders the image with a relative path, so the final URL is "/sub-path/owner/repo/attachments/{uuid}"
|
||||
// TODO: it should also use relative path for consistency, because absolute is ambiguous for "/sub-path/attachments" or "/attachments"
|
||||
fileMarkdown = ``;
|
||||
}
|
||||
} else if (isVideoFile(file)) {
|
||||
fileMarkdown = `<video src="attachments/${htmlEscape(file.uuid)}" title="${htmlEscape(file.name)}" controls></video>`;
|
||||
fileMarkdown = html`<video src="attachments/${file.uuid}" title="${file.name}" controls></video>`;
|
||||
}
|
||||
return fileMarkdown;
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
|
||||
import emojis from '../../../assets/emoji.json' with {type: 'json'};
|
||||
import {html} from '../utils/html.ts';
|
||||
|
||||
const {assetUrlPrefix, customEmojis} = window.config;
|
||||
|
||||
@ -24,12 +25,11 @@ for (const key of emojiKeys) {
|
||||
export function emojiHTML(name: string) {
|
||||
let inner;
|
||||
if (Object.hasOwn(customEmojis, name)) {
|
||||
inner = `<img alt=":${name}:" src="${assetUrlPrefix}/img/emoji/${name}.png">`;
|
||||
inner = html`<img alt=":${name}:" src="${assetUrlPrefix}/img/emoji/${name}.png">`;
|
||||
} else {
|
||||
inner = emojiString(name);
|
||||
}
|
||||
|
||||
return `<span class="emoji" title=":${name}:">${inner}</span>`;
|
||||
return html`<span class="emoji" title=":${name}:">${inner}</span>`;
|
||||
}
|
||||
|
||||
// retrieve string for given emoji name
|
||||
|
76  web_src/js/features/file-view.ts  (Normal file)
@ -0,0 +1,76 @@
|
||||
import type {FileRenderPlugin} from '../render/plugin.ts';
|
||||
import {newRenderPlugin3DViewer} from '../render/plugins/3d-viewer.ts';
|
||||
import {newRenderPluginPdfViewer} from '../render/plugins/pdf-viewer.ts';
|
||||
import {registerGlobalInitFunc} from '../modules/observer.ts';
|
||||
import {createElementFromHTML, showElem, toggleClass} from '../utils/dom.ts';
|
||||
import {html} from '../utils/html.ts';
|
||||
import {basename} from '../utils.ts';
|
||||
|
||||
const plugins: FileRenderPlugin[] = [];
|
||||
|
||||
function initPluginsOnce(): void {
|
||||
if (plugins.length) return;
|
||||
plugins.push(newRenderPlugin3DViewer(), newRenderPluginPdfViewer());
|
||||
}
|
||||
|
||||
function findFileRenderPlugin(filename: string, mimeType: string): FileRenderPlugin | null {
|
||||
return plugins.find((plugin) => plugin.canHandle(filename, mimeType)) || null;
|
||||
}
|
||||
|
||||
function showRenderRawFileButton(elFileView: HTMLElement, renderContainer: HTMLElement | null): void {
|
||||
const toggleButtons = elFileView.querySelector('.file-view-toggle-buttons');
|
||||
showElem(toggleButtons);
|
||||
const displayingRendered = Boolean(renderContainer);
|
||||
toggleClass(toggleButtons.querySelectorAll('.file-view-toggle-source'), 'active', !displayingRendered); // it may not exist
|
||||
toggleClass(toggleButtons.querySelector('.file-view-toggle-rendered'), 'active', displayingRendered);
|
||||
// TODO: if there is only one button, hide it?
|
||||
}
|
||||
|
||||
async function renderRawFileToContainer(container: HTMLElement, rawFileLink: string, mimeType: string) {
|
||||
const elViewRawPrompt = container.querySelector('.file-view-raw-prompt');
|
||||
if (!rawFileLink || !elViewRawPrompt) throw new Error('unexpected file view container');
|
||||
|
||||
let rendered = false, errorMsg = '';
|
||||
try {
|
||||
const plugin = findFileRenderPlugin(basename(rawFileLink), mimeType);
|
||||
if (plugin) {
|
||||
container.classList.add('is-loading');
|
||||
container.setAttribute('data-render-name', plugin.name); // not used yet
|
||||
await plugin.render(container, rawFileLink);
|
||||
rendered = true;
|
||||
}
|
||||
} catch (e) {
|
||||
errorMsg = `${e}`;
|
||||
} finally {
|
||||
container.classList.remove('is-loading');
|
||||
}
|
||||
|
||||
if (rendered) {
|
||||
elViewRawPrompt.remove();
|
||||
return;
|
||||
}
|
||||
|
||||
// remove all children from the container, and only show the raw file link
|
||||
container.replaceChildren(elViewRawPrompt);
|
||||
|
||||
if (errorMsg) {
|
||||
const elErrorMessage = createElementFromHTML(html`<div class="ui error message">${errorMsg}</div>`);
|
||||
elViewRawPrompt.insertAdjacentElement('afterbegin', elErrorMessage);
|
||||
}
|
||||
}
|
||||
|
||||
export function initRepoFileView(): void {
|
||||
registerGlobalInitFunc('initRepoFileView', async (elFileView: HTMLElement) => {
|
||||
initPluginsOnce();
|
||||
const rawFileLink = elFileView.getAttribute('data-raw-file-link');
|
||||
const mimeType = elFileView.getAttribute('data-mime-type') || ''; // not used yet
|
||||
// TODO: we should also provide the prefetched file head bytes to let the plugin decide whether to render or not
|
||||
const plugin = findFileRenderPlugin(basename(rawFileLink), mimeType);
|
||||
if (!plugin) return;
|
||||
|
||||
const renderContainer = elFileView.querySelector<HTMLElement>('.file-view-render-container');
|
||||
showRenderRawFileButton(elFileView, renderContainer);
|
||||
// maybe in the future multiple plugins can render the same file, so we should not assume only one plugin will render it
|
||||
if (renderContainer) await renderRawFileToContainer(renderContainer, rawFileLink, mimeType);
|
||||
});
|
||||
}
|
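The new `file-view.ts` above depends on a `FileRenderPlugin` type from `web_src/js/render/plugin.ts`, which is not included in this diff. Judging only from how it is used here (`name`, `canHandle`, and an awaited `render`), the interface is presumably close to the following hedged reconstruction:

```ts
// Inferred shape of render/plugin.ts; the real definition may carry extra fields.
export type FileRenderPlugin = {
  // machine-readable plugin name, written into the container's data-render-name attribute
  name: string;
  // decide whether this plugin can render the given file
  canHandle: (filename: string, mimeType: string) => boolean;
  // render the raw file into the given container element
  render: (container: HTMLElement, rawFileLink: string) => Promise<void>;
};
```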
@ -1,4 +1,4 @@
|
||||
import {htmlEscape} from 'escape-goat';
|
||||
import {html, htmlRaw} from '../utils/html.ts';
|
||||
import {createCodeEditor} from './codeeditor.ts';
|
||||
import {hideElem, queryElems, showElem, createElementFromHTML} from '../utils/dom.ts';
|
||||
import {attachRefIssueContextPopup} from './contextpopup.ts';
|
||||
@ -87,10 +87,10 @@ export function initRepoEditor() {
|
||||
if (i < parts.length - 1) {
|
||||
if (trimValue.length) {
|
||||
const linkElement = createElementFromHTML(
|
||||
`<span class="section"><a href="#">${htmlEscape(value)}</a></span>`,
|
||||
html`<span class="section"><a href="#">${value}</a></span>`,
|
||||
);
|
||||
const dividerElement = createElementFromHTML(
|
||||
`<div class="breadcrumb-divider">/</div>`,
|
||||
html`<div class="breadcrumb-divider">/</div>`,
|
||||
);
|
||||
links.push(linkElement);
|
||||
dividers.push(dividerElement);
|
||||
@ -113,7 +113,7 @@ export function initRepoEditor() {
|
||||
if (!warningDiv) {
|
||||
warningDiv = document.createElement('div');
|
||||
warningDiv.classList.add('ui', 'warning', 'message', 'flash-message', 'flash-warning', 'space-related');
|
||||
warningDiv.innerHTML = '<p>File path contains leading or trailing whitespace.</p>';
|
||||
warningDiv.innerHTML = html`<p>File path contains leading or trailing whitespace.</p>`;
|
||||
// Add display 'block' because display is set to 'none' in fomantic\build\semantic.css
|
||||
warningDiv.style.display = 'block';
|
||||
const inputContainer = document.querySelector('.repo-editor-header');
|
||||
@ -196,7 +196,8 @@ export function initRepoEditor() {
|
||||
})();
|
||||
}
|
||||
|
||||
export function renderPreviewPanelContent(previewPanel: Element, content: string) {
|
||||
previewPanel.innerHTML = `<div class="render-content markup">${content}</div>`;
|
||||
export function renderPreviewPanelContent(previewPanel: Element, htmlContent: string) {
|
||||
// the content is from the server, so it is safe to use innerHTML
|
||||
previewPanel.innerHTML = html`<div class="render-content markup">${htmlRaw(htmlContent)}</div>`;
|
||||
attachRefIssueContextPopup(previewPanel.querySelectorAll('p .ref-issue'));
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
import {updateIssuesMeta} from './repo-common.ts';
import {toggleElem, queryElems, isElemVisible} from '../utils/dom.ts';
import {htmlEscape} from 'escape-goat';
import {html} from '../utils/html.ts';
import {confirmModal} from './comp/ConfirmModal.ts';
import {showErrorToast} from '../modules/toast.ts';
import {createSortable} from '../modules/sortable.ts';
@ -138,10 +138,10 @@ function initDropdownUserRemoteSearch(el: Element) {
// the content is provided by backend IssuePosters handler
processedResults.length = 0;
for (const item of resp.results) {
let html = `<img class="ui avatar tw-align-middle" src="${htmlEscape(item.avatar_link)}" aria-hidden="true" alt width="20" height="20"><span class="gt-ellipsis">${htmlEscape(item.username)}</span>`;
if (item.full_name) html += `<span class="search-fullname tw-ml-2">${htmlEscape(item.full_name)}</span>`;
let nameHtml = html`<img class="ui avatar tw-align-middle" src="${item.avatar_link}" aria-hidden="true" alt width="20" height="20"><span class="gt-ellipsis">${item.username}</span>`;
if (item.full_name) nameHtml += html`<span class="search-fullname tw-ml-2">${item.full_name}</span>`;
if (selectedUsername.toLowerCase() === item.username.toLowerCase()) selectedUsername = item.username;
processedResults.push({value: item.username, name: html});
processedResults.push({value: item.username, name: nameHtml});
}
resp.results = processedResults;
return resp;
@ -1,4 +1,4 @@
import {htmlEscape} from 'escape-goat';
import {html, htmlEscape} from '../utils/html.ts';
import {createTippy, showTemporaryTooltip} from '../modules/tippy.ts';
import {
addDelegatedEventListener,
@ -17,6 +17,7 @@ import {showErrorToast} from '../modules/toast.ts';
import {initRepoIssueSidebar} from './repo-issue-sidebar.ts';
import {fomanticQuery} from '../modules/fomantic/base.ts';
import {ignoreAreYouSure} from '../vendor/jquery.are-you-sure.ts';
import {registerGlobalInitFunc} from '../modules/observer.ts';

const {appSubUrl} = window.config;

@ -45,8 +46,7 @@ export function initRepoIssueSidebarDependency() {
if (String(issue.id) === currIssueId) continue;
filteredResponse.results.push({
value: issue.id,
name: `<div class="gt-ellipsis">#${issue.number} ${htmlEscape(issue.title)}</div>
<div class="text small tw-break-anywhere">${htmlEscape(issue.repository.full_name)}</div>`,
name: html`<div class="gt-ellipsis">#${issue.number} ${issue.title}</div><div class="text small tw-break-anywhere">${issue.repository.full_name}</div>`,
});
}
return filteredResponse;
@ -416,25 +416,20 @@ export function initRepoIssueWipNewTitle() {

export function initRepoIssueWipToggle() {
// Toggle WIP for existing PR
queryElems(document, '.toggle-wip', (el) => el.addEventListener('click', async (e) => {
registerGlobalInitFunc('initPullRequestWipToggle', (toggleWip) => toggleWip.addEventListener('click', async (e) => {
e.preventDefault();
const toggleWip = el;
const title = toggleWip.getAttribute('data-title');
const wipPrefix = toggleWip.getAttribute('data-wip-prefix');
const updateUrl = toggleWip.getAttribute('data-update-url');

try {
const params = new URLSearchParams();
params.append('title', title?.startsWith(wipPrefix) ? title.slice(wipPrefix.length).trim() : `${wipPrefix.trim()} ${title}`);

const response = await POST(updateUrl, {data: params});
if (!response.ok) {
throw new Error('Failed to toggle WIP status');
}
window.location.reload();
} catch (error) {
console.error(error);
const params = new URLSearchParams();
params.append('title', title?.startsWith(wipPrefix) ? title.slice(wipPrefix.length).trim() : `${wipPrefix.trim()} ${title}`);
const response = await POST(updateUrl, {data: params});
if (!response.ok) {
showErrorToast(`Failed to toggle 'work in progress' status`);
return;
}
window.location.reload();
}));
}
@ -1,5 +1,5 @@
import {hideElem, querySingleVisibleElem, showElem, toggleElem} from '../utils/dom.ts';
import {htmlEscape} from 'escape-goat';
import {htmlEscape} from '../utils/html.ts';
import {fomanticQuery} from '../modules/fomantic/base.ts';
import {sanitizeRepoName} from './repo-common.ts';
@ -2,6 +2,7 @@ import {validateTextareaNonEmpty, initComboMarkdownEditor} from './comp/ComboMar
import {fomanticMobileScreen} from '../modules/fomantic.ts';
import {POST} from '../modules/fetch.ts';
import type {ComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
import {html, htmlRaw} from '../utils/html.ts';

async function initRepoWikiFormEditor() {
const editArea = document.querySelector<HTMLTextAreaElement>('.repository.wiki .combo-markdown-editor textarea');
@ -30,7 +31,7 @@ async function initRepoWikiFormEditor() {
const response = await POST(editor.previewUrl, {data: formData});
const data = await response.text();
lastContent = newContent;
previewTarget.innerHTML = `<div class="render-content markup ui segment">${data}</div>`;
previewTarget.innerHTML = html`<div class="render-content markup ui segment">${htmlRaw(data)}</div>`;
} catch (error) {
console.error('Error rendering preview:', error);
} finally {
@ -1,5 +1,5 @@
import {emojiKeys, emojiHTML, emojiString} from './emoji.ts';
import {htmlEscape} from 'escape-goat';
import {html, htmlRaw} from '../utils/html.ts';

type TributeItem = Record<string, any>;

@ -26,17 +26,18 @@ export async function attachTribute(element: HTMLElement) {
return emojiString(item.original);
},
menuItemTemplate: (item: TributeItem) => {
return `<div class="tribute-item">${emojiHTML(item.original)}<span>${htmlEscape(item.original)}</span></div>`;
return html`<div class="tribute-item">${htmlRaw(emojiHTML(item.original))}<span>${item.original}</span></div>`;
},
}, { // mentions
values: window.config.mentionValues ?? [],
requireLeadingSpace: true,
menuItemTemplate: (item: TributeItem) => {
return `
const fullNameHtml = item.original.fullname && item.original.fullname !== '' ? html`<span class="fullname">${item.original.fullname}</span>` : '';
return html`
<div class="tribute-item">
<img alt src="${htmlEscape(item.original.avatar)}" width="21" height="21"/>
<span class="name">${htmlEscape(item.original.name)}</span>
${item.original.fullname && item.original.fullname !== '' ? `<span class="fullname">${htmlEscape(item.original.fullname)}</span>` : ''}
<img alt src="${item.original.avatar}" width="21" height="21"/>
<span class="name">${item.original.name}</span>
${htmlRaw(fullNameHtml)}
</div>
`;
},
@ -19,7 +19,7 @@ import {initRepoIssueContentHistory} from './features/repo-issue-content.ts';
import {initStopwatch} from './features/stopwatch.ts';
import {initFindFileInRepo} from './features/repo-findfile.ts';
import {initMarkupContent} from './markup/content.ts';
import {initPdfViewer} from './render/pdf.ts';
import {initRepoFileView} from './features/file-view.ts';
import {initUserAuthOauth2, initUserCheckAppUrl} from './features/user-auth.ts';
import {initRepoPullRequestAllowMaintainerEdit, initRepoPullRequestReview, initRepoIssueSidebarDependency, initRepoIssueFilterItemLabel} from './features/repo-issue.ts';
import {initRepoEllipsisButton, initCommitStatuses} from './features/repo-commit.ts';
@ -159,10 +159,11 @@ onDomReady(() => {
initUserAuthWebAuthnRegister,
initUserSettings,
initRepoDiffView,
initPdfViewer,
initColorPickers,
initOAuth2SettingsDisableCheckbox,
initRepoFileView,
]);

// it must be the last one, so that "querySelectorAll" only needs to be executed once for global init functions.
@ -1,4 +1,4 @@
import {htmlEscape} from 'escape-goat';
import {html, htmlRaw} from '../utils/html.ts';

type Processor = (el: HTMLElement) => string | HTMLElement | void;

@ -38,10 +38,10 @@ function prepareProcessors(ctx:ProcessorContext): Processors {
IMG(el: HTMLElement) {
const alt = el.getAttribute('alt') || 'image';
const src = el.getAttribute('src');
const widthAttr = el.hasAttribute('width') ? ` width="${htmlEscape(el.getAttribute('width') || '')}"` : '';
const heightAttr = el.hasAttribute('height') ? ` height="${htmlEscape(el.getAttribute('height') || '')}"` : '';
const widthAttr = el.hasAttribute('width') ? htmlRaw` width="${el.getAttribute('width') || ''}"` : '';
const heightAttr = el.hasAttribute('height') ? htmlRaw` height="${el.getAttribute('height') || ''}"` : '';
if (widthAttr || heightAttr) {
return `<img alt="${htmlEscape(alt)}"${widthAttr}${heightAttr} src="${htmlEscape(src)}">`;
return html`<img alt="${alt}"${widthAttr}${heightAttr} src="${src}">`;
}
return ``;
},
@ -2,6 +2,7 @@ import {isDarkTheme} from '../utils.ts';
import {makeCodeCopyButton} from './codecopy.ts';
import {displayError} from './common.ts';
import {queryElems} from '../utils/dom.ts';
import {html, htmlRaw} from '../utils/html.ts';

const {mermaidMaxSourceCharacters} = window.config;

@ -46,7 +47,7 @@ export async function initMarkupCodeMermaid(elMarkup: HTMLElement): Promise<void>

const iframe = document.createElement('iframe');
iframe.classList.add('markup-content-iframe', 'tw-invisible');
iframe.srcdoc = `<html><head><style>${iframeCss}</style></head><body>${svg}</body></html>`;
iframe.srcdoc = html`<html><head><style>${htmlRaw(iframeCss)}</style></head><body>${htmlRaw(svg)}</body></html>`;

const mermaidBlock = document.createElement('div');
mermaidBlock.classList.add('mermaid-block', 'is-loading', 'tw-hidden');
@ -9,8 +9,9 @@ const fomanticModalFn = $.fn.modal;
export function initAriaModalPatch() {
if ($.fn.modal === ariaModalFn) throw new Error('initAriaModalPatch could only be called once');
$.fn.modal = ariaModalFn;
$.fn.fomanticExt.onModalBeforeHidden = onModalBeforeHidden;
(ariaModalFn as FomanticInitFunction).settings = fomanticModalFn.settings;
$.fn.fomanticExt.onModalBeforeHidden = onModalBeforeHidden;
$.fn.modal.settings.onApprove = onModalApproveDefault;
}

// the patched `$.fn.modal` modal function
@ -34,6 +35,29 @@ function ariaModalFn(this: any, ...args: Parameters<FomanticInitFunction>) {
function onModalBeforeHidden(this: any) {
const $modal = $(this);
const elModal = $modal[0];
queryElems(elModal, 'form', (form: HTMLFormElement) => form.reset());
hideToastsFrom(elModal.closest('.ui.dimmer') ?? document.body);

// reset the form after the modal is hidden, after other modal events and handlers (e.g. "onApprove", form submit)
setTimeout(() => {
queryElems(elModal, 'form', (form: HTMLFormElement) => form.reset());
}, 0);
}

function onModalApproveDefault(this: any) {
const $modal = $(this);
const selectors = $modal.modal('setting', 'selector');
const elModal = $modal[0];
const elApprove = elModal.querySelector(selectors.approve);
const elForm = elApprove?.closest('form');
if (!elForm) return true; // no form, just allow closing the modal

// "form-fetch-action" can handle network errors gracefully,
// so keep the modal dialog open so that users can re-submit the form if anything goes wrong.
if (elForm.matches('.form-fetch-action')) return false;

// There is an abuse of the "modal" + "form" combination: the "Approve" button is a traditional form submit button in the form.
// Then "approve" and "submit" occur at the same time, and the modal would be closed immediately before the form is submitted.
// So here we prevent the modal from closing automatically by returning false and add the "is-loading" class to the form element.
elForm.classList.add('is-loading');
return false;
}
@ -2,6 +2,7 @@ import tippy, {followCursor} from 'tippy.js';
import {isDocumentFragmentOrElementNode} from '../utils/dom.ts';
import {formatDatetime} from '../utils/time.ts';
import type {Content, Instance, Placement, Props} from 'tippy.js';
import {html} from '../utils/html.ts';

type TippyOpts = {
role?: string,
@ -9,7 +10,7 @@ type TippyOpts = {
} & Partial<Props>;

const visibleInstances = new Set<Instance>();
const arrowSvg = `<svg width="16" height="7"><path d="m0 7 8-7 8 7Z" class="tippy-svg-arrow-outer"/><path d="m0 8 8-7 8 7Z" class="tippy-svg-arrow-inner"/></svg>`;
const arrowSvg = html`<svg width="16" height="7"><path d="m0 7 8-7 8 7Z" class="tippy-svg-arrow-outer"/><path d="m0 8 8-7 8 7Z" class="tippy-svg-arrow-inner"/></svg>`;

export function createTippy(target: Element, opts: TippyOpts = {}): Instance {
// the callback functions should be destructured from opts,
@ -1,4 +1,4 @@
import {htmlEscape} from 'escape-goat';
import {htmlEscape} from '../utils/html.ts';
import {svg} from '../svg.ts';
import {animateOnce, queryElems, showElem} from '../utils/dom.ts';
import Toastify from 'toastify-js'; // don't use "async import", because when network error occurs, the "async import" also fails and nothing is shown
@ -1,17 +0,0 @@
import {htmlEscape} from 'escape-goat';
import {registerGlobalInitFunc} from '../modules/observer.ts';

export async function initPdfViewer() {
registerGlobalInitFunc('initPdfViewer', async (el: HTMLInputElement) => {
const pdfobject = await import(/* webpackChunkName: "pdfobject" */'pdfobject');

const src = el.getAttribute('data-src');
const fallbackText = el.getAttribute('data-fallback-button-text');
pdfobject.embed(src, el, {
fallbackLink: htmlEscape`
<a role="button" class="ui basic button pdf-fallback-button" href="[url]">${fallbackText}</a>
`,
});
el.classList.remove('is-loading');
});
}
10 web_src/js/render/plugin.ts Normal file
@ -0,0 +1,10 @@
export type FileRenderPlugin = {
// unique plugin name
name: string;

// test if plugin can handle a specified file
canHandle: (filename: string, mimeType: string) => boolean;

// render file content
render: (container: HTMLElement, fileUrl: string, options?: any) => Promise<void>;
}
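The file-view.ts hunk above calls initPluginsOnce() and findFileRenderPlugin(), but their implementations are not part of the hunks shown here. A minimal sketch of how such a registry and lookup could be wired to this interface; the module path, variable names and function bodies below are assumptions for illustration, not necessarily the actual Gitea implementation:

// hypothetical web_src/js/render/plugins.ts
import type {FileRenderPlugin} from './plugin.ts';
import {newRenderPlugin3DViewer} from './plugins/3d-viewer.ts';
import {newRenderPluginPdfViewer} from './plugins/pdf-viewer.ts';

// build the plugin list once; each plugin decides via canHandle() whether it applies to a file
const plugins: FileRenderPlugin[] = [];

export function initPluginsOnce(): void {
  if (plugins.length) return; // already initialized
  plugins.push(newRenderPlugin3DViewer(), newRenderPluginPdfViewer());
}

export function findFileRenderPlugin(filename: string, mimeType: string): FileRenderPlugin | null {
  return plugins.find((plugin) => plugin.canHandle(filename, mimeType)) ?? null;
}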
60 web_src/js/render/plugins/3d-viewer.ts Normal file
@ -0,0 +1,60 @@
import type {FileRenderPlugin} from '../plugin.ts';
import {extname} from '../../utils.ts';

// support common 3D model file formats, use online-3d-viewer library for rendering

// eslint-disable-next-line multiline-comment-style
/* a simple text STL file example:
solid SimpleTriangle
facet normal 0 0 1
outer loop
vertex 0 0 0
vertex 1 0 0
vertex 0 1 0
endloop
endfacet
endsolid SimpleTriangle
*/

export function newRenderPlugin3DViewer(): FileRenderPlugin {
// Some extensions are text-based formats:
// .3mf .amf .brep: XML
// .fbx: XML or BINARY
// .dae .gltf: JSON
// .ifc .igs .iges .stp .step: TEXT
// .stl .ply: TEXT or BINARY
// .obj .off .wrl: TEXT
// So we need to be able to render when the file is recognized as a plaintext file by the backend.
//
// More logic is needed to get this fully right (rendering a text 3D model automatically):
// we need to distinguish the ambiguous filename extensions.
// For example: "*.obj, *.off, *.step" might or might not be a 3D model file.
// So when it is a text file, we can't assume that it should only be rendered by the 3D plugin,
// otherwise end users would have no way to view its real content when the file is not actually a 3D model.
const SUPPORTED_EXTENSIONS = [
'.3dm', '.3ds', '.3mf', '.amf', '.bim', '.brep',
'.dae', '.fbx', '.fcstd', '.glb', '.gltf',
'.ifc', '.igs', '.iges', '.stp', '.step',
'.stl', '.obj', '.off', '.ply', '.wrl',
];

return {
name: '3d-model-viewer',

canHandle(filename: string, _mimeType: string): boolean {
const ext = extname(filename).toLowerCase();
return SUPPORTED_EXTENSIONS.includes(ext);
},

async render(container: HTMLElement, fileUrl: string): Promise<void> {
// TODO: height and/or max-height?
const OV = await import(/* webpackChunkName: "online-3d-viewer" */'online-3d-viewer');
const viewer = new OV.EmbeddedViewer(container, {
backgroundColor: new OV.RGBAColor(59, 68, 76, 0),
defaultColor: new OV.RGBColor(65, 131, 196),
edgeSettings: new OV.EdgeSettings(false, new OV.RGBColor(0, 0, 0), 1),
});
viewer.LoadModelFromUrlList([fileUrl]);
},
};
}
20 web_src/js/render/plugins/pdf-viewer.ts Normal file
@ -0,0 +1,20 @@
import type {FileRenderPlugin} from '../plugin.ts';

export function newRenderPluginPdfViewer(): FileRenderPlugin {
return {
name: 'pdf-viewer',

canHandle(filename: string, _mimeType: string): boolean {
return filename.toLowerCase().endsWith('.pdf');
},

async render(container: HTMLElement, fileUrl: string): Promise<void> {
const PDFObject = await import(/* webpackChunkName: "pdfobject" */'pdfobject');
// TODO: the PDFObject library does not support dynamic height adjustment,
container.style.height = `${window.innerHeight - 100}px`;
if (!PDFObject.default.embed(fileUrl, container)) {
throw new Error('Unable to render the PDF file');
}
},
};
}
@ -1,5 +1,6 @@
import {defineComponent, h, type PropType} from 'vue';
import {parseDom, serializeXml} from './utils.ts';
import {html, htmlRaw} from './utils/html.ts';
import giteaDoubleChevronLeft from '../../public/assets/img/svg/gitea-double-chevron-left.svg';
import giteaDoubleChevronRight from '../../public/assets/img/svg/gitea-double-chevron-right.svg';
import giteaEmptyCheckbox from '../../public/assets/img/svg/gitea-empty-checkbox.svg';
@ -220,7 +221,7 @@ export const SvgIcon = defineComponent({
const classes = Array.from(svgOuter.classList);
if (this.symbolId) {
classes.push('tw-hidden', 'svg-symbol-container');
svgInnerHtml = `<symbol id="${this.symbolId}" viewBox="${attrs['^viewBox']}">${svgInnerHtml}</symbol>`;
svgInnerHtml = html`<symbol id="${this.symbolId}" viewBox="${attrs['^viewBox']}">${htmlRaw(svgInnerHtml)}</symbol>`;
}
// create VNode
return h('svg', {
@ -314,6 +314,7 @@ export function replaceTextareaSelection(textarea: HTMLTextAreaElement, text: st
export function createElementFromHTML<T extends HTMLElement>(htmlString: string): T {
htmlString = htmlString.trim();
// some tags like "tr" are special and must be created inside a correct parent container
// eslint-disable-next-line github/unescaped-html-literal -- FIXME: maybe we need to use other approaches to create elements from HTML, e.g. using DOMParser
if (htmlString.startsWith('<tr')) {
const container = document.createElement('table');
container.innerHTML = htmlString;
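For context on that special case, a small standalone illustration (not part of the diff) of the browser parsing behavior it works around:

// parsing a <tr> fragment in a generic container silently drops the row element
const div = document.createElement('div');
div.innerHTML = '<tr><td>cell</td></tr>';
console.log(div.innerHTML); // "cell": the tr/td tags are discarded by the HTML parser

// parsing the same fragment inside a <table> keeps the row
const table = document.createElement('table');
table.innerHTML = '<tr><td>cell</td></tr>';
console.log(table.querySelector('tr') !== null); // true (the browser wraps it in an implicit <tbody>)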
8 web_src/js/utils/html.test.ts Normal file
@ -0,0 +1,8 @@
import {html, htmlEscape, htmlRaw} from './html.ts';

test('html', async () => {
expect(html`<a>${'<>&\'"'}</a>`).toBe(`<a>&lt;&gt;&amp;&#39;&quot;</a>`);
expect(html`<a>${htmlRaw('<img>')}</a>`).toBe(`<a><img></a>`);
expect(html`<a>${htmlRaw`<img ${'&'}>`}</a>`).toBe(`<a><img &amp;></a>`);
expect(htmlEscape(`<a></a>`)).toBe(`&lt;a&gt;&lt;/a&gt;`);
});
32 web_src/js/utils/html.ts Normal file
@ -0,0 +1,32 @@
export function htmlEscape(s: string, ...args: Array<any>): string {
if (args.length !== 0) throw new Error('use html or htmlRaw instead of htmlEscape'); // check legacy usages
return s.replace(/&/g, '&amp;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#39;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;');
}

class rawObject {
private readonly value: string;
constructor(v: string) { this.value = v }
toString(): string { return this.value }
}

export function html(tmpl: TemplateStringsArray, ...parts: Array<any>): string {
let output = tmpl[0];
for (let i = 0; i < parts.length; i++) {
const value = parts[i];
const valueEscaped = (value instanceof rawObject) ? value.toString() : htmlEscape(String(parts[i]));
output = output + valueEscaped + tmpl[i + 1];
}
return output;
}

export function htmlRaw(s: string|TemplateStringsArray, ...tmplParts: Array<any>): rawObject {
if (typeof s === 'string') {
if (tmplParts.length !== 0) throw new Error("either htmlRaw('str') or htmlRaw`tmpl`");
return new rawObject(s);
}
return new rawObject(html(s, ...tmplParts));
}
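For reference, a small usage sketch of these helpers (variable names here are only illustrative): interpolated values are escaped by default, and already-safe HTML must be wrapped in htmlRaw to avoid double escaping.

import {html, htmlRaw, htmlEscape} from './html.ts';

const userInput = `<script>alert(1)</script>`;
// interpolated values are escaped by the template tag
const safe = html`<span class="name">${userInput}</span>`;
console.log(safe); // <span class="name">&lt;script&gt;alert(1)&lt;/script&gt;</span>

// pre-built HTML (e.g. the output of another html`...` call) must be wrapped in htmlRaw,
// otherwise it would be escaped a second time
const inner = html`<b>${userInput}</b>`;
const outer = html`<div class="item">${htmlRaw(inner)}</div>`;
console.log(outer); // the <b> wrapper is kept, the user input stays escaped exactly once

// htmlEscape is the low-level helper used by the template tag
console.log(htmlEscape(`&"'<>`)); // &amp;&quot;&#39;&lt;&gt;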