Mirror of https://github.com/go-gitea/gitea.git (synced 2025-07-04 00:01:16 -04:00)

Compare commits
No commits in common. "3c86ce7d8a56de89dd3bfa2b96a9551e27202bda" and "99d6934f394d15e6dcf024ad85d6f452adce567c" have entirely different histories: 3c86ce7d8a ... 99d6934f39
@@ -91,7 +91,6 @@ module.exports = {
 plugins: ['@vitest/eslint-plugin'],
 globals: vitestPlugin.environments.env.globals,
 rules: {
-'github/unescaped-html-literal': [0],
 '@vitest/consistent-test-filename': [0],
 '@vitest/consistent-test-it': [0],
 '@vitest/expect-expect': [0],
@@ -424,7 +423,7 @@ module.exports = {
 'github/no-useless-passive': [2],
 'github/prefer-observers': [2],
 'github/require-passive-events': [2],
-'github/unescaped-html-literal': [2],
+'github/unescaped-html-literal': [0],
 'grouped-accessor-pairs': [2],
 'guard-for-in': [0],
 'id-blacklist': [0],
@@ -6,17 +6,17 @@ package fileicon
 import "code.gitea.io/gitea/modules/git"
 
 type EntryInfo struct {
-BaseName string
+FullName string
 EntryMode git.EntryMode
 SymlinkToMode git.EntryMode
 IsOpen bool
 }
 
-func EntryInfoFromGitTreeEntry(commit *git.Commit, fullPath string, gitEntry *git.TreeEntry) *EntryInfo {
-ret := &EntryInfo{BaseName: gitEntry.Name(), EntryMode: gitEntry.Mode()}
+func EntryInfoFromGitTreeEntry(gitEntry *git.TreeEntry) *EntryInfo {
+ret := &EntryInfo{FullName: gitEntry.Name(), EntryMode: gitEntry.Mode()}
 if gitEntry.IsLink() {
-if res, err := git.EntryFollowLink(commit, fullPath, gitEntry); err == nil && res.TargetEntry.IsDir() {
-ret.SymlinkToMode = res.TargetEntry.Mode()
+if te, err := gitEntry.FollowLink(); err == nil && te.IsDir() {
+ret.SymlinkToMode = te.Mode()
 }
 }
 return ret
@@ -5,6 +5,7 @@ package fileicon
 
 import (
 "html/template"
+"path"
 "strings"
 "sync"
 
@@ -133,7 +134,7 @@ func (m *MaterialIconProvider) FindIconName(entry *EntryInfo) string {
 return "folder-git"
 }
 
-fileNameLower := strings.ToLower(entry.BaseName)
+fileNameLower := strings.ToLower(path.Base(entry.FullName))
 if entry.EntryMode.IsDir() {
 if s, ok := m.rules.FolderNames[fileNameLower]; ok {
 return s
@@ -20,8 +20,8 @@ func TestMain(m *testing.M) {
 func TestFindIconName(t *testing.T) {
 unittest.PrepareTestEnv(t)
 p := fileicon.DefaultMaterialIconProvider()
-assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.php", EntryMode: git.EntryModeBlob}))
-assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.PHP", EntryMode: git.EntryModeBlob}))
-assert.Equal(t, "javascript", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.js", EntryMode: git.EntryModeBlob}))
-assert.Equal(t, "visualstudio", p.FindIconName(&fileicon.EntryInfo{BaseName: "foo.vba", EntryMode: git.EntryModeBlob}))
+assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.php", EntryMode: git.EntryModeBlob}))
+assert.Equal(t, "php", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.PHP", EntryMode: git.EntryModeBlob}))
+assert.Equal(t, "javascript", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.js", EntryMode: git.EntryModeBlob}))
+assert.Equal(t, "visualstudio", p.FindIconName(&fileicon.EntryInfo{FullName: "foo.vba", EntryMode: git.EntryModeBlob}))
 }
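For orientation (not part of the diff): a minimal usage sketch of the left-hand icon API shown above, mirroring the test cases; the helper name iconNameForBlob is an illustrative assumption.

package example

import (
	"code.gitea.io/gitea/modules/fileicon"
	"code.gitea.io/gitea/modules/git"
)

// iconNameForBlob looks up the material icon name for a plain file entry,
// using the left-hand EntryInfo shape (BaseName) from the hunks above.
func iconNameForBlob(name string) string {
	p := fileicon.DefaultMaterialIconProvider()
	return p.FindIconName(&fileicon.EntryInfo{BaseName: name, EntryMode: git.EntryModeBlob})
}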
@@ -22,22 +22,17 @@ func (b *Blob) Name() string {
 return b.name
 }
 
-// GetBlobBytes Gets the limited content of the blob
-func (b *Blob) GetBlobBytes(limit int64) ([]byte, error) {
+// GetBlobContent Gets the limited content of the blob as raw text
+func (b *Blob) GetBlobContent(limit int64) (string, error) {
 if limit <= 0 {
-return nil, nil
+return "", nil
 }
 dataRc, err := b.DataAsync()
 if err != nil {
-return nil, err
+return "", err
 }
 defer dataRc.Close()
-return util.ReadWithLimit(dataRc, int(limit))
-}
-
-// GetBlobContent Gets the limited content of the blob as raw text
-func (b *Blob) GetBlobContent(limit int64) (string, error) {
-buf, err := b.GetBlobBytes(limit)
+buf, err := util.ReadWithLimit(dataRc, int(limit))
 return string(buf), err
 }
 
@@ -104,9 +99,11 @@ loop:
 
 // GuessContentType guesses the content type of the blob.
 func (b *Blob) GuessContentType() (typesniffer.SniffedType, error) {
-buf, err := b.GetBlobBytes(typesniffer.SniffContentSize)
+r, err := b.DataAsync()
 if err != nil {
 return typesniffer.SniffedType{}, err
 }
-return typesniffer.DetectContentType(buf), nil
+defer r.Close()
+
+return typesniffer.DetectContentTypeFromReader(r)
 }
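A minimal sketch (not part of the diff) of how a caller would use the left-hand Blob API above, where GetBlobBytes returns a size-limited byte slice and DetectContentType sniffs it; the helper name sniffBlob and the simplified error handling are illustrative assumptions.

package example

import (
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/typesniffer"
)

// sniffBlob reads at most SniffContentSize bytes of a blob and sniffs its content
// type, mirroring what GuessContentType does on the left-hand side of this comparison.
func sniffBlob(b *git.Blob) (typesniffer.SniffedType, error) {
	buf, err := b.GetBlobBytes(typesniffer.SniffContentSize)
	if err != nil {
		return typesniffer.SniffedType{}, err
	}
	return typesniffer.DetectContentType(buf), nil
}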
@@ -20,8 +20,7 @@ import (
 
 // Commit represents a git commit.
 type Commit struct {
-Tree // FIXME: bad design, this field can be nil if the commit is from "last commit cache"
-
+Tree
 ID ObjectID // The ID of this commit object
 Author *Signature
 Committer *Signature
@@ -32,6 +32,22 @@ func (err ErrNotExist) Unwrap() error {
 return util.ErrNotExist
 }
 
+// ErrSymlinkUnresolved entry.FollowLink error
+type ErrSymlinkUnresolved struct {
+Name string
+Message string
+}
+
+func (err ErrSymlinkUnresolved) Error() string {
+return fmt.Sprintf("%s: %s", err.Name, err.Message)
+}
+
+// IsErrSymlinkUnresolved if some error is ErrSymlinkUnresolved
+func IsErrSymlinkUnresolved(err error) bool {
+_, ok := err.(ErrSymlinkUnresolved)
+return ok
+}
+
 // ErrBranchNotExist represents a "BranchNotExist" kind of error.
 type ErrBranchNotExist struct {
 Name string
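A brief sketch (not part of the diff) of how a caller on the right-hand side would classify the failure, assuming the ErrSymlinkUnresolved helper added above together with the package's existing IsErrNotExist helper; the function name describeFollowError is illustrative.

package example

import "code.gitea.io/gitea/modules/git"

// describeFollowError maps symlink-resolution failures to short messages,
// using the error helpers from the right-hand side of this comparison.
func describeFollowError(err error) string {
	switch {
	case git.IsErrSymlinkUnresolved(err):
		return "symlink could not be resolved"
	case git.IsErrNotExist(err):
		return "target does not exist"
	default:
		return err.Error()
	}
}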
@@ -11,7 +11,7 @@ import (
 )
 
 // GetTreeEntryByPath get the tree entries according the sub dir
-func (t *Tree) GetTreeEntryByPath(relpath string) (_ *TreeEntry, err error) {
+func (t *Tree) GetTreeEntryByPath(relpath string) (*TreeEntry, error) {
 if len(relpath) == 0 {
 return &TreeEntry{
 ptree: t,
@@ -21,18 +21,13 @@ func (t *Tree) GetTreeEntryByPath(relpath string) (_ *TreeEntry, err error) {
 }, nil
 }
 
+// FIXME: This should probably use git cat-file --batch to be a bit more efficient
 relpath = path.Clean(relpath)
 parts := strings.Split(relpath, "/")
-
+var err error
 tree := t
-for _, name := range parts[:len(parts)-1] {
-tree, err = tree.SubTree(name)
-if err != nil {
-return nil, err
-}
-}
-
-name := parts[len(parts)-1]
+for i, name := range parts {
+if i == len(parts)-1 {
 entries, err := tree.ListEntries()
 if err != nil {
 return nil, err
@@ -42,5 +37,12 @@ func (t *Tree) GetTreeEntryByPath(relpath string) (_ *TreeEntry, err error) {
 return v, nil
 }
 }
+} else {
+tree, err = tree.SubTree(name)
+if err != nil {
+return nil, err
+}
+}
+}
 return nil, ErrNotExist{"", relpath}
 }
@@ -5,7 +5,7 @@
 package git
 
 import (
-"path"
+"io"
 "sort"
 "strings"
 
@@ -24,57 +24,77 @@ func (te *TreeEntry) Type() string {
 }
 }
 
-type EntryFollowResult struct {
-SymlinkContent string
-TargetFullPath string
-TargetEntry *TreeEntry
-}
-
-func EntryFollowLink(commit *Commit, fullPath string, te *TreeEntry) (*EntryFollowResult, error) {
+// FollowLink returns the entry pointed to by a symlink
+func (te *TreeEntry) FollowLink() (*TreeEntry, error) {
 if !te.IsLink() {
-return nil, util.ErrorWrap(util.ErrUnprocessableContent, "%q is not a symlink", fullPath)
+return nil, ErrSymlinkUnresolved{te.Name(), "not a symlink"}
 }
 
-// git's filename max length is 4096, hopefully a link won't be longer than multiple of that
-const maxSymlinkSize = 20 * 4096
-if te.Blob().Size() > maxSymlinkSize {
-return nil, util.ErrorWrap(util.ErrUnprocessableContent, "%q content exceeds symlink limit", fullPath)
-}
-
-link, err := te.Blob().GetBlobContent(maxSymlinkSize)
+// read the link
+r, err := te.Blob().DataAsync()
 if err != nil {
 return nil, err
 }
-if strings.HasPrefix(link, "/") {
-// It's said that absolute path will be stored as is in Git
-return &EntryFollowResult{SymlinkContent: link}, util.ErrorWrap(util.ErrUnprocessableContent, "%q is an absolute symlink", fullPath)
+closed := false
+defer func() {
+if !closed {
+_ = r.Close()
+}
+}()
+buf := make([]byte, te.Size())
+_, err = io.ReadFull(r, buf)
+if err != nil {
+return nil, err
+}
+_ = r.Close()
+closed = true
+
+lnk := string(buf)
+t := te.ptree
+
+// traverse up directories
+for ; t != nil && strings.HasPrefix(lnk, "../"); lnk = lnk[3:] {
+t = t.ptree
 }
 
-targetFullPath := path.Join(path.Dir(fullPath), link)
-targetEntry, err := commit.GetTreeEntryByPath(targetFullPath)
-if err != nil {
-return &EntryFollowResult{SymlinkContent: link}, err
+if t == nil {
+return nil, ErrSymlinkUnresolved{te.Name(), "points outside of repo"}
 }
-return &EntryFollowResult{SymlinkContent: link, TargetFullPath: targetFullPath, TargetEntry: targetEntry}, nil
+
+target, err := t.GetTreeEntryByPath(lnk)
+if err != nil {
+if IsErrNotExist(err) {
+return nil, ErrSymlinkUnresolved{te.Name(), "broken link"}
+}
+return nil, err
+}
+return target, nil
 }
 
-func EntryFollowLinks(commit *Commit, firstFullPath string, firstTreeEntry *TreeEntry, optLimit ...int) (res *EntryFollowResult, err error) {
-limit := util.OptionalArg(optLimit, 10)
-treeEntry, fullPath := firstTreeEntry, firstFullPath
-for range limit {
-res, err = EntryFollowLink(commit, fullPath, treeEntry)
-if err != nil {
-return res, err
+// FollowLinks returns the entry ultimately pointed to by a symlink
+func (te *TreeEntry) FollowLinks(optLimit ...int) (*TreeEntry, error) {
+if !te.IsLink() {
+return nil, ErrSymlinkUnresolved{te.Name(), "not a symlink"}
 }
-treeEntry, fullPath = res.TargetEntry, res.TargetFullPath
-if !treeEntry.IsLink() {
+limit := util.OptionalArg(optLimit, 10)
+entry := te
+for range limit {
+if !entry.IsLink() {
 break
 }
+next, err := entry.FollowLink()
+if err != nil {
+return nil, err
 }
-if treeEntry.IsLink() {
-return res, util.ErrorWrap(util.ErrUnprocessableContent, "%q has too many links", firstFullPath)
+if next.ID == entry.ID {
+return nil, ErrSymlinkUnresolved{entry.Name(), "recursive link"}
 }
-return res, nil
+entry = next
+}
+if entry.IsLink() {
+return nil, ErrSymlinkUnresolved{te.Name(), "too many levels of symbolic links"}
+}
+return entry, nil
 }
 
 // returns the Tree pointed to by this TreeEntry, or nil if this is not a tree
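A usage sketch (not part of the diff) for the left-hand EntryFollowLinks API above; resolveSymlink is an illustrative helper name and the error handling is abbreviated.

package example

import "code.gitea.io/gitea/modules/git"

// resolveSymlink resolves a symlink tree entry to its final target using the
// left-hand EntryFollowLinks API, following at most the default limit of 10 links.
func resolveSymlink(commit *git.Commit, fullPath string, entry *git.TreeEntry) (*git.TreeEntry, error) {
	if !entry.IsLink() {
		return entry, nil // nothing to follow
	}
	res, err := git.EntryFollowLinks(commit, fullPath, entry)
	if err != nil {
		return nil, err
	}
	return res.TargetEntry, nil
}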
@@ -1,76 +0,0 @@
-// Copyright 2024 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package git
-
-import (
-"testing"
-
-"code.gitea.io/gitea/modules/util"
-
-"github.com/stretchr/testify/assert"
-"github.com/stretchr/testify/require"
-)
-
-func TestFollowLink(t *testing.T) {
-r, err := openRepositoryWithDefaultContext("tests/repos/repo1_bare")
-require.NoError(t, err)
-defer r.Close()
-
-commit, err := r.GetCommit("37991dec2c8e592043f47155ce4808d4580f9123")
-require.NoError(t, err)
-
-// get the symlink
-{
-lnkFullPath := "foo/bar/link_to_hello"
-lnk, err := commit.Tree.GetTreeEntryByPath("foo/bar/link_to_hello")
-require.NoError(t, err)
-assert.True(t, lnk.IsLink())
-
-// should be able to dereference to target
-res, err := EntryFollowLink(commit, lnkFullPath, lnk)
-require.NoError(t, err)
-assert.Equal(t, "hello", res.TargetEntry.Name())
-assert.Equal(t, "foo/nar/hello", res.TargetFullPath)
-assert.False(t, res.TargetEntry.IsLink())
-assert.Equal(t, "b14df6442ea5a1b382985a6549b85d435376c351", res.TargetEntry.ID.String())
-}
-
-{
-// should error when called on a normal file
-entry, err := commit.Tree.GetTreeEntryByPath("file1.txt")
-require.NoError(t, err)
-res, err := EntryFollowLink(commit, "file1.txt", entry)
-assert.ErrorIs(t, err, util.ErrUnprocessableContent)
-assert.Nil(t, res)
-}
-
-{
-// should error for broken links
-entry, err := commit.Tree.GetTreeEntryByPath("foo/broken_link")
-require.NoError(t, err)
-assert.True(t, entry.IsLink())
-res, err := EntryFollowLink(commit, "foo/broken_link", entry)
-assert.ErrorIs(t, err, util.ErrNotExist)
-assert.Equal(t, "nar/broken_link", res.SymlinkContent)
-}
-
-{
-// should error for external links
-entry, err := commit.Tree.GetTreeEntryByPath("foo/outside_repo")
-require.NoError(t, err)
-assert.True(t, entry.IsLink())
-res, err := EntryFollowLink(commit, "foo/outside_repo", entry)
-assert.ErrorIs(t, err, util.ErrNotExist)
-assert.Equal(t, "../../outside_repo", res.SymlinkContent)
-}
-
-{
-// testing fix for short link bug
-entry, err := commit.Tree.GetTreeEntryByPath("foo/link_short")
-require.NoError(t, err)
-res, err := EntryFollowLink(commit, "foo/link_short", entry)
-assert.ErrorIs(t, err, util.ErrNotExist)
-assert.Equal(t, "a", res.SymlinkContent)
-}
-}
@@ -21,10 +21,14 @@ type TreeEntry struct {
 
 size int64
 sized bool
+fullName string
 }
 
 // Name returns the name of the entry
 func (te *TreeEntry) Name() string {
+if te.fullName != "" {
+return te.fullName
+}
 return te.gogitTreeEntry.Name
 }
 
@@ -51,7 +55,7 @@ func (te *TreeEntry) Size() int64 {
 return te.size
 }
 
-// IsSubModule if the entry is a submodule
+// IsSubModule if the entry is a sub module
 func (te *TreeEntry) IsSubModule() bool {
 return te.gogitTreeEntry.Mode == filemode.Submodule
 }
@@ -15,7 +15,7 @@ type EntryMode int
 // one of these.
 const (
 // EntryModeNoEntry is possible if the file was added or removed in a commit. In the case of
-// when adding the base commit doesn't have the file in its tree, a mode of 0o000000 is used.
+// added the base commit will not have the file in its tree so a mode of 0o000000 is used.
 EntryModeNoEntry EntryMode = 0o000000
 
 EntryModeBlob EntryMode = 0o100644
@@ -30,7 +30,7 @@ func (e EntryMode) String() string {
 return strconv.FormatInt(int64(e), 8)
 }
 
-// IsSubModule if the entry is a submodule
+// IsSubModule if the entry is a sub module
 func (e EntryMode) IsSubModule() bool {
 return e == EntryModeCommit
 }
@@ -57,7 +57,7 @@ func (te *TreeEntry) Size() int64 {
 return te.size
 }
 
-// IsSubModule if the entry is a submodule
+// IsSubModule if the entry is a sub module
 func (te *TreeEntry) IsSubModule() bool {
 return te.entryMode.IsSubModule()
 }
@@ -53,3 +53,50 @@ func TestEntriesCustomSort(t *testing.T) {
 assert.Equal(t, "bcd", entries[6].Name())
 assert.Equal(t, "abc", entries[7].Name())
 }
+
+func TestFollowLink(t *testing.T) {
+r, err := openRepositoryWithDefaultContext("tests/repos/repo1_bare")
+assert.NoError(t, err)
+defer r.Close()
+
+commit, err := r.GetCommit("37991dec2c8e592043f47155ce4808d4580f9123")
+assert.NoError(t, err)
+
+// get the symlink
+lnk, err := commit.Tree.GetTreeEntryByPath("foo/bar/link_to_hello")
+assert.NoError(t, err)
+assert.True(t, lnk.IsLink())
+
+// should be able to dereference to target
+target, err := lnk.FollowLink()
+assert.NoError(t, err)
+assert.Equal(t, "hello", target.Name())
+assert.False(t, target.IsLink())
+assert.Equal(t, "b14df6442ea5a1b382985a6549b85d435376c351", target.ID.String())
+
+// should error when called on normal file
+target, err = commit.Tree.GetTreeEntryByPath("file1.txt")
+assert.NoError(t, err)
+_, err = target.FollowLink()
+assert.EqualError(t, err, "file1.txt: not a symlink")
+
+// should error for broken links
+target, err = commit.Tree.GetTreeEntryByPath("foo/broken_link")
+assert.NoError(t, err)
+assert.True(t, target.IsLink())
+_, err = target.FollowLink()
+assert.EqualError(t, err, "broken_link: broken link")
+
+// should error for external links
+target, err = commit.Tree.GetTreeEntryByPath("foo/outside_repo")
+assert.NoError(t, err)
+assert.True(t, target.IsLink())
+_, err = target.FollowLink()
+assert.EqualError(t, err, "outside_repo: points outside of repo")
+
+// testing fix for short link bug
+target, err = commit.Tree.GetTreeEntryByPath("foo/link_short")
+assert.NoError(t, err)
+_, err = target.FollowLink()
+assert.EqualError(t, err, "link_short: broken link")
+}
@@ -69,7 +69,7 @@ func (t *Tree) ListEntriesRecursiveWithSize() (Entries, error) {
 seen := map[plumbing.Hash]bool{}
 walker := object.NewTreeWalker(t.gogitTree, true, seen)
 for {
-_, entry, err := walker.Next()
+fullName, entry, err := walker.Next()
 if err == io.EOF {
 break
 }
@@ -84,6 +84,7 @@ func (t *Tree) ListEntriesRecursiveWithSize() (Entries, error) {
 ID: ParseGogitHash(entry.Hash),
 gogitTreeEntry: &entry,
 ptree: t,
+fullName: fullName,
 }
 entries = append(entries, convertedEntry)
 }
@@ -6,14 +6,13 @@ package console
 import (
 "bytes"
 "io"
-"unicode/utf8"
+"path"
 
 "code.gitea.io/gitea/modules/markup"
 "code.gitea.io/gitea/modules/setting"
-"code.gitea.io/gitea/modules/typesniffer"
-"code.gitea.io/gitea/modules/util"
 
 trend "github.com/buildkite/terminal-to-html/v3"
+"github.com/go-enry/go-enry/v2"
 )
 
 func init() {
@@ -23,8 +22,6 @@ func init() {
 // Renderer implements markup.Renderer
 type Renderer struct{}
 
-var _ markup.RendererContentDetector = (*Renderer)(nil)
-
 // Name implements markup.Renderer
 func (Renderer) Name() string {
 return "console"
@@ -43,36 +40,15 @@ func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
 }
 
 // CanRender implements markup.RendererContentDetector
-func (Renderer) CanRender(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) bool {
-if !sniffedType.IsTextPlain() {
+func (Renderer) CanRender(filename string, input io.Reader) bool {
+buf, err := io.ReadAll(input)
+if err != nil {
 return false
 }
-
-s := util.UnsafeBytesToString(prefetchBuf)
-rs := []rune(s)
-cnt := 0
-firstErrPos := -1
-isCtrlSep := func(p int) bool {
-return p < len(rs) && (rs[p] == ';' || rs[p] == 'm')
-}
-for i, c := range rs {
-if c == 0 {
+if enry.GetLanguage(path.Base(filename), buf) != enry.OtherLanguage {
 return false
 }
-if c == '\x1b' {
-match := i+1 < len(rs) && rs[i+1] == '['
-if match && (isCtrlSep(i+2) || isCtrlSep(i+3) || isCtrlSep(i+4) || isCtrlSep(i+5)) {
-cnt++
-}
-}
-if c == utf8.RuneError && firstErrPos == -1 {
-firstErrPos = i
-}
-}
-if firstErrPos != -1 && firstErrPos != len(rs)-1 {
-return false
-}
-return cnt >= 2 // only render it as console output if there are at least two escape sequences
+return bytes.ContainsRune(buf, '\x1b')
 }
 
 // Render renders terminal colors to HTML with all specific handling stuff.
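A sketch (not part of the diff) of driving the left-hand console detector above, which takes a pre-sniffed type and a prefetched buffer; looksLikeConsoleOutput is an illustrative wrapper name.

package example

import (
	"code.gitea.io/gitea/modules/markup/console"
	"code.gitea.io/gitea/modules/typesniffer"
)

// looksLikeConsoleOutput reports whether a prefetched buffer would be rendered
// by the console renderer, using the left-hand CanRender signature above.
func looksLikeConsoleOutput(filename string, prefetchBuf []byte) bool {
	var r console.Renderer
	st := typesniffer.DetectContentType(prefetchBuf)
	return r.CanRender(filename, st, prefetchBuf)
}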
@@ -8,39 +8,23 @@ import (
 "testing"
 
 "code.gitea.io/gitea/modules/markup"
-"code.gitea.io/gitea/modules/typesniffer"
 
 "github.com/stretchr/testify/assert"
 )
 
 func TestRenderConsole(t *testing.T) {
-cases := []struct {
-input string
-expected string
-}{
-{"\x1b[37m\x1b[40mnpm\x1b[0m \x1b[0m\x1b[32minfo\x1b[0m \x1b[0m\x1b[35mit worked if it ends with\x1b[0m ok", `<span class="term-fg37 term-bg40">npm</span> <span class="term-fg32">info</span> <span class="term-fg35">it worked if it ends with</span> ok`},
-{"\x1b[1;2m \x1b[123m 啊", `<span class="term-fg2"> 啊</span>`},
-{"\x1b[1;2m \x1b[123m \xef", `<span class="term-fg2"> �</span>`},
-{"\x1b[1;2m \x1b[123m \xef \xef", ``},
-{"\x1b[12", ``},
-{"\x1b[1", ``},
-{"\x1b[FOO\x1b[", ``},
-{"\x1b[mFOO\x1b[m", `FOO`},
-}
-
 var render Renderer
-for i, c := range cases {
-var buf strings.Builder
-st := typesniffer.DetectContentType([]byte(c.input))
-canRender := render.CanRender("test", st, []byte(c.input))
-if c.expected == "" {
-assert.False(t, canRender, "case %d: expected not to render", i)
-continue
+kases := map[string]string{
+"\x1b[37m\x1b[40mnpm\x1b[0m \x1b[0m\x1b[32minfo\x1b[0m \x1b[0m\x1b[35mit worked if it ends with\x1b[0m ok": "<span class=\"term-fg37 term-bg40\">npm</span> <span class=\"term-fg32\">info</span> <span class=\"term-fg35\">it worked if it ends with</span> ok",
 }
 
+for k, v := range kases {
+var buf strings.Builder
+canRender := render.CanRender("test", strings.NewReader(k))
 assert.True(t, canRender)
-err := render.Render(markup.NewRenderContext(t.Context()), strings.NewReader(c.input), &buf)
+err := render.Render(markup.NewRenderContext(t.Context()), strings.NewReader(k), &buf)
 assert.NoError(t, err)
-assert.Equal(t, c.expected, buf.String())
+assert.Equal(t, v, buf.String())
 }
 }
@@ -4,12 +4,12 @@
 package markup
 
 import (
+"bytes"
 "io"
 "path"
 "strings"
 
 "code.gitea.io/gitea/modules/setting"
-"code.gitea.io/gitea/modules/typesniffer"
 )
 
 // Renderer defines an interface for rendering markup file to HTML
@@ -37,7 +37,7 @@ type ExternalRenderer interface {
 // RendererContentDetector detects if the content can be rendered
 // by specified renderer
 type RendererContentDetector interface {
-CanRender(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) bool
+CanRender(filename string, input io.Reader) bool
 }
 
 var (
@@ -60,9 +60,13 @@ func GetRendererByFileName(filename string) Renderer {
 }
 
 // DetectRendererType detects the markup type of the content
-func DetectRendererType(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) string {
+func DetectRendererType(filename string, input io.Reader) string {
+buf, err := io.ReadAll(input)
+if err != nil {
+return ""
+}
 for _, renderer := range renderers {
-if detector, ok := renderer.(RendererContentDetector); ok && detector.CanRender(filename, sniffedType, prefetchBuf) {
+if detector, ok := renderer.(RendererContentDetector); ok && detector.CanRender(filename, bytes.NewReader(buf)) {
 return renderer.Name()
 }
 }
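A sketch (not part of the diff) of calling the left-hand DetectRendererType above, which expects a pre-sniffed type and a prefetched buffer rather than an io.Reader; rendererFor is an illustrative wrapper.

package example

import (
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/typesniffer"
)

// rendererFor returns the name of the renderer that claims the given content,
// or "" when no registered RendererContentDetector accepts it.
func rendererFor(filename string, prefetchBuf []byte) string {
	st := typesniffer.DetectContentType(prefetchBuf)
	return markup.DetectRendererType(filename, st, prefetchBuf)
}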
@@ -119,14 +119,12 @@ type ContentsResponse struct {
 Name string `json:"name"`
 Path string `json:"path"`
 SHA string `json:"sha"`
-LastCommitSHA *string `json:"last_commit_sha,omitempty"`
+LastCommitSHA string `json:"last_commit_sha"`
 // swagger:strfmt date-time
-LastCommitterDate *time.Time `json:"last_committer_date,omitempty"`
+LastCommitterDate time.Time `json:"last_committer_date"`
 // swagger:strfmt date-time
-LastAuthorDate *time.Time `json:"last_author_date,omitempty"`
-LastCommitMessage *string `json:"last_commit_message,omitempty"`
+LastAuthorDate time.Time `json:"last_author_date"`
+LastCommitMessage string `json:"last_commit_message"`
 
 // `type` will be `file`, `dir`, `symlink`, or `submodule`
 Type string `json:"type"`
 Size int64 `json:"size"`
@@ -144,8 +142,8 @@ type ContentsResponse struct {
 SubmoduleGitURL *string `json:"submodule_git_url"`
 Links *FileLinksResponse `json:"_links"`
 
-LfsOid *string `json:"lfs_oid,omitempty"`
-LfsSize *int64 `json:"lfs_size,omitempty"`
+LfsOid *string `json:"lfs_oid"`
+LfsSize *int64 `json:"lfs_size"`
 }
 
 // FileCommitResponse contains information generated from a Git commit for a repo's file.
@@ -6,14 +6,18 @@ package typesniffer
 import (
 "bytes"
 "encoding/binary"
+"fmt"
+"io"
 "net/http"
 "regexp"
 "slices"
 "strings"
-"sync"
+"code.gitea.io/gitea/modules/util"
 )
 
-const SniffContentSize = 1024
+// Use at most this many bytes to determine Content Type.
+const sniffLen = 1024
 
 const (
 MimeTypeImageSvg = "image/svg+xml"
@@ -22,30 +26,22 @@ const (
 MimeTypeApplicationOctetStream = "application/octet-stream"
 )
 
-var globalVars = sync.OnceValue(func() (ret struct {
-svgComment, svgTagRegex, svgTagInXMLRegex *regexp.Regexp
-},
-) {
-ret.svgComment = regexp.MustCompile(`(?s)<!--.*?-->`)
-ret.svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
-ret.svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
-return ret
-})
+var (
+svgComment = regexp.MustCompile(`(?s)<!--.*?-->`)
+svgTagRegex = regexp.MustCompile(`(?si)\A\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
+svgTagInXMLRegex = regexp.MustCompile(`(?si)\A<\?xml\b.*?\?>\s*(?:(<!DOCTYPE\s+svg([\s:]+.*?>|>))\s*)*<svg\b`)
+)
 
-// SniffedType contains information about a blob's type.
+// SniffedType contains information about a blobs type.
 type SniffedType struct {
 contentType string
 }
 
-// IsText detects if the content format is text family, including text/plain, text/html, text/css, etc.
+// IsText etects if content format is plain text.
 func (ct SniffedType) IsText() bool {
 return strings.Contains(ct.contentType, "text/")
 }
 
-func (ct SniffedType) IsTextPlain() bool {
-return strings.Contains(ct.contentType, "text/plain")
-}
-
 // IsImage detects if data is an image format
 func (ct SniffedType) IsImage() bool {
 return strings.Contains(ct.contentType, "image/")
@@ -61,12 +57,12 @@ func (ct SniffedType) IsPDF() bool {
 return strings.Contains(ct.contentType, "application/pdf")
 }
 
-// IsVideo detects if data is a video format
+// IsVideo detects if data is an video format
 func (ct SniffedType) IsVideo() bool {
 return strings.Contains(ct.contentType, "video/")
 }
 
-// IsAudio detects if data is a video format
+// IsAudio detects if data is an video format
 func (ct SniffedType) IsAudio() bool {
 return strings.Contains(ct.contentType, "audio/")
 }
@@ -107,34 +103,33 @@ func detectFileTypeBox(data []byte) (brands []string, found bool) {
 return brands, true
 }
 
-// DetectContentType extends http.DetectContentType with more content types. Defaults to text/plain if input is empty.
+// DetectContentType extends http.DetectContentType with more content types. Defaults to text/unknown if input is empty.
 func DetectContentType(data []byte) SniffedType {
 if len(data) == 0 {
-return SniffedType{"text/plain"}
+return SniffedType{"text/unknown"}
 }
 
 ct := http.DetectContentType(data)
 
-if len(data) > SniffContentSize {
-data = data[:SniffContentSize]
+if len(data) > sniffLen {
+data = data[:sniffLen]
 }
 
-vars := globalVars()
 // SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
 detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
 detectByXML := strings.Contains(ct, "text/xml")
 if detectByHTML || detectByXML {
-dataProcessed := vars.svgComment.ReplaceAll(data, nil)
+dataProcessed := svgComment.ReplaceAll(data, nil)
 dataProcessed = bytes.TrimSpace(dataProcessed)
-if detectByHTML && vars.svgTagRegex.Match(dataProcessed) ||
-detectByXML && vars.svgTagInXMLRegex.Match(dataProcessed) {
+if detectByHTML && svgTagRegex.Match(dataProcessed) ||
+detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
 ct = MimeTypeImageSvg
 }
 }
 
 if strings.HasPrefix(ct, "audio/") && bytes.HasPrefix(data, []byte("ID3")) {
 // The MP3 detection is quite inaccurate, any content with "ID3" prefix will result in "audio/mpeg".
-// So remove the "ID3" prefix and detect again, then if the result is "text", it must be text content.
+// So remove the "ID3" prefix and detect again, if result is text, then it must be text content.
 // This works especially because audio files contain many unprintable/invalid characters like `0x00`
 ct2 := http.DetectContentType(data[3:])
 if strings.HasPrefix(ct2, "text/") {
@@ -160,3 +155,15 @@ func DetectContentType(data []byte) SniffedType {
 }
 return SniffedType{ct}
 }
+
+// DetectContentTypeFromReader guesses the content type contained in the reader.
+func DetectContentTypeFromReader(r io.Reader) (SniffedType, error) {
+buf := make([]byte, sniffLen)
+n, err := util.ReadAtMost(r, buf)
+if err != nil {
+return SniffedType{}, fmt.Errorf("DetectContentTypeFromReader io error: %w", err)
+}
+buf = buf[:n]
+
+return DetectContentType(buf), nil
+}
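A sketch (not part of the diff) of using the right-hand DetectContentTypeFromReader added above, which reads at most sniffLen (1024) bytes before delegating to DetectContentType; isImageFile is an illustrative caller.

package example

import (
	"os"

	"code.gitea.io/gitea/modules/typesniffer"
)

// isImageFile sniffs a file's content type from its leading bytes and reports
// whether it looks like an image, using the reader-based helper above.
func isImageFile(path string) (bool, error) {
	f, err := os.Open(path)
	if err != nil {
		return false, err
	}
	defer f.Close()
	st, err := typesniffer.DetectContentTypeFromReader(f)
	if err != nil {
		return false, err
	}
	return st.IsImage(), nil
}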
@@ -4,6 +4,7 @@
 package typesniffer
 
 import (
+"bytes"
 "encoding/base64"
 "encoding/hex"
 "strings"
@@ -16,7 +17,7 @@ func TestDetectContentTypeLongerThanSniffLen(t *testing.T) {
 // Pre-condition: Shorter than sniffLen detects SVG.
 assert.Equal(t, "image/svg+xml", DetectContentType([]byte(`<!-- Comment --><svg></svg>`)).contentType)
 // Longer than sniffLen detects something else.
-assert.NotEqual(t, "image/svg+xml", DetectContentType([]byte(`<!-- `+strings.Repeat("x", SniffContentSize)+` --><svg></svg>`)).contentType)
+assert.NotEqual(t, "image/svg+xml", DetectContentType([]byte(`<!-- `+strings.Repeat("x", sniffLen)+` --><svg></svg>`)).contentType)
 }
 
 func TestIsTextFile(t *testing.T) {
@@ -115,13 +116,22 @@ func TestIsAudio(t *testing.T) {
 assert.True(t, DetectContentType([]byte("ID3Toy\n====\t* hi 🌞, ..."+"🌛"[0:2])).IsText()) // test ID3 tag with incomplete UTF8 char
 }
 
+func TestDetectContentTypeFromReader(t *testing.T) {
+mp3, _ := base64.StdEncoding.DecodeString("SUQzBAAAAAABAFRYWFgAAAASAAADbWFqb3JfYnJhbmQAbXA0MgBUWFhYAAAAEQAAA21pbm9yX3Zl")
+st, err := DetectContentTypeFromReader(bytes.NewReader(mp3))
+assert.NoError(t, err)
+assert.True(t, st.IsAudio())
+}
+
 func TestDetectContentTypeOgg(t *testing.T) {
 oggAudio, _ := hex.DecodeString("4f67675300020000000000000000352f0000000000007dc39163011e01766f72626973000000000244ac0000000000000071020000000000b8014f6767530000")
-st := DetectContentType(oggAudio)
+st, err := DetectContentTypeFromReader(bytes.NewReader(oggAudio))
+assert.NoError(t, err)
 assert.True(t, st.IsAudio())
 
 oggVideo, _ := hex.DecodeString("4f676753000200000000000000007d9747ef000000009b59daf3012a807468656f7261030201001e00110001e000010e00020000001e00000001000001000001")
-st = DetectContentType(oggVideo)
+st, err = DetectContentTypeFromReader(bytes.NewReader(oggVideo))
+assert.NoError(t, err)
 assert.True(t, st.IsVideo())
 }
 
@@ -17,8 +17,8 @@ var (
 ErrNotExist = errors.New("resource does not exist") // also implies HTTP 404
 ErrAlreadyExist = errors.New("resource already exists") // also implies HTTP 409
 
-// ErrUnprocessableContent implies HTTP 422, the syntax of the request content is correct,
-// but the server is unable to process the contained instructions
+// ErrUnprocessableContent implies HTTP 422, syntax of the request content was correct,
+// but server was unable to process the contained instructions
 ErrUnprocessableContent = errors.New("unprocessable content")
 )
 
@@ -2769,8 +2769,6 @@ branch.new_branch_from = Create new branch from "%s"
 branch.renamed = Branch %s was renamed to %s.
 branch.rename_default_or_protected_branch_error = Only admins can rename default or protected branches.
 branch.rename_protected_branch_failed = This branch is protected by glob-based protection rules.
-branch.commits_divergence_from = Commits divergence: %[1]d behind and %[2]d ahead of %[3]s
-branch.commits_no_divergence = The same as branch %[1]s
 
 tag.create_tag = Create tag %s
 tag.create_tag_operation = Create tag
@@ -2784,7 +2782,6 @@ topic.done = Done
 topic.count_prompt = You cannot select more than 25 topics
 topic.format_prompt = Topics must start with a letter or number, can include dashes ('-') and dots ('.'), can be up to 35 characters long. Letters must be lowercase.
 
-find_file.follow_symlink= Follow this symlink to where it is pointing at
 find_file.go_to_file = Go to file
 find_file.no_matching = No matching file found
 
@@ -1969,7 +1969,6 @@ pulls.cmd_instruction_checkout_title=Basculer
 pulls.cmd_instruction_checkout_desc=Depuis votre dépôt, basculer sur une nouvelle branche et tester des modifications.
 pulls.cmd_instruction_merge_title=Fusionner
 pulls.cmd_instruction_merge_desc=Fusionner les modifications et mettre à jour sur Gitea.
-pulls.cmd_instruction_merge_warning=Attention : cette opération ne peut pas fusionner la demande d’ajout car la « détection automatique de fusion manuelle » n’a pas été activée
 pulls.clear_merge_message=Effacer le message de fusion
 pulls.clear_merge_message_hint=Effacer le message de fusion ne supprimera que le message de la révision, mais pas les pieds de révision générés tels que "Co-Authored-By:".
 
@@ -2769,8 +2768,6 @@ branch.new_branch_from=`Créer une nouvelle branche à partir de "%s"`
 branch.renamed=La branche %s à été renommée en %s.
 branch.rename_default_or_protected_branch_error=Seuls les administrateurs peuvent renommer les branches par défaut ou protégées.
 branch.rename_protected_branch_failed=Cette branche est protégée par des règles de protection basées sur des globs.
-branch.commits_divergence_from=Divergence de révisions : %[1]d en retard et %[2]d en avance sur %[3]s
-branch.commits_no_divergence=Identique à la branche %[1]s
 
 tag.create_tag=Créer l'étiquette %s
 tag.create_tag_operation=Créer une étiquette
@@ -1969,7 +1969,6 @@ pulls.cmd_instruction_checkout_title=Seiceáil
 pulls.cmd_instruction_checkout_desc=Ó stór tionscadail, seiceáil brainse nua agus déan tástáil ar na hathruithe.
 pulls.cmd_instruction_merge_title=Cumaisc
 pulls.cmd_instruction_merge_desc=Cumaisc na hathruithe agus nuashonrú ar Gitea.
-pulls.cmd_instruction_merge_warning=Rabhadh: Ní féidir iarratas tarraingthe cumaisc a dhéanamh leis an oibríocht seo mar nach bhfuil "autodetect manual merge" cumasaithe.
 pulls.clear_merge_message=Glan an teachtaireacht chumaisc
 pulls.clear_merge_message_hint=Má imrítear an teachtaireacht chumaisc ní bhainfear ach ábhar na teachtaireachta tiomanta agus coimeádfar leantóirí git ginte ar nós "Co-Authored-By …".
 
@@ -2769,8 +2768,6 @@ branch.new_branch_from=`Cruthaigh brainse nua ó "%s"`
 branch.renamed=Ainmníodh brainse %s go %s.
 branch.rename_default_or_protected_branch_error=Ní féidir ach le riarthóirí brainsí réamhshocraithe nó cosanta a athainmniú.
 branch.rename_protected_branch_failed=Tá an brainse seo faoi chosaint ag rialacha cosanta domhanda.
-branch.commits_divergence_from=Déanann sé dialltacht a thiomnú: %[1]d taobh thiar agus %[2]d chun tosaigh ar %[3]s
-branch.commits_no_divergence=Mar an gcéanna le brainse %[1]s
 
 tag.create_tag=Cruthaigh clib %s
 tag.create_tag_operation=Cruthaigh clib
@@ -2784,7 +2781,6 @@ topic.done=Déanta
 topic.count_prompt=Ní féidir leat níos mó ná 25 topaicí a roghnú
 topic.format_prompt=Ní mór do thopaicí tosú le litir nó uimhir, is féidir daiseanna ('-') agus poncanna ('.') a áireamh, a bheith suas le 35 carachtar ar fad. Ní mór litreacha a bheith i litreacha beaga.
 
-find_file.follow_symlink=Lean an nasc siombalach seo go dtí an áit a bhfuil sé ag pointeáil air
 find_file.go_to_file=Téigh go dtí an comhad
 find_file.no_matching=Níl aon chomhad meaitseála le fáil
 
@@ -1562,8 +1562,8 @@ issues.filter_project=Planeamento
 issues.filter_project_all=Todos os planeamentos
 issues.filter_project_none=Nenhum planeamento
 issues.filter_assignee=Encarregado
-issues.filter_assignee_no_assignee=Não atribuída
-issues.filter_assignee_any_assignee=Atribuída a alguém
+issues.filter_assignee_no_assignee=Não atribuído
+issues.filter_assignee_any_assignee=Atribuído a qualquer pessoa
 issues.filter_poster=Autor(a)
 issues.filter_user_placeholder=Procurar utilizadores
 issues.filter_user_no_select=Todos os utilizadores
@@ -1969,7 +1969,6 @@ pulls.cmd_instruction_checkout_title=Checkout
 pulls.cmd_instruction_checkout_desc=A partir do seu repositório, crie um novo ramo e teste nele as modificações.
 pulls.cmd_instruction_merge_title=Integrar
 pulls.cmd_instruction_merge_desc=Integrar as modificações e enviar para o Gitea.
-pulls.cmd_instruction_merge_warning=Aviso: Esta operação não pode executar pedidos de integração porque a opção "auto-identificar integração manual" não está habilitada.
 pulls.clear_merge_message=Apagar mensagem de integração
 pulls.clear_merge_message_hint=Apagar a mensagem de integração apenas remove o conteúdo da mensagem de cometimento e mantém os rodapés do git, tais como "Co-Autorado-Por …".
 
@@ -2769,8 +2768,6 @@ branch.new_branch_from=`Criar um novo ramo a partir do ramo "%s"`
 branch.renamed=O ramo %s foi renomeado para %s.
 branch.rename_default_or_protected_branch_error=Só os administradores é que podem renomear o ramo principal ou ramos protegidos.
 branch.rename_protected_branch_failed=Este ramo está protegido por regras de salvaguarda baseadas em padrões glob.
-branch.commits_divergence_from=Divergência nos cometimentos: %[1]d atrás e %[2]d à frente de %[3]s
-branch.commits_no_divergence=Idêntico ao ramo %[1]s
 
 tag.create_tag=Criar etiqueta %s
 tag.create_tag_operation=Criar etiqueta
@@ -2784,7 +2781,6 @@ topic.done=Concluído
 topic.count_prompt=Não pode escolher mais do que 25 tópicos
 topic.format_prompt=Os tópicos devem começar com uma letra ou um número, podem incluir traços ('-') ou pontos ('.') e podem ter até 35 caracteres. As letras têm que ser minúsculas.
 
-find_file.follow_symlink=Seguir esta ligação simbólica para onde ela está apontando
 find_file.go_to_file=Ir para o ficheiro
 find_file.no_matching=Não foi encontrado qualquer ficheiro correspondente
 
@@ -420,9 +420,8 @@ remember_me=记住此设备
 remember_me.compromised=登录令牌不再有效,因为它可能表明帐户已被破坏。请检查您的帐户是否有异常活动。
 forgot_password_title=忘记密码
 forgot_password=忘记密码?
 need_account=需要一个帐户?
-sign_up_tip=您正在系统中注册第一个帐户,它拥有管理员权限。请仔细记住您的用户名和密码。 如果您忘记了用户名或密码,请参阅 Gitea 文档以恢复账户。
-sign_up_now=立即注册。
+sign_up_now=还没账号?马上注册。
 sign_up_successful=帐户创建成功。欢迎!
 confirmation_mail_sent_prompt_ex=一封新的确认邮件已经发送到 <b>%s</b>。请在下一个 %s 中检查您的收件箱以完成注册流程。 如果您的注册邮箱地址不正确,您可以重新登录并更改它。
 must_change_password=更新您的密码
@@ -486,7 +485,7 @@ sspi_auth_failed=SSPI 认证失败
 password_pwned=此密码出现在 <a target="_blank" rel="noopener noreferrer" href="%s">被盗密码</a> 列表上并且曾经被公开。 请使用另一个密码再试一次。
 password_pwned_err=无法完成对 HaveIBeenPwned 的请求
 last_admin=您不能删除最后一个管理员。必须至少保留一个管理员。
-signin_passkey=使用通行密钥登录
+signin_passkey=使用密钥登录
 back_to_sign_in=返回登录页面
 
 [mail]
@@ -519,7 +518,7 @@ register_success=注册成功
 issue_assigned.pull=@%[1]s 已将仓库 %[3]s 中的合并请求 %[2]s 指派给您
 issue_assigned.issue=@%[1]s 已将仓库 %[3]s 中的工单 %[2]s 指派给您
 
-issue.x_mentioned_you=<b>@%s</b> 提及了您:
+issue.x_mentioned_you=<b>@%s</b> 提到了您:
 issue.action.force_push=<b>%[1]s</b> 强制从 %[3]s 推送 <b>%[2]s</b> 至 [4]s。
 issue.action.push_1=<b>@%[1]s</b> 推送了 %[3]d 个提交到 %[2]s
 issue.action.push_n=<b>@%[1]s</b> 推送了 %[3]d 个提交到 %[2]s
@@ -839,7 +838,7 @@ ssh_desc=这些 SSH 公钥已经关联到您的账号。相应的私钥拥有完
 principal_desc=这些 SSH 证书规则已关联到您的账号将允许完全访问您所有仓库。
 gpg_desc=这些 GPG 公钥已经关联到您的账号。请妥善保管您的私钥因为他们将被用于认证提交。
 ssh_helper=<strong>需要帮助?</strong> 请查看有关 <a href="%s">如何生成 SSH 密钥</a> 或 <a href="%s">常见 SSH 问题</a> 寻找答案。
-gpg_helper=<strong>需要帮助?</strong>看一看 GitHub <a href="%s">关于 GPG</a> 的指导。
+gpg_helper=<strong>需要帮助吗?</strong>看一看 GitHub <a href="%s">关于 GPG</a> 的指导。
 add_new_key=增加 SSH 密钥
 add_new_gpg_key=添加的 GPG 密钥
 key_content_ssh_placeholder=以 'ssh-ed25519'、 'ssh-rsa'、 'ecdsa-sha2-nistp256'、'ecdsa-sha2-nistp384'、'ecdsa-sha2-nistp521'、 'sk-ecdsa-sha2-nistp256@openssh.com' 或 'sk-ssh-ed25519@openssh.com' 开头
@ -1017,10 +1016,10 @@ delete_account_title=删除当前帐户
|
|||||||
delete_account_desc=确实要永久删除此用户帐户吗?
|
delete_account_desc=确实要永久删除此用户帐户吗?
|
||||||
|
|
||||||
email_notifications.enable=启用邮件通知
|
email_notifications.enable=启用邮件通知
|
||||||
email_notifications.onmention=仅被提及时通知
|
email_notifications.onmention=只在被提到时邮件通知
|
||||||
email_notifications.disable=停用邮件通知
|
email_notifications.disable=停用邮件通知
|
||||||
email_notifications.submit=设置邮件通知
|
email_notifications.submit=邮件通知设置
|
||||||
email_notifications.andyourown=仅与您相关的通知
|
email_notifications.andyourown=和您自己的通知
|
||||||
|
|
||||||
visibility=用户可见性
|
visibility=用户可见性
|
||||||
visibility.public=公开
|
visibility.public=公开
|
||||||
@ -1062,7 +1061,6 @@ fork_no_valid_owners=这个代码仓库无法被派生,因为没有有效的
|
|||||||
fork.blocked_user=无法克隆仓库,因为您被仓库所有者屏蔽。
|
fork.blocked_user=无法克隆仓库,因为您被仓库所有者屏蔽。
|
||||||
use_template=使用此模板
|
use_template=使用此模板
|
||||||
open_with_editor=用 %s 打开
|
open_with_editor=用 %s 打开
|
||||||
|
|
||||||
download_zip=下载 ZIP
|
download_zip=下载 ZIP
|
||||||
download_tar=下载 TAR.GZ
|
download_tar=下载 TAR.GZ
|
||||||
download_bundle=下载 BUNDLE
|
download_bundle=下载 BUNDLE
|
||||||
@ -1072,12 +1070,12 @@ repo_desc=描述
|
|||||||
repo_desc_helper=输入简要描述 (可选)
|
repo_desc_helper=输入简要描述 (可选)
|
||||||
repo_no_desc=无详细信息
|
repo_no_desc=无详细信息
|
||||||
repo_lang=语言
|
repo_lang=语言
|
||||||
repo_gitignore_helper=选择 .gitignore 模板
|
repo_gitignore_helper=选择 .gitignore 模板。
|
||||||
repo_gitignore_helper_desc=从常见语言的模板列表中选择忽略跟踪的文件。默认情况下,由开发或构建工具生成的特殊文件都包含在 .gitignore 中。
|
repo_gitignore_helper_desc=从常见语言的模板列表中选择忽略跟踪的文件。默认情况下,由开发或构建工具生成的特殊文件都包含在 .gitignore 中。
|
||||||
issue_labels=工单标签
|
issue_labels=工单标签
|
||||||
issue_labels_helper=选择一个工单标签集
|
issue_labels_helper=选择一个工单标签集
|
||||||
license=授权许可
|
license=授权许可
|
||||||
license_helper=选择授权许可文件
|
license_helper=选择授权许可文件。
|
||||||
license_helper_desc=许可证说明了其他人可以和不可以用您的代码做什么。不确定哪一个适合您的项目?见 <a target="_blank" rel="noopener noreferrer" href="%s">选择一个许可证</a>
|
license_helper_desc=许可证说明了其他人可以和不可以用您的代码做什么。不确定哪一个适合您的项目?见 <a target="_blank" rel="noopener noreferrer" href="%s">选择一个许可证</a>
|
||||||
multiple_licenses=多许可证
|
multiple_licenses=多许可证
|
||||||
object_format=对象格式
|
object_format=对象格式
|
||||||
@ -1230,7 +1228,6 @@ migrate.migrating_issues=迁移工单
|
|||||||
migrate.migrating_pulls=迁移合并请求
|
migrate.migrating_pulls=迁移合并请求
|
||||||
migrate.cancel_migrating_title=取消迁移
|
migrate.cancel_migrating_title=取消迁移
|
||||||
migrate.cancel_migrating_confirm=您想要取消此次迁移吗?
|
migrate.cancel_migrating_confirm=您想要取消此次迁移吗?
|
||||||
migration_status=迁移状态
|
|
||||||
|
|
||||||
mirror_from=镜像自地址
|
mirror_from=镜像自地址
|
||||||
forked_from=派生自
|
forked_from=派生自
|
||||||
@ -1356,7 +1353,6 @@ editor.update=更新 %s
|
|||||||
editor.delete=删除 %s
|
editor.delete=删除 %s
|
||||||
editor.patch=应用补丁
|
editor.patch=应用补丁
|
||||||
editor.patching=打补丁:
|
editor.patching=打补丁:
|
||||||
editor.fail_to_apply_patch=无法应用补丁
|
|
||||||
editor.new_patch=新补丁
|
editor.new_patch=新补丁
|
||||||
editor.commit_message_desc=添加一个可选的扩展描述...
|
editor.commit_message_desc=添加一个可选的扩展描述...
|
||||||
editor.signoff_desc=在提交日志消息末尾添加签署人信息。
|
editor.signoff_desc=在提交日志消息末尾添加签署人信息。
|
||||||
@ -1376,7 +1372,6 @@ editor.branch_already_exists=此仓库已存在名为「%s」的分支。
|
|||||||
editor.directory_is_a_file=目录名「%s」已作为文件名在此仓库中存在。
|
editor.directory_is_a_file=目录名「%s」已作为文件名在此仓库中存在。
|
||||||
editor.file_is_a_symlink=`「%s」是一个符号链接,无法在 Web 编辑器中编辑`
|
editor.file_is_a_symlink=`「%s」是一个符号链接,无法在 Web 编辑器中编辑`
|
||||||
editor.filename_is_a_directory=文件名「%s」已作为目录名在此仓库中存在。
|
editor.filename_is_a_directory=文件名「%s」已作为目录名在此仓库中存在。
|
||||||
editor.file_modifying_no_longer_exists=正在修改的文件「%s」已不存在于此仓库。
|
|
||||||
editor.file_changed_while_editing=文件内容在您进行编辑时已经发生变动。<a target="_blank" rel="noopener noreferrer" href="%s">单击此处</a> 查看变动的具体内容,或者 <strong>再次提交</strong> 覆盖已发生的变动。
|
editor.file_changed_while_editing=文件内容在您进行编辑时已经发生变动。<a target="_blank" rel="noopener noreferrer" href="%s">单击此处</a> 查看变动的具体内容,或者 <strong>再次提交</strong> 覆盖已发生的变动。
|
||||||
editor.file_already_exists=此仓库已经存在名为「%s」的文件。
|
editor.file_already_exists=此仓库已经存在名为「%s」的文件。
|
||||||
editor.commit_id_not_matching=提交 ID 与您开始编辑时的 ID 不匹配。请提交到补丁分支然后合并。
|
editor.commit_id_not_matching=提交 ID 与您开始编辑时的 ID 不匹配。请提交到补丁分支然后合并。
|
||||||
@ -1397,15 +1392,7 @@ editor.user_no_push_to_branch=用户不能推送到分支
|
|||||||
editor.require_signed_commit=分支需要签名提交
|
editor.require_signed_commit=分支需要签名提交
|
||||||
editor.cherry_pick=拣选提交 %s 到:
|
editor.cherry_pick=拣选提交 %s 到:
|
||||||
editor.revert=将 %s 还原到:
|
editor.revert=将 %s 还原到:
|
||||||
editor.failed_to_commit=提交更改失败。
|
|
||||||
editor.failed_to_commit_summary=错误信息:
|
|
||||||
|
|
||||||
editor.fork_create=派生仓库发起请求变更
|
|
||||||
editor.fork_create_description=您不能直接编辑此仓库。您可以从此仓库派生,进行编辑并创建一个拉取请求。
|
|
||||||
editor.fork_edit_description=您不能直接编辑此仓库。 更改将写入您的派生仓库 <b>%s</b>,以便您可以创建一个拉取请求。
|
|
||||||
editor.fork_not_editable=你已经派生了这个仓库,但是你的分叉是不可编辑的。
|
|
||||||
editor.fork_failed_to_push_branch=推送分支 %s 到仓库失败。
|
|
||||||
editor.fork_branch_exists=分支 "%s" 已存在于您的派生仓库中,请选择一个新的分支名称。
|
|
||||||
|
|
||||||
commits.desc=浏览代码修改历史
|
commits.desc=浏览代码修改历史
|
||||||
commits.commits=次代码提交
|
commits.commits=次代码提交
|
||||||
@ -1727,8 +1714,6 @@ issues.remove_time_estimate_at=删除预估时间 %s
|
|||||||
issues.time_estimate_invalid=预计时间格式无效
|
issues.time_estimate_invalid=预计时间格式无效
|
||||||
issues.start_tracking_history=`开始工作 %s`
|
issues.start_tracking_history=`开始工作 %s`
|
||||||
issues.tracker_auto_close=当此工单关闭时,自动停止计时器
|
issues.tracker_auto_close=当此工单关闭时,自动停止计时器
|
||||||
issues.stopwatch_already_stopped=此工单的计时器已经停止
|
|
||||||
issues.stopwatch_already_created=此工单的计时器已经存在
|
|
||||||
issues.tracking_already_started=`您已经开始对 <a href="%s">另一个工单</a> 进行时间跟踪!`
|
issues.tracking_already_started=`您已经开始对 <a href="%s">另一个工单</a> 进行时间跟踪!`
|
||||||
issues.stop_tracking=停止计时器
|
issues.stop_tracking=停止计时器
|
||||||
issues.stop_tracking_history=工作 <b>%[1]s</b> 于 %[2]s 停止
|
issues.stop_tracking_history=工作 <b>%[1]s</b> 于 %[2]s 停止
|
||||||
@ -1970,7 +1955,6 @@ pulls.cmd_instruction_checkout_title=检出
|
|||||||
pulls.cmd_instruction_checkout_desc=从您的仓库中检出一个新的分支并测试变更。
|
pulls.cmd_instruction_checkout_desc=从您的仓库中检出一个新的分支并测试变更。
|
||||||
pulls.cmd_instruction_merge_title=合并
|
pulls.cmd_instruction_merge_title=合并
|
||||||
pulls.cmd_instruction_merge_desc=合并变更并更新到 Gitea 上
|
pulls.cmd_instruction_merge_desc=合并变更并更新到 Gitea 上
|
||||||
pulls.cmd_instruction_merge_warning=警告:此操作不能合并该合并请求,因为「自动检测手动合并」未启用
|
|
||||||
pulls.clear_merge_message=清除合并信息
|
pulls.clear_merge_message=清除合并信息
|
||||||
pulls.clear_merge_message_hint=清除合并消息只会删除提交消息内容,并保留生成的 Git 附加内容,如「Co-Authored-By…」。
|
pulls.clear_merge_message_hint=清除合并消息只会删除提交消息内容,并保留生成的 Git 附加内容,如「Co-Authored-By…」。
|
||||||
|
|
||||||
@ -2166,7 +2150,6 @@ settings.collaboration.write=可写权限
|
|||||||
settings.collaboration.read=可读权限
|
settings.collaboration.read=可读权限
|
||||||
settings.collaboration.owner=所有者
|
settings.collaboration.owner=所有者
|
||||||
settings.collaboration.undefined=未定义
|
settings.collaboration.undefined=未定义
|
||||||
settings.collaboration.per_unit=单元权限
|
|
||||||
settings.hooks=Web 钩子
|
settings.hooks=Web 钩子
|
||||||
settings.githooks=管理 Git 钩子
|
settings.githooks=管理 Git 钩子
|
||||||
settings.basic_settings=基本设置
|
settings.basic_settings=基本设置
|
||||||
@ -2385,7 +2368,6 @@ settings.event_repository=仓库
|
|||||||
settings.event_repository_desc=创建或删除仓库
|
settings.event_repository_desc=创建或删除仓库
|
||||||
settings.event_header_issue=工单事件
|
settings.event_header_issue=工单事件
|
||||||
settings.event_issues=工单
|
settings.event_issues=工单
|
||||||
settings.event_issues_desc=工单已打开、已关闭、已重新打开或已编辑。
|
|
||||||
settings.event_issue_assign=工单已指派
|
settings.event_issue_assign=工单已指派
|
||||||
settings.event_issue_assign_desc=工单已指派或取消指派。
|
settings.event_issue_assign_desc=工单已指派或取消指派。
|
||||||
settings.event_issue_label=工单增删标签
|
settings.event_issue_label=工单增删标签
|
||||||
@ -2396,7 +2378,6 @@ settings.event_issue_comment=工单评论
|
|||||||
settings.event_issue_comment_desc=工单评论已创建、编辑或删除。
|
settings.event_issue_comment_desc=工单评论已创建、编辑或删除。
|
||||||
settings.event_header_pull_request=合并请求事件
|
settings.event_header_pull_request=合并请求事件
|
||||||
settings.event_pull_request=合并请求
|
settings.event_pull_request=合并请求
|
||||||
settings.event_pull_request_desc=合并请求已打开、关闭、重新打开或编辑。
|
|
||||||
settings.event_pull_request_assign=合并请求已指派
|
settings.event_pull_request_assign=合并请求已指派
|
||||||
settings.event_pull_request_assign_desc=合并请求已指派或取消指派。
|
settings.event_pull_request_assign_desc=合并请求已指派或取消指派。
|
||||||
settings.event_pull_request_label=合并请求增删标签
|
settings.event_pull_request_label=合并请求增删标签
|
||||||
@ -2414,8 +2395,6 @@ settings.event_pull_request_review_request_desc=合并请求评审已请求或
|
|||||||
settings.event_pull_request_approvals=合并请求批准
|
settings.event_pull_request_approvals=合并请求批准
|
||||||
settings.event_pull_request_merge=合并请求合并
|
settings.event_pull_request_merge=合并请求合并
|
||||||
settings.event_header_workflow=工作流程事件
|
settings.event_header_workflow=工作流程事件
|
||||||
settings.event_workflow_run=工作流运行
|
|
||||||
settings.event_workflow_run_desc=Gitea 工作流队列中、等待中、正在进行或已完成的任务。
|
|
||||||
settings.event_workflow_job=工作流任务
|
settings.event_workflow_job=工作流任务
|
||||||
settings.event_workflow_job_desc=Gitea 工作流队列中、等待中、正在进行或已完成的任务。
|
settings.event_workflow_job_desc=Gitea 工作流队列中、等待中、正在进行或已完成的任务。
|
||||||
settings.event_package=软件包
|
settings.event_package=软件包
|
||||||
@ -2794,7 +2773,7 @@ error.broken_git_hook=此仓库的 Git 钩子似乎已损坏。 请按照 <a tar
|
|||||||
[graphs]
|
[graphs]
|
||||||
component_loading=正在加载 %s...
|
component_loading=正在加载 %s...
|
||||||
component_loading_failed=无法加载 %s
|
component_loading_failed=无法加载 %s
|
||||||
component_loading_info=这可能需要一点时间…
|
component_loading_info=这可能需要一点…
|
||||||
component_failed_to_load=意外的错误发生了。
|
component_failed_to_load=意外的错误发生了。
|
||||||
code_frequency.what=代码频率
|
code_frequency.what=代码频率
|
||||||
contributors.what=贡献
|
contributors.what=贡献
|
||||||
@ -2823,7 +2802,6 @@ team_permission_desc=权限
|
|||||||
team_unit_desc=允许访问仓库单元
|
team_unit_desc=允许访问仓库单元
|
||||||
team_unit_disabled=(已禁用)
|
team_unit_disabled=(已禁用)
|
||||||
|
|
||||||
form.name_been_taken=组织名称「%s」已经被占用。
|
|
||||||
form.name_reserved=组织名称「%s」是保留的。
|
form.name_reserved=组织名称「%s」是保留的。
|
||||||
form.name_pattern_not_allowed=组织名中不允许使用「%s」格式。
|
form.name_pattern_not_allowed=组织名中不允许使用「%s」格式。
|
||||||
form.create_org_not_allowed=此账号禁止创建组织
|
form.create_org_not_allowed=此账号禁止创建组织
|
||||||
@ -2846,27 +2824,12 @@ settings.visibility.private_shortname=私有
|
|||||||
settings.update_settings=更新组织设置
|
settings.update_settings=更新组织设置
|
||||||
settings.update_setting_success=组织设置已更新。
|
settings.update_setting_success=组织设置已更新。
|
||||||
|
|
||||||
settings.rename=修改组织名称
|
|
||||||
settings.rename_desc=更改组织名称同时会更改组织的 URL 地址并释放旧的名称。
|
|
||||||
settings.rename_success=组织 %[1]s 已成功重命名为 %[2]s。
|
|
||||||
settings.rename_no_change=组织名称没有变化。
|
|
||||||
settings.rename_new_org_name=新组织名称
|
|
||||||
settings.rename_failed=由于内部错误,重命名组织失败
|
|
||||||
settings.rename_notices_1=此操作 <strong>无法</strong> 被回滚。
|
|
||||||
settings.rename_notices_2=在被人使用前,旧名称将会被重定向。
|
|
||||||
|
|
||||||
settings.update_avatar_success=组织头像已经更新。
|
settings.update_avatar_success=组织头像已经更新。
|
||||||
settings.delete=删除组织
|
settings.delete=删除组织
|
||||||
settings.delete_account=删除当前组织
|
settings.delete_account=删除当前组织
|
||||||
settings.delete_prompt=删除操作会永久清除该组织的信息,并且 <strong>无法</strong> 恢复!
|
settings.delete_prompt=删除操作会永久清除该组织的信息,并且 <strong>不可恢复</strong>!
|
||||||
settings.name_confirm=输入组织名称以确认:
|
|
||||||
settings.delete_notices_1=此操作 <strong>无法</strong> 被回滚。
|
|
||||||
settings.delete_notices_2=此操作将永久删除 <strong>%s</strong> 的所有<strong>仓库</strong>,包括 Git 数据、 工单、评论、百科和协作者的操作权限。
|
|
||||||
settings.delete_notices_3=此操作将永久删除 <strong>%s</strong> 的所有 <strong>软件包</strong>。
|
|
||||||
settings.delete_notices_4=此操作将永久删除 <strong>%s</strong> 的所有 <strong>项目</strong>。
|
|
||||||
settings.confirm_delete_account=确认删除组织
|
settings.confirm_delete_account=确认删除组织
|
||||||
settings.delete_failed=由于内部错误,删除组织失败
|
|
||||||
settings.delete_successful=组织 <b>%s</b> 已成功删除。
|
|
||||||
settings.hooks_desc=在此处添加的 Web 钩子将会应用到该组织下的 <strong>所有仓库</strong>。
|
settings.hooks_desc=在此处添加的 Web 钩子将会应用到该组织下的 <strong>所有仓库</strong>。
|
||||||
|
|
||||||
settings.labels_desc=添加能够被该组织下的 <strong>所有仓库</strong> 的工单使用的标签。
|
settings.labels_desc=添加能够被该组织下的 <strong>所有仓库</strong> 的工单使用的标签。
|
||||||
@ -3757,8 +3720,8 @@ none=还没有密钥。
|
|||||||
; These keys are also for "edit secret", the keys are kept as-is to avoid unnecessary re-translation
|
; These keys are also for "edit secret", the keys are kept as-is to avoid unnecessary re-translation
|
||||||
creation.description=组织描述
|
creation.description=组织描述
|
||||||
creation.name_placeholder=不区分大小写,仅限字母数字或下划线且不能以 GITEA_ 或 GITHUB_ 开头
|
creation.name_placeholder=不区分大小写,仅限字母数字或下划线且不能以 GITEA_ 或 GITHUB_ 开头
|
||||||
creation.value_placeholder=输入任何内容,开头和结尾的空白将会被忽略
|
creation.value_placeholder=输入任何内容,开头和结尾的空白将会被忽略。
|
||||||
creation.description_placeholder=输入简短描述(可选)
|
creation.description_placeholder=输入简短描述(可选)。
|
||||||
|
|
||||||
save_success=密钥「%s」保存成功。
|
save_success=密钥「%s」保存成功。
|
||||||
save_failed=密钥保存失败。
|
save_failed=密钥保存失败。
|
||||||
@ -3843,7 +3806,6 @@ runs.no_runs=工作流尚未运行过。
|
|||||||
runs.empty_commit_message=(空白的提交消息)
|
runs.empty_commit_message=(空白的提交消息)
|
||||||
runs.expire_log_message=旧的日志已清除。
|
runs.expire_log_message=旧的日志已清除。
|
||||||
runs.delete=删除工作流运行
|
runs.delete=删除工作流运行
|
||||||
runs.cancel=取消工作流运行
|
|
||||||
runs.delete.description=您确定要永久删除此工作流运行吗?此操作无法撤消。
|
runs.delete.description=您确定要永久删除此工作流运行吗?此操作无法撤消。
|
||||||
runs.not_done=此工作流运行尚未完成。
|
runs.not_done=此工作流运行尚未完成。
|
||||||
runs.view_workflow_file=查看工作流文件
|
runs.view_workflow_file=查看工作流文件
|
||||||
|
package-lock.json (generated, 64 changed lines)
@@ -28,6 +28,7 @@
 "dropzone": "6.0.0-beta.2",
 "easymde": "2.20.0",
 "esbuild-loader": "4.3.0",
+"escape-goat": "4.0.0",
 "fast-glob": "3.3.3",
 "htmx.org": "2.0.6",
 "idiomorph": "0.7.3",
@@ -39,7 +40,6 @@
 "minimatch": "10.0.2",
 "monaco-editor": "0.52.2",
 "monaco-editor-webpack-plugin": "7.1.0",
-"online-3d-viewer": "0.16.0",
 "pdfobject": "2.3.1",
 "perfect-debounce": "1.0.0",
 "postcss": "8.5.5",
@@ -2026,16 +2026,6 @@
 "vue": "^3.2.29"
 }
 },
-"node_modules/@simonwep/pickr": {
-"version": "1.9.0",
-"resolved": "https://registry.npmmirror.com/@simonwep/pickr/-/pickr-1.9.0.tgz",
-"integrity": "sha512-oEYvv15PyfZzjoAzvXYt3UyNGwzsrpFxLaZKzkOSd0WYBVwLd19iJerePDONxC1iF6+DpcswPdLIM2KzCJuYFg==",
-"license": "MIT",
-"dependencies": {
-"core-js": "3.32.2",
-"nanopop": "2.3.0"
-}
-},
 "node_modules/@stoplight/better-ajv-errors": {
 "version": "1.0.3",
 "resolved": "https://registry.npmjs.org/@stoplight/better-ajv-errors/-/better-ajv-errors-1.0.3.tgz",
@@ -5347,17 +5337,6 @@
 "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==",
 "license": "MIT"
 },
-"node_modules/core-js": {
-"version": "3.32.2",
-"resolved": "https://registry.npmmirror.com/core-js/-/core-js-3.32.2.tgz",
-"integrity": "sha512-pxXSw1mYZPDGvTQqEc5vgIb83jGQKFGYWY76z4a7weZXUolw3G+OvpZqSRcfYOoOVUQJYEPsWeQK8pKEnUtWxQ==",
-"hasInstallScript": true,
-"license": "MIT",
-"funding": {
-"type": "opencollective",
-"url": "https://opencollective.com/core-js"
-}
-},
 "node_modules/core-js-compat": {
 "version": "3.43.0",
 "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.43.0.tgz",
@@ -6562,6 +6541,18 @@
 "node": ">=6"
 }
 },
+"node_modules/escape-goat": {
+"version": "4.0.0",
+"resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz",
+"integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==",
+"license": "MIT",
+"engines": {
+"node": ">=12"
+},
+"funding": {
+"url": "https://github.com/sponsors/sindresorhus"
+}
+},
 "node_modules/escape-string-regexp": {
 "version": "4.0.0",
 "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
@@ -7730,12 +7721,6 @@
 }
 }
 },
-"node_modules/fflate": {
-"version": "0.8.2",
-"resolved": "https://registry.npmmirror.com/fflate/-/fflate-0.8.2.tgz",
-"integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==",
-"license": "MIT"
-},
 "node_modules/file-entry-cache": {
 "version": "6.0.1",
 "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
@@ -10300,12 +10285,6 @@
 "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
 }
 },
-"node_modules/nanopop": {
-"version": "2.3.0",
-"resolved": "https://registry.npmmirror.com/nanopop/-/nanopop-2.3.0.tgz",
-"integrity": "sha512-fzN+T2K7/Ah25XU02MJkPZ5q4Tj5FpjmIYq4rvoHX4yb16HzFdCO6JxFFn5Y/oBhQ8no8fUZavnyIv9/+xkBBw==",
-"license": "MIT"
-},
 "node_modules/napi-postinstall": {
 "version": "0.2.4",
 "resolved": "https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.2.4.tgz",
@@ -10546,17 +10525,6 @@
 "wrappy": "1"
 }
 },
-"node_modules/online-3d-viewer": {
-"version": "0.16.0",
-"resolved": "https://registry.npmmirror.com/online-3d-viewer/-/online-3d-viewer-0.16.0.tgz",
-"integrity": "sha512-Mcmo41TM3K+svlMDRH8ySKSY2e8s7Sssdb5U9LV3gkFKVWGGuS304Vk5gqxopAJbE72DpsC67Ve3YNtcAuROwQ==",
-"license": "MIT",
-"dependencies": {
-"@simonwep/pickr": "1.9.0",
-"fflate": "0.8.2",
-"three": "0.176.0"
-}
-},
 "node_modules/optionator": {
 "version": "0.9.4",
 "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
@@ -13225,12 +13193,6 @@
 "node": ">=0.8"
 }
 },
-"node_modules/three": {
-"version": "0.176.0",
-"resolved": "https://registry.npmmirror.com/three/-/three-0.176.0.tgz",
-"integrity": "sha512-PWRKYWQo23ojf9oZSlRGH8K09q7nRSWx6LY/HF/UUrMdYgN9i1e2OwJYHoQjwc6HF/4lvvYLC5YC1X8UJL2ZpA==",
-"license": "MIT"
-},
 "node_modules/throttle-debounce": {
 "version": "5.0.2",
 "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-5.0.2.tgz",
 
@@ -27,6 +27,7 @@
 "dropzone": "6.0.0-beta.2",
 "easymde": "2.20.0",
 "esbuild-loader": "4.3.0",
+"escape-goat": "4.0.0",
 "fast-glob": "3.3.3",
 "htmx.org": "2.0.6",
 "idiomorph": "0.7.3",
@@ -38,7 +39,6 @@
 "minimatch": "10.0.2",
 "monaco-editor": "0.52.2",
 "monaco-editor-webpack-plugin": "7.1.0",
-"online-3d-viewer": "0.16.0",
 "pdfobject": "2.3.1",
 "perfect-debounce": "1.0.0",
 "postcss": "8.5.5",
 
@@ -467,9 +467,7 @@ func CommonRoutes() *web.Router {
 g.MatchPath("HEAD", "/<group:*>/repodata/<filename>", rpm.CheckRepositoryFileExistence)
 g.MatchPath("GET", "/<group:*>/repodata/<filename>", rpm.GetRepositoryFile)
 g.MatchPath("PUT", "/<group:*>/upload", reqPackageAccess(perm.AccessModeWrite), rpm.UploadPackageFile)
-// this URL pattern is only used internally in the RPM index, it is generated by us, the filename part is not really used (can be anything)
 g.MatchPath("HEAD,GET", "/<group:*>/package/<name>/<version>/<architecture>", rpm.DownloadPackageFile)
-g.MatchPath("HEAD,GET", "/<group:*>/package/<name>/<version>/<architecture>/<filename>", rpm.DownloadPackageFile)
 g.MatchPath("DELETE", "/<group:*>/package/<name>/<version>/<architecture>", reqPackageAccess(perm.AccessModeWrite), rpm.DeletePackageFile)
 }, reqPackageAccess(perm.AccessModeRead))
 
@@ -812,8 +812,7 @@ func GetContentsExt(ctx *context.APIContext) {
 // required: true
 // - name: filepath
 // in: path
-// description: path of the dir, file, symlink or submodule in the repo. Swagger requires path parameter to be "required",
-// you can leave it empty or pass a single dot (".") to get the root directory.
+// description: path of the dir, file, symlink or submodule in the repo
 // type: string
 // required: true
 // - name: ref
@@ -824,8 +823,7 @@ func GetContentsExt(ctx *context.APIContext) {
 // - name: includes
 // in: query
 // description: By default this API's response only contains file's metadata. Use comma-separated "includes" options to retrieve more fields.
-// Option "file_content" will try to retrieve the file content, "lfs_metadata" will try to retrieve LFS metadata,
-// "commit_metadata" will try to retrieve commit metadata, and "commit_message" will try to retrieve commit message.
+// Option "file_content" will try to retrieve the file content, option "lfs_metadata" will try to retrieve LFS metadata.
 // type: string
 // required: false
 // responses:
@@ -834,9 +832,6 @@ func GetContentsExt(ctx *context.APIContext) {
 // "404":
 // "$ref": "#/responses/notFound"
 
-if treePath := ctx.PathParam("*"); treePath == "." || treePath == "/" {
-ctx.SetPathParam("*", "") // workaround for swagger, it requires path parameter to be "required", but we need to list root directory
-}
 opts := files_service.GetContentsOrListOptions{TreePath: ctx.PathParam("*")}
 for includeOpt := range strings.SplitSeq(ctx.FormString("includes"), ",") {
 if includeOpt == "" {
@@ -847,10 +842,6 @@ func GetContentsExt(ctx *context.APIContext) {
 opts.IncludeSingleFileContent = true
 case "lfs_metadata":
 opts.IncludeLfsMetadata = true
-case "commit_metadata":
-opts.IncludeCommitMetadata = true
-case "commit_message":
-opts.IncludeCommitMessage = true
 default:
 ctx.APIError(http.StatusBadRequest, fmt.Sprintf("unknown include option %q", includeOpt))
 return
@@ -892,11 +883,7 @@ func GetContents(ctx *context.APIContext) {
 // "$ref": "#/responses/ContentsResponse"
 // "404":
 // "$ref": "#/responses/notFound"
-ret := getRepoContents(ctx, files_service.GetContentsOrListOptions{
-TreePath: ctx.PathParam("*"),
-IncludeSingleFileContent: true,
-IncludeCommitMetadata: true,
-})
+ret := getRepoContents(ctx, files_service.GetContentsOrListOptions{TreePath: ctx.PathParam("*"), IncludeSingleFileContent: true})
 if ctx.Written() {
 return
 }
 
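A note on the GetContentsExt hunks above: the handler parses a comma-separated "includes" query option and maps each entry onto a GetContentsOrListOptions flag, rejecting anything else with a 400. As a rough client-side usage sketch, assuming the handler is mounted at the usual /api/v1/repos/{owner}/{repo}/contents-ext/{filepath} route (the route, host, owner, repo and file name below are assumptions for illustration, not taken from this diff):

// Illustrative only: endpoint path and server URL are assumed, not part of this diff.
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	base := "https://gitea.example.com/api/v1" // assumed server
	owner, repo, filePath := "org", "project", "README.md"

	// "includes" is the comma-separated option list handled by GetContentsExt.
	q := url.Values{}
	q.Set("includes", "file_content,lfs_metadata")

	reqURL := fmt.Sprintf("%s/repos/%s/%s/contents-ext/%s?%s",
		base, owner, repo, url.PathEscape(filePath), q.Encode())

	resp, err := http.Get(reqURL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}

An unknown option such as includes=commit_message would hit the default branch of the switch shown above and return HTTP 400.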
@@ -244,7 +244,7 @@ func editFileOpenExisting(ctx *context.Context) (prefetch []byte, dataRc io.Read
 return nil, nil, nil
 }
 
-if fInfo.isLFSFile() {
+if fInfo.isLFSFile {
 lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
 if err != nil {
 _ = dataRc.Close()
@@ -298,7 +298,7 @@ func EditFile(ctx *context.Context) {
 ctx.Data["FileSize"] = fInfo.fileSize
 
 // Only some file types are editable online as text.
-if fInfo.isLFSFile() {
+if fInfo.isLFSFile {
 ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
 } else if !fInfo.st.IsRepresentableAsText() {
 ctx.Data["NotEditableReason"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
 
@@ -443,10 +443,6 @@ func ViewPullMergeBox(ctx *context.Context) {
 preparePullViewPullInfo(ctx, issue)
 preparePullViewReviewAndMerge(ctx, issue)
 ctx.Data["PullMergeBoxReloading"] = issue.PullRequest.IsChecking()
-
-// TODO: it should use a dedicated struct to render the pull merge box, to make sure all data is prepared correctly
-ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
-ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
 ctx.HTML(http.StatusOK, tplPullMergeBox)
 }
 
@@ -267,10 +267,8 @@ func LFSFileGet(ctx *context.Context) {
 buf = buf[:n]
 
 st := typesniffer.DetectContentType(buf)
-// FIXME: there is no IsPlainText set, but template uses it
 ctx.Data["IsTextFile"] = st.IsText()
 ctx.Data["FileSize"] = meta.Size
-// FIXME: the last field is the URL-base64-encoded filename, it should not be "direct"
 ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s/%s.git/info/lfs/objects/%s/%s", setting.AppURL, url.PathEscape(ctx.Repo.Repository.OwnerName), url.PathEscape(ctx.Repo.Repository.Name), url.PathEscape(meta.Oid), "direct")
 switch {
 case st.IsRepresentableAsText():
@@ -311,6 +309,8 @@ func LFSFileGet(ctx *context.Context) {
 }
 ctx.Data["LineNums"] = gotemplate.HTML(output.String())
 
+case st.IsPDF():
+ctx.Data["IsPDFFile"] = true
 case st.IsVideo():
 ctx.Data["IsVideoFile"] = true
 case st.IsAudio():
 
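The LFSFileGet hunks above extend a switch driven by typesniffer.DetectContentType with a PDF case. A minimal sketch of the same dispatch pattern, reusing only the typesniffer calls that appear in the diff (the standalone program around them is illustrative, not the handler itself):

// Sketch only: mirrors the viewer flags set by LFSFileGet, assuming the
// code.gitea.io/gitea/modules/typesniffer package used in the diff above.
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/typesniffer"
)

// classify sniffs the first bytes of a file and reports which viewer the UI would pick.
func classify(prefetch []byte) string {
	st := typesniffer.DetectContentType(prefetch)
	switch {
	case st.IsRepresentableAsText():
		return "text"
	case st.IsPDF():
		return "pdf"
	case st.IsVideo():
		return "video"
	case st.IsAudio():
		return "audio"
	case st.IsImage():
		return "image"
	default:
		return "raw download"
	}
}

func main() {
	fmt.Println(classify([]byte("%PDF-1.7 ..."))) // pdf
	fmt.Println(classify([]byte("plain text")))   // text
}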
@@ -6,7 +6,6 @@ package repo
 import (
 "html/template"
 "net/http"
-"path"
 "strings"
 
 pull_model "code.gitea.io/gitea/models/pull"
@@ -112,7 +111,7 @@ func transformDiffTreeForWeb(renderedIconPool *fileicon.RenderedIconPool, diffTr
 item := &WebDiffFileItem{FullName: file.HeadPath, DiffStatus: file.Status}
 item.IsViewed = filesViewedState[item.FullName] == pull_model.Viewed
 item.NameHash = git.HashFilePathForWebUI(item.FullName)
-item.FileIcon = fileicon.RenderEntryIconHTML(renderedIconPool, &fileicon.EntryInfo{BaseName: path.Base(file.HeadPath), EntryMode: file.HeadMode})
+item.FileIcon = fileicon.RenderEntryIconHTML(renderedIconPool, &fileicon.EntryInfo{FullName: file.HeadPath, EntryMode: file.HeadMode})
 
 switch file.HeadMode {
 case git.EntryModeTree:
 
@@ -12,7 +12,6 @@ import (
 "io"
 "net/http"
 "net/url"
-"path"
 "strings"
 "time"
 
@@ -60,63 +59,60 @@ const (
 )
 
 type fileInfo struct {
+isTextFile bool
+isLFSFile bool
 fileSize int64
 lfsMeta *lfs.Pointer
 st typesniffer.SniffedType
 }
 
-func (fi *fileInfo) isLFSFile() bool {
-return fi.lfsMeta != nil && fi.lfsMeta.Oid != ""
-}
-
-func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) (buf []byte, dataRc io.ReadCloser, fi *fileInfo, err error) {
-dataRc, err = blob.DataAsync()
+func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) ([]byte, io.ReadCloser, *fileInfo, error) {
+dataRc, err := blob.DataAsync()
 if err != nil {
 return nil, nil, nil, err
 }
 
-const prefetchSize = lfs.MetaFileMaxSize
-
-buf = make([]byte, prefetchSize)
+buf := make([]byte, 1024)
 n, _ := util.ReadAtMost(dataRc, buf)
 buf = buf[:n]
 
-fi = &fileInfo{fileSize: blob.Size(), st: typesniffer.DetectContentType(buf)}
+st := typesniffer.DetectContentType(buf)
+isTextFile := st.IsText()
 
 // FIXME: what happens when README file is an image?
-if !fi.st.IsText() || !setting.LFS.StartServer {
-return buf, dataRc, fi, nil
+if !isTextFile || !setting.LFS.StartServer {
+return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
 }
 
 pointer, _ := lfs.ReadPointerFromBuffer(buf)
-if !pointer.IsValid() { // fallback to a plain file
-return buf, dataRc, fi, nil
+if !pointer.IsValid() { // fallback to plain file
+return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
 }
 
 meta, err := git_model.GetLFSMetaObjectByOid(ctx, repoID, pointer.Oid)
-if err != nil { // fallback to a plain file
+if err != nil { // fallback to plain file
 log.Warn("Unable to access LFS pointer %s in repo %d: %v", pointer.Oid, repoID, err)
-return buf, dataRc, fi, nil
+return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
 }
 
-// close the old dataRc and open the real LFS target
-_ = dataRc.Close()
+dataRc.Close()
 dataRc, err = lfs.ReadMetaObject(pointer)
 if err != nil {
 return nil, nil, nil, err
 }
 
-buf = make([]byte, prefetchSize)
+buf = make([]byte, 1024)
 n, err = util.ReadAtMost(dataRc, buf)
 if err != nil {
-_ = dataRc.Close()
-return nil, nil, fi, err
+dataRc.Close()
+return nil, nil, nil, err
 }
 buf = buf[:n]
-fi.st = typesniffer.DetectContentType(buf)
-fi.fileSize = blob.Size()
-fi.lfsMeta = &meta.Pointer
-return buf, dataRc, fi, nil
+st = typesniffer.DetectContentType(buf)
+return buf, dataRc, &fileInfo{st.IsText(), true, meta.Size, &meta.Pointer, st}, nil
 }
 
 func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
@@ -261,9 +257,7 @@ func prepareDirectoryFileIcons(ctx *context.Context, files []git.CommitInfo) {
 renderedIconPool := fileicon.NewRenderedIconPool()
 fileIcons := map[string]template.HTML{}
 for _, f := range files {
-fullPath := path.Join(ctx.Repo.TreePath, f.Entry.Name())
-entryInfo := fileicon.EntryInfoFromGitTreeEntry(ctx.Repo.Commit, fullPath, f.Entry)
-fileIcons[f.Entry.Name()] = fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo)
+fileIcons[f.Entry.Name()] = fileicon.RenderEntryIconHTML(renderedIconPool, fileicon.EntryInfoFromGitTreeEntry(f.Entry))
 }
 fileIcons[".."] = fileicon.RenderEntryIconHTML(renderedIconPool, fileicon.EntryInfoFolder())
 ctx.Data["FileIcons"] = fileIcons
 
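For context on the getFileReader hunks above: both sides sniff the first bytes of a blob and only treat it as a Git LFS object when those bytes parse as a valid LFS pointer, otherwise they fall back to the plain blob content. A minimal sketch of that detection step, reusing lfs.ReadPointerFromBuffer, lfs.MetaFileMaxSize and util.ReadAtMost exactly as the diff does (the standalone wiring around them is an assumption):

// Sketch only: the helper and the sample pointer text below are illustrative.
package main

import (
	"fmt"
	"io"
	"strings"

	"code.gitea.io/gitea/modules/lfs"
	"code.gitea.io/gitea/modules/util"
)

// sniffLFSPointer reads at most lfs.MetaFileMaxSize bytes and reports whether
// they parse as a valid LFS pointer, returning the pointer and the prefetched bytes.
func sniffLFSPointer(r io.Reader) (lfs.Pointer, []byte, bool) {
	buf := make([]byte, lfs.MetaFileMaxSize)
	n, _ := util.ReadAtMost(r, buf)
	buf = buf[:n]

	pointer, _ := lfs.ReadPointerFromBuffer(buf)
	return pointer, buf, pointer.IsValid()
}

func main() {
	content := "version https://git-lfs.github.com/spec/v1\n" +
		"oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393\n" +
		"size 12345\n"
	if ptr, _, ok := sniffLFSPointer(strings.NewReader(content)); ok {
		fmt.Println("LFS pointer, oid:", ptr.Oid, "size:", ptr.Size)
	} else {
		fmt.Println("plain blob")
	}
}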
@@ -23,7 +23,6 @@ import (
 "code.gitea.io/gitea/modules/log"
 "code.gitea.io/gitea/modules/markup"
 "code.gitea.io/gitea/modules/setting"
-"code.gitea.io/gitea/modules/typesniffer"
 "code.gitea.io/gitea/modules/util"
 "code.gitea.io/gitea/services/context"
 issue_service "code.gitea.io/gitea/services/issue"
@@ -41,128 +40,7 @@ func prepareLatestCommitInfo(ctx *context.Context) bool {
 return loadLatestCommitData(ctx, commit)
 }
 
-func prepareFileViewLfsAttrs(ctx *context.Context) (*attribute.Attributes, bool) {
-attrsMap, err := attribute.CheckAttributes(ctx, ctx.Repo.GitRepo, ctx.Repo.CommitID, attribute.CheckAttributeOpts{
-Filenames: []string{ctx.Repo.TreePath},
-Attributes: []string{attribute.LinguistGenerated, attribute.LinguistVendored, attribute.LinguistLanguage, attribute.GitlabLanguage},
-})
-if err != nil {
-ctx.ServerError("attribute.CheckAttributes", err)
-return nil, false
-}
-attrs := attrsMap[ctx.Repo.TreePath]
-if attrs == nil {
-// this case shouldn't happen, just in case.
-setting.PanicInDevOrTesting("no attributes found for %s", ctx.Repo.TreePath)
-attrs = attribute.NewAttributes()
-}
-ctx.Data["IsVendored"], ctx.Data["IsGenerated"] = attrs.GetVendored().Value(), attrs.GetGenerated().Value()
-return attrs, true
-}
-
-func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte, utf8Reader io.Reader) bool {
-markupType := markup.DetectMarkupTypeByFileName(filename)
-if markupType == "" {
-markupType = markup.DetectRendererType(filename, sniffedType, prefetchBuf)
-}
-if markupType == "" {
-return false
-}
-
-ctx.Data["HasSourceRenderedToggle"] = true
-
-if ctx.FormString("display") == "source" {
-return false
-}
-
-ctx.Data["MarkupType"] = markupType
-metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx)
-metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL()
-rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
-CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
-CurrentTreePath: path.Dir(ctx.Repo.TreePath),
-}).
-WithMarkupType(markupType).
-WithRelativePath(ctx.Repo.TreePath).
-WithMetas(metas)
-
-var err error
-ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, utf8Reader)
-if err != nil {
-ctx.ServerError("Render", err)
-return true
-}
-// to prevent iframe from loading third-party url
-ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
-return true
-}
-
-func handleFileViewRenderSource(ctx *context.Context, filename string, attrs *attribute.Attributes, fInfo *fileInfo, utf8Reader io.Reader) bool {
-if ctx.FormString("display") == "rendered" || !fInfo.st.IsRepresentableAsText() {
-return false
-}
-
-if !fInfo.st.IsText() {
-if ctx.FormString("display") == "" {
-// not text but representable as text, e.g. SVG
-// since there is no "display" is specified, let other renders to handle
-return false
-}
-ctx.Data["HasSourceRenderedToggle"] = true
-}
-
-buf, _ := io.ReadAll(utf8Reader)
-// The Open Group Base Specification: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html
-// empty: 0 lines; "a": 1 incomplete-line; "a\n": 1 line; "a\nb": 1 line, 1 incomplete-line;
-// Gitea uses the definition (like most modern editors):
-// empty: 0 lines; "a": 1 line; "a\n": 2 lines; "a\nb": 2 lines;
-// When rendering, the last empty line is not rendered in UI, while the line-number is still counted, to tell users that the file contains a trailing EOL.
-// To make the UI more consistent, it could use an icon mark to indicate that there is no trailing EOL, and show line-number as the rendered lines.
-// This NumLines is only used for the display on the UI: "xxx lines"
-if len(buf) == 0 {
-ctx.Data["NumLines"] = 0
-} else {
-ctx.Data["NumLines"] = bytes.Count(buf, []byte{'\n'}) + 1
-}
-
-language := attrs.GetLanguage().Value()
-fileContent, lexerName, err := highlight.File(filename, language, buf)
-ctx.Data["LexerName"] = lexerName
-if err != nil {
-log.Error("highlight.File failed, fallback to plain text: %v", err)
-fileContent = highlight.PlainText(buf)
-}
-status := &charset.EscapeStatus{}
-statuses := make([]*charset.EscapeStatus, len(fileContent))
-for i, line := range fileContent {
-statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale)
-status = status.Or(statuses[i])
-}
-ctx.Data["EscapeStatus"] = status
-ctx.Data["FileContent"] = fileContent
-ctx.Data["LineEscapeStatus"] = statuses
-return true
-}
-
-func handleFileViewRenderImage(ctx *context.Context, fInfo *fileInfo, prefetchBuf []byte) bool {
-if !fInfo.st.IsImage() {
-return false
-}
-if fInfo.st.IsSvgImage() && !setting.UI.SVG.Enabled {
-return false
-}
-if fInfo.st.IsSvgImage() {
-ctx.Data["HasSourceRenderedToggle"] = true
-} else {
-img, _, err := image.DecodeConfig(bytes.NewReader(prefetchBuf))
-if err == nil { // ignore the error for the formats that are not supported by image.DecodeConfig
-ctx.Data["ImageSize"] = fmt.Sprintf("%dx%dpx", img.Width, img.Height)
-}
-}
-return true
-}
-
-func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {
+func prepareToRenderFile(ctx *context.Context, entry *git.TreeEntry) {
 ctx.Data["IsViewFile"] = true
 ctx.Data["HideRepoInfo"] = true
 
@@ -208,8 +86,11 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {
 }
 }
 
+isDisplayingSource := ctx.FormString("display") == "source"
+isDisplayingRendered := !isDisplayingSource
+
 // Don't call any other repository functions depends on git.Repository until the dataRc closed to
-// avoid creating an unnecessary temporary cat file.
+// avoid create unnecessary temporary cat file.
 buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, blob)
 if err != nil {
 ctx.ServerError("getFileReader", err)
@@ -217,62 +98,207 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {
 }
 defer dataRc.Close()
 
-if fInfo.isLFSFile() {
+if fInfo.isLFSFile {
 ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/media/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
 }
 
-if !prepareFileViewEditorButtons(ctx) {
-return
+isRepresentableAsText := fInfo.st.IsRepresentableAsText()
+if !isRepresentableAsText {
+// If we can't show plain text, always try to render.
+isDisplayingSource = false
+isDisplayingRendered = true
 }
-ctx.Data["IsLFSFile"] = fInfo.isLFSFile()
+ctx.Data["IsLFSFile"] = fInfo.isLFSFile
 ctx.Data["FileSize"] = fInfo.fileSize
-ctx.Data["IsRepresentableAsText"] = fInfo.st.IsRepresentableAsText()
+ctx.Data["IsTextFile"] = fInfo.isTextFile
+ctx.Data["IsRepresentableAsText"] = isRepresentableAsText
+ctx.Data["IsDisplayingSource"] = isDisplayingSource
+ctx.Data["IsDisplayingRendered"] = isDisplayingRendered
 ctx.Data["IsExecutable"] = entry.IsExecutable()
-ctx.Data["CanCopyContent"] = fInfo.st.IsRepresentableAsText() || fInfo.st.IsImage()
 
-attrs, ok := prepareFileViewLfsAttrs(ctx)
-if !ok {
-return
+isTextSource := fInfo.isTextFile || isDisplayingSource
+ctx.Data["IsTextSource"] = isTextSource
+if isTextSource {
+ctx.Data["CanCopyContent"] = true
 }
 
-// TODO: in the future maybe we need more accurate flags, for example:
-// * IsRepresentableAsText: some files are text, some are not
-// * IsRenderableXxx: some files are rendered by backend "markup" engine, some are rendered by frontend (pdf, 3d)
-// * DefaultViewMode: when there is no "display" query parameter, which view mode should be used by default, source or rendered
+// Check LFS Lock
+lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
+ctx.Data["LFSLock"] = lfsLock
+if err != nil {
+ctx.ServerError("GetTreePathLock", err)
+return
+}
+if lfsLock != nil {
+u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
+if err != nil {
+ctx.ServerError("GetTreePathLock", err)
+return
+}
+ctx.Data["LFSLockOwner"] = u.Name
+ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
+ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
+}
+
+// read all needed attributes which will be used later
+// there should be no performance different between reading 2 or 4 here
+attrsMap, err := attribute.CheckAttributes(ctx, ctx.Repo.GitRepo, ctx.Repo.CommitID, attribute.CheckAttributeOpts{
+Filenames: []string{ctx.Repo.TreePath},
+Attributes: []string{attribute.LinguistGenerated, attribute.LinguistVendored, attribute.LinguistLanguage, attribute.GitlabLanguage},
+})
+if err != nil {
+ctx.ServerError("attribute.CheckAttributes", err)
+return
+}
+attrs := attrsMap[ctx.Repo.TreePath]
+if attrs == nil {
+// this case shouldn't happen, just in case.
+setting.PanicInDevOrTesting("no attributes found for %s", ctx.Repo.TreePath)
+attrs = attribute.NewAttributes()
+}
 
-utf8Reader := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
 switch {
-case fInfo.fileSize >= setting.UI.MaxDisplayFileSize:
+case isRepresentableAsText:
+if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
 ctx.Data["IsFileTooLarge"] = true
-case handleFileViewRenderMarkup(ctx, entry.Name(), fInfo.st, buf, utf8Reader):
-// it also sets ctx.Data["FileContent"] and more
-ctx.Data["IsMarkup"] = true
-case handleFileViewRenderSource(ctx, entry.Name(), attrs, fInfo, utf8Reader):
-// it also sets ctx.Data["FileContent"] and more
-ctx.Data["IsDisplayingSource"] = true
-case handleFileViewRenderImage(ctx, fInfo, buf):
+break
+}
+if fInfo.st.IsSvgImage() {
 ctx.Data["IsImageFile"] = true
+ctx.Data["CanCopyContent"] = true
+ctx.Data["HasSourceRenderedToggle"] = true
+}
+
+rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+shouldRenderSource := ctx.FormString("display") == "source"
+readmeExist := util.IsReadmeFileName(blob.Name())
+ctx.Data["ReadmeExist"] = readmeExist
+
+markupType := markup.DetectMarkupTypeByFileName(blob.Name())
+if markupType == "" {
+markupType = markup.DetectRendererType(blob.Name(), bytes.NewReader(buf))
+}
+if markupType != "" {
+ctx.Data["HasSourceRenderedToggle"] = true
+}
+if markupType != "" && !shouldRenderSource {
+ctx.Data["IsMarkup"] = true
+ctx.Data["MarkupType"] = markupType
+metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx)
+metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL()
+rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
+CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
+CurrentTreePath: path.Dir(ctx.Repo.TreePath),
+}).
+WithMarkupType(markupType).
+WithRelativePath(ctx.Repo.TreePath).
+WithMetas(metas)
+
+ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
+if err != nil {
+ctx.ServerError("Render", err)
+return
+}
+// to prevent iframe load third-party url
+ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
+} else {
+buf, _ := io.ReadAll(rd)
+
+// The Open Group Base Specification: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html
+// empty: 0 lines; "a": 1 incomplete-line; "a\n": 1 line; "a\nb": 1 line, 1 incomplete-line;
+// Gitea uses the definition (like most modern editors):
+// empty: 0 lines; "a": 1 line; "a\n": 2 lines; "a\nb": 2 lines;
+// When rendering, the last empty line is not rendered in UI, while the line-number is still counted, to tell users that the file contains a trailing EOL.
+// To make the UI more consistent, it could use an icon mark to indicate that there is no trailing EOL, and show line-number as the rendered lines.
+// This NumLines is only used for the display on the UI: "xxx lines"
+if len(buf) == 0 {
+ctx.Data["NumLines"] = 0
+} else {
+ctx.Data["NumLines"] = bytes.Count(buf, []byte{'\n'}) + 1
+}
+
+language := attrs.GetLanguage().Value()
+fileContent, lexerName, err := highlight.File(blob.Name(), language, buf)
+ctx.Data["LexerName"] = lexerName
+if err != nil {
+log.Error("highlight.File failed, fallback to plain text: %v", err)
+fileContent = highlight.PlainText(buf)
+}
+status := &charset.EscapeStatus{}
+statuses := make([]*charset.EscapeStatus, len(fileContent))
+for i, line := range fileContent {
+statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale)
+status = status.Or(statuses[i])
+}
+ctx.Data["EscapeStatus"] = status
+ctx.Data["FileContent"] = fileContent
+ctx.Data["LineEscapeStatus"] = statuses
+}
+
+case fInfo.st.IsPDF():
+ctx.Data["IsPDFFile"] = true
 case fInfo.st.IsVideo():
 ctx.Data["IsVideoFile"] = true
 case fInfo.st.IsAudio():
 ctx.Data["IsAudioFile"] = true
+case fInfo.st.IsImage() && (setting.UI.SVG.Enabled || !fInfo.st.IsSvgImage()):
+ctx.Data["IsImageFile"] = true
+ctx.Data["CanCopyContent"] = true
 default:
-// unable to render anything, show the "view raw" or let frontend handle it
+if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ctx.Data["IsFileTooLarge"] = true
+break
 }
+
+// TODO: this logic duplicates with "isRepresentableAsText=true", it is not the same as "LFSFileGet" in "lfs.go"
+// It is used by "external renders", markupRender will execute external programs to get rendered content.
+if markupType := markup.DetectMarkupTypeByFileName(blob.Name()); markupType != "" {
+rd := io.MultiReader(bytes.NewReader(buf), dataRc)
+ctx.Data["IsMarkup"] = true
+ctx.Data["MarkupType"] = markupType
+
+rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
+CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
+CurrentTreePath: path.Dir(ctx.Repo.TreePath),
+}).
+WithMarkupType(markupType).
+WithRelativePath(ctx.Repo.TreePath)
+
+ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
+if err != nil {
+ctx.ServerError("Render", err)
+return
+}
+}
+}
+
+ctx.Data["IsVendored"], ctx.Data["IsGenerated"] = attrs.GetVendored().Value(), attrs.GetGenerated().Value()
+
+if fInfo.st.IsImage() && !fInfo.st.IsSvgImage() {
+img, _, err := image.DecodeConfig(bytes.NewReader(buf))
+if err == nil {
+// There are Image formats go can't decode
+// Instead of throwing an error in that case, we show the size only when we can decode
+ctx.Data["ImageSize"] = fmt.Sprintf("%dx%dpx", img.Width, img.Height)
+}
+}
+
+prepareToRenderButtons(ctx, lfsLock)
 }
 
-func prepareFileViewEditorButtons(ctx *context.Context) bool {
+func prepareToRenderButtons(ctx *context.Context, lfsLock *git_model.LFSLock) {
 // archived or mirror repository, the buttons should not be shown
 if !ctx.Repo.Repository.CanEnableEditor() {
-return true
+return
 }
 
 // The buttons should not be shown if it's not a branch
 if !ctx.Repo.RefFullName.IsBranch() {
 ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
 ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
-return true
+return
 }
 
 if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
@@ -280,24 +306,7 @@ func prepareFileViewEditorButtons(ctx *context.Context) bool {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
ctx.Data["CanDeleteFile"] = true
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
-return true
+return
-}

-lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
-ctx.Data["LFSLock"] = lfsLock
-if err != nil {
-ctx.ServerError("GetTreePathLock", err)
-return false
-}
-if lfsLock != nil {
-u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
-if err != nil {
-ctx.ServerError("GetTreePathLock", err)
-return false
-}
-ctx.Data["LFSLockOwner"] = u.Name
-ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
-ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
}

// it's a lfs file and the user is not the owner of the lock
@@ -306,5 +315,4 @@ func prepareFileViewEditorButtons(ctx *context.Context) bool {
ctx.Data["EditFileTooltip"] = util.Iif(isLFSLocked, ctx.Tr("repo.editor.this_file_locked"), ctx.Tr("repo.editor.edit_this_file"))
ctx.Data["CanDeleteFile"] = !isLFSLocked
ctx.Data["DeleteFileTooltip"] = util.Iif(isLFSLocked, ctx.Tr("repo.editor.this_file_locked"), ctx.Tr("repo.editor.delete_this_file"))
-return true
}

@@ -143,7 +143,7 @@ func prepareToRenderDirectory(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefFullName.ShortName())
}

-subfolder, readmeFile, err := findReadmeFileInEntries(ctx, ctx.Repo.TreePath, entries, true)
+subfolder, readmeFile, err := findReadmeFileInEntries(ctx, entries, true)
if err != nil {
ctx.ServerError("findReadmeFileInEntries", err)
return
@@ -339,7 +339,7 @@ func prepareToRenderDirOrFile(entry *git.TreeEntry) func(ctx *context.Context) {
if entry.IsDir() {
prepareToRenderDirectory(ctx)
} else {
-prepareFileView(ctx, entry)
+prepareToRenderFile(ctx, entry)
}
}
}
@@ -377,8 +377,8 @@ func prepareHomeTreeSideBarSwitch(ctx *context.Context) {

func redirectSrcToRaw(ctx *context.Context) bool {
// GitHub redirects a tree path with "?raw=1" to the raw path
-// It is useful to embed some raw contents into Markdown files,
+// It is useful to embed some raw contents into markdown files,
-// then viewing the Markdown in "src" path could embed the raw content correctly.
+// then viewing the markdown in "src" path could embed the raw content correctly.
if ctx.Repo.TreePath != "" && ctx.FormBool("raw") {
ctx.Redirect(ctx.Repo.RepoLink + "/raw/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath))
return true
@@ -386,20 +386,6 @@ func redirectSrcToRaw(ctx *context.Context) bool {
return false
}

-func redirectFollowSymlink(ctx *context.Context, treePathEntry *git.TreeEntry) bool {
-if ctx.Repo.TreePath == "" || !ctx.FormBool("follow_symlink") {
-return false
-}
-if treePathEntry.IsLink() {
-if res, err := git.EntryFollowLinks(ctx.Repo.Commit, ctx.Repo.TreePath, treePathEntry); err == nil {
-redirect := ctx.Repo.RepoLink + "/src/" + ctx.Repo.RefTypeNameSubURL() + "/" + util.PathEscapeSegments(res.TargetFullPath) + "?" + ctx.Req.URL.RawQuery
-ctx.Redirect(redirect)
-return true
-} // else: don't handle the links we cannot resolve, so ignore the error
-}
-return false
-}

// Home render repository home page
func Home(ctx *context.Context) {
if handleRepoHomeFeed(ctx) {
@@ -408,7 +394,6 @@ func Home(ctx *context.Context) {
if redirectSrcToRaw(ctx) {
return
}

// Check whether the repo is viewable: not in migration, and the code unit should be enabled
// Ideally the "feed" logic should be after this, but old code did so, so keep it as-is.
checkHomeCodeViewable(ctx)
@@ -439,10 +424,6 @@ func Home(ctx *context.Context) {
return
}

-if redirectFollowSymlink(ctx, entry) {
-return
-}

// prepare the tree path
var treeNames, paths []string
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.RefTypeNameSubURL()
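Note (illustration only, hosts and paths are hypothetical): with the redirect helpers above, a source URL such as https://gitea.example.com/user2/repo1/src/branch/main/docs/logo.png?raw=1 is redirected to https://gitea.example.com/user2/repo1/raw/branch/main/docs/logo.png, and a symlinked tree path requested with ?follow_symlink=1 is redirected to the resolved target path with the original query string preserved.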
@@ -32,7 +32,15 @@ import (
// entries == ctx.Repo.Commit.SubTree(ctx.Repo.TreePath).ListEntries()
//
// FIXME: There has to be a more efficient way of doing this
-func findReadmeFileInEntries(ctx *context.Context, parentDir string, entries []*git.TreeEntry, tryWellKnownDirs bool) (string, *git.TreeEntry, error) {
+func findReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, tryWellKnownDirs bool) (string, *git.TreeEntry, error) {
+// Create a list of extensions in priority order
+// 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
+// 2. Txt files - e.g. README.txt
+// 3. No extension - e.g. README
+exts := append(localizedExtensions(".md", ctx.Locale.Language()), ".txt", "") // sorted by priority
+extCount := len(exts)
+readmeFiles := make([]*git.TreeEntry, extCount+1)

docsEntries := make([]*git.TreeEntry, 3) // (one of docs/, .gitea/ or .github/)
for _, entry := range entries {
if tryWellKnownDirs && entry.IsDir() {
@@ -54,23 +62,16 @@ func findReadmeFileInEntries(ctx *context.Context, parentDir string, entries []*
docsEntries[2] = entry
}
}
+continue
}
-}

-// Create a list of extensions in priority order
-// 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
-// 2. Txt files - e.g. README.txt
-// 3. No extension - e.g. README
-exts := append(localizedExtensions(".md", ctx.Locale.Language()), ".txt", "") // sorted by priority
-extCount := len(exts)
-readmeFiles := make([]*git.TreeEntry, extCount+1)
-for _, entry := range entries {
if i, ok := util.IsReadmeFileExtension(entry.Name(), exts...); ok {
-fullPath := path.Join(parentDir, entry.Name())
+log.Debug("Potential readme file: %s", entry.Name())
if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].Name(), entry.Blob().Name()) {
if entry.IsLink() {
-res, err := git.EntryFollowLinks(ctx.Repo.Commit, fullPath, entry)
+target, err := entry.FollowLinks()
-if err == nil && (res.TargetEntry.IsExecutable() || res.TargetEntry.IsRegular()) {
+if err != nil && !git.IsErrSymlinkUnresolved(err) {
+return "", nil, err
+} else if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = entry
}
} else {
@@ -79,7 +80,6 @@ func findReadmeFileInEntries(ctx *context.Context, parentDir string, entries []*
}
}
}

var readmeFile *git.TreeEntry
for _, f := range readmeFiles {
if f != nil {
@@ -103,7 +103,7 @@ func findReadmeFileInEntries(ctx *context.Context, parentDir string, entries []*
return "", nil, err
}

-subfolder, readmeFile, err := findReadmeFileInEntries(ctx, parentDir, childEntries, false)
+subfolder, readmeFile, err := findReadmeFileInEntries(ctx, childEntries, false)
if err != nil && !git.IsErrNotExist(err) {
return "", nil, err
}
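Note (illustrative sketch, not from the patch): the README priority order built above can be visualised with a standalone Go snippet; localizedExtensions is stubbed here and its exact output for "en-US" is an assumption.

package main

import "fmt"

// localizedExtensions is stubbed for illustration; in Gitea it derives
// locale-specific variants of the extension from the language code.
func localizedExtensions(ext, languageCode string) []string {
	return []string{".en-us.md", ".en.md", ext} // assumed output for "en-US"
}

func main() {
	// Same order the diff builds: localized markdown first, then plain ".md",
	// then ".txt", then no extension at all (a bare "README").
	exts := append(localizedExtensions(".md", "en-US"), ".txt", "")
	for i, ext := range exts {
		fmt.Printf("priority %d: README%s\n", i, ext)
	}
}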
@@ -139,52 +139,46 @@ func localizedExtensions(ext, languageCode string) (localizedExts []string) {
}

func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFile *git.TreeEntry) {
-if readmeFile == nil {
+target := readmeFile
+if readmeFile != nil && readmeFile.IsLink() {
+target, _ = readmeFile.FollowLinks()
+}
+if target == nil {
+// if findReadmeFile() failed and/or gave us a broken symlink (which it shouldn't)
+// simply skip rendering the README
return
}

-readmeFullPath := path.Join(ctx.Repo.TreePath, subfolder, readmeFile.Name())
-readmeTargetEntry := readmeFile
-if readmeFile.IsLink() {
-if res, err := git.EntryFollowLinks(ctx.Repo.Commit, readmeFullPath, readmeFile); err == nil {
-readmeTargetEntry = res.TargetEntry
-} else {
-readmeTargetEntry = nil // if we cannot resolve the symlink, we cannot render the readme, ignore the error
-}
-}
-if readmeTargetEntry == nil {
-return // if no valid README entry found, skip rendering the README
-}

ctx.Data["RawFileLink"] = ""
ctx.Data["ReadmeInList"] = path.Join(subfolder, readmeFile.Name()) // the relative path to the readme file to the current tree path
ctx.Data["ReadmeExist"] = true
ctx.Data["FileIsSymlink"] = readmeFile.IsLink()

-buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, readmeTargetEntry.Blob())
+buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, target.Blob())
if err != nil {
ctx.ServerError("getFileReader", err)
return
}
defer dataRc.Close()

-ctx.Data["FileIsText"] = fInfo.st.IsText()
+ctx.Data["FileIsText"] = fInfo.isTextFile
-ctx.Data["FileTreePath"] = readmeFullPath
+ctx.Data["FileTreePath"] = path.Join(ctx.Repo.TreePath, subfolder, readmeFile.Name())
ctx.Data["FileSize"] = fInfo.fileSize
-ctx.Data["IsLFSFile"] = fInfo.isLFSFile()
+ctx.Data["IsLFSFile"] = fInfo.isLFSFile

-if fInfo.isLFSFile() {
+if fInfo.isLFSFile {
filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.Name()))
ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.Link(), url.PathEscape(fInfo.lfsMeta.Oid), url.PathEscape(filenameBase64))
}

-if !fInfo.st.IsText() {
+if !fInfo.isTextFile {
return
}

if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
// Pretend that this is a normal text file to display 'This file is too large to be shown'
ctx.Data["IsFileTooLarge"] = true
+ctx.Data["IsTextFile"] = true
return
}

@@ -196,10 +190,10 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil

rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
-CurrentTreePath: path.Dir(readmeFullPath),
+CurrentTreePath: path.Join(ctx.Repo.TreePath, subfolder),
}).
WithMarkupType(markupType).
-WithRelativePath(readmeFullPath)
+WithRelativePath(path.Join(ctx.Repo.TreePath, subfolder, readmeFile.Name())) // ctx.Repo.TreePath is the directory not the Readme so we must append the Readme filename (and path).

ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
if err != nil {
@@ -218,7 +212,7 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil
ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlHTML(template.HTML(contentEscaped), ctx.Locale)
}

-if !fInfo.isLFSFile() && ctx.Repo.Repository.CanEnableEditor() {
+if !fInfo.isLFSFile && ctx.Repo.Repository.CanEnableEditor() {
ctx.Data["CanEditReadmeFile"] = true
}
}
@@ -203,6 +203,9 @@ func ViewPackageVersion(ctx *context.Context) {
}
ctx.Data["PackageRegistryHost"] = registryHostURL.Host

+var pvs []*packages_model.PackageVersion
+pvsTotal := int64(0)

switch pd.Package.Type {
case packages_model.TypeAlpine:
branches := make(container.Set[string])
@@ -293,16 +296,12 @@ func ViewPackageVersion(ctx *context.Context) {
}
}
ctx.Data["ContainerImageMetadata"] = imageMetadata
-}

-var pvs []*packages_model.PackageVersion
-var pvsTotal int64
-if pd.Package.Type == packages_model.TypeContainer {
pvs, pvsTotal, err = container_model.SearchImageTags(ctx, &container_model.ImageTagsSearchOptions{
Paginator: db.NewAbsoluteListOptions(0, 5),
PackageID: pd.Package.ID,
IsTagged: true,
})
-} else {
+default:
pvs, pvsTotal, err = packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
Paginator: db.NewAbsoluteListOptions(0, 5),
PackageID: pd.Package.ID,
@@ -313,6 +312,7 @@ func ViewPackageVersion(ctx *context.Context) {
ctx.ServerError("", err)
return
}

ctx.Data["LatestVersions"] = pvs
ctx.Data["TotalVersionCount"] = pvsTotal

@@ -39,8 +39,6 @@ type GetContentsOrListOptions struct {
TreePath string
IncludeSingleFileContent bool // include the file's content when the tree path is a file
IncludeLfsMetadata bool
-IncludeCommitMetadata bool
-IncludeCommitMessage bool
}

// GetContentsOrList gets the metadata of a file's contents (*ContentsResponse) if treePath not a tree
@@ -134,19 +132,6 @@ func getFileContentsByEntryInternal(_ context.Context, repo *repo_model.Reposito
}
selfURLString := selfURL.String()

-// All content types have these fields in populated
-contentsResponse := &api.ContentsResponse{
-Name: entry.Name(),
-Path: opts.TreePath,
-SHA: entry.ID.String(),
-Size: entry.Size(),
-URL: &selfURLString,
-Links: &api.FileLinksResponse{
-Self: &selfURLString,
-},
-}

-if opts.IncludeCommitMetadata || opts.IncludeCommitMessage {
err = gitRepo.AddLastCommitCache(repo.GetCommitsCountCacheKey(refCommit.InputRef, refType != git.RefTypeCommit), repo.FullName(), refCommit.CommitID)
if err != nil {
return nil, err
@@ -157,23 +142,30 @@ func getFileContentsByEntryInternal(_ context.Context, repo *repo_model.Reposito
return nil, err
}

-if opts.IncludeCommitMetadata {
+// All content types have these fields in populated
-contentsResponse.LastCommitSHA = util.ToPointer(lastCommit.ID.String())
+contentsResponse := &api.ContentsResponse{
+Name: entry.Name(),
+Path: opts.TreePath,
+SHA: entry.ID.String(),
+LastCommitSHA: lastCommit.ID.String(),
+Size: entry.Size(),
+URL: &selfURLString,
+Links: &api.FileLinksResponse{
+Self: &selfURLString,
+},
+}

// GitHub doesn't have these fields in the response, but we could follow other similar APIs to name them
// https://docs.github.com/en/rest/commits/commits?apiVersion=2022-11-28#list-commits
if lastCommit.Committer != nil {
-contentsResponse.LastCommitterDate = util.ToPointer(lastCommit.Committer.When)
+contentsResponse.LastCommitterDate = lastCommit.Committer.When
}
if lastCommit.Author != nil {
-contentsResponse.LastAuthorDate = util.ToPointer(lastCommit.Author.When)
+contentsResponse.LastAuthorDate = lastCommit.Author.When
-}
-if opts.IncludeCommitMessage {
-contentsResponse.LastCommitMessage = util.ToPointer(lastCommit.Message())
-}
}
+contentsResponse.LastCommitMessage = lastCommit.Message()

-// Now populate the rest of the ContentsResponse based on the entry type
+// Now populate the rest of the ContentsResponse based on entry type
if entry.IsRegular() || entry.IsExecutable() {
contentsResponse.Type = string(ContentTypeRegular)
// if it is listing the repo root dir, don't waste system resources on reading content
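Note (simplified sketch, not Gitea's actual structs): the util.ToPointer wrapping on the removed side exists so commit metadata can be omitted from the JSON response when it was not requested; the pattern looks roughly like this.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// toPointer mirrors util.ToPointer: it returns a pointer so the field can
// stay nil and be dropped via omitempty when the data was not requested.
func toPointer[T any](v T) *T { return &v }

type contentsResponse struct {
	Name              string     `json:"name"`
	LastCommitSHA     *string    `json:"last_commit_sha,omitempty"`
	LastCommitterDate *time.Time `json:"last_committer_date,omitempty"`
}

func main() {
	withMeta := contentsResponse{
		Name:              "README.md",
		LastCommitSHA:     toPointer("65f1bf27bc3bf70f64657658635e66094edbcb4d"),
		LastCommitterDate: toPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.UTC)),
	}
	withoutMeta := contentsResponse{Name: "README.md"} // commit metadata not requested
	a, _ := json.Marshal(withMeta)
	b, _ := json.Marshal(withoutMeta)
	fmt.Println(string(a), string(b))
}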
@@ -5,21 +5,57 @@ package files

import (
"testing"
+"time"

"code.gitea.io/gitea/models/unittest"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
+"code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/services/contexttest"

_ "code.gitea.io/gitea/models/actions"

"github.com/stretchr/testify/assert"
+"github.com/stretchr/testify/require"
)

func TestMain(m *testing.M) {
unittest.MainTest(m)
}

+func getExpectedReadmeContentsResponse() *api.ContentsResponse {
+treePath := "README.md"
+sha := "4b4851ad51df6a7d9f25c979345979eaeb5b349f"
+encoding := "base64"
+content := "IyByZXBvMQoKRGVzY3JpcHRpb24gZm9yIHJlcG8x"
+selfURL := "https://try.gitea.io/api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master"
+htmlURL := "https://try.gitea.io/user2/repo1/src/branch/master/" + treePath
+gitURL := "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/" + sha
+downloadURL := "https://try.gitea.io/user2/repo1/raw/branch/master/" + treePath
+return &api.ContentsResponse{
+Name: treePath,
+Path: treePath,
+SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
+LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+LastCommitterDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+LastAuthorDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+LastCommitMessage: "Initial commit\n",
+Type: "file",
+Size: 30,
+Encoding: &encoding,
+Content: &content,
+URL: &selfURL,
+HTMLURL: &htmlURL,
+GitURL: &gitURL,
+DownloadURL: &downloadURL,
+Links: &api.FileLinksResponse{
+Self: &selfURL,
+GitURL: &gitURL,
+HTMLURL: &htmlURL,
+},
+}
+}

func TestGetContents(t *testing.T) {
unittest.PrepareTestEnv(t)
ctx, _ := contexttest.MockContext(t, "user2/repo1")
@@ -28,8 +64,45 @@ func TestGetContents(t *testing.T) {
contexttest.LoadRepoCommit(t, ctx)
contexttest.LoadUser(t, ctx, 2)
contexttest.LoadGitRepo(t, ctx)
+defer ctx.Repo.GitRepo.Close()
+repo, gitRepo := ctx.Repo.Repository, ctx.Repo.GitRepo
+refCommit, err := utils.ResolveRefCommit(ctx, ctx.Repo.Repository, ctx.Repo.Repository.DefaultBranch)
+require.NoError(t, err)

-// GetContentsOrList's behavior is fully tested in integration tests, so we don't need to test it here.
+t.Run("GetContentsOrList(README.md)-MetaOnly", func(t *testing.T) {
+expectedContentsResponse := getExpectedReadmeContentsResponse()
+expectedContentsResponse.Encoding = nil // because will be in a list, doesn't have encoding and content
+expectedContentsResponse.Content = nil
+extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "README.md", IncludeSingleFileContent: false})
+assert.Equal(t, expectedContentsResponse, extResp.FileContents)
+assert.NoError(t, err)
+})

+t.Run("GetContentsOrList(README.md)", func(t *testing.T) {
+expectedContentsResponse := getExpectedReadmeContentsResponse()
+extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "README.md", IncludeSingleFileContent: true})
+assert.Equal(t, expectedContentsResponse, extResp.FileContents)
+assert.NoError(t, err)
+})

+t.Run("GetContentsOrList(RootDir)", func(t *testing.T) {
+readmeContentsResponse := getExpectedReadmeContentsResponse()
+readmeContentsResponse.Encoding = nil // because will be in a list, doesn't have encoding and content
+readmeContentsResponse.Content = nil
+expectedContentsListResponse := []*api.ContentsResponse{readmeContentsResponse}
+// even if IncludeFileContent is true, it has no effect for directory listing
+extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "", IncludeSingleFileContent: true})
+assert.Equal(t, expectedContentsListResponse, extResp.DirContents)
+assert.NoError(t, err)
+})

+t.Run("GetContentsOrList(NoSuchTreePath)", func(t *testing.T) {
+extResp, err := GetContentsOrList(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: "no-such/file.md"})
+assert.Error(t, err)
+assert.EqualError(t, err, "object does not exist [id: , rel_path: no-such]")
+assert.Nil(t, extResp.DirContents)
+assert.Nil(t, extResp.FileContents)
+})

t.Run("GetBlobBySHA", func(t *testing.T) {
sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d"
@@ -22,12 +22,7 @@ import (
func GetContentsListFromTreePaths(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, refCommit *utils.RefCommit, treePaths []string) (files []*api.ContentsResponse) {
var size int64
for _, treePath := range treePaths {
-// ok if fails, then will be nil
+fileContents, _ := GetFileContents(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{TreePath: treePath, IncludeSingleFileContent: true}) // ok if fails, then will be nil
-fileContents, _ := GetFileContents(ctx, repo, gitRepo, refCommit, GetContentsOrListOptions{
-TreePath: treePath,
-IncludeSingleFileContent: true,
-IncludeCommitMetadata: true,
-})
if fileContents != nil && fileContents.Content != nil && *fileContents.Content != "" {
// if content isn't empty (e.g., due to the single blob being too large), add file size to response size
size += int64(len(*fileContents.Content))
@@ -161,7 +161,7 @@ func newTreeViewNodeFromEntry(ctx context.Context, renderedIconPool *fileicon.Re
FullPath: path.Join(parentDir, entry.Name()),
}

-entryInfo := fileicon.EntryInfoFromGitTreeEntry(commit, node.FullPath, entry)
+entryInfo := fileicon.EntryInfoFromGitTreeEntry(entry)
node.EntryIcon = fileicon.RenderEntryIconHTML(renderedIconPool, entryInfo)
if entryInfo.EntryMode.IsDir() {
entryInfo.IsOpen = true
@@ -29,7 +29,7 @@ export default {
important: true, // the frameworks are mixed together, so tailwind needs to override other framework's styles
content: [
isProduction && '!./templates/devtest/**/*',
-isProduction && '!./web_src/js/standalone/devtest.ts',
+isProduction && '!./web_src/js/standalone/devtest.js',
'!./templates/swagger/v1_json.tmpl',
'!./templates/user/auth/oidc_wellknown.tmpl',
'!**/*_test.go',
@@ -82,8 +82,6 @@
</table>
{{end}}{{/* end if .IsFileTooLarge */}}
<div class="code-line-menu tippy-target">
-{{/*FIXME: the "HasSourceRenderedToggle" is never set on blame page, it should mean "whether the file is renderable".
-If the file is renderable, then it must has the "display=source" parameter to make sure the file view page shows the source code, then line number works. */}}
{{if $.Permission.CanRead ctx.Consts.RepoUnitTypeIssues}}
<a class="item ref-in-new-issue" role="menuitem" data-url-issue-new="{{.RepoLink}}/issues/new" data-url-param-body-link="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}" rel="nofollow noindex">{{ctx.Locale.Tr "repo.issues.context.reference_issue"}}</a>
{{end}}
@@ -108,13 +108,7 @@
</td>
<td class="two wide ui">
{{if and (not .DBBranch.IsDeleted) $.DefaultBranchBranch}}
-{{$tooltipDivergence := ""}}
+<div class="commit-divergence">
-{{if or .CommitsBehind .CommitsAhead}}
-{{$tooltipDivergence = ctx.Locale.Tr "repo.branch.commits_divergence_from" .CommitsBehind .CommitsAhead $.DefaultBranchBranch.DBBranch.Name}}
-{{else}}
-{{$tooltipDivergence = ctx.Locale.Tr "repo.branch.commits_no_divergence" $.DefaultBranchBranch.DBBranch.Name}}
-{{end}}
-<div class="commit-divergence" data-tooltip-content="{{$tooltipDivergence}}">
<div class="bar-group">
<div class="count count-behind">{{.CommitsBehind}}</div>
{{/* old code bears 0/0.0 = NaN output, so it might output invalid "width: NaNpx", it just works and doesn't caues any problem. */}}
@@ -5,7 +5,7 @@
{{range $i, $v := .TreeNames}}
<div class="breadcrumb-divider">/</div>
{{if eq $i $l}}
-<input id="file-name" maxlength="255" value="{{$v}}" placeholder="{{ctx.Locale.Tr (Iif $.PageIsUpload "repo.editor.add_subdir" "repo.editor.name_your_file")}}" data-editorconfig="{{$.EditorconfigJson}}" {{Iif $.PageIsUpload "" "required"}} autofocus>
+<input id="file-name" maxlength="255" value="{{$v}}" placeholder="{{ctx.Locale.Tr (Iif $.PageIsUpload "repo.editor.add_subdir" "repo.editor.name_your_file")}}" data-editorconfig="{{$.EditorconfigJson}}" required autofocus>
<span data-tooltip-content="{{ctx.Locale.Tr "repo.editor.filename_help"}}">{{svg "octicon-info"}}</span>
{{else}}
<span class="section"><a href="{{$.BranchLink}}/{{index $.TreePaths $i | PathEscapeSegments}}">{{$v}}</a></span>
@@ -22,7 +22,7 @@
<span class="label-filter-exclude-info">{{ctx.Locale.Tr "repo.issues.filter_label_exclude"}}</span>
<div class="divider"></div>
<a class="item label-filter-query-default" href="{{QueryBuild $queryLink "labels" NIL}}">{{ctx.Locale.Tr "repo.issues.filter_label_no_select"}}</a>
-<a class="item label-filter-query-not-set" href="{{QueryBuild $queryLink "labels" "0"}}">{{ctx.Locale.Tr "repo.issues.filter_label_select_no_label"}}</a>
+<a class="item label-filter-query-not-set" href="{{QueryBuild $queryLink "labels" 0}}">{{ctx.Locale.Tr "repo.issues.filter_label_select_no_label"}}</a>
{{/* The logic here is not the same as the label selector in the issue sidebar.
The one in the issue sidebar renders "repo labels | divider | org labels".
Maybe the logic should be updated to be consistent.*/}}
@@ -15,7 +15,7 @@
<input type="text" placeholder="{{ctx.Locale.Tr "repo.issues.filter_milestone"}}">
</div>
<div class="divider"></div>
-<a class="{{if not $.MilestoneID}}active selected {{end}}item" href="{{QueryBuild $queryLink "milestone" NIL}}">{{ctx.Locale.Tr "repo.issues.filter_milestone_all"}}</a>
+<a class="{{if not $.MilestoneID}}active selected {{end}}item" href="{{QueryBuild $queryLink "milestone" 0}}">{{ctx.Locale.Tr "repo.issues.filter_milestone_all"}}</a>
<a class="{{if $.MilestoneID}}{{if eq $.MilestoneID -1}}active selected {{end}}{{end}}item" href="{{QueryBuild $queryLink "milestone" -1}}">{{ctx.Locale.Tr "repo.issues.filter_milestone_none"}}</a>
{{if .OpenMilestones}}
<div class="divider"></div>
@@ -1,5 +1,5 @@
{{if and (or .HasIssuesOrPullsWritePermission .IsIssuePoster) (not .HasMerged) (not .Issue.IsClosed) (not .IsPullWorkInProgress)}}
-<a data-global-init="initPullRequestWipToggle" data-title="{{.Issue.Title}}" data-wip-prefix="{{index .PullRequestWorkInProgressPrefixes 0}}" data-update-url="{{.Issue.Link}}/title">
+<a class="toggle-wip tw-block tw-mt-2" data-title="{{.Issue.Title}}" data-wip-prefix="{{index .PullRequestWorkInProgressPrefixes 0}}" data-update-url="{{.Issue.Link}}/title">
{{ctx.Locale.Tr "repo.pulls.still_in_progress"}} {{ctx.Locale.Tr "repo.pulls.add_prefix" (index .PullRequestWorkInProgressPrefixes 0)}}
</a>
{{end}}
@@ -95,7 +95,7 @@
{{ctx.Locale.Tr "repo.pulls.cannot_merge_work_in_progress"}}
</div>
{{if or .HasIssuesOrPullsWritePermission .IsIssuePoster}}
-<button class="ui compact button" data-global-init="initPullRequestWipToggle" data-title="{{.Issue.Title}}" data-wip-prefix="{{.WorkInProgressPrefix}}" data-update-url="{{.Issue.Link}}/title">
+<button class="ui compact button toggle-wip" data-title="{{.Issue.Title}}" data-wip-prefix="{{.WorkInProgressPrefix}}" data-update-url="{{.Issue.Link}}/title">
{{ctx.Locale.Tr "repo.pulls.remove_prefix" .WorkInProgressPrefix}}
</button>
{{end}}
@@ -30,6 +30,8 @@
<audio controls src="{{$.RawFileLink}}">
<strong>{{ctx.Locale.Tr "repo.audio_not_supported_in_browser"}}</strong>
</audio>
+{{else if .IsPDFFile}}
+<div class="pdf-content is-loading" data-global-init="initPdfViewer" data-src="{{$.RawFileLink}}" data-fallback-button-text="{{ctx.Locale.Tr "diff.view_file"}}"></div>
{{else}}
<a href="{{$.RawFileLink}}" rel="nofollow" class="tw-p-4">{{ctx.Locale.Tr "repo.file_view_raw"}}</a>
{{end}}
@@ -1,6 +1,4 @@
-<div {{if .ReadmeInList}}id="readme"{{end}} class="{{TabSizeClass .Editorconfig .FileTreePath}} non-diff-file-content"
+<div {{if .ReadmeInList}}id="readme" {{end}}class="{{TabSizeClass .Editorconfig .FileTreePath}} non-diff-file-content">
-data-global-init="initRepoFileView" data-raw-file-link="{{.RawFileLink}}">

{{- if .FileError}}
<div class="ui error message">
<div class="text left tw-whitespace-pre">{{.FileError}}</div>
@@ -34,14 +32,13 @@
{{template "repo/file_info" .}}
{{end}}
</div>
-<div class="file-header-right file-actions flex-text-block tw-flex-wrap">
+<div class="file-header-right file-actions tw-flex tw-items-center tw-flex-wrap">
-{{/* this componment is also controlled by frontend plugin renders */}}
+{{if .HasSourceRenderedToggle}}
-<div class="ui compact icon buttons file-view-toggle-buttons {{Iif .HasSourceRenderedToggle "" "tw-hidden"}}">
+<div class="ui compact icon buttons">
-{{if .IsRepresentableAsText}}
+<a href="?display=source" class="ui mini basic button {{if .IsDisplayingSource}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_source"}}">{{svg "octicon-code" 15}}</a>
-<a href="?display=source" class="ui mini basic button file-view-toggle-source {{if .IsDisplayingSource}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_source"}}">{{svg "octicon-code" 15}}</a>
+<a href="{{$.Link}}" class="ui mini basic button {{if .IsDisplayingRendered}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_rendered"}}">{{svg "octicon-file" 15}}</a>
-{{end}}
-<a href="?display=rendered" class="ui mini basic button file-view-toggle-rendered {{if not .IsDisplayingSource}}active{{end}}" data-tooltip-content="{{ctx.Locale.Tr "repo.file_view_rendered"}}">{{svg "octicon-file" 15}}</a>
</div>
+{{end}}
{{if not .ReadmeInList}}
<div class="ui buttons tw-mr-1">
<a class="ui mini basic button" href="{{$.RawFileLink}}">{{ctx.Locale.Tr "repo.file_raw"}}</a>
@@ -58,10 +55,7 @@
{{end}}
</div>
<a download class="btn-octicon" data-tooltip-content="{{ctx.Locale.Tr "repo.download_file"}}" href="{{$.RawFileLink}}">{{svg "octicon-download"}}</a>
-<a class="btn-octicon {{if not .CanCopyContent}}disabled{{end}}" data-global-click="onCopyContentButtonClick"
+<a class="btn-octicon {{if not .CanCopyContent}} disabled{{end}}" data-global-click="onCopyContentButtonClick" {{if or .IsImageFile (and .HasSourceRenderedToggle (not .IsDisplayingSource))}} data-link="{{$.RawFileLink}}"{{end}} data-tooltip-content="{{if .CanCopyContent}}{{ctx.Locale.Tr "copy_content"}}{{else}}{{ctx.Locale.Tr "copy_type_unsupported"}}{{end}}">{{svg "octicon-copy"}}</a>
-{{if not .IsDisplayingSource}}data-raw-file-link="{{$.RawFileLink}}"{{end}}
-data-tooltip-content="{{if .CanCopyContent}}{{ctx.Locale.Tr "copy_content"}}{{else}}{{ctx.Locale.Tr "copy_type_unsupported"}}{{end}}"
->{{svg "octicon-copy"}}</a>
{{if .EnableFeed}}
<a class="btn-octicon" href="{{$.RepoLink}}/rss/{{$.RefTypeNameSubURL}}/{{PathEscapeSegments .TreePath}}" data-tooltip-content="{{ctx.Locale.Tr "rss_feed"}}">
{{svg "octicon-rss"}}
@@ -88,21 +82,38 @@
{{end}}
</div>
</h4>

<div class="ui bottom attached table unstackable segment">
-{{if not .IsMarkup}}
+{{if not (or .IsMarkup .IsRenderedHTML)}}
-{{template "repo/unicode_escape_prompt" dict "EscapeStatus" .EscapeStatus}}
+{{template "repo/unicode_escape_prompt" dict "EscapeStatus" .EscapeStatus "root" $}}
{{end}}
-<div class="file-view {{if .IsMarkup}}markup {{.MarkupType}}{{else if .IsPlainText}}plain-text{{else if .IsDisplayingSource}}code-view{{end}}">
+<div class="file-view{{if .IsMarkup}} markup {{.MarkupType}}{{else if .IsPlainText}} plain-text{{else if .IsTextSource}} code-view{{end}}">
{{if .IsFileTooLarge}}
{{template "shared/filetoolarge" dict "RawFileLink" .RawFileLink}}
{{else if not .FileSize}}
{{template "shared/fileisempty"}}
{{else if .IsMarkup}}
-{{.FileContent}}
+{{if .FileContent}}{{.FileContent}}{{end}}
{{else if .IsPlainText}}
<pre>{{if .FileContent}}{{.FileContent}}{{end}}</pre>
-{{else if .FileContent}}
+{{else if not .IsTextSource}}
+<div class="view-raw">
+{{if .IsImageFile}}
+<img alt="{{$.RawFileLink}}" src="{{$.RawFileLink}}">
+{{else if .IsVideoFile}}
+<video controls src="{{$.RawFileLink}}">
+<strong>{{ctx.Locale.Tr "repo.video_not_supported_in_browser"}}</strong>
+</video>
+{{else if .IsAudioFile}}
+<audio controls src="{{$.RawFileLink}}">
+<strong>{{ctx.Locale.Tr "repo.audio_not_supported_in_browser"}}</strong>
+</audio>
+{{else if .IsPDFFile}}
+<div class="pdf-content is-loading" data-global-init="initPdfViewer" data-src="{{$.RawFileLink}}" data-fallback-button-text="{{ctx.Locale.Tr "repo.diff.view_file"}}"></div>
+{{else}}
+<a href="{{$.RawFileLink}}" rel="nofollow" class="tw-p-4">{{ctx.Locale.Tr "repo.file_view_raw"}}</a>
+{{end}}
+</div>
+{{else if .FileSize}}
<table>
<tbody>
{{range $idx, $code := .FileContent}}
@@ -117,29 +128,6 @@
{{end}}
</tbody>
</table>
-{{else}}
-<div class="view-raw">
-{{if .IsImageFile}}
-<img alt="{{$.RawFileLink}}" src="{{$.RawFileLink}}">
-{{else if .IsVideoFile}}
-<video controls src="{{$.RawFileLink}}">
-<strong>{{ctx.Locale.Tr "repo.video_not_supported_in_browser"}}</strong>
-</video>
-{{else if .IsAudioFile}}
-<audio controls src="{{$.RawFileLink}}">
-<strong>{{ctx.Locale.Tr "repo.audio_not_supported_in_browser"}}</strong>
-</audio>
-{{else}}
-<div class="file-view-render-container">
-<div class="file-view-raw-prompt tw-p-4">
-<a href="{{$.RawFileLink}}" rel="nofollow">{{ctx.Locale.Tr "repo.file_view_raw"}}</a>
-</div>
-</div>
-{{end}}
-</div>
-{{end}}
-</div>

<div class="code-line-menu tippy-target">
{{if $.Permission.CanRead ctx.Consts.RepoUnitTypeIssues}}
<a class="item ref-in-new-issue" role="menuitem" data-url-issue-new="{{.RepoLink}}/issues/new" data-url-param-body-link="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}" rel="nofollow noindex">{{ctx.Locale.Tr "repo.issues.context.reference_issue"}}</a>
@@ -147,5 +135,7 @@
<a class="item view_git_blame" role="menuitem" href="{{.Repository.Link}}/blame/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}">{{ctx.Locale.Tr "repo.view_git_blame"}}</a>
<a class="item copy-line-permalink" role="menuitem" data-url="{{.Repository.Link}}/src/commit/{{PathEscape .CommitID}}/{{PathEscapeSegments .TreePath}}{{if $.HasSourceRenderedToggle}}?display=source{{end}}">{{ctx.Locale.Tr "repo.file_copy_permalink"}}</a>
</div>
+{{end}}
+</div>
</div>
</div>
@@ -41,9 +41,6 @@
</a>
{{else}}
<a class="entry-name" href="{{$.TreeLink}}/{{PathEscapeSegments $entry.Name}}" title="{{$entry.Name}}">{{$entry.Name}}</a>
-{{if $entry.IsLink}}
-<a class="entry-symbol-link flex-text-inline" data-tooltip-content title="{{ctx.Locale.Tr "repo.find_file.follow_symlink"}}" href="{{$.TreeLink}}/{{PathEscapeSegments $entry.Name}}?follow_symlink=1">{{svg "octicon-link" 12}}</a>
-{{end}}
{{end}}
{{end}}
</div>
templates/swagger/v1_json.tmpl (generated)
@@ -7547,7 +7547,7 @@
},
{
"type": "string",
-"description": "path of the dir, file, symlink or submodule in the repo. Swagger requires path parameter to be \"required\", you can leave it empty or pass a single dot (\".\") to get the root directory.",
+"description": "path of the dir, file, symlink or submodule in the repo",
"name": "filepath",
"in": "path",
"required": true
@@ -7560,7 +7560,7 @@
},
{
"type": "string",
-"description": "By default this API's response only contains file's metadata. Use comma-separated \"includes\" options to retrieve more fields. Option \"file_content\" will try to retrieve the file content, \"lfs_metadata\" will try to retrieve LFS metadata, \"commit_metadata\" will try to retrieve commit metadata, and \"commit_message\" will try to retrieve commit message.",
+"description": "By default this API's response only contains file's metadata. Use comma-separated \"includes\" options to retrieve more fields. Option \"file_content\" will try to retrieve the file content, option \"lfs_metadata\" will try to retrieve LFS metadata.",
"name": "includes",
"in": "query"
}
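Note (hypothetical request, for context only): the "includes" parameter documented in this hunk belongs to the contents endpoint used by the test fixtures above; against the try.gitea.io URLs seen there a call could look like

curl "https://try.gitea.io/api/v1/repos/user2/repo1/contents/README.md?ref=master&includes=file_content,commit_metadata,commit_message"

where the commit_metadata and commit_message options only exist on the side of the diff whose description mentions them.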
@ -157,14 +157,9 @@ gpgkey=%sapi/packages/%s/rpm/repository.key`,
|
|||||||
t.Run("Download", func(t *testing.T) {
|
t.Run("Download", func(t *testing.T) {
|
||||||
defer tests.PrintCurrentTest(t)()
|
defer tests.PrintCurrentTest(t)()
|
||||||
|
|
||||||
// download the package without the file name
|
|
||||||
req := NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture))
|
req := NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture))
|
||||||
resp := MakeRequest(t, req, http.StatusOK)
|
resp := MakeRequest(t, req, http.StatusOK)
|
||||||
assert.Equal(t, content, resp.Body.Bytes())
|
|
||||||
|
|
||||||
// download the package with a file name (it can be anything)
|
|
||||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s/any-file-name", groupURL, packageName, packageVersion, packageArchitecture))
|
|
||||||
resp = MakeRequest(t, req, http.StatusOK)
|
|
||||||
assert.Equal(t, content, resp.Body.Bytes())
|
assert.Equal(t, content, resp.Body.Bytes())
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -452,8 +447,7 @@ gpgkey=%sapi/packages/%s/rpm/repository.key`,
|
|||||||
pub, err := openpgp.ReadArmoredKeyRing(gpgResp.Body)
|
pub, err := openpgp.ReadArmoredKeyRing(gpgResp.Body)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
rpmFileName := fmt.Sprintf("%s-%s.%s.rpm", packageName, packageVersion, packageArchitecture)
|
req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture))
|
||||||
req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s/%s", groupURL, packageName, packageVersion, packageArchitecture, rpmFileName))
|
|
||||||
resp := MakeRequest(t, req, http.StatusOK)
|
resp := MakeRequest(t, req, http.StatusOK)
|
||||||
|
|
||||||
_, sigs, err := rpmutils.Verify(resp.Body, pub)
|
_, sigs, err := rpmutils.Verify(resp.Body, pub)
|
||||||
|
@ -19,7 +19,6 @@ import (
|
|||||||
"code.gitea.io/gitea/modules/gitrepo"
|
"code.gitea.io/gitea/modules/gitrepo"
|
||||||
"code.gitea.io/gitea/modules/setting"
|
"code.gitea.io/gitea/modules/setting"
|
||||||
api "code.gitea.io/gitea/modules/structs"
|
api "code.gitea.io/gitea/modules/structs"
|
||||||
"code.gitea.io/gitea/modules/util"
|
|
||||||
"code.gitea.io/gitea/services/context"
|
"code.gitea.io/gitea/services/context"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@ -53,12 +52,12 @@ func getCreateFileOptions() api.CreateFileOptions {
|
|||||||
func normalizeFileContentResponseCommitTime(c *api.ContentsResponse) {
|
func normalizeFileContentResponseCommitTime(c *api.ContentsResponse) {
|
||||||
// decoded JSON response may contain different timezone from the one parsed by git commit
|
// decoded JSON response may contain different timezone from the one parsed by git commit
|
||||||
// so we need to normalize the time to UTC to make "assert.Equal" pass
|
// so we need to normalize the time to UTC to make "assert.Equal" pass
|
||||||
c.LastCommitterDate = util.ToPointer(c.LastCommitterDate.UTC())
|
c.LastCommitterDate = c.LastCommitterDate.UTC()
|
||||||
c.LastAuthorDate = util.ToPointer(c.LastAuthorDate.UTC())
|
c.LastAuthorDate = c.LastAuthorDate.UTC()
|
||||||
}
|
}
|
||||||
|
|
||||||
type apiFileResponseInfo struct {
|
type apiFileResponseInfo struct {
|
||||||
repoFullName, commitID, treePath, lastCommitSHA string
|
repoFullName, commitID, treePath, lastCommitSHA, lastCommitMessage string
|
||||||
lastCommitterWhen, lastAuthorWhen time.Time
|
lastCommitterWhen, lastAuthorWhen time.Time
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -75,9 +74,10 @@ func getExpectedFileResponseForCreate(info apiFileResponseInfo) *api.FileRespons
|
|||||||
Name: path.Base(info.treePath),
|
Name: path.Base(info.treePath),
|
||||||
Path: info.treePath,
|
Path: info.treePath,
|
||||||
SHA: sha,
|
SHA: sha,
|
||||||
LastCommitSHA: util.ToPointer(info.lastCommitSHA),
|
LastCommitSHA: info.lastCommitSHA,
|
||||||
LastCommitterDate: util.ToPointer(info.lastCommitterWhen),
|
LastCommitterDate: info.lastCommitterWhen,
|
||||||
LastAuthorDate: util.ToPointer(info.lastAuthorWhen),
|
LastAuthorDate: info.lastAuthorWhen,
|
||||||
|
LastCommitMessage: info.lastCommitMessage,
|
||||||
Size: 16,
|
Size: 16,
|
||||||
Type: "file",
|
Type: "file",
|
||||||
Encoding: &encoding,
|
Encoding: &encoding,
|
||||||
@@ -194,6 +194,7 @@ func TestAPICreateFile(t *testing.T) {
 lastCommitSHA: lastCommit.ID.String(),
 lastCommitterWhen: lastCommit.Committer.When,
 lastAuthorWhen: lastCommit.Author.When,
+lastCommitMessage: lastCommit.Message(),
 })
 var fileResponse api.FileResponse
 DecodeJSON(t, resp, &fileResponse)
@@ -18,7 +18,6 @@ import (
 "code.gitea.io/gitea/modules/gitrepo"
 "code.gitea.io/gitea/modules/setting"
 api "code.gitea.io/gitea/modules/structs"
-"code.gitea.io/gitea/modules/util"
 "code.gitea.io/gitea/services/context"
 
 "github.com/stretchr/testify/assert"
@@ -61,9 +60,9 @@ func getExpectedFileResponseForUpdate(info apiFileResponseInfo) *api.FileRespons
 Name: path.Base(info.treePath),
 Path: info.treePath,
 SHA: sha,
-LastCommitSHA: util.ToPointer(info.lastCommitSHA),
-LastCommitterDate: util.ToPointer(info.lastCommitterWhen),
-LastAuthorDate: util.ToPointer(info.lastAuthorWhen),
+LastCommitSHA: info.lastCommitSHA,
+LastCommitterDate: info.lastCommitterWhen,
+LastAuthorDate: info.lastAuthorWhen,
 Type: "file",
 Size: 20,
 Encoding: &encoding,
@@ -18,7 +18,6 @@ import (
 "code.gitea.io/gitea/modules/gitrepo"
 "code.gitea.io/gitea/modules/setting"
 api "code.gitea.io/gitea/modules/structs"
-"code.gitea.io/gitea/modules/util"
 repo_service "code.gitea.io/gitea/services/repository"
 
 "github.com/stretchr/testify/assert"
@@ -36,9 +35,10 @@ func getExpectedContentsListResponseForContents(ref, refType, lastCommitSHA stri
 Name: path.Base(treePath),
 Path: treePath,
 SHA: sha,
-LastCommitSHA: util.ToPointer(lastCommitSHA),
-LastCommitterDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
-LastAuthorDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
+LastCommitSHA: lastCommitSHA,
+LastCommitterDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+LastAuthorDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+LastCommitMessage: "Initial commit",
 Type: "file",
 Size: 30,
 URL: &selfURL,
@@ -66,6 +66,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) // public repo
 repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) // public repo
 repo16 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 16}) // private repo
+treePath := "" // root dir
 
 // Get user2's token
 session := loginUser(t, user2.Name)
@@ -94,7 +95,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 // ref is default ref
 ref := repo1.DefaultBranch
 refType := "branch"
-req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents?ref=%s", user2.Name, repo1.Name, ref)
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
 resp := MakeRequest(t, req, http.StatusOK)
 var contentsListResponse []*api.ContentsResponse
 DecodeJSON(t, resp, &contentsListResponse)
@@ -106,7 +107,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 
 // No ref
 refType = "branch"
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", user2.Name, repo1.Name)
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s", user2.Name, repo1.Name, treePath)
 resp = MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &contentsListResponse)
 assert.NotNil(t, contentsListResponse)
@@ -117,7 +118,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 // ref is the branch we created above in setup
 ref = newBranch
 refType = "branch"
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents?ref=%s", user2.Name, repo1.Name, ref)
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
 resp = MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &contentsListResponse)
 assert.NotNil(t, contentsListResponse)
@@ -131,7 +132,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 // ref is the new tag we created above in setup
 ref = newTag
 refType = "tag"
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/?ref=%s", user2.Name, repo1.Name, ref)
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
 resp = MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &contentsListResponse)
 assert.NotNil(t, contentsListResponse)
@@ -145,7 +146,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 // ref is a commit
 ref = commitID
 refType = "commit"
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/?ref=%s", user2.Name, repo1.Name, ref)
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
 resp = MakeRequest(t, req, http.StatusOK)
 DecodeJSON(t, resp, &contentsListResponse)
 assert.NotNil(t, contentsListResponse)
@@ -154,21 +155,21 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) {
 
 // Test file contents a file with a bad ref
 ref = "badref"
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/?ref=%s", user2.Name, repo1.Name, ref)
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
 MakeRequest(t, req, http.StatusNotFound)
 
 // Test accessing private ref with user token that does not have access - should fail
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", user2.Name, repo16.Name).
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s", user2.Name, repo16.Name, treePath).
 AddTokenAuth(token4)
 MakeRequest(t, req, http.StatusNotFound)
 
 // Test access private ref of owner of token
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", user2.Name, repo16.Name).
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/readme.md", user2.Name, repo16.Name).
 AddTokenAuth(token2)
 MakeRequest(t, req, http.StatusOK)
 
 // Test access of org org3 private repo file by owner user2
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/", org3.Name, repo3.Name).
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s", org3.Name, repo3.Name, treePath).
 AddTokenAuth(token2)
 MakeRequest(t, req, http.StatusOK)
 }
@@ -35,9 +35,10 @@ func getExpectedContentsResponseForContents(ref, refType, lastCommitSHA string)
 Name: treePath,
 Path: treePath,
 SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
-LastCommitSHA: util.ToPointer(lastCommitSHA),
-LastCommitterDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
-LastAuthorDate: util.ToPointer(time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400))),
+LastCommitSHA: lastCommitSHA,
+LastCommitterDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+LastAuthorDate: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+LastCommitMessage: "Initial commit",
 Type: "file",
 Size: 30,
 Encoding: util.ToPointer("base64"),
@@ -97,16 +98,11 @@ func testAPIGetContents(t *testing.T, u *url.URL) {
 require.NoError(t, err)
 /*** END SETUP ***/
 
-// not found
-req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/no-such/file.md", user2.Name, repo1.Name)
-resp := MakeRequest(t, req, http.StatusNotFound)
-assert.Contains(t, resp.Body.String(), "object does not exist [id: , rel_path: no-such]")
-
 // ref is default ref
 ref := repo1.DefaultBranch
 refType := "branch"
-req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
-resp = MakeRequest(t, req, http.StatusOK)
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/contents/%s?ref=%s", user2.Name, repo1.Name, treePath, ref)
+resp := MakeRequest(t, req, http.StatusOK)
 var contentsResponse api.ContentsResponse
 DecodeJSON(t, resp, &contentsResponse)
 lastCommit, _ := gitRepo.GetCommitByPath("README.md")
@@ -211,30 +207,14 @@ func testAPIGetContentsExt(t *testing.T) {
 session := loginUser(t, "user2")
 token2 := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
 t.Run("DirContents", func(t *testing.T) {
-req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext?ref=sub-home-md-img-check")
+req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs?ref=sub-home-md-img-check")
 resp := MakeRequest(t, req, http.StatusOK)
 var contentsResponse api.ContentsExtResponse
 DecodeJSON(t, resp, &contentsResponse)
 assert.Nil(t, contentsResponse.FileContents)
-assert.NotNil(t, contentsResponse.DirContents)
-
-req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/.?ref=sub-home-md-img-check")
-resp = MakeRequest(t, req, http.StatusOK)
-contentsResponse = api.ContentsExtResponse{}
-DecodeJSON(t, resp, &contentsResponse)
-assert.Nil(t, contentsResponse.FileContents)
-assert.NotNil(t, contentsResponse.DirContents)
-
-req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs?ref=sub-home-md-img-check")
-resp = MakeRequest(t, req, http.StatusOK)
-contentsResponse = api.ContentsExtResponse{}
-DecodeJSON(t, resp, &contentsResponse)
-assert.Nil(t, contentsResponse.FileContents)
 assert.Equal(t, "README.md", contentsResponse.DirContents[0].Name)
 assert.Nil(t, contentsResponse.DirContents[0].Encoding)
 assert.Nil(t, contentsResponse.DirContents[0].Content)
-assert.Nil(t, contentsResponse.DirContents[0].LastCommitSHA)
-assert.Nil(t, contentsResponse.DirContents[0].LastCommitMessage)
 
 // "includes=file_content" shouldn't affect directory listing
 req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs?ref=sub-home-md-img-check&includes=file_content")
@@ -261,7 +241,7 @@ func testAPIGetContentsExt(t *testing.T) {
 assert.Equal(t, util.ToPointer("0b8d8b5f15046343fd32f451df93acc2bdd9e6373be478b968e4cad6b6647351"), respFile.LfsOid)
 })
 t.Run("FileContents", func(t *testing.T) {
-// by default, no file content or commit info is returned
+// by default, no file content is returned
 req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs/README.md?ref=sub-home-md-img-check")
 resp := MakeRequest(t, req, http.StatusOK)
 var contentsResponse api.ContentsExtResponse
@@ -270,11 +250,9 @@ func testAPIGetContentsExt(t *testing.T) {
 assert.Equal(t, "README.md", contentsResponse.FileContents.Name)
 assert.Nil(t, contentsResponse.FileContents.Encoding)
 assert.Nil(t, contentsResponse.FileContents.Content)
-assert.Nil(t, contentsResponse.FileContents.LastCommitSHA)
-assert.Nil(t, contentsResponse.FileContents.LastCommitMessage)
 
 // file content is only returned when `includes=file_content`
-req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs/README.md?ref=sub-home-md-img-check&includes=file_content,commit_metadata,commit_message")
+req = NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/contents-ext/docs/README.md?ref=sub-home-md-img-check&includes=file_content")
 resp = MakeRequest(t, req, http.StatusOK)
 contentsResponse = api.ContentsExtResponse{}
 DecodeJSON(t, resp, &contentsResponse)
@@ -282,8 +260,6 @@ func testAPIGetContentsExt(t *testing.T) {
 assert.Equal(t, "README.md", contentsResponse.FileContents.Name)
 assert.NotNil(t, contentsResponse.FileContents.Encoding)
 assert.NotNil(t, contentsResponse.FileContents.Content)
-assert.Equal(t, "4649299398e4d39a5c09eb4f534df6f1e1eb87cc", *contentsResponse.FileContents.LastCommitSHA)
-assert.Equal(t, "Test how READMEs render images when found in a subfolder\n", *contentsResponse.FileContents.LastCommitMessage)
 
 req = NewRequestf(t, "GET", "/api/v1/repos/user2/lfs/contents-ext/jpeg.jpg?includes=file_content").AddTokenAuth(token2)
 resp = session.MakeRequest(t, req, http.StatusOK)
@@ -295,8 +271,6 @@ func testAPIGetContentsExt(t *testing.T) {
 assert.Equal(t, "jpeg.jpg", respFile.Name)
 assert.NotNil(t, respFile.Encoding)
 assert.NotNil(t, respFile.Content)
-assert.Nil(t, contentsResponse.FileContents.LastCommitSHA)
-assert.Nil(t, contentsResponse.FileContents.LastCommitMessage)
 assert.Equal(t, util.ToPointer(int64(107)), respFile.LfsSize)
 assert.Equal(t, util.ToPointer("0b8d8b5f15046343fd32f451df93acc2bdd9e6373be478b968e4cad6b6647351"), respFile.LfsOid)
 })
@@ -68,15 +68,14 @@ func TestLFSRender(t *testing.T) {
 req := NewRequest(t, "GET", "/user2/lfs/src/branch/master/crypt.bin")
 resp := session.MakeRequest(t, req, http.StatusOK)
 
-doc := NewHTMLParser(t, resp.Body)
+doc := NewHTMLParser(t, resp.Body).doc
 
 fileInfo := doc.Find("div.file-info-entry").First().Text()
 assert.Contains(t, fileInfo, "LFS")
 
-// find new file view container
-fileViewContainer := doc.Find("[data-global-init=initRepoFileView]")
-assert.Equal(t, "/user2/lfs/media/branch/master/crypt.bin", fileViewContainer.AttrOr("data-raw-file-link", ""))
-AssertHTMLElement(t, doc, ".view-raw > .file-view-render-container > .file-view-raw-prompt", 1)
+rawLink, exists := doc.Find("div.file-view > div.view-raw > a").Attr("href")
+assert.True(t, exists, "Download link should render instead of content because this is a binary file")
+assert.Equal(t, "/user2/lfs/media/branch/master/crypt.bin", rawLink, "The download link should use the proper /media link because it's in LFS")
 })
 
 // check that a directory with a README file shows its text
@@ -27,7 +27,6 @@ import (
 
 "github.com/PuerkitoBio/goquery"
 "github.com/stretchr/testify/assert"
-"github.com/stretchr/testify/require"
 )
 
 func TestRepoView(t *testing.T) {
@@ -42,7 +41,6 @@ func TestRepoView(t *testing.T) {
 t.Run("BlameFileInRepo", testBlameFileInRepo)
 t.Run("ViewRepoDirectory", testViewRepoDirectory)
 t.Run("ViewRepoDirectoryReadme", testViewRepoDirectoryReadme)
-t.Run("ViewRepoSymlink", testViewRepoSymlink)
 t.Run("MarkDownReadmeImage", testMarkDownReadmeImage)
 t.Run("MarkDownReadmeImageSubfolder", testMarkDownReadmeImageSubfolder)
 t.Run("GeneratedSourceLink", testGeneratedSourceLink)
@@ -414,21 +412,6 @@ func testViewRepoDirectoryReadme(t *testing.T) {
 missing("symlink-loop", "/user2/readme-test/src/branch/symlink-loop/")
 }
 
-func testViewRepoSymlink(t *testing.T) {
-session := loginUser(t, "user2")
-req := NewRequest(t, "GET", "/user2/readme-test/src/branch/symlink")
-resp := session.MakeRequest(t, req, http.StatusOK)
-
-htmlDoc := NewHTMLParser(t, resp.Body)
-AssertHTMLElement(t, htmlDoc, ".entry-symbol-link", true)
-followSymbolLinkHref := htmlDoc.Find(".entry-symbol-link").AttrOr("href", "")
-require.Equal(t, "/user2/readme-test/src/branch/symlink/README.md?follow_symlink=1", followSymbolLinkHref)
-
-req = NewRequest(t, "GET", followSymbolLinkHref)
-resp = session.MakeRequest(t, req, http.StatusSeeOther)
-assert.Equal(t, "/user2/readme-test/src/branch/symlink/some/other/path/awefulcake.txt?follow_symlink=1", resp.Header().Get("Location"))
-}
-
 func testMarkDownReadmeImage(t *testing.T) {
 defer tests.PrintCurrentTest(t)()
 
@@ -155,9 +155,10 @@ func getExpectedFileResponseForRepoFilesCreate(commitID string, lastCommit *git.
 Name: path.Base(treePath),
 Path: treePath,
 SHA: "103ff9234cefeee5ec5361d22b49fbb04d385885",
-LastCommitSHA: util.ToPointer(lastCommit.ID.String()),
-LastCommitterDate: util.ToPointer(lastCommit.Committer.When),
-LastAuthorDate: util.ToPointer(lastCommit.Author.When),
+LastCommitSHA: lastCommit.ID.String(),
+LastCommitterDate: lastCommit.Committer.When,
+LastAuthorDate: lastCommit.Author.When,
+LastCommitMessage: "Creates new/file.txt\n",
 Type: "file",
 Size: 18,
 Encoding: &encoding,
@@ -225,9 +226,10 @@ func getExpectedFileResponseForRepoFilesUpdate(commitID, filename, lastCommitSHA
 Name: filename,
 Path: filename,
 SHA: "dbf8d00e022e05b7e5cf7e535de857de57925647",
-LastCommitSHA: util.ToPointer(lastCommitSHA),
-LastCommitterDate: util.ToPointer(lastCommitterWhen),
-LastAuthorDate: util.ToPointer(lastAuthorWhen),
+LastCommitSHA: lastCommitSHA,
+LastCommitterDate: lastCommitterWhen,
+LastAuthorDate: lastAuthorWhen,
+LastCommitMessage: "Updates README.md\n",
 Type: "file",
 Size: 43,
 Encoding: &encoding,
@@ -331,7 +333,8 @@ func getExpectedFileResponseForRepoFilesUpdateRename(commitID, lastCommitSHA str
 Name: detail.filename,
 Path: detail.filename,
 SHA: detail.sha,
-LastCommitSHA: util.ToPointer(lastCommitSHA),
+LastCommitSHA: lastCommitSHA,
+LastCommitMessage: "Rename files\n",
 Type: "file",
 Size: detail.size,
 Encoding: util.ToPointer("base64"),
@@ -537,7 +540,7 @@ func TestChangeRepoFilesForUpdateWithFileRename(t *testing.T) {
 lastCommit, _ := commit.GetCommitByPath(opts.Files[0].TreePath)
 expectedFileResponse := getExpectedFileResponseForRepoFilesUpdateRename(commit.ID.String(), lastCommit.ID.String())
 for _, file := range filesResponse.Files {
-file.LastCommitterDate, file.LastAuthorDate = nil, nil // there might be different time in one operation, so we ignore them
+file.LastCommitterDate, file.LastAuthorDate = time.Time{}, time.Time{} // there might be different time in one operation, so we ignore them
 }
 assert.Len(t, filesResponse.Files, 4)
 assert.Equal(t, expectedFileResponse.Files, filesResponse.Files)
@@ -52,7 +52,8 @@ form.single-button-form.is-loading .button {
 }
 
 .markup pre.is-loading,
-.editor-loading.is-loading {
+.editor-loading.is-loading,
+.pdf-content.is-loading {
 height: var(--height-loading);
 }
 
@@ -183,6 +183,42 @@ td .commit-summary {
 cursor: default;
 }
 
+.view-raw {
+display: flex;
+justify-content: center;
+align-items: center;
+}
+
+.view-raw > * {
+max-width: 100%;
+}
+
+.view-raw audio,
+.view-raw video,
+.view-raw img {
+margin: 1rem 0;
+border-radius: 0;
+object-fit: contain;
+}
+
+.view-raw img[src$=".svg" i] {
+max-height: 600px !important;
+max-width: 600px !important;
+}
+
+.pdf-content {
+width: 100%;
+height: 600px;
+border: none !important;
+display: flex;
+align-items: center;
+justify-content: center;
+}
+
+.pdf-content .pdf-fallback-button {
+margin: 50px auto;
+}
+
 .repository.file.list .non-diff-file-content .plain-text {
 padding: 1em 2em;
 }
@@ -205,6 +241,10 @@ td .commit-summary {
 padding: 0 !important;
 }
 
+.non-diff-file-content .pdfobject {
+border-radius: 0 0 var(--border-radius) var(--border-radius);
+}
+
 .repo-editor-header {
 width: 100%;
 }
@@ -60,33 +60,3 @@
 .file-view.code-view .ui.button.code-line-button:hover {
 background: var(--color-secondary);
 }
-
-.view-raw {
-display: flex;
-justify-content: center;
-}
-
-.view-raw > * {
-max-width: 100%;
-}
-
-.view-raw audio,
-.view-raw video,
-.view-raw img {
-margin: 1rem;
-border-radius: 0;
-object-fit: contain;
-}
-
-.view-raw img[src$=".svg" i] {
-max-height: 600px !important;
-max-width: 600px !important;
-}
-
-.file-view-render-container {
-width: 100%;
-}
-
-.file-view-render-container :last-child {
-border-radius: 0 0 var(--border-radius) var(--border-radius); /* to match the "ui segment" bottom radius */
-}
@@ -2,7 +2,6 @@
 // to make sure the error handler always works, we should never import `window.config`, because
 // some user's custom template breaks it.
 import type {Intent} from './types.ts';
-import {html} from './utils/html.ts';
 
 // This sets up the URL prefix used in webpack's chunk loading.
 // This file must be imported before any lazy-loading is being attempted.
@@ -24,7 +23,7 @@ export function showGlobalErrorMessage(msg: string, msgType: Intent = 'error') {
 let msgDiv = msgContainer.querySelector<HTMLDivElement>(`.js-global-error[data-global-error-msg-compact="${msgCompact}"]`);
 if (!msgDiv) {
 const el = document.createElement('div');
-el.innerHTML = html`<div class="ui container js-global-error tw-my-[--page-spacing]"><div class="ui ${msgType} message tw-text-center tw-whitespace-pre-line"></div></div>`;
+el.innerHTML = `<div class="ui container js-global-error tw-my-[--page-spacing]"><div class="ui ${msgType} message tw-text-center tw-whitespace-pre-line"></div></div>`;
 msgDiv = el.childNodes[0] as HTMLDivElement;
 }
 // merge duplicated messages into "the message (count)" format
@@ -2,7 +2,6 @@ import {reactive} from 'vue';
 import {GET} from '../modules/fetch.ts';
 import {pathEscapeSegments} from '../utils/url.ts';
 import {createElementFromHTML} from '../utils/dom.ts';
-import {html} from '../utils/html.ts';
 
 export function createViewFileTreeStore(props: { repoLink: string, treePath: string, currentRefNameSubURL: string}) {
 const store = reactive({
@@ -17,7 +16,7 @@ export function createViewFileTreeStore(props: { repoLink: string, treePath: str
 if (!document.querySelector(`.global-svg-icon-pool #${svgId}`)) poolSvgs.push(svgContent);
 }
 if (poolSvgs.length) {
-const svgContainer = createElementFromHTML(html`<div class="global-svg-icon-pool tw-hidden"></div>`);
+const svgContainer = createElementFromHTML('<div class="global-svg-icon-pool tw-hidden"></div>');
 svgContainer.innerHTML = poolSvgs.join('');
 document.body.append(svgContainer);
 }
@@ -43,16 +43,13 @@ export function initGlobalDeleteButton(): void {
 
 fomanticQuery(modal).modal({
 closable: false,
-onApprove: () => {
+onApprove: async () => {
 // if `data-type="form"` exists, then submit the form by the selector provided by `data-form="..."`
 if (btn.getAttribute('data-type') === 'form') {
 const formSelector = btn.getAttribute('data-form');
 const form = document.querySelector<HTMLFormElement>(formSelector);
 if (!form) throw new Error(`no form named ${formSelector} found`);
-modal.classList.add('is-loading'); // the form is not in the modal, so also add loading indicator to the modal
-form.classList.add('is-loading');
 form.submit();
-return false; // prevent modal from closing automatically
 }
 
 // prepare an AJAX form by data attributes
@@ -65,15 +62,12 @@ export function initGlobalDeleteButton(): void {
 postData.append('id', value);
 }
 }
-(async () => {
 const response = await POST(btn.getAttribute('data-url'), {data: postData});
 if (response.ok) {
 const data = await response.json();
 window.location.href = data.redirect;
 }
-})();
-modal.classList.add('is-loading'); // the request is in progress, so also add loading indicator to the modal
-return false; // prevent modal from closing automatically
 },
 }).modal('show');
 });
@@ -164,7 +158,13 @@ function onShowModalClick(el: HTMLElement, e: MouseEvent) {
 }
 }
 
-fomanticQuery(elModal).modal('show');
+fomanticQuery(elModal).modal('setting', {
+onApprove: () => {
+// "form-fetch-action" can handle network errors gracefully,
+// so keep the modal dialog to make users can re-submit the form if anything wrong happens.
+if (elModal.querySelector('.form-fetch-action')) return false;
+},
+}).modal('show');
 }
 
 export function initGlobalButtons(): void {
@@ -1,5 +1,5 @@
 import {svg} from '../../svg.ts';
-import {html, htmlRaw} from '../../utils/html.ts';
+import {htmlEscape} from 'escape-goat';
 import {createElementFromHTML} from '../../utils/dom.ts';
 import {fomanticQuery} from '../../modules/fomantic/base.ts';
 
@@ -12,17 +12,17 @@ type ConfirmModalOptions = {
 }
 
 export function createConfirmModal({header = '', content = '', confirmButtonColor = 'primary'}:ConfirmModalOptions = {}): HTMLElement {
-const headerHtml = header ? html`<div class="header">${header}</div>` : '';
-return createElementFromHTML(html`
+const headerHtml = header ? `<div class="header">${htmlEscape(header)}</div>` : '';
+return createElementFromHTML(`
 <div class="ui g-modal-confirm modal">
-${htmlRaw(headerHtml)}
-<div class="content">${content}</div>
+${headerHtml}
+<div class="content">${htmlEscape(content)}</div>
 <div class="actions">
-<button class="ui cancel button">${htmlRaw(svg('octicon-x'))} ${i18n.modal_cancel}</button>
-<button class="ui ${confirmButtonColor} ok button">${htmlRaw(svg('octicon-check'))} ${i18n.modal_confirm}</button>
+<button class="ui cancel button">${svg('octicon-x')} ${htmlEscape(i18n.modal_cancel)}</button>
+<button class="ui ${confirmButtonColor} ok button">${svg('octicon-check')} ${htmlEscape(i18n.modal_confirm)}</button>
 </div>
 </div>
-`.trim());
+`);
 }
 
 export function confirmModal(modal: HTMLElement | ConfirmModalOptions): Promise<boolean> {
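The left-hand side of the ConfirmModal hunk above builds its markup with an `html` tagged template plus an `htmlRaw` wrapper instead of hand-escaping with `htmlEscape` from escape-goat. The implementation of that helper lives in the `../utils/html.ts` module referenced by the imports and is not part of this patch; the sketch below is only an assumption of how such a helper typically works (all names and behaviour here are illustrative, not taken from the diff):

```ts
// Minimal sketch of an escape-by-default tagged template (assumed, not the real utils/html.ts).
function escapeHtml(s: string): string {
  return s
    .replace(/&/g, '&amp;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;');
}

// Wrapper type marking a string as already-safe HTML that must not be escaped again.
class RawHtml {
  constructor(public readonly value: string) {}
}

export function htmlRaw(s: string): RawHtml {
  return new RawHtml(s);
}

// Tagged template: every interpolated value is escaped unless wrapped in htmlRaw().
export function html(strings: TemplateStringsArray, ...values: unknown[]): string {
  let out = strings[0];
  for (let i = 0; i < values.length; i++) {
    const v = values[i];
    out += v instanceof RawHtml ? v.value : escapeHtml(String(v));
    out += strings[i + 1];
  }
  return out;
}
```

With a helper along these lines, interpolations are escaped by default and only values explicitly wrapped in `htmlRaw()` (for example the SVG icon markup) are inserted verbatim, which is why the right-hand side of the hunk has to re-add explicit `htmlEscape()` calls around user-controlled values.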
@@ -114,7 +114,7 @@ async function handleUploadFiles(editor: CodeMirrorEditor | TextareaEditor, drop
 
 export function removeAttachmentLinksFromMarkdown(text: string, fileUuid: string) {
 text = text.replace(new RegExp(`!?\\[([^\\]]+)\\]\\(/?attachments/${fileUuid}\\)`, 'g'), '');
-text = text.replace(new RegExp(`[<]img[^>]+src="/?attachments/${fileUuid}"[^>]*>`, 'g'), '');
+text = text.replace(new RegExp(`<img[^>]+src="/?attachments/${fileUuid}"[^>]*>`, 'g'), '');
 return text;
 }
 
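Both sides of the hunk above implement the same cleanup; only the `[<]img` versus `<img` spelling of the second pattern differs. A small usage sketch, with an invented UUID and a hypothetical import path, shows what the two regular expressions remove:

```ts
import {removeAttachmentLinksFromMarkdown} from './dropzone.ts'; // hypothetical import path

const uuid = '123e4567-e89b-12d3-a456-426614174000'; // made-up attachment UUID
const markdown = `intro text
![screenshot](/attachments/${uuid})
<img width="600" alt="screenshot" src="attachments/${uuid}">
outro text`;

// Both the markdown image link and the raw <img> tag referencing the deleted
// attachment are stripped; unrelated lines are left untouched.
console.log(removeAttachmentLinksFromMarkdown(markdown, uuid));
```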
@@ -72,7 +72,6 @@ export function initCompLabelEdit(pageSelector: string) {
 return false;
 }
 submitFormFetchAction(form);
-return false;
 },
 }).modal('show');
 };
@@ -1,4 +1,4 @@
-import {htmlEscape} from '../../utils/html.ts';
+import {htmlEscape} from 'escape-goat';
 import {fomanticQuery} from '../../modules/fomantic/base.ts';
 
 const {appSubUrl} = window.config;
@@ -9,17 +9,17 @@ const {i18n} = window.config;
 export function initCopyContent() {
 registerGlobalEventFunc('click', 'onCopyContentButtonClick', async (btn: HTMLElement) => {
 if (btn.classList.contains('disabled') || btn.classList.contains('is-loading')) return;
-const rawFileLink = btn.getAttribute('data-raw-file-link');
+let content;
+let isRasterImage = false;
+const link = btn.getAttribute('data-link');
 
-let content, isRasterImage = false;
-// when "data-raw-link" is present, we perform a fetch. this is either because
-// the text to copy is not in the DOM, or it is an image that should be
+// when data-link is present, we perform a fetch. this is either because
+// the text to copy is not in the DOM, or it is an image which should be
 // fetched to copy in full resolution
-if (rawFileLink) {
+if (link) {
 btn.classList.add('is-loading', 'loading-icon-2px');
 try {
-const res = await GET(rawFileLink, {credentials: 'include', redirect: 'follow'});
+const res = await GET(link, {credentials: 'include', redirect: 'follow'});
 const contentType = res.headers.get('content-type');
 
 if (contentType.startsWith('image/') && !contentType.startsWith('image/svg')) {
@@ -1,5 +1,5 @@
 import {svg} from '../svg.ts';
-import {html} from '../utils/html.ts';
+import {htmlEscape} from 'escape-goat';
 import {clippie} from 'clippie';
 import {showTemporaryTooltip} from '../modules/tippy.ts';
 import {GET, POST} from '../modules/fetch.ts';
@@ -33,14 +33,14 @@ export function generateMarkdownLinkForAttachment(file: Partial<CustomDropzoneFi
 // Scale down images from HiDPI monitors. This uses the <img> tag because it's the only
 // method to change image size in Markdown that is supported by all implementations.
 // Make the image link relative to the repo path, then the final URL is "/sub-path/owner/repo/attachments/{uuid}"
-fileMarkdown = html`<img width="${Math.round(width / dppx)}" alt="${file.name}" src="attachments/${file.uuid}">`;
+fileMarkdown = `<img width="${Math.round(width / dppx)}" alt="${htmlEscape(file.name)}" src="attachments/${htmlEscape(file.uuid)}">`;
 } else {
 // Markdown always renders the image with a relative path, so the final URL is "/sub-path/owner/repo/attachments/{uuid}"
 // TODO: it should also use relative path for consistency, because absolute is ambiguous for "/sub-path/attachments" or "/attachments"
 fileMarkdown = ``;
 }
 } else if (isVideoFile(file)) {
-fileMarkdown = html`<video src="attachments/${file.uuid}" title="${file.name}" controls></video>`;
+fileMarkdown = `<video src="attachments/${htmlEscape(file.uuid)}" title="${htmlEscape(file.name)}" controls></video>`;
 }
 return fileMarkdown;
 }
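Both versions of the hunk above emit the same kind of snippet; only the escaping mechanism differs. For orientation, roughly what the function returns for the two branches shown, with invented file names, sizes, and UUID:

```ts
// For a 1600px-wide raster image captured at dppx = 2, the <img> branch yields
// something like this (values invented for illustration):
const imageMarkdown =
  '<img width="800" alt="screenshot.png" src="attachments/123e4567-e89b-12d3-a456-426614174000">';

// The video branch yields a <video> tag referencing the same relative attachment path:
const videoMarkdown =
  '<video src="attachments/123e4567-e89b-12d3-a456-426614174000" title="demo.webm" controls></video>';
```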
|
@ -1,5 +1,4 @@
|
|||||||
import emojis from '../../../assets/emoji.json' with {type: 'json'};
|
import emojis from '../../../assets/emoji.json' with {type: 'json'};
|
||||||
import {html} from '../utils/html.ts';
|
|
||||||
|
|
||||||
const {assetUrlPrefix, customEmojis} = window.config;
|
const {assetUrlPrefix, customEmojis} = window.config;
|
||||||
|
|
||||||
@ -25,11 +24,12 @@ for (const key of emojiKeys) {
|
|||||||
export function emojiHTML(name: string) {
|
export function emojiHTML(name: string) {
|
||||||
let inner;
|
let inner;
|
||||||
if (Object.hasOwn(customEmojis, name)) {
|
if (Object.hasOwn(customEmojis, name)) {
|
||||||
inner = html`<img alt=":${name}:" src="${assetUrlPrefix}/img/emoji/${name}.png">`;
|
inner = `<img alt=":${name}:" src="${assetUrlPrefix}/img/emoji/${name}.png">`;
|
||||||
} else {
|
} else {
|
||||||
inner = emojiString(name);
|
inner = emojiString(name);
|
||||||
}
|
}
|
||||||
return html`<span class="emoji" title=":${name}:">${inner}</span>`;
|
|
||||||
|
return `<span class="emoji" title=":${name}:">${inner}</span>`;
|
||||||
}
|
}
|
||||||
|
|
||||||
// retrieve string for given emoji name
|
// retrieve string for given emoji name
|
||||||
|
@ -1,76 +0,0 @@
|
|||||||
import type {FileRenderPlugin} from '../render/plugin.ts';
|
|
||||||
import {newRenderPlugin3DViewer} from '../render/plugins/3d-viewer.ts';
|
|
||||||
import {newRenderPluginPdfViewer} from '../render/plugins/pdf-viewer.ts';
|
|
||||||
import {registerGlobalInitFunc} from '../modules/observer.ts';
|
|
||||||
import {createElementFromHTML, showElem, toggleClass} from '../utils/dom.ts';
|
|
||||||
import {html} from '../utils/html.ts';
|
|
||||||
import {basename} from '../utils.ts';
|
|
||||||
|
|
||||||
const plugins: FileRenderPlugin[] = [];
|
|
||||||
|
|
||||||
function initPluginsOnce(): void {
|
|
||||||
if (plugins.length) return;
|
|
||||||
plugins.push(newRenderPlugin3DViewer(), newRenderPluginPdfViewer());
|
|
||||||
}
|
|
||||||
|
|
||||||
function findFileRenderPlugin(filename: string, mimeType: string): FileRenderPlugin | null {
|
|
||||||
return plugins.find((plugin) => plugin.canHandle(filename, mimeType)) || null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function showRenderRawFileButton(elFileView: HTMLElement, renderContainer: HTMLElement | null): void {
|
|
||||||
const toggleButtons = elFileView.querySelector('.file-view-toggle-buttons');
|
|
||||||
showElem(toggleButtons);
|
|
||||||
const displayingRendered = Boolean(renderContainer);
|
|
||||||
toggleClass(toggleButtons.querySelectorAll('.file-view-toggle-source'), 'active', !displayingRendered); // it may not exist
|
|
||||||
toggleClass(toggleButtons.querySelector('.file-view-toggle-rendered'), 'active', displayingRendered);
|
|
||||||
// TODO: if there is only one button, hide it?
|
|
||||||
}
|
|
||||||
|
|
||||||
async function renderRawFileToContainer(container: HTMLElement, rawFileLink: string, mimeType: string) {
|
|
||||||
const elViewRawPrompt = container.querySelector('.file-view-raw-prompt');
|
|
||||||
if (!rawFileLink || !elViewRawPrompt) throw new Error('unexpected file view container');
|
|
||||||
|
|
||||||
let rendered = false, errorMsg = '';
|
|
||||||
try {
|
|
||||||
const plugin = findFileRenderPlugin(basename(rawFileLink), mimeType);
|
|
||||||
if (plugin) {
|
|
||||||
container.classList.add('is-loading');
|
|
||||||
container.setAttribute('data-render-name', plugin.name); // not used yet
|
|
||||||
await plugin.render(container, rawFileLink);
|
|
||||||
rendered = true;
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
errorMsg = `${e}`;
|
|
||||||
} finally {
|
|
||||||
container.classList.remove('is-loading');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (rendered) {
|
|
||||||
elViewRawPrompt.remove();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// remove all children from the container, and only show the raw file link
|
|
||||||
container.replaceChildren(elViewRawPrompt);
|
|
||||||
|
|
||||||
if (errorMsg) {
|
|
||||||
const elErrorMessage = createElementFromHTML(html`<div class="ui error message">${errorMsg}</div>`);
|
|
||||||
elViewRawPrompt.insertAdjacentElement('afterbegin', elErrorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function initRepoFileView(): void {
|
|
||||||
registerGlobalInitFunc('initRepoFileView', async (elFileView: HTMLElement) => {
|
|
||||||
initPluginsOnce();
|
|
||||||
const rawFileLink = elFileView.getAttribute('data-raw-file-link');
|
|
||||||
const mimeType = elFileView.getAttribute('data-mime-type') || ''; // not used yet
|
|
||||||
// TODO: we should also provide the prefetched file head bytes to let the plugin decide whether to render or not
|
|
||||||
const plugin = findFileRenderPlugin(basename(rawFileLink), mimeType);
|
|
||||||
if (!plugin) return;
|
|
||||||
|
|
||||||
const renderContainer = elFileView.querySelector<HTMLElement>('.file-view-render-container');
|
|
||||||
showRenderRawFileButton(elFileView, renderContainer);
|
|
||||||
// maybe in the future multiple plugins can render the same file, so we should not assume only one plugin will render it
|
|
||||||
if (renderContainer) await renderRawFileToContainer(renderContainer, rawFileLink, mimeType);
|
|
||||||
});
|
|
||||||
}
|
|
@ -1,4 +1,4 @@
|
|||||||
import {html, htmlRaw} from '../utils/html.ts';
|
import {htmlEscape} from 'escape-goat';
|
||||||
import {createCodeEditor} from './codeeditor.ts';
|
import {createCodeEditor} from './codeeditor.ts';
|
||||||
import {hideElem, queryElems, showElem, createElementFromHTML} from '../utils/dom.ts';
|
import {hideElem, queryElems, showElem, createElementFromHTML} from '../utils/dom.ts';
|
||||||
import {attachRefIssueContextPopup} from './contextpopup.ts';
|
import {attachRefIssueContextPopup} from './contextpopup.ts';
|
||||||
@ -87,10 +87,10 @@ export function initRepoEditor() {
|
|||||||
if (i < parts.length - 1) {
|
if (i < parts.length - 1) {
|
||||||
if (trimValue.length) {
|
if (trimValue.length) {
|
||||||
const linkElement = createElementFromHTML(
|
const linkElement = createElementFromHTML(
|
||||||
html`<span class="section"><a href="#">${value}</a></span>`,
|
`<span class="section"><a href="#">${htmlEscape(value)}</a></span>`,
|
||||||
);
|
);
|
||||||
const dividerElement = createElementFromHTML(
|
const dividerElement = createElementFromHTML(
|
||||||
html`<div class="breadcrumb-divider">/</div>`,
|
`<div class="breadcrumb-divider">/</div>`,
|
||||||
);
|
);
|
||||||
links.push(linkElement);
|
links.push(linkElement);
|
||||||
dividers.push(dividerElement);
|
dividers.push(dividerElement);
|
||||||
@ -113,7 +113,7 @@ export function initRepoEditor() {
|
|||||||
if (!warningDiv) {
|
if (!warningDiv) {
|
||||||
warningDiv = document.createElement('div');
|
warningDiv = document.createElement('div');
|
||||||
warningDiv.classList.add('ui', 'warning', 'message', 'flash-message', 'flash-warning', 'space-related');
|
warningDiv.classList.add('ui', 'warning', 'message', 'flash-message', 'flash-warning', 'space-related');
|
||||||
warningDiv.innerHTML = html`<p>File path contains leading or trailing whitespace.</p>`;
|
warningDiv.innerHTML = '<p>File path contains leading or trailing whitespace.</p>';
|
||||||
// Add display 'block' because display is set to 'none' in formantic\build\semantic.css
|
// Add display 'block' because display is set to 'none' in formantic\build\semantic.css
|
||||||
warningDiv.style.display = 'block';
|
warningDiv.style.display = 'block';
|
||||||
const inputContainer = document.querySelector('.repo-editor-header');
|
const inputContainer = document.querySelector('.repo-editor-header');
|
||||||
@ -196,8 +196,7 @@ export function initRepoEditor() {
|
|||||||
})();
|
})();
|
||||||
}
|
}
|
||||||
|
|
||||||
export function renderPreviewPanelContent(previewPanel: Element, htmlContent: string) {
|
export function renderPreviewPanelContent(previewPanel: Element, content: string) {
|
||||||
// the content is from the server, so it is safe to use innerHTML
|
previewPanel.innerHTML = `<div class="render-content markup">${content}</div>`;
|
||||||
previewPanel.innerHTML = html`<div class="render-content markup">${htmlRaw(htmlContent)}</div>`;
|
|
||||||
attachRefIssueContextPopup(previewPanel.querySelectorAll('p .ref-issue'));
|
attachRefIssueContextPopup(previewPanel.querySelectorAll('p .ref-issue'));
|
||||||
}
|
}
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
import {updateIssuesMeta} from './repo-common.ts';
|
import {updateIssuesMeta} from './repo-common.ts';
|
||||||
import {toggleElem, queryElems, isElemVisible} from '../utils/dom.ts';
|
import {toggleElem, queryElems, isElemVisible} from '../utils/dom.ts';
|
||||||
import {html} from '../utils/html.ts';
|
import {htmlEscape} from 'escape-goat';
|
||||||
import {confirmModal} from './comp/ConfirmModal.ts';
|
import {confirmModal} from './comp/ConfirmModal.ts';
|
||||||
import {showErrorToast} from '../modules/toast.ts';
|
import {showErrorToast} from '../modules/toast.ts';
|
||||||
import {createSortable} from '../modules/sortable.ts';
|
import {createSortable} from '../modules/sortable.ts';
|
||||||
@ -138,10 +138,10 @@ function initDropdownUserRemoteSearch(el: Element) {
|
|||||||
// the content is provided by backend IssuePosters handler
|
// the content is provided by backend IssuePosters handler
|
||||||
processedResults.length = 0;
|
processedResults.length = 0;
|
||||||
for (const item of resp.results) {
|
for (const item of resp.results) {
|
||||||
let nameHtml = html`<img class="ui avatar tw-align-middle" src="${item.avatar_link}" aria-hidden="true" alt width="20" height="20"><span class="gt-ellipsis">${item.username}</span>`;
|
let html = `<img class="ui avatar tw-align-middle" src="${htmlEscape(item.avatar_link)}" aria-hidden="true" alt width="20" height="20"><span class="gt-ellipsis">${htmlEscape(item.username)}</span>`;
|
||||||
if (item.full_name) nameHtml += html`<span class="search-fullname tw-ml-2">${item.full_name}</span>`;
|
if (item.full_name) html += `<span class="search-fullname tw-ml-2">${htmlEscape(item.full_name)}</span>`;
|
||||||
if (selectedUsername.toLowerCase() === item.username.toLowerCase()) selectedUsername = item.username;
|
if (selectedUsername.toLowerCase() === item.username.toLowerCase()) selectedUsername = item.username;
|
||||||
processedResults.push({value: item.username, name: nameHtml});
|
processedResults.push({value: item.username, name: html});
|
||||||
}
|
}
|
||||||
resp.results = processedResults;
|
resp.results = processedResults;
|
||||||
return resp;
|
return resp;
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
import {html, htmlEscape} from '../utils/html.ts';
|
import {htmlEscape} from 'escape-goat';
|
||||||
import {createTippy, showTemporaryTooltip} from '../modules/tippy.ts';
|
import {createTippy, showTemporaryTooltip} from '../modules/tippy.ts';
|
||||||
import {
|
import {
|
||||||
addDelegatedEventListener,
|
addDelegatedEventListener,
|
||||||
@@ -17,7 +17,6 @@ import {showErrorToast} from '../modules/toast.ts';
  import {initRepoIssueSidebar} from './repo-issue-sidebar.ts';
  import {fomanticQuery} from '../modules/fomantic/base.ts';
  import {ignoreAreYouSure} from '../vendor/jquery.are-you-sure.ts';
- import {registerGlobalInitFunc} from '../modules/observer.ts';

  const {appSubUrl} = window.config;

@@ -46,7 +45,8 @@ export function initRepoIssueSidebarDependency() {
  if (String(issue.id) === currIssueId) continue;
  filteredResponse.results.push({
  value: issue.id,
- name: html`<div class="gt-ellipsis">#${issue.number} ${issue.title}</div><div class="text small tw-break-anywhere">${issue.repository.full_name}</div>`,
+ name: `<div class="gt-ellipsis">#${issue.number} ${htmlEscape(issue.title)}</div>
+ <div class="text small tw-break-anywhere">${htmlEscape(issue.repository.full_name)}</div>`,
  });
  }
  return filteredResponse;
@@ -416,20 +416,25 @@ export function initRepoIssueWipNewTitle() {

  export function initRepoIssueWipToggle() {
  // Toggle WIP for existing PR
- registerGlobalInitFunc('initPullRequestWipToggle', (toggleWip) => toggleWip.addEventListener('click', async (e) => {
+ queryElems(document, '.toggle-wip', (el) => el.addEventListener('click', async (e) => {
  e.preventDefault();
+ const toggleWip = el;
  const title = toggleWip.getAttribute('data-title');
  const wipPrefix = toggleWip.getAttribute('data-wip-prefix');
  const updateUrl = toggleWip.getAttribute('data-update-url');

+ try {
  const params = new URLSearchParams();
  params.append('title', title?.startsWith(wipPrefix) ? title.slice(wipPrefix.length).trim() : `${wipPrefix.trim()} ${title}`);

  const response = await POST(updateUrl, {data: params});
  if (!response.ok) {
- showErrorToast(`Failed to toggle 'work in progress' status`);
- return;
+ throw new Error('Failed to toggle WIP status');
  }
  window.location.reload();
+ } catch (error) {
+ console.error(error);
+ }
  }));
  }

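For context on the hunk above: the two sides also differ in how the click handler is wired up. The removed side goes through `registerGlobalInitFunc` from the observer module, which (inferred from the rest of the codebase, not shown in this excerpt) is driven by a `data-global-init` attribute on the button, while the added side binds to every element carrying the `.toggle-wip` class. A rough TypeScript sketch of the two styles; the markup hints in the comments are assumptions, not code from either commit:

import {registerGlobalInitFunc} from '../modules/observer.ts';
import {queryElems} from '../utils/dom.ts';

// removed side: the template opts in per element, e.g. <button data-global-init="initPullRequestWipToggle"> (assumed attribute)
registerGlobalInitFunc('initPullRequestWipToggle', (toggleWip: HTMLElement) => {
  toggleWip.addEventListener('click', () => { /* toggle the WIP prefix on the PR title */ });
});

// added side: every element with the class is wired up once during page init
queryElems(document, '.toggle-wip', (el: HTMLElement) => {
  el.addEventListener('click', () => { /* toggle the WIP prefix on the PR title */ });
});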
@@ -1,5 +1,5 @@
  import {hideElem, querySingleVisibleElem, showElem, toggleElem} from '../utils/dom.ts';
- import {htmlEscape} from '../utils/html.ts';
+ import {htmlEscape} from 'escape-goat';
  import {fomanticQuery} from '../modules/fomantic/base.ts';
  import {sanitizeRepoName} from './repo-common.ts';

@@ -2,7 +2,6 @@ import {validateTextareaNonEmpty, initComboMarkdownEditor} from './comp/ComboMar
  import {fomanticMobileScreen} from '../modules/fomantic.ts';
  import {POST} from '../modules/fetch.ts';
  import type {ComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
- import {html, htmlRaw} from '../utils/html.ts';

  async function initRepoWikiFormEditor() {
  const editArea = document.querySelector<HTMLTextAreaElement>('.repository.wiki .combo-markdown-editor textarea');
@@ -31,7 +30,7 @@ async function initRepoWikiFormEditor() {
  const response = await POST(editor.previewUrl, {data: formData});
  const data = await response.text();
  lastContent = newContent;
- previewTarget.innerHTML = html`<div class="render-content markup ui segment">${htmlRaw(data)}</div>`;
+ previewTarget.innerHTML = `<div class="render-content markup ui segment">${data}</div>`;
  } catch (error) {
  console.error('Error rendering preview:', error);
  } finally {
@@ -1,5 +1,5 @@
  import {emojiKeys, emojiHTML, emojiString} from './emoji.ts';
- import {html, htmlRaw} from '../utils/html.ts';
+ import {htmlEscape} from 'escape-goat';

  type TributeItem = Record<string, any>;

@@ -26,18 +26,17 @@ export async function attachTribute(element: HTMLElement) {
  return emojiString(item.original);
  },
  menuItemTemplate: (item: TributeItem) => {
- return html`<div class="tribute-item">${htmlRaw(emojiHTML(item.original))}<span>${item.original}</span></div>`;
+ return `<div class="tribute-item">${emojiHTML(item.original)}<span>${htmlEscape(item.original)}</span></div>`;
  },
  }, { // mentions
  values: window.config.mentionValues ?? [],
  requireLeadingSpace: true,
  menuItemTemplate: (item: TributeItem) => {
- const fullNameHtml = item.original.fullname && item.original.fullname !== '' ? html`<span class="fullname">${item.original.fullname}</span>` : '';
- return html`
+ return `
  <div class="tribute-item">
- <img alt src="${item.original.avatar}" width="21" height="21"/>
- <span class="name">${item.original.name}</span>
- ${htmlRaw(fullNameHtml)}
+ <img alt src="${htmlEscape(item.original.avatar)}" width="21" height="21"/>
+ <span class="name">${htmlEscape(item.original.name)}</span>
+ ${item.original.fullname && item.original.fullname !== '' ? `<span class="fullname">${htmlEscape(item.original.fullname)}</span>` : ''}
  </div>
  `;
  },
@@ -19,7 +19,7 @@ import {initRepoIssueContentHistory} from './features/repo-issue-content.ts';
  import {initStopwatch} from './features/stopwatch.ts';
  import {initFindFileInRepo} from './features/repo-findfile.ts';
  import {initMarkupContent} from './markup/content.ts';
- import {initRepoFileView} from './features/file-view.ts';
+ import {initPdfViewer} from './render/pdf.ts';
  import {initUserAuthOauth2, initUserCheckAppUrl} from './features/user-auth.ts';
  import {initRepoPullRequestAllowMaintainerEdit, initRepoPullRequestReview, initRepoIssueSidebarDependency, initRepoIssueFilterItemLabel} from './features/repo-issue.ts';
  import {initRepoEllipsisButton, initCommitStatuses} from './features/repo-commit.ts';
@@ -159,11 +159,10 @@ onDomReady(() => {
  initUserAuthWebAuthnRegister,
  initUserSettings,
  initRepoDiffView,
+ initPdfViewer,
  initColorPickers,

  initOAuth2SettingsDisableCheckbox,

- initRepoFileView,
  ]);

  // it must be the last one, then the "querySelectorAll" only needs to be executed once for global init functions.
@@ -1,4 +1,4 @@
- import {html, htmlRaw} from '../utils/html.ts';
+ import {htmlEscape} from 'escape-goat';

  type Processor = (el: HTMLElement) => string | HTMLElement | void;

@@ -38,10 +38,10 @@ function prepareProcessors(ctx:ProcessorContext): Processors {
  IMG(el: HTMLElement) {
  const alt = el.getAttribute('alt') || 'image';
  const src = el.getAttribute('src');
- const widthAttr = el.hasAttribute('width') ? htmlRaw` width="${el.getAttribute('width') || ''}"` : '';
- const heightAttr = el.hasAttribute('height') ? htmlRaw` height="${el.getAttribute('height') || ''}"` : '';
+ const widthAttr = el.hasAttribute('width') ? ` width="${htmlEscape(el.getAttribute('width') || '')}"` : '';
+ const heightAttr = el.hasAttribute('height') ? ` height="${htmlEscape(el.getAttribute('height') || '')}"` : '';
  if (widthAttr || heightAttr) {
- return html`<img alt="${alt}"${widthAttr}${heightAttr} src="${src}">`;
+ return `<img alt="${htmlEscape(alt)}"${widthAttr}${heightAttr} src="${htmlEscape(src)}">`;
  }
  return ``;
  },
@@ -2,7 +2,6 @@ import {isDarkTheme} from '../utils.ts';
  import {makeCodeCopyButton} from './codecopy.ts';
  import {displayError} from './common.ts';
  import {queryElems} from '../utils/dom.ts';
- import {html, htmlRaw} from '../utils/html.ts';

  const {mermaidMaxSourceCharacters} = window.config;

@@ -47,7 +46,7 @@ export async function initMarkupCodeMermaid(elMarkup: HTMLElement): Promise<void

  const iframe = document.createElement('iframe');
  iframe.classList.add('markup-content-iframe', 'tw-invisible');
- iframe.srcdoc = html`<html><head><style>${htmlRaw(iframeCss)}</style></head><body>${htmlRaw(svg)}</body></html>`;
+ iframe.srcdoc = `<html><head><style>${iframeCss}</style></head><body>${svg}</body></html>`;

  const mermaidBlock = document.createElement('div');
  mermaidBlock.classList.add('mermaid-block', 'is-loading', 'tw-hidden');
@@ -9,9 +9,8 @@ const fomanticModalFn = $.fn.modal;
  export function initAriaModalPatch() {
  if ($.fn.modal === ariaModalFn) throw new Error('initAriaModalPatch could only be called once');
  $.fn.modal = ariaModalFn;
- (ariaModalFn as FomanticInitFunction).settings = fomanticModalFn.settings;
  $.fn.fomanticExt.onModalBeforeHidden = onModalBeforeHidden;
- $.fn.modal.settings.onApprove = onModalApproveDefault;
+ (ariaModalFn as FomanticInitFunction).settings = fomanticModalFn.settings;
  }

  // the patched `$.fn.modal` modal function
@@ -35,29 +34,6 @@ function ariaModalFn(this: any, ...args: Parameters<FomanticInitFunction>) {
  function onModalBeforeHidden(this: any) {
  const $modal = $(this);
  const elModal = $modal[0];
- hideToastsFrom(elModal.closest('.ui.dimmer') ?? document.body);
-
- // reset the form after the modal is hidden, after other modal events and handlers (e.g. "onApprove", form submit)
- setTimeout(() => {
  queryElems(elModal, 'form', (form: HTMLFormElement) => form.reset());
- }, 0);
- }
-
- function onModalApproveDefault(this: any) {
- const $modal = $(this);
- const selectors = $modal.modal('setting', 'selector');
- const elModal = $modal[0];
- const elApprove = elModal.querySelector(selectors.approve);
- const elForm = elApprove?.closest('form');
- if (!elForm) return true; // no form, just allow closing the modal
-
- // "form-fetch-action" can handle network errors gracefully,
- // so keep the modal dialog to make users can re-submit the form if anything wrong happens.
- if (elForm.matches('.form-fetch-action')) return false;
-
- // There is an abuse for the "modal" + "form" combination, the "Approve" button is a traditional form submit button in the form.
- // Then "approve" and "submit" occur at the same time, the modal will be closed immediately before the form is submitted.
- // So here we prevent the modal from closing automatically by returning false, add the "is-loading" class to the form element.
- elForm.classList.add('is-loading');
- return false;
+ hideToastsFrom(elModal.closest('.ui.dimmer') ?? document.body);
  }
@@ -2,7 +2,6 @@ import tippy, {followCursor} from 'tippy.js';
  import {isDocumentFragmentOrElementNode} from '../utils/dom.ts';
  import {formatDatetime} from '../utils/time.ts';
  import type {Content, Instance, Placement, Props} from 'tippy.js';
- import {html} from '../utils/html.ts';

  type TippyOpts = {
  role?: string,
@@ -10,7 +9,7 @@ type TippyOpts = {
  } & Partial<Props>;

  const visibleInstances = new Set<Instance>();
- const arrowSvg = html`<svg width="16" height="7"><path d="m0 7 8-7 8 7Z" class="tippy-svg-arrow-outer"/><path d="m0 8 8-7 8 7Z" class="tippy-svg-arrow-inner"/></svg>`;
+ const arrowSvg = `<svg width="16" height="7"><path d="m0 7 8-7 8 7Z" class="tippy-svg-arrow-outer"/><path d="m0 8 8-7 8 7Z" class="tippy-svg-arrow-inner"/></svg>`;

  export function createTippy(target: Element, opts: TippyOpts = {}): Instance {
  // the callback functions should be destructured from opts,
@@ -1,4 +1,4 @@
- import {htmlEscape} from '../utils/html.ts';
+ import {htmlEscape} from 'escape-goat';
  import {svg} from '../svg.ts';
  import {animateOnce, queryElems, showElem} from '../utils/dom.ts';
  import Toastify from 'toastify-js'; // don't use "async import", because when network error occurs, the "async import" also fails and nothing is shown
web_src/js/render/pdf.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
+ import {htmlEscape} from 'escape-goat';
+ import {registerGlobalInitFunc} from '../modules/observer.ts';
+
+ export async function initPdfViewer() {
+ registerGlobalInitFunc('initPdfViewer', async (el: HTMLInputElement) => {
+ const pdfobject = await import(/* webpackChunkName: "pdfobject" */'pdfobject');
+
+ const src = el.getAttribute('data-src');
+ const fallbackText = el.getAttribute('data-fallback-button-text');
+ pdfobject.embed(src, el, {
+ fallbackLink: htmlEscape`
+ <a role="button" class="ui basic button pdf-fallback-button" href="[url]">${fallbackText}</a>
+ `,
+ });
+ el.classList.remove('is-loading');
+ });
+ }
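For context on the added file: escape-goat's `htmlEscape` also works as a template tag, escaping only the interpolated values while leaving the literal markup untouched, and PDFObject substitutes the URL of the embedded document for the `[url]` token inside `fallbackLink`. A minimal standalone sketch of the same pattern, assuming a hypothetical target element and PDF path:

import {htmlEscape} from 'escape-goat';
import PDFObject from 'pdfobject';

const el = document.querySelector<HTMLElement>('.pdf-view-target'); // hypothetical target element
const fallbackText = 'Download & view the PDF'; // note the raw "&"

if (el) {
  PDFObject.embed('/attachments/manual.pdf', el, { // hypothetical PDF URL
    // only the ${...} value is escaped ("&" becomes "&amp;"); PDFObject replaces "[url]" with the PDF URL
    fallbackLink: htmlEscape`<a href="[url]">${fallbackText}</a>`,
  });
}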
@@ -1,10 +0,0 @@
- export type FileRenderPlugin = {
- // unique plugin name
- name: string;
-
- // test if plugin can handle a specified file
- canHandle: (filename: string, mimeType: string) => boolean;
-
- // render file content
- render: (container: HTMLElement, fileUrl: string, options?: any) => Promise<void>;
- }
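For context on the removed interface: its consumer (the file-view wiring) is not part of this excerpt, but roughly, a dispatcher would pick the first plugin whose canHandle accepts the file and let it render into the container. A hedged sketch of that idea; the registry, function name and import paths below are guesses for illustration, not code from either commit:

import type {FileRenderPlugin} from './plugin.ts';
import {newRenderPluginPdfViewer} from './plugins/pdf-viewer.ts'; // assumed path of the removed plugin file
import {newRenderPlugin3DViewer} from './plugins/3d-viewer.ts'; // assumed path of the removed plugin file

// hypothetical registry; the real list lived in the removed file-view code
const plugins: FileRenderPlugin[] = [newRenderPluginPdfViewer(), newRenderPlugin3DViewer()];

export async function renderFileWithPlugin(container: HTMLElement, filename: string, mimeType: string, fileUrl: string): Promise<boolean> {
  // the first plugin that claims the file wins; callers fall back to the plain file view otherwise
  const plugin = plugins.find((p) => p.canHandle(filename, mimeType));
  if (!plugin) return false;
  await plugin.render(container, fileUrl);
  return true;
}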
@@ -1,60 +0,0 @@
- import type {FileRenderPlugin} from '../plugin.ts';
- import {extname} from '../../utils.ts';
-
- // support common 3D model file formats, use online-3d-viewer library for rendering
-
- // eslint-disable-next-line multiline-comment-style
- /* a simple text STL file example:
- solid SimpleTriangle
- facet normal 0 0 1
- outer loop
- vertex 0 0 0
- vertex 1 0 0
- vertex 0 1 0
- endloop
- endfacet
- endsolid SimpleTriangle
- */
-
- export function newRenderPlugin3DViewer(): FileRenderPlugin {
- // Some extensions are text-based formats:
- // .3mf .amf .brep: XML
- // .fbx: XML or BINARY
- // .dae .gltf: JSON
- // .ifc, .igs, .iges, .stp, .step are: TEXT
- // .stl .ply: TEXT or BINARY
- // .obj .off .wrl: TEXT
- // So we need to be able to render when the file is recognized as plaintext file by backend.
- //
- // It needs more logic to make it overall right (render a text 3D model automatically):
- // we need to distinguish the ambiguous filename extensions.
- // For example: "*.obj, *.off, *.step" might be or not be a 3D model file.
- // So when it is a text file, we can't assume that "we only render it by 3D plugin",
- // otherwise the end users would be impossible to view its real content when the file is not a 3D model.
- const SUPPORTED_EXTENSIONS = [
- '.3dm', '.3ds', '.3mf', '.amf', '.bim', '.brep',
- '.dae', '.fbx', '.fcstd', '.glb', '.gltf',
- '.ifc', '.igs', '.iges', '.stp', '.step',
- '.stl', '.obj', '.off', '.ply', '.wrl',
- ];
-
- return {
- name: '3d-model-viewer',
-
- canHandle(filename: string, _mimeType: string): boolean {
- const ext = extname(filename).toLowerCase();
- return SUPPORTED_EXTENSIONS.includes(ext);
- },
-
- async render(container: HTMLElement, fileUrl: string): Promise<void> {
- // TODO: height and/or max-height?
- const OV = await import(/* webpackChunkName: "online-3d-viewer" */'online-3d-viewer');
- const viewer = new OV.EmbeddedViewer(container, {
- backgroundColor: new OV.RGBAColor(59, 68, 76, 0),
- defaultColor: new OV.RGBColor(65, 131, 196),
- edgeSettings: new OV.EdgeSettings(false, new OV.RGBColor(0, 0, 0), 1),
- });
- viewer.LoadModelFromUrlList([fileUrl]);
- },
- };
- }
@@ -1,20 +0,0 @@
- import type {FileRenderPlugin} from '../plugin.ts';
-
- export function newRenderPluginPdfViewer(): FileRenderPlugin {
- return {
- name: 'pdf-viewer',
-
- canHandle(filename: string, _mimeType: string): boolean {
- return filename.toLowerCase().endsWith('.pdf');
- },
-
- async render(container: HTMLElement, fileUrl: string): Promise<void> {
- const PDFObject = await import(/* webpackChunkName: "pdfobject" */'pdfobject');
- // TODO: the PDFObject library does not support dynamic height adjustment,
- container.style.height = `${window.innerHeight - 100}px`;
- if (!PDFObject.default.embed(fileUrl, container)) {
- throw new Error('Unable to render the PDF file');
- }
- },
- };
- }
@@ -1,6 +1,5 @@
  import {defineComponent, h, type PropType} from 'vue';
  import {parseDom, serializeXml} from './utils.ts';
- import {html, htmlRaw} from './utils/html.ts';
  import giteaDoubleChevronLeft from '../../public/assets/img/svg/gitea-double-chevron-left.svg';
  import giteaDoubleChevronRight from '../../public/assets/img/svg/gitea-double-chevron-right.svg';
  import giteaEmptyCheckbox from '../../public/assets/img/svg/gitea-empty-checkbox.svg';
@@ -221,7 +220,7 @@ export const SvgIcon = defineComponent({
  const classes = Array.from(svgOuter.classList);
  if (this.symbolId) {
  classes.push('tw-hidden', 'svg-symbol-container');
- svgInnerHtml = html`<symbol id="${this.symbolId}" viewBox="${attrs['^viewBox']}">${htmlRaw(svgInnerHtml)}</symbol>`;
+ svgInnerHtml = `<symbol id="${this.symbolId}" viewBox="${attrs['^viewBox']}">${svgInnerHtml}</symbol>`;
  }
  // create VNode
  return h('svg', {
@@ -314,7 +314,6 @@ export function replaceTextareaSelection(textarea: HTMLTextAreaElement, text: st
  export function createElementFromHTML<T extends HTMLElement>(htmlString: string): T {
  htmlString = htmlString.trim();
  // some tags like "tr" are special, it must use a correct parent container to create
- // eslint-disable-next-line github/unescaped-html-literal -- FIXME: maybe we need to use other approaches to create elements from HTML, e.g. using DOMParser
  if (htmlString.startsWith('<tr')) {
  const container = document.createElement('table');
  container.innerHTML = htmlString;
@@ -1,8 +0,0 @@
- import {html, htmlEscape, htmlRaw} from './html.ts';
-
- test('html', async () => {
- expect(html`<a>${'<>&\'"'}</a>`).toBe(`<a>&lt;&gt;&amp;&#39;&quot;</a>`);
- expect(html`<a>${htmlRaw('<img>')}</a>`).toBe(`<a><img></a>`);
- expect(html`<a>${htmlRaw`<img ${'&'}>`}</a>`).toBe(`<a><img &amp;></a>`);
- expect(htmlEscape(`<a></a>`)).toBe(`&lt;a&gt;&lt;/a&gt;`);
- });
@@ -1,32 +0,0 @@
- export function htmlEscape(s: string, ...args: Array<any>): string {
- if (args.length !== 0) throw new Error('use html or htmlRaw instead of htmlEscape'); // check legacy usages
- return s.replace(/&/g, '&amp;')
- .replace(/"/g, '&quot;')
- .replace(/'/g, '&#39;')
- .replace(/</g, '&lt;')
- .replace(/>/g, '&gt;');
- }
-
- class rawObject {
- private readonly value: string;
- constructor(v: string) { this.value = v }
- toString(): string { return this.value }
- }
-
- export function html(tmpl: TemplateStringsArray, ...parts: Array<any>): string {
- let output = tmpl[0];
- for (let i = 0; i < parts.length; i++) {
- const value = parts[i];
- const valueEscaped = (value instanceof rawObject) ? value.toString() : htmlEscape(String(parts[i]));
- output = output + valueEscaped + tmpl[i + 1];
- }
- return output;
- }
-
- export function htmlRaw(s: string|TemplateStringsArray, ...tmplParts: Array<any>): rawObject {
- if (typeof s === 'string') {
- if (tmplParts.length !== 0) throw new Error("either htmlRaw('str') or htmlRaw`tmpl`");
- return new rawObject(s);
- }
- return new rawObject(html(s, ...tmplParts));
- }
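For reference, the helpers deleted above are what the removed side of this diff imports from '../utils/html.ts'. A short usage sketch based on the implementation shown; the sample values are made up:

import {html, htmlEscape, htmlRaw} from './html.ts';

const user = `<script>alert(1)</script>`;

// interpolated values are escaped by default
const nameHtml = html`<span class="name">${user}</span>`;
// nameHtml === '<span class="name">&lt;script&gt;alert(1)&lt;/script&gt;</span>'

// htmlRaw marks an already-trusted fragment (e.g. server-rendered markup) so it is not escaped again
const avatar = htmlRaw('<img class="ui avatar" src="/avatars/1" width="20" height="20">');
const itemHtml = html`<div class="tribute-item">${avatar}<span>${user}</span></div>`;

// htmlEscape still works as a plain function for single values
const escaped = htmlEscape(`<a></a>`); // '&lt;a&gt;&lt;/a&gt;'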