Merge branch 'main' into lunny/repo_dep_org
commit 569c3d181e

.github/workflows/pull-db-tests.yml (vendored): 10 lines changed
@@ -154,12 +154,15 @@ jobs:
     runs-on: ubuntu-latest
     services:
       mysql:
-        image: mysql:8.0
+        # the bitnami mysql image has more options than the official one, it's easier to customize
+        image: bitnami/mysql:8.0
         env:
-          MYSQL_ALLOW_EMPTY_PASSWORD: true
+          ALLOW_EMPTY_PASSWORD: true
          MYSQL_DATABASE: testgitea
         ports:
           - "3306:3306"
+        options: >-
+          --mount type=tmpfs,destination=/bitnami/mysql/data
       elasticsearch:
         image: elasticsearch:7.5.0
         env:
@@ -188,7 +191,8 @@ jobs:
       - name: run migration tests
         run: make test-mysql-migration
       - name: run tests
-        run: make integration-test-coverage
+        # run: make integration-test-coverage (at the moment, no coverage is really handled)
+        run: make test-mysql
         env:
           TAGS: bindata
           RACE_ENABLED: true
@@ -1912,7 +1912,7 @@ LEVEL = Info
 ;ENABLED = true
 ;;
 ;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
-;ALLOWED_TYPES = .csv,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip
+;ALLOWED_TYPES = .avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip
 ;;
 ;; Max size of each file. Defaults to 2048MB
 ;MAX_SIZE = 2048
@@ -261,6 +261,7 @@ func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID strin
 }
 
 // InsertRun inserts a run
+// The title will be cut off at 255 characters if it's longer than 255 characters.
 func InsertRun(ctx context.Context, run *ActionRun, jobs []*jobparser.SingleWorkflow) error {
     ctx, committer, err := db.TxContext(ctx)
     if err != nil {
@@ -273,6 +274,7 @@ func InsertRun(ctx context.Context, run *ActionRun, jobs []*jobparser.SingleWork
         return err
     }
     run.Index = index
+    run.Title, _ = util.SplitStringAtByteN(run.Title, 255)
 
     if err := db.Insert(ctx, run); err != nil {
         return err
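Note: InsertRun and the call sites in the hunks below now truncate titles and names with util.SplitStringAtByteN before writing to a 255-character column. A minimal standalone sketch of what such a byte-limit split is assumed to do (cut at N bytes without breaking a UTF-8 rune); this is an illustration, not Gitea's actual implementation:

package main

import (
    "fmt"
    "unicode/utf8"
)

// splitStringAtByteN is an assumed stand-in for util.SplitStringAtByteN:
// it cuts the input after at most n bytes without splitting a UTF-8 rune.
func splitStringAtByteN(input string, n int) (left, right string) {
    if len(input) <= n {
        return input, ""
    }
    i := n
    for i > 0 && !utf8.RuneStart(input[i]) {
        i-- // step back to a rune boundary
    }
    return input[:i], input[i:]
}

func main() {
    title, rest := splitStringAtByteN("fix: 修复标题过长导致插入失败的问题", 16)
    fmt.Printf("%q (len=%d bytes)\n", title, len(title)) // truncated on a rune boundary
    fmt.Printf("%q\n", rest)
}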
@@ -399,6 +401,7 @@ func UpdateRun(ctx context.Context, run *ActionRun, cols ...string) error {
     if len(cols) > 0 {
         sess.Cols(cols...)
     }
+    run.Title, _ = util.SplitStringAtByteN(run.Title, 255)
     affected, err := sess.Update(run)
     if err != nil {
         return err
@@ -252,6 +252,7 @@ func GetRunnerByID(ctx context.Context, id int64) (*ActionRunner, error) {
 // UpdateRunner updates runner's information.
 func UpdateRunner(ctx context.Context, r *ActionRunner, cols ...string) error {
     e := db.GetEngine(ctx)
+    r.Name, _ = util.SplitStringAtByteN(r.Name, 255)
     var err error
     if len(cols) == 0 {
         _, err = e.ID(r.ID).AllCols().Update(r)
@@ -278,6 +279,7 @@ func CreateRunner(ctx context.Context, t *ActionRunner) error {
         // Remove OwnerID to avoid confusion; it's not worth returning an error here.
         t.OwnerID = 0
     }
+    t.Name, _ = util.SplitStringAtByteN(t.Name, 255)
     return db.Insert(ctx, t)
 }
 
@@ -12,6 +12,7 @@ import (
     repo_model "code.gitea.io/gitea/models/repo"
     user_model "code.gitea.io/gitea/models/user"
     "code.gitea.io/gitea/modules/timeutil"
+    "code.gitea.io/gitea/modules/util"
     webhook_module "code.gitea.io/gitea/modules/webhook"
 )
 
@@ -67,6 +68,7 @@ func CreateScheduleTask(ctx context.Context, rows []*ActionSchedule) error {
 
     // Loop through each schedule row
     for _, row := range rows {
+        row.Title, _ = util.SplitStringAtByteN(row.Title, 255)
         // Create new schedule row
         if err = db.Insert(ctx, row); err != nil {
             return err
@@ -26,7 +26,7 @@
   fork_id: 0
   is_template: false
   template_id: 0
-  size: 8478
+  size: 0
   is_fsck_enabled: true
   close_issues_via_commit_in_any_branch: false
 
@@ -21,6 +21,7 @@ import (
     "code.gitea.io/gitea/modules/references"
     api "code.gitea.io/gitea/modules/structs"
     "code.gitea.io/gitea/modules/timeutil"
+    "code.gitea.io/gitea/modules/util"
 
     "xorm.io/builder"
 )
@@ -138,6 +139,7 @@ func ChangeIssueTitle(ctx context.Context, issue *Issue, doer *user_model.User,
     }
     defer committer.Close()
 
+    issue.Title, _ = util.SplitStringAtByteN(issue.Title, 255)
     if err = UpdateIssueCols(ctx, issue, "name"); err != nil {
         return fmt.Errorf("updateIssueCols: %w", err)
     }
@@ -386,6 +388,7 @@ func NewIssueWithIndex(ctx context.Context, doer *user_model.User, opts NewIssue
 }
 
 // NewIssue creates new issue with labels for repository.
+// The title will be cut off at 255 characters if it's longer than 255 characters.
 func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *Issue, labelIDs []int64, uuids []string) (err error) {
     ctx, committer, err := db.TxContext(ctx)
     if err != nil {
@@ -399,6 +402,7 @@ func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *Issue, la
     }
 
     issue.Index = idx
+    issue.Title, _ = util.SplitStringAtByteN(issue.Title, 255)
 
     if err = NewIssueWithIndex(ctx, issue.Poster, NewIssueOptions{
         Repo: repo,
@@ -572,6 +572,7 @@ func NewPullRequest(ctx context.Context, repo *repo_model.Repository, issue *Iss
     }
 
     issue.Index = idx
+    issue.Title, _ = util.SplitStringAtByteN(issue.Title, 255)
 
     if err = NewIssueWithIndex(ctx, issue.Poster, NewIssueOptions{
         Repo: repo,
@@ -8,7 +8,6 @@ import (
     "context"
     "fmt"
     "os"
-    "path"
     "path/filepath"
     "runtime"
     "testing"
@@ -16,7 +15,6 @@ import (
     "code.gitea.io/gitea/models/unittest"
     "code.gitea.io/gitea/modules/base"
     "code.gitea.io/gitea/modules/git"
-    "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/setting"
     "code.gitea.io/gitea/modules/testlogger"
 
@@ -35,27 +33,7 @@ func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, fu
     ourSkip := 2
     ourSkip += skip
     deferFn := testlogger.PrintCurrentTest(t, ourSkip)
-    assert.NoError(t, os.RemoveAll(setting.RepoRootPath))
-    assert.NoError(t, unittest.CopyDir(path.Join(filepath.Dir(setting.AppPath), "tests/gitea-repositories-meta"), setting.RepoRootPath))
-    ownerDirs, err := os.ReadDir(setting.RepoRootPath)
-    if err != nil {
-        assert.NoError(t, err, "unable to read the new repo root: %v\n", err)
-    }
-    for _, ownerDir := range ownerDirs {
-        if !ownerDir.Type().IsDir() {
-            continue
-        }
-        repoDirs, err := os.ReadDir(filepath.Join(setting.RepoRootPath, ownerDir.Name()))
-        if err != nil {
-            assert.NoError(t, err, "unable to read the new repo root: %v\n", err)
-        }
-        for _, repoDir := range repoDirs {
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "objects", "pack"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "objects", "info"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "refs", "heads"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "refs", "tag"), 0o755)
-        }
-    }
+    assert.NoError(t, unittest.SyncDirs(filepath.Join(filepath.Dir(setting.AppPath), "tests/gitea-repositories-meta"), setting.RepoRootPath))
 
     if err := deleteDB(); err != nil {
         t.Errorf("unable to reset database: %v", err)
@@ -112,39 +90,36 @@ func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, fu
 }
 
 func MainTest(m *testing.M) {
-    log.RegisterEventWriter("test", testlogger.NewTestLoggerWriter)
+    testlogger.Init()
 
     giteaRoot := base.SetupGiteaRoot()
     if giteaRoot == "" {
-        fmt.Println("Environment variable $GITEA_ROOT not set")
-        os.Exit(1)
+        testlogger.Fatalf("Environment variable $GITEA_ROOT not set\n")
     }
     giteaBinary := "gitea"
     if runtime.GOOS == "windows" {
         giteaBinary += ".exe"
     }
-    setting.AppPath = path.Join(giteaRoot, giteaBinary)
+    setting.AppPath = filepath.Join(giteaRoot, giteaBinary)
     if _, err := os.Stat(setting.AppPath); err != nil {
-        fmt.Printf("Could not find gitea binary at %s\n", setting.AppPath)
-        os.Exit(1)
+        testlogger.Fatalf("Could not find gitea binary at %s\n", setting.AppPath)
     }
 
     giteaConf := os.Getenv("GITEA_CONF")
     if giteaConf == "" {
-        giteaConf = path.Join(filepath.Dir(setting.AppPath), "tests/sqlite.ini")
+        giteaConf = filepath.Join(filepath.Dir(setting.AppPath), "tests/sqlite.ini")
         fmt.Printf("Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf)
     }
 
-    if !path.IsAbs(giteaConf) {
-        setting.CustomConf = path.Join(giteaRoot, giteaConf)
+    if !filepath.IsAbs(giteaConf) {
+        setting.CustomConf = filepath.Join(giteaRoot, giteaConf)
     } else {
         setting.CustomConf = giteaConf
     }
 
     tmpDataPath, err := os.MkdirTemp("", "data")
     if err != nil {
-        fmt.Printf("Unable to create temporary data path %v\n", err)
-        os.Exit(1)
+        testlogger.Fatalf("Unable to create temporary data path %v\n", err)
     }
 
     setting.CustomPath = filepath.Join(setting.AppWorkPath, "custom")
@@ -152,8 +127,7 @@ func MainTest(m *testing.M) {
 
     unittest.InitSettings()
     if err = git.InitFull(context.Background()); err != nil {
-        fmt.Printf("Unable to InitFull: %v\n", err)
-        os.Exit(1)
+        testlogger.Fatalf("Unable to InitFull: %v\n", err)
     }
     setting.LoadDBSetting()
     setting.InitLoggersForTest()
@@ -242,6 +242,7 @@ func GetSearchOrderByBySortType(sortType string) db.SearchOrderBy {
 }
 
 // NewProject creates a new Project
+// The title will be cut off at 255 characters if it's longer than 255 characters.
 func NewProject(ctx context.Context, p *Project) error {
     if !IsTemplateTypeValid(p.TemplateType) {
         p.TemplateType = TemplateTypeNone
@@ -255,6 +256,8 @@ func NewProject(ctx context.Context, p *Project) error {
         return util.NewInvalidArgumentErrorf("project type is not valid")
     }
 
+    p.Title, _ = util.SplitStringAtByteN(p.Title, 255)
+
     return db.WithTx(ctx, func(ctx context.Context) error {
         if err := db.Insert(ctx, p); err != nil {
             return err
@@ -308,6 +311,7 @@ func UpdateProject(ctx context.Context, p *Project) error {
         p.CardType = CardTypeTextOnly
     }
 
+    p.Title, _ = util.SplitStringAtByteN(p.Title, 255)
     _, err := db.GetEngine(ctx).ID(p.ID).Cols(
         "title",
         "description",
@@ -156,6 +156,7 @@ func IsReleaseExist(ctx context.Context, repoID int64, tagName string) (bool, er
 
 // UpdateRelease updates all columns of a release
 func UpdateRelease(ctx context.Context, rel *Release) error {
+    rel.Title, _ = util.SplitStringAtByteN(rel.Title, 255)
     _, err := db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel)
     return err
 }
@@ -7,6 +7,7 @@ import (
     "context"
     "fmt"
     "html/template"
+    "maps"
     "net"
     "net/url"
     "path/filepath"
|
|||||||
|
|
||||||
Status RepositoryStatus `xorm:"NOT NULL DEFAULT 0"`
|
Status RepositoryStatus `xorm:"NOT NULL DEFAULT 0"`
|
||||||
|
|
||||||
RenderingMetas map[string]string `xorm:"-"`
|
commonRenderingMetas map[string]string `xorm:"-"`
|
||||||
DocumentRenderingMetas map[string]string `xorm:"-"`
|
|
||||||
Units []*RepoUnit `xorm:"-"`
|
Units []*RepoUnit `xorm:"-"`
|
||||||
PrimaryLanguage *LanguageStat `xorm:"-"`
|
PrimaryLanguage *LanguageStat `xorm:"-"`
|
||||||
|
|
||||||
IsFork bool `xorm:"INDEX NOT NULL DEFAULT false"`
|
IsFork bool `xorm:"INDEX NOT NULL DEFAULT false"`
|
||||||
ForkID int64 `xorm:"INDEX"`
|
ForkID int64 `xorm:"INDEX"`
|
||||||
@ -473,9 +474,8 @@ func (repo *Repository) MustOwner(ctx context.Context) *user_model.User {
|
|||||||
return repo.Owner
|
return repo.Owner
|
||||||
}
|
}
|
||||||
|
|
||||||
// ComposeMetas composes a map of metas for properly rendering issue links and external issue trackers.
|
func (repo *Repository) composeCommonMetas(ctx context.Context) map[string]string {
|
||||||
func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
|
if len(repo.commonRenderingMetas) == 0 {
|
||||||
if len(repo.RenderingMetas) == 0 {
|
|
||||||
metas := map[string]string{
|
metas := map[string]string{
|
||||||
"user": repo.OwnerName,
|
"user": repo.OwnerName,
|
||||||
"repo": repo.Name,
|
"repo": repo.Name,
|
||||||
@ -508,21 +508,34 @@ func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
|
|||||||
metas["org"] = strings.ToLower(repo.OwnerName)
|
metas["org"] = strings.ToLower(repo.OwnerName)
|
||||||
}
|
}
|
||||||
|
|
||||||
repo.RenderingMetas = metas
|
repo.commonRenderingMetas = metas
|
||||||
}
|
}
|
||||||
return repo.RenderingMetas
|
return repo.commonRenderingMetas
|
||||||
}
|
}
|
||||||
|
|
||||||
// ComposeDocumentMetas composes a map of metas for properly rendering documents
|
// ComposeMetas composes a map of metas for properly rendering comments or comment-like contents (commit message)
|
||||||
|
func (repo *Repository) ComposeMetas(ctx context.Context) map[string]string {
|
||||||
|
metas := maps.Clone(repo.composeCommonMetas(ctx))
|
||||||
|
metas["markdownLineBreakStyle"] = "comment"
|
||||||
|
metas["markupAllowShortIssuePattern"] = "true"
|
||||||
|
return metas
|
||||||
|
}
|
||||||
|
|
||||||
|
// ComposeWikiMetas composes a map of metas for properly rendering wikis
|
||||||
|
func (repo *Repository) ComposeWikiMetas(ctx context.Context) map[string]string {
|
||||||
|
// does wiki need the "teams" and "org" from common metas?
|
||||||
|
metas := maps.Clone(repo.composeCommonMetas(ctx))
|
||||||
|
metas["markdownLineBreakStyle"] = "document"
|
||||||
|
metas["markupAllowShortIssuePattern"] = "true"
|
||||||
|
return metas
|
||||||
|
}
|
||||||
|
|
||||||
|
// ComposeDocumentMetas composes a map of metas for properly rendering documents (repo files)
|
||||||
func (repo *Repository) ComposeDocumentMetas(ctx context.Context) map[string]string {
|
func (repo *Repository) ComposeDocumentMetas(ctx context.Context) map[string]string {
|
||||||
if len(repo.DocumentRenderingMetas) == 0 {
|
// does document(file) need the "teams" and "org" from common metas?
|
||||||
metas := map[string]string{}
|
metas := maps.Clone(repo.composeCommonMetas(ctx))
|
||||||
for k, v := range repo.ComposeMetas(ctx) {
|
metas["markdownLineBreakStyle"] = "document"
|
||||||
metas[k] = v
|
return metas
|
||||||
}
|
|
||||||
repo.DocumentRenderingMetas = metas
|
|
||||||
}
|
|
||||||
return repo.DocumentRenderingMetas
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetBaseRepo populates repo.BaseRepo for a fork repository and
|
// GetBaseRepo populates repo.BaseRepo for a fork repository and
|
||||||
|
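The refactored metas above clone the cached common map with maps.Clone before adding per-context keys, so the shared cache is never mutated. A small self-contained sketch of why the clone matters (names here are illustrative only, not Gitea's API):

package main

import (
    "fmt"
    "maps"
)

func main() {
    cached := map[string]string{"user": "testOwner", "repo": "testRepo"}

    perCall := maps.Clone(cached)
    perCall["markdownLineBreakStyle"] = "comment" // mutate only the copy

    fmt.Println(cached["markdownLineBreakStyle"])  // "" : cache untouched
    fmt.Println(perCall["markdownLineBreakStyle"]) // "comment"
}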
@@ -1,13 +1,12 @@
 // Copyright 2017 The Gitea Authors. All rights reserved.
 // SPDX-License-Identifier: MIT
 
-package repo_test
+package repo
 
 import (
     "testing"
 
     "code.gitea.io/gitea/models/db"
-    repo_model "code.gitea.io/gitea/models/repo"
     "code.gitea.io/gitea/models/unit"
     "code.gitea.io/gitea/models/unittest"
     user_model "code.gitea.io/gitea/models/user"
@@ -20,18 +19,18 @@ import (
 )
 
 var (
-    countRepospts        = repo_model.CountRepositoryOptions{OwnerID: 10}
-    countReposptsPublic  = repo_model.CountRepositoryOptions{OwnerID: 10, Private: optional.Some(false)}
-    countReposptsPrivate = repo_model.CountRepositoryOptions{OwnerID: 10, Private: optional.Some(true)}
+    countRepospts        = CountRepositoryOptions{OwnerID: 10}
+    countReposptsPublic  = CountRepositoryOptions{OwnerID: 10, Private: optional.Some(false)}
+    countReposptsPrivate = CountRepositoryOptions{OwnerID: 10, Private: optional.Some(true)}
 )
 
 func TestGetRepositoryCount(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
     ctx := db.DefaultContext
-    count, err1 := repo_model.CountRepositories(ctx, countRepospts)
-    privateCount, err2 := repo_model.CountRepositories(ctx, countReposptsPrivate)
-    publicCount, err3 := repo_model.CountRepositories(ctx, countReposptsPublic)
+    count, err1 := CountRepositories(ctx, countRepospts)
+    privateCount, err2 := CountRepositories(ctx, countReposptsPrivate)
+    publicCount, err3 := CountRepositories(ctx, countReposptsPublic)
     assert.NoError(t, err1)
     assert.NoError(t, err2)
     assert.NoError(t, err3)
@@ -42,7 +41,7 @@ func TestGetRepositoryCount(t *testing.T) {
 func TestGetPublicRepositoryCount(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
-    count, err := repo_model.CountRepositories(db.DefaultContext, countReposptsPublic)
+    count, err := CountRepositories(db.DefaultContext, countReposptsPublic)
     assert.NoError(t, err)
     assert.Equal(t, int64(1), count)
 }
@@ -50,14 +49,14 @@ func TestGetPublicRepositoryCount(t *testing.T) {
 func TestGetPrivateRepositoryCount(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
-    count, err := repo_model.CountRepositories(db.DefaultContext, countReposptsPrivate)
+    count, err := CountRepositories(db.DefaultContext, countReposptsPrivate)
     assert.NoError(t, err)
     assert.Equal(t, int64(2), count)
 }
 
 func TestRepoAPIURL(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
-    repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
+    repo := unittest.AssertExistsAndLoadBean(t, &Repository{ID: 10})
 
     assert.Equal(t, "https://try.gitea.io/api/v1/repos/user12/repo10", repo.APIURL())
 }
@@ -65,22 +64,22 @@ func TestRepoAPIURL(t *testing.T) {
 func TestWatchRepo(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
-    repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+    repo := unittest.AssertExistsAndLoadBean(t, &Repository{ID: 3})
     user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
 
-    assert.NoError(t, repo_model.WatchRepo(db.DefaultContext, user, repo, true))
-    unittest.AssertExistsAndLoadBean(t, &repo_model.Watch{RepoID: repo.ID, UserID: user.ID})
-    unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
+    assert.NoError(t, WatchRepo(db.DefaultContext, user, repo, true))
+    unittest.AssertExistsAndLoadBean(t, &Watch{RepoID: repo.ID, UserID: user.ID})
+    unittest.CheckConsistencyFor(t, &Repository{ID: repo.ID})
 
-    assert.NoError(t, repo_model.WatchRepo(db.DefaultContext, user, repo, false))
-    unittest.AssertNotExistsBean(t, &repo_model.Watch{RepoID: repo.ID, UserID: user.ID})
-    unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
+    assert.NoError(t, WatchRepo(db.DefaultContext, user, repo, false))
+    unittest.AssertNotExistsBean(t, &Watch{RepoID: repo.ID, UserID: user.ID})
+    unittest.CheckConsistencyFor(t, &Repository{ID: repo.ID})
 }
 
 func TestMetas(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
-    repo := &repo_model.Repository{Name: "testRepo"}
+    repo := &Repository{Name: "testRepo"}
     repo.Owner = &user_model.User{Name: "testOwner"}
     repo.OwnerName = repo.Owner.Name
 
@@ -90,16 +89,16 @@ func TestMetas(t *testing.T) {
     assert.Equal(t, "testRepo", metas["repo"])
     assert.Equal(t, "testOwner", metas["user"])
 
-    externalTracker := repo_model.RepoUnit{
+    externalTracker := RepoUnit{
         Type: unit.TypeExternalTracker,
-        Config: &repo_model.ExternalTrackerConfig{
+        Config: &ExternalTrackerConfig{
             ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}",
         },
     }
 
     testSuccess := func(expectedStyle string) {
-        repo.Units = []*repo_model.RepoUnit{&externalTracker}
-        repo.RenderingMetas = nil
+        repo.Units = []*RepoUnit{&externalTracker}
+        repo.commonRenderingMetas = nil
         metas := repo.ComposeMetas(db.DefaultContext)
         assert.Equal(t, expectedStyle, metas["style"])
         assert.Equal(t, "testRepo", metas["repo"])
@@ -118,7 +117,7 @@ func TestMetas(t *testing.T) {
     externalTracker.ExternalTrackerConfig().ExternalTrackerStyle = markup.IssueNameStyleRegexp
     testSuccess(markup.IssueNameStyleRegexp)
 
-    repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 3)
+    repo, err := GetRepositoryByID(db.DefaultContext, 3)
     assert.NoError(t, err)
 
     metas = repo.ComposeMetas(db.DefaultContext)
@@ -132,7 +131,7 @@ func TestGetRepositoryByURL(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
     t.Run("InvalidPath", func(t *testing.T) {
-        repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, "something")
+        repo, err := GetRepositoryByURL(db.DefaultContext, "something")
 
         assert.Nil(t, repo)
         assert.Error(t, err)
@@ -140,7 +139,7 @@ func TestGetRepositoryByURL(t *testing.T) {
 
     t.Run("ValidHttpURL", func(t *testing.T) {
         test := func(t *testing.T, url string) {
-            repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, url)
+            repo, err := GetRepositoryByURL(db.DefaultContext, url)
 
             assert.NotNil(t, repo)
             assert.NoError(t, err)
@@ -155,7 +154,7 @@ func TestGetRepositoryByURL(t *testing.T) {
 
     t.Run("ValidGitSshURL", func(t *testing.T) {
         test := func(t *testing.T, url string) {
-            repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, url)
+            repo, err := GetRepositoryByURL(db.DefaultContext, url)
 
             assert.NotNil(t, repo)
             assert.NoError(t, err)
@@ -173,7 +172,7 @@ func TestGetRepositoryByURL(t *testing.T) {
 
     t.Run("ValidImplicitSshURL", func(t *testing.T) {
         test := func(t *testing.T, url string) {
-            repo, err := repo_model.GetRepositoryByURL(db.DefaultContext, url)
+            repo, err := GetRepositoryByURL(db.DefaultContext, url)
 
             assert.NotNil(t, repo)
             assert.NoError(t, err)
@@ -200,21 +199,21 @@ func TestComposeSSHCloneURL(t *testing.T) {
     setting.SSH.Domain = "domain"
     setting.SSH.Port = 22
     setting.Repository.UseCompatSSHURI = false
-    assert.Equal(t, "git@domain:user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "git@domain:user/repo.git", ComposeSSHCloneURL("user", "repo"))
     setting.Repository.UseCompatSSHURI = true
-    assert.Equal(t, "ssh://git@domain/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@domain/user/repo.git", ComposeSSHCloneURL("user", "repo"))
     // test SSH_DOMAIN while use non-standard SSH port
     setting.SSH.Port = 123
     setting.Repository.UseCompatSSHURI = false
-    assert.Equal(t, "ssh://git@domain:123/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL("user", "repo"))
     setting.Repository.UseCompatSSHURI = true
-    assert.Equal(t, "ssh://git@domain:123/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@domain:123/user/repo.git", ComposeSSHCloneURL("user", "repo"))
 
     // test IPv6 SSH_DOMAIN
     setting.Repository.UseCompatSSHURI = false
     setting.SSH.Domain = "::1"
     setting.SSH.Port = 22
-    assert.Equal(t, "git@[::1]:user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "git@[::1]:user/repo.git", ComposeSSHCloneURL("user", "repo"))
     setting.SSH.Port = 123
-    assert.Equal(t, "ssh://git@[::1]:123/user/repo.git", repo_model.ComposeSSHCloneURL("user", "repo"))
+    assert.Equal(t, "ssh://git@[::1]:123/user/repo.git", ComposeSSHCloneURL("user", "repo"))
 }
@@ -36,6 +36,7 @@ var OrderByMap = map[string]map[string]db.SearchOrderBy{
 var OrderByFlatMap = map[string]db.SearchOrderBy{
     "newest": OrderByMap["desc"]["created"],
     "oldest": OrderByMap["asc"]["created"],
+    "recentupdate":          OrderByMap["desc"]["updated"],
     "leastupdate":           OrderByMap["asc"]["updated"],
     "reversealphabetically": OrderByMap["desc"]["alpha"],
     "alphabetically":        OrderByMap["asc"]["alpha"],
|
|||||||
package unittest
|
package unittest
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
|
||||||
"io"
|
|
||||||
"os"
|
"os"
|
||||||
"path"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"code.gitea.io/gitea/modules/util"
|
"code.gitea.io/gitea/modules/util"
|
||||||
@ -32,67 +30,73 @@ func Copy(src, dest string) error {
|
|||||||
return os.Symlink(target, dest)
|
return os.Symlink(target, dest)
|
||||||
}
|
}
|
||||||
|
|
||||||
sr, err := os.Open(src)
|
return util.CopyFile(src, dest)
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer sr.Close()
|
|
||||||
|
|
||||||
dw, err := os.Create(dest)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer dw.Close()
|
|
||||||
|
|
||||||
if _, err = io.Copy(dw, sr); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set back file information.
|
|
||||||
if err = os.Chtimes(dest, si.ModTime(), si.ModTime()); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return os.Chmod(dest, si.Mode())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// CopyDir copy files recursively from source to target directory.
|
// Sync synchronizes the two files. This is skipped if both files
|
||||||
//
|
// exist and the size, modtime, and mode match.
|
||||||
// The filter accepts a function that process the path info.
|
func Sync(srcPath, destPath string) error {
|
||||||
// and should return true for need to filter.
|
dest, err := os.Stat(destPath)
|
||||||
//
|
if err != nil {
|
||||||
// It returns error when error occurs in underlying functions.
|
if os.IsNotExist(err) {
|
||||||
func CopyDir(srcPath, destPath string, filters ...func(filePath string) bool) error {
|
return Copy(srcPath, destPath)
|
||||||
// Check if target directory exists.
|
}
|
||||||
if _, err := os.Stat(destPath); !errors.Is(err, os.ErrNotExist) {
|
return err
|
||||||
return util.NewAlreadyExistErrorf("file or directory already exists: %s", destPath)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
src, err := os.Stat(srcPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if src.Size() == dest.Size() &&
|
||||||
|
src.ModTime() == dest.ModTime() &&
|
||||||
|
src.Mode() == dest.Mode() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return Copy(srcPath, destPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SyncDirs synchronizes files recursively from source to target directory.
|
||||||
|
// It returns error when error occurs in underlying functions.
|
||||||
|
func SyncDirs(srcPath, destPath string) error {
|
||||||
err := os.MkdirAll(destPath, os.ModePerm)
|
err := os.MkdirAll(destPath, os.ModePerm)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Gather directory info.
|
// find and delete all untracked files
|
||||||
infos, err := util.StatDir(srcPath, true)
|
destFiles, err := util.StatDir(destPath, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
for _, destFile := range destFiles {
|
||||||
var filter func(filePath string) bool
|
destFilePath := filepath.Join(destPath, destFile)
|
||||||
if len(filters) > 0 {
|
if _, err = os.Stat(filepath.Join(srcPath, destFile)); err != nil {
|
||||||
filter = filters[0]
|
if os.IsNotExist(err) {
|
||||||
|
// if src file does not exist, remove dest file
|
||||||
|
if err = os.RemoveAll(destFilePath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, info := range infos {
|
// sync src files to dest
|
||||||
if filter != nil && filter(info) {
|
srcFiles, err := util.StatDir(srcPath, true)
|
||||||
continue
|
if err != nil {
|
||||||
}
|
return err
|
||||||
|
}
|
||||||
curPath := path.Join(destPath, info)
|
for _, srcFile := range srcFiles {
|
||||||
if strings.HasSuffix(info, "/") {
|
destFilePath := filepath.Join(destPath, srcFile)
|
||||||
err = os.MkdirAll(curPath, os.ModePerm)
|
// util.StatDir appends a slash to the directory name
|
||||||
|
if strings.HasSuffix(srcFile, "/") {
|
||||||
|
err = os.MkdirAll(destFilePath, os.ModePerm)
|
||||||
} else {
|
} else {
|
||||||
err = Copy(path.Join(srcPath, info), curPath)
|
err = Sync(filepath.Join(srcPath, srcFile), destFilePath)
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
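SyncDirs above works in two passes: first delete destination entries that no longer exist in the source, then sync everything from the source into the destination. A standalone sketch of the same idea using only the standard library (filepath.WalkDir instead of Gitea's util.StatDir); the paths and the naive byte-for-byte copy are placeholders for illustration:

package main

import (
    "io/fs"
    "os"
    "path/filepath"
)

func syncDirs(srcPath, destPath string) error {
    if err := os.MkdirAll(destPath, os.ModePerm); err != nil {
        return err
    }
    // pass 1: delete destination entries with no counterpart in the source
    err := filepath.WalkDir(destPath, func(p string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        rel, relErr := filepath.Rel(destPath, p)
        if relErr != nil {
            return relErr
        }
        if _, statErr := os.Stat(filepath.Join(srcPath, rel)); os.IsNotExist(statErr) {
            if rmErr := os.RemoveAll(p); rmErr != nil {
                return rmErr
            }
            if d.IsDir() {
                return filepath.SkipDir // its children are already gone
            }
        }
        return nil
    })
    if err != nil {
        return err
    }
    // pass 2: recreate directories and copy files from the source
    // (a real implementation would also skip unchanged files and keep modes)
    return filepath.WalkDir(srcPath, func(p string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        rel, relErr := filepath.Rel(srcPath, p)
        if relErr != nil {
            return relErr
        }
        target := filepath.Join(destPath, rel)
        if d.IsDir() {
            return os.MkdirAll(target, os.ModePerm)
        }
        data, err := os.ReadFile(p)
        if err != nil {
            return err
        }
        return os.WriteFile(target, data, 0o644)
    })
}

func main() {
    _ = syncDirs("testdata/src", "testdata/dest") // paths are placeholders
}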
@@ -164,35 +164,13 @@ func MainTest(m *testing.M, testOpts ...*TestOptions) {
     if err = storage.Init(); err != nil {
         fatalTestError("storage.Init: %v\n", err)
     }
-    if err = util.RemoveAll(repoRootPath); err != nil {
-        fatalTestError("util.RemoveAll: %v\n", err)
-    }
-    if err = CopyDir(filepath.Join(giteaRoot, "tests", "gitea-repositories-meta"), setting.RepoRootPath); err != nil {
-        fatalTestError("util.CopyDir: %v\n", err)
+    if err = SyncDirs(filepath.Join(giteaRoot, "tests", "gitea-repositories-meta"), setting.RepoRootPath); err != nil {
+        fatalTestError("util.SyncDirs: %v\n", err)
     }
 
     if err = git.InitFull(context.Background()); err != nil {
         fatalTestError("git.Init: %v\n", err)
     }
-    ownerDirs, err := os.ReadDir(setting.RepoRootPath)
-    if err != nil {
-        fatalTestError("unable to read the new repo root: %v\n", err)
-    }
-    for _, ownerDir := range ownerDirs {
-        if !ownerDir.Type().IsDir() {
-            continue
-        }
-        repoDirs, err := os.ReadDir(filepath.Join(setting.RepoRootPath, ownerDir.Name()))
-        if err != nil {
-            fatalTestError("unable to read the new repo root: %v\n", err)
-        }
-        for _, repoDir := range repoDirs {
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "objects", "pack"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "objects", "info"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "refs", "heads"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "refs", "tag"), 0o755)
-        }
-    }
 
     if len(testOpts) > 0 && testOpts[0].SetUp != nil {
         if err := testOpts[0].SetUp(); err != nil {
@@ -255,24 +233,7 @@ func PrepareTestDatabase() error {
 // by tests that use the above MainTest(..) function.
 func PrepareTestEnv(t testing.TB) {
     assert.NoError(t, PrepareTestDatabase())
-    assert.NoError(t, util.RemoveAll(setting.RepoRootPath))
     metaPath := filepath.Join(giteaRoot, "tests", "gitea-repositories-meta")
-    assert.NoError(t, CopyDir(metaPath, setting.RepoRootPath))
-    ownerDirs, err := os.ReadDir(setting.RepoRootPath)
-    assert.NoError(t, err)
-    for _, ownerDir := range ownerDirs {
-        if !ownerDir.Type().IsDir() {
-            continue
-        }
-        repoDirs, err := os.ReadDir(filepath.Join(setting.RepoRootPath, ownerDir.Name()))
-        assert.NoError(t, err)
-        for _, repoDir := range repoDirs {
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "objects", "pack"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "objects", "info"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "refs", "heads"), 0o755)
-            _ = os.MkdirAll(filepath.Join(setting.RepoRootPath, ownerDir.Name(), repoDir.Name(), "refs", "tag"), 0o755)
-        }
-    }
+    assert.NoError(t, SyncDirs(metaPath, setting.RepoRootPath))
 
     base.SetupGiteaRoot() // Makes sure GITEA_ROOT is set
 }
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.
@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~
|
|||||||
w.Header().Add(gzhttp.HeaderNoCompression, "1")
|
w.Header().Add(gzhttp.HeaderNoCompression, "1")
|
||||||
}
|
}
|
||||||
|
|
||||||
contentType := typesniffer.ApplicationOctetStream
|
contentType := typesniffer.MimeTypeApplicationOctetStream
|
||||||
if opts.ContentType != "" {
|
if opts.ContentType != "" {
|
||||||
if opts.ContentTypeCharset != "" {
|
if opts.ContentTypeCharset != "" {
|
||||||
contentType = opts.ContentType + "; charset=" + strings.ToLower(opts.ContentTypeCharset)
|
contentType = opts.ContentType + "; charset=" + strings.ToLower(opts.ContentTypeCharset)
|
||||||
@ -107,7 +107,7 @@ func setServeHeadersByFile(r *http.Request, w http.ResponseWriter, filePath stri
|
|||||||
} else if isPlain {
|
} else if isPlain {
|
||||||
opts.ContentType = "text/plain"
|
opts.ContentType = "text/plain"
|
||||||
} else {
|
} else {
|
||||||
opts.ContentType = typesniffer.ApplicationOctetStream
|
opts.ContentType = typesniffer.MimeTypeApplicationOctetStream
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -21,6 +21,7 @@ import (
     _ "code.gitea.io/gitea/models/activities"
 
     "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
 
     _ "github.com/mattn/go-sqlite3"
 )
@@ -284,15 +285,11 @@ func TestBleveIndexAndSearch(t *testing.T) {
     dir := t.TempDir()
 
     idx := bleve.NewIndexer(dir)
-    _, err := idx.Init(context.Background())
-    if err != nil {
-        if idx != nil {
-            idx.Close()
-        }
-        assert.FailNow(t, "Unable to create bleve indexer Error: %v", err)
-    }
     defer idx.Close()
 
+    _, err := idx.Init(context.Background())
+    require.NoError(t, err)
+
     testIndexer("beleve", t, idx)
 }
 
@@ -86,6 +86,8 @@ type ColoredValue struct {
     colors []ColorAttribute
 }
 
+var _ fmt.Formatter = (*ColoredValue)(nil)
+
 func (c *ColoredValue) Format(f fmt.State, verb rune) {
     _, _ = f.Write(ColorBytes(c.colors...))
     s := fmt.Sprintf(fmt.FormatString(f, verb), c.v)
@@ -93,6 +95,10 @@ func (c *ColoredValue) Format(f fmt.State, verb rune) {
     _, _ = f.Write(resetBytes)
 }
 
+func (c *ColoredValue) Value() any {
+    return c.v
+}
+
 func NewColoredValue(v any, color ...ColorAttribute) *ColoredValue {
     return &ColoredValue{v: v, colors: color}
 }
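The added "var _ fmt.Formatter = (*ColoredValue)(nil)" line is a compile-time interface assertion: the build fails if the type ever stops satisfying fmt.Formatter. A small self-contained sketch of the pattern with a stand-in type (not Gitea's ColoredValue):

package main

import "fmt"

type banner struct{ text string }

// compile-time check: removing Format below would break the build here
var _ fmt.Formatter = (*banner)(nil)

func (b *banner) Format(f fmt.State, verb rune) {
    fmt.Fprintf(f, ">>> %s <<<", b.text)
}

func main() {
    fmt.Printf("%v\n", &banner{text: "hello"}) // prints ">>> hello <<<"
}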
@@ -7,11 +7,11 @@ import (
     "bytes"
     "io"
     "regexp"
+    "slices"
     "strings"
     "sync"
 
     "code.gitea.io/gitea/modules/markup/common"
-    "code.gitea.io/gitea/modules/setting"
 
     "golang.org/x/net/html"
     "golang.org/x/net/html/atom"
@@ -25,7 +25,27 @@ const (
     IssueNameStyleRegexp = "regexp"
 )
 
-var (
+// CSS class for action keywords (e.g. "closes: #1")
+const keywordClass = "issue-keyword"
+
+type globalVarsType struct {
+    hashCurrentPattern      *regexp.Regexp
+    shortLinkPattern        *regexp.Regexp
+    anyHashPattern          *regexp.Regexp
+    comparePattern          *regexp.Regexp
+    fullURLPattern          *regexp.Regexp
+    emailRegex              *regexp.Regexp
+    blackfridayExtRegex     *regexp.Regexp
+    emojiShortCodeRegex     *regexp.Regexp
+    issueFullPattern        *regexp.Regexp
+    filesChangedFullPattern *regexp.Regexp
+
+    tagCleaner *regexp.Regexp
+    nulCleaner *strings.Replacer
+}
+
+var globalVars = sync.OnceValue[*globalVarsType](func() *globalVarsType {
+    v := &globalVarsType{}
     // NOTE: All below regex matching do not perform any extra validation.
     // Thus a link is produced even if the linked entity does not exist.
     // While fast, this is also incorrect and lead to false positives.
|
|||||||
// hashCurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
|
// hashCurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
|
||||||
// Although SHA1 hashes are 40 chars long, SHA256 are 64, the regex matches the hash from 7 to 64 chars in length
|
// Although SHA1 hashes are 40 chars long, SHA256 are 64, the regex matches the hash from 7 to 64 chars in length
|
||||||
// so that abbreviated hash links can be used as well. This matches git and GitHub usability.
|
// so that abbreviated hash links can be used as well. This matches git and GitHub usability.
|
||||||
hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`)
|
v.hashCurrentPattern = regexp.MustCompile(`(?:\s|^|\(|\[)([0-9a-f]{7,64})(?:\s|$|\)|\]|[.,:](\s|$))`)
|
||||||
|
|
||||||
// shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
|
// shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
|
||||||
shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)
|
v.shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)
|
||||||
|
|
||||||
// anyHashPattern splits url containing SHA into parts
|
// anyHashPattern splits url containing SHA into parts
|
||||||
anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`)
|
v.anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`)
|
||||||
|
|
||||||
// comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash"
|
// comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash"
|
||||||
comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`)
|
v.comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`)
|
||||||
|
|
||||||
// fullURLPattern matches full URL like "mailto:...", "https://..." and "ssh+git://..."
|
// fullURLPattern matches full URL like "mailto:...", "https://..." and "ssh+git://..."
|
||||||
fullURLPattern = regexp.MustCompile(`^[a-z][-+\w]+:`)
|
v.fullURLPattern = regexp.MustCompile(`^[a-z][-+\w]+:`)
|
||||||
|
|
||||||
// emailRegex is definitely not perfect with edge cases,
|
// emailRegex is definitely not perfect with edge cases,
|
||||||
// it is still accepted by the CommonMark specification, as well as the HTML5 spec:
|
// it is still accepted by the CommonMark specification, as well as the HTML5 spec:
|
||||||
// http://spec.commonmark.org/0.28/#email-address
|
// http://spec.commonmark.org/0.28/#email-address
|
||||||
// https://html.spec.whatwg.org/multipage/input.html#e-mail-state-(type%3Demail)
|
// https://html.spec.whatwg.org/multipage/input.html#e-mail-state-(type%3Demail)
|
||||||
emailRegex = regexp.MustCompile("(?:\\s|^|\\(|\\[)([a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9]{2,}(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+)(?:\\s|$|\\)|\\]|;|,|\\?|!|\\.(\\s|$))")
|
v.emailRegex = regexp.MustCompile("(?:\\s|^|\\(|\\[)([a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9]{2,}(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+)(?:\\s|$|\\)|\\]|;|,|\\?|!|\\.(\\s|$))")
|
||||||
|
|
||||||
// blackfridayExtRegex is for blackfriday extensions create IDs like fn:user-content-footnote
|
// blackfridayExtRegex is for blackfriday extensions create IDs like fn:user-content-footnote
|
||||||
blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`)
|
v.blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`)
|
||||||
|
|
||||||
// emojiShortCodeRegex find emoji by alias like :smile:
|
// emojiShortCodeRegex find emoji by alias like :smile:
|
||||||
emojiShortCodeRegex = regexp.MustCompile(`:[-+\w]+:`)
|
v.emojiShortCodeRegex = regexp.MustCompile(`:[-+\w]+:`)
|
||||||
)
|
|
||||||
|
|
||||||
// CSS class for action keywords (e.g. "closes: #1")
|
// example: https://domain/org/repo/pulls/27#hash
|
||||||
const keywordClass = "issue-keyword"
|
v.issueFullPattern = regexp.MustCompile(`https?://(?:\S+/)[\w_.-]+/[\w_.-]+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#](\S+)?)?\b`)
|
||||||
|
|
||||||
|
// example: https://domain/org/repo/pulls/27/files#hash
|
||||||
|
v.filesChangedFullPattern = regexp.MustCompile(`https?://(?:\S+/)[\w_.-]+/[\w_.-]+/pulls/((?:\w{1,10}-)?[1-9][0-9]*)/files([\?|#](\S+)?)?\b`)
|
||||||
|
|
||||||
|
v.tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
|
||||||
|
v.nulCleaner = strings.NewReplacer("\000", "")
|
||||||
|
return v
|
||||||
|
})
|
||||||
|
|
||||||
// IsFullURLBytes reports whether link fits valid format.
|
// IsFullURLBytes reports whether link fits valid format.
|
||||||
func IsFullURLBytes(link []byte) bool {
|
func IsFullURLBytes(link []byte) bool {
|
||||||
return fullURLPattern.Match(link)
|
return globalVars().fullURLPattern.Match(link)
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsFullURLString(link string) bool {
|
func IsFullURLString(link string) bool {
|
||||||
return fullURLPattern.MatchString(link)
|
return globalVars().fullURLPattern.MatchString(link)
|
||||||
}
|
}
|
||||||
|
|
||||||
func IsNonEmptyRelativePath(link string) bool {
|
func IsNonEmptyRelativePath(link string) bool {
|
||||||
return link != "" && !IsFullURLString(link) && link[0] != '/' && link[0] != '?' && link[0] != '#'
|
return link != "" && !IsFullURLString(link) && link[0] != '/' && link[0] != '?' && link[0] != '#'
|
||||||
}
|
}
|
||||||
|
|
||||||
// regexp for full links to issues/pulls
|
|
||||||
var issueFullPattern *regexp.Regexp
|
|
||||||
|
|
||||||
// Once to prevent races
|
|
||||||
var issueFullPatternOnce sync.Once
|
|
||||||
|
|
||||||
// regexp for full links to hash comment in pull request files changed tab
|
|
||||||
var filesChangedFullPattern *regexp.Regexp
|
|
||||||
|
|
||||||
// Once to prevent races
|
|
||||||
var filesChangedFullPatternOnce sync.Once
|
|
||||||
|
|
||||||
func getIssueFullPattern() *regexp.Regexp {
|
|
||||||
issueFullPatternOnce.Do(func() {
|
|
||||||
// example: https://domain/org/repo/pulls/27#hash
|
|
||||||
issueFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) +
|
|
||||||
`[\w_.-]+/[\w_.-]+/(?:issues|pulls)/((?:\w{1,10}-)?[1-9][0-9]*)([\?|#](\S+)?)?\b`)
|
|
||||||
})
|
|
||||||
return issueFullPattern
|
|
||||||
}
|
|
||||||
|
|
||||||
func getFilesChangedFullPattern() *regexp.Regexp {
|
|
||||||
filesChangedFullPatternOnce.Do(func() {
|
|
||||||
// example: https://domain/org/repo/pulls/27/files#hash
|
|
||||||
filesChangedFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) +
|
|
||||||
`[\w_.-]+/[\w_.-]+/pulls/((?:\w{1,10}-)?[1-9][0-9]*)/files([\?|#](\S+)?)?\b`)
|
|
||||||
})
|
|
||||||
return filesChangedFullPattern
|
|
||||||
}
|
|
||||||
|
|
||||||
// CustomLinkURLSchemes allows for additional schemes to be detected when parsing links within text
|
// CustomLinkURLSchemes allows for additional schemes to be detected when parsing links within text
|
||||||
func CustomLinkURLSchemes(schemes []string) {
|
func CustomLinkURLSchemes(schemes []string) {
|
||||||
schemes = append(schemes, "http", "https")
|
schemes = append(schemes, "http", "https")
|
||||||
@ -197,13 +194,6 @@ func RenderCommitMessage(
|
|||||||
content string,
|
content string,
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
procs := commitMessageProcessors
|
procs := commitMessageProcessors
|
||||||
if ctx.DefaultLink != "" {
|
|
||||||
// we don't have to fear data races, because being
|
|
||||||
// commitMessageProcessors of fixed len and cap, every time we append
|
|
||||||
// something to it the slice is realloc+copied, so append always
|
|
||||||
// generates the slice ex-novo.
|
|
||||||
procs = append(procs, genDefaultLinkProcessor(ctx.DefaultLink))
|
|
||||||
}
|
|
||||||
return renderProcessString(ctx, procs, content)
|
return renderProcessString(ctx, procs, content)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -231,16 +221,17 @@ var emojiProcessors = []processor{
|
|||||||
// which changes every text node into a link to the passed default link.
|
// which changes every text node into a link to the passed default link.
|
||||||
func RenderCommitMessageSubject(
|
func RenderCommitMessageSubject(
|
||||||
ctx *RenderContext,
|
ctx *RenderContext,
|
||||||
content string,
|
defaultLink, content string,
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
procs := commitMessageSubjectProcessors
|
procs := slices.Clone(commitMessageSubjectProcessors)
|
||||||
if ctx.DefaultLink != "" {
|
procs = append(procs, func(ctx *RenderContext, node *html.Node) {
|
||||||
// we don't have to fear data races, because being
|
ch := &html.Node{Parent: node, Type: html.TextNode, Data: node.Data}
|
||||||
// commitMessageSubjectProcessors of fixed len and cap, every time we
|
node.Type = html.ElementNode
|
||||||
// append something to it the slice is realloc+copied, so append always
|
node.Data = "a"
|
||||||
// generates the slice ex-novo.
|
node.DataAtom = atom.A
|
||||||
procs = append(procs, genDefaultLinkProcessor(ctx.DefaultLink))
|
node.Attr = []html.Attribute{{Key: "href", Val: defaultLink}, {Key: "class", Val: "muted"}}
|
||||||
}
|
node.FirstChild, node.LastChild = ch, ch
|
||||||
|
})
|
||||||
return renderProcessString(ctx, procs, content)
|
return renderProcessString(ctx, procs, content)
|
||||||
}
|
}
|
||||||
|
|
||||||
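The default-link handling that used to live in genDefaultLinkProcessor is now an inline closure: it turns the current text node into an <a> element and reattaches the original text as its only child. A self-contained sketch of that node surgery with golang.org/x/net/html (the demo input and href are made up):

package main

import (
	"fmt"

	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)

// wrapTextNodeInLink rewrites a text node in place into <a href=...>text</a>,
// mirroring the closure appended in RenderCommitMessageSubject above.
func wrapTextNodeInLink(node *html.Node, href string) {
	ch := &html.Node{Parent: node, Type: html.TextNode, Data: node.Data}
	node.Type = html.ElementNode
	node.Data = "a"
	node.DataAtom = atom.A
	node.Attr = []html.Attribute{{Key: "href", Val: href}, {Key: "class", Val: "muted"}}
	node.FirstChild, node.LastChild = ch, ch
}

func main() {
	n := &html.Node{Type: html.TextNode, Data: "fix typo in docs"}
	wrapTextNodeInLink(n, "/owner/repo/commit/abcdef") // hypothetical default link
	fmt.Println(n.Data, "->", n.FirstChild.Data)       // a -> fix typo in docs
}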
@ -249,10 +240,8 @@ func RenderIssueTitle(
|
|||||||
ctx *RenderContext,
|
ctx *RenderContext,
|
||||||
title string,
|
title string,
|
||||||
) (string, error) {
|
) (string, error) {
|
||||||
|
// do not render other issue/commit links in an issue's title - which in most cases is already a link.
|
||||||
return renderProcessString(ctx, []processor{
|
return renderProcessString(ctx, []processor{
|
||||||
issueIndexPatternProcessor,
|
|
||||||
commitCrossReferencePatternProcessor,
|
|
||||||
hashCurrentPatternProcessor,
|
|
||||||
emojiShortCodeProcessor,
|
emojiShortCodeProcessor,
|
||||||
emojiProcessor,
|
emojiProcessor,
|
||||||
}, title)
|
}, title)
|
||||||
@ -288,11 +277,6 @@ func RenderEmoji(
|
|||||||
return renderProcessString(ctx, emojiProcessors, content)
|
return renderProcessString(ctx, emojiProcessors, content)
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
|
||||||
tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
|
|
||||||
nulCleaner = strings.NewReplacer("\000", "")
|
|
||||||
)
|
|
||||||
|
|
||||||
func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error {
|
func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error {
|
||||||
defer ctx.Cancel()
|
defer ctx.Cancel()
|
||||||
// FIXME: don't read all content to memory
|
// FIXME: don't read all content to memory
|
||||||
@ -306,7 +290,7 @@ func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output
|
|||||||
// prepend "<html><body>"
|
// prepend "<html><body>"
|
||||||
strings.NewReader("<html><body>"),
|
strings.NewReader("<html><body>"),
|
||||||
// Strip out nuls - they're always invalid
|
// Strip out nuls - they're always invalid
|
||||||
bytes.NewReader(tagCleaner.ReplaceAll([]byte(nulCleaner.Replace(string(rawHTML))), []byte("<$1"))),
|
bytes.NewReader(globalVars().tagCleaner.ReplaceAll([]byte(globalVars().nulCleaner.Replace(string(rawHTML))), []byte("<$1"))),
|
||||||
// close the tags
|
// close the tags
|
||||||
strings.NewReader("</body></html>"),
|
strings.NewReader("</body></html>"),
|
||||||
))
|
))
|
||||||
@ -353,7 +337,7 @@ func visitNode(ctx *RenderContext, procs []processor, node *html.Node) *html.Nod
|
|||||||
// Add user-content- to IDs and "#" links if they don't already have them
|
// Add user-content- to IDs and "#" links if they don't already have them
|
||||||
for idx, attr := range node.Attr {
|
for idx, attr := range node.Attr {
|
||||||
val := strings.TrimPrefix(attr.Val, "#")
|
val := strings.TrimPrefix(attr.Val, "#")
|
||||||
notHasPrefix := !(strings.HasPrefix(val, "user-content-") || blackfridayExtRegex.MatchString(val))
|
notHasPrefix := !(strings.HasPrefix(val, "user-content-") || globalVars().blackfridayExtRegex.MatchString(val))
|
||||||
|
|
||||||
if attr.Key == "id" && notHasPrefix {
|
if attr.Key == "id" && notHasPrefix {
|
||||||
node.Attr[idx].Val = "user-content-" + attr.Val
|
node.Attr[idx].Val = "user-content-" + attr.Val
|
||||||
|
@ -54,7 +54,7 @@ func createCodeLink(href, content, class string) *html.Node {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) {
|
func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) {
|
||||||
m := anyHashPattern.FindStringSubmatchIndex(s)
|
m := globalVars().anyHashPattern.FindStringSubmatchIndex(s)
|
||||||
if m == nil {
|
if m == nil {
|
||||||
return ret, false
|
return ret, false
|
||||||
}
|
}
|
||||||
@ -120,7 +120,7 @@ func comparePatternProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
node = node.NextSibling
|
node = node.NextSibling
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
m := comparePattern.FindStringSubmatchIndex(node.Data)
|
m := globalVars().comparePattern.FindStringSubmatchIndex(node.Data)
|
||||||
if m == nil || slices.Contains(m[:8], -1) { // ensure that every group (m[0]...m[7]) has a match
|
if m == nil || slices.Contains(m[:8], -1) { // ensure that every group (m[0]...m[7]) has a match
|
||||||
node = node.NextSibling
|
node = node.NextSibling
|
||||||
continue
|
continue
|
||||||
@ -173,7 +173,7 @@ func hashCurrentPatternProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
ctx.ShaExistCache = make(map[string]bool)
|
ctx.ShaExistCache = make(map[string]bool)
|
||||||
}
|
}
|
||||||
for node != nil && node != next && start < len(node.Data) {
|
for node != nil && node != next && start < len(node.Data) {
|
||||||
m := hashCurrentPattern.FindStringSubmatchIndex(node.Data[start:])
|
m := globalVars().hashCurrentPattern.FindStringSubmatchIndex(node.Data[start:])
|
||||||
if m == nil {
|
if m == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -9,7 +9,7 @@ import "golang.org/x/net/html"
|
|||||||
func emailAddressProcessor(ctx *RenderContext, node *html.Node) {
|
func emailAddressProcessor(ctx *RenderContext, node *html.Node) {
|
||||||
next := node.NextSibling
|
next := node.NextSibling
|
||||||
for node != nil && node != next {
|
for node != nil && node != next {
|
||||||
m := emailRegex.FindStringSubmatchIndex(node.Data)
|
m := globalVars().emailRegex.FindStringSubmatchIndex(node.Data)
|
||||||
if m == nil {
|
if m == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -62,7 +62,7 @@ func emojiShortCodeProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
start := 0
|
start := 0
|
||||||
next := node.NextSibling
|
next := node.NextSibling
|
||||||
for node != nil && node != next && start < len(node.Data) {
|
for node != nil && node != next && start < len(node.Data) {
|
||||||
m := emojiShortCodeRegex.FindStringSubmatchIndex(node.Data[start:])
|
m := globalVars().emojiShortCodeRegex.FindStringSubmatchIndex(node.Data[start:])
|
||||||
if m == nil {
|
if m == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
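The email and emoji processors above share one scan-loop shape. A sketch of that shape follows; real processors also track a start offset inside node.Data and do the node splitting inside handleMatch:

package sketch

import (
	"regexp"

	"golang.org/x/net/html"
)

// processTextNodes captures the loop shape shared by the processors above:
// walk the current text node and its following siblings, stopping at the
// node's original NextSibling, find the next regexp match, and let
// handleMatch perform whatever node surgery a concrete processor needs.
func processTextNodes(node *html.Node, re *regexp.Regexp, handleMatch func(n *html.Node, m []int)) {
	next := node.NextSibling
	for node != nil && node != next {
		m := re.FindStringSubmatchIndex(node.Data)
		if m == nil {
			return
		}
		handleMatch(node, m)
		node = node.NextSibling
	}
}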
|
@ -40,17 +40,19 @@ func link(href, class, contents string) string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var numericMetas = map[string]string{
|
var numericMetas = map[string]string{
|
||||||
"format": "https://someurl.com/{user}/{repo}/{index}",
|
"format": "https://someurl.com/{user}/{repo}/{index}",
|
||||||
"user": "someUser",
|
"user": "someUser",
|
||||||
"repo": "someRepo",
|
"repo": "someRepo",
|
||||||
"style": IssueNameStyleNumeric,
|
"style": IssueNameStyleNumeric,
|
||||||
|
"markupAllowShortIssuePattern": "true",
|
||||||
}
|
}
|
||||||
|
|
||||||
var alphanumericMetas = map[string]string{
|
var alphanumericMetas = map[string]string{
|
||||||
"format": "https://someurl.com/{user}/{repo}/{index}",
|
"format": "https://someurl.com/{user}/{repo}/{index}",
|
||||||
"user": "someUser",
|
"user": "someUser",
|
||||||
"repo": "someRepo",
|
"repo": "someRepo",
|
||||||
"style": IssueNameStyleAlphanumeric,
|
"style": IssueNameStyleAlphanumeric,
|
||||||
|
"markupAllowShortIssuePattern": "true",
|
||||||
}
|
}
|
||||||
|
|
||||||
var regexpMetas = map[string]string{
|
var regexpMetas = map[string]string{
|
||||||
@ -62,8 +64,15 @@ var regexpMetas = map[string]string{
|
|||||||
|
|
||||||
// these values should match the TestOrgRepo const above
|
// these values should match the TestOrgRepo const above
|
||||||
var localMetas = map[string]string{
|
var localMetas = map[string]string{
|
||||||
"user": "test-owner",
|
"user": "test-owner",
|
||||||
"repo": "test-repo",
|
"repo": "test-repo",
|
||||||
|
"markupAllowShortIssuePattern": "true",
|
||||||
|
}
|
||||||
|
|
||||||
|
var localWikiMetas = map[string]string{
|
||||||
|
"user": "test-owner",
|
||||||
|
"repo": "test-repo",
|
||||||
|
"markupContentMode": "wiki",
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRender_IssueIndexPattern(t *testing.T) {
|
func TestRender_IssueIndexPattern(t *testing.T) {
|
||||||
@ -124,9 +133,8 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
|
|||||||
}
|
}
|
||||||
expectedNil := fmt.Sprintf(expectedFmt, links...)
|
expectedNil := fmt.Sprintf(expectedFmt, links...)
|
||||||
testRenderIssueIndexPattern(t, s, expectedNil, &RenderContext{
|
testRenderIssueIndexPattern(t, s, expectedNil, &RenderContext{
|
||||||
Ctx: git.DefaultContext,
|
Ctx: git.DefaultContext,
|
||||||
Metas: localMetas,
|
Metas: localMetas,
|
||||||
ContentMode: RenderContentAsComment,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
class := "ref-issue"
|
class := "ref-issue"
|
||||||
@ -139,9 +147,8 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
|
|||||||
}
|
}
|
||||||
expectedNum := fmt.Sprintf(expectedFmt, links...)
|
expectedNum := fmt.Sprintf(expectedFmt, links...)
|
||||||
testRenderIssueIndexPattern(t, s, expectedNum, &RenderContext{
|
testRenderIssueIndexPattern(t, s, expectedNum, &RenderContext{
|
||||||
Ctx: git.DefaultContext,
|
Ctx: git.DefaultContext,
|
||||||
Metas: numericMetas,
|
Metas: numericMetas,
|
||||||
ContentMode: RenderContentAsComment,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -262,7 +269,7 @@ func TestRender_IssueIndexPattern5(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRender_IssueIndexPattern_Document(t *testing.T) {
|
func TestRender_IssueIndexPattern_NoShortPattern(t *testing.T) {
|
||||||
setting.AppURL = TestAppURL
|
setting.AppURL = TestAppURL
|
||||||
metas := map[string]string{
|
metas := map[string]string{
|
||||||
"format": "https://someurl.com/{user}/{repo}/{index}",
|
"format": "https://someurl.com/{user}/{repo}/{index}",
|
||||||
@ -285,6 +292,22 @@ func TestRender_IssueIndexPattern_Document(t *testing.T) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestRender_RenderIssueTitle(t *testing.T) {
|
||||||
|
setting.AppURL = TestAppURL
|
||||||
|
metas := map[string]string{
|
||||||
|
"format": "https://someurl.com/{user}/{repo}/{index}",
|
||||||
|
"user": "someUser",
|
||||||
|
"repo": "someRepo",
|
||||||
|
"style": IssueNameStyleNumeric,
|
||||||
|
}
|
||||||
|
actual, err := RenderIssueTitle(&RenderContext{
|
||||||
|
Ctx: git.DefaultContext,
|
||||||
|
Metas: metas,
|
||||||
|
}, "#1")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, "#1", actual)
|
||||||
|
}
|
||||||
|
|
||||||
func testRenderIssueIndexPattern(t *testing.T, input, expected string, ctx *RenderContext) {
|
func testRenderIssueIndexPattern(t *testing.T, input, expected string, ctx *RenderContext) {
|
||||||
ctx.Links.AbsolutePrefix = true
|
ctx.Links.AbsolutePrefix = true
|
||||||
if ctx.Links.Base == "" {
|
if ctx.Links.Base == "" {
|
||||||
@ -318,8 +341,7 @@ func TestRender_AutoLink(t *testing.T) {
|
|||||||
Links: Links{
|
Links: Links{
|
||||||
Base: TestRepoURL,
|
Base: TestRepoURL,
|
||||||
},
|
},
|
||||||
Metas: localMetas,
|
Metas: localWikiMetas,
|
||||||
ContentMode: RenderContentAsWiki,
|
|
||||||
}, strings.NewReader(input), &buffer)
|
}, strings.NewReader(input), &buffer)
|
||||||
assert.Equal(t, err, nil)
|
assert.Equal(t, err, nil)
|
||||||
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer.String()))
|
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer.String()))
|
||||||
@ -391,10 +413,10 @@ func TestRegExp_sha1CurrentPattern(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for _, testCase := range trueTestCases {
|
for _, testCase := range trueTestCases {
|
||||||
assert.True(t, hashCurrentPattern.MatchString(testCase))
|
assert.True(t, globalVars().hashCurrentPattern.MatchString(testCase))
|
||||||
}
|
}
|
||||||
for _, testCase := range falseTestCases {
|
for _, testCase := range falseTestCases {
|
||||||
assert.False(t, hashCurrentPattern.MatchString(testCase))
|
assert.False(t, globalVars().hashCurrentPattern.MatchString(testCase))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -474,9 +496,9 @@ func TestRegExp_shortLinkPattern(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for _, testCase := range trueTestCases {
|
for _, testCase := range trueTestCases {
|
||||||
assert.True(t, shortLinkPattern.MatchString(testCase))
|
assert.True(t, globalVars().shortLinkPattern.MatchString(testCase))
|
||||||
}
|
}
|
||||||
for _, testCase := range falseTestCases {
|
for _, testCase := range falseTestCases {
|
||||||
assert.False(t, shortLinkPattern.MatchString(testCase))
|
assert.False(t, globalVars().shortLinkPattern.MatchString(testCase))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -7,6 +7,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"code.gitea.io/gitea/modules/base"
|
"code.gitea.io/gitea/modules/base"
|
||||||
|
"code.gitea.io/gitea/modules/httplib"
|
||||||
"code.gitea.io/gitea/modules/log"
|
"code.gitea.io/gitea/modules/log"
|
||||||
"code.gitea.io/gitea/modules/references"
|
"code.gitea.io/gitea/modules/references"
|
||||||
"code.gitea.io/gitea/modules/regexplru"
|
"code.gitea.io/gitea/modules/regexplru"
|
||||||
@ -23,18 +24,21 @@ func fullIssuePatternProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
}
|
}
|
||||||
next := node.NextSibling
|
next := node.NextSibling
|
||||||
for node != nil && node != next {
|
for node != nil && node != next {
|
||||||
m := getIssueFullPattern().FindStringSubmatchIndex(node.Data)
|
m := globalVars().issueFullPattern.FindStringSubmatchIndex(node.Data)
|
||||||
if m == nil {
|
if m == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
mDiffView := getFilesChangedFullPattern().FindStringSubmatchIndex(node.Data)
|
mDiffView := globalVars().filesChangedFullPattern.FindStringSubmatchIndex(node.Data)
|
||||||
// leave it as it is if the link is from the "Files Changed" tab in the PR Diff View https://domain/org/repo/pulls/27/files
|
// leave it as it is if the link is from the "Files Changed" tab in the PR Diff View https://domain/org/repo/pulls/27/files
|
||||||
if mDiffView != nil {
|
if mDiffView != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
link := node.Data[m[0]:m[1]]
|
link := node.Data[m[0]:m[1]]
|
||||||
|
if !httplib.IsCurrentGiteaSiteURL(ctx.Ctx, link) {
|
||||||
|
return
|
||||||
|
}
|
||||||
text := "#" + node.Data[m[2]:m[3]]
|
text := "#" + node.Data[m[2]:m[3]]
|
||||||
// if m[4] and m[5] are not -1, then the link is to a comment
|
// if m[4] and m[5] are not -1, then the link is to a comment
|
||||||
// indicate that in the text by appending (comment)
|
// indicate that in the text by appending (comment)
|
||||||
@ -67,8 +71,10 @@ func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// crossLinkOnly if not comment and not wiki
|
// crossLinkOnly: do not parse "#123", only parse "owner/repo#123"
|
||||||
crossLinkOnly := ctx.ContentMode != RenderContentAsTitle && ctx.ContentMode != RenderContentAsComment && ctx.ContentMode != RenderContentAsWiki
|
// if there is no repo in the context, then the "#123" format can't be parsed
|
||||||
|
// old logic: crossLinkOnly := ctx.Metas["mode"] == "document" && !ctx.IsWiki
|
||||||
|
crossLinkOnly := ctx.Metas["markupAllowShortIssuePattern"] != "true"
|
||||||
|
|
||||||
var (
|
var (
|
||||||
found bool
|
found bool
|
||||||
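With this change the short "#123" form is gated by a render meta rather than the content mode. A minimal sketch of how a caller could opt in; composeMetas is a hypothetical helper, and the map keys are the ones used by the tests in this diff:

// composeMetas is a hypothetical helper showing how a caller opts in to the
// short "#123" form; the keys match the metas used by the tests in this diff.
func composeMetas(owner, repo string) map[string]string {
	return map[string]string{
		"user":                         owner,
		"repo":                         repo,
		"markupAllowShortIssuePattern": "true", // omit it to only link "owner/repo#123"
	}
}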
|
@ -20,9 +20,9 @@ func ResolveLink(ctx *RenderContext, link, userContentAnchorPrefix string) (resu
|
|||||||
isAnchorFragment := link != "" && link[0] == '#'
|
isAnchorFragment := link != "" && link[0] == '#'
|
||||||
if !isAnchorFragment && !IsFullURLString(link) {
|
if !isAnchorFragment && !IsFullURLString(link) {
|
||||||
linkBase := ctx.Links.Base
|
linkBase := ctx.Links.Base
|
||||||
if ctx.ContentMode == RenderContentAsWiki {
|
if ctx.IsMarkupContentWiki() {
|
||||||
// no need to check if the link should be resolved as a wiki link or a wiki raw link
|
// no need to check if the link should be resolved as a wiki link or a wiki raw link
|
||||||
// just use wiki link here and it will be redirected to a wiki raw link if necessary
|
// just use wiki link here, and it will be redirected to a wiki raw link if necessary
|
||||||
linkBase = ctx.Links.WikiLink()
|
linkBase = ctx.Links.WikiLink()
|
||||||
} else if ctx.Links.BranchPath != "" || ctx.Links.TreePath != "" {
|
} else if ctx.Links.BranchPath != "" || ctx.Links.TreePath != "" {
|
||||||
// if there is no BranchPath, then the link will be something like "/owner/repo/src/{the-file-path}"
|
// if there is no BranchPath, then the link will be something like "/owner/repo/src/{the-file-path}"
|
||||||
@ -40,7 +40,7 @@ func ResolveLink(ctx *RenderContext, link, userContentAnchorPrefix string) (resu
|
|||||||
func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
||||||
next := node.NextSibling
|
next := node.NextSibling
|
||||||
for node != nil && node != next {
|
for node != nil && node != next {
|
||||||
m := shortLinkPattern.FindStringSubmatchIndex(node.Data)
|
m := globalVars().shortLinkPattern.FindStringSubmatchIndex(node.Data)
|
||||||
if m == nil {
|
if m == nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@ -147,7 +147,7 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
}
|
}
|
||||||
if image {
|
if image {
|
||||||
if !absoluteLink {
|
if !absoluteLink {
|
||||||
link = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.ContentMode == RenderContentAsWiki), link)
|
link = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()), link)
|
||||||
}
|
}
|
||||||
title := props["title"]
|
title := props["title"]
|
||||||
if title == "" {
|
if title == "" {
|
||||||
@ -200,25 +200,6 @@ func linkProcessor(ctx *RenderContext, node *html.Node) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func genDefaultLinkProcessor(defaultLink string) processor {
|
|
||||||
return func(ctx *RenderContext, node *html.Node) {
|
|
||||||
ch := &html.Node{
|
|
||||||
Parent: node,
|
|
||||||
Type: html.TextNode,
|
|
||||||
Data: node.Data,
|
|
||||||
}
|
|
||||||
|
|
||||||
node.Type = html.ElementNode
|
|
||||||
node.Data = "a"
|
|
||||||
node.DataAtom = atom.A
|
|
||||||
node.Attr = []html.Attribute{
|
|
||||||
{Key: "href", Val: defaultLink},
|
|
||||||
{Key: "class", Val: "default-link muted"},
|
|
||||||
}
|
|
||||||
node.FirstChild, node.LastChild = ch, ch
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// descriptionLinkProcessor creates links for DescriptionHTML
|
// descriptionLinkProcessor creates links for DescriptionHTML
|
||||||
func descriptionLinkProcessor(ctx *RenderContext, node *html.Node) {
|
func descriptionLinkProcessor(ctx *RenderContext, node *html.Node) {
|
||||||
next := node.NextSibling
|
next := node.NextSibling
|
||||||
|
@ -17,7 +17,7 @@ func visitNodeImg(ctx *RenderContext, img *html.Node) (next *html.Node) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if IsNonEmptyRelativePath(attr.Val) {
|
if IsNonEmptyRelativePath(attr.Val) {
|
||||||
attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.ContentMode == RenderContentAsWiki), attr.Val)
|
attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()), attr.Val)
|
||||||
|
|
||||||
// By default, the "<img>" tag should also be clickable,
|
// By default, the "<img>" tag should also be clickable,
|
||||||
// because the frontend uses `<img>` to paste the re-scaled image into the markdown,
|
// because the frontend uses `<img>` to paste the re-scaled image into the markdown,
|
||||||
@ -53,7 +53,7 @@ func visitNodeVideo(ctx *RenderContext, node *html.Node) (next *html.Node) {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if IsNonEmptyRelativePath(attr.Val) {
|
if IsNonEmptyRelativePath(attr.Val) {
|
||||||
attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.ContentMode == RenderContentAsWiki), attr.Val)
|
attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()), attr.Val)
|
||||||
}
|
}
|
||||||
attr.Val = camoHandleLink(attr.Val)
|
attr.Val = camoHandleLink(attr.Val)
|
||||||
node.Attr[i] = attr
|
node.Attr[i] = attr
|
||||||
|
@ -27,6 +27,11 @@ var (
|
|||||||
"user": testRepoOwnerName,
|
"user": testRepoOwnerName,
|
||||||
"repo": testRepoName,
|
"repo": testRepoName,
|
||||||
}
|
}
|
||||||
|
localWikiMetas = map[string]string{
|
||||||
|
"user": testRepoOwnerName,
|
||||||
|
"repo": testRepoName,
|
||||||
|
"markupContentMode": "wiki",
|
||||||
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
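These test metas replace ContentMode with a "markupContentMode" key, so IsMarkupContentWiki presumably just inspects Metas. A hedged sketch of such an accessor, assuming the existing RenderContext type and field names:

// Sketch only: an accessor like this would explain the calls in this diff,
// reading the "markupContentMode" meta set by localWikiMetas above.
func (ctx *RenderContext) IsMarkupContentWiki() bool {
	return ctx.Metas != nil && ctx.Metas["markupContentMode"] == "wiki"
}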
type mockRepo struct {
|
type mockRepo struct {
|
||||||
@ -413,8 +418,7 @@ func TestRender_ShortLinks(t *testing.T) {
|
|||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: markup.TestRepoURL,
|
Base: markup.TestRepoURL,
|
||||||
},
|
},
|
||||||
Metas: localMetas,
|
Metas: localWikiMetas,
|
||||||
ContentMode: markup.RenderContentAsWiki,
|
|
||||||
}, input)
|
}, input)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
|
assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
|
||||||
@ -526,10 +530,9 @@ func TestRender_ShortLinks(t *testing.T) {
|
|||||||
func TestRender_RelativeMedias(t *testing.T) {
|
func TestRender_RelativeMedias(t *testing.T) {
|
||||||
render := func(input string, isWiki bool, links markup.Links) string {
|
render := func(input string, isWiki bool, links markup.Links) string {
|
||||||
buffer, err := markdown.RenderString(&markup.RenderContext{
|
buffer, err := markdown.RenderString(&markup.RenderContext{
|
||||||
Ctx: git.DefaultContext,
|
Ctx: git.DefaultContext,
|
||||||
Links: links,
|
Links: links,
|
||||||
Metas: localMetas,
|
Metas: util.Iif(isWiki, localWikiMetas, localMetas),
|
||||||
ContentMode: util.Iif(isWiki, markup.RenderContentAsWiki, markup.RenderContentAsComment),
|
|
||||||
}, input)
|
}, input)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
return strings.TrimSpace(string(buffer))
|
return strings.TrimSpace(string(buffer))
|
||||||
|
@ -75,11 +75,12 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
|
|||||||
// TODO: this was quite an unclear part; old code: `if metas["mode"] != "document" { use comment line break setting }`
|
// TODO: this was quite an unclear part; old code: `if metas["mode"] != "document" { use comment line break setting }`
|
||||||
// many places render non-comment contents without mode=document, so those contents also use the comment hard line break setting,
|
// many places render non-comment contents without mode=document, so those contents also use the comment hard line break setting,
|
||||||
// especially in many tests.
|
// especially in many tests.
|
||||||
|
markdownLineBreakStyle := ctx.Metas["markdownLineBreakStyle"]
|
||||||
if markup.RenderBehaviorForTesting.ForceHardLineBreak {
|
if markup.RenderBehaviorForTesting.ForceHardLineBreak {
|
||||||
v.SetHardLineBreak(true)
|
v.SetHardLineBreak(true)
|
||||||
} else if ctx.ContentMode == markup.RenderContentAsComment {
|
} else if markdownLineBreakStyle == "comment" {
|
||||||
v.SetHardLineBreak(setting.Markdown.EnableHardLineBreakInComments)
|
v.SetHardLineBreak(setting.Markdown.EnableHardLineBreakInComments)
|
||||||
} else {
|
} else if markdownLineBreakStyle == "document" {
|
||||||
v.SetHardLineBreak(setting.Markdown.EnableHardLineBreakInDocuments)
|
v.SetHardLineBreak(setting.Markdown.EnableHardLineBreakInDocuments)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -37,6 +37,12 @@ var localMetas = map[string]string{
|
|||||||
"repo": testRepoName,
|
"repo": testRepoName,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var localWikiMetas = map[string]string{
|
||||||
|
"user": testRepoOwnerName,
|
||||||
|
"repo": testRepoName,
|
||||||
|
"markupContentMode": "wiki",
|
||||||
|
}
|
||||||
|
|
||||||
type mockRepo struct {
|
type mockRepo struct {
|
||||||
OwnerName string
|
OwnerName string
|
||||||
RepoName string
|
RepoName string
|
||||||
@ -75,7 +81,7 @@ func TestRender_StandardLinks(t *testing.T) {
|
|||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: FullURL,
|
Base: FullURL,
|
||||||
},
|
},
|
||||||
ContentMode: markup.RenderContentAsWiki,
|
Metas: localWikiMetas,
|
||||||
}, input)
|
}, input)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
|
assert.Equal(t, strings.TrimSpace(expectedWiki), strings.TrimSpace(string(buffer)))
|
||||||
@ -307,9 +313,8 @@ func TestTotal_RenderWiki(t *testing.T) {
|
|||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: FullURL,
|
Base: FullURL,
|
||||||
},
|
},
|
||||||
Repo: newMockRepo(testRepoOwnerName, testRepoName),
|
Repo: newMockRepo(testRepoOwnerName, testRepoName),
|
||||||
Metas: localMetas,
|
Metas: localWikiMetas,
|
||||||
ContentMode: markup.RenderContentAsWiki,
|
|
||||||
}, sameCases[i])
|
}, sameCases[i])
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, answers[i], string(line))
|
assert.Equal(t, answers[i], string(line))
|
||||||
@ -334,7 +339,7 @@ func TestTotal_RenderWiki(t *testing.T) {
|
|||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: FullURL,
|
Base: FullURL,
|
||||||
},
|
},
|
||||||
ContentMode: markup.RenderContentAsWiki,
|
Metas: localWikiMetas,
|
||||||
}, testCases[i])
|
}, testCases[i])
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.EqualValues(t, testCases[i+1], string(line))
|
assert.EqualValues(t, testCases[i+1], string(line))
|
||||||
@ -657,9 +662,9 @@ mail@domain.com
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/image.jpg" rel="nofollow"><img src="/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/image.jpg" rel="nofollow"><img src="/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
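The updated expectations render compare and commit URLs as abbreviated SHAs inside <code> tags, e.g. 88fc37a3c0...12fc37a3c0 (hash). A sketch of how such link text could be built, assuming the 10-character short form visible in the output; compareLinkText is a hypothetical helper, not the function Gitea actually uses:

// compareLinkText is a hypothetical helper reproducing the
// "88fc37a3c0...12fc37a3c0 (hash)" text from the expectations above.
func compareLinkText(oldSha, newSha string, hasFragment bool) string {
	short := func(s string) string {
		if len(s) > 10 {
			return s[:10]
		}
		return s
	}
	text := short(oldSha) + "..." + short(newSha)
	if hasFragment {
		text += " (hash)"
	}
	return text
}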
@ -684,9 +689,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/wiki/raw/image.jpg" rel="nofollow"><img src="/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/wiki/raw/image.jpg" rel="nofollow"><img src="/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -713,9 +718,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="https://gitea.io/image.jpg" rel="nofollow"><img src="https://gitea.io/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="https://gitea.io/image.jpg" rel="nofollow"><img src="https://gitea.io/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -742,9 +747,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="https://gitea.io/wiki/raw/image.jpg" rel="nofollow"><img src="https://gitea.io/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="https://gitea.io/wiki/raw/image.jpg" rel="nofollow"><img src="https://gitea.io/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -771,9 +776,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/relative/path/image.jpg" rel="nofollow"><img src="/relative/path/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/relative/path/image.jpg" rel="nofollow"><img src="/relative/path/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -800,9 +805,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -830,9 +835,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/user/repo/media/branch/main/image.jpg" rel="nofollow"><img src="/user/repo/media/branch/main/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/user/repo/media/branch/main/image.jpg" rel="nofollow"><img src="/user/repo/media/branch/main/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -860,9 +865,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -890,9 +895,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/user/repo/image.jpg" rel="nofollow"><img src="/user/repo/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/user/repo/image.jpg" rel="nofollow"><img src="/user/repo/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -920,9 +925,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -951,9 +956,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/user/repo/media/branch/main/sub/folder/image.jpg" rel="nofollow"><img src="/user/repo/media/branch/main/sub/folder/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/user/repo/media/branch/main/sub/folder/image.jpg" rel="nofollow"><img src="/user/repo/media/branch/main/sub/folder/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -982,9 +987,9 @@ space</p>
|
|||||||
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
<a href="https://example.com/image.jpg" target="_blank" rel="nofollow noopener"><img src="https://example.com/image.jpg" alt="remote image"/></a><br/>
|
||||||
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
<a href="/relative/path/wiki/raw/image.jpg" rel="nofollow"><img src="/relative/path/wiki/raw/image.jpg" title="local image" alt="local image"/></a><br/>
|
||||||
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
<a href="https://example.com/image.jpg" rel="nofollow"><img src="https://example.com/image.jpg" title="remote link" alt="remote link"/></a><br/>
|
||||||
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow">https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash</a><br/>
|
<a href="https://example.com/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash" rel="nofollow"><code>88fc37a3c0...12fc37a3c0 (hash)</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb pare<br/>
|
||||||
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow">https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb</a><br/>
|
<a href="https://example.com/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb" rel="nofollow"><code>88fc37a3c0</code></a><br/>
|
||||||
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit<br/>
|
||||||
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
<span class="emoji" aria-label="thumbs up">👍</span><br/>
|
||||||
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
<a href="mailto:mail@domain.com" rel="nofollow">mail@domain.com</a><br/>
|
||||||
@ -999,9 +1004,9 @@ space</p>
|
|||||||
defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableInternalAttributes, true)()
|
defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableInternalAttributes, true)()
|
||||||
for i, c := range cases {
|
for i, c := range cases {
|
||||||
result, err := markdown.RenderString(&markup.RenderContext{
|
result, err := markdown.RenderString(&markup.RenderContext{
|
||||||
Ctx: context.Background(),
|
Ctx: context.Background(),
|
||||||
Links: c.Links,
|
Links: c.Links,
|
||||||
ContentMode: util.Iif(c.IsWiki, markup.RenderContentAsWiki, markup.RenderContentAsDefault),
|
Metas: util.Iif(c.IsWiki, map[string]string{"markupContentMode": "wiki"}, map[string]string{}),
|
||||||
}, input)
|
}, input)
|
||||||
assert.NoError(t, err, "Unexpected error in testcase: %v", i)
|
assert.NoError(t, err, "Unexpected error in testcase: %v", i)
|
||||||
assert.Equal(t, c.Expected, string(result), "Unexpected result in testcase %v", i)
|
assert.Equal(t, c.Expected, string(result), "Unexpected result in testcase %v", i)
|
||||||
|
@ -21,7 +21,7 @@ func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image)
|
|||||||
// Check if the destination is a real link
|
// Check if the destination is a real link
|
||||||
if len(v.Destination) > 0 && !markup.IsFullURLBytes(v.Destination) {
|
if len(v.Destination) > 0 && !markup.IsFullURLBytes(v.Destination) {
|
||||||
v.Destination = []byte(giteautil.URLJoin(
|
v.Destination = []byte(giteautil.URLJoin(
|
||||||
ctx.Links.ResolveMediaLink(ctx.ContentMode == markup.RenderContentAsWiki),
|
ctx.Links.ResolveMediaLink(ctx.IsMarkupContentWiki()),
|
||||||
strings.TrimLeft(string(v.Destination), "/"),
|
strings.TrimLeft(string(v.Destination), "/"),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
@ -144,15 +144,14 @@ func (r *Writer) resolveLink(kind, link string) string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
base := r.Ctx.Links.Base
|
base := r.Ctx.Links.Base
|
||||||
isWiki := r.Ctx.ContentMode == markup.RenderContentAsWiki
|
if r.Ctx.IsMarkupContentWiki() {
|
||||||
if isWiki {
|
|
||||||
base = r.Ctx.Links.WikiLink()
|
base = r.Ctx.Links.WikiLink()
|
||||||
} else if r.Ctx.Links.HasBranchInfo() {
|
} else if r.Ctx.Links.HasBranchInfo() {
|
||||||
base = r.Ctx.Links.SrcLink()
|
base = r.Ctx.Links.SrcLink()
|
||||||
}
|
}
|
||||||
|
|
||||||
if kind == "image" || kind == "video" {
|
if kind == "image" || kind == "video" {
|
||||||
base = r.Ctx.Links.ResolveMediaLink(isWiki)
|
base = r.Ctx.Links.ResolveMediaLink(r.Ctx.IsMarkupContentWiki())
|
||||||
}
|
}
|
||||||
|
|
||||||
link = util.URLJoin(base, link)
|
link = util.URLJoin(base, link)
|
||||||
|
@ -27,7 +27,7 @@ func TestRender_StandardLinks(t *testing.T) {
|
|||||||
Base: "/relative-path",
|
Base: "/relative-path",
|
||||||
BranchPath: "branch/main",
|
BranchPath: "branch/main",
|
||||||
},
|
},
|
||||||
ContentMode: util.Iif(isWiki, markup.RenderContentAsWiki, markup.RenderContentAsDefault),
|
Metas: map[string]string{"markupContentMode": util.Iif(isWiki, "wiki", "")},
|
||||||
}, input)
|
}, input)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
|
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(buffer))
|
||||||
|
@ -27,15 +27,6 @@ const (
|
|||||||
RenderMetaAsTable RenderMetaMode = "table"
|
RenderMetaAsTable RenderMetaMode = "table"
|
||||||
)
|
)
|
||||||
|
|
||||||
type RenderContentMode string
|
|
||||||
|
|
||||||
const (
|
|
||||||
RenderContentAsDefault RenderContentMode = "" // empty means "default", no special handling, maybe just a simple "document"
|
|
||||||
RenderContentAsComment RenderContentMode = "comment"
|
|
||||||
RenderContentAsTitle RenderContentMode = "title"
|
|
||||||
RenderContentAsWiki RenderContentMode = "wiki"
|
|
||||||
)
|
|
||||||
|
|
||||||
var RenderBehaviorForTesting struct {
|
var RenderBehaviorForTesting struct {
|
||||||
// Markdown line break rendering has 2 default behaviors:
|
// Markdown line break rendering has 2 default behaviors:
|
||||||
// * Use hard: replace "\n" with "<br>" for comments, setting.Markdown.EnableHardLineBreakInComments=true
|
// * Use hard: replace "\n" with "<br>" for comments, setting.Markdown.EnableHardLineBreakInComments=true
|
||||||
@ -59,12 +50,14 @@ type RenderContext struct {
|
|||||||
// for file mode, it could be left as empty, and will be detected by file extension in RelativePath
|
// for file mode, it could be left as empty, and will be detected by file extension in RelativePath
|
||||||
MarkupType string
|
MarkupType string
|
||||||
|
|
||||||
// what the content will be used for: eg: for comment or for wiki? or just render a file?
|
Links Links // special link references for rendering, especially when there is a branch/tree path
|
||||||
ContentMode RenderContentMode
|
|
||||||
|
// user&repo, format&style®exp (for external issue pattern), teams&org (for mention)
|
||||||
|
// BranchNameSubURL (for iframe&asciicast)
|
||||||
|
// markupAllowShortIssuePattern, markupContentMode (wiki)
|
||||||
|
// markdownLineBreakStyle (comment, document)
|
||||||
|
Metas map[string]string
|
||||||
|
|
||||||
Links Links // special link references for rendering, especially when there is a branch/tree path
|
|
||||||
Metas map[string]string // user&repo, format&style®exp (for external issue pattern), teams&org (for mention), BranchNameSubURL(for iframe&asciicast)
|
|
||||||
DefaultLink string // TODO: need to figure out
|
|
||||||
GitRepo *git.Repository
|
GitRepo *git.Repository
|
||||||
Repo gitrepo.Repository
|
Repo gitrepo.Repository
|
||||||
ShaExistCache map[string]bool
|
ShaExistCache map[string]bool
|
||||||
@ -102,6 +95,10 @@ func (ctx *RenderContext) AddCancel(fn func()) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (ctx *RenderContext) IsMarkupContentWiki() bool {
|
||||||
|
return ctx.Metas != nil && ctx.Metas["markupContentMode"] == "wiki"
|
||||||
|
}
|
||||||
|
|
||||||
// Render renders markup file to HTML with all specific handling stuff.
|
// Render renders markup file to HTML with all specific handling stuff.
|
||||||
func Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
|
func Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
|
||||||
if ctx.MarkupType == "" && ctx.RelativePath != "" {
|
if ctx.MarkupType == "" && ctx.RelativePath != "" {
|
||||||
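The hunks above drop the dedicated ContentMode field: whether content is rendered as wiki is now just an entry in the generic Metas map, and IsMarkupContentWiki reads it back. A minimal stand-alone sketch of that pattern, with hypothetical names, assuming nothing beyond what the diff shows:

package main

import "fmt"

type renderContext struct {
	Metas map[string]string
}

// isWiki mirrors IsMarkupContentWiki: the wiki flag lives in Metas
// under "markupContentMode" instead of a dedicated ContentMode field.
func (ctx *renderContext) isWiki() bool {
	return ctx.Metas != nil && ctx.Metas["markupContentMode"] == "wiki"
}

func main() {
	wiki := &renderContext{Metas: map[string]string{"markupContentMode": "wiki"}}
	plain := &renderContext{Metas: map[string]string{"markdownLineBreakStyle": "document"}}
	fmt.Println(wiki.isWiki(), plain.isWiki()) // true false
}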
@ -232,3 +229,7 @@ func Init(ph *ProcessorHelper) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func ComposeSimpleDocumentMetas() map[string]string {
|
||||||
|
return map[string]string{"markdownLineBreakStyle": "document"}
|
||||||
|
}
|
||||||
|
@ -10,7 +10,7 @@ import (
|
|||||||
|
|
||||||
type Links struct {
|
type Links struct {
|
||||||
AbsolutePrefix bool // add absolute URL prefix to auto-resolved links like "#issue", but not for pre-provided links and medias
|
AbsolutePrefix bool // add absolute URL prefix to auto-resolved links like "#issue", but not for pre-provided links and medias
|
||||||
Base string // base prefix for pre-provided links and medias (images, videos)
|
Base string // base prefix for pre-provided links and medias (images, videos), usually it is the path to the repo
|
||||||
BranchPath string // actually it is the ref path, eg: "branch/features/feat-12", "tag/v1.0"
|
BranchPath string // actually it is the ref path, eg: "branch/features/feat-12", "tag/v1.0"
|
||||||
TreePath string // the dir of the file, eg: "doc" if the file "doc/CHANGE.md" is being rendered
|
TreePath string // the dir of the file, eg: "doc" if the file "doc/CHANGE.md" is being rendered
|
||||||
}
|
}
|
||||||
|
@ -5,6 +5,7 @@ package queue
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"errors"
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@ -32,6 +33,7 @@ type ManagedWorkerPoolQueue interface {
|
|||||||
|
|
||||||
// FlushWithContext tries to make the handler process all items in the queue synchronously.
|
// FlushWithContext tries to make the handler process all items in the queue synchronously.
|
||||||
// It is for testing purpose only. It's not designed to be used in a cluster.
|
// It is for testing purpose only. It's not designed to be used in a cluster.
|
||||||
|
// Negative timeout means discarding all items in the queue.
|
||||||
FlushWithContext(ctx context.Context, timeout time.Duration) error
|
FlushWithContext(ctx context.Context, timeout time.Duration) error
|
||||||
|
|
||||||
// RemoveAllItems removes all items in the base queue (on-the-fly items are not affected)
|
// RemoveAllItems removes all items in the base queue (on-the-fly items are not affected)
|
||||||
@ -76,15 +78,16 @@ func (m *Manager) ManagedQueues() map[int64]ManagedWorkerPoolQueue {
|
|||||||
|
|
||||||
// FlushAll tries to make all managed queues process all items synchronously, until timeout or the queue is empty.
|
// FlushAll tries to make all managed queues process all items synchronously, until timeout or the queue is empty.
|
||||||
// It is for testing purpose only. It's not designed to be used in a cluster.
|
// It is for testing purpose only. It's not designed to be used in a cluster.
|
||||||
|
// Negative timeout means discarding all items in the queue.
|
||||||
func (m *Manager) FlushAll(ctx context.Context, timeout time.Duration) error {
|
func (m *Manager) FlushAll(ctx context.Context, timeout time.Duration) error {
|
||||||
var finalErr error
|
var finalErrors []error
|
||||||
qs := m.ManagedQueues()
|
qs := m.ManagedQueues()
|
||||||
for _, q := range qs {
|
for _, q := range qs {
|
||||||
if err := q.FlushWithContext(ctx, timeout); err != nil {
|
if err := q.FlushWithContext(ctx, timeout); err != nil {
|
||||||
finalErr = err // TODO: in Go 1.20: errors.Join
|
finalErrors = append(finalErrors, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return finalErr
|
return errors.Join(finalErrors...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// CreateSimpleQueue creates a simple queue from global setting config provider by name
|
// CreateSimpleQueue creates a simple queue from global setting config provider by name
|
||||||
|
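FlushAll now collects every per-queue failure instead of keeping only the last one, then joins them with errors.Join (Go 1.20+). A small self-contained sketch of the same pattern; flushOne is a hypothetical stand-in for FlushWithContext on a single queue:

package main

import (
	"errors"
	"fmt"
)

// flushOne stands in for flushing one managed queue.
func flushOne(name string) error {
	if name == "bad" {
		return fmt.Errorf("queue %q: flush failed", name)
	}
	return nil
}

func main() {
	var finalErrors []error
	for _, name := range []string{"ok", "bad", "also-ok"} {
		if err := flushOne(name); err != nil {
			finalErrors = append(finalErrors, err) // collect instead of overwrite
		}
	}
	// errors.Join returns nil for an empty slice, so the no-error case is unchanged.
	fmt.Println(errors.Join(finalErrors...))
}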
@ -23,7 +23,7 @@ var (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
unhandledItemRequeueDuration.Store(int64(5 * time.Second))
|
unhandledItemRequeueDuration.Store(int64(time.Second))
|
||||||
}
|
}
|
||||||
|
|
||||||
// workerGroup is a group of workers to work with a WorkerPoolQueue
|
// workerGroup is a group of workers to work with a WorkerPoolQueue
|
||||||
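The back-off duration above is kept in an atomic integer so it can be read and updated without a lock; the stored int64 is converted back to a time.Duration at the point of use. A minimal sketch of that pattern (names are illustrative):

package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

var requeueBackoff atomic.Int64 // nanoseconds, read and written without a lock

func main() {
	requeueBackoff.Store(int64(time.Second))
	// at the point of use, convert the raw int64 back into a duration
	d := time.Duration(requeueBackoff.Load())
	fmt.Println("back-off:", d)
}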
@ -104,7 +104,12 @@ func (q *WorkerPoolQueue[T]) doWorkerHandle(batch []T) {
|
|||||||
// if none of the items were handled, it should back-off for a few seconds
|
// if none of the items were handled, it should back-off for a few seconds
|
||||||
// in this case the handler (eg: document indexer) may have encountered some errors/failures
|
// in this case the handler (eg: document indexer) may have encountered some errors/failures
|
||||||
if len(unhandled) == len(batch) && unhandledItemRequeueDuration.Load() != 0 {
|
if len(unhandled) == len(batch) && unhandledItemRequeueDuration.Load() != 0 {
|
||||||
|
if q.isFlushing.Load() {
|
||||||
|
return // do not requeue items while flushing: the whole batch just failed, so requeuing it would only keep failing.
|
||||||
|
}
|
||||||
log.Error("Queue %q failed to handle batch of %d items, backoff for a few seconds", q.GetName(), len(batch))
|
log.Error("Queue %q failed to handle batch of %d items, backoff for a few seconds", q.GetName(), len(batch))
|
||||||
|
// TODO: ideally it shouldn't "sleep" here (blocks the worker, then blocks flush).
|
||||||
|
// It could debounce the requeue operation, and try to requeue the items in the future.
|
||||||
select {
|
select {
|
||||||
case <-q.ctxRun.Done():
|
case <-q.ctxRun.Done():
|
||||||
case <-time.After(time.Duration(unhandledItemRequeueDuration.Load())):
|
case <-time.After(time.Duration(unhandledItemRequeueDuration.Load())):
|
||||||
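The early return above is guarded by a new atomic flag that doFlush holds for the duration of the flush. A stripped-down sketch of that guard, with hypothetical names:

package main

import (
	"fmt"
	"sync/atomic"
)

type queue struct {
	isFlushing atomic.Bool
}

// requeueUnhandled skips requeueing while a flush is running: every item in
// the batch just failed, so putting them back would only keep the flush busy.
func (q *queue) requeueUnhandled(items []string) {
	if q.isFlushing.Load() {
		return
	}
	fmt.Println("requeueing", len(items), "items")
}

func (q *queue) flush() {
	q.isFlushing.Store(true)
	defer q.isFlushing.Store(false)
	q.requeueUnhandled([]string{"a", "b"}) // skipped: flush in progress
}

func main() {
	q := &queue{}
	q.flush()
	q.requeueUnhandled([]string{"a"}) // requeued normally
}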
@ -193,19 +198,37 @@ func (q *WorkerPoolQueue[T]) doStartNewWorker(wp *workerGroup[T]) {
|
|||||||
// doFlush flushes the queue: it tries to read all items from the queue and handles them.
|
// doFlush flushes the queue: it tries to read all items from the queue and handles them.
|
||||||
// It is for testing purpose only. It's not designed to work for a cluster.
|
// It is for testing purpose only. It's not designed to work for a cluster.
|
||||||
func (q *WorkerPoolQueue[T]) doFlush(wg *workerGroup[T], flush flushType) {
|
func (q *WorkerPoolQueue[T]) doFlush(wg *workerGroup[T], flush flushType) {
|
||||||
|
q.isFlushing.Store(true)
|
||||||
|
defer q.isFlushing.Store(false)
|
||||||
|
|
||||||
log.Debug("Queue %q starts flushing", q.GetName())
|
log.Debug("Queue %q starts flushing", q.GetName())
|
||||||
defer log.Debug("Queue %q finishes flushing", q.GetName())
|
defer log.Debug("Queue %q finishes flushing", q.GetName())
|
||||||
|
|
||||||
// stop all workers, and prepare a new worker context to start new workers
|
// stop all workers, and prepare a new worker context to start new workers
|
||||||
|
|
||||||
wg.ctxWorkerCancel()
|
wg.ctxWorkerCancel()
|
||||||
wg.wg.Wait()
|
wg.wg.Wait()
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
close(flush)
|
close(flush.c)
|
||||||
wg.doPrepareWorkerContext()
|
wg.doPrepareWorkerContext()
|
||||||
}()
|
}()
|
||||||
|
|
||||||
|
if flush.timeout < 0 {
|
||||||
|
// discard everything
|
||||||
|
wg.batchBuffer = nil
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-wg.popItemChan:
|
||||||
|
case <-wg.popItemErr:
|
||||||
|
case <-q.batchChan:
|
||||||
|
case <-q.ctxRun.Done():
|
||||||
|
return
|
||||||
|
default:
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// drain the batch channel first
|
// drain the batch channel first
|
||||||
loop:
|
loop:
|
||||||
for {
|
for {
|
||||||
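With a negative timeout the flush discards everything: the new loop drains each channel without blocking by falling through select's default branch. The same drain pattern in isolation:

package main

import "fmt"

// drain empties a channel without ever blocking: when nothing is immediately
// available, the default branch ends the loop.
func drain(ch chan int) (n int) {
	for {
		select {
		case <-ch:
			n++
		default:
			return n
		}
	}
}

func main() {
	ch := make(chan int, 3)
	ch <- 1
	ch <- 2
	fmt.Println("discarded", drain(ch), "items")
}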
@ -221,6 +244,9 @@ loop:
|
|||||||
emptyCounter := 0
|
emptyCounter := 0
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
|
case <-q.ctxRun.Done():
|
||||||
|
log.Debug("Queue %q is shutting down", q.GetName())
|
||||||
|
return
|
||||||
case data, dataOk := <-wg.popItemChan:
|
case data, dataOk := <-wg.popItemChan:
|
||||||
if !dataOk {
|
if !dataOk {
|
||||||
return
|
return
|
||||||
@ -236,9 +262,6 @@ loop:
|
|||||||
log.Error("Failed to pop item from queue %q (doFlush): %v", q.GetName(), err)
|
log.Error("Failed to pop item from queue %q (doFlush): %v", q.GetName(), err)
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
case <-q.ctxRun.Done():
|
|
||||||
log.Debug("Queue %q is shutting down", q.GetName())
|
|
||||||
return
|
|
||||||
case <-time.After(20 * time.Millisecond):
|
case <-time.After(20 * time.Millisecond):
|
||||||
// There is no reliable way to make sure all queue items are consumed by the Flush, there always might be some items stored in some buffers/temp variables.
|
// There is no reliable way to make sure all queue items are consumed by the Flush, there always might be some items stored in some buffers/temp variables.
|
||||||
// If we run Gitea in a cluster, we can even not guarantee all items are consumed in a deterministic instance.
|
// If we run Gitea in a cluster, we can even not guarantee all items are consumed in a deterministic instance.
|
||||||
@ -316,6 +339,15 @@ func (q *WorkerPoolQueue[T]) doRun() {
|
|||||||
var batchDispatchC <-chan time.Time = infiniteTimerC
|
var batchDispatchC <-chan time.Time = infiniteTimerC
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
|
case flush := <-q.flushChan:
|
||||||
|
// before flushing, try to dispatch the pending batch to a worker first, in case no worker is running
|
||||||
|
// after that dispatch there is at least one worker running, so "doFlush" can wait for the workers to finish
|
||||||
|
// we are already inside a "flush" operation, so the dispatching function must not read from the flush chan again.
|
||||||
|
q.doDispatchBatchToWorker(wg, skipFlushChan)
|
||||||
|
q.doFlush(wg, flush)
|
||||||
|
case <-q.ctxRun.Done():
|
||||||
|
log.Debug("Queue %q is shutting down", q.GetName())
|
||||||
|
return
|
||||||
case data, dataOk := <-wg.popItemChan:
|
case data, dataOk := <-wg.popItemChan:
|
||||||
if !dataOk {
|
if !dataOk {
|
||||||
return
|
return
|
||||||
@ -334,20 +366,11 @@ func (q *WorkerPoolQueue[T]) doRun() {
|
|||||||
case <-batchDispatchC:
|
case <-batchDispatchC:
|
||||||
batchDispatchC = infiniteTimerC
|
batchDispatchC = infiniteTimerC
|
||||||
q.doDispatchBatchToWorker(wg, q.flushChan)
|
q.doDispatchBatchToWorker(wg, q.flushChan)
|
||||||
case flush := <-q.flushChan:
|
|
||||||
// before flushing, it needs to try to dispatch the batch to worker first, in case there is no worker running
|
|
||||||
// after the flushing, there is at least one worker running, so "doFlush" could wait for workers to finish
|
|
||||||
// since we are already in a "flush" operation, so the dispatching function shouldn't read the flush chan.
|
|
||||||
q.doDispatchBatchToWorker(wg, skipFlushChan)
|
|
||||||
q.doFlush(wg, flush)
|
|
||||||
case err := <-wg.popItemErr:
|
case err := <-wg.popItemErr:
|
||||||
if !q.isCtxRunCanceled() {
|
if !q.isCtxRunCanceled() {
|
||||||
log.Error("Failed to pop item from queue %q (doRun): %v", q.GetName(), err)
|
log.Error("Failed to pop item from queue %q (doRun): %v", q.GetName(), err)
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
case <-q.ctxRun.Done():
|
|
||||||
log.Debug("Queue %q is shutting down", q.GetName())
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -32,8 +32,9 @@ type WorkerPoolQueue[T any] struct {
|
|||||||
baseConfig *BaseConfig
|
baseConfig *BaseConfig
|
||||||
baseQueue baseQueue
|
baseQueue baseQueue
|
||||||
|
|
||||||
batchChan chan []T
|
batchChan chan []T
|
||||||
flushChan chan flushType
|
flushChan chan flushType
|
||||||
|
isFlushing atomic.Bool
|
||||||
|
|
||||||
batchLength int
|
batchLength int
|
||||||
workerNum int
|
workerNum int
|
||||||
@ -42,7 +43,10 @@ type WorkerPoolQueue[T any] struct {
|
|||||||
workerNumMu sync.Mutex
|
workerNumMu sync.Mutex
|
||||||
}
|
}
|
||||||
|
|
||||||
type flushType chan struct{}
|
type flushType struct {
|
||||||
|
timeout time.Duration
|
||||||
|
c chan struct{}
|
||||||
|
}
|
||||||
|
|
||||||
var _ ManagedWorkerPoolQueue = (*WorkerPoolQueue[any])(nil)
|
var _ ManagedWorkerPoolQueue = (*WorkerPoolQueue[any])(nil)
|
||||||
|
|
||||||
@ -104,12 +108,12 @@ func (q *WorkerPoolQueue[T]) FlushWithContext(ctx context.Context, timeout time.
|
|||||||
if timeout > 0 {
|
if timeout > 0 {
|
||||||
after = time.After(timeout)
|
after = time.After(timeout)
|
||||||
}
|
}
|
||||||
c := make(flushType)
|
flush := flushType{timeout: timeout, c: make(chan struct{})}
|
||||||
|
|
||||||
// send flush request
|
// send flush request
|
||||||
// if it blocks, it means that there is a flush in progress or the queue hasn't been started yet
|
// if it blocks, it means that there is a flush in progress or the queue hasn't been started yet
|
||||||
select {
|
select {
|
||||||
case q.flushChan <- c:
|
case q.flushChan <- flush:
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return ctx.Err()
|
return ctx.Err()
|
||||||
case <-q.ctxRun.Done():
|
case <-q.ctxRun.Done():
|
||||||
@ -120,7 +124,7 @@ func (q *WorkerPoolQueue[T]) FlushWithContext(ctx context.Context, timeout time.
|
|||||||
|
|
||||||
// wait for flush to finish
|
// wait for flush to finish
|
||||||
select {
|
select {
|
||||||
case <-c:
|
case <-flush.c:
|
||||||
return nil
|
return nil
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return ctx.Err()
|
return ctx.Err()
|
||||||
|
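flushType changes from a bare channel into a small struct, so the flush request itself carries the timeout (negative meaning "discard everything") alongside the completion channel. A compact sketch of sending such a request and waiting for it, with illustrative names:

package main

import (
	"fmt"
	"time"
)

// flushRequest mirrors the new flushType: the timeout travels with the request,
// and the receiver closes c to signal that the flush is finished.
type flushRequest struct {
	timeout time.Duration
	c       chan struct{}
}

func main() {
	flushChan := make(chan flushRequest)

	go func() { // stand-in for the queue's run loop
		req := <-flushChan
		if req.timeout < 0 {
			fmt.Println("discarding all queued items")
		} else {
			fmt.Println("flushing, timeout:", req.timeout)
		}
		close(req.c) // like close(flush.c) in doFlush's deferred cleanup
	}()

	req := flushRequest{timeout: -1, c: make(chan struct{})}
	flushChan <- req // send flush request
	<-req.c          // wait for flush to finish
}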
@ -38,8 +38,8 @@ func TestGetDirectorySize(t *testing.T) {
|
|||||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||||
repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 1)
|
repo, err := repo_model.GetRepositoryByID(db.DefaultContext, 1)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|
||||||
size, err := getDirectorySize(repo.RepoPath())
|
size, err := getDirectorySize(repo.RepoPath())
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.EqualValues(t, size, repo.Size)
|
repo.Size = 8165 // real size on the disk
|
||||||
|
assert.EqualValues(t, repo.Size, size)
|
||||||
}
|
}
|
||||||
|
@ -3,33 +3,33 @@
|
|||||||
|
|
||||||
package setting
|
package setting
|
||||||
|
|
||||||
// Attachment settings
|
type AttachmentSettingType struct {
|
||||||
var Attachment = struct {
|
|
||||||
Storage *Storage
|
Storage *Storage
|
||||||
AllowedTypes string
|
AllowedTypes string
|
||||||
MaxSize int64
|
MaxSize int64
|
||||||
MaxFiles int
|
MaxFiles int
|
||||||
Enabled bool
|
Enabled bool
|
||||||
}{
|
|
||||||
Storage: &Storage{},
|
|
||||||
AllowedTypes: ".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip",
|
|
||||||
MaxSize: 2048,
|
|
||||||
MaxFiles: 5,
|
|
||||||
Enabled: true,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var Attachment AttachmentSettingType
|
||||||
|
|
||||||
func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
|
func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
|
||||||
|
Attachment = AttachmentSettingType{
|
||||||
|
AllowedTypes: ".avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip",
|
||||||
|
MaxSize: 2048,
|
||||||
|
MaxFiles: 5,
|
||||||
|
Enabled: true,
|
||||||
|
}
|
||||||
sec, _ := rootCfg.GetSection("attachment")
|
sec, _ := rootCfg.GetSection("attachment")
|
||||||
if sec == nil {
|
if sec == nil {
|
||||||
Attachment.Storage, err = getStorage(rootCfg, "attachments", "", nil)
|
Attachment.Storage, err = getStorage(rootCfg, "attachments", "", nil)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.xls,.xlsx,.zip")
|
Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(Attachment.AllowedTypes)
|
||||||
Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(2048)
|
Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(Attachment.MaxSize)
|
||||||
Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(5)
|
Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(Attachment.MaxFiles)
|
||||||
Attachment.Enabled = sec.Key("ENABLED").MustBool(true)
|
Attachment.Enabled = sec.Key("ENABLED").MustBool(Attachment.Enabled)
|
||||||
|
|
||||||
Attachment.Storage, err = getStorage(rootCfg, "attachments", "", sec)
|
Attachment.Storage, err = getStorage(rootCfg, "attachments", "", sec)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
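The attachment settings move from a package-level literal to a named type whose defaults are filled in first; each config key then only overrides the pre-filled value. A self-contained sketch of that load-with-defaults idea (the lookup helper is a stand-in for sec.Key(...).MustString and friends):

package main

import "fmt"

// attachmentSetting mirrors AttachmentSettingType: defaults live in the value
// built at load time, not in every MustXxx call site.
type attachmentSetting struct {
	AllowedTypes string
	MaxSize      int64
	MaxFiles     int
	Enabled      bool
}

// lookup stands in for sec.Key(name).MustString(def): return the configured
// value when present, otherwise keep the default that was passed in.
func lookup(cfg map[string]string, name, def string) string {
	if v, ok := cfg[name]; ok {
		return v
	}
	return def
}

func main() {
	s := attachmentSetting{ // defaults, as in loadAttachmentFrom
		AllowedTypes: ".gif,.jpg,.png,.zip",
		MaxSize:      2048,
		MaxFiles:     5,
		Enabled:      true,
	}
	cfg := map[string]string{"ALLOWED_TYPES": ".png"} // only one key overridden
	s.AllowedTypes = lookup(cfg, "ALLOWED_TYPES", s.AllowedTypes)
	fmt.Printf("%+v\n", s)
}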
@ -86,6 +86,7 @@ var UI = struct {
|
|||||||
Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`},
|
Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`},
|
||||||
CustomEmojis: []string{`git`, `gitea`, `codeberg`, `gitlab`, `github`, `gogs`},
|
CustomEmojis: []string{`git`, `gitea`, `codeberg`, `gitlab`, `github`, `gogs`},
|
||||||
CustomEmojisMap: map[string]string{"git": ":git:", "gitea": ":gitea:", "codeberg": ":codeberg:", "gitlab": ":gitlab:", "github": ":github:", "gogs": ":gogs:"},
|
CustomEmojisMap: map[string]string{"git": ":git:", "gitea": ":gitea:", "codeberg": ":codeberg:", "gitlab": ":gitlab:", "github": ":github:", "gogs": ":gogs:"},
|
||||||
|
ExploreDefaultSort: "recentupdate",
|
||||||
PreferredTimestampTense: "mixed",
|
PreferredTimestampTense: "mixed",
|
||||||
|
|
||||||
AmbiguousUnicodeDetection: true,
|
AmbiguousUnicodeDetection: true,
|
||||||
|
@ -62,19 +62,18 @@ func (ut *RenderUtils) RenderCommitMessageLinkSubject(msg, urlDefault string, me
|
|||||||
}
|
}
|
||||||
msgLine = strings.TrimRightFunc(msgLine, unicode.IsSpace)
|
msgLine = strings.TrimRightFunc(msgLine, unicode.IsSpace)
|
||||||
if len(msgLine) == 0 {
|
if len(msgLine) == 0 {
|
||||||
return template.HTML("")
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// we can safely assume that it will not return any error, since there
|
// we can safely assume that it will not return any error, since there
|
||||||
// shouldn't be any special HTML.
|
// shouldn't be any special HTML.
|
||||||
renderedMessage, err := markup.RenderCommitMessageSubject(&markup.RenderContext{
|
renderedMessage, err := markup.RenderCommitMessageSubject(&markup.RenderContext{
|
||||||
Ctx: ut.ctx,
|
Ctx: ut.ctx,
|
||||||
DefaultLink: urlDefault,
|
Metas: metas,
|
||||||
Metas: metas,
|
}, urlDefault, template.HTMLEscapeString(msgLine))
|
||||||
}, template.HTMLEscapeString(msgLine))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("RenderCommitMessageSubject: %v", err)
|
log.Error("RenderCommitMessageSubject: %v", err)
|
||||||
return template.HTML("")
|
return ""
|
||||||
}
|
}
|
||||||
return renderCodeBlock(template.HTML(renderedMessage))
|
return renderCodeBlock(template.HTML(renderedMessage))
|
||||||
}
|
}
|
||||||
@ -94,9 +93,8 @@ func (ut *RenderUtils) RenderCommitBody(msg string, metas map[string]string) tem
|
|||||||
}
|
}
|
||||||
|
|
||||||
renderedMessage, err := markup.RenderCommitMessage(&markup.RenderContext{
|
renderedMessage, err := markup.RenderCommitMessage(&markup.RenderContext{
|
||||||
Ctx: ut.ctx,
|
Ctx: ut.ctx,
|
||||||
Metas: metas,
|
Metas: metas,
|
||||||
ContentMode: markup.RenderContentAsComment,
|
|
||||||
}, template.HTMLEscapeString(msgLine))
|
}, template.HTMLEscapeString(msgLine))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("RenderCommitMessage: %v", err)
|
log.Error("RenderCommitMessage: %v", err)
|
||||||
@ -117,9 +115,8 @@ func renderCodeBlock(htmlEscapedTextToRender template.HTML) template.HTML {
|
|||||||
// RenderIssueTitle renders issue/pull title with defined post processors
|
// RenderIssueTitle renders issue/pull title with defined post processors
|
||||||
func (ut *RenderUtils) RenderIssueTitle(text string, metas map[string]string) template.HTML {
|
func (ut *RenderUtils) RenderIssueTitle(text string, metas map[string]string) template.HTML {
|
||||||
renderedText, err := markup.RenderIssueTitle(&markup.RenderContext{
|
renderedText, err := markup.RenderIssueTitle(&markup.RenderContext{
|
||||||
Ctx: ut.ctx,
|
Ctx: ut.ctx,
|
||||||
ContentMode: markup.RenderContentAsTitle,
|
Metas: metas,
|
||||||
Metas: metas,
|
|
||||||
}, template.HTMLEscapeString(text))
|
}, template.HTMLEscapeString(text))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("RenderIssueTitle: %v", err)
|
log.Error("RenderIssueTitle: %v", err)
|
||||||
@ -212,7 +209,7 @@ func reactionToEmoji(reaction string) template.HTML {
|
|||||||
func (ut *RenderUtils) MarkdownToHtml(input string) template.HTML { //nolint:revive
|
func (ut *RenderUtils) MarkdownToHtml(input string) template.HTML { //nolint:revive
|
||||||
output, err := markdown.RenderString(&markup.RenderContext{
|
output, err := markdown.RenderString(&markup.RenderContext{
|
||||||
Ctx: ut.ctx,
|
Ctx: ut.ctx,
|
||||||
Metas: map[string]string{"mode": "document"},
|
Metas: markup.ComposeSimpleDocumentMetas(),
|
||||||
}, input)
|
}, input)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("RenderString: %v", err)
|
log.Error("RenderString: %v", err)
|
||||||
|
@ -47,10 +47,11 @@ mail@domain.com
|
|||||||
}
|
}
|
||||||
|
|
||||||
var testMetas = map[string]string{
|
var testMetas = map[string]string{
|
||||||
"user": "user13",
|
"user": "user13",
|
||||||
"repo": "repo11",
|
"repo": "repo11",
|
||||||
"repoPath": "../../tests/gitea-repositories-meta/user13/repo11.git/",
|
"repoPath": "../../tests/gitea-repositories-meta/user13/repo11.git/",
|
||||||
"mode": "comment",
|
"markdownLineBreakStyle": "comment",
|
||||||
|
"markupAllowShortIssuePattern": "true",
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMain(m *testing.M) {
|
func TestMain(m *testing.M) {
|
||||||
@ -75,8 +76,7 @@ func newTestRenderUtils() *RenderUtils {
|
|||||||
func TestRenderCommitBody(t *testing.T) {
|
func TestRenderCommitBody(t *testing.T) {
|
||||||
defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableInternalAttributes, true)()
|
defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableInternalAttributes, true)()
|
||||||
type args struct {
|
type args struct {
|
||||||
msg string
|
msg string
|
||||||
metas map[string]string
|
|
||||||
}
|
}
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
@ -108,7 +108,7 @@ func TestRenderCommitBody(t *testing.T) {
|
|||||||
ut := newTestRenderUtils()
|
ut := newTestRenderUtils()
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
assert.Equalf(t, tt.want, ut.RenderCommitBody(tt.args.msg, tt.args.metas), "RenderCommitBody(%v, %v)", tt.args.msg, tt.args.metas)
|
assert.Equalf(t, tt.want, ut.RenderCommitBody(tt.args.msg, nil), "RenderCommitBody(%v, %v)", tt.args.msg, nil)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -140,7 +140,7 @@ func TestRenderCommitMessage(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func TestRenderCommitMessageLinkSubject(t *testing.T) {
|
func TestRenderCommitMessageLinkSubject(t *testing.T) {
|
||||||
expected := `<a href="https://example.com/link" class="default-link muted">space </a><a href="/mention-user" data-markdown-generated-content="" class="mention">@mention-user</a>`
|
expected := `<a href="https://example.com/link" class="muted">space </a><a href="/mention-user" data-markdown-generated-content="" class="mention">@mention-user</a>`
|
||||||
assert.EqualValues(t, expected, newTestRenderUtils().RenderCommitMessageLinkSubject(testInput(), "https://example.com/link", testMetas))
|
assert.EqualValues(t, expected, newTestRenderUtils().RenderCommitMessageLinkSubject(testInput(), "https://example.com/link", testMetas))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -164,11 +164,11 @@ com 88fc37a3c0a4dda553bdcfc80c178a58247f42fb mit
|
|||||||
<span class="emoji" aria-label="thumbs up">👍</span>
|
<span class="emoji" aria-label="thumbs up">👍</span>
|
||||||
mail@domain.com
|
mail@domain.com
|
||||||
@mention-user test
|
@mention-user test
|
||||||
<a href="/user13/repo11/issues/123" class="ref-issue">#123</a>
|
#123
|
||||||
space<SPACE><SPACE>
|
space<SPACE><SPACE>
|
||||||
`
|
`
|
||||||
expected = strings.ReplaceAll(expected, "<SPACE>", " ")
|
expected = strings.ReplaceAll(expected, "<SPACE>", " ")
|
||||||
assert.EqualValues(t, expected, string(newTestRenderUtils().RenderIssueTitle(testInput(), testMetas)))
|
assert.EqualValues(t, expected, string(newTestRenderUtils().RenderIssueTitle(testInput(), nil)))
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRenderMarkdownToHtml(t *testing.T) {
|
func TestRenderMarkdownToHtml(t *testing.T) {
|
||||||
|
@ -19,9 +19,10 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
prefix string
|
prefix string
|
||||||
SlowTest = 10 * time.Second
|
TestTimeout = 10 * time.Minute
|
||||||
SlowFlush = 5 * time.Second
|
TestSlowRun = 10 * time.Second
|
||||||
|
TestSlowFlush = 1 * time.Second
|
||||||
)
|
)
|
||||||
|
|
||||||
var WriterCloser = &testLoggerWriterCloser{}
|
var WriterCloser = &testLoggerWriterCloser{}
|
||||||
@ -89,79 +90,97 @@ func (w *testLoggerWriterCloser) Reset() {
|
|||||||
w.Unlock()
|
w.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Printf takes a format and args and prints the string to os.Stdout
|
||||||
|
func Printf(format string, args ...any) {
|
||||||
|
if !log.CanColorStdout {
|
||||||
|
for i := 0; i < len(args); i++ {
|
||||||
|
if c, ok := args[i].(*log.ColoredValue); ok {
|
||||||
|
args[i] = c.Value()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_, _ = fmt.Fprintf(os.Stdout, format, args...)
|
||||||
|
}
|
||||||
|
|
||||||
// PrintCurrentTest prints the current test to os.Stdout
|
// PrintCurrentTest prints the current test to os.Stdout
|
||||||
func PrintCurrentTest(t testing.TB, skip ...int) func() {
|
func PrintCurrentTest(t testing.TB, skip ...int) func() {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
start := time.Now()
|
runStart := time.Now()
|
||||||
actualSkip := util.OptionalArg(skip) + 1
|
actualSkip := util.OptionalArg(skip) + 1
|
||||||
_, filename, line, _ := runtime.Caller(actualSkip)
|
_, filename, line, _ := runtime.Caller(actualSkip)
|
||||||
|
|
||||||
if log.CanColorStdout {
|
Printf("=== %s (%s:%d)\n", log.NewColoredValue(t.Name()), strings.TrimPrefix(filename, prefix), line)
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", fmt.Formatter(log.NewColoredValue(t.Name())), strings.TrimPrefix(filename, prefix), line)
|
|
||||||
} else {
|
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", t.Name(), strings.TrimPrefix(filename, prefix), line)
|
|
||||||
}
|
|
||||||
WriterCloser.pushT(t)
|
WriterCloser.pushT(t)
|
||||||
return func() {
|
timeoutChecker := time.AfterFunc(TestTimeout, func() {
|
||||||
took := time.Since(start)
|
l := 128 * 1024
|
||||||
if took > SlowTest {
|
var stack []byte
|
||||||
if log.CanColorStdout {
|
for {
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgYellow)), fmt.Formatter(log.NewColoredValue(took, log.Bold, log.FgYellow)))
|
stack = make([]byte, l)
|
||||||
} else {
|
n := runtime.Stack(stack, true)
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "+++ %s is a slow test (took %v)\n", t.Name(), took)
|
if n <= l {
|
||||||
|
stack = stack[:n]
|
||||||
|
break
|
||||||
}
|
}
|
||||||
|
l = n
|
||||||
}
|
}
|
||||||
timer := time.AfterFunc(SlowFlush, func() {
|
Printf("!!! %s ... timeout: %v ... stacktrace:\n%s\n\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestTimeout, string(stack))
|
||||||
if log.CanColorStdout {
|
})
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), SlowFlush)
|
return func() {
|
||||||
} else {
|
flushStart := time.Now()
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "+++ %s ... still flushing after %v ...\n", t.Name(), SlowFlush)
|
slowFlushChecker := time.AfterFunc(TestSlowFlush, func() {
|
||||||
}
|
Printf("+++ %s ... still flushing after %v ...\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestSlowFlush)
|
||||||
})
|
})
|
||||||
if err := queue.GetManager().FlushAll(context.Background(), time.Minute); err != nil {
|
if err := queue.GetManager().FlushAll(context.Background(), -1); err != nil {
|
||||||
t.Errorf("Flushing queues failed with error %v", err)
|
t.Errorf("Flushing queues failed with error %v", err)
|
||||||
}
|
}
|
||||||
timer.Stop()
|
slowFlushChecker.Stop()
|
||||||
flushTook := time.Since(start) - took
|
timeoutChecker.Stop()
|
||||||
if flushTook > SlowFlush {
|
|
||||||
if log.CanColorStdout {
|
runDuration := time.Since(runStart)
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", fmt.Formatter(log.NewColoredValue(t.Name(), log.Bold, log.FgRed)), fmt.Formatter(log.NewColoredValue(flushTook, log.Bold, log.FgRed)))
|
flushDuration := time.Since(flushStart)
|
||||||
} else {
|
if runDuration > TestSlowRun {
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "+++ %s had a slow clean-up flush (took %v)\n", t.Name(), flushTook)
|
Printf("+++ %s is a slow test (run: %v, flush: %v)\n", log.NewColoredValue(t.Name(), log.Bold, log.FgYellow), runDuration, flushDuration)
|
||||||
}
|
|
||||||
}
|
}
|
||||||
WriterCloser.popT()
|
WriterCloser.popT()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Printf takes a format and args and prints the string to os.Stdout
|
|
||||||
func Printf(format string, args ...any) {
|
|
||||||
if log.CanColorStdout {
|
|
||||||
for i := 0; i < len(args); i++ {
|
|
||||||
args[i] = log.NewColoredValue(args[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_, _ = fmt.Fprintf(os.Stdout, "\t"+format, args...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestLogEventWriter is a logger which will write to the testing log
|
// TestLogEventWriter is a logger which will write to the testing log
|
||||||
type TestLogEventWriter struct {
|
type TestLogEventWriter struct {
|
||||||
*log.EventWriterBaseImpl
|
*log.EventWriterBaseImpl
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewTestLoggerWriter creates a TestLogEventWriter as a log.LoggerProvider
|
// newTestLoggerWriter creates a TestLogEventWriter as a log.LoggerProvider
|
||||||
func NewTestLoggerWriter(name string, mode log.WriterMode) log.EventWriter {
|
func newTestLoggerWriter(name string, mode log.WriterMode) log.EventWriter {
|
||||||
w := &TestLogEventWriter{}
|
w := &TestLogEventWriter{}
|
||||||
w.EventWriterBaseImpl = log.NewEventWriterBase(name, "test-log-writer", mode)
|
w.EventWriterBaseImpl = log.NewEventWriterBase(name, "test-log-writer", mode)
|
||||||
w.OutputWriteCloser = WriterCloser
|
w.OutputWriteCloser = WriterCloser
|
||||||
return w
|
return w
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func Init() {
|
||||||
const relFilePath = "modules/testlogger/testlogger.go"
|
const relFilePath = "modules/testlogger/testlogger.go"
|
||||||
_, filename, _, _ := runtime.Caller(0)
|
_, filename, _, _ := runtime.Caller(0)
|
||||||
if !strings.HasSuffix(filename, relFilePath) {
|
if !strings.HasSuffix(filename, relFilePath) {
|
||||||
panic("source code file path doesn't match expected: " + relFilePath)
|
panic("source code file path doesn't match expected: " + relFilePath)
|
||||||
}
|
}
|
||||||
prefix = strings.TrimSuffix(filename, relFilePath)
|
prefix = strings.TrimSuffix(filename, relFilePath)
|
||||||
|
|
||||||
|
log.RegisterEventWriter("test", newTestLoggerWriter)
|
||||||
|
|
||||||
|
duration, err := time.ParseDuration(os.Getenv("GITEA_TEST_SLOW_RUN"))
|
||||||
|
if err == nil && duration > 0 {
|
||||||
|
TestSlowRun = duration
|
||||||
|
}
|
||||||
|
|
||||||
|
duration, err = time.ParseDuration(os.Getenv("GITEA_TEST_SLOW_FLUSH"))
|
||||||
|
if err == nil && duration > 0 {
|
||||||
|
TestSlowFlush = duration
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Fatalf(format string, args ...any) {
|
||||||
|
Printf(format+"\n", args...)
|
||||||
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
|
@ -5,10 +5,12 @@ package typesniffer
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"encoding/binary"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
"slices"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"code.gitea.io/gitea/modules/util"
|
"code.gitea.io/gitea/modules/util"
|
||||||
@ -18,10 +20,10 @@ import (
|
|||||||
const sniffLen = 1024
|
const sniffLen = 1024
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// SvgMimeType MIME type of SVG images.
|
MimeTypeImageSvg = "image/svg+xml"
|
||||||
SvgMimeType = "image/svg+xml"
|
MimeTypeImageAvif = "image/avif"
|
||||||
// ApplicationOctetStream MIME type of binary files.
|
|
||||||
ApplicationOctetStream = "application/octet-stream"
|
MimeTypeApplicationOctetStream = "application/octet-stream"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@ -47,7 +49,7 @@ func (ct SniffedType) IsImage() bool {
|
|||||||
|
|
||||||
// IsSvgImage detects if data is an SVG image format
|
// IsSvgImage detects if data is an SVG image format
|
||||||
func (ct SniffedType) IsSvgImage() bool {
|
func (ct SniffedType) IsSvgImage() bool {
|
||||||
return strings.Contains(ct.contentType, SvgMimeType)
|
return strings.Contains(ct.contentType, MimeTypeImageSvg)
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsPDF detects if data is a PDF format
|
// IsPDF detects if data is a PDF format
|
||||||
@ -81,6 +83,26 @@ func (ct SniffedType) GetMimeType() string {
|
|||||||
return strings.SplitN(ct.contentType, ";", 2)[0]
|
return strings.SplitN(ct.contentType, ";", 2)[0]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// https://en.wikipedia.org/wiki/ISO_base_media_file_format#File_type_box
|
||||||
|
func detectFileTypeBox(data []byte) (brands []string, found bool) {
|
||||||
|
if len(data) < 12 {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
boxSize := int(binary.BigEndian.Uint32(data[:4]))
|
||||||
|
if boxSize < 12 || boxSize > len(data) {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
tag := string(data[4:8])
|
||||||
|
if tag != "ftyp" {
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
brands = append(brands, string(data[8:12]))
|
||||||
|
for i := 16; i+4 <= boxSize; i += 4 {
|
||||||
|
brands = append(brands, string(data[i:i+4]))
|
||||||
|
}
|
||||||
|
return brands, true
|
||||||
|
}
|
||||||
|
|
||||||
// DetectContentType extends http.DetectContentType with more content types. Defaults to text/unknown if input is empty.
|
// DetectContentType extends http.DetectContentType with more content types. Defaults to text/unknown if input is empty.
|
||||||
func DetectContentType(data []byte) SniffedType {
|
func DetectContentType(data []byte) SniffedType {
|
||||||
if len(data) == 0 {
|
if len(data) == 0 {
|
||||||
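detectFileTypeBox reads the ISO base media file format's ftyp box: a 4-byte big-endian box size, the ASCII tag "ftyp", a 4-byte major brand, a 4-byte minor version, then 4-byte compatible brands up to the box size. A tiny stand-alone sketch that builds such a header and pulls the brands back out (independent of the function above):

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// layout: size(4) | "ftyp"(4) | major brand(4) | minor version(4) | compatible brands(4*n)
	box := make([]byte, 0, 20)
	var size [4]byte
	binary.BigEndian.PutUint32(size[:], 20) // total box size in bytes
	box = append(box, size[:]...)
	box = append(box, "ftyp"...)  // box tag
	box = append(box, "avif"...)  // major brand
	box = append(box, 0, 0, 0, 0) // minor version
	box = append(box, "mif1"...)  // one compatible brand

	major := string(box[8:12])
	compatible := string(box[16:20])
	fmt.Println(major, compatible) // avif mif1
}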
@ -94,7 +116,6 @@ func DetectContentType(data []byte) SniffedType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
|
// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
|
||||||
|
|
||||||
detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
|
detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
|
||||||
detectByXML := strings.Contains(ct, "text/xml")
|
detectByXML := strings.Contains(ct, "text/xml")
|
||||||
if detectByHTML || detectByXML {
|
if detectByHTML || detectByXML {
|
||||||
@ -102,7 +123,7 @@ func DetectContentType(data []byte) SniffedType {
|
|||||||
dataProcessed = bytes.TrimSpace(dataProcessed)
|
dataProcessed = bytes.TrimSpace(dataProcessed)
|
||||||
if detectByHTML && svgTagRegex.Match(dataProcessed) ||
|
if detectByHTML && svgTagRegex.Match(dataProcessed) ||
|
||||||
detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
|
detectByXML && svgTagInXMLRegex.Match(dataProcessed) {
|
||||||
ct = SvgMimeType
|
ct = MimeTypeImageSvg
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -116,6 +137,11 @@ func DetectContentType(data []byte) SniffedType {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fileTypeBrands, found := detectFileTypeBox(data)
|
||||||
|
if found && slices.Contains(fileTypeBrands, "avif") {
|
||||||
|
ct = MimeTypeImageAvif
|
||||||
|
}
|
||||||
|
|
||||||
if ct == "application/ogg" {
|
if ct == "application/ogg" {
|
||||||
dataHead := data
|
dataHead := data
|
||||||
if len(dataHead) > 256 {
|
if len(dataHead) > 256 {
|
||||||
|
@ -134,3 +134,33 @@ func TestDetectContentTypeOgg(t *testing.T) {
|
|||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.True(t, st.IsVideo())
|
assert.True(t, st.IsVideo())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDetectFileTypeBox(t *testing.T) {
|
||||||
|
_, found := detectFileTypeBox([]byte("\x00\x00\xff\xffftypAAAA...."))
|
||||||
|
assert.False(t, found)
|
||||||
|
|
||||||
|
brands, found := detectFileTypeBox([]byte("\x00\x00\x00\x0cftypAAAA"))
|
||||||
|
assert.True(t, found)
|
||||||
|
assert.Equal(t, []string{"AAAA"}, brands)
|
||||||
|
|
||||||
|
brands, found = detectFileTypeBox([]byte("\x00\x00\x00\x10ftypAAAA....BBBB"))
|
||||||
|
assert.True(t, found)
|
||||||
|
assert.Equal(t, []string{"AAAA"}, brands)
|
||||||
|
|
||||||
|
brands, found = detectFileTypeBox([]byte("\x00\x00\x00\x14ftypAAAA....BBBB"))
|
||||||
|
assert.True(t, found)
|
||||||
|
assert.Equal(t, []string{"AAAA", "BBBB"}, brands)
|
||||||
|
|
||||||
|
_, found = detectFileTypeBox([]byte("\x00\x00\x00\x14ftypAAAA....BBB"))
|
||||||
|
assert.False(t, found)
|
||||||
|
|
||||||
|
brands, found = detectFileTypeBox([]byte("\x00\x00\x00\x13ftypAAAA....BBB"))
|
||||||
|
assert.True(t, found)
|
||||||
|
assert.Equal(t, []string{"AAAA"}, brands)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDetectContentTypeAvif(t *testing.T) {
|
||||||
|
buf := []byte("\x00\x00\x00\x20ftypavif.......................")
|
||||||
|
st := DetectContentType(buf)
|
||||||
|
assert.Equal(t, MimeTypeImageAvif, st.contentType)
|
||||||
|
}
|
||||||
|
@ -47,11 +47,12 @@ func RenderMarkup(ctx *context.Base, repo *context.Repository, mode, text, urlPa
|
|||||||
switch mode {
|
switch mode {
|
||||||
case "gfm": // legacy mode, do nothing
|
case "gfm": // legacy mode, do nothing
|
||||||
case "comment":
|
case "comment":
|
||||||
renderCtx.ContentMode = markup.RenderContentAsComment
|
renderCtx.Metas = map[string]string{"markdownLineBreakStyle": "comment"}
|
||||||
case "wiki":
|
case "wiki":
|
||||||
renderCtx.ContentMode = markup.RenderContentAsWiki
|
renderCtx.Metas = map[string]string{"markdownLineBreakStyle": "document", "markupContentMode": "wiki"}
|
||||||
case "file":
|
case "file":
|
||||||
// render the repo file content by its extension
|
// render the repo file content by its extension
|
||||||
|
renderCtx.Metas = map[string]string{"markdownLineBreakStyle": "document"}
|
||||||
renderCtx.MarkupType = ""
|
renderCtx.MarkupType = ""
|
||||||
renderCtx.RelativePath = filePath
|
renderCtx.RelativePath = filePath
|
||||||
renderCtx.InStandalonePage = true
|
renderCtx.InStandalonePage = true
|
||||||
@ -74,10 +75,12 @@ func RenderMarkup(ctx *context.Base, repo *context.Repository, mode, text, urlPa
|
|||||||
|
|
||||||
if repo != nil && repo.Repository != nil {
|
if repo != nil && repo.Repository != nil {
|
||||||
renderCtx.Repo = repo.Repository
|
renderCtx.Repo = repo.Repository
|
||||||
if renderCtx.ContentMode == markup.RenderContentAsComment {
|
if mode == "file" {
|
||||||
renderCtx.Metas = repo.Repository.ComposeMetas(ctx)
|
|
||||||
} else {
|
|
||||||
renderCtx.Metas = repo.Repository.ComposeDocumentMetas(ctx)
|
renderCtx.Metas = repo.Repository.ComposeDocumentMetas(ctx)
|
||||||
|
} else if mode == "wiki" {
|
||||||
|
renderCtx.Metas = repo.Repository.ComposeWikiMetas(ctx)
|
||||||
|
} else if mode == "comment" {
|
||||||
|
renderCtx.Metas = repo.Repository.ComposeMetas(ctx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := markup.Render(renderCtx, strings.NewReader(text), ctx.Resp); err != nil {
|
if err := markup.Render(renderCtx, strings.NewReader(text), ctx.Resp); err != nil {
|
||||||
|
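In RenderMarkup, each API mode now maps to a plain Metas map rather than setting a ContentMode on the render context. A minimal sketch of that mapping, using only the keys visible in the diff:

package main

import "fmt"

// metasForMode mirrors the reworked switch: the render behavior is selected
// through Metas entries instead of a dedicated ContentMode value.
func metasForMode(mode string) map[string]string {
	switch mode {
	case "comment":
		return map[string]string{"markdownLineBreakStyle": "comment"}
	case "wiki":
		return map[string]string{"markdownLineBreakStyle": "document", "markupContentMode": "wiki"}
	case "file":
		return map[string]string{"markdownLineBreakStyle": "document"}
	}
	return nil // "gfm" legacy mode: leave whatever the caller set
}

func main() {
	fmt.Println(metasForMode("wiki"))
}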
@ -122,6 +122,8 @@ func SignInOAuthCallback(ctx *context.Context) {
|
|||||||
}
|
}
|
||||||
if err, ok := err.(*go_oauth2.RetrieveError); ok {
|
if err, ok := err.(*go_oauth2.RetrieveError); ok {
|
||||||
ctx.Flash.Error("OAuth2 RetrieveError: "+err.Error(), true)
|
ctx.Flash.Error("OAuth2 RetrieveError: "+err.Error(), true)
|
||||||
|
ctx.Redirect(setting.AppSubURL + "/user/login")
|
||||||
|
return
|
||||||
}
|
}
|
||||||
ctx.ServerError("UserSignIn", err)
|
ctx.ServerError("UserSignIn", err)
|
||||||
return
|
return
|
||||||
|
@ -56,7 +56,7 @@ func renderMarkdown(ctx *context.Context, act *activities_model.Action, content
|
|||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: act.GetRepoLink(ctx),
|
Base: act.GetRepoLink(ctx),
|
||||||
},
|
},
|
||||||
Metas: map[string]string{
|
Metas: map[string]string{ // FIXME: this is not the right place; the metas should be composed from the issue
|
||||||
"user": act.GetRepoUserName(ctx),
|
"user": act.GetRepoUserName(ctx),
|
||||||
"repo": act.GetRepoName(ctx),
|
"repo": act.GetRepoName(ctx),
|
||||||
},
|
},
|
||||||
|
@ -46,9 +46,7 @@ func showUserFeed(ctx *context.Context, formatType string) {
|
|||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: ctx.ContextUser.HTMLURL(),
|
Base: ctx.ContextUser.HTMLURL(),
|
||||||
},
|
},
|
||||||
Metas: map[string]string{
|
Metas: markup.ComposeSimpleDocumentMetas(),
|
||||||
"user": ctx.ContextUser.GetDisplayName(),
|
|
||||||
},
|
|
||||||
}, ctx.ContextUser.Description)
|
}, ctx.ContextUser.Description)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ctx.ServerError("RenderString", err)
|
ctx.ServerError("RenderString", err)
|
||||||
|
@ -189,7 +189,7 @@ func prepareOrgProfileReadme(ctx *context.Context, viewRepositories bool) bool {
|
|||||||
Base: profileDbRepo.Link(),
|
Base: profileDbRepo.Link(),
|
||||||
BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
|
BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
|
||||||
},
|
},
|
||||||
Metas: map[string]string{"mode": "document"},
|
Metas: markup.ComposeSimpleDocumentMetas(),
|
||||||
}, bytes); err != nil {
|
}, bytes); err != nil {
|
||||||
log.Error("failed to RenderString: %v", err)
|
log.Error("failed to RenderString: %v", err)
|
||||||
} else {
|
} else {
|
||||||
|
@ -289,9 +289,8 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
rctx := &markup.RenderContext{
|
rctx := &markup.RenderContext{
|
||||||
Ctx: ctx,
|
Ctx: ctx,
|
||||||
ContentMode: markup.RenderContentAsWiki,
|
Metas: ctx.Repo.Repository.ComposeWikiMetas(ctx),
|
||||||
Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
|
|
||||||
Links: markup.Links{
|
Links: markup.Links{
|
||||||
Base: ctx.Repo.RepoLink,
|
Base: ctx.Repo.RepoLink,
|
||||||
},
|
},
|
||||||
|
@ -50,7 +50,7 @@ func PrepareContextForProfileBigAvatar(ctx *context.Context) {
|
|||||||
ctx.Data["OpenIDs"] = openIDs
|
ctx.Data["OpenIDs"] = openIDs
|
||||||
if len(ctx.ContextUser.Description) != 0 {
|
if len(ctx.ContextUser.Description) != 0 {
|
||||||
content, err := markdown.RenderString(&markup.RenderContext{
|
content, err := markdown.RenderString(&markup.RenderContext{
|
||||||
Metas: map[string]string{"mode": "document"},
|
Metas: markup.ComposeSimpleDocumentMetas(),
|
||||||
Ctx: ctx,
|
Ctx: ctx,
|
||||||
}, ctx.ContextUser.Description)
|
}, ctx.ContextUser.Description)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -5,6 +5,7 @@
|
|||||||
package auth
|
package auth
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@ -141,6 +142,15 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore
|
|||||||
}
|
}
|
||||||
|
|
||||||
if skipper, ok := source.Cfg.(LocalTwoFASkipper); !ok || !skipper.IsSkipLocalTwoFA() {
|
if skipper, ok := source.Cfg.(LocalTwoFASkipper); !ok || !skipper.IsSkipLocalTwoFA() {
|
||||||
|
// Check if the user has webAuthn registration
|
||||||
|
hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(req.Context(), u.ID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if hasWebAuthn {
|
||||||
|
return nil, errors.New("Basic authorization is not allowed while webAuthn enrolled")
|
||||||
|
}
|
||||||
|
|
||||||
if err := validateTOTP(req, u); err != nil {
|
if err := validateTOTP(req, u); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -1 +0,0 @@
|
|||||||
Unnamed repository; edit this file 'description' to name the repository.
|
|
@ -1,6 +0,0 @@
|
|||||||
# git ls-files --others --exclude-from=.git/info/exclude
|
|
||||||
# Lines that start with '#' are comments.
|
|
||||||
# For a project mostly in C, the following would be a good set of
|
|
||||||
# exclude patterns (uncomment them if you want to use them):
|
|
||||||
# *.[oa]
|
|
||||||
# *~
|
|
@ -142,6 +142,7 @@ func CreateRelease(gitRepo *git.Repository, rel *repo_model.Release, attachmentU
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
rel.Title, _ = util.SplitStringAtByteN(rel.Title, 255)
|
||||||
rel.LowerTagName = strings.ToLower(rel.TagName)
|
rel.LowerTagName = strings.ToLower(rel.TagName)
|
||||||
if err = db.Insert(gitRepo.Ctx, rel); err != nil {
|
if err = db.Insert(gitRepo.Ctx, rel); err != nil {
|
||||||
return err
|
return err
|
||||||
|
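CreateRelease now caps the release title at 255 bytes before inserting it. The sketch below shows the general idea of a byte-limited cut that does not split a UTF-8 rune; the real util.SplitStringAtByteN may differ in details, so treat this as an illustration only:

package main

import (
	"fmt"
	"unicode/utf8"
)

// truncateToBytes cuts s down to at most limit bytes without splitting a
// multi-byte UTF-8 rune in half. Illustrative only; util.SplitStringAtByteN
// may behave differently (e.g. also return the cut-off remainder).
func truncateToBytes(s string, limit int) string {
	if len(s) <= limit {
		return s
	}
	for limit > 0 && !utf8.RuneStart(s[limit]) {
		limit-- // step back to the start of the rune we would otherwise cut
	}
	return s[:limit]
}

func main() {
	fmt.Println(truncateToBytes("héllo wörld", 7)) // "héllo w"
}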
@ -89,7 +89,7 @@ func TestListUnadoptedRepositories_ListOptions(t *testing.T) {
|
|||||||
|
|
||||||
func TestAdoptRepository(t *testing.T) {
|
func TestAdoptRepository(t *testing.T) {
|
||||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||||
assert.NoError(t, unittest.CopyDir(filepath.Join(setting.RepoRootPath, "user2", "repo1.git"), filepath.Join(setting.RepoRootPath, "user2", "test-adopt.git")))
|
assert.NoError(t, unittest.SyncDirs(filepath.Join(setting.RepoRootPath, "user2", "repo1.git"), filepath.Join(setting.RepoRootPath, "user2", "test-adopt.git")))
|
||||||
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
|
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
|
||||||
_, err := AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"})
|
_, err := AdoptRepository(db.DefaultContext, user2, user2, CreateRepoOptions{Name: "test-adopt"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
@ -68,7 +68,7 @@ func (e RepoRefNotFoundError) Is(err error) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// NewRequest creates an archival request, based on the URI. The
|
// NewRequest creates an archival request, based on the URI. The
|
||||||
// resulting ArchiveRequest is suitable for being passed to ArchiveRepository()
|
// resulting ArchiveRequest is suitable for being passed to Await()
|
||||||
// if it's determined that the request still needs to be satisfied.
|
// if it's determined that the request still needs to be satisfied.
|
||||||
func NewRequest(repoID int64, repo *git.Repository, uri string) (*ArchiveRequest, error) {
|
func NewRequest(repoID int64, repo *git.Repository, uri string) (*ArchiveRequest, error) {
|
||||||
r := &ArchiveRequest{
|
r := &ArchiveRequest{
|
||||||
@ -151,13 +151,14 @@ func (aReq *ArchiveRequest) Await(ctx context.Context) (*repo_model.RepoArchiver
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// doArchive satisfies the ArchiveRequest being passed in. Processing
|
||||||
|
// will occur in a separate goroutine, as this phase may take a while to
|
||||||
|
// complete. If the archive already exists, doArchive will not do
|
||||||
|
// anything. In all cases, the caller should be examining the *ArchiveRequest
|
||||||
|
// being returned for completion, as it may be different than the one they passed
|
||||||
|
// in.
|
||||||
func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver, error) {
|
func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver, error) {
|
||||||
txCtx, committer, err := db.TxContext(ctx)
|
ctx, _, finished := process.GetManager().AddContext(ctx, fmt.Sprintf("ArchiveRequest[%d]: %s", r.RepoID, r.GetArchiveName()))
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
defer committer.Close()
|
|
||||||
ctx, _, finished := process.GetManager().AddContext(txCtx, fmt.Sprintf("ArchiveRequest[%d]: %s", r.RepoID, r.GetArchiveName()))
|
|
||||||
defer finished()
|
defer finished()
|
||||||
|
|
||||||
archiver, err := repo_model.GetRepoArchiver(ctx, r.RepoID, r.Type, r.CommitID)
|
archiver, err := repo_model.GetRepoArchiver(ctx, r.RepoID, r.Type, r.CommitID)
|
||||||
@@ -192,7 +193,7 @@ func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver
                 return nil, err
             }
         }
-        return archiver, committer.Commit()
+        return archiver, nil
     }
 
     if !errors.Is(err, os.ErrNotExist) {

@@ -261,17 +262,7 @@ func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver
         }
     }
 
-    return archiver, committer.Commit()
-}
-
-// ArchiveRepository satisfies the ArchiveRequest being passed in. Processing
-// will occur in a separate goroutine, as this phase may take a while to
-// complete. If the archive already exists, ArchiveRepository will not do
-// anything. In all cases, the caller should be examining the *ArchiveRequest
-// being returned for completion, as it may be different than the one they passed
-// in.
-func ArchiveRepository(ctx context.Context, request *ArchiveRequest) (*repo_model.RepoArchiver, error) {
-    return doArchive(ctx, request)
+    return archiver, nil
 }
 
 var archiverQueue *queue.WorkerPoolQueue[*ArchiveRequest]

@@ -281,8 +272,10 @@ func Init(ctx context.Context) error {
     handler := func(items ...*ArchiveRequest) []*ArchiveRequest {
         for _, archiveReq := range items {
             log.Trace("ArchiverData Process: %#v", archiveReq)
-            if _, err := doArchive(ctx, archiveReq); err != nil {
+            if archiver, err := doArchive(ctx, archiveReq); err != nil {
                 log.Error("Archive %v failed: %v", archiveReq, err)
+            } else {
+                log.Trace("ArchiverData Success: %#v", archiver)
             }
         }
         return nil

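For reference, the handler above is what archiverQueue runs, so work is triggered by pushing an *ArchiveRequest onto that queue once Init has created it. A rough sketch of an enqueue site, assuming Init has already run (the call site and error handling are assumptions for illustration, not shown in this diff):

    // Hypothetical enqueue after Init() has set up archiverQueue.
    if err := archiverQueue.Push(aReq); err != nil {
        log.Error("Failed to enqueue ArchiveRequest %v: %v", aReq, err)
    }
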
@@ -80,13 +80,13 @@ func TestArchive_Basic(t *testing.T) {
     inFlight[1] = tgzReq
     inFlight[2] = secondReq
 
-    ArchiveRepository(db.DefaultContext, zipReq)
-    ArchiveRepository(db.DefaultContext, tgzReq)
-    ArchiveRepository(db.DefaultContext, secondReq)
+    doArchive(db.DefaultContext, zipReq)
+    doArchive(db.DefaultContext, tgzReq)
+    doArchive(db.DefaultContext, secondReq)
 
     // Make sure sending an unprocessed request through doesn't affect the queue
     // count.
-    ArchiveRepository(db.DefaultContext, zipReq)
+    doArchive(db.DefaultContext, zipReq)
 
     // Sleep two seconds to make sure the queue doesn't change.
     time.Sleep(2 * time.Second)

@@ -101,7 +101,7 @@ func TestArchive_Basic(t *testing.T) {
     // We still have the other three stalled at completion, waiting to remove
     // from archiveInProgress. Try to submit this new one before its
    // predecessor has cleared out of the queue.
-    ArchiveRepository(db.DefaultContext, zipReq2)
+    doArchive(db.DefaultContext, zipReq2)
 
     // Now we'll submit a request and TimedWaitForCompletion twice, before and
     // after we release it. We should trigger both the timeout and non-timeout

@@ -109,7 +109,7 @@ func TestArchive_Basic(t *testing.T) {
     timedReq, err := NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, secondCommit+".tar.gz")
     assert.NoError(t, err)
     assert.NotNil(t, timedReq)
-    ArchiveRepository(db.DefaultContext, timedReq)
+    doArchive(db.DefaultContext, timedReq)
 
     zipReq2, err = NewRequest(ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit+".zip")
     assert.NoError(t, err)

@@ -1,3 +1,3 @@
 <div class="field {{if not .item.VisibleOnForm}}tw-hidden{{end}}">
-    <div>{{ctx.RenderUtils.MarkdownToHtml .item.Attributes.value}}</div>
+    <div class="markup">{{ctx.RenderUtils.MarkdownToHtml .item.Attributes.value}}</div>
 </div>

@@ -1,5 +1,5 @@
 {{template "base/head" .}}
-<div role="main" aria-label="{{.Title}}" class="page-content repository projects edit-project new milestone">
+<div role="main" aria-label="{{.Title}}" class="page-content repository projects edit-project new">
     {{template "repo/header" .}}
     <div class="ui container">
         {{template "projects/new" .}}

@@ -29,7 +29,7 @@
     <div class="file-header-left tw-flex tw-items-center tw-py-2 tw-pr-4">
         {{if .ReadmeInList}}
             {{svg "octicon-book" 16 "tw-mr-2"}}
-            <strong><a class="default-link muted" href="#readme">{{.FileName}}</a></strong>
+            <strong><a class="muted" href="#readme">{{.FileName}}</a></strong>
         {{else}}
             {{template "repo/file_info" .}}
         {{end}}

The remaining hunks delete the stock Git housekeeping files ("description" and "info/exclude") from the test fixture repositories. The same two single-file deletions repeat nine times each, once per fixture repository, so only one representative of each is shown here.

@@ -1 +0,0 @@
-Unnamed repository; edit this file 'description' to name the repository.

@@ -1,6 +0,0 @@
-# git ls-files --others --exclude-from=.git/info/exclude
-# Lines that start with '#' are comments.
-# For a project mostly in C, the following would be a good set of
-# exclude patterns (uncomment them if you want to use them):
-# *.[oa]
-# *~

Some files were not shown because too many files have changed in this diff.