Merge branch 'main' into xormigrate
This commit is contained in commit d917463c89.

cmd/hook.go (13 changes)
@@ -591,8 +591,9 @@ Gitea or set your environment appropriately.`, "")
 	// S: ... ...
 	// S: flush-pkt
 	hookOptions := private.HookOptions{
 		UserName: pusherName,
 		UserID:   pusherID,
+		GitPushOptions: make(map[string]string),
 	}
 	hookOptions.OldCommitIDs = make([]string, 0, hookBatchSize)
 	hookOptions.NewCommitIDs = make([]string, 0, hookBatchSize)

@@ -617,8 +618,6 @@ Gitea or set your environment appropriately.`, "")
 		hookOptions.RefFullNames = append(hookOptions.RefFullNames, git.RefName(t[2]))
 	}
-
-	hookOptions.GitPushOptions = make(map[string]string)

 	if hasPushOptions {
 		for {
 			rs, err = readPktLine(ctx, reader, pktLineTypeUnknow)

@@ -629,11 +628,7 @@ Gitea or set your environment appropriately.`, "")
 			if rs.Type == pktLineTypeFlush {
 				break
 			}
-			kv := strings.SplitN(string(rs.Data), "=", 2)
-			if len(kv) == 2 {
-				hookOptions.GitPushOptions[kv[0]] = kv[1]
-			}
+			hookOptions.GitPushOptions.AddFromKeyValue(string(rs.Data))
 		}
 	}
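The inline "key=value" parsing removed above moves into the new private.GitPushOptions.AddFromKeyValue helper, added later in this diff (modules/private/pushoptions.go). A minimal standalone sketch of that parsing; the package scaffolding and lowercase names here are illustrative only:

package main

import (
	"fmt"
	"strings"
)

type gitPushOptions map[string]string

// addFromKeyValue mirrors the new helper: "key=value" stores the value,
// a bare "key" (e.g. from "git push -o force-push") stores an empty string.
func (g gitPushOptions) addFromKeyValue(line string) {
	kv := strings.SplitN(line, "=", 2)
	if len(kv) == 2 {
		g[kv[0]] = kv[1]
	} else {
		g[kv[0]] = ""
	}
}

func main() {
	opts := gitPushOptions{}
	opts.addFromKeyValue("repo.private=false")
	opts.addFromKeyValue("force-push")
	fmt.Println(opts) // map[force-push: repo.private:false]
}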
@@ -64,6 +64,11 @@ func main() {
 			Value: "",
 			Usage: "Forked user name on Github",
 		},
+		&cli.StringFlag{
+			Name:  "gh-access-token",
+			Value: "",
+			Usage: "Access token for GitHub api request",
+		},
 		&cli.BoolFlag{
 			Name:  "no-fetch",
 			Usage: "Set this flag to prevent fetch of remote branches",

@@ -169,9 +174,10 @@ func runBackport(c *cli.Context) error {
 	fmt.Printf("* Backporting %s to %s as %s\n", pr, localReleaseBranch, backportBranch)

 	sha := c.String("cherry-pick")
+	accessToken := c.String("gh-access-token")
 	if sha == "" {
 		var err error
-		sha, err = determineSHAforPR(ctx, pr)
+		sha, err = determineSHAforPR(ctx, pr, accessToken)
 		if err != nil {
 			return err
 		}

@@ -427,13 +433,16 @@ func readVersion() string {
 	return strings.Join(split[:2], ".")
 }

-func determineSHAforPR(ctx context.Context, prStr string) (string, error) {
+func determineSHAforPR(ctx context.Context, prStr, accessToken string) (string, error) {
 	prNum, err := strconv.Atoi(prStr)
 	if err != nil {
 		return "", err
 	}

 	client := github.NewClient(http.DefaultClient)
+	if accessToken != "" {
+		client = client.WithAuthToken(accessToken)
+	}

 	pr, _, err := client.PullRequests.Get(ctx, "go-gitea", "gitea", prNum)
 	if err != nil {
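go-github's WithAuthToken, used above, wraps the client so every request carries the token. A stdlib-only sketch of the same conditional-auth pattern; the transport type below is invented for illustration, not Gitea or go-github code:

package main

import "net/http"

type authTransport struct {
	token string
	base  http.RoundTripper
}

// RoundTrip adds a bearer token to a clone of the request, leaving the
// original untouched, then delegates to the underlying transport.
func (t *authTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	r := req.Clone(req.Context())
	r.Header.Set("Authorization", "Bearer "+t.token)
	return t.base.RoundTrip(r)
}

// newClient mirrors the diff's conditional: no token, no auth wrapping,
// so anonymous use of the backport tool keeps working.
func newClient(token string) *http.Client {
	if token == "" {
		return http.DefaultClient
	}
	return &http.Client{Transport: &authTransport{token: token, base: http.DefaultTransport}}
}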
go.mod (2 changes)

@@ -54,7 +54,7 @@ require (
 	github.com/go-chi/chi/v5 v5.0.13
 	github.com/go-chi/cors v1.2.1
 	github.com/go-co-op/gocron v1.37.0
-	github.com/go-enry/go-enry/v2 v2.8.8
+	github.com/go-enry/go-enry/v2 v2.9.1
 	github.com/go-git/go-billy/v5 v5.5.0
 	github.com/go-git/go-git/v5 v5.12.0
 	github.com/go-ldap/ldap/v3 v3.4.6
go.sum (4 changes)

@@ -329,8 +329,8 @@ github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4=
 github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
 github.com/go-co-op/gocron v1.37.0 h1:ZYDJGtQ4OMhTLKOKMIch+/CY70Brbb1dGdooLEhh7b0=
 github.com/go-co-op/gocron v1.37.0/go.mod h1:3L/n6BkO7ABj+TrfSVXLRzsP26zmikL4ISkLQ0O8iNY=
-github.com/go-enry/go-enry/v2 v2.8.8 h1:EhfxWpw4DQ3WEFB1Y77X8vKqZL0D0EDUUWYDUAIv9/4=
-github.com/go-enry/go-enry/v2 v2.8.8/go.mod h1:9yrj4ES1YrbNb1Wb7/PWYr2bpaCXUGRt0uafN0ISyG8=
+github.com/go-enry/go-enry/v2 v2.9.1 h1:G9iDteJ/Mc0F4Di5NeQknf83R2OkRbwY9cAYmcqVG6U=
+github.com/go-enry/go-enry/v2 v2.9.1/go.mod h1:9yrj4ES1YrbNb1Wb7/PWYr2bpaCXUGRt0uafN0ISyG8=
 github.com/go-enry/go-oniguruma v1.2.1 h1:k8aAMuJfMrqm/56SG2lV9Cfti6tC4x8673aHCcBk+eo=
 github.com/go-enry/go-oniguruma v1.2.1/go.mod h1:bWDhYP+S6xZQgiRL7wlTScFYBe023B6ilRZbCAD5Hf4=
 github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
@@ -69,7 +69,7 @@ func CreateArtifact(ctx context.Context, t *ActionTask, artifactName, artifactPa
 			OwnerID:      t.OwnerID,
 			CommitSHA:    t.CommitSHA,
 			Status:       int64(ArtifactStatusUploadPending),
-			ExpiredUnix:  timeutil.TimeStamp(time.Now().Unix() + 3600*24*expiredDays),
+			ExpiredUnix:  timeutil.TimeStamp(time.Now().Unix() + timeutil.Day*expiredDays),
 		}
 		if _, err := db.GetEngine(ctx).Insert(artifact); err != nil {
 			return nil, err

@@ -78,6 +78,13 @@ func CreateArtifact(ctx context.Context, t *ActionTask, artifactName, artifactPa
 	} else if err != nil {
 		return nil, err
 	}

+	if _, err := db.GetEngine(ctx).ID(artifact.ID).Cols("expired_unix").Update(&ActionArtifact{
+		ExpiredUnix: timeutil.TimeStamp(time.Now().Unix() + timeutil.Day*expiredDays),
+	}); err != nil {
+		return nil, err
+	}
+
 	return artifact, nil
 }
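A small sketch of the effect of these two hunks, assuming timeutil.Day is the number of seconds in a day; the types and helper below are invented for illustration. The first hunk replaces the bare 3600*24 arithmetic with the named constant, and the added Update call re-stamps the deadline when an existing artifact row is uploaded again, so its TTL is refreshed rather than kept:

package main

import (
	"fmt"
	"time"
)

const day int64 = 24 * 3600 // stands in for timeutil.Day

type artifact struct {
	ID          int64
	ExpiredUnix int64
}

// touch refreshes the expiry deadline relative to "now", as the added
// Cols("expired_unix").Update(...) call does for an existing row.
func touch(a *artifact, expiredDays int64) {
	a.ExpiredUnix = time.Now().Unix() + day*expiredDays
}

func main() {
	a := artifact{ID: 1, ExpiredUnix: time.Now().Unix() + day} // expires tomorrow
	touch(&a, 90)                                              // re-upload: now expires in 90 days
	fmt.Println(a.ExpiredUnix > time.Now().Unix()+day)         // true
}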
@@ -712,3 +712,24 @@
   type: 3
   config: "{\"IgnoreWhitespaceConflicts\":false,\"AllowMerge\":true,\"AllowRebase\":true,\"AllowRebaseMerge\":true,\"AllowSquash\":true}"
   created_unix: 946684810
+
+-
+  id: 108
+  repo_id: 62
+  type: 1
+  config: "{}"
+  created_unix: 946684810
+
+-
+  id: 109
+  repo_id: 62
+  type: 2
+  config: "{\"EnableTimetracker\":true,\"AllowOnlyContributorsToTrackTime\":true}"
+  created_unix: 946684810
+
+-
+  id: 110
+  repo_id: 62
+  type: 3
+  config: "{\"IgnoreWhitespaceConflicts\":false,\"AllowMerge\":true,\"AllowRebase\":true,\"AllowRebaseMerge\":true,\"AllowSquash\":true}"
+  created_unix: 946684810
@@ -1768,3 +1768,34 @@
   size: 0
   is_fsck_enabled: true
   close_issues_via_commit_in_any_branch: false
+
+-
+  id: 62
+  owner_id: 42
+  owner_name: org42
+  lower_name: search-by-path
+  name: search-by-path
+  default_branch: master
+  num_watches: 0
+  num_stars: 0
+  num_forks: 0
+  num_issues: 0
+  num_closed_issues: 0
+  num_pulls: 0
+  num_closed_pulls: 0
+  num_milestones: 0
+  num_closed_milestones: 0
+  num_projects: 0
+  num_closed_projects: 0
+  is_private: false
+  is_empty: false
+  is_archived: false
+  is_mirror: false
+  status: 0
+  is_fork: false
+  fork_id: 0
+  is_template: false
+  template_id: 0
+  size: 0
+  is_fsck_enabled: true
+  close_issues_via_commit_in_any_branch: false
@@ -1517,3 +1517,40 @@
   repo_admin_change_team_access: false
   theme: ""
   keep_activity_private: false
+
+-
+  id: 42
+  lower_name: org42
+  name: org42
+  full_name: Org42
+  email: org42@example.com
+  keep_email_private: false
+  email_notifications_preference: onmention
+  passwd: ZogKvWdyEx:password
+  passwd_hash_algo: dummy
+  must_change_password: false
+  login_source: 0
+  login_name: org42
+  type: 1
+  salt: ZogKvWdyEx
+  max_repo_creation: -1
+  is_active: false
+  is_admin: false
+  is_restricted: false
+  allow_git_hook: false
+  allow_import_local: false
+  allow_create_organization: true
+  prohibit_login: false
+  avatar: avatar42
+  avatar_email: org42@example.com
+  use_custom_avatar: false
+  num_followers: 0
+  num_following: 0
+  num_stars: 0
+  num_repos: 1
+  num_teams: 0
+  num_members: 0
+  visibility: 0
+  repo_admin_change_team_access: false
+  theme: ""
+  keep_activity_private: false
@@ -138,12 +138,12 @@ func getTestCases() []struct {
 		{
 			name:  "AllPublic/PublicRepositoriesOfUserIncludingCollaborative",
 			opts:  &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, AllPublic: true, Template: optional.Some(false)},
-			count: 33,
+			count: 34,
 		},
 		{
 			name:  "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborative",
 			opts:  &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 15, Private: true, AllPublic: true, AllLimited: true, Template: optional.Some(false)},
-			count: 38,
+			count: 39,
 		},
 		{
 			name: "AllPublic/PublicAndPrivateRepositoriesOfUserIncludingCollaborativeByName",

@@ -158,7 +158,7 @@ func getTestCases() []struct {
 		{
 			name:  "AllPublic/PublicRepositoriesOfOrganization",
 			opts:  &repo_model.SearchRepoOptions{ListOptions: db.ListOptions{Page: 1, PageSize: 10}, OwnerID: 17, AllPublic: true, Collaborate: optional.Some(false), Template: optional.Some(false)},
-			count: 33,
+			count: 34,
 		},
 		{
 			name: "AllTemplates",
@@ -92,7 +92,10 @@ func TestSearchUsers(t *testing.T) {
 	testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}},
 		[]int64{26, 41})

-	testOrgSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
+	testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
+		[]int64{42})
+
+	testOrgSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}},
 		[]int64{})

 	// test users
@@ -17,6 +17,7 @@ import (
 	"code.gitea.io/gitea/modules/charset"
 	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/gitrepo"
+	path_filter "code.gitea.io/gitea/modules/indexer/code/bleve/token/path"
 	"code.gitea.io/gitea/modules/indexer/code/internal"
 	indexer_internal "code.gitea.io/gitea/modules/indexer/internal"
 	inner_bleve "code.gitea.io/gitea/modules/indexer/internal/bleve"

@@ -53,6 +54,7 @@ type RepoIndexerData struct {
 	RepoID    int64
 	CommitID  string
 	Content   string
+	Filename  string
 	Language  string
 	UpdatedAt time.Time
 }

@@ -64,8 +66,10 @@ func (d *RepoIndexerData) Type() string {

 const (
 	repoIndexerAnalyzer      = "repoIndexerAnalyzer"
+	filenameIndexerAnalyzer  = "filenameIndexerAnalyzer"
+	filenameIndexerTokenizer = "filenameIndexerTokenizer"
 	repoIndexerDocType       = "repoIndexerDocType"
-	repoIndexerLatestVersion = 6
+	repoIndexerLatestVersion = 7
 )

 // generateBleveIndexMapping generates a bleve index mapping for the repo indexer

@@ -79,6 +83,11 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 	textFieldMapping.IncludeInAll = false
 	docMapping.AddFieldMappingsAt("Content", textFieldMapping)

+	fileNamedMapping := bleve.NewTextFieldMapping()
+	fileNamedMapping.IncludeInAll = false
+	fileNamedMapping.Analyzer = filenameIndexerAnalyzer
+	docMapping.AddFieldMappingsAt("Filename", fileNamedMapping)
+
 	termFieldMapping := bleve.NewTextFieldMapping()
 	termFieldMapping.IncludeInAll = false
 	termFieldMapping.Analyzer = analyzer_keyword.Name

@@ -90,6 +99,7 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 	docMapping.AddFieldMappingsAt("UpdatedAt", timeFieldMapping)

 	mapping := bleve.NewIndexMapping()
+
 	if err := addUnicodeNormalizeTokenFilter(mapping); err != nil {
 		return nil, err
 	} else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]any{

@@ -100,6 +110,16 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 	}); err != nil {
 		return nil, err
 	}

+	if err := mapping.AddCustomAnalyzer(filenameIndexerAnalyzer, map[string]any{
+		"type":          analyzer_custom.Name,
+		"char_filters":  []string{},
+		"tokenizer":     unicode.Name,
+		"token_filters": []string{unicodeNormalizeName, path_filter.Name, lowercase.Name},
+	}); err != nil {
+		return nil, err
+	}
+
 	mapping.DefaultAnalyzer = repoIndexerAnalyzer
 	mapping.AddDocumentMapping(repoIndexerDocType, docMapping)
 	mapping.AddDocumentMapping("_all", bleve.NewDocumentDisabledMapping())

@@ -174,6 +194,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
 	return batch.Index(id, &RepoIndexerData{
 		RepoID:    repo.ID,
 		CommitID:  commitSha,
+		Filename:  update.Filename,
 		Content:   string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
 		Language:  analyze.GetCodeLanguage(update.Filename, fileContents),
 		UpdatedAt: time.Now().UTC(),

@@ -240,14 +261,19 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		keywordQuery query.Query
 	)

-	phraseQuery := bleve.NewMatchPhraseQuery(opts.Keyword)
-	phraseQuery.FieldVal = "Content"
-	phraseQuery.Analyzer = repoIndexerAnalyzer
-	keywordQuery = phraseQuery
+	pathQuery := bleve.NewPrefixQuery(strings.ToLower(opts.Keyword))
+	pathQuery.FieldVal = "Filename"
+	pathQuery.SetBoost(10)
+
+	contentQuery := bleve.NewMatchQuery(opts.Keyword)
+	contentQuery.FieldVal = "Content"
+
 	if opts.IsKeywordFuzzy {
-		phraseQuery.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(opts.Keyword)
+		contentQuery.Fuzziness = inner_bleve.GuessFuzzinessByKeyword(opts.Keyword)
 	}

+	keywordQuery = bleve.NewDisjunctionQuery(contentQuery, pathQuery)
+
 	if len(opts.RepoIDs) > 0 {
 		repoQueries := make([]query.Query, 0, len(opts.RepoIDs))
 		for _, repoID := range opts.RepoIDs {

@@ -277,7 +303,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int

 	from, pageSize := opts.GetSkipTake()
 	searchRequest := bleve.NewSearchRequestOptions(indexerQuery, pageSize, from, false)
-	searchRequest.Fields = []string{"Content", "RepoID", "Language", "CommitID", "UpdatedAt"}
+	searchRequest.Fields = []string{"Content", "Filename", "RepoID", "Language", "CommitID", "UpdatedAt"}
 	searchRequest.IncludeLocations = true

 	if len(opts.Language) == 0 {

@@ -307,6 +333,10 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 				endIndex = locationEnd
 			}
 		}
+		if len(hit.Locations["Filename"]) > 0 {
+			startIndex, endIndex = internal.FilenameMatchIndexPos(hit.Fields["Content"].(string))
+		}
+
 		language := hit.Fields["Language"].(string)
 		var updatedUnix timeutil.TimeStamp
 		if t, err := time.Parse(time.RFC3339, hit.Fields["UpdatedAt"].(string)); err == nil {
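For orientation, a minimal self-contained sketch of the disjunction-query shape the new Search code builds: a boosted prefix query on the filename OR'd with a match query on the content, so filename hits rank first. NewPrefixQuery, FieldVal, SetBoost, and NewDisjunctionQuery are the same bleve calls used in the hunk above; NewMemOnly, the default mapping, and the sample documents are only there to make the sketch runnable, so tokenization and scores will differ from Gitea's custom analyzers:

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/v2"
)

type doc struct {
	Filename string
	Content  string
}

func main() {
	idx, err := bleve.NewMemOnly(bleve.NewIndexMapping())
	if err != nil {
		panic(err)
	}
	_ = idx.Index("1", doc{Filename: "potato/ham.md", Content: "This is not cheese"})
	_ = idx.Index("2", doc{Filename: "readme.md", Content: "ham and cheese"})

	// Filename prefix match, boosted so filename hits outrank content hits.
	pathQuery := bleve.NewPrefixQuery("ham")
	pathQuery.FieldVal = "Filename"
	pathQuery.SetBoost(10)

	contentQuery := bleve.NewMatchQuery("ham")
	contentQuery.FieldVal = "Content"

	// Either query may match; scores are combined, boost dominates.
	res, err := idx.Search(bleve.NewSearchRequest(bleve.NewDisjunctionQuery(contentQuery, pathQuery)))
	if err != nil {
		panic(err)
	}
	for _, hit := range res.Hits {
		fmt.Println(hit.ID, hit.Score)
	}
}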
modules/indexer/code/bleve/token/path/path.go (new file, 101 lines)

@@ -0,0 +1,101 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package path
+
+import (
+	"slices"
+	"strings"
+
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/registry"
+)
+
+const (
+	Name = "gitea/path"
+)
+
+type TokenFilter struct{}
+
+func NewTokenFilter() *TokenFilter {
+	return &TokenFilter{}
+}
+
+func TokenFilterConstructor(config map[string]any, cache *registry.Cache) (analysis.TokenFilter, error) {
+	return NewTokenFilter(), nil
+}
+
+func (s *TokenFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
+	if len(input) == 1 {
+		// if there is only one token, we don't need to generate the reversed chain
+		return generatePathTokens(input, false)
+	}
+
+	normal := generatePathTokens(input, false)
+	reversed := generatePathTokens(input, true)
+
+	return append(normal, reversed...)
+}
+
+// Generates path tokens from the input tokens.
+// This mimics the behavior of the path hierarchy tokenizer in ES. It takes the input tokens and combines them, generating a term for each component
+// in the tree (e.g., foo/bar/baz.md will generate foo, foo/bar, and foo/bar/baz.md).
+//
+// If the reverse flag is set, the order of the tokens is reversed (the same input will generate baz.md, baz.md/bar, baz.md/bar/foo). This is useful
+// to efficiently search for filenames without supplying the full path.
+func generatePathTokens(input analysis.TokenStream, reversed bool) analysis.TokenStream {
+	terms := make([]string, 0, len(input))
+	longestTerm := 0
+
+	if reversed {
+		slices.Reverse(input)
+	}
+
+	for i := 0; i < len(input); i++ {
+		var sb strings.Builder
+		sb.WriteString(string(input[0].Term))
+
+		for j := 1; j < i; j++ {
+			sb.WriteString("/")
+			sb.WriteString(string(input[j].Term))
+		}
+
+		term := sb.String()
+
+		if longestTerm < len(term) {
+			longestTerm = len(term)
+		}
+
+		terms = append(terms, term)
+	}
+
+	output := make(analysis.TokenStream, 0, len(terms))
+
+	for _, term := range terms {
+		var start, end int
+
+		if reversed {
+			start = 0
+			end = len(term)
+		} else {
+			start = longestTerm - len(term)
+			end = longestTerm
+		}
+
+		token := analysis.Token{
+			Position: 1,
+			Start:    start,
+			End:      end,
+			Type:     analysis.AlphaNumeric,
+			Term:     []byte(term),
+		}
+
+		output = append(output, &token)
+	}
+
+	return output
+}
+
+func init() {
+	registry.RegisterTokenFilter(Name, TokenFilterConstructor)
+}
modules/indexer/code/bleve/token/path/path_test.go (new file, 76 lines)

@@ -0,0 +1,76 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package path
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/blevesearch/bleve/v2/analysis"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+	"github.com/stretchr/testify/assert"
+)
+
+type Scenario struct {
+	Input  string
+	Tokens []string
+}
+
+func TestTokenFilter(t *testing.T) {
+	scenarios := []struct {
+		Input string
+		Terms []string
+	}{
+		{
+			Input: "Dockerfile",
+			Terms: []string{"Dockerfile"},
+		},
+		{
+			Input: "Dockerfile.rootless",
+			Terms: []string{"Dockerfile.rootless"},
+		},
+		{
+			Input: "a/b/c/Dockerfile.rootless",
+			Terms: []string{"a", "a/b", "a/b/c", "a/b/c/Dockerfile.rootless", "Dockerfile.rootless", "Dockerfile.rootless/c", "Dockerfile.rootless/c/b", "Dockerfile.rootless/c/b/a"},
+		},
+		{
+			Input: "",
+			Terms: []string{},
+		},
+	}
+
+	for _, scenario := range scenarios {
+		t.Run(fmt.Sprintf("ensure terms of '%s'", scenario.Input), func(t *testing.T) {
+			terms := extractTerms(scenario.Input)
+
+			assert.Len(t, terms, len(scenario.Terms))
+
+			for _, term := range terms {
+				assert.Contains(t, scenario.Terms, term)
+			}
+		})
+	}
+}
+
+func extractTerms(input string) []string {
+	tokens := tokenize(input)
+	filteredTokens := filter(tokens)
+	terms := make([]string, 0, len(filteredTokens))
+
+	for _, token := range filteredTokens {
+		terms = append(terms, string(token.Term))
+	}
+
+	return terms
+}
+
+func filter(input analysis.TokenStream) analysis.TokenStream {
+	filter := NewTokenFilter()
+	return filter.Filter(input)
+}
+
+func tokenize(input string) analysis.TokenStream {
+	tokenizer := unicode.NewUnicodeTokenizer()
+	return tokenizer.Tokenize([]byte(input))
+}
@@ -30,7 +30,7 @@ import (
 )

 const (
-	esRepoIndexerLatestVersion = 1
+	esRepoIndexerLatestVersion = 2
 	// multi-match-types, currently only 2 types are used
 	// Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types
 	esMultiMatchTypeBestFields = "best_fields"

@@ -57,12 +57,50 @@ func NewIndexer(url, indexerName string) *Indexer {

 const (
 	defaultMapping = `{
+		"settings": {
+			"analysis": {
+				"analyzer": {
+					"filename_path_analyzer": {
+						"tokenizer": "path_tokenizer"
+					},
+					"reversed_filename_path_analyzer": {
+						"tokenizer": "reversed_path_tokenizer"
+					}
+				},
+				"tokenizer": {
+					"path_tokenizer": {
+						"type": "path_hierarchy",
+						"delimiter": "/"
+					},
+					"reversed_path_tokenizer": {
+						"type": "path_hierarchy",
+						"delimiter": "/",
+						"reverse": true
+					}
+				}
+			}
+		},
 		"mappings": {
 			"properties": {
 				"repo_id": {
 					"type": "long",
 					"index": true
 				},
+				"filename": {
+					"type": "text",
+					"term_vector": "with_positions_offsets",
+					"index": true,
+					"fields": {
+						"path": {
+							"type": "text",
+							"analyzer": "reversed_filename_path_analyzer"
+						},
+						"path_reversed": {
+							"type": "text",
+							"analyzer": "filename_path_analyzer"
+						}
+					}
+				},
 				"content": {
 					"type": "text",
 					"term_vector": "with_positions_offsets",

@@ -136,6 +174,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
 		Id(id).
 		Doc(map[string]any{
 			"repo_id":   repo.ID,
+			"filename":  update.Filename,
 			"content":   string(charset.ToUTF8DropErrors(fileContents, charset.ConvertOpts{})),
 			"commit_id": sha,
 			"language":  analyze.GetCodeLanguage(update.Filename, fileContents),

@@ -231,11 +270,11 @@ func (b *Indexer) doDelete(ctx context.Context, repoID int64) error {
 	return err
 }

-// indexPos find words positions for start and the following end on content. It will
+// contentMatchIndexPos finds the positions of "start" and the following "end" in content. It will
 // return the beginning position of the first start and the ending position of the
 // first end following the start string.
 // If not found any of the positions, it will return -1, -1.
-func indexPos(content, start, end string) (int, int) {
+func contentMatchIndexPos(content, start, end string) (int, int) {
 	startIdx := strings.Index(content, start)
 	if startIdx < 0 {
 		return -1, -1

@@ -244,22 +283,29 @@ func indexPos(content, start, end string) (int, int) {
 	if endIdx < 0 {
 		return -1, -1
 	}
-	return startIdx, startIdx + len(start) + endIdx + len(end)
+	return startIdx, (startIdx + len(start) + endIdx + len(end)) - 9 // remove the length of <em></em> since we give Content the original data
 }

 func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) {
 	hits := make([]*internal.SearchResult, 0, pageSize)
 	for _, hit := range searchResult.Hits.Hits {
+		repoID, fileName := internal.ParseIndexerID(hit.Id)
+		res := make(map[string]any)
+		if err := json.Unmarshal(hit.Source, &res); err != nil {
+			return 0, nil, nil, err
+		}
+
 		// FIXME: There is no way to get the position the keyword on the content currently on the same request.
 		// So we get it from content, this may made the query slower. See
 		// https://discuss.elastic.co/t/fetching-position-of-keyword-in-matched-document/94291
 		var startIndex, endIndex int
-		c, ok := hit.Highlight["content"]
-		if ok && len(c) > 0 {
+		if c, ok := hit.Highlight["filename"]; ok && len(c) > 0 {
+			startIndex, endIndex = internal.FilenameMatchIndexPos(res["content"].(string))
+		} else if c, ok := hit.Highlight["content"]; ok && len(c) > 0 {
 			// FIXME: Since the highlighting content will include <em> and </em> for the keywords,
 			// now we should find the positions. But how to avoid html content which contains the
 			// <em> and </em> tags? If elastic search has handled that?
-			startIndex, endIndex = indexPos(c[0], "<em>", "</em>")
+			startIndex, endIndex = contentMatchIndexPos(c[0], "<em>", "</em>")
 			if startIndex == -1 {
 				panic(fmt.Sprintf("1===%s,,,%#v,,,%s", kw, hit.Highlight, c[0]))
 			}

@@ -267,12 +313,6 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
 			panic(fmt.Sprintf("2===%#v", hit.Highlight))
 		}

-		repoID, fileName := internal.ParseIndexerID(hit.Id)
-		res := make(map[string]any)
-		if err := json.Unmarshal(hit.Source, &res); err != nil {
-			return 0, nil, nil, err
-		}
-
 		language := res["language"].(string)

 		hits = append(hits, &internal.SearchResult{

@@ -283,7 +323,7 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
 			UpdatedUnix: timeutil.TimeStamp(res["updated_at"].(float64)),
 			Language:    language,
 			StartIndex:  startIndex,
-			EndIndex:    endIndex - 9, // remove the length <em></em> since we give Content the original data
+			EndIndex:    endIndex,
 			Color:       enry.GetColor(language),
 		})
 	}

@@ -315,7 +355,10 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		searchType = esMultiMatchTypeBestFields
 	}

-	kwQuery := elastic.NewMultiMatchQuery(opts.Keyword, "content").Type(searchType)
+	kwQuery := elastic.NewBoolQuery().Should(
+		elastic.NewMultiMatchQuery(opts.Keyword, "content").Type(searchType),
+		elastic.NewMultiMatchQuery(opts.Keyword, "filename^10").Type(esMultiMatchTypePhrasePrefix),
+	)
 	query := elastic.NewBoolQuery()
 	query = query.Must(kwQuery)
 	if len(opts.RepoIDs) > 0 {

@@ -341,6 +384,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		Highlight(
 			elastic.NewHighlight().
 				Field("content").
+				Field("filename").
 				NumOfFragments(0). // return all highting content on fragments
 				HighlighterType("fvh"),
 		).

@@ -373,6 +417,7 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		Highlight(
 			elastic.NewHighlight().
 				Field("content").
+				Field("filename").
 				NumOfFragments(0). // return all highting content on fragments
 				HighlighterType("fvh"),
 		).
@@ -10,7 +10,7 @@ import (
 )

 func TestIndexPos(t *testing.T) {
-	startIdx, endIdx := indexPos("test index start and end", "start", "end")
+	startIdx, endIdx := contentMatchIndexPos("test index start and end", "start", "end")
 	assert.EqualValues(t, 11, startIdx)
-	assert.EqualValues(t, 24, endIdx)
+	assert.EqualValues(t, 15, endIdx)
 }
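The arithmetic behind the renamed helper and the updated test: Elasticsearch highlights wrap the keyword in <em>…</em>, 9 bytes of markup in total, and since the returned offsets now index into the original (un-highlighted) content, the end offset is shifted back by 9 inside the helper instead of at the call site. A sketch reproducing the function (its middle line is reconstructed from the visible return expression and the test's expected values) and the test's numbers:

package main

import (
	"fmt"
	"strings"
)

// contentMatchIndexPos: positions of the first "start"…"end" pair, with the
// 9 bytes of "<em>"+"</em>" markup removed from the end offset.
func contentMatchIndexPos(content, start, end string) (int, int) {
	startIdx := strings.Index(content, start)
	if startIdx < 0 {
		return -1, -1
	}
	endIdx := strings.Index(content[startIdx+len(start):], end)
	if endIdx < 0 {
		return -1, -1
	}
	return startIdx, (startIdx + len(start) + endIdx + len(end)) - 9
}

func main() {
	s, e := contentMatchIndexPos("test index start and end", "start", "end")
	fmt.Println(s, e) // 11 15: the old result was 24, minus len("<em>")+len("</em>") == 9
}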
@@ -6,6 +6,7 @@ package code
 import (
 	"context"
 	"os"
+	"slices"
 	"testing"

 	"code.gitea.io/gitea/models/db"

@@ -20,53 +21,166 @@ import (
 	_ "code.gitea.io/gitea/models/activities"

 	"github.com/stretchr/testify/assert"
+
+	_ "github.com/mattn/go-sqlite3"
 )

+type codeSearchResult struct {
+	Filename string
+	Content  string
+}
+
 func TestMain(m *testing.M) {
 	unittest.MainTest(m)
 }

 func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
 	t.Run(name, func(t *testing.T) {
-		var repoID int64 = 1
-		err := index(git.DefaultContext, indexer, repoID)
-		assert.NoError(t, err)
+		assert.NoError(t, setupRepositoryIndexes(git.DefaultContext, indexer))
+
 		keywords := []struct {
 			RepoIDs []int64
 			Keyword string
-			IDs     []int64
 			Langs   int
+			Results []codeSearchResult
 		}{
+			// Search for an exact match on the contents of a file
+			// This scenario yields a single result (the file README.md in repo '1')
 			{
 				RepoIDs: nil,
 				Keyword: "Description",
-				IDs:     []int64{repoID},
 				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "README.md",
+						Content:  "# repo1\n\nDescription for repo1",
+					},
+				},
 			},
+			// Search for an exact match on the contents of a file within repo '2'.
+			// This scenario yields no results
 			{
 				RepoIDs: []int64{2},
 				Keyword: "Description",
-				IDs:     []int64{},
 				Langs:   0,
 			},
+			// Search for an exact match on the contents of a file
+			// This scenario yields a single result (the file README.md in repo '1')
 			{
 				RepoIDs: nil,
 				Keyword: "repo1",
-				IDs:     []int64{repoID},
 				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "README.md",
+						Content:  "# repo1\n\nDescription for repo1",
+					},
+				},
 			},
+			// Search for an exact match on the contents of a file within repo '2'.
+			// This scenario yields no results
 			{
 				RepoIDs: []int64{2},
 				Keyword: "repo1",
-				IDs:     []int64{},
 				Langs:   0,
 			},
+			// Search for a non-existing term.
+			// This scenario yields no results
 			{
 				RepoIDs: nil,
 				Keyword: "non-exist",
-				IDs:     []int64{},
 				Langs:   0,
 			},
+			// Search for an exact match on the contents of a file within repo '62'.
+			// This scenario yields a single result (the file avocado.md in repo '62')
+			{
+				RepoIDs: []int64{62},
+				Keyword: "pineaple",
+				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "avocado.md",
+						Content:  "# repo1\n\npineaple pie of cucumber juice",
+					},
+				},
+			},
+			// Search for an exact match on the filename within repo '62'.
+			// This scenario yields a single result (the file avocado.md in repo '62')
+			{
+				RepoIDs: []int64{62},
+				Keyword: "avocado.md",
+				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "avocado.md",
+						Content:  "# repo1\n\npineaple pie of cucumber juice",
+					},
+				},
+			},
+			// Search for a partial match on the filename within repo '62'.
+			// This scenario yields a single result (the file avocado.md in repo '62')
+			{
+				RepoIDs: []int64{62},
+				Keyword: "avo",
+				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "avocado.md",
+						Content:  "# repo1\n\npineaple pie of cucumber juice",
+					},
+				},
+			},
+			// Search for matches on both the contents and the filenames within repo '62'.
+			// This scenario yields two results: the first is based on the filename (cucumber.md), the second on the contents
+			{
+				RepoIDs: []int64{62},
+				Keyword: "cucumber",
+				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "cucumber.md",
+						Content:  "Salad is good for your health",
+					},
+					{
+						Filename: "avocado.md",
+						Content:  "# repo1\n\npineaple pie of cucumber juice",
+					},
+				},
+			},
+			// Search for matches on the filenames within repo '62'.
+			// This scenario yields two results (both based on filename; the first one is an exact match)
+			{
+				RepoIDs: []int64{62},
+				Keyword: "ham",
+				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "ham.md",
+						Content:  "This is also not cheese",
+					},
+					{
+						Filename: "potato/ham.md",
+						Content:  "This is not cheese",
+					},
+				},
+			},
+			// Search for matches on the contents of files within repo '62'.
+			// This scenario yields two results (both based on contents; the first one is an exact match, whereas the second is a 'fuzzy' one)
+			{
+				RepoIDs: []int64{62},
+				Keyword: "This is not cheese",
+				Langs:   1,
+				Results: []codeSearchResult{
+					{
+						Filename: "potato/ham.md",
+						Content:  "This is not cheese",
+					},
+					{
+						Filename: "ham.md",
+						Content:  "This is also not cheese",
+					},
+				},
+			},
 		}

 		for _, kw := range keywords {

@@ -81,19 +195,37 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
 				IsKeywordFuzzy: true,
 			})
 			assert.NoError(t, err)
-			assert.Len(t, kw.IDs, int(total))
 			assert.Len(t, langs, kw.Langs)

-			ids := make([]int64, 0, len(res))
-			for _, hit := range res {
-				ids = append(ids, hit.RepoID)
-				assert.EqualValues(t, "# repo1\n\nDescription for repo1", hit.Content)
+			hits := make([]codeSearchResult, 0, len(res))
+
+			if total > 0 {
+				assert.NotEmpty(t, kw.Results, "The given scenario does not provide any expected results")
+			}
+
+			for _, hit := range res {
+				hits = append(hits, codeSearchResult{
+					Filename: hit.Filename,
+					Content:  hit.Content,
+				})
+			}
+
+			lastIndex := -1
+
+			for _, expected := range kw.Results {
+				index := slices.Index(hits, expected)
+				if index == -1 {
+					assert.Failf(t, "Result not found", "Expected %v in %v", expected, hits)
+				} else if lastIndex > index {
+					assert.Failf(t, "Result is out of order", "The order of %v within %v is wrong", expected, hits)
+				} else {
+					lastIndex = index
+				}
 			}
-			assert.EqualValues(t, kw.IDs, ids)
 		})
 	}

-	assert.NoError(t, indexer.Delete(context.Background(), repoID))
+	assert.NoError(t, tearDownRepositoryIndexes(indexer))
 	})
 }

@@ -136,3 +268,25 @@ func TestESIndexAndSearch(t *testing.T) {

 	testIndexer("elastic_search", t, indexer)
 }
+
+func setupRepositoryIndexes(ctx context.Context, indexer internal.Indexer) error {
+	for _, repoID := range repositoriesToSearch() {
+		if err := index(ctx, indexer, repoID); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func tearDownRepositoryIndexes(indexer internal.Indexer) error {
+	for _, repoID := range repositoriesToSearch() {
+		if err := indexer.Delete(context.Background(), repoID); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func repositoriesToSearch() []int64 {
+	return []int64{1, 62}
+}
@@ -10,6 +10,10 @@ import (
 	"code.gitea.io/gitea/modules/log"
 )

+const (
+	filenameMatchNumberOfLines = 7 // Copied from GitHub search
+)
+
 func FilenameIndexerID(repoID int64, filename string) string {
 	return internal.Base36(repoID) + "_" + filename
 }

@@ -30,3 +34,17 @@ func FilenameOfIndexerID(indexerID string) string {
 	}
 	return indexerID[index+1:]
 }
+
+// Given the contents of a file, returns the boundaries of its leading lines.
+func FilenameMatchIndexPos(content string) (int, int) {
+	count := 1
+	for i, c := range content {
+		if c == '\n' {
+			count++
+			if count == filenameMatchNumberOfLines {
+				return 0, i
+			}
+		}
+	}
+	return 0, len(content)
+}
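A sketch of how the new FilenameMatchIndexPos behaves: for a filename-based hit there is no keyword offset inside the content, so the "match" is simply the leading slice of the file, bounded by a fixed line count. The function body is copied from the hunk above; the sample content is invented:

package main

import "fmt"

const filenameMatchNumberOfLines = 7 // same constant as in the diff

func filenameMatchIndexPos(content string) (int, int) {
	count := 1
	for i, c := range content {
		if c == '\n' {
			count++
			if count == filenameMatchNumberOfLines {
				return 0, i
			}
		}
	}
	return 0, len(content) // fewer lines than the cap: show everything
}

func main() {
	content := "l1\nl2\nl3\nl4\nl5\nl6\nl7\nl8"
	start, end := filenameMatchIndexPos(content)
	fmt.Printf("%d..%d -> %q\n", start, end, content[start:end]) // 0..17 -> "l1\nl2\nl3\nl4\nl5\nl6"
}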
@@ -11,10 +11,15 @@ import (
 	"code.gitea.io/gitea/modules/util"

 	"github.com/blevesearch/bleve/v2"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
 	"github.com/blevesearch/bleve/v2/index/upsidedown"
 	"github.com/ethantkoenig/rupture"
 )

+const (
+	maxFuzziness = 2
+)
+
 // openIndexer open the index at the specified path, checking for metadata
 // updates and bleve version updates. If index needs to be created (or
 // re-created), returns (nil, nil)

@@ -48,7 +53,27 @@ func openIndexer(path string, latestVersion int) (bleve.Index, int, error) {
 	return index, 0, nil
 }

+// GuessFuzzinessByKeyword guesses the fuzziness for a keyword. Fuzziness is based on the Levenshtein distance and determines how many chars
+// may differ between two strings while they are still considered equivalent.
+// Given a phrase, its shortest word determines its fuzziness. If a phrase uses CJK (eg: `갃갃갃` `啊啊啊`), the fuzziness is zero.
 func GuessFuzzinessByKeyword(s string) int {
+	tokenizer := unicode.NewUnicodeTokenizer()
+	tokens := tokenizer.Tokenize([]byte(s))
+
+	if len(tokens) > 0 {
+		fuzziness := maxFuzziness
+
+		for _, token := range tokens {
+			fuzziness = min(fuzziness, guessFuzzinessByKeyword(string(token.Term)))
+		}
+
+		return fuzziness
+	}
+
+	return 0
+}
+
+func guessFuzzinessByKeyword(s string) int {
 	// according to https://github.com/blevesearch/bleve/issues/1563, the supported max fuzziness is 2
 	// magic number 4 was chosen to determine the levenshtein distance per each character of a keyword
 	// BUT, when using CJK (eg: `갃갃갃` `啊啊啊`), it mismatches a lot.

@@ -57,5 +82,5 @@ func GuessFuzzinessByKeyword(s string) int {
 			return 0
 		}
 	}
-	return min(2, len(s)/4)
+	return min(maxFuzziness, len(s)/4)
 }
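A worked example of the fuzziness rule, matching the expectations in the test file that follows: one unit of Levenshtein distance per four characters, capped at bleve's maximum of 2, and for multi-token phrases the shortest token wins. The helper below only covers the ASCII case, for illustration:

package main

import "fmt"

// guess mirrors guessFuzzinessByKeyword for ASCII input:
// one unit of edit distance per four characters, capped at 2.
func guess(token string) int { return min(2, len(token)/4) }

func main() {
	fmt.Println(guess("Avocado"))         // 1 (7/4)
	fmt.Println(guess("Geschwindigkeit")) // 2 (15/4, capped)
	// "non-exist" is tokenized into "non" and "exist"; the phrase takes the
	// minimum over tokens, so min(guess("non"), guess("exist")) == 0.
	fmt.Println(min(guess("non"), guess("exist"))) // 0
}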
modules/indexer/internal/bleve/util_test.go (new file, 45 lines)

@@ -0,0 +1,45 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package bleve
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestBleveGuessFuzzinessByKeyword(t *testing.T) {
+	scenarios := []struct {
+		Input     string
+		Fuzziness int // See util.go for the definition of fuzziness in this particular context
+	}{
+		{
+			Input:     "",
+			Fuzziness: 0,
+		},
+		{
+			Input:     "Avocado",
+			Fuzziness: 1,
+		},
+		{
+			Input:     "Geschwindigkeit",
+			Fuzziness: 2,
+		},
+		{
+			Input:     "non-exist",
+			Fuzziness: 0,
+		},
+		{
+			Input:     "갃갃갃",
+			Fuzziness: 0,
+		},
+	}
+
+	for _, scenario := range scenarios {
+		t.Run(fmt.Sprintf("ensure fuzziness of '%s' is '%d'", scenario.Input, scenario.Fuzziness), func(t *testing.T) {
+			assert.Equal(t, scenario.Fuzziness, GuessFuzzinessByKeyword(scenario.Input))
+		})
+	}
+}
@@ -37,6 +37,7 @@ type PullRequest struct {
 	ForeignIndex int64
 	Context      DownloaderContext `yaml:"-"`
 	EnsuredSafe  bool              `yaml:"ensured_safe"`
+	IsDraft      bool              `yaml:"is_draft"`
 }

 func (p *PullRequest) GetLocalIndex() int64 { return p.Number }
@@ -7,11 +7,9 @@ import (
 	"context"
 	"fmt"
 	"net/url"
-	"strconv"
 	"time"

 	"code.gitea.io/gitea/modules/git"
-	"code.gitea.io/gitea/modules/optional"
 	"code.gitea.io/gitea/modules/repository"
 	"code.gitea.io/gitea/modules/setting"
 )

@@ -24,25 +22,6 @@ const (
 	GitPushOptionCount = "GIT_PUSH_OPTION_COUNT"
 )

-// GitPushOptions is a wrapper around a map[string]string
-type GitPushOptions map[string]string
-
-// GitPushOptions keys
-const (
-	GitPushOptionRepoPrivate  = "repo.private"
-	GitPushOptionRepoTemplate = "repo.template"
-)
-
-// Bool checks for a key in the map and parses as a boolean
-func (g GitPushOptions) Bool(key string) optional.Option[bool] {
-	if val, ok := g[key]; ok {
-		if b, err := strconv.ParseBool(val); err == nil {
-			return optional.Some(b)
-		}
-	}
-	return optional.None[bool]()
-}
-
 // HookOptions represents the options for the Hook calls
 type HookOptions struct {
 	OldCommitIDs []string
modules/private/pushoptions.go (new file, 45 lines)

@@ -0,0 +1,45 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package private
+
+import (
+	"strconv"
+	"strings"
+
+	"code.gitea.io/gitea/modules/optional"
+)
+
+// GitPushOptions is a wrapper around a map[string]string
+type GitPushOptions map[string]string
+
+// GitPushOptions keys
+const (
+	GitPushOptionRepoPrivate  = "repo.private"
+	GitPushOptionRepoTemplate = "repo.template"
+	GitPushOptionForcePush    = "force-push"
+)
+
+// Bool checks for a key in the map and parses it as a boolean.
+// An option without a value is considered true, eg: "-o force-push" or "-o repo.private"
+func (g GitPushOptions) Bool(key string) optional.Option[bool] {
+	if val, ok := g[key]; ok {
+		if val == "" {
+			return optional.Some(true)
+		}
+		if b, err := strconv.ParseBool(val); err == nil {
+			return optional.Some(b)
+		}
+	}
+	return optional.None[bool]()
+}
+
+// AddFromKeyValue adds a key value pair to the map by "key=value" format or "key" for empty value
+func (g GitPushOptions) AddFromKeyValue(line string) {
+	kv := strings.SplitN(line, "=", 2)
+	if len(kv) == 2 {
+		g[kv[0]] = kv[1]
+	} else {
+		g[kv[0]] = ""
+	}
+}
30
modules/private/pushoptions_test.go
Normal file
30
modules/private/pushoptions_test.go
Normal file
@@ -0,0 +1,30 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package private
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestGitPushOptions(t *testing.T) {
+	o := GitPushOptions{}
+
+	v := o.Bool("no-such")
+	assert.False(t, v.Has())
+	assert.False(t, v.Value())
+
+	o.AddFromKeyValue("opt1=a=b")
+	o.AddFromKeyValue("opt2=false")
+	o.AddFromKeyValue("opt3=true")
+	o.AddFromKeyValue("opt4")
+
+	assert.Equal(t, "a=b", o["opt1"])
+	assert.False(t, o.Bool("opt1").Value())
+	assert.True(t, o.Bool("opt2").Has())
+	assert.False(t, o.Bool("opt2").Value())
+	assert.True(t, o.Bool("opt3").Value())
+	assert.True(t, o.Bool("opt4").Value())
+}
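Illustrative sketch (not part of the diff): how the new GitPushOptions API behaves end to end. Option strings such as those produced by `git push -o repo.private=false -o force-push` arrive one per pkt-line and are fed through AddFromKeyValue; Bool then distinguishes "not supplied" from "supplied as false". The example values are assumptions.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/private"
)

func main() {
	opts := private.GitPushOptions{} // a plain map[string]string underneath
	for _, line := range []string{"repo.private=false", "force-push"} {
		opts.AddFromKeyValue(line) // accepts "key=value" or a bare "key"
	}
	fmt.Println(opts.Bool(private.GitPushOptionRepoPrivate).Value()) // false (explicit value)
	fmt.Println(opts.Bool(private.GitPushOptionForcePush).Value())   // true (bare option)
	fmt.Println(opts.Bool("no-such").Has())                          // false (option absent)
}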
@@ -217,13 +217,14 @@ const (
 // IssueCommentPayload represents a payload information of issue comment event.
 type IssueCommentPayload struct {
 	Action      HookIssueCommentAction `json:"action"`
 	Issue       *Issue                 `json:"issue"`
+	PullRequest *PullRequest           `json:"pull_request,omitempty"`
 	Comment     *Comment               `json:"comment"`
 	Changes     *ChangesPayload        `json:"changes,omitempty"`
 	Repository  *Repository            `json:"repository"`
 	Sender      *User                  `json:"sender"`
 	IsPull      bool                   `json:"is_pull"`
 }

 // JSONPayload implements Payload
@@ -10,11 +10,11 @@ import (
 	"fmt"
 	"os"
 	"strings"
-	"sync"

 	"code.gitea.io/gitea/models/db"
 	packages_model "code.gitea.io/gitea/models/packages"
 	container_model "code.gitea.io/gitea/models/packages/container"
+	"code.gitea.io/gitea/modules/globallock"
 	"code.gitea.io/gitea/modules/log"
 	packages_module "code.gitea.io/gitea/modules/packages"
 	container_module "code.gitea.io/gitea/modules/packages/container"
@@ -22,8 +22,6 @@ import (
 	packages_service "code.gitea.io/gitea/services/packages"
 )

-var uploadVersionMutex sync.Mutex
-
 // saveAsPackageBlob creates a package blob from an upload
 // The uploaded blob gets stored in a special upload version to link them to the package/image
 func saveAsPackageBlob(ctx context.Context, hsr packages_module.HashedSizeReader, pci *packages_service.PackageCreationInfo) (*packages_model.PackageBlob, error) { //nolint:unparam
@@ -90,13 +88,20 @@ func mountBlob(ctx context.Context, pi *packages_service.PackageInfo, pb *packag
 	})
 }

+func containerPkgName(piOwnerID int64, piName string) string {
+	return fmt.Sprintf("pkg_%d_container_%s", piOwnerID, strings.ToLower(piName))
+}
+
 func getOrCreateUploadVersion(ctx context.Context, pi *packages_service.PackageInfo) (*packages_model.PackageVersion, error) {
 	var uploadVersion *packages_model.PackageVersion

-	// FIXME: Replace usage of mutex with database transaction
-	// https://github.com/go-gitea/gitea/pull/21862
-	uploadVersionMutex.Lock()
-	err := db.WithTx(ctx, func(ctx context.Context) error {
+	releaser, err := globallock.Lock(ctx, containerPkgName(pi.Owner.ID, pi.Name))
+	if err != nil {
+		return nil, err
+	}
+	defer releaser()
+
+	err = db.WithTx(ctx, func(ctx context.Context) error {
 		created := true
 		p := &packages_model.Package{
 			OwnerID: pi.Owner.ID,
@@ -140,7 +145,6 @@ func getOrCreateUploadVersion(ctx context.Context, pi *packages_service.PackageI

 		return nil
 	})
-	uploadVersionMutex.Unlock()

 	return uploadVersion, err
 }
@@ -173,6 +177,12 @@ func createFileForBlob(ctx context.Context, pv *packages_model.PackageVersion, p
 }

 func deleteBlob(ctx context.Context, ownerID int64, image, digest string) error {
+	releaser, err := globallock.Lock(ctx, containerPkgName(ownerID, image))
+	if err != nil {
+		return err
+	}
+	defer releaser()
+
 	return db.WithTx(ctx, func(ctx context.Context) error {
 		pfds, err := container_model.GetContainerBlobs(ctx, &container_model.BlobSearchOptions{
 			OwnerID: ownerID,
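Sketch of the locking pattern these hunks adopt (the helper name is illustrative, not from the diff): the process-local sync.Mutex is replaced by a cross-process lock keyed per container package and held around the database transaction, so multiple Gitea instances cannot race on the same upload version.

package container

import (
	"context"
	"fmt"
	"strings"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/modules/globallock"
)

// withContainerPkgLock serializes work on one container package across
// processes, then runs the transaction while the lock is held.
func withContainerPkgLock(ctx context.Context, ownerID int64, image string, body func(ctx context.Context) error) error {
	key := fmt.Sprintf("pkg_%d_container_%s", ownerID, strings.ToLower(image)) // same key shape as containerPkgName in the diff
	releaser, err := globallock.Lock(ctx, key)
	if err != nil {
		return err
	}
	defer releaser() // always release, even if the transaction fails
	return db.WithTx(ctx, body)
}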
@@ -41,80 +41,93 @@ func SearchIssues(ctx *context.APIContext) {
 	// parameters:
 	// - name: state
 	//   in: query
-	//   description: whether issue is open or closed
+	//   description: State of the issue
 	//   type: string
+	//   enum: [open, closed, all]
+	//   default: open
 	// - name: labels
 	//   in: query
-	//   description: comma separated list of labels. Fetch only issues that have any of this labels. Non existent labels are discarded
+	//   description: Comma-separated list of label names. Fetch only issues that have any of these labels. Non existent labels are discarded.
 	//   type: string
 	// - name: milestones
 	//   in: query
-	//   description: comma separated list of milestone names. Fetch only issues that have any of this milestones. Non existent are discarded
+	//   description: Comma-separated list of milestone names. Fetch only issues that have any of these milestones. Non existent milestones are discarded.
 	//   type: string
 	// - name: q
 	//   in: query
-	//   description: search string
+	//   description: Search string
 	//   type: string
 	// - name: priority_repo_id
 	//   in: query
-	//   description: repository to prioritize in the results
+	//   description: Repository ID to prioritize in the results
 	//   type: integer
 	//   format: int64
 	// - name: type
 	//   in: query
-	//   description: filter by type (issues / pulls) if set
+	//   description: Filter by issue type
 	//   type: string
+	//   enum: [issues, pulls]
 	// - name: since
 	//   in: query
-	//   description: Only show notifications updated after the given time. This is a timestamp in RFC 3339 format
+	//   description: Only show issues updated after the given time (RFC 3339 format)
 	//   type: string
 	//   format: date-time
-	//   required: false
 	// - name: before
 	//   in: query
-	//   description: Only show notifications updated before the given time. This is a timestamp in RFC 3339 format
+	//   description: Only show issues updated before the given time (RFC 3339 format)
 	//   type: string
 	//   format: date-time
-	//   required: false
 	// - name: assigned
 	//   in: query
-	//   description: filter (issues / pulls) assigned to you, default is false
+	//   description: Filter issues or pulls assigned to the authenticated user
 	//   type: boolean
+	//   default: false
 	// - name: created
 	//   in: query
-	//   description: filter (issues / pulls) created by you, default is false
+	//   description: Filter issues or pulls created by the authenticated user
 	//   type: boolean
+	//   default: false
 	// - name: mentioned
 	//   in: query
-	//   description: filter (issues / pulls) mentioning you, default is false
+	//   description: Filter issues or pulls mentioning the authenticated user
 	//   type: boolean
+	//   default: false
 	// - name: review_requested
 	//   in: query
-	//   description: filter pulls requesting your review, default is false
+	//   description: Filter pull requests where the authenticated user's review was requested
 	//   type: boolean
+	//   default: false
 	// - name: reviewed
 	//   in: query
-	//   description: filter pulls reviewed by you, default is false
+	//   description: Filter pull requests reviewed by the authenticated user
 	//   type: boolean
+	//   default: false
 	// - name: owner
 	//   in: query
-	//   description: filter by owner
+	//   description: Filter by repository owner
 	//   type: string
 	// - name: team
 	//   in: query
-	//   description: filter by team (requires organization owner parameter to be provided)
+	//   description: Filter by team (requires organization owner parameter)
 	//   type: string
 	// - name: page
 	//   in: query
-	//   description: page number of results to return (1-based)
+	//   description: Page number of results to return (1-based)
 	//   type: integer
+	//   minimum: 1
+	//   default: 1
 	// - name: limit
 	//   in: query
-	//   description: page size of results
+	//   description: Number of items per page
 	//   type: integer
+	//   minimum: 0
 	// responses:
 	//   "200":
 	//     "$ref": "#/responses/IssueList"
+	//   "400":
+	//     "$ref": "#/responses/error"
+	//   "422":
+	//     "$ref": "#/responses/validationError"

 	before, since, err := context.GetQueryBeforeSince(ctx.Base)
 	if err != nil {
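A quick usage sketch of the re-documented endpoint. The path /api/v1/repos/issues/search and the host are assumptions inferred from the issueSearchIssues operation; the parameter names and enums come from the comments above.

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("state", "closed") // enum: open, closed, all; default: open
	q.Set("type", "pulls")   // enum: issues, pulls
	q.Set("page", "1")       // minimum: 1, default: 1
	q.Set("limit", "20")     // minimum: 0

	resp, err := http.Get("https://gitea.example.com/api/v1/repos/issues/search?" + q.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	// 400 (error) and 422 (validation error) are now documented alongside 200.
	fmt.Println("status:", resp.StatusCode)
}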
@@ -208,7 +208,7 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
 		return
 	}

-	cols := make([]string, 0, len(opts.GitPushOptions))
+	cols := make([]string, 0, 2)

 	if isPrivate.Has() {
 		repo.IsPrivate = isPrivate.Value()
@@ -887,8 +887,6 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
 	}

 	if pull.HeadRepo != nil {
-		ctx.Data["SourcePath"] = pull.HeadRepo.Link() + "/src/commit/" + endCommitID
-
 		if !pull.HasMerged && ctx.Doer != nil {
 			perm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer)
 			if err != nil {
@@ -1461,6 +1461,35 @@ func registerRoutes(m *web.Router) {
 	)
 	// end "/{username}/{reponame}/activity"

+	m.Group("/{username}/{reponame}", func() {
+		m.Group("/pulls/{index}", func() {
+			m.Get("", repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewIssue)
+			m.Get(".diff", repo.DownloadPullDiff)
+			m.Get(".patch", repo.DownloadPullPatch)
+			m.Group("/commits", func() {
+				m.Get("", context.RepoRef(), repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewPullCommits)
+				m.Get("/list", context.RepoRef(), repo.GetPullCommits)
+				m.Get("/{sha:[a-f0-9]{7,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
+			})
+			m.Post("/merge", context.RepoMustNotBeArchived(), web.Bind(forms.MergePullRequestForm{}), repo.MergePullRequest)
+			m.Post("/cancel_auto_merge", context.RepoMustNotBeArchived(), repo.CancelAutoMergePullRequest)
+			m.Post("/update", repo.UpdatePullRequest)
+			m.Post("/set_allow_maintainer_edit", web.Bind(forms.UpdateAllowEditsForm{}), repo.SetAllowEdits)
+			m.Post("/cleanup", context.RepoMustNotBeArchived(), context.RepoRef(), repo.CleanUpPullRequest)
+			m.Group("/files", func() {
+				m.Get("", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForAllCommitsOfPr)
+				m.Get("/{sha:[a-f0-9]{7,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesStartingFromCommit)
+				m.Get("/{shaFrom:[a-f0-9]{7,40}}..{shaTo:[a-f0-9]{7,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
+				m.Group("/reviews", func() {
+					m.Get("/new_comment", repo.RenderNewCodeCommentForm)
+					m.Post("/comments", web.Bind(forms.CodeCommentForm{}), repo.SetShowOutdatedComments, repo.CreateCodeComment)
+					m.Post("/submit", web.Bind(forms.SubmitReviewForm{}), repo.SubmitReview)
+				}, context.RepoMustNotBeArchived())
+			})
+		})
+	}, ignSignIn, context.RepoAssignment, repo.MustAllowPulls, reqRepoPullsReader)
+	// end "/{username}/{reponame}/pulls/{index}": repo pull request
+
 	m.Group("/{username}/{reponame}", func() {
 		m.Group("/activity_author_data", func() {
 			m.Get("", repo.ActivityAuthors)
@@ -1499,32 +1528,6 @@ func registerRoutes(m *web.Router) {
 			return cancel
 		})

-		m.Group("/pulls/{index}", func() {
-			m.Get("", repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewIssue)
-			m.Get(".diff", repo.DownloadPullDiff)
-			m.Get(".patch", repo.DownloadPullPatch)
-			m.Group("/commits", func() {
-				m.Get("", context.RepoRef(), repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewPullCommits)
-				m.Get("/list", context.RepoRef(), repo.GetPullCommits)
-				m.Get("/{sha:[a-f0-9]{7,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
-			})
-			m.Post("/merge", context.RepoMustNotBeArchived(), web.Bind(forms.MergePullRequestForm{}), repo.MergePullRequest)
-			m.Post("/cancel_auto_merge", context.RepoMustNotBeArchived(), repo.CancelAutoMergePullRequest)
-			m.Post("/update", repo.UpdatePullRequest)
-			m.Post("/set_allow_maintainer_edit", web.Bind(forms.UpdateAllowEditsForm{}), repo.SetAllowEdits)
-			m.Post("/cleanup", context.RepoMustNotBeArchived(), context.RepoRef(), repo.CleanUpPullRequest)
-			m.Group("/files", func() {
-				m.Get("", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForAllCommitsOfPr)
-				m.Get("/{sha:[a-f0-9]{7,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesStartingFromCommit)
-				m.Get("/{shaFrom:[a-f0-9]{7,40}}..{shaTo:[a-f0-9]{7,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
-				m.Group("/reviews", func() {
-					m.Get("/new_comment", repo.RenderNewCodeCommentForm)
-					m.Post("/comments", web.Bind(forms.CodeCommentForm{}), repo.SetShowOutdatedComments, repo.CreateCodeComment)
-					m.Post("/submit", web.Bind(forms.SubmitReviewForm{}), repo.SubmitReview)
-				}, context.RepoMustNotBeArchived())
-			})
-		}, repo.MustAllowPulls)
-
 		m.Group("/media", func() {
 			m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.SingleDownloadOrLFS)
 			m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.SingleDownloadOrLFS)
@@ -116,11 +116,20 @@ func (input *notifyInput) Notify(ctx context.Context) {
 }

 func notify(ctx context.Context, input *notifyInput) error {
+	shouldDetectSchedules := input.Event == webhook_module.HookEventPush && input.Ref.BranchName() == input.Repo.DefaultBranch
 	if input.Doer.IsActions() {
 		// avoiding triggering cyclically, for example:
 		// a comment of an issue will trigger the runner to add a new comment as reply,
 		// and the new comment will trigger the runner again.
 		log.Debug("ignore executing %v for event %v whose doer is %v", getMethod(ctx), input.Event, input.Doer.Name)
+
+		// we should update schedule tasks in this case, because
+		// 1. schedule tasks cannot be triggered by other events, so cyclic triggering will not occur
+		// 2. some schedule tasks may update the repo periodically, so the refs of schedule tasks need to be updated
+		if shouldDetectSchedules {
+			return DetectAndHandleSchedules(ctx, input.Repo)
+		}
+
 		return nil
 	}
 	if input.Repo.IsEmpty || input.Repo.IsArchived {
@@ -174,7 +183,6 @@ func notify(ctx context.Context, input *notifyInput) error {

 	var detectedWorkflows []*actions_module.DetectedWorkflow
 	actionsConfig := input.Repo.MustGetUnit(ctx, unit_model.TypeActions).ActionsConfig()
-	shouldDetectSchedules := input.Event == webhook_module.HookEventPush && input.Ref.BranchName() == input.Repo.DefaultBranch
 	workflows, schedules, err := actions_module.DetectWorkflows(gitRepo, commit,
 		input.Event,
 		input.Payload,
@@ -7,7 +7,6 @@ import (
 	"context"
 	"fmt"
 	"os"
-	"strconv"
 	"strings"

 	issues_model "code.gitea.io/gitea/models/issues"
@@ -24,10 +23,10 @@ import (
 // ProcReceive handle proc receive work
 func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, opts *private.HookOptions) ([]private.HookProcReceiveRefResult, error) {
 	results := make([]private.HookProcReceiveRefResult, 0, len(opts.OldCommitIDs))
+	forcePush := opts.GitPushOptions.Bool(private.GitPushOptionForcePush)
 	topicBranch := opts.GitPushOptions["topic"]
-	forcePush, _ := strconv.ParseBool(opts.GitPushOptions["force-push"])
 	title := strings.TrimSpace(opts.GitPushOptions["title"])
-	description := strings.TrimSpace(opts.GitPushOptions["description"]) // TODO: Add more options?
+	description := strings.TrimSpace(opts.GitPushOptions["description"])
 	objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
 	userName := strings.ToLower(opts.UserName)

@@ -56,19 +55,19 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
 		}

 		baseBranchName := opts.RefFullNames[i].ForBranchName()
-		curentTopicBranch := ""
+		currentTopicBranch := ""
 		if !gitRepo.IsBranchExist(baseBranchName) {
 			// try match refs/for/<target-branch>/<topic-branch>
 			for p, v := range baseBranchName {
 				if v == '/' && gitRepo.IsBranchExist(baseBranchName[:p]) && p != len(baseBranchName)-1 {
-					curentTopicBranch = baseBranchName[p+1:]
+					currentTopicBranch = baseBranchName[p+1:]
 					baseBranchName = baseBranchName[:p]
 					break
 				}
 			}
 		}

-		if len(topicBranch) == 0 && len(curentTopicBranch) == 0 {
+		if len(topicBranch) == 0 && len(currentTopicBranch) == 0 {
 			results = append(results, private.HookProcReceiveRefResult{
 				OriginalRef: opts.RefFullNames[i],
 				OldOID:      opts.OldCommitIDs[i],
@@ -78,18 +77,18 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
 			continue
 		}

-		if len(curentTopicBranch) == 0 {
-			curentTopicBranch = topicBranch
+		if len(currentTopicBranch) == 0 {
+			currentTopicBranch = topicBranch
 		}

 		// because different user maybe want to use same topic,
 		// So it's better to make sure the topic branch name
-		// has user name prefix
+		// has username prefix
 		var headBranch string
-		if !strings.HasPrefix(curentTopicBranch, userName+"/") {
-			headBranch = userName + "/" + curentTopicBranch
+		if !strings.HasPrefix(currentTopicBranch, userName+"/") {
+			headBranch = userName + "/" + currentTopicBranch
 		} else {
-			headBranch = curentTopicBranch
+			headBranch = currentTopicBranch
 		}

 		pr, err := issues_model.GetUnmergedPullRequest(ctx, repo.ID, repo.ID, headBranch, baseBranchName, issues_model.PullRequestFlowAGit)
@@ -178,7 +177,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
 			continue
 		}

-		if !forcePush {
+		if !forcePush.Value() {
 			output, _, err := git.NewCommand(ctx, "rev-list", "--max-count=1").
 				AddDynamicArguments(oldCommitID, "^"+opts.NewCommitIDs[i]).
 				RunStdString(&git.RunOpts{Dir: repo.RepoPath(), Env: os.Environ()})
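Sketch (standalone, not from the diff) of what the switch to GitPushOptions.Bool changes in the AGit flow: a bare `-o force-push` now counts as true, an explicit `-o force-push=false` stays false, and when the option is absent Value() on the None option yields false, so the fast-forward check above still runs by default. Previously a bare `-o force-push` failed strconv.ParseBool and was silently treated as false.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/private"
)

func main() {
	opts := private.GitPushOptions{}
	opts.AddFromKeyValue("topic=feature-x") // e.g. git push origin HEAD:refs/for/main -o topic=feature-x
	opts.AddFromKeyValue("force-push")      // bare option, now interpreted as true

	forcePush := opts.Bool(private.GitPushOptionForcePush)
	if !forcePush.Value() {
		fmt.Println("would verify fast-forward via: git rev-list --max-count=1 <old> ^<new>")
	} else {
		fmt.Println("force push requested; skipping the fast-forward check")
	}
}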
@@ -58,6 +58,9 @@ func RequireRepoWriterOr(unitTypes ...unit.Type) func(ctx *Context) {
 func RequireRepoReader(unitType unit.Type) func(ctx *Context) {
 	return func(ctx *Context) {
 		if !ctx.Repo.CanRead(unitType) {
+			if unitType == unit.TypeCode && canWriteAsMaintainer(ctx) {
+				return
+			}
 			if log.IsTrace() {
 				if ctx.IsSigned {
 					log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+
@@ -374,7 +374,7 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) {
 		return
 	}

-	if !ctx.Repo.Permission.HasAnyUnitAccessOrEveryoneAccess() {
+	if !ctx.Repo.Permission.HasAnyUnitAccessOrEveryoneAccess() && !canWriteAsMaintainer(ctx) {
 		if ctx.FormString("go-get") == "1" {
 			EarlyResponseForGoGetMeta(ctx)
 			return
@@ -1058,3 +1058,11 @@ func GitHookService() func(ctx *Context) {
 		}
 	}
 }
+
+// canWriteAsMaintainer check if the doer can write to a branch as a maintainer
+func canWriteAsMaintainer(ctx *Context) bool {
+	branchName := getRefNameFromPath(ctx.Repo, ctx.PathParam("*"), func(branchName string) bool {
+		return issues_model.CanMaintainerWriteToBranch(ctx, ctx.Repo.Permission, branchName, ctx.Doer)
+	})
+	return len(branchName) > 0
+}
@@ -760,10 +760,15 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model
 		pr.Updated = pr.Created
 	}

+	prTitle := pr.Title
+	if pr.IsDraft && !issues_model.HasWorkInProgressPrefix(pr.Title) {
+		prTitle = fmt.Sprintf("%s %s", setting.Repository.PullRequest.WorkInProgressPrefixes[0], pr.Title)
+	}
+
 	issue := issues_model.Issue{
 		RepoID:      g.repo.ID,
 		Repo:        g.repo,
-		Title:       pr.Title,
+		Title:       prTitle,
 		Index:       pr.Number,
 		Content:     pr.Content,
 		MilestoneID: milestoneID,
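A small sketch of the new migration behavior. The prefix values are an assumption (a stock config lists "WIP:" first in WorkInProgressPrefixes), and the prefix check is a simplified stand-in for issues_model.HasWorkInProgressPrefix: a migrated draft PR whose title lacks a WIP marker gets one prepended, so Gitea's existing work-in-progress handling treats it as a draft.

package main

import (
	"fmt"
	"strings"
)

func main() {
	prefixes := []string{"WIP:", "[WIP]"} // assumed configured WorkInProgressPrefixes
	title, isDraft := "Add search by path", true

	hasPrefix := false // simplified stand-in for issues_model.HasWorkInProgressPrefix
	for _, p := range prefixes {
		if strings.HasPrefix(strings.ToUpper(title), strings.ToUpper(p)) {
			hasPrefix = true
			break
		}
	}
	if isDraft && !hasPrefix {
		title = fmt.Sprintf("%s %s", prefixes[0], title)
	}
	fmt.Println(title) // "WIP: Add search by path"
}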
@@ -737,6 +737,7 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq
 			PatchURL:     pr.GetPatchURL(), // see below for SECURITY related issues here
 			Reactions:    reactions,
 			ForeignIndex: int64(*pr.Number),
+			IsDraft:      pr.GetDraft(),
 		})

 		// SECURITY: Ensure that the PR is safe
@@ -722,6 +722,7 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque
 			PatchURL:     pr.WebURL + ".patch",
 			ForeignIndex: int64(pr.IID),
 			Context:      gitlabIssueContext{IsMergeRequest: true},
+			IsDraft:      pr.Draft,
 		})

 		// SECURITY: Ensure that the PR is safe
@@ -32,6 +32,10 @@ import (

 // RenameUser renames a user
 func RenameUser(ctx context.Context, u *user_model.User, newUserName string) error {
+	if newUserName == u.Name {
+		return nil
+	}
+
 	// Non-local users are not allowed to change their username.
 	if !u.IsOrganization() && !u.IsLocal() {
 		return user_model.ErrUserIsNotLocal{
@@ -40,10 +44,6 @@ func RenameUser(ctx context.Context, u *user_model.User, newUserName string) err
 		}
 	}

-	if newUserName == u.Name {
-		return nil
-	}
-
 	if err := user_model.IsUsableUsername(newUserName); err != nil {
 		return err
 	}
@@ -59,7 +59,7 @@ func (m *webhookNotifier) IssueClearLabels(ctx context.Context, doer *user_model
 		err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestLabel, &api.PullRequestPayload{
 			Action:      api.HookIssueLabelCleared,
 			Index:       issue.Index,
-			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 			Repository:  convert.ToRepo(ctx, issue.Repo, permission),
 			Sender:      convert.ToUser(ctx, doer, nil),
 		})
@@ -150,7 +150,7 @@ func (m *webhookNotifier) IssueChangeAssignee(ctx context.Context, doer *user_mo
 	}
 	apiPullRequest := &api.PullRequestPayload{
 		Index:       issue.Index,
-		PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 		Repository:  convert.ToRepo(ctx, issue.Repo, permission),
 		Sender:      convert.ToUser(ctx, doer, nil),
 	}
@@ -201,7 +201,7 @@ func (m *webhookNotifier) IssueChangeTitle(ctx context.Context, doer *user_model
 				From: oldTitle,
 			},
 		},
-		PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 		Repository:  convert.ToRepo(ctx, issue.Repo, permission),
 		Sender:      convert.ToUser(ctx, doer, nil),
 	})
@@ -236,7 +236,7 @@ func (m *webhookNotifier) IssueChangeStatus(ctx context.Context, doer *user_mode
 		// Merge pull request calls issue.changeStatus so we need to handle separately.
 		apiPullRequest := &api.PullRequestPayload{
 			Index:       issue.Index,
-			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 			Repository:  convert.ToRepo(ctx, issue.Repo, permission),
 			Sender:      convert.ToUser(ctx, doer, nil),
 			CommitID:    commitID,
@@ -307,7 +307,7 @@ func (m *webhookNotifier) NewPullRequest(ctx context.Context, pull *issues_model
 	if err := PrepareWebhooks(ctx, EventSource{Repository: pull.Issue.Repo}, webhook_module.HookEventPullRequest, &api.PullRequestPayload{
 		Action:      api.HookIssueOpened,
 		Index:       pull.Issue.Index,
-		PullRequest: convert.ToAPIPullRequest(ctx, pull, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, pull, pull.Issue.Poster),
 		Repository:  convert.ToRepo(ctx, pull.Issue.Repo, permission),
 		Sender:      convert.ToUser(ctx, pull.Issue.Poster, nil),
 	}); err != nil {
@@ -336,7 +336,7 @@ func (m *webhookNotifier) IssueChangeContent(ctx context.Context, doer *user_mod
 				From: oldContent,
 			},
 		},
-		PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 		Repository:  convert.ToRepo(ctx, issue.Repo, permission),
 		Sender:      convert.ToUser(ctx, doer, nil),
 	})
@@ -375,17 +375,20 @@ func (m *webhookNotifier) UpdateComment(ctx context.Context, doer *user_model.Us
 	}

 	var eventType webhook_module.HookEventType
+	var pullRequest *api.PullRequest
 	if c.Issue.IsPull {
 		eventType = webhook_module.HookEventPullRequestComment
+		pullRequest = convert.ToAPIPullRequest(ctx, c.Issue.PullRequest, doer)
 	} else {
 		eventType = webhook_module.HookEventIssueComment
 	}

 	permission, _ := access_model.GetUserRepoPermission(ctx, c.Issue.Repo, doer)
 	if err := PrepareWebhooks(ctx, EventSource{Repository: c.Issue.Repo}, eventType, &api.IssueCommentPayload{
 		Action:      api.HookIssueCommentEdited,
 		Issue:       convert.ToAPIIssue(ctx, doer, c.Issue),
+		PullRequest: pullRequest,
 		Comment:     convert.ToAPIComment(ctx, c.Issue.Repo, c),
 		Changes: &api.ChangesPayload{
 			Body: &api.ChangesFromPayload{
 				From: oldContent,
@@ -403,20 +406,23 @@ func (m *webhookNotifier) CreateIssueComment(ctx context.Context, doer *user_mod
 	issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
 ) {
 	var eventType webhook_module.HookEventType
+	var pullRequest *api.PullRequest
 	if issue.IsPull {
 		eventType = webhook_module.HookEventPullRequestComment
+		pullRequest = convert.ToAPIPullRequest(ctx, issue.PullRequest, doer)
 	} else {
 		eventType = webhook_module.HookEventIssueComment
 	}

 	permission, _ := access_model.GetUserRepoPermission(ctx, repo, doer)
 	if err := PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, eventType, &api.IssueCommentPayload{
 		Action:      api.HookIssueCommentCreated,
 		Issue:       convert.ToAPIIssue(ctx, doer, issue),
+		PullRequest: pullRequest,
 		Comment:     convert.ToAPIComment(ctx, repo, comment),
 		Repository:  convert.ToRepo(ctx, repo, permission),
 		Sender:      convert.ToUser(ctx, doer, nil),
 		IsPull:      issue.IsPull,
 	}); err != nil {
 		log.Error("PrepareWebhooks [comment_id: %d]: %v", comment.ID, err)
 	}
@@ -440,20 +446,23 @@ func (m *webhookNotifier) DeleteComment(ctx context.Context, doer *user_model.Us
 	}

 	var eventType webhook_module.HookEventType
+	var pullRequest *api.PullRequest
 	if comment.Issue.IsPull {
 		eventType = webhook_module.HookEventPullRequestComment
+		pullRequest = convert.ToAPIPullRequest(ctx, comment.Issue.PullRequest, doer)
 	} else {
 		eventType = webhook_module.HookEventIssueComment
 	}

 	permission, _ := access_model.GetUserRepoPermission(ctx, comment.Issue.Repo, doer)
 	if err := PrepareWebhooks(ctx, EventSource{Repository: comment.Issue.Repo}, eventType, &api.IssueCommentPayload{
 		Action:      api.HookIssueCommentDeleted,
 		Issue:       convert.ToAPIIssue(ctx, doer, comment.Issue),
+		PullRequest: pullRequest,
 		Comment:     convert.ToAPIComment(ctx, comment.Issue.Repo, comment),
 		Repository:  convert.ToRepo(ctx, comment.Issue.Repo, permission),
 		Sender:      convert.ToUser(ctx, doer, nil),
 		IsPull:      comment.Issue.IsPull,
 	}); err != nil {
 		log.Error("PrepareWebhooks [comment_id: %d]: %v", comment.ID, err)
 	}
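Sketch of receiver-side code (an assumption, not part of the diff) showing why the new field matters: issue_comment events now carry the full pull request when the comment is on a PR, so consumers no longer need a follow-up API call to fetch it.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"

	api "code.gitea.io/gitea/modules/structs"
)

func handleWebhook(w http.ResponseWriter, r *http.Request) {
	var p api.IssueCommentPayload
	if err := json.NewDecoder(r.Body).Decode(&p); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	if p.IsPull && p.PullRequest != nil { // pull_request is omitempty for plain issues
		fmt.Printf("comment on PR #%d (merged: %v)\n", p.PullRequest.Index, p.PullRequest.HasMerged)
	}
}

func main() {
	log.Fatal(http.ListenAndServe(":8080", http.HandlerFunc(handleWebhook)))
}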
@@ -525,7 +534,7 @@ func (m *webhookNotifier) IssueChangeLabels(ctx context.Context, doer *user_mode
 		err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestLabel, &api.PullRequestPayload{
 			Action:      api.HookIssueLabelUpdated,
 			Index:       issue.Index,
-			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 			Repository:  convert.ToRepo(ctx, issue.Repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
 			Sender:      convert.ToUser(ctx, doer, nil),
 		})
@@ -567,7 +576,7 @@ func (m *webhookNotifier) IssueChangeMilestone(ctx context.Context, doer *user_m
 		err = PrepareWebhooks(ctx, EventSource{Repository: issue.Repo}, webhook_module.HookEventPullRequestMilestone, &api.PullRequestPayload{
 			Action:      hookAction,
 			Index:       issue.Index,
-			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+			PullRequest: convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 			Repository:  convert.ToRepo(ctx, issue.Repo, permission),
 			Sender:      convert.ToUser(ctx, doer, nil),
 		})
@@ -640,7 +649,7 @@ func (*webhookNotifier) MergePullRequest(ctx context.Context, doer *user_model.U
 	// Merge pull request calls issue.changeStatus so we need to handle separately.
 	apiPullRequest := &api.PullRequestPayload{
 		Index:       pr.Issue.Index,
-		PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, pr, doer),
 		Repository:  convert.ToRepo(ctx, pr.Issue.Repo, permission),
 		Sender:      convert.ToUser(ctx, doer, nil),
 		Action:      api.HookIssueClosed,
@@ -668,7 +677,7 @@ func (m *webhookNotifier) PullRequestChangeTargetBranch(ctx context.Context, doe
 				From: oldBranch,
 			},
 		},
-		PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, pr, doer),
 		Repository:  convert.ToRepo(ctx, issue.Repo, mode),
 		Sender:      convert.ToUser(ctx, doer, nil),
 	}); err != nil {
@@ -703,11 +712,12 @@ func (m *webhookNotifier) PullRequestReview(ctx context.Context, pr *issues_mode
 		return
 	}
 	if err := PrepareWebhooks(ctx, EventSource{Repository: review.Issue.Repo}, reviewHookType, &api.PullRequestPayload{
 		Action:            api.HookIssueReviewed,
 		Index:             review.Issue.Index,
-		PullRequest:       convert.ToAPIPullRequest(ctx, pr, nil),
+		PullRequest:       convert.ToAPIPullRequest(ctx, pr, review.Reviewer),
+		RequestedReviewer: convert.ToUser(ctx, review.Reviewer, nil),
 		Repository:        convert.ToRepo(ctx, review.Issue.Repo, permission),
 		Sender:            convert.ToUser(ctx, review.Reviewer, nil),
 		Review: &api.ReviewPayload{
 			Type:    string(reviewHookType),
 			Content: review.Content,
@@ -729,7 +739,7 @@ func (m *webhookNotifier) PullRequestReviewRequest(ctx context.Context, doer *us
 	}
 	apiPullRequest := &api.PullRequestPayload{
 		Index:             issue.Index,
-		PullRequest:       convert.ToAPIPullRequest(ctx, issue.PullRequest, nil),
+		PullRequest:       convert.ToAPIPullRequest(ctx, issue.PullRequest, doer),
 		RequestedReviewer: convert.ToUser(ctx, reviewer, nil),
 		Repository:        convert.ToRepo(ctx, issue.Repo, permission),
 		Sender:            convert.ToUser(ctx, doer, nil),
@@ -774,7 +784,7 @@ func (m *webhookNotifier) PullRequestSynchronized(ctx context.Context, doer *use
 	if err := PrepareWebhooks(ctx, EventSource{Repository: pr.Issue.Repo}, webhook_module.HookEventPullRequestSync, &api.PullRequestPayload{
 		Action:      api.HookIssueSynchronized,
 		Index:       pr.Issue.Index,
-		PullRequest: convert.ToAPIPullRequest(ctx, pr, nil),
+		PullRequest: convert.ToAPIPullRequest(ctx, pr, doer),
 		Repository:  convert.ToRepo(ctx, pr.Issue.Repo, access_model.Permission{AccessMode: perm.AccessModeOwner}),
 		Sender:      convert.ToUser(ctx, doer, nil),
 	}); err != nil {
@@ -29,15 +29,16 @@
 				<div class="default text">empty multiple dropdown</div>
 				<div class="menu">
 					<div class="item">item</div>
-				</div>
-			</div>
-			<div class="ui multiple clearable search selection dropdown">
-				<input type="hidden" value="1">
-				{{svg "octicon-triangle-down" 14 "dropdown icon"}}
-				{{svg "octicon-x" 14 "remove icon"}}
-				<div class="default text">clearable search dropdown</div>
-				<div class="menu">
-					<div class="item" data-value="1">item</div>
+					<div class="item">sm1</div>
+					<div class="item">sm2</div>
+					<div class="item">medium1</div>
+					<div class="item">medium2</div>
+					<div class="item">large item1</div>
+					<div class="item">large item2</div>
+					<div class="item">large item3</div>
+					<div class="item">very large item test 1</div>
+					<div class="item">very large item test 2</div>
+					<div class="item">very large item test 3</div>
 				</div>
 			</div>
 			<div class="ui buttons">
@@ -50,6 +51,27 @@
 			</div>
 		</div>
 	</div>
+	<div>
+		<div class="ui multiple clearable search selection dropdown tw-max-w-[220px]">
+			<input type="hidden" value="1,2,3,4,5,10">
+			{{svg "octicon-triangle-down" 14 "dropdown icon"}}
+			{{svg "octicon-x" 14 "remove icon"}}
+			<div class="default text">clearable search dropdown</div>
+			<div class="menu">
+				<div class="item" data-value="1">item</div>
+				<div class="item" data-value="2">sm1</div>
+				<div class="item" data-value="3">sm2</div>
+				<div class="item" data-value="4">medium1</div>
+				<div class="item" data-value="5">medium2</div>
+				<div class="item" data-value="6">large item1</div>
+				<div class="item" data-value="7">large item2</div>
+				<div class="item" data-value="8">large item3</div>
+				<div class="item" data-value="9">very large item test 1</div>
+				<div class="item" data-value="10">very large item test 2</div>
+				<div class="item" data-value="11">very large item test 3</div>
+			</div>
+		</div>
+	</div>

 <h2>Selection</h2>
 <div>

templates/swagger/v1_json.tmpl (generated, 58 lines changed)
@@ -3444,107 +3444,125 @@
         "operationId": "issueSearchIssues",
         "parameters": [
           {
+            "enum": [
+              "open",
+              "closed",
+              "all"
+            ],
             "type": "string",
-            "description": "whether issue is open or closed",
+            "default": "open",
+            "description": "State of the issue",
             "name": "state",
             "in": "query"
          },
           {
             "type": "string",
-            "description": "comma separated list of labels. Fetch only issues that have any of this labels. Non existent labels are discarded",
+            "description": "Comma-separated list of label names. Fetch only issues that have any of these labels. Non existent labels are discarded.",
             "name": "labels",
             "in": "query"
           },
           {
             "type": "string",
-            "description": "comma separated list of milestone names. Fetch only issues that have any of this milestones. Non existent are discarded",
+            "description": "Comma-separated list of milestone names. Fetch only issues that have any of these milestones. Non existent milestones are discarded.",
             "name": "milestones",
             "in": "query"
           },
           {
             "type": "string",
-            "description": "search string",
+            "description": "Search string",
             "name": "q",
             "in": "query"
           },
           {
             "type": "integer",
             "format": "int64",
-            "description": "repository to prioritize in the results",
+            "description": "Repository ID to prioritize in the results",
             "name": "priority_repo_id",
             "in": "query"
           },
           {
+            "enum": [
+              "issues",
+              "pulls"
+            ],
             "type": "string",
-            "description": "filter by type (issues / pulls) if set",
+            "description": "Filter by issue type",
             "name": "type",
             "in": "query"
           },
           {
             "type": "string",
             "format": "date-time",
-            "description": "Only show notifications updated after the given time. This is a timestamp in RFC 3339 format",
+            "description": "Only show issues updated after the given time (RFC 3339 format)",
             "name": "since",
             "in": "query"
           },
           {
             "type": "string",
             "format": "date-time",
-            "description": "Only show notifications updated before the given time. This is a timestamp in RFC 3339 format",
+            "description": "Only show issues updated before the given time (RFC 3339 format)",
             "name": "before",
             "in": "query"
           },
           {
             "type": "boolean",
-            "description": "filter (issues / pulls) assigned to you, default is false",
+            "default": false,
+            "description": "Filter issues or pulls assigned to the authenticated user",
             "name": "assigned",
             "in": "query"
           },
           {
             "type": "boolean",
-            "description": "filter (issues / pulls) created by you, default is false",
+            "default": false,
+            "description": "Filter issues or pulls created by the authenticated user",
             "name": "created",
             "in": "query"
           },
           {
             "type": "boolean",
-            "description": "filter (issues / pulls) mentioning you, default is false",
+            "default": false,
+            "description": "Filter issues or pulls mentioning the authenticated user",
             "name": "mentioned",
             "in": "query"
           },
           {
             "type": "boolean",
-            "description": "filter pulls requesting your review, default is false",
+            "default": false,
+            "description": "Filter pull requests where the authenticated user's review was requested",
             "name": "review_requested",
             "in": "query"
           },
           {
             "type": "boolean",
-            "description": "filter pulls reviewed by you, default is false",
+            "default": false,
+            "description": "Filter pull requests reviewed by the authenticated user",
            "name": "reviewed",
             "in": "query"
           },
           {
             "type": "string",
-            "description": "filter by owner",
+            "description": "Filter by repository owner",
             "name": "owner",
             "in": "query"
           },
           {
             "type": "string",
-            "description": "filter by team (requires organization owner parameter to be provided)",
+            "description": "Filter by team (requires organization owner parameter)",
             "name": "team",
             "in": "query"
           },
           {
+            "minimum": 1,
             "type": "integer",
-            "description": "page number of results to return (1-based)",
+            "default": 1,
+            "description": "Page number of results to return (1-based)",
             "name": "page",
             "in": "query"
           },
           {
+            "minimum": 0,
             "type": "integer",
-            "description": "page size of results",
+            "description": "Number of items per page",
             "name": "limit",
             "in": "query"
           }
@@ -3552,6 +3570,12 @@
         "responses": {
           "200": {
            "$ref": "#/responses/IssueList"
+          },
+          "400": {
+            "$ref": "#/responses/error"
+          },
+          "422": {
+            "$ref": "#/responses/validationError"
           }
         }
       }
@ -0,0 +1 @@
|
|||||||
|
|
@ -0,0 +1 @@
|
|||||||
|
ref: refs/heads/master
|
@ -0,0 +1,4 @@
|
|||||||
|
[core]
|
||||||
|
repositoryformatversion = 0
|
||||||
|
filemode = true
|
||||||
|
bare = true
|
@ -0,0 +1,8 @@
|
|||||||
|
This repository will be used to test code search. The snippet below shows its directory structure
|
||||||
|
|
||||||
|
.
|
||||||
|
├── avocado.md
|
||||||
|
├── cucumber.md
|
||||||
|
├── ham.md
|
||||||
|
└── potato
|
||||||
|
└── ham.md
|
@ -0,0 +1,7 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
ORI_DIR=`pwd`
|
||||||
|
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
|
||||||
|
cd "$ORI_DIR"
|
||||||
|
for i in `ls "$SHELL_FOLDER/post-receive.d"`; do
|
||||||
|
sh "$SHELL_FOLDER/post-receive.d/$i"
|
||||||
|
done
|
@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" post-receive
|
7
tests/gitea-repositories-meta/org42/search-by-path.git/hooks/pre-receive
Executable file
7
tests/gitea-repositories-meta/org42/search-by-path.git/hooks/pre-receive
Executable file
@ -0,0 +1,7 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
ORI_DIR=`pwd`
|
||||||
|
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
|
||||||
|
cd "$ORI_DIR"
|
||||||
|
for i in `ls "$SHELL_FOLDER/pre-receive.d"`; do
|
||||||
|
sh "$SHELL_FOLDER/pre-receive.d/$i"
|
||||||
|
done
|
@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" pre-receive
|
@ -0,0 +1,7 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
ORI_DIR=`pwd`
|
||||||
|
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
|
||||||
|
cd "$ORI_DIR"
|
||||||
|
for i in `ls "$SHELL_FOLDER/proc-receive.d"`; do
|
||||||
|
sh "$SHELL_FOLDER/proc-receive.d/$i"
|
||||||
|
done
|
@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" proc-receive
|
7
tests/gitea-repositories-meta/org42/search-by-path.git/hooks/update
Executable file
7
tests/gitea-repositories-meta/org42/search-by-path.git/hooks/update
Executable file
@ -0,0 +1,7 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
ORI_DIR=`pwd`
|
||||||
|
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
|
||||||
|
cd "$ORI_DIR"
|
||||||
|
for i in `ls "$SHELL_FOLDER/update.d"`; do
|
||||||
|
sh "$SHELL_FOLDER/update.d/$i" $1 $2 $3
|
||||||
|
done
|
@ -0,0 +1,2 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
"$GITEA_ROOT/gitea" hook --config="$GITEA_ROOT/$GITEA_CONF" update $1 $2 $3
|
@ -0,0 +1,6 @@
|
|||||||
|
# git ls-files --others --exclude-from=.git/info/exclude
|
||||||
|
# Lines that start with '#' are comments.
|
||||||
|
# For a project mostly in C, the following would be a good set of
|
||||||
|
# exclude patterns (uncomment them if you want to use them):
|
||||||
|
# *.[oa]
|
||||||
|
# *~
|
@ -0,0 +1,13 @@
|
|||||||
|
90c1019714259b24fb81711d4416ac0f18667dfa refs/heads/DefaultBranch
|
||||||
|
985f0301dba5e7b34be866819cd15ad3d8f508ee refs/heads/branch2
|
||||||
|
65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/develop
|
||||||
|
65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/feature/1
|
||||||
|
78fb907e3a3309eae4fe8fef030874cebbf1cd5e refs/heads/home-md-img-check
|
||||||
|
3731fe53b763859aaf83e703ee731f6b9447ff1e refs/heads/master
|
||||||
|
62fb502a7172d4453f0322a2cc85bddffa57f07a refs/heads/pr-to-update
|
||||||
|
4649299398e4d39a5c09eb4f534df6f1e1eb87cc refs/heads/sub-home-md-img-check
|
||||||
|
3fa2f829675543ecfc16b2891aebe8bf0608a8f4 refs/notes/commits
|
||||||
|
4a357436d925b5c974181ff12a994538ddc5a269 refs/pull/2/head
|
||||||
|
5f22f7d0d95d614d25a5b68592adb345a4b5c7fd refs/pull/3/head
|
||||||
|
62fb502a7172d4453f0322a2cc85bddffa57f07a refs/pull/5/head
|
||||||
|
65f1bf27bc3bf70f64657658635e66094edbcb4d refs/tags/v1.1
|
Binary file not shown.
@ -0,0 +1,2 @@
|
|||||||
|
P pack-393dc29256bc27cb2ec73898507df710be7a3cf5.pack
|
||||||
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,14 @@
|
|||||||
|
# pack-refs with: peeled fully-peeled sorted
|
||||||
|
90c1019714259b24fb81711d4416ac0f18667dfa refs/heads/DefaultBranch
|
||||||
|
985f0301dba5e7b34be866819cd15ad3d8f508ee refs/heads/branch2
|
||||||
|
65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/develop
|
||||||
|
65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/feature/1
|
||||||
|
78fb907e3a3309eae4fe8fef030874cebbf1cd5e refs/heads/home-md-img-check
|
||||||
|
3731fe53b763859aaf83e703ee731f6b9447ff1e refs/heads/master
|
||||||
|
62fb502a7172d4453f0322a2cc85bddffa57f07a refs/heads/pr-to-update
|
||||||
|
4649299398e4d39a5c09eb4f534df6f1e1eb87cc refs/heads/sub-home-md-img-check
|
||||||
|
3fa2f829675543ecfc16b2891aebe8bf0608a8f4 refs/notes/commits
|
||||||
|
4a357436d925b5c974181ff12a994538ddc5a269 refs/pull/2/head
|
||||||
|
5f22f7d0d95d614d25a5b68592adb345a4b5c7fd refs/pull/3/head
|
||||||
|
62fb502a7172d4453f0322a2cc85bddffa57f07a refs/pull/5/head
|
||||||
|
65f1bf27bc3bf70f64657658635e66094edbcb4d refs/tags/v1.1
|
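The new fixture is a bare repository (bare = true in its config above). As a quick sanity check, a small sketch that lists its refs with plain git, assuming the path is relative to a Gitea source checkout; the output should match the packed-refs listing above:

package main

import (
    "fmt"
    "os/exec"
)

func main() {
    // List every ref baked into the search-by-path fixture repository.
    out, err := exec.Command("git",
        "--git-dir", "tests/gitea-repositories-meta/org42/search-by-path.git",
        "for-each-ref", "--format=%(objectname) %(refname)").Output()
    if err != nil {
        panic(err)
    }
    fmt.Print(string(out))
}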
@ -177,7 +177,7 @@ func TestAPIGetAll(t *testing.T) {
     var apiOrgList []*api.Organization
 
     DecodeJSON(t, resp, &apiOrgList)
-    assert.Len(t, apiOrgList, 12)
+    assert.Len(t, apiOrgList, 13)
     assert.Equal(t, "Limited Org 36", apiOrgList[1].FullName)
     assert.Equal(t, "limited", apiOrgList[1].Visibility)
 
@ -186,7 +186,7 @@ func TestAPIGetAll(t *testing.T) {
     resp = MakeRequest(t, req, http.StatusOK)
 
     DecodeJSON(t, resp, &apiOrgList)
-    assert.Len(t, apiOrgList, 8)
+    assert.Len(t, apiOrgList, 9)
     assert.Equal(t, "org 17", apiOrgList[0].FullName)
     assert.Equal(t, "public", apiOrgList[0].Visibility)
 }
@ -94,9 +94,9 @@ func TestAPISearchRepo(t *testing.T) {
     }{
         {
             name: "RepositoriesMax50", requestURL: "/api/v1/repos/search?limit=50&private=false", expectedResults: expectedResults{
-                nil:   {count: 35},
-                user:  {count: 35},
-                user2: {count: 35},
+                nil:   {count: 36},
+                user:  {count: 36},
+                user2: {count: 36},
             },
         },
         {
@ -5,6 +5,7 @@ package integration
 
 import (
     "bytes"
+    "context"
     "crypto/rand"
     "encoding/hex"
     "fmt"
@ -943,3 +944,59 @@ func TestDataAsync_Issue29101(t *testing.T) {
         defer r2.Close()
     })
 }
+
+func TestAgitPullPush(t *testing.T) {
+    onGiteaRun(t, func(t *testing.T, u *url.URL) {
+        baseAPITestContext := NewAPITestContext(t, "user2", "repo1", auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+
+        u.Path = baseAPITestContext.GitPath()
+        u.User = url.UserPassword("user2", userPassword)
+
+        dstPath := t.TempDir()
+        doGitClone(dstPath, u)(t)
+
+        gitRepo, err := git.OpenRepository(context.Background(), dstPath)
+        assert.NoError(t, err)
+        defer gitRepo.Close()
+
+        doGitCreateBranch(dstPath, "test-agit-push")
+
+        // commit 1
+        _, err = generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-")
+        assert.NoError(t, err)
+
+        // push to create an agit pull request
+        err = git.NewCommand(git.DefaultContext, "push", "origin",
+            "-o", "title=test-title", "-o", "description=test-description",
+            "HEAD:refs/for/master/test-agit-push",
+        ).Run(&git.RunOpts{Dir: dstPath})
+        assert.NoError(t, err)
+
+        // check pull request exist
+        pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{BaseRepoID: 1, Flow: issues_model.PullRequestFlowAGit, HeadBranch: "user2/test-agit-push"})
+        assert.NoError(t, pr.LoadIssue(db.DefaultContext))
+        assert.Equal(t, "test-title", pr.Issue.Title)
+        assert.Equal(t, "test-description", pr.Issue.Content)
+
+        // commit 2
+        _, err = generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-2-")
+        assert.NoError(t, err)
+
+        // push 2
+        err = git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test-agit-push").Run(&git.RunOpts{Dir: dstPath})
+        assert.NoError(t, err)
+
+        // reset to first commit
+        err = git.NewCommand(git.DefaultContext, "reset", "--hard", "HEAD~1").Run(&git.RunOpts{Dir: dstPath})
+        assert.NoError(t, err)
+
+        // test force push without confirm
+        _, stderr, err := git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test-agit-push").RunStdString(&git.RunOpts{Dir: dstPath})
+        assert.Error(t, err)
+        assert.Contains(t, stderr, "[remote rejected] HEAD -> refs/for/master/test-agit-push (request `force-push` push option)")
+
+        // test force push with confirm
+        err = git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test-agit-push", "-o", "force-push").Run(&git.RunOpts{Dir: dstPath})
+        assert.NoError(t, err)
+    })
+}
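For readers unfamiliar with the AGit flow the test exercises: pushing to refs/for/&lt;base-branch&gt;/&lt;topic&gt; asks the server to create or update a pull request instead of a branch, and "-o" push options carry the PR metadata. A rough standalone equivalent of the test's first push, using os/exec rather than Gitea's git module, with a placeholder clone path:

package main

import (
    "fmt"
    "os/exec"
)

func main() {
    // The target ref encodes the base branch ("master") and topic ("test-agit-push");
    // the push options become the new pull request's title and description.
    cmd := exec.Command("git", "push", "origin",
        "-o", "title=test-title",
        "-o", "description=test-description",
        "HEAD:refs/for/master/test-agit-push")
    cmd.Dir = "/path/to/local/clone" // placeholder working copy
    out, err := cmd.CombinedOutput()
    fmt.Print(string(out))
    if err != nil {
        panic(err)
    }
}

A second push to the same ref updates the open PR; as the end of the test shows, a non-fast-forward update is rejected unless the "force-push" push option is supplied.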
@ -14,6 +14,7 @@ import (
     repo_model "code.gitea.io/gitea/models/repo"
     "code.gitea.io/gitea/models/unittest"
     user_model "code.gitea.io/gitea/models/user"
+    "code.gitea.io/gitea/modules/test"
     repo_service "code.gitea.io/gitea/services/repository"
     "code.gitea.io/gitea/tests"
 
@ -73,3 +74,80 @@ func TestPullCompare(t *testing.T) {
         assert.EqualValues(t, editButtonCount, 0, "Expected not to find a button to edit a file in the PR diff view because head repository has been deleted")
     })
 }
+
+func TestPullCompare_EnableAllowEditsFromMaintainer(t *testing.T) {
+    onGiteaRun(t, func(t *testing.T, u *url.URL) {
+        // repo3 is private
+        repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+        assert.True(t, repo3.IsPrivate)
+
+        // user4 forks repo3
+        user4Session := loginUser(t, "user4")
+        forkedRepoName := "user4-forked-repo3"
+        testRepoFork(t, user4Session, repo3.OwnerName, repo3.Name, "user4", forkedRepoName, "")
+        forkedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user4", Name: forkedRepoName})
+        assert.True(t, forkedRepo.IsPrivate)
+
+        // user4 creates a new branch and a PR
+        testEditFileToNewBranch(t, user4Session, "user4", forkedRepoName, "master", "user4/update-readme", "README.md", "Hello, World\n(Edited by user4)\n")
+        resp := testPullCreateDirectly(t, user4Session, repo3.OwnerName, repo3.Name, "master", "user4", forkedRepoName, "user4/update-readme", "PR for user4 forked repo3")
+        prURL := test.RedirectURL(resp)
+
+        // user2 (admin of repo3) goes to the PR files page
+        user2Session := loginUser(t, "user2")
+        resp = user2Session.MakeRequest(t, NewRequest(t, "GET", fmt.Sprintf("%s/files", prURL)), http.StatusOK)
+        htmlDoc := NewHTMLParser(t, resp.Body)
+        nodes := htmlDoc.doc.Find(".diff-file-box[data-new-filename=\"README.md\"] .diff-file-header-actions .dropdown .menu a")
+        if assert.Equal(t, 1, nodes.Length()) {
+            // there is only "View File" button, no "Edit File" button
+            assert.Equal(t, "View File", nodes.First().Text())
+            viewFileLink, exists := nodes.First().Attr("href")
+            if assert.True(t, exists) {
+                user2Session.MakeRequest(t, NewRequest(t, "GET", viewFileLink), http.StatusOK)
+            }
+        }
+
+        // user4 goes to the PR page and enable "Allow maintainers to edit"
+        resp = user4Session.MakeRequest(t, NewRequest(t, "GET", prURL), http.StatusOK)
+        htmlDoc = NewHTMLParser(t, resp.Body)
+        dataURL, exists := htmlDoc.doc.Find("#allow-edits-from-maintainers").Attr("data-url")
+        assert.True(t, exists)
+        req := NewRequestWithValues(t, "POST", fmt.Sprintf("%s/set_allow_maintainer_edit", dataURL), map[string]string{
+            "_csrf":                 htmlDoc.GetCSRF(),
+            "allow_maintainer_edit": "true",
+        })
+        user4Session.MakeRequest(t, req, http.StatusOK)
+
+        // user2 (admin of repo3) goes to the PR files page again
+        resp = user2Session.MakeRequest(t, NewRequest(t, "GET", fmt.Sprintf("%s/files", prURL)), http.StatusOK)
+        htmlDoc = NewHTMLParser(t, resp.Body)
+        nodes = htmlDoc.doc.Find(".diff-file-box[data-new-filename=\"README.md\"] .diff-file-header-actions .dropdown .menu a")
+        if assert.Equal(t, 2, nodes.Length()) {
+            // there are "View File" button and "Edit File" button
+            assert.Equal(t, "View File", nodes.First().Text())
+            viewFileLink, exists := nodes.First().Attr("href")
+            if assert.True(t, exists) {
+                user2Session.MakeRequest(t, NewRequest(t, "GET", viewFileLink), http.StatusOK)
+            }
+
+            assert.Equal(t, "Edit File", nodes.Last().Text())
+            editFileLink, exists := nodes.Last().Attr("href")
+            if assert.True(t, exists) {
+                // edit the file
+                resp := user2Session.MakeRequest(t, NewRequest(t, "GET", editFileLink), http.StatusOK)
+                htmlDoc := NewHTMLParser(t, resp.Body)
+                lastCommit := htmlDoc.GetInputValueByName("last_commit")
+                assert.NotEmpty(t, lastCommit)
+                req := NewRequestWithValues(t, "POST", editFileLink, map[string]string{
+                    "_csrf":          htmlDoc.GetCSRF(),
+                    "last_commit":    lastCommit,
+                    "tree_path":      "README.md",
+                    "content":        "File is edited by the maintainer user2",
+                    "commit_summary": "user2 updated the file",
+                    "commit_choice":  "direct",
+                })
+                user2Session.MakeRequest(t, req, http.StatusSeeOther)
+            }
+        }
+    })
+}
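The toggle exercised above is an ordinary authenticated form POST against the pull request's set_allow_maintainer_edit endpoint. A rough sketch outside the test harness, where the host, PR path, session cookie, and CSRF value are all placeholders scraped from a real session:

package main

import (
    "net/http"
    "net/url"
    "strings"
)

func main() {
    form := url.Values{}
    form.Set("_csrf", "CSRF_FROM_PAGE") // placeholder, taken from the rendered PR page
    form.Set("allow_maintainer_edit", "true")
    req, err := http.NewRequest("POST",
        "https://gitea.example.com/owner/repo/pulls/1/set_allow_maintainer_edit", // hypothetical PR URL
        strings.NewReader(form.Encode()))
    if err != nil {
        panic(err)
    }
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
    req.AddCookie(&http.Cookie{Name: "i_like_gitea", Value: "SESSION_ID"}) // placeholder session cookie
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    resp.Body.Close()
}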
@ -1364,6 +1364,10 @@ table th[data-sortt-desc] .svg {
   min-width: 0; /* make ellipsis work */
 }
 
+.ui.multiple.selection.dropdown {
+  flex-wrap: wrap;
+}
+
 .ui.ui.dropdown.selection {
   min-width: 14em; /* match the default min width */
 }
@ -91,3 +91,11 @@ export function checkAppUrl() {
   showGlobalErrorMessage(`Your ROOT_URL in app.ini is "${appUrl}", it's unlikely matching the site you are visiting.
 Mismatched ROOT_URL config causes wrong URL links for web UI/mail content/webhook notification/OAuth2 sign-in.`, 'warning');
 }
+
+export function checkAppUrlScheme() {
+  const curUrl = window.location.href;
+  // some users visit "http://domain" while appUrl is "https://domain", COOKIE_SECURE makes it impossible to sign in
+  if (curUrl.startsWith('http:') && appUrl.startsWith('https:')) {
+    showGlobalErrorMessage(`This instance is configured to run under HTTPS (by ROOT_URL config), you are accessing by HTTP. Mismatched scheme might cause problems for sign-in/sign-up.`, 'warning');
+  }
+}
@ -8,7 +8,9 @@ export function initDiffFileTree() {
 
   const fileTreeView = createApp(DiffFileTree);
   fileTreeView.mount(el);
+}
 
+export function initDiffFileList() {
   const fileListElement = document.querySelector('#diff-file-list');
   if (!fileListElement) return;
 
@ -1,7 +1,7 @@
 import $ from 'jquery';
 import {initCompReactionSelector} from './comp/ReactionSelector.ts';
 import {initRepoIssueContentHistory} from './repo-issue-content.ts';
-import {initDiffFileTree} from './repo-diff-filetree.ts';
+import {initDiffFileTree, initDiffFileList} from './repo-diff-filetree.ts';
 import {initDiffCommitSelect} from './repo-diff-commitselect.ts';
 import {validateTextareaNonEmpty} from './comp/ComboMarkdownEditor.ts';
 import {initViewedCheckboxListenerFor, countAndUpdateViewedFiles, initExpandAndCollapseFilesButton} from './pull-view-file.ts';
@ -216,6 +216,7 @@ export function initRepoDiffView() {
   initRepoDiffConversationForm();
   if (!$('#diff-file-list').length) return;
   initDiffFileTree();
+  initDiffFileList();
   initDiffCommitSelect();
   initRepoDiffShowMore();
   initRepoDiffReviewButton();
@ -187,14 +187,17 @@ export function initRepoIssueCommentDelete() {
       const path = conversationHolder.getAttribute('data-path');
       const side = conversationHolder.getAttribute('data-side');
       const idx = conversationHolder.getAttribute('data-idx');
-      const lineType = conversationHolder.closest('tr').getAttribute('data-line-type');
+      const lineType = conversationHolder.closest('tr')?.getAttribute('data-line-type');
 
-      if (lineType === 'same') {
-        document.querySelector(`[data-path="${path}"] .add-code-comment[data-idx="${idx}"]`).classList.remove('tw-invisible');
-      } else {
-        document.querySelector(`[data-path="${path}"] .add-code-comment[data-side="${side}"][data-idx="${idx}"]`).classList.remove('tw-invisible');
+      // the conversation holder could appear either on the "Conversation" page, or the "Files Changed" page
+      // on the Conversation page, there is no parent "tr", so no need to do anything for "add-code-comment"
+      if (lineType) {
+        if (lineType === 'same') {
+          document.querySelector(`[data-path="${path}"] .add-code-comment[data-idx="${idx}"]`).classList.remove('tw-invisible');
+        } else {
+          document.querySelector(`[data-path="${path}"] .add-code-comment[data-side="${side}"][data-idx="${idx}"]`).classList.remove('tw-invisible');
+        }
       }
 
       conversationHolder.remove();
@ -1,4 +1,9 @@
-import {checkAppUrl} from './common-page.ts';
+import {checkAppUrl, checkAppUrlScheme} from './common-page.ts';
+
+export function initUserCheckAppUrl() {
+  if (!document.querySelector('.page-content.user.signin, .page-content.user.signup, .page-content.user.link-account')) return;
+  checkAppUrlScheme();
+}
 
 export function initUserAuthOauth2() {
   const outer = document.querySelector('#oauth2-login-navigator');
@ -24,7 +24,7 @@ import {initFindFileInRepo} from './features/repo-findfile.ts';
 import {initCommentContent, initMarkupContent} from './markup/content.ts';
 import {initPdfViewer} from './render/pdf.ts';
 
-import {initUserAuthOauth2} from './features/user-auth.ts';
+import {initUserAuthOauth2, initUserCheckAppUrl} from './features/user-auth.ts';
 import {
   initRepoIssueDue,
   initRepoIssueReferenceRepositorySearch,
@ -219,6 +219,7 @@ onDomReady(() => {
     initCommitStatuses,
     initCaptcha,
 
+    initUserCheckAppUrl,
    initUserAuthOauth2,
    initUserAuthWebAuthn,
    initUserAuthWebAuthnRegister,