Merge pull request '[gitea] v1.21.10-0 cherry-pick' (#2993) from earl-warren/forgejo:wip-v1.21-gitea-cherry-pick into v1.21/forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/2993
Reviewed-by: Gusted <gusted@noreply.codeberg.org>

Commit bb25ae7bbe
27 changed files with 189 additions and 57 deletions
@@ -139,13 +139,7 @@ func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *CommitVerific
         }
     }

-    keyID := ""
-    if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 {
-        keyID = fmt.Sprintf("%X", *sig.IssuerKeyId)
-    }
-    if keyID == "" && sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 {
-        keyID = fmt.Sprintf("%X", sig.IssuerFingerprint[12:20])
-    }
+    keyID := tryGetKeyIDFromSignature(sig)
     defaultReason := NoKeyFound

     // First check if the sig has a keyID and if so just look at that
@@ -134,3 +134,13 @@ func extractSignature(s string) (*packet.Signature, error) {
     }
     return sig, nil
 }
+
+func tryGetKeyIDFromSignature(sig *packet.Signature) string {
+    if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 {
+        return fmt.Sprintf("%016X", *sig.IssuerKeyId)
+    }
+    if sig.IssuerFingerprint != nil && len(sig.IssuerFingerprint) > 0 {
+        return fmt.Sprintf("%016X", sig.IssuerFingerprint[12:20])
+    }
+    return ""
+}
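For context, a minimal standalone sketch (not part of the diff) of why the new helper formats with %016X where the old inline code used %X: a key ID whose leading byte is zero would otherwise lose its leading digits, which is exactly the case the new test below exercises.

package main

import "fmt"

func main() {
    keyID := uint64(0x038D1A3EADDBEA9C)
    fmt.Printf("%X\n", keyID)    // 38D1A3EADDBEA9C  (15 digits, leading zero dropped)
    fmt.Printf("%016X\n", keyID) // 038D1A3EADDBEA9C (always 16 digits)
}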
@@ -11,7 +11,9 @@ import (
     "code.gitea.io/gitea/models/unittest"
     user_model "code.gitea.io/gitea/models/user"
     "code.gitea.io/gitea/modules/timeutil"
+    "code.gitea.io/gitea/modules/util"

+    "github.com/keybase/go-crypto/openpgp/packet"
     "github.com/stretchr/testify/assert"
 )

@@ -391,3 +393,13 @@ epiDVQ==
         assert.Equal(t, time.Unix(1586105389, 0), expire)
     }
 }
+
+func TestTryGetKeyIDFromSignature(t *testing.T) {
+    assert.Empty(t, tryGetKeyIDFromSignature(&packet.Signature{}))
+    assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
+        IssuerKeyId: util.ToPointer(uint64(0x38D1A3EADDBEA9C)),
+    }))
+    assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
+        IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c},
+    }))
+}
@@ -120,6 +120,16 @@ func (c *halfCommitter) Close() error {

 // TxContext represents a transaction Context,
 // it will reuse the existing transaction in the parent context or create a new one.
+// Some tips to use:
+//
+// 1 It's always recommended to use `WithTx` in new code instead of `TxContext`, since `WithTx` will handle the transaction automatically.
+// 2. To maintain the old code which uses `TxContext`:
+// a. Always call `Close()` before returning regardless of whether `Commit()` has been called.
+// b. Always call `Commit()` before returning if there are no errors, even if the code did not change any data.
+// c. Remember the `Committer` will be a halfCommitter when a transaction is being reused.
+// So calling `Commit()` will do nothing, but calling `Close()` without calling `Commit()` will rollback the transaction.
+// And all operations submitted by the caller stack will be rollbacked as well, not only the operations in the current function.
+// d. It doesn't mean rollback is forbidden, but always do it only when there is an error, and you do want to rollback.
 func TxContext(parentCtx context.Context) (*Context, Committer, error) {
     if sess, ok := inTransaction(parentCtx); ok {
         return newContext(parentCtx, sess, true), &halfCommitter{committer: sess}, nil
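As a side note, a hedged sketch of the pattern tip 1 recommends; it is not part of the diff, and ExampleLabel is a made-up model used only for illustration. db.WithTx commits when the callback returns nil and rolls back otherwise, so callers never touch Commit() or Close() themselves.

package models

import (
    "context"

    "code.gitea.io/gitea/models/db"
)

// ExampleLabel is a hypothetical bean, only here to make the sketch compile.
type ExampleLabel struct {
    ID   int64
    Name string
}

func createExampleLabel(ctx context.Context, label *ExampleLabel) error {
    // WithTx reuses an outer transaction if ctx already carries one,
    // otherwise it opens a new one and commits or rolls back automatically.
    return db.WithTx(ctx, func(ctx context.Context) error {
        return db.Insert(ctx, label)
    })
}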
@@ -117,6 +117,10 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
         return nil, err
     }

+    if err := comments.LoadAttachments(ctx); err != nil {
+        return nil, err
+    }
+
     // Find all reviews by ReviewID
     reviews := make(map[int64]*Review)
     ids := make([]int64, 0, len(comments))
@@ -622,7 +622,7 @@ func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_mo

     // skip it when reviewer hase been request to review
     if review != nil && review.Type == ReviewTypeRequest {
-        return nil, nil
+        return nil, committer.Commit() // still commit the transaction, or committer.Close() will rollback it, even if it's a reused transaction.
     }

     // if the reviewer is an official reviewer,
@@ -319,8 +319,9 @@ func CreateOrganization(org *Organization, owner *user_model.User) (err error) {

     // Add initial creator to organization and owner team.
     if err = db.Insert(ctx, &OrgUser{
         UID: owner.ID,
         OrgID: org.ID,
+        IsPublic: setting.Service.DefaultOrgMemberVisible,
     }); err != nil {
         return fmt.Errorf("insert org-user relation: %w", err)
     }
@@ -216,6 +216,12 @@ func fixBrokenRepoUnit16961(repoUnit *repo_model.RepoUnit, bs []byte) (fixed boo
         return false, nil
     }

+    var cfg any
+    err = json.UnmarshalHandleDoubleEncode(bs, &cfg)
+    if err == nil {
+        return false, nil
+    }
+
     switch repoUnit.Type {
     case unit.TypeCode, unit.TypeReleases, unit.TypeWiki, unit.TypeProjects:
         cfg := &repo_model.UnitConfig{}
@@ -162,7 +162,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
     if opts.RepoArchives || opts.All {
         if err := commonCheckStorage(ctx, logger, autofix,
             &commonStorageCheckOptions{
-                storer: storage.RepoAvatars,
+                storer: storage.RepoArchives,
                 isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
                     exists, err := repo.ExistsRepoArchiverWithStoragePath(ctx, path)
                     if err == nil || errors.Is(err, util.ErrInvalidArgument) {
@@ -47,6 +47,12 @@ func convertPGPSignature(c *object.Commit) *CommitGPGSignature {
         return nil
     }

+    if c.Encoding != "" && c.Encoding != "UTF-8" {
+        if _, err = fmt.Fprintf(&w, "\nencoding %s\n", c.Encoding); err != nil {
+            return nil
+        }
+    }
+
     if _, err = fmt.Fprintf(&w, "\n\n%s", c.Message); err != nil {
         return nil
     }
@@ -84,6 +84,8 @@ readLoop:
             commit.Committer = &Signature{}
             commit.Committer.Decode(data)
             _, _ = payloadSB.Write(line)
+        case "encoding":
+            _, _ = payloadSB.Write(line)
         case "gpgsig":
             _, _ = signatureSB.Write(data)
             _ = signatureSB.WriteByte('\n')
@@ -125,6 +125,73 @@ empty commit`, commitFromReader.Signature.Payload)
     assert.EqualValues(t, commitFromReader, commitFromReader2)
 }

+func TestCommitWithEncodingFromReader(t *testing.T) {
+    commitString := `feaf4ba6bc635fec442f46ddd4512416ec43c2c2 commit 1074
+tree ca3fad42080dd1a6d291b75acdfc46e5b9b307e5
+parent 47b24e7ab977ed31c5a39989d570847d6d0052af
+author KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100
+committer KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100
+encoding ISO-8859-1
+gpgsig -----BEGIN PGP SIGNATURE-----
+
+ iQGzBAABCgAdFiEE9HRrbqvYxPT8PXbefPSEkrowAa8FAmYGg7IACgkQfPSEkrow
+ Aa9olwv+P0HhtCM6CRvlUmPaqswRsDPNR4i66xyXGiSxdI9V5oJL7HLiQIM7KrFR
+ gizKa2COiGtugv8fE+TKqXKaJx6uJUJEjaBd8E9Af9PrAzjWj+A84lU6/PgPS8hq
+ zOfZraLOEWRH4tZcS+u2yFLu3ez2Wqh1xW5LNy7xqEedMXEFD1HwSJ0+pjacNkzr
+ frp6Asyt7xRI6YmgFJZJoRsS3Ktr6rtKeRL2IErSQQyorOqj6gKrglhrhfG/114j
+ FKB1v4or0WZ1DE8iP2SJZ3n+/K1IuWAINh7MVdb7PndfBPEa+IL+ucNk5uzEE8Jd
+ G8smGxXUeFEt2cP1dj2W8EgAxuA9sTnH9dqI5aRqy5ifDjuya7Emm8sdOUvtGdmn
+ SONRzusmu5n3DgV956REL7x62h7JuqmBz/12HZkr0z0zgXkcZ04q08pSJATX5N1F
+ yN+tWxTsWg+zhDk96d5Esdo9JMjcFvPv0eioo30GAERaz1hoD7zCMT4jgUFTQwgz
+ jw4YcO5u
+ =r3UU
+ -----END PGP SIGNATURE-----
+
+ISO-8859-1`
+
+    sha := SHA1{0xfe, 0xaf, 0x4b, 0xa6, 0xbc, 0x63, 0x5f, 0xec, 0x44, 0x2f, 0x46, 0xdd, 0xd4, 0x51, 0x24, 0x16, 0xec, 0x43, 0xc2, 0xc2}
+    gitRepo, err := openRepositoryWithDefaultContext(filepath.Join(testReposDir, "repo1_bare"))
+    assert.NoError(t, err)
+    assert.NotNil(t, gitRepo)
+    defer gitRepo.Close()
+
+    commitFromReader, err := CommitFromReader(gitRepo, sha, strings.NewReader(commitString))
+    assert.NoError(t, err)
+    if !assert.NotNil(t, commitFromReader) {
+        return
+    }
+    assert.EqualValues(t, sha, commitFromReader.ID)
+    assert.EqualValues(t, `-----BEGIN PGP SIGNATURE-----
+
+iQGzBAABCgAdFiEE9HRrbqvYxPT8PXbefPSEkrowAa8FAmYGg7IACgkQfPSEkrow
+Aa9olwv+P0HhtCM6CRvlUmPaqswRsDPNR4i66xyXGiSxdI9V5oJL7HLiQIM7KrFR
+gizKa2COiGtugv8fE+TKqXKaJx6uJUJEjaBd8E9Af9PrAzjWj+A84lU6/PgPS8hq
+zOfZraLOEWRH4tZcS+u2yFLu3ez2Wqh1xW5LNy7xqEedMXEFD1HwSJ0+pjacNkzr
+frp6Asyt7xRI6YmgFJZJoRsS3Ktr6rtKeRL2IErSQQyorOqj6gKrglhrhfG/114j
+FKB1v4or0WZ1DE8iP2SJZ3n+/K1IuWAINh7MVdb7PndfBPEa+IL+ucNk5uzEE8Jd
+G8smGxXUeFEt2cP1dj2W8EgAxuA9sTnH9dqI5aRqy5ifDjuya7Emm8sdOUvtGdmn
+SONRzusmu5n3DgV956REL7x62h7JuqmBz/12HZkr0z0zgXkcZ04q08pSJATX5N1F
+yN+tWxTsWg+zhDk96d5Esdo9JMjcFvPv0eioo30GAERaz1hoD7zCMT4jgUFTQwgz
+jw4YcO5u
+=r3UU
+-----END PGP SIGNATURE-----
+`, commitFromReader.Signature.Signature)
+    assert.EqualValues(t, `tree ca3fad42080dd1a6d291b75acdfc46e5b9b307e5
+parent 47b24e7ab977ed31c5a39989d570847d6d0052af
+author KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100
+committer KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100
+encoding ISO-8859-1
+
+ISO-8859-1`, commitFromReader.Signature.Payload)
+    assert.EqualValues(t, "KN4CK3R <admin@oldschoolhack.me>", commitFromReader.Author.String())
+
+    commitFromReader2, err := CommitFromReader(gitRepo, sha, strings.NewReader(commitString+"\n\n"))
+    assert.NoError(t, err)
+    commitFromReader.CommitMessage += "\n\n"
+    commitFromReader.Signature.Payload += "\n\n"
+    assert.EqualValues(t, commitFromReader, commitFromReader2)
+}
+
 func TestHasPreviousCommit(t *testing.T) {
     bareRepo1Path := filepath.Join(testReposDir, "repo1_bare")

@@ -248,7 +248,7 @@ type DivergeObject struct {
 // GetDivergingCommits returns the number of commits a targetBranch is ahead or behind a baseBranch
 func GetDivergingCommits(ctx context.Context, repoPath, baseBranch, targetBranch string) (do DivergeObject, err error) {
     cmd := NewCommand(ctx, "rev-list", "--count", "--left-right").
-        AddDynamicArguments(baseBranch + "..." + targetBranch)
+        AddDynamicArguments(baseBranch + "..." + targetBranch).AddArguments("--")
     stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath})
     if err != nil {
         return do, err
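For illustration only (the branch names are made up, not from this diff): the trailing "--" added above ends git's revision parsing, so a path that happens to share the branch name can no longer make rev-list fail with an ambiguous-argument error. A minimal stdlib-only sketch of the resulting invocation:

package main

import (
    "fmt"
    "os/exec"
)

func main() {
    // Equivalent to: git rev-list --count --left-right main...feature --
    out, err := exec.Command("git", "rev-list", "--count", "--left-right", "main...feature", "--").CombinedOutput()
    if err != nil {
        fmt.Println("rev-list failed:", err)
        return
    }
    // Output is "<behind>\t<ahead>" for feature relative to main, e.g. "2\t5".
    fmt.Print(string(out))
}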
@@ -509,9 +509,17 @@ func TestMathBlock(t *testing.T) {
         `\(a\) \(b\)`,
         `<p><code class="language-math is-loading">a</code> <code class="language-math is-loading">b</code></p>` + nl,
     },
+    {
+        `$a$.`,
+        `<p><code class="language-math is-loading">a</code>.</p>` + nl,
+    },
+    {
+        `.$a$`,
+        `<p>.$a$</p>` + nl,
+    },
     {
         `$a a$b b$`,
-        `<p><code class="language-math is-loading">a a$b b</code></p>` + nl,
+        `<p>$a a$b b$</p>` + nl,
     },
     {
         `a a$b b`,
@@ -519,7 +527,15 @@
     },
     {
         `a$b $a a$b b$`,
-        `<p>a$b <code class="language-math is-loading">a a$b b</code></p>` + nl,
+        `<p>a$b $a a$b b$</p>` + nl,
+    },
+    {
+        "a$x$",
+        `<p>a$x$</p>` + nl,
+    },
+    {
+        "$x$a",
+        `<p>$x$a</p>` + nl,
     },
     {
         "$$a$$",
@@ -41,9 +41,12 @@ func (parser *inlineParser) Trigger() []byte {
     return parser.start[0:1]
 }

+func isPunctuation(b byte) bool {
+    return b == '.' || b == '!' || b == '?' || b == ',' || b == ';' || b == ':'
+}
+
 func isAlphanumeric(b byte) bool {
-    // Github only cares about 0-9A-Za-z
-    return (b >= '0' && b <= '9') || (b >= 'A' && b <= 'Z') || (b >= 'a' && b <= 'z')
+    return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
 }

 // Parse parses the current line and returns a result of parsing.
@@ -56,7 +59,7 @@ func (parser *inlineParser) Parse(parent ast.Node, block text.Reader, pc parser.
     }

     precedingCharacter := block.PrecendingCharacter()
-    if precedingCharacter < 256 && isAlphanumeric(byte(precedingCharacter)) {
+    if precedingCharacter < 256 && (isAlphanumeric(byte(precedingCharacter)) || isPunctuation(byte(precedingCharacter))) {
         // need to exclude things like `a$` from being considered a start
         return nil
     }
@@ -75,14 +78,19 @@
         ender += pos

         // Now we want to check the character at the end of our parser section
-        // that is ender + len(parser.end)
+        // that is ender + len(parser.end) and check if char before ender is '\'
         pos = ender + len(parser.end)
         if len(line) <= pos {
             break
         }
-        if !isAlphanumeric(line[pos]) {
+        suceedingCharacter := line[pos]
+        if !isPunctuation(suceedingCharacter) && !(suceedingCharacter == ' ') {
+            return nil
+        }
+        if line[ender-1] != '\\' {
             break
         }

         // move the pointer onwards
         ender += len(parser.end)
     }

@@ -3110,7 +3110,6 @@ config.enable_openid_signin = Enable OpenID Sign-In
 config.show_registration_button = Show Register Button
 config.require_sign_in_view = Require Sign-In to View Pages
 config.mail_notify = Enable Email Notifications
-config.disable_key_size_check = Disable Minimum Key Size Check
 config.enable_captcha = Enable CAPTCHA
 config.active_code_lives = Active Code Lives
 config.reset_password_code_lives = Recover Account Code Expiry Time
package-lock.json (generated): 8 changes
@@ -27,7 +27,7 @@
         "escape-goat": "4.0.0",
         "fast-glob": "3.3.1",
         "jquery": "3.7.1",
-        "katex": "0.16.9",
+        "katex": "0.16.10",
         "license-checker-webpack-plugin": "0.2.1",
         "lightningcss-loader": "2.1.0",
         "mermaid": "10.6.1",
@@ -6726,9 +6726,9 @@
       "integrity": "sha512-b+z6yF1d4EOyDgylzQo5IminlUmzSeqR1hs/bzjBNjuGras4FXq/6TrzjxfN0j+TmI0ltJzTNlqXUMCniciwKQ=="
     },
     "node_modules/katex": {
-      "version": "0.16.9",
-      "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.9.tgz",
-      "integrity": "sha512-fsSYjWS0EEOwvy81j3vRA8TEAhQhKiqO+FQaKWp0m39qwOzHVBgAUBIXWj1pB+O2W3fIpNa6Y9KSKCVbfPhyAQ==",
+      "version": "0.16.10",
+      "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.10.tgz",
+      "integrity": "sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==",
       "funding": [
         "https://opencollective.com/katex",
         "https://github.com/sponsors/katex"
@@ -26,7 +26,7 @@
     "escape-goat": "4.0.0",
     "fast-glob": "3.3.1",
     "jquery": "3.7.1",
-    "katex": "0.16.9",
+    "katex": "0.16.10",
     "license-checker-webpack-plugin": "0.2.1",
     "lightningcss-loader": "2.1.0",
     "mermaid": "10.6.1",
@@ -8,6 +8,7 @@ import (

     "code.gitea.io/gitea/modules/base"
     "code.gitea.io/gitea/modules/context"
+    "code.gitea.io/gitea/modules/util"
 )

 const (
@@ -17,7 +18,7 @@ const (
 // FindFiles render the page to find repository files
 func FindFiles(ctx *context.Context) {
     path := ctx.Params("*")
-    ctx.Data["TreeLink"] = ctx.Repo.RepoLink + "/src/" + path
-    ctx.Data["DataLink"] = ctx.Repo.RepoLink + "/tree-list/" + path
+    ctx.Data["TreeLink"] = ctx.Repo.RepoLink + "/src/" + util.PathEscapeSegments(path)
+    ctx.Data["DataLink"] = ctx.Repo.RepoLink + "/tree-list/" + util.PathEscapeSegments(path)
     ctx.HTML(http.StatusOK, tplFindFiles)
 }
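A rough stdlib-only sketch (not from this diff) of what per-segment escaping gives you here: each path segment is escaped individually while the "/" separators are preserved, so a file name containing characters like spaces or "#" can no longer break the generated links. util.PathEscapeSegments is assumed to behave along these lines.

package main

import (
    "fmt"
    "net/url"
    "strings"
)

// pathEscapeSegments escapes every segment of a slash-separated path
// while keeping the separators themselves intact.
func pathEscapeSegments(p string) string {
    parts := strings.Split(p, "/")
    for i, s := range parts {
        parts[i] = url.PathEscape(s)
    }
    return strings.Join(parts, "/")
}

func main() {
    fmt.Println(pathEscapeSegments("dir name/file#1.txt")) // dir%20name/file%231.txt
}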
@@ -6,7 +6,6 @@ package auth
 import (
     "net/http"

-    "code.gitea.io/gitea/models/db"
     user_model "code.gitea.io/gitea/models/user"
     "code.gitea.io/gitea/modules/log"
 )
@@ -29,40 +28,33 @@ func (s *Session) Name() string {
 // object for that uid.
 // Returns nil if there is no user uid stored in the session.
 func (s *Session) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
-    user := SessionUser(sess)
-    if user != nil {
-        return user, nil
-    }
-    return nil, nil
-}
-
-// SessionUser returns the user object corresponding to the "uid" session variable.
-func SessionUser(sess SessionStore) *user_model.User {
     if sess == nil {
-        return nil
+        return nil, nil
     }

     // Get user ID
     uid := sess.Get("uid")
     if uid == nil {
-        return nil
+        return nil, nil
     }
     log.Trace("Session Authorization: Found user[%d]", uid)

     id, ok := uid.(int64)
     if !ok {
-        return nil
+        return nil, nil
     }

     // Get user object
-    user, err := user_model.GetUserByID(db.DefaultContext, id)
+    user, err := user_model.GetUserByID(req.Context(), id)
     if err != nil {
         if !user_model.IsErrUserNotExist(err) {
-            log.Error("GetUserById: %v", err)
+            log.Error("GetUserByID: %v", err)
+            // Return the err as-is to keep current signed-in session, in case the err is something like context.Canceled. Otherwise non-existing user (nil, nil) will make the caller clear the signed-in session.
+            return nil, err
         }
-        return nil
+        return nil, nil
     }

     log.Trace("Session Authorization: Logged in user %-v", user)
-    return user
+    return user, nil
 }
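A hedged, self-contained sketch (hypothetical caller and simplified types, not from this diff) of what the new (user, error) return is meant to encode: a transient error such as context.Canceled keeps the current signed-in session, while (nil, nil) tells the caller to treat the request as unauthenticated.

package main

import (
    "context"
    "errors"
    "fmt"
)

type user struct{ Name string }

// verify stands in for Session.Verify: it reports transient failures as errors
// and "no user in session" as (nil, nil).
func verify(ctx context.Context) (*user, error) {
    if err := ctx.Err(); err != nil {
        return nil, err // e.g. context.Canceled: keep the session, surface the error
    }
    return nil, nil // no uid stored: genuinely unauthenticated
}

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    cancel()
    if _, err := verify(ctx); errors.Is(err, context.Canceled) {
        fmt.Println("transient failure: keep the signed-in session")
    }
}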
@@ -250,14 +250,13 @@ func migrateRepository(ctx context.Context, doer *user_model.User, downloader ba
         }
         log.Warn("migrating milestones is not supported, ignored")
     }

     msBatchSize := uploader.MaxBatchInsertSize("milestone")
     for len(milestones) > 0 {
         if len(milestones) < msBatchSize {
             msBatchSize = len(milestones)
         }

-        if err := uploader.CreateMilestones(milestones...); err != nil {
+        if err := uploader.CreateMilestones(milestones[:msBatchSize]...); err != nil {
             return err
         }
         milestones = milestones[msBatchSize:]
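A small standalone sketch (made-up data, not from this diff) of the behaviour the one-line fix above restores: each iteration must insert only the first msBatchSize milestones, not the whole remaining slice.

package main

import "fmt"

func main() {
    milestones := []string{"v1.0", "v1.1", "v1.2", "v1.3", "v1.4"}
    batchSize := 2
    for len(milestones) > 0 {
        if len(milestones) < batchSize {
            batchSize = len(milestones)
        }
        // Before the fix the equivalent call received the whole remaining slice.
        fmt.Println("insert batch:", milestones[:batchSize])
        milestones = milestones[batchSize:]
    }
}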
@@ -132,10 +132,7 @@ func loadOneBranch(ctx context.Context, repo *repo_model.Repository, dbBranch *g
     p := protectedBranches.GetFirstMatched(branchName)
     isProtected := p != nil

-    divergence := &git.DivergeObject{
-        Ahead: -1,
-        Behind: -1,
-    }
+    var divergence *git.DivergeObject

     // it's not default branch
     if repo.DefaultBranch != dbBranch.Name && !dbBranch.IsDeleted {
@@ -146,6 +143,11 @@
         }
     }

+    if divergence == nil {
+        // tolerate error that we can't get divergence
+        divergence = &git.DivergeObject{Ahead: -1, Behind: -1}
+    }
+
     pr, err := issues_model.GetLatestPullRequestByHeadInfo(repo.ID, branchName)
     if err != nil {
         return nil, fmt.Errorf("GetLatestPullRequestByHeadInfo: %v", err)
@@ -105,7 +105,6 @@ func TestWebhookDeliverAuthorizationHeader(t *testing.T) {
     err := hook.SetHeaderAuthorization("Bearer s3cr3t-t0ken")
     assert.NoError(t, err)
     assert.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, hook))
-    db.GetEngine(db.DefaultContext).NoAutoTime().DB().Logger.ShowSQL(true)

     hookTask := &webhook_model.HookTask{HookID: hook.ID, EventType: webhook_module.HookEventPush, Payloader: &api.PushPayload{}}

@@ -151,8 +151,6 @@
         <dd>{{if .Service.RequireSignInView}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}</dd>
         <dt>{{ctx.Locale.Tr "admin.config.mail_notify"}}</dt>
         <dd>{{if .Service.EnableNotifyMail}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}</dd>
-        <dt>{{ctx.Locale.Tr "admin.config.disable_key_size_check"}}</dt>
-        <dd>{{if .SSH.MinimumKeySizeCheck}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}</dd>
         <dt>{{ctx.Locale.Tr "admin.config.enable_captcha"}}</dt>
         <dd>{{if .Service.EnableCaptcha}}{{svg "octicon-check"}}{{else}}{{svg "octicon-x"}}{{end}}</dd>
         <dt>{{ctx.Locale.Tr "admin.config.default_keep_email_private"}}</dt>
@@ -25,7 +25,7 @@
         </div>
         <div class="flex-item-trailing">
             <button class="ui tiny red button">
-                {{svg "octicon-warning" 14}} CJK文本测试
+                {{svg "octicon-alert" 14}} CJK文本测试
             </button>
             <button class="ui tiny primary button">
                 {{svg "octicon-info" 14}} Button
@@ -54,7 +54,7 @@
         </div>
         <div class="flex-item-trailing">
             <button class="ui tiny red button">
-                {{svg "octicon-warning" 12}} CJK文本测试 <!-- single CJK text test, it shouldn't be horizontal -->
+                {{svg "octicon-alert" 12}} CJK文本测试 <!-- single CJK text test, it shouldn't be horizontal -->
             </button>
         </div>
     </div>
@@ -20,7 +20,7 @@
     )}}

     <div class="field footer gt-mx-3">
-        <span class="markup-info">{{svg "octicon-markup"}} {{ctx.Locale.Tr "repo.diff.comment.markdown_info"}}</span>
+        <span class="markup-info">{{svg "octicon-markdown"}} {{ctx.Locale.Tr "repo.diff.comment.markdown_info"}}</span>
         <div class="gt-text-right">
             {{if $.reply}}
                 <button class="ui submit primary tiny button btn-reply" type="submit">{{ctx.Locale.Tr "repo.diff.comment.reply"}}</button>
@@ -162,7 +162,8 @@ export function initRepoIssueCommentDelete() {
         _csrf: csrfToken,
       }).done(() => {
         const $conversationHolder = $this.closest('.conversation-holder');
+        const $parentTimelineItem = $this.closest('.timeline-item');
+        const $parentTimelineGroup = $this.closest('.timeline-item-group');
         // Check if this was a pending comment.
         if ($conversationHolder.find('.pending-label').length) {
           const $counter = $('#review-box .review-comments-counter');
@@ -185,6 +186,11 @@
           }
           $conversationHolder.remove();
         }
+        // Check if there is no review content, move the time avatar upward to avoid overlapping the content below.
+        if (!$parentTimelineGroup.find('.timeline-item.comment').length && !$parentTimelineItem.find('.conversation-holder').length) {
+          const $timelineAvatar = $parentTimelineGroup.find('.timeline-avatar');
+          $timelineAvatar.removeClass('timeline-avatar-offset');
+        }
       });
     }
     return false;