Reduce memory usage in testgit (#15306)
* Reduce memory use in rawTest
* Just use a hash sum for the diffs

Signed-off-by: Andrew Thornton <art27@cantab.net>
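Both changes follow the same pattern: stop holding complete HTTP response bodies in memory while the integration tests run. Raw-file responses are streamed into a recorder that only keeps a byte count, and pull-request diffs are streamed into an FNV hash so that later assertions compare small digests rather than entire diff strings. As a rough, standalone sketch of that idea (illustration only, not code from this commit):

package main

import (
    "fmt"
    "hash/fnv"
    "io"
    "strings"
)

// hashAndCount drains a response body without buffering it;
// only a small FNV-32 digest and the byte count are retained.
func hashAndCount(body io.Reader) (digest []byte, length int64, err error) {
    h := fnv.New32()
    n, err := io.Copy(h, body) // hash.Hash is an io.Writer, so this streams
    return h.Sum(nil), n, err
}

func main() {
    diff := "diff --git a/README.md b/README.md\n..."
    before, n, _ := hashAndCount(strings.NewReader(diff))
    after, _, _ := hashAndCount(strings.NewReader(diff))
    // Comparing digests replaces storing and comparing the full diff text.
    fmt.Printf("%d bytes read, diff unchanged: %v\n", n, string(before) == string(after))
}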
parent b101fa83a6, commit 8be2cc4fc7
2 changed files with 69 additions and 25 deletions
@@ -208,13 +208,13 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
         // Request raw paths
         req := NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", little))
-        resp := session.MakeRequest(t, req, http.StatusOK)
-        assert.Equal(t, littleSize, resp.Body.Len())
+        resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
+        assert.Equal(t, littleSize, resp.Length)
 
         setting.CheckLFSVersion()
         if setting.LFS.StartServer {
             req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", littleLFS))
-            resp = session.MakeRequest(t, req, http.StatusOK)
+            resp := session.MakeRequest(t, req, http.StatusOK)
             assert.NotEqual(t, littleSize, resp.Body.Len())
             assert.LessOrEqual(t, resp.Body.Len(), 1024)
             if resp.Body.Len() != littleSize && resp.Body.Len() <= 1024 {
@@ -224,12 +224,12 @@ func rawTest(t *testing.T, ctx *APITestContext, little, big, littleLFS, bigLFS s
         if !testing.Short() {
             req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", big))
-            resp = session.MakeRequest(t, req, http.StatusOK)
-            assert.Equal(t, bigSize, resp.Body.Len())
+            resp := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
+            assert.Equal(t, bigSize, resp.Length)
 
             if setting.LFS.StartServer {
                 req = NewRequest(t, "GET", path.Join("/", username, reponame, "/raw/branch/master/", bigLFS))
-                resp = session.MakeRequest(t, req, http.StatusOK)
+                resp := session.MakeRequest(t, req, http.StatusOK)
                 assert.NotEqual(t, bigSize, resp.Body.Len())
                 if resp.Body.Len() != bigSize && resp.Body.Len() <= 1024 {
                     assert.Contains(t, resp.Body.String(), models.LFSMetaFileIdentifier)
@@ -450,27 +450,27 @@ func doMergeFork(ctx, baseCtx APITestContext, baseBranch, headBranch string) fun
         t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
 
         // Then get the diff string
-        var diffStr string
+        var diffHash string
         t.Run("GetDiff", func(t *testing.T) {
             req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(baseCtx.Username), url.PathEscape(baseCtx.Reponame), pr.Index))
-            resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
-            diffStr = resp.Body.String()
+            resp := ctx.Session.MakeRequestNilResponseHashSumRecorder(t, req, http.StatusOK)
+            diffHash = string(resp.Hash.Sum(nil))
         })
 
         // Now: Merge the PR & make sure that doesn't break the PR page or change its diff
         t.Run("MergePR", doAPIMergePullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index))
         t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
-        t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffStr))
+        t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash))
 
         // Then: Delete the head branch & make sure that doesn't break the PR page or change its diff
         t.Run("DeleteHeadBranch", doBranchDelete(baseCtx, baseCtx.Username, baseCtx.Reponame, headBranch))
         t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
-        t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffStr))
+        t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash))
 
         // Delete the head repository & make sure that doesn't break the PR page or change its diff
         t.Run("DeleteHeadRepository", doAPIDeleteRepository(ctx))
         t.Run("EnsureCanSeePull", doEnsureCanSeePull(baseCtx, pr))
-        t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffStr))
+        t.Run("EnsureDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash))
     }
 }
@@ -514,22 +514,14 @@ func doEnsureCanSeePull(ctx APITestContext, pr api.PullRequest) func(t *testing.
     }
 }
 
-func doEnsureDiffNoChange(ctx APITestContext, pr api.PullRequest, diffStr string) func(t *testing.T) {
+func doEnsureDiffNoChange(ctx APITestContext, pr api.PullRequest, diffHash string) func(t *testing.T) {
     return func(t *testing.T) {
         req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d.diff", url.PathEscape(ctx.Username), url.PathEscape(ctx.Reponame), pr.Index))
-        resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
-        expectedMaxLen := len(diffStr)
-        if expectedMaxLen > 800 {
-            expectedMaxLen = 800
-        }
-        actual := resp.Body.String()
-        actualMaxLen := len(actual)
-        if actualMaxLen > 800 {
-            actualMaxLen = 800
-        }
+        resp := ctx.Session.MakeRequestNilResponseHashSumRecorder(t, req, http.StatusOK)
+        actual := string(resp.Hash.Sum(nil))
 
-        equal := diffStr == actual
-        assert.True(t, equal, "Unexpected change in the diff string: expected: %s but was actually: %s", diffStr[:expectedMaxLen], actual[:actualMaxLen])
+        equal := diffHash == actual
+        assert.True(t, equal, "Unexpected change in the diff string: expected hash: %s but was actually: %s", diffHash, actual)
     }
 }
@@ -9,6 +9,8 @@ import (
     "context"
     "database/sql"
     "fmt"
+    "hash"
+    "hash/fnv"
     "io"
     "net/http"
     "net/http/cookiejar"
@@ -58,6 +60,26 @@ func NewNilResponseRecorder() *NilResponseRecorder {
     }
 }
 
+type NilResponseHashSumRecorder struct {
+    httptest.ResponseRecorder
+    Hash   hash.Hash
+    Length int
+}
+
+func (n *NilResponseHashSumRecorder) Write(b []byte) (int, error) {
+    _, _ = n.Hash.Write(b)
+    n.Length += len(b)
+    return len(b), nil
+}
+
+// NewRecorder returns an initialized ResponseRecorder.
+func NewNilResponseHashSumRecorder() *NilResponseHashSumRecorder {
+    return &NilResponseHashSumRecorder{
+        Hash:             fnv.New32(),
+        ResponseRecorder: *httptest.NewRecorder(),
+    }
+}
+
 func TestMain(m *testing.M) {
     defer log.Close()
@@ -284,6 +306,23 @@ func (s *TestSession) MakeRequestNilResponseRecorder(t testing.TB, req *http.Req
     return resp
 }
 
+func (s *TestSession) MakeRequestNilResponseHashSumRecorder(t testing.TB, req *http.Request, expectedStatus int) *NilResponseHashSumRecorder {
+    t.Helper()
+    baseURL, err := url.Parse(setting.AppURL)
+    assert.NoError(t, err)
+    for _, c := range s.jar.Cookies(baseURL) {
+        req.AddCookie(c)
+    }
+    resp := MakeRequestNilResponseHashSumRecorder(t, req, expectedStatus)
+
+    ch := http.Header{}
+    ch.Add("Cookie", strings.Join(resp.Header()["Set-Cookie"], ";"))
+    cr := http.Request{Header: ch}
+    s.jar.SetCookies(baseURL, cr.Cookies())
+
+    return resp
+}
+
 const userPassword = "password"
 
 var loginSessionCache = make(map[string]*TestSession, 10)
@@ -429,6 +468,19 @@ func MakeRequestNilResponseRecorder(t testing.TB, req *http.Request, expectedSta
     return recorder
 }
 
+func MakeRequestNilResponseHashSumRecorder(t testing.TB, req *http.Request, expectedStatus int) *NilResponseHashSumRecorder {
+    t.Helper()
+    recorder := NewNilResponseHashSumRecorder()
+    c.ServeHTTP(recorder, req)
+    if expectedStatus != NoExpectedStatus {
+        if !assert.EqualValues(t, expectedStatus, recorder.Code,
+            "Request: %s %s", req.Method, req.URL.String()) {
+            logUnexpectedResponse(t, &recorder.ResponseRecorder)
+        }
+    }
+    return recorder
+}
+
 // logUnexpectedResponse logs the contents of an unexpected response.
 func logUnexpectedResponse(t testing.TB, recorder *httptest.ResponseRecorder) {
     t.Helper()
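For orientation, a hypothetical caller of the two new helpers could look like the sketch below (the user, repository, and file names are invented; only the helper names and fields come from the diff above):

func TestRawAndDiffWithoutBuffering(t *testing.T) {
    session := loginUser(t, "user2") // assumed fixture user

    // Raw file: the recorder discards the body and only tracks its length.
    req := NewRequest(t, "GET", "/user2/repo1/raw/branch/master/README.md")
    raw := session.MakeRequestNilResponseRecorder(t, req, http.StatusOK)
    assert.Greater(t, raw.Length, 0)

    // PR diff: only an FNV digest of the body is kept; fetching it twice should yield the same sum.
    req = NewRequest(t, "GET", "/user2/repo1/pulls/1.diff")
    first := session.MakeRequestNilResponseHashSumRecorder(t, req, http.StatusOK)
    second := session.MakeRequestNilResponseHashSumRecorder(t, req, http.StatusOK)
    assert.Equal(t, first.Hash.Sum(nil), second.Hash.Sum(nil))
}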