Batch blob_excerpt requests for "Expand All" into a single fetch
When expanding all collapsed sections in a diff, the frontend now collects per-gap parameters from all expander buttons, joins them with commas, and makes a single GET request. The backend detects batch mode (comma in last_left), renders each section to HTML, and returns a JSON string array. Single-section expands are unchanged.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent e86f2ff72b
commit e643492464
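As a rough illustration of the request/response contract described in the commit message, a client-side sketch in Go follows. The repository path, commit SHA, and numeric values are made up; in practice the frontend derives the URL and per-gap values from the expander buttons' data-url attributes (see the TypeScript hunk below).

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// One comma-separated entry per collapsed gap (two hypothetical gaps here).
	params := url.Values{}
	params.Set("last_left", "0,30")
	params.Set("last_right", "0,30")
	params.Set("left", "5,40")
	params.Set("right", "5,40")
	params.Set("left_hunk_size", "4,9")
	params.Set("right_hunk_size", "4,9")
	params.Set("direction", "full")
	params.Set("path", "README.md")

	// Hypothetical endpoint; the real base link comes from each expander button's data-url.
	resp, err := http.Get("https://gitea.example.com/owner/repo/blob_excerpt/0123abcd?" + params.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Batch mode (a comma in last_left) returns a JSON array of rendered HTML
	// fragments, one per gap, in request order; a single gap still returns plain HTML.
	var sections []string
	if err := json.NewDecoder(resp.Body).Decode(&sections); err != nil {
		panic(err)
	}
	fmt.Println("expanded", len(sections), "sections")
}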
@@ -4,6 +4,7 @@
package repo

import (
	"bytes"
	gocontext "context"
	"encoding/csv"
	"errors"
@@ -12,6 +13,7 @@ import (
	"net/url"
	"path/filepath"
	"sort"
	"strconv"
	"strings"

	"code.gitea.io/gitea/models/db"
@@ -746,6 +748,20 @@ func attachHiddenCommentIDs(section *gitdiff.DiffSection, lineComments map[int64
	}
}

// splitInts splits a comma-separated string of integers into a slice.
func splitInts(s string) ([]int, error) {
	parts := strings.Split(s, ",")
	result := make([]int, len(parts))
	for i, p := range parts {
		v, err := strconv.Atoi(strings.TrimSpace(p))
		if err != nil {
			return nil, err
		}
		result[i] = v
	}
	return result, nil
}

// ExcerptBlob render blob excerpt contents
func ExcerptBlob(ctx *context.Context) {
	commitID := ctx.PathParam("sha")
@@ -778,6 +794,13 @@ func ExcerptBlob(ctx *context.Context) {
		diffBlobExcerptData.BaseLink = ctx.Repo.RepoLink + "/wiki/blob_excerpt"
	}

	// Batch mode: if last_left contains a comma, treat all per-gap params as
	// comma-separated lists and return a JSON array of HTML strings.
	if strings.Contains(ctx.FormString("last_left"), ",") {
		excerptBlobBatch(ctx, gitRepo, commitID, filePath, opts.Language, diffBlobExcerptData)
		return
	}

	commit, err := gitRepo.GetCommit(commitID)
	if err != nil {
		ctx.ServerError("GetCommit", err)
@@ -839,3 +862,128 @@ func ExcerptBlob(ctx *context.Context) {

	ctx.HTML(http.StatusOK, tplBlobExcerpt)
}

func excerptBlobBatch(ctx *context.Context, gitRepo *git.Repository, commitID, filePath, language string, diffBlobExcerptData *gitdiff.DiffBlobExcerptData) {
	lastLefts, err := splitInts(ctx.FormString("last_left"))
	if err != nil {
		ctx.HTTPError(http.StatusBadRequest, "invalid last_left values")
		return
	}
	lastRights, err := splitInts(ctx.FormString("last_right"))
	if err != nil {
		ctx.HTTPError(http.StatusBadRequest, "invalid last_right values")
		return
	}
	lefts, err := splitInts(ctx.FormString("left"))
	if err != nil {
		ctx.HTTPError(http.StatusBadRequest, "invalid left values")
		return
	}
	rights, err := splitInts(ctx.FormString("right"))
	if err != nil {
		ctx.HTTPError(http.StatusBadRequest, "invalid right values")
		return
	}
	leftHunkSizes, err := splitInts(ctx.FormString("left_hunk_size"))
	if err != nil {
		ctx.HTTPError(http.StatusBadRequest, "invalid left_hunk_size values")
		return
	}
	rightHunkSizes, err := splitInts(ctx.FormString("right_hunk_size"))
	if err != nil {
		ctx.HTTPError(http.StatusBadRequest, "invalid right_hunk_size values")
		return
	}

	n := len(lastLefts)
	if len(lastRights) != n || len(lefts) != n || len(rights) != n || len(leftHunkSizes) != n || len(rightHunkSizes) != n {
		ctx.HTTPError(http.StatusBadRequest, "all per-gap parameter arrays must have the same length")
		return
	}

	commit, err := gitRepo.GetCommit(commitID)
	if err != nil {
		ctx.ServerError("GetCommit", err)
		return
	}
	blob, err := commit.Tree.GetBlobByPath(filePath)
	if err != nil {
		ctx.ServerError("GetBlobByPath", err)
		return
	}
	reader, err := blob.DataAsync()
	if err != nil {
		ctx.ServerError("DataAsync", err)
		return
	}
	blobData, err := io.ReadAll(reader)
	reader.Close()
	if err != nil {
		ctx.ServerError("ReadAll", err)
		return
	}

	diffBlobExcerptData.PullIssueIndex = ctx.FormInt64("pull_issue_index")
	var lineComments map[int64][]*issues_model.Comment
	if diffBlobExcerptData.PullIssueIndex > 0 {
		if !ctx.Repo.CanRead(unit.TypePullRequests) {
			ctx.NotFound(nil)
			return
		}
		issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, diffBlobExcerptData.PullIssueIndex)
		if err != nil {
			log.Error("GetIssueByIndex error: %v", err)
		} else if issue.IsPull {
			ctx.Data["Issue"] = issue
			ctx.Data["CanBlockUser"] = func(blocker, blockee *user_model.User) bool {
				return user_service.CanBlockUser(ctx, ctx.Doer, blocker, blockee)
			}
			ctx.Data["PageIsPullFiles"] = true
			ctx.Data["AfterCommitID"] = diffBlobExcerptData.AfterCommitID
			allComments, err := issues_model.FetchCodeComments(ctx, issue, ctx.Doer, ctx.FormBool("show_outdated"))
			if err != nil {
				log.Error("FetchCodeComments error: %v", err)
			} else {
				lineComments = allComments[filePath]
			}
		}
	}

	ctx.Data["FileNameHash"] = git.HashFilePathForWebUI(filePath)
	ctx.Data["DiffBlobExcerptData"] = diffBlobExcerptData

	htmlStrings := make([]string, n)
	for i := range n {
		opts := gitdiff.BlobExcerptOptions{
			LastLeft:      lastLefts[i],
			LastRight:     lastRights[i],
			LeftIndex:     lefts[i],
			RightIndex:    rights[i],
			LeftHunkSize:  leftHunkSizes[i],
			RightHunkSize: rightHunkSizes[i],
			Direction:     "full",
			Language:      language,
		}

		section, err := gitdiff.BuildBlobExcerptDiffSection(filePath, bytes.NewReader(blobData), opts)
		if err != nil {
			ctx.ServerError("BuildBlobExcerptDiffSection", err)
			return
		}

		if lineComments != nil {
			attachCommentsToLines(section, lineComments)
			attachHiddenCommentIDs(section, lineComments)
		}

		ctx.Data["section"] = section
		html, err := ctx.RenderToHTML(tplBlobExcerpt, ctx.Data)
		if err != nil {
			ctx.ServerError("RenderToHTML", err)
			return
		}
		htmlStrings[i] = string(html)
	}

	ctx.JSON(http.StatusOK, htmlStrings)
}
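For illustration only, a standalone sketch of how the comma-joined per-gap values are parsed; it simply mirrors the splitInts helper added above, and the input strings are hypothetical.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// Standalone copy of the splitInts helper above, for demonstration only.
func splitInts(s string) ([]int, error) {
	parts := strings.Split(s, ",")
	result := make([]int, len(parts))
	for i, p := range parts {
		v, err := strconv.Atoi(strings.TrimSpace(p))
		if err != nil {
			return nil, err
		}
		result[i] = v
	}
	return result, nil
}

func main() {
	lastLefts, _ := splitInts("0, 30") // hypothetical values for two gaps -> [0 30]
	leftHunks, _ := splitInts("4,9")   // -> [4 9]
	fmt.Println(lastLefts, leftHunks)

	// A non-integer entry fails the whole batch, which the handler maps to HTTP 400.
	if _, err := splitInts("4,x"); err != nil {
		fmt.Println("rejected:", err)
	}
}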
@@ -12,11 +12,13 @@ import (

	"code.gitea.io/gitea/models/unittest"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/json"
	"code.gitea.io/gitea/modules/test"
	repo_service "code.gitea.io/gitea/services/repository"
	"code.gitea.io/gitea/tests"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestCompareTag(t *testing.T) {
@@ -157,3 +159,136 @@ func TestCompareCodeExpand(t *testing.T) {
		}
	})
}

func TestBlobExcerptSingleAndBatch(t *testing.T) {
	onGiteaRun(t, func(t *testing.T, u *url.URL) {
		user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
		repo, err := repo_service.CreateRepositoryDirectly(t.Context(), user1, user1, repo_service.CreateRepoOptions{
			Name:          "test_blob_excerpt_batch",
			Readme:        "Default",
			AutoInit:      true,
			DefaultBranch: "main",
		}, true)
		require.NoError(t, err)

		session := loginUser(t, user1.Name)

		// Create a file with 50 lines so the diff has multiple collapsed sections
		lines := make([]string, 50)
		for i := range lines {
			lines[i] = fmt.Sprintf("line %d", i+1)
		}
		testEditFile(t, session, user1.Name, repo.Name, "main", "README.md", strings.Join(lines, "\n")+"\n")

		// Create a branch and change a line in the middle to produce two expander gaps
		testEditFileToNewBranch(t, session, user1.Name, repo.Name, "main", "excerpt-branch", "README.md",
			func() string {
				modified := make([]string, 50)
				copy(modified, lines)
				modified[24] = "CHANGED line 25"
				return strings.Join(modified, "\n") + "\n"
			}(),
		)

		// Load the compare page
		req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/compare/main...excerpt-branch", user1.Name, repo.Name))
		resp := session.MakeRequest(t, req, http.StatusOK)
		htmlDoc := NewHTMLParser(t, resp.Body)
		els := htmlDoc.Find(`button.code-expander-button[data-url]`)

		// We need at least 2 expander buttons to test batch mode
		require.GreaterOrEqual(t, els.Length(), 2, "expected at least 2 expander buttons")

		// Deduplicate by anchor param to get one URL per collapsed section
		// (updown rows have two buttons with the same anchor but different directions)
		seen := map[string]bool{}
		var expanderURLs []string
		for i := range els.Length() {
			link := els.Eq(i).AttrOr("data-url", "")
			parsed, err := url.Parse(link)
			require.NoError(t, err)
			anchor := parsed.Query().Get("anchor")
			if !seen[anchor] {
				seen[anchor] = true
				expanderURLs = append(expanderURLs, link)
			}
		}
		require.GreaterOrEqual(t, len(expanderURLs), 2, "expected at least 2 unique expander sections")

		t.Run("SingleFetch", func(t *testing.T) {
			defer tests.PrintCurrentTest(t)()
			// Rewrite direction to "full" as the frontend does for expand-all
			singleURL := strings.Replace(expanderURLs[0], "direction=down", "direction=full", 1)
			singleURL = strings.Replace(singleURL, "direction=up", "direction=full", 1)
			req := NewRequest(t, "GET", singleURL)
			resp := session.MakeRequest(t, req, http.StatusOK)
			body := resp.Body.String()
			// Single mode returns HTML directly, should contain diff table rows
			assert.Contains(t, body, `class="lines-`)
			assert.NotContains(t, body, `[`) // should not be JSON
		})

		t.Run("BatchFetch", func(t *testing.T) {
			defer tests.PrintCurrentTest(t)()

			// Parse per-gap params from each expander URL and join with commas
			paramKeys := []string{"last_left", "last_right", "left", "right", "left_hunk_size", "right_hunk_size"}
			batchValues := make(map[string][]string)
			var basePath string
			var sharedParams url.Values

			for i, expanderURL := range expanderURLs {
				parsed, err := url.Parse(expanderURL)
				require.NoError(t, err)
				if i == 0 {
					basePath = parsed.Path
					sharedParams = parsed.Query()
				}
				q := parsed.Query()
				for _, key := range paramKeys {
					batchValues[key] = append(batchValues[key], q.Get(key))
				}
			}

			// Build batch URL
			batchParams := url.Values{}
			for _, key := range paramKeys {
				batchParams.Set(key, strings.Join(batchValues[key], ","))
			}
			for _, key := range []string{"path", "filelang", "style"} {
				if v := sharedParams.Get(key); v != "" {
					batchParams.Set(key, v)
				}
			}
			batchParams.Set("direction", "full")
			batchURL := basePath + "?" + batchParams.Encode()

			req := NewRequest(t, "GET", batchURL)
			resp := session.MakeRequest(t, req, http.StatusOK)

			// Batch mode returns a JSON array of HTML strings
			var htmlArray []string
			err := json.Unmarshal(resp.Body.Bytes(), &htmlArray)
			require.NoError(t, err, "response should be valid JSON string array")
			assert.Len(t, htmlArray, len(expanderURLs))

			for i, html := range htmlArray {
				assert.Contains(t, html, `class="lines-`, "batch result %d should contain diff HTML", i)
			}
		})

		t.Run("BatchFetchMismatchedParams", func(t *testing.T) {
			defer tests.PrintCurrentTest(t)()

			// Build a batch URL with mismatched param lengths; should return 400
			parsed, err := url.Parse(expanderURLs[0])
			require.NoError(t, err)
			q := parsed.Query()
			q.Set("last_left", q.Get("last_left")+",0") // 2 values
			// other params remain with 1 value
			badURL := parsed.Path + "?" + q.Encode()
			req := NewRequest(t, "GET", badURL)
			session.MakeRequest(t, req, http.StatusBadRequest)
		})
	})
}
@@ -299,14 +299,29 @@ async function expandAllLines(btn: HTMLElement, fileBox: HTMLElement) {
 
   btn.classList.add('disabled');
   try {
-    // Loop: expand all collapsed sections until none remain.
-    // Each round fetches all current expander URLs in parallel, replaces their
-    // target <tr> rows with the response rows, then rescans for new expanders
-    // that may have appeared in the inserted content.
-    while (true) {
-      const expanders = collectExpanderButtons(fileBody, true);
-      if (expanders.length === 0) break;
-      await Promise.all(expanders.map(({tr, url}) => fetchBlobExcerpt(tr, url)));
-    }
+    const expanders = collectExpanderButtons(fileBody, true);
+    if (expanders.length === 0) return;
+
+    if (expanders.length === 1) {
+      await fetchBlobExcerpt(expanders[0].tr, expanders[0].url);
+    } else {
+      // Batch mode: join per-gap params with commas into a single request
+      const parsed = expanders.map(({url}) => new URL(url, window.location.origin));
+      const batchParams = new URLSearchParams();
+      for (const key of ['last_left', 'last_right', 'left', 'right', 'left_hunk_size', 'right_hunk_size']) {
+        batchParams.set(key, parsed.map((u) => u.searchParams.get(key) ?? '0').join(','));
+      }
+      for (const [key, val] of parsed[0].searchParams) {
+        if (!batchParams.has(key)) batchParams.set(key, val);
+      }
+      batchParams.set('direction', 'full');
+
+      const htmlArray: string[] = await (await GET(`${parsed[0].pathname}?${batchParams}`)).json();
+      for (const [index, html] of htmlArray.entries()) {
+        const tempTbody = document.createElement('tbody');
+        tempTbody.innerHTML = html;
+        expanders[index].tr.replaceWith(...tempTbody.children);
+      }
+    }
   } finally {
     btn.classList.remove('disabled');