
restart from scratch

This commit is contained in:
wxiaoguang 2025-10-29 14:08:14 +08:00
parent 95b18eb781
commit d318098ba4
18 changed files with 266 additions and 252 deletions

View File

@ -1,47 +0,0 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"bufio"
"context"
)
type Batch struct {
cancel context.CancelFunc
Reader *bufio.Reader
Writer WriteCloserError
}
// NewBatch creates a new batch for the given repository; Close must be invoked before releasing the batch
func NewBatch(ctx context.Context, repoPath string) (*Batch, error) {
// Now, because of some insanity with git cat-file not immediately failing if not run in a valid git directory, we need to run git rev-parse first!
if err := ensureValidGitRepository(ctx, repoPath); err != nil {
return nil, err
}
var batch Batch
batch.Writer, batch.Reader, batch.cancel = catFileBatch(ctx, repoPath)
return &batch, nil
}
func NewBatchCheck(ctx context.Context, repoPath string) (*Batch, error) {
// Now, because of some insanity with git cat-file not immediately failing if not run in a valid git directory, we need to run git rev-parse first!
if err := ensureValidGitRepository(ctx, repoPath); err != nil {
return nil, err
}
var check Batch
check.Writer, check.Reader, check.cancel = catFileBatchCheck(ctx, repoPath)
return &check, nil
}
func (b *Batch) Close() {
if b.cancel != nil {
b.cancel()
b.Reader = nil
b.Writer = nil
b.cancel = nil
}
}

View File

@ -6,8 +6,6 @@
package git
import (
"bufio"
"bytes"
"io"
"code.gitea.io/gitea/modules/log"
@ -25,35 +23,27 @@ type Blob struct {
// DataAsync gets a ReadCloser for the contents of a blob without reading it all.
// Calling the Close function on the result will discard all unread output.
func (b *Blob) DataAsync() (io.ReadCloser, error) {
wr, rd, cancel, err := b.repo.CatFileBatch(b.repo.Ctx)
func (b *Blob) DataAsync() (_ io.ReadCloser, retErr error) {
batch, cancel, err := b.repo.CatFileBatch(b.repo.Ctx)
if err != nil {
return nil, err
}
defer func() {
if retErr != nil {
cancel()
}
}()
_, err = wr.Write([]byte(b.ID.String() + "\n"))
rd, err := batch.QueryContent(b.ID.String())
if err != nil {
cancel()
return nil, err
}
_, _, size, err := ReadBatchLine(rd)
if err != nil {
cancel()
return nil, err
}
b.gotSize = true
b.size = size
if size < 4096 {
bs, err := io.ReadAll(io.LimitReader(rd, size))
defer cancel()
if err != nil {
return nil, err
}
_, err = rd.Discard(1)
return io.NopCloser(bytes.NewReader(bs)), err
}
return &blobReader{
rd: rd,
n: size,
@ -67,13 +57,13 @@ func (b *Blob) Size() int64 {
return b.size
}
wr, rd, cancel, err := b.repo.CatFileBatchCheck(b.repo.Ctx)
batch, cancel, err := b.repo.CatFileBatch(b.repo.Ctx)
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
return 0
}
defer cancel()
_, err = wr.Write([]byte(b.ID.String() + "\n"))
rd, err := batch.QueryInfo(b.ID.String())
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
return 0
@ -85,12 +75,11 @@ func (b *Blob) Size() int64 {
}
b.gotSize = true
return b.size
}
type blobReader struct {
rd *bufio.Reader
rd BufferedReader
n int64
cancel func()
}
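
Illustrative sketch only (not part of this commit): how a caller consumes the new DataAsync, which now borrows the repository's cat-file batch through QueryContent. It assumes "io" and code.gitea.io/gitea/modules/git are imported and that blob was obtained elsewhere, e.g. from a commit's tree.

func readBlob(blob *git.Blob) ([]byte, error) {
	rc, err := blob.DataAsync() // borrows the repository's cat-file --batch reader
	if err != nil {
		return nil, err
	}
	defer rc.Close() // discards any unread output and releases the batch
	return io.ReadAll(rc) // small blobs (< 4096 bytes) are already fully buffered in memory
}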

View File

@ -0,0 +1,41 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"context"
"io"
)
type BufferedReader interface {
io.Reader
Buffered() int
Peek(n int) ([]byte, error)
Discard(n int) (int, error)
ReadString(sep byte) (string, error)
ReadSlice(sep byte) ([]byte, error)
ReadBytes(sep byte) ([]byte, error)
}
type CatFileBatchContent interface {
QueryContent(obj string) (BufferedReader, error)
}
type CatFileBatchInfo interface {
QueryInfo(obj string) (BufferedReader, error)
}
type CatFileBatch interface {
CatFileBatchInfo
CatFileBatchContent
}
type CatFileBatchCloser interface {
CatFileBatch
Close()
}
func NewBatch(ctx context.Context, repoPath string) (CatFileBatchCloser, error) {
return newCatFileBatchLegacy(ctx, repoPath)
}
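
For illustration only (not part of the diff): a minimal sketch of how the new interface is meant to be consumed from outside the package. It assumes "context" and code.gitea.io/gitea/modules/git are imported; repoPath and obj are placeholders.

func objectSize(ctx context.Context, repoPath, obj string) (int64, error) {
	batch, err := git.NewBatch(ctx, repoPath) // currently backed by the legacy --batch / --batch-check implementation
	if err != nil {
		return 0, err
	}
	defer batch.Close() // terminates the underlying cat-file processes

	rd, err := batch.QueryInfo(obj) // writes "<obj>\n" to cat-file --batch-check
	if err != nil {
		return 0, err
	}
	_, _, size, err := git.ReadBatchLine(rd) // parses "<oid> SP <type> SP <size> LF"
	return size, err
}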

View File

@ -0,0 +1,73 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package git
import (
"context"
"io"
"code.gitea.io/gitea/modules/git/gitcmd"
)
// catFileBatchLegacy implements the CatFileBatch interface using the "cat-file --batch" and "cat-file --batch-check" commands
// for git versions < 2.36.
// To align with "--batch-command", it creates two separate commands for querying object contents and object info.
// ref: https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt---batch
type catFileBatchLegacy struct {
ctx context.Context
repoPath string
batchContent *catFileBatchCommunicator
batchCheck *catFileBatchCommunicator
}
var _ CatFileBatchCloser = (*catFileBatchLegacy)(nil)
// newCatFileBatchLegacy creates a new batch and a new batch-check for the given repository; Close must be invoked before releasing the batch
func newCatFileBatchLegacy(ctx context.Context, repoPath string) (*catFileBatchLegacy, error) {
if err := ensureValidGitRepository(ctx, repoPath); err != nil {
return nil, err
}
return &catFileBatchLegacy{
ctx: ctx,
repoPath: repoPath,
}, nil
}
func (b *catFileBatchLegacy) getBatchContent() *catFileBatchCommunicator {
if b.batchContent != nil {
return b.batchContent
}
b.batchContent = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch"))
return b.batchContent
}
func (b *catFileBatchLegacy) getBatchCheck() *catFileBatchCommunicator {
if b.batchCheck != nil {
return b.batchCheck
}
b.batchCheck = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch-check"))
return b.batchCheck
}
func (b *catFileBatchLegacy) QueryContent(obj string) (BufferedReader, error) {
_, err := io.WriteString(b.getBatchContent().writer, obj+"\n")
return b.getBatchContent().reader, err
}
func (b *catFileBatchLegacy) QueryInfo(obj string) (BufferedReader, error) {
_, err := io.WriteString(b.getBatchCheck().writer, obj+"\n")
return b.getBatchCheck().reader, err
}
func (b *catFileBatchLegacy) Close() {
if b.batchContent != nil {
b.batchContent.Close()
b.batchContent = nil
}
if b.batchCheck != nil {
b.batchCheck.Close()
b.batchCheck = nil
}
}
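
Illustrative note, not part of the change: because the two cat-file processes are spawned lazily, a caller that only ever queries object info never starts a --batch process. A rough in-package sketch of that behaviour, where b comes from newCatFileBatchLegacy and oid is a placeholder object id:

// the first QueryInfo starts "git cat-file --batch-check" and keeps it running;
// its stream carries header lines only, never object bodies
infoRd, err := b.QueryInfo(oid)
if err != nil {
	return err
}
if _, _, _, err = ReadBatchLine(infoRd); err != nil {
	return err
}

// a later QueryContent starts the separate "git cat-file --batch" process;
// that stream carries the header line, then the object body, then a trailing LF
contentRd, err := b.QueryContent(oid)
if err != nil {
	return err
}
_ = contentRd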

View File

@ -14,20 +14,35 @@ import (
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/log"
"github.com/djherbis/buffer"
"github.com/djherbis/nio/v3"
)
// WriteCloserError wraps an io.WriteCloser with an additional CloseWithError function
type WriteCloserError interface {
// writeCloserError wraps an io.WriteCloser with an additional CloseWithError function (for nio.Pipe)
type writeCloserError interface {
io.WriteCloser
CloseWithError(err error) error
}
type catFileBatchCommunicator struct {
cancel context.CancelFunc
reader *bufio.Reader
writer writeCloserError
}
func (b *catFileBatchCommunicator) Close() {
if b.cancel != nil {
b.cancel()
b.reader = nil
b.writer = nil
b.cancel = nil
}
}
// ensureValidGitRepository runs git rev-parse in the repository path - thus ensuring that the repository is a valid repository.
// Run before opening git cat-file.
// This is needed because otherwise git cat-file will hang for invalid repositories.
// FIXME: the comment is from https://github.com/go-gitea/gitea/pull/17991 but it doesn't seem to be true.
// The real problem is that Golang's Cmd.Wait hangs because it waits for the pipes to be closed, but we can't close the pipes before Wait returns
// Need to refactor to use StdinPipe and StdoutPipe
func ensureValidGitRepository(ctx context.Context, repoPath string) error {
stderr := strings.Builder{}
err := gitcmd.NewCommand("rev-parse").
@ -40,16 +55,19 @@ func ensureValidGitRepository(ctx context.Context, repoPath string) error {
return nil
}
// catFileBatchCheck opens git cat-file --batch-check in the provided repo and returns a stdin pipe, a stdout reader and cancel function
func catFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError, *bufio.Reader, func()) {
// newCatFileBatch runs the given git cat-file command in the provided repo and returns a communicator wrapping its stdin pipe, stdout reader and cancel function
func newCatFileBatch(ctx context.Context, repoPath string, cmdCatFile *gitcmd.Command) *catFileBatchCommunicator {
// We often want to feed the commits in order into cat-file --batch, followed by their trees and subtrees as necessary.
// so let's create a batch stdin and stdout
batchStdinReader, batchStdinWriter := io.Pipe()
batchStdoutReader, batchStdoutWriter := io.Pipe()
ctx, ctxCancel := context.WithCancel(ctx)
closed := make(chan struct{})
cancel := func() {
ctxCancel()
_ = batchStdoutReader.Close()
_ = batchStdinWriter.Close()
_ = batchStdoutReader.Close()
<-closed
}
@ -61,7 +79,7 @@ func catFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError,
go func() {
stderr := strings.Builder{}
err := gitcmd.NewCommand("cat-file", "--batch-check").
err := cmdCatFile.
WithDir(repoPath).
WithStdin(batchStdinReader).
WithStdout(batchStdoutWriter).
@ -78,62 +96,20 @@ func catFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError,
close(closed)
}()
// For simplicity's sake we'll use a buffered reader to read from the cat-file --batch-check
batchReader := bufio.NewReader(batchStdoutReader)
return batchStdinWriter, batchReader, cancel
}
// catFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function
func catFileBatch(ctx context.Context, repoPath string) (WriteCloserError, *bufio.Reader, func()) {
// We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary.
// so let's create a batch stdin and stdout
batchStdinReader, batchStdinWriter := io.Pipe()
batchStdoutReader, batchStdoutWriter := nio.Pipe(buffer.New(32 * 1024))
ctx, ctxCancel := context.WithCancel(ctx)
closed := make(chan struct{})
cancel := func() {
ctxCancel()
_ = batchStdinWriter.Close()
_ = batchStdoutReader.Close()
<-closed
}
// Ensure cancel is called as soon as the provided context is cancelled
go func() {
<-ctx.Done()
cancel()
}()
go func() {
stderr := strings.Builder{}
err := gitcmd.NewCommand("cat-file", "--batch").
WithDir(repoPath).
WithStdin(batchStdinReader).
WithStdout(batchStdoutWriter).
WithStderr(&stderr).
WithUseContextTimeout(true).
Run(ctx)
if err != nil {
_ = batchStdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
_ = batchStdinReader.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
} else {
_ = batchStdoutWriter.Close()
_ = batchStdinReader.Close()
}
close(closed)
}()
// For simplicity's sake we'll use a buffered reader to read from the cat-file --batch
// use a buffered reader to read from the cat-file --batch (BufferedReader.ReadString)
batchReader := bufio.NewReaderSize(batchStdoutReader, 32*1024)
return batchStdinWriter, batchReader, cancel
return &catFileBatchCommunicator{
writer: batchStdinWriter,
reader: batchReader,
cancel: cancel,
}
}
// ReadBatchLine reads the header line from cat-file --batch
// We expect: <oid> SP <type> SP <size> LF
// and leaves the rest of the stream "<contents> LF" to be read
func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err error) {
func ReadBatchLine(rd BufferedReader) (sha []byte, typ string, size int64, err error) {
typ, err = rd.ReadString('\n')
if err != nil {
return sha, typ, size, err
@ -165,7 +141,7 @@ func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err er
}
// ReadTagObjectID reads a tag object ID hash from a cat-file --batch stream, throwing away the rest of the stream.
func ReadTagObjectID(rd *bufio.Reader, size int64) (string, error) {
func ReadTagObjectID(rd BufferedReader, size int64) (string, error) {
var id string
var n int64
headerLoop:
@ -191,7 +167,7 @@ headerLoop:
}
// ReadTreeID reads a tree ID from a cat-file --batch stream, throwing away the rest of the stream.
func ReadTreeID(rd *bufio.Reader, size int64) (string, error) {
func ReadTreeID(rd BufferedReader, size int64) (string, error) {
var id string
var n int64
headerLoop:
@ -225,7 +201,7 @@ headerLoop:
// constant hextable to help quickly convert between binary and hex representation
const hextable = "0123456789abcdef"
// BinToHexHeash converts a binary Hash into a hex encoded one. Input and output can be the
// BinToHex converts a binary Hash into a hex encoded one. Input and output can be the
// same byte slice to support in place conversion without allocations.
// This is at least 100x quicker than hex.EncodeToString
func BinToHex(objectFormat ObjectFormat, sha, out []byte) []byte {
@ -246,7 +222,7 @@ func BinToHex(objectFormat ObjectFormat, sha, out []byte) []byte {
// <mode-in-ascii-dropping-initial-zeros> SP <fname> NUL <binary HASH>
//
// We don't attempt to convert the raw HASH to save a lot of time
func ParseCatFileTreeLine(objectFormat ObjectFormat, rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) {
func ParseCatFileTreeLine(objectFormat ObjectFormat, rd BufferedReader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) {
var readBytes []byte
// Read the Mode & fname
@ -305,7 +281,7 @@ func ParseCatFileTreeLine(objectFormat ObjectFormat, rd *bufio.Reader, modeBuf,
return mode, fname, sha, n, err
}
func DiscardFull(rd *bufio.Reader, discard int64) error {
func DiscardFull(rd BufferedReader, discard int64) error {
if discard > math.MaxInt32 {
n, err := rd.Discard(math.MaxInt32)
discard -= int64(n)
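
For illustration only (not part of the commit): the helpers above compose into the read-one-object pattern used throughout this diff. A sketch assuming "fmt", "io" and code.gitea.io/gitea/modules/git are imported; blobSha is a placeholder:

func readBlobViaBatch(batch git.CatFileBatch, blobSha string) ([]byte, error) {
	rd, err := batch.QueryContent(blobSha)
	if err != nil {
		return nil, err
	}
	_, typ, size, err := git.ReadBatchLine(rd) // "<oid> SP <type> SP <size> LF"
	if err != nil {
		return nil, err
	}
	if typ != "blob" {
		return nil, fmt.Errorf("unexpected object type %q", typ)
	}
	body, err := io.ReadAll(io.LimitReader(rd, size)) // the object body follows the header
	if err != nil {
		return nil, err
	}
	// consume the trailing LF so the stream is positioned for the next query
	return body, git.DiscardFull(rd, 1)
}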

View File

@ -22,18 +22,14 @@ import (
func GetLanguageStats(repo *git.Repository, commitID string) (map[string]int64, error) {
// We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary.
// so let's create a batch stdin and stdout
batchStdinWriter, batchReader, cancel, err := repo.CatFileBatch(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
writeID := func(id string) error {
_, err := batchStdinWriter.Write([]byte(id + "\n"))
return err
}
if err := writeID(commitID); err != nil {
batchReader, err := batch.QueryContent(commitID)
if err != nil {
return nil, err
}
shaBytes, typ, size, err := git.ReadBatchLine(batchReader)
@ -144,7 +140,7 @@ func GetLanguageStats(repo *git.Repository, commitID string) (map[string]int64,
// If content cannot be read or the file is too big, just do detection by filename
if f.Size() <= bigFileSize {
if err := writeID(f.ID.String()); err != nil {
if _, err := batch.QueryContent(f.ID.String()); err != nil {
return nil, err
}
_, _, size, err := git.ReadBatchLine(batchReader)

View File

@ -6,7 +6,6 @@
package git
import (
"bufio"
"bytes"
"fmt"
"io"
@ -49,7 +48,7 @@ func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) {
return entries, nil
}
func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd *bufio.Reader, sz int64) ([]*TreeEntry, error) {
func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd BufferedReader, sz int64) ([]*TreeEntry, error) {
fnameBuf := make([]byte, 4096)
modeBuf := make([]byte, 40)
shaBuf := make([]byte, objectFormat.FullLength())

View File

@ -47,7 +47,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
// Next feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary.
// so let's create a batch stdin and stdout
batchStdinWriter, batchReader, cancel, err := repo.CatFileBatch(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
@ -64,14 +64,10 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
for scan.Scan() {
// Get the next commit ID
commitID := scan.Bytes()
commitID := scan.Text()
// push the commit to the cat-file --batch process
_, err := batchStdinWriter.Write(commitID)
if err != nil {
return nil, err
}
_, err = batchStdinWriter.Write([]byte{'\n'})
batchReader, err := batch.QueryContent(commitID)
if err != nil {
return nil, err
}
@ -93,14 +89,13 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
if err != nil {
return nil, err
}
_, err = batchStdinWriter.Write([]byte(id + "\n"))
if err != nil {
if _, err = batch.QueryContent(id); err != nil {
return nil, err
}
continue
case "commit":
// Read in the commit to get its tree and in case this is one of the last used commits
curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(string(commitID)), io.LimitReader(batchReader, size))
curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(commitID), io.LimitReader(batchReader, size))
if err != nil {
return nil, err
}
@ -108,7 +103,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
return nil, err
}
if _, err := batchStdinWriter.Write([]byte(curCommit.Tree.ID.String() + "\n")); err != nil {
if _, err := batch.QueryContent(curCommit.Tree.ID.String()); err != nil {
return nil, err
}
curPath = ""
@ -140,11 +135,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
return nil, err
}
if len(trees) > 0 {
_, err := batchStdinWriter.Write(trees[len(trees)-1])
if err != nil {
return nil, err
}
_, err = batchStdinWriter.Write([]byte("\n"))
_, err := batch.QueryContent(string(trees[len(trees)-1]))
if err != nil {
return nil, err
}

View File

@ -7,9 +7,9 @@
package git
import (
"bufio"
"context"
"path/filepath"
"sync"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
@ -25,11 +25,9 @@ type Repository struct {
gpgSettings *GPGSettings
batchInUse bool
batch *Batch
checkInUse bool
check *Batch
mu sync.Mutex
catFileBatchCloser CatFileBatchCloser
catFileBatchInUse bool
Ctx context.Context
LastCommitCache *LastCommitCache
@ -58,69 +56,56 @@ func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) {
}, nil
}
// FIXME: for debugging purposes only
// At the moment, the old logic (debugTestAlwaysNewBatch=false: reuse the existing batch if not in use)
// causes random test failures: it makes the `t.Context()` occasionally get canceled for unknown reasons.
// In theory, the `t.Context()` should never be affected by testing code and can never be canceled, but it does happen.
// The stranger thing is that the failing tests are almost always around TestAPIPullUpdateByRebase,
// and they almost always occur during MSSQL testing, sometimes PGSQL, never others.
var debugTestAlwaysNewBatch = false
// CatFileBatch obtains a CatFileBatch for this repository
func (repo *Repository) CatFileBatch(ctx context.Context) (WriteCloserError, *bufio.Reader, func(), error) {
if repo.batch == nil {
var err error
repo.batch, err = NewBatch(ctx, repo.Path)
func (repo *Repository) CatFileBatch(ctx context.Context) (_ CatFileBatch, closeFunc func(), err error) {
repo.mu.Lock()
defer repo.mu.Unlock()
if debugTestAlwaysNewBatch {
b, err := NewBatch(ctx, repo.Path)
return b, b.Close, err
}
if repo.catFileBatchCloser == nil {
repo.catFileBatchCloser, err = NewBatch(ctx, repo.Path)
if err != nil {
return nil, nil, nil, err
return nil, nil, err
}
}
if !repo.batchInUse {
repo.batchInUse = true
return repo.batch.Writer, repo.batch.Reader, func() {
repo.batchInUse = false
if !repo.catFileBatchInUse {
repo.catFileBatchInUse = true
return CatFileBatch(repo.catFileBatchCloser), func() {
repo.mu.Lock()
defer repo.mu.Unlock()
repo.catFileBatchInUse = false
}, nil
}
log.Debug("Opening temporary cat file batch for: %s", repo.Path)
tempBatch, err := NewBatch(ctx, repo.Path)
if err != nil {
return nil, nil, nil, err
return nil, nil, err
}
return tempBatch.Writer, tempBatch.Reader, tempBatch.Close, nil
}
// CatFileBatchCheck obtains a CatFileBatchCheck for this repository
func (repo *Repository) CatFileBatchCheck(ctx context.Context) (WriteCloserError, *bufio.Reader, func(), error) {
if repo.check == nil {
var err error
repo.check, err = NewBatchCheck(ctx, repo.Path)
if err != nil {
return nil, nil, nil, err
}
}
if !repo.checkInUse {
repo.checkInUse = true
return repo.check.Writer, repo.check.Reader, func() {
repo.checkInUse = false
}, nil
}
log.Debug("Opening temporary cat file batch-check for: %s", repo.Path)
tempBatchCheck, err := NewBatchCheck(ctx, repo.Path)
if err != nil {
return nil, nil, nil, err
}
return tempBatchCheck.Writer, tempBatchCheck.Reader, tempBatchCheck.Close, nil
return tempBatch, tempBatch.Close, nil
}
func (repo *Repository) Close() error {
if repo == nil {
return nil
}
if repo.batch != nil {
repo.batch.Close()
repo.batch = nil
repo.batchInUse = false
}
if repo.check != nil {
repo.check.Close()
repo.check = nil
repo.checkInUse = false
if repo.catFileBatchCloser != nil {
repo.catFileBatchCloser.Close()
repo.catFileBatchCloser = nil
repo.catFileBatchInUse = false
}
repo.LastCommitCache = nil
repo.tagCache = nil
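
For illustration only (not part of the diff): how callers are expected to pair CatFileBatch with the returned release function. A sketch assuming repo is an open *git.Repository and code.gitea.io/gitea/modules/git is imported; the ref name is a placeholder:

func refObjectSize(repo *git.Repository, ref string) (int64, error) {
	batch, cancel, err := repo.CatFileBatch(repo.Ctx)
	if err != nil {
		return 0, err
	}
	// cancel either marks the repository's shared batch as free again, or closes the
	// temporary batch that was opened because the shared one was already in use
	defer cancel()

	rd, err := batch.QueryInfo(ref)
	if err != nil {
		return 0, err
	}
	_, _, size, err := git.ReadBatchLine(rd)
	return size, err
}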

View File

@ -23,13 +23,13 @@ func (repo *Repository) IsObjectExist(name string) bool {
return false
}
wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
log.Debug("Error writing to CatFileBatchCheck %v", err)
return false
}
defer cancel()
_, err = wr.Write([]byte(name + "\n"))
rd, err := batch.QueryInfo(name)
if err != nil {
log.Debug("Error writing to CatFileBatchCheck %v", err)
return false
@ -44,13 +44,13 @@ func (repo *Repository) IsReferenceExist(name string) bool {
return false
}
wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
log.Debug("Error writing to CatFileBatchCheck %v", err)
return false
}
defer cancel()
_, err = wr.Write([]byte(name + "\n"))
rd, err := batch.QueryInfo(name)
if err != nil {
log.Debug("Error writing to CatFileBatchCheck %v", err)
return false

View File

@ -6,7 +6,6 @@
package git
import (
"bufio"
"errors"
"io"
"strings"
@ -37,12 +36,12 @@ func (repo *Repository) ResolveReference(name string) (string, error) {
// GetRefCommitID returns the last commit ID string of given reference (branch or tag).
func (repo *Repository) GetRefCommitID(name string) (string, error) {
wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return "", err
}
defer cancel()
_, err = wr.Write([]byte(name + "\n"))
rd, err := batch.QueryInfo(name)
if err != nil {
return "", err
}
@ -68,18 +67,20 @@ func (repo *Repository) IsCommitExist(name string) bool {
}
func (repo *Repository) getCommit(id ObjectID) (*Commit, error) {
wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
_, _ = wr.Write([]byte(id.String() + "\n"))
return repo.getCommitFromBatchReader(wr, rd, id)
rd, err := batch.QueryContent(id.String())
if err != nil {
return nil, err
}
return repo.getCommitFromBatchReader(batch, rd, id)
}
func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.Reader, id ObjectID) (*Commit, error) {
func (repo *Repository) getCommitFromBatchReader(batchContent CatFileBatchContent, rd BufferedReader, id ObjectID) (*Commit, error) {
_, typ, size, err := ReadBatchLine(rd)
if err != nil {
if errors.Is(err, io.EOF) || IsErrNotExist(err) {
@ -106,12 +107,10 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.
if err != nil {
return nil, err
}
if _, err := wr.Write([]byte(tag.Object.String() + "\n")); err != nil {
if _, err := batchContent.QueryContent(tag.Object.String()); err != nil {
return nil, err
}
commit, err := repo.getCommitFromBatchReader(wr, rd, tag.Object)
commit, err := repo.getCommitFromBatchReader(batchContent, rd, tag.Object)
if err != nil {
return nil, err
}
@ -152,12 +151,12 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
}
}
wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
_, err = wr.Write([]byte(commitID + "\n"))
rd, err := batch.QueryInfo(commitID)
if err != nil {
return nil, err
}

View File

@ -24,12 +24,12 @@ func (repo *Repository) IsTagExist(name string) bool {
// GetTagType gets the type of the tag, either commit (simple) or tag (annotated)
func (repo *Repository) GetTagType(id ObjectID) (string, error) {
wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return "", err
}
defer cancel()
_, err = wr.Write([]byte(id.String() + "\n"))
rd, err := batch.QueryInfo(id.String())
if err != nil {
return "", err
}
@ -88,13 +88,14 @@ func (repo *Repository) getTag(tagID ObjectID, name string) (*Tag, error) {
}
// The tag is an annotated tag with a message.
wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
if _, err := wr.Write([]byte(tagID.String() + "\n")); err != nil {
rd, err := batch.QueryContent(tagID.String())
if err != nil {
return nil, err
}
_, typ, size, err := ReadBatchLine(rd)

View File

@ -10,13 +10,16 @@ import (
)
func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx)
batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
_, _ = wr.Write([]byte(id.String() + "\n"))
rd, err := batch.QueryContent(id.String())
if err != nil {
return nil, err
}
// ignore the SHA
_, typ, size, err := ReadBatchLine(rd)
@ -36,10 +39,10 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
return nil, err
}
if _, err := wr.Write([]byte(tag.Object.String() + "\n")); err != nil {
if _, err := batch.QueryContent(tag.Object.String()); err != nil {
return nil, err
}
commit, err := repo.getCommitFromBatchReader(wr, rd, tag.Object)
commit, err := repo.getCommitFromBatchReader(batch, rd, tag.Object)
if err != nil {
return nil, err
}

View File

@ -36,13 +36,13 @@ func (te *TreeEntry) Size() int64 {
return te.size
}
wr, rd, cancel, err := te.ptree.repo.CatFileBatchCheck(te.ptree.repo.Ctx)
batch, cancel, err := te.ptree.repo.CatFileBatch(te.ptree.repo.Ctx)
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err)
return 0
}
defer cancel()
_, err = wr.Write([]byte(te.ID.String() + "\n"))
rd, err := batch.QueryInfo(te.ID.String())
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err)
return 0

View File

@ -35,23 +35,33 @@ func (t *Tree) ListEntries() (Entries, error) {
}
if t.repo != nil {
wr, rd, cancel, err := t.repo.CatFileBatch(t.repo.Ctx)
batch, cancel, err := t.repo.CatFileBatch(t.repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
_, _ = wr.Write([]byte(t.ID.String() + "\n"))
rd, err := batch.QueryContent(t.ID.String())
if err != nil {
return nil, err
}
_, typ, sz, err := ReadBatchLine(rd)
if err != nil {
return nil, err
}
if typ == "commit" {
treeID, err := ReadTreeID(rd, sz)
if err != nil && err != io.EOF {
return nil, err
}
_, _ = wr.Write([]byte(treeID + "\n"))
newRd, err := batch.QueryContent(treeID)
if err != nil {
return nil, err
}
if newRd != rd {
panic("abused batch reader")
}
_, typ, sz, err = ReadBatchLine(rd)
if err != nil {
return nil, err

View File

@ -4,7 +4,6 @@
package bleve
import (
"bufio"
"context"
"fmt"
"io"
@ -151,7 +150,7 @@ func NewIndexer(indexDir string) *Indexer {
}
}
func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserError, batchReader *bufio.Reader, commitSha string,
func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, commitSha string,
update internal.FileUpdate, repo *repo_model.Repository, batch *inner_bleve.FlushingBatch,
) error {
// Ignore vendored files in code search
@ -177,7 +176,8 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
return b.addDelete(update.Filename, repo, batch)
}
if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil {
batchReader, err := catFileBatch.QueryContent(update.BlobSha)
if err != nil {
return err
}
@ -225,11 +225,10 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st
defer gitBatch.Close()
for _, update := range changes.Updates {
if err := b.addUpdate(ctx, gitBatch.Writer, gitBatch.Reader, sha, update, repo, batch); err != nil {
if err := b.addUpdate(ctx, gitBatch, sha, update, repo, batch); err != nil {
return err
}
}
gitBatch.Close()
}
for _, filename := range changes.RemovedFilenames {
if err := b.addDelete(filename, repo, batch); err != nil {

View File

@ -4,7 +4,6 @@
package elasticsearch
import (
"bufio"
"context"
"fmt"
"io"
@ -139,7 +138,7 @@ const (
}`
)
func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserError, batchReader *bufio.Reader, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]elastic.BulkableRequest, error) {
func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]elastic.BulkableRequest, error) {
// Ignore vendored files in code search
if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
return nil, nil
@ -162,7 +161,8 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
return []elastic.BulkableRequest{b.addDelete(update.Filename, repo)}, nil
}
if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil {
batchReader, err := catFileBatch.QueryContent(update.BlobSha)
if err != nil {
return nil, err
}
@ -217,7 +217,7 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st
defer batch.Close()
for _, update := range changes.Updates {
updateReqs, err := b.addUpdate(ctx, batch.Writer, batch.Reader, sha, update, repo)
updateReqs, err := b.addUpdate(ctx, batch, sha, update, repo)
if err != nil {
return err
}
@ -225,7 +225,6 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st
reqs = append(reqs, updateReqs...)
}
}
batch.Close()
}
for _, filename := range changes.RemovedFilenames {

View File

@ -150,9 +150,9 @@ func setCsvCompareContext(ctx *context.Context) {
if err != nil {
return nil, nil, err
}
var closer io.Closer = reader
csvReader, err := csv_module.CreateReaderAndDetermineDelimiter(ctx, charset.ToUTF8WithFallbackReader(reader, charset.ConvertOpts{}))
return csvReader, reader, err
return csvReader, closer, err
}
baseReader, baseBlobCloser, err := csvReaderFromCommit(markup.NewRenderContext(ctx).WithRelativePath(diffFile.OldName), baseBlob)