// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package code

import (
	"context"
	"os"
	"slices"
	"testing"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/unittest"
	"code.gitea.io/gitea/modules/git"
	"code.gitea.io/gitea/modules/indexer/code/bleve"
	"code.gitea.io/gitea/modules/indexer/code/elasticsearch"
	"code.gitea.io/gitea/modules/indexer/code/internal"

	_ "code.gitea.io/gitea/models"
	_ "code.gitea.io/gitea/models/actions"
	_ "code.gitea.io/gitea/models/activities"

	"github.com/stretchr/testify/assert"

	_ "github.com/mattn/go-sqlite3"
)
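
// codeSearchResult is the subset of a search hit that the scenarios below compare against.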
type codeSearchResult struct {
	Filename string
	Content  string
}
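
// TestMain delegates to unittest.MainTest to set up the test environment for this package.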
func TestMain(m *testing.M) {
	unittest.MainTest(m)
}
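
// testIndexer indexes the fixture repositories returned by repositoriesToSearch with the
// given engine and runs the shared search scenarios against it.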
func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
	t.Run(name, func(t *testing.T) {
		assert.NoError(t, setupRepositoryIndexes(git.DefaultContext, indexer))
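
		// Each scenario below runs a fuzzy search and verifies both the number of
		// matched languages and the expected results (when any are given).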
		keywords := []struct {
			RepoIDs []int64
			Keyword string
			Langs   int
			Results []codeSearchResult
		}{
			// Search for an exact match on the contents of a file
			// This scenario yields a single result (the file README.md on the repo '1')
			{
				RepoIDs: nil,
				Keyword: "Description",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "README.md",
						Content:  "# repo1\n\nDescription for repo1",
					},
				},
			},
			// Search for an exact match on the contents of a file within the repo '2'.
			// This scenario yields no results
			{
				RepoIDs: []int64{2},
				Keyword: "Description",
				Langs:   0,
			},
			// Search for an exact match on the contents of a file
			// This scenario yields a single result (the file README.md on the repo '1')
			{
				RepoIDs: nil,
				Keyword: "repo1",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "README.md",
						Content:  "# repo1\n\nDescription for repo1",
					},
				},
			},
			// Search for an exact match on the contents of a file within the repo '2'.
			// This scenario yields no results
			{
				RepoIDs: []int64{2},
				Keyword: "repo1",
				Langs:   0,
			},
			// Search for a non-existing term.
			// This scenario yields no results
			{
				RepoIDs: nil,
				Keyword: "non-exist",
				Langs:   0,
			},
			// Search for an exact match on the contents of a file within the repo '62'.
			// This scenario yields a single result (the file avocado.md on the repo '62')
			{
				RepoIDs: []int64{62},
				Keyword: "pineaple",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "avocado.md",
						Content:  "# repo1\n\npineaple pie of cucumber juice",
					},
				},
			},
			// Search for an exact match on the filename within the repo '62'.
			// This scenario yields a single result (the file avocado.md on the repo '62')
			{
				RepoIDs: []int64{62},
				Keyword: "avocado.md",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "avocado.md",
						Content:  "# repo1\n\npineaple pie of cucumber juice",
					},
				},
			},
			// Search for a partial match on the filename within the repo '62'.
			// This scenario yields a single result (the file avocado.md on the repo '62')
			{
				RepoIDs: []int64{62},
				Keyword: "avo",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "avocado.md",
						Content:  "# repo1\n\npineaple pie of cucumber juice",
					},
				},
			},
			// Search for matches on both the contents and the filenames within the repo '62'.
			// This scenario yields two results: the first result is based on the filename (cucumber.md) while the second is based on the contents
			{
				RepoIDs: []int64{62},
				Keyword: "cucumber",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "cucumber.md",
						Content:  "Salad is good for your health",
					},
					{
						Filename: "avocado.md",
						Content:  "# repo1\n\npineaple pie of cucumber juice",
					},
				},
			},
			// Search for matches on the filenames within the repo '62'.
			// This scenario yields two results (both are based on filename, the first one is an exact match)
			{
				RepoIDs: []int64{62},
				Keyword: "ham",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "ham.md",
						Content:  "This is also not cheese",
					},
					{
						Filename: "potato/ham.md",
						Content:  "This is not cheese",
					},
				},
			},
			// Search for matches on the contents of files within the repo '62'.
			// This scenario yields two results (both are based on contents, the first one is an exact match whereas the second is a 'fuzzy' one)
			{
				RepoIDs: []int64{62},
				Keyword: "This is not cheese",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "potato/ham.md",
						Content:  "This is not cheese",
					},
					{
						Filename: "ham.md",
						Content:  "This is also not cheese",
					},
				},
			},
			// Search for matches on the contents of files regardless of case.
			{
				RepoIDs: nil,
				Keyword: "dESCRIPTION",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "README.md",
						Content:  "# repo1\n\nDescription for repo1",
					},
				},
			},
			// Search for an exact match on the filename within the repo '62' (case insensitive).
			// This scenario yields a single result (the file avocado.md on the repo '62')
			{
				RepoIDs: []int64{62},
				Keyword: "AVOCADO.MD",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "avocado.md",
						Content:  "# repo1\n\npineaple pie of cucumber juice",
					},
				},
			},
			// Search for matches on the contents of files when the criteria is an expression.
			{
				RepoIDs: []int64{62},
				Keyword: "console.log",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "example-file.js",
						Content:  "console.log(\"Hello, World!\")",
					},
				},
			},
			// Search for matches on the contents of files when the criteria is part of an expression.
			{
				RepoIDs: []int64{62},
				Keyword: "log",
				Langs:   1,
				Results: []codeSearchResult{
					{
						Filename: "example-file.js",
						Content:  "console.log(\"Hello, World!\")",
					},
				},
			},
		}

		for _, kw := range keywords {
			t.Run(kw.Keyword, func(t *testing.T) {
				total, res, langs, err := indexer.Search(context.TODO(), &internal.SearchOptions{
					RepoIDs: kw.RepoIDs,
					Keyword: kw.Keyword,
					Paginator: &db.ListOptions{
						Page:     1,
						PageSize: 10,
					},
					IsKeywordFuzzy: true,
				})
				assert.NoError(t, err)
				assert.Len(t, langs, kw.Langs)

				hits := make([]codeSearchResult, 0, len(res))

				if total > 0 {
					assert.NotEmpty(t, kw.Results, "The given scenario does not provide any expected results")
				}

				for _, hit := range res {
					hits = append(hits, codeSearchResult{
						Filename: hit.Filename,
						Content:  hit.Content,
					})
				}
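
				// Every expected result must be present in the hits, in the same
				// relative order as listed in the scenario.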
				lastIndex := -1

				for _, expected := range kw.Results {
					index := slices.Index(hits, expected)
					if index == -1 {
						assert.Failf(t, "Result not found", "Expected %v in %v", expected, hits)
					} else if lastIndex > index {
						assert.Failf(t, "Result is out of order", "The order of %v within %v is wrong", expected, hits)
					} else {
						lastIndex = index
					}
				}
			})
		}

		assert.NoError(t, tearDownRepositoryIndexes(indexer))
	})
}
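
// TestBleveIndexAndSearch runs the shared scenarios against a Bleve indexer stored
// in a temporary directory.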
func TestBleveIndexAndSearch(t *testing.T) {
	unittest.PrepareTestEnv(t)

	dir := t.TempDir()

	idx := bleve.NewIndexer(dir)
	_, err := idx.Init(context.Background())
	if err != nil {
		if idx != nil {
			idx.Close()
		}
		assert.FailNow(t, "Unable to create bleve indexer Error: %v", err)
	}
	defer idx.Close()

	testIndexer("bleve", t, idx)
}
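
// TestESIndexAndSearch runs the shared scenarios against an Elasticsearch indexer.
// It is skipped unless the TEST_INDEXER_CODE_ES_URL environment variable is set.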
func TestESIndexAndSearch(t *testing.T) {
	unittest.PrepareTestEnv(t)

	u := os.Getenv("TEST_INDEXER_CODE_ES_URL")
	if u == "" {
		t.SkipNow()
		return
	}

	indexer := elasticsearch.NewIndexer(u, "gitea_codes")
	if _, err := indexer.Init(context.Background()); err != nil {
		if indexer != nil {
			indexer.Close()
		}
		assert.FailNow(t, "Unable to init ES indexer Error: %v", err)
	}
	defer indexer.Close()

	testIndexer("elastic_search", t, indexer)
}
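
// setupRepositoryIndexes indexes every fixture repository so the scenarios have
// data to search; it relies on the package-level index helper.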
func setupRepositoryIndexes(ctx context.Context, indexer internal.Indexer) error {
	for _, repoID := range repositoriesToSearch() {
		if err := index(ctx, indexer, repoID); err != nil {
			return err
		}
	}
	return nil
}
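
// tearDownRepositoryIndexes deletes the index entries created for the fixture repositories.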
func tearDownRepositoryIndexes(indexer internal.Indexer) error {
	for _, repoID := range repositoriesToSearch() {
		if err := indexer.Delete(context.Background(), repoID); err != nil {
			return err
		}
	}
	return nil
}
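
// repositoriesToSearch returns the fixture repository IDs used by these tests.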
func repositoriesToSearch() []int64 {
	return []int64{1, 62}
}