fix(files/search): enable searching by multiple keywords
parent 9510d6e730
commit 302d3a6af8
4 changed files with 55 additions and 24 deletions
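In short: the /v1/fs/search endpoint now accepts the keyword query parameter once per search term, and only pathnames that match every term are returned. A minimal standalone sketch of how such a query string is assembled with net/url; the literal key "keyword" is an assumption here and stands in for the fileshdr.Keyword constant used in the diff below:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Add the same query key once per search term, mirroring the updated client.
	values := url.Values{}
	for _, kw := range []string{"keyword1", "keyword2"} {
		values.Add("keyword", kw) // "keyword" is a placeholder for fileshdr.Keyword
	}
	// Prints: /v1/fs/search?keyword=keyword1&keyword=keyword2
	fmt.Println("/v1/fs/search?" + values.Encode())
}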
@@ -4,6 +4,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"net/http"
+	"net/url"
 
 	"github.com/ihexxa/quickshare/src/handlers/fileshdr"
 	"github.com/parnurzeal/gorequest"
@@ -249,10 +250,15 @@ func (cl *FilesClient) GetSharingDir(shareID string) (*http.Response, string, []
 	return resp, sdResp.SharingDir, nil
 }
 
-func (cl *FilesClient) SearchItems(keyword string) (*http.Response, *fileshdr.SearchItemsResp, []error) {
+func (cl *FilesClient) SearchItems(keywords []string) (*http.Response, *fileshdr.SearchItemsResp, []error) {
+	values := url.Values{}
+	for _, keyword := range keywords {
+		values.Add(fileshdr.Keyword, keyword)
+	}
+
 	resp, body, errs := cl.r.Get(cl.url("/v1/fs/search")).
 		AddCookie(cl.token).
-		Param(fileshdr.Keyword, keyword).
+		Query(values.Encode()).
 		End()
 
 	searchResp := &fileshdr.SearchItemsResp{}
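For reference, a usage sketch of the updated client method (not part of the diff). It is a fragment, not a full program: it assumes cl is an authenticated *FilesClient, as used in the tests further down, and that the call succeeded.

// Usage sketch only; `cl` is assumed to be an authenticated *FilesClient.
keywords := []string{"keyword1", "keyword2"}
resp, searchResp, errs := cl.SearchItems(keywords)
if len(errs) > 0 {
	// transport-level failure
} else if resp.StatusCode == 200 {
	for _, pathname := range searchResp.Results {
		_ = pathname // every returned pathname matched all keywords
	}
}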
@@ -1158,17 +1158,36 @@ type SearchItemsResp struct {
 }
 
 func (h *FileHandlers) SearchItems(c *gin.Context) {
-	keyword := c.Query(Keyword)
-	if keyword == "" {
+	keywords := c.QueryArray(Keyword)
+	if len(keywords) == 0 {
 		c.JSON(q.ErrResp(c, 400, errors.New("empty keyword")))
 		return
 	}
 
-	results, err := h.deps.FileIndex().Search(keyword)
-	if err != nil {
-		c.JSON(q.ErrResp(c, 500, err))
-		return
+	resultsMap := map[string]int{}
+	for _, keyword := range keywords {
+		searchResults, err := h.deps.FileIndex().Search(keyword)
+		if err != nil {
+			c.JSON(q.ErrResp(c, 500, err))
+			return
+		}
+
+		for _, searchResult := range searchResults {
+			fmt.Println(keyword, searchResult)
+			if _, ok := resultsMap[searchResult]; !ok {
+				resultsMap[searchResult] = 0
+			}
+			resultsMap[searchResult] += 1
+		}
+	}
+
+	results := []string{}
+	for pathname, count := range resultsMap {
+		if count >= len(keywords) {
+			results = append(results, pathname)
+		}
 	}
 
 	c.JSON(200, &SearchItemsResp{Results: results})
 }
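The handler now runs one index search per keyword and keeps a per-pathname hit count; a pathname is returned only when its count reaches len(keywords), i.e. the response is the intersection of the per-keyword result sets (this assumes the index reports a given pathname at most once per keyword). A standalone sketch of the same counting idea; the helper name intersect and its inputs are illustrative, not part of the handler:

package main

import "fmt"

// intersect returns the pathnames that appear in every per-keyword result set,
// using the same counting approach as the handler above. It assumes each inner
// slice contains a given pathname at most once.
func intersect(perKeyword [][]string) []string {
	counts := map[string]int{}
	for _, results := range perKeyword {
		for _, pathname := range results {
			counts[pathname]++
		}
	}
	matched := []string{}
	for pathname, n := range counts {
		if n >= len(perKeyword) {
			matched = append(matched, pathname)
		}
	}
	return matched
}

func main() {
	fmt.Println(intersect([][]string{
		{"a/b/keyword1keyword2", "a/only1"},
		{"a/b/keyword1keyword2", "a/only2"},
	})) // [a/b/keyword1keyword2]
}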
@@ -868,39 +868,45 @@ func TestFileHandlers(t *testing.T) {
 			return true
 		}
 
-		t.Run("Search", func(t *testing.T) {
+		t.Run("SearchItems", func(t *testing.T) {
+			path1 := "qs/files/search/keyword1keyword2/file"
+			path1Dir := "qs/files/search/keyword1keyword2"
+			path2 := "qs/files/search/path/keyword1keyword2"
+			path22 := "qs/files/search/new_path/keyword1keyword2"
+			path3 := "qs/files/search/normal_file"
+
 			files := map[string]string{
-				"qs/files/search/keyword":      "12345678",
-				"qs/files/search/path/keyword": "12345678",
-				"qs/files/search/normal_file":  "12345678",
+				path1: "12345678",
+				path2: "12345678",
+				path3: "12345678",
 			}
 			expected := map[string]bool{
-				"qs/files/search/keyword":      true,
-				"qs/files/search/path/keyword": true,
+				path1Dir: true,
+				path2:    true,
 			}
 			toDelete := map[string]bool{
-				"qs/files/search/keyword": true,
+				path1Dir: true,
 			}
 			afterDeleted := map[string]bool{
-				"qs/files/search/path/keyword": true,
+				path2: true,
 			}
 			toMove := map[string]string{
-				"qs/files/search/path/keyword": "qs/files/search/newPath/keyword",
+				path2: path22,
 			}
 			afterMoved := map[string]bool{
-				"qs/files/search/newPath/keyword": true,
+				path22: true,
 			}
 
 			for filePath, content := range files {
				assertUploadOK(t, filePath, content, addr, token)
 
 				err = fs.Sync()
 				if err != nil {
 					t.Fatal(err)
 				}
 			}
 
-			resp, searchItemsResp, errs := cl.SearchItems("keyword")
+			keywords := []string{"keyword1", "keyword2"}
+			resp, searchItemsResp, errs := cl.SearchItems(keywords)
 			if len(errs) > 0 {
 				t.Fatal(errs)
 			} else if resp.StatusCode != 200 {
@@ -921,7 +927,7 @@ func TestFileHandlers(t *testing.T) {
 					t.Fatal(resp.StatusCode)
 				}
 			}
-			resp, searchItemsResp, errs = cl.SearchItems("keyword")
+			resp, searchItemsResp, errs = cl.SearchItems(keywords)
 			if len(errs) > 0 {
 				t.Fatal(errs)
 			} else if resp.StatusCode != 200 {
@@ -949,7 +955,7 @@ func TestFileHandlers(t *testing.T) {
 					t.Fatal(resp.StatusCode)
 				}
 			}
-			resp, searchItemsResp, errs = cl.SearchItems("keyword")
+			resp, searchItemsResp, errs = cl.SearchItems(keywords)
 			if len(errs) > 0 {
 				t.Fatal(errs)
 			} else if resp.StatusCode != 200 {
@@ -1008,7 +1014,7 @@ func TestFileHandlers(t *testing.T) {
 			// still need to wait for worker finishing indexing...
 			time.Sleep(3 * time.Second)
 
-			resp, searchItemsResp, errs := cl.SearchItems("reindexkey")
+			resp, searchItemsResp, errs := cl.SearchItems([]string{"reindexkey"})
 			if len(errs) > 0 {
 				t.Fatal(errs)
 			} else if resp.StatusCode != 200 {
@@ -494,7 +494,7 @@ func TestPermissions(t *testing.T) {
 		assertResp(t, resp, errs, expectedCodes["Delete"], fmt.Sprintf("%s-%s", desc, "Delete"))
 		resp, _, errs = filesCl.Delete(targetPathFile)
 		assertResp(t, resp, errs, expectedCodes["DeleteTarget"], fmt.Sprintf("%s-%s", desc, "DeleteTarget"))
-		resp, _, errs = filesCl.SearchItems(targetPathFile)
+		resp, _, errs = filesCl.SearchItems([]string{targetPathFile})
 		assertResp(t, resp, errs, expectedCodes["SearchTarget"], fmt.Sprintf("%s-%s", desc, "SearchTarget"))
 
 		if requireAuth {