feat(multi-home): enable a separate home dir for each user (#64)

* feat(files): make the files service support home dirs

* fix(files): add path access control and avoid redirecting paths in the backend

* feat(files): add ListHome API

* fix(server): fix access control issues

* feat(client/web): support multi-home

* feat(server): cleanup

* fix(server): failed to init admin folder
Hexxa 2021-07-24 21:05:36 -05:00 committed by GitHub
parent 9748d0cab4
commit 81da97650b
18 changed files with 527 additions and 212 deletions
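The changes below boil down to one invariant: every file path handled by the backend is namespaced under its owner's user ID ("<userID>/..."), and a request is served only when the requester is an admin or owns that prefix. A minimal, self-contained sketch of that check, with the role name assumed to mirror userstore.AdminRole (the canAccess helper added in the fileshdr diff below is the real implementation):

package main

import (
	"fmt"
	"strings"
)

const adminRole = "admin" // assumption: stands in for userstore.AdminRole

// canAccess reports whether a user may touch a path of the form "<userID>/...".
func canAccess(userID, role, path string) bool {
	if role == adminRole {
		return true // admins may reach any home directory
	}
	parts := strings.Split(path, "/")
	return len(parts) > 0 && parts[0] == userID
}

func main() {
	fmt.Println(canAccess("7", "user", "7/docs/readme.md"))  // true: own home
	fmt.Println(canAccess("7", "user", "0/docs/readme.md"))  // false: another user's home
	fmt.Println(canAccess("0", "admin", "7/docs/readme.md")) // true: admin override
}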


@ -10,15 +10,17 @@ import (
)
type FilesClient struct {
addr string
r *gorequest.SuperAgent
addr string
r *gorequest.SuperAgent
token *http.Cookie
}
func NewFilesClient(addr string) *FilesClient {
func NewFilesClient(addr string, token *http.Cookie) *FilesClient {
gr := gorequest.New()
return &FilesClient{
addr: addr,
r: gr,
addr: addr,
r: gr,
token: token,
}
}
@ -28,6 +30,7 @@ func (cl *FilesClient) url(urlpath string) string {
func (cl *FilesClient) Create(filepath string, size int64) (*http.Response, string, []error) {
return cl.r.Post(cl.url("/v1/fs/files")).
AddCookie(cl.token).
Send(fileshdr.CreateReq{
Path: filepath,
FileSize: size,
@ -37,12 +40,14 @@ func (cl *FilesClient) Create(filepath string, size int64) (*http.Response, stri
func (cl *FilesClient) Delete(filepath string) (*http.Response, string, []error) {
return cl.r.Delete(cl.url("/v1/fs/files")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath).
End()
}
func (cl *FilesClient) Metadata(filepath string) (*http.Response, *fileshdr.MetadataResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/metadata")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath).
End()
@ -57,12 +62,14 @@ func (cl *FilesClient) Metadata(filepath string) (*http.Response, *fileshdr.Meta
func (cl *FilesClient) Mkdir(dirpath string) (*http.Response, string, []error) {
return cl.r.Post(cl.url("/v1/fs/dirs")).
AddCookie(cl.token).
Send(fileshdr.MkdirReq{Path: dirpath}).
End()
}
func (cl *FilesClient) Move(oldpath, newpath string) (*http.Response, string, []error) {
return cl.r.Patch(cl.url("/v1/fs/files/move")).
AddCookie(cl.token).
Send(fileshdr.MoveReq{
OldPath: oldpath,
NewPath: newpath,
@ -72,6 +79,7 @@ func (cl *FilesClient) Move(oldpath, newpath string) (*http.Response, string, []
func (cl *FilesClient) UploadChunk(filepath string, content string, offset int64) (*http.Response, string, []error) {
return cl.r.Patch(cl.url("/v1/fs/files/chunks")).
AddCookie(cl.token).
Send(fileshdr.UploadChunkReq{
Path: filepath,
Content: content,
@ -82,6 +90,7 @@ func (cl *FilesClient) UploadChunk(filepath string, content string, offset int64
func (cl *FilesClient) UploadStatus(filepath string) (*http.Response, *fileshdr.UploadStatusResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/files/chunks")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath).
End()
@ -96,6 +105,7 @@ func (cl *FilesClient) UploadStatus(filepath string) (*http.Response, *fileshdr.
func (cl *FilesClient) Download(filepath string, headers map[string]string) (*http.Response, string, []error) {
r := cl.r.Get(cl.url("/v1/fs/files")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath)
for key, val := range headers {
r = r.Set(key, val)
@ -105,6 +115,7 @@ func (cl *FilesClient) Download(filepath string, headers map[string]string) (*ht
func (cl *FilesClient) List(dirPath string) (*http.Response, *fileshdr.ListResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/dirs")).
AddCookie(cl.token).
Param(fileshdr.ListDirQuery, dirPath).
End()
if len(errs) > 0 {
@ -121,6 +132,7 @@ func (cl *FilesClient) List(dirPath string) (*http.Response, *fileshdr.ListResp,
func (cl *FilesClient) ListUploadings() (*http.Response, *fileshdr.ListUploadingsResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/uploadings")).
AddCookie(cl.token).
End()
if len(errs) > 0 {
return nil, nil, errs
@ -136,6 +148,7 @@ func (cl *FilesClient) ListUploadings() (*http.Response, *fileshdr.ListUploading
func (cl *FilesClient) DelUploading(filepath string) (*http.Response, string, []error) {
return cl.r.Delete(cl.url("/v1/fs/uploadings")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath).
End()
}


@ -136,6 +136,14 @@ export class FilesClient extends BaseClient {
});
};
listHome = (): Promise<Response<ListResp>> => {
return this.do({
method: "get",
url: `${this.url}/v1/fs/dirs/home`,
params: {},
});
};
listUploadings = (): Promise<Response<ListUploadingsResp>> => {
return this.do({
method: "get",


@ -19,6 +19,7 @@ export class FilesClient {
private uploadStatusMockResps: Array<Promise<Response<UploadStatusResp>>>;
private uploadStatusMockRespID: number = 0;
private listMockResp: Promise<Response<ListResp>>;
private listHomeMockResp: Promise<Response<ListResp>>;
private listUploadingsMockResp: Promise<Response<ListUploadingsResp>>;
private deleteUploadingMockResp: Promise<Response>;
@ -58,6 +59,10 @@ export class FilesClient {
this.listMockResp = resp;
};
listHomeMock = (resp: Promise<Response<ListResp>>) => {
this.listHomeMockResp = resp;
};
listUploadingsMock = (resp: Promise<Response<ListUploadingsResp>>) => {
this.listUploadingsMockResp = resp;
}
@ -111,6 +116,10 @@ export class FilesClient {
return this.listMockResp;
};
listHome = (): Promise<Response<ListResp>> => {
return this.listHomeMockResp;
};
listUploadings = (): Promise<Response<ListUploadingsResp>> => {
return this.listUploadingsMockResp;
};


@ -17,6 +17,7 @@ export interface UploadStatusResp {
}
export interface ListResp {
cwd: string;
metadatas: MetadataResp[];
}
@ -50,6 +51,7 @@ export interface IFilesClient {
) => Promise<Response<UploadStatusResp>>;
uploadStatus: (filePath: string) => Promise<Response<UploadStatusResp>>;
list: (dirPath: string) => Promise<Response<ListResp>>;
listHome: () => Promise<Response<ListResp>>;
listUploadings: () => Promise<Response<ListUploadingsResp>>;
deleteUploading: (filePath: string) => Promise<Response>;
}


@ -114,6 +114,27 @@ export class Updater {
: this.props.items;
};
setHomeItems = async (): Promise<void> => {
const listResp = await this.filesClient.listHome();
this.props.dirPath = List<string>(listResp.data.cwd.split("/"));
this.props.items =
listResp.status === 200
? List<MetadataResp>(listResp.data.metadatas)
: this.props.items;
};
goHome = async (): Promise<void> => {
const listResp = await this.filesClient.listHome();
// TODO: derive the current dir from listResp.data.cwd and set dirPath
// this.props.dirPath = dirParts;
this.props.items =
listResp.status === 200
? List<MetadataResp>(listResp.data.metadatas)
: this.props.items;
};
moveHere = async (
srcDir: string,
dstDir: string,


@ -104,9 +104,7 @@ export class AuthPane extends React.Component<Props, State, {}> {
this.update(PanesUpdater.updateState);
// refresh
return BrowserUpdater().setItems(
List<string>(["."])
);
return BrowserUpdater().setHomeItems();
} else {
this.setState({ user: "", pwd: "" });
alert("Failed to login.");


@ -20,7 +20,7 @@ export class StateMgr extends React.Component<Props, State, {}> {
BrowserUpdater().init(state.panel.browser);
BrowserUpdater().setClients(new UsersClient(""), new FilesClient(""));
BrowserUpdater()
.setItems(state.panel.browser.dirPath)
.setHomeItems()
.then(() => {
return BrowserUpdater().refreshUploadings();
})


@ -1,15 +1,16 @@
package fileshdr
import (
"crypto/sha1"
"encoding/base64"
"errors"
"fmt"
"github.com/ihexxa/quickshare/src/userstore"
"io"
"net/http"
"os"
"path"
"path/filepath"
"strings"
"time"
"github.com/gin-gonic/gin"
@ -18,14 +19,9 @@ import (
"github.com/ihexxa/quickshare/src/depidx"
q "github.com/ihexxa/quickshare/src/handlers"
"github.com/ihexxa/quickshare/src/handlers/singleuserhdr"
)
var (
// dirs
UploadDir = "uploadings"
FsDir = "files"
// queries
FilePathQuery = "fp"
ListDirQuery = "dp"
@ -45,20 +41,11 @@ type FileHandlers struct {
}
func NewFileHandlers(cfg gocfg.ICfg, deps *depidx.Deps) (*FileHandlers, error) {
var err error
if err = deps.FS().MkdirAll(UploadDir); err != nil {
return nil, err
}
if err = deps.FS().MkdirAll(FsDir); err != nil {
return nil, err
}
return &FileHandlers{
cfg: cfg,
deps: deps,
uploadMgr: NewUploadMgr(deps.KV()),
}, err
}, nil
}
type AutoLocker struct {
@ -95,12 +82,24 @@ func (lk *AutoLocker) Exec(handler func()) {
lk.c.JSON(q.ErrResp(lk.c, 500, errors.New("failed to lock the file")))
return
}
locked = true
locked = true
handler()
}
func (h *FileHandlers) canAccess(userID, role, path string) bool {
if role == userstore.AdminRole {
return true
}
// the file path must start with userID: <userID>/...
parts := strings.Split(path, "/")
if len(parts) < 1 {
return false
}
return parts[0] == userID
}
type CreateReq struct {
Path string `json:"path"`
FileSize int64 `json:"fileSize"`
@ -112,10 +111,15 @@ func (h *FileHandlers) Create(c *gin.Context) {
c.JSON(q.ErrResp(c, 500, err))
return
}
userName := c.MustGet(singleuserhdr.UserParam).(string)
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.Path) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
tmpFilePath := h.getTmpPath(req.Path)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath))
tmpFilePath := q.GetTmpPath(userID, req.Path)
locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() {
err := h.deps.FS().Create(tmpFilePath)
if err != nil {
@ -126,14 +130,14 @@ func (h *FileHandlers) Create(c *gin.Context) {
}
return
}
err = h.uploadMgr.AddInfo(userName, req.Path, tmpFilePath, req.FileSize)
err = h.uploadMgr.AddInfo(userID, req.Path, tmpFilePath, req.FileSize)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
fileDir := h.FsPath(filepath.Dir(req.Path))
err = h.deps.FS().MkdirAll(fileDir)
// fileDir := q.FsPath(userID, filepath.Dir(req.Path))
err = h.deps.FS().MkdirAll(filepath.Dir(req.Path))
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -149,8 +153,14 @@ func (h *FileHandlers) Delete(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file path")))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
filePath = h.FsPath(filePath)
// filePath = q.FsPath(userID, filePath)
err := h.deps.FS().Remove(filePath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
@ -173,8 +183,14 @@ func (h *FileHandlers) Metadata(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file path")))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
filePath = h.FsPath(filePath)
// filePath = q.FsPath(userID, filePath)
info, err := h.deps.FS().Stat(filePath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
@ -199,9 +215,15 @@ func (h *FileHandlers) Mkdir(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, err))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.Path) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
dirPath := h.FsPath(req.Path)
err := h.deps.FS().MkdirAll(dirPath)
// dirPath := q.FsPath(userID, req.Path)
err := h.deps.FS().MkdirAll(req.Path)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -221,15 +243,21 @@ func (h *FileHandlers) Move(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, err))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.OldPath) || !h.canAccess(userID, role, req.NewPath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
oldPath := h.FsPath(req.OldPath)
newPath := h.FsPath(req.NewPath)
_, err := h.deps.FS().Stat(oldPath)
// oldPath := q.FsPath(userID, req.OldPath)
// newPath := q.FsPath(userID, req.NewPath)
_, err := h.deps.FS().Stat(req.OldPath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
_, err = h.deps.FS().Stat(newPath)
_, err = h.deps.FS().Stat(req.NewPath)
if err != nil && !os.IsNotExist(err) {
c.JSON(q.ErrResp(c, 500, err))
return
@ -239,7 +267,7 @@ func (h *FileHandlers) Move(c *gin.Context) {
return
}
err = h.deps.FS().Rename(oldPath, newPath)
err = h.deps.FS().Rename(req.OldPath, req.NewPath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -260,14 +288,19 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
c.JSON(q.ErrResp(c, 500, err))
return
}
userName := c.MustGet(singleuserhdr.UserParam).(string)
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.Path) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
tmpFilePath := h.getTmpPath(req.Path)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath))
tmpFilePath := q.GetTmpPath(userID, req.Path)
locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() {
var err error
_, fileSize, uploaded, err := h.uploadMgr.GetInfo(userName, tmpFilePath)
_, fileSize, uploaded, err := h.uploadMgr.GetInfo(userID, tmpFilePath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -288,7 +321,7 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
return
}
err = h.uploadMgr.SetInfo(userName, tmpFilePath, req.Offset+int64(wrote))
err = h.uploadMgr.SetInfo(userID, tmpFilePath, req.Offset+int64(wrote))
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -296,7 +329,7 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
// move the file from uploading dir to uploaded dir
if uploaded+int64(wrote) == fileSize {
fsFilePath, err := h.getFSFilePath(req.Path)
fsFilePath, err := h.getFSFilePath(userID, req.Path)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -307,7 +340,7 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
c.JSON(q.ErrResp(c, 500, fmt.Errorf("%s error: %w", req.Path, err)))
return
}
err = h.uploadMgr.DelInfo(userName, tmpFilePath)
err = h.uploadMgr.DelInfo(userID, tmpFilePath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -323,8 +356,8 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
})
}
func (h *FileHandlers) getFSFilePath(reqPath string) (string, error) {
fsFilePath := h.FsPath(reqPath)
func (h *FileHandlers) getFSFilePath(userID, fsFilePath string) (string, error) {
// fsFilePath := q.FsPath(userID, reqPath)
_, err := h.deps.FS().Stat(fsFilePath)
if err != nil {
if os.IsNotExist(err) {
@ -367,12 +400,17 @@ func (h *FileHandlers) UploadStatus(c *gin.Context) {
if filePath == "" {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file name")))
}
userName := c.MustGet(singleuserhdr.UserParam).(string)
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
tmpFilePath := h.getTmpPath(filePath)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath))
tmpFilePath := q.GetTmpPath(userID, filePath)
locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() {
_, fileSize, uploaded, err := h.uploadMgr.GetInfo(userName, tmpFilePath)
_, fileSize, uploaded, err := h.uploadMgr.GetInfo(userID, tmpFilePath)
if err != nil {
if os.IsNotExist(err) {
c.JSON(q.ErrResp(c, 404, err))
@ -400,9 +438,16 @@ func (h *FileHandlers) Download(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file name")))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
// TODO: when sharing is introduced, move the following logic to a separate method
// concurrent file access is managed by the OS
filePath = h.FsPath(filePath)
// filePath = q.FsPath(userID, filePath)
info, err := h.deps.FS().Stat(filePath)
if err != nil {
if os.IsNotExist(err) {
@ -446,7 +491,7 @@ func (h *FileHandlers) Download(c *gin.Context) {
// respond to range requests
parts, err := multipart.RangeToParts(rangeVal, contentType, fmt.Sprintf("%d", info.Size()))
if err != nil {
c.JSON(q.ErrResp(c, 401, err))
c.JSON(q.ErrResp(c, 400, err))
return
}
@ -463,17 +508,24 @@ func (h *FileHandlers) Download(c *gin.Context) {
}
type ListResp struct {
Cwd string `json:"cwd"`
Metadatas []*MetadataResp `json:"metadatas"`
}
func (h *FileHandlers) List(c *gin.Context) {
dirPath := c.Query(ListDirQuery)
if dirPath == "" {
c.JSON(q.ErrResp(c, 402, errors.New("incorrect path name")))
c.JSON(q.ErrResp(c, 400, errors.New("incorrect path name")))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, dirPath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
dirPath = h.FsPath(dirPath)
// dirPath = q.FsPath(userID, dirPath)
infos, err := h.deps.FS().ListDir(dirPath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
@ -489,7 +541,33 @@ func (h *FileHandlers) List(c *gin.Context) {
})
}
c.JSON(200, &ListResp{Metadatas: metadatas})
c.JSON(200, &ListResp{
Cwd: dirPath,
Metadatas: metadatas,
})
}
func (h *FileHandlers) ListHome(c *gin.Context) {
userID := c.MustGet(q.UserIDParam).(string)
infos, err := h.deps.FS().ListDir(userID)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
metadatas := []*MetadataResp{}
for _, info := range infos {
metadatas = append(metadatas, &MetadataResp{
Name: info.Name(),
Size: info.Size(),
ModTime: info.ModTime(),
IsDir: info.IsDir(),
})
}
c.JSON(200, &ListResp{
Cwd: userID,
Metadatas: metadatas,
})
}
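A rough usage sketch for the new ListHome endpoint: the route GET /v1/fs/dirs/home is registered in the server diff further down, and the session cookie name "tk" comes from the handlers package; the address and token value here are placeholders, not part of this change.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// assumption: a quickshare server is reachable at this address and a token
	// was obtained earlier from POST /v1/users/login
	req, err := http.NewRequest("GET", "http://127.0.0.1:8686/v1/fs/dirs/home", nil)
	if err != nil {
		panic(err)
	}
	req.AddCookie(&http.Cookie{Name: "tk", Value: "<token-from-login>"})

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// expected shape on success: 200 {"cwd":"<userID>","metadatas":[...]}
	fmt.Println(resp.StatusCode, string(body))
}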
func (h *FileHandlers) Copy(c *gin.Context) {
@ -500,16 +578,8 @@ func (h *FileHandlers) CopyDir(c *gin.Context) {
c.JSON(q.NewMsgResp(501, "Not Implemented"))
}
func (h *FileHandlers) getTmpPath(filePath string) string {
return path.Join(UploadDir, fmt.Sprintf("%x", sha1.Sum([]byte(filePath))))
}
func lockName(user, filePath string) string {
return fmt.Sprintf("%s/%s", user, filePath)
}
func (h *FileHandlers) FsPath(filePath string) string {
return path.Join(FsDir, filePath)
func lockName(filePath string) string {
return filePath
}
type ListUploadingsResp struct {
@ -517,9 +587,9 @@ type ListUploadingsResp struct {
}
func (h *FileHandlers) ListUploadings(c *gin.Context) {
userName := c.MustGet(singleuserhdr.UserParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
infos, err := h.uploadMgr.ListInfo(userName)
infos, err := h.uploadMgr.ListInfo(userID)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -533,11 +603,11 @@ func (h *FileHandlers) DelUploading(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file path")))
return
}
userName := c.MustGet(singleuserhdr.UserParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
var err error
tmpFilePath := h.getTmpPath(filePath)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath))
tmpFilePath := q.GetTmpPath(userID, filePath)
locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() {
err = h.deps.FS().Remove(tmpFilePath)
if err != nil {
@ -545,7 +615,7 @@ func (h *FileHandlers) DelUploading(c *gin.Context) {
return
}
err = h.uploadMgr.DelInfo(userName, tmpFilePath)
err = h.uploadMgr.DelInfo(userID, tmpFilePath)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return


@ -3,6 +3,7 @@ package multiusers
import (
"errors"
"fmt"
"path/filepath"
"strconv"
"time"
@ -18,30 +19,95 @@ import (
var (
ErrInvalidUser = errors.New("invalid user name or password")
ErrInvalidConfig = errors.New("invalid user config")
UserIDParam = "uid"
UserParam = "user"
PwdParam = "pwd"
NewPwdParam = "newpwd"
RoleParam = "role"
ExpireParam = "expire"
TokenCookie = "tk"
)
type MultiUsersSvc struct {
cfg gocfg.ICfg
deps *depidx.Deps
cfg gocfg.ICfg
deps *depidx.Deps
apiACRules map[string]bool
}
func NewMultiUsersSvc(cfg gocfg.ICfg, deps *depidx.Deps) (*MultiUsersSvc, error) {
publicPath := filepath.Join("/", cfg.GrabString("Server.PublicPath"))
apiACRules := map[string]bool{
// TODO: make these configurable
// admin rules
apiRuleCname(userstore.AdminRole, "GET", "/"): true,
apiRuleCname(userstore.AdminRole, "GET", publicPath): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/users/login"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/users/logout"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/users/isauthed"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/users/pwd"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/users/"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/roles/"): true,
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/roles/"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/roles/"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/fs/files"): true,
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/fs/files"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/files"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/fs/files/copy"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/fs/files/move"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/dirs"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/dirs/home"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/fs/dirs"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/metadata"): true,
apiRuleCname(userstore.AdminRole, "OPTIONS", "/v1/settings/health"): true,
// user rules
apiRuleCname(userstore.UserRole, "GET", "/"): true,
apiRuleCname(userstore.UserRole, "GET", publicPath): true,
apiRuleCname(userstore.UserRole, "POST", "/v1/users/logout"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/users/isauthed"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/users/pwd"): true,
apiRuleCname(userstore.UserRole, "POST", "/v1/fs/files"): true,
apiRuleCname(userstore.UserRole, "DELETE", "/v1/fs/files"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/files"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/fs/files/copy"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/fs/files/move"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/dirs"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/dirs/home"): true,
apiRuleCname(userstore.UserRole, "POST", "/v1/fs/dirs"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.UserRole, "DELETE", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/metadata"): true,
apiRuleCname(userstore.UserRole, "OPTIONS", "/v1/settings/health"): true,
// visitor rules
apiRuleCname(userstore.VisitorRole, "GET", "/"): true,
apiRuleCname(userstore.VisitorRole, "GET", publicPath): true,
apiRuleCname(userstore.VisitorRole, "POST", "/v1/users/login"): true,
apiRuleCname(userstore.VisitorRole, "GET", "/v1/users/isauthed"): true,
apiRuleCname(userstore.VisitorRole, "GET", "/v1/fs/files"): true,
apiRuleCname(userstore.VisitorRole, "OPTIONS", "/v1/settings/health"): true,
}
return &MultiUsersSvc{
cfg: cfg,
deps: deps,
cfg: cfg,
deps: deps,
apiACRules: apiACRules,
}, nil
}
func (h *MultiUsersSvc) Init(adminName, adminPwd string) (string, error) {
var err error
userID := "0"
fsPath := q.HomePath(userID, "/")
if err = h.deps.FS().MkdirAll(fsPath); err != nil {
return "", err
}
uploadingsPath := q.GetTmpPath(userID, "/")
if err = h.deps.FS().MkdirAll(uploadingsPath); err != nil {
return "", err
}
// TODO: return "" to stay compatible with the singleuser service; remove this later
err := h.deps.Users().Init(adminName, adminPwd)
err = h.deps.Users().Init(adminName, adminPwd)
return "", err
}
@ -75,10 +141,10 @@ func (h *MultiUsersSvc) Login(c *gin.Context) {
ttl := h.cfg.GrabInt("Users.CookieTTL")
token, err := h.deps.Token().ToToken(map[string]string{
UserIDParam: fmt.Sprint(user.ID),
UserParam: user.Name,
RoleParam: user.Role,
ExpireParam: fmt.Sprintf("%d", time.Now().Unix()+int64(ttl)),
q.UserIDParam: fmt.Sprint(user.ID),
q.UserParam: user.Name,
q.RoleParam: user.Role,
q.ExpireParam: fmt.Sprintf("%d", time.Now().Unix()+int64(ttl)),
})
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
@ -87,7 +153,7 @@ func (h *MultiUsersSvc) Login(c *gin.Context) {
secure := h.cfg.GrabBool("Users.CookieSecure")
httpOnly := h.cfg.GrabBool("Users.CookieHttpOnly")
c.SetCookie(TokenCookie, token, ttl, "/", "", secure, httpOnly)
c.SetCookie(q.TokenCookie, token, ttl, "/", "", secure, httpOnly)
c.JSON(q.Resp(200))
}
@ -98,12 +164,17 @@ func (h *MultiUsersSvc) Logout(c *gin.Context) {
// token already verified in the authn middleware
secure := h.cfg.GrabBool("Users.CookieSecure")
httpOnly := h.cfg.GrabBool("Users.CookieHttpOnly")
c.SetCookie(TokenCookie, "", 0, "/", "", secure, httpOnly)
c.SetCookie(q.TokenCookie, "", 0, "/", "", secure, httpOnly)
c.JSON(q.Resp(200))
}
func (h *MultiUsersSvc) IsAuthed(c *gin.Context) {
// token already verified in the authn middleware
role := c.MustGet(q.RoleParam).(string)
if role == userstore.VisitorRole {
c.JSON(q.ErrResp(c, 401, q.ErrUnauthorized))
return
}
c.JSON(q.Resp(200))
}
@ -128,7 +199,7 @@ func (h *MultiUsersSvc) SetPwd(c *gin.Context) {
return
}
uid, err := strconv.ParseUint(claims[UserIDParam], 10, 64)
uid, err := strconv.ParseUint(claims[q.UserIDParam], 10, 64)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -177,13 +248,13 @@ func (h *MultiUsersSvc) AddUser(c *gin.Context) {
return
}
// TODO: do more comprehensive validation
// Role and duplicate names will be validated by the store
if len(req.Name) < 2 {
c.JSON(q.ErrResp(c, 400, errors.New("name length must be greater than 2")))
var err error
if err = h.isValidUserName(req.Name); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return
} else if len(req.Name) < 3 {
c.JSON(q.ErrResp(c, 400, errors.New("password length must be greater than 2")))
} else if err = h.isValidPwd(req.Pwd); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return
}
@ -194,6 +265,20 @@ func (h *MultiUsersSvc) AddUser(c *gin.Context) {
return
}
// TODO: the following operations must be atomic
// TODO: check if the folders already exist
userID := c.MustGet(q.UserIDParam).(string)
homePath := q.HomePath(userID, "/")
if err = h.deps.FS().MkdirAll(homePath); err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
uploadingsPath := q.GetTmpPath(userID, "/")
if err = h.deps.FS().MkdirAll(uploadingsPath); err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
err = h.deps.Users().AddUser(&userstore.User{
ID: uid,
Name: req.Name,
@ -213,19 +298,19 @@ type AddRoleReq struct {
}
func (h *MultiUsersSvc) AddRole(c *gin.Context) {
var err error
req := &AddRoleReq{}
if err := c.ShouldBindJSON(&req); err != nil {
if err = c.ShouldBindJSON(&req); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return
}
// TODO: do more comprehensive validation
if len(req.Role) < 2 {
c.JSON(q.ErrResp(c, 400, errors.New("name length must be greater than 2")))
if err = h.isValidRole(req.Role); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return
}
err := h.deps.Users().AddRole(req.Role)
err = h.deps.Users().AddRole(req.Role)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -239,19 +324,19 @@ type DelRoleReq struct {
}
func (h *MultiUsersSvc) DelRole(c *gin.Context) {
var err error
req := &DelRoleReq{}
if err := c.ShouldBindJSON(&req); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return
}
// TODO: do more comprehensive validation
if len(req.Role) < 2 {
c.JSON(q.ErrResp(c, 400, errors.New("name length must be greater than 2")))
if err = h.isValidRole(req.Role); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return
}
err := h.deps.Users().DelRole(req.Role)
err = h.deps.Users().DelRole(req.Role)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
@ -276,24 +361,47 @@ func (h *MultiUsersSvc) ListRoles(c *gin.Context) {
}
func (h *MultiUsersSvc) getUserInfo(c *gin.Context) (map[string]string, error) {
tokenStr, err := c.Cookie(TokenCookie)
tokenStr, err := c.Cookie(q.TokenCookie)
if err != nil {
return nil, err
}
claims, err := h.deps.Token().FromToken(
tokenStr,
map[string]string{
UserIDParam: "",
UserParam: "",
RoleParam: "",
ExpireParam: "",
q.UserIDParam: "",
q.UserParam: "",
q.RoleParam: "",
q.ExpireParam: "",
},
)
if err != nil {
return nil, err
} else if claims[UserIDParam] == "" || claims[UserParam] == "" {
} else if claims[q.UserIDParam] == "" || claims[q.UserParam] == "" {
return nil, ErrInvalidConfig
}
return claims, nil
}
func (h *MultiUsersSvc) isValidUserName(userName string) error {
minUserNameLen := h.cfg.GrabInt("Users.MinUserNameLen")
if len(userName) < minUserNameLen {
return errors.New("name is too short")
}
return nil
}
func (h *MultiUsersSvc) isValidPwd(pwd string) error {
minPwdLen := h.cfg.GrabInt("Users.MinPwdLen")
if len(pwd) < minPwdLen {
return errors.New("password is too short")
}
return nil
}
func (h *MultiUsersSvc) isValidRole(role string) error {
if role == userstore.AdminRole || role == userstore.UserRole || role == userstore.VisitorRole {
return errors.New("predefined roles can not be added/deleted")
}
return h.isValidUserName(role)
}


@ -1,81 +1,84 @@
package multiusers
import (
"errors"
"fmt"
"net/http"
"strconv"
"strings"
"time"
"github.com/gin-gonic/gin"
q "github.com/ihexxa/quickshare/src/handlers"
"github.com/ihexxa/quickshare/src/userstore"
)
var exposedAPIs = map[string]bool{
"Login-fm": true,
"Health-fm": true,
func apiRuleCname(role, method, path string) string {
return fmt.Sprintf("%s-%s-%s", role, method, path)
}
var publicRootPath = "/"
var publicStaticPath = "/static"
func IsPublicPath(accessPath string) bool {
return accessPath == publicRootPath || strings.HasPrefix(accessPath, publicStaticPath)
}
func GetHandlerName(fullname string) (string, error) {
parts := strings.Split(fullname, ".")
if len(parts) == 0 {
return "", errors.New("invalid handler name")
}
return parts[len(parts)-1], nil
}
func (h *MultiUsersSvc) Auth() gin.HandlerFunc {
func (h *MultiUsersSvc) AuthN() gin.HandlerFunc {
return func(c *gin.Context) {
handlerName, err := GetHandlerName(c.HandlerName())
if err != nil {
c.JSON(q.ErrResp(c, 401, err))
return
}
accessPath := c.Request.URL.String()
enableAuth := h.cfg.GrabBool("Users.EnableAuth")
if enableAuth && !exposedAPIs[handlerName] && !IsPublicPath(accessPath) {
token, err := c.Cookie(TokenCookie)
if err != nil {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return
}
claims := map[string]string{
UserIDParam: "",
UserParam: "",
RoleParam: "",
ExpireParam: "",
}
claims, err = h.deps.Token().FromToken(token, claims)
if err != nil {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return
}
for key, val := range claims {
c.Set(key, val)
}
now := time.Now().Unix()
expire, err := strconv.ParseInt(claims[ExpireParam], 10, 64)
if err != nil || expire <= now {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return
}
// no one is allowed to download
} else {
// this is for UploadMgr to get user info to get related namespace
c.Set(UserParam, "quickshare_anonymous")
claims := map[string]string{
q.UserIDParam: "",
q.UserParam: "",
q.RoleParam: userstore.VisitorRole,
q.ExpireParam: "",
}
if enableAuth {
token, err := c.Cookie(q.TokenCookie)
if err != nil {
if err != http.ErrNoCookie {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return
}
// set default values if no cookie is found
} else if token != "" {
claims, err = h.deps.Token().FromToken(token, claims)
if err != nil {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return
}
now := time.Now().Unix()
expire, err := strconv.ParseInt(claims[q.ExpireParam], 10, 64)
if err != nil || expire <= now {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return
}
}
// set default values if token is empty
} else {
claims[q.UserIDParam] = "0"
claims[q.UserParam] = "admin"
claims[q.RoleParam] = userstore.AdminRole
claims[q.ExpireParam] = ""
}
for key, val := range claims {
c.Set(key, val)
}
c.Next()
}
}
func (h *MultiUsersSvc) APIAccessControl() gin.HandlerFunc {
return func(c *gin.Context) {
role := c.MustGet(q.RoleParam).(string)
method := c.Request.Method
accessPath := c.Request.URL.Path
// we don't lock the map because we only read it
if h.apiACRules[apiRuleCname(role, method, accessPath)] {
c.Next()
return
} else if accessPath == "/" || // TODO: temporarily allow accessing static resources
accessPath == "/favicon.ico" ||
strings.HasPrefix(accessPath, "/static") {
c.Next()
return
}
c.AbortWithStatusJSON(q.ErrResp(c, 403, q.ErrAccessDenied))
}
}
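To make the lookup concrete: rule keys are plain "<role>-<method>-<path>" strings built by apiRuleCname, and a request passes only when its exact key is present in the map. A toy reproduction with an illustrative subset of the table above (not the full rule set):

package main

import "fmt"

// apiRuleCname mirrors the key scheme used by the middleware: "<role>-<method>-<path>".
func apiRuleCname(role, method, path string) string {
	return fmt.Sprintf("%s-%s-%s", role, method, path)
}

func main() {
	rules := map[string]bool{
		apiRuleCname("user", "GET", "/v1/fs/dirs/home"): true,
		apiRuleCname("visitor", "GET", "/v1/fs/files"):  true,
	}
	fmt.Println(rules[apiRuleCname("user", "GET", "/v1/fs/dirs/home")])   // true: request continues
	fmt.Println(rules[apiRuleCname("visitor", "DELETE", "/v1/fs/files")]) // false: aborted with 403
}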


@ -1,11 +1,31 @@
package handlers
import (
"crypto/sha1"
"errors"
"fmt"
"path/filepath"
"github.com/gin-gonic/gin"
)
var (
// dirs
UploadDir = "uploadings"
FsDir = "files"
UserIDParam = "uid"
UserParam = "user"
PwdParam = "pwd"
NewPwdParam = "newpwd"
RoleParam = "role"
ExpireParam = "expire"
TokenCookie = "tk"
ErrAccessDenied = errors.New("access denied")
ErrUnauthorized = errors.New("unauthorized")
)
var statusCodes = map[int]string{
100: "Continue", // RFC 7231, 6.2.1
101: "SwitchingProtocols", // RFC 7231, 6.2.2
@ -102,3 +122,15 @@ func ErrResp(c *gin.Context, code int, err error) (int, interface{}) {
return code, gErr.JSON()
}
func FsPath(userID, relFilePath string) string {
return filepath.Join(userID, FsDir, relFilePath)
}
func HomePath(userID, relFilePath string) string {
return filepath.Join(userID, relFilePath)
}
func GetTmpPath(userID, relFilePath string) string {
return filepath.Join(UploadDir, userID, fmt.Sprintf("%x", sha1.Sum([]byte(relFilePath))))
}
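To make the layout concrete, a short sketch of the paths these helpers produce for a hypothetical user ID "7" and relative path "docs/a.txt" (the sha1 hashing mirrors GetTmpPath above):

package main

import (
	"crypto/sha1"
	"fmt"
	"path/filepath"
)

func main() {
	userID, rel := "7", "docs/a.txt" // illustrative values only

	home := filepath.Join(userID, rel)                                                   // HomePath: "7/docs/a.txt"
	files := filepath.Join(userID, "files", rel)                                         // FsPath:   "7/files/docs/a.txt"
	tmp := filepath.Join("uploadings", userID, fmt.Sprintf("%x", sha1.Sum([]byte(rel)))) // GetTmpPath: "uploadings/7/<sha1>"

	fmt.Println(home)
	fmt.Println(files)
	fmt.Println(tmp)
}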


@ -15,6 +15,8 @@ type UsersCfg struct {
CookieTTL int `json:"cookieTTL" yaml:"cookieTTL"`
CookieSecure bool `json:"cookieSecure" yaml:"cookieSecure"`
CookieHttpOnly bool `json:"cookieHttpOnly" yaml:"cookieHttpOnly"`
MinUserNameLen int `json:"minUserNameLen" yaml:"minUserNameLen"`
MinPwdLen int `json:"minPwdLen" yaml:"minPwdLen"`
}
type Secrets struct {
@ -56,6 +58,8 @@ func DefaultConfig() (string, error) {
CookieTTL: 3600 * 24 * 7, // 1 week
CookieSecure: false,
CookieHttpOnly: true,
MinUserNameLen: 4,
MinPwdLen: 6,
},
Secrets: &Secrets{
TokenSecret: "",


@ -165,7 +165,8 @@ func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps) (*gin.E
}
// middleware
router.Use(userHdrs.Auth())
router.Use(userHdrs.AuthN())
router.Use(userHdrs.APIAccessControl())
// tmp static server
router.Use(static.Serve("/", static.LocalFile(publicPath, false)))
@ -194,6 +195,7 @@ func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps) (*gin.E
filesAPI.PATCH("/files/move", fileHdrs.Move)
filesAPI.GET("/dirs", fileHdrs.List)
filesAPI.GET("/dirs/home", fileHdrs.ListHome)
filesAPI.POST("/dirs", fileHdrs.Mkdir)
// files.POST("/dirs/copy", fileHdrs.CopyDir)


@ -14,6 +14,7 @@ import (
"testing"
"github.com/ihexxa/quickshare/src/client"
q "github.com/ihexxa/quickshare/src/handlers"
"github.com/ihexxa/quickshare/src/handlers/fileshdr"
)
@ -22,7 +23,9 @@ func TestFileHandlers(t *testing.T) {
root := "testData"
config := `{
"users": {
"enableAuth": false
"enableAuth": true,
"minUserNameLen": 2,
"minPwdLen": 4
},
"server": {
"debug": true
@ -32,6 +35,11 @@ func TestFileHandlers(t *testing.T) {
}
}`
adminName := "qs"
adminPwd := "quicksh@re"
os.Setenv("DEFAULTADMIN", adminName)
os.Setenv("DEFAULTADMINPWD", adminPwd)
os.RemoveAll(root)
err := os.MkdirAll(root, 0700)
if err != nil {
@ -42,14 +50,23 @@ func TestFileHandlers(t *testing.T) {
srv := startTestServer(config)
defer srv.Shutdown()
fs := srv.depsFS()
cl := client.NewFilesClient(addr)
if !waitForReady(addr) {
t.Fatal("fail to start server")
}
usersCl := client.NewSingleUserClient(addr)
resp, _, errs := usersCl.Login(adminName, adminPwd)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
token := client.GetCookie(resp.Cookies(), q.TokenCookie)
cl := client.NewFilesClient(addr, token)
assertUploadOK := func(t *testing.T, filePath, content string) bool {
cl := client.NewFilesClient(addr)
cl := client.NewFilesClient(addr, token)
fileSize := int64(len([]byte(content)))
res, _, errs := cl.Create(filePath, fileSize)
@ -82,7 +99,7 @@ func TestFileHandlers(t *testing.T) {
fileSize = int64(len([]byte(content)))
)
cl := client.NewFilesClient(addr)
// cl := client.NewFilesClient(addr)
rd := rand.Intn(3)
switch rd {
@ -151,12 +168,12 @@ func TestFileHandlers(t *testing.T) {
t.Run("test uploading files with duplicated names", func(t *testing.T) {
files := map[string]string{
"dupdir/dup_file1": "12345678",
"dupdir/dup_file2.ext": "12345678",
"0/dupdir/dup_file1": "12345678",
"0/dupdir/dup_file2.ext": "12345678",
}
renames := map[string]string{
"dupdir/dup_file1": "dupdir/dup_file1_1",
"dupdir/dup_file2.ext": "dupdir/dup_file2_1.ext",
"0/dupdir/dup_file1": "0/dupdir/dup_file1_1",
"0/dupdir/dup_file2.ext": "0/dupdir/dup_file2_1.ext",
}
for filePath, content := range files {
@ -183,8 +200,8 @@ func TestFileHandlers(t *testing.T) {
t.Run("test files APIs: Create-UploadChunk-UploadStatus-Metadata-Delete", func(t *testing.T) {
for filePath, content := range map[string]string{
"path1/f1.md": "1111 1111 1111 1111",
"path1/path2/f2.md": "1010 1010 1111 0000 0010",
"0/path1/f1.md": "1111 1111 1111 1111",
"0/path1/path2/f2.md": "1010 1010 1111 0000 0010",
} {
fileSize := int64(len([]byte(content)))
// create a file
@ -196,7 +213,7 @@ func TestFileHandlers(t *testing.T) {
}
// check uploading file
uploadFilePath := path.Join(fileshdr.UploadDir, fmt.Sprintf("%x", sha1.Sum([]byte(filePath))))
uploadFilePath := path.Join(q.UploadDir, "0", fmt.Sprintf("%x", sha1.Sum([]byte(filePath))))
info, err := fs.Stat(uploadFilePath)
if err != nil {
t.Fatal(err)
@ -242,12 +259,12 @@ func TestFileHandlers(t *testing.T) {
}
// check uploaded file
fsFilePath := filepath.Join(fileshdr.FsDir, filePath)
info, err = fs.Stat(fsFilePath)
// fsFilePath := filepath.Join("0", filePath)
info, err = fs.Stat(filePath)
if err != nil {
t.Fatal(err)
} else if info.Name() != filepath.Base(fsFilePath) {
t.Fatal(info.Name(), filepath.Base(fsFilePath))
} else if info.Name() != filepath.Base(filePath) {
t.Fatal(info.Name(), filepath.Base(filePath))
}
// metadata
@ -273,11 +290,11 @@ func TestFileHandlers(t *testing.T) {
t.Run("test dirs APIs: Mkdir-Create-UploadChunk-List", func(t *testing.T) {
for dirPath, files := range map[string]map[string]string{
"dir/path1": map[string]string{
"0/dir/path1": map[string]string{
"f1.md": "11111",
"f2.md": "22222222222",
},
"dir/path2/path2": map[string]string{
"0/dir/path2/path2": map[string]string{
"f3.md": "3333333",
},
} {
@ -314,8 +331,8 @@ func TestFileHandlers(t *testing.T) {
})
t.Run("test operation APIs: Mkdir-Create-UploadChunk-Move-List", func(t *testing.T) {
srcDir := "move/src"
dstDir := "move/dst"
srcDir := "0/move/src"
dstDir := "0/move/dst"
for _, dirPath := range []string{srcDir, dstDir} {
res, _, errs := cl.Mkdir(dirPath)
@ -366,8 +383,8 @@ func TestFileHandlers(t *testing.T) {
t.Run("test download APIs: Download(normal, ranges)", func(t *testing.T) {
for filePath, content := range map[string]string{
"download/path1/f1": "123456",
"download/path1/path2": "12345678",
"0/download/path1/f1": "123456",
"0/download/path1/path2": "12345678",
} {
assertUploadOK(t, filePath, content)
@ -431,8 +448,8 @@ func TestFileHandlers(t *testing.T) {
t.Run("test uploading APIs: Create, ListUploadings, DelUploading", func(t *testing.T) {
files := map[string]string{
"uploadings/path1/f1": "123456",
"uploadings/path1/path2": "12345678",
"0/uploadings/path1/f1": "123456",
"0/uploadings/path1/path2": "12345678",
}
for filePath, content := range files {
@ -487,10 +504,10 @@ func TestFileHandlers(t *testing.T) {
})
t.Run("test uploading APIs: Create, Stop, UploadChunk", func(t *testing.T) {
cl := client.NewFilesClient(addr)
// cl := client.NewFilesClient(addr)
files := map[string]string{
"uploadings/path1/f1": "12345678",
"0/uploadings/path1/f1": "12345678",
}
for filePath, content := range files {
@ -542,12 +559,12 @@ func TestFileHandlers(t *testing.T) {
})
t.Run("test uploading APIs: Create and UploadChunk randomly", func(t *testing.T) {
cl := client.NewFilesClient(addr)
// cl := client.NewFilesClient(addr)
files := map[string]string{
"uploadings/random/path1/f1": "12345678",
"uploadings/random/path1/f2": "87654321",
"uploadings/random/path1/f3": "17654321",
"0/uploadings/random/path1/f1": "12345678",
"0/uploadings/random/path1/f2": "87654321",
"0/uploadings/random/path1/f3": "17654321",
}
for filePath, content := range files {
@ -598,7 +615,7 @@ func TestFileHandlers(t *testing.T) {
t.Fatal("incorrect uploaded size", mRes)
}
isEqual, err := compareFileContent(fs, filePath, content)
isEqual, err := compareFileContent(fs, "0", filePath, content)
if err != nil {
t.Fatalf("err comparing content: %s", err)
} else if !isEqual {
@ -608,4 +625,11 @@ func TestFileHandlers(t *testing.T) {
assetDownloadOK(t, filePath, content)
}
})
resp, _, errs = usersCl.Logout(token)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
}


@ -15,7 +15,9 @@ func TestSingleUserHandlers(t *testing.T) {
root := "testData"
config := `{
"users": {
"enableAuth": true
"enableAuth": true,
"minUserNameLen": 2,
"minPwdLen": 4
},
"server": {
"debug": true
@ -78,7 +80,7 @@ func TestSingleUserHandlers(t *testing.T) {
}
})
t.Run("test users APIs: Login-AddUser-Logout-Login", func(t *testing.T) {
t.Run("test users APIs: Login-AddUser-Logout-Login-Logout", func(t *testing.T) {
resp, _, errs := usersCl.Login(adminName, adminNewPwd)
if len(errs) > 0 {
t.Fatal(errs)
@ -111,9 +113,16 @@ func TestSingleUserHandlers(t *testing.T) {
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
resp, _, errs = usersCl.Logout(token)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
})
t.Run("test roles APIs: Login-AddRole-ListRoles-DelRole-ListRoles", func(t *testing.T) {
t.Run("test roles APIs: Login-AddRole-ListRoles-DelRole-ListRoles-Logout", func(t *testing.T) {
resp, _, errs := usersCl.Login(adminName, adminNewPwd)
if len(errs) > 0 {
t.Fatal(errs)
@ -169,5 +178,12 @@ func TestSingleUserHandlers(t *testing.T) {
t.Fatalf("role(%s) should not exist", role)
}
}
resp, _, errs = usersCl.Logout(token)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
})
}


@ -2,7 +2,7 @@ package server
import (
"io/ioutil"
"path"
// "path"
"time"
"github.com/ihexxa/gocfg"
@ -51,8 +51,8 @@ func waitForReady(addr string) bool {
return false
}
func compareFileContent(fs fspkg.ISimpleFS, filePath string, expectedContent string) (bool, error) {
reader, err := fs.GetFileReader(path.Join("files", filePath))
func compareFileContent(fs fspkg.ISimpleFS, uid, filePath string, expectedContent string) (bool, error) {
reader, err := fs.GetFileReader(filePath)
if err != nil {
return false, err
}