feat(multi-home): enable a separate home dir for each user (#64)

* feat(files): make the files service support home dirs

* fix(files): add path access control and avoid redirecting paths in the backend

* feat(files): add ListHome API

* fix(server): fix access control issues

* feat(client/web): support multi-home

* feat(server): cleanup

* fix(server): fix failure to init the admin folder
Hexxa 2021-07-24 21:05:36 -05:00 committed by GitHub
parent 9748d0cab4
commit 81da97650b
18 changed files with 527 additions and 212 deletions

View file

@ -19,3 +19,5 @@ users:
  cookieTTL: 604800 # 1 week
  cookieSecure: false
  cookieHttpOnly: true
  minUserNameLen: 2
  minPwdLen: 4

View file

@ -19,3 +19,6 @@ users:
  cookieTTL: 604800 # 1 week
  cookieSecure: false
  cookieHttpOnly: true
  minUserNameLen: 4
  minPwdLen: 6

View file

@ -12,13 +12,15 @@ import (
type FilesClient struct { type FilesClient struct {
addr string addr string
r *gorequest.SuperAgent r *gorequest.SuperAgent
token *http.Cookie
} }
func NewFilesClient(addr string) *FilesClient { func NewFilesClient(addr string, token *http.Cookie) *FilesClient {
gr := gorequest.New() gr := gorequest.New()
return &FilesClient{ return &FilesClient{
addr: addr, addr: addr,
r: gr, r: gr,
token: token,
} }
} }
@ -28,6 +30,7 @@ func (cl *FilesClient) url(urlpath string) string {
func (cl *FilesClient) Create(filepath string, size int64) (*http.Response, string, []error) { func (cl *FilesClient) Create(filepath string, size int64) (*http.Response, string, []error) {
return cl.r.Post(cl.url("/v1/fs/files")). return cl.r.Post(cl.url("/v1/fs/files")).
AddCookie(cl.token).
Send(fileshdr.CreateReq{ Send(fileshdr.CreateReq{
Path: filepath, Path: filepath,
FileSize: size, FileSize: size,
@ -37,12 +40,14 @@ func (cl *FilesClient) Create(filepath string, size int64) (*http.Response, stri
func (cl *FilesClient) Delete(filepath string) (*http.Response, string, []error) { func (cl *FilesClient) Delete(filepath string) (*http.Response, string, []error) {
return cl.r.Delete(cl.url("/v1/fs/files")). return cl.r.Delete(cl.url("/v1/fs/files")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath). Param(fileshdr.FilePathQuery, filepath).
End() End()
} }
func (cl *FilesClient) Metadata(filepath string) (*http.Response, *fileshdr.MetadataResp, []error) { func (cl *FilesClient) Metadata(filepath string) (*http.Response, *fileshdr.MetadataResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/metadata")). resp, body, errs := cl.r.Get(cl.url("/v1/fs/metadata")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath). Param(fileshdr.FilePathQuery, filepath).
End() End()
@ -57,12 +62,14 @@ func (cl *FilesClient) Metadata(filepath string) (*http.Response, *fileshdr.Meta
func (cl *FilesClient) Mkdir(dirpath string) (*http.Response, string, []error) { func (cl *FilesClient) Mkdir(dirpath string) (*http.Response, string, []error) {
return cl.r.Post(cl.url("/v1/fs/dirs")). return cl.r.Post(cl.url("/v1/fs/dirs")).
AddCookie(cl.token).
Send(fileshdr.MkdirReq{Path: dirpath}). Send(fileshdr.MkdirReq{Path: dirpath}).
End() End()
} }
func (cl *FilesClient) Move(oldpath, newpath string) (*http.Response, string, []error) { func (cl *FilesClient) Move(oldpath, newpath string) (*http.Response, string, []error) {
return cl.r.Patch(cl.url("/v1/fs/files/move")). return cl.r.Patch(cl.url("/v1/fs/files/move")).
AddCookie(cl.token).
Send(fileshdr.MoveReq{ Send(fileshdr.MoveReq{
OldPath: oldpath, OldPath: oldpath,
NewPath: newpath, NewPath: newpath,
@ -72,6 +79,7 @@ func (cl *FilesClient) Move(oldpath, newpath string) (*http.Response, string, []
func (cl *FilesClient) UploadChunk(filepath string, content string, offset int64) (*http.Response, string, []error) { func (cl *FilesClient) UploadChunk(filepath string, content string, offset int64) (*http.Response, string, []error) {
return cl.r.Patch(cl.url("/v1/fs/files/chunks")). return cl.r.Patch(cl.url("/v1/fs/files/chunks")).
AddCookie(cl.token).
Send(fileshdr.UploadChunkReq{ Send(fileshdr.UploadChunkReq{
Path: filepath, Path: filepath,
Content: content, Content: content,
@ -82,6 +90,7 @@ func (cl *FilesClient) UploadChunk(filepath string, content string, offset int64
func (cl *FilesClient) UploadStatus(filepath string) (*http.Response, *fileshdr.UploadStatusResp, []error) { func (cl *FilesClient) UploadStatus(filepath string) (*http.Response, *fileshdr.UploadStatusResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/files/chunks")). resp, body, errs := cl.r.Get(cl.url("/v1/fs/files/chunks")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath). Param(fileshdr.FilePathQuery, filepath).
End() End()
@ -96,6 +105,7 @@ func (cl *FilesClient) UploadStatus(filepath string) (*http.Response, *fileshdr.
func (cl *FilesClient) Download(filepath string, headers map[string]string) (*http.Response, string, []error) { func (cl *FilesClient) Download(filepath string, headers map[string]string) (*http.Response, string, []error) {
r := cl.r.Get(cl.url("/v1/fs/files")). r := cl.r.Get(cl.url("/v1/fs/files")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath) Param(fileshdr.FilePathQuery, filepath)
for key, val := range headers { for key, val := range headers {
r = r.Set(key, val) r = r.Set(key, val)
@ -105,6 +115,7 @@ func (cl *FilesClient) Download(filepath string, headers map[string]string) (*ht
func (cl *FilesClient) List(dirPath string) (*http.Response, *fileshdr.ListResp, []error) { func (cl *FilesClient) List(dirPath string) (*http.Response, *fileshdr.ListResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/dirs")). resp, body, errs := cl.r.Get(cl.url("/v1/fs/dirs")).
AddCookie(cl.token).
Param(fileshdr.ListDirQuery, dirPath). Param(fileshdr.ListDirQuery, dirPath).
End() End()
if len(errs) > 0 { if len(errs) > 0 {
@ -121,6 +132,7 @@ func (cl *FilesClient) List(dirPath string) (*http.Response, *fileshdr.ListResp,
func (cl *FilesClient) ListUploadings() (*http.Response, *fileshdr.ListUploadingsResp, []error) { func (cl *FilesClient) ListUploadings() (*http.Response, *fileshdr.ListUploadingsResp, []error) {
resp, body, errs := cl.r.Get(cl.url("/v1/fs/uploadings")). resp, body, errs := cl.r.Get(cl.url("/v1/fs/uploadings")).
AddCookie(cl.token).
End() End()
if len(errs) > 0 { if len(errs) > 0 {
return nil, nil, errs return nil, nil, errs
@ -136,6 +148,7 @@ func (cl *FilesClient) ListUploadings() (*http.Response, *fileshdr.ListUploading
func (cl *FilesClient) DelUploading(filepath string) (*http.Response, string, []error) { func (cl *FilesClient) DelUploading(filepath string) (*http.Response, string, []error) {
return cl.r.Delete(cl.url("/v1/fs/uploadings")). return cl.r.Delete(cl.url("/v1/fs/uploadings")).
AddCookie(cl.token).
Param(fileshdr.FilePathQuery, filepath). Param(fileshdr.FilePathQuery, filepath).
End() End()
} }
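
The new cookie parameter lets this Go test client authenticate every request. A minimal usage sketch, assuming addr, adminName and adminPwd are set up as in the integration test later in this commit:

usersCl := client.NewSingleUserClient(addr)
resp, _, errs := usersCl.Login(adminName, adminPwd)
if len(errs) > 0 || resp.StatusCode != 200 {
	panic("login failed")
}
// extract the session cookie and build a token-aware files client
token := client.GetCookie(resp.Cookies(), q.TokenCookie)
cl := client.NewFilesClient(addr, token)
// every call now carries the cookie, e.g. listing a directory under the admin home ("0/...")
httpResp, listResp, _ := cl.List("0/somedir")
_, _ = httpResp, listResp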

View file

@ -136,6 +136,14 @@ export class FilesClient extends BaseClient {
}); });
}; };
listHome = (): Promise<Response<ListResp>> => {
return this.do({
method: "get",
url: `${this.url}/v1/fs/dirs/home`,
params: {},
});
};
listUploadings = (): Promise<Response<ListUploadingsResp>> => { listUploadings = (): Promise<Response<ListUploadingsResp>> => {
return this.do({ return this.do({
method: "get", method: "get",

View file

@ -19,6 +19,7 @@ export class FilesClient {
private uploadStatusMockResps: Array<Promise<Response<UploadStatusResp>>>; private uploadStatusMockResps: Array<Promise<Response<UploadStatusResp>>>;
private uploadStatusMockRespID: number = 0; private uploadStatusMockRespID: number = 0;
private listMockResp: Promise<Response<ListResp>>; private listMockResp: Promise<Response<ListResp>>;
private listHomeMockResp: Promise<Response<ListResp>>;
private listUploadingsMockResp: Promise<Response<ListUploadingsResp>>; private listUploadingsMockResp: Promise<Response<ListUploadingsResp>>;
private deleteUploadingMockResp: Promise<Response>; private deleteUploadingMockResp: Promise<Response>;
@ -58,6 +59,10 @@ export class FilesClient {
this.listMockResp = resp; this.listMockResp = resp;
}; };
listHomeMock = (resp: Promise<Response<ListResp>>) => {
this.listHomeMockResp = resp;
};
listUploadingsMock = (resp: Promise<Response<ListUploadingsResp>>) => { listUploadingsMock = (resp: Promise<Response<ListUploadingsResp>>) => {
this.listUploadingsMockResp = resp; this.listUploadingsMockResp = resp;
} }
@ -111,6 +116,10 @@ export class FilesClient {
return this.listMockResp; return this.listMockResp;
}; };
listHome = (): Promise<Response<ListResp>> => {
return this.listHomeMockResp;
};
listUploadings = (): Promise<Response<ListUploadingsResp>> => { listUploadings = (): Promise<Response<ListUploadingsResp>> => {
return this.listUploadingsMockResp; return this.listUploadingsMockResp;
}; };

View file

@ -17,6 +17,7 @@ export interface UploadStatusResp {
} }
export interface ListResp { export interface ListResp {
cwd: string;
metadatas: MetadataResp[]; metadatas: MetadataResp[];
} }
@ -50,6 +51,7 @@ export interface IFilesClient {
) => Promise<Response<UploadStatusResp>>; ) => Promise<Response<UploadStatusResp>>;
uploadStatus: (filePath: string) => Promise<Response<UploadStatusResp>>; uploadStatus: (filePath: string) => Promise<Response<UploadStatusResp>>;
list: (dirPath: string) => Promise<Response<ListResp>>; list: (dirPath: string) => Promise<Response<ListResp>>;
listHome: () => Promise<Response<ListResp>>;
listUploadings: () => Promise<Response<ListUploadingsResp>>; listUploadings: () => Promise<Response<ListUploadingsResp>>;
deleteUploading: (filePath: string) => Promise<Response>; deleteUploading: (filePath: string) => Promise<Response>;
} }

View file

@ -114,6 +114,27 @@ export class Updater {
: this.props.items; : this.props.items;
}; };
setHomeItems = async (): Promise<void> => {
const listResp = await this.filesClient.listHome();
this.props.dirPath = List<string>(listResp.data.cwd.split("/"));
this.props.items =
listResp.status === 200
? List<MetadataResp>(listResp.data.metadatas)
: this.props.items;
};
goHome = async (): Promise<void> => {
const listResp = await this.filesClient.listHome();
// how to get current dir? to dirPath?
// this.props.dirPath = dirParts;
this.props.items =
listResp.status === 200
? List<MetadataResp>(listResp.data.metadatas)
: this.props.items;
};
moveHere = async ( moveHere = async (
srcDir: string, srcDir: string,
dstDir: string, dstDir: string,

View file

@ -104,9 +104,7 @@ export class AuthPane extends React.Component<Props, State, {}> {
this.update(PanesUpdater.updateState); this.update(PanesUpdater.updateState);
// refresh // refresh
return BrowserUpdater().setItems( return BrowserUpdater().setHomeItems();
List<string>(["."])
);
} else { } else {
this.setState({ user: "", pwd: "" }); this.setState({ user: "", pwd: "" });
alert("Failed to login."); alert("Failed to login.");

View file

@ -20,7 +20,7 @@ export class StateMgr extends React.Component<Props, State, {}> {
BrowserUpdater().init(state.panel.browser); BrowserUpdater().init(state.panel.browser);
BrowserUpdater().setClients(new UsersClient(""), new FilesClient("")); BrowserUpdater().setClients(new UsersClient(""), new FilesClient(""));
BrowserUpdater() BrowserUpdater()
.setItems(state.panel.browser.dirPath) .setHomeItems()
.then(() => { .then(() => {
return BrowserUpdater().refreshUploadings(); return BrowserUpdater().refreshUploadings();
}) })

View file

@ -1,15 +1,16 @@
package fileshdr package fileshdr
import ( import (
"crypto/sha1"
"encoding/base64" "encoding/base64"
"errors" "errors"
"fmt" "fmt"
"github.com/ihexxa/quickshare/src/userstore"
"io" "io"
"net/http" "net/http"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"strings"
"time" "time"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
@ -18,14 +19,9 @@ import (
"github.com/ihexxa/quickshare/src/depidx" "github.com/ihexxa/quickshare/src/depidx"
q "github.com/ihexxa/quickshare/src/handlers" q "github.com/ihexxa/quickshare/src/handlers"
"github.com/ihexxa/quickshare/src/handlers/singleuserhdr"
) )
var ( var (
// dirs
UploadDir = "uploadings"
FsDir = "files"
// queries // queries
FilePathQuery = "fp" FilePathQuery = "fp"
ListDirQuery = "dp" ListDirQuery = "dp"
@ -45,20 +41,11 @@ type FileHandlers struct {
} }
func NewFileHandlers(cfg gocfg.ICfg, deps *depidx.Deps) (*FileHandlers, error) { func NewFileHandlers(cfg gocfg.ICfg, deps *depidx.Deps) (*FileHandlers, error) {
var err error
if err = deps.FS().MkdirAll(UploadDir); err != nil {
return nil, err
}
if err = deps.FS().MkdirAll(FsDir); err != nil {
return nil, err
}
return &FileHandlers{ return &FileHandlers{
cfg: cfg, cfg: cfg,
deps: deps, deps: deps,
uploadMgr: NewUploadMgr(deps.KV()), uploadMgr: NewUploadMgr(deps.KV()),
}, err }, nil
} }
type AutoLocker struct { type AutoLocker struct {
@ -95,12 +82,24 @@ func (lk *AutoLocker) Exec(handler func()) {
lk.c.JSON(q.ErrResp(lk.c, 500, errors.New("fail to lock the file"))) lk.c.JSON(q.ErrResp(lk.c, 500, errors.New("fail to lock the file")))
return return
} }
locked = true
locked = true locked = true
handler() handler()
} }
func (h *FileHandlers) canAccess(userID, role, path string) bool {
if role == userstore.AdminRole {
return true
}
// the file path must start with userID: <userID>/...
parts := strings.Split(path, "/")
if len(parts) < 1 {
return false
}
return parts[0] == userID
}
type CreateReq struct { type CreateReq struct {
Path string `json:"path"` Path string `json:"path"`
FileSize int64 `json:"fileSize"` FileSize int64 `json:"fileSize"`
@ -112,10 +111,15 @@ func (h *FileHandlers) Create(c *gin.Context) {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
} }
userName := c.MustGet(singleuserhdr.UserParam).(string) role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.Path) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
tmpFilePath := h.getTmpPath(req.Path) tmpFilePath := q.GetTmpPath(userID, req.Path)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath)) locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() { locker.Exec(func() {
err := h.deps.FS().Create(tmpFilePath) err := h.deps.FS().Create(tmpFilePath)
if err != nil { if err != nil {
@ -126,14 +130,14 @@ func (h *FileHandlers) Create(c *gin.Context) {
} }
return return
} }
err = h.uploadMgr.AddInfo(userName, req.Path, tmpFilePath, req.FileSize) err = h.uploadMgr.AddInfo(userID, req.Path, tmpFilePath, req.FileSize)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
} }
fileDir := h.FsPath(filepath.Dir(req.Path)) // fileDir := q.FsPath(userID, filepath.Dir(req.Path))
err = h.deps.FS().MkdirAll(fileDir) err = h.deps.FS().MkdirAll(filepath.Dir(req.Path))
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -149,8 +153,14 @@ func (h *FileHandlers) Delete(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file path"))) c.JSON(q.ErrResp(c, 400, errors.New("invalid file path")))
return return
} }
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
filePath = h.FsPath(filePath) // filePath = q.FsPath(userID, filePath)
err := h.deps.FS().Remove(filePath) err := h.deps.FS().Remove(filePath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
@ -173,8 +183,14 @@ func (h *FileHandlers) Metadata(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file path"))) c.JSON(q.ErrResp(c, 400, errors.New("invalid file path")))
return return
} }
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
filePath = h.FsPath(filePath) // filePath = q.FsPath(userID, filePath)
info, err := h.deps.FS().Stat(filePath) info, err := h.deps.FS().Stat(filePath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
@ -199,9 +215,15 @@ func (h *FileHandlers) Mkdir(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, err)) c.JSON(q.ErrResp(c, 400, err))
return return
} }
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.Path) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
dirPath := h.FsPath(req.Path) // dirPath := q.FsPath(userID, req.Path)
err := h.deps.FS().MkdirAll(dirPath) err := h.deps.FS().MkdirAll(req.Path)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -221,15 +243,21 @@ func (h *FileHandlers) Move(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, err)) c.JSON(q.ErrResp(c, 400, err))
return return
} }
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.OldPath) || !h.canAccess(userID, role, req.NewPath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
oldPath := h.FsPath(req.OldPath) // oldPath := q.FsPath(userID, req.OldPath)
newPath := h.FsPath(req.NewPath) // newPath := q.FsPath(userID, req.NewPath)
_, err := h.deps.FS().Stat(oldPath) _, err := h.deps.FS().Stat(req.OldPath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
} }
_, err = h.deps.FS().Stat(newPath) _, err = h.deps.FS().Stat(req.NewPath)
if err != nil && !os.IsNotExist(err) { if err != nil && !os.IsNotExist(err) {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -239,7 +267,7 @@ func (h *FileHandlers) Move(c *gin.Context) {
return return
} }
err = h.deps.FS().Rename(oldPath, newPath) err = h.deps.FS().Rename(req.OldPath, req.NewPath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -260,14 +288,19 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
} }
userName := c.MustGet(singleuserhdr.UserParam).(string) role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, req.Path) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
tmpFilePath := h.getTmpPath(req.Path) tmpFilePath := q.GetTmpPath(userID, req.Path)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath)) locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() { locker.Exec(func() {
var err error var err error
_, fileSize, uploaded, err := h.uploadMgr.GetInfo(userName, tmpFilePath) _, fileSize, uploaded, err := h.uploadMgr.GetInfo(userID, tmpFilePath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -288,7 +321,7 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
return return
} }
err = h.uploadMgr.SetInfo(userName, tmpFilePath, req.Offset+int64(wrote)) err = h.uploadMgr.SetInfo(userID, tmpFilePath, req.Offset+int64(wrote))
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -296,7 +329,7 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
// move the file from uploading dir to uploaded dir // move the file from uploading dir to uploaded dir
if uploaded+int64(wrote) == fileSize { if uploaded+int64(wrote) == fileSize {
fsFilePath, err := h.getFSFilePath(req.Path) fsFilePath, err := h.getFSFilePath(userID, req.Path)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -307,7 +340,7 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
c.JSON(q.ErrResp(c, 500, fmt.Errorf("%s error: %w", req.Path, err))) c.JSON(q.ErrResp(c, 500, fmt.Errorf("%s error: %w", req.Path, err)))
return return
} }
err = h.uploadMgr.DelInfo(userName, tmpFilePath) err = h.uploadMgr.DelInfo(userID, tmpFilePath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -323,8 +356,8 @@ func (h *FileHandlers) UploadChunk(c *gin.Context) {
}) })
} }
func (h *FileHandlers) getFSFilePath(reqPath string) (string, error) { func (h *FileHandlers) getFSFilePath(userID, fsFilePath string) (string, error) {
fsFilePath := h.FsPath(reqPath) // fsFilePath := q.FsPath(userID, reqPath)
_, err := h.deps.FS().Stat(fsFilePath) _, err := h.deps.FS().Stat(fsFilePath)
if err != nil { if err != nil {
if os.IsNotExist(err) { if os.IsNotExist(err) {
@ -367,12 +400,17 @@ func (h *FileHandlers) UploadStatus(c *gin.Context) {
if filePath == "" { if filePath == "" {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file name"))) c.JSON(q.ErrResp(c, 400, errors.New("invalid file name")))
} }
userName := c.MustGet(singleuserhdr.UserParam).(string) role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
tmpFilePath := h.getTmpPath(filePath) tmpFilePath := q.GetTmpPath(userID, filePath)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath)) locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() { locker.Exec(func() {
_, fileSize, uploaded, err := h.uploadMgr.GetInfo(userName, tmpFilePath) _, fileSize, uploaded, err := h.uploadMgr.GetInfo(userID, tmpFilePath)
if err != nil { if err != nil {
if os.IsNotExist(err) { if os.IsNotExist(err) {
c.JSON(q.ErrResp(c, 404, err)) c.JSON(q.ErrResp(c, 404, err))
@ -400,9 +438,16 @@ func (h *FileHandlers) Download(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file name"))) c.JSON(q.ErrResp(c, 400, errors.New("invalid file name")))
return return
} }
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, filePath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return
}
// TODO: when sharing is introduced, move the following logic to a separate method
// concurrently file accessing is managed by os // concurrently file accessing is managed by os
filePath = h.FsPath(filePath) // filePath = q.FsPath(userID, filePath)
info, err := h.deps.FS().Stat(filePath) info, err := h.deps.FS().Stat(filePath)
if err != nil { if err != nil {
if os.IsNotExist(err) { if os.IsNotExist(err) {
@ -446,7 +491,7 @@ func (h *FileHandlers) Download(c *gin.Context) {
// respond to range requests // respond to range requests
parts, err := multipart.RangeToParts(rangeVal, contentType, fmt.Sprintf("%d", info.Size())) parts, err := multipart.RangeToParts(rangeVal, contentType, fmt.Sprintf("%d", info.Size()))
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 401, err)) c.JSON(q.ErrResp(c, 400, err))
return return
} }
@ -463,17 +508,24 @@ func (h *FileHandlers) Download(c *gin.Context) {
} }
type ListResp struct { type ListResp struct {
Cwd string `json:"cwd"`
Metadatas []*MetadataResp `json:"metadatas"` Metadatas []*MetadataResp `json:"metadatas"`
} }
func (h *FileHandlers) List(c *gin.Context) { func (h *FileHandlers) List(c *gin.Context) {
dirPath := c.Query(ListDirQuery) dirPath := c.Query(ListDirQuery)
if dirPath == "" { if dirPath == "" {
c.JSON(q.ErrResp(c, 402, errors.New("incorrect path name"))) c.JSON(q.ErrResp(c, 400, errors.New("incorrect path name")))
return
}
role := c.MustGet(q.RoleParam).(string)
userID := c.MustGet(q.UserIDParam).(string)
if !h.canAccess(userID, role, dirPath) {
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
return return
} }
dirPath = h.FsPath(dirPath) // dirPath = q.FsPath(userID, dirPath)
infos, err := h.deps.FS().ListDir(dirPath) infos, err := h.deps.FS().ListDir(dirPath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
@ -489,7 +541,33 @@ func (h *FileHandlers) List(c *gin.Context) {
}) })
} }
c.JSON(200, &ListResp{Metadatas: metadatas}) c.JSON(200, &ListResp{
Cwd: dirPath,
Metadatas: metadatas,
})
}
func (h *FileHandlers) ListHome(c *gin.Context) {
userID := c.MustGet(q.UserIDParam).(string)
infos, err := h.deps.FS().ListDir(userID)
if err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
metadatas := []*MetadataResp{}
for _, info := range infos {
metadatas = append(metadatas, &MetadataResp{
Name: info.Name(),
Size: info.Size(),
ModTime: info.ModTime(),
IsDir: info.IsDir(),
})
}
c.JSON(200, &ListResp{
Cwd: userID,
Metadatas: metadatas,
})
} }
func (h *FileHandlers) Copy(c *gin.Context) { func (h *FileHandlers) Copy(c *gin.Context) {
@ -500,16 +578,8 @@ func (h *FileHandlers) CopyDir(c *gin.Context) {
c.JSON(q.NewMsgResp(501, "Not Implemented")) c.JSON(q.NewMsgResp(501, "Not Implemented"))
} }
func (h *FileHandlers) getTmpPath(filePath string) string { func lockName(filePath string) string {
return path.Join(UploadDir, fmt.Sprintf("%x", sha1.Sum([]byte(filePath)))) return filePath
}
func lockName(user, filePath string) string {
return fmt.Sprintf("%s/%s", user, filePath)
}
func (h *FileHandlers) FsPath(filePath string) string {
return path.Join(FsDir, filePath)
} }
type ListUploadingsResp struct { type ListUploadingsResp struct {
@ -517,9 +587,9 @@ type ListUploadingsResp struct {
} }
func (h *FileHandlers) ListUploadings(c *gin.Context) { func (h *FileHandlers) ListUploadings(c *gin.Context) {
userName := c.MustGet(singleuserhdr.UserParam).(string) userID := c.MustGet(q.UserIDParam).(string)
infos, err := h.uploadMgr.ListInfo(userName) infos, err := h.uploadMgr.ListInfo(userID)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -533,11 +603,11 @@ func (h *FileHandlers) DelUploading(c *gin.Context) {
c.JSON(q.ErrResp(c, 400, errors.New("invalid file path"))) c.JSON(q.ErrResp(c, 400, errors.New("invalid file path")))
return return
} }
userName := c.MustGet(singleuserhdr.UserParam).(string) userID := c.MustGet(q.UserIDParam).(string)
var err error var err error
tmpFilePath := h.getTmpPath(filePath) tmpFilePath := q.GetTmpPath(userID, filePath)
locker := h.NewAutoLocker(c, lockName(userName, tmpFilePath)) locker := h.NewAutoLocker(c, lockName(tmpFilePath))
locker.Exec(func() { locker.Exec(func() {
err = h.deps.FS().Remove(tmpFilePath) err = h.deps.FS().Remove(tmpFilePath)
if err != nil { if err != nil {
@ -545,7 +615,7 @@ func (h *FileHandlers) DelUploading(c *gin.Context) {
return return
} }
err = h.uploadMgr.DelInfo(userName, tmpFilePath) err = h.uploadMgr.DelInfo(userID, tmpFilePath)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
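
Every handler above now runs the same ownership check before touching the filesystem: admins may reach any path, while other roles may only reach paths rooted at their own user ID. A self-contained sketch of that rule, assuming the literal string "admin" stands in for the userstore.AdminRole constant:

import "strings"

// ownsPath mirrors FileHandlers.canAccess: admins pass unconditionally,
// everyone else only for paths whose first segment equals their user ID.
func ownsPath(userID, role, p string) bool {
	if role == "admin" { // assumption: stand-in for userstore.AdminRole
		return true
	}
	parts := strings.Split(p, "/")
	return len(parts) > 0 && parts[0] == userID
}

// ownsPath("7", "user", "7/docs/a.txt")  -> true
// ownsPath("7", "user", "0/docs/a.txt")  -> false (another user's home)
// ownsPath("0", "admin", "7/docs/a.txt") -> true  (admins may reach any home)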

View file

@ -3,6 +3,7 @@ package multiusers
import ( import (
"errors" "errors"
"fmt" "fmt"
"path/filepath"
"strconv" "strconv"
"time" "time"
@ -18,30 +19,95 @@ import (
var ( var (
ErrInvalidUser = errors.New("invalid user name or password") ErrInvalidUser = errors.New("invalid user name or password")
ErrInvalidConfig = errors.New("invalid user config") ErrInvalidConfig = errors.New("invalid user config")
UserIDParam = "uid"
UserParam = "user"
PwdParam = "pwd"
NewPwdParam = "newpwd"
RoleParam = "role"
ExpireParam = "expire"
TokenCookie = "tk"
) )
type MultiUsersSvc struct { type MultiUsersSvc struct {
cfg gocfg.ICfg cfg gocfg.ICfg
deps *depidx.Deps deps *depidx.Deps
apiACRules map[string]bool
} }
func NewMultiUsersSvc(cfg gocfg.ICfg, deps *depidx.Deps) (*MultiUsersSvc, error) { func NewMultiUsersSvc(cfg gocfg.ICfg, deps *depidx.Deps) (*MultiUsersSvc, error) {
publicPath := filepath.Join("/", cfg.GrabString("Server.PublicPath"))
apiACRules := map[string]bool{
// TODO: make these configurable
// admin rules
apiRuleCname(userstore.AdminRole, "GET", "/"): true,
apiRuleCname(userstore.AdminRole, "GET", publicPath): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/users/login"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/users/logout"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/users/isauthed"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/users/pwd"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/users/"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/roles/"): true,
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/roles/"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/roles/"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/fs/files"): true,
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/fs/files"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/files"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/fs/files/copy"): true,
apiRuleCname(userstore.AdminRole, "PATCH", "/v1/fs/files/move"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/dirs"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/dirs/home"): true,
apiRuleCname(userstore.AdminRole, "POST", "/v1/fs/dirs"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/metadata"): true,
apiRuleCname(userstore.AdminRole, "OPTIONS", "/v1/settings/health"): true,
// user rules
apiRuleCname(userstore.UserRole, "GET", "/"): true,
apiRuleCname(userstore.UserRole, "GET", publicPath): true,
apiRuleCname(userstore.UserRole, "POST", "/v1/users/logout"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/users/isauthed"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/users/pwd"): true,
apiRuleCname(userstore.UserRole, "POST", "/v1/fs/files"): true,
apiRuleCname(userstore.UserRole, "DELETE", "/v1/fs/files"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/files"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/files/chunks"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/fs/files/copy"): true,
apiRuleCname(userstore.UserRole, "PATCH", "/v1/fs/files/move"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/dirs"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/dirs/home"): true,
apiRuleCname(userstore.UserRole, "POST", "/v1/fs/dirs"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.UserRole, "DELETE", "/v1/fs/uploadings"): true,
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/metadata"): true,
apiRuleCname(userstore.UserRole, "OPTIONS", "/v1/settings/health"): true,
// visitor rules
apiRuleCname(userstore.VisitorRole, "GET", "/"): true,
apiRuleCname(userstore.VisitorRole, "GET", publicPath): true,
apiRuleCname(userstore.VisitorRole, "POST", "/v1/users/login"): true,
apiRuleCname(userstore.VisitorRole, "GET", "/v1/users/isauthed"): true,
apiRuleCname(userstore.VisitorRole, "GET", "/v1/fs/files"): true,
apiRuleCname(userstore.VisitorRole, "OPTIONS", "/v1/settings/health"): true,
}
return &MultiUsersSvc{ return &MultiUsersSvc{
cfg: cfg, cfg: cfg,
deps: deps, deps: deps,
apiACRules: apiACRules,
}, nil }, nil
} }
func (h *MultiUsersSvc) Init(adminName, adminPwd string) (string, error) { func (h *MultiUsersSvc) Init(adminName, adminPwd string) (string, error) {
var err error
userID := "0"
fsPath := q.HomePath(userID, "/")
if err = h.deps.FS().MkdirAll(fsPath); err != nil {
return "", err
}
uploadingsPath := q.GetTmpPath(userID, "/")
if err = h.deps.FS().MkdirAll(uploadingsPath); err != nil {
return "", err
}
// TODO: return "" for being compatible with singleuser service, should remove this // TODO: return "" for being compatible with singleuser service, should remove this
err := h.deps.Users().Init(adminName, adminPwd) err = h.deps.Users().Init(adminName, adminPwd)
return "", err return "", err
} }
@ -75,10 +141,10 @@ func (h *MultiUsersSvc) Login(c *gin.Context) {
ttl := h.cfg.GrabInt("Users.CookieTTL") ttl := h.cfg.GrabInt("Users.CookieTTL")
token, err := h.deps.Token().ToToken(map[string]string{ token, err := h.deps.Token().ToToken(map[string]string{
UserIDParam: fmt.Sprint(user.ID), q.UserIDParam: fmt.Sprint(user.ID),
UserParam: user.Name, q.UserParam: user.Name,
RoleParam: user.Role, q.RoleParam: user.Role,
ExpireParam: fmt.Sprintf("%d", time.Now().Unix()+int64(ttl)), q.ExpireParam: fmt.Sprintf("%d", time.Now().Unix()+int64(ttl)),
}) })
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
@ -87,7 +153,7 @@ func (h *MultiUsersSvc) Login(c *gin.Context) {
secure := h.cfg.GrabBool("Users.CookieSecure") secure := h.cfg.GrabBool("Users.CookieSecure")
httpOnly := h.cfg.GrabBool("Users.CookieHttpOnly") httpOnly := h.cfg.GrabBool("Users.CookieHttpOnly")
c.SetCookie(TokenCookie, token, ttl, "/", "", secure, httpOnly) c.SetCookie(q.TokenCookie, token, ttl, "/", "", secure, httpOnly)
c.JSON(q.Resp(200)) c.JSON(q.Resp(200))
} }
@ -98,12 +164,17 @@ func (h *MultiUsersSvc) Logout(c *gin.Context) {
// token alreay verified in the authn middleware // token alreay verified in the authn middleware
secure := h.cfg.GrabBool("Users.CookieSecure") secure := h.cfg.GrabBool("Users.CookieSecure")
httpOnly := h.cfg.GrabBool("Users.CookieHttpOnly") httpOnly := h.cfg.GrabBool("Users.CookieHttpOnly")
c.SetCookie(TokenCookie, "", 0, "/", "", secure, httpOnly) c.SetCookie(q.TokenCookie, "", 0, "/", "", secure, httpOnly)
c.JSON(q.Resp(200)) c.JSON(q.Resp(200))
} }
func (h *MultiUsersSvc) IsAuthed(c *gin.Context) { func (h *MultiUsersSvc) IsAuthed(c *gin.Context) {
// token alreay verified in the authn middleware // token alreay verified in the authn middleware
role := c.MustGet(q.RoleParam).(string)
if role == userstore.VisitorRole {
c.JSON(q.ErrResp(c, 401, q.ErrUnauthorized))
return
}
c.JSON(q.Resp(200)) c.JSON(q.Resp(200))
} }
@ -128,7 +199,7 @@ func (h *MultiUsersSvc) SetPwd(c *gin.Context) {
return return
} }
uid, err := strconv.ParseUint(claims[UserIDParam], 10, 64) uid, err := strconv.ParseUint(claims[q.UserIDParam], 10, 64)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -177,13 +248,13 @@ func (h *MultiUsersSvc) AddUser(c *gin.Context) {
return return
} }
// TODO: do more comprehensive validation
// Role and duplicated name will be validated by the store // Role and duplicated name will be validated by the store
if len(req.Name) < 2 { var err error
c.JSON(q.ErrResp(c, 400, errors.New("name length must be greater than 2"))) if err = h.isValidUserName(req.Name); err != nil {
c.JSON(q.ErrResp(c, 400, err))
return return
} else if len(req.Name) < 3 { } else if err = h.isValidPwd(req.Pwd); err != nil {
c.JSON(q.ErrResp(c, 400, errors.New("password length must be greater than 2"))) c.JSON(q.ErrResp(c, 400, err))
return return
} }
@ -194,6 +265,20 @@ func (h *MultiUsersSvc) AddUser(c *gin.Context) {
return return
} }
// TODO: following operations must be atomic
// TODO: check if the folders already exists
userID := c.MustGet(q.UserIDParam).(string)
homePath := q.HomePath(userID, "/")
if err = h.deps.FS().MkdirAll(homePath); err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
uploadingsPath := q.GetTmpPath(userID, "/")
if err = h.deps.FS().MkdirAll(uploadingsPath); err != nil {
c.JSON(q.ErrResp(c, 500, err))
return
}
err = h.deps.Users().AddUser(&userstore.User{ err = h.deps.Users().AddUser(&userstore.User{
ID: uid, ID: uid,
Name: req.Name, Name: req.Name,
@ -213,19 +298,19 @@ type AddRoleReq struct {
} }
func (h *MultiUsersSvc) AddRole(c *gin.Context) { func (h *MultiUsersSvc) AddRole(c *gin.Context) {
var err error
req := &AddRoleReq{} req := &AddRoleReq{}
if err := c.ShouldBindJSON(&req); err != nil { if err = c.ShouldBindJSON(&req); err != nil {
c.JSON(q.ErrResp(c, 400, err)) c.JSON(q.ErrResp(c, 400, err))
return return
} }
// TODO: do more comprehensive validation if err = h.isValidRole(req.Role); err != nil {
if len(req.Role) < 2 { c.JSON(q.ErrResp(c, 400, err))
c.JSON(q.ErrResp(c, 400, errors.New("name length must be greater than 2")))
return return
} }
err := h.deps.Users().AddRole(req.Role) err = h.deps.Users().AddRole(req.Role)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -239,19 +324,19 @@ type DelRoleReq struct {
} }
func (h *MultiUsersSvc) DelRole(c *gin.Context) { func (h *MultiUsersSvc) DelRole(c *gin.Context) {
var err error
req := &DelRoleReq{} req := &DelRoleReq{}
if err := c.ShouldBindJSON(&req); err != nil { if err := c.ShouldBindJSON(&req); err != nil {
c.JSON(q.ErrResp(c, 400, err)) c.JSON(q.ErrResp(c, 400, err))
return return
} }
// TODO: do more comprehensive validation if err = h.isValidRole(req.Role); err != nil {
if len(req.Role) < 2 { c.JSON(q.ErrResp(c, 400, err))
c.JSON(q.ErrResp(c, 400, errors.New("name length must be greater than 2")))
return return
} }
err := h.deps.Users().DelRole(req.Role) err = h.deps.Users().DelRole(req.Role)
if err != nil { if err != nil {
c.JSON(q.ErrResp(c, 500, err)) c.JSON(q.ErrResp(c, 500, err))
return return
@ -276,24 +361,47 @@ func (h *MultiUsersSvc) ListRoles(c *gin.Context) {
} }
func (h *MultiUsersSvc) getUserInfo(c *gin.Context) (map[string]string, error) { func (h *MultiUsersSvc) getUserInfo(c *gin.Context) (map[string]string, error) {
tokenStr, err := c.Cookie(TokenCookie) tokenStr, err := c.Cookie(q.TokenCookie)
if err != nil { if err != nil {
return nil, err return nil, err
} }
claims, err := h.deps.Token().FromToken( claims, err := h.deps.Token().FromToken(
tokenStr, tokenStr,
map[string]string{ map[string]string{
UserIDParam: "", q.UserIDParam: "",
UserParam: "", q.UserParam: "",
RoleParam: "", q.RoleParam: "",
ExpireParam: "", q.ExpireParam: "",
}, },
) )
if err != nil { if err != nil {
return nil, err return nil, err
} else if claims[UserIDParam] == "" || claims[UserParam] == "" { } else if claims[q.UserIDParam] == "" || claims[q.UserParam] == "" {
return nil, ErrInvalidConfig return nil, ErrInvalidConfig
} }
return claims, nil return claims, nil
} }
func (h *MultiUsersSvc) isValidUserName(userName string) error {
minUserNameLen := h.cfg.GrabInt("Users.MinUserNameLen")
if len(userName) < minUserNameLen {
return errors.New("name is too short")
}
return nil
}
func (h *MultiUsersSvc) isValidPwd(pwd string) error {
minPwdLen := h.cfg.GrabInt("Users.MinPwdLen")
if len(pwd) < minPwdLen {
return errors.New("password is too short")
}
return nil
}
func (h *MultiUsersSvc) isValidRole(role string) error {
if role == userstore.AdminRole || role == userstore.UserRole || role == userstore.VisitorRole {
return errors.New("predefined roles can not be added/deleted")
}
return h.isValidUserName(role)
}

View file

@ -1,81 +1,84 @@
package multiusers package multiusers
import ( import (
"errors" "fmt"
"net/http"
"strconv" "strconv"
"strings" "strings"
"time" "time"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
q "github.com/ihexxa/quickshare/src/handlers" q "github.com/ihexxa/quickshare/src/handlers"
"github.com/ihexxa/quickshare/src/userstore"
) )
var exposedAPIs = map[string]bool{ func apiRuleCname(role, method, path string) string {
"Login-fm": true, return fmt.Sprintf("%s-%s-%s", role, method, path)
"Health-fm": true,
} }
var publicRootPath = "/" func (h *MultiUsersSvc) AuthN() gin.HandlerFunc {
var publicStaticPath = "/static"
func IsPublicPath(accessPath string) bool {
return accessPath == publicRootPath || strings.HasPrefix(accessPath, publicStaticPath)
}
func GetHandlerName(fullname string) (string, error) {
parts := strings.Split(fullname, ".")
if len(parts) == 0 {
return "", errors.New("invalid handler name")
}
return parts[len(parts)-1], nil
}
func (h *MultiUsersSvc) Auth() gin.HandlerFunc {
return func(c *gin.Context) { return func(c *gin.Context) {
handlerName, err := GetHandlerName(c.HandlerName())
if err != nil {
c.JSON(q.ErrResp(c, 401, err))
return
}
accessPath := c.Request.URL.String()
enableAuth := h.cfg.GrabBool("Users.EnableAuth") enableAuth := h.cfg.GrabBool("Users.EnableAuth")
if enableAuth && !exposedAPIs[handlerName] && !IsPublicPath(accessPath) { claims := map[string]string{
token, err := c.Cookie(TokenCookie) q.UserIDParam: "",
q.UserParam: "",
q.RoleParam: userstore.VisitorRole,
q.ExpireParam: "",
}
if enableAuth {
token, err := c.Cookie(q.TokenCookie)
if err != nil { if err != nil {
if err != http.ErrNoCookie {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err)) c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return return
} }
// set default values if no cookie is found
claims := map[string]string{ } else if token != "" {
UserIDParam: "",
UserParam: "",
RoleParam: "",
ExpireParam: "",
}
claims, err = h.deps.Token().FromToken(token, claims) claims, err = h.deps.Token().FromToken(token, claims)
if err != nil { if err != nil {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err)) c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return return
} }
for key, val := range claims {
c.Set(key, val)
}
now := time.Now().Unix() now := time.Now().Unix()
expire, err := strconv.ParseInt(claims[ExpireParam], 10, 64) expire, err := strconv.ParseInt(claims[q.ExpireParam], 10, 64)
if err != nil || expire <= now { if err != nil || expire <= now {
c.AbortWithStatusJSON(q.ErrResp(c, 401, err)) c.AbortWithStatusJSON(q.ErrResp(c, 401, err))
return return
} }
}
// no one is allowed to download // set default values if token is empty
} else { } else {
// this is for UploadMgr to get user info to get related namespace claims[q.UserIDParam] = "0"
c.Set(UserParam, "quickshare_anonymous") claims[q.UserParam] = "admin"
claims[q.RoleParam] = userstore.AdminRole
claims[q.ExpireParam] = ""
} }
for key, val := range claims {
c.Set(key, val)
}
c.Next() c.Next()
} }
} }
func (h *MultiUsersSvc) APIAccessControl() gin.HandlerFunc {
return func(c *gin.Context) {
role := c.MustGet(q.RoleParam).(string)
method := c.Request.Method
accessPath := c.Request.URL.Path
// we don't lock the map because we only read it
if h.apiACRules[apiRuleCname(role, method, accessPath)] {
c.Next()
return
} else if accessPath == "/" || // TODO: temporarily allow accessing static resources
accessPath == "/favicon.ico" ||
strings.HasPrefix(accessPath, "/static") {
c.Next()
return
}
c.AbortWithStatusJSON(q.ErrResp(c, 403, q.ErrAccessDenied))
}
}
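
AuthN now always attaches identity claims to the context (falling back to a visitor identity when no cookie is present, or to an admin identity when Users.EnableAuth is off), and APIAccessControl then consults a flat allow-list keyed by role, HTTP method and path. A minimal sketch of that lookup, with the role string written out literally as an assumption (the real value comes from the userstore constants):

import "fmt"

// allowed mirrors MultiUsersSvc.APIAccessControl: rule keys have the shape
// "<role>-<METHOD>-<path>" (built by apiRuleCname) and any absent key means a 403.
func allowed(rules map[string]bool, role, method, path string) bool {
	return rules[fmt.Sprintf("%s-%s-%s", role, method, path)]
}

// allowed(rules, "user", "GET", "/v1/fs/dirs/home") -> true only if that rule was registered.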

View file

@ -1,11 +1,31 @@
package handlers package handlers
import ( import (
"crypto/sha1"
"errors"
"fmt" "fmt"
"path/filepath"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
) )
var (
// dirs
UploadDir = "uploadings"
FsDir = "files"
UserIDParam = "uid"
UserParam = "user"
PwdParam = "pwd"
NewPwdParam = "newpwd"
RoleParam = "role"
ExpireParam = "expire"
TokenCookie = "tk"
ErrAccessDenied = errors.New("access denied")
ErrUnauthorized = errors.New("unauthorized")
)
var statusCodes = map[int]string{ var statusCodes = map[int]string{
100: "Continue", // RFC 7231, 6.2.1 100: "Continue", // RFC 7231, 6.2.1
101: "SwitchingProtocols", // RFC 7231, 6.2.2 101: "SwitchingProtocols", // RFC 7231, 6.2.2
@ -102,3 +122,15 @@ func ErrResp(c *gin.Context, code int, err error) (int, interface{}) {
return code, gErr.JSON() return code, gErr.JSON()
} }
func FsPath(userID, relFilePath string) string {
return filepath.Join(userID, FsDir, relFilePath)
}
func HomePath(userID, relFilePath string) string {
return filepath.Join(userID, relFilePath)
}
func GetTmpPath(userID, relFilePath string) string {
return filepath.Join(UploadDir, userID, fmt.Sprintf("%x", sha1.Sum([]byte(relFilePath))))
}
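
These helpers define the new on-disk layout: a user's files live directly under a directory named after the user ID, and in-flight uploads go to a per-user subdirectory of uploadings/ keyed by the SHA-1 of the target path. A tiny sketch of the paths they produce, as if written inside this handlers package (the hex digest is left as a placeholder):

func ExamplePaths() {
	fmt.Println(HomePath("7", "/"))            // prints "7": user 7's home root
	fmt.Println(GetTmpPath("7", "docs/a.txt")) // prints "uploadings/7/<40-char sha1 hex of the path>"
}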

View file

@ -15,6 +15,8 @@ type UsersCfg struct {
CookieTTL int `json:"cookieTTL" yaml:"cookieTTL"` CookieTTL int `json:"cookieTTL" yaml:"cookieTTL"`
CookieSecure bool `json:"cookieSecure" yaml:"cookieSecure"` CookieSecure bool `json:"cookieSecure" yaml:"cookieSecure"`
CookieHttpOnly bool `json:"cookieHttpOnly" yaml:"cookieHttpOnly"` CookieHttpOnly bool `json:"cookieHttpOnly" yaml:"cookieHttpOnly"`
MinUserNameLen int `json:"minUserNameLen" yaml:"minUserNameLen"`
MinPwdLen int `json:"minPwdLen" yaml:"minPwdLen"`
} }
type Secrets struct { type Secrets struct {
@ -56,6 +58,8 @@ func DefaultConfig() (string, error) {
CookieTTL: 3600 * 24 * 7, // 1 week CookieTTL: 3600 * 24 * 7, // 1 week
CookieSecure: false, CookieSecure: false,
CookieHttpOnly: true, CookieHttpOnly: true,
MinUserNameLen: 4,
MinPwdLen: 6,
}, },
Secrets: &Secrets{ Secrets: &Secrets{
TokenSecret: "", TokenSecret: "",

View file

@ -165,7 +165,8 @@ func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps) (*gin.E
} }
// middleware // middleware
router.Use(userHdrs.Auth()) router.Use(userHdrs.AuthN())
router.Use(userHdrs.APIAccessControl())
// tmp static server // tmp static server
router.Use(static.Serve("/", static.LocalFile(publicPath, false))) router.Use(static.Serve("/", static.LocalFile(publicPath, false)))
@ -194,6 +195,7 @@ func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps) (*gin.E
filesAPI.PATCH("/files/move", fileHdrs.Move) filesAPI.PATCH("/files/move", fileHdrs.Move)
filesAPI.GET("/dirs", fileHdrs.List) filesAPI.GET("/dirs", fileHdrs.List)
filesAPI.GET("/dirs/home", fileHdrs.ListHome)
filesAPI.POST("/dirs", fileHdrs.Mkdir) filesAPI.POST("/dirs", fileHdrs.Mkdir)
// files.POST("/dirs/copy", fileHdrs.CopyDir) // files.POST("/dirs/copy", fileHdrs.CopyDir)

View file

@ -14,6 +14,7 @@ import (
"testing" "testing"
"github.com/ihexxa/quickshare/src/client" "github.com/ihexxa/quickshare/src/client"
q "github.com/ihexxa/quickshare/src/handlers"
"github.com/ihexxa/quickshare/src/handlers/fileshdr" "github.com/ihexxa/quickshare/src/handlers/fileshdr"
) )
@ -22,7 +23,9 @@ func TestFileHandlers(t *testing.T) {
root := "testData" root := "testData"
config := `{ config := `{
"users": { "users": {
"enableAuth": false "enableAuth": true,
"minUserNameLen": 2,
"minPwdLen": 4
}, },
"server": { "server": {
"debug": true "debug": true
@ -32,6 +35,11 @@ func TestFileHandlers(t *testing.T) {
} }
}` }`
adminName := "qs"
adminPwd := "quicksh@re"
os.Setenv("DEFAULTADMIN", adminName)
os.Setenv("DEFAULTADMINPWD", adminPwd)
os.RemoveAll(root) os.RemoveAll(root)
err := os.MkdirAll(root, 0700) err := os.MkdirAll(root, 0700)
if err != nil { if err != nil {
@ -42,14 +50,23 @@ func TestFileHandlers(t *testing.T) {
srv := startTestServer(config) srv := startTestServer(config)
defer srv.Shutdown() defer srv.Shutdown()
fs := srv.depsFS() fs := srv.depsFS()
cl := client.NewFilesClient(addr)
if !waitForReady(addr) { if !waitForReady(addr) {
t.Fatal("fail to start server") t.Fatal("fail to start server")
} }
usersCl := client.NewSingleUserClient(addr)
resp, _, errs := usersCl.Login(adminName, adminPwd)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
token := client.GetCookie(resp.Cookies(), q.TokenCookie)
cl := client.NewFilesClient(addr, token)
assertUploadOK := func(t *testing.T, filePath, content string) bool { assertUploadOK := func(t *testing.T, filePath, content string) bool {
cl := client.NewFilesClient(addr) cl := client.NewFilesClient(addr, token)
fileSize := int64(len([]byte(content))) fileSize := int64(len([]byte(content)))
res, _, errs := cl.Create(filePath, fileSize) res, _, errs := cl.Create(filePath, fileSize)
@ -82,7 +99,7 @@ func TestFileHandlers(t *testing.T) {
fileSize = int64(len([]byte(content))) fileSize = int64(len([]byte(content)))
) )
cl := client.NewFilesClient(addr) // cl := client.NewFilesClient(addr)
rd := rand.Intn(3) rd := rand.Intn(3)
switch rd { switch rd {
@ -151,12 +168,12 @@ func TestFileHandlers(t *testing.T) {
t.Run("test uploading files with duplicated names", func(t *testing.T) { t.Run("test uploading files with duplicated names", func(t *testing.T) {
files := map[string]string{ files := map[string]string{
"dupdir/dup_file1": "12345678", "0/dupdir/dup_file1": "12345678",
"dupdir/dup_file2.ext": "12345678", "0/dupdir/dup_file2.ext": "12345678",
} }
renames := map[string]string{ renames := map[string]string{
"dupdir/dup_file1": "dupdir/dup_file1_1", "0/dupdir/dup_file1": "0/dupdir/dup_file1_1",
"dupdir/dup_file2.ext": "dupdir/dup_file2_1.ext", "0/dupdir/dup_file2.ext": "0/dupdir/dup_file2_1.ext",
} }
for filePath, content := range files { for filePath, content := range files {
@ -183,8 +200,8 @@ func TestFileHandlers(t *testing.T) {
t.Run("test files APIs: Create-UploadChunk-UploadStatus-Metadata-Delete", func(t *testing.T) { t.Run("test files APIs: Create-UploadChunk-UploadStatus-Metadata-Delete", func(t *testing.T) {
for filePath, content := range map[string]string{ for filePath, content := range map[string]string{
"path1/f1.md": "1111 1111 1111 1111", "0/path1/f1.md": "1111 1111 1111 1111",
"path1/path2/f2.md": "1010 1010 1111 0000 0010", "0/path1/path2/f2.md": "1010 1010 1111 0000 0010",
} { } {
fileSize := int64(len([]byte(content))) fileSize := int64(len([]byte(content)))
// create a file // create a file
@ -196,7 +213,7 @@ func TestFileHandlers(t *testing.T) {
} }
// check uploading file // check uploading file
uploadFilePath := path.Join(fileshdr.UploadDir, fmt.Sprintf("%x", sha1.Sum([]byte(filePath)))) uploadFilePath := path.Join(q.UploadDir, "0", fmt.Sprintf("%x", sha1.Sum([]byte(filePath))))
info, err := fs.Stat(uploadFilePath) info, err := fs.Stat(uploadFilePath)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -242,12 +259,12 @@ func TestFileHandlers(t *testing.T) {
} }
// check uploaded file // check uploaded file
fsFilePath := filepath.Join(fileshdr.FsDir, filePath) // fsFilePath := filepath.Join("0", filePath)
info, err = fs.Stat(fsFilePath) info, err = fs.Stat(filePath)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} else if info.Name() != filepath.Base(fsFilePath) { } else if info.Name() != filepath.Base(filePath) {
t.Fatal(info.Name(), filepath.Base(fsFilePath)) t.Fatal(info.Name(), filepath.Base(filePath))
} }
// metadata // metadata
@ -273,11 +290,11 @@ func TestFileHandlers(t *testing.T) {
t.Run("test dirs APIs: Mkdir-Create-UploadChunk-List", func(t *testing.T) { t.Run("test dirs APIs: Mkdir-Create-UploadChunk-List", func(t *testing.T) {
for dirPath, files := range map[string]map[string]string{ for dirPath, files := range map[string]map[string]string{
"dir/path1": map[string]string{ "0/dir/path1": map[string]string{
"f1.md": "11111", "f1.md": "11111",
"f2.md": "22222222222", "f2.md": "22222222222",
}, },
"dir/path2/path2": map[string]string{ "0/dir/path2/path2": map[string]string{
"f3.md": "3333333", "f3.md": "3333333",
}, },
} { } {
@ -314,8 +331,8 @@ func TestFileHandlers(t *testing.T) {
}) })
t.Run("test operation APIs: Mkdir-Create-UploadChunk-Move-List", func(t *testing.T) { t.Run("test operation APIs: Mkdir-Create-UploadChunk-Move-List", func(t *testing.T) {
srcDir := "move/src" srcDir := "0/move/src"
dstDir := "move/dst" dstDir := "0/move/dst"
for _, dirPath := range []string{srcDir, dstDir} { for _, dirPath := range []string{srcDir, dstDir} {
res, _, errs := cl.Mkdir(dirPath) res, _, errs := cl.Mkdir(dirPath)
@ -366,8 +383,8 @@ func TestFileHandlers(t *testing.T) {
t.Run("test download APIs: Download(normal, ranges)", func(t *testing.T) { t.Run("test download APIs: Download(normal, ranges)", func(t *testing.T) {
for filePath, content := range map[string]string{ for filePath, content := range map[string]string{
"download/path1/f1": "123456", "0/download/path1/f1": "123456",
"download/path1/path2": "12345678", "0/download/path1/path2": "12345678",
} { } {
assertUploadOK(t, filePath, content) assertUploadOK(t, filePath, content)
@ -431,8 +448,8 @@ func TestFileHandlers(t *testing.T) {
t.Run("test uploading APIs: Create, ListUploadings, DelUploading", func(t *testing.T) { t.Run("test uploading APIs: Create, ListUploadings, DelUploading", func(t *testing.T) {
files := map[string]string{ files := map[string]string{
"uploadings/path1/f1": "123456", "0/uploadings/path1/f1": "123456",
"uploadings/path1/path2": "12345678", "0/uploadings/path1/path2": "12345678",
} }
for filePath, content := range files { for filePath, content := range files {
@ -487,10 +504,10 @@ func TestFileHandlers(t *testing.T) {
}) })
t.Run("test uploading APIs: Create, Stop, UploadChunk", func(t *testing.T) { t.Run("test uploading APIs: Create, Stop, UploadChunk", func(t *testing.T) {
cl := client.NewFilesClient(addr) // cl := client.NewFilesClient(addr)
files := map[string]string{ files := map[string]string{
"uploadings/path1/f1": "12345678", "0/uploadings/path1/f1": "12345678",
} }
for filePath, content := range files { for filePath, content := range files {
@ -542,12 +559,12 @@ func TestFileHandlers(t *testing.T) {
}) })
t.Run("test uploading APIs: Create and UploadChunk randomly", func(t *testing.T) { t.Run("test uploading APIs: Create and UploadChunk randomly", func(t *testing.T) {
cl := client.NewFilesClient(addr) // cl := client.NewFilesClient(addr)
files := map[string]string{ files := map[string]string{
"uploadings/random/path1/f1": "12345678", "0/uploadings/random/path1/f1": "12345678",
"uploadings/random/path1/f2": "87654321", "0/uploadings/random/path1/f2": "87654321",
"uploadings/random/path1/f3": "17654321", "0/uploadings/random/path1/f3": "17654321",
} }
for filePath, content := range files { for filePath, content := range files {
@ -598,7 +615,7 @@ func TestFileHandlers(t *testing.T) {
t.Fatal("incorrect uploaded size", mRes) t.Fatal("incorrect uploaded size", mRes)
} }
isEqual, err := compareFileContent(fs, filePath, content) isEqual, err := compareFileContent(fs, "0", filePath, content)
if err != nil { if err != nil {
t.Fatalf("err comparing content: %s", err) t.Fatalf("err comparing content: %s", err)
} else if !isEqual { } else if !isEqual {
@ -608,4 +625,11 @@ func TestFileHandlers(t *testing.T) {
assetDownloadOK(t, filePath, content) assetDownloadOK(t, filePath, content)
} }
}) })
resp, _, errs = usersCl.Logout(token)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
} }

View file

@ -15,7 +15,9 @@ func TestSingleUserHandlers(t *testing.T) {
root := "testData" root := "testData"
config := `{ config := `{
"users": { "users": {
"enableAuth": true "enableAuth": true,
"minUserNameLen": 2,
"minPwdLen": 4
}, },
"server": { "server": {
"debug": true "debug": true
@ -78,7 +80,7 @@ func TestSingleUserHandlers(t *testing.T) {
} }
}) })
t.Run("test users APIs: Login-AddUser-Logout-Login", func(t *testing.T) { t.Run("test users APIs: Login-AddUser-Logout-Login-Logout", func(t *testing.T) {
resp, _, errs := usersCl.Login(adminName, adminNewPwd) resp, _, errs := usersCl.Login(adminName, adminNewPwd)
if len(errs) > 0 { if len(errs) > 0 {
t.Fatal(errs) t.Fatal(errs)
@ -111,9 +113,16 @@ func TestSingleUserHandlers(t *testing.T) {
} else if resp.StatusCode != 200 { } else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode) t.Fatal(resp.StatusCode)
} }
resp, _, errs = usersCl.Logout(token)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
}) })
t.Run("test roles APIs: Login-AddRole-ListRoles-DelRole-ListRoles", func(t *testing.T) { t.Run("test roles APIs: Login-AddRole-ListRoles-DelRole-ListRoles-Logout", func(t *testing.T) {
resp, _, errs := usersCl.Login(adminName, adminNewPwd) resp, _, errs := usersCl.Login(adminName, adminNewPwd)
if len(errs) > 0 { if len(errs) > 0 {
t.Fatal(errs) t.Fatal(errs)
@ -169,5 +178,12 @@ func TestSingleUserHandlers(t *testing.T) {
t.Fatalf("role(%s) should not exist", role) t.Fatalf("role(%s) should not exist", role)
} }
} }
resp, _, errs = usersCl.Logout(token)
if len(errs) > 0 {
t.Fatal(errs)
} else if resp.StatusCode != 200 {
t.Fatal(resp.StatusCode)
}
}) })
} }

View file

@ -2,7 +2,7 @@ package server
import ( import (
"io/ioutil" "io/ioutil"
"path" // "path"
"time" "time"
"github.com/ihexxa/gocfg" "github.com/ihexxa/gocfg"
@ -51,8 +51,8 @@ func waitForReady(addr string) bool {
return false return false
} }
func compareFileContent(fs fspkg.ISimpleFS, filePath string, expectedContent string) (bool, error) { func compareFileContent(fs fspkg.ISimpleFS, uid, filePath string, expectedContent string) (bool, error) {
reader, err := fs.GetFileReader(path.Join("files", filePath)) reader, err := fs.GetFileReader(filePath)
if err != nil { if err != nil {
return false, err return false, err
} }