s
This commit is contained in:
parent
e47ee4aa8c
commit
8518072c7e
8 changed files with 134 additions and 30 deletions
|
@ -31,6 +31,7 @@ export interface MetadataResp {
|
||||||
size: number;
|
size: number;
|
||||||
modTime: string;
|
modTime: string;
|
||||||
isDir: boolean;
|
isDir: boolean;
|
||||||
|
sha1: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface UploadStatusResp {
|
export interface UploadStatusResp {
|
||||||
|
|
|
@ -453,6 +453,8 @@ export class Browser extends React.Component<Props, State, {}> {
|
||||||
</span>
|
</span>
|
||||||
/
|
/
|
||||||
<span>{FileSize(item.size, { round: 0 })}</span>
|
<span>{FileSize(item.size, { round: 0 })}</span>
|
||||||
|
/
|
||||||
|
<span>{item.sha1}</span>
|
||||||
</div>
|
</div>
|
||||||
</span>,
|
</span>,
|
||||||
])}
|
])}
|
||||||
|
|
|
@ -50,9 +50,9 @@ export class PaneSettings extends React.Component<Props, State, {}> {
|
||||||
.setPwd(this.state.oldPwd, this.state.newPwd1)
|
.setPwd(this.state.oldPwd, this.state.newPwd1)
|
||||||
.then((ok: boolean) => {
|
.then((ok: boolean) => {
|
||||||
if (ok) {
|
if (ok) {
|
||||||
alertMsg(this.props.msg.pkg.get("settings.pwd.updated"));
|
alertMsg(this.props.msg.pkg.get("update.ok"));
|
||||||
} else {
|
} else {
|
||||||
alertMsg(this.props.msg.pkg.get("settings.pwd.fail"));
|
alertMsg(this.props.msg.pkg.get("update.fail"));
|
||||||
}
|
}
|
||||||
this.setState({
|
this.setState({
|
||||||
oldPwd: "",
|
oldPwd: "",
|
||||||
|
|
|
@ -39,6 +39,7 @@ type IFileInfoStore interface {
|
||||||
SetInfo(itemPath string, info *FileInfo) error
|
SetInfo(itemPath string, info *FileInfo) error
|
||||||
DelInfo(itemPath string) error
|
DelInfo(itemPath string) error
|
||||||
SetSha1(itemPath, sign string) error
|
SetSha1(itemPath, sign string) error
|
||||||
|
GetInfos(itemPaths []string) (map[string]*FileInfo, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type FileInfoStore struct {
|
type FileInfoStore struct {
|
||||||
|
@ -145,6 +146,22 @@ func (fi *FileInfoStore) GetInfo(itemPath string) (*FileInfo, error) {
|
||||||
return info, nil
|
return info, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (fi *FileInfoStore) GetInfos(itemPaths []string) (map[string]*FileInfo, error) {
|
||||||
|
infos := map[string]*FileInfo{}
|
||||||
|
for _, itemPath := range itemPaths {
|
||||||
|
info, err := fi.GetInfo(itemPath)
|
||||||
|
if err != nil {
|
||||||
|
if !IsNotFound(err) {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
infos[itemPath] = info
|
||||||
|
}
|
||||||
|
|
||||||
|
return infos, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (fi *FileInfoStore) SetInfo(itemPath string, info *FileInfo) error {
|
func (fi *FileInfoStore) SetInfo(itemPath string, info *FileInfo) error {
|
||||||
infoStr, err := json.Marshal(info)
|
infoStr, err := json.Marshal(info)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
|
@ -232,6 +232,7 @@ type MetadataResp struct {
|
||||||
Size int64 `json:"size"`
|
Size int64 `json:"size"`
|
||||||
ModTime time.Time `json:"modTime"`
|
ModTime time.Time `json:"modTime"`
|
||||||
IsDir bool `json:"isDir"`
|
IsDir bool `json:"isDir"`
|
||||||
|
Sha1 string `json:"sha1"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *FileHandlers) Metadata(c *gin.Context) {
|
func (h *FileHandlers) Metadata(c *gin.Context) {
|
||||||
|
@ -610,6 +611,37 @@ type ListResp struct {
|
||||||
Metadatas []*MetadataResp `json:"metadatas"`
|
Metadatas []*MetadataResp `json:"metadatas"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (h *FileHandlers) MergeFileInfos(dirPath string, infos []os.FileInfo) ([]*MetadataResp, error) {
|
||||||
|
filePaths := []string{}
|
||||||
|
metadatas := []*MetadataResp{}
|
||||||
|
for _, info := range infos {
|
||||||
|
if !info.IsDir() {
|
||||||
|
filePaths = append(filePaths, filepath.Join(dirPath, info.Name()))
|
||||||
|
}
|
||||||
|
metadatas = append(metadatas, &MetadataResp{
|
||||||
|
Name: info.Name(),
|
||||||
|
Size: info.Size(),
|
||||||
|
ModTime: info.ModTime(),
|
||||||
|
IsDir: info.IsDir(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
dbInfos, err := h.deps.FileInfos().GetInfos(filePaths)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, metadata := range metadatas {
|
||||||
|
if !metadata.IsDir {
|
||||||
|
dbInfo, ok := dbInfos[filepath.Join(dirPath, metadata.Name)]
|
||||||
|
if ok {
|
||||||
|
metadata.Sha1 = dbInfo.Sha1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return metadatas, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (h *FileHandlers) List(c *gin.Context) {
|
func (h *FileHandlers) List(c *gin.Context) {
|
||||||
dirPath := c.Query(ListDirQuery)
|
dirPath := c.Query(ListDirQuery)
|
||||||
if dirPath == "" {
|
if dirPath == "" {
|
||||||
|
@ -628,14 +660,11 @@ func (h *FileHandlers) List(c *gin.Context) {
|
||||||
c.JSON(q.ErrResp(c, 500, err))
|
c.JSON(q.ErrResp(c, 500, err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
metadatas := []*MetadataResp{}
|
|
||||||
for _, info := range infos {
|
metadatas, err := h.MergeFileInfos(dirPath, infos)
|
||||||
metadatas = append(metadatas, &MetadataResp{
|
if err != nil {
|
||||||
Name: info.Name(),
|
c.JSON(q.ErrResp(c, 500, err))
|
||||||
Size: info.Size(),
|
return
|
||||||
ModTime: info.ModTime(),
|
|
||||||
IsDir: info.IsDir(),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(200, &ListResp{
|
c.JSON(200, &ListResp{
|
||||||
|
@ -647,19 +676,17 @@ func (h *FileHandlers) List(c *gin.Context) {
|
||||||
func (h *FileHandlers) ListHome(c *gin.Context) {
|
func (h *FileHandlers) ListHome(c *gin.Context) {
|
||||||
userName := c.MustGet(q.UserParam).(string)
|
userName := c.MustGet(q.UserParam).(string)
|
||||||
fsPath := q.FsRootPath(userName, "/")
|
fsPath := q.FsRootPath(userName, "/")
|
||||||
|
|
||||||
infos, err := h.deps.FS().ListDir(fsPath)
|
infos, err := h.deps.FS().ListDir(fsPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.JSON(q.ErrResp(c, 500, err))
|
c.JSON(q.ErrResp(c, 500, err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
metadatas := []*MetadataResp{}
|
|
||||||
for _, info := range infos {
|
metadatas, err := h.MergeFileInfos(fsPath, infos)
|
||||||
metadatas = append(metadatas, &MetadataResp{
|
if err != nil {
|
||||||
Name: info.Name(),
|
c.JSON(q.ErrResp(c, 500, err))
|
||||||
Size: info.Size(),
|
return
|
||||||
ModTime: info.ModTime(),
|
|
||||||
IsDir: info.IsDir(),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(200, &ListResp{
|
c.JSON(200, &ListResp{
|
||||||
|
@ -832,6 +859,38 @@ func (h *FileHandlers) ListSharings(c *gin.Context) {
|
||||||
c.JSON(200, &SharingResp{SharingDirs: dirs})
|
c.JSON(200, &SharingResp{SharingDirs: dirs})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type HashBody struct {
|
||||||
|
FilePath string `json:"filePath"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *FileHandlers) GenerateHash(c *gin.Context) {
|
||||||
|
req := &HashBody{}
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
c.JSON(q.ErrResp(c, 400, err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
role := c.MustGet(q.RoleParam).(string)
|
||||||
|
userName := c.MustGet(q.UserParam).(string)
|
||||||
|
if !h.canAccess(userName, role, "hash.gen", req.FilePath) {
|
||||||
|
c.JSON(q.ErrResp(c, 403, q.ErrAccessDenied))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
err := h.workers.TryPut(
|
||||||
|
localworker.NewMsg(
|
||||||
|
h.deps.ID().Gen(),
|
||||||
|
map[string]string{localworker.MsgTypeKey: "sha1"},
|
||||||
|
req.FilePath,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(q.ErrResp(c, 500, err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.JSON(q.Resp(200))
|
||||||
|
}
|
||||||
|
|
||||||
func (h *FileHandlers) GetStreamReader(userID uint64, fd io.Reader) (io.Reader, error) {
|
func (h *FileHandlers) GetStreamReader(userID uint64, fd io.Reader) (io.Reader, error) {
|
||||||
pr, pw := io.Pipe()
|
pr, pw := io.Pipe()
|
||||||
chunkSize := 100 * 1024 // notice: it can not be greater than limiter's token count
|
chunkSize := 100 * 1024 // notice: it can not be greater than limiter's token count
|
||||||
|
|
|
@ -69,6 +69,8 @@ func NewMultiUsersSvc(cfg gocfg.ICfg, deps *depidx.Deps) (*MultiUsersSvc, error)
|
||||||
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/fs/sharings"): true,
|
apiRuleCname(userstore.AdminRole, "DELETE", "/v1/fs/sharings"): true,
|
||||||
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/sharings"): true,
|
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/sharings"): true,
|
||||||
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/sharings/exist"): true,
|
apiRuleCname(userstore.AdminRole, "GET", "/v1/fs/sharings/exist"): true,
|
||||||
|
apiRuleCname(userstore.AdminRole, "POST", "/v1/fs/hashes/sha1"): true,
|
||||||
|
|
||||||
// user rules
|
// user rules
|
||||||
apiRuleCname(userstore.UserRole, "GET", "/"): true,
|
apiRuleCname(userstore.UserRole, "GET", "/"): true,
|
||||||
apiRuleCname(userstore.UserRole, "GET", publicPath): true,
|
apiRuleCname(userstore.UserRole, "GET", publicPath): true,
|
||||||
|
@ -97,6 +99,7 @@ func NewMultiUsersSvc(cfg gocfg.ICfg, deps *depidx.Deps) (*MultiUsersSvc, error)
|
||||||
apiRuleCname(userstore.UserRole, "DELETE", "/v1/fs/sharings"): true,
|
apiRuleCname(userstore.UserRole, "DELETE", "/v1/fs/sharings"): true,
|
||||||
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/sharings"): true,
|
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/sharings"): true,
|
||||||
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/sharings/exist"): true,
|
apiRuleCname(userstore.UserRole, "GET", "/v1/fs/sharings/exist"): true,
|
||||||
|
apiRuleCname(userstore.UserRole, "POST", "/v1/fs/hashes/sha1"): true,
|
||||||
// visitor rules
|
// visitor rules
|
||||||
apiRuleCname(userstore.VisitorRole, "GET", "/"): true,
|
apiRuleCname(userstore.VisitorRole, "GET", "/"): true,
|
||||||
apiRuleCname(userstore.VisitorRole, "GET", publicPath): true,
|
apiRuleCname(userstore.VisitorRole, "GET", publicPath): true,
|
||||||
|
|
|
@ -8,7 +8,9 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
|
"os/signal"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"syscall"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/gin-contrib/static"
|
"github.com/gin-contrib/static"
|
||||||
|
@ -41,6 +43,7 @@ type Server struct {
|
||||||
cfg gocfg.ICfg
|
cfg gocfg.ICfg
|
||||||
deps *depidx.Deps
|
deps *depidx.Deps
|
||||||
workers worker.IWorkerPool
|
workers worker.IWorkerPool
|
||||||
|
signalChan chan os.Signal
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewServer(cfg gocfg.ICfg) (*Server, error) {
|
func NewServer(cfg gocfg.ICfg) (*Server, error) {
|
||||||
|
@ -49,8 +52,9 @@ func NewServer(cfg gocfg.ICfg) (*Server, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
deps := initDeps(cfg)
|
deps := initDeps(cfg)
|
||||||
|
workers := localworker.NewWorkerPool(1024, 5000, 2, deps)
|
||||||
router := gin.Default()
|
router := gin.Default()
|
||||||
router, err := initHandlers(router, cfg, deps)
|
router, err := initHandlers(router, cfg, deps, workers)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -67,6 +71,7 @@ func NewServer(cfg gocfg.ICfg) (*Server, error) {
|
||||||
server: srv,
|
server: srv,
|
||||||
deps: deps,
|
deps: deps,
|
||||||
cfg: cfg,
|
cfg: cfg,
|
||||||
|
workers: workers,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -130,10 +135,7 @@ func initDeps(cfg gocfg.ICfg) *depidx.Deps {
|
||||||
return deps
|
return deps
|
||||||
}
|
}
|
||||||
|
|
||||||
func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps) (*gin.Engine, error) {
|
func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps, workers worker.IWorkerPool) (*gin.Engine, error) {
|
||||||
// workers
|
|
||||||
workers := localworker.NewWorkerPool(1024, 5000, 2, deps)
|
|
||||||
|
|
||||||
// handlers
|
// handlers
|
||||||
userHdrs, err := multiusers.NewMultiUsersSvc(cfg, deps)
|
userHdrs, err := multiusers.NewMultiUsersSvc(cfg, deps)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -237,6 +239,8 @@ func initHandlers(router *gin.Engine, cfg gocfg.ICfg, deps *depidx.Deps) (*gin.E
|
||||||
|
|
||||||
filesAPI.GET("/metadata", fileHdrs.Metadata)
|
filesAPI.GET("/metadata", fileHdrs.Metadata)
|
||||||
|
|
||||||
|
filesAPI.POST("/hashes/sha1", fileHdrs.GenerateHash)
|
||||||
|
|
||||||
settingsAPI := v1.Group("/settings")
|
settingsAPI := v1.Group("/settings")
|
||||||
settingsAPI.OPTIONS("/health", settingsSvc.Health)
|
settingsAPI.OPTIONS("/health", settingsSvc.Health)
|
||||||
|
|
||||||
|
@ -263,6 +267,18 @@ func initLogger(cfg gocfg.ICfg) *zap.SugaredLogger {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Server) Start() error {
|
func (s *Server) Start() error {
|
||||||
|
s.signalChan = make(chan os.Signal, 4)
|
||||||
|
signal.Notify(s.signalChan, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
go func() {
|
||||||
|
sig := <-s.signalChan
|
||||||
|
if sig != nil {
|
||||||
|
s.deps.Log().Infow(
|
||||||
|
fmt.Sprintf("received signal %s: shutting down\n", sig.String()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
s.Shutdown()
|
||||||
|
}()
|
||||||
|
|
||||||
s.deps.Log().Infow(
|
s.deps.Log().Infow(
|
||||||
"quickshare is starting",
|
"quickshare is starting",
|
||||||
"hostname:port",
|
"hostname:port",
|
||||||
|
@ -282,6 +298,7 @@ func (s *Server) Start() error {
|
||||||
|
|
||||||
func (s *Server) Shutdown() error {
|
func (s *Server) Shutdown() error {
|
||||||
// TODO: add timeout
|
// TODO: add timeout
|
||||||
|
s.workers.Stop()
|
||||||
s.deps.Log().Sync()
|
s.deps.Log().Sync()
|
||||||
return s.server.Shutdown(context.Background())
|
return s.server.Shutdown(context.Background())
|
||||||
}
|
}
|
||||||
|
|
|
@ -88,7 +88,12 @@ func (wp *WorkerPool) Start() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (wp *WorkerPool) Stop() {
|
func (wp *WorkerPool) Stop() {
|
||||||
|
defer close(wp.queue)
|
||||||
wp.on = false
|
wp.on = false
|
||||||
|
for wp.started > 0 {
|
||||||
|
wp.deps.Log().Errorf(fmt.Sprintf("%d workers still in working", wp.started))
|
||||||
|
time.Sleep(time.Duration(1) * time.Second)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (wp *WorkerPool) startWorker() {
|
func (wp *WorkerPool) startWorker() {
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue